max_stars_repo_path stringlengths 4 286 | max_stars_repo_name stringlengths 5 119 | max_stars_count int64 0 191k | id stringlengths 1 7 | content stringlengths 6 1.03M | content_cleaned stringlengths 6 1.03M | language stringclasses 111 values | language_score float64 0.03 1 | comments stringlengths 0 556k | edu_score float64 0.32 5.03 | edu_int_score int64 0 5 |
|---|---|---|---|---|---|---|---|---|---|---|
src/plugin.py | JuggOrNot/battle.net | 0 | 6622451 | <filename>src/plugin.py<gh_stars>0
import asyncio
import json
import os
import sys
import multiprocessing
import webbrowser
import requests
import requests.cookies
import pathlib
import logging as log
import subprocess
import errno
from threading import Thread
from threading import Lock
from version import __version__ as version
from galaxy.api.consts import LocalGameState, Platform
from galaxy.api.plugin import Plugin, create_and_run_plugin
from galaxy.api.types import Achievement, Game, LicenseInfo, LocalGame, FriendInfo
from process import ProcessProvider
from local_client import LocalClient, Uninstaller, ClientNotInstalledError
from parsers import ConfigParser, DatabaseParser
from backend import BackendClient, AccessTokenExpired
from definitions import Blizzard, DataclassJSONEncoder, License_Map, ClassicGame
from game import InstalledGame
from watcher import FileWatcher
from consts import CONFIG_PATH, AGENT_PATH, SYSTEM
from consts import Platform as pf
from http_client import AuthenticatedHttpClient
from social import SocialFeatures
from galaxy.api.errors import ( AuthenticationRequired,
BackendTimeout, BackendNotAvailable, BackendError, NetworkError, UnknownError, InvalidCredentials
)
def load_product_db(product_db_path):
    """Read and return the entire contents of the Battle.net Agent's
    product.db file as raw bytes (it is a binary protobuf blob)."""
    with open(product_db_path, 'rb') as db_file:
        contents = db_file.read()
    return contents
def load_config(battlenet_config_path):
    """Parse the Battle.net client configuration file and return it as a dict.

    The file is opened in binary mode; json.load handles the decoding.
    """
    with open(battlenet_config_path, 'rb') as config_file:
        return json.load(config_file)
class BNetPlugin(Plugin):
    """GOG Galaxy integration plugin for the Blizzard Battle.net platform.

    Bridges Galaxy's plugin API (authentication, owned/local games,
    install/launch/uninstall, friends) with the locally installed
    Battle.net client and the Blizzard web backend.
    """

    PRODUCT_DB_PATH = pathlib.Path(AGENT_PATH) / 'product.db'
    CONFIG_PATH = CONFIG_PATH

    def __init__(self, reader, writer, token):
        super().__init__(Platform.Battlenet, version, reader, writer, token)
        log.info(f"Starting Battle.net plugin, version {version}")
        self.bnet_client = None
        self.local_client = LocalClient()
        self.authentication_client = AuthenticatedHttpClient(self)
        self.backend_client = BackendClient(self, self.authentication_client)
        self.social_features = SocialFeatures(self.authentication_client)
        self.error_state = False
        self.running_task = None
        self.database_parser = None
        self.config_parser = None
        self.uninstaller = None
        self.owned_games_cache = []
        self._classic_games_thread = None
        self._battlenet_games_thread = None
        self._installed_battlenet_games = {}
        self._installed_battlenet_games_lock = Lock()
        self.installed_games = self._parse_local_data()
        self.watched_running_games = set()
        # Status-change notifications are held back until Galaxy has pulled
        # the initial local-game list (flag is set in get_local_games).
        self.notifications_enabled = False
        loop = asyncio.get_event_loop()
        loop.create_task(self._register_local_data_watcher())

    async def _register_local_data_watcher(self):
        """Re-parse local installation data whenever the Battle.net config or
        product.db changes, and push resulting status updates to Galaxy."""
        async def ping(event, interval):
            # Periodic safety net: force a refresh while no game is watched.
            while True:
                await asyncio.sleep(interval)
                if not self.watched_running_games:
                    if not event.is_set():
                        event.set()
        parse_local_data_event = asyncio.Event()
        FileWatcher(self.CONFIG_PATH, parse_local_data_event, interval=1)
        FileWatcher(self.PRODUCT_DB_PATH, parse_local_data_event, interval=2.5)
        asyncio.create_task(ping(parse_local_data_event, 30))
        while True:
            await parse_local_data_event.wait()
            refreshed_games = self._parse_local_data()
            # Only push status notifications once Galaxy has fetched the
            # initial list via get_local_games. (The original test was
            # inverted, and the flag was never set because of a typo —
            # `enable_notifications` vs `notifications_enabled`.)
            if self.notifications_enabled:
                self._update_statuses(refreshed_games, self.installed_games)
            self.installed_games = refreshed_games
            parse_local_data_event.clear()

    async def _notify_about_game_stop(self, game, starting_timeout):
        """After `starting_timeout` seconds, watch `game`'s process and mark
        the game as merely Installed once the process exits."""
        id_to_watch = game.info.id
        if id_to_watch in self.watched_running_games:
            log.debug(f'Game {id_to_watch} is already watched. Skipping')
            return
        try:
            self.watched_running_games.add(id_to_watch)
            # Give the game process time to appear before scanning for it.
            await asyncio.sleep(starting_timeout)
            ProcessProvider().update_games_processes([game])
            log.info(f'Setting up process watcher for {game._processes}')
            loop = asyncio.get_event_loop()
            # wait_until_game_stops blocks, so run it off the event loop.
            await loop.run_in_executor(None, game.wait_until_game_stops)
        finally:
            self.update_local_game_status(LocalGame(id_to_watch, LocalGameState.Installed))
            self.watched_running_games.remove(id_to_watch)

    def _update_statuses(self, refreshed_games, previous_games):
        """Diff two local-game snapshots and notify Galaxy about changes."""
        for blizz_id, refr in refreshed_games.items():
            prev = previous_games.get(blizz_id, None)
            if prev is None:
                if refr.playable:
                    log.debug('Detected playable game')
                    state = LocalGameState.Installed
                else:
                    log.debug('Detected installation begin')
                    state = LocalGameState.None_
            elif refr.playable and not prev.playable:
                log.debug('Detected playable game')
                state = LocalGameState.Installed
            elif refr.last_played != prev.last_played:
                # A changed last_played timestamp means the game was launched.
                log.debug('Detected launched game')
                state = LocalGameState.Installed | LocalGameState.Running
                asyncio.create_task(self._notify_about_game_stop(refr, 5))
            else:
                continue
            log.info(f'Changing game {blizz_id} state to {state}')
            self.update_local_game_status(LocalGame(blizz_id, state))
        # Games present previously but missing now were uninstalled.
        for blizz_id, prev in previous_games.items():
            refr = refreshed_games.get(blizz_id, None)
            if refr is None:
                log.debug('Detected uninstalled game')
                state = LocalGameState.None_
                self.update_local_game_status(LocalGame(blizz_id, state))

    def _load_local_files(self):
        """Parse product.db and the Battle.net config file.

        Returns True when both files were read successfully, False otherwise
        (installing a ConfigParser(None) placeholder on access errors).
        """
        try:
            product_db = load_product_db(self.PRODUCT_DB_PATH)
            self.database_parser = DatabaseParser(product_db)
        except FileNotFoundError as e:
            log.warning(f"product.db not found: {repr(e)}")
            return False
        except OSError as e:
            # winerror 5 is "access denied" on Windows; EACCES is the POSIX
            # equivalent. (The original `except WindowsError` clause raised
            # NameError on macOS, and its `raise ()` raised a TypeError
            # instead of re-raising.)
            if e.errno == errno.EACCES or getattr(e, 'winerror', None) == 5:
                log.warning(f"product.db not accessible: {repr(e)}")
                self.config_parser = ConfigParser(None)
                return False
            raise
        else:
            if self.local_client.is_installed != self.database_parser.battlenet_present:
                self.local_client.refresh()
        try:
            config = load_config(self.CONFIG_PATH)
            self.config_parser = ConfigParser(config)
        except FileNotFoundError as e:
            log.warning(f"config file not found: {repr(e)}")
            self.config_parser = ConfigParser(None)
            return False
        except OSError as e:
            # Same access-denied handling as for product.db above.
            if e.errno == errno.EACCES or getattr(e, 'winerror', None) == 5:
                log.warning(f"config file not accessible: {repr(e)}")
                self.config_parser = ConfigParser(None)
                return False
            raise
        return True

    def _get_battlenet_installed_games(self):
        """Build the installed-games dict from product.db x config matches.

        Runs on a worker thread; publishes its result under the lock.
        """
        def _add_battlenet_game(config_game, db_game):
            # A game counts as installed only when the same uninstall tag
            # appears in both the config file and product.db.
            if config_game.uninstall_tag != db_game.uninstall_tag:
                return None
            try:
                blizzard_game = Blizzard[config_game.uid]
            except KeyError:
                log.warning(f'[{config_game.uid}] is not known blizzard game. Skipping')
                return None
            try:
                log.info(f"Adding {blizzard_game.blizzard_id} {blizzard_game.name} to installed games")
                return InstalledGame(
                    blizzard_game,
                    config_game.uninstall_tag,
                    db_game.version,
                    config_game.last_played,
                    db_game.install_path,
                    db_game.playable
                )
            except FileNotFoundError as e:
                log.warning(str(e) + '. Probably outdated product.db after uninstall. Skipping')
                return None
        games = {}
        for db_game in self.database_parser.games:
            for config_game in self.config_parser.games:
                installed_game = _add_battlenet_game(config_game, db_game)
                if installed_game:
                    games[installed_game.info.id] = installed_game
        with self._installed_battlenet_games_lock:
            self._installed_battlenet_games = games

    def _parse_local_data(self):
        """Game is considered as installed when present in both config and product.db.

        Returns a dict mapping blizzard game id -> InstalledGame (classic and
        battle.net games merged).
        """
        games = {}
        # give threads 4 seconds to finish
        join_timeout = 4
        # Thread.isAlive() was removed in Python 3.9; is_alive() is the API.
        if not self._classic_games_thread or not self._classic_games_thread.is_alive():
            self._classic_games_thread = Thread(target=self.local_client.find_classic_games, daemon=True)
            self._classic_games_thread.start()
            log.info("Started classic games thread")
        if not self._load_local_files():
            self._classic_games_thread.join(join_timeout)
            # Non-blocking acquire: skip classics if the scan is still running.
            if not self.local_client.classics_lock.acquire(False):
                # Return an empty dict (the original returned a list here,
                # which broke later `.get()` calls on installed_games).
                return {}
            else:
                installed_classics = self.local_client.installed_classics
                self.local_client.classics_lock.release()
                return installed_classics
        try:
            if SYSTEM == pf.WINDOWS and self.uninstaller is None:
                uninstaller_path = pathlib.Path(AGENT_PATH) / 'Blizzard Uninstaller.exe'
                self.uninstaller = Uninstaller(uninstaller_path)
        except FileNotFoundError as e:
            log.warning('uninstaller not found' + str(e))
        try:
            if self.local_client.is_installed != self.database_parser.battlenet_present:
                self.local_client.refresh()
            log.info(f"Games found in db {self.database_parser.games}")
            log.info(f"Games found in config {self.config_parser.games}")
            if not self._battlenet_games_thread or not self._battlenet_games_thread.is_alive():
                self._battlenet_games_thread = Thread(target=self._get_battlenet_installed_games, daemon=True)
                self._battlenet_games_thread.start()
                log.info("Started battle.net games thread")
        except Exception as e:
            log.exception(str(e))
        finally:
            self._classic_games_thread.join(join_timeout)
            # The battlenet thread may never have been created if the try
            # block failed early.
            if self._battlenet_games_thread is not None:
                self._battlenet_games_thread.join(join_timeout)
            if self.local_client.classics_lock.acquire(False):
                games = self.local_client.installed_classics
                self.local_client.classics_lock.release()
            if self._installed_battlenet_games_lock.acquire(False):
                games = {**self._installed_battlenet_games, **games}
                self._installed_battlenet_games_lock.release()
        return games

    def log_out(self):
        """Drop the authenticated session and clear cached account data."""
        if self.backend_client:
            asyncio.create_task(self.authentication_client.shutdown())
        self.authentication_client.user_details = None
        self.owned_games_cache = []

    async def open_battlenet_browser(self):
        """Open the Battle.net desktop-app download page in a web browser."""
        url = "https://www.blizzard.com/apps/battle.net/desktop"
        log.info(f'Opening battle.net website: {url}')
        loop = asyncio.get_running_loop()
        # webbrowser.open may block; run it in the default executor.
        await loop.run_in_executor(None, lambda x: webbrowser.open(x, autoraise=True), url)

    async def install_game(self, game_id):
        """Trigger installation of `game_id` through the Battle.net client."""
        if not self.authentication_client.is_authenticated():
            raise AuthenticationRequired()
        installed_game = self.installed_games.get(game_id, None)
        if installed_game and os.access(installed_game.install_path, os.F_OK):
            log.warning("Received install command on an already installed game")
            return await self.launch_game(game_id)
        if game_id in Blizzard.legacy_game_ids:
            # Legacy titles are installed via the Blizzard download page.
            if SYSTEM == pf.WINDOWS:
                platform = 'windows'
            elif SYSTEM == pf.MACOS:
                platform = 'macos'
            webbrowser.open(f"https://www.blizzard.com/download/confirmation?platform={platform}&locale=enUS&version=LIVE&id={game_id}")
            return
        try:
            self.local_client.refresh()
            log.info(f'Installing game of id {game_id}')
            self.local_client.install_game(game_id)
        except ClientNotInstalledError as e:
            log.warning(e)
            await self.open_battlenet_browser()
        except Exception as e:
            log.exception(f"Installing game {game_id} failed: {e}")

    def _open_battlenet_at_id(self, game_id):
        """Open the Battle.net client focused on `game_id`, falling back to
        just opening the client when that fails."""
        try:
            self.local_client.refresh()
            self.local_client.open_battlenet(game_id)
        except Exception as e:
            log.exception(f"Opening battlenet client on specific game_id {game_id} failed {e}")
            try:
                self.local_client.open_battlenet()
            except Exception as e:
                log.exception(f"Opening battlenet client failed {e}")

    async def uninstall_game(self, game_id):
        """Uninstall `game_id`, delegating to Battle.net where required."""
        if not self.authentication_client.is_authenticated():
            raise AuthenticationRequired()
        if game_id == 'wow_classic':
            # attempting to uninstall classic wow through protocol gives you a message that the game cannot
            # be uninstalled through protocol and you should use battle.net
            return self._open_battlenet_at_id(game_id)
        if SYSTEM == pf.MACOS:
            # No uninstaller executable on macOS; hand over to Battle.net.
            self._open_battlenet_at_id(game_id)
        else:
            try:
                installed_game = self.installed_games.get(game_id, None)
                if installed_game is None or not os.access(installed_game.install_path, os.F_OK):
                    log.error(f'Cannot uninstall {Blizzard[game_id].uid}')
                    self.update_local_game_status(LocalGame(game_id, LocalGameState.None_))
                    return
                if not isinstance(installed_game.info, ClassicGame):
                    if self.uninstaller is None:
                        raise FileNotFoundError('Uninstaller not found')
                    uninstall_tag = installed_game.uninstall_tag
                    client_lang = self.config_parser.locale_language
                    self.uninstaller.uninstall_game(installed_game, uninstall_tag, client_lang)
            except Exception as e:
                log.exception(f'Uninstalling game {game_id} failed: {e}')

    async def launch_game(self, game_id):
        """Launch `game_id` (directly for classic games, via Battle.net otherwise)."""
        if not self.authentication_client.is_authenticated():
            raise AuthenticationRequired()
        try:
            if self.installed_games is None:
                log.error(f'Launching game that is not installed: {game_id}')
                return await self.install_game(game_id)
            game = self.installed_games.get(game_id, None)
            if game is None:
                log.error(f'Launching game that is not installed: {game_id}')
                return await self.install_game(game_id)
            if isinstance(game.info, ClassicGame):
                # Classic games are started directly, bypassing Battle.net.
                log.info(f'Launching game of id: {game_id}, {game} at path {os.path.join(game.install_path, game.info.exe)}')
                if SYSTEM == pf.WINDOWS:
                    subprocess.Popen(os.path.join(game.install_path, game.info.exe))
                elif SYSTEM == pf.MACOS:
                    if not game.info.bundle_id:
                        log.warning(f"{game.name} has no bundle id, help by providing us bundle id of this game")
                        # Without a bundle id `open -b` cannot work; bail out
                        # instead of crashing on Popen(['open', '-b', None]).
                        return
                    subprocess.Popen(['open', '-b', game.info.bundle_id])
                self.update_local_game_status(LocalGame(game_id, LocalGameState.Installed | LocalGameState.Running))
                asyncio.create_task(self._notify_about_game_stop(game, 6))
                return
            self.local_client.refresh()
            log.info(f'Launching game of id: {game_id}, {game}')
            await self.local_client.launch_game(game, wait_sec=60)
            self.update_local_game_status(LocalGame(game_id, LocalGameState.Installed | LocalGameState.Running))
            self.local_client.close_window()
            asyncio.create_task(self._notify_about_game_stop(game, 3))
        except ClientNotInstalledError as e:
            log.warning(e)
            await self.open_battlenet_browser()
        except TimeoutError as e:
            log.warning(str(e))
        except Exception as e:
            log.exception(f"Launching game {game_id} failed: {e}")

    async def authenticate(self, stored_credentials=None):
        """Authenticate with stored credentials when possible; otherwise start
        (or fall back to) the interactive web login flow.

        The original fell off the end and returned None when called without
        stored credentials; now both the no-credentials and the
        invalid-status paths start the web login.
        """
        if not stored_credentials:
            return self.authentication_client.authenticate_using_login()
        auth_data = self.authentication_client.process_stored_credentials(stored_credentials)
        try:
            await self.authentication_client.create_session()
            await self.backend_client.refresh_cookies()
            auth_status = await self.backend_client.validate_access_token(auth_data.access_token)
        except (BackendNotAvailable, BackendError, NetworkError, UnknownError, BackendTimeout):
            # Transient backend problems must propagate as-is so Galaxy can retry.
            raise
        except Exception:
            raise InvalidCredentials()
        if not self.authentication_client.validate_auth_status(auth_status):
            return self.authentication_client.authenticate_using_login()
        self.authentication_client.user_details = await self.backend_client.get_user_info()
        return self.authentication_client.parse_user_details()

    async def pass_login_credentials(self, step, credentials, cookies):
        """Handle a completed step of the interactive web login flow."""
        if "logout&app=oauth" in credentials['end_uri']:
            # 2fa expired, repeat authentication
            return self.authentication_client.authenticate_using_login()
        if self.authentication_client.attempted_to_set_battle_tag:
            self.authentication_client.user_details = await self.backend_client.get_user_info()
            return self.authentication_client.parse_auth_after_setting_battletag()
        cookie_jar = self.authentication_client.parse_cookies(cookies)
        auth_data = await self.authentication_client.get_auth_data_login(cookie_jar, credentials)
        try:
            await self.authentication_client.create_session()
            await self.backend_client.refresh_cookies()
        except (BackendNotAvailable, BackendError, NetworkError, UnknownError, BackendTimeout):
            raise
        except Exception:
            raise InvalidCredentials()
        auth_status = await self.backend_client.validate_access_token(auth_data.access_token)
        if not ("authorities" in auth_status and "IS_AUTHENTICATED_FULLY" in auth_status["authorities"]):
            raise InvalidCredentials()
        self.authentication_client.user_details = await self.backend_client.get_user_info()
        self.authentication_client.set_credentials()
        return self.authentication_client.parse_battletag()

    async def get_friends(self):
        """Return the account's friends list as Galaxy FriendInfo objects."""
        if not self.authentication_client.is_authenticated():
            raise AuthenticationRequired()
        friends_list = await self.social_features.get_friends()
        # Battle-tags are not available from this endpoint, hence empty names.
        return [FriendInfo(user_id=friend.id.low, user_name='') for friend in friends_list]

    async def get_owned_games(self):
        """Return all games owned on the account, plus always-free titles."""
        if not self.authentication_client.is_authenticated():
            raise AuthenticationRequired()

        def _parse_classic_games(classic_games):
            # Map localized classic-game names onto internal Blizzard ids.
            for classic_game in classic_games["classicGames"]:
                log.info(f"looking for {classic_game} in classic games")
                try:
                    blizzard_game = Blizzard[classic_game["localizedGameName"].replace(u'\xa0', ' ')]
                    log.info(f"match! {blizzard_game}")
                    classic_game["titleId"] = blizzard_game.uid
                    classic_game["gameAccountStatus"] = "Good"
                except KeyError:
                    continue
            return classic_games

        def _get_not_added_free_games(owned_games):
            owned_games_ids = []
            for game in owned_games:
                if "titleId" in game:
                    owned_games_ids.append(str(game["titleId"]))
            # Compare as strings: owned ids were stringified above while
            # blizzard_id may be an int.
            return [{"titleId": game.blizzard_id,
                     "localizedGameName": game.name,
                     "gameAccountStatus": "Free"}
                    for game in Blizzard.free_games if str(game.blizzard_id) not in owned_games_ids]

        try:
            games = await self.backend_client.get_owned_games()
            classic_games = _parse_classic_games(await self.backend_client.get_owned_classic_games())
            owned_games = games["gameAccounts"] + classic_games["classicGames"]
            # Add wow classic if retail wow is present in owned games
            for owned_game in owned_games.copy():
                if 'titleId' in owned_game:
                    if owned_game['titleId'] == 5730135:
                        owned_games.append({'titleId': 'wow_classic',
                                            'localizedGameName': 'World of Warcraft Classic',
                                            'gameAccountStatus': owned_game['gameAccountStatus']})
            free_games_to_add = _get_not_added_free_games(owned_games)
            owned_games += free_games_to_add
            log.info(f"Owned games {owned_games} with free games")
            self.owned_games_cache = owned_games
            return [
                Game(
                    str(game["titleId"]),
                    game["localizedGameName"],
                    [],
                    LicenseInfo(License_Map[game["gameAccountStatus"]]),
                )
                for game in self.owned_games_cache if "titleId" in game
            ]
        except Exception as e:
            log.exception(f"failed to get owned games: {repr(e)}")
            raise

    async def get_local_games(self):
        """Report the current state of locally installed games to Galaxy."""
        try:
            local_games = []
            running_games = ProcessProvider().update_games_processes(self.installed_games.values())
            log.info(f"Installed games {self.installed_games.items()}")
            log.info(f"Running games {running_games}")
            for id_, game in self.installed_games.items():
                if game.playable:
                    state = LocalGameState.Installed
                    if id_ in running_games:
                        state |= LocalGameState.Running
                else:
                    state = LocalGameState.None_
                local_games.append(LocalGame(id_, state))
            return local_games
        except Exception as e:
            log.exception(f"failed to get local games: {str(e)}")
            raise
        finally:
            # Original code set `self.enable_notifications` (a typo), so the
            # watcher flag `notifications_enabled` was never switched on.
            self.notifications_enabled = True

    async def _get_wow_achievements(self):
        """Collect deduplicated WoW achievements across all characters."""
        achievements = []
        try:
            characters_data = await self.backend_client.get_wow_character_data()
            characters_data = characters_data["characters"]
            wow_character_data = await asyncio.gather(
                *[
                    self.backend_client.get_wow_character_achievements(character["realm"], character["name"])
                    for character in characters_data
                ],
                return_exceptions=True,
            )
            for data in wow_character_data:
                # Network failures for any character abort the whole fetch.
                if isinstance(data, requests.Timeout) or isinstance(data, requests.ConnectionError):
                    raise data
            wow_achievement_data = [
                list(
                    zip(
                        data["achievements"]["achievementsCompleted"],
                        data["achievements"]["achievementsCompletedTimestamp"],
                    )
                )
                for data in wow_character_data
                if type(data) is dict
            ]
            # Deduplicate achievements earned on multiple characters.
            already_in = set()
            for char_ach in wow_achievement_data:
                for ach in char_ach:
                    if ach[0] not in already_in:
                        # Timestamps arrive in milliseconds; Galaxy wants seconds.
                        achievements.append(Achievement(achievement_id=ach[0], unlock_time=int(ach[1] / 1000)))
                        already_in.add(ach[0])
        except (AccessTokenExpired, BackendError) as e:
            log.exception(str(e))
        # NOTE(review): dumps to cwd — looks like a debug leftover; confirm
        # before removing.
        with open('wow.json', 'w') as f:
            f.write(json.dumps(achievements, cls=DataclassJSONEncoder))
        return achievements

    async def _get_sc2_achievements(self):
        """Collect completed StarCraft II achievements for the account."""
        account_data = await self.backend_client.get_sc2_player_data(self.authentication_client.user_details["id"])
        # TODO what if more sc2 accounts?
        assert len(account_data) == 1
        account_data = account_data[0]
        profile_data = await self.backend_client.get_sc2_profile_data(
            account_data["regionId"], account_data["realmId"],
            account_data["profileId"]
        )
        sc2_achievement_data = [
            Achievement(achievement_id=achievement["achievementId"], unlock_time=achievement["completionDate"])
            for achievement in profile_data["earnedAchievements"]
            if achievement["isComplete"]
        ]
        # NOTE(review): dumps to cwd — looks like a debug leftover; confirm
        # before removing.
        with open('sc2.json', 'w') as f:
            f.write(json.dumps(sc2_achievement_data, cls=DataclassJSONEncoder))
        return sc2_achievement_data

    # async def get_unlocked_achievements(self, game_id):
    #     if not self.website_client.is_authenticated():
    #         raise AuthenticationRequired()
    #     try:
    #         if game_id == "21298":
    #             return await self._get_sc2_achievements()
    #         elif game_id == "5730135":
    #             return await self._get_wow_achievements()
    #         else:
    #             return []
    #     except requests.Timeout:
    #         raise BackendTimeout()
    #     except requests.ConnectionError:
    #         raise NetworkError()
    #     except Exception as e:
    #         log.exception(str(e))
    #         return []

    async def _tick_runner(self):
        """Drive the Battle.net client's own tick; record failures."""
        if not self.bnet_client:
            return
        try:
            self.error_state = await self.bnet_client.tick()
        except Exception as e:
            self.error_state = True
            log.exception(f"error state: {str(e)}")
            raise

    def tick(self):
        """Galaxy tick callback: schedule one tick runner at a time."""
        if not self.error_state and (not self.running_task or self.running_task.done()):
            self.running_task = asyncio.create_task(self._tick_runner())
        elif self.error_state:
            # Unrecoverable; exit and let Galaxy restart the plugin.
            sys.exit(1)

    def shutdown(self):
        """Close the authenticated HTTP session on plugin shutdown."""
        log.info("Plugin shutdown.")
        asyncio.create_task(self.authentication_client.shutdown())
def main():
    """Entry point: run the Battle.net plugin under the Galaxy plugin host."""
    # Required for frozen (e.g. PyInstaller) builds that spawn subprocesses.
    multiprocessing.freeze_support()
    create_and_run_plugin(BNetPlugin, sys.argv)
if __name__ == "__main__":
    main()
| <filename>src/plugin.py<gh_stars>0
import asyncio
import json
import os
import sys
import multiprocessing
import webbrowser
import requests
import requests.cookies
import pathlib
import logging as log
import subprocess
import errno
from threading import Thread
from threading import Lock
from version import __version__ as version
from galaxy.api.consts import LocalGameState, Platform
from galaxy.api.plugin import Plugin, create_and_run_plugin
from galaxy.api.types import Achievement, Game, LicenseInfo, LocalGame, FriendInfo
from process import ProcessProvider
from local_client import LocalClient, Uninstaller, ClientNotInstalledError
from parsers import ConfigParser, DatabaseParser
from backend import BackendClient, AccessTokenExpired
from definitions import Blizzard, DataclassJSONEncoder, License_Map, ClassicGame
from game import InstalledGame
from watcher import FileWatcher
from consts import CONFIG_PATH, AGENT_PATH, SYSTEM
from consts import Platform as pf
from http_client import AuthenticatedHttpClient
from social import SocialFeatures
from galaxy.api.errors import ( AuthenticationRequired,
BackendTimeout, BackendNotAvailable, BackendError, NetworkError, UnknownError, InvalidCredentials
)
def load_product_db(product_db_path):
    """Return the raw bytes of the Battle.net Agent's product.db file."""
    with open(product_db_path, 'rb') as f:
        pdb = f.read()
    return pdb
def load_config(battlenet_config_path):
    """Parse the Battle.net client configuration file as JSON and return it."""
    with open(battlenet_config_path, 'rb') as f:
        config = json.load(f)
    return config
class BNetPlugin(Plugin):
PRODUCT_DB_PATH = pathlib.Path(AGENT_PATH) / 'product.db'
CONFIG_PATH = CONFIG_PATH
def __init__(self, reader, writer, token):
super().__init__(Platform.Battlenet, version, reader, writer, token)
log.info(f"Starting Battle.net plugin, version {version}")
self.bnet_client = None
self.local_client = LocalClient()
self.authentication_client = AuthenticatedHttpClient(self)
self.backend_client = BackendClient(self, self.authentication_client)
self.social_features = SocialFeatures(self.authentication_client)
self.error_state = False
self.running_task = None
self.database_parser = None
self.config_parser = None
self.uninstaller = None
self.owned_games_cache = []
self._classic_games_thread = None
self._battlenet_games_thread = None
self._installed_battlenet_games = {}
self._installed_battlenet_games_lock = Lock()
self.installed_games = self._parse_local_data()
self.watched_running_games = set()
self.notifications_enabled = False
loop = asyncio.get_event_loop()
loop.create_task(self._register_local_data_watcher())
async def _register_local_data_watcher(self):
async def ping(event, interval):
while True:
await asyncio.sleep(interval)
if not self.watched_running_games:
if not event.is_set():
event.set()
parse_local_data_event = asyncio.Event()
FileWatcher(self.CONFIG_PATH, parse_local_data_event, interval=1)
FileWatcher(self.PRODUCT_DB_PATH, parse_local_data_event, interval=2.5)
asyncio.create_task(ping(parse_local_data_event, 30))
while True:
await parse_local_data_event.wait()
refreshed_games = self._parse_local_data()
if not self.notifications_enabled:
self._update_statuses(refreshed_games, self.installed_games)
self.installed_games = refreshed_games
parse_local_data_event.clear()
async def _notify_about_game_stop(self, game, starting_timeout):
id_to_watch = game.info.id
if id_to_watch in self.watched_running_games:
log.debug(f'Game {id_to_watch} is already watched. Skipping')
return
try:
self.watched_running_games.add(id_to_watch)
await asyncio.sleep(starting_timeout)
ProcessProvider().update_games_processes([game])
log.info(f'Setuping process watcher for {game._processes}')
loop = asyncio.get_event_loop()
await loop.run_in_executor(None, game.wait_until_game_stops)
finally:
self.update_local_game_status(LocalGame(id_to_watch, LocalGameState.Installed))
self.watched_running_games.remove(id_to_watch)
def _update_statuses(self, refreshed_games, previous_games):
for blizz_id, refr in refreshed_games.items():
prev = previous_games.get(blizz_id, None)
if prev is None:
if refr.playable:
log.debug('Detected playable game')
state = LocalGameState.Installed
else:
log.debug('Detected installation begin')
state = LocalGameState.None_
elif refr.playable and not prev.playable:
log.debug('Detected playable game')
state = LocalGameState.Installed
elif refr.last_played != prev.last_played:
log.debug('Detected launched game')
state = LocalGameState.Installed | LocalGameState.Running
asyncio.create_task(self._notify_about_game_stop(refr, 5))
else:
continue
log.info(f'Changing game {blizz_id} state to {state}')
self.update_local_game_status(LocalGame(blizz_id, state))
for blizz_id, prev in previous_games.items():
refr = refreshed_games.get(blizz_id, None)
if refr is None:
log.debug('Detected uninstalled game')
state = LocalGameState.None_
self.update_local_game_status(LocalGame(blizz_id, state))
def _load_local_files(self):
try:
product_db = load_product_db(self.PRODUCT_DB_PATH)
self.database_parser = DatabaseParser(product_db)
except FileNotFoundError as e:
log.warning(f"product.db not found: {repr(e)}")
return False
except WindowsError as e:
# 5 WindowsError access denied
if e.winerror == 5:
log.warning(f"product.db not accessible: {repr(e)}")
self.config_parser = ConfigParser(None)
return False
else:
raise ()
except OSError as e:
if e.errno == errno.EACCES:
log.warning(f"product.db not accessible: {repr(e)}")
self.config_parser = ConfigParser(None)
return False
else:
raise ()
else:
if self.local_client.is_installed != self.database_parser.battlenet_present:
self.local_client.refresh()
try:
config = load_config(self.CONFIG_PATH)
self.config_parser = ConfigParser(config)
except FileNotFoundError as e:
log.warning(f"config file not found: {repr(e)}")
self.config_parser = ConfigParser(None)
return False
except WindowsError as e:
# 5 WindowsError access denied
if e.winerror == 5:
log.warning(f"config file not accessible: {repr(e)}")
self.config_parser = ConfigParser(None)
return False
else:
raise ()
except OSError as e:
if e.errno == errno.EACCES:
log.warning(f"config file not accessible: {repr(e)}")
self.config_parser = ConfigParser(None)
return False
else:
raise ()
return True
def _get_battlenet_installed_games(self):
def _add_battlenet_game(config_game, db_game):
if config_game.uninstall_tag != db_game.uninstall_tag:
return None
try:
blizzard_game = Blizzard[config_game.uid]
except KeyError:
log.warning(f'[{config_game.uid}] is not known blizzard game. Skipping')
return None
try:
log.info(f"Adding {blizzard_game.blizzard_id} {blizzard_game.name} to installed games")
return InstalledGame(
blizzard_game,
config_game.uninstall_tag,
db_game.version,
config_game.last_played,
db_game.install_path,
db_game.playable
)
except FileNotFoundError as e:
log.warning(str(e) + '. Probably outdated product.db after uninstall. Skipping')
return None
games = {}
for db_game in self.database_parser.games:
for config_game in self.config_parser.games:
installed_game = _add_battlenet_game(config_game, db_game)
if installed_game:
games[installed_game.info.id] = installed_game
self._installed_battlenet_games_lock.acquire()
self._installed_battlenet_games = games
self._installed_battlenet_games_lock.release()
def _parse_local_data(self):
"""Game is considered as installed when present in both config and product.db"""
games = {}
# give threads 4 seconds to finish
join_timeout = 4
if not self._classic_games_thread or not self._classic_games_thread.isAlive():
self._classic_games_thread = Thread(target=self.local_client.find_classic_games, daemon=True)
self._classic_games_thread.start()
log.info("Started classic games thread")
if not self._load_local_files():
self._classic_games_thread.join(join_timeout)
if not self.local_client.classics_lock.acquire(False):
return []
else:
installed_classics = self.local_client.installed_classics
self.local_client.classics_lock.release()
return installed_classics
try:
if SYSTEM == pf.WINDOWS and self.uninstaller is None:
uninstaller_path = pathlib.Path(AGENT_PATH) / 'Blizzard Uninstaller.exe'
self.uninstaller = Uninstaller(uninstaller_path)
except FileNotFoundError as e:
log.warning('uninstaller not found' + str(e))
try:
if self.local_client.is_installed != self.database_parser.battlenet_present:
self.local_client.refresh()
log.info(f"Games found in db {self.database_parser.games}")
log.info(f"Games found in config {self.config_parser.games}")
if not self._battlenet_games_thread or not self._battlenet_games_thread.isAlive():
self._battlenet_games_thread = Thread(target=self._get_battlenet_installed_games, daemon=True)
self._battlenet_games_thread.start()
log.info("Started classic games thread")
except Exception as e:
log.exception(str(e))
finally:
self._classic_games_thread.join(join_timeout)
self._battlenet_games_thread.join(join_timeout)
if self.local_client.classics_lock.acquire(False):
games = self.local_client.installed_classics
self.local_client.classics_lock.release()
if self._installed_battlenet_games_lock.acquire(False):
games = {**self._installed_battlenet_games, **games}
self._installed_battlenet_games_lock.release()
return games
def log_out(self):
if self.backend_client:
asyncio.create_task(self.authentication_client.shutdown())
self.authentication_client.user_details = None
self.owned_games_cache = []
async def open_battlenet_browser(self):
url = f"https://www.blizzard.com/apps/battle.net/desktop"
log.info(f'Opening battle.net website: {url}')
loop = asyncio.get_running_loop()
await loop.run_in_executor(None, lambda x: webbrowser.open(x, autoraise=True), url)
async def install_game(self, game_id):
if not self.authentication_client.is_authenticated():
raise AuthenticationRequired()
installed_game = self.installed_games.get(game_id, None)
if installed_game and os.access(installed_game.install_path, os.F_OK):
log.warning("Received install command on an already installed game")
return await self.launch_game(game_id)
if game_id in Blizzard.legacy_game_ids:
if SYSTEM == pf.WINDOWS:
platform = 'windows'
elif SYSTEM == pf.MACOS:
platform = 'macos'
webbrowser.open(f"https://www.blizzard.com/download/confirmation?platform={platform}&locale=enUS&version=LIVE&id={game_id}")
return
try:
self.local_client.refresh()
log.info(f'Installing game of id {game_id}')
self.local_client.install_game(game_id)
except ClientNotInstalledError as e:
log.warning(e)
await self.open_battlenet_browser()
except Exception as e:
log.exception(f"Installing game {game_id} failed: {e}")
def _open_battlenet_at_id(self, game_id):
try:
self.local_client.refresh()
self.local_client.open_battlenet(game_id)
except Exception as e:
log.exception(f"Opening battlenet client on specific game_id {game_id} failed {e}")
try:
self.local_client.open_battlenet()
except Exception as e:
log.exception(f"Opening battlenet client failed {e}")
    async def uninstall_game(self, game_id):
        """Uninstall a game, routing through the platform-appropriate path.

        wow_classic and all macOS titles are handed to the Battle.net client
        UI; Windows titles go through the bundled uninstaller binary.

        :raises AuthenticationRequired: when no user is logged in.
        """
        if not self.authentication_client.is_authenticated():
            raise AuthenticationRequired()
        if game_id == 'wow_classic':
            # attempting to uninstall classic wow through protocol gives you a message that the game cannot
            # be uninstalled through protocol and you should use battle.net
            return self._open_battlenet_at_id(game_id)
        if SYSTEM == pf.MACOS:
            # No programmatic uninstall on macOS; hand off to the Battle.net client.
            self._open_battlenet_at_id(game_id)
        else:
            try:
                installed_game = self.installed_games.get(game_id, None)
                if installed_game is None or not os.access(installed_game.install_path, os.F_OK):
                    # Not actually on disk; just resync Galaxy's local state.
                    log.error(f'Cannot uninstall {Blizzard[game_id].uid}')
                    self.update_local_game_status(LocalGame(game_id, LocalGameState.None_))
                    return
                if not isinstance(installed_game.info, ClassicGame):
                    if self.uninstaller is None:
                        raise FileNotFoundError('Uninstaller not found')
                    uninstall_tag = installed_game.uninstall_tag
                    client_lang = self.config_parser.locale_language
                    self.uninstaller.uninstall_game(installed_game, uninstall_tag, client_lang)
            except Exception as e:
                log.exception(f'Uninstalling game {game_id} failed: {e}')
    async def launch_game(self, game_id):
        """Launch an installed game; falls back to installing it when missing.

        Classic games are spawned directly from disk; Battle.net titles are
        started through the local client.

        :raises AuthenticationRequired: when no user is logged in.
        """
        if not self.authentication_client.is_authenticated():
            raise AuthenticationRequired()
        try:
            if self.installed_games is None:
                log.error(f'Launching game that is not installed: {game_id}')
                return await self.install_game(game_id)
            game = self.installed_games.get(game_id, None)
            if game is None:
                log.error(f'Launching game that is not installed: {game_id}')
                return await self.install_game(game_id)
            if isinstance(game.info, ClassicGame):
                # Classic titles bypass the Battle.net client entirely.
                log.info(f'Launching game of id: {game_id}, {game} at path {os.path.join(game.install_path, game.info.exe)}')
                if SYSTEM == pf.WINDOWS:
                    subprocess.Popen(os.path.join(game.install_path, game.info.exe))
                elif SYSTEM == pf.MACOS:
                    if not game.info.bundle_id:
                        log.warning(f"{game.name} has no bundle id, help by providing us bundle id of this game")
                    # NOTE(review): this still runs with an empty bundle_id after
                    # the warning above — presumably `open -b ''` simply fails; confirm.
                    subprocess.Popen(['open', '-b', game.info.bundle_id])
                self.update_local_game_status(LocalGame(game_id, LocalGameState.Installed | LocalGameState.Running))
                # No process handle for classic games; poll for exit instead.
                asyncio.create_task(self._notify_about_game_stop(game, 6))
                return
            self.local_client.refresh()
            log.info(f'Launching game of id: {game_id}, {game}')
            await self.local_client.launch_game(game, wait_sec=60)
            self.update_local_game_status(LocalGame(game_id, LocalGameState.Installed | LocalGameState.Running))
            self.local_client.close_window()
            asyncio.create_task(self._notify_about_game_stop(game, 3))
        except ClientNotInstalledError as e:
            log.warning(e)
            await self.open_battlenet_browser()
        except TimeoutError as e:
            log.warning(str(e))
        except Exception as e:
            log.exception(f"Launching game {game_id} failed: {e}")
async def authenticate(self, stored_credentials=None):
try:
if stored_credentials:
auth_data = self.authentication_client.process_stored_credentials(stored_credentials)
try:
await self.authentication_client.create_session()
await self.backend_client.refresh_cookies()
auth_status = await self.backend_client.validate_access_token(auth_data.access_token)
except (BackendNotAvailable, BackendError, NetworkError, UnknownError, BackendTimeout) as e:
raise e
except Exception:
raise InvalidCredentials()
if self.authentication_client.validate_auth_status(auth_status):
self.authentication_client.user_details = await self.backend_client.get_user_info()
return self.authentication_client.parse_user_details()
else:
return self.authentication_client.authenticate_using_login()
except Exception as e:
raise e
    async def pass_login_credentials(self, step, credentials, cookies):
        """Handle the next step of Galaxy's interactive login flow.

        :param step: Galaxy's login-step counter (unused here).
        :param credentials: dict holding the final redirect uri under 'end_uri'.
        :param cookies: browser cookies captured by Galaxy's login webview.
        :raises InvalidCredentials: when session setup or token validation fails.
        """
        if "logout&app=oauth" in credentials['end_uri']:
            # 2fa expired, repeat authentication
            return self.authentication_client.authenticate_using_login()
        if self.authentication_client.attempted_to_set_battle_tag:
            # User just finished picking a battletag; finalize with fresh user info.
            self.authentication_client.user_details = await self.backend_client.get_user_info()
            return self.authentication_client.parse_auth_after_setting_battletag()
        cookie_jar = self.authentication_client.parse_cookies(cookies)
        auth_data = await self.authentication_client.get_auth_data_login(cookie_jar, credentials)
        try:
            await self.authentication_client.create_session()
            await self.backend_client.refresh_cookies()
        except (BackendNotAvailable, BackendError, NetworkError, UnknownError, BackendTimeout) as e:
            # Transient backend problems are surfaced unchanged so Galaxy can retry.
            raise e
        except Exception:
            raise InvalidCredentials()
        auth_status = await self.backend_client.validate_access_token(auth_data.access_token)
        if not ("authorities" in auth_status and "IS_AUTHENTICATED_FULLY" in auth_status["authorities"]):
            raise InvalidCredentials()
        self.authentication_client.user_details = await self.backend_client.get_user_info()
        self.authentication_client.set_credentials()
        return self.authentication_client.parse_battletag()
async def get_friends(self):
if not self.authentication_client.is_authenticated():
raise AuthenticationRequired()
friends_list = await self.social_features.get_friends()
return [FriendInfo(user_id=friend.id.low, user_name='') for friend in friends_list]
    async def get_owned_games(self):
        """Return the user's owned games, including classic and free titles.

        Results are cached on ``self.owned_games_cache`` and converted into
        Galaxy ``Game`` objects.

        :raises AuthenticationRequired: when no user is logged in.
        """
        if not self.authentication_client.is_authenticated():
            raise AuthenticationRequired()
        def _parse_classic_games(classic_games):
            # Map localized classic-game names onto internal Blizzard ids;
            # titles without a match are left untouched.
            for classic_game in classic_games["classicGames"]:
                log.info(f"looking for {classic_game} in classic games")
                try:
                    # Some localized names contain \xa0 (non-breaking space);
                    # normalize before the lookup.
                    blizzard_game = Blizzard[classic_game["localizedGameName"].replace(u'\xa0', ' ')]
                    log.info(f"match! {blizzard_game}")
                    classic_game["titleId"] = blizzard_game.uid
                    classic_game["gameAccountStatus"] = "Good"
                except KeyError:
                    continue
            return classic_games
        def _get_not_added_free_games(owned_games):
            # Free-to-play titles missing from the account are appended with
            # a "Free" status so they still show up in Galaxy.
            owned_games_ids = []
            for game in owned_games:
                if "titleId" in game:
                    owned_games_ids.append(str(game["titleId"]))
            return [{"titleId": game.blizzard_id,
                     "localizedGameName": game.name,
                     "gameAccountStatus": "Free"}
                    for game in Blizzard.free_games if game.blizzard_id not in owned_games_ids]
        try:
            games = await self.backend_client.get_owned_games()
            classic_games = _parse_classic_games(await self.backend_client.get_owned_classic_games())
            owned_games = games["gameAccounts"] + classic_games["classicGames"]
            # Add wow classic if retail wow is present in owned games
            for owned_game in owned_games.copy():
                if 'titleId' in owned_game:
                    if owned_game['titleId'] == 5730135:
                        owned_games.append({'titleId': 'wow_classic',
                                            'localizedGameName': 'World of Warcraft Classic',
                                            'gameAccountStatus': owned_game['gameAccountStatus']})
            free_games_to_add = _get_not_added_free_games(owned_games)
            owned_games += free_games_to_add
            log.info(f"Owned games {owned_games} with free games")
            self.owned_games_cache = owned_games
            return [
                Game(
                    str(game["titleId"]),
                    game["localizedGameName"],
                    [],
                    LicenseInfo(License_Map[game["gameAccountStatus"]]),
                )
                for game in self.owned_games_cache if "titleId" in game
            ]
        except Exception as e:
            log.exception(f"failed to get owned games: {repr(e)}")
            raise
async def get_local_games(self):
try:
local_games = []
running_games = ProcessProvider().update_games_processes(self.installed_games.values())
log.info(f"Installed games {self.installed_games.items()}")
log.info(f"Running games {running_games}")
for id_, game in self.installed_games.items():
if game.playable:
state = LocalGameState.Installed
if id_ in running_games:
state |= LocalGameState.Running
else:
state = LocalGameState.None_
local_games.append(LocalGame(id_, state))
return local_games
except Exception as e:
log.exception(f"failed to get local games: {str(e)}")
raise
finally:
self.enable_notifications = True
    async def _get_wow_achievements(self):
        """Collect WoW achievements across all of the user's characters.

        Achievements unlocked on several characters are reported once.
        Token/backend errors are logged and yield a partial or empty list.
        """
        achievements = []
        try:
            characters_data = await self.backend_client.get_wow_character_data()
            characters_data = characters_data["characters"]
            # Fetch every character's achievements concurrently; individual
            # failures come back as exception objects (return_exceptions=True).
            wow_character_data = await asyncio.gather(
                *[
                    self.backend_client.get_wow_character_achievements(character["realm"], character["name"])
                    for character in characters_data
                ],
                return_exceptions=True,
            )
            for data in wow_character_data:
                # Network-level failures abort the whole collection.
                if isinstance(data, requests.Timeout) or isinstance(data, requests.ConnectionError):
                    raise data
            # Pair achievement ids with their completion timestamps, skipping
            # per-character results that failed with other exceptions.
            wow_achievement_data = [
                list(
                    zip(
                        data["achievements"]["achievementsCompleted"],
                        data["achievements"]["achievementsCompletedTimestamp"],
                    )
                )
                for data in wow_character_data
                if type(data) is dict
            ]
            already_in = set()
            for char_ach in wow_achievement_data:
                for ach in char_ach:
                    if ach[0] not in already_in:
                        # Timestamps arrive in milliseconds; Galaxy expects seconds.
                        achievements.append(Achievement(achievement_id=ach[0], unlock_time=int(ach[1] / 1000)))
                        already_in.add(ach[0])
        except (AccessTokenExpired, BackendError) as e:
            log.exception(str(e))
        # NOTE(review): dumps the result into the current working directory on
        # every call — looks like leftover debugging; confirm before keeping.
        with open('wow.json', 'w') as f:
            f.write(json.dumps(achievements, cls=DataclassJSONEncoder))
        return achievements
    async def _get_sc2_achievements(self):
        """Collect StarCraft II achievements for the user's (single) profile."""
        account_data = await self.backend_client.get_sc2_player_data(self.authentication_client.user_details["id"])
        # TODO what if more sc2 accounts?
        assert len(account_data) == 1
        account_data = account_data[0]
        profile_data = await self.backend_client.get_sc2_profile_data(
            account_data["regionId"], account_data["realmId"],
            account_data["profileId"]
        )
        # Keep only fully completed achievements.
        sc2_achievement_data = [
            Achievement(achievement_id=achievement["achievementId"], unlock_time=achievement["completionDate"])
            for achievement in profile_data["earnedAchievements"]
            if achievement["isComplete"]
        ]
        # NOTE(review): debug dump into the working directory on every call;
        # presumably leftover from development — confirm before relying on it.
        with open('sc2.json', 'w') as f:
            f.write(json.dumps(sc2_achievement_data, cls=DataclassJSONEncoder))
        return sc2_achievement_data
# async def get_unlocked_achievements(self, game_id):
# if not self.website_client.is_authenticated():
# raise AuthenticationRequired()
# try:
# if game_id == "21298":
# return await self._get_sc2_achievements()
# elif game_id == "5730135":
# return await self._get_wow_achievements()
# else:
# return []
# except requests.Timeout:
# raise BackendTimeout()
# except requests.ConnectionError:
# raise NetworkError()
# except Exception as e:
# log.exception(str(e))
# return []
async def _tick_runner(self):
if not self.bnet_client:
return
try:
self.error_state = await self.bnet_client.tick()
except Exception as e:
self.error_state = True
log.exception(f"error state: {str(e)}")
raise
def tick(self):
if not self.error_state and (not self.running_task or self.running_task.done()):
self.running_task = asyncio.create_task(self._tick_runner())
elif self.error_state:
sys.exit(1)
    def shutdown(self):
        """Called by Galaxy on plugin teardown; close the auth session."""
        log.info("Plugin shutdown.")
        asyncio.create_task(self.authentication_client.shutdown())
def main():
    """Plugin entry point: start the Galaxy plugin event loop."""
    # Required on Windows for frozen executables that use multiprocessing.
    multiprocessing.freeze_support()
    create_and_run_plugin(BNetPlugin, sys.argv)
if __name__ == "__main__":
    main()
| en | 0.691143 | # 5 WindowsError access denied # 5 WindowsError access denied Game is considered as installed when present in both config and product.db # give threads 4 seconds to finish # attempting to uninstall classic wow through protocol gives you a message that the game cannot # be uninstalled through protocol and you should use battle.net # 2fa expired, repeat authentication # Add wow classic if retail wow is present in owned games # TODO what if more sc2 accounts? # async def get_unlocked_achievements(self, game_id): # if not self.website_client.is_authenticated(): # raise AuthenticationRequired() # try: # if game_id == "21298": # return await self._get_sc2_achievements() # elif game_id == "5730135": # return await self._get_wow_achievements() # else: # return [] # except requests.Timeout: # raise BackendTimeout() # except requests.ConnectionError: # raise NetworkError() # except Exception as e: # log.exception(str(e)) # return [] | 1.922734 | 2 |
features/steps/sum_ABCs.py | percebus/leetcode-excercises | 0 | 6622452 | from aloe import step, world
from problems.meta.coding.puzzles.warmup.sum_abc import get_sum
@step("three integers (?P<A>.+), (?P<B>.+), and (?P<C>.+)")
def step_impl(self, A, B, C):
world.A = int(A)
world.B = int(B)
world.C = int(C)
@step("I call getSum")
def step_impl(self):
world.result = get_sum(world.A, world.B, world.C)
@step("it determines their (?P<result>.+)")
def step_impl(self, result):
expected = int(result)
assert expected == world.result, f'expected:{expected}, got:{world.result}'
| from aloe import step, world
from problems.meta.coding.puzzles.warmup.sum_abc import get_sum
@step("three integers (?P<A>.+), (?P<B>.+), and (?P<C>.+)")
def step_impl(self, A, B, C):
world.A = int(A)
world.B = int(B)
world.C = int(C)
@step("I call getSum")
def step_impl(self):
world.result = get_sum(world.A, world.B, world.C)
@step("it determines their (?P<result>.+)")
def step_impl(self, result):
expected = int(result)
assert expected == world.result, f'expected:{expected}, got:{world.result}'
| none | 1 | 3.054697 | 3 | |
src/errors.py | Maskime/complex-extract | 0 | 6622453 | from types import TracebackType
from typing import Optional
class ComplexExtractException(Exception):
    """Base exception type for complex-extract errors.

    The previous ``__init__`` and ``with_traceback`` overrides only delegated
    to :class:`Exception` with unchanged arguments, so they were removed; the
    inherited behavior is identical.
    """
| from types import TracebackType
from typing import Optional
class ComplexExtractException(Exception):
def __init__(self, *args: object) -> None:
super().__init__(*args)
def with_traceback(self, tb: Optional[TracebackType]) -> BaseException:
return super().with_traceback(tb)
| none | 1 | 2.91983 | 3 | |
strongholds-and-followers/retainer/check.py | kbsletten/AvraeAliases | 0 | 6622454 | embed
<drac2>
argv = &ARGS&
args = argparse(argv)
GVARS = load_json(get_gvar("c1ee7d0f-750d-4f92-8d87-70fa22c07a81"))
CLASSES = [load_json(get_gvar(gvar)) for gvar in GVARS]
ABILITIES = {
"dex": "dex",
"dexterity": "dex",
"cha": "cha",
"charisma": "cha",
"con": "con",
"constitution": "con",
"int": "int",
"intelligence": "int",
"str": "str",
"strength": "str",
"wis": "wis",
"wisdom": "wis"
}
SKILLS = {
"acrobatics": "dex",
"animalhandling": "wis",
"athletics": "str",
"arcana": "int",
"deception": "cha",
"history": "int",
"investigation": "int",
"insight": "wis",
"intimidation": "cha",
"medicine": "wis",
"nature": "int",
"perception": "wis",
"performance": "cha",
"persuasion": "cha",
"religion": "int",
"sleightofhand": "dex",
"survival": "wis",
"stealth": "dex"
}
DISPLAY = {
"acrobatics": "Acrobatics",
"animalhandling": "Animal Handling",
"athletics": "Athletics",
"arcana": "Arcana",
"deception": "Deception",
"dex": "Dexterity",
"dexterity": "Dexterity",
"cha": "Charisma",
"charisma": "Charisma",
"con": "Constitution",
"constitution": "Constitution",
"history": "History",
"investigation": "Investigation",
"insight": "Insight",
"int": "Intelligence",
"intelligence": "Intelligence",
"intimidation": "Intimidation",
"medicine": "Medicine",
"nature": "Nature",
"perception": "Perception",
"performance": "Performance",
"persuasion": "Persuasion",
"religion": "Religion",
"sleightofhand": "Sleight of Hand",
"survival": "Survival",
"stealth": "Stealth",
"str": "Strength",
"strength": "Strength",
"wis": "Wisdom",
"wisdom": "Wisdom"
}
char = character()
ret_name = get("_retainerName")
ret_class = get("_retainerClass")
init = combat()
ret_comb = init.get_combatant(ret_name) if init and ret_name else None
ret_name = "An unknown creature" if "-h" in argv else ret_comb.name if ret_comb else ret_name
cl_info = [c for c in CLASSES if c["name"] == ret_class]
cl_info = cl_info[0] if cl_info else None
check_name = argv[0] if argv else ''
check_name = [name for name in DISPLAY.keys() if check_name in name] if check_name else None
check_name = check_name[0] if check_name else 'acrobatics'
if check_name in ABILITIES.keys():
ability_name = ABILITIES[check_name]
check_name = None
else:
ability_name = argv[1] if argv and len(argv) > 1 else None
ability_name = [name for name in ABILITIES.keys() if ability_name in name] if ability_name else None
ability_name = ABILITIES[ability_name[0] if ability_name else SKILLS[check_name]]
formatted = f"{DISPLAY[ability_name]} ({DISPLAY[check_name]})" if check_name else DISPLAY[ability_name]
is_primary = cl_info and ability_name == cl_info["primary"]
is_proficient = "pro" in argv or (cl_info and check_name and check_name in cl_info["skills"])
ret_bonus = 3 + (1 if is_primary else 0) + (2 if is_proficient else 0)
title = f"{char.name} doesn't have a retainer!"
if ret_name:
title = f"{ret_name} makes a {formatted} check!"
fields = ""
desc = ""
base_bonus = [str(ret_bonus)] + args.get("b") + ([effect.effect["cb"] for effect in ret_comb.effects if "cb" in effect.effect] if ret_comb else [])
reroll = max(1, int(args.last("rr", 1)))
dc = int(args.last("dc")) if args.last("dc") else None
if dc:
desc = f"""**DC {dc}**
"""
success = 0
failure = 0
for i in range(1, reroll + 1):
has_adv = "adv" in argv or f"adv{i}" in argv
has_dis = "dis" in argv or f"dis{i}" in argv
adv = 1 if has_adv and not has_dis else -1 if has_dis and not has_adv else 0
adv = 2 if adv == 1 and "ea" in argv else adv
check = {
-1: "2d20kl1",
0: "1d20",
1: "2d20kh1",
2: "3d20kh1"
}[adv]
check = f"""{check}mi{args.last("mc")}""" if args.last("mc") else check
bonus = base_bonus + args.get(f"b{i}")
check_roll = vroll("+".join([check] + bonus))
if dc:
if check_roll.total >= dc:
success += 1
else:
failure += 1
if reroll == 1:
desc += f"{check_roll}"
else:
fields += f"""-f "Check {i}|{check_roll}|inline" """
fields += "\n".join([f"""-f "{field}" """ for field in args.get("f")])
</drac2>
-title "{{args.last("title").replace("[name]", ret_name).replace("[cname]", formatted) if args.last("title") else title}}"
{{f"""-phrase "{args.last("phrase")}" """ if args.last("phrase") else ""}}
{{f"""-desc "{desc}" """ if desc else ""}}
{{fields}}
-footer "{{f"{success} Successes | {failure} Failures" if reroll > 1 and dc else "Success!" if success else "Failure!" if failure else "!retainer check | kbsletten#5710"}}"
-color <color> -thumb {{get("_retainerImage") if "-h" not in argv else ""}}
| embed
<drac2>
argv = &ARGS&
args = argparse(argv)
GVARS = load_json(get_gvar("c1ee7d0f-750d-4f92-8d87-70fa22c07a81"))
CLASSES = [load_json(get_gvar(gvar)) for gvar in GVARS]
ABILITIES = {
"dex": "dex",
"dexterity": "dex",
"cha": "cha",
"charisma": "cha",
"con": "con",
"constitution": "con",
"int": "int",
"intelligence": "int",
"str": "str",
"strength": "str",
"wis": "wis",
"wisdom": "wis"
}
SKILLS = {
"acrobatics": "dex",
"animalhandling": "wis",
"athletics": "str",
"arcana": "int",
"deception": "cha",
"history": "int",
"investigation": "int",
"insight": "wis",
"intimidation": "cha",
"medicine": "wis",
"nature": "int",
"perception": "wis",
"performance": "cha",
"persuasion": "cha",
"religion": "int",
"sleightofhand": "dex",
"survival": "wis",
"stealth": "dex"
}
DISPLAY = {
"acrobatics": "Acrobatics",
"animalhandling": "Animal Handling",
"athletics": "Athletics",
"arcana": "Arcana",
"deception": "Deception",
"dex": "Dexterity",
"dexterity": "Dexterity",
"cha": "Charisma",
"charisma": "Charisma",
"con": "Constitution",
"constitution": "Constitution",
"history": "History",
"investigation": "Investigation",
"insight": "Insight",
"int": "Intelligence",
"intelligence": "Intelligence",
"intimidation": "Intimidation",
"medicine": "Medicine",
"nature": "Nature",
"perception": "Perception",
"performance": "Performance",
"persuasion": "Persuasion",
"religion": "Religion",
"sleightofhand": "Sleight of Hand",
"survival": "Survival",
"stealth": "Stealth",
"str": "Strength",
"strength": "Strength",
"wis": "Wisdom",
"wisdom": "Wisdom"
}
char = character()
ret_name = get("_retainerName")
ret_class = get("_retainerClass")
init = combat()
ret_comb = init.get_combatant(ret_name) if init and ret_name else None
ret_name = "An unknown creature" if "-h" in argv else ret_comb.name if ret_comb else ret_name
cl_info = [c for c in CLASSES if c["name"] == ret_class]
cl_info = cl_info[0] if cl_info else None
check_name = argv[0] if argv else ''
check_name = [name for name in DISPLAY.keys() if check_name in name] if check_name else None
check_name = check_name[0] if check_name else 'acrobatics'
if check_name in ABILITIES.keys():
ability_name = ABILITIES[check_name]
check_name = None
else:
ability_name = argv[1] if argv and len(argv) > 1 else None
ability_name = [name for name in ABILITIES.keys() if ability_name in name] if ability_name else None
ability_name = ABILITIES[ability_name[0] if ability_name else SKILLS[check_name]]
formatted = f"{DISPLAY[ability_name]} ({DISPLAY[check_name]})" if check_name else DISPLAY[ability_name]
is_primary = cl_info and ability_name == cl_info["primary"]
is_proficient = "pro" in argv or (cl_info and check_name and check_name in cl_info["skills"])
ret_bonus = 3 + (1 if is_primary else 0) + (2 if is_proficient else 0)
title = f"{char.name} doesn't have a retainer!"
if ret_name:
title = f"{ret_name} makes a {formatted} check!"
fields = ""
desc = ""
base_bonus = [str(ret_bonus)] + args.get("b") + ([effect.effect["cb"] for effect in ret_comb.effects if "cb" in effect.effect] if ret_comb else [])
reroll = max(1, int(args.last("rr", 1)))
dc = int(args.last("dc")) if args.last("dc") else None
if dc:
desc = f"""**DC {dc}**
"""
success = 0
failure = 0
for i in range(1, reroll + 1):
has_adv = "adv" in argv or f"adv{i}" in argv
has_dis = "dis" in argv or f"dis{i}" in argv
adv = 1 if has_adv and not has_dis else -1 if has_dis and not has_adv else 0
adv = 2 if adv == 1 and "ea" in argv else adv
check = {
-1: "2d20kl1",
0: "1d20",
1: "2d20kh1",
2: "3d20kh1"
}[adv]
check = f"""{check}mi{args.last("mc")}""" if args.last("mc") else check
bonus = base_bonus + args.get(f"b{i}")
check_roll = vroll("+".join([check] + bonus))
if dc:
if check_roll.total >= dc:
success += 1
else:
failure += 1
if reroll == 1:
desc += f"{check_roll}"
else:
fields += f"""-f "Check {i}|{check_roll}|inline" """
fields += "\n".join([f"""-f "{field}" """ for field in args.get("f")])
</drac2>
-title "{{args.last("title").replace("[name]", ret_name).replace("[cname]", formatted) if args.last("title") else title}}"
{{f"""-phrase "{args.last("phrase")}" """ if args.last("phrase") else ""}}
{{f"""-desc "{desc}" """ if desc else ""}}
{{fields}}
-footer "{{f"{success} Successes | {failure} Failures" if reroll > 1 and dc else "Success!" if success else "Failure!" if failure else "!retainer check | kbsletten#5710"}}"
-color <color> -thumb {{get("_retainerImage") if "-h" not in argv else ""}}
| en | 0.313211 | **DC {dc}** {check}mi{args.last("mc")} -f "Check {i}|{check_roll}|inline" -f "{field}" -phrase "{args.last("phrase")}" -desc "{desc}" #5710"}}" | 2.550015 | 3 |
poppy/bootstrap.py | jqxin2006/poppy | 0 | 6622455 | <reponame>jqxin2006/poppy<gh_stars>0
# Copyright (c) 2014 Rackspace, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from oslo.config import cfg
from stevedore import driver
from stevedore import named
from poppy.common import decorators
from poppy.openstack.common import log
LOG = log.getLogger(__name__)
# Options registered on the root configuration group.
_DEFAULT_OPTIONS = [
    cfg.StrOpt('datacenter', default='',
               help='Host datacenter of the API'),
    cfg.BoolOpt('project_id_in_url', default=False,
                help='Indicating if the project id'
                     ' should be presented in the url')
]

# Driver-selection options, registered under the [drivers] config group.
_DRIVER_OPTIONS = [
    cfg.StrOpt('transport', default='pecan',
               help='Transport driver to use'),
    cfg.StrOpt('manager', default='default',
               help='Manager driver to use'),
    cfg.StrOpt('storage', default='mockdb',
               help='Storage driver to use'),
    cfg.ListOpt('providers', default=['mock'],
                help='Provider driver(s) to use'),
    cfg.StrOpt('dns', default='default',
               help='DNS driver to use'),
]

# Name of the oslo.config group holding the driver options above.
_DRIVER_GROUP = 'drivers'
class Bootstrap(object):
    """Defines the CDN bootstrapper.

    The bootstrap loads up drivers per a given configuration, and
    manages their lifetimes. Each driver property is lazy: the driver is
    loaded on first access and cached by ``decorators.lazy_property``.
    """

    def __init__(self, conf):
        self.conf = conf
        self.conf.register_opts(_DEFAULT_OPTIONS)
        self.conf.register_opts(_DRIVER_OPTIONS, group=_DRIVER_GROUP)
        self.driver_conf = self.conf[_DRIVER_GROUP]
        log.setup('poppy')
        LOG.debug("init bootstrap")

    @decorators.lazy_property(write=False)
    def dns(self):
        """Load and return the configured DNS driver (lazily, once).

        :returns: the DNS driver instance, or None when loading failed
            (the failure is only logged).
        """
        LOG.debug((u'Loading DNS driver'))
        # create the driver manager to load the appropriate drivers
        dns_type = 'poppy.dns'
        dns_name = self.driver_conf.dns
        args = [self.conf]
        try:
            mgr = driver.DriverManager(namespace=dns_type,
                                       name=dns_name,
                                       invoke_on_load=True,
                                       invoke_args=args)
            return mgr.driver
        except RuntimeError as exc:
            # NOTE(review): failure is swallowed; callers see None.
            LOG.exception(exc)

    @decorators.lazy_property(write=False)
    def provider(self):
        """Load all configured provider extensions (lazily, once).

        :returns: a stevedore NamedExtensionManager holding the providers.
        """
        LOG.debug((u'Loading provider extension(s)'))
        # create the driver manager to load the appropriate drivers
        provider_type = 'poppy.provider'
        args = [self.conf]
        provider_names = self.driver_conf.providers
        mgr = named.NamedExtensionManager(namespace=provider_type,
                                          names=provider_names,
                                          invoke_on_load=True,
                                          invoke_args=args)
        return mgr

    @decorators.lazy_property(write=False)
    def storage(self):
        """Load and return the configured storage driver (lazily, once).

        :returns: the storage driver instance, or None when loading failed
            (the failure is only logged).
        """
        LOG.debug((u'Loading storage driver'))
        # create the driver manager to load the appropriate drivers
        storage_type = 'poppy.storage'
        storage_name = self.driver_conf.storage
        args = [self.conf]
        try:
            mgr = driver.DriverManager(namespace=storage_type,
                                       name=storage_name,
                                       invoke_on_load=True,
                                       invoke_args=args)
            return mgr.driver
        except RuntimeError as exc:
            LOG.exception(exc)

    @decorators.lazy_property(write=False)
    def manager(self):
        """Load and return the manager driver, wired to storage/provider/dns.

        :returns: the manager driver instance, or None when loading failed
            (the failure is only logged).
        """
        LOG.debug((u'Loading manager driver'))
        # create the driver manager to load the appropriate drivers
        manager_type = 'poppy.manager'
        manager_name = self.driver_conf.manager
        args = [self.conf, self.storage, self.provider, self.dns]
        try:
            mgr = driver.DriverManager(namespace=manager_type,
                                       name=manager_name,
                                       invoke_on_load=True,
                                       invoke_args=args)
            return mgr.driver
        except RuntimeError as exc:
            LOG.exception(exc)

    @decorators.lazy_property(write=False)
    def transport(self):
        """Load and return the transport driver, wired to the manager.

        :returns: the transport driver instance, or None when loading failed
            (the failure is only logged).
        """
        LOG.debug("loading transport")
        # create the driver manager to load the appropriate drivers
        transport_type = 'poppy.transport'
        transport_name = self.driver_conf.transport
        args = [self.conf, self.manager]
        LOG.debug((u'Loading transport driver: %s'), transport_name)
        try:
            mgr = driver.DriverManager(namespace=transport_type,
                                       name=transport_name,
                                       invoke_on_load=True,
                                       invoke_args=args)
            return mgr.driver
        except RuntimeError as exc:
            LOG.exception(exc)

    def run(self):
        # Blocks: hands control to the transport's listen loop.
        self.transport.listen()
| # Copyright (c) 2014 Rackspace, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from oslo.config import cfg
from stevedore import driver
from stevedore import named
from poppy.common import decorators
from poppy.openstack.common import log
LOG = log.getLogger(__name__)
_DEFAULT_OPTIONS = [
cfg.StrOpt('datacenter', default='',
help='Host datacenter of the API'),
cfg.BoolOpt('project_id_in_url', default=False,
help='Indicating if the project id'
' should be presented in the url')
]
_DRIVER_OPTIONS = [
cfg.StrOpt('transport', default='pecan',
help='Transport driver to use'),
cfg.StrOpt('manager', default='default',
help='Manager driver to use'),
cfg.StrOpt('storage', default='mockdb',
help='Storage driver to use'),
cfg.ListOpt('providers', default=['mock'],
help='Provider driver(s) to use'),
cfg.StrOpt('dns', default='default',
help='DNS driver to use'),
]
_DRIVER_GROUP = 'drivers'
class Bootstrap(object):
"""Defines the CDN bootstrapper.
The bootstrap loads up drivers per a given configuration, and
manages their lifetimes.
"""
def __init__(self, conf):
self.conf = conf
self.conf.register_opts(_DEFAULT_OPTIONS)
self.conf.register_opts(_DRIVER_OPTIONS, group=_DRIVER_GROUP)
self.driver_conf = self.conf[_DRIVER_GROUP]
log.setup('poppy')
LOG.debug("init bootstrap")
@decorators.lazy_property(write=False)
def dns(self):
"""DNS."""
LOG.debug((u'Loading DNS driver'))
# create the driver manager to load the appropriate drivers
dns_type = 'poppy.dns'
dns_name = self.driver_conf.dns
args = [self.conf]
try:
mgr = driver.DriverManager(namespace=dns_type,
name=dns_name,
invoke_on_load=True,
invoke_args=args)
return mgr.driver
except RuntimeError as exc:
LOG.exception(exc)
@decorators.lazy_property(write=False)
def provider(self):
"""provider.
:returns mgr
"""
LOG.debug((u'Loading provider extension(s)'))
# create the driver manager to load the appropriate drivers
provider_type = 'poppy.provider'
args = [self.conf]
provider_names = self.driver_conf.providers
mgr = named.NamedExtensionManager(namespace=provider_type,
names=provider_names,
invoke_on_load=True,
invoke_args=args)
return mgr
@decorators.lazy_property(write=False)
def storage(self):
"""storage.
:returns mgr driver
"""
LOG.debug((u'Loading storage driver'))
# create the driver manager to load the appropriate drivers
storage_type = 'poppy.storage'
storage_name = self.driver_conf.storage
args = [self.conf]
try:
mgr = driver.DriverManager(namespace=storage_type,
name=storage_name,
invoke_on_load=True,
invoke_args=args)
return mgr.driver
except RuntimeError as exc:
LOG.exception(exc)
@decorators.lazy_property(write=False)
def manager(self):
"""manager.
:returns mgr driver
"""
LOG.debug((u'Loading manager driver'))
# create the driver manager to load the appropriate drivers
manager_type = 'poppy.manager'
manager_name = self.driver_conf.manager
args = [self.conf, self.storage, self.provider, self.dns]
try:
mgr = driver.DriverManager(namespace=manager_type,
name=manager_name,
invoke_on_load=True,
invoke_args=args)
return mgr.driver
except RuntimeError as exc:
LOG.exception(exc)
@decorators.lazy_property(write=False)
def transport(self):
"""transport.
:returns mgr driver
"""
LOG.debug("loading transport")
# create the driver manager to load the appropriate drivers
transport_type = 'poppy.transport'
transport_name = self.driver_conf.transport
args = [self.conf, self.manager]
LOG.debug((u'Loading transport driver: %s'), transport_name)
try:
mgr = driver.DriverManager(namespace=transport_type,
name=transport_name,
invoke_on_load=True,
invoke_args=args)
return mgr.driver
except RuntimeError as exc:
LOG.exception(exc)
def run(self):
self.transport.listen() | en | 0.82973 | # Copyright (c) 2014 Rackspace, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. Defines the CDN bootstrapper. The bootstrap loads up drivers per a given configuration, and manages their lifetimes. DNS. # create the driver manager to load the appropriate drivers provider. :returns mgr # create the driver manager to load the appropriate drivers storage. :returns mgr driver # create the driver manager to load the appropriate drivers manager. :returns mgr driver # create the driver manager to load the appropriate drivers transport. :returns mgr driver # create the driver manager to load the appropriate drivers | 1.867443 | 2 |
script.py | krishkiran/Unlimited-Filter-Bot | 0 | 6622456 | <gh_stars>0
class Script(object):
START_MSG = """Hi <b>{}, </b> I'm <b>Samantha v2.O</b> Auto Filter Bot Specially Made For 2kTamilmovies Controlled By [This Person](https://t.me/DChoco_Boy) 😎
"""
HELP_MSG = """
<i>No One Gonna Help You 🤣</i>
"""
ABOUT_MSG = """ Hi I'm <b>Samantha v2.O</b> Auto Filter Bot Specially Made For 2kTamilmovies Controlled By [This Person](https://t.me/DChoco_Boy) 😎
"""
| class Script(object):
START_MSG = """Hi <b>{}, </b> I'm <b>Samantha v2.O</b> Auto Filter Bot Specially Made For 2kTamilmovies Controlled By [This Person](https://t.me/DChoco_Boy) 😎
"""
HELP_MSG = """
<i>No One Gonna Help You 🤣</i>
"""
ABOUT_MSG = """ Hi I'm <b>Samantha v2.O</b> Auto Filter Bot Specially Made For 2kTamilmovies Controlled By [This Person](https://t.me/DChoco_Boy) 😎
""" | en | 0.414802 | Hi <b>{}, </b> I'm <b>Samantha v2.O</b> Auto Filter Bot Specially Made For 2kTamilmovies Controlled By [This Person](https://t.me/DChoco_Boy) 😎 <i>No One Gonna Help You 🤣</i> Hi I'm <b>Samantha v2.O</b> Auto Filter Bot Specially Made For 2kTamilmovies Controlled By [This Person](https://t.me/DChoco_Boy) 😎 | 2.168456 | 2 |
PublicTools/bin/tools/PackTool/src/util/data/xxtea.py | lyzardiar/RETools | 2 | 6622457 | <filename>PublicTools/bin/tools/PackTool/src/util/data/xxtea.py
import struct
_DELTA = 0x9E3779B9
ENCODE_FORMAT = "utf-8"
def _long2str(v, w):
n = (len(v) - 1) << 2
if w:
m = v[-1]
if (m < n - 3) or (m > n): return ''
n = m
s = struct.pack('<%iL' % len(v), *v)
return s[0:n] if w else s
def _str2long(s, w):
n = len(s)
m = (4 - (n & 3) & 3) + n
s = s.ljust(m, b"\0")
v = list(struct.unpack('<%iL' % (m >> 2), s))
if w: v.append(n)
return v
def Is(absFilePath, head = "DDTX"):
tmpFile = open(absFilePath, "rb")
content = tmpFile.read()
tmpFile.close()
# already encoded
if len(content) >=4 and content[0:4] == b'DDTX':
return True
return False
def encrypt(sourceObj, key, head = b"DDTX"):
if len(sourceObj) >=4 and sourceObj[0:4] == head:
return sourceObj
if isinstance(key,str):
key = key.encode(ENCODE_FORMAT)
if sourceObj == '': return sourceObj
v = _str2long(sourceObj, True)
k = _str2long(key.ljust(16, b"\0"), False)
n = len(v) - 1
z = v[n]
y = v[0]
sum = 0
q = 6 + 52 // (n + 1)
while q > 0:
sum = (sum + _DELTA) & 0xffffffff
e = sum >> 2 & 3
for p in range(n):
y = v[p + 1]
v[p] = (v[p] + ((z >> 5 ^ y << 2) + (y >> 3 ^ z << 4) ^ (sum ^ y) + (k[p & 3 ^ e] ^ z))) & 0xffffffff
z = v[p]
y = v[0]
v[n] = (v[n] + ((z >> 5 ^ y << 2) + (y >> 3 ^ z << 4) ^ (sum ^ y) + (k[n & 3 ^ e] ^ z))) & 0xffffffff
z = v[n]
q -= 1
return _long2str(v, False)
def decrypt(str, key):
if str == '': return str
v = _str2long(str, False)
k = _str2long(key.ljust(16, b"\0"), False)
n = len(v) - 1
z = v[n]
y = v[0]
q = 6 + 52 // (n + 1)
sum = (q * _DELTA) & 0xffffffff
while (sum != 0):
e = sum >> 2 & 3
for p in range(n, 0, -1):
z = v[p - 1]
v[p] = (v[p] - ((z >> 5 ^ y << 2) + (y >> 3 ^ z << 4) ^ (sum ^ y) + (k[p & 3 ^ e] ^ z))) & 0xffffffff
y = v[p]
z = v[n]
v[0] = (v[0] - ((z >> 5 ^ y << 2) + (y >> 3 ^ z << 4) ^ (sum ^ y) + (k[0 & 3 ^ e] ^ z))) & 0xffffffff
y = v[0]
sum = (sum - _DELTA) & 0xffffffff
return _long2str(v, True) | <filename>PublicTools/bin/tools/PackTool/src/util/data/xxtea.py
import struct
_DELTA = 0x9E3779B9
ENCODE_FORMAT = "utf-8"
def _long2str(v, w):
n = (len(v) - 1) << 2
if w:
m = v[-1]
if (m < n - 3) or (m > n): return ''
n = m
s = struct.pack('<%iL' % len(v), *v)
return s[0:n] if w else s
def _str2long(s, w):
n = len(s)
m = (4 - (n & 3) & 3) + n
s = s.ljust(m, b"\0")
v = list(struct.unpack('<%iL' % (m >> 2), s))
if w: v.append(n)
return v
def Is(absFilePath, head = "DDTX"):
tmpFile = open(absFilePath, "rb")
content = tmpFile.read()
tmpFile.close()
# already encoded
if len(content) >=4 and content[0:4] == b'DDTX':
return True
return False
def encrypt(sourceObj, key, head = b"DDTX"):
if len(sourceObj) >=4 and sourceObj[0:4] == head:
return sourceObj
if isinstance(key,str):
key = key.encode(ENCODE_FORMAT)
if sourceObj == '': return sourceObj
v = _str2long(sourceObj, True)
k = _str2long(key.ljust(16, b"\0"), False)
n = len(v) - 1
z = v[n]
y = v[0]
sum = 0
q = 6 + 52 // (n + 1)
while q > 0:
sum = (sum + _DELTA) & 0xffffffff
e = sum >> 2 & 3
for p in range(n):
y = v[p + 1]
v[p] = (v[p] + ((z >> 5 ^ y << 2) + (y >> 3 ^ z << 4) ^ (sum ^ y) + (k[p & 3 ^ e] ^ z))) & 0xffffffff
z = v[p]
y = v[0]
v[n] = (v[n] + ((z >> 5 ^ y << 2) + (y >> 3 ^ z << 4) ^ (sum ^ y) + (k[n & 3 ^ e] ^ z))) & 0xffffffff
z = v[n]
q -= 1
return _long2str(v, False)
def decrypt(str, key):
if str == '': return str
v = _str2long(str, False)
k = _str2long(key.ljust(16, b"\0"), False)
n = len(v) - 1
z = v[n]
y = v[0]
q = 6 + 52 // (n + 1)
sum = (q * _DELTA) & 0xffffffff
while (sum != 0):
e = sum >> 2 & 3
for p in range(n, 0, -1):
z = v[p - 1]
v[p] = (v[p] - ((z >> 5 ^ y << 2) + (y >> 3 ^ z << 4) ^ (sum ^ y) + (k[p & 3 ^ e] ^ z))) & 0xffffffff
y = v[p]
z = v[n]
v[0] = (v[0] - ((z >> 5 ^ y << 2) + (y >> 3 ^ z << 4) ^ (sum ^ y) + (k[0 & 3 ^ e] ^ z))) & 0xffffffff
y = v[0]
sum = (sum - _DELTA) & 0xffffffff
return _long2str(v, True) | en | 0.883657 | # already encoded | 2.212017 | 2 |
Decoradores em Python/preservando_metadata_wraps.py | GlennSR/Curso-Python-Udemy | 0 | 6622458 | '''
Preservando metadata com wraps
Metadados -> São dados intrísecos em arquivos
wraps -> São funções que envolvem elementos com diversas finalidades
'''
# Problema
def ver_log(funcao):
def logar(*args, **kwargs):
'''Eu sou uma função (logar) dentro de outra'''
print(f'Você está chamando {funcao.__name__}')
print(f'Aqui está sua documentação: {funcao.__doc__}')
return funcao(*args, **kwargs)
return logar
@ver_log
def soma(a, b):
"""Soma dois números."""
return a + b
print(soma(10, 30))
# Mas usando uma outra maneira
print(soma.__name__) # logar
print(soma.__doc__) # Eu sou uma função (logar) dentro de outra
print(help(soma))
print()
# Resolução do problema
from functools import wraps
def ver_log(funcao):
@wraps(funcao)
def logar(*args, **kwargs):
'''Eu sou uma função (logar) dentro de outra'''
print(f'Você está chamando {funcao.__name__}')
print(f'Aqui está sua documentação: {funcao.__doc__}')
return funcao(*args, **kwargs)
return logar
@ver_log
def soma(a, b):
"""Soma dois números."""
return a + b
print(soma(10, 30))
print(soma.__name__) # soma
print(soma.__doc__) # Soma dois números
print(help(soma))
| '''
Preservando metadata com wraps
Metadados -> São dados intrísecos em arquivos
wraps -> São funções que envolvem elementos com diversas finalidades
'''
# Problema
def ver_log(funcao):
def logar(*args, **kwargs):
'''Eu sou uma função (logar) dentro de outra'''
print(f'Você está chamando {funcao.__name__}')
print(f'Aqui está sua documentação: {funcao.__doc__}')
return funcao(*args, **kwargs)
return logar
@ver_log
def soma(a, b):
"""Soma dois números."""
return a + b
print(soma(10, 30))
# Mas usando uma outra maneira
print(soma.__name__) # logar
print(soma.__doc__) # Eu sou uma função (logar) dentro de outra
print(help(soma))
print()
# Resolução do problema
from functools import wraps
def ver_log(funcao):
@wraps(funcao)
def logar(*args, **kwargs):
'''Eu sou uma função (logar) dentro de outra'''
print(f'Você está chamando {funcao.__name__}')
print(f'Aqui está sua documentação: {funcao.__doc__}')
return funcao(*args, **kwargs)
return logar
@ver_log
def soma(a, b):
"""Soma dois números."""
return a + b
print(soma(10, 30))
print(soma.__name__) # soma
print(soma.__doc__) # Soma dois números
print(help(soma))
| pt | 0.997445 | Preservando metadata com wraps Metadados -> São dados intrísecos em arquivos wraps -> São funções que envolvem elementos com diversas finalidades # Problema Eu sou uma função (logar) dentro de outra Soma dois números. # Mas usando uma outra maneira # logar # Eu sou uma função (logar) dentro de outra # Resolução do problema Eu sou uma função (logar) dentro de outra Soma dois números. # soma # Soma dois números | 3.377772 | 3 |
Solutions/Tree Problems/zig_zag_print.py | maharshi365/Python-Practice | 0 | 6622459 | # Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution(object):
def zigzagLevelOrder(self, root):
"""
:type root: TreeNode
:rtype: List[List[int]]
"""
if root is None:
return None
out = list()
q = [root]
i = 1
while len(q) > 0:
if i == 1:
out.append([x.val for x in q])
else:
out.append([x.val for x in reversed(q)])
nq = list()
for item in q:
if item.left:
nq.append(item.left)
if item.right:
nq.append(item.right)
q = nq
i = 1 - i
return out
| # Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution(object):
def zigzagLevelOrder(self, root):
"""
:type root: TreeNode
:rtype: List[List[int]]
"""
if root is None:
return None
out = list()
q = [root]
i = 1
while len(q) > 0:
if i == 1:
out.append([x.val for x in q])
else:
out.append([x.val for x in reversed(q)])
nq = list()
for item in q:
if item.left:
nq.append(item.left)
if item.right:
nq.append(item.right)
q = nq
i = 1 - i
return out
| en | 0.407069 | # Definition for a binary tree node. # class TreeNode(object): # def __init__(self, x): # self.val = x # self.left = None # self.right = None :type root: TreeNode :rtype: List[List[int]] | 3.643921 | 4 |
tests/utils/test_Misc.py | anejaalekh/OSBot-Utils | 0 | 6622460 | <reponame>anejaalekh/OSBot-Utils<filename>tests/utils/test_Misc.py
import collections
import datetime
import sys
import time
import timeit
from typing import Generator
from unittest import TestCase
from osbot_utils.utils import Misc
from osbot_utils.utils.Files import Files, file_extension
from osbot_utils.utils.Misc import bytes_to_base64, base64_to_bytes, date_now, class_name, str_to_date, get_value, \
get_random_color, is_number, none_or_empty, random_filename, random_port, random_number, random_string, \
random_string_and_numbers, str_md5, random_uuid, trim, to_int, wait, word_wrap, word_wrap_escaped, \
convert_to_number, \
remove_html_tags, get_field, last_letter, random_text, random_password, split_lines, under_debugger, str_sha256
class test_Misc(TestCase):
def test_array_add(self):
array = ['aaa']
self.assertEqual (Misc.array_add(array,'abc'), 'abc' )
self.assertIsNone (Misc.array_add(array, None) )
self.assertEqual (array ,['aaa','abc'])
def test_array_find(self):
array = ['1',2,'3']
self.assertEqual (Misc.array_find(array, '1' ), 0)
self.assertEqual (Misc.array_find(array, 2 ), 1)
self.assertEqual (Misc.array_find(array, '3' ), 2)
self.assertEqual (Misc.array_find(array, 'a' ), -1)
self.assertEqual (Misc.array_find(array, None), -1)
self.assertRaises (Exception, Misc.array_find, None, None)
self.assertRaises (Exception, Misc.array_find, 'a', None)
def test_array_get(self):
array = ['1',2,'3']
assert Misc.array_get(array, 0 ) == '1'
assert Misc.array_get(array, 1 ) == 2
assert Misc.array_get(array, 2 ) == '3'
assert Misc.array_get(array, -1 ) is None
assert Misc.array_get(array, 3 ) is None
assert Misc.array_get(array, None) is None
assert Misc.array_get(None , None) is None
def test_array_pop(self):
array = ['1',2,'3']
assert Misc.array_pop(array) == '3'
assert Misc.array_pop(array) == 2
assert Misc.array_pop(array) == '1'
assert Misc.array_pop(array) is None
assert Misc.array_pop(None) is None
array = ['1', 2, '3']
assert Misc.array_pop(array, 1) == 2
assert Misc.array_pop(array, 1) == '3'
assert Misc.array_pop(array, 1) is None
assert Misc.array_pop(array, 0) == '1'
assert Misc.array_pop(array, 0) is None
def test_array_pop_and_trim(self):
array = [' 1 ',2,'3']
assert Misc.array_pop_and_trim(array, 1 ) == 2
assert Misc.array_pop_and_trim(array, 1 ) == '3'
assert Misc.array_pop_and_trim(array, 0 ) == '1'
assert Misc.array_pop_and_trim(array, None) is None
def test_base64_to_bytes__bytes_to_base64(self):
bytes = b"\x89PNG__"
bytes_base64 = "iVBOR19f"
assert bytes_to_base64(bytes ) == bytes_base64
assert base64_to_bytes(bytes_base64 ) == bytes
assert base64_to_bytes(bytes_base64.encode()) == bytes
def test_chunks(self):
array = ['1',2,'3',4 ,'5']
assert list(Misc.chunks(array, 2 )) == [['1', 2 ], ['3', 4], ['5']]
assert list(Misc.chunks(array, 3 )) == [['1', 2,'3'], [ 4 , '5' ]]
assert list(Misc.chunks(array, 0 )) == []
assert list(Misc.chunks(array, None)) == []
assert type(Misc.chunks(None , 0)).__name__ == 'generator'
assert list(Misc.chunks(None , 0)) == []
def test_class_name(self):
assert class_name(TestCase) == "type"
assert class_name(TestCase()) == "TestCase"
def test_date_now(self):
now = date_now()
assert type(str_to_date(now)) == datetime.datetime
def test_get_field(self):
print()
print(self.__module__)
assert str(get_field(self, '__module__')) == "test_Misc"
assert get_field({}, None ) == None
assert get_field({}, None, default=42 ) == 42
def test_get_value(self):
assert get_value({}, 'a' ) is None
assert get_value({}, 'a', 'default') == 'default'
assert get_value({}, None , 'd' ) == 'd'
assert get_value({}, None ) is None
assert get_value({'a': 42}, 'a' ) == 42
assert get_value(None, 'a' ) == None
def test_get_random_color(self):
assert get_random_color() in ['skyblue', 'darkseagreen', 'palevioletred', 'coral', 'darkgray']
def test_is_number(self):
assert is_number(42 ) is True
assert is_number(4.2) is True
assert is_number(-1 ) is True
assert is_number(True) is False
assert is_number('42') is False
assert is_number(None) is False
def test_last_letter(self):
assert last_letter("abc") == "c"
assert last_letter("" ) == None
assert last_letter(None ) == None
def test_none_or_empty(self):
assert none_or_empty(None, None) is True
assert none_or_empty(None, 'aa') is True
assert none_or_empty('aa', None) is True
assert none_or_empty({} , 'aa') is True
assert none_or_empty({'a': 42}, 'b') is True
assert none_or_empty({'a': 42}, 'a') is False
def test_random_filename(self):
result = random_filename()
assert len(result) == 14
assert ".tmp" in result
def test_random_password(self):
result = random_password() # todo: improve test to also check for the password complexity
assert len(result) == 24
def test_random_text(self):
result = random_text()
assert len(result) == 17
assert result[:5] == "text_"
assert len(random_text(length=37)) == 42
assert random_text(prefix='abc_')[:4] == "abc_"
assert random_text(prefix='abc' )[:4] == "abc_"
def test_exists(self):
assert Files.exists(Files.current_folder()) is True
assert Files.exists('aaaa_bbb_ccc' ) is False
assert Files.exists(None ) is False
def test_is_number(self):
assert Misc.is_number(123 ) is True
assert Misc.is_number('123' ) is True
assert Misc.is_number('abc' ) is False
assert Misc.is_number(None ) is False
assert Misc.is_number([] ) is False
def test_split_lines(self):
text="aaa\nbbbbb\r\ncccc"
assert split_lines(text) == ['aaa', 'bbbbb','cccc']
def test_random_filename(self):
assert len(random_filename())==14
assert len(random_filename(length=20)) == 24
assert file_extension(random_filename()) == '.tmp'
assert file_extension(random_filename(extension='txt' )) == '.txt'
assert file_extension(random_filename(extension='.txt')) == '.txt'
def test_def_random_port(self):
assert 19999 < random_port() < 65001
assert 19 < random_port(20,22) < 23
assert 20 < random_port(21, 22) < 23
assert random_port(20, 20) == 20
def test_def_random_number(self):
assert 0 < random_number() < 65001
def test_def_random_string(self):
assert len(random_string()) == 6
assert len(random_string(length=12)) == 12
assert len(random_string(prefix="prefix_")) == 13
assert random_string(prefix="prefix_")[:7] == "prefix_"
def test_random_string_and_numbers(self):
assert len(random_string_and_numbers()) == 6
def test_md5(self):
assert str_md5('admin') == '21232f297a57a5a743894a0e4a801fc3'
assert str_md5(None ) is None
def test_sha256(self):
assert str_sha256('admin') == '8c6976e5b5410415bde908bd4dee15dfb167a9c873fc4bb8a81f6f2ab448a918'
assert str_sha256(None ) is None
def test_random_uuid(self):
assert len(random_uuid()) == 36
assert len(random_uuid().split('-')) == 5
def test_trim(self):
assert trim(' aaa ') == 'aaa'
assert trim('\naaa\n') == 'aaa'
assert trim('' ) == ''
assert trim(' ') == ''
assert trim(' \t \n ') == ''
assert trim(None ) == ''
assert trim({} ) == ''
def test_to_int(self):
assert to_int('12' ) == 12
assert to_int('aaa' ) == 0
assert to_int('aaa',1) == 1
def test_wait(self):
delay = 0.001 # time to wait (in seconds)
start = time.time()
wait(delay)
end = time.time()
assert end - start > delay
def test_word_wrap(self):
text = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAALorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua."
assert word_wrap(text) == """AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAALorem ipsum dolor
sit amet, consectetur adipiscing elit,
sed do eiusmod tempor incididunt ut
labore et dolore magna aliqua."""
assert word_wrap(text, length=60) == """AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed
do eiusmod tempor incididunt ut labore et dolore magna
aliqua."""
def test_word_wrap_escaped(self):
text = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAALorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua."
assert word_wrap_escaped(text) == "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\\nAAAAAAAAAAAAAAAAAAAALorem ipsum dolor\\nsit amet, consectetur adipiscing elit,\\nsed do eiusmod tempor incididunt ut\\nlabore et dolore magna aliqua."
def test_convert_to_number(self):
assert convert_to_number("123" ) == 123
assert convert_to_number("123.45") == 123.45
assert convert_to_number("1234.5") == 1234.5
assert convert_to_number("£123.4") == 123.4
assert convert_to_number("€123.4") == 123.4
assert convert_to_number("$123.4") == 123.4
assert convert_to_number("#123.4") == 0
assert convert_to_number("1,235" ) == 0
assert convert_to_number("abc" ) == 0
assert convert_to_number(None ) == 0
def test_remove_html_tags(self):
assert remove_html_tags("<b>42</b>" ) == "42"
assert remove_html_tags("<a href='abc'>42</a>") == "42"
assert remove_html_tags("<a href='abc'>42</b>") == "42"
def test_under_debugger(self):
if 'pydevd' in sys.modules:
assert under_debugger() is True
else:
assert under_debugger() is False | import collections
import datetime
import sys
import time
import timeit
from typing import Generator
from unittest import TestCase
from osbot_utils.utils import Misc
from osbot_utils.utils.Files import Files, file_extension
from osbot_utils.utils.Misc import bytes_to_base64, base64_to_bytes, date_now, class_name, str_to_date, get_value, \
get_random_color, is_number, none_or_empty, random_filename, random_port, random_number, random_string, \
random_string_and_numbers, str_md5, random_uuid, trim, to_int, wait, word_wrap, word_wrap_escaped, \
convert_to_number, \
remove_html_tags, get_field, last_letter, random_text, random_password, split_lines, under_debugger, str_sha256
class test_Misc(TestCase):
def test_array_add(self):
array = ['aaa']
self.assertEqual (Misc.array_add(array,'abc'), 'abc' )
self.assertIsNone (Misc.array_add(array, None) )
self.assertEqual (array ,['aaa','abc'])
def test_array_find(self):
array = ['1',2,'3']
self.assertEqual (Misc.array_find(array, '1' ), 0)
self.assertEqual (Misc.array_find(array, 2 ), 1)
self.assertEqual (Misc.array_find(array, '3' ), 2)
self.assertEqual (Misc.array_find(array, 'a' ), -1)
self.assertEqual (Misc.array_find(array, None), -1)
self.assertRaises (Exception, Misc.array_find, None, None)
self.assertRaises (Exception, Misc.array_find, 'a', None)
def test_array_get(self):
array = ['1',2,'3']
assert Misc.array_get(array, 0 ) == '1'
assert Misc.array_get(array, 1 ) == 2
assert Misc.array_get(array, 2 ) == '3'
assert Misc.array_get(array, -1 ) is None
assert Misc.array_get(array, 3 ) is None
assert Misc.array_get(array, None) is None
assert Misc.array_get(None , None) is None
def test_array_pop(self):
array = ['1',2,'3']
assert Misc.array_pop(array) == '3'
assert Misc.array_pop(array) == 2
assert Misc.array_pop(array) == '1'
assert Misc.array_pop(array) is None
assert Misc.array_pop(None) is None
array = ['1', 2, '3']
assert Misc.array_pop(array, 1) == 2
assert Misc.array_pop(array, 1) == '3'
assert Misc.array_pop(array, 1) is None
assert Misc.array_pop(array, 0) == '1'
assert Misc.array_pop(array, 0) is None
def test_array_pop_and_trim(self):
array = [' 1 ',2,'3']
assert Misc.array_pop_and_trim(array, 1 ) == 2
assert Misc.array_pop_and_trim(array, 1 ) == '3'
assert Misc.array_pop_and_trim(array, 0 ) == '1'
assert Misc.array_pop_and_trim(array, None) is None
def test_base64_to_bytes__bytes_to_base64(self):
bytes = b"\x89PNG__"
bytes_base64 = "iVBOR19f"
assert bytes_to_base64(bytes ) == bytes_base64
assert base64_to_bytes(bytes_base64 ) == bytes
assert base64_to_bytes(bytes_base64.encode()) == bytes
def test_chunks(self):
array = ['1',2,'3',4 ,'5']
assert list(Misc.chunks(array, 2 )) == [['1', 2 ], ['3', 4], ['5']]
assert list(Misc.chunks(array, 3 )) == [['1', 2,'3'], [ 4 , '5' ]]
assert list(Misc.chunks(array, 0 )) == []
assert list(Misc.chunks(array, None)) == []
assert type(Misc.chunks(None , 0)).__name__ == 'generator'
assert list(Misc.chunks(None , 0)) == []
def test_class_name(self):
assert class_name(TestCase) == "type"
assert class_name(TestCase()) == "TestCase"
def test_date_now(self):
now = date_now()
assert type(str_to_date(now)) == datetime.datetime
def test_get_field(self):
print()
print(self.__module__)
assert str(get_field(self, '__module__')) == "test_Misc"
assert get_field({}, None ) == None
assert get_field({}, None, default=42 ) == 42
def test_get_value(self):
assert get_value({}, 'a' ) is None
assert get_value({}, 'a', 'default') == 'default'
assert get_value({}, None , 'd' ) == 'd'
assert get_value({}, None ) is None
assert get_value({'a': 42}, 'a' ) == 42
assert get_value(None, 'a' ) == None
def test_get_random_color(self):
assert get_random_color() in ['skyblue', 'darkseagreen', 'palevioletred', 'coral', 'darkgray']
def test_is_number(self):
assert is_number(42 ) is True
assert is_number(4.2) is True
assert is_number(-1 ) is True
assert is_number(True) is False
assert is_number('42') is False
assert is_number(None) is False
def test_last_letter(self):
assert last_letter("abc") == "c"
assert last_letter("" ) == None
assert last_letter(None ) == None
def test_none_or_empty(self):
assert none_or_empty(None, None) is True
assert none_or_empty(None, 'aa') is True
assert none_or_empty('aa', None) is True
assert none_or_empty({} , 'aa') is True
assert none_or_empty({'a': 42}, 'b') is True
assert none_or_empty({'a': 42}, 'a') is False
def test_random_filename(self):
result = random_filename()
assert len(result) == 14
assert ".tmp" in result
def test_random_password(self):
result = random_password() # todo: improve test to also check for the password complexity
assert len(result) == 24
def test_random_text(self):
result = random_text()
assert len(result) == 17
assert result[:5] == "text_"
assert len(random_text(length=37)) == 42
assert random_text(prefix='abc_')[:4] == "abc_"
assert random_text(prefix='abc' )[:4] == "abc_"
def test_exists(self):
assert Files.exists(Files.current_folder()) is True
assert Files.exists('aaaa_bbb_ccc' ) is False
assert Files.exists(None ) is False
def test_is_number(self):
assert Misc.is_number(123 ) is True
assert Misc.is_number('123' ) is True
assert Misc.is_number('abc' ) is False
assert Misc.is_number(None ) is False
assert Misc.is_number([] ) is False
def test_split_lines(self):
text="aaa\nbbbbb\r\ncccc"
assert split_lines(text) == ['aaa', 'bbbbb','cccc']
def test_random_filename(self):
assert len(random_filename())==14
assert len(random_filename(length=20)) == 24
assert file_extension(random_filename()) == '.tmp'
assert file_extension(random_filename(extension='txt' )) == '.txt'
assert file_extension(random_filename(extension='.txt')) == '.txt'
def test_def_random_port(self):
assert 19999 < random_port() < 65001
assert 19 < random_port(20,22) < 23
assert 20 < random_port(21, 22) < 23
assert random_port(20, 20) == 20
def test_def_random_number(self):
assert 0 < random_number() < 65001
def test_def_random_string(self):
assert len(random_string()) == 6
assert len(random_string(length=12)) == 12
assert len(random_string(prefix="prefix_")) == 13
assert random_string(prefix="prefix_")[:7] == "prefix_"
def test_random_string_and_numbers(self):
assert len(random_string_and_numbers()) == 6
def test_md5(self):
assert str_md5('admin') == '21232f297a57a5a743894a0e4a801fc3'
assert str_md5(None ) is None
def test_sha256(self):
assert str_sha256('admin') == '8c6976e5b5410415bde908bd4dee15dfb167a9c873fc4bb8a81f6f2ab448a918'
assert str_sha256(None ) is None
def test_random_uuid(self):
assert len(random_uuid()) == 36
assert len(random_uuid().split('-')) == 5
def test_trim(self):
assert trim(' aaa ') == 'aaa'
assert trim('\naaa\n') == 'aaa'
assert trim('' ) == ''
assert trim(' ') == ''
assert trim(' \t \n ') == ''
assert trim(None ) == ''
assert trim({} ) == ''
def test_to_int(self):
assert to_int('12' ) == 12
assert to_int('aaa' ) == 0
assert to_int('aaa',1) == 1
def test_wait(self):
delay = 0.001 # time to wait (in seconds)
start = time.time()
wait(delay)
end = time.time()
assert end - start > delay
def test_word_wrap(self):
text = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAALorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua."
assert word_wrap(text) == """AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAALorem ipsum dolor
sit amet, consectetur adipiscing elit,
sed do eiusmod tempor incididunt ut
labore et dolore magna aliqua."""
assert word_wrap(text, length=60) == """AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed
do eiusmod tempor incididunt ut labore et dolore magna
aliqua."""
def test_word_wrap_escaped(self):
text = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAALorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua."
assert word_wrap_escaped(text) == "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\\nAAAAAAAAAAAAAAAAAAAALorem ipsum dolor\\nsit amet, consectetur adipiscing elit,\\nsed do eiusmod tempor incididunt ut\\nlabore et dolore magna aliqua."
def test_convert_to_number(self):
assert convert_to_number("123" ) == 123
assert convert_to_number("123.45") == 123.45
assert convert_to_number("1234.5") == 1234.5
assert convert_to_number("£123.4") == 123.4
assert convert_to_number("€123.4") == 123.4
assert convert_to_number("$123.4") == 123.4
assert convert_to_number("#123.4") == 0
assert convert_to_number("1,235" ) == 0
assert convert_to_number("abc" ) == 0
assert convert_to_number(None ) == 0
def test_remove_html_tags(self):
assert remove_html_tags("<b>42</b>" ) == "42"
assert remove_html_tags("<a href='abc'>42</a>") == "42"
assert remove_html_tags("<a href='abc'>42</b>") == "42"
def test_under_debugger(self):
if 'pydevd' in sys.modules:
assert under_debugger() is True
else:
assert under_debugger() is False | en | 0.888187 | # todo: improve test to also check for the password complexity # time to wait (in seconds) AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA AAAAAAAAAAAAAAAAAAAALorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. | 2.354158 | 2 |
AtCoder/ABC/000-159/ABC127_B.py | sireline/PyCode | 0 | 6622461 | <filename>AtCoder/ABC/000-159/ABC127_B.py
r, D, x2k = [int(n) for n in input().split()]
X = [x2k]
for i in range(10):
X.append(r*X[i]-D)
print(X[i+1])
| <filename>AtCoder/ABC/000-159/ABC127_B.py
r, D, x2k = [int(n) for n in input().split()]
X = [x2k]
for i in range(10):
X.append(r*X[i]-D)
print(X[i+1])
| none | 1 | 2.659848 | 3 | |
meiduo_mall/utils/views.py | Vent-Any/meiduo_mall_cangku | 0 | 6622462 | <reponame>Vent-Any/meiduo_mall_cangku
from django.contrib.auth.mixins import LoginRequiredMixin
from django.http import JsonResponse
class LoginRequiredJsonMixin(LoginRequiredMixin):
def handle_no_permission(self):
return JsonResponse({'code': 400, 'errmsg': "OK"}) | from django.contrib.auth.mixins import LoginRequiredMixin
from django.http import JsonResponse
class LoginRequiredJsonMixin(LoginRequiredMixin):
def handle_no_permission(self):
return JsonResponse({'code': 400, 'errmsg': "OK"}) | none | 1 | 1.868851 | 2 | |
readcsv.py | nicholas-long/github-exploit-code-repository-index | 0 | 6622463 | #!/usr/bin/env python3
import sys
import csv
def fix(col: str):
result = col
result = result.replace('\"', "")
result = result.replace('"', "")
result = result.replace("\n", " ")
result = result.replace("|", " ")
return result
reader = csv.reader(open(sys.argv[1], "r"))
for row in reader:
xf = [ fix(x) for x in row ]
xf[2] = xf[2].split('T')[0]
meta = [xf[i] for i in range(1,7)]
output = " ".join(meta) + "|"
if len(xf) >= 8: output += xf[7]
print(output)
| #!/usr/bin/env python3
import sys
import csv
def fix(col: str):
result = col
result = result.replace('\"', "")
result = result.replace('"', "")
result = result.replace("\n", " ")
result = result.replace("|", " ")
return result
reader = csv.reader(open(sys.argv[1], "r"))
for row in reader:
xf = [ fix(x) for x in row ]
xf[2] = xf[2].split('T')[0]
meta = [xf[i] for i in range(1,7)]
output = " ".join(meta) + "|"
if len(xf) >= 8: output += xf[7]
print(output)
| fr | 0.221828 | #!/usr/bin/env python3 | 3.444174 | 3 |
app/run.py | bkontonis/NLP---Messages-Classification | 0 | 6622464 | import re
import sys
import json
import nltk
import random
import joblib
import plotly
import pandas as pd
from flask import Flask
from plotly.graph_objs import Bar, Scatter
from sqlalchemy import create_engine
from sklearn.pipeline import Pipeline
from sklearn.linear_model import SGDClassifier
from flask import render_template, request, jsonify
from sklearn.model_selection import train_test_split
from sklearn.multioutput import MultiOutputClassifier
from dependencies.tokenize import tokenize
app = Flask(__name__)
# load data
engine = create_engine('sqlite:///../data/DisasterResponse.db')
df = pd.read_sql_table('LabeledMessages', engine)
df_no_childalone = df.drop(['child_alone'], axis=1)
# load model
model = joblib.load("../models/classifier.pkl")
# Below I extract data needed for visuals
# Visual 1: Genre counts
genre_counts = df.groupby('genre').count()['message']
genre_names = list(genre_counts.index)
# Visual 2: Category counts
category_columns = list(df.columns[4:])
category_counts = df[category_columns].sum(axis =0).sort_values(ascending = False)
# index webpage displays cool visuals and receives user input text for model
@app.route('/')
@app.route('/index')
def index():
# create visuals
graphs = [
{
'data': [
Bar(
x=genre_names,
y=genre_counts
)
],
'layout': {
'title': 'Distribution of Message Genres',
'yaxis': {
'title': "Count"
},
'xaxis': {
'title': "Genre"
}
}
}
,
{
'data': [
Bar(
x=list(category_counts.index),
y=category_counts.values
)
],
'layout': {
'title': 'Distribution of Message Categories',
'yaxis': {
'title': "Count"
},
'xaxis': {
'title': "Category"
}
}
}
]
# encode plotly graphs in JSON
ids = ["graph-{}".format(i) for i, _ in enumerate(graphs)]
graphJSON = json.dumps(graphs, cls=plotly.utils.PlotlyJSONEncoder)
# render web page with plotly graphs
return render_template('master.html', ids=ids, graphJSON=graphJSON)
# results page: classifies the user's query and shows per-category labels
@app.route('/go')
def go():
    """Classify the user-supplied message and render the results page."""
    query = request.args.get('query', '')
    # One predicted label per output category for the single query message.
    labels = model.predict([query])[0]
    classification_results = dict(zip(df_no_childalone.columns[4:], labels))
    return render_template(
        'go.html',
        query=query,
        classification_result=classification_results
    )
def main():
    # Bind on all interfaces so the app is reachable from outside a
    # container/VM; debug=True enables the reloader and debugger --
    # development use only, never expose this configuration publicly.
    app.run(host='0.0.0.0', port=3001, debug=True)
if __name__ == '__main__':
main() | import re
import sys
import json
import nltk
import random
import joblib
import plotly
import pandas as pd
from flask import Flask
from plotly.graph_objs import Bar, Scatter
from sqlalchemy import create_engine
from sklearn.pipeline import Pipeline
from sklearn.linear_model import SGDClassifier
from flask import render_template, request, jsonify
from sklearn.model_selection import train_test_split
from sklearn.multioutput import MultiOutputClassifier
from dependencies.tokenize import tokenize
app = Flask(__name__)
# load data
engine = create_engine('sqlite:///../data/DisasterResponse.db')
df = pd.read_sql_table('LabeledMessages', engine)
df_no_childalone = df.drop(['child_alone'], axis=1)
# load model
model = joblib.load("../models/classifier.pkl")
# Below I extract data needed for visuals
# Visual 1: Genre counts
genre_counts = df.groupby('genre').count()['message']
genre_names = list(genre_counts.index)
# Visual 2: Category counts
category_columns = list(df.columns[4:])
category_counts = df[category_columns].sum(axis =0).sort_values(ascending = False)
# index webpage displays cool visuals and receives user input text for model
@app.route('/')
@app.route('/index')
def index():
# create visuals
graphs = [
{
'data': [
Bar(
x=genre_names,
y=genre_counts
)
],
'layout': {
'title': 'Distribution of Message Genres',
'yaxis': {
'title': "Count"
},
'xaxis': {
'title': "Genre"
}
}
}
,
{
'data': [
Bar(
x=list(category_counts.index),
y=category_counts.values
)
],
'layout': {
'title': 'Distribution of Message Categories',
'yaxis': {
'title': "Count"
},
'xaxis': {
'title': "Category"
}
}
}
]
# encode plotly graphs in JSON
ids = ["graph-{}".format(i) for i, _ in enumerate(graphs)]
graphJSON = json.dumps(graphs, cls=plotly.utils.PlotlyJSONEncoder)
# render web page with plotly graphs
return render_template('master.html', ids=ids, graphJSON=graphJSON)
# web page that handles user query and displays model results
@app.route('/go')
def go():
# save user input in query
query = request.args.get('query', '')
# use model to predict classification for query
classification_labels = model.predict([query])[0]
classification_results = dict(zip(df_no_childalone.columns[4:], classification_labels))
# This will render the go.html Please see that file.
return render_template(
'go.html',
query=query,
classification_result=classification_results
)
def main():
app.run(host='0.0.0.0', port=3001, debug=True)
if __name__ == '__main__':
main() | en | 0.768421 | # load data # load model # Below I extract data needed for visuals # Visual 1: Genre counts # Visual 2: Category counts # index webpage displays cool visuals and receives user input text for model # create visuals # encode plotly graphs in JSON # render web page with plotly graphs # web page that handles user query and displays model results # save user input in query # use model to predict classification for query # This will render the go.html Please see that file. | 2.579997 | 3 |
chris_cunningham/day_06.py | techartorg/Advent_of_code_2021 | 0 | 6622465 | from collections import deque
from copy import copy
def main():
    """Solve Advent of Code 2021 day 6 (lanternfish) for both parts."""
    with open("inputs/day_06.txt", 'r') as f:
        # inputs[t] = number of fish whose spawn timer currently reads t (0..8).
        inputs = deque([0] * 9)
        for i in f.read().split(","):
            inputs[int(i)] += 1
        print(f"Part One: {solve(inputs, 80)}")
        print(f"Part Two: {solve(inputs, 256)}")
def solve(data: deque[int], days: int) -> int:
    """Advance the lanternfish timer histogram `days` days; return total fish.

    `data[t]` holds the count of fish with timer value t (0..8).  The input
    deque is left unmodified.
    """
    counts = copy(data)
    for _ in range(days):
        # Fish at timer 0 spawn: they become the new timer-8 offspring
        # while the parents are re-added at timer 6.
        spawning = counts.popleft()
        counts.append(spawning)
        counts[6] += spawning
    return sum(counts)
if __name__ == '__main__':
main()
| from collections import deque
from copy import copy
def main():
with open("inputs/day_06.txt", 'r') as f:
inputs = deque([0] * 9)
for i in f.read().split(","):
inputs[int(i)] += 1
print(f"Part One: {solve(inputs, 80)}")
print(f"Part Two: {solve(inputs, 256)}")
def solve(data: deque[int], days: int) -> int:
data = copy(data)
for _ in range(days):
data.rotate(-1)
data[6] += data[8]
return sum(data)
if __name__ == '__main__':
main()
| none | 1 | 3.104658 | 3 | |
Student-Management-system/code.py | meenakshidolhare/greyatom-python-for-data-science | 0 | 6622466 | <gh_stars>0
# --------------
# Merge the two class rosters into a single list (names are redacted).
class_1=['<NAME>','<NAME>','<NAME>','<NAME>']
class_2=['<NAME>','<NAME>','<NAME>']
new_class=class_1+class_2
new_class
# Enrol a new student, then remove a student who left.
new_class.append('<NAME>')
new_class
new_class.remove('<NAME>')
new_class
# Marks per subject (each out of 100); compute total and percentage.
courses={'Maths':65,'English':70,'History':80,'French':70,'Science':60}
courses_values=courses.values()
courses_values
Total=sum(courses_values)
Total
# 5 subjects x 100 marks each = 500 maximum.
percentage=Total/500*100
percentage
# Mathematics marks per student; find the top scorer.
Mathematics={'<NAME>':78,'<NAME>':95,'<NAME>':65,'<NAME>':50
,'<NAME>':70,'<NAME>':66,'<NAME>':75}
max_marks=max(Mathematics.values())
max_marks
# Linear scan: the last student holding the maximum mark wins ties.
for key, value in Mathematics.items():
    if value == max_marks:
        topper=key
print(topper)
# Build the certificate name as "LASTNAME FIRSTNAME" in upper case.
split_topper=topper.split()
split_topper
First_Name=split_topper[0]
First_Name
Last_Name=split_topper[1]
Last_Name
Full_Name=(Last_Name+' '+First_Name)
certificate_name=Full_Name.upper()
certificate_name
| # --------------
class_1=['<NAME>','<NAME>','<NAME>','<NAME>']
class_2=['<NAME>','<NAME>','<NAME>']
new_class=class_1+class_2
new_class
new_class.append('<NAME>')
new_class
new_class.remove('<NAME>')
new_class
courses={'Maths':65,'English':70,'History':80,'French':70,'Science':60}
courses_values=courses.values()
courses_values
Total=sum(courses_values)
Total
percentage=Total/500*100
percentage
Mathematics={'<NAME>':78,'<NAME>':95,'<NAME>':65,'<NAME>':50
,'<NAME>':70,'<NAME>':66,'<NAME>':75}
max_marks=max(Mathematics.values())
max_marks
for key, value in Mathematics.items():
if value == max_marks:
topper=key
print(topper)
split_topper=topper.split()
split_topper
First_Name=split_topper[0]
First_Name
Last_Name=split_topper[1]
Last_Name
Full_Name=(Last_Name+' '+First_Name)
certificate_name=Full_Name.upper()
certificate_name | en | 0.125318 | # -------------- | 3.445799 | 3 |
bspump/abc/source.py | gitter-badger/bspump | 1 | 6622467 | <gh_stars>1-10
import abc
import logging
import asyncio
from .config import ConfigObject
#
L = logging.getLogger(__name__)
#
class Source(abc.ABC, ConfigObject):
    '''
    Each source represents a coroutine/Future/Task that is running in the
    context of the main loop.

    The coroutine method main() contains an implementation of each
    particular source.  A source MUST await a pipeline-ready state prior to
    producing an event; that is accomplished by the
    `await self.Pipeline.ready()` call.
    '''

    def __init__(self, app, pipeline, id=None, config=None):
        super().__init__("pipeline:{}:{}".format(pipeline.Id, id if id is not None else self.__class__.__name__), config=config)
        self.Id = id if id is not None else self.__class__.__name__
        self.Pipeline = pipeline
        self.MainCoro = None  # Holds the running `main()` task once started

    async def process(self, event, context=None):
        '''
        This method is used to emit event into a pipeline.
        Blocks until the pipeline reports itself ready.
        '''
        while not self.Pipeline._ready.is_set():
            await self.Pipeline.ready()
        return self.Pipeline.process(event, context=context)

    def start(self, loop):
        # Idempotent: a second start() while running is a no-op.
        if self.MainCoro is not None:
            return
        self.MainCoro = asyncio.ensure_future(self.main(), loop=loop)

    async def stop(self):
        if self.MainCoro is None:
            return  # Source is not started
        self.MainCoro.cancel()
        await self.MainCoro
        if not self.MainCoro.done():
            L.warning("Source '{}' refused to stop: {}".format(self.Id, self.MainCoro))

    @abc.abstractmethod
    async def main(self):
        # BUGFIX: the original `raise NotImplemented()` raised a TypeError
        # (NotImplemented is a sentinel value, not callable); the proper
        # exception for an unimplemented abstract coroutine is
        # NotImplementedError.
        raise NotImplementedError()

    async def stopped(self):
        '''
        Helper that simplifies the implementation of sources:

        async def main(self):
            ... initialize resources here
            await self.stopped()
            ... finalize resources here
        '''
        try:
            while True:
                await asyncio.sleep(60)
        except asyncio.CancelledError:
            pass

    def rest_get(self):
        # Minimal REST representation of this source.
        return {
            "Id": self.Id,
            "Class": self.__class__.__name__
        }
#
class TriggerSource(Source):
    '''
    Abstract source class intended as a base for 'cyclic' sources such as
    file readers, SQL extractors etc.

    You need to attach a trigger and implement the cycle() method.  You may
    also overload the main() method to provide additional parameters for a
    cycle() method:

    async def main(self):
        async with aiohttp.ClientSession(loop=self.Loop) as session:
            await super().main(session)

    async def cycle(self, session):
        session.get(...)
    '''

    def __init__(self, app, pipeline, id=None, config=None):
        super().__init__(app, pipeline, id=id, config=config)
        self.TriggerEvent = asyncio.Event(loop=app.Loop)
        self.TriggerEvent.clear()
        self.Triggers = set()

    def on(self, trigger):
        '''
        Add a trigger.  Returns self so calls can be chained.
        '''
        trigger.add(self)
        self.Triggers.add(trigger)
        return self

    async def main(self, *args, **kwags):
        while True:
            # Wait until the pipeline can accept events.
            await self.Pipeline.ready()
            # Wait for a trigger to fire.
            await self.TriggerEvent.wait()
            # Announce the beginning of a cycle.
            self.Pipeline.PubSub.publish("bspump.pipeline.cycle_begin!", pipeline=self.Pipeline)
            # Execute one cycle; any error puts the pipeline in error state.
            try:
                await self.cycle(*args, **kwags)
            except BaseException as e:
                self.Pipeline.set_error(e, None)
            # Announce the end of a cycle.
            self.Pipeline.PubSub.publish("bspump.pipeline.cycle_end!", pipeline=self.Pipeline)
            self.TriggerEvent.clear()
            for trigger in self.Triggers:
                trigger.done(self)

    @abc.abstractmethod
    async def cycle(self, *args, **kwags):
        # BUGFIX: NotImplemented is not callable -- NotImplementedError is
        # the correct exception for an unimplemented abstract coroutine.
        raise NotImplementedError()

    def rest_get(self):
        # BUGFIX: the original returned `dict.update(...)`, which always
        # evaluates to None; build the merged dict and return it instead.
        result = super().rest_get()
        result["triggered"] = self.TriggerEvent.is_set()
        return result
| import abc
import logging
import asyncio
from .config import ConfigObject
#
L = logging.getLogger(__name__)
#
class Source(abc.ABC, ConfigObject):
'''
Each source represent a coroutine/Future/Task that is running in the context of the main loop.
The coroutine method main() contains an implementation of each particular source.
Source MUST await a pipeline ready state prior producing the event.
It is acomplished by `await self.Pipeline.ready()` call.
'''
def __init__(self, app, pipeline, id=None, config=None):
super().__init__("pipeline:{}:{}".format(pipeline.Id, id if id is not None else self.__class__.__name__), config=config)
self.Id = id if id is not None else self.__class__.__name__
self.Pipeline = pipeline
self.MainCoro = None # Contains a main coroutine `main()` if Pipeline is started
async def process(self, event, context=None):
'''
This method is used to emit event into a pipeline.
'''
while not self.Pipeline._ready.is_set():
await self.Pipeline.ready()
return self.Pipeline.process(event, context=context)
def start(self, loop):
if self.MainCoro is not None: return
self.MainCoro = asyncio.ensure_future(self.main(), loop=loop)
async def stop(self):
if self.MainCoro is None: return # Source is not started
self.MainCoro.cancel()
await self.MainCoro
if not self.MainCoro.done():
L.warning("Source '{}' refused to stop: {}".format(self.Id, self.MainCoro))
@abc.abstractmethod
async def main(self):
raise NotImplemented()
async def stopped(self):
'''
Helper that simplyfies the implementation of sources:
async def main(self):
... initialize resources here
await self.stopped()
... finalize resources here
'''
try:
while True:
await asyncio.sleep(60)
except asyncio.CancelledError:
pass
def rest_get(self):
return {
"Id": self.Id,
"Class": self.__class__.__name__
}
#
class TriggerSource(Source):
'''
This is an abstract source class intended as a base for implementation of 'cyclic' sources such as file readers, SQL extractors etc.
You need to provide a trigger class and implement cycle() method.
You also may overload the main() method to provide additional parameters for a cycle() method.
async def main(self):
async with aiohttp.ClientSession(loop=self.Loop) as session:
await super().main(session)
async def cycle(self, session):
session.get(...)
'''
def __init__(self, app, pipeline, id=None, config=None):
super().__init__(app, pipeline, id=id, config=config)
self.TriggerEvent = asyncio.Event(loop=app.Loop)
self.TriggerEvent.clear()
self.Triggers = set()
def on(self, trigger):
'''
Add trigger
'''
trigger.add(self)
self.Triggers.add(trigger)
return self
async def main(self, *args, **kwags):
while True:
# Wait for pipeline is ready
await self.Pipeline.ready()
# Wait for a trigger
await self.TriggerEvent.wait()
# Send begin on a cycle event
self.Pipeline.PubSub.publish("bspump.pipeline.cycle_begin!", pipeline=self.Pipeline)
# Execute one cycle
try:
await self.cycle(*args, **kwags)
except BaseException as e:
self.Pipeline.set_error(e, None)
# Send end of a cycle event
self.Pipeline.PubSub.publish("bspump.pipeline.cycle_end!", pipeline=self.Pipeline)
self.TriggerEvent.clear()
for trigger in self.Triggers:
trigger.done(self)
@abc.abstractmethod
async def cycle(self, *args, **kwags):
raise NotImplemented()
def rest_get(self):
return super().rest_get().update({
"triggered": self.TriggerEvent.is_set()
}) | en | 0.836491 | # # Each source represent a coroutine/Future/Task that is running in the context of the main loop. The coroutine method main() contains an implementation of each particular source. Source MUST await a pipeline ready state prior producing the event. It is acomplished by `await self.Pipeline.ready()` call. # Contains a main coroutine `main()` if Pipeline is started This method is used to emit event into a pipeline. # Source is not started Helper that simplyfies the implementation of sources: async def main(self): ... initialize resources here await self.stopped() ... finalize resources here # This is an abstract source class intended as a base for implementation of 'cyclic' sources such as file readers, SQL extractors etc. You need to provide a trigger class and implement cycle() method. You also may overload the main() method to provide additional parameters for a cycle() method. async def main(self): async with aiohttp.ClientSession(loop=self.Loop) as session: await super().main(session) async def cycle(self, session): session.get(...) Add trigger # Wait for pipeline is ready # Wait for a trigger # Send begin on a cycle event # Execute one cycle # Send end of a cycle event | 2.747837 | 3 |
527 Word Abbreviation.py | krishna13052001/LeetCode | 872 | 6622468 | #!/usr/bin/python3
"""
premium question
"""
from typing import List
from collections import defaultdict
class Solution:
    def wordsAbbreviation(self, words: List[str]) -> List[str]:
        """Return the shortest unique abbreviation for every word.

        Words are bucketed by (first char, last char, length) -- only words
        in the same bucket can collide.  Within a bucket, a trie counts how
        many words share each prefix; the shortest prefix with count 1
        fixes the abbreviation length for that word.
        """
        buckets = defaultdict(list)
        for idx, word in enumerate(words):
            buckets[word[0], word[-1], len(word)].append(idx)

        make_node = lambda: defaultdict(make_node)
        result = [None] * len(words)
        for indices in buckets.values():
            root = make_node()
            # Pass 1: count bucket members sharing each prefix.
            for idx in indices:
                node = root
                for ch in words[idx]:
                    node = node[ch]
                    node["count"] = node.get("count", 0) + 1
            # Pass 2: walk down until the prefix is unique to this word.
            for idx in indices:
                node = root
                depth = 0
                for ch in words[idx]:
                    node = node[ch]
                    depth += 1
                    if node["count"] == 1:
                        break
                result[idx] = self.abbrev(words[idx], depth)
        return result

    def abbrev(self, w, prefix_l):
        """Abbreviate w keeping prefix_l leading chars, if it saves space."""
        omitted = len(w) - prefix_l - 1
        if omitted > 1:
            return w[:prefix_l] + str(omitted) + w[-1]
        return w
if __name__ == "__main__":
assert Solution().wordsAbbreviation(["like", "god", "internal", "me", "internet", "interval", "intension", "face", "intrusion"]) == ["l2e","god","internal","me","i6t","interval","inte4n","f2e","intr4n"]
| #!/usr/bin/python3
"""
premium question
"""
from typing import List
from collections import defaultdict
class Solution:
def wordsAbbreviation(self, words: List[str]) -> List[str]:
"""
Sort the word, check prefix and last word
Group by first and last char, group by prefix and last char
then make a trie - hard to implement? TrieNode lambda
Need to count the #appearances in the TrieNode
"""
hm = defaultdict(list)
ret = [None for _ in words]
for i, w in enumerate(words):
hm[w[0], w[-1], len(w)].append(i)
TrieNode = lambda: defaultdict(TrieNode)
for lst in hm.values():
root = TrieNode()
for i in lst:
w = words[i]
cur = root
for c in w:
cur = cur[c]
cur["count"] = cur.get("count", 0) + 1
for i in lst:
w = words[i]
prefix_l = 0
cur = root
for c in w:
prefix_l += 1
cur = cur[c]
if cur["count"] == 1:
break
ret[i] = self.abbrev(w, prefix_l)
return ret
def abbrev(self, w, prefix_l):
abbrev_l = len(w) - 2 - prefix_l + 1
if abbrev_l > 1:
return w[:prefix_l] + str(abbrev_l) + w[-1]
return w
if __name__ == "__main__":
assert Solution().wordsAbbreviation(["like", "god", "internal", "me", "internet", "interval", "intension", "face", "intrusion"]) == ["l2e","god","internal","me","i6t","interval","inte4n","f2e","intr4n"]
| en | 0.753489 | #!/usr/bin/python3 premium question Sort the word, check prefix and last word Group by first and last char, group by prefix and last char then make a trie - hard to implement? TrieNode lambda Need to count the #appearances in the TrieNode | 3.699549 | 4 |
tests/train/helper.py | korepwx/madoka | 0 | 6622469 | <reponame>korepwx/madoka
# -*- coding: utf-8 -*-
from logging import getLogger
import numpy as np
import six
import tensorflow as tf
from sklearn.base import BaseEstimator
from madoka.train import Monitor, EnsembleTrainer
from madoka.utils import flatten_list
from madoka.utils.tfhelper import ensure_default_session
if six.PY2:
from itertools import izip_longest as zip_longest
else:
from itertools import zip_longest
class Event(object):
    """A named event with a deterministic textual form.

    Keyword arguments are stored as (key, value) pairs sorted by key so
    that repr() output is stable regardless of dict ordering.
    """

    def __init__(self, name, args):
        self.name = name
        self.args = sorted(six.iteritems(args))

    def __repr__(self):
        if not self.args:
            return self.name
        rendered = ','.join('%s=%s' % pair for pair in self.args)
        return '%s:%s' % (self.name, rendered)
class EventCollector(object):
    """Ordered container of Events with pattern-based sequence matching."""

    def __init__(self):
        self._events = []

    def add(self, e):
        # Append one Event; insertion order is the order matched.
        self._events.append(e)

    def dump(self):
        """Return all recorded events, one repr() per line."""
        return '\n'.join(repr(r) for r in self._events)

    def match(self, patterns):
        """Assert that the recorded events match `patterns` one-to-one.

        Each pattern may be: an exact repr() string, an object with a
        .match() method (e.g. a compiled regex), or a callable taking the
        Event.  On any mismatch an AssertionError is raised whose message
        marks each offending position with [x] (mismatch), [+] (extra
        recorded event), or [-] (missing recorded event).
        """
        mismatch = False
        marks = []
        for i, (e, p) in enumerate(zip(self._events, patterns)):
            if isinstance(p, six.string_types):
                not_match = p != repr(e)
            elif hasattr(p, 'match'):
                not_match = not p.match(repr(e))
            elif callable(p):
                not_match = not p(e)
            else:
                raise TypeError('%r is not a pattern.' % (p,))
            if not_match:
                mismatch = True
                marks.append('[x] ')
            else:
                marks.append(' ')
        # More recorded events than patterns: extras.
        if len(self._events) > len(patterns):
            mismatch = True
            marks += ['[+] '] * (len(self._events) - len(patterns))
        # Fewer recorded events than patterns: missing.
        if len(self._events) < len(patterns):
            mismatch = True
            marks += ['[-] '] * (len(patterns) - len(self._events))
        if mismatch:
            msg = [
                'Events mismatch:',
                ' expected events:',
                '\n'.join(' %s' %
                          (e.pattern if hasattr(e, 'pattern') else e)
                          for e in patterns),
                ' actual events:',
                '\n'.join('%s%s' % (m, e or '(null)')
                          for e, m in zip_longest(self._events, marks))
            ]
            raise AssertionError('\n'.join(msg))

    def __iter__(self):
        return iter(self._events)

    def __len__(self):
        return len(self._events)
class MonitorEventLogger(Monitor):
    """Monitor that records every training callback as an Event.

    Used by tests to assert the exact callback sequence; optionally raises
    KeyboardInterrupt after `break_after_steps` completed steps to simulate
    a user interrupt during training.
    """

    def __init__(self, name='MonitorEventLogger', break_after_steps=None,
                 collector=None):
        # collector: share an EventCollector across loggers if supplied.
        super(MonitorEventLogger, self).__init__(name)
        self.events = EventCollector() if collector is None else collector
        self.break_after_steps = break_after_steps

    def add_event(self, name, **kwargs):
        # Record one named event with its keyword arguments.
        self.events.add(Event(name, kwargs))

    def before_training(self):
        # Triggered once, before the training loop is set up.
        self.add_event('before_training')

    def start_training(self, batch_size, epoch_steps, max_steps, initial_step):
        # Triggered when the training loop starts; captures its parameters.
        self.add_event(
            'start_training', batch_size=batch_size, epoch_steps=epoch_steps,
            max_steps=max_steps, initial_step=initial_step
        )

    def end_training(self, has_error=False):
        """Notify the monitor that a training process has finished.
        It will be triggered whether or not any error has taken place.
        Parameters
        ----------
        has_error : bool
            Whether or not any error has occurred during training.
        """
        self.add_event('end_training', has_error=has_error)

    def start_epoch(self, epoch):
        """Notify the monitor that a training epoch will start.
        Parameters
        ----------
        epoch : int
            Index of the epoch, starting from 0.
        """
        self.add_event('start_epoch', epoch=epoch)

    def end_epoch(self, epoch, avg_loss):
        """Notify the monitor that a training epoch has completed.
        Parameters
        ----------
        epoch : int
            Index of the epoch, starting from 0.
        avg_loss : float
            Average training loss of all steps in this epoch.
            Would be None if the training process does not evolve a loss.
        """
        self.add_event('end_epoch', epoch=epoch, avg_loss=avg_loss)

    def start_step(self, step):
        """Notify the monitor that a training step (mini-batch) will start.
        Parameters
        ----------
        step : int
            Index of the step, starting from 0.
            This should be the total number of steps have ever been performed
            since the whole training process started, not from the start of
            this epoch.
        """
        self.add_event('start_step', step=step)

    def end_step(self, step, loss):
        """Notify the monitor that a training step (mini-batch) has completed.
        Parameters
        ----------
        step : int
            Index of the step, starting from 0.
        loss : float
            Training loss of this step.
            Would be None if the training process does not evolve a loss.
        """
        self.add_event('end_step', step=step, loss=loss)
        # Note that `step` counts from zero, so step+1 is the number of
        # completed steps; interrupt once the configured budget is reached.
        if self.break_after_steps is not None \
                and self.break_after_steps <= step + 1:
            raise KeyboardInterrupt()
class SummaryEvent(Event):
    """An Event built from a serialized TensorFlow Summary protobuf.

    Tag -> simple_value pairs become the event args.  NOTE(review): the
    global step is passed to Event.__init__ as the event *name*, so repr()
    reads "step:tag=value" -- looks intentional for test matching, but
    confirm against the patterns used by callers.
    """

    def __init__(self, step, summary):
        s = tf.Summary()
        s.ParseFromString(summary)
        values = {}
        for val in s.value:
            values[val.tag] = val.simple_value
        super(SummaryEvent, self).__init__(step, values)
class SummaryWriterLogger(object):
    """Stand-in for a TF summary writer that records SummaryEvents instead."""

    def __init__(self, collector=None):
        # collector: share an EventCollector across loggers if supplied.
        self.events = EventCollector() if collector is None else collector

    def write(self, summary, global_step=None, givens=None):
        """Record `summary`; returns self so calls can be chained.

        A tensor/variable summary is evaluated in the current default
        session with `givens` as the feed dict.  NOTE(review): a list/tuple
        is flattened but then passed straight to SummaryEvent, which
        expects serialized bytes -- presumably callers only pass single
        summaries; confirm.
        """
        session = ensure_default_session()
        if isinstance(summary, (list, tuple)):
            summary = flatten_list(summary)
        if isinstance(summary, (tf.Tensor, tf.Variable)):
            summary = session.run(summary, feed_dict=givens)
        self.events.add(SummaryEvent(global_step, summary))
        return self
class _EnsembleTrainerMixin:
    """Mixin that wraps EnsembleTrainer hooks so each call is also logged.

    _init_mixin rebinds the listed hook methods on the *instance* with
    wrappers that first invoke the matching `_m`-prefixed logger and then
    delegate to the original implementation.
    """

    def _init_mixin(self, collector=None):
        self.events = EventCollector() if collector is None else collector

        def make_wrapper(method, logger):
            # Wrap `method` so `logger` observes every invocation with the
            # same arguments before the real work runs.
            @six.wraps(method)
            def inner(*args, **kwargs):
                logger(*args, **kwargs)
                return method(*args, **kwargs)
            return inner

        for k in ('_prepare_data_flow_for_child',
                  '_before_training', '_after_training',
                  '_before_child_training', '_after_child_training'):
            mk = '_m' + k
            # Instance-level rebinding: the class methods stay untouched.
            setattr(self, k, make_wrapper(getattr(self, k), getattr(self, mk)))

    def add_event(self, name, **kwargs):
        self.events.add(Event(name, kwargs))

    def _m_prepare_data_flow_for_child(self, model_id, train_flow):
        self.add_event('prepare_data_flow_for_child', model_id=model_id)

    def _m_before_training(self, initial_model_id):
        self.add_event('before_training', initial_model_id=initial_model_id)

    def _m_after_training(self):
        self.add_event('after_training')

    def _m_before_child_training(self, model_id):
        self.add_event('before_child_training', model_id=model_id)

    def _m_after_child_training(self, model_id):
        self.add_event('after_child_training', model_id=model_id)
class EnsembleTrainerWithLogger(EnsembleTrainer, _EnsembleTrainerMixin):
    """EnsembleTrainer whose lifecycle hooks are recorded in an EventCollector."""

    def __init__(self, collector=None, **kwargs):
        super(EnsembleTrainerWithLogger, self).__init__(**kwargs)
        # Install the logging wrappers after the trainer is fully constructed.
        self._init_mixin(collector=collector)
class BinarySplitter(BaseEstimator):
    """Trivial test classifier thresholding a single feature at `boundary`.

    predict() labels a sample 1 when its feature is >= boundary;
    predict_proba() is a sigmoid centered on the boundary (rows sum to 1).
    Expects X of shape (n_samples, 1).
    """

    def __init__(self, boundary=.0):
        self.boundary = boundary
        self.classes_ = np.asarray([0, 1], dtype=np.int32)

    def fit(self, X, y, sample_weight=None):
        """No-op fit; logs the sample weights for test inspection."""
        # Robustness fix: the original crashed on the default
        # sample_weight=None (list() over a 0-d array).
        if sample_weight is not None:
            getLogger(__name__).debug(
                list(np.asarray(sample_weight, dtype=np.float32)))
        return self

    def predict(self, X):
        # BUGFIX: the `boundary` parameter was ignored (threshold was
        # hard-coded at 0.); using self.boundary generalizes while keeping
        # the default behaviour identical.
        return np.asarray(X >= self.boundary, dtype=np.int32).reshape([-1])

    def predict_proba(self, X):
        """Return [[P(0), P(1)], ...]; sigmoid centered on self.boundary."""
        p = 1. / (1 + np.exp(-(X - self.boundary)))
        return np.concatenate([1 - p, p], axis=1)
class MultimodalSplitter(BaseEstimator):
    """Test classifier splitting a 1-D feature into len(boundaries)+1 classes.

    Class k covers the interval delimited by consecutive boundaries;
    predict_proba builds a soft (sigmoid-based) membership curve per class
    and normalizes each row to sum to 1.

    NOTE(review): requires at least two boundaries -- `self.distances`
    would otherwise be empty and _make_proba_components would fail;
    confirm against the tests that construct this helper.
    """

    def __init__(self, boundaries):
        self.boundaries = np.asarray(boundaries, dtype=np.float32)
        # Mid-points and widths of consecutive boundary pairs.
        self.middles = (self.boundaries[1:] + self.boundaries[:-1]) * .5
        self.distances = self.boundaries[1:] - self.boundaries[:-1]
        self.classes_ = np.arange(len(boundaries)+1, dtype=np.int32)

    def fit(self, X, y, sample_weight=None):
        """No-op fit; logs the sample weights for test inspection."""
        # Robustness fix: guard against the default sample_weight=None,
        # which made the original crash.
        if sample_weight is not None:
            getLogger(__name__).debug(
                list(np.asarray(sample_weight, dtype=np.float32)))
        # BUGFIX: fit() must return self (sklearn estimator convention,
        # and consistent with BinarySplitter above).
        return self

    def predict(self, X):
        return np.argmax(self.predict_proba(X), axis=1)

    def _make_proba_components(self, X, sigmoid, select):
        """Build one unnormalized membership curve per class.

        `sigmoid` and `select` are injected so the same construction serves
        both the numpy backend (predict_proba) and the tensorflow backend
        (make_multimodal_tf_splitter).
        """
        proba = []
        # the first component: grows as X falls below the first boundary
        left = self.boundaries[0] - self.distances[0] * .5
        proba.append(sigmoid((left - X) / self.distances[0] * 10.))
        # the middle components: bumps peaking between boundary pairs
        for b, d in zip(self.boundaries, self.distances):
            m = b + d * .5
            proba.append(
                select(
                    X < m,
                    sigmoid((X - m) / d * 10.),
                    sigmoid((m - X) / d * 10.)
                )
            )
        # the last component: grows as X exceeds the last boundary
        right = self.boundaries[-1] + self.distances[-1] * .5
        proba.append(sigmoid((X - right) / self.distances[-1] * 10.))
        return proba

    def predict_proba(self, X):
        def sigmoid(x):
            return 1./(1.+np.exp(-x))

        def select(cond, x, y):
            # Elementwise cond ? x : y for numeric arrays.
            cond = np.asarray(cond, dtype=np.int32)
            return cond * x + (1 - cond) * y

        if len(X.shape) == 2:
            X = X.reshape([-1])
        proba = self._make_proba_components(X, sigmoid, select)
        # gather the per-class columns and normalize each row to sum to 1
        for i in range(len(proba)):
            proba[i] = proba[i].reshape([-1, 1])
        proba = np.concatenate(proba, axis=1)
        proba = proba / np.sum(proba, axis=1).reshape([-1, 1])
        return proba
def make_multimodal_tf_splitter(boundaries, x):
    """TensorFlow mirror of MultimodalSplitter.predict_proba for tensor `x`.

    NOTE(review): written against the pre-1.0 TF API (tf.select,
    tf.concat(dim, values), keep_dims); modern TF would need tf.where and
    tf.concat(values, axis=...).  Confirm the pinned TF version before
    modernizing.
    """
    sp = MultimodalSplitter(boundaries)
    proba = sp._make_proba_components(x, tf.sigmoid, tf.select)
    # gather the per-class columns and normalize each row to sum to 1
    for i in range(len(proba)):
        proba[i] = tf.reshape(proba[i], [-1, 1])
    proba = tf.concat(1, proba)
    proba = proba / tf.reduce_sum(proba, [1], keep_dims=True)
    return proba
| # -*- coding: utf-8 -*-
from logging import getLogger
import numpy as np
import six
import tensorflow as tf
from sklearn.base import BaseEstimator
from madoka.train import Monitor, EnsembleTrainer
from madoka.utils import flatten_list
from madoka.utils.tfhelper import ensure_default_session
if six.PY2:
from itertools import izip_longest as zip_longest
else:
from itertools import zip_longest
class Event(object):
def __init__(self, name, args):
self.name = name
self.args = sorted(six.iteritems(args))
def __repr__(self):
if self.args:
args_repr = ','.join('%s=%s' % a for a in self.args)
return '%s:%s' % (self.name, args_repr)
else:
return self.name
class EventCollector(object):
def __init__(self):
self._events = []
def add(self, e):
self._events.append(e)
def dump(self):
return '\n'.join(repr(r) for r in self._events)
def match(self, patterns):
mismatch = False
marks = []
for i, (e, p) in enumerate(zip(self._events, patterns)):
if isinstance(p, six.string_types):
not_match = p != repr(e)
elif hasattr(p, 'match'):
not_match = not p.match(repr(e))
elif callable(p):
not_match = not p(e)
else:
raise TypeError('%r is not a pattern.' % (p,))
if not_match:
mismatch = True
marks.append('[x] ')
else:
marks.append(' ')
if len(self._events) > len(patterns):
mismatch = True
marks += ['[+] '] * (len(self._events) - len(patterns))
if len(self._events) < len(patterns):
mismatch = True
marks += ['[-] '] * (len(patterns) - len(self._events))
if mismatch:
msg = [
'Events mismatch:',
' expected events:',
'\n'.join(' %s' %
(e.pattern if hasattr(e, 'pattern') else e)
for e in patterns),
' actual events:',
'\n'.join('%s%s' % (m, e or '(null)')
for e, m in zip_longest(self._events, marks))
]
raise AssertionError('\n'.join(msg))
def __iter__(self):
return iter(self._events)
def __len__(self):
return len(self._events)
class MonitorEventLogger(Monitor):
def __init__(self, name='MonitorEventLogger', break_after_steps=None,
collector=None):
super(MonitorEventLogger, self).__init__(name)
self.events = EventCollector() if collector is None else collector
self.break_after_steps = break_after_steps
def add_event(self, name, **kwargs):
self.events.add(Event(name, kwargs))
def before_training(self):
self.add_event('before_training')
def start_training(self, batch_size, epoch_steps, max_steps, initial_step):
self.add_event(
'start_training', batch_size=batch_size, epoch_steps=epoch_steps,
max_steps=max_steps, initial_step=initial_step
)
def end_training(self, has_error=False):
"""Notify the monitor that a training process has finished.
It will be triggered whether or not any error has taken place.
Parameters
----------
has_error : bool
Whether or not any error has occurred during training.
"""
self.add_event('end_training', has_error=has_error)
def start_epoch(self, epoch):
"""Notify the monitor that a training epoch will start.
Parameters
----------
epoch : int
Index of the epoch, starting from 0.
"""
self.add_event('start_epoch', epoch=epoch)
def end_epoch(self, epoch, avg_loss):
"""Notify the monitor that a training epoch has completed.
Parameters
----------
epoch : int
Index of the epoch, starting from 0.
avg_loss : float
Average training loss of all steps in this epoch.
Would be None if the training process does not evolve a loss.
"""
self.add_event('end_epoch', epoch=epoch, avg_loss=avg_loss)
def start_step(self, step):
"""Notify the monitor that a training step (mini-batch) will start.
Parameters
----------
step : int
Index of the step, starting from 0.
This should be the total number of steps have ever been performed
since the whole training process started, not from the start of
this epoch.
"""
self.add_event('start_step', step=step)
def end_step(self, step, loss):
"""Notify the monitor that a training step (mini-batch) has completed.
Parameters
----------
step : int
Index of the step, starting from 0.
loss : float
Training loss of this step.
Would be None if the training process does not evolve a loss.
"""
self.add_event('end_step', step=step, loss=loss)
# Note that `step` counts from zero.
if self.break_after_steps is not None \
and self.break_after_steps <= step + 1:
raise KeyboardInterrupt()
class SummaryEvent(Event):
def __init__(self, step, summary):
s = tf.Summary()
s.ParseFromString(summary)
values = {}
for val in s.value:
values[val.tag] = val.simple_value
super(SummaryEvent, self).__init__(step, values)
class SummaryWriterLogger(object):
def __init__(self, collector=None):
self.events = EventCollector() if collector is None else collector
def write(self, summary, global_step=None, givens=None):
session = ensure_default_session()
if isinstance(summary, (list, tuple)):
summary = flatten_list(summary)
if isinstance(summary, (tf.Tensor, tf.Variable)):
summary = session.run(summary, feed_dict=givens)
self.events.add(SummaryEvent(global_step, summary))
return self
class _EnsembleTrainerMixin:
def _init_mixin(self, collector=None):
self.events = EventCollector() if collector is None else collector
def make_wrapper(method, logger):
@six.wraps(method)
def inner(*args, **kwargs):
logger(*args, **kwargs)
return method(*args, **kwargs)
return inner
for k in ('_prepare_data_flow_for_child',
'_before_training', '_after_training',
'_before_child_training', '_after_child_training'):
mk = '_m' + k
setattr(self, k, make_wrapper(getattr(self, k), getattr(self, mk)))
def add_event(self, name, **kwargs):
self.events.add(Event(name, kwargs))
def _m_prepare_data_flow_for_child(self, model_id, train_flow):
self.add_event('prepare_data_flow_for_child', model_id=model_id)
def _m_before_training(self, initial_model_id):
self.add_event('before_training', initial_model_id=initial_model_id)
def _m_after_training(self):
self.add_event('after_training')
def _m_before_child_training(self, model_id):
self.add_event('before_child_training', model_id=model_id)
def _m_after_child_training(self, model_id):
self.add_event('after_child_training', model_id=model_id)
class EnsembleTrainerWithLogger(EnsembleTrainer, _EnsembleTrainerMixin):
def __init__(self, collector=None, **kwargs):
super(EnsembleTrainerWithLogger, self).__init__(**kwargs)
self._init_mixin(collector=collector)
class BinarySplitter(BaseEstimator):
    """A trivial classifier that splits inputs at a fixed decision boundary.

    Predicts class 1 for values ``>= boundary`` and class 0 otherwise;
    probabilities come from a sigmoid centred on the boundary. With the
    default ``boundary=0.`` this is identical to the previous behavior.
    """

    def __init__(self, boundary=.0):
        self.boundary = boundary
        self.classes_ = np.asarray([0, 1], dtype=np.int32)

    def fit(self, X, y, sample_weight=None):
        """No-op fit; logs the sample weights and returns self (sklearn API)."""
        # Bug fix: the original crashed when sample_weight was None
        # (np.asarray(None, dtype=np.float32) raises).
        if sample_weight is not None:
            getLogger(__name__).debug(
                list(np.asarray(sample_weight, dtype=np.float32)))
        return self

    def predict(self, X):
        """Return 0/1 labels: 1 where X >= the configured boundary."""
        # Bug fix: the original hard-coded 0. and silently ignored
        # ``self.boundary``.
        return np.asarray(X >= self.boundary, dtype=np.int32).reshape([-1])

    def predict_proba(self, X):
        """Return an (n, 2) array of class probabilities via a sigmoid.

        Assumes X is a column vector of shape (n, 1) so the concatenation
        yields one row per sample.
        """
        # Shift by the boundary so P(class 1) == 0.5 exactly at it;
        # equivalent to the original when boundary == 0.
        p = 1. / (1 + np.exp(-(X - self.boundary)))
        return np.concatenate([1 - p, p], axis=1)
class MultimodalSplitter(BaseEstimator):
    """Classifier assigning inputs to one of ``len(boundaries) + 1`` bins.

    Bin membership scores are smooth sigmoids around each boundary, scaled
    by the distance between neighbouring boundaries, then row-normalized
    into probabilities.
    """

    def __init__(self, boundaries):
        self.boundaries = np.asarray(boundaries, dtype=np.float32)
        # Midpoints between consecutive boundaries (kept for introspection).
        self.middles = (self.boundaries[1:] + self.boundaries[:-1]) * .5
        self.distances = self.boundaries[1:] - self.boundaries[:-1]
        self.classes_ = np.arange(len(boundaries) + 1, dtype=np.int32)

    def fit(self, X, y, sample_weight=None):
        """No-op fit; logs the sample weights and returns self (sklearn API)."""
        # Bug fixes: the original did not return self (breaking
        # ``est.fit(X, y).predict(X)`` chaining and differing from
        # BinarySplitter.fit) and crashed when sample_weight was None.
        if sample_weight is not None:
            getLogger(__name__).debug(
                list(np.asarray(sample_weight, dtype=np.float32)))
        return self

    def predict(self, X):
        """Return the index of the most probable bin for each sample."""
        return np.argmax(self.predict_proba(X), axis=1)

    def _make_proba_components(self, X, sigmoid, select):
        """Build the list of unnormalized per-bin scores for X.

        ``sigmoid`` and ``select`` are injected so the same computation can
        run on numpy arrays or on TensorFlow tensors (see
        make_multimodal_tf_splitter).
        """
        proba = []
        # The leftmost bin: score grows as X falls below the first boundary.
        left = self.boundaries[0] - self.distances[0] * .5
        proba.append(sigmoid((left - X) / self.distances[0] * 10.))
        # Interior bins: a symmetric bump centred between two boundaries.
        for b, d in zip(self.boundaries, self.distances):
            m = b + d * .5
            proba.append(
                select(
                    X < m,
                    sigmoid((X - m) / d * 10.),
                    sigmoid((m - X) / d * 10.)
                )
            )
        # The rightmost bin: score grows as X rises above the last boundary.
        right = self.boundaries[-1] + self.distances[-1] * .5
        proba.append(sigmoid((X - right) / self.distances[-1] * 10.))
        return proba

    def predict_proba(self, X):
        """Return an (n, len(boundaries) + 1) row-normalized probability matrix."""
        def sigmoid(x):
            return 1. / (1. + np.exp(-x))

        def select(cond, x, y):
            cond = np.asarray(cond, dtype=np.int32)
            return cond * x + (1 - cond) * y

        if len(X.shape) == 2:
            X = X.reshape([-1])
        proba = self._make_proba_components(X, sigmoid, select)
        # Stack per-bin columns and normalize each row to sum to 1.
        proba = [p.reshape([-1, 1]) for p in proba]
        proba = np.concatenate(proba, axis=1)
        return proba / np.sum(proba, axis=1).reshape([-1, 1])
def make_multimodal_tf_splitter(boundaries, x):
    """Build a TensorFlow graph computing MultimodalSplitter probabilities.

    Reuses MultimodalSplitter._make_proba_components with TF ops injected,
    so the symbolic result matches the numpy predict_proba.

    NOTE(review): tf.select, tf.concat(dim, values) and keep_dims are the
    pre-1.0 TensorFlow API — presumably matching the TF version this file
    targets; confirm before upgrading TF.
    """
    splitter = MultimodalSplitter(boundaries)
    components = splitter._make_proba_components(x, tf.sigmoid, tf.select)
    # Stack per-bin columns and normalize each row to sum to 1.
    components = [tf.reshape(c, [-1, 1]) for c in components]
    proba = tf.concat(1, components)
    return proba / tf.reduce_sum(proba, [1], keep_dims=True)
rio/commands/__init__.py | soasme/rio | 0 | 6622470 | # -*- coding: utf-8 -*-
from .runworker import RunWorkerCommand
from .syncproject import SyncProjectCommand
| # -*- coding: utf-8 -*-
from .runworker import RunWorkerCommand
from .syncproject import SyncProjectCommand
| en | 0.769321 | # -*- coding: utf-8 -*- | 1.005646 | 1 |
api/__init__.py | yezz123/Apollo | 18 | 6622471 | <reponame>yezz123/Apollo
#!/usr/bin/python3
from sqlalchemy.orm import Session
from models import models
from schemas import schemas
import bcrypt
import requests
from requests.auth import HTTPBasicAuth
import json
from datetime import datetime
import base64 | #!/usr/bin/python3
from sqlalchemy.orm import Session
from models import models
from schemas import schemas
import bcrypt
import requests
from requests.auth import HTTPBasicAuth
import json
from datetime import datetime
import base64 | fr | 0.386793 | #!/usr/bin/python3 | 1.40665 | 1 |
api/ai_tools/ai_model.py | urbrob/ai-pacman | 0 | 6622472 | from pymongo import MongoClient
from tensorflow.keras.datasets import mnist
from tensorflow.keras import models, layers
from tensorflow.keras.utils import to_categorical
from numpy import array
direction_map = {"w": 1, "s": 0, "d": 2, "a": 3}
def format_database_data_to_list(cursor):
formated_data_x = []
formated_data_y = []
for row in cursor:
formated_data_x.append([
row["pacman"]["x"],
row["pacman"]["y"],
row["ghost_1"]["x"],
row["ghost_1"]["y"],
row["ghost_2"]["x"],
row["ghost_2"]["y"],
row["ghost_3"]["x"],
row["ghost_3"]["y"],
row["ghost_4"]["x"],
row["ghost_4"]["y"],
])
formated_data_y.append(direction_map[row["direction"]])
return formated_data_x, formated_data_y
client = MongoClient("mongodb://root:pass@mongodb:27017/db?authSource=admin")
db = client.db.move
database_records = db.find({})
data_x, data_y = format_database_data_to_list(database_records)
data_x = array(data_x)
data_y = to_categorical(data_y)
model = models.Sequential()
model.add(layers.Dense(512, activation="relu", input_shape=(10,)))
model.add(layers.Dense(4, activation="softmax"))
model.compile(optimizer="rmsprop", loss="categorical_crossentropy", metrics=["accuracy"])
model.fit(data_x, data_y, epochs=35, batch_size=128)
model.save("pacman.h5")
| from pymongo import MongoClient
from tensorflow.keras.datasets import mnist
from tensorflow.keras import models, layers
from tensorflow.keras.utils import to_categorical
from numpy import array
direction_map = {"w": 1, "s": 0, "d": 2, "a": 3}
def format_database_data_to_list(cursor):
formated_data_x = []
formated_data_y = []
for row in cursor:
formated_data_x.append([
row["pacman"]["x"],
row["pacman"]["y"],
row["ghost_1"]["x"],
row["ghost_1"]["y"],
row["ghost_2"]["x"],
row["ghost_2"]["y"],
row["ghost_3"]["x"],
row["ghost_3"]["y"],
row["ghost_4"]["x"],
row["ghost_4"]["y"],
])
formated_data_y.append(direction_map[row["direction"]])
return formated_data_x, formated_data_y
client = MongoClient("mongodb://root:pass@mongodb:27017/db?authSource=admin")
db = client.db.move
database_records = db.find({})
data_x, data_y = format_database_data_to_list(database_records)
data_x = array(data_x)
data_y = to_categorical(data_y)
model = models.Sequential()
model.add(layers.Dense(512, activation="relu", input_shape=(10,)))
model.add(layers.Dense(4, activation="softmax"))
model.compile(optimizer="rmsprop", loss="categorical_crossentropy", metrics=["accuracy"])
model.fit(data_x, data_y, epochs=35, batch_size=128)
model.save("pacman.h5")
| none | 1 | 2.494221 | 2 | |
b3j0f/conf/parser/test/core.py | b3j0f/configuration | 3 | 6622473 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# --------------------------------------------------------------------
# The MIT License (MIT)
#
# Copyright (c) 2014 <NAME> <<EMAIL>>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# --------------------------------------------------------------------
"""parser UTs"""
from unittest import main
from b3j0f.utils.ut import UTCase
from ...model.conf import Configuration, configuration
from ...model.cat import category
from ...model.param import Parameter
from ..core import (
REGEX_REF, REGEX_FORMAT, REGEX_STR, REGEX_EXPR,
parse, serialize, _ref, ParserError, _strparser
)
class RegexRefTest(UTCase):
"""Test the regex ref."""
def test_pname(self):
test = '@test'
path, cname, history, pname = REGEX_REF.match(test).group(
'path', 'cname', 'history', 'pname'
)
self.assertEqual(pname, 'test')
self.assertFalse(path)
self.assertFalse(cname)
self.assertFalse(history)
def test_history_pname(self):
test = '@...test'
path, cname, history, pname = REGEX_REF.match(test).group(
'path', 'cname', 'history', 'pname'
)
self.assertEqual(pname, 'test')
self.assertEqual(history, '...')
self.assertFalse(path)
self.assertFalse(cname)
def test_cname_pname(self):
test = '@cat...test'
path, cname, history, pname = REGEX_REF.match(test).group(
'path', 'cname', 'history', 'pname'
)
self.assertEqual(pname, 'test')
self.assertEqual(history, '..')
self.assertEqual(cname, 'cat')
self.assertFalse(path)
def test_path_pname(self):
test = '@ex\@mpl/e/..test'
path, cname, history, pname = REGEX_REF.match(test).group(
'path', 'cname', 'history', 'pname'
)
self.assertEqual(pname, 'test')
self.assertEqual(history, '..')
self.assertFalse(cname)
self.assertEqual(path, 'ex\@mpl/e')
def test_path_cname_pname(self):
test = '@ex\@mpl/e/cat...test'
path, cname, history, pname = REGEX_REF.match(test).group(
'path', 'cname', 'history', 'pname'
)
self.assertEqual(pname, 'test')
self.assertEqual(history, '..')
self.assertEqual(cname, 'cat')
self.assertEqual(path, 'ex\@mpl/e')
class RegexFormatTest(UTCase):
"""Test format expression regex."""
def test_expr(self):
test = '%test%'
lang, expr = REGEX_FORMAT.search(test).group('lang', 'expr')
self.assertFalse(lang)
self.assertEqual(expr, 'test')
def test_lang(self):
test = '%py:test%'
lang, expr = REGEX_FORMAT.search(test).group('lang', 'expr')
self.assertEqual(lang, 'py')
self.assertEqual(expr, 'test')
class RegexStrTest(UTCase):
"""Test str expression regex."""
def test_expr(self):
test = '%test%'
lang, expr = REGEX_STR.search(test).group('lang', 'expr')
self.assertFalse(lang)
self.assertEqual(expr, 'test')
def test_lang(self):
test = '%py:test%'
lang, expr = REGEX_STR.search(test).group('lang', 'expr')
self.assertEqual(lang, 'py')
self.assertEqual(expr, 'test')
def test_pname(self):
test = '@test'
path, cname, history, pname = REGEX_STR.match(test).group(
'path', 'cname', 'history', 'pname'
)
self.assertEqual(pname, 'test')
self.assertFalse(path)
self.assertFalse(cname)
self.assertFalse(history)
def test_history_pname(self):
test = '@...test'
path, cname, history, pname = REGEX_STR.match(test).group(
'path', 'cname', 'history', 'pname'
)
self.assertEqual(pname, 'test')
self.assertEqual(history, '...')
self.assertFalse(path)
self.assertFalse(cname)
def test_cname_pname(self):
test = '@cat...test'
path, cname, history, pname = REGEX_STR.match(test).group(
'path', 'cname', 'history', 'pname'
)
self.assertEqual(pname, 'test')
self.assertEqual(history, '..')
self.assertEqual(cname, 'cat')
self.assertFalse(path)
def test_path_pname(self):
test = '@ex\@mpl/e/..test'
path, cname, history, pname = REGEX_STR.search(test).group(
'path', 'cname', 'history', 'pname'
)
self.assertEqual(pname, 'test')
self.assertEqual(history, '..')
self.assertFalse(cname)
self.assertEqual(path, 'ex\@mpl/e')
def test_path_cname_pname(self):
test = '@ex\@mpl/e/cat...test'
path, cname, history, pname = REGEX_STR.search(test).group(
'path', 'cname', 'history', 'pname'
)
self.assertEqual(pname, 'test')
self.assertEqual(history, '..')
self.assertEqual(cname, 'cat')
self.assertEqual(path, 'ex\@mpl/e')
def test_both(self):
test = u'@a%b%@c%js:d%@e'
matches = REGEX_STR.finditer(test)
values = [
['pname', 'a'],
['expr', 'b'],
['pname', 'c'],
['expr', 'd'],
['pname', 'e']
]
for index, match in enumerate(matches):
groupdict = match.groupdict()
value = values[index]
self.assertEqual(groupdict[value[0]], value[1])
def test_wrong_both(self):
test = '\%@a%b%\\@k@c%js:d%@e\@\\'
matches = REGEX_STR.finditer(test)
values = [
['antislash', '%'],
['pname', 'a'],
['expr', 'b'],
['antislash', '@'],
['pname', 'c'],
['expr', 'd'],
['pname', 'e'],
['antislash', '@'],
['antislash', '\\'],
]
for index, match in enumerate(matches):
groupdict = match.groupdict()
value = values[index]
self.assertEqual(groupdict[value[0]], value[1])
class ExprTest(UTCase):
"""Test full expression regex."""
def test_expr(self):
test = '=test'
lang, expr = REGEX_EXPR.match(test).group('lang', 'expr')
self.assertFalse(lang)
self.assertEqual(expr, 'test')
def test_lang(self):
test = '=py:test'
lang, expr = REGEX_EXPR.match(test).group('lang', 'expr')
self.assertEqual(lang, 'py')
self.assertEqual(expr, 'test')
class RefTest(UTCase):
"""Test the _ref function."""
def setUp(self):
self.pname = 'test'
self.count = 5
self.conf = Configuration()
for i in range(self.count):
cat = category(str(i), Parameter(name=self.pname, value=i))
self.conf += cat
def test_error(self):
self.assertRaises(ParserError, _ref, pname=self.pname)
def test_pname(self):
val = _ref(pname=self.pname, conf=self.conf)
self.assertEqual(self.count - 1, val.value)
def test_cname(self):
val = _ref(pname=self.pname, conf=self.conf, cname=str(self.count - 2))
self.assertEqual(val.value, self.count - 2)
def test_history(self):
val = _ref(pname=self.pname, history=0, conf=self.conf)
self.assertEqual(val.value, self.count - 1)
val = _ref(pname=self.pname, history=1, conf=self.conf)
self.assertEqual(val.value, self.count - 2)
def test_history_cname(self):
val = _ref(pname=self.pname, history=0, conf=self.conf, cname=str(self.count - 2))
self.assertEqual(val.value, self.count - 2)
val = _ref(pname=self.pname, history=1, conf=self.conf, cname=str(self.count - 2))
self.assertEqual(val.value, self.count - 3)
class StrParserTest(UTCase):
def test_empty(self):
value = _strparser(svalue='')
self.assertEqual(value, '')
def test_bool(self):
val = _strparser(svalue='0', ptype=bool)
self.assertIs(val, False)
val = _strparser(svalue='1', ptype=bool)
self.assertIs(val, True)
val = _strparser(svalue='true', ptype=bool)
self.assertIs(val, True)
val = _strparser(svalue='True', ptype=bool)
self.assertIs(val, True)
def test_list(self):
val = _strparser(svalue='1', ptype=list)
self.assertEqual(val, ['1'])
val = _strparser(svalue='', ptype=list)
self.assertFalse(val)
val = _strparser(svalue='1, 2, 3', ptype=list)
self.assertEqual(val, ['1', '2', '3'])
def test_format(self):
conf = configuration(category('', Parameter('se', value='es')))
svalue = 't%"es"%t'
val = _strparser(svalue=svalue, conf=conf, scope=None)
self.assertEqual(val, 'test')
def test_format_expr(self):
conf = configuration(category('', Parameter('se', value='es')))
svalue = '%"t"%@se%"t"%'
val = _strparser(svalue=svalue, conf=conf, scope=None)
self.assertEqual(val, 'test')
def test_wrong_format_expr(self):
conf = configuration(category('', Parameter('se', value='es')))
svalue = '\@e%"t"%\%@se%"t"%\\'
val = _strparser(svalue=svalue, conf=conf, scope=None)
self.assertEqual(val, '@et%est\\')
class ConfigurationTest(UTCase):
"""Base class of test which uses a local configuration."""
def setUp(self):
self.count = 5
self.cnames = [None] * self.count
self.pname = 'param'
self.pvalues = [None] * self.count
self.conf = Configuration()
for i in range(self.count):
self.cnames[i] = 'c{0}'.format(i)
self.pvalues[i] = i + 1
cat = category(
self.cnames[i],
Parameter('param', value=self.pvalues[i])
)
self.conf += cat
class SerializerTest(ConfigurationTest):
"""Test the function serializer."""
def test_str(self):
"""Test to serialize a string."""
value = 'test'
serialized = serialize(value)
self.assertEqual(value, serialized)
def test_none(self):
"""Test to serialize None."""
serialized = serialize(None)
self.assertIsNone(serialized)
def test_other(self):
"""Test to serialize other."""
types = [int, float, complex, dict, list, set]
for _type in types:
value = _type()
serialized = serialize(value)
self.assertEqual(serialized, '=py:{0}'.format(value))
class ParseTest(ConfigurationTest):
"""Test the parse function."""
def test_default(self):
"""Test default params."""
value = parse(svalue='=2')
self.assertEqual(value, 2)
def test_cname(self):
"""Test with cname."""
value = parse(svalue='2')
self.assertEqual(value, '2')
def test_nocname(self):
"""Test when category name does not exist."""
value = parse(svalue='t%"es"%t')
self.assertEqual(value, 'test')
def test_nopname(self):
"""Test when parameter name does not exist."""
value = parse(svalue='="test"')
self.assertEqual(value, 'test')
def test_expr_ref(self):
pname = 'test'
conf = configuration(category('', Parameter('test', value='ify')))
value = parse(svalue='="test" + @test', conf=conf)
self.assertEqual(value, 'testify')
def test_expr_wrong(self):
pname = 'test'
conf = configuration(category('', Parameter('test', value='ify')))
value = parse(svalue='="test" + "\@fgg"', conf=conf)
self.assertEqual(value, 'test@fgg')
if __name__ == '__main__':
main()
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
# --------------------------------------------------------------------
# The MIT License (MIT)
#
# Copyright (c) 2014 <NAME> <<EMAIL>>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# --------------------------------------------------------------------
"""parser UTs"""
from unittest import main
from b3j0f.utils.ut import UTCase
from ...model.conf import Configuration, configuration
from ...model.cat import category
from ...model.param import Parameter
from ..core import (
REGEX_REF, REGEX_FORMAT, REGEX_STR, REGEX_EXPR,
parse, serialize, _ref, ParserError, _strparser
)
class RegexRefTest(UTCase):
"""Test the regex ref."""
def test_pname(self):
test = '@test'
path, cname, history, pname = REGEX_REF.match(test).group(
'path', 'cname', 'history', 'pname'
)
self.assertEqual(pname, 'test')
self.assertFalse(path)
self.assertFalse(cname)
self.assertFalse(history)
def test_history_pname(self):
test = '@...test'
path, cname, history, pname = REGEX_REF.match(test).group(
'path', 'cname', 'history', 'pname'
)
self.assertEqual(pname, 'test')
self.assertEqual(history, '...')
self.assertFalse(path)
self.assertFalse(cname)
def test_cname_pname(self):
test = '@cat...test'
path, cname, history, pname = REGEX_REF.match(test).group(
'path', 'cname', 'history', 'pname'
)
self.assertEqual(pname, 'test')
self.assertEqual(history, '..')
self.assertEqual(cname, 'cat')
self.assertFalse(path)
def test_path_pname(self):
test = '@ex\@mpl/e/..test'
path, cname, history, pname = REGEX_REF.match(test).group(
'path', 'cname', 'history', 'pname'
)
self.assertEqual(pname, 'test')
self.assertEqual(history, '..')
self.assertFalse(cname)
self.assertEqual(path, 'ex\@mpl/e')
def test_path_cname_pname(self):
test = '@ex\@mpl/e/cat...test'
path, cname, history, pname = REGEX_REF.match(test).group(
'path', 'cname', 'history', 'pname'
)
self.assertEqual(pname, 'test')
self.assertEqual(history, '..')
self.assertEqual(cname, 'cat')
self.assertEqual(path, 'ex\@mpl/e')
class RegexFormatTest(UTCase):
"""Test format expression regex."""
def test_expr(self):
test = '%test%'
lang, expr = REGEX_FORMAT.search(test).group('lang', 'expr')
self.assertFalse(lang)
self.assertEqual(expr, 'test')
def test_lang(self):
test = '%py:test%'
lang, expr = REGEX_FORMAT.search(test).group('lang', 'expr')
self.assertEqual(lang, 'py')
self.assertEqual(expr, 'test')
class RegexStrTest(UTCase):
"""Test str expression regex."""
def test_expr(self):
test = '%test%'
lang, expr = REGEX_STR.search(test).group('lang', 'expr')
self.assertFalse(lang)
self.assertEqual(expr, 'test')
def test_lang(self):
test = '%py:test%'
lang, expr = REGEX_STR.search(test).group('lang', 'expr')
self.assertEqual(lang, 'py')
self.assertEqual(expr, 'test')
def test_pname(self):
test = '@test'
path, cname, history, pname = REGEX_STR.match(test).group(
'path', 'cname', 'history', 'pname'
)
self.assertEqual(pname, 'test')
self.assertFalse(path)
self.assertFalse(cname)
self.assertFalse(history)
def test_history_pname(self):
test = '@...test'
path, cname, history, pname = REGEX_STR.match(test).group(
'path', 'cname', 'history', 'pname'
)
self.assertEqual(pname, 'test')
self.assertEqual(history, '...')
self.assertFalse(path)
self.assertFalse(cname)
def test_cname_pname(self):
test = '@cat...test'
path, cname, history, pname = REGEX_STR.match(test).group(
'path', 'cname', 'history', 'pname'
)
self.assertEqual(pname, 'test')
self.assertEqual(history, '..')
self.assertEqual(cname, 'cat')
self.assertFalse(path)
def test_path_pname(self):
test = '@ex\@mpl/e/..test'
path, cname, history, pname = REGEX_STR.search(test).group(
'path', 'cname', 'history', 'pname'
)
self.assertEqual(pname, 'test')
self.assertEqual(history, '..')
self.assertFalse(cname)
self.assertEqual(path, 'ex\@mpl/e')
def test_path_cname_pname(self):
test = '@ex\@mpl/e/cat...test'
path, cname, history, pname = REGEX_STR.search(test).group(
'path', 'cname', 'history', 'pname'
)
self.assertEqual(pname, 'test')
self.assertEqual(history, '..')
self.assertEqual(cname, 'cat')
self.assertEqual(path, 'ex\@mpl/e')
def test_both(self):
test = u'@a%b%@c%js:d%@e'
matches = REGEX_STR.finditer(test)
values = [
['pname', 'a'],
['expr', 'b'],
['pname', 'c'],
['expr', 'd'],
['pname', 'e']
]
for index, match in enumerate(matches):
groupdict = match.groupdict()
value = values[index]
self.assertEqual(groupdict[value[0]], value[1])
def test_wrong_both(self):
test = '\%@a%b%\\@k@c%js:d%@e\@\\'
matches = REGEX_STR.finditer(test)
values = [
['antislash', '%'],
['pname', 'a'],
['expr', 'b'],
['antislash', '@'],
['pname', 'c'],
['expr', 'd'],
['pname', 'e'],
['antislash', '@'],
['antislash', '\\'],
]
for index, match in enumerate(matches):
groupdict = match.groupdict()
value = values[index]
self.assertEqual(groupdict[value[0]], value[1])
class ExprTest(UTCase):
"""Test full expression regex."""
def test_expr(self):
test = '=test'
lang, expr = REGEX_EXPR.match(test).group('lang', 'expr')
self.assertFalse(lang)
self.assertEqual(expr, 'test')
def test_lang(self):
test = '=py:test'
lang, expr = REGEX_EXPR.match(test).group('lang', 'expr')
self.assertEqual(lang, 'py')
self.assertEqual(expr, 'test')
class RefTest(UTCase):
"""Test the _ref function."""
def setUp(self):
self.pname = 'test'
self.count = 5
self.conf = Configuration()
for i in range(self.count):
cat = category(str(i), Parameter(name=self.pname, value=i))
self.conf += cat
def test_error(self):
self.assertRaises(ParserError, _ref, pname=self.pname)
def test_pname(self):
val = _ref(pname=self.pname, conf=self.conf)
self.assertEqual(self.count - 1, val.value)
def test_cname(self):
val = _ref(pname=self.pname, conf=self.conf, cname=str(self.count - 2))
self.assertEqual(val.value, self.count - 2)
def test_history(self):
val = _ref(pname=self.pname, history=0, conf=self.conf)
self.assertEqual(val.value, self.count - 1)
val = _ref(pname=self.pname, history=1, conf=self.conf)
self.assertEqual(val.value, self.count - 2)
def test_history_cname(self):
val = _ref(pname=self.pname, history=0, conf=self.conf, cname=str(self.count - 2))
self.assertEqual(val.value, self.count - 2)
val = _ref(pname=self.pname, history=1, conf=self.conf, cname=str(self.count - 2))
self.assertEqual(val.value, self.count - 3)
class StrParserTest(UTCase):
def test_empty(self):
value = _strparser(svalue='')
self.assertEqual(value, '')
def test_bool(self):
val = _strparser(svalue='0', ptype=bool)
self.assertIs(val, False)
val = _strparser(svalue='1', ptype=bool)
self.assertIs(val, True)
val = _strparser(svalue='true', ptype=bool)
self.assertIs(val, True)
val = _strparser(svalue='True', ptype=bool)
self.assertIs(val, True)
def test_list(self):
val = _strparser(svalue='1', ptype=list)
self.assertEqual(val, ['1'])
val = _strparser(svalue='', ptype=list)
self.assertFalse(val)
val = _strparser(svalue='1, 2, 3', ptype=list)
self.assertEqual(val, ['1', '2', '3'])
def test_format(self):
conf = configuration(category('', Parameter('se', value='es')))
svalue = 't%"es"%t'
val = _strparser(svalue=svalue, conf=conf, scope=None)
self.assertEqual(val, 'test')
def test_format_expr(self):
conf = configuration(category('', Parameter('se', value='es')))
svalue = '%"t"%@se%"t"%'
val = _strparser(svalue=svalue, conf=conf, scope=None)
self.assertEqual(val, 'test')
def test_wrong_format_expr(self):
conf = configuration(category('', Parameter('se', value='es')))
svalue = '\@e%"t"%\%@se%"t"%\\'
val = _strparser(svalue=svalue, conf=conf, scope=None)
self.assertEqual(val, '@et%est\\')
class ConfigurationTest(UTCase):
"""Base class of test which uses a local configuration."""
def setUp(self):
self.count = 5
self.cnames = [None] * self.count
self.pname = 'param'
self.pvalues = [None] * self.count
self.conf = Configuration()
for i in range(self.count):
self.cnames[i] = 'c{0}'.format(i)
self.pvalues[i] = i + 1
cat = category(
self.cnames[i],
Parameter('param', value=self.pvalues[i])
)
self.conf += cat
class SerializerTest(ConfigurationTest):
"""Test the function serializer."""
def test_str(self):
"""Test to serialize a string."""
value = 'test'
serialized = serialize(value)
self.assertEqual(value, serialized)
def test_none(self):
"""Test to serialize None."""
serialized = serialize(None)
self.assertIsNone(serialized)
def test_other(self):
"""Test to serialize other."""
types = [int, float, complex, dict, list, set]
for _type in types:
value = _type()
serialized = serialize(value)
self.assertEqual(serialized, '=py:{0}'.format(value))
class ParseTest(ConfigurationTest):
"""Test the parse function."""
def test_default(self):
"""Test default params."""
value = parse(svalue='=2')
self.assertEqual(value, 2)
def test_cname(self):
"""Test with cname."""
value = parse(svalue='2')
self.assertEqual(value, '2')
def test_nocname(self):
"""Test when category name does not exist."""
value = parse(svalue='t%"es"%t')
self.assertEqual(value, 'test')
def test_nopname(self):
"""Test when parameter name does not exist."""
value = parse(svalue='="test"')
self.assertEqual(value, 'test')
def test_expr_ref(self):
pname = 'test'
conf = configuration(category('', Parameter('test', value='ify')))
value = parse(svalue='="test" + @test', conf=conf)
self.assertEqual(value, 'testify')
def test_expr_wrong(self):
pname = 'test'
conf = configuration(category('', Parameter('test', value='ify')))
value = parse(svalue='="test" + "\@fgg"', conf=conf)
self.assertEqual(value, 'test@fgg')
if __name__ == '__main__':
main()
| en | 0.668956 | #!/usr/bin/env python # -*- coding: utf-8 -*- # -------------------------------------------------------------------- # The MIT License (MIT) # # Copyright (c) 2014 <NAME> <<EMAIL>> # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. # -------------------------------------------------------------------- parser UTs Test the regex ref. Test format expression regex. Test str expression regex. Test full expression regex. Test the _ref function. Base class of test which uses a local configuration. Test the function serializer. Test to serialize a string. Test to serialize None. Test to serialize other. Test the parse function. Test default params. Test with cname. Test when category name does not exist. Test when parameter name does not exist. | 1.477993 | 1 |
finetune/src/dataset.py | affjljoo3581/Samsung-AI-Challenge-for-Scientific-Discovery | 35 | 6622474 | import os
from typing import Dict, List, Optional, Tuple, Union
import numpy as np
import pandas as pd
from torch.utils.data import Dataset
from encoding import MolecularEncoder
ST1_ENERGY_GAP_MEAN = 0.8486
ST1_ENERGY_GAP_STD = 0.3656
class SSDDataset(Dataset):
"""A dataset class for `Samsung AI Challenge For Scientific Discovery` competition.
Args:
dataset: A pandas dataframe object containing energy informations.
structure_files: A list of SDF molfiles.
encoder: A molecular structure encoder.
bond_drop_prob: The probability of dropping molecular bonds. Default is `0.1`.
"""
def __init__(
self,
dataset: pd.DataFrame,
structure_files: List[str],
encoder: MolecularEncoder,
bond_drop_prob: float = 0.1,
):
self.examples = []
self.encoder = encoder
self.bond_drop_prob = bond_drop_prob
for structure_file in structure_files:
example = {"uid": os.path.basename(structure_file)[:-4]}
with open(structure_file, "r") as fp:
example["structure"] = parse_mol_structure(fp.read())
if example["structure"] is None:
continue
if "S1_energy(eV)" in dataset and "T1_energy(eV)" in dataset:
s1_energy = dataset.loc[example["uid"], "S1_energy(eV)"]
t1_energy = dataset.loc[example["uid"], "T1_energy(eV)"]
labels = s1_energy - t1_energy
labels = (labels - ST1_ENERGY_GAP_MEAN) / ST1_ENERGY_GAP_STD
example["labels"] = labels
self.examples.append(example)
def __len__(self) -> int:
return len(self.examples)
def __getitem__(
self, index: int
) -> Tuple[str, Dict[str, Union[str, List[Union[int, float]]]]]:
example = self.examples[index]
if np.random.rand() < self.bond_drop_prob:
# We will drop the molecular bonds with probability of 15%. That is, the
# expectation of the number of dropped molecular bonds is 85% of the
# original one. Note that you can only control the molecular selecting
# probability, not the individual bond dropping probability.
structure = example["structure"].copy()
structure["bonds"] = [
bond for bond in structure["bonds"] if np.random.rand() > 0.15
]
example["structure"] = structure
encoding = self.encoder.encode(example["structure"])
if "labels" in example:
encoding["labels"] = example["labels"]
return (example["uid"], encoding)
def parse_mol_structure(data: str) -> Optional[Dict]:
    """Parse a V2000 SDF molfile into a simple structure dictionary.

    Args:
        data: The content of the SDF molfile.

    Returns:
        A dictionary with:
        * ``atoms``: ``[x, y, z, symbol, charge]`` per atom, where ``charge``
          is an integer formal charge or a radical marker string
          (``":"``, ``"^"`` or ``"^^"``).
        * ``bonds``: ``[first_atom, second_atom, bond_type]`` per bond with
          zero-based atom indices.
        ``None`` when the content is too short to be a molfile.
    """
    lines = data.splitlines()
    if len(lines) < 4:
        return None
    # Skip the three header lines; the counts line comes first afterwards.
    lines = lines[3:]
    num_atoms, num_bonds = int(lines[0][:3]), int(lines[0][3:6])
    atoms = []
    for line in lines[1 : 1 + num_atoms]:
        # Fixed-width atom block: x/y/z in 10-character columns, the element
        # symbol at columns 31-34 and the legacy charge code at columns 36-39.
        x, y, z = float(line[:10]), float(line[10:20]), float(line[20:30])
        # Legacy charge codes: 0->0, 1->+3, 2->+2, 3->+1,
        # 4->doublet radical ("^"), 5->-1, 6->-2, 7->-3.
        charge = [0, 3, 2, 1, "^", -1, -2, -3][int(line[36:39])]
        atoms.append([x, y, z, line[31:34].strip(), charge])
    bonds = []
    for line in lines[1 + num_atoms : 1 + num_atoms + num_bonds]:
        # Convert the one-based atom indices of the bond block to zero-based.
        bonds.append([int(line[:3]) - 1, int(line[3:6]) - 1, int(line[6:9])])
    for line in lines[1 + num_atoms + num_bonds :]:
        # Property block: "M  CHG" overrides formal charges, "M  RAD" radical
        # markers. FIX: the V2000 prefixes contain two spaces; the previous
        # single-space prefixes ("M CHG"/"M RAD") never matched a standard
        # molfile — which is also what the column offsets below (count at
        # [6:9], entries at [10+8i:13+8i]/[14+8i:17+8i]) require.
        if not line.startswith("M  CHG") and not line.startswith("M  RAD"):
            continue
        for i in range(int(line[6:9])):
            idx = int(line[10 + 8 * i : 13 + 8 * i]) - 1
            value = int(line[14 + 8 * i : 17 + 8 * i])
            atoms[idx][4] = (
                [":", "^", "^^"][value - 1] if line.startswith("M  RAD") else value
            )
    return {"atoms": atoms, "bonds": bonds}
| import os
from typing import Dict, List, Optional, Tuple, Union
import numpy as np
import pandas as pd
from torch.utils.data import Dataset
from encoding import MolecularEncoder
# Mean and standard deviation of the S1-T1 energy gap used to standardize the
# regression labels (presumably computed over the training corpus — confirm).
ST1_ENERGY_GAP_MEAN = 0.8486
ST1_ENERGY_GAP_STD = 0.3656
class SSDDataset(Dataset):
    """A dataset class for the `Samsung AI Challenge For Scientific Discovery` competition.

    Each example holds a parsed 3D molecular structure and, when the energy
    columns are present in ``dataset``, a standardized S1-T1 energy-gap label.

    Args:
        dataset: A pandas dataframe, indexed by molecule uid, containing the
            optional energy columns ``S1_energy(eV)`` and ``T1_energy(eV)``.
        structure_files: A list of SDF molfile paths.
        encoder: A molecular structure encoder.
        bond_drop_prob: The probability of selecting a molecule for bond-drop
            augmentation. Default is `0.1`.
    """
    def __init__(
        self,
        dataset: pd.DataFrame,
        structure_files: List[str],
        encoder: MolecularEncoder,
        bond_drop_prob: float = 0.1,
    ):
        self.examples = []
        self.encoder = encoder
        self.bond_drop_prob = bond_drop_prob
        for structure_file in structure_files:
            # The uid is the molfile name without its 4-character extension.
            example = {"uid": os.path.basename(structure_file)[:-4]}
            with open(structure_file, "r") as fp:
                example["structure"] = parse_mol_structure(fp.read())
            # Skip files that cannot be parsed into a structure.
            if example["structure"] is None:
                continue
            if "S1_energy(eV)" in dataset and "T1_energy(eV)" in dataset:
                s1_energy = dataset.loc[example["uid"], "S1_energy(eV)"]
                t1_energy = dataset.loc[example["uid"], "T1_energy(eV)"]
                labels = s1_energy - t1_energy
                # Standardize the S1-T1 gap with the corpus statistics.
                labels = (labels - ST1_ENERGY_GAP_MEAN) / ST1_ENERGY_GAP_STD
                example["labels"] = labels
            self.examples.append(example)
    def __len__(self) -> int:
        return len(self.examples)
    def __getitem__(
        self, index: int
    ) -> Tuple[str, Dict[str, Union[str, List[Union[int, float]]]]]:
        example = self.examples[index]
        structure = example["structure"]
        if np.random.rand() < self.bond_drop_prob:
            # Bond-drop augmentation: with probability ``bond_drop_prob`` the
            # molecule is selected, and each of its bonds is then dropped
            # independently with probability 0.15 (~85% of the bonds are kept
            # in expectation).
            #
            # FIX: keep the augmented copy local. The original wrote it back
            # into ``self.examples``, which permanently — and cumulatively,
            # across epochs — removed bonds from the stored structure.
            structure = dict(structure)
            structure["bonds"] = [
                bond for bond in structure["bonds"] if np.random.rand() > 0.15
            ]
        encoding = self.encoder.encode(structure)
        if "labels" in example:
            encoding["labels"] = example["labels"]
        return (example["uid"], encoding)
def parse_mol_structure(data: str) -> Optional[Dict]:
    """Parse a V2000 SDF molfile into a simple structure dictionary.

    Args:
        data: The content of the SDF molfile.

    Returns:
        A dictionary with:
        * ``atoms``: ``[x, y, z, symbol, charge]`` per atom, where ``charge``
          is an integer formal charge or a radical marker string
          (``":"``, ``"^"`` or ``"^^"``).
        * ``bonds``: ``[first_atom, second_atom, bond_type]`` per bond with
          zero-based atom indices.
        ``None`` when the content is too short to be a molfile.
    """
    lines = data.splitlines()
    if len(lines) < 4:
        return None
    # Skip the three header lines; the counts line comes first afterwards.
    lines = lines[3:]
    num_atoms, num_bonds = int(lines[0][:3]), int(lines[0][3:6])
    atoms = []
    for line in lines[1 : 1 + num_atoms]:
        # Fixed-width atom block: x/y/z in 10-character columns, the element
        # symbol at columns 31-34 and the legacy charge code at columns 36-39.
        x, y, z = float(line[:10]), float(line[10:20]), float(line[20:30])
        # Legacy charge codes: 0->0, 1->+3, 2->+2, 3->+1,
        # 4->doublet radical ("^"), 5->-1, 6->-2, 7->-3.
        charge = [0, 3, 2, 1, "^", -1, -2, -3][int(line[36:39])]
        atoms.append([x, y, z, line[31:34].strip(), charge])
    bonds = []
    for line in lines[1 + num_atoms : 1 + num_atoms + num_bonds]:
        # Convert the one-based atom indices of the bond block to zero-based.
        bonds.append([int(line[:3]) - 1, int(line[3:6]) - 1, int(line[6:9])])
    for line in lines[1 + num_atoms + num_bonds :]:
        # Property block: "M  CHG" overrides formal charges, "M  RAD" radical
        # markers. FIX: the V2000 prefixes contain two spaces; the previous
        # single-space prefixes ("M CHG"/"M RAD") never matched a standard
        # molfile — which is also what the column offsets below (count at
        # [6:9], entries at [10+8i:13+8i]/[14+8i:17+8i]) require.
        if not line.startswith("M  CHG") and not line.startswith("M  RAD"):
            continue
        for i in range(int(line[6:9])):
            idx = int(line[10 + 8 * i : 13 + 8 * i]) - 1
            value = int(line[14 + 8 * i : 17 + 8 * i])
            atoms[idx][4] = (
                [":", "^", "^^"][value - 1] if line.startswith("M  RAD") else value
            )
    return {"atoms": atoms, "bonds": bonds}
| en | 0.754438 | A dataset class for `Samsung AI Challenge For Scientific Discovery` competition. Args: dataset: A pandas dataframe object containing energy informations. structure_files: A list of SDF molfiles. encoder: A molecular structure encoder. bond_drop_prob: The probability of dropping molecular bonds. Default is `0.1`. # We will drop the molecular bonds with probability of 15%. That is, the # expectation of the number of dropped molecular bonds is 85% of the # original one. Note that you can only control the molecular selecting # probability, not the individual bond dropping probability. Parse a SDF molecular file to the simple structure dictionary. Args: data: The content of SDF molfile. Returns: The parsed 3D molecular structure dictionary. | 2.651193 | 3 |
backend/app/alembic/versions/6d371a5e1414_add_locations_table.py | dmitritruf/distiller-django-react | 2 | 6622475 | """Add locations table
Revision ID: 6d371a5e1414
Revises: <PASSWORD>2<PASSWORD>
Create Date: 2021-08-08 21:03:35.623650
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '<KEY>'
down_revision = 'a3b2fff032fd'
branch_labels = None
depends_on = None
def upgrade():
    """Apply the migration: create the ``locations`` table plus its id index.

    Each row records where a scan's data lives (host/path) and points back
    to its ``scans`` row via the ``scan_id`` foreign key.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('locations',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('host', sa.String(), nullable=True),
    sa.Column('path', sa.String(), nullable=True),
    sa.Column('scan_id', sa.Integer(), nullable=True),
    sa.ForeignKeyConstraint(['scan_id'], ['scans.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_locations_id'), 'locations', ['id'], unique=False)
    # ### end Alembic commands ###
def downgrade():
    """Revert the migration: drop the ``locations`` index and table."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_index(op.f('ix_locations_id'), table_name='locations')
    op.drop_table('locations')
    # ### end Alembic commands ###
| """Add locations table
Revision ID: 6d371a5e1414
Revises: <PASSWORD>2<PASSWORD>
Create Date: 2021-08-08 21:03:35.623650
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '<KEY>'
down_revision = 'a3b2fff032fd'
branch_labels = None
depends_on = None
def upgrade():
    """Apply the migration: create the ``locations`` table plus its id index.

    Each row records where a scan's data lives (host/path) and points back
    to its ``scans`` row via the ``scan_id`` foreign key.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('locations',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('host', sa.String(), nullable=True),
    sa.Column('path', sa.String(), nullable=True),
    sa.Column('scan_id', sa.Integer(), nullable=True),
    sa.ForeignKeyConstraint(['scan_id'], ['scans.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_locations_id'), 'locations', ['id'], unique=False)
    # ### end Alembic commands ###
def downgrade():
    """Revert the migration: drop the ``locations`` index and table."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_index(op.f('ix_locations_id'), table_name='locations')
    op.drop_table('locations')
    # ### end Alembic commands ###
| en | 0.538731 | Add locations table Revision ID: 6d371a5e1414 Revises: <PASSWORD>2<PASSWORD> Create Date: 2021-08-08 21:03:35.623650 # revision identifiers, used by Alembic. # ### commands auto generated by Alembic - please adjust! ### # ### end Alembic commands ### # ### commands auto generated by Alembic - please adjust! ### # ### end Alembic commands ### | 1.708187 | 2 |
restructuredBootstrap/media.py | lakhman/restructuredBootstrap | 1 | 6622476 | <reponame>lakhman/restructuredBootstrap<filename>restructuredBootstrap/media.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Copyright (c) 2017 - <NAME> - MIT License
# -----------------------------------------------------------------------------
from docutils import nodes
from docutils.parsers.rst import Directive, directives
from sphinx.util.nodes import set_source_info
class media(nodes.General, nodes.Element):
    """Generic docutils node for the Bootstrap media component.

    Rendered as a ``<div>`` (or ``<li>`` when its direct parent is a bullet
    list) carrying the node's CSS classes.
    """
    pass
def visit_generic_media(self, node):
    """Open the HTML tag for a ``media`` node.

    Emits ``<li>`` when the node sits directly inside a bullet list (a
    ``media-list``), ``<div>`` otherwise.
    """
    tag = 'div'
    if isinstance(node.parent, nodes.bullet_list):
        tag = 'li'
    self.body.append(self.starttag(node, tag, CLASS=" ".join(node['classes'])))
def depart_generic_media(self, node):
    """Close the HTML tag opened by :func:`visit_generic_media`."""
    tag = 'div'
    if isinstance(node.parent, nodes.bullet_list):
        tag = 'li'
    # Keep the output tidy: ensure the closing tag starts on its own line.
    if not self.body[-1].endswith("\n"):
        self.body[-1] += "\n"
    self.body.append('</%s>\n' % tag)
class Media(Directive):
    """The ``media`` directive: a Bootstrap media component container.

    The optional argument is parsed as extra CSS classes. The body is
    expected to contain ``media-left``/``media-right`` and ``media-body``
    sub-directives.
    """
    required_arguments, optional_arguments = 0, 1
    final_argument_whitespace = True
    has_content = True
    option_spec = {
        'name': directives.unchanged,
        'class': directives.class_option,
    }
    def run(self):
        self.assert_has_content()
        node = media()
        node.document = self.state.document
        set_source_info(self, node)
        node['classes'] = ['media']
        node['classes'] += self.options.get('class', [])
        if self.arguments:
            node['classes'] += directives.class_option(self.arguments[0])
        self.add_name(node)
        self.state.nested_parse(self.content, self.content_offset, node)
        # Bootstrap markup requires media-right to follow media-body, but
        # authors naturally write it first — reorder if needed.
        if isinstance(node.children[0], media) and \
                'media-right' in node.children[0]['classes']:
            # Move media-right to after media-body
            node.children.append(node.children.pop(0))
        return [node]
class MediaList(Directive):
    """The ``media-list`` directive: a ``media-list`` bullet list whose
    items are ``media`` components.
    """
    required_arguments, optional_arguments = 0, 1
    final_argument_whitespace = True
    has_content = True
    option_spec = {
        'name': directives.unchanged,
        'class': directives.class_option,
    }
    def run(self):
        self.assert_has_content()
        list_node = nodes.bullet_list()
        list_node.document = self.state.document
        set_source_info(self, list_node)
        # Base class first, then :class: option values, then the argument.
        css_classes = ['media-list'] + self.options.get('class', [])
        if self.arguments:
            css_classes += directives.class_option(self.arguments[0])
        list_node['classes'] = css_classes
        self.add_name(list_node)
        self.state.nested_parse(self.content, self.content_offset, list_node)
        return [list_node]
class MediaLeftRight(Directive):
    """The ``media-left`` and ``media-right`` directives.

    Both directive names are registered against this class; the directive
    name itself (``self.name``) becomes the CSS class, so a single
    implementation renders either side of a Bootstrap media component.
    A leading child image — possibly wrapped in a reference — is tagged
    with the ``media-object`` class.
    """
    required_arguments, optional_arguments = 0, 1
    final_argument_whitespace = True
    has_content = True
    option_spec = {
        'name': directives.unchanged,
        'class': directives.class_option,
    }
    def run(self):
        self.assert_has_content()
        node = media()
        node.document = self.state.document
        set_source_info(self, node)
        # The directive name ('media-left'/'media-right') doubles as the
        # CSS class of the generated node.
        node['classes'] = [self.name]
        node['classes'] += self.options.get('class', [])
        try:
            if self.arguments:
                node['classes'] += directives.class_option(self.arguments[0])
        except ValueError:
            err = 'Invalid class attribute value for "%s" directive: "%s".'
            raise self.error(err % (self.name, self.arguments[0]))
        self.add_name(node)
        self.state.nested_parse(self.content, self.content_offset, node)
        # Add media-object class to child images
        first_child = node.children[0]
        if isinstance(first_child, nodes.reference):
            # FIX: guard the index *before* dereferencing. The original
            # evaluated `node.children[0].children[0]` eagerly, which raised
            # IndexError when the reference node had no children.
            if first_child.children and \
                    isinstance(first_child.children[0], nodes.image):
                first_child.children[0]['classes'] += ['media-object']
        elif isinstance(first_child, nodes.image):
            first_child['classes'] += ['media-object']
        return [node]
class MediaBody(Directive):
    """The ``media-body`` directive: the textual part of a media component.

    The optional argument becomes a ``media-heading`` title, optionally
    rendered as a link (``:heading-target:``) and/or wrapped in a Bootstrap
    ``page-header`` container (``:page-header:``).
    """
    required_arguments, optional_arguments = 0, 1
    final_argument_whitespace = True
    has_content = True
    option_spec = {
        'name': directives.unchanged,
        'class': directives.class_option,
        'heading-target': directives.uri,
        'page-header': directives.flag,
    }
    def run(self):
        self.assert_has_content()
        node = media()
        node.document = self.state.document
        set_source_info(self, node)
        node['classes'] = [self.name]
        node['classes'] += self.options.get('class', [])
        if self.arguments:
            p = nodes.paragraph()
            if 'heading-target' in self.options:
                # Linked heading: wrap the argument text in a reference.
                uri = self.options['heading-target']
                ref_options = {
                    'internal': False,
                    'refuri': uri,
                    'classes': ['h4', 'media-heading']
                }
                heading = nodes.reference(self.arguments[0], self.arguments[0],
                                          **ref_options)
                p.append(heading)
            else:
                # Plain heading: parse the argument text as inline markup.
                heading = self.arguments[0]
                children, msg = self.state.inliner.parse(
                    heading, 0, self.state_machine, self.state.parent)
                p = nodes.paragraph()
                p['classes'] += ['h4', 'media-heading']
                p += children
            if 'page-header' in self.options:
                # Wrap the heading paragraph in a Bootstrap page-header.
                page_header = nodes.container()
                page_header['classes'] += ['page-header']
                page_header += p
                node.insert(0, page_header)
            else:
                node.insert(0, p)
        self.add_name(node)
        self.state.nested_parse(self.content, self.content_offset, node)
        return [node]
def setup(app):
    """Register the media node and all media directives with Sphinx."""
    app.add_node(media, html=(visit_generic_media, depart_generic_media))
    directive_classes = {
        'media': Media,
        'media-list': MediaList,
        'media-left': MediaLeftRight,
        'media-right': MediaLeftRight,
        'media-body': MediaBody,
    }
    for directive_name, directive_class in directive_classes.items():
        app.add_directive(directive_name, directive_class)
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Copyright (c) 2017 - <NAME> - MIT License
# -----------------------------------------------------------------------------
from docutils import nodes
from docutils.parsers.rst import Directive, directives
from sphinx.util.nodes import set_source_info
class media(nodes.General, nodes.Element):
    """Generic docutils node for the Bootstrap media component.

    Rendered as a ``<div>`` (or ``<li>`` when its direct parent is a bullet
    list) carrying the node's CSS classes.
    """
    pass
def visit_generic_media(self, node):
    """Open the HTML tag for a ``media`` node.

    Emits ``<li>`` when the node sits directly inside a bullet list (a
    ``media-list``), ``<div>`` otherwise.
    """
    tag = 'div'
    if isinstance(node.parent, nodes.bullet_list):
        tag = 'li'
    self.body.append(self.starttag(node, tag, CLASS=" ".join(node['classes'])))
def depart_generic_media(self, node):
    """Close the HTML tag opened by :func:`visit_generic_media`."""
    tag = 'div'
    if isinstance(node.parent, nodes.bullet_list):
        tag = 'li'
    # Keep the output tidy: ensure the closing tag starts on its own line.
    if not self.body[-1].endswith("\n"):
        self.body[-1] += "\n"
    self.body.append('</%s>\n' % tag)
class Media(Directive):
    """The ``media`` directive: a Bootstrap media component container.

    The optional argument is parsed as extra CSS classes. The body is
    expected to contain ``media-left``/``media-right`` and ``media-body``
    sub-directives.
    """
    required_arguments, optional_arguments = 0, 1
    final_argument_whitespace = True
    has_content = True
    option_spec = {
        'name': directives.unchanged,
        'class': directives.class_option,
    }
    def run(self):
        self.assert_has_content()
        node = media()
        node.document = self.state.document
        set_source_info(self, node)
        node['classes'] = ['media']
        node['classes'] += self.options.get('class', [])
        if self.arguments:
            node['classes'] += directives.class_option(self.arguments[0])
        self.add_name(node)
        self.state.nested_parse(self.content, self.content_offset, node)
        # Bootstrap markup requires media-right to follow media-body, but
        # authors naturally write it first — reorder if needed.
        if isinstance(node.children[0], media) and \
                'media-right' in node.children[0]['classes']:
            # Move media-right to after media-body
            node.children.append(node.children.pop(0))
        return [node]
class MediaList(Directive):
    """The ``media-list`` directive: a ``media-list`` bullet list whose
    items are ``media`` components.
    """
    required_arguments, optional_arguments = 0, 1
    final_argument_whitespace = True
    has_content = True
    option_spec = {
        'name': directives.unchanged,
        'class': directives.class_option,
    }
    def run(self):
        self.assert_has_content()
        list_node = nodes.bullet_list()
        list_node.document = self.state.document
        set_source_info(self, list_node)
        # Base class first, then :class: option values, then the argument.
        css_classes = ['media-list'] + self.options.get('class', [])
        if self.arguments:
            css_classes += directives.class_option(self.arguments[0])
        list_node['classes'] = css_classes
        self.add_name(list_node)
        self.state.nested_parse(self.content, self.content_offset, list_node)
        return [list_node]
class MediaLeftRight(Directive):
    """The ``media-left`` and ``media-right`` directives.

    Both directive names are registered against this class; the directive
    name itself (``self.name``) becomes the CSS class, so a single
    implementation renders either side of a Bootstrap media component.
    A leading child image — possibly wrapped in a reference — is tagged
    with the ``media-object`` class.
    """
    required_arguments, optional_arguments = 0, 1
    final_argument_whitespace = True
    has_content = True
    option_spec = {
        'name': directives.unchanged,
        'class': directives.class_option,
    }
    def run(self):
        self.assert_has_content()
        node = media()
        node.document = self.state.document
        set_source_info(self, node)
        # The directive name ('media-left'/'media-right') doubles as the
        # CSS class of the generated node.
        node['classes'] = [self.name]
        node['classes'] += self.options.get('class', [])
        try:
            if self.arguments:
                node['classes'] += directives.class_option(self.arguments[0])
        except ValueError:
            err = 'Invalid class attribute value for "%s" directive: "%s".'
            raise self.error(err % (self.name, self.arguments[0]))
        self.add_name(node)
        self.state.nested_parse(self.content, self.content_offset, node)
        # Add media-object class to child images
        first_child = node.children[0]
        if isinstance(first_child, nodes.reference):
            # FIX: guard the index *before* dereferencing. The original
            # evaluated `node.children[0].children[0]` eagerly, which raised
            # IndexError when the reference node had no children.
            if first_child.children and \
                    isinstance(first_child.children[0], nodes.image):
                first_child.children[0]['classes'] += ['media-object']
        elif isinstance(first_child, nodes.image):
            first_child['classes'] += ['media-object']
        return [node]
class MediaBody(Directive):
    """The ``media-body`` directive: the textual part of a media component.

    The optional argument becomes a ``media-heading`` title, optionally
    rendered as a link (``:heading-target:``) and/or wrapped in a Bootstrap
    ``page-header`` container (``:page-header:``).
    """
    required_arguments, optional_arguments = 0, 1
    final_argument_whitespace = True
    has_content = True
    option_spec = {
        'name': directives.unchanged,
        'class': directives.class_option,
        'heading-target': directives.uri,
        'page-header': directives.flag,
    }
    def run(self):
        self.assert_has_content()
        node = media()
        node.document = self.state.document
        set_source_info(self, node)
        node['classes'] = [self.name]
        node['classes'] += self.options.get('class', [])
        if self.arguments:
            p = nodes.paragraph()
            if 'heading-target' in self.options:
                # Linked heading: wrap the argument text in a reference.
                uri = self.options['heading-target']
                ref_options = {
                    'internal': False,
                    'refuri': uri,
                    'classes': ['h4', 'media-heading']
                }
                heading = nodes.reference(self.arguments[0], self.arguments[0],
                                          **ref_options)
                p.append(heading)
            else:
                # Plain heading: parse the argument text as inline markup.
                heading = self.arguments[0]
                children, msg = self.state.inliner.parse(
                    heading, 0, self.state_machine, self.state.parent)
                p = nodes.paragraph()
                p['classes'] += ['h4', 'media-heading']
                p += children
            if 'page-header' in self.options:
                # Wrap the heading paragraph in a Bootstrap page-header.
                page_header = nodes.container()
                page_header['classes'] += ['page-header']
                page_header += p
                node.insert(0, page_header)
            else:
                node.insert(0, p)
        self.add_name(node)
        self.state.nested_parse(self.content, self.content_offset, node)
        return [node]
def setup(app):
app.add_node(media, html=(visit_generic_media, depart_generic_media))
app.add_directive('media', Media)
app.add_directive('media-list', MediaList)
app.add_directive('media-left', MediaLeftRight)
app.add_directive('media-right', MediaLeftRight)
app.add_directive('media-body', MediaBody) | en | 0.329289 | #!/usr/bin/env python # -*- coding: utf-8 -*- # ----------------------------------------------------------------------------- # Copyright (c) 2017 - <NAME> - MIT License # ----------------------------------------------------------------------------- # Move media-right to after media-body # Add media-object class to child images | 2.184102 | 2 |
attachment/base_admin.py | chunkai-meng/django-rest-attachment | 0 | 6622477 | <reponame>chunkai-meng/django-rest-attachment
from django.contrib import admin
from django.utils.safestring import mark_safe
class BaseAttachmentAdmin(admin.ModelAdmin):
    """Base admin for attachment models.

    Shows an inline download link for each attachment and stamps the
    uploading user on first save (or when it is missing).
    """
    list_display = ('id', 'name', 'uploaded_at', 'uploaded_by', 'doc_tag')
    list_filter = ('uploaded_at', 'uploaded_by',)
    list_per_page = 20
    readonly_fields = ('doc_tag',)
    list_select_related = ('uploaded_by',)
    raw_id_fields = ('uploaded_by',)
    def doc_tag(self, obj):
        """Render a download link to the stored file, labelled with its name.

        NOTE(review): ``obj.name`` is interpolated into ``mark_safe`` HTML
        without escaping; if names can come from untrusted users, switch to
        ``django.utils.html.format_html``.
        """
        return mark_safe('<a href="{}" height="150">{}</a>'.format(obj.file.url, obj.name))
    doc_tag.short_description = u'Document'
    def save_model(self, request, obj, form, change):
        """Persist the attachment, recording the uploader when missing.

        The uploader is set on creation, or on edit when the instance has
        no uploader yet.
        """
        user = request.user
        instance = form.save(commit=False)
        # FIX: the original checked ``instance.owner``, but the field used
        # throughout this admin is ``uploaded_by`` — ``owner`` does not
        # exist on the model and would raise AttributeError on edits.
        # ``uploaded_by_id`` avoids a RelatedObjectDoesNotExist lookup.
        if not change or not instance.uploaded_by_id:
            instance.uploaded_by = user
        instance.save()
        form.save_m2m()
        return instance
| from django.contrib import admin
from django.utils.safestring import mark_safe
class BaseAttachmentAdmin(admin.ModelAdmin):
list_display = ('id', 'name', 'uploaded_at', 'uploaded_by', 'doc_tag')
list_filter = ('uploaded_at', 'uploaded_by',)
list_per_page = 20
readonly_fields = ('doc_tag',)
list_select_related = ('uploaded_by',)
raw_id_fields = ('uploaded_by',)
def doc_tag(self, obj):
return mark_safe('<a href="{}" height="150">{}</a>'.format(obj.file.url, obj.name))
doc_tag.short_description = u'Document'
def save_model(self, request, obj, form, change):
user = request.user
instance = form.save(commit=False)
if not change or not instance.owner:
instance.uploaded_by = user
instance.save()
form.save_m2m()
return instance | none | 1 | 2.014212 | 2 | |
Tareaejercicios/Example11.py | Dan-Elioth/Ejerciciospararesolver | 0 | 6622478 | <gh_stars>0
print("Bienvenido al programa de bono al empleado de nuestra tienda")
# Variables: bonus amount per completed year (1-5) and for more than five.
BONOS = {1: 100, 2: 200, 3: 300, 4: 400, 5: 500}
BONO_MAYOR = 1000
# Input: years worked at the store.
años = float(input("Ingrese los años que labora en la tienda:"))
# Process: a single branch replaces the original run of six independent
# `if` statements. More than five years earns the top bonus; an exact match
# of 1-5 years earns the mapped amount; anything else (0, negatives or
# fractional years) keeps the default bonus of 0, exactly as the original
# equality checks did.
if años > 5:
    bono = BONO_MAYOR
else:
    bono = BONOS.get(años, 0)
# Output
print(f"El bono con el que usted cuenta es de: ${bono:.2f}")
| print("Bienvenido al programa de bono al empleado de nuestra tienda")
#Variables
bono=0
bono1=100
bono2=200
bono3=300
bono4=400
bono5=500
bonomayor=1000
#Datos de entrada
años=float(input("Ingrese los años que labora en la tienda:"))
#Proceso
if años==1:
bono=bono1
if años==2:
bono=bono2
if años==3:
bono=bono3
if años==4:
bono=bono4
if años==5:
bono=bono5
if años>5:
bono=bonomayor
#Datos de salida
print(f"El bono con el que usted cuenta es de: ${bono:.2f}") | es | 0.782248 | #Variables #Datos de entrada #Proceso #Datos de salida | 3.632758 | 4 |
{{cookiecutter.project_name}}/{{cookiecutter.app_name}}/commons/utils/dictlist_util.py | juforg/cookiecutter-flask-restful | 1 | 6622479 | <reponame>juforg/cookiecutter-flask-restful
from typing import List, Dict, Tuple
from {{cookiecutter.app_name}}.commons.exception import BizException
def group_by_for_list(data_list: List[Dict], by: List[str] = None, key_error: bool = False) -> Dict[Tuple, List]:
    """Group a list of dicts by the values of the given keys.

    Args:
        data_list: The records to group.
        by: The keys to group by; each record's values for these keys form
            the tuple group key. A missing key contributes ``None`` unless
            ``key_error`` is set.
        key_error: When True, raise ``BizException`` if a record lacks one
            of the ``by`` keys.

    Returns:
        A mapping from the tuple of key values to the list of records in
        that group, or ``data_list`` unchanged when it (or ``by``) is None.
    """
    if data_list is None or by is None:
        return data_list
    grouped_by_data: Dict[Tuple, List] = {}
    for record in data_list:
        # Validate first so the error surfaces before any grouping happens.
        if key_error:
            for b in by:
                if b not in record:
                    raise BizException("key:[{}]不存在".format(b))
        key_tuple = tuple(record.get(b) for b in by)
        grouped_by_data.setdefault(key_tuple, []).append(record)
    return grouped_by_data
| from typing import List, Dict, Tuple
from {{cookiecutter.app_name}}.commons.exception import BizException
def group_by_for_list(data_list: List[Dict], by: List[str] = None,key_error: bool = False) -> Dict[Tuple, List]:
"""
json数据group by
:param key_error:
:param data_list: 数据列表
:param by: group by 列表
:return: 分组后的数据
"""
if data_list is None or by is None:
return data_list
grouped_by_data = {}
for data in data_list:
key = []
for b in by:
if not b in data.keys() and key_error:
raise BizException("key:[{}]不存在".format(b))
key.append(data.get(b))
key_tuple = tuple(key)
values = grouped_by_data.get(key_tuple)
if values is None:
values = []
grouped_by_data[key_tuple] = values
values.append(data)
return grouped_by_data | zh | 0.758788 | json数据group by :param key_error: :param data_list: 数据列表 :param by: group by 列表 :return: 分组后的数据 | 2.647984 | 3 |
src/pywikibot_extensions/textlib.py | JJMC89/pywikibot-extensions | 0 | 6622480 | """
Functions for manipulating wikitext.
This module extends pywikibot.textlib.
"""
from typing import Iterable
from pywikibot.page import BasePage
# The namespace names must be substituted into this regex,
# e.g., FILE_LINK_REGEX.format("|".join(site.namespaces.FILE)).
# Adds named groups to pywikibot.textlib.FILE_LINK_REGEX
# and uses str.format instead of %.
FILE_LINK_REGEX = r"""
\[\[\s*
(?P<namespace>{})
\s*:
(?=(?P<filename>
[^]|]*
))(?P=filename)
(
\|
(
(
(?=(?P<inner_link>
\[\[.*?\]\]
))(?P=inner_link)
)?
(?=(?P<other_chars>
[^\[\]]*
))(?P=other_chars)
|
(?=(?P<not_wikilink>
\[[^]]*\]
))(?P=not_wikilink)
)*?
)??
\]\]
"""
def iterable_to_wikitext(
    items: Iterable[object], *, prefix: str = "\n* "
) -> str:
    """
    Convert iterable to wikitext.

    Pages are converted to links.
    All other objects use their string representation.

    :param items: Items to iterate
    :param prefix: Prefix for each item when there is more than one item
    """
    if not items:
        return ""
    # FIX: materialize the iterable once. The original consumed one-shot
    # iterators (e.g. generators) inside ``len(list(items))`` and then
    # silently produced "" when iterating the exhausted iterator again.
    items = list(items)
    if not items:
        return ""
    if len(items) == 1:
        prefix = ""
    text = ""
    for item in items:
        if isinstance(item, BasePage):
            # Render pages as text links (e.g. [[:File:X]]).
            item = item.title(as_link=True, textlink=True)
        text += f"{prefix}{item}"
    return text
| """
Functions for manipulating wikitext.
This module extends pywikibot.textlib.
"""
from typing import Iterable
from pywikibot.page import BasePage
# The namespace names must be substituted into this regex,
# e.g., FILE_LINK_REGEX.format("|".join(site.namespaces.FILE)).
# Adds named groups to pywikibot.textlib.FILE_LINK_REGEX
# and uses str.format instead of %.
FILE_LINK_REGEX = r"""
\[\[\s*
(?P<namespace>{})
\s*:
(?=(?P<filename>
[^]|]*
))(?P=filename)
(
\|
(
(
(?=(?P<inner_link>
\[\[.*?\]\]
))(?P=inner_link)
)?
(?=(?P<other_chars>
[^\[\]]*
))(?P=other_chars)
|
(?=(?P<not_wikilink>
\[[^]]*\]
))(?P=not_wikilink)
)*?
)??
\]\]
"""
def iterable_to_wikitext(
    items: Iterable[object], *, prefix: str = "\n* "
) -> str:
    """
    Convert iterable to wikitext.

    Pages are converted to links.
    All other objects use their string representation.

    :param items: Items to iterate
    :param prefix: Prefix for each item when there is more than one item
    """
    if not items:
        return ""
    # FIX: materialize the iterable once. The original consumed one-shot
    # iterators (e.g. generators) inside ``len(list(items))`` and then
    # silently produced "" when iterating the exhausted iterator again.
    items = list(items)
    if not items:
        return ""
    if len(items) == 1:
        prefix = ""
    text = ""
    for item in items:
        if isinstance(item, BasePage):
            # Render pages as text links (e.g. [[:File:X]]).
            item = item.title(as_link=True, textlink=True)
        text += f"{prefix}{item}"
    return text
| en | 0.331379 | Functions for manipulating wikitext. This module extends pywikibot.textlib. # The namespace names must be substituted into this regex, # e.g., FILE_LINK_REGEX.format("|".join(site.namespaces.FILE)). # Adds named groups to pywikibot.textlib.FILE_LINK_REGEX # and uses str.format instead of %. \[\[\s* (?P<namespace>{}) \s*: (?=(?P<filename> [^]|]* ))(?P=filename) ( \| ( ( (?=(?P<inner_link> \[\[.*?\]\] ))(?P=inner_link) )? (?=(?P<other_chars> [^\[\]]* ))(?P=other_chars) | (?=(?P<not_wikilink> \[[^]]*\] ))(?P=not_wikilink) )*? )?? \]\] Convert iterable to wikitext. Pages are converted to links. All other objects use their string representation. :param items: Items to iterate :param prefix: Prefix for each item when there is more than one item | 3.420689 | 3 |
api.py | AdityaSingh17/DogsVsCats | 0 | 6622481 | <filename>api.py
import requests
import json
# Define API Endpoint and Headers.
ApiEndpoint = "http://dogorcat.pythonanywhere.com/query"
Headers = {"Content-Type": "application/json"}
# Specify the URL.
URL = "<URL HERE>"
# Convert the data into JSON.
Data = json.dumps({"url": URL})
# Generate a POST request at the API Endpoint with Headers and JSON data.
response = requests.post(ApiEndpoint, headers=Headers, data=Data)
try:
    # A text response is returned, convert it to JSON.
    JSONResponse = json.loads(response.text)
    # Print the result.
    print("Predicted result:", JSONResponse["Prediction"])
except (ValueError, KeyError):
    # FIX: a bare `except:` swallowed everything, including KeyboardInterrupt.
    # json.JSONDecodeError is a ValueError subclass; KeyError covers a
    # response without a "Prediction" field. Fall back to the raw text.
    print(response.text)
| <filename>api.py
import requests
import json
# Define API Endpoint and Headers.
ApiEndpoint = "http://dogorcat.pythonanywhere.com/query"
Headers = {"Content-Type": "application/json"}
# Specify the URL.
URL = "<URL HERE>"
# Convert the data into JSON.
Data = json.dumps({"url": URL})
# Generate a POST request at the API Endpoint with Headers and JSON data.
response = requests.post(ApiEndpoint, headers=Headers, data=Data)
try:
    # A text response is returned, convert it to JSON.
    JSONResponse = json.loads(response.text)
    # Print the result.
    print("Predicted result:", JSONResponse["Prediction"])
except (ValueError, KeyError):
    # FIX: a bare `except:` swallowed everything, including KeyboardInterrupt.
    # json.JSONDecodeError is a ValueError subclass; KeyError covers a
    # response without a "Prediction" field. Fall back to the raw text.
    print(response.text)
| en | 0.742293 | # Define API Endpoint and Headers. # Specify the URL. # Convert the data into JSON. # Generate a POST request at the API Endpoint with Headers and JSON data. # A text response is returned, convert it to JSON. # Print the result. # In case of any error, print the response text as is. | 3.441069 | 3 |
theseus/segmentation/metrics/projection.py | kaylode/shrec22-pothole | 1 | 6622482 | import os
import torch
from typing import Any, Dict, Optional, List
from theseus.base.metrics.metric_template import Metric
import numpy as np
import hashlib
from theseus.utilities.visualization.visualizer import Visualizer
from theseus.utilities.loggers import LoggerObserver
class EmbeddingProjection(Metric):
"""
Visualize embedding project for classification
"""
    def __init__(self, save_dir='.temp', **kwargs):
        """
        :param save_dir: Directory where per-sample feature files are saved.
        """
        super().__init__(**kwargs)
        self.save_dir = save_dir
        self.visualizer = Visualizer()
        self.logger = LoggerObserver.getLogger('main')
        self.reset()
        # Ensure the output directory exists before any update() call.
        os.makedirs(self.save_dir, exist_ok=True)
def update(self, outputs: torch.Tensor, batch: Dict[str, Any]):
"""
Perform calculation based on prediction and targets
"""
features = outputs.detach().cpu().numpy() # (B, C, H, W)
img_names = batch['img_names']
for i in range(len(features)):
filename = hashlib.md5(img_names[i].encode('utf-8')).hexdigest()
embedding_path = self.save_dir + r"/" + filename + '_feat.npy'
np.save(embedding_path, features[i])
self.embeddings.append(embedding_path)
    def reset(self):
        # Clear the list of saved embedding file paths.
        self.embeddings = []
def value(self):
return {'projection': "Saved prediction as numy"} | import os
import torch
from typing import Any, Dict, Optional, List
from theseus.base.metrics.metric_template import Metric
import numpy as np
import hashlib
from theseus.utilities.visualization.visualizer import Visualizer
from theseus.utilities.loggers import LoggerObserver
class EmbeddingProjection(Metric):
"""
Visualize embedding project for classification
"""
    def __init__(self, save_dir='.temp', **kwargs):
        """
        :param save_dir: Directory where per-sample feature files are saved.
        """
        super().__init__(**kwargs)
        self.save_dir = save_dir
        self.visualizer = Visualizer()
        self.logger = LoggerObserver.getLogger('main')
        self.reset()
        # Ensure the output directory exists before any update() call.
        os.makedirs(self.save_dir, exist_ok=True)
def update(self, outputs: torch.Tensor, batch: Dict[str, Any]):
"""
Perform calculation based on prediction and targets
"""
features = outputs.detach().cpu().numpy() # (B, C, H, W)
img_names = batch['img_names']
for i in range(len(features)):
filename = hashlib.md5(img_names[i].encode('utf-8')).hexdigest()
embedding_path = self.save_dir + r"/" + filename + '_feat.npy'
np.save(embedding_path, features[i])
self.embeddings.append(embedding_path)
    def reset(self):
        # Clear the list of saved embedding file paths.
        self.embeddings = []
def value(self):
return {'projection': "Saved prediction as numy"} | en | 0.793803 | Visualize embedding project for classification Perform calculation based on prediction and targets # (B, C, H, W) | 2.260472 | 2 |
Chapter11/dcn_da.py | PacktPublishing/Mastering-Machine-Learning-Algorithms-Second-Edition | 40 | 6622483 | import matplotlib.pyplot as plt
import numpy as np
import seaborn as sns
import tensorflow as tf
# Set random seed for reproducibility
np.random.seed(1000)
nb_classes = 10  # Fashion-MNIST has 10 classes
train_batch_size = 256
test_batch_size = 100
nb_epochs = 100
steps_per_epoch = 1500  # augmented batches drawn per training epoch
if __name__ == '__main__':
    # Load the dataset
    (X_train, Y_train), (X_test, Y_test) = \
        tf.keras.datasets.fashion_mnist.load_data()
    # Training generator: rescaling/standardization plus random
    # augmentation (flips, rotations, shears, zooms).
    train_idg = tf.keras.preprocessing.image.\
        ImageDataGenerator(
            rescale=1.0 / 255.0,
            samplewise_center=True,
            samplewise_std_normalization=True,
            horizontal_flip=True,
            rotation_range=10.0,
            shear_range=np.pi / 12.0,
            zoom_range=0.25)
    train_dg = train_idg.flow(
        x=np.expand_dims(X_train, axis=3),
        y=tf.keras.utils.to_categorical(
            Y_train, num_classes=nb_classes),
        batch_size=train_batch_size,
        shuffle=True,
        seed=1000)
    # Validation generator: rescaling/standardization only, no augmentation.
    test_idg = tf.keras.preprocessing.image.\
        ImageDataGenerator(
            rescale=1.0 / 255.0,
            samplewise_center=True,
            samplewise_std_normalization=True)
    # FIX: the validation flow was built from `train_idg`, so validation
    # images were randomly augmented and the reported validation metrics
    # were distorted. Use the augmentation-free `test_idg` instead.
    test_dg = test_idg.flow(
        x=np.expand_dims(X_test, axis=3),
        y=tf.keras.utils.to_categorical(
            Y_test, num_classes=nb_classes),
        shuffle=False,
        batch_size=test_batch_size,
        seed=1000)
    # Create the model: four conv blocks (Conv2D + BatchNorm + LeakyReLU),
    # one max-pooling stage, then two dense blocks and a softmax head.
    model = tf.keras.models.Sequential([
        tf.keras.layers.Conv2D(32,
                               kernel_size=(3, 3),
                               padding='same',
                               input_shape=(X_train.shape[1],
                                            X_train.shape[2],
                                            1)),
        tf.keras.layers.BatchNormalization(),
        tf.keras.layers.LeakyReLU(alpha=0.1),
        tf.keras.layers.Conv2D(64,
                               kernel_size=(3, 3),
                               padding='same'),
        tf.keras.layers.BatchNormalization(),
        tf.keras.layers.LeakyReLU(alpha=0.1),
        tf.keras.layers.Conv2D(128,
                               kernel_size=(3, 3),
                               padding='same'),
        tf.keras.layers.BatchNormalization(),
        tf.keras.layers.LeakyReLU(alpha=0.1),
        tf.keras.layers.Conv2D(128,
                               kernel_size=(3, 3),
                               padding='same'),
        tf.keras.layers.BatchNormalization(),
        tf.keras.layers.LeakyReLU(alpha=0.1),
        tf.keras.layers.MaxPooling2D(pool_size=(2, 2)),
        tf.keras.layers.Flatten(),
        tf.keras.layers.Dense(1024),
        tf.keras.layers.BatchNormalization(),
        tf.keras.layers.LeakyReLU(alpha=0.1),
        tf.keras.layers.Dense(1024),
        tf.keras.layers.BatchNormalization(),
        tf.keras.layers.LeakyReLU(alpha=0.1),
        tf.keras.layers.Dense(nb_classes,
                              activation='softmax')
    ])
    # Compile the model
    model.compile(loss='categorical_crossentropy',
                  optimizer=tf.keras.optimizers.Adam(
                      lr=0.0001, decay=1e-5),
                  metrics=['accuracy'])
    # Train the model. `Model.fit` accepts generators directly;
    # `fit_generator` is deprecated in TF 2.x and delegates to `fit`.
    history = model.fit(
        train_dg,
        epochs=nb_epochs,
        steps_per_epoch=steps_per_epoch,
        validation_data=test_dg,
        validation_steps=int(X_test.shape[0] /
                             test_batch_size),
        callbacks=[
            tf.keras.callbacks.ReduceLROnPlateau(
                factor=0.1, patience=1,
                cooldown=1, min_lr=1e-6)
        ])
    # Plot training/validation accuracy and loss curves.
    sns.set()
    fig, ax = plt.subplots(1, 2, figsize=(18, 6))
    ax[0].plot(history.history['accuracy'], label='Training accuracy')
    ax[0].plot(history.history['val_accuracy'], label='Validation accuracy')
    ax[0].set_xlabel('Epoch', fontsize=20)
    ax[0].set_ylabel('Accuracy', fontsize=20)
    ax[0].legend(fontsize=20)
    ax[0].grid(True)
    ax[1].plot(history.history['loss'], label='Training loss')
    ax[1].plot(history.history['val_loss'], label='Validation loss')
    ax[1].set_xlabel('Epoch', fontsize=20)
    ax[1].set_ylabel('Loss', fontsize=20)
    ax[1].set_yticks(np.linspace(0.0, 1.0, 10))
    ax[1].legend(fontsize=20)
    ax[1].grid(True)
    plt.show()
| import matplotlib.pyplot as plt
import numpy as np
import seaborn as sns
import tensorflow as tf
# Set random seed for reproducibility
np.random.seed(1000)
nb_classes = 10
train_batch_size = 256
test_batch_size = 100
nb_epochs = 100
steps_per_epoch = 1500
if __name__ == '__main__':
# Load the dataset
(X_train, Y_train), (X_test, Y_test) = \
tf.keras.datasets.fashion_mnist.load_data()
# Create the augmented data generators
train_idg = tf.keras.preprocessing.image.\
ImageDataGenerator(
rescale=1.0 / 255.0,
samplewise_center=True,
samplewise_std_normalization=True,
horizontal_flip=True,
rotation_range=10.0,
shear_range=np.pi / 12.0,
zoom_range=0.25)
train_dg = train_idg.flow(
x=np.expand_dims(X_train, axis=3),
y=tf.keras.utils.to_categorical(
Y_train, num_classes=nb_classes),
batch_size=train_batch_size,
shuffle=True,
seed=1000)
test_idg = tf.keras.preprocessing.image.\
ImageDataGenerator(
rescale=1.0 / 255.0,
samplewise_center=True,
samplewise_std_normalization=True)
test_dg = train_idg.flow(
x=np.expand_dims(X_test, axis=3),
y=tf.keras.utils.to_categorical(
Y_test, num_classes=nb_classes),
shuffle=False,
batch_size=test_batch_size,
seed=1000)
# Create the model
model = tf.keras.models.Sequential([
tf.keras.layers.Conv2D(32,
kernel_size=(3, 3),
padding='same',
input_shape=(X_train.shape[1],
X_train.shape[2],
1)),
tf.keras.layers.BatchNormalization(),
tf.keras.layers.LeakyReLU(alpha=0.1),
tf.keras.layers.Conv2D(64,
kernel_size=(3, 3),
padding='same'),
tf.keras.layers.BatchNormalization(),
tf.keras.layers.LeakyReLU(alpha=0.1),
tf.keras.layers.Conv2D(128,
kernel_size=(3, 3),
padding='same'),
tf.keras.layers.BatchNormalization(),
tf.keras.layers.LeakyReLU(alpha=0.1),
tf.keras.layers.Conv2D(128,
kernel_size=(3, 3),
padding='same'),
tf.keras.layers.BatchNormalization(),
tf.keras.layers.LeakyReLU(alpha=0.1),
tf.keras.layers.MaxPooling2D(pool_size=(2, 2)),
tf.keras.layers.Flatten(),
tf.keras.layers.Dense(1024),
tf.keras.layers.BatchNormalization(),
tf.keras.layers.LeakyReLU(alpha=0.1),
tf.keras.layers.Dense(1024),
tf.keras.layers.BatchNormalization(),
tf.keras.layers.LeakyReLU(alpha=0.1),
tf.keras.layers.Dense(nb_classes,
activation='softmax')
])
# Compile the model
model.compile(loss='categorical_crossentropy',
optimizer=tf.keras.optimizers.Adam(
lr=0.0001, decay=1e-5),
metrics=['accuracy'])
# Train the model
history = model.fit_generator(
generator=train_dg,
epochs=nb_epochs,
steps_per_epoch=steps_per_epoch,
validation_data=test_dg,
validation_steps=int(X_test.shape[0] /
test_batch_size),
callbacks=[
tf.keras.callbacks.ReduceLROnPlateau(
factor=0.1, patience=1,
cooldown=1, min_lr=1e-6)
])
# Show the results
sns.set()
fig, ax = plt.subplots(1, 2, figsize=(18, 6))
ax[0].plot(history.history['accuracy'], label='Training accuracy')
ax[0].plot(history.history['val_accuracy'], label='Validation accuracy')
ax[0].set_xlabel('Epoch', fontsize=20)
ax[0].set_ylabel('Accuracy', fontsize=20)
ax[0].legend(fontsize=20)
ax[0].grid(True)
ax[1].plot(history.history['loss'], label='Training loss')
ax[1].plot(history.history['val_loss'], label='Validation loss')
ax[1].set_xlabel('Epoch', fontsize=20)
ax[1].set_ylabel('Loss', fontsize=20)
ax[1].set_yticks(np.linspace(0.0, 1.0, 10))
ax[1].legend(fontsize=20)
ax[1].grid(True)
plt.show()
| en | 0.644169 | # Set random seed for reproducibility # Load the dataset # Create the augmented data generators # Create the model # Compile the model # Train the model # Show the results | 2.942063 | 3 |
code/util/nectar_s3_task.py | madconsulting/datanectar | 5 | 6622484 | #1/usr/bin/env python
import os
import sys
import time
import hashlib
from datetime import datetime
from boto import connect_s3
import luigi
from luigi.s3 import S3Client
from pathutils import project_path
class NectarS3Task(luigi.Task):
    """Base Luigi task whose target, logs and params all live on S3.

    Every (task class, parameter set) pair gets its own prefix:
    s3://<env>.<bucket>/chains/<chain_type>/<module>/<ClassName>/<param_hash>/
    """

    def __init__(self, *args, **kwargs):
        super(NectarS3Task, self).__init__(*args, **kwargs)
        # NOTE(review): the original literal was redacted as "<KEY>" in the
        # source dump; confirm the intended strftime format.
        self.KEY_TIME_FORMAT = '%Y-%m-%dT%H:%M:%S'
        self.s3client = S3Client(
            os.getenv('AWS_ACCESS_KEY_ID'),
            os.getenv('AWS_SECRET_ACCESS_KEY')
        )
        self.boto_s3_client = connect_s3(
            os.getenv('AWS_ACCESS_KEY_ID'),
            os.getenv('AWS_SECRET_ACCESS_KEY')
        )
        # Buckets are namespaced per environment, e.g. "local.datanectar".
        self.bucket = self.boto_s3_client.get_bucket(
            '%s.%s' % (os.getenv('ENV', 'local'),
                       os.getenv('PROJECT_BUCKET', 'datanectar')))

    def output(self):
        """Luigi output: an S3 target at this task's unique path."""
        return luigi.s3.S3Target(
            self.get_s3target_path(),
            client=self.s3client
        )

    def get_s3target_path_base(self):
        """Return this task's S3 prefix as a template with one trailing '%s'.

        Returns
        -------
        target_path : str of target path template
        """
        module_name = sys.modules[self.__module__].__file__.split('/')[-1]
        # BUG FIX: str.strip() removes *characters*, not a suffix, so e.g.
        # "copy.py".strip('.py') became "co".  Cut the extension instead.
        if module_name.endswith('.pyc'):
            module_name = module_name[:-len('.pyc')]
        elif module_name.endswith('.py'):
            module_name = module_name[:-len('.py')]
        # this could be an ENV variable that we first check
        # then default to datanectar
        return 's3://{0}.{1}/chains/{2}/{3}/{4}/{5}/%s'.format(
            os.getenv('ENV', 'local'),
            os.getenv('PROJECT_BUCKET', 'datanectar'),
            self.chain_type(),
            module_name,
            self.__class__.__name__,
            self.hash_params()
        )

    def get_s3target_path(self):
        """Full S3 URL of the task's main output object (out.txt)."""
        name = 'out.txt'
        # Cached so get_s3target_url() can reuse the last computed path.
        self._target_path = self.get_s3target_path_base() % name
        return self._target_path

    def get_s3stdout_log_path(self):
        """Full S3 URL of this task's stdout log."""
        # Simplified from the original chained interpolation
        # (base % 'log/%s' % 'stdout.log'), which produced the same string.
        return self.get_s3target_path_base() % 'log/stdout.log'

    def get_s3stderr_log_path(self):
        """Full S3 URL of this task's stderr log."""
        return self.get_s3target_path_base() % 'log/stderr.log'

    def get_s3params_path(self):
        """Full S3 URL of this task's serialized parameters."""
        return self.get_s3target_path_base() % 'params.txt'

    def get_s3target_relative_path_base(self):
        """Bucket-relative prefix template (everything from 'chains/' on)."""
        base = self.get_s3target_path_base()
        parts = base.split('/')
        return '/'.join(parts[parts.index('chains'):])

    def get_s3target_relative_path(self):
        """Bucket-relative key of the main output, e.g. chains/.../out.txt."""
        return self.get_s3target_relative_path_base() % 'out.txt'

    def get_s3stdout_log_relative_path(self):
        """Bucket-relative key of the stdout log."""
        return self.get_s3target_relative_path_base() % 'log/stdout.log'

    def get_s3stderr_log_relative_path(self):
        """Bucket-relative key of the stderr log."""
        return self.get_s3target_relative_path_base() % 'log/stderr.log'

    def get_s3params_relative_path(self):
        """Bucket-relative key of the params file."""
        return self.get_s3target_relative_path_base() % 'params.txt'

    def get_s3target_url(self, expires_in=600):
        """Return a temporary signed URL for this task's output.

        Parameters
        ----------
        expires_in : int, seconds the URL stays valid (default 600)

        Raises
        ------
        TargetNotFoundException (ad-hoc) when no matching key exists.
        """
        if hasattr(self, '_target_path'):
            # NOTE(review): _target_path is a full s3:// URL rather than a
            # bucket key, so this lookup likely returns None and we fall
            # through below; kept as-is to preserve behaviour.
            k = self.bucket.get_key(getattr(self, '_target_path'))
            if k:
                return k.generate_url(expires_in=expires_in)
        k = self.bucket.get_key(self.get_s3target_relative_path())
        if not k:
            raise type('TargetNotFoundException', (Exception,), {})(
                "Couldn't find target with name: %s" % self.get_s3target_relative_path())
        return k.generate_url(expires_in=expires_in)

    def get_key_like(self, like=''):
        """Return the bucket keys whose name contains `like`.

        Returns a list (matching the Python 2 filter() behaviour even on
        Python 3, where filter() is lazy).
        """
        return [key for key in self.bucket.get_all_keys() if like in key.name]

    def join_params(self, sep='_', frequency='day'):
        """Join the task's stringified parameter values deterministically.

        Keys are ordered by the sum of their character codes (stable, if
        unusual).  With no parameters a timestamp at `frequency`
        granularity ('day' or hourly) is returned instead, so the task is
        unique per execution window.
        """
        str_params = self.to_str_params()
        keys_sorted_by_letter = sorted(
            str_params.keys(),
            key=lambda name: sum(ord(ch) for ch in name))
        if not keys_sorted_by_letter:
            fmt = '%Y-%m-%d' if frequency == 'day' else '%Y-%m-%d %H'
            return datetime.strftime(datetime.now(), fmt)
        return sep.join(str_params[k] for k in keys_sorted_by_letter)

    def hash_params(self):
        """Return the MD5 hex digest of the joined parameters.

        BUG FIX: hashlib.md5 requires bytes on Python 3, so the joined
        string is encoded first (a no-op for the digest on Python 2).
        """
        return hashlib.md5(self.join_params().encode('utf-8')).hexdigest()

    def chain_type(self):
        """Name of the chain this task belongs to (its parent directory)."""
        # NOTE(review): os.path.join ignores project_path() here because the
        # second argument is absolute; kept to preserve behaviour.
        module_path = os.path.join(
            project_path(),
            os.path.abspath(sys.modules[self.__module__].__file__)
        )
        dir_path = os.path.dirname(module_path)
        return dir_path.split(os.sep)[-1]
| #1/usr/bin/env python
import os
import sys
import time
import hashlib
from datetime import datetime
from boto import connect_s3
import luigi
from luigi.s3 import S3Client
from pathutils import project_path
class NectarS3Task(luigi.Task):
    """Base Luigi task whose target, logs and params all live on S3.

    Every (task class, parameter set) pair gets its own prefix:
    s3://<env>.<bucket>/chains/<chain_type>/<module>/<ClassName>/<param_hash>/
    """

    def __init__(self, *args, **kwargs):
        super(NectarS3Task, self).__init__(*args, **kwargs)
        # NOTE(review): the original literal was redacted as "<KEY>" in the
        # source dump; confirm the intended strftime format.
        self.KEY_TIME_FORMAT = '%Y-%m-%dT%H:%M:%S'
        self.s3client = S3Client(
            os.getenv('AWS_ACCESS_KEY_ID'),
            os.getenv('AWS_SECRET_ACCESS_KEY')
        )
        self.boto_s3_client = connect_s3(
            os.getenv('AWS_ACCESS_KEY_ID'),
            os.getenv('AWS_SECRET_ACCESS_KEY')
        )
        # Buckets are namespaced per environment, e.g. "local.datanectar".
        self.bucket = self.boto_s3_client.get_bucket(
            '%s.%s' % (os.getenv('ENV', 'local'),
                       os.getenv('PROJECT_BUCKET', 'datanectar')))

    def output(self):
        """Luigi output: an S3 target at this task's unique path."""
        return luigi.s3.S3Target(
            self.get_s3target_path(),
            client=self.s3client
        )

    def get_s3target_path_base(self):
        """Return this task's S3 prefix as a template with one trailing '%s'.

        Returns
        -------
        target_path : str of target path template
        """
        module_name = sys.modules[self.__module__].__file__.split('/')[-1]
        # BUG FIX: str.strip() removes *characters*, not a suffix, so e.g.
        # "copy.py".strip('.py') became "co".  Cut the extension instead.
        if module_name.endswith('.pyc'):
            module_name = module_name[:-len('.pyc')]
        elif module_name.endswith('.py'):
            module_name = module_name[:-len('.py')]
        # this could be an ENV variable that we first check
        # then default to datanectar
        return 's3://{0}.{1}/chains/{2}/{3}/{4}/{5}/%s'.format(
            os.getenv('ENV', 'local'),
            os.getenv('PROJECT_BUCKET', 'datanectar'),
            self.chain_type(),
            module_name,
            self.__class__.__name__,
            self.hash_params()
        )

    def get_s3target_path(self):
        """Full S3 URL of the task's main output object (out.txt)."""
        name = 'out.txt'
        # Cached so get_s3target_url() can reuse the last computed path.
        self._target_path = self.get_s3target_path_base() % name
        return self._target_path

    def get_s3stdout_log_path(self):
        """Full S3 URL of this task's stdout log."""
        # Simplified from the original chained interpolation
        # (base % 'log/%s' % 'stdout.log'), which produced the same string.
        return self.get_s3target_path_base() % 'log/stdout.log'

    def get_s3stderr_log_path(self):
        """Full S3 URL of this task's stderr log."""
        return self.get_s3target_path_base() % 'log/stderr.log'

    def get_s3params_path(self):
        """Full S3 URL of this task's serialized parameters."""
        return self.get_s3target_path_base() % 'params.txt'

    def get_s3target_relative_path_base(self):
        """Bucket-relative prefix template (everything from 'chains/' on)."""
        base = self.get_s3target_path_base()
        parts = base.split('/')
        return '/'.join(parts[parts.index('chains'):])

    def get_s3target_relative_path(self):
        """Bucket-relative key of the main output, e.g. chains/.../out.txt."""
        return self.get_s3target_relative_path_base() % 'out.txt'

    def get_s3stdout_log_relative_path(self):
        """Bucket-relative key of the stdout log."""
        return self.get_s3target_relative_path_base() % 'log/stdout.log'

    def get_s3stderr_log_relative_path(self):
        """Bucket-relative key of the stderr log."""
        return self.get_s3target_relative_path_base() % 'log/stderr.log'

    def get_s3params_relative_path(self):
        """Bucket-relative key of the params file."""
        return self.get_s3target_relative_path_base() % 'params.txt'

    def get_s3target_url(self, expires_in=600):
        """Return a temporary signed URL for this task's output.

        Parameters
        ----------
        expires_in : int, seconds the URL stays valid (default 600)

        Raises
        ------
        TargetNotFoundException (ad-hoc) when no matching key exists.
        """
        if hasattr(self, '_target_path'):
            # NOTE(review): _target_path is a full s3:// URL rather than a
            # bucket key, so this lookup likely returns None and we fall
            # through below; kept as-is to preserve behaviour.
            k = self.bucket.get_key(getattr(self, '_target_path'))
            if k:
                return k.generate_url(expires_in=expires_in)
        k = self.bucket.get_key(self.get_s3target_relative_path())
        if not k:
            raise type('TargetNotFoundException', (Exception,), {})(
                "Couldn't find target with name: %s" % self.get_s3target_relative_path())
        return k.generate_url(expires_in=expires_in)

    def get_key_like(self, like=''):
        """Return the bucket keys whose name contains `like`.

        Returns a list (matching the Python 2 filter() behaviour even on
        Python 3, where filter() is lazy).
        """
        return [key for key in self.bucket.get_all_keys() if like in key.name]

    def join_params(self, sep='_', frequency='day'):
        """Join the task's stringified parameter values deterministically.

        Keys are ordered by the sum of their character codes (stable, if
        unusual).  With no parameters a timestamp at `frequency`
        granularity ('day' or hourly) is returned instead, so the task is
        unique per execution window.
        """
        str_params = self.to_str_params()
        keys_sorted_by_letter = sorted(
            str_params.keys(),
            key=lambda name: sum(ord(ch) for ch in name))
        if not keys_sorted_by_letter:
            fmt = '%Y-%m-%d' if frequency == 'day' else '%Y-%m-%d %H'
            return datetime.strftime(datetime.now(), fmt)
        return sep.join(str_params[k] for k in keys_sorted_by_letter)

    def hash_params(self):
        """Return the MD5 hex digest of the joined parameters.

        BUG FIX: hashlib.md5 requires bytes on Python 3, so the joined
        string is encoded first (a no-op for the digest on Python 2).
        """
        return hashlib.md5(self.join_params().encode('utf-8')).hexdigest()

    def chain_type(self):
        """Name of the chain this task belongs to (its parent directory)."""
        # NOTE(review): os.path.join ignores project_path() here because the
        # second argument is absolute; kept to preserve behaviour.
        module_path = os.path.join(
            project_path(),
            os.path.abspath(sys.modules[self.__module__].__file__)
        )
        dir_path = os.path.dirname(module_path)
        return dir_path.split(os.sep)[-1]
| en | 0.597763 | #1/usr/bin/env python Custom implementation for s3 Returns ------- target_path : str of target path # this could be an ENV variable that we first check # then default to datanectar Returns ------- target_path : str of target path Returns -------- s3stdout_log_path : str of url to s3 stdout lot path for this task Returns -------- s3stderr_log_path : str of url to s3 stderr lot path for this task Returns -------- s3params_path : str of s3 url to params for this task Returns ------ target_path : str of relative target path unformatted Returns ------- relative_path : str like chains/test/TestS3Task/blah Returns -------- s3stdout_log_path : str of url to s3 stdout lot path for this task Returns -------- s3stderr_log_path : str of url to s3 stderr lot path for this task Returns -------- s3params_path : str of s3 url to params for this task Parameters --------- expires_in : int expires_in how long url is good for (defaults to 600 [10 minutes]) Returns --------- url : temporary url for target Parameters --------- like : str of keys to get with this substr Returns -------- keys : list of s3 keys with the like str inside Parameters ---------- sep : str of params joined frequency : frequency of job so we can make sure this is unique per execution frequency Returns -------- joined_params : str of the joined params Returns ------- hashed_params : returns hashed params Returns ------- type : str of the type of chain this task belongs to | 2.114394 | 2 |
leetcode/179.py | windniw/just-for-fun | 1 | 6622485 | """
link: https://leetcode-cn.com/problems/largest-number
problem: arrange the given numbers to form the largest possible concatenated value
solution: sort the numbers pairwise by which concatenation order ("ab" vs "ba") reads larger
"""
class Solution:
    def largestNumber(self, nums: List[int]) -> str:
        """Concatenate the integers in `nums` into the largest number string.

        Sorts `nums` in place so that for any neighbours a, b the digit
        string "ab" reads at least as large as "ba", then joins them.  A
        leading zero after the sort means every entry is zero, so a single
        "0" is returned instead of "00...0".
        """

        def prefer(left, right: int) -> int:
            # Order by which concatenation reads larger.
            combined_lr = '{}{}'.format(left, right)
            combined_rl = '{}{}'.format(right, left)
            if combined_lr > combined_rl:
                return 1
            return -1

        if not nums:
            return "0"
        nums.sort(key=functools.cmp_to_key(prefer), reverse=True)
        if nums[0] == 0:
            return "0"
        return "".join(str(n) for n in nums)
| """
link: https://leetcode-cn.com/problems/largest-number
problem: arrange the given numbers to form the largest possible concatenated value
solution: sort the numbers pairwise by which concatenation order ("ab" vs "ba") reads larger
"""
class Solution:
    def largestNumber(self, nums: List[int]) -> str:
        """Concatenate the integers in `nums` into the largest number string."""
        def ncmp(a, b: int) -> int:
            # Prefer whichever concatenation order reads larger.
            aa, bb = str(a) + str(b), str(b) + str(a)
            return 1 if aa > bb else -1
        if len(nums) == 0:
            return "0"
        # In-place comparator sort; equal concatenations never change the join.
        nums.sort(key=functools.cmp_to_key(ncmp), reverse=True)
        # A leading 0 after sorting means all entries are 0 -> collapse to "0".
        return "0" if nums[0] == 0 else "".join([str(x) for x in nums])
| en | 0.318123 | link: https://leetcode-cn.com/problems/largest-number problem: 计算数字拼凑最大值 solution: 排序。两两比对放置前后顺序的大小 | 3.611338 | 4 |
LitterFilter/PirSource.py | mattdbartlett/LitterFilter | 0 | 6622486 |
from LitterFilter.Event import EventSource
import qwiic_pir
import logging
class PirSource(EventSource):
    """Event source backed by an I2C (Qwiic) PIR motion sensor.

    Emits `activeEvent` when the sensor reading rises and `inactiveEvent`
    when it falls; nothing is emitted while the reading is unchanged.
    """

    def __init__(self, activeEvent, inactiveEvent, address=None):
        self.__device = qwiic_pir.QwiicPIR(address=address)
        self.__activeEvent = activeEvent
        self.__inactiveEvent = inactiveEvent
        # Last reading seen; None forces an event on the first Evaluate().
        self.__lastValue = None
        # begin() == False kept verbatim: other falsy returns must not raise.
        if self.__device.begin() == False:
            raise Exception("Failed to connect to PIR sensor")

    def Evaluate(self, stateMachine):
        """Poll the sensor once and fire an edge event on any change."""
        reading = self.__device.raw_reading()
        changed = self.__lastValue is None or self.__lastValue != reading
        if not changed:
            return
        self.__lastValue = reading
        event = self.__activeEvent if reading else self.__inactiveEvent
        stateMachine.ProcessEvent(event)
|
from LitterFilter.Event import EventSource
import qwiic_pir
import logging
class PirSource(EventSource):
    """
    An event generator backed by a I2C base PIR device
    """
    def __init__(self, activeEvent, inactiveEvent, address=None):
        # Qwiic PIR sensor on the I2C bus (library default address when None).
        self.__device = qwiic_pir.QwiicPIR(address=address)
        self.__activeEvent = activeEvent
        self.__inactiveEvent = inactiveEvent
        # Last reading seen; None guarantees an event on the first Evaluate().
        self.__lastValue = None
        if self.__device.begin() == False:
            raise Exception("Failed to connect to PIR sensor")
    def Evaluate(self, stateMachine):
        # Poll the sensor and fire an event only when the reading changes.
        curValue = self.__device.raw_reading()
        #logging.debug("PIR value is {0}".format(curValue))
        if self.__lastValue is None or self.__lastValue != curValue:
            self.__lastValue = curValue
            if curValue:
                stateMachine.ProcessEvent(self.__activeEvent)
            else:
                stateMachine.ProcessEvent(self.__inactiveEvent)
| en | 0.741491 | An event generator backed by a I2C base PIR device #logging.debug("PIR value is {0}".format(curValue)) | 2.653761 | 3 |
tests/linalg/utils.py | brisvag/pygfx | 60 | 6622487 | from pygfx.linalg import (
Euler,
Matrix4,
Quaternion,
)
def matrix_equals(a: Matrix4, b: Matrix4, tolerance: float = 0.0001):
    """True when both matrices have equally many elements and every pair of
    corresponding elements differs by less than `tolerance`."""
    if len(a.elements) != len(b.elements):
        return False
    for lhs, rhs in zip(a.elements, b.elements):
        if abs(lhs - rhs) >= tolerance:
            return False
    return True
def euler_equals(a: Euler, b: Euler, tolerance: float = 0.0001):
    """True when the summed absolute differences of the x, y and z angles
    stay below `tolerance`."""
    deltas = (abs(a.x - b.x), abs(a.y - b.y), abs(a.z - b.z))
    return sum(deltas) < tolerance
def quat_equals(a: Quaternion, b: Quaternion, tolerance: float = 0.0001):
    """True when the summed absolute differences of the x, y, z and w
    components stay below `tolerance`."""
    total = 0.0
    for component in ('x', 'y', 'z', 'w'):
        total += abs(getattr(a, component) - getattr(b, component))
    return total < tolerance
| from pygfx.linalg import (
Euler,
Matrix4,
Quaternion,
)
def matrix_equals(a: Matrix4, b: Matrix4, tolerance: float = 0.0001):
    """True when both matrices have the same element count and each pair of
    corresponding elements differs by less than `tolerance`."""
    if len(a.elements) != len(b.elements):
        return False
    return all(abs(x - y) < tolerance for x, y in zip(a.elements, b.elements))
def euler_equals(a: Euler, b: Euler, tolerance: float = 0.0001):
    """True when the summed absolute x/y/z differences stay below `tolerance`."""
    return (abs(a.x - b.x) + abs(a.y - b.y) + abs(a.z - b.z)) < tolerance
def quat_equals(a: Quaternion, b: Quaternion, tolerance: float = 0.0001):
    """True when the summed absolute x/y/z/w differences stay below `tolerance`."""
    return (
        abs(a.x - b.x) + abs(a.y - b.y) + abs(a.z - b.z) + abs(a.w - b.w)
    ) < tolerance
| none | 1 | 2.55544 | 3 | |
Intro/24 - minesweeper.py | lucasalme1da/codesignal | 2 | 6622488 | def minesweeper(matrix):
raw = []
array = []
for row in range(len(matrix)):
for col in range(len(matrix[0])):
aux = 0
# Corner
if (row == 0 and col == 0): # topLeftCorner
if(matrix[row + 1][col]):
aux += 1
if(matrix[row][col + 1]):
aux += 1
if(matrix[row + 1][col + 1]):
aux += 1
array.append(aux)
continue
if (row == 0 and col == (len(matrix[0]) - 1)): # topRightCorner
if(matrix[row + 1][col]):
aux += 1
if(matrix[row][col - 1]):
aux += 1
if(matrix[row + 1][col - 1]):
aux += 1
array.append(aux)
continue
if ((row == len(matrix) - 1) and col == 0): # bottomLeftCorner
if(matrix[row - 1][col]):
aux += 1
if(matrix[row][col + 1]):
aux += 1
if(matrix[row - 1][col + 1]):
aux += 1
array.append(aux)
continue
if ((row == len(matrix) - 1) and (col == len(matrix[0]) - 1)):
if(matrix[row - 1][col]):
aux += 1
if(matrix[row][col - 1]):
aux += 1
if(matrix[row - 1][col - 1]):
aux += 1
array.append(aux)
continue
# Sides
if (col == 0): # verticalLeft
if(matrix[row - 1][col]):
aux += 1
if(matrix[row - 1][col + 1]):
aux += 1
if(matrix[row][col + 1]):
aux += 1
if(matrix[row + 1][col + 1]):
aux += 1
if(matrix[row + 1][col]):
aux += 1
array.append(aux)
continue
if (col == len(matrix[0]) - 1): # verticalRight
if(matrix[row - 1][col]):
aux += 1
if(matrix[row - 1][col - 1]):
aux += 1
if(matrix[row][col - 1]):
aux += 1
if(matrix[row + 1][col - 1]):
aux += 1
if(matrix[row + 1][col]):
aux += 1
array.append(aux)
continue
if (row == 0): # horizontalTop
if(matrix[row + 1][col]):
aux += 1
if(matrix[row + 1][col - 1]):
aux += 1
if(matrix[row + 1][col + 1]):
aux += 1
if(matrix[row][col - 1]):
aux += 1
if(matrix[row][col + 1]):
aux += 1
array.append(aux)
continue
if (row == len(matrix) - 1): # horizontalBottom
if(matrix[row - 1][col]):
aux += 1
if(matrix[row - 1][col - 1]):
aux += 1
if(matrix[row - 1][col + 1]):
aux += 1
if(matrix[row][col - 1]):
aux += 1
if(matrix[row][col + 1]):
aux += 1
array.append(aux)
continue
# Middle
if(matrix[row - 1][col - 1]):
aux += 1
if(matrix[row - 1][col]):
aux += 1
if(matrix[row - 1][col + 1]):
aux += 1
if(matrix[row][col - 1]):
aux += 1
if(matrix[row][col + 1]):
aux += 1
if(matrix[row + 1][col - 1]):
aux += 1
if(matrix[row + 1][col]):
aux += 1
if(matrix[row + 1][col + 1]):
aux += 1
array.append(aux)
raw.append(array)
array = []
return raw
| def minesweeper(matrix):
raw = []
array = []
for row in range(len(matrix)):
for col in range(len(matrix[0])):
aux = 0
# Corner
if (row == 0 and col == 0): # topLeftCorner
if(matrix[row + 1][col]):
aux += 1
if(matrix[row][col + 1]):
aux += 1
if(matrix[row + 1][col + 1]):
aux += 1
array.append(aux)
continue
if (row == 0 and col == (len(matrix[0]) - 1)): # topRightCorner
if(matrix[row + 1][col]):
aux += 1
if(matrix[row][col - 1]):
aux += 1
if(matrix[row + 1][col - 1]):
aux += 1
array.append(aux)
continue
if ((row == len(matrix) - 1) and col == 0): # bottomLeftCorner
if(matrix[row - 1][col]):
aux += 1
if(matrix[row][col + 1]):
aux += 1
if(matrix[row - 1][col + 1]):
aux += 1
array.append(aux)
continue
if ((row == len(matrix) - 1) and (col == len(matrix[0]) - 1)):
if(matrix[row - 1][col]):
aux += 1
if(matrix[row][col - 1]):
aux += 1
if(matrix[row - 1][col - 1]):
aux += 1
array.append(aux)
continue
# Sides
if (col == 0): # verticalLeft
if(matrix[row - 1][col]):
aux += 1
if(matrix[row - 1][col + 1]):
aux += 1
if(matrix[row][col + 1]):
aux += 1
if(matrix[row + 1][col + 1]):
aux += 1
if(matrix[row + 1][col]):
aux += 1
array.append(aux)
continue
if (col == len(matrix[0]) - 1): # verticalRight
if(matrix[row - 1][col]):
aux += 1
if(matrix[row - 1][col - 1]):
aux += 1
if(matrix[row][col - 1]):
aux += 1
if(matrix[row + 1][col - 1]):
aux += 1
if(matrix[row + 1][col]):
aux += 1
array.append(aux)
continue
if (row == 0): # horizontalTop
if(matrix[row + 1][col]):
aux += 1
if(matrix[row + 1][col - 1]):
aux += 1
if(matrix[row + 1][col + 1]):
aux += 1
if(matrix[row][col - 1]):
aux += 1
if(matrix[row][col + 1]):
aux += 1
array.append(aux)
continue
if (row == len(matrix) - 1): # horizontalBottom
if(matrix[row - 1][col]):
aux += 1
if(matrix[row - 1][col - 1]):
aux += 1
if(matrix[row - 1][col + 1]):
aux += 1
if(matrix[row][col - 1]):
aux += 1
if(matrix[row][col + 1]):
aux += 1
array.append(aux)
continue
# Middle
if(matrix[row - 1][col - 1]):
aux += 1
if(matrix[row - 1][col]):
aux += 1
if(matrix[row - 1][col + 1]):
aux += 1
if(matrix[row][col - 1]):
aux += 1
if(matrix[row][col + 1]):
aux += 1
if(matrix[row + 1][col - 1]):
aux += 1
if(matrix[row + 1][col]):
aux += 1
if(matrix[row + 1][col + 1]):
aux += 1
array.append(aux)
raw.append(array)
array = []
return raw
| en | 0.361774 | # Corner # topLeftCorner # topRightCorner # bottomLeftCorner # Sides # verticalLeft # verticalRight # horizontalTop # horizontalBottom # Middle | 3.354874 | 3 |
scripts/desafio015.py | User7558/Python3 | 0 | 6622489 | <reponame>User7558/Python3
#desafio15: um programa que pergunte a quantidade de km percorridos por um carro alugado e a quantidade de dias pelos quais ele foi alugado. Calcule o preço a pagar, sabendo que o carro custa R$60 por dia e R$0.15 por km rodado
#
# R$60.00 per rental day plus R$0.15 per kilometre driven.
distance_km = float(input('Quantos km o carro alugado percorreu? '))
rental_days = int(input('Por quantos dias ele foi alugado? '))
total_cost = (60 * rental_days) + (0.15 * distance_km)
print('O preço que deverá ser pago pelo aluguel do carro será de R${:.2f}.'.format(total_cost))
| #desafio15: um programa que pergunte a quantidade de km percorridos por um carro alugado e a quantidade de dias pelos quais ele foi alugado. Calcule o preço a pagar, sabendo que o carro custa R$60 por dia e R$0.15 por km rodado
#
km=float(input('Quantos km o carro alugado percorreu? '))
d=int(input('Por quantos dias ele foi alugado? '))
a = (60*d)+(0.15*km)
print('O preço que deverá ser pago pelo aluguel do carro será de R${:.2f}.'.format(a)) | pt | 0.938643 | #desafio15: um programa que pergunte a quantidade de km percorridos por um carro alugado e a quantidade de dias pelos quais ele foi alugado. Calcule o preço a pagar, sabendo que o carro custa R$60 por dia e R$0.15 por km rodado # | 3.855968 | 4 |
distrobuild/routes/bootstrap.py | rocky-linux/distrobuild | 30 | 6622490 | # Copyright (c) 2021 The Distrobuild Authors
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from datetime import datetime
from fastapi import APIRouter, Request
from fastapi.responses import JSONResponse
from tortoise.transactions import atomic
from distrobuild.bootstrap import process_repo_dump, process_module_dump
from distrobuild.common import get_user
from distrobuild.models import Repo, Package, Build, Import, ImportStatus, ImportCommit, BuildStatus
from distrobuild.session import koji_session
from distrobuild.settings import settings
# All endpoints below are mounted under /bootstrap.
router = APIRouter(prefix="/bootstrap")


@atomic()
async def import_build_from_koji(username: str, package: Package, koji_build):
    """Record an already-finished Koji build as a succeeded Import + Build.

    Runs inside a single DB transaction (@atomic).  `koji_build` is a raw
    Koji build dict; the fields "source" and "task_id" are read here.
    """
    new_import = await Import.create(package_id=package.id, status=ImportStatus.SUCCEEDED,
                                     executor_username=username, version=settings.version)
    # Koji "source" looks like "<git url>#<commit>"; keep only the commit hash.
    commit = koji_build["source"].split("#")[1]
    import_commit = await ImportCommit.create(branch=f"{settings.original_import_branch_prefix}{settings.version}",
                                              commit=commit, import__id=new_import.id)
    # Stamp both as "now" even though the build happened in Koji earlier.
    package.last_import = datetime.now()
    package.last_build = datetime.now()
    await Build.create(package_id=package.id, status=BuildStatus.SUCCEEDED,
                       executor_username=username,
                       point_release=f"{settings.version}_{settings.default_point_release}",
                       import_commit_id=import_commit.id, koji_id=koji_build["task_id"])
    await package.save()


@router.post("/modules")
async def bootstrap_modules(request: Request):
    """Trigger a module dump import; requires an authenticated user."""
    user = get_user(request)
    await process_module_dump(user["preferred_username"])
    return JSONResponse(content={})


@router.post("/import_from_koji", status_code=202)
async def import_from_koji(request: Request):
    """Backfill Build/Import rows from Koji for packages without a build.

    NOTE(review): this scans every Koji build for every package (O(P*B))
    and imports *each* matching successful build (state == 1), so a
    package with several successful Koji builds gets several Import/Build
    rows -- confirm that is intended.
    """
    user = get_user(request)
    all_koji_builds = koji_session.listBuilds()
    packages_without_builds = await Package.filter(last_build__isnull=True).all()
    for package in packages_without_builds:
        for koji_build in all_koji_builds:
            if package.name == koji_build[
                "name"] and not package.is_module and not package.part_of_module and \
                    package.repo != Repo.MODULAR_CANDIDATE and koji_build["state"] == 1:
                await import_build_from_koji(user["preferred_username"], package, koji_build)
    return {}


@router.post("/{repo}")
async def bootstrap_repo(request: Request, repo: Repo):
    """Trigger a repo dump import for `repo`; requires an authenticated user."""
    user = get_user(request)
    await process_repo_dump(repo, user["preferred_username"])
    return JSONResponse(content={})
| # Copyright (c) 2021 The Distrobuild Authors
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from datetime import datetime
from fastapi import APIRouter, Request
from fastapi.responses import JSONResponse
from tortoise.transactions import atomic
from distrobuild.bootstrap import process_repo_dump, process_module_dump
from distrobuild.common import get_user
from distrobuild.models import Repo, Package, Build, Import, ImportStatus, ImportCommit, BuildStatus
from distrobuild.session import koji_session
from distrobuild.settings import settings
# All endpoints below are mounted under /bootstrap.
router = APIRouter(prefix="/bootstrap")


@atomic()
async def import_build_from_koji(username: str, package: Package, koji_build):
    """Record an already-finished Koji build as a succeeded Import + Build.

    Runs inside a single DB transaction (@atomic).  `koji_build` is a raw
    Koji build dict; the fields "source" and "task_id" are read here.
    """
    new_import = await Import.create(package_id=package.id, status=ImportStatus.SUCCEEDED,
                                     executor_username=username, version=settings.version)
    # Koji "source" looks like "<git url>#<commit>"; keep only the commit hash.
    commit = koji_build["source"].split("#")[1]
    import_commit = await ImportCommit.create(branch=f"{settings.original_import_branch_prefix}{settings.version}",
                                              commit=commit, import__id=new_import.id)
    # Stamp both as "now" even though the build happened in Koji earlier.
    package.last_import = datetime.now()
    package.last_build = datetime.now()
    await Build.create(package_id=package.id, status=BuildStatus.SUCCEEDED,
                       executor_username=username,
                       point_release=f"{settings.version}_{settings.default_point_release}",
                       import_commit_id=import_commit.id, koji_id=koji_build["task_id"])
    await package.save()


@router.post("/modules")
async def bootstrap_modules(request: Request):
    """Trigger a module dump import; requires an authenticated user."""
    user = get_user(request)
    await process_module_dump(user["preferred_username"])
    return JSONResponse(content={})


@router.post("/import_from_koji", status_code=202)
async def import_from_koji(request: Request):
    """Backfill Build/Import rows from Koji for packages without a build.

    NOTE(review): this scans every Koji build for every package (O(P*B))
    and imports *each* matching successful build (state == 1), so a
    package with several successful Koji builds gets several Import/Build
    rows -- confirm that is intended.
    """
    user = get_user(request)
    all_koji_builds = koji_session.listBuilds()
    packages_without_builds = await Package.filter(last_build__isnull=True).all()
    for package in packages_without_builds:
        for koji_build in all_koji_builds:
            if package.name == koji_build[
                "name"] and not package.is_module and not package.part_of_module and \
                    package.repo != Repo.MODULAR_CANDIDATE and koji_build["state"] == 1:
                await import_build_from_koji(user["preferred_username"], package, koji_build)
    return {}


@router.post("/{repo}")
async def bootstrap_repo(request: Request, repo: Repo):
    """Trigger a repo dump import for `repo`; requires an authenticated user."""
    user = get_user(request)
    await process_repo_dump(repo, user["preferred_username"])
    return JSONResponse(content={})
| en | 0.769406 | # Copyright (c) 2021 The Distrobuild Authors # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in all # copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. | 1.648092 | 2 |
excel/writeexcel.py | ty68/alped | 0 | 6622491 | import xlwt
#https://www.cnblogs.com/nancyzhu/p/8401552.html
# Demo rows for write_excel(): one header row (Chinese labels for
# name / age / gender / score) followed by four data records.
students = [
    ['姓名','年龄','性别','分数'],
    ['mary',20,'女',89.9],
    ['mary',20,'女',89.9],
    ['mary',20,'女',89.9],
    ['mary',20,'女',90.9],
]
def write_excel(data, filename):
    """Write a 2-D list of rows into an .xls workbook (one sheet named "info").

    :param data: list of rows; each row is a list of cell values
    :param filename: output path for the workbook
    """
    # Create a new workbook object.
    book = xlwt.Workbook()
    # Add a single worksheet named "info".
    sheet = book.add_sheet('info')
    # enumerate() replaces the original hand-maintained row/col counters.
    for row, record in enumerate(data):
        for col, value in enumerate(record):
            # Write one cell at (row, col).
            sheet.write(row, col, value)
    # Flush the workbook to disk.
    book.save(filename)
if __name__ == '__main__':
    # Demo: dump the sample rows above into aaa.xls in the working directory.
    write_excel(students, 'aaa.xls')
| import xlwt
#https://www.cnblogs.com/nancyzhu/p/8401552.html
students = [
['姓名','年龄','性别','分数'],
['mary',20,'女',89.9],
['mary',20,'女',89.9],
['mary',20,'女',89.9],
['mary',20,'女',90.9],
]
def write_excel(data,filename):
#新建一个excel对象
book = xlwt.Workbook()
# 给这个book添加一个sheet
sheet = book.add_sheet('info')
row = 0
for stu in data:
col = 0
for i in stu:
#给指定单元格写入数据
sheet.write(row,col, i)
col += 1
row += 1
#保存写入的数据到文件
book.save(filename)
if __name__ == '__main__':
write_excel(students, 'aaa.xls')
| zh | 0.87457 | #https://www.cnblogs.com/nancyzhu/p/8401552.html #新建一个excel对象 # 给这个book添加一个sheet #给指定单元格写入数据 #保存写入的数据到文件 | 3.46069 | 3 |
mysite/urls.py | gurupratap-matharu/django-rest-booking-api | 35 | 6622492 | from api import views
from django.contrib import admin
from django.urls import include, path
from rest_framework.routers import DefaultRouter
from rest_framework.schemas import get_schema_view
from rest_framework.documentation import include_docs_urls
# DRF router: auto-generates list/detail routes for the Match viewset.
router = DefaultRouter()
router.register(r'match', views.MatchViewSet)
# Machine-readable schema of the API, served at /schema/.
schema_view = get_schema_view(title='Bookings API',
                              description='An API to book matches or update odds.')
# URL table: Django admin, the router-generated API, the schema and
# the browsable API docs.
urlpatterns = [
    path('admin/', admin.site.urls),
    path('api/', include(router.urls)),
    path('schema/', schema_view),
    path('docs/', include_docs_urls(title='Bookings API'))
]
| from api import views
from django.contrib import admin
from django.urls import include, path
from rest_framework.routers import DefaultRouter
from rest_framework.schemas import get_schema_view
from rest_framework.documentation import include_docs_urls
router = DefaultRouter()
router.register(r'match', views.MatchViewSet)
schema_view = get_schema_view(title='Bookings API',
description='An API to book matches or update odds.')
urlpatterns = [
path('admin/', admin.site.urls),
path('api/', include(router.urls)),
path('schema/', schema_view),
path('docs/', include_docs_urls(title='Bookings API'))
]
| none | 1 | 2.010706 | 2 | |
hwtLib/peripheral/usb/usb2/utmi_usb_agent.py | Nic30/hwtLib | 24 | 6622493 | <gh_stars>10-100
from collections import deque
from typing import Deque, Union
from hwt.hdl.types.bits import Bits
from hwt.hdl.types.struct import HStruct
from hwt.simulator.agentBase import SyncAgentBase
from hwtLib.peripheral.usb.constants import usb_addr_t, usb_endp_t, \
usb_crc5_t, usb_pid_t, USB_PID
from hwtLib.peripheral.usb.sim.agent_base import UsbPacketToken, UsbPacketData, \
UsbPacketHandshake
from hwtLib.peripheral.usb.sim.usb_agent_device import UsbDevAgent
from hwtLib.peripheral.usb.sim.usb_agent_host import UsbHostAgent
from hwtLib.peripheral.usb.usb2.utmi_agent import Utmi_8bAgent
from hwtSimApi.hdlSimulator import HdlSimulator
from hwtSimApi.process_utils import CallbackLoop
class UtmiUsbHostProcAgent(UsbHostAgent):
    """
    A simulation agent for :class:`hwtLib.peripheral.usb.usb2.utmi.Utmi_8b` interface
    with the functionality of the host.

    Converts between packet objects (:class:`UsbPacketToken`,
    :class:`UsbPacketData`, :class:`UsbPacketHandshake`) and the raw byte
    stream: the first byte on the wire carries the 4-bit PID in the low
    nibble and its bitwise complement in the high nibble.
    """
    # Wire layout of a token packet; serialized by reinterpreting this
    # HStruct as bytes in deparse_packet().
    usb_packet_token_t = HStruct(
        (usb_pid_t, "pid"),
        (usb_pid_t, "pid_inv"),  # inversion of the pid
        (usb_addr_t, "addr"),
        (usb_endp_t, "endp"),
        (usb_crc5_t, "crc5"),  # :note: does not involve USB_PID, only addr, endp
    )
    def parse_packet_pid_and_bytes(self, pid: int, p: Deque[int]):
        """Build a packet object from an already extracted/verified PID and
        the remaining body bytes.

        :param pid: 4-bit USB packet id (complement nibble already stripped)
        :param p: remaining packet bytes; consumed by (and owned by) the result
        """
        if USB_PID.is_token(pid):
            return UsbPacketToken.from_pid_and_body_bytes(pid, p)
        elif USB_PID.is_data(pid):
            # Trailing two bytes are CRC16, low byte first on the wire, so
            # popping from the tail yields high then low; verify vs payload.
            crc16_h = p.pop()
            crc16_l = p.pop()
            crc16 = (crc16_h << 8 | crc16_l)
            new_p = UsbPacketData(pid, p)
            expected_crc = new_p.crc16()
            assert crc16 == expected_crc, (crc16, expected_crc, p)
            return new_p
        elif USB_PID.is_hs(pid):
            # Handshake packets (ACK/NAK/...) carry no body bytes.
            return UsbPacketHandshake(pid)
        else:
            raise NotImplementedError(pid)
    def parse_packet(self, p: Deque[int]):
        """Parse a raw packet: strip the leading PID byte, check the
        complement nibble, then delegate to parse_packet_pid_and_bytes()."""
        # need to cut of ulpi tx_cmd header
        pid = int(p.popleft())
        pid_inv = (pid & 0xf0) >> 4
        pid &= 0xf
        # High nibble must be the bitwise complement of the PID nibble.
        assert pid == (~pid_inv & 0xf), (pid, pid_inv)
        return self.parse_packet_pid_and_bytes(pid, p)
    def deparse_packet(self, p: Union[UsbPacketToken, UsbPacketData, UsbPacketHandshake]):
        """Serialize a packet object into a deque of raw wire bytes
        (inverse of parse_packet())."""
        cls = type(p)
        v = deque()
        if cls is UsbPacketToken:
            # Fill the HStruct and reinterpret it as an array of bytes to
            # obtain the exact wire layout (pid+complement, addr, endp, crc5).
            v0 = self.usb_packet_token_t.from_py({
                "pid": p.pid,
                "pid_inv":~p.pid & 0xf,
                "addr": p.addr,
                "endp": p.endp,
                "crc5": p.crc5(),
            })
            v1 = v0._reinterpret_cast(Bits(8)[self.usb_packet_token_t.bit_length() // 8])
            v.extend(int(_v) for _v in v1)
        elif cls is UsbPacketData:
            # First byte: complement nibble high, PID nibble low.
            v.append(((~p.pid & 0xf) << 4) | p.pid)
            v.extend(p.data)
            crc16 = p.crc16()
            # CRC16 appended low byte first (matches the parse order above).
            v.append(crc16 & 0xff)
            v.append(crc16 >> 8)
        elif cls is UsbPacketHandshake:
            # Handshake: PID byte only, no body or CRC.
            v.append(((~p.pid & 0xf) << 4) | p.pid)
        else:
            raise NotImplementedError(cls, p)
        return v
class UtmiUsbDevProcAgent(UsbDevAgent):
    """Device-side counterpart of :class:`UtmiUsbHostProcAgent`.

    Packet (de)serialization is identical on both ends of the link, so all
    three methods delegate explicitly to the host agent's implementations.
    """
    def parse_packet_pid_and_bytes(self, pid: int, p: Deque[int]):
        return UtmiUsbHostProcAgent.parse_packet_pid_and_bytes(self, pid, p)
    def parse_packet(self, p):
        return UtmiUsbHostProcAgent.parse_packet(self, p)
    def deparse_packet(self, p):
        # need to add ulpi tx_cmd header
        return UtmiUsbHostProcAgent.deparse_packet(self, p)
class UtmiUsbAgent(Utmi_8bAgent, SyncAgentBase):
    """
    :class:` hwtLib.peripheral.usb.usb2.utmi_agent.Utmi_8bAgent`
    with device host logic and USB stack

    Depending on whether it is used via getDrivers() (acts as PHY/host) or
    getMonitors() (acts as link/device), it lazily instantiates the matching
    USB stack agent and pumps its generator once per clock edge.
    """
    def __init__(self, sim:HdlSimulator, intf:Utmi_8bAgent, allowNoReset=False,
                 wrap_monitor_and_driver_in_edge_callback=True):
        Utmi_8bAgent.__init__(self, sim, intf)
        # Device descriptors; must be set by the user before getMonitors().
        self.descriptors = None
        # USB stack agent + its generator, created lazily in getDrivers()/getMonitors().
        self.usb_driver = None
        self.usb_driver_proc = None
        self.clk = self.intf._getAssociatedClk()
        self.rst = self._discoverReset(intf, allowNoReset)
        # NOTE: these assignments intentionally shadow the monitor()/driver()
        # methods with CallbackLoop instances wrapping them, so they are
        # re-invoked on every clock edge while enabled.
        self.monitor = CallbackLoop(sim, self.clk, self.monitor, self.getEnable)
        self.driver = CallbackLoop(sim, self.clk, self.driver, self.getEnable)
    def run_usb_driver(self):
        """Advance the USB stack generator by one step, if it exists;
        a finished generator is silently ignored."""
        if self.usb_driver_proc is not None:
            try:
                next(self.usb_driver_proc)
            except StopIteration:
                pass
    def driver_init(self):
        # One-shot pass-through of the base-class driver coroutine.
        yield from Utmi_8bAgent.driver(self)
    def driver(self):
        # Per-edge callback body (wrapped by CallbackLoop in __init__).
        self.run_usb_driver()
    def getDrivers(self):
        # PHY/host
        if self.usb_driver is None:
            self.usb_driver = UtmiUsbHostProcAgent(self.link_to_phy_packets,
                                                   self.phy_to_link_packets)
            self.usb_driver_proc = self.usb_driver.proc()
        return [
            self.driver_init(),
            *Utmi_8bAgent.getDrivers(self)
        ]
    def monitor_init(self):
        # One-shot pass-through of the base-class monitor coroutine.
        yield from Utmi_8bAgent.monitor(self)
    def monitor(self):
        # Per-edge callback body (wrapped by CallbackLoop in __init__).
        self.run_usb_driver()
    def getMonitors(self):
        # link/device
        assert self.descriptors is not None
        if self.usb_driver is None:
            self.usb_driver = UtmiUsbDevProcAgent(self.phy_to_link_packets,
                                                  self.link_to_phy_packets,
                                                  self.descriptors)
            self.usb_driver_proc = self.usb_driver.proc()
        return [
            self.monitor_init(),
            *Utmi_8bAgent.getMonitors(self)
        ]
| from collections import deque
from typing import Deque, Union
from hwt.hdl.types.bits import Bits
from hwt.hdl.types.struct import HStruct
from hwt.simulator.agentBase import SyncAgentBase
from hwtLib.peripheral.usb.constants import usb_addr_t, usb_endp_t, \
usb_crc5_t, usb_pid_t, USB_PID
from hwtLib.peripheral.usb.sim.agent_base import UsbPacketToken, UsbPacketData, \
UsbPacketHandshake
from hwtLib.peripheral.usb.sim.usb_agent_device import UsbDevAgent
from hwtLib.peripheral.usb.sim.usb_agent_host import UsbHostAgent
from hwtLib.peripheral.usb.usb2.utmi_agent import Utmi_8bAgent
from hwtSimApi.hdlSimulator import HdlSimulator
from hwtSimApi.process_utils import CallbackLoop
class UtmiUsbHostProcAgent(UsbHostAgent):
"""
A simulation agent for :class:`hwtLib.peripheral.usb.usb2.utmi.Utmi_8b` interface
with the functionality of the host.
"""
usb_packet_token_t = HStruct(
(usb_pid_t, "pid"),
(usb_pid_t, "pid_inv"), # inversion of the pid
(usb_addr_t, "addr"),
(usb_endp_t, "endp"),
(usb_crc5_t, "crc5"), # :note: does not involve USB_PID, only addr, endp
)
def parse_packet_pid_and_bytes(self, pid: int, p: Deque[int]):
if USB_PID.is_token(pid):
return UsbPacketToken.from_pid_and_body_bytes(pid, p)
elif USB_PID.is_data(pid):
crc16_h = p.pop()
crc16_l = p.pop()
crc16 = (crc16_h << 8 | crc16_l)
new_p = UsbPacketData(pid, p)
expected_crc = new_p.crc16()
assert crc16 == expected_crc, (crc16, expected_crc, p)
return new_p
elif USB_PID.is_hs(pid):
return UsbPacketHandshake(pid)
else:
raise NotImplementedError(pid)
def parse_packet(self, p: Deque[int]):
# need to cut of ulpi tx_cmd header
pid = int(p.popleft())
pid_inv = (pid & 0xf0) >> 4
pid &= 0xf
assert pid == (~pid_inv & 0xf), (pid, pid_inv)
return self.parse_packet_pid_and_bytes(pid, p)
def deparse_packet(self, p: Union[UsbPacketToken, UsbPacketData, UsbPacketHandshake]):
cls = type(p)
v = deque()
if cls is UsbPacketToken:
v0 = self.usb_packet_token_t.from_py({
"pid": p.pid,
"pid_inv":~p.pid & 0xf,
"addr": p.addr,
"endp": p.endp,
"crc5": p.crc5(),
})
v1 = v0._reinterpret_cast(Bits(8)[self.usb_packet_token_t.bit_length() // 8])
v.extend(int(_v) for _v in v1)
elif cls is UsbPacketData:
v.append(((~p.pid & 0xf) << 4) | p.pid)
v.extend(p.data)
crc16 = p.crc16()
v.append(crc16 & 0xff)
v.append(crc16 >> 8)
elif cls is UsbPacketHandshake:
v.append(((~p.pid & 0xf) << 4) | p.pid)
else:
raise NotImplementedError(cls, p)
return v
class UtmiUsbDevProcAgent(UsbDevAgent):
def parse_packet_pid_and_bytes(self, pid: int, p: Deque[int]):
return UtmiUsbHostProcAgent.parse_packet_pid_and_bytes(self, pid, p)
def parse_packet(self, p):
return UtmiUsbHostProcAgent.parse_packet(self, p)
def deparse_packet(self, p):
# need to add ulpi tx_cmd header
return UtmiUsbHostProcAgent.deparse_packet(self, p)
class UtmiUsbAgent(Utmi_8bAgent, SyncAgentBase):
"""
:class:` hwtLib.peripheral.usb.usb2.utmi_agent.Utmi_8bAgent`
with device host logic and USB stack
"""
def __init__(self, sim:HdlSimulator, intf:Utmi_8bAgent, allowNoReset=False,
wrap_monitor_and_driver_in_edge_callback=True):
Utmi_8bAgent.__init__(self, sim, intf)
self.descriptors = None
self.usb_driver = None
self.usb_driver_proc = None
self.clk = self.intf._getAssociatedClk()
self.rst = self._discoverReset(intf, allowNoReset)
self.monitor = CallbackLoop(sim, self.clk, self.monitor, self.getEnable)
self.driver = CallbackLoop(sim, self.clk, self.driver, self.getEnable)
def run_usb_driver(self):
if self.usb_driver_proc is not None:
try:
next(self.usb_driver_proc)
except StopIteration:
pass
def driver_init(self):
yield from Utmi_8bAgent.driver(self)
def driver(self):
self.run_usb_driver()
def getDrivers(self):
# PHY/host
if self.usb_driver is None:
self.usb_driver = UtmiUsbHostProcAgent(self.link_to_phy_packets,
self.phy_to_link_packets)
self.usb_driver_proc = self.usb_driver.proc()
return [
self.driver_init(),
*Utmi_8bAgent.getDrivers(self)
]
def monitor_init(self):
yield from Utmi_8bAgent.monitor(self)
def monitor(self):
self.run_usb_driver()
def getMonitors(self):
# link/device
assert self.descriptors is not None
if self.usb_driver is None:
self.usb_driver = UtmiUsbDevProcAgent(self.phy_to_link_packets,
self.link_to_phy_packets,
self.descriptors)
self.usb_driver_proc = self.usb_driver.proc()
return [
self.monitor_init(),
*Utmi_8bAgent.getMonitors(self)
] | en | 0.653978 | A simulation agent for :class:`hwtLib.peripheral.usb.usb2.utmi.Utmi_8b` interface with the functionality of the host. # inversion of the pid # :note: does not involve USB_PID, only addr, endp # need to cut of ulpi tx_cmd header # need to add ulpi tx_cmd header :class:` hwtLib.peripheral.usb.usb2.utmi_agent.Utmi_8bAgent` with device host logic and USB stack # PHY/host # link/device | 2.06327 | 2 |
Sunphotometer/spdata.py | baikangwang/SunPhotometer | 0 | 6622494 | <filename>Sunphotometer/spdata.py
# -*- coding: utf-8 -*-
import os
import sys
import time
import shutil
import subprocess as sub
import threading
from datetime import datetime
from datetime import timedelta
from ftplib import FTP
from sunphotometer import DataProcess
def download(stime, etime=None, stations=None, ftp_dir='/data2/cawas', data_dir='S:/data', ftp_ip='10.32.8.175',
             user='cawas', pword='<PASSWORD>'):
    '''
    Download AOD data files from CAMS ftp server.

    :param stime: (*datetime*) Start time.
    :param etime: (*datetime*) End time. NOTE(review): accepted but currently unused.
    :param stations: (*list*) Station list. Default is None, all stations will be included.
    :param ftp_dir: (*string*) Ftp server directory.
    :param data_dir: (*string*) Local data directory.
    :param ftp_ip: (*string*) Ftp address.
    :param user: (*string*) Ftp user name.
    :param pword: (*string*) Ftp password.
    '''
    # Set directories
    net = 'AC'  # NOTE(review): unused local
    type = 'AOD'
    ftp_dir = ftp_dir + '/cawn_dat/aer-' + type
    # Login ftp
    print 'Login ftp host...'
    ftp = FTP(ftp_ip)
    ftp.encoding = 'utf-8'
    try:
        ftp.login(user, pword)
    except:
        # NOTE(review): bare except hides the original ftplib error.
        raise Exception('[ftp]: Login failed')
    # ftp=FTP()
    # ftp.connect(ftp_ip,21)
    # ftp.sendcmd('USER '+user)
    # ftp.sendcmd('PASS '+pword)
    # access data dir
    try:
        try:
            # Remote layout: <ftp_dir>/YYYYMM
            host_dir = '{0}/{1}'.format(ftp_dir, stime.strftime('%Y%m'))
            ftp.cwd(host_dir)
            print host_dir
        except:
            raise IOError('[ftp]: {0} not exists'.format(host_dir))
        # Download the data
        print 'Start download data...'
        filelist = []
        # ftp.dir() invokes the callback once per LIST output line.
        ftp.dir('*.*', filelist.append)
        n = 0
        for f in filelist:
            # Last whitespace-separated token of the LIST line is the file name.
            name = f.split(' ')[-1]
            if stations is None:
                # No station filter: fetch every file for stime's day.
                if stime.strftime('%Y%m%d') in name:
                    print ' ' + name
                    local_dir = os.path.join(data_dir, stime.strftime('%Y%m'))
                    print local_dir
                    if not os.path.isdir(local_dir):
                        os.makedirs(local_dir)
                    try:
                        ftp.retrbinary('RETR %s' % name, open(
                            os.path.join(local_dir, name), 'wb').write)
                        n += 1
                    except:
                        print 'Failed to download {0}'.format(f)
            else:
                # Station filter: file name must contain both the date and
                # the station id; files land under <data_dir>/<stId>/YYYYMM.
                for st in stations.tolist():
                    if stime.strftime('%Y%m%d') in name and st.stId in name:
                        print ' ' + name
                        local_dir = os.path.join(
                            data_dir, st.stId, stime.strftime('%Y%m'))
                        print local_dir
                        if not os.path.isdir(local_dir):
                            os.makedirs(local_dir)
                        try:
                            ftp.retrbinary('RETR %s' % name, open(
                                os.path.join(local_dir, name), 'wb').write)
                            n += 1
                        except:
                            print 'Failed to download {0}'.format(f)
        print 'Total file number: ' + str(n)
        print 'Download Finished!'
    finally:
        # Quit ftp
        ftp.quit()
def unrar(rarfn, dest_dir):
    '''
    Unzip RAR file. Thin delegation to the DataProcess backend.

    :param rarfn: (*string*) RAR data file.
    :param dest_dir: (*string*) Destination folder.
    '''
    DataProcess.unrar(rarfn, dest_dir)
def merge_files(infns, outfn):
    '''
    Merge multiple k7 data files to one file. Thin delegation to DataProcess.

    :param infns: (*list*) Input k7 data files.
    :param outfn: (*string*) Output k7 data file.
    '''
    DataProcess.mergeFiles(infns, outfn)
def decode(k7fn):
    '''
    Decode k7 file to data list. Delegates to the DataProcess backend.

    :param k7fn: (*string*) K7 file name.
    :returns: (*list*) Data list.
    '''
    # The intermediate variable in the original added nothing; return directly.
    return DataProcess.decode(k7fn)
def extract(data, type='NSU'):
    '''
    Extract data by type. Delegates to the DataProcess backend.

    :param data: (*list*) Data list.
    :param type: (*string*) Data type, e.g. ``'NSU'``.
    :returns: (*list*) Extracted data list.
    '''
    # NOTE: the parameter name ``type`` shadows the builtin but cannot be
    # renamed without breaking keyword-argument callers.
    return DataProcess.getDataByType(data, type)
def save(data, fn):
    '''
    Save data to an ASCII file. Thin delegation to the DataProcess backend.

    :param data: (*list*) Data list.
    :param fn: (*string*) Output file name.
    '''
    DataProcess.writeASCIIFile(data, fn)
def cal_aot(wdir, calfn, taofn, nsufn, lat, lon, alt, insnum=1, cloud=1, no2=-2,
            no2fn=None, ozone=-1, ozonefn='ozono.dat', alpha=1, nwave=4, waves='2 3 4 5',
            press=-1):
    '''
    Calculate AOT by writing an ``inputpar.dat`` parameter file and running
    the external ``ESPESOR.EXE`` program. The parameter lines below must be
    written in exactly this order — ESPESOR reads them positionally.

    :param wdir: (*string*) Working directory containing ESPESOR.EXE.
    :param calfn: (*string*) Calibration file name.
    :param taofn: (*string*) Result AOT file.
    :param nsufn: (*string*) NSU data file name.
    :param lat: (*float*) Latitude.
    :param lon: (*float*) Longitude.
    :param alt: (*float*) Altitude (m a.s.l).
    :param insnum: (*int*) Instrument number (per "instruments.dat").
    :param cloud: (*int*) Cloud filtering flag (1 yes, 0 no).
    :param no2: (*float*) NO2 value; -1 means read from *no2fn*.
    :param no2fn: (*string*) NO2 file, only used when no2 == -1.
    :param ozone: (*float*) Ozone in Dobson units; -1 means read from *ozonefn*.
    :param ozonefn: (*string*) Ozone file, only used when ozone == -1.
    :param alpha: (*int*) Alpha parameter calculation flag (0 no, 1 yes).
    :param nwave: (*int*) Number of wavelengths used in alpha calculation.
    :param waves: (*string*) Space-separated wavelength positions.
    :param press: (*float*) Surface pressure.
    '''
    # DataProcess.calAOT(exefn, inputfn, calfn, taofn, lat, lon, alt, ozonefn, nsufn)
    # wdir = r'C:\Program Files (x86)\ASTPWin\PlugIns\Aot'
    exefn = os.path.join(wdir, 'ESPESOR.EXE')
    inputfn = os.path.join(wdir, 'inputpar.dat')
    if ozone == -1:
        ozonefn = os.path.join(wdir, ozonefn)
    with open(inputfn, 'w') as f:
        # Instrument number (according with “instruments.dat”)
        f.write(str(insnum) + '\n')
        f.write(calfn + '\n')  # Path of the calibration file
        f.write(taofn + '\n')  # Path of the output file
        f.write(str(cloud) + '\n')  # Cloud filtering (1 yes, 0 no)
        f.write(str(lat) + '\n')  # Latitude of measurement place
        f.write(str(lon) + '\n')  # Longitude of measurement place
        f.write(str(alt) + '\n')  # Surface height (m a.s.l)
        f.write(str(no2) + '\n')  # no2 mean latitude values
        if no2 == -1:
            f.write(no2fn + '\n')  # Path of the no2 file
        f.write(str(ozone) + '\n')  # ozone dobson units
        if ozone == -1:
            f.write(ozonefn + '\n')  # Path of the ozone file
        f.write(str(alpha) + '\n')  # alfa parameter calculation (0 no, 1 yes)
        # number of wavelength used in alpha calculation
        f.write(str(nwave) + '\n')
        # position of the wavelength used in alpha calculation
        f.write(waves + '\n')
        f.write(str(press) + '\n')  # Surface pressure
        # f.write(sskfn + '\n') #Path of ascii ssk file (blank as colums separator)
        # Path of ascii nsu file (blank as colums separator)
        f.write(nsufn + '\n')
        f.close()
    # os.system('ESPESOR.EXE inputpar.dat /G0')
    command=[exefn,inputfn,'/G0']
    # __run_command(command,wdir,5)
    # Run ESPESOR with a 5 second timeout (see RunCmd below).
    RunCmd(command,wdir,5).Run()
def test():
    # Placeholder smoke-test hook; performs no real checks.
    print 'Test passed!'
class RunCmd(threading.Thread):
    """Run an external command with a timeout.

    The command runs in a worker thread; Run() waits up to *timeout* seconds
    and terminates the child process if it is still alive afterwards.
    """
    def __init__(self, cmd, cwd, timeout):
        threading.Thread.__init__(self)
        self.cmd = cmd          # argv list passed to subprocess.Popen
        self.timeout = timeout  # seconds to wait before terminating
        self.cwd = cwd          # working directory for the child process
    def run(self):
        # Thread body: spawn the process and block until it exits.
        self.p = sub.Popen(self.cmd, cwd=self.cwd, shell=False)
        self.p.wait()
    def Run(self):
        """Start the thread and enforce the timeout from the caller's side."""
        self.start()
        self.join(self.timeout)
        if self.is_alive():
            # NOTE(review): if Popen itself hasn't completed yet, self.p may
            # not exist here — a small startup race; confirm acceptable.
            print "Command {0} timed out after {1} seconds".format(self.cmd,self.timeout)
            self.p.terminate()  # use self.p.kill() if process needs a kill -9
            self.join()
| <filename>Sunphotometer/spdata.py
# -*- coding: utf-8 -*-
import os
import sys
import time
import shutil
import subprocess as sub
import threading
from datetime import datetime
from datetime import timedelta
from ftplib import FTP
from sunphotometer import DataProcess
def download(stime, etime=None, stations=None, ftp_dir='/data2/cawas', data_dir='S:/data', ftp_ip='10.32.8.175',
user='cawas', pword='<PASSWORD>'):
'''
Download AOD data files from CAMS ftp server.
:param stime: (*datetime*) Start time.
:param etime: (*datetime*) End time.
:param stations: (*list*) Station list. Default is None, all stations will be included.
:param ftp_dir: (*string*) Ftp server directory.
:param data_dir: (*string*) Local data directory.
:param ftp_ip: (*string*) Ftp address.
:param user: (*string*) Ftp user name.
:param pword: (*string*) Ftp password.
'''
# Set directories
net = 'AC'
type = 'AOD'
ftp_dir = ftp_dir + '/cawn_dat/aer-' + type
# Login ftp
print 'Login ftp host...'
ftp = FTP(ftp_ip)
ftp.encoding = 'utf-8'
try:
ftp.login(user, pword)
except:
raise Exception('[ftp]: Login failed')
# ftp=FTP()
# ftp.connect(ftp_ip,21)
# ftp.sendcmd('USER '+user)
# ftp.sendcmd('PASS '+pword)
# access data dir
try:
try:
host_dir = '{0}/{1}'.format(ftp_dir, stime.strftime('%Y%m'))
ftp.cwd(host_dir)
print host_dir
except:
raise IOError('[ftp]: {0} not exists'.format(host_dir))
# Download the data
print 'Start download data...'
filelist = []
ftp.dir('*.*', filelist.append)
n = 0
for f in filelist:
name = f.split(' ')[-1]
if stations is None:
if stime.strftime('%Y%m%d') in name:
print ' ' + name
local_dir = os.path.join(data_dir, stime.strftime('%Y%m'))
print local_dir
if not os.path.isdir(local_dir):
os.makedirs(local_dir)
try:
ftp.retrbinary('RETR %s' % name, open(
os.path.join(local_dir, name), 'wb').write)
n += 1
except:
print 'Failed to download {0}'.format(f)
else:
for st in stations.tolist():
if stime.strftime('%Y%m%d') in name and st.stId in name:
print ' ' + name
local_dir = os.path.join(
data_dir, st.stId, stime.strftime('%Y%m'))
print local_dir
if not os.path.isdir(local_dir):
os.makedirs(local_dir)
try:
ftp.retrbinary('RETR %s' % name, open(
os.path.join(local_dir, name), 'wb').write)
n += 1
except:
print 'Failed to download {0}'.format(f)
print 'Total file number: ' + str(n)
print 'Download Finished!'
finally:
# Quit ftp
ftp.quit()
def unrar(rarfn, dest_dir):
'''
Unzip RAR file.
:param rarfn: (*string*) RAR data file.
:param dest_dir: (*string*) Destination folder.
'''
DataProcess.unrar(rarfn, dest_dir)
def merge_files(infns, outfn):
'''
Merge multiple k7 data files to one file.
:param infns: (*list*) Input k7 data files.
:param outfn: (*string*) Output k7 data file.
'''
DataProcess.mergeFiles(infns, outfn)
def decode(k7fn):
'''
Decode k7 file to data list.
:param k7fn: (*string*) K7 file name.
:returns: (*list*) Data list.
'''
r = DataProcess.decode(k7fn)
return r
def extract(data, type='NSU'):
'''
Extract data by type.
:param data: (*list*) Data list.
:param type: (*string*) Data type.
:returns: (*list*) Extracted data list.
'''
r = DataProcess.getDataByType(data, type)
return r
def save(data, fn):
'''
Save data to an ASCII file.
:param data: (*list*) Data list.
:param fn: (*string*) Output file name.
'''
DataProcess.writeASCIIFile(data, fn)
def cal_aot(wdir, calfn, taofn, nsufn, lat, lon, alt, insnum=1, cloud=1, no2=-2,
no2fn=None, ozone=-1, ozonefn='ozono.dat', alpha=1, nwave=4, waves='2 3 4 5',
press=-1):
'''
Calculate AOT.
:param exefn: (*string*) Excution file name.
:param inputfn: (*string*) Input file name.
:param calfn: (*string*) Calibration file name.
:param taofn: (*string*) Result AOT file
:param lat: (*float*) Latitude.
:param lon: (*float*) Longitude.
:param alt: (*float*) Altitude.
:param ozonefn: (*string*) Ozone file name.
:param nsufn: (*string*) NSU data file name.
'''
# DataProcess.calAOT(exefn, inputfn, calfn, taofn, lat, lon, alt, ozonefn, nsufn)
# wdir = r'C:\Program Files (x86)\ASTPWin\PlugIns\Aot'
exefn = os.path.join(wdir, 'ESPESOR.EXE')
inputfn = os.path.join(wdir, 'inputpar.dat')
if ozone == -1:
ozonefn = os.path.join(wdir, ozonefn)
with open(inputfn, 'w') as f:
# Instrument number (according with “instruments.dat”)
f.write(str(insnum) + '\n')
f.write(calfn + '\n') # Path of the calibration file
f.write(taofn + '\n') # Path of the output file
f.write(str(cloud) + '\n') # Cloud filtering (1 yes, 0 no)
f.write(str(lat) + '\n') # Latitude of measurement place
f.write(str(lon) + '\n') # Longitude of measurement place
f.write(str(alt) + '\n') # Surface height (m a.s.l)
f.write(str(no2) + '\n') # no2 mean latitude values
if no2 == -1:
f.write(no2fn + '\n') # Path of the no2 file
f.write(str(ozone) + '\n') # ozone dobson units
if ozone == -1:
f.write(ozonefn + '\n') # Path of the ozone file
f.write(str(alpha) + '\n') # alfa parameter calculation (0 no, 1 yes)
# number of wavelength used in alpha calculation
f.write(str(nwave) + '\n')
# position of the wavelength used in alpha calculation
f.write(waves + '\n')
f.write(str(press) + '\n') # Surface pressure
# f.write(sskfn + '\n') #Path of ascii ssk file (blank as colums separator)
# Path of ascii nsu file (blank as colums separator)
f.write(nsufn + '\n')
f.close()
# os.system('ESPESOR.EXE inputpar.dat /G0')
command=[exefn,inputfn,'/G0']
# __run_command(command,wdir,5)
RunCmd(command,wdir,5).Run()
def test():
print 'Test passed!'
class RunCmd(threading.Thread):
def __init__(self, cmd, cwd, timeout):
threading.Thread.__init__(self)
self.cmd = cmd
self.timeout = timeout
self.cwd = cwd
def run(self):
self.p = sub.Popen(self.cmd, cwd=self.cwd, shell=False)
self.p.wait()
def Run(self):
self.start()
self.join(self.timeout)
if self.is_alive():
print "Command {0} timed out after {1} seconds".format(self.cmd,self.timeout)
self.p.terminate() # use self.p.kill() if process needs a kill -9
self.join()
| en | 0.471417 | # -*- coding: utf-8 -*- Download AOD data files from CAMS ftp server. :param stime: (*datetime*) Start time. :param etime: (*datetime*) End time. :param stations: (*list*) Station list. Default is None, all stations will be included. :param ftp_dir: (*string*) Ftp server directory. :param data_dir: (*string*) Local data directory. :param ftp_ip: (*string*) Ftp address. :param user: (*string*) Ftp user name. :param pword: (*string*) Ftp password. # Set directories # Login ftp # ftp=FTP() # ftp.connect(ftp_ip,21) # ftp.sendcmd('USER '+user) # ftp.sendcmd('PASS '+pword) # access data dir # Download the data # Quit ftp Unzip RAR file. :param rarfn: (*string*) RAR data file. :param dest_dir: (*string*) Destination folder. Merge multiple k7 data files to one file. :param infns: (*list*) Input k7 data files. :param outfn: (*string*) Output k7 data file. Decode k7 file to data list. :param k7fn: (*string*) K7 file name. :returns: (*list*) Data list. Extract data by type. :param data: (*list*) Data list. :param type: (*string*) Data type. :returns: (*list*) Extracted data list. Save data to an ASCII file. :param data: (*list*) Data list. :param fn: (*string*) Output file name. Calculate AOT. :param exefn: (*string*) Excution file name. :param inputfn: (*string*) Input file name. :param calfn: (*string*) Calibration file name. :param taofn: (*string*) Result AOT file :param lat: (*float*) Latitude. :param lon: (*float*) Longitude. :param alt: (*float*) Altitude. :param ozonefn: (*string*) Ozone file name. :param nsufn: (*string*) NSU data file name. 
# DataProcess.calAOT(exefn, inputfn, calfn, taofn, lat, lon, alt, ozonefn, nsufn) # wdir = r'C:\Program Files (x86)\ASTPWin\PlugIns\Aot' # Instrument number (according with “instruments.dat”) # Path of the calibration file # Path of the output file # Cloud filtering (1 yes, 0 no) # Latitude of measurement place # Longitude of measurement place # Surface height (m a.s.l) # no2 mean latitude values # Path of the no2 file # ozone dobson units # Path of the ozone file # alfa parameter calculation (0 no, 1 yes) # number of wavelength used in alpha calculation # position of the wavelength used in alpha calculation # Surface pressure # f.write(sskfn + '\n') #Path of ascii ssk file (blank as colums separator) # Path of ascii nsu file (blank as colums separator) # os.system('ESPESOR.EXE inputpar.dat /G0') # __run_command(command,wdir,5) # use self.p.kill() if process needs a kill -9 | 2.574269 | 3 |
db/projects.py | gfoo/fastapi-demo | 0 | 6622495 | <filename>db/projects.py
from core.security import get_password_hash
from models.project import DBProject
from models.user import DBUser
from schemas.project import Project
from sqlalchemy import or_
from sqlalchemy.orm import Session
def create_project(db: Session, project: Project, owner_id: int):
    """Persist a new project owned by *owner_id* and return the refreshed row."""
    new_row = DBProject(owner_id=owner_id, **project.dict())
    db.add(new_row)
    db.commit()
    db.refresh(new_row)
    return new_row
def get_projects(db: Session, skip: int = 0, limit: int = 100):
    """Return a page of projects (offset *skip*, at most *limit* rows)."""
    query = db.query(DBProject)
    return query.offset(skip).limit(limit).all()
def get_project(db: Session, project_id: int):
    """Fetch one project by primary key, or None if it does not exist."""
    query = db.query(DBProject).filter(DBProject.id == project_id)
    return query.first()
def get_project_by_name(db: Session, name: str):
    """Fetch one project by its (unique) name, or None if absent."""
    query = db.query(DBProject).filter(DBProject.name == name)
    return query.first()
def get_projects_by_owner(db: Session, owner_id: int, skip: int = 0, limit: int = 100):
    """Return a page of projects visible to *owner_id*.

    A project is visible when it is owned by the user or is not private.
    Pagination via *skip*/*limit* as in get_projects().
    """
    # .is_(False) is the idiomatic SQLAlchemy boolean comparison (avoids the
    # E712 ``== False`` lint); equivalent result for boolean column values.
    visible = or_(DBProject.owner_id == owner_id, DBProject.private.is_(False))
    return (db.query(DBProject)
            .filter(visible)
            .offset(skip)
            .limit(limit)
            .all()
            )
def delete_project(db: Session, project_id: int):
    """Bulk-delete the project row with the given id (no-op if missing)."""
    query = db.query(DBProject).filter(DBProject.id == project_id)
    query.delete()
    db.commit()
def update_project(db: Session, project_id: int,
                   name: str, description: str, private: bool):
    """Bulk-update name/description/private of one project and commit."""
    new_values = {
        DBProject.name: name,
        DBProject.description: description,
        DBProject.private: private,
    }
    db.query(DBProject).filter(DBProject.id == project_id).update(new_values)
    db.commit()
| <filename>db/projects.py
from core.security import get_password_hash
from models.project import DBProject
from models.user import DBUser
from schemas.project import Project
from sqlalchemy import or_
from sqlalchemy.orm import Session
def create_project(db: Session, project: Project, owner_id: int):
db_project = DBProject(**project.dict(), owner_id=owner_id)
db.add(db_project)
db.commit()
db.refresh(db_project)
return db_project
def get_projects(db: Session, skip: int = 0, limit: int = 100):
return db.query(DBProject).offset(skip).limit(limit).all()
def get_project(db: Session, project_id: int):
return db.query(DBProject).filter(DBProject.id == project_id).first()
def get_project_by_name(db: Session, name: str):
return db.query(DBProject).filter(DBProject.name == name).first()
def get_projects_by_owner(db: Session, owner_id: int, skip: int = 0, limit: int = 100):
return (db.query(DBProject)
.filter(or_(DBProject.owner_id == owner_id, DBProject.private == False))
.offset(skip)
.limit(limit)
.all()
)
def delete_project(db: Session, project_id: int):
db.query(DBProject).filter(
DBProject.id == project_id).delete()
db.commit()
def update_project(db: Session, project_id: int,
name: str, description: str, private: bool):
db.query(DBProject).filter(
DBProject.id == project_id).update({
DBProject.name: name,
DBProject.description: description,
DBProject.private: private
})
db.commit()
| none | 1 | 2.641767 | 3 | |
2019/src/Advent2019_14.py | davidxbuck/advent2018 | 1 | 6622496 | from math import ceil
def calculate_ore(reactions, chem_qty, fuel_required):
    """Return the ORE needed to produce *fuel_required* FUEL.

    reactions maps chemical -> [batch_size, {ingredient: amount}].
    chem_qty maps chemical -> on-hand balance (negative = deficit) and is
    mutated in place; leftovers remain available to subsequent calls.
    """
    chem_qty['FUEL'] = -fuel_required
    ore_consumed = 0
    while True:
        deficit_found = False
        for chemical in chem_qty:
            shortfall = -chem_qty[chemical]
            if shortfall <= 0:
                continue
            if chemical == 'ORE':
                # ORE is primitive: tally the demand and clear the balance.
                ore_consumed += shortfall
                chem_qty[chemical] = 0
                continue
            # Produce just enough whole batches to cover the shortfall;
            # any surplus stays in chem_qty as leftover stock.
            deficit_found = True
            batch_size, ingredients = reactions[chemical]
            batches = ceil(shortfall / batch_size)
            chem_qty[chemical] += batch_size * batches
            for ingredient, amount in ingredients.items():
                chem_qty[ingredient] -= amount * batches
        if not deficit_found:
            return ore_consumed
def main():
    """Solve AoC 2019 day 14: parse the reaction list, then part 1 and 2."""
    # Input lines look like: "7 A, 1 E => 1 FUEL"; token-split each line.
    with open('../inputs/Advent2019_14.txt', 'r') as f:
        equations = [x.strip().split() for x in f]
    reactions = {}
    chem_qty = {}
    for equation in equations:
        # Last two tokens are the output chemical and its batch quantity.
        chemical, quantity = equation[-1], int(equation[-2])
        # Remaining tokens alternate quantity / chemical-name (comma-suffixed).
        inputs = {y.strip(','): int(x) for x, y in zip(equation[0:-3:2], equation[1:-3:2])}
        reactions[chemical] = [quantity, inputs]
        chem_qty[chemical] = 0
    chem_qty['ORE'] = 0
    fuel_required = 1
    print(f'AoC 2019 Day 14, Part 1: Ore consumed {calculate_ore(reactions, chem_qty, fuel_required)}')
    # Part 2: binary-search the max fuel producible from 1e12 ORE by halving
    # the adjustment from 2**40 down to 0.
    # NOTE(review): chem_qty (with leftovers from previous calls) is reused
    # across calculate_ore() calls here — presumably intentional; confirm.
    adjustment = 2 ** 40
    fuel_required = 1000000
    while adjustment > 0:
        ore = calculate_ore(reactions, chem_qty, fuel_required)
        if ore > 1000000000000:
            fuel_required = fuel_required - adjustment
        else:
            fuel_required = fuel_required + adjustment
        adjustment //= 2
    print(f'AoC 2019 Day 14, Part 2: Fuel produced {fuel_required}')
if __name__ == '__main__':
    # Script entry point: solve both parts against the checked-in input file.
    main()
| from math import ceil
def calculate_ore(reactions, chem_qty, fuel_required):
chem_qty['FUEL'] = -fuel_required
ore = 0
satisfied = False
while not satisfied:
satisfied = True
for output_chemical in chem_qty.keys():
if output_chemical == "ORE" and chem_qty[output_chemical] < 0:
ore -= chem_qty[output_chemical]
chem_qty[output_chemical] = 0
continue
if chem_qty[output_chemical] < 0:
satisfied = False
output_qty, input_chemicals = reactions[output_chemical]
batches = ceil(abs(chem_qty[output_chemical]) / output_qty)
chem_qty[output_chemical] += output_qty * batches
for input_chemical in input_chemicals.keys():
qty = input_chemicals[input_chemical]
chem_qty[input_chemical] -= qty * batches
return ore
def main():
with open('../inputs/Advent2019_14.txt', 'r') as f:
equations = [x.strip().split() for x in f]
reactions = {}
chem_qty = {}
for equation in equations:
chemical, quantity = equation[-1], int(equation[-2])
inputs = {y.strip(','): int(x) for x, y in zip(equation[0:-3:2], equation[1:-3:2])}
reactions[chemical] = [quantity, inputs]
chem_qty[chemical] = 0
chem_qty['ORE'] = 0
fuel_required = 1
print(f'AoC 2019 Day 14, Part 1: Ore consumed {calculate_ore(reactions, chem_qty, fuel_required)}')
adjustment = 2 ** 40
fuel_required = 1000000
while adjustment > 0:
ore = calculate_ore(reactions, chem_qty, fuel_required)
if ore > 1000000000000:
fuel_required = fuel_required - adjustment
else:
fuel_required = fuel_required + adjustment
adjustment //= 2
print(f'AoC 2019 Day 14, Part 2: Fuel produced {fuel_required}')
if __name__ == '__main__':
main()
| none | 1 | 3.16873 | 3 | |
Tools/my_globals.py | yuanfangtardis/vscode_project | 0 | 6622497 | <gh_stars>0
#!/usr/bin/env python2.7
# yuafang
# 2017.11.26
# Shared registry dict for cross-module state; populated by importers at runtime.
easy_globals = {}
# if __name__=='__main__':
# print 'testing gloabls ...'
# print ES,Programs,CS,Ploter,Programs | #!/usr/bin/env python2.7
# yuafang
# 2017.11.26
easy_globals=dict()
# if __name__=='__main__':
# print 'testing gloabls ...'
# print ES,Programs,CS,Ploter,Programs | en | 0.234468 | #!/usr/bin/env python2.7 # yuafang # 2017.11.26 # if __name__=='__main__': # print 'testing gloabls ...' # print ES,Programs,CS,Ploter,Programs | 1.238534 | 1 |
auth/auth_server.py | ivmfnal/dm_common | 0 | 6622498 | <reponame>ivmfnal/dm_common
from dm_common.auth import BaseApp, AuthHandler
import time, os, yaml
from urllib.parse import quote_plus, unquote_plus
class AuthApp(BaseApp):
    """Concrete auth web application; all behaviour is inherited from BaseApp."""
    pass
def create_application(config_path=None):
    """Build and return the AuthApp WSGI application.

    config_path: path to the YAML configuration file; falls back to the
    AUTH_SERVER_CFG environment variable when omitted.

    Raises:
        ValueError: when no configuration path can be determined.  (The
        original only printed a message and then crashed with a TypeError
        on ``open(None)``.)
    """
    config_path = config_path or os.environ.get("AUTH_SERVER_CFG")
    if not config_path:
        print("Config file is not defined. Use AUTH_SERVER_CFG environment variable")
        raise ValueError("Config file is not defined. Use AUTH_SERVER_CFG environment variable")
    config = yaml.load(open(config_path, "r"), Loader=yaml.SafeLoader)
    app = AuthApp(config, AuthHandler)
    # The template directory may be given literally or as "$ENV_VAR" indirection.
    templdir = config.get("templates", "")
    if templdir.startswith("$"):
        templdir = os.environ[templdir[1:]]
    app.initJinjaEnvironment(tempdirs=[templdir, "."])
    return app
if __name__ == "__main__":
    # Standalone mode: parse flags and run an HTTPServer.  When imported by a
    # WSGI container instead, only `application` in the else-branch is built.
    from webpie import HTTPServer
    import sys, getopt
    Usage = """
    python auth_server.py [-p <port>] [-c <config.yaml>]
    """
    opts, args = getopt.getopt(sys.argv[1:], "c:p:")
    opts = dict(opts)
    # -c on the command line wins over the AUTH_SERVER_CFG environment variable.
    config_file = opts.get("-c", os.environ.get("AUTH_SERVER_CFG"))
    if not config_file:
        print("Configuration file must be provided either using -c command line option or via AUTH_SERVER_CFG environment variable")
        sys.exit(1)
    config = yaml.load(open(config_file, "r"), Loader=yaml.SafeLoader)
    auth_config = config["authentication"]
    # -p overrides the configured port; -1 marks "not configured".
    port = int(opts.get("-p", auth_config.get("port", -1)))
    if port == -1:
        print("AuthServer port is not configured")
        sys.exit(1)
    key = cert = ca_file = None
    if "ssl" in auth_config:
        # Optional TLS material; client certificates are requested but not
        # required (verify="optional" below).
        key = auth_config["ssl"]["key"]
        cert = auth_config["ssl"]["cert"]
        ca_file = auth_config["ssl"]["ca_bundle"]
    application = create_application(config_file)
    server = HTTPServer(port, application, certfile=cert, keyfile=key, verify="optional", ca_file=ca_file, debug=sys.stdout, logging=True)
    server.run()
else:
    application = create_application()
| from dm_common.auth import BaseApp, AuthHandler
import time, os, yaml
from urllib.parse import quote_plus, unquote_plus
class AuthApp(BaseApp):
pass
def create_application(config_path=None):
config_path = config_path or os.environ.get("AUTH_SERVER_CFG")
if not config_path:
print("Config file is not defined. Use AUTH_SERVER_CFG environment variable")
config = yaml.load(open(config_path, "r"), Loader=yaml.SafeLoader)
app = AuthApp(config, AuthHandler)
templdir = config.get("templates", "")
if templdir.startswith("$"):
templdir = os.environ[templdir[1:]]
app.initJinjaEnvironment(tempdirs=[templdir, "."])
return app
if __name__ == "__main__":
from webpie import HTTPServer
import sys, getopt
Usage = """
python auth_server.py [-p <port>] [-c <config.yaml>]
"""
opts, args = getopt.getopt(sys.argv[1:], "c:p:")
opts = dict(opts)
config_file = opts.get("-c", os.environ.get("AUTH_SERVER_CFG"))
if not config_file:
print("Configuration file must be provided either using -c command line option or via AUTH_SERVER_CFG environment variable")
sys.exit(1)
config = yaml.load(open(config_file, "r"), Loader=yaml.SafeLoader)
auth_config = config["authentication"]
port = int(opts.get("-p", auth_config.get("port", -1)))
if port == -1:
print("AuthServer port is not configured")
sys.exit(1)
key = cert = ca_file = None
if "ssl" in auth_config:
key = auth_config["ssl"]["key"]
cert = auth_config["ssl"]["cert"]
ca_file = auth_config["ssl"]["ca_bundle"]
application = create_application(config_file)
server = HTTPServer(port, application, certfile=cert, keyfile=key, verify="optional", ca_file=ca_file, debug=sys.stdout, logging=True)
server.run()
else:
application = create_application() | en | 0.174407 | python auth_server.py [-p <port>] [-c <config.yaml>] | 2.207873 | 2 |
tests/test_template.py | czerwe/zabbixmgm | 0 | 6622499 | import unittest2
import zabbixmgm
from mock import Mock, call
from pprint import pprint
class template_tests(unittest2.TestCase):
    """Unit tests for zabbixmgm.zbxtemplate construction and response masking."""
    def setUp(self):
        # Stand-in for the Zabbix API client; these tests make no network calls.
        self.apimock = Mock()
    def tearDown(self):
        pass
    def test_template_init(self):
        # templateid must be tracked for diffing and treated as read-only.
        ttemplate = zabbixmgm.zbxtemplate(self.apimock, name='mytemplate')
        self.assertTrue('templateid' in ttemplate.difffields)
        self.assertTrue('templateid' in ttemplate.readonlyfields)
    def test_template_new(self):
        # A freshly created template has no server-side id yet.
        ttemplate = zabbixmgm.zbxtemplate(self.apimock, name='mytemplate')
        self.assertEqual(ttemplate.id, None)
        self.assertEqual(ttemplate.templateid, None)
    def test_template_masking(self):
        # Applying a server-response mask populates id/templateid.
        mask = {'available': u'0', 'status': u'3', 'templateid': u'10001'}
        ttemplate = zabbixmgm.zbxtemplate(self.apimock, mask=mask, name='mytemplate')
        self.assertEqual(ttemplate.id, '10001')
        self.assertEqual(ttemplate.templateid, '10001')
| import unittest2
import zabbixmgm
from mock import Mock, call
from pprint import pprint
class template_tests(unittest2.TestCase):
def setUp(self):
self.apimock = Mock()
def tearDown(self):
pass
def test_template_init(self):
ttemplate = zabbixmgm.zbxtemplate(self.apimock, name='mytemplate')
self.assertTrue('templateid' in ttemplate.difffields)
self.assertTrue('templateid' in ttemplate.readonlyfields)
def test_template_new(self):
ttemplate = zabbixmgm.zbxtemplate(self.apimock, name='mytemplate')
self.assertEqual(ttemplate.id, None)
self.assertEqual(ttemplate.templateid, None)
def test_template_masking(self):
mask = {'available': u'0', 'status': u'3', 'templateid': u'10001'}
ttemplate = zabbixmgm.zbxtemplate(self.apimock, mask=mask, name='mytemplate')
self.assertEqual(ttemplate.id, '10001')
self.assertEqual(ttemplate.templateid, '10001')
| none | 1 | 2.523102 | 3 | |
pyboretum/__init__.py | picwell/pyboretum | 1 | 6622500 | from __future__ import absolute_import
from .decision_tree import DecisionTree
from . import splitters
from .node import (
Node,
MeanNode,
MedianNode,
MeanMedianAnalysisNode,
)
from .tree import (
LinkedTree,
ListTree,
)
from .training_data import TrainingData | from __future__ import absolute_import
from .decision_tree import DecisionTree
from . import splitters
from .node import (
Node,
MeanNode,
MedianNode,
MeanMedianAnalysisNode,
)
from .tree import (
LinkedTree,
ListTree,
)
from .training_data import TrainingData | none | 1 | 1.313974 | 1 | |
src/cania/analysis/cohort.py | Cancer-Image-Analysis/cania-core | 1 | 6622501 | <filename>src/cania/analysis/cohort.py
#!/usr/bin/env python
"""Provides Generic Classes to represent cohort data.
"""
__author__ = "<NAME>"
__copyright__ = "Copyright 2020, Cancer Image Analysis"
__credits__ = ["<NAME>"]
__license__ = "MIT"
__version__ = "1.0.1"
__maintainer__ = "<NAME>"
__email__ = "<EMAIL>"
__status__ = "Production"
class Patient(object):
    """Thin record holding the path to one patient's slide data."""
    def __init__(self, data_path):
        # Filesystem path to the patient's slide, taken from cohort metadata.
        self.data_path = data_path
class ChromogenicCohort(object):
    """Cohort of patients built from a metadata table (a pandas DataFrame).

    Expected columns: first column = slide name, PATH = slide file path,
    TODO = 1 when the slide should be processed.
    """
    def __init__(self, metadata):
        self.metadata = metadata
        self.patients = {}
        self.setup()
    def setup(self):
        """Populate self.patients from rows flagged TODO == 1 with a valid path."""
        for index, row in self.metadata.iterrows():
            slide_path = row.PATH
            # The slide name is whatever the first column is; `row[0]`
            # (label-with-positional-fallback) is deprecated in modern pandas,
            # so use explicit positional access.
            slide_name = row.iloc[0]
            if row.TODO == 1 and slide_path != 0:
                self.patients[slide_name] = Patient(slide_path)
        print(self.patients)  # NOTE(review): debug leftover -- consider logging instead
    def get_slides(self):
        """Yield a ChromogenicSlide per selected patient.

        NOTE(review): ChromogenicSlide is neither defined nor imported in this
        module; calling this as-is raises NameError -- confirm intended import.
        """
        for patient_id, patient in self.patients.items():
            yield ChromogenicSlide(patient.data_path, patient_id)
#!/usr/bin/env python
"""Provides Generic Classes to represent cohort data.
"""
__author__ = "<NAME>"
__copyright__ = "Copyright 2020, Cancer Image Analysis"
__credits__ = ["<NAME>"]
__license__ = "MIT"
__version__ = "1.0.1"
__maintainer__ = "<NAME>"
__email__ = "<EMAIL>"
__status__ = "Production"
class Patient(object):
def __init__(self, data_path):
self.data_path = data_path
class ChromogenicCohort(object):
def __init__(self, metadata):
self.metadata = metadata
self.patients = {}
self.setup()
def setup(self):
for index, row in self.metadata.iterrows():
slide_path = row.PATH
slide_name = row[0]
if row.TODO == 1 and slide_path != 0:
self.patients[slide_name] = Patient(slide_path)
print(self.patients)
def get_slides(self):
for patient_id, patient in self.patients.items():
yield ChromogenicSlide(patient.data_path, patient_id) | en | 0.49507 | #!/usr/bin/env python Provides Generic Classes to represent cohort data. | 2.770883 | 3 |
django_smartfarm_git/main/apps.py | jyjy318/smartfarm | 0 | 6622502 | <reponame>jyjy318/smartfarm<gh_stars>0
from django.apps import AppConfig
class CityTest2Config(AppConfig):
    # Django application configuration.
    # NOTE(review): the app name 'city_test2' does not match the package path
    # (main/apps.py) -- confirm this is intentional.
    name = 'city_test2'
| from django.apps import AppConfig
class CityTest2Config(AppConfig):
name = 'city_test2' | none | 1 | 1.241333 | 1 | |
yahoo_evaluate_experssion.py | loghmanb/daily-coding-problem | 0 | 6622503 | '''
Evaluate Expression
Asked in: Yahoo, Google, Facebook
https://www.interviewbit.com/problems/evaluate-expression/
Evaluate the value of an arithmetic expression in Reverse Polish Notation.
Valid operators are +, -, *, /. Each operand may be an integer or another expression.
Input Format
The only argument given is character array A.
Output Format
Return the value of arithmetic expression formed using reverse Polish Notation.
For Example
Input 1:
A = ["2", "1", "+", "3", "*"]
Output 1:
9
Explaination 1:
starting from backside:
*: ( )*( )
3: ()*(3)
+: ( () + () )*(3)
1: ( () + (1) )*(3)
2: ( (2) + (1) )*(3)
((2)+(1))*(3) = 9
Input 2:
A = ["4", "13", "5", "/", "+"]
Output 2:
6
Explaination 2:
+: ()+()
/: ()+(() / ())
5: ()+(() / (5))
1: ()+((13) / (5))
4: (4)+((13) / (5))
(4)+((13) / (5)) = 6
'''
# @param A : list of strings
# @return an integer
def evalRPN(A):
    """Evaluate a Reverse Polish Notation expression to an integer.

    The token list is consumed from the end via pop() (A is mutated);
    callers that need the tokens afterwards should pass a copy, e.g.
    ``evalRPN(tokens[:])``.

    Division truncates toward zero, per the usual RPN convention and the
    worked example in this file (13 / 5 == 2).  The original ``a / b``
    floored on Python 2 ints and produced floats on Python 3, both of
    which break expressions with negative intermediate results.
    """
    if not A:
        return 0
    x = A.pop()
    if x in ('+', '-', '*', '/'):
        # Operands come off the stack right-hand side first.
        b = evalRPN(A)
        a = evalRPN(A)
        if x == '+':
            return a + b
        elif x == '-':
            return a - b
        elif x == '*':
            return a * b
        else:
            # Integer division truncated toward zero (not floored).
            q = abs(a) // abs(b)
            return q if (a >= 0) == (b >= 0) else -q
    else:
        return int(x)
if __name__ == "__main__":
    # Minimal self-check: each entry is [tokens, expected_result].
    data = [
        [["2", "1", "+", "3", "*"], 9]
    ]
    for d in data:
        # Pass a copy because evalRPN consumes its token list via pop().
        print('input', d[0], 'output', evalRPN(d[0][:]))
Evaluate Expression
Asked in: Yahoo, Google, Facebook
https://www.interviewbit.com/problems/evaluate-expression/
Evaluate the value of an arithmetic expression in Reverse Polish Notation.
Valid operators are +, -, *, /. Each operand may be an integer or another expression.
Input Format
The only argument given is character array A.
Output Format
Return the value of arithmetic expression formed using reverse Polish Notation.
For Example
Input 1:
A = ["2", "1", "+", "3", "*"]
Output 1:
9
Explaination 1:
starting from backside:
*: ( )*( )
3: ()*(3)
+: ( () + () )*(3)
1: ( () + (1) )*(3)
2: ( (2) + (1) )*(3)
((2)+(1))*(3) = 9
Input 2:
A = ["4", "13", "5", "/", "+"]
Output 2:
6
Explaination 2:
+: ()+()
/: ()+(() / ())
5: ()+(() / (5))
1: ()+((13) / (5))
4: (4)+((13) / (5))
(4)+((13) / (5)) = 6
'''
# @param A : list of strings
# @return an integer
def evalRPN(A):
if not A: return 0
x = A.pop()
if x in ('+', '-', '*', '/'):
b = evalRPN(A)
a = evalRPN(A)
if x=='+':
return a+b
elif x=='-':
return a-b
elif x=='*':
return a*b
else:
return a/b
else:
return int(x)
if __name__ == "__main__":
data = [
[["2", "1", "+", "3", "*"], 9]
]
for d in data:
print('input', d[0], 'output', evalRPN(d[0][:])) | en | 0.631855 | Evaluate Expression Asked in: Yahoo, Google, Facebook https://www.interviewbit.com/problems/evaluate-expression/ Evaluate the value of an arithmetic expression in Reverse Polish Notation. Valid operators are +, -, *, /. Each operand may be an integer or another expression. Input Format The only argument given is character array A. Output Format Return the value of arithmetic expression formed using reverse Polish Notation. For Example Input 1: A = ["2", "1", "+", "3", "*"] Output 1: 9 Explaination 1: starting from backside: *: ( )*( ) 3: ()*(3) +: ( () + () )*(3) 1: ( () + (1) )*(3) 2: ( (2) + (1) )*(3) ((2)+(1))*(3) = 9 Input 2: A = ["4", "13", "5", "/", "+"] Output 2: 6 Explaination 2: +: ()+() /: ()+(() / ()) 5: ()+(() / (5)) 1: ()+((13) / (5)) 4: (4)+((13) / (5)) (4)+((13) / (5)) = 6 # @param A : list of strings # @return an integer | 4.174996 | 4 |
images/bot/src/bioconda_bot/automerge.py | bioconda/bioconda-containers | 2 | 6622504 | import logging
import os
from typing import Any, Dict, List, Optional, Set, Tuple
from aiohttp import ClientSession
from yaml import safe_load
from .common import (
get_job_context,
get_prs_for_sha,
get_sha_for_status_check,
get_sha_for_workflow_run,
)
from .merge import MergeState, request_merge
logger = logging.getLogger(__name__)
log = logger.info
async def get_pr_labels(session: ClientSession, pr: int) -> Set[str]:
    """Fetch the set of label names currently attached to a PR."""
    request_headers = {
        "Authorization": f"token {os.environ['BOT_TOKEN']}",
        "User-Agent": "BiocondaCommentResponder",
    }
    labels_url = f"https://api.github.com/repos/bioconda/bioconda-recipes/issues/{pr}/labels"
    async with session.get(labels_url, headers=request_headers) as response:
        response.raise_for_status()
        body = await response.text()
    return {entry["name"] for entry in safe_load(body)}
async def is_automerge_labeled(session: ClientSession, pr: int) -> bool:
    """Return True when the PR currently carries the "automerge" label."""
    return "automerge" in await get_pr_labels(session, pr)
async def merge_if_labeled(session: ClientSession, pr: int) -> MergeState:
    """Request a merge for the PR, but only when it is labeled "automerge"."""
    if await is_automerge_labeled(session, pr):
        return await request_merge(session, pr)
    return MergeState.UNKNOWN
async def get_check_runs(session: ClientSession, sha: str) -> Any:
    """Return the GitHub check runs for a commit, minus the bot's own run."""
    async with session.get(
        f"https://api.github.com/repos/bioconda/bioconda-recipes/commits/{sha}/check-runs",
        headers={
            "User-Agent": "BiocondaCommentResponder",
            "Accept": "application/vnd.github.antiope-preview+json",
        },
    ) as response:
        response.raise_for_status()
        body = await response.text()
    all_runs = safe_load(body)["check_runs"] or []
    # The bot's own automerge check must not count towards pass/fail.
    check_runs = [run for run in all_runs if run["name"] != "bioconda-bot automerge"]
    log("Got %d check_runs for SHA %s", len(check_runs or []), sha)
    return check_runs
async def all_checks_completed(session: ClientSession, sha: str) -> bool:
    """Return True when every relevant check run for the commit has finished."""
    check_runs = await get_check_runs(session, sha)
    pending = [run for run in check_runs if run["status"] != "completed"]
    if pending:
        log("Some check_runs are not completed yet.")
        for i, check_run in enumerate(check_runs, 1):
            log("check_run %d / %d: %s", i, len(check_runs), check_run)
    return not pending
async def all_checks_passed(session: ClientSession, sha: str) -> bool:
    """Return True when every relevant check run concluded success/skipped."""
    check_runs = await get_check_runs(session, sha)
    # TODO: "neutral" might be a valid conclusion to consider in the future.
    valid_conclusions = {"success", "skipped"}
    failed = [run for run in check_runs if run["conclusion"] not in valid_conclusions]
    if failed:
        log(f"Some check_runs are not marked as {'/'.join(valid_conclusions)} yet.")
        for i, check_run in enumerate(check_runs, 1):
            log("check_run %d / %d: %s", i, len(check_runs), check_run)
        return False
    return True
async def merge_automerge_passed(sha: str) -> None:
    """Merge the first automerge-labeled PR for *sha* once all checks pass."""
    async with ClientSession() as session:
        if not await all_checks_passed(session, sha):
            return
        prs = await get_prs_for_sha(session, sha)
        if not prs:
            log("No PRs found for SHA %s", sha)
        for pr in prs:
            merge_state = await merge_if_labeled(session, pr)
            log("PR %d has merge state %s", pr, merge_state)
            # Stop after the first successful merge; the SHA is now merged.
            if merge_state is MergeState.MERGED:
                break
async def get_sha_for_review(job_context: Dict[str, Any]) -> Optional[str]:
    """Return the head SHA for an approving PR-review event, else None."""
    if job_context["event_name"] != "pull_request_review":
        return None
    log("Got %s event", "pull_request_review")
    review_event = job_context["event"]
    if review_event["review"]["state"] != "approved":
        return None
    head_sha: Optional[str] = review_event["pull_request"]["head"]["sha"]
    log("Use %s event SHA %s", "pull_request_review", head_sha)
    return head_sha
async def get_sha_for_labeled_pr(job_context: Dict[str, Any]) -> Optional[str]:
    """Return the head SHA when a PR was just labeled "automerge", else None."""
    if job_context["event_name"] != "pull_request":
        return None
    log("Got %s event", "pull_request")
    pr_event = job_context["event"]
    # De Morgan of the original guard; `and` still short-circuits so the
    # "label" key is only read for "labeled" actions.
    if not (pr_event["action"] == "labeled" and pr_event["label"]["name"] == "automerge"):
        return None
    head_sha: Optional[str] = pr_event["pull_request"]["head"]["sha"]
    log("Use %s event SHA %s", "pull_request", head_sha)
    return head_sha
# This requires that a JOB_CONTEXT environment variable, which is made with `toJson(github)`
async def main() -> None:
    """Entry point: derive a commit SHA from the triggering event and automerge it."""
    job_context = await get_job_context()
    # The workflow can be triggered by several event types; use the first
    # extractor that recognises the payload.
    sha = (
        await get_sha_for_status_check(job_context)
        or await get_sha_for_workflow_run(job_context)
        or await get_sha_for_review(job_context)
        or await get_sha_for_labeled_pr(job_context)
    )
    if sha:
        await merge_automerge_passed(sha)
| import logging
import os
from typing import Any, Dict, List, Optional, Set, Tuple
from aiohttp import ClientSession
from yaml import safe_load
from .common import (
get_job_context,
get_prs_for_sha,
get_sha_for_status_check,
get_sha_for_workflow_run,
)
from .merge import MergeState, request_merge
logger = logging.getLogger(__name__)
log = logger.info
async def get_pr_labels(session: ClientSession, pr: int) -> Set[str]:
token = os.environ["BOT_TOKEN"]
url = f"https://api.github.com/repos/bioconda/bioconda-recipes/issues/{pr}/labels"
headers = {
"Authorization": f"token {token}",
"User-Agent": "BiocondaCommentResponder",
}
async with session.get(url, headers=headers) as response:
response.raise_for_status()
res = await response.text()
labels = safe_load(res)
return {label["name"] for label in labels}
async def is_automerge_labeled(session: ClientSession, pr: int) -> bool:
labels = await get_pr_labels(session, pr)
return "automerge" in labels
async def merge_if_labeled(session: ClientSession, pr: int) -> MergeState:
if not await is_automerge_labeled(session, pr):
return MergeState.UNKNOWN
return await request_merge(session, pr)
async def get_check_runs(session: ClientSession, sha: str) -> Any:
url = f"https://api.github.com/repos/bioconda/bioconda-recipes/commits/{sha}/check-runs"
headers = {
"User-Agent": "BiocondaCommentResponder",
"Accept": "application/vnd.github.antiope-preview+json",
}
async with session.get(url, headers=headers) as response:
response.raise_for_status()
res = await response.text()
check_runs = [
check_run
for check_run in safe_load(res)["check_runs"] or []
if check_run["name"] != "bioconda-bot automerge"
]
log("Got %d check_runs for SHA %s", len(check_runs or []), sha)
return check_runs
async def all_checks_completed(session: ClientSession, sha: str) -> bool:
check_runs = await get_check_runs(session, sha)
is_all_completed = all(check_run["status"] == "completed" for check_run in check_runs)
if not is_all_completed:
log("Some check_runs are not completed yet.")
for i, check_run in enumerate(check_runs, 1):
log("check_run %d / %d: %s", i, len(check_runs), check_run)
return is_all_completed
async def all_checks_passed(session: ClientSession, sha: str) -> bool:
check_runs = await get_check_runs(session, sha)
# TODO: "neutral" might be a valid conclusion to consider in the future.
valid_conclusions = {"success", "skipped"}
if any(check_run["conclusion"] not in valid_conclusions for check_run in check_runs):
log(f"Some check_runs are not marked as {'/'.join(valid_conclusions)} yet.")
for i, check_run in enumerate(check_runs, 1):
log("check_run %d / %d: %s", i, len(check_runs), check_run)
return False
return True
async def merge_automerge_passed(sha: str) -> None:
async with ClientSession() as session:
if not await all_checks_passed(session, sha):
return
prs = await get_prs_for_sha(session, sha)
if not prs:
log("No PRs found for SHA %s", sha)
for pr in prs:
merge_state = await merge_if_labeled(session, pr)
log("PR %d has merge state %s", pr, merge_state)
if merge_state is MergeState.MERGED:
break
async def get_sha_for_review(job_context: Dict[str, Any]) -> Optional[str]:
if job_context["event_name"] != "pull_request_review":
return None
log("Got %s event", "pull_request_review")
event = job_context["event"]
if event["review"]["state"] != "approved":
return None
sha: Optional[str] = event["pull_request"]["head"]["sha"]
log("Use %s event SHA %s", "pull_request_review", sha)
return sha
async def get_sha_for_labeled_pr(job_context: Dict[str, Any]) -> Optional[str]:
if job_context["event_name"] != "pull_request":
return None
log("Got %s event", "pull_request")
event = job_context["event"]
if event["action"] != "labeled" or event["label"]["name"] != "automerge":
return None
sha: Optional[str] = event["pull_request"]["head"]["sha"]
log("Use %s event SHA %s", "pull_request", sha)
return sha
# This requires that a JOB_CONTEXT environment variable, which is made with `toJson(github)`
async def main() -> None:
job_context = await get_job_context()
sha = (
await get_sha_for_status_check(job_context)
or await get_sha_for_workflow_run(job_context)
or await get_sha_for_review(job_context)
or await get_sha_for_labeled_pr(job_context)
)
if sha:
await merge_automerge_passed(sha)
| en | 0.897912 | # TODO: "neutral" might be a valid conclusion to consider in the future. # This requires that a JOB_CONTEXT environment variable, which is made with `toJson(github)` | 2.199264 | 2 |
cloudmesh_install/util.py | JulienPalard/cloudmesh | 0 | 6622505 | """Cloudmesh install util functions.
This file contains basic utility functions that must not need any
import from cloudmesh OR any other non-standard python
modules. Everything in this file must execute on a clean python 2.7.x
environment.
"""
import platform
from string import Template
import os
import sys
def get_system():
    """Return a short platform identifier: ubuntu, centos, osx, or unsupported."""
    detectors = (("ubuntu", is_ubuntu), ("centos", is_centos), ("osx", is_osx))
    for label, detected in detectors:
        if detected():
            return label
    return "unsupported"
def is_ubuntu():
    """Test if the platform is Ubuntu (warns about untested releases).

    NOTE(review): platform.dist() was removed in Python 3.8; this module
    explicitly targets a clean Python 2.7 environment.
    """
    (dist, version, release) = platform.dist()
    # platform.dist() historically reports "Ubuntu" (capitalised).  Compare
    # case-insensitively so the untested-version warning and the return value
    # agree -- the original checked "ubuntu" here but 'Ubuntu' for the return,
    # so the warning branch could never fire.
    if dist.lower() == "ubuntu" and version not in ["14.04"]:
        print("ERROR: %s %s is not tested" % (dist, version))
    return dist.lower() == 'ubuntu'
def is_centos():
    """test if the platform is centos"""
    # NOTE(review): platform.dist() was removed in Python 3.8; this module
    # targets Python 2.7 per its docstring.
    (dist, version, release) = platform.dist()
    # Warn (but still return True) for CentOS releases other than 6.5.
    if dist == "centos" and version not in ["6.5"]:
        print("WARNING: %s %s is not tested" % (dist, version))
    return dist == "centos"
def is_osx():
    """Test if the platform is OS X on a tested 10.9/10.10 release."""
    osx = platform.system().lower() == 'darwin'
    if osx:
        os_version = platform.mac_ver()[0]
        # Hard-coded whitelist of tested releases.  Unlike is_ubuntu/is_centos,
        # an untested version makes this return False, not just warn.
        if os_version not in ['10.9.5',
                              '10.10',
                              '10.10.1',
                              '10.10.2',
                              '10.10.3']:
            osx = False
            print("WARNING: %s %s is not tested" % ('OSX', os_version))
    return osx
| """Cloudmesh install util functions.
This file contains basic utility functions that must not need any
import from cloudmesh OR any other non-standard python
modules. Everything in this file must execute on a clean python 2.7.x
environment.
"""
import platform
from string import Template
import os
import sys
def get_system():
if is_ubuntu():
return "ubuntu"
elif is_centos():
return "centos"
elif is_osx():
return "osx"
else:
return "unsupported"
def is_ubuntu():
"""test sif the platform is ubuntu"""
(dist, version, release) = platform.dist()
if dist == "ubuntu" and version not in ["14.04"]:
print("ERROR: %s %s is not tested" % (dist, version))
return dist == 'Ubuntu'
def is_centos():
"""test if the platform is centos"""
(dist, version, release) = platform.dist()
if dist == "centos" and version not in ["6.5"]:
print("WARNING: %s %s is not tested" % (dist, version))
return dist == "centos"
def is_osx():
osx = platform.system().lower() == 'darwin'
if osx:
os_version = platform.mac_ver()[0]
if os_version not in ['10.9.5',
'10.10',
'10.10.1',
'10.10.2',
'10.10.3']:
osx = False
print("WARNING: %s %s is not tested" % ('OSX', os_version))
return osx
| en | 0.78696 | Cloudmesh install util functions. This file contains basic utility functions that must not need any import from cloudmesh OR any other non-standard python modules. Everything in this file must execute on a clean python 2.7.x environment. test sif the platform is ubuntu test if the platform is centos | 2.703818 | 3 |
servers/wstream.py | sodicarus/channels | 0 | 6622506 | <filename>servers/wstream.py<gh_stars>0
# -*- coding: utf-8 -*-
# ------------------------------------------------------------
# pelisalacarta - XBMC Plugin
# Conector para wstream.video
# http://www.mimediacenter.info/foro/viewtopic.php?f=36&t=7808
# by DrZ3r0
# ------------------------------------------------------------
import re
import time
import urllib
import xbmc
from core import httptools
from lib import jsunpack
from core import config
from core import logger
from core import scrapertools
headers = [['User-Agent', 'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:62.0) Gecko/20100101 Firefox/62.0']]
def get_video_url(page_url, premium=False, user="", password="", video_password=""):
    """Extract direct .mp4 URLs (with quality labels) from a wstream page.

    Returns a list of [label, "url|urlencoded_headers"] pairs as expected
    by the player code.  Only page_url is used; the remaining parameters
    exist for interface compatibility with other server connectors.
    """
    logger.info("[wstream.py] url=" + page_url)
    video_urls = []
    data = scrapertools.cache_page(page_url, headers=headers)
    # Quality labels (e.g. resolutions) scraped from the download table.
    vid = scrapertools.find_multiple_matches(data, 'download_video.*?>.*?<.*?<td>([^\,,\s]+)')
    # Build a per-call header list.  The original appended ['Referer', ...] to
    # the module-level `headers` list, so Referer entries accumulated across
    # calls and leaked between pages.
    request_headers = headers + [['Referer', page_url]]
    post_data = scrapertools.find_single_match(data, "</div>\s*<script type='text/javascript'>(eval.function.p,a,c,k,e,.*?)\s*</script>")
    if post_data != "":
        # The page hides the sources behind packed JS; unpack it first.
        from lib import jsunpack
        data = jsunpack.unpack(post_data)
    media_urls = scrapertools.find_multiple_matches(data, '(http.*?\.mp4)')
    _headers = urllib.urlencode(dict(request_headers))
    # Pair each media URL with its quality label by position (assumes the two
    # lists line up; an extra URL would raise IndexError, as before).
    for i, media_url in enumerate(media_urls):
        video_urls.append([vid[i] + " mp4 [wstream] ", media_url + '|' + _headers])
    for video_url in video_urls:
        logger.info("[wstream.py] %s - %s" % (video_url[0], video_url[1]))
    return video_urls
def find_videos(data):
    """Scan *data* for wstream links; return unique [title, url, server] triples."""
    patronvideos = r"wstream.video/(?:embed-)?([a-z0-9A-Z]+)"
    logger.info("[wstream.py] find_videos #" + patronvideos + "#")
    devuelve = []
    encontrados = set()
    for match in re.compile(patronvideos, re.DOTALL).findall(data):
        url = 'http://wstream.video/%s' % match
        if url in encontrados:
            logger.info("  url duplicada=" + url)
            continue
        logger.info("  url=" + url)
        devuelve.append(["[wstream]", url, 'wstream'])
        encontrados.add(url)
    return devuelve
| <filename>servers/wstream.py<gh_stars>0
# -*- coding: utf-8 -*-
# ------------------------------------------------------------
# pelisalacarta - XBMC Plugin
# Conector para wstream.video
# http://www.mimediacenter.info/foro/viewtopic.php?f=36&t=7808
# by DrZ3r0
# ------------------------------------------------------------
import re
import time
import urllib
import xbmc
from core import httptools
from lib import jsunpack
from core import config
from core import logger
from core import scrapertools
headers = [['User-Agent', 'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:62.0) Gecko/20100101 Firefox/62.0']]
def get_video_url(page_url, premium=False, user="", password="", video_password=""):
logger.info("[wstream.py] url=" + page_url)
video_urls = []
data = scrapertools.cache_page(page_url, headers=headers)
vid = scrapertools.find_multiple_matches(data,'download_video.*?>.*?<.*?<td>([^\,,\s]+)')
headers.append(['Referer', page_url])
post_data = scrapertools.find_single_match(data, "</div>\s*<script type='text/javascript'>(eval.function.p,a,c,k,e,.*?)\s*</script>")
if post_data != "":
from lib import jsunpack
data = jsunpack.unpack(post_data)
media_url = scrapertools.find_multiple_matches(data, '(http.*?\.mp4)')
_headers = urllib.urlencode(dict(headers))
i=0
for media_url in media_url:
video_urls.append([vid[i] + " mp4 [wstream] ", media_url + '|' + _headers])
i=i+1
for video_url in video_urls:
logger.info("[wstream.py] %s - %s" % (video_url[0], video_url[1]))
return video_urls
def find_videos(data):
encontrados = set()
devuelve = []
patronvideos = r"wstream.video/(?:embed-)?([a-z0-9A-Z]+)"
logger.info("[wstream.py] find_videos #" + patronvideos + "#")
matches = re.compile(patronvideos, re.DOTALL).findall(data)
for match in matches:
titulo = "[wstream]"
url = 'http://wstream.video/%s' % match
if url not in encontrados:
logger.info(" url=" + url)
devuelve.append([titulo, url, 'wstream'])
encontrados.add(url)
else:
logger.info(" url duplicada=" + url)
return devuelve
| en | 0.244892 | # -*- coding: utf-8 -*- # ------------------------------------------------------------ # pelisalacarta - XBMC Plugin # Conector para wstream.video # http://www.mimediacenter.info/foro/viewtopic.php?f=36&t=7808 # by DrZ3r0 # ------------------------------------------------------------ #" + patronvideos + "#") | 2.272462 | 2 |
physionet-django/user/management/commands/loadtypes.py | Lucas-Mc/physionet-build | 36 | 6622507 | """
Command to:
- reset and load fixtures for project structures
"""
import os
from django.conf import settings
from django.core.management import call_command
from django.core.management.base import BaseCommand
class Command(BaseCommand):
    """Management command: load the project-type fixtures into the database."""
    def handle(self, *args, **options):
        # Load project types
        project_types_fixtures = os.path.join(settings.BASE_DIR, 'project',
                                              'fixtures', 'project-types.json')
        call_command('loaddata', project_types_fixtures, verbosity=1)
call_command('loaddata', project_types_fixtures, verbosity=1) | """
Command to:
- reset and load fixtures for project structures
"""
import os
from django.conf import settings
from django.core.management import call_command
from django.core.management.base import BaseCommand
class Command(BaseCommand):
def handle(self, *args, **options):
# Load project types
project_types_fixtures = os.path.join(settings.BASE_DIR, 'project',
'fixtures', 'project-types.json')
call_command('loaddata', project_types_fixtures, verbosity=1) | en | 0.888724 | Command to: - reset and load fixtures for project structures # Load project types | 1.978988 | 2 |
MazeSolve.py | Shadowbomb/MazeLearning | 0 | 6622508 | from collections import deque
def display(maze):
print("------------------------------------------")
for i in range(len(maze)):
line = ""
for j in range(len(maze[i])):
if maze[i][j] == 1:
line += "O"
else:
line += "*"
print(line)
print("------------------------------------------")
def solve(width,height,maze_count,read_file_name,write_file_name):
    """BFS-solve maze_count mazes from a file and write the paths to another.

    Input: maze_count mazes, each `height` lines of `width` digits
    (0 = open, 1 = wall), read one character at a time; a trailing
    newline separates rows.  Entry is (1,1), exit is (width-2, height-2).
    Output: per maze, a flattened 0/1 grid where 1 marks the found path.
    """
    # Neighbour offsets: up, right, down, left.
    directions = [[0,-1],[1,0],[0,1],[-1,0]]
    write_line = ""
    with open(read_file_name,'r') as scan:
        for i in range(maze_count):
            # Progress indicator at each quarter of the workload.
            if maze_count >= 4 and i % (maze_count/4) == 0:
                print(str(100.0*i/maze_count) + "%")
            maze = []
            for j in range(height):
                line = []
                for k in range(width):
                    line.append(int(scan.read(1)))
                maze.append(line)
            scan.readline()  # consume the newline terminating the row
            q = deque()
            pos = (1,1)
            maze[pos[0]][pos[1]] = 1  # 1 doubles as "wall" and "visited"
            # previous[r][c]: predecessor on the BFS tree, for path reconstruction.
            # NOTE(review): allocation mixes width and height ([width] rows of
            # [height] cells) while indexing is [row][col] -- assumes square
            # mazes; TODO confirm for non-square input.
            previous = [None] * width
            for j in range(height):
                previous[j] = [None] * height
            path = [0] * width
            for j in range(height):
                path[j] = [0] * height
            q.append(pos)
            while q:
                pos = q.popleft()
                if pos[0] == width-2 and pos[1] == height-2:
                    # Reached the goal; walk predecessors back to the start.
                    while not pos == None:
                        path[pos[0]][pos[1]] = 1
                        pos = previous[pos[0]][pos[1]]
                    break
                # Cells are marked visited at dequeue time, so a cell can be
                # enqueued more than once -- correct but wasteful.
                maze[pos[0]][pos[1]] = 1
                for dir in directions:
                    new = (pos[0] + dir[0],pos[1]+dir[1])
                    if maze[new[0]][new[1]] == 0:
                        q.append(new)
                        previous[new[0]][new[1]] = pos
            for j in range(len(path)):
                for k in range(len(path[j])):
                    write_line += str(path[j][k])
            if i < maze_count - 1:
                write_line += "\n"
        scan.close()
    with open(write_file_name, 'w') as saveFile:
        saveFile.write(write_line)
        saveFile.close()
    print("100.0%\nDone")
| from collections import deque
def display(maze):
print("------------------------------------------")
for i in range(len(maze)):
line = ""
for j in range(len(maze[i])):
if maze[i][j] == 1:
line += "O"
else:
line += "*"
print(line)
print("------------------------------------------")
def solve(width,height,maze_count,read_file_name,write_file_name):
directions = [[0,-1],[1,0],[0,1],[-1,0]]
write_line = ""
with open(read_file_name,'r') as scan:
for i in range(maze_count):
if maze_count >= 4 and i % (maze_count/4) == 0:
print(str(100.0*i/maze_count) + "%")
maze = []
for j in range(height):
line = []
for k in range(width):
line.append(int(scan.read(1)))
maze.append(line)
scan.readline()
q = deque()
pos = (1,1)
maze[pos[0]][pos[1]] = 1
previous = [None] * width
for j in range(height):
previous[j] = [None] * height
path = [0] * width
for j in range(height):
path[j] = [0] * height
q.append(pos)
while q:
pos = q.popleft()
if pos[0] == width-2 and pos[1] == height-2:
while not pos == None:
path[pos[0]][pos[1]] = 1
pos = previous[pos[0]][pos[1]]
break
maze[pos[0]][pos[1]] = 1
for dir in directions:
new = (pos[0] + dir[0],pos[1]+dir[1])
if maze[new[0]][new[1]] == 0:
q.append(new)
previous[new[0]][new[1]] = pos
for j in range(len(path)):
for k in range(len(path[j])):
write_line += str(path[j][k])
if i < maze_count - 1:
write_line += "\n"
scan.close()
with open(write_file_name, 'w') as saveFile:
saveFile.write(write_line)
saveFile.close()
print("100.0%\nDone")
| none | 1 | 3.471472 | 3 | |
api/utils/custom/schema/aggregation.py | forging2012/opencmdb-backend | 1 | 6622509 | <reponame>forging2012/opencmdb-backend<filename>api/utils/custom/schema/aggregation.py
from marshmallow import fields
from api.utils.custom.schema.base import BaseSchema
from api.utils.custom.validators import validate_code
class AggregationSchema(BaseSchema):
    """Marshmallow schema validating aggregation payloads."""
    code = fields.Str(validate=validate_code, required=True)
    name = fields.Str(required=True)
    # Accepted only on input (load); never serialised back out.
    layer_id = fields.Str(load_only=True)
    class Meta:
        # Raise on validation errors instead of collecting them.
        strict = True
| from marshmallow import fields
from api.utils.custom.schema.base import BaseSchema
from api.utils.custom.validators import validate_code
class AggregationSchema(BaseSchema):
code = fields.Str(validate=validate_code, required=True)
name = fields.Str(required=True)
layer_id = fields.Str(load_only=True)
class Meta:
strict = True | none | 1 | 1.958434 | 2 | |
smoke/features/smoke.py | unicefuganda/mtrack | 1 | 6622510 | from lettuce import *
from lxml import html
from nose.tools import assert_equals
from splinter import Browser
import sys
class bcolors:
    """ANSI escape sequences used to colour terminal output.

    ``disable`` blanks the codes on an instance so subsequent formatting
    produces plain, uncoloured text.
    """
    HEADER = '\033[95m'
    OKBLUE = '\033[94m'
    OKGREEN = '\033[92m'
    WARNING = '\033[93m'
    FAIL = '\033[91m'
    ENDC = '\033[0m'

    def disable(self):
        """Clear every colour code on this instance."""
        for attr in ('HEADER', 'OKBLUE', 'OKGREEN', 'WARNING', 'FAIL', 'ENDC'):
            setattr(self, attr, '')
browser = Browser()
def print_ok(text):
    """Print *text* in blue (informational)."""
    # 'print x' is Python-2-only syntax; the call form below also works on 2.x.
    print(bcolors.OKBLUE + text + bcolors.ENDC)
def print_success(text):
    """Print *text* in green (success)."""
    print(bcolors.OKGREEN + text + bcolors.ENDC)
def print_fail(text):
    """Print *text* in red, prefixed with 'Failed : '."""
    print(bcolors.FAIL + "Failed : " + text + bcolors.ENDC)
def assert_true(boolean, text):
    """Raise an Exception carrying *text* when *boolean* is falsy.

    The original compared ``boolean != True``, which mis-fires on truthy
    non-bool values (a non-empty string would raise); a plain truthiness
    test matches the function's intent.  Call sites in this file all pass
    real booleans, so their behaviour is unchanged.
    """
    if not boolean:
        raise Exception(bcolors.FAIL + "Failed : " + text + bcolors.ENDC)
def close_browser():
    """Shut down the shared splinter Browser instance."""
    browser.quit()
def assert_text_presence(*texts):
    """Assert that every string in *texts* is visible on the current page."""
    for expected in texts:
        message = "Text '" + expected + "' is present"
        assert_true(browser.is_text_present(expected), message)
def assert_current_url(url, text):
    # Assert that the browser is currently at *url*; *text* is the failure
    # message forwarded to assert_true.
    assert_true(browser.url == url, text)
# Page keys -> local dev-server URLs exercised by the smoke test.
# NOTE(review): 'stock_data' has no trailing slash, unlike every other entry
# -- confirm whether the server redirects or this is intentional.
URL = {
    'home' : 'http://localhost:8000/',
    'login' : 'http://localhost:8000/account/login/',
    'approve' : 'http://localhost:8000/approve/',
    'health_data' : 'http://localhost:8000/mtrack/stats/',
    'mgt_data' : 'http://localhost:8000/mtrack/mgt/stats/',
    'facility' : 'http://localhost:8000/cvs/facility/',
    'users' : 'http://localhost:8000/cvs/reporter/',
    'messages' : 'http://localhost:8000/cvs/messagelog/',
    'anonymous_reports' : 'http://localhost:8000/anonymousreports/',
    'stock_data' : 'http://localhost:8000/mtrack/logistics',
}
def assert_landing_page():
    """Open the home page and check its headline widgets are present."""
    browser.visit(URL['home'])
    assert_text_presence('Alerts', 'Approve HMIS Reports', 'User Management', 'Incoming Messages')
    print_ok("Landing page ok!")
def assert_login_page():
    """Log in with the 'smoke' account and verify redirect to the home page."""
    browser.visit(URL['login'])
    browser.fill("username", "smoke")
    browser.fill("password", "password")
    browser.find_by_css("input[type=submit]").first.click()
    assert_current_url(URL['home'], 'Login success!')
    print_ok("Login page ok!")
def assert_approve_page():
    """Open the HMIS approval page and verify its headings."""
    browser.visit(URL['approve'])
    assert_text_presence('Actions', 'Last Reporting Period Results')
    print_ok("Approve page ok!")
def assert_health_data_page():
    """Open the health-data stats page and verify the fever chart heading."""
    browser.visit(URL['health_data'])
    assert_text_presence('Variation of Fever (VHT)')
    print_ok("Health data page ok!")
def assert_mgt_data_page():
    """Open the management-data stats page and verify its column headings."""
    browser.visit(URL['mgt_data'])
    assert_text_presence('District', 'VHTs', 'Health Centers')
    print_ok("Mgt Data page ok!")
def assert_faciliy_page():
    """Open the health-facility listing and verify its headings.

    NOTE(review): function name has a typo ('faciliy'); renaming would also
    require updating the call in smoke().
    """
    browser.visit(URL['facility'])
    assert_text_presence('Filters', 'Health Facilities')
    print_ok("Facility page ok!")
def assert_users_page():
    """Open the registered-users page and verify its headings."""
    browser.visit(URL['users'])
    assert_text_presence('Filters', 'Actions', 'Registered Users')
    print_ok("Users page ok!")
def assert_messaging_page():
    """Open the message log and verify its headings."""
    browser.visit(URL['messages'])
    assert_text_presence('Filters', 'Actions', 'Results')
    print_ok("Messaging page ok!")
def assert_anonymous_reports_page():
    """Open the anonymous-reports page and verify its headings."""
    browser.visit(URL['anonymous_reports'])
    assert_text_presence('Actions', 'Anonymous Reports')
    print_ok("Anonymous reports page ok!")
def assert_stock_data_page():
    """Open the stock-data page.

    NOTE(review): assert_text_presence() is called with no arguments, so no
    text is actually verified here -- confirm which strings should be checked.
    """
    browser.visit(URL['stock_data'])
    assert_text_presence()
    print_ok("Stock Data page ok!")
def smoke():
    """Run every page check in sequence.

    Returns 0 on success and 1 on any failure, so the result can be passed
    straight to sys.exit().  The browser is closed on both paths.
    """
    try:
        assert_landing_page()
        assert_login_page()
        assert_approve_page()
        assert_health_data_page()
        assert_mgt_data_page()
        assert_faciliy_page()
        assert_users_page()
        assert_messaging_page()
        assert_anonymous_reports_page()
        # assert_stock_data_page() #commenting this out since it takes a lot of time, uncomment this once the issue is resolved
        close_browser()
        return 0
    except Exception as e:
        # 'except Exception, e' is Python-2-only syntax; the 'as' form works
        # on Python 2.6+ and 3.x alike.
        print(e)
        close_browser()
        return 1
if __name__ == '__main__':
sys.exit(smoke()) | from lettuce import *
from lxml import html
from nose.tools import assert_equals
from splinter import Browser
import sys
class bcolors:
HEADER = '\033[95m'
OKBLUE = '\033[94m'
OKGREEN = '\033[92m'
WARNING = '\033[93m'
FAIL = '\033[91m'
ENDC = '\033[0m'
def disable(self):
self.HEADER = ''
self.OKBLUE = ''
self.OKGREEN = ''
self.WARNING = ''
self.FAIL = ''
self.ENDC = ''
browser = Browser()
def print_ok(text):
print bcolors.OKBLUE + text + bcolors.ENDC
def print_success(text):
print bcolors.OKGREEN + text + bcolors.ENDC
def print_fail(text):
print bcolors.FAIL + "Failed : " + text + bcolors.ENDC
def assert_true(boolean, text):
if boolean != True:
raise Exception(bcolors.FAIL + "Failed : " + text + bcolors.ENDC)
def close_browser():
browser.quit();
def assert_text_presence(*texts):
for text in texts:
assert_true(browser.is_text_present(text), "Text '"+ text +"' is present")
def assert_current_url(url, text):
assert_true(browser.url == url, text)
URL = {
'home' : 'http://localhost:8000/',
'login' : 'http://localhost:8000/account/login/',
'approve' : 'http://localhost:8000/approve/',
'health_data' : 'http://localhost:8000/mtrack/stats/',
'mgt_data' : 'http://localhost:8000/mtrack/mgt/stats/',
'facility' : 'http://localhost:8000/cvs/facility/',
'users' : 'http://localhost:8000/cvs/reporter/',
'messages' : 'http://localhost:8000/cvs/messagelog/',
'anonymous_reports' : 'http://localhost:8000/anonymousreports/',
'stock_data' : 'http://localhost:8000/mtrack/logistics',
}
def assert_landing_page():
"""Landing page"""
browser.visit(URL['home'])
assert_text_presence('Alerts', 'Approve HMIS Reports', 'User Management', 'Incoming Messages')
print_ok("Landing page ok!")
def assert_login_page():
"""Login page"""
browser.visit(URL['login'])
browser.fill("username", "smoke")
browser.fill("password", "password")
browser.find_by_css("input[type=submit]").first.click()
assert_current_url(URL['home'], 'Login success!')
print_ok("Login page ok!")
def assert_approve_page():
"""Approve page"""
browser.visit(URL['approve'])
assert_text_presence('Actions', 'Last Reporting Period Results')
print_ok("Approve page ok!")
def assert_health_data_page():
"""Health data page"""
browser.visit(URL['health_data'])
assert_text_presence('Variation of Fever (VHT)')
print_ok("Health data page ok!")
def assert_mgt_data_page():
"""Mgt Data page"""
browser.visit(URL['mgt_data'])
assert_text_presence('District', 'VHTs', 'Health Centers')
print_ok("Mgt Data page ok!")
def assert_faciliy_page():
"""Facility page"""
browser.visit(URL['facility'])
assert_text_presence('Filters', 'Health Facilities')
print_ok("Facility page ok!")
def assert_users_page():
"""Users page"""
browser.visit(URL['users'])
assert_text_presence('Filters', 'Actions', 'Registered Users')
print_ok("Users page ok!")
def assert_messaging_page():
"""Messaging page"""
browser.visit(URL['messages'])
assert_text_presence('Filters', 'Actions', 'Results')
print_ok("Messaging page ok!")
def assert_anonymous_reports_page():
"""Anonymous reports page"""
browser.visit(URL['anonymous_reports'])
assert_text_presence('Actions', 'Anonymous Reports')
print_ok("Anonymous reports page ok!")
def assert_stock_data_page():
"""Stock Data page"""
browser.visit(URL['stock_data'])
assert_text_presence()
print_ok("Stock Data page ok!")
def smoke():
"""Main suite"""
try:
assert_landing_page()
assert_login_page()
assert_approve_page()
assert_health_data_page()
assert_mgt_data_page()
assert_faciliy_page()
assert_users_page()
assert_messaging_page()
assert_anonymous_reports_page()
# assert_stock_data_page() #commenting this out since it takes a lot of time, uncomment this once the issue is resolved
close_browser()
return 0
except Exception, e:
print e
close_browser()
return 1
if __name__ == '__main__':
sys.exit(smoke()) | en | 0.801087 | Landing page Login page Approve page Health data page Mgt Data page Facility page Users page Messaging page Anonymous reports page Stock Data page Main suite # assert_stock_data_page() #commenting this out since it takes a lot of time, uncomment this once the issue is resolved | 2.704479 | 3 |
src/terial/classifier/data/register_data.py | keunhong/photoshape | 81 | 6622511 | import argparse
from pathlib import Path
import ujson as json
import tqdm
from terial import models
from terial.database import session_scope
# CLI: a single positional argument pointing at the rendered dataset root.
# NOTE: parse_args() runs at import time, so importing this module requires
# the argument to be present on the command line.
parser = argparse.ArgumentParser()
parser.add_argument('dataset_path', type=Path)
args = parser.parse_args()
def main():
    """Walk the rendering dataset tree and register every new rendering.

    Expected layout: <root>/client=<name>/epoch=<n>/<split>/<prefix>.params.json.
    Renderings already present in the database (matched on dataset, client,
    split, epoch and prefix) are skipped.
    """
    if not args.dataset_path.exists():
        # The original used an f-string with no placeholders (ruff F541).
        print('Given path does not exist.')
        return

    dataset_name = args.dataset_path.name
    print('Determining number of paths.')
    # Count without materialising the full list of paths.
    count = sum(1 for _ in args.dataset_path.glob('**/*params.json'))
    pbar = tqdm.tqdm(total=count)
    with session_scope() as sess:
        materials = sess.query(models.Material).all()
        material_by_id = {m.id: m for m in materials}
        for client_dir in args.dataset_path.iterdir():
            client = client_dir.name.split('=')[1]
            for epoch_dir in client_dir.iterdir():
                epoch = int(epoch_dir.name.split('=')[1])
                for split_set_dir in epoch_dir.iterdir():
                    split_set = models.SplitSet[split_set_dir.name.upper()]
                    for path in split_set_dir.glob('*.params.json'):
                        pbar.update(1)
                        prefix = path.name.split('.')[0]
                        # Skip renderings that are already registered.
                        if (sess.query(models.Rendering)
                                .filter_by(dataset_name=dataset_name,
                                           client=client,
                                           split_set=split_set,
                                           epoch=epoch,
                                           prefix=prefix).count() > 0):
                            continue
                        rendering = register(
                            sess, dataset_name, client, epoch, split_set,
                            prefix, path, material_by_id)
                        pbar.set_description(f'{rendering.id}')
def register(sess, dataset, client, epoch, split_set, prefix, path,
             materials_by_id):
    """Create, persist and return a Rendering row for one ``params.json`` file."""
    with path.open('r') as f:
        params = json.load(f)

    # The prefix encodes the pair id and the per-pair index: "<pair>_<index>".
    pair_id, index = prefix.split('_')
    rendering = models.Rendering(
        dataset_name=dataset,
        client=client,
        epoch=epoch,
        split_set=split_set,
        pair_id=int(pair_id),
        index=int(index),
        prefix=prefix,
        saturated_frac=params['saturated_frac'],
        rend_time=params['time_elapsed'],
    )
    # Attach every material referenced by this rendering's segments.
    for material_id in set(params['segment']['materials'].values()):
        rendering.materials.append(materials_by_id[material_id])
    sess.add(rendering)
    sess.commit()
    return rendering
if __name__ == '__main__':
    # Script entry point: python register_data.py <dataset_path>
    main()
| import argparse
from pathlib import Path
import ujson as json
import tqdm
from terial import models
from terial.database import session_scope
parser = argparse.ArgumentParser()
parser.add_argument('dataset_path', type=Path)
args = parser.parse_args()
def main():
if not args.dataset_path.exists():
print(f'Given path does not exist.')
return
dataset_name = args.dataset_path.name
print('Determining number of paths.')
count = len(list(args.dataset_path.glob('**/*params.json')))
pbar = tqdm.tqdm(total=count)
with session_scope() as sess:
materials = sess.query(models.Material).all()
material_by_id = {m.id: m for m in materials}
for client_dir in args.dataset_path.iterdir():
client = client_dir.name.split('=')[1]
for epoch_dir in client_dir.iterdir():
epoch = int(epoch_dir.name.split('=')[1])
for split_set_dir in epoch_dir.iterdir():
split_set = models.SplitSet[split_set_dir.name.upper()]
for path in split_set_dir.glob('*.params.json'):
pbar.update(1)
prefix = path.name.split('.')[0]
if (sess.query(models.Rendering)
.filter_by(dataset_name=dataset_name,
client=client,
split_set=split_set,
epoch=epoch,
prefix=prefix).count() > 0):
continue
rendering = register(
sess, dataset_name, client, epoch, split_set,
prefix, path, material_by_id)
pbar.set_description(f'{rendering.id}')
def register(sess, dataset, client, epoch, split_set, prefix, path,
materials_by_id):
with path.open('r') as f:
params = json.load(f)
material_ids = set(params['segment']['materials'].values())
pair_id, index = prefix.split('_')
rendering = models.Rendering(
dataset_name=dataset,
client=client,
epoch=epoch,
split_set=split_set,
pair_id=int(pair_id),
index=int(index),
prefix=prefix,
saturated_frac=params['saturated_frac'],
rend_time=params['time_elapsed'],
)
for material_id in material_ids:
rendering.materials.append(materials_by_id[material_id])
sess.add(rendering)
sess.commit()
return rendering
if __name__ == '__main__':
main()
| none | 1 | 2.233909 | 2 | |
scripts/folder_to_gosh.py | INAETICS/AILS_Demonstrator | 2 | 6622512 | from os import listdir
import argparse
import re
# Only execute this code if this module is not imported. So:
# $ python folder_to_gosh.py
# or
# $ ./folder_to_gosh.py
def main():
    """Generate GoSH artifact lines for every ``*.jar`` in a directory.

    For each jar an optional trailing version number is stripped from the
    file name and a line of the form
    ``["identity"="<name>" "version"="<version>"]`` is emitted, either to
    the file given with ``-o`` or to stdout.
    """
    parser = argparse.ArgumentParser(description='Generate a (partly) GoSH ' +
                                     'file given a directory of jars')
    # this should have been type=open, however we have to check that the
    # input_directory is actually a dir, which is not possible with the
    # argparse library
    parser.add_argument('input_directory')
    parser.add_argument('-o', dest='output', help='File to output the GoSH ' +
                        'file to, stdout if kept empty')
    args = parser.parse_args()

    output_text = ""
    try:
        # Keep only the jar files from the input directory.
        files = filter(lambda x: x.endswith('.jar'),
                       listdir(args.input_directory))
        # Matches an optional dash followed by a 1-3 component version number
        # right before the .jar suffix; <vnum> captures the version itself.
        # Bug fix: the dots are now escaped -- the original pattern used bare
        # '.' which matched any character (e.g. '1x2' parsed as a version).
        pattern = re.compile(r'(-)?(?P<vnum>\d+(\.\d+(\.\d+)?)?)\.jar')
        for jar in files:
            version = 0
            # Find the first occurrence of a version string in the jar name.
            match = pattern.search(jar)
            if match:
                version = match.groupdict()['vnum']
                # Strip the matched "-<version>.jar" tail from the name.
                jar = pattern.sub('', jar)
            output_text += '["identity"="{}" "version"="{}"]\n'.format(jar,
                                                                      version)
        # Only write to a file if one was specified.
        if args.output:
            try:
                with open(args.output, 'w+') as f:
                    f.write(output_text)
            except IOError:
                print('unable to write to output file')
        else:
            print(output_text)
    except FileNotFoundError:
        print('input directory is not a valid directory!')


if __name__ == "__main__":
    main()
| from os import listdir
import argparse
import re
# Only execute this code if this module is not imported. So:
# $ python folder_to_gosh.py
# or
# $ ./folder_to_gosh.py
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='Generate a (partly) GoSH ' +
'file given a directory of jars')
# this should have been type=open, however we have to check that the
# input_directory is actually a dir, which is not possible with the
# argparse library
parser.add_argument('input_directory')
parser.add_argument('-o', dest='output', help='File to output the GoSH ' +
'file to, stdout if kept empty')
args = parser.parse_args()
output_text = ""
try:
# list a folder, remove any file (or directory) that does not end
# with .jar
files = filter(lambda x: x.endswith('.jar'),
listdir(args.input_directory))
# Pattern that matches version numbers in jar names, the group <vnum>
# is the actual version number, the total match is dash versionnumber
pattern = re.compile('(-)?(?P<vnum>\d+(.\d+(.\d+)?)?)\.jar')
for jar in files:
version = 0
# find the first occurence of a version string in the jar name
match = pattern.search(jar)
if match:
# the version of this artifact was matched, and is to be found
# in the <vnum> captured group
version = match.groupdict()['vnum']
# remove the version number from the artifact name for the
# config. The sub method just substitutes the first match of
# the pattern, so in this case the whole version string
jar = pattern.sub('', jar)
output_text += '["identity"="{}" "version"="{}"]\n'.format(jar,
version)
# only write to file if there was one specified
if args.output:
try:
# only keep the file reference for as long as we need it
with open(args.output, 'w+') as f:
f.write(output_text)
except IOError as e:
print('unable to write to output file')
else:
print(output_text)
except FileNotFoundError:
print('input directory is not a valid directory!')
| en | 0.893992 | # Only execute this code if this module is not imported. So: # $ python folder_to_gosh.py # or # $ ./folder_to_gosh.py # this should have been type=open, however we have to check that the # input_directory is actually a dir, which is not possible with the # argparse library # list a folder, remove any file (or directory) that does not end # with .jar # Pattern that matches version numbers in jar names, the group <vnum> # is the actual version number, the total match is dash versionnumber # find the first occurence of a version string in the jar name # the version of this artifact was matched, and is to be found # in the <vnum> captured group # remove the version number from the artifact name for the # config. The sub method just substitutes the first match of # the pattern, so in this case the whole version string # only write to file if there was one specified # only keep the file reference for as long as we need it | 2.805011 | 3 |
PythonFIAP/5_1_Wazeyes/Converter_geolocalizacao.py | DanielGMesquita/StudyPath | 0 | 6622513 | from geopy.geocoders import Nominatim
geolocator = Nominatim(user_agent="wazeyes")
# Prompt (Portuguese): ask for an address with house number and city.
endereco=input("Digite um endereco com número e cidade. ")
# geocode() returns None when nothing matches; str(None) == 'None', which is
# what the guard below detects after splitting on commas.
resultado = str(geolocator.geocode(endereco)).split(",")
if resultado[0]!='None':
    # NOTE(review): indexing resultado[1..3] assumes the geocoded address has
    # at least four comma-separated parts -- shorter results raise IndexError.
    print("Endereço completo.: ", resultado)
    print("Bairro............: ", resultado[1])
    print("Cidade............: ", resultado[2])
    print("Regiao............: ", resultado[3])
| from geopy.geocoders import Nominatim
geolocator = Nominatim(user_agent="wazeyes")
endereco=input("Digite um endereco com número e cidade. ")
resultado = str(geolocator.geocode(endereco)).split(",")
if resultado[0]!='None':
print("Endereço completo.: ", resultado)
print("Bairro............: ", resultado[1])
print("Cidade............: ", resultado[2])
print("Regiao............: ", resultado[3])
| none | 1 | 3.340258 | 3 | |
blogs/migrations/0001_initial.py | laodeaksar/post | 1 | 6622514 | <reponame>laodeaksar/post<gh_stars>1-10
# Generated by Django 3.0.8 on 2020-07-16 14:12
import ckeditor_uploader.fields
from django.conf import settings
import django.core.validators
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Initial schema for the blogs app: Author, Category, Post, Tag and
    # PostView models, plus the Post.tags M2M and the (title, slug)
    # uniqueness rule.  Auto-generated by Django (see header); avoid
    # hand-editing the operation bodies.
    initial = True
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]
    operations = [
        migrations.CreateModel(
            name='Author',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('avatar', models.ImageField(default='avatar.png', upload_to='avatars')),
                ('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
        migrations.CreateModel(
            name='Category',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=20)),
                ('slug', models.SlugField(blank=True, null=True, unique=True)),
            ],
        ),
        migrations.CreateModel(
            name='Post',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=100, verbose_name='title')),
                ('slug', models.SlugField(blank=True, null=True, unique=True)),
                ('content', ckeditor_uploader.fields.RichTextUploadingField()),
                ('description', models.TextField(max_length=500)),
                ('status', models.CharField(choices=[('D', 'Draft'), ('P', 'Published')], default='D', max_length=1)),
                ('thumbnail', models.ImageField(blank=True, upload_to='posts/%Y/%m/%d', validators=[django.core.validators.FileExtensionValidator(['png', 'jpg', 'jpeg'])])),
                ('created', models.DateTimeField(auto_now_add=True)),
                ('updated', models.DateTimeField(auto_now=True)),
                ('featured', models.BooleanField(default=False)),
                ('restrict_comment', models.BooleanField(default=False)),
                ('author', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='blogs.Author')),
                ('category', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='blogs.Category')),
                ('likes', models.ManyToManyField(blank=True, related_name='likes', to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'ordering': ['-created'],
            },
        ),
        migrations.CreateModel(
            name='Tag',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=20)),
                ('slug', models.SlugField(blank=True, null=True, unique=True)),
            ],
            options={
                'verbose_name': 'Tag',
                'verbose_name_plural': 'Tags',
                'unique_together': {('name', 'slug')},
            },
        ),
        migrations.CreateModel(
            name='PostView',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('post', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='blogs.Post')),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'verbose_name': 'Post View',
                'verbose_name_plural': 'Post Views',
            },
        ),
        migrations.AddField(
            model_name='post',
            name='tags',
            field=models.ManyToManyField(to='blogs.Tag'),
        ),
        migrations.AlterUniqueTogether(
            name='post',
            unique_together={('title', 'slug')},
        ),
    ]
| # Generated by Django 3.0.8 on 2020-07-16 14:12
import ckeditor_uploader.fields
from django.conf import settings
import django.core.validators
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Author',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('avatar', models.ImageField(default='avatar.png', upload_to='avatars')),
('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='Category',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=20)),
('slug', models.SlugField(blank=True, null=True, unique=True)),
],
),
migrations.CreateModel(
name='Post',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=100, verbose_name='title')),
('slug', models.SlugField(blank=True, null=True, unique=True)),
('content', ckeditor_uploader.fields.RichTextUploadingField()),
('description', models.TextField(max_length=500)),
('status', models.CharField(choices=[('D', 'Draft'), ('P', 'Published')], default='D', max_length=1)),
('thumbnail', models.ImageField(blank=True, upload_to='posts/%Y/%m/%d', validators=[django.core.validators.FileExtensionValidator(['png', 'jpg', 'jpeg'])])),
('created', models.DateTimeField(auto_now_add=True)),
('updated', models.DateTimeField(auto_now=True)),
('featured', models.BooleanField(default=False)),
('restrict_comment', models.BooleanField(default=False)),
('author', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='blogs.Author')),
('category', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='blogs.Category')),
('likes', models.ManyToManyField(blank=True, related_name='likes', to=settings.AUTH_USER_MODEL)),
],
options={
'ordering': ['-created'],
},
),
migrations.CreateModel(
name='Tag',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=20)),
('slug', models.SlugField(blank=True, null=True, unique=True)),
],
options={
'verbose_name': 'Tag',
'verbose_name_plural': 'Tags',
'unique_together': {('name', 'slug')},
},
),
migrations.CreateModel(
name='PostView',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('post', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='blogs.Post')),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
options={
'verbose_name': 'Post View',
'verbose_name_plural': 'Post Views',
},
),
migrations.AddField(
model_name='post',
name='tags',
field=models.ManyToManyField(to='blogs.Tag'),
),
migrations.AlterUniqueTogether(
name='post',
unique_together={('title', 'slug')},
),
] | en | 0.806495 | # Generated by Django 3.0.8 on 2020-07-16 14:12 | 1.813402 | 2 |
videocompress.py | point86/videocompress | 3 | 6622515 | <reponame>point86/videocompress
from PyQt5.QtGui import *
from PyQt5.QtCore import *
from PyQt5.QtWidgets import *
import os
import sys
from taskPerformer import TaskPerformer
class MainWindow(QWidget):
    """Main (and only) window of the VideoCompress GUI.

    Lets the user pick an input file/directory in a tree view and an output
    directory, choose either a simple H.264 CRF preset (slider) or free-form
    ffmpeg options, and hand the job off to TaskPerformer.
    """

    DESC = "Videocompress is a simple utility that runs ffmpeg over a single file or an entire directory (thus recursively on all\n"\
           "files and subdirs).\n"\
           "If input is an entire directory, this program will create another folder with the same structure and all the same files, the\n"\
           "only difference is that all video files will be the output of ffmpeg execution (non video files will be copied)."
    # Typo fixes below: "offical" -> "official", "distribuited" -> "distributed".
    FFMPEG_SITE = "Need more options? <a href=\"https://ffmpeg.org/\">FFmpeg official site</a> has all the documentation you will need."
    REPOSITORY = "Created by <NAME> and distributed under the MIT license. <a href=\"https://github.com/point86/Videocompress\">Fork me on GitHub</a>"
    FFMPEGSETTINGS_DEF = "-c:v libx264 -crf 26"
    # Path chosen in the tree view; None until the user clicks something.
    inputPath = None

    def __init__(self, parent = None):
        QWidget.__init__(self, parent)
        #vertical layout
        vbox = QVBoxLayout()
        #introduction labels
        l_intro = QLabel(self.DESC)
        vbox.addWidget(l_intro)
        l_site = QLabel(self.FFMPEG_SITE)
        l_site.setOpenExternalLinks(True)
        l_repo = QLabel(self.REPOSITORY)
        l_repo.setOpenExternalLinks(True)
        vbox.addWidget(l_site)
        vbox.addWidget(l_repo)
        #input area
        l_input = QLabel("Input path:")
        vbox.addWidget(l_input)
        self.treeView = QTreeView()
        self.model = QFileSystemModel() #data model for treeView
        self.model.setRootPath(QDir().rootPath())
        self.treeView.setModel(self.model)
        self.treeView.setColumnWidth(0,200)#name column
        self.treeView.setMinimumSize(QSize(400, 200))
        self.treeView.clicked.connect(self.selectInput)
        vbox.addWidget(self.treeView)
        #output area
        l_output = QLabel("Output path:")
        vbox.addWidget(l_output)
        outputHBox = QHBoxLayout()
        self.tb_output = QLineEdit("")
        outputHBox.addWidget(self.tb_output)
        self.btt_output = QPushButton("Select")
        self.btt_output.clicked.connect(self.selectOutputDir_btt)
        outputHBox.addWidget(self.btt_output)
        vbox.addLayout(outputHBox)
        #settings area
        l_encoding = QLabel("Encoding settings:")
        vbox.addWidget(l_encoding)
        #2 radio buttons
        # Bug fix: the label now matches the slider's initial value (26);
        # the original hard-coded "CRF 28" while the slider started at 26.
        self.std_rb = QRadioButton("H.264 - CRF 26")
        self.std_rb.setChecked(True)
        self.std_rb.toggled.connect(self.changeStack)
        self.adv_rb = QRadioButton("Advanced - (Custom command line options)")
        self.adv_rb.toggled.connect(self.changeStack)
        radiobttHBox = QHBoxLayout()
        radiobttHBox.addWidget(self.std_rb)
        radiobttHBox.addWidget(self.adv_rb)
        vbox.addLayout(radiobttHBox)
        #simple settings (x264), with slider
        simple_sett_layout = QHBoxLayout()
        l_min = QLabel("High quality\n(Bigger size)")
        l_max = QLabel("Low quality\n(Smaller size)")
        self.slider = QSlider(Qt.Horizontal)
        #total CRF range is 0-51, but 18-28 is a sane range
        self.slider.setMinimum(18) #visually lossless or nearly so
        self.slider.setMaximum(28) #higher values than 28 results in very poor quality
        self.slider.setValue(26)
        self.slider.setTickPosition(QSlider.TicksBelow)
        self.slider.setTickInterval(1)
        self.slider.valueChanged.connect(self.sliderValuechange)
        simple_sett_layout.addWidget(l_min)
        simple_sett_layout.addWidget(self.slider)
        simple_sett_layout.addWidget(l_max)
        #advanced settings (explicit commands)
        adv_sett_layout = QHBoxLayout()
        self.tb_advsett = QLineEdit(self.FFMPEGSETTINGS_DEF)
        self.tb_advsett.setEnabled(True) #useless
        # Typo fix: "Restore defalut" -> "Restore default".
        self.btt_restdef = QPushButton("Restore default")
        self.btt_restdef.clicked.connect(self.ffmpeg_btt)
        adv_sett_layout.addWidget(self.btt_restdef)
        adv_sett_layout.addWidget(self.tb_advsett)
        #encoding stack
        stack0 = QWidget()
        stack1 = QWidget()
        stack0.setLayout(simple_sett_layout)
        stack1.setLayout(adv_sett_layout)
        self.settingsStack = QStackedWidget()
        self.settingsStack.addWidget(stack0)
        self.settingsStack.addWidget(stack1)
        vbox.addWidget(self.settingsStack)
        #start button
        vbox.addStretch(1)
        startHbox = QHBoxLayout()
        self.btt_start = QPushButton("Start")
        self.btt_start.setStyleSheet("color: green")
        self.btt_start.clicked.connect(self.start_btt)
        self.btt_start.setFixedWidth(100)
        startHbox.addWidget(self.btt_start)
        vbox.addLayout(startHbox)
        self.setLayout(vbox)
        self.resize(600, 400)
        self.setWindowTitle("VideoCompress")

    def sliderValuechange(self):
        """Mirror the slider's current CRF value into the preset radio label."""
        value = self.slider.value()
        self.std_rb.setText("H.264 - CRF " + str(value))

    def changeStack(self):
        """Show the settings pane matching the selected radio button."""
        if self.adv_rb.isChecked():
            self.settingsStack.setCurrentIndex(1)
        else:
            self.settingsStack.setCurrentIndex(0)

    def selectInput(self, index):
        """Tree-view click callback: remember the clicked path as the input."""
        self.inputPath = os.path.normpath(self.model.filePath(index))

    def selectOutputDir_btt(self,window):
        """Open a directory picker and put the choice in the output box."""
        # NOTE(review): cancelling the dialog yields '', which normpath turns
        # into '.' -- confirm whether that should instead leave the box empty.
        t = QFileDialog.getExistingDirectory(None, "Output Folder", None)
        self.tb_output.setText(os.path.normpath(t))

    def ffmpeg_btt(self, window):
        """Reset the advanced options box to the default ffmpeg settings."""
        self.tb_advsett.setText(self.FFMPEGSETTINGS_DEF)

    def start_btt(self, window):
        """Validate input/output and kick off the conversion."""
        # Refuse to start when input or output is missing, or both are equal.
        # Bug fix: the original tested tb_output.text() == None, which is
        # never true for a QLineEdit (it returns a string), so an empty
        # output box passed validation; a truthiness test catches it.
        # Typo fix in the message: "ipunt" -> "input".
        if not self.inputPath or not self.tb_output.text() or self.inputPath == self.tb_output.text():
            msg = QMessageBox()
            msg.setIcon(QMessageBox.Critical)
            msg.setWindowTitle("Error")
            msg.setText("Please verify input and output path.")
            msg.setStandardButtons(QMessageBox.Ok)
            msg.exec_()
            return
        # Preset pane (index 0): build the x264 command from the slider.
        if self.settingsStack.currentIndex() == 0:
            ffmpegOpt = " -c:v libx264 -crf " + str(self.slider.value())
        # Advanced pane: take the user's options verbatim.
        else:
            ffmpegOpt = self.tb_advsett.text()
        performer = TaskPerformer(self.inputPath, self.tb_output.text(), ffmpegOpt)
        performer.start_conversion()
if __name__ == "__main__":
    # Standard Qt bootstrap: create the application, show the window and
    # hand control to the Qt event loop until the window closes.
    app = QApplication(sys.argv)
    window = MainWindow()
    window.show()
    sys.exit(app.exec_())
| from PyQt5.QtGui import *
from PyQt5.QtCore import *
from PyQt5.QtWidgets import *
import os
import sys
from taskPerformer import TaskPerformer
class MainWindow(QWidget):
    """Main window of VideoCompress.

    Lets the user pick an input file/directory and an output directory,
    choose ffmpeg encoding options (x264 CRF preset or free-form command
    line options) and start the conversion through TaskPerformer.
    """
    DESC = "Videocompress is a simple utility that runs ffmpeg over a single file or an entire directory (thus recursively on all\n"\
           "files and subdirs).\n"\
           "If input is an entire directory, this program will create another folder with the same structure and all the same files, the\n"\
           "only difference is that all video files will be the output of ffmpeg execution (non video files will be copied)."
    # BUG FIX: corrected "offical" -> "official" in the user-visible text.
    FFMPEG_SITE = "Need more options? <a href=\"https://ffmpeg.org/\">FFmpeg official site</a> has all the documentation you will need."
    # BUG FIX: corrected "distribuited" -> "distributed" in the user-visible text.
    REPOSITORY = "Created by <NAME> and distributed under the MIT license. <a href=\"https://github.com/point86/Videocompress\">Fork me on GitHub</a>"
    # Default ffmpeg options; CRF 26 matches the slider's initial value.
    FFMPEGSETTINGS_DEF = "-c:v libx264 -crf 26"
    # Currently selected input file/directory (None until the user picks one).
    inputPath = None

    def __init__(self, parent=None):
        QWidget.__init__(self, parent)
        # vertical layout
        vbox = QVBoxLayout()
        # introduction labels
        l_intro = QLabel(self.DESC)
        vbox.addWidget(l_intro)
        l_site = QLabel(self.FFMPEG_SITE)
        l_site.setOpenExternalLinks(True)
        l_repo = QLabel(self.REPOSITORY)
        l_repo.setOpenExternalLinks(True)
        vbox.addWidget(l_site)
        vbox.addWidget(l_repo)
        # input area
        l_input = QLabel("Input path:")
        vbox.addWidget(l_input)
        self.treeView = QTreeView()
        self.model = QFileSystemModel()  # data model for treeView
        self.model.setRootPath(QDir().rootPath())
        self.treeView.setModel(self.model)
        self.treeView.setColumnWidth(0, 200)  # name column
        self.treeView.setMinimumSize(QSize(400, 200))
        self.treeView.clicked.connect(self.selectInput)
        vbox.addWidget(self.treeView)
        # output area
        l_output = QLabel("Output path:")
        vbox.addWidget(l_output)
        outputHBox = QHBoxLayout()
        self.tb_output = QLineEdit("")
        outputHBox.addWidget(self.tb_output)
        self.btt_output = QPushButton("Select")
        self.btt_output.clicked.connect(self.selectOutputDir_btt)
        outputHBox.addWidget(self.btt_output)
        vbox.addLayout(outputHBox)
        # settings area
        l_encoding = QLabel("Encoding settings:")
        vbox.addWidget(l_encoding)
        # Two radio buttons switch between preset and advanced option pages.
        # BUG FIX: the label now matches the slider's initial value (26);
        # the original said "CRF 28" while the slider started at 26.
        self.std_rb = QRadioButton("H.264 - CRF 26")
        self.std_rb.setChecked(True)
        self.std_rb.toggled.connect(self.changeStack)
        self.adv_rb = QRadioButton("Advanced - (Custom command line options)")
        self.adv_rb.toggled.connect(self.changeStack)
        radiobttHBox = QHBoxLayout()
        radiobttHBox.addWidget(self.std_rb)
        radiobttHBox.addWidget(self.adv_rb)
        vbox.addLayout(radiobttHBox)
        # simple settings (x264), with slider
        simple_sett_layout = QHBoxLayout()
        l_min = QLabel("High quality\n(Bigger size)")
        l_max = QLabel("Low quality\n(Smaller size)")
        self.slider = QSlider(Qt.Horizontal)
        # total CRF range is 0-51, but 18-28 is a sane range
        self.slider.setMinimum(18)  # visually lossless or nearly so
        self.slider.setMaximum(28)  # values above 28 give very poor quality
        self.slider.setValue(26)
        self.slider.setTickPosition(QSlider.TicksBelow)
        self.slider.setTickInterval(1)
        self.slider.valueChanged.connect(self.sliderValuechange)
        simple_sett_layout.addWidget(l_min)
        simple_sett_layout.addWidget(self.slider)
        simple_sett_layout.addWidget(l_max)
        # advanced settings (explicit ffmpeg command line options)
        adv_sett_layout = QHBoxLayout()
        self.tb_advsett = QLineEdit(self.FFMPEGSETTINGS_DEF)
        # (removed a no-op setEnabled(True) call flagged "useless" upstream)
        self.btt_restdef = QPushButton("Restore default")
        self.btt_restdef.clicked.connect(self.ffmpeg_btt)
        adv_sett_layout.addWidget(self.btt_restdef)
        adv_sett_layout.addWidget(self.tb_advsett)
        # encoding stack: page 0 = preset slider, page 1 = advanced textbox
        stack0 = QWidget()
        stack1 = QWidget()
        stack0.setLayout(simple_sett_layout)
        stack1.setLayout(adv_sett_layout)
        self.settingsStack = QStackedWidget()
        self.settingsStack.addWidget(stack0)
        self.settingsStack.addWidget(stack1)
        vbox.addWidget(self.settingsStack)
        # start button
        vbox.addStretch(1)
        startHbox = QHBoxLayout()
        self.btt_start = QPushButton("Start")
        self.btt_start.setStyleSheet("color: green")
        self.btt_start.clicked.connect(self.start_btt)
        self.btt_start.setFixedWidth(100)
        startHbox.addWidget(self.btt_start)
        vbox.addLayout(startHbox)
        self.setLayout(vbox)
        self.resize(600, 400)
        self.setWindowTitle("VideoCompress")

    def sliderValuechange(self):
        """Keep the preset radio-button label in sync with the slider value."""
        value = self.slider.value()
        self.std_rb.setText("H.264 - CRF " + str(value))

    def changeStack(self):
        """Show the settings page matching the selected radio button."""
        if self.adv_rb.isChecked():
            self.settingsStack.setCurrentIndex(1)
        else:
            self.settingsStack.setCurrentIndex(0)

    def selectInput(self, index):
        """Remember the path clicked in the tree view as the input path."""
        self.inputPath = os.path.normpath(self.model.filePath(index))

    def selectOutputDir_btt(self, window):
        """Ask for an output directory and show it in the output textbox."""
        t = QFileDialog.getExistingDirectory(None, "Output Folder", None)
        # BUG FIX: a cancelled dialog returns an empty string; normpath('')
        # would turn it into '.' and silently select the current directory.
        if t:
            self.tb_output.setText(os.path.normpath(t))

    def ffmpeg_btt(self, window):
        """Reset the advanced options textbox to the default ffmpeg options."""
        self.tb_advsett.setText(self.FFMPEGSETTINGS_DEF)

    def start_btt(self, window):
        """Validate the selected paths and start the conversion."""
        # BUG FIX: QLineEdit.text() returns '' (never None) when empty, so
        # the original `== None` check could never detect a missing output
        # path. Also fixed the "ipunt" typo in the error message.
        output_path = self.tb_output.text()
        if self.inputPath is None or not output_path or self.inputPath == output_path:
            msg = QMessageBox()
            msg.setIcon(QMessageBox.Critical)
            msg.setWindowTitle("Error")
            msg.setText("Please verify input and output path.")
            msg.setStandardButtons(QMessageBox.Ok)
            msg.exec_()
            return
        # Preset page selected: build the ffmpeg options from the CRF slider.
        if self.settingsStack.currentIndex() == 0:
            ffmpegOpt = " -c:v libx264 -crf " + str(self.slider.value())
        # Advanced page selected: use the free-form options as typed.
        else:
            ffmpegOpt = self.tb_advsett.text()
        performer = TaskPerformer(self.inputPath, output_path, ffmpegOpt)
        performer.start_conversion()
if __name__ == "__main__":
    # Launch the Qt application and hand control to its event loop.
    application = QApplication(sys.argv)
    main_window = MainWindow()
    main_window.show()
    sys.exit(application.exec_())
tests/test_utils.py | abersheeran/baize | 36 | 6622516 | from baize.utils import cached_property
def test_cached_property():
    """Check how baize's cached_property descriptor behaves on class access."""
    class T:
        @cached_property
        def li(self):
            return object()
    # Class-level access exposes the wrapped function's name and is not
    # callable (i.e. it does not behave like a plain unbound method).
    assert T.li.__name__ == "li"
    assert not callable(T.li)
| from baize.utils import cached_property
def test_cached_property():
    """Behavioral check of baize's cached_property descriptor."""
    class Holder:
        @cached_property
        def li(self):
            return object()

    descriptor = Holder.li
    # Class-level access must not yield a callable and must keep the
    # wrapped function's metadata.
    assert not callable(descriptor)
    assert descriptor.__name__ == "li"
| none | 1 | 2.508597 | 3 | |
tests/utils.py | xqliu/incubator-superset | 0 | 6622517 | # -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import json
from os import path
from superset import db
from superset.models import core as models
FIXTURES_DIR = 'tests/fixtures'


def load_fixture(fixture_file_name):
    """Return the parsed JSON content of a fixture file under FIXTURES_DIR."""
    fixture_path = path.join(FIXTURES_DIR, fixture_file_name)
    # NOTE(review): relies on the platform default text encoding; fixtures
    # are assumed to be ASCII/UTF-8 - confirm if non-ASCII fixtures appear.
    with open(fixture_path) as fixture_file:
        return json.load(fixture_file)
def get_main_database(session):
    """Return the Database record named 'main', or None if it does not exist.

    BUG FIX: the ``session`` argument was accepted but silently ignored in
    favour of the global ``db.session``; the passed-in session is now used,
    which is backward compatible for callers that pass ``db.session``.
    """
    return (
        session.query(models.Database)
        .filter_by(database_name='main')
        .first()
    )
| # -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import json
from os import path
from superset import db
from superset.models import core as models
# Directory (relative to the repository root) containing JSON test fixtures.
FIXTURES_DIR = 'tests/fixtures'
def load_fixture(fixture_file_name):
    """Read and deserialize the JSON fixture *fixture_file_name* from FIXTURES_DIR."""
    with open(path.join(FIXTURES_DIR, fixture_file_name)) as fixture_file:
        return json.load(fixture_file)
def get_main_database(session):
    """Return the Database metadata record named 'main' (or None).

    NOTE(review): the ``session`` parameter is accepted but unused; the
    query runs on the global ``db.session`` instead - confirm whether any
    caller relies on passing a different session.
    """
    return (
        db.session.query(models.Database)
        .filter_by(database_name='main')
        .first()
    )
| en | 0.769321 | # -*- coding: utf-8 -*- | 2.022279 | 2 |
cincoconfig/version.py | ameily/cincoconfig | 6 | 6622518 | <gh_stars>1-10
#
# Copyright (C) 2021 <NAME>
#
# This file is subject to the terms and conditions defined in the file 'LICENSE', which is part of
# this source code package.
#
'''
Cincoconfig version.
'''
__version__ = '0.8.0'  # single source of truth for the package version (PEP 440 format)
| #
# Copyright (C) 2021 <NAME>
#
# This file is subject to the terms and conditions defined in the file 'LICENSE', which is part of
# this source code package.
#
'''
Cincoconfig version.
'''
__version__ = '0.8.0' | en | 0.897181 | # # Copyright (C) 2021 <NAME> # # This file is subject to the terms and conditions defined in the file 'LICENSE', which is part of # this source code package. # Cincoconfig version. | 0.927164 | 1 |
litex_boards/platforms/rz_easyfpga.py | danielgusvt/litex-boards | 0 | 6622519 | #
# This file is part of LiteX-Boards.
#
# Copyright (c) 2021 <NAME> <<EMAIL>>
# Copyright (c) 2022 <NAME> <<EMAIL>>
# SPDX-License-Identifier: BSD-2-Clause
from litex.build.generic_platform import *
from litex.build.altera import AlteraPlatform
from litex.build.altera.programmer import USBBlaster
# IOs ----------------------------------------------------------------------------------------------
# Board pinout table consumed by AlteraPlatform; every bank runs 3.3-V LVTTL.
_io = [
    # Clk / Rst
    ("clk50", 0, Pins("23"), IOStandard("3.3-V LVTTL")),
    ("rst_n", 0, Pins("25"), IOStandard("3.3-V LVTTL")),
    # Leds
    ("user_led", 0, Pins("84"), IOStandard("3.3-V LVTTL")),
    ("user_led", 1, Pins("85"), IOStandard("3.3-V LVTTL")),
    ("user_led", 2, Pins("86"), IOStandard("3.3-V LVTTL")),
    ("user_led", 3, Pins("87"), IOStandard("3.3-V LVTTL")),
    # DIP switches shared with buttons
    ("key", 0, Pins("88"), IOStandard("3.3-V LVTTL")),
    ("key", 1, Pins("89"), IOStandard("3.3-V LVTTL")),
    ("key", 2, Pins("90"), IOStandard("3.3-V LVTTL")),
    ("key", 3, Pins("91"), IOStandard("3.3-V LVTTL")),
    # Passive buzzer
    ("buzzer", 0, Pins("110"), IOStandard("3.3-V LVTTL")),
    # LM75A temperature sensor (I2C)
    ("temp_i2c", 0,
        Subsignal("scl", Pins("112")),
        Subsignal("sda", Pins("113")),
        IOStandard("3.3-V LVTTL")
    ),
    # AT24C08 EEPROM (I2C)
    ("eeprom_i2c", 0,
        Subsignal("scl", Pins("99")),
        Subsignal("sda", Pins("98")),
        IOStandard("3.3-V LVTTL")
    ),
    # W25Q16JV SPI Flash, IO2 and IO3 pins are hardwired to 3v3 so we cannot use QSPI mode.
    ("spiflash", 0,
        Subsignal("cs_n", Pins("8")),
        Subsignal("clk", Pins("12")),
        Subsignal("mosi", Pins("6")),
        Subsignal("miso", Pins("13")),
        IOStandard("3.3-V LVTTL"),
    ),
    # Serial - using DB9 connector through SP3232EEN
    # NOTE(review): serial rx uses pin 119, which is also assigned to the
    # PS/2 clk below - the two peripherals share a pin, so only one can be
    # requested in a given design (confirm against the board schematic).
    ("serial", 0,
        Subsignal("tx", Pins("114")),
        Subsignal("rx", Pins("119")),
        IOStandard("3.3-V LVTTL")
    ),
    # VGA
    ("vga", 0,
        Subsignal("hsync", Pins("101")),
        Subsignal("vsync", Pins("103")),
        Subsignal("r", Pins("106")),
        Subsignal("g", Pins("105")),
        Subsignal("b", Pins("104")),
        IOStandard("3.3-V LVTTL"),
    ),
    # LCD display (HD44780 compatible)
    ("lcd_display", 0,
        Subsignal("data", Pins("142 1 144 3 2 10 7 11")),
        Subsignal("rs", Pins("141")),
        Subsignal("rw", Pins("138")),
        Subsignal("e", Pins("143")),
        # Board provides 5V to display power pins, but it works fine with 3V3 on logic pins.
        IOStandard("3.3-V LVTTL")
    ),
    # 7 segment display
    ("segled", 0,
        Subsignal("ca", Pins("128")),
        Subsignal("cb", Pins("121")),
        Subsignal("cc", Pins("125")),
        Subsignal("cd", Pins("129")),
        Subsignal("ce", Pins("132")),
        Subsignal("cf", Pins("126")),
        Subsignal("cg", Pins("124")),
        Subsignal("dp", Pins("127")),
        Subsignal("digits", Pins("133 135 136 137")),
        IOStandard("3.3-V LVTTL")
    ),
    # PS/2
    ("ps2", 0,
        Subsignal("clk", Pins("119")),
        Subsignal("data", Pins("120")),
        IOStandard("3.3-V LVTTL")
    ),
    # IrDA receiver
    ("irda", 0,
        Subsignal("rxd", Pins("100")),
        IOStandard("3.3-V LVTTL")
    ),
    # GPIO
    # There are only 2 free gpio pins, the rest of the pins in the headers
    # are shared with the other peripherals
    ("gpio", 0, Pins(
        "24 111"),
        IOStandard("3.3-V LVTTL")
    ),
    # Hynix HY57V641620FTP-7 SDRAM
    ("sdram_clock", 0, Pins("43"), IOStandard("3.3-V LVTTL")),
    ("sdram", 0,
        Subsignal("a", Pins(
            "76 77 80 83 68 67 66 65",
            "64 60 75 59")),
        Subsignal("ba", Pins("73 74")),
        Subsignal("cs_n", Pins("72")),
        Subsignal("cke", Pins("58")),
        Subsignal("ras_n", Pins("71")),
        Subsignal("cas_n", Pins("70")),
        Subsignal("we_n", Pins("69")),
        Subsignal("dq", Pins(
            "28 30 31 32 33 34 38 39",
            "54 53 52 51 50 49 46 44")),
        Subsignal("dm", Pins("42 55")),
        IOStandard("3.3-V LVTTL")
    ),
]
# Platform -----------------------------------------------------------------------------------------
class Platform(AlteraPlatform):
    """LiteX platform definition for the RZ-EasyFPGA (Altera EP4CE6E22C8)."""
    # 50 MHz on-board oscillator drives the default system clock.
    default_clk_name   = "clk50"
    default_clk_period = 1e9/50e6
    def __init__(self, toolchain="quartus"):
        AlteraPlatform.__init__(self, "EP4CE6E22C8", _io, toolchain=toolchain)
        # Quartus global assignments: select the device family and reclaim
        # the dual-purpose configuration pins as regular user I/O.
        self.add_platform_command("set_global_assignment -name FAMILY \"Cyclone IV E\"")
        self.add_platform_command("set_global_assignment -name RESERVE_DATA0_AFTER_CONFIGURATION \"USE AS REGULAR IO\"")
        self.add_platform_command("set_global_assignment -name RESERVE_DATA1_AFTER_CONFIGURATION \"USE AS REGULAR IO\"")
        self.add_platform_command("set_global_assignment -name RESERVE_DCLK_AFTER_CONFIGURATION \"USE AS REGULAR IO\"")
        self.add_platform_command("set_global_assignment -name CYCLONEII_RESERVE_NCEO_AFTER_CONFIGURATION \"USE AS REGULAR IO\"")
        self.add_platform_command("set_global_assignment -name RESERVE_FLASH_NCE_AFTER_CONFIGURATION \"USE AS REGULAR IO\"")
        self.add_platform_command("set_global_assignment -name ENABLE_BOOT_SEL_PIN ON")
        self.add_platform_command("set_global_assignment -name ENABLE_CONFIGURATION_PINS ON")
    def create_programmer(self):
        # The board is programmed over JTAG with a USB-Blaster (clone).
        return USBBlaster()
    def do_finalize(self, fragment):
        AlteraPlatform.do_finalize(self, fragment)
        # Timing constraint for the 50 MHz input clock.
        self.add_period_constraint(self.lookup_request("clk50", loose=True), 1e9/50e6)
| #
# This file is part of LiteX-Boards.
#
# Copyright (c) 2021 <NAME> <<EMAIL>>
# Copyright (c) 2022 <NAME> <<EMAIL>>
# SPDX-License-Identifier: BSD-2-Clause
from litex.build.generic_platform import *
from litex.build.altera import AlteraPlatform
from litex.build.altera.programmer import USBBlaster
# IOs ----------------------------------------------------------------------------------------------
# Board pinout table consumed by AlteraPlatform; every bank runs 3.3-V LVTTL.
_io = [
    # Clk / Rst
    ("clk50", 0, Pins("23"), IOStandard("3.3-V LVTTL")),
    ("rst_n", 0, Pins("25"), IOStandard("3.3-V LVTTL")),
    # Leds
    ("user_led", 0, Pins("84"), IOStandard("3.3-V LVTTL")),
    ("user_led", 1, Pins("85"), IOStandard("3.3-V LVTTL")),
    ("user_led", 2, Pins("86"), IOStandard("3.3-V LVTTL")),
    ("user_led", 3, Pins("87"), IOStandard("3.3-V LVTTL")),
    # DIP switches shared with buttons
    ("key", 0, Pins("88"), IOStandard("3.3-V LVTTL")),
    ("key", 1, Pins("89"), IOStandard("3.3-V LVTTL")),
    ("key", 2, Pins("90"), IOStandard("3.3-V LVTTL")),
    ("key", 3, Pins("91"), IOStandard("3.3-V LVTTL")),
    # Passive buzzer
    ("buzzer", 0, Pins("110"), IOStandard("3.3-V LVTTL")),
    # LM75A temperature sensor (I2C)
    ("temp_i2c", 0,
        Subsignal("scl", Pins("112")),
        Subsignal("sda", Pins("113")),
        IOStandard("3.3-V LVTTL")
    ),
    # AT24C08 EEPROM (I2C)
    ("eeprom_i2c", 0,
        Subsignal("scl", Pins("99")),
        Subsignal("sda", Pins("98")),
        IOStandard("3.3-V LVTTL")
    ),
    # W25Q16JV SPI Flash, IO2 and IO3 pins are hardwired to 3v3 so we cannot use QSPI mode.
    ("spiflash", 0,
        Subsignal("cs_n", Pins("8")),
        Subsignal("clk", Pins("12")),
        Subsignal("mosi", Pins("6")),
        Subsignal("miso", Pins("13")),
        IOStandard("3.3-V LVTTL"),
    ),
    # Serial - using DB9 connector through SP3232EEN
    # NOTE(review): serial rx uses pin 119, which is also assigned to the
    # PS/2 clk below - the two peripherals share a pin, so only one can be
    # requested in a given design (confirm against the board schematic).
    ("serial", 0,
        Subsignal("tx", Pins("114")),
        Subsignal("rx", Pins("119")),
        IOStandard("3.3-V LVTTL")
    ),
    # VGA
    ("vga", 0,
        Subsignal("hsync", Pins("101")),
        Subsignal("vsync", Pins("103")),
        Subsignal("r", Pins("106")),
        Subsignal("g", Pins("105")),
        Subsignal("b", Pins("104")),
        IOStandard("3.3-V LVTTL"),
    ),
    # LCD display (HD44780 compatible)
    ("lcd_display", 0,
        Subsignal("data", Pins("142 1 144 3 2 10 7 11")),
        Subsignal("rs", Pins("141")),
        Subsignal("rw", Pins("138")),
        Subsignal("e", Pins("143")),
        # Board provides 5V to display power pins, but it works fine with 3V3 on logic pins.
        IOStandard("3.3-V LVTTL")
    ),
    # 7 segment display
    ("segled", 0,
        Subsignal("ca", Pins("128")),
        Subsignal("cb", Pins("121")),
        Subsignal("cc", Pins("125")),
        Subsignal("cd", Pins("129")),
        Subsignal("ce", Pins("132")),
        Subsignal("cf", Pins("126")),
        Subsignal("cg", Pins("124")),
        Subsignal("dp", Pins("127")),
        Subsignal("digits", Pins("133 135 136 137")),
        IOStandard("3.3-V LVTTL")
    ),
    # PS/2
    ("ps2", 0,
        Subsignal("clk", Pins("119")),
        Subsignal("data", Pins("120")),
        IOStandard("3.3-V LVTTL")
    ),
    # IrDA receiver
    ("irda", 0,
        Subsignal("rxd", Pins("100")),
        IOStandard("3.3-V LVTTL")
    ),
    # GPIO
    # There are only 2 free gpio pins, the rest of the pins in the headers
    # are shared with the other peripherals
    ("gpio", 0, Pins(
        "24 111"),
        IOStandard("3.3-V LVTTL")
    ),
    # Hynix HY57V641620FTP-7 SDRAM
    ("sdram_clock", 0, Pins("43"), IOStandard("3.3-V LVTTL")),
    ("sdram", 0,
        Subsignal("a", Pins(
            "76 77 80 83 68 67 66 65",
            "64 60 75 59")),
        Subsignal("ba", Pins("73 74")),
        Subsignal("cs_n", Pins("72")),
        Subsignal("cke", Pins("58")),
        Subsignal("ras_n", Pins("71")),
        Subsignal("cas_n", Pins("70")),
        Subsignal("we_n", Pins("69")),
        Subsignal("dq", Pins(
            "28 30 31 32 33 34 38 39",
            "54 53 52 51 50 49 46 44")),
        Subsignal("dm", Pins("42 55")),
        IOStandard("3.3-V LVTTL")
    ),
]
# Platform -----------------------------------------------------------------------------------------
class Platform(AlteraPlatform):
    """RZ-EasyFPGA platform (Altera Cyclone IV E, EP4CE6E22C8)."""
    default_clk_name   = "clk50"
    default_clk_period = 1e9/50e6

    # Quartus global assignments issued once at platform construction time,
    # in the same order the original code emitted them.
    _QUARTUS_GLOBAL_ASSIGNMENTS = (
        "set_global_assignment -name FAMILY \"Cyclone IV E\"",
        "set_global_assignment -name RESERVE_DATA0_AFTER_CONFIGURATION \"USE AS REGULAR IO\"",
        "set_global_assignment -name RESERVE_DATA1_AFTER_CONFIGURATION \"USE AS REGULAR IO\"",
        "set_global_assignment -name RESERVE_DCLK_AFTER_CONFIGURATION \"USE AS REGULAR IO\"",
        "set_global_assignment -name CYCLONEII_RESERVE_NCEO_AFTER_CONFIGURATION \"USE AS REGULAR IO\"",
        "set_global_assignment -name RESERVE_FLASH_NCE_AFTER_CONFIGURATION \"USE AS REGULAR IO\"",
        "set_global_assignment -name ENABLE_BOOT_SEL_PIN ON",
        "set_global_assignment -name ENABLE_CONFIGURATION_PINS ON",
    )

    def __init__(self, toolchain="quartus"):
        AlteraPlatform.__init__(self, "EP4CE6E22C8", _io, toolchain=toolchain)
        for command in self._QUARTUS_GLOBAL_ASSIGNMENTS:
            self.add_platform_command(command)

    def create_programmer(self):
        # The board is programmed over JTAG with a USB-Blaster (clone).
        return USBBlaster()

    def do_finalize(self, fragment):
        AlteraPlatform.do_finalize(self, fragment)
        # Timing constraint for the 50 MHz input clock.
        self.add_period_constraint(self.lookup_request("clk50", loose=True), 1e9/50e6)
| en | 0.630356 | # # This file is part of LiteX-Boards. # # Copyright (c) 2021 <NAME> <<EMAIL>> # Copyright (c) 2022 <NAME> <<EMAIL>> # SPDX-License-Identifier: BSD-2-Clause # IOs ---------------------------------------------------------------------------------------------- # Clk / Rst # Leds # DIP switches shared with buttons # Passive buzzer # LM75A temperature sensor (I2C) # AT24C08 EEPROM (I2C) # W25Q16JV SPI Flash, IO2 and IO3 pins are hardwired to 3v3 so we cannot use QSPI mode. # Serial - using DB9 connector through SP3232EEN # VGA # LCD display (HD44780 compatible) # Board provides 5V to display power pins, but it works fine with 3V3 on logic pins. # 7 segment display # PS/2 # IrDA receiver # GPIO # There are only 2 free gpio pins, the rest of the pins in the headers # are shared with the other peripherals # Hynix HY57V641620FTP-7 SDRAM # Platform ----------------------------------------------------------------------------------------- | 1.341279 | 1 |
pyrpiic/sensor/ldc161x.py | Samtec-ASH/pyrpiic | 0 | 6622520 | <filename>pyrpiic/sensor/ldc161x.py
from typing import Tuple
from pyrpiic.sensor.ldc1x1y import LDC1X1Y
class LDC161X(LDC1X1Y):
    ''' TI 28-bit LDC1612/LDC1614 inductive sensor API. '''
    def __init__(self, bus, address=0x2A):
        # Manufacturing ID: 0x5449
        # Device ID: 0x3055
        super().__init__(bus, address=address)
    def get_channel_data(self, ch: int) -> Tuple[int, int]:
        ''' Get channel data and error flags.
        Args:
            ch: Conversion channel index.
        Returns:
            int: Channel computed conversion value (28-bit)
            int: Error code
                0x8: Under range error bit
                0x4: Over range error bit
                0x2: Watchdog timeout error bit
                0x1: Amplitude error bit
        '''
        ch_msb = self.get_register(self.LDC1X1Y_DATA_BASE + 2*ch)
        ch_lsb = self.get_register(self.LDC1X1Y_DATA_BASE + 2*ch + 1)
        # Combine the 12 data bits of the MSB word with the 16-bit LSB word.
        # BUG FIX: the original used '&' here, which always produced 0 since
        # the shifted MSB and the LSB occupy disjoint bit positions; the two
        # halves must be OR-ed together.
        value = ((0x0FFF & ch_msb) << 16) | ch_lsb
        # Error flags live in the top 4 bits of the MSB data register.
        err_code = (ch_msb & 0xF000) >> 12
        return value, err_code
| <filename>pyrpiic/sensor/ldc161x.py
from typing import Tuple
from pyrpiic.sensor.ldc1x1y import LDC1X1Y
class LDC161X(LDC1X1Y):
    ''' TI 28-bit LDC1612/LDC1614 inductive sensor API. '''
    def __init__(self, bus, address=0x2A):
        # Manufacturing ID: 0x5449
        # Device ID: 0x3055
        super().__init__(bus, address=address)
    def get_channel_data(self, ch: int) -> Tuple[int, int]:
        ''' Get channel data and error flags.
        Args:
            ch: Conversion channel index.
        Returns:
            int: Channel computed conversion value (28-bit)
            int: Error code
                0x8: Under range error bit
                0x4: Over range error bit
                0x2: Watchdog timeout error bit
                0x1: Amplitude error bit
        '''
        ch_msb = self.get_register(self.LDC1X1Y_DATA_BASE + 2*ch)
        ch_lsb = self.get_register(self.LDC1X1Y_DATA_BASE + 2*ch + 1)
        # Combine the 12 data bits of the MSB word with the 16-bit LSB word.
        # BUG FIX: the original used '&' here, which always produced 0 since
        # the shifted MSB and the LSB occupy disjoint bit positions; the two
        # halves must be OR-ed together.
        value = ((0x0FFF & ch_msb) << 16) | ch_lsb
        # Error flags live in the top 4 bits of the MSB data register.
        err_code = (ch_msb & 0xF000) >> 12
        return value, err_code
| en | 0.54831 | TI 28-bit LDC1612/LDC1614 inductive sensor API. # Manufacturing ID: 0x5449 # Device ID: 0x3055 Get channel data and error flags. Returns: int: Channel computed conversion value int: Error code 0x8: Under range error bit 0x4: Over range error bit 0x2: Watchdog timeout error bit 0x1: Amplitude error bit | 2.568205 | 3 |
discussion/pathfinder/pathfinder_test.py | jakee417/probability-1 | 3,670 | 6622521 | # Copyright 2021 The TensorFlow Probability Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import jax
import jax.numpy as jnp
import numpy as np
import pytest
from discussion.pathfinder import pathfinder
import tensorflow_probability.substrates.jax as tfp
tfd = tfp.distributions
@pytest.mark.parametrize("dim, history", [(12, 2), (12, 0), (4, 4)])
def test_bfgs_inverse_hessian(dim, history):
  """Inverse-Hessian factors must have shapes (dim,), (dim, 2h), (2h, 2h)."""
  s_updates = jnp.ones((dim, history))
  y_updates = jnp.ones((dim, history))
  alpha, beta, gamma = pathfinder.bfgs_inverse_hessian(
      updates_of_position_differences=s_updates,
      updates_of_gradient_differences=y_updates)
  assert (alpha.shape, beta.shape, gamma.shape) == (
      (dim,), (dim, 2 * history), (2 * history, 2 * history))
def test_cov_estimate():
  """cov_estimate over a 10-point optimization path yields nine estimates."""
  path = np.random.randn(10, 2)
  path_grads = -2 * path  # gradients for the x^2 + y^2 target
  estimates = pathfinder.cov_estimate(
      optimization_path=path,
      optimization_path_grads=path_grads,
      history=3)
  assert len(estimates) == 9
def test_lbfgs():
  """L-BFGS on an isotropic normal should finish exactly at its mean."""
  log_prob = tfd.Independent(
      tfd.Normal(jnp.array([1., 2.]), jnp.ones(2)),
      reinterpreted_batch_ndims=1).log_prob
  path, _ = pathfinder.lbfgs(
      log_target_density=log_prob,
      initial_value=jnp.array([-1.0, -4.0]),
      max_iters=10)
  assert jnp.all(path[-1] == jnp.array([1.0, 2.0]))
def test_bfgs_sample():
  """bfgs_sample returns (num_draws, dim) draws and num_draws log-probs."""
  dim, history = 12, 2
  s_updates = jnp.ones((dim, history))
  y_updates = jnp.ones((dim, history))
  (diagonal_estimate, thin_factors,
   scaling_outer_product) = pathfinder.bfgs_inverse_hessian(
       updates_of_position_differences=s_updates,
       updates_of_gradient_differences=y_updates)
  # Gradient of the log-density of an (unnormalized) standard normal.
  grad_log_density = jax.grad(lambda x: jnp.sum(-x * x))
  value = jnp.ones(dim)
  num_draws = 5
  draws, probs = pathfinder.bfgs_sample(
      value=value,
      grad_density=grad_log_density(value),
      diagonal_estimate=diagonal_estimate,
      thin_factors=thin_factors,
      scaling_outer_product=scaling_outer_product,
      num_draws=num_draws,
      key=jax.random.PRNGKey(0),
  )
  assert draws.shape == (num_draws, dim)
  assert probs.shape[0] == num_draws
def test_pathfinder():
  """Pathfinder's sample mean should land near the Student-t location."""
  log_prob = tfd.Independent(
      tfd.StudentT(3., loc=jnp.array([1., 2]), scale=jnp.array([1., 1.])),
      reinterpreted_batch_ndims=1).log_prob
  draws, _ = pathfinder.pathfinder(
      target_density=log_prob,
      initial_value=jnp.zeros(2),
      lbfgs_max_iters=20,
      num_draws=1000,
      key=jax.random.PRNGKey(3),
  )
  sample_mean = jnp.mean(draws, axis=0)
  assert jnp.linalg.norm(sample_mean - jnp.array([1., 2.]), ord=2) < 0.1
def test_multipath_pathfinder():
  """Multi-path pathfinder returns the requested number of draws."""
  log_prob = tfd.Independent(
      tfd.StudentT(2., loc=jnp.array([1., 2]), scale=jnp.array([1., 1.])),
      reinterpreted_batch_ndims=1).log_prob
  key, init_key = jax.random.split(jax.random.PRNGKey(3))
  initial_values = jax.random.normal(init_key, (5, 2))
  draws = pathfinder.multipath_pathfinder(
      target_density=log_prob,
      initial_values=initial_values,
      key=key,
      lbfgs_max_iters=20,
      num_pathfinder_draws=10,
      num_draws=10,
  )
  assert draws.shape == (10, 2)
if __name__ == "__main__":
  # Allow running this test module directly, outside a pytest invocation.
  pytest.main([__file__])
| # Copyright 2021 The TensorFlow Probability Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import jax
import jax.numpy as jnp
import numpy as np
import pytest
from discussion.pathfinder import pathfinder
import tensorflow_probability.substrates.jax as tfp
tfd = tfp.distributions
@pytest.mark.parametrize("dim, history", [(12, 2), (12, 0), (4, 4)])
def test_bfgs_inverse_hessian(dim, history):
  """Inverse-Hessian factors must have shapes (dim,), (dim, 2h), (2h, 2h)."""
  position_diffs = jnp.ones((dim, history))
  gradient_diffs = jnp.ones((dim, history))
  alpha, beta, gamma = pathfinder.bfgs_inverse_hessian(
      updates_of_position_differences=position_diffs,
      updates_of_gradient_differences=gradient_diffs)
  assert alpha.shape == (dim,)
  assert beta.shape == (dim, 2 * history)
  assert gamma.shape == (2 * history, 2 * history)
def test_cov_estimate():
  """cov_estimate over a 10-point optimization path yields nine estimates."""
  positions = np.random.randn(10, 2)
  gradients = -2 * positions  # x^2 + y^2
  ret = pathfinder.cov_estimate(
      optimization_path=positions, optimization_path_grads=gradients, history=3)
  assert len(ret) == 9
def test_lbfgs():
  """L-BFGS on an isotropic normal should finish exactly at its mean."""
  target = tfd.Independent(
      tfd.Normal(jnp.array([1., 2.]), jnp.ones(2)),
      reinterpreted_batch_ndims=1).log_prob
  optimization_path, _ = pathfinder.lbfgs(
      log_target_density=target,
      initial_value=jnp.array([-1.0, -4.0]),
      max_iters=10)
  assert jnp.all(optimization_path[-1] == jnp.array([1.0, 2.0]))
def test_bfgs_sample():
  """bfgs_sample returns (num_draws, dim) draws and num_draws log-probs."""
  dim, history = 12, 2
  position_diffs = jnp.ones((dim, history))
  gradient_diffs = jnp.ones((dim, history))
  diagonal_estimate, thin_factors, scaling_outer_product = pathfinder.bfgs_inverse_hessian(
      updates_of_position_differences=position_diffs,
      updates_of_gradient_differences=gradient_diffs)
  # Log-density of an (unnormalized) standard normal and its gradient.
  log_density = lambda x: jnp.sum(-x * x)
  grad_log_density = jax.grad(log_density)
  value = jnp.ones(dim)
  key = jax.random.PRNGKey(0)
  num_draws = 5
  draws, probs = pathfinder.bfgs_sample(
      value=value,
      grad_density=grad_log_density(value),
      diagonal_estimate=diagonal_estimate,
      thin_factors=thin_factors,
      scaling_outer_product=scaling_outer_product,
      num_draws=num_draws,
      key=key,
  )
  assert draws.shape[0] == num_draws
  assert draws.shape[1] == dim
  assert probs.shape[0] == num_draws
def test_pathfinder():
  """Pathfinder's sample mean should land near the Student-t location."""
  target = tfd.Independent(
      tfd.StudentT(3., loc=jnp.array([1., 2]), scale=jnp.array([1., 1.])),
      reinterpreted_batch_ndims=1).log_prob
  key = jax.random.PRNGKey(3)
  init = jnp.zeros(2)
  draws, _ = pathfinder.pathfinder(
      target_density=target,
      initial_value=init,
      lbfgs_max_iters=20,
      num_draws=1000,
      key=key,
  )
  # Euclidean distance of the sample mean from the true location.
  assert (jnp.linalg.norm(
      jnp.mean(draws, axis=0) - jnp.array([1., 2.]), ord=2) < 0.1)
def test_multipath_pathfinder():
  """Multi-path pathfinder returns the requested number of draws."""
  target = tfd.Independent(
      tfd.StudentT(2., loc=jnp.array([1., 2]), scale=jnp.array([1., 1.])),
      reinterpreted_batch_ndims=1).log_prob
  key, init_key = jax.random.split(jax.random.PRNGKey(3))
  # Five independent starting points, one per pathfinder run.
  init = jax.random.normal(init_key, (5, 2))
  draws = pathfinder.multipath_pathfinder(
      target_density=target,
      initial_values=init,
      key=key,
      lbfgs_max_iters=20,
      num_pathfinder_draws=10,
      num_draws=10,
  )
  assert draws.shape == (10, 2)
if __name__ == "__main__":
  # Allow running this test module directly, outside a pytest invocation.
  pytest.main([__file__])
| en | 0.807582 | # Copyright 2021 The TensorFlow Probability Authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================ # x^2 + y^2 | 1.677192 | 2 |
test/test_point_masses.py | richannan/read-GRACE-harmonics | 9 | 6622522 | <filename>test/test_point_masses.py
#!/usr/bin/env python
u"""
test_point_masses.py (02/2021)
"""
import pytest
import numpy as np
from gravity_toolkit.utilities import get_data_path
from gravity_toolkit.read_love_numbers import read_love_numbers
from gravity_toolkit.gen_point_load import gen_point_load
from gravity_toolkit.gen_stokes import gen_stokes
# parameterize the number of point masses
# NOTE(review): the parameter list is drawn at import time with no fixed
# seed, so each collection run tests a different (nondeterministic) NPTS.
@pytest.mark.parametrize("NPTS", np.random.randint(2,2000,size=1))
def test_point_masses(NPTS):
    """Harmonics of NPTS point loads must match harmonics of the same
    masses accumulated onto a 1-degree global grid, to float32 precision."""
    # create spatial grid
    dlon,dlat = (1.0,1.0)
    lat = np.arange(90.0 - dlat/2.0, -90.0 - dlat/2.0, -dlat)
    lon = np.arange(-180.0 + dlon/2.0, 180.0 + dlon/2.0, dlon)
    gridlon,gridlat = np.meshgrid(lon,lat)
    nlat,nlon = np.shape(gridlon)
    # parameterize point masses (locations snapped to grid cell centers)
    LAT = lat[0]-dlat*np.random.randint(0,nlat,size=NPTS)
    LON = lon[0]+dlon*np.random.randint(0,nlon,size=NPTS)
    MASS = 100.0 - 200.0*np.random.randn(NPTS)
    # create test gridded field
    data = np.zeros((nlat,nlon))
    for i in range(NPTS):
        indy,indx = np.nonzero((gridlat == LAT[i]) & (gridlon == LON[i]))
        data[indy,indx] += MASS[i]
    # path to load Love numbers file
    love_numbers_file = get_data_path(['data','love_numbers'])
    # read load Love numbers
    hl,kl,ll = read_love_numbers(love_numbers_file)
    # calculate harmonics and degree amplitudes for each case
    grid_Ylms = gen_stokes(data, lon, lat, LMAX=60, UNITS=2, LOVE=(hl,kl,ll))
    grid_Ylms.amplitude()
    point_Ylms = gen_point_load(MASS, LON, LAT, LMAX=60, UNITS=2, LOVE=(hl,kl,ll))
    point_Ylms.amplitude()
    # check that harmonic data is equal to machine precision
    difference_Ylms = grid_Ylms.copy()
    difference_Ylms.subtract(point_Ylms)
    harmonic_eps = np.finfo(np.float32).eps
    assert np.all(np.abs(difference_Ylms.clm) < harmonic_eps)
    # verify that the degree amplitudes are within tolerance
    assert np.all(np.abs(grid_Ylms.amp - point_Ylms.amp) < harmonic_eps)
| <filename>test/test_point_masses.py
#!/usr/bin/env python
u"""
test_point_masses.py (02/2021)
"""
import pytest
import numpy as np
from gravity_toolkit.utilities import get_data_path
from gravity_toolkit.read_love_numbers import read_love_numbers
from gravity_toolkit.gen_point_load import gen_point_load
from gravity_toolkit.gen_stokes import gen_stokes
# parameterize the number of point masses
@pytest.mark.parametrize("NPTS", np.random.randint(2,2000,size=1))
def test_point_masses(NPTS):
    """Point-load harmonics must match gridded harmonics of the same masses."""
    # 1-degree global grid with cell-centered latitudes and longitudes
    dlon, dlat = (1.0, 1.0)
    lat = np.arange(90.0 - dlat/2.0, -90.0 - dlat/2.0, -dlat)
    lon = np.arange(-180.0 + dlon/2.0, 180.0 + dlon/2.0, dlon)
    gridlon, gridlat = np.meshgrid(lon, lat)
    nlat, nlon = np.shape(gridlon)
    # random point-mass locations snapped to grid cell centers
    LAT = lat[0] - dlat*np.random.randint(0, nlat, size=NPTS)
    LON = lon[0] + dlon*np.random.randint(0, nlon, size=NPTS)
    MASS = 100.0 - 200.0*np.random.randn(NPTS)
    # accumulate the point masses onto the gridded field
    data = np.zeros((nlat, nlon))
    for point_lat, point_lon, point_mass in zip(LAT, LON, MASS):
        indy, indx = np.nonzero((gridlat == point_lat) & (gridlon == point_lon))
        data[indy, indx] += point_mass
    # read load Love numbers distributed with the package
    love_numbers_file = get_data_path(['data', 'love_numbers'])
    hl, kl, ll = read_love_numbers(love_numbers_file)
    LOVE = (hl, kl, ll)
    # spherical harmonics from the gridded field and from the point loads
    grid_Ylms = gen_stokes(data, lon, lat, LMAX=60, UNITS=2, LOVE=LOVE)
    grid_Ylms.amplitude()
    point_Ylms = gen_point_load(MASS, LON, LAT, LMAX=60, UNITS=2, LOVE=LOVE)
    point_Ylms.amplitude()
    # both harmonic sets should agree to single (float32) precision
    difference_Ylms = grid_Ylms.copy()
    difference_Ylms.subtract(point_Ylms)
    harmonic_eps = np.finfo(np.float32).eps
    assert np.all(np.abs(difference_Ylms.clm) < harmonic_eps)
    # degree amplitudes must agree to the same tolerance
    assert np.all(np.abs(grid_Ylms.amp - point_Ylms.amp) < harmonic_eps)
| en | 0.644198 | #!/usr/bin/env python test_point_masses.py (02/2021) # parameterize the number of point masses # create spatial grid # parameterize point masses # create test gridded field # path to load Love numbers file # read load Love numbers # calculate harmonics and degree amplitudes for each case # check that harmonic data is equal to machine precision # verify that the degree amplitudes are within tolerance | 2.464975 | 2 |
Data Structures and Algorithms/HackerRank Algo Solutions/EASY PROBLEMS/BeautifulTriplets.py | akkik04/Python-DataStructures-and-Algorithms | 1 | 6622523 | # BEAUTIFUL TRIPLETS HACKERRANK SOLUTION:
# creating a function to return the number of beautiful triplets.
def beautifulTriplets(d, arr):
# creating a variable to track the count.
count = 0
# creating a for-loop to iterate for the length of the array.
for i in range(len(arr)):
# creating a nested for-loop to iterate based on the first loop.
for j in range(i + 1, len(arr)):
# creating a nested if-statement to check if the condition is met between elements i and j.
if arr[j] - arr[i] == d:
# creating a nested for-loop to iterate based on the second loop.
for k in range(j + 1, len(arr)):
# creating a nested if-statement to check if the condition is met between elements j and k.
if arr[k] - arr[j] == d:
# code to increment the count variable if the condition is met.
count += 1
break
# returning the value of the count.
return count
# receiving input.
first_multiple_input = input().rstrip().split()
n = int(first_multiple_input[0])
d = int(first_multiple_input[1])
arr = list(map(int, input().rstrip().split()))
# code to print the final output, which indicates the number of beautiful triplets.
result = beautifulTriplets(d, arr)
print(result) | # BEAUTIFUL TRIPLETS HACKERRANK SOLUTION:
# creating a function to return the number of beautiful triplets.
def beautifulTriplets(d, arr):
# creating a variable to track the count.
count = 0
# creating a for-loop to iterate for the length of the array.
for i in range(len(arr)):
# creating a nested for-loop to iterate based on the first loop.
for j in range(i + 1, len(arr)):
# creating a nested if-statement to check if the condition is met between elements i and j.
if arr[j] - arr[i] == d:
# creating a nested for-loop to iterate based on the second loop.
for k in range(j + 1, len(arr)):
# creating a nested if-statement to check if the condition is met between elements j and k.
if arr[k] - arr[j] == d:
# code to increment the count variable if the condition is met.
count += 1
break
# returning the value of the count.
return count
# receiving input.
first_multiple_input = input().rstrip().split()
n = int(first_multiple_input[0])
d = int(first_multiple_input[1])
arr = list(map(int, input().rstrip().split()))
# code to print the final output, which indicates the number of beautiful triplets.
result = beautifulTriplets(d, arr)
print(result) | en | 0.845168 | # BEAUTIFUL TRIPLETS HACKERRANK SOLUTION: # creating a function to return the number of beautiful triplets. # creating a variable to track the count. # creating a for-loop to iterate for the length of the array. # creating a nested for-loop to iterate based on the first loop. # creating a nested if-statement to check if the condition is met between elements i and j. # creating a nested for-loop to iterate based on the second loop. # creating a nested if-statement to check if the condition is met between elements j and k. # code to increment the count variable if the condition is met. # returning the value of the count. # receiving input. # code to print the final output, which indicates the number of beautiful triplets. | 4.299064 | 4 |
bindings/examples/test.py | byungsook/FleX_PyBind11 | 0 | 6622524 | <reponame>byungsook/FleX_PyBind11
#Written by <NAME>. November 15, 2018.
import pyflex
from time import time
from random import random
import numpy as np
main_loop_quit = False
time_step = 100
dim_position = 4
dim_velocity = 3
pyflex.initialize()
bbox = [pyflex.get_scene_lower(), pyflex.get_scene_upper()]
d_size = pyflex.get_scene_upper()- pyflex.get_scene_lower()
n_particles = pyflex.get_n_particles()
print("Scene Lower:", bbox[0])
print("Scene Upper:", bbox[1])
print('domain size', d_size)
print("Num particles:", n_particles)
r = 0.05
rest_dist = r*0.65
dt = 1 / 60
positions = np.zeros((time_step, n_particles, dim_position))
velocities = np.zeros((time_step, n_particles, dim_velocity))
t = 0
while t < time_step: # main_loop_quit == False or
pyflex.update_frame()
orig = time()
# pos_array = np.zeros((1000, 3))
# for i in range(0, 1000):
# pos_array[i, 0] = pyflex.grab_x_pos_particle(i)
# pos_array[i, 0] = pyflex.grab_y_pos_particle(i)
# pos_array[i, 2] = pyflex.grab_z_pos_particle(i)
positions[t] = pyflex.get_positions().reshape(-1, dim_position)
velocities[t] = pyflex.get_velocities().reshape(-1, dim_velocity)
# px, py, pz = positions[t,...,0], positions[t,...,1], positions[t,...,2]
# vx, vy, vz = velocities[t,...,0], velocities[t,...,1], velocities[t,...,2]
# print('posmin', px.min(), py.min(), pz.min())
# print('posmax', px.max(), py.max(), pz.max())
# print('velmin', vx.min(), vy.min(), vz.min())
# print('velmax', vx.max(), vy.max(), vz.max())
t += 1
main_loop_quit = pyflex.sdl_main()
pyflex.destroy_scene()
positions = positions[...,:-1] # mass == 1
px, py, pz = positions[...,0], positions[...,1], positions[...,2]
vx, vy, vz = velocities[...,0], velocities[...,1], velocities[...,2]
print('posmin', px.min(), py.min(), pz.min())
print('posmax', px.max(), py.max(), pz.max())
print('velmin', vx.min(), vy.min(), vz.min())
print('velmax', vx.max(), vy.max(), vz.max())
lower = np.array([px.min(), py.min(), pz.min()])
positions -= lower[None,None,:]
d_size = np.array([px.max(), py.max(), pz.max()])
print('domain_size', d_size)
res_ = d_size / rest_dist
res = np.ceil(d_size / rest_dist).astype(np.int32)
for i in range(3):
if res[i] == res_[i]:
res[i] += 1
print('radius', r)
print('rest_distance', rest_dist)
print('res', res)
# convert to domain coordinate
positions /= rest_dist
velocities /= rest_dist
lower = [px.min(), py.min(), pz.min()]
upper = [px.max(), py.max(), pz.max()]
print(lower, upper, res)
print('velmin', vx.min(), vy.min(), vz.min())
print('velmax', vx.max(), vy.max(), vz.max())
r = np.stack([positions[0,0]]*positions.shape[1], axis=0)
r = np.sqrt(np.sum((r - positions[0])**2, axis=-1))
print('shortest dist from p0 to others', r[1:].min()) # smaller than or equal to 1
# file_path = 'dambreak.npz'
# np.savez_compressed(file_path,
# p=positions,
# v=velocities,
# res=res,
# )
# print(file_path, 'saved')
import open3d as o3d
from matplotlib import cm
def vis_vel(pt, vel, dt):
geom = []
# bounding box
px, py, pz = pt[...,0], pt[...,1], pt[...,2]
bbox = [
[px.min(), py.min(), pz.min()],
[px.max(), py.max(), pz.max()]
]
bp = []
for i in range(2):
for j in range(2):
for k in range(2):
bp.append([bbox[i][0],bbox[j][1],bbox[k][2]])
bl = [[0, 1], [0, 2], [1, 3], [2, 3], [4, 5], [4, 6], [5, 7], [6, 7],
[0, 4], [1, 5], [2, 6], [3, 7]]
bbox_line = o3d.geometry.LineSet()
bbox_line.points = o3d.utility.Vector3dVector(bp)
bbox_line.lines = o3d.utility.Vector2iVector(bl)
geom.append(bbox_line)
# gizmo
gizmo = o3d.geometry.TriangleMesh.create_coordinate_frame(
size=1, origin=[0, 0, 0])
geom.append(gizmo)
# velocity
line_set = o3d.geometry.LineSet()
geom.append(line_set)
# particles
pcd = o3d.geometry.PointCloud()
geom.append(pcd)
# pcd_idx = len(geom)
# for i in range(pt.shape[1]):
# mesh_sphere = o3d.geometry.TriangleMesh.create_sphere(radius=0.5, resolution=10)
# geom.append(mesh_sphere)
# if i == 20: break
# vis_vel.vol = None
vis_vel.t = 0
def loadframe(vis):
print('frame', vis_vel.t)
p, v = pt[vis_vel.t], vel[vis_vel.t]
pcd.points = o3d.utility.Vector3dVector(p)
# if vis_vel.vol is not None: vis.remove_geometry(vis_vel.vol)
# vis_vel.vol = o3d.geometry.VoxelGrid.create_from_point_cloud(pcd, 1)
# vis.add_geometry(vis_vel.vol)
# for i in range(p.shape[0]):
# geom[pcd_idx+i].translate(translation=p[i])
# geom[pcd_idx+i].compute_vertex_normals()
# if i == 20: break
p_ = p + v*dt
p = np.concatenate((p,p_), axis=0)
l0 = np.arange(v.shape[0])
l1 = np.arange(v.shape[0],2*v.shape[0])
l = np.stack((l0,l1), axis=-1)
c = np.sqrt(np.sum(v**2, axis=-1))
c /= c.max()
c = cm.Blues(1 - c)[...,:-1]
pcd.colors = o3d.utility.Vector3dVector(c)
# for i in range(c.shape[0]):
# geom[pcd_idx+i].paint_uniform_color(c[i])
# if i == 20: break
line_set.points = o3d.utility.Vector3dVector(p)
line_set.lines = o3d.utility.Vector2iVector(l)
line_set.colors = o3d.utility.Vector3dVector(c)
vis.update_geometry()
vis.poll_events()
vis.update_renderer()
cam = vis.get_view_control()
if cam is not None:
param = cam.convert_to_pinhole_camera_parameters()
print('intrisic', param.intrinsic.width, param.intrinsic.height)
print(param.intrinsic.intrinsic_matrix)
print('extrisic\n', param.extrinsic)
# p = pt[vis_vel.t]
# for i in range(p.shape[0]):
# geom[pcd_idx+i].translate(translation=-p[i])
# if i == 20: break
vis = o3d.visualization.Visualizer()
loadframe(vis) # for the first frame
def nextframe(vis):
# print('nextframe')
vis_vel.t += 1
if vis_vel.t == pt.shape[0]:
vis_vel.t = 0
loadframe(vis)
return False
def prevframe(vis):
# print('prevframe')
vis_vel.t -= 1
if vis_vel.t == -1:
vis_vel.t = pt.shape[0]-1
loadframe(vis)
return False
key_to_callback = {}
key_to_callback[ord(",")] = prevframe
key_to_callback[ord(".")] = nextframe
o3d.visualization.draw_geometries_with_key_callbacks(geom, key_to_callback)
vis_vel(positions, velocities, dt)
# print(main_loop_quit)
print("got here in python!!!")
| #Written by <NAME>. November 15, 2018.
import pyflex
from time import time
from random import random
import numpy as np
main_loop_quit = False
time_step = 100
dim_position = 4
dim_velocity = 3
pyflex.initialize()
bbox = [pyflex.get_scene_lower(), pyflex.get_scene_upper()]
d_size = pyflex.get_scene_upper()- pyflex.get_scene_lower()
n_particles = pyflex.get_n_particles()
print("Scene Lower:", bbox[0])
print("Scene Upper:", bbox[1])
print('domain size', d_size)
print("Num particles:", n_particles)
r = 0.05
rest_dist = r*0.65
dt = 1 / 60
positions = np.zeros((time_step, n_particles, dim_position))
velocities = np.zeros((time_step, n_particles, dim_velocity))
t = 0
while t < time_step: # main_loop_quit == False or
pyflex.update_frame()
orig = time()
# pos_array = np.zeros((1000, 3))
# for i in range(0, 1000):
# pos_array[i, 0] = pyflex.grab_x_pos_particle(i)
# pos_array[i, 0] = pyflex.grab_y_pos_particle(i)
# pos_array[i, 2] = pyflex.grab_z_pos_particle(i)
positions[t] = pyflex.get_positions().reshape(-1, dim_position)
velocities[t] = pyflex.get_velocities().reshape(-1, dim_velocity)
# px, py, pz = positions[t,...,0], positions[t,...,1], positions[t,...,2]
# vx, vy, vz = velocities[t,...,0], velocities[t,...,1], velocities[t,...,2]
# print('posmin', px.min(), py.min(), pz.min())
# print('posmax', px.max(), py.max(), pz.max())
# print('velmin', vx.min(), vy.min(), vz.min())
# print('velmax', vx.max(), vy.max(), vz.max())
t += 1
main_loop_quit = pyflex.sdl_main()
pyflex.destroy_scene()
positions = positions[...,:-1] # mass == 1
px, py, pz = positions[...,0], positions[...,1], positions[...,2]
vx, vy, vz = velocities[...,0], velocities[...,1], velocities[...,2]
print('posmin', px.min(), py.min(), pz.min())
print('posmax', px.max(), py.max(), pz.max())
print('velmin', vx.min(), vy.min(), vz.min())
print('velmax', vx.max(), vy.max(), vz.max())
lower = np.array([px.min(), py.min(), pz.min()])
positions -= lower[None,None,:]
d_size = np.array([px.max(), py.max(), pz.max()])
print('domain_size', d_size)
res_ = d_size / rest_dist
res = np.ceil(d_size / rest_dist).astype(np.int32)
for i in range(3):
if res[i] == res_[i]:
res[i] += 1
print('radius', r)
print('rest_distance', rest_dist)
print('res', res)
# convert to domain coordinate
positions /= rest_dist
velocities /= rest_dist
lower = [px.min(), py.min(), pz.min()]
upper = [px.max(), py.max(), pz.max()]
print(lower, upper, res)
print('velmin', vx.min(), vy.min(), vz.min())
print('velmax', vx.max(), vy.max(), vz.max())
r = np.stack([positions[0,0]]*positions.shape[1], axis=0)
r = np.sqrt(np.sum((r - positions[0])**2, axis=-1))
print('shortest dist from p0 to others', r[1:].min()) # smaller than or equal to 1
# file_path = 'dambreak.npz'
# np.savez_compressed(file_path,
# p=positions,
# v=velocities,
# res=res,
# )
# print(file_path, 'saved')
import open3d as o3d
from matplotlib import cm
def vis_vel(pt, vel, dt):
geom = []
# bounding box
px, py, pz = pt[...,0], pt[...,1], pt[...,2]
bbox = [
[px.min(), py.min(), pz.min()],
[px.max(), py.max(), pz.max()]
]
bp = []
for i in range(2):
for j in range(2):
for k in range(2):
bp.append([bbox[i][0],bbox[j][1],bbox[k][2]])
bl = [[0, 1], [0, 2], [1, 3], [2, 3], [4, 5], [4, 6], [5, 7], [6, 7],
[0, 4], [1, 5], [2, 6], [3, 7]]
bbox_line = o3d.geometry.LineSet()
bbox_line.points = o3d.utility.Vector3dVector(bp)
bbox_line.lines = o3d.utility.Vector2iVector(bl)
geom.append(bbox_line)
# gizmo
gizmo = o3d.geometry.TriangleMesh.create_coordinate_frame(
size=1, origin=[0, 0, 0])
geom.append(gizmo)
# velocity
line_set = o3d.geometry.LineSet()
geom.append(line_set)
# particles
pcd = o3d.geometry.PointCloud()
geom.append(pcd)
# pcd_idx = len(geom)
# for i in range(pt.shape[1]):
# mesh_sphere = o3d.geometry.TriangleMesh.create_sphere(radius=0.5, resolution=10)
# geom.append(mesh_sphere)
# if i == 20: break
# vis_vel.vol = None
vis_vel.t = 0
def loadframe(vis):
print('frame', vis_vel.t)
p, v = pt[vis_vel.t], vel[vis_vel.t]
pcd.points = o3d.utility.Vector3dVector(p)
# if vis_vel.vol is not None: vis.remove_geometry(vis_vel.vol)
# vis_vel.vol = o3d.geometry.VoxelGrid.create_from_point_cloud(pcd, 1)
# vis.add_geometry(vis_vel.vol)
# for i in range(p.shape[0]):
# geom[pcd_idx+i].translate(translation=p[i])
# geom[pcd_idx+i].compute_vertex_normals()
# if i == 20: break
p_ = p + v*dt
p = np.concatenate((p,p_), axis=0)
l0 = np.arange(v.shape[0])
l1 = np.arange(v.shape[0],2*v.shape[0])
l = np.stack((l0,l1), axis=-1)
c = np.sqrt(np.sum(v**2, axis=-1))
c /= c.max()
c = cm.Blues(1 - c)[...,:-1]
pcd.colors = o3d.utility.Vector3dVector(c)
# for i in range(c.shape[0]):
# geom[pcd_idx+i].paint_uniform_color(c[i])
# if i == 20: break
line_set.points = o3d.utility.Vector3dVector(p)
line_set.lines = o3d.utility.Vector2iVector(l)
line_set.colors = o3d.utility.Vector3dVector(c)
vis.update_geometry()
vis.poll_events()
vis.update_renderer()
cam = vis.get_view_control()
if cam is not None:
param = cam.convert_to_pinhole_camera_parameters()
print('intrisic', param.intrinsic.width, param.intrinsic.height)
print(param.intrinsic.intrinsic_matrix)
print('extrisic\n', param.extrinsic)
# p = pt[vis_vel.t]
# for i in range(p.shape[0]):
# geom[pcd_idx+i].translate(translation=-p[i])
# if i == 20: break
vis = o3d.visualization.Visualizer()
loadframe(vis) # for the first frame
def nextframe(vis):
# print('nextframe')
vis_vel.t += 1
if vis_vel.t == pt.shape[0]:
vis_vel.t = 0
loadframe(vis)
return False
def prevframe(vis):
# print('prevframe')
vis_vel.t -= 1
if vis_vel.t == -1:
vis_vel.t = pt.shape[0]-1
loadframe(vis)
return False
key_to_callback = {}
key_to_callback[ord(",")] = prevframe
key_to_callback[ord(".")] = nextframe
o3d.visualization.draw_geometries_with_key_callbacks(geom, key_to_callback)
vis_vel(positions, velocities, dt)
# print(main_loop_quit)
print("got here in python!!!") | en | 0.340054 | #Written by <NAME>. November 15, 2018. # main_loop_quit == False or # pos_array = np.zeros((1000, 3)) # for i in range(0, 1000): # pos_array[i, 0] = pyflex.grab_x_pos_particle(i) # pos_array[i, 0] = pyflex.grab_y_pos_particle(i) # pos_array[i, 2] = pyflex.grab_z_pos_particle(i) # px, py, pz = positions[t,...,0], positions[t,...,1], positions[t,...,2] # vx, vy, vz = velocities[t,...,0], velocities[t,...,1], velocities[t,...,2] # print('posmin', px.min(), py.min(), pz.min()) # print('posmax', px.max(), py.max(), pz.max()) # print('velmin', vx.min(), vy.min(), vz.min()) # print('velmax', vx.max(), vy.max(), vz.max()) # mass == 1 # convert to domain coordinate # smaller than or equal to 1 # file_path = 'dambreak.npz' # np.savez_compressed(file_path, # p=positions, # v=velocities, # res=res, # ) # print(file_path, 'saved') # bounding box # gizmo # velocity # particles # pcd_idx = len(geom) # for i in range(pt.shape[1]): # mesh_sphere = o3d.geometry.TriangleMesh.create_sphere(radius=0.5, resolution=10) # geom.append(mesh_sphere) # if i == 20: break # vis_vel.vol = None # if vis_vel.vol is not None: vis.remove_geometry(vis_vel.vol) # vis_vel.vol = o3d.geometry.VoxelGrid.create_from_point_cloud(pcd, 1) # vis.add_geometry(vis_vel.vol) # for i in range(p.shape[0]): # geom[pcd_idx+i].translate(translation=p[i]) # geom[pcd_idx+i].compute_vertex_normals() # if i == 20: break # for i in range(c.shape[0]): # geom[pcd_idx+i].paint_uniform_color(c[i]) # if i == 20: break # p = pt[vis_vel.t] # for i in range(p.shape[0]): # geom[pcd_idx+i].translate(translation=-p[i]) # if i == 20: break # for the first frame # print('nextframe') # print('prevframe') # print(main_loop_quit) | 2.71291 | 3 |
exercises/ali/Hello World - Deep Q learning .py | alik604/ra | 0 | 6622525 | import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
import gym
import math
import numpy as np
import random
class Network(torch.nn.Module):
def __init__(self, alpha, inputShape, numActions):
super().__init__()
self.inputShape = inputShape
self.numActions = numActions
self.fc1Dims = 1024
self.fc2Dims = 512
self.fc1 = nn.Linear(*self.inputShape, self.fc1Dims)
self.fc2 = nn.Linear(self.fc1Dims, self.fc2Dims)
self.fc3 = nn.Linear(self.fc2Dims, numActions)
self.optimizer = optim.Adam(self.parameters(), lr=alpha)
self.loss = nn.MSELoss()
# self.device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
self.device = torch.device("cpu")
self.to(self.device)
def forward(self, x):
x = F.relu(self.fc1(x))
x = F.relu(self.fc2(x))
x = self.fc3(x)
return x
class Agent():
def __init__(self, lr, inputShape, numActions):
self.network = Network(lr, inputShape, numActions)
def chooseAction(self, observation):
state = torch.tensor(observation).float().detach()
state = state.to(self.network.device)
state = state.unsqueeze(0)
qValues = self.network(state)
action = torch.argmax(qValues).item()
chanceOfAsparagus = random.randint(1, 10)
if chanceOfAsparagus == 1: # 10% chance
action = random.randint(0, 1)
# print("qValues: {}, action {}".format(qValues.detach(), action))
return action
def learn(self, state, action, reward, state_, done):
self.network.optimizer.zero_grad()
state = torch.tensor(state).float().detach().to(self.network.device).unsqueeze(0)
state_ = torch.tensor(state_).float().detach().to(self.network.device).unsqueeze(0)
reward = torch.tensor(reward).float().detach().to(self.network.device)
qValues = self.network(state)
nextQValues = self.network(state_)
predictedValueOfNow = qValues[0][action] # interpret the past
futureActionValue = nextQValues[0].max() # interpret the future
trueValueOfNow = reward + futureActionValue * (1 - done)
loss = self.network.loss(trueValueOfNow, predictedValueOfNow)
loss.backward()
self.network.optimizer.step()
if __name__ == '__main__':
env = gym.make('CartPole-v1').unwrapped
agent = Agent(lr=0.0001, inputShape=(4,), numActions=2)
highScore = -math.inf
episode = 0
for i in range(5000):
done = False
state = env.reset()
score, frame = 0, 1
while not done:
env.render()
action = agent.chooseAction(state)
state_, reward, done, info = env.step(action)
agent.learn(state, action, reward, state_, done)
state = state_
score += reward
frame += 1
# print("reward {}".format(reward))
highScore = max(highScore, score)
print(( "ep {}: high-score {:12.3f}, "
"score {:12.3f}, last-episode-time {:4d}").format(
episode, highScore, score,frame))
episode += 1
| import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
import gym
import math
import numpy as np
import random
class Network(torch.nn.Module):
def __init__(self, alpha, inputShape, numActions):
super().__init__()
self.inputShape = inputShape
self.numActions = numActions
self.fc1Dims = 1024
self.fc2Dims = 512
self.fc1 = nn.Linear(*self.inputShape, self.fc1Dims)
self.fc2 = nn.Linear(self.fc1Dims, self.fc2Dims)
self.fc3 = nn.Linear(self.fc2Dims, numActions)
self.optimizer = optim.Adam(self.parameters(), lr=alpha)
self.loss = nn.MSELoss()
# self.device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
self.device = torch.device("cpu")
self.to(self.device)
def forward(self, x):
x = F.relu(self.fc1(x))
x = F.relu(self.fc2(x))
x = self.fc3(x)
return x
class Agent():
def __init__(self, lr, inputShape, numActions):
self.network = Network(lr, inputShape, numActions)
def chooseAction(self, observation):
state = torch.tensor(observation).float().detach()
state = state.to(self.network.device)
state = state.unsqueeze(0)
qValues = self.network(state)
action = torch.argmax(qValues).item()
chanceOfAsparagus = random.randint(1, 10)
if chanceOfAsparagus == 1: # 10% chance
action = random.randint(0, 1)
# print("qValues: {}, action {}".format(qValues.detach(), action))
return action
def learn(self, state, action, reward, state_, done):
self.network.optimizer.zero_grad()
state = torch.tensor(state).float().detach().to(self.network.device).unsqueeze(0)
state_ = torch.tensor(state_).float().detach().to(self.network.device).unsqueeze(0)
reward = torch.tensor(reward).float().detach().to(self.network.device)
qValues = self.network(state)
nextQValues = self.network(state_)
predictedValueOfNow = qValues[0][action] # interpret the past
futureActionValue = nextQValues[0].max() # interpret the future
trueValueOfNow = reward + futureActionValue * (1 - done)
loss = self.network.loss(trueValueOfNow, predictedValueOfNow)
loss.backward()
self.network.optimizer.step()
if __name__ == '__main__':
env = gym.make('CartPole-v1').unwrapped
agent = Agent(lr=0.0001, inputShape=(4,), numActions=2)
highScore = -math.inf
episode = 0
for i in range(5000):
done = False
state = env.reset()
score, frame = 0, 1
while not done:
env.render()
action = agent.chooseAction(state)
state_, reward, done, info = env.step(action)
agent.learn(state, action, reward, state_, done)
state = state_
score += reward
frame += 1
# print("reward {}".format(reward))
highScore = max(highScore, score)
print(( "ep {}: high-score {:12.3f}, "
"score {:12.3f}, last-episode-time {:4d}").format(
episode, highScore, score,frame))
episode += 1
| en | 0.215413 | # self.device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu") # 10% chance # print("qValues: {}, action {}".format(qValues.detach(), action)) # interpret the past # interpret the future # print("reward {}".format(reward)) | 2.934658 | 3 |
main.py | kamkry-zz/LANwaker-android-public | 0 | 6622526 | <gh_stars>0
from kivy.app import App
from kivy.uix.button import Button
from kivy.uix.floatlayout import FloatLayout
from kivy.uix.spinner import Spinner
from kivy.uix.popup import Popup
from kivy.uix.label import Label
from kivy.uix.textinput import TextInput
from kivy.uix.boxlayout import BoxLayout
from kivy.uix.checkbox import CheckBox
# ###
import socket
import paramiko as pr
import os
import requests
import re
# Below.: these dependencies are for successful build on Android platform; unused but required!
import cryptography
import bcrypt
import nacl.utils
import nacl.public
import nacl.secret
import nacl.signing
# ###
# BACKEND
# ###
from kivy.utils import platform
if platform == "android":
    # Runtime permissions must be requested explicitly on Android (API 23+);
    # the android.permissions module only exists on-device, hence the guard.
    from android.permissions import request_permissions, Permission
    request_permissions([Permission.READ_EXTERNAL_STORAGE, Permission.WRITE_EXTERNAL_STORAGE, Permission.ACCESS_NETWORK_STATE, Permission.ACCESS_WIFI_STATE, Permission.INTERNET])
# Wake-on-LAN target: limited-broadcast address and the discard port (UDP 9).
# 255.255.255.255 only reaches the local segment — change for routed networks.
udp_ip = '255.255.255.255'
udp_port = 9
# Append-only database files: IP→MAC mapping and SSH credentials (one
# stringified dict per line; only the last line is the live snapshot).
database_file = 'db.tx'
credentials_file = 'db_creds.tx'
# Below.: Simple conversion of MAC for further use within app; trimming of ':'
def prepare_mac(raw_mac):
    """Convert a human-readable MAC address into raw bytes for a WOL payload.

    Accepts ``:`` or ``-`` separated (or separator-free) hex notation in any
    case.  Returns a ``bytearray`` (6 bytes for a well-formed MAC) on
    success; on malformed input shows an error popup and returns ``None``
    (callers must tolerate that).
    """
    try:
        # Strip both common separators and normalise case so that
        # bytearray.fromhex() accepts the string.
        mac = raw_mac.replace(":", "").replace("-", "").lower()
        return bytearray.fromhex(mac)
    except (ValueError, AttributeError):
        # ValueError: non-hex characters or odd length.
        # AttributeError: raw_mac is not a string (e.g. None from the UI).
        GUI.generic_info_popup("Invalid MAC address!")
# Below.: by passing IP address, the function returns the MAC address of the device
def translate_ip_to_mac(ip):
    """Resolve *ip* to its recorded MAC address; ``None`` when unknown."""
    try:
        return ip_mac_database[ip]
    except KeyError:
        return None
# Below.: reach for the database file and load it as a dictionary; create a new one if not found
def database_loader():
    """Load the IP→MAC mapping and the SSH credentials from their files.

    Both files are append-only logs of stringified dicts (one per line), so
    only the *last* line holds current data.  Missing files are created
    containing a single blank line.  Results are published through the
    module-level globals ``ip_mac_database`` and ``ssh_credentials``.
    """
    import ast  # literal_eval: safe replacement for eval() on on-disk data

    global ssh_credentials
    global ip_mac_database

    # Make sure both files exist; a fresh file contains one blank line.
    for path in (database_file, credentials_file):
        if not os.path.exists(path):
            with open(path, 'x') as fh:
                fh.write('\n')

    with open(database_file, 'r') as fh:
        lines = fh.readlines()
    data = lines[-1] if lines else '\n'  # last line == latest snapshot

    with open(credentials_file, 'r') as fh:
        cred_lines = fh.readlines()
    if cred_lines:
        data_credentials = cred_lines[-1]
    else:
        # Zero-byte file: nothing has ever been saved.
        GUI.generic_info_popup("No SSH credentials found!")
        data_credentials = '\n'

    if data == '\n':
        # Empty database: seed with a placeholder entry (stripped on save).
        ip_mac_database = {}
        ip_mac_database['0.0.0.0'] = '00:00:00:00:00:00'
    else:
        # literal_eval parses the str(dict) written by dump_database_to_file()
        # without eval()'s arbitrary-code-execution risk.
        ip_mac_database = ast.literal_eval(data)
        ip_mac_database.pop('', None)  # drop any stray empty-key entry

    if data_credentials == '\n':
        ssh_credentials = {}
    else:
        ssh_credentials = ast.literal_eval(data_credentials)
# Below.: STRINGIFY the dictionaries and write them to the database files
def dump_database_to_file():
    """Append the current in-memory databases as new lines in their files.

    The files are append-only logs of stringified dicts; database_loader()
    reads back only the last line.  The "0.0.0.0" placeholder entry created
    for an empty database is stripped first so it never persists.
    """
    # Drop the placeholder entry if present (no-op otherwise).
    ip_mac_database.pop("0.0.0.0", None)
    with open(database_file, 'a') as fh:
        fh.write('\n')
        fh.write(str(ip_mac_database))
    with open(credentials_file, 'a') as fh:
        fh.write('\n')
        fh.write(str(ssh_credentials))
# Below.: ssh_handler CLASS holds functions crucial for the SSH connection
class ssh_handler:
    """SSH operations against a Proxmox host: credential test, remote
    shutdown, and connection teardown."""

    @staticmethod
    def test_function(host_to_connect):
        """Connect with the most recently saved credentials and run a
        harmless command.

        Raises paramiko connection/auth errors on failure, so this doubles
        as a credentials check.
        """
        ssh_client = pr.SSHClient()
        ssh_client.set_missing_host_key_policy(pr.AutoAddPolicy())
        ssh_credentials_list = list(ssh_credentials.items())
        # Last saved {user: password} entry wins.
        usr, passwd = ssh_credentials_list[-1][0], ssh_credentials_list[-1][1]
        ssh_client.connect(host_to_connect, username=usr, password=passwd)
        stdin, stdout, stderr = ssh_client.exec_command('uname -a')

    @staticmethod
    def shutdown_proxmox_via_ssh(selected_ip):
        """Arm Wake-on-LAN on the host NIC, stop all running VMs, then halt.

        Rejects the placeholder address and requires an ICMP echo reply
        before attempting SSH; shows a popup and returns on any
        precondition failure.
        """
        if selected_ip == "0.0.0.0":
            GUI.info_popup_wrong_ip()
            return
        if not udp_socket.ping_selected_ip(str(selected_ip)):
            GUI.generic_info_popup("No ECHO REPLY from selected IP")
            return
        ssh_client = pr.SSHClient()
        ssh_client.set_missing_host_key_policy(pr.AutoAddPolicy())
        ssh_credentials_list = list(ssh_credentials.items())
        try:
            usr, passwd = ssh_credentials_list[-1][0], ssh_credentials_list[-1][1]
        except IndexError:
            GUI.generic_info_popup("No SSH credentials found!")
            return
        ssh_client.connect(selected_ip, username=usr, password=passwd)
        # Remote sequence:
        #  1. detect the primary NIC name,
        #  2. enable Wake-on-LAN via magic packet (ethtool mode "g"),
        #  3. cleanly shut down every running VM via `qm shutdown`,
        #  4. power off the host.
        # Note: the word-boundary is written \\b so the shell sees \b — a
        # bare "\b" in a Python string is a backspace character.
        stdin, stdout, stderr = ssh_client.exec_command(
            'net_dev=$(ip a | grep -Eo "en[a-z0-9]+" | grep -Eo "^enp[0-9][a-z]0\\b");'
            ' ethtool -s $net_dev wol g;'
            " qm list | grep running | awk '{print $1}' | xargs -n1 qm shutdown;"
            ' shutdown -h now')

    @staticmethod
    def close_ssh_connection(*cls):
        """Close an SSH client.

        NOTE(review): this instantiates a *new* SSHClient and closes it, so
        the client opened by the methods above is never actually torn down —
        kept as-is to preserve current behavior; consider storing the active
        client on the class instead.
        """
        ssh_client = pr.SSHClient()
        ssh_client.close()
# Below.: udp_socket CLASS holds functions crucial for opening and closing UDP socket PLUS generating the UDP WOL packet
class udp_socket:
    """UDP helpers: Wake-on-LAN magic-packet emission and reachability ping."""

    @staticmethod
    def send_magic_packet(selected_mac):
        """Broadcast a WOL magic packet for *selected_mac* to udp_ip:udp_port.

        Payload layout: six 0xFF bytes followed by the target MAC repeated
        sixteen times.  Shows an error popup on any failure.
        """
        try:
            with socket.socket(socket.AF_INET, socket.SOCK_DGRAM) as sock:
                sock.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)
                sock.sendto(b"\xff" * 6 + prepare_mac(selected_mac) * 16,
                            (udp_ip, udp_port))
        except (OSError, TypeError):
            # TypeError covers prepare_mac() returning None on a bad MAC.
            GUI.generic_info_popup("Could not send WOL packet\nCheck if the selected MAC is correct")

    @staticmethod
    def ping_selected_ip(selected_ip):
        """Return True when *selected_ip* answers a single 1-second ping."""
        if str(selected_ip) == "Pick item":
            # Spinner's default label — the user has not chosen a host yet.
            return False
        import subprocess  # list-form run(): no shell, no injection risk
        completed = subprocess.run(["ping", "-c1", "-W1", str(selected_ip)])
        return completed.returncode == 0
class discovery:
    """Pipeline that scans a subnet for Proxmox hosts and records their MACs.

    Stages chain into one another:
    convert_CIDR -> scan_network -> verify_if_proxmox -> retrive_proxmox_mac
    -> append_ips_mac_to_list.  Intermediate results accumulate in the
    module-level ``proxmox_ips`` list and ``proxmox_ips_mac`` dict, and are
    finally merged into ``ip_mac_database``.
    """
    global proxmox_ips
    proxmox_ips = []
    global proxmox_ips_mac
    proxmox_ips_mac = {}

    def convert_CIDR(subnet_and_port):
        """Turn a {(start_ip, host_count): port} spec into a scan range.

        Despite the name this is not CIDR math: *host_count* is added to the
        start address' last octet, with a single carry into the third octet
        on overflow.  Hands off to scan_network().
        """
        subnetwork_port = list(subnet_and_port.values())[0]
        ip_start = list(subnet_and_port.keys())[0][0]
        ip_range = list(subnet_and_port.keys())[0][1]
        ip_end = ip_start.split('.')
        if len(ip_end) != 4:
            GUI.generic_info_popup("Wrong IP format")
            return
        ip_end[3] = str(int(ip_end[3]) + int(ip_range))
        if int(ip_end[3]) > 255:
            # Carry into the third octet when the last octet overflows.
            ip_end[3] = str(int(ip_end[3]) - 255)
            ip_end[2] = str(int(ip_end[2]) + 1)
        ip_end = '.'.join(ip_end)
        return discovery.scan_network(start_ip=ip_start, end_ip=ip_end,
                                      subnetwork_port=subnetwork_port)

    def scan_network(*self, start_ip, end_ip, subnetwork_port):
        """TCP-probe each address between start_ip and end_ip on one port.

        Addresses that accept the connection are appended to ``proxmox_ips``.
        NOTE(review): only the final octet varies here, so a range that
        carried into the third octet in convert_CIDR() collapses to an empty
        scan — confirm whether cross-/24 ranges are intended.
        """
        octets = start_ip.split('.')
        ip_masked_address = octets[0] + '.' + octets[1] + '.' + octets[2] + '.'
        port = str(subnetwork_port)
        scan_start_range = int(start_ip.split('.')[-1])
        scan_end_range = int(end_ip.split('.')[-1])
        ip_range = [ip_masked_address + str(i)
                    for i in range(scan_start_range, scan_end_range)]
        for ip in ip_range:
            sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            sock.settimeout(1)
            try:
                result = sock.connect_ex((ip, int(port)))
            except (OSError, ValueError):
                # Close before bailing out so the descriptor is not leaked.
                sock.close()
                GUI.generic_info_popup("Could not connect\nto the selected IPs")
                return
            # Close on every iteration (previously sockets for closed ports
            # were leaked because of an early `continue`).
            sock.close()
            if result == 0:
                proxmox_ips.append(ip)
        return discovery.verify_if_proxmox(proxmox_ips)

    def verify_if_proxmox(proxmox_ips):
        """Drop every candidate whose HTTPS :8006 page title is not Proxmox."""
        port = "8006"
        # Iterate over a snapshot: calling remove() on the list being
        # iterated would skip the element following each removal.
        for ip in list(proxmox_ips):
            # verify=False: Proxmox ships a self-signed certificate, so
            # certificate validation is deliberately skipped here.
            https_response = requests.get(
                "https://" + str(ip) + ":" + str(port), verify=False)
            tx = https_response.text
            title = re.split("</title>", re.split("<title>", tx)[1])[0]
            if "Proxmox" not in title:
                proxmox_ips.remove(ip)
        return discovery.retrive_proxmox_mac(proxmox_ips)

    def retrive_proxmox_mac(proxmox_ips):
        """SSH into each confirmed host and read the MAC of its web-UI NIC."""
        for ip in proxmox_ips:
            ssh_client = pr.SSHClient()
            ssh_client.set_missing_host_key_policy(pr.AutoAddPolicy())
            ssh_credentials_list = list(ssh_credentials.items())
            try:
                usr, passwd = (ssh_credentials_list[-1][0],
                               ssh_credentials_list[-1][1])
            except IndexError:
                GUI.generic_info_popup("No SSH credentials found!")
                return
            ssh_client.connect(ip, username=usr, password=passwd)
            ip = str(ip)
            # Grep the interface block that carries the web-UI IP and pull
            # its MAC address, excluding the ff:ff:… broadcast entry.
            stdin, stdout, stderr = ssh_client.exec_command(
                f'webui_ip={ip};'
                'ip a | grep -B1 "$webui_ip" | grep -Eo "([0-9A-Fa-f]{2}[:-]){5}([0-9A-Fa-f]{2})" | grep -Ev "(ff:){5}ff"')
            proxmox_mac = stdout.read().decode('ascii').strip("\n")
            proxmox_ips_mac[ip] = proxmox_mac
        return discovery.append_ips_mac_to_list()

    @staticmethod
    def append_ips_mac_to_list():
        """Merge discovered IP→MAC pairs into the persistent database and
        report how many hosts were found."""
        amount_found = 0
        for ip, mac in proxmox_ips_mac.items():
            ip_mac_database[ip] = mac
            amount_found += 1
        return (dump_database_to_file(),
                GUI.generic_info_popup(f'Discovered {amount_found} Proxmoxes'))
# ###
# FRONTEND
# ###
# Below.: simple colors for app
# RGBA color constants in Kivy's 0–1 channel range, used for widget theming.
red = [1,0,0,1]
green = [0,1,0,1]
blue = [0,0,1,1]
purple = [1,0,1,1]
gray = [0.5,0.5,0.5,1]
yellow = [1,1,0,1]
# Below.: main function for frontend
class GUI(App):
check_ref = {}
# Below.: event saving data to the database upon clicking the 'Save DB' button
    def dump_data(self, event):
        """Kivy button callback: persist both databases to disk.

        *event* is the Button instance supplied by bind(); unused here.
        """
        return dump_database_to_file()
def database_manager(self, *event):
container = FloatLayout()
vertical_position = 0.8 # starting vertical position of the widgets
self.header_ip = Label(text="<IP value>", font_size=35, pos_hint={'center_x': 0.15, 'center_y': vertical_position+0.1}, color=green)
container.add_widget(self.header_ip)
self.header_mac = Label(text="<MAC value>", font_size=35, pos_hint={'center_x': 0.5, 'center_y': vertical_position+0.1}, color=green)
container.add_widget(self.header_mac)
self.header_about_to_remove = Label(text="Remove entry?", font_size=35, pos_hint={'center_x': 0.85, 'center_y': vertical_position+0.1}, color=red)
container.add_widget(self.header_about_to_remove)
for each_key in ip_mac_database.keys(): # Iterating through each entry in the database
self.ip_entry = Label(text=each_key, pos_hint={'center_x': 0.15, 'center_y': vertical_position}, font_size=30)
container.add_widget(self.ip_entry)
self.mac_entry = Label(text=ip_mac_database[each_key], pos_hint={'center_x': 0.5, 'center_y': vertical_position}, font_size=30)
container.add_widget(self.mac_entry)
self.about_to_remove_checkbox = CheckBox(pos_hint={'center_x': 0.85, 'center_y': vertical_position}, size_hint=(0.1, 0.1))
container.add_widget(self.about_to_remove_checkbox)
self.check_ref[str(vertical_position)] = self.about_to_remove_checkbox, self.ip_entry, self.mac_entry # A handy dictonary for referencing the checkboxes and labels (IPs and MACs)
vertical_position -= 0.125 # This determines the spacing between the labels
self.add_ip = TextInput(hint_text="<IP addr>", multiline=False, size_hint=(0.3, 0.05), pos_hint={'center_x': 0.15, 'center_y': vertical_position}, halign='center')
container.add_widget(self.add_ip)
self.add_mac = TextInput(hint_text="<MAC addr>", multiline=False, size_hint=(0.3, 0.05), pos_hint={'center_x': 0.5, 'center_y': vertical_position}, halign='center')
container.add_widget(self.add_mac)
self.save = Button(text="Save changes", font_size=35, pos_hint={'center_x': 0.5, 'center_y': vertical_position-0.15}, size_hint=(0.35, 0.125), background_color=green)
self.save.bind(on_release=self.getcheckboxes_active)
container.add_widget(self.save)
popup = Popup(title='Manage database', content=container, size_hint=(0.8, 0.8))
popup.open()
def getcheckboxes_active(self, *arg): # This function is called when the 'Save changes' button is clicked
for idx, wgt in self.check_ref.items():
if wgt[0].active:
ip_to_remove = wgt[1].text
ip_mac_database.pop(ip_to_remove)
if self.add_ip.text != "<IP addr>" and self.add_mac.text != "<MAC addr>": # Do not add an entry if the IP and MAC are not changed
ip_mac_database[self.add_ip.text] = self.add_mac.text
def add_new_credentials(self, event):
container = FloatLayout()
self.username = TextInput(hint_text='<Username>', multiline=False, size_hint=(.75, 0.125), pos_hint={'center_x': .5, 'center_y': .85},
halign='center', font_size=30)
container.add_widget(Label(text='Enter <Username>.:', font_size='25sp', size_hint=(1, 0.2), pos_hint={'center_x': .5, 'center_y': .95}))
container.add_widget(self.username)
self.password = TextInput(hint_text='<Password>', multiline=False, size_hint=(.75, 0.125), pos_hint={'center_x': .5, 'center_y': .55},
halign='center', font_size=30, password=True)
container.add_widget(Label(text='Enter <Password>.:', font_size='25sp', size_hint=(1, 0.2), pos_hint={'center_x': .5, 'center_y': .65}))
container.add_widget(self.password)
popup = Popup(title='Add new <Username> : <Password>',
content=container,
size_hint=(0.8, 0.8))
execute = Button(text='Save', background_color=green, size_hint=(0.8, 0.15), pos_hint={'center_x': .5, 'center_y': .15}, halign='center')
execute.bind(on_press= self.append_to_credentials_on_tap, on_release=popup.dismiss)
container.add_widget(execute)
popup.open()
def input_subnet_and_port(self, event):
container = FloatLayout()
self.ip_start = TextInput(hint_text='<IP start>', multiline=False, size_hint=(.75, 0.125), pos_hint={'center_x': .5, 'center_y': .85},
halign='center', font_size=30)
container.add_widget(Label(text='Enter <IP start>.:', font_size='25sp', size_hint=(1, 0.2), pos_hint={'center_x': .5, 'center_y': .95}))
container.add_widget(self.ip_start)
self.ip_end = TextInput(hint_text='<IPs to scan?>', multiline=False, size_hint=(.75, 0.125), pos_hint={'center_x': .5, 'center_y': .65},
halign='center', font_size=30)
container.add_widget(Label(text='How many IPs.:', font_size='25sp', size_hint=(1, 0.2), pos_hint={'center_x': .5, 'center_y': .75}))
container.add_widget(self.ip_end)
self.port = TextInput(hint_text='<Port>', multiline=False, size_hint=(.75, 0.125), pos_hint={'center_x': .5, 'center_y': .45},
halign='center', font_size=30)
container.add_widget(Label(text='Enter <Port>.:', font_size='25sp', size_hint=(1, 0.2), pos_hint={'center_x': .5, 'center_y': .55}))
container.add_widget(self.port)
popup = Popup(title='Add new <Subnet> : <Port>',
content=container,
size_hint=(0.8, 0.8))
execute = Button(text='Save', background_color=green, size_hint=(0.8, 0.15), pos_hint={'center_x': .5, 'center_y': .15}, halign='center')
execute.bind(on_press= self.append_to_subnet_and_port_on_tap, on_release=popup.dismiss)
container.add_widget(execute)
popup.open()
def append_to_subnet_and_port_on_tap(self, *arg):
subnet_and_port_database = {}
ip_range = (self.ip_start.text, self.ip_end.text)
if int(self.ip_end.text) > 255:
GUI.generic_info_popup('<IPs to scan?>\nmust be less than 255')
return
subnet_and_port_database[ip_range] = self.port.text
discovery.convert_CIDR(subnet_and_port_database)
def append_to_credentials_on_tap(self, event):
ssh_credentials[self.username.text] = self.password.text
def append_to_database_on_tap(self, event):
ip_mac_database[self.ip.text] = self.mac.text
def shutdown_on_tap(self, event):
ssh_handler.shutdown_proxmox_via_ssh(self.spinner.text)
ssh_handler.close_ssh_connection()
def send_wol_on_tap(self, event):
udp_socket.send_magic_packet(translate_ip_to_mac(self.spinner.text))
def change_color_on_status_check(self, event):
colors = [red, green, blue, purple, yellow, gray]
if udp_socket.ping_selected_ip(str(self.spinner.text)) == True:
self.spinner.background_color = colors[1]
else:
self.spinner.background_color = colors[0]
@staticmethod
def info_popup_wrong_ip():
info_popup_wrong_ip = Popup(title='Wrong IP', content=Label(text='Please enter a valid IP address!'),
size_hint=(None, None), size=(300, 200))
info_popup_wrong_ip.open()
def generic_info_popup(reason):
# for ~30 chars in line; width should be 0.65
# single line should have 0.25 height
n_characters = int(len(str(reason)))
n_lines = int(n_characters // 30)
if n_characters % 30 > 0:
n_lines += 1
y_dimension = int(0.25)
y_dimension += n_lines * 0.25
info_popup = Popup(title='Info', content=Label(text=reason), size_hint=(0.65, y_dimension))
info_popup.open()
# Below.: packing the frontend up
def build(self):
layout = FloatLayout(size=(100, 100))
colors = [red, green, blue, purple, yellow, gray]
self.spinner = Spinner(text='Pick item', values=(ip_mac_database.keys()),background_color=colors[2], size_hint=(0.8, 0.2), pos_hint={'x': 0.1, 'y': 0.75})
btn_wol = Button(text='Send WOL',size_hint=(0.35,0.4),background_color=colors[1],pos_hint={'x':0.05,'y':0.1})
btn_shutdown = Button(text='Shutdown',size_hint=(0.35,0.4),background_color=colors[0],pos_hint={'x':0.6,'y':0.1})
btn_dump = Button(text='Save DB',size_hint=(0.25,0.1),background_color=colors[3],pos_hint={'x':0.05,'y':0.6})
btn_new_entry = Button(text='Add IP-MAC',size_hint=(0.25,0.1),background_color=colors[3],pos_hint={'x':0.7,'y':0.6})
btn_proxmox_credentials = Button(text=' Enter\nCredentials',size_hint=(0.25,0.1),background_color=colors[3],pos_hint={'x':0.375,'y':0.6})
btn_check_status = Button(text='Server Status',size_hint=(0.375,0.075),background_color=colors[5],pos_hint={'x':0.05,'y':0.5125})
btn_discover_proxmoxes = Button(text='Discover Proxmoxes',size_hint=(0.375,0.075),background_color=colors[4],pos_hint={'x':0.575,'y':0.5125})
btn_shutdown.bind(on_release = self.shutdown_on_tap)
btn_wol.bind(on_release = self.send_wol_on_tap)
btn_dump.bind(on_release = self.dump_data)
btn_new_entry.bind(on_release = self.database_manager) # <= REPLACED!!
btn_proxmox_credentials.bind(on_release = self.add_new_credentials)
btn_check_status.bind(on_release = self.change_color_on_status_check)
btn_discover_proxmoxes.bind(on_release = self.input_subnet_and_port)
layout.add_widget(self.spinner)
layout.add_widget(btn_wol)
layout.add_widget(btn_shutdown)
layout.add_widget(btn_dump)
layout.add_widget(btn_new_entry)
layout.add_widget(btn_proxmox_credentials)
layout.add_widget(btn_check_status)
layout.add_widget(btn_discover_proxmoxes)
return layout
if __name__ == "__main__":
    # Load (or initialise) the on-disk databases before the UI starts.
    database_loader()
    app = GUI()
app.run() | from kivy.app import App
from kivy.uix.button import Button
from kivy.uix.floatlayout import FloatLayout
from kivy.uix.spinner import Spinner
from kivy.uix.popup import Popup
from kivy.uix.label import Label
from kivy.uix.textinput import TextInput
from kivy.uix.boxlayout import BoxLayout
from kivy.uix.checkbox import CheckBox
# ###
import socket
import paramiko as pr
import os
import requests
import re
# Below.: these dependencies are for successful build on Android platform; unused but required!
import cryptography
import bcrypt
import nacl.utils
import nacl.public
import nacl.secret
import nacl.signing
# ###
# BACKEND
# ###
from kivy.utils import platform
if platform == "android":
from android.permissions import request_permissions, Permission
request_permissions([Permission.READ_EXTERNAL_STORAGE, Permission.WRITE_EXTERNAL_STORAGE, Permission.ACCESS_NETWORK_STATE, Permission.ACCESS_WIFI_STATE, Permission.INTERNET])
# Below.: the BROADCAST address for your network; change accordingly.
udp_ip = '255.255.255.255'
udp_port = 9  # UDP discard port, the conventional Wake-on-LAN target
# Below.: on-disk database files (one str(dict) snapshot per line).
database_file = 'db.tx'
credentials_file = 'db_creds.tx'
# Below.: Simple conversion of MAC for further use within app; trimming of ':'
def prepare_mac(raw_mac):
    """Convert a colon-separated MAC string into its 6 raw bytes.

    Returns a bytearray on success; on a malformed MAC shows a popup and
    returns None (callers must tolerate that).
    """
    try:
        mac = raw_mac.replace(":", "").lower()
        return bytearray.fromhex(mac)
    except (AttributeError, ValueError):
        # AttributeError: raw_mac was None (unknown IP); ValueError: bad hex.
        GUI.generic_info_popup("Invalid MAC address!")
# Below.: by passing IP address, the function returns the MAC address of the device
def translate_ip_to_mac(ip):
    """Look up *ip* in the IP -> MAC database; None when it is unknown."""
    known_hosts = ip_mac_database
    if ip in known_hosts:
        return known_hosts[ip]
    return None
# Below.: reach for the database file and load it as a dictionary; create a new one if not found
def database_loader():
    """Load (or create) the IP->MAC and SSH-credential databases.

    Each file stores one str(dict) snapshot per line; only the last line
    is authoritative.  Populates the module-level globals
    ip_mac_database and ssh_credentials.
    """
    import ast  # local import so this block stays self-contained

    global ssh_credentials
    global ip_mac_database

    def _ensure_file(path):
        # Create the file with a single blank line when it does not exist.
        if not os.path.exists(path):
            with open(path, 'x') as fh:
                fh.write('\n')

    _ensure_file(database_file)
    _ensure_file(credentials_file)

    # Context managers guarantee the handles are closed even on errors —
    # the original leaked open files on every exception path.
    with open(database_file, 'r') as fh:
        lines = fh.readlines()
    data = lines[-1] if lines else '\n'

    with open(credentials_file, 'r') as fh:
        cred_lines = fh.readlines()
    if cred_lines:
        data_credentials = cred_lines[-1]
    else:
        # Completely empty credentials file: warn and fall back to no creds.
        GUI.generic_info_popup("No SSH credentials found!")
        data_credentials = '\n'

    if data == '\n':
        # Fresh database: seed a placeholder entry so the UI has a row.
        ip_mac_database = {'0.0.0.0': '00:00:00:00:00:00'}
    else:
        # str(dict) snapshots are plain Python literals; ast.literal_eval
        # parses them without executing arbitrary code the way eval() did.
        ip_mac_database = ast.literal_eval(data)
        ip_mac_database.pop('', None)  # drop the empty-key artifact, if any

    if data_credentials == '\n':
        ssh_credentials = {}
    else:
        ssh_credentials = ast.literal_eval(data_credentials)
# Below.: STRINGIFY the dictionaries and write them to the database files
def dump_database_to_file():
    """Append the current state of both databases to their files.

    Each call appends one str(dict) line; database_loader() only ever
    reads the last line, so the files act as an append-only history.
    """
    # The 0.0.0.0 placeholder seeded by database_loader() must never be
    # persisted as a real host.  (A stray debug print was removed here.)
    ip_mac_database.pop("0.0.0.0", None)
    with open(database_file, 'a') as fh:
        fh.write('\n')
        fh.write(str(ip_mac_database))
    with open(credentials_file, 'a') as fh:
        fh.write('\n')
        fh.write(str(ssh_credentials))
# Below.: ssh_handler CLASS holds functions crucial for the SSH connection
class ssh_handler:
    """Namespace for the SSH operations (connectivity test, shutdown,
    disconnect)."""

    def test_function(host_to_connect):
        """Connectivity smoke test: run `uname -a` on the target host."""
        ssh_client = pr.SSHClient()
        ssh_client.set_missing_host_key_policy(pr.AutoAddPolicy())
        ssh_credentials_list = list(ssh_credentials.items())
        usr, passwd = ssh_credentials_list[-1][0], ssh_credentials_list[-1][1]
        # BUG FIX: the password argument was an invalid <PASSWORD> placeholder.
        ssh_client.connect(host_to_connect, username=usr, password=passwd)
        stdin, stdout, stderr = ssh_client.exec_command('uname -a')

    def shutdown_proxmox_via_ssh(selected_ip):
        """Cleanly power off the selected Proxmox host over SSH.

        Re-arms Wake-on-LAN on its NIC first (so the host can be woken
        again), stops all running VMs, then halts the machine.
        """
        if selected_ip == "0.0.0.0":  # database placeholder, never a real host
            GUI.info_popup_wrong_ip()
            return
        if not udp_socket.ping_selected_ip(str(selected_ip)):
            GUI.generic_info_popup("No ECHO REPLY from selected IP")
            return
        ssh_client = pr.SSHClient()
        ssh_client.set_missing_host_key_policy(pr.AutoAddPolicy())
        ssh_credentials_list = list(ssh_credentials.items())
        try:
            usr, passwd = ssh_credentials_list[-1][0], ssh_credentials_list[-1][1]
        except IndexError:
            GUI.generic_info_popup("No SSH credentials found!")
            return
        # BUG FIX: the password argument was an invalid <PASSWORD> placeholder.
        ssh_client.connect(selected_ip, username=usr, password=passwd)
        stdin, stdout, stderr = ssh_client.exec_command(
            # BUG FIX: '\b' was an unescaped Python backspace character; it is
            # now the literal \b word boundary grep expects.
            'net_dev=$(ip a | grep -Eo "en[a-z0-9]+" | grep -Eo "^enp[0-9][a-z]0\\b");'  # NIC name
            ' ethtool -s $net_dev wol g;'  # enable magic-packet WOL
            # BUG FIX: the awk program was wrapped in backticks (command
            # substitution) instead of quotes, which broke the pipeline.
            # NOTE(review): presumably this should be `qm shutdown` per VM id
            # rather than the bare `shutdown` — confirm on a test host.
            'qm list | grep "running" | awk \'{print $1}\' | xargs -n1 shutdown ;'  # stop running VMs
            'shutdown -h now')  # halt the host

    # Below.: terminate established SSH connection
    @staticmethod
    def close_ssh_connection(*cls):
        # NOTE(review): this closes a freshly created client, not the one
        # opened above; kept as-is for interface compatibility.
        ssh_client = pr.SSHClient()
        ssh_client.close()
# Below.: udp_socket CLASS holds functions crucial for opening and closing UDP socket PLUS generating the UDP WOL packet
class udp_socket:
    """Namespace for Wake-on-LAN packet sending and reachability checks."""

    def send_magic_packet(selected_mac):
        """Broadcast a WOL magic packet for *selected_mac*.

        The payload is six 0xFF bytes followed by the MAC repeated 16 times.
        """
        try:
            sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
            sock.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)
            sock.sendto(b"\xff"*6 + prepare_mac(selected_mac)*16, (udp_ip, udp_port))
            sock.close()
        except (OSError, TypeError):
            # TypeError: prepare_mac returned None for an unknown/invalid MAC;
            # OSError: the socket layer rejected the send.
            GUI.generic_info_popup("Could not send WOL packet\nCheck if the selected MAC is correct")

    def ping_selected_ip(selected_ip):
        """Return True when *selected_ip* answers a single 1-second ping."""
        if str(selected_ip) == "Pick item":  # spinner's untouched default
            return False
        # NOTE(review): selected_ip is interpolated into a shell command; it
        # comes from the local database/spinner today, but validate it if
        # that ever changes.
        ping_result = os.system("ping -c1 -W1 " + str(selected_ip))
        return ping_result == 0
class discovery:
    """Subnet scan pipeline: probe an IP range, keep hosts whose web UI
    is Proxmox, read their MAC over SSH and merge results into the DB."""

    # Shared scratch state for a single discovery run.
    global proxmox_ips
    proxmox_ips = []
    global proxmox_ips_mac
    proxmox_ips_mac = {}

    def convert_CIDR(subnet_and_port):
        """Turn the user's (start_ip, count) + port input into a scan range."""
        subnetwork_port = list(subnet_and_port.values())[0]
        ip_start = list(subnet_and_port.keys())[0][0]
        ip_range = list(subnet_and_port.keys())[0][1]
        ip_end = ip_start.split('.')
        if len(ip_end) != 4:
            GUI.generic_info_popup("Wrong IP format")
            return
        ip_end[3] = str(int(ip_end[3]) + int(ip_range))
        if int(ip_end[3]) > 255:
            # Wrap into the next /24 when the range crosses an octet boundary.
            ip_end[3] = str(int(ip_end[3]) - 255)
            ip_end[2] = str(int(ip_end[2]) + 1)
        ip_end = '.'.join(ip_end)
        return discovery.scan_network(start_ip=ip_start, end_ip=ip_end, subnetwork_port=subnetwork_port)

    def scan_network(*self, start_ip, end_ip, subnetwork_port):
        """TCP-probe every address in the range; collect the responders."""
        ip_masked_address = start_ip.split('.')
        ip_masked_address = ip_masked_address[0] + '.' + ip_masked_address[1] + '.' + ip_masked_address[2] + '.'
        # TODO(review): when convert_CIDR wrapped into the next /24, this
        # still scans only the starting /24 (the original had the same gap).
        port = str(subnetwork_port)
        scan_start_range = int(start_ip.split('.')[-1])
        scan_end_range = int(end_ip.split('.')[-1])
        ip_range = [ip_masked_address + str(i) for i in range(scan_start_range, scan_end_range)]
        for ip in ip_range:
            sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            sock.settimeout(1)
            try:
                result = sock.connect_ex((ip, int(port)))
            except OSError:
                sock.close()
                GUI.generic_info_popup("Could not connect\nto the selected IPs")
                return
            # BUG FIX: close on every iteration — the original's `continue`
            # skipped the close and leaked one socket per unresponsive host.
            sock.close()
            if result == 0:
                proxmox_ips.append(ip)
        return discovery.verify_if_proxmox(proxmox_ips)

    def verify_if_proxmox(proxmox_ips):
        """Drop every IP whose web UI <title> does not mention Proxmox."""
        port = "8006"
        # BUG FIX: iterate over a snapshot — removing from the list being
        # iterated silently skips the element after each removed entry.
        for ip in list(proxmox_ips):
            # Proxmox ships a self-signed certificate, hence verify=False.
            https_response = requests.get("https://" + str(ip) + ":" + str(port), verify=False)
            parts = re.split("<title>", https_response.text)
            if len(parts) < 2:  # page without a <title> tag at all
                proxmox_ips.remove(ip)
                continue
            title = re.split("</title>", parts[1])[0]
            if "Proxmox" not in title:
                proxmox_ips.remove(ip)
        return discovery.retrive_proxmox_mac(proxmox_ips)

    def retrive_proxmox_mac(proxmox_ips):
        """SSH into each verified host and record the MAC of the interface
        carrying the web-UI IP."""
        for ip in proxmox_ips:
            ssh_client = pr.SSHClient()
            ssh_client.set_missing_host_key_policy(pr.AutoAddPolicy())
            ssh_credentials_list = list(ssh_credentials.items())
            try:
                # Use the most recently added credential pair.
                usr, passwd = ssh_credentials_list[-1][0], ssh_credentials_list[-1][1]
            except IndexError:
                GUI.generic_info_popup("No SSH credentials found!")
                return
            # BUG FIX: the password argument was an invalid <PASSWORD>
            # placeholder; pass the credential that was just unpacked.
            ssh_client.connect(ip, username=usr, password=passwd)
            ip = str(ip)
            stdin, stdout, stderr = ssh_client.exec_command(f'webui_ip={ip};'
                'ip a | grep -B1 "$webui_ip" | grep -Eo "([0-9A-Fa-f]{2}[:-]){5}([0-9A-Fa-f]{2})" | grep -Ev "(ff:){5}ff"')
            proxmox_mac = stdout.read().decode('ascii').strip("\n")
            proxmox_ips_mac[ip] = proxmox_mac
        return discovery.append_ips_mac_to_list()

    @staticmethod
    def append_ips_mac_to_list():
        """Merge the discovered IP -> MAC pairs into the main database,
        persist it and report the count."""
        for ip, mac in proxmox_ips_mac.items():
            ip_mac_database[ip] = mac
        amount_found = len(proxmox_ips_mac)
        return dump_database_to_file(), GUI.generic_info_popup(f'Discovered {amount_found} Proxmoxes')
# ###
# FRONTEND
# ###
# Below.: simple colors for app
# Kivy RGBA color constants (each channel in the 0-1 range, alpha last).
red = [1,0,0,1]
green = [0,1,0,1]
blue = [0,0,1,1]
purple = [1,0,1,1]  # NOTE: [1,0,1,1] is magenta in strict RGBA terms
gray = [0.5,0.5,0.5,1]
yellow = [1,1,0,1]
# Below.: main function for frontend
class GUI(App):
    """Main application window: Wake-on-LAN sending, SSH shutdown,
    Proxmox discovery and a small IP->MAC database manager."""

    # Maps a row's vertical position to (checkbox, ip_label, mac_label) so
    # getcheckboxes_active() can tell which database rows were ticked.
    check_ref = {}

    def dump_data(self, event):
        """'Save DB' handler: persist both databases to disk."""
        return dump_database_to_file()

    def database_manager(self, *event):
        """'Add IP-MAC' handler: popup for viewing/editing the database."""
        container = FloatLayout()
        vertical_position = 0.8  # starting vertical position of the widgets
        self.header_ip = Label(text="<IP value>", font_size=35, pos_hint={'center_x': 0.15, 'center_y': vertical_position+0.1}, color=green)
        container.add_widget(self.header_ip)
        self.header_mac = Label(text="<MAC value>", font_size=35, pos_hint={'center_x': 0.5, 'center_y': vertical_position+0.1}, color=green)
        container.add_widget(self.header_mac)
        self.header_about_to_remove = Label(text="Remove entry?", font_size=35, pos_hint={'center_x': 0.85, 'center_y': vertical_position+0.1}, color=red)
        container.add_widget(self.header_about_to_remove)
        for each_key in ip_mac_database.keys():  # one row per database entry
            self.ip_entry = Label(text=each_key, pos_hint={'center_x': 0.15, 'center_y': vertical_position}, font_size=30)
            container.add_widget(self.ip_entry)
            self.mac_entry = Label(text=ip_mac_database[each_key], pos_hint={'center_x': 0.5, 'center_y': vertical_position}, font_size=30)
            container.add_widget(self.mac_entry)
            self.about_to_remove_checkbox = CheckBox(pos_hint={'center_x': 0.85, 'center_y': vertical_position}, size_hint=(0.1, 0.1))
            container.add_widget(self.about_to_remove_checkbox)
            # Keyed by vertical position: references the row's widgets.
            self.check_ref[str(vertical_position)] = self.about_to_remove_checkbox, self.ip_entry, self.mac_entry
            vertical_position -= 0.125  # spacing between the rows
        self.add_ip = TextInput(hint_text="<IP addr>", multiline=False, size_hint=(0.3, 0.05), pos_hint={'center_x': 0.15, 'center_y': vertical_position}, halign='center')
        container.add_widget(self.add_ip)
        self.add_mac = TextInput(hint_text="<MAC addr>", multiline=False, size_hint=(0.3, 0.05), pos_hint={'center_x': 0.5, 'center_y': vertical_position}, halign='center')
        container.add_widget(self.add_mac)
        self.save = Button(text="Save changes", font_size=35, pos_hint={'center_x': 0.5, 'center_y': vertical_position-0.15}, size_hint=(0.35, 0.125), background_color=green)
        self.save.bind(on_release=self.getcheckboxes_active)
        container.add_widget(self.save)
        popup = Popup(title='Manage database', content=container, size_hint=(0.8, 0.8))
        popup.open()

    def getcheckboxes_active(self, *arg):
        """'Save changes' handler: drop ticked rows, then add the new entry."""
        for idx, wgt in self.check_ref.items():
            if wgt[0].active:
                ip_to_remove = wgt[1].text
                # pop() with a default tolerates a row being processed twice.
                ip_mac_database.pop(ip_to_remove, None)
        # BUG FIX: also require non-empty inputs — the hint-text comparison
        # alone let every save add a bogus '' -> '' entry that
        # database_loader() later had to purge.
        if self.add_ip.text and self.add_mac.text and self.add_ip.text != "<IP addr>" and self.add_mac.text != "<MAC addr>":
            ip_mac_database[self.add_ip.text] = self.add_mac.text

    def add_new_credentials(self, event):
        """'Enter Credentials' handler: popup asking for SSH user/password."""
        container = FloatLayout()
        self.username = TextInput(hint_text='<Username>', multiline=False, size_hint=(.75, 0.125), pos_hint={'center_x': .5, 'center_y': .85},
                                  halign='center', font_size=30)
        container.add_widget(Label(text='Enter <Username>.:', font_size='25sp', size_hint=(1, 0.2), pos_hint={'center_x': .5, 'center_y': .95}))
        container.add_widget(self.username)
        self.password = TextInput(hint_text='<Password>', multiline=False, size_hint=(.75, 0.125), pos_hint={'center_x': .5, 'center_y': .55},
                                  halign='center', font_size=30, password=True)
        container.add_widget(Label(text='Enter <Password>.:', font_size='25sp', size_hint=(1, 0.2), pos_hint={'center_x': .5, 'center_y': .65}))
        container.add_widget(self.password)
        popup = Popup(title='Add new <Username> : <Password>',
                      content=container,
                      size_hint=(0.8, 0.8))
        execute = Button(text='Save', background_color=green, size_hint=(0.8, 0.15), pos_hint={'center_x': .5, 'center_y': .15}, halign='center')
        execute.bind(on_press= self.append_to_credentials_on_tap, on_release=popup.dismiss)
        container.add_widget(execute)
        popup.open()

    def input_subnet_and_port(self, event):
        """'Discover Proxmoxes' handler: ask for start IP, count and port."""
        container = FloatLayout()
        self.ip_start = TextInput(hint_text='<IP start>', multiline=False, size_hint=(.75, 0.125), pos_hint={'center_x': .5, 'center_y': .85},
                                  halign='center', font_size=30)
        container.add_widget(Label(text='Enter <IP start>.:', font_size='25sp', size_hint=(1, 0.2), pos_hint={'center_x': .5, 'center_y': .95}))
        container.add_widget(self.ip_start)
        self.ip_end = TextInput(hint_text='<IPs to scan?>', multiline=False, size_hint=(.75, 0.125), pos_hint={'center_x': .5, 'center_y': .65},
                                halign='center', font_size=30)
        container.add_widget(Label(text='How many IPs.:', font_size='25sp', size_hint=(1, 0.2), pos_hint={'center_x': .5, 'center_y': .75}))
        container.add_widget(self.ip_end)
        self.port = TextInput(hint_text='<Port>', multiline=False, size_hint=(.75, 0.125), pos_hint={'center_x': .5, 'center_y': .45},
                              halign='center', font_size=30)
        container.add_widget(Label(text='Enter <Port>.:', font_size='25sp', size_hint=(1, 0.2), pos_hint={'center_x': .5, 'center_y': .55}))
        container.add_widget(self.port)
        popup = Popup(title='Add new <Subnet> : <Port>',
                      content=container,
                      size_hint=(0.8, 0.8))
        execute = Button(text='Save', background_color=green, size_hint=(0.8, 0.15), pos_hint={'center_x': .5, 'center_y': .15}, halign='center')
        execute.bind(on_press= self.append_to_subnet_and_port_on_tap, on_release=popup.dismiss)
        container.add_widget(execute)
        popup.open()

    def append_to_subnet_and_port_on_tap(self, *arg):
        """Validate the discovery inputs and kick off the subnet scan."""
        subnet_and_port_database = {}
        ip_range = (self.ip_start.text, self.ip_end.text)
        try:
            ip_count = int(self.ip_end.text)
        except ValueError:
            # BUG FIX: a non-numeric count used to raise an uncaught ValueError.
            GUI.generic_info_popup('<IPs to scan?>\nmust be a number')
            return
        if ip_count > 255:
            GUI.generic_info_popup('<IPs to scan?>\nmust be less than 255')
            return
        subnet_and_port_database[ip_range] = self.port.text
        discovery.convert_CIDR(subnet_and_port_database)

    def append_to_credentials_on_tap(self, event):
        """Store the credentials typed into the popup."""
        ssh_credentials[self.username.text] = self.password.text

    def append_to_database_on_tap(self, event):
        """Store a manually typed IP -> MAC pair."""
        ip_mac_database[self.ip.text] = self.mac.text

    def shutdown_on_tap(self, event):
        """'Shutdown' handler: power off the host selected in the spinner."""
        ssh_handler.shutdown_proxmox_via_ssh(self.spinner.text)
        ssh_handler.close_ssh_connection()

    def send_wol_on_tap(self, event):
        """'Send WOL' handler: wake the host selected in the spinner."""
        udp_socket.send_magic_packet(translate_ip_to_mac(self.spinner.text))

    def change_color_on_status_check(self, event):
        """'Server Status' handler: green spinner when the host answers ping."""
        colors = [red, green, blue, purple, yellow, gray]
        if udp_socket.ping_selected_ip(str(self.spinner.text)) == True:
            self.spinner.background_color = colors[1]
        else:
            self.spinner.background_color = colors[0]

    @staticmethod
    def info_popup_wrong_ip():
        """Warn that the 0.0.0.0 placeholder entry cannot be used."""
        info_popup_wrong_ip = Popup(title='Wrong IP', content=Label(text='Please enter a valid IP address!'),
                                    size_hint=(None, None), size=(300, 200))
        info_popup_wrong_ip.open()

    @staticmethod
    def generic_info_popup(reason):
        """Show an auto-sized informational popup containing *reason*.

        Sizing heuristic: ~30 characters per line, 0.25 relative height
        per line plus a 0.25 base for the title bar.
        """
        n_characters = len(str(reason))
        n_lines = n_characters // 30
        if n_characters % 30 > 0:
            n_lines += 1
        # BUG FIX: the base height was written int(0.25), which is 0 and made
        # one-line popups half as tall as intended.
        y_dimension = 0.25 + n_lines * 0.25
        info_popup = Popup(title='Info', content=Label(text=reason), size_hint=(0.65, y_dimension))
        info_popup.open()

    def build(self):
        """Assemble the main screen: host spinner plus the action buttons."""
        layout = FloatLayout(size=(100, 100))
        colors = [red, green, blue, purple, yellow, gray]
        self.spinner = Spinner(text='Pick item', values=(ip_mac_database.keys()),background_color=colors[2], size_hint=(0.8, 0.2), pos_hint={'x': 0.1, 'y': 0.75})
        btn_wol = Button(text='Send WOL',size_hint=(0.35,0.4),background_color=colors[1],pos_hint={'x':0.05,'y':0.1})
        btn_shutdown = Button(text='Shutdown',size_hint=(0.35,0.4),background_color=colors[0],pos_hint={'x':0.6,'y':0.1})
        btn_dump = Button(text='Save DB',size_hint=(0.25,0.1),background_color=colors[3],pos_hint={'x':0.05,'y':0.6})
        btn_new_entry = Button(text='Add IP-MAC',size_hint=(0.25,0.1),background_color=colors[3],pos_hint={'x':0.7,'y':0.6})
        btn_proxmox_credentials = Button(text=' Enter\nCredentials',size_hint=(0.25,0.1),background_color=colors[3],pos_hint={'x':0.375,'y':0.6})
        btn_check_status = Button(text='Server Status',size_hint=(0.375,0.075),background_color=colors[5],pos_hint={'x':0.05,'y':0.5125})
        btn_discover_proxmoxes = Button(text='Discover Proxmoxes',size_hint=(0.375,0.075),background_color=colors[4],pos_hint={'x':0.575,'y':0.5125})
        btn_shutdown.bind(on_release = self.shutdown_on_tap)
        btn_wol.bind(on_release = self.send_wol_on_tap)
        btn_dump.bind(on_release = self.dump_data)
        btn_new_entry.bind(on_release = self.database_manager)
        btn_proxmox_credentials.bind(on_release = self.add_new_credentials)
        btn_check_status.bind(on_release = self.change_color_on_status_check)
        btn_discover_proxmoxes.bind(on_release = self.input_subnet_and_port)
        layout.add_widget(self.spinner)
        layout.add_widget(btn_wol)
        layout.add_widget(btn_shutdown)
        layout.add_widget(btn_dump)
        layout.add_widget(btn_new_entry)
        layout.add_widget(btn_proxmox_credentials)
        layout.add_widget(btn_check_status)
        layout.add_widget(btn_discover_proxmoxes)
        return layout
if __name__ == "__main__":
    # Load (or initialise) the on-disk databases before the UI starts.
    database_loader()
    app = GUI()
app.run() | en | 0.809642 | # ### # Below.: these dependencies are for successful build on Android platform; unused but required! # ### # BACKEND # ### # Below.: is the BROADCAST address for your network; change accordingly # Below.: Definition of constants # Below.: Simple conversion of MAC for further use within app; trimming of ':' # Below.: by passing IP address, the function returns the MAC address of the device # Below.: reach for the database file and load it as a dictionary; create a new one if not found # reads the last line of the file; this entry holds the latest additions to the database # if the file is empty, create a new entry to it # Placeholder for IP - MAC mapping # if the file is not empty, load it as a dictionary # if the file is empty, create a new entry to it # if the file is not empty, load it as a dictionary # Below.: STRINGIFY the dictionaries and write them to the database files # Below.: ssh_handler CLASS holds functions crucial for the SSH connection # get the network device name # set the WOL mode to magic packet # shutdown all running VMs # shutdown the host # Below.: terminate established SSH connection # Below.: udp_socket CLASS holds functions crucial for opening and closing UDP socket PLUS generating the UDP WOL packet # payload for the UDP WOL packet # ping the selected IP # # WHAT IF THIRD OCTET HAS INCREASED? 
# # ### # FRONTEND # ### # Below.: simple colors for app # Below.: main function for frontend # Below.: event saving data to the database upon clicking the 'Save DB' button # starting vertical position of the widgets # Iterating through each entry in the database # A handy dictonary for referencing the checkboxes and labels (IPs and MACs) # This determines the spacing between the labels # This function is called when the 'Save changes' button is clicked # Do not add an entry if the IP and MAC are not changed # for ~30 chars in line; width should be 0.65 # single line should have 0.25 height # Below.: packing the frontend up # <= REPLACED!! | 2.486232 | 2 |
main.py | joeleeofficial/Python-Console | 0 | 6622527 | <reponame>joeleeofficial/Python-Console
user_color = "white"
console_color = "white"
pointer = "$JOE >"
pointer_color = "green"
# {} is the command given by the user
class error:
    """Error-message templates for the console; '{}' placeholders are
    filled with the offending user input via str.format()."""
    syntax_error = "Error: '{}' is not a valid command."
    name_error = "Error: '{}' is not defined."
    type_error = "Error: wrong type for '{}'"
    invalid_parameter_error = "Error: {required_params} required parameters required and {optional_params} optional parameters needed but {params_given} given."
error_color = "red"
do_help_command = True
help_command = "help"
version = "1.5.2"
language_name = "Blitz"
author = "JoeLee"
clear_command = ["clear","clr"]
from inspect import signature as s, isfunction as f
from json import loads as parse, dumps as stringify
import config
colors = {
"white": "\033[0m",
"red": "\033[31m",
"green": "\033[32m",
"blue": "\033[34m",
"purple": "\033[35",
"cyan": "\033[36m",
"orange": "\033[33m"
}
def e(c):
    """Evaluate the expression string *c* and return its value.

    Replaces the original exec-into-a-global trick with a direct eval();
    behaviour for expression strings is identical.  *c* is executed as
    code, so never feed it unvalidated input from outside the console.
    """
    return eval(c)
# Resolve the configured color names into their ANSI escape sequences.
try:
    user_color = colors[user_color]
    console_color = colors[console_color]
    pointer_color = colors[pointer_color]
    error_color = colors[error_color]
except:
    # NOTE(review): bare except — presumably guarding KeyError for an unknown
    # color name; confirm before narrowing.
    print("\033[31mInvalid colors in configuration.\033[0m")
# Startup banner (with or without the 'help' hint).
if do_help_command:
    print("{} {} 2021 © Copyright \nAll Right Reserved By Joe Lee\n > https://github.com/joeleeofficial".format(language_name,version,author))
else:
    print("{} {} 2021 © Copyright \nAll Right Reserved By Joe Lee\n> https://github.com/joeleeofficial".format(language_name,version,author))
# Console banner / help text.
help = '== Help ==\nHello There, I am Joe. A Programmer, Developer.'
# Main read-eval loop: commands are looked up as attributes of `config`.
while True:
    x = input(pointer_color + pointer + console_color + " ")
    if x.startswith(help_command + " ") and do_help_command:
        # 'help <command>': print a usage line built from the signature.
        x = x.split(help_command + " ")[1]
        try:
            if f(e("config." + x)):
                print("== Help | " + x + " ==")
                h = []
                prm = [0,0]  # [required, optional] parameter counts
                co = 0
                sig = s(e("config." + x.split(" ")[0]))
                for key in list(dict(sig.parameters).keys()):
                    # Parameters rendered as 'name=<default>' are optional.
                    if str(dict(sig.parameters)[key]).startswith("{}=".format(key)):
                        prm[1] += 1
                    else:
                        prm[0] += 1
                for i in str(s(e("config." + x)))[1:-1].split(", "):
                    # [required] vs (optional) markers in the usage string.
                    if co <= prm[0]:
                        h.append("[" + i.split("=")[0] + "]")
                    else:
                        h.append("(" + i.split("=")[0] + ")")
                    co += 1
                print("Usage: " + x + " " + ' '.join(h) + "\nParams: " + " | ".join(str(s(e("config." + x)))[1:-1].split(",")))
        except:
            # NOTE(review): bare except hides real errors; presumably meant to
            # catch AttributeError for unknown commands — confirm.
            print(error_color + error.syntax_error.format(x))
    elif x in clear_command:
        # Clear the terminal (ESC c) and reprint the banner.
        print("\033c",end="",flush=True)
        if do_help_command:
            print("{} {} 2021 © Copyright \nAll Right Reserved By Joe Lee\n > https://github.com/joeleeofficial Type Help For More Information".format(language_name,version,author))
        else:
            print("{} {} 2021 © Copyright \nAll Right Reserved By Joe Lee\n > https://github.com/joeleeofficial Type Help For Information".format(language_name,version,author))
    elif x.strip() != "":
        # Treat the first word as config.<command>, the rest as arguments.
        y = x.split(" ")
        c = x.split(" ")[0]
        del(y[0])
        y = ','.join(y)
        sig = ''
        prm = [0,0]  # [required, optional] parameter counts
        try:
            if f(e("config." + c)):
                sig = s(e("config." + x.split(" ")[0]))
                for key in list(dict(sig.parameters).keys()):
                    if str(dict(sig.parameters)[key]).startswith("{}=".format(key)):
                        prm[1] += 1
                    else:
                        prm[0] += 1
                # Accept the call when the argument count fits the signature.
                if (len(y.split(",")) == prm[0] or y.split(",") == ['']) or len(y.split(",")) <= (prm[0] + prm[1]):
                    try:
                        if not y == "":
                            e("config." + c + "(" + y + ")")
                        else:
                            try:
                                e("config." + c + "()")
                            except:
                                print("<[function] {}>".format(c))
                    except TypeError:
                        print(error_color + error.type_error.format(x))
                    except NameError:
                        print(error_color + error.name_error.format(x))
                else:
                    print(error_color + error.invalid_parameter_error.format(required_params=prm[0],optional_params=prm[1],params_given=len(y.split(","))))
            else:
                # Not callable: report it as an unknown command.
                raise AttributeError
        except (AttributeError, SyntaxError):
            print(error_color + error.syntax_error.format(x))
| user_color = "white"
console_color = "white"
pointer = "$JOE >"
pointer_color = "green"
# {} is the command given by the user
class error:
    """Printable error-message templates used by the interpreter loop below."""
    # '{}' is filled with the offending command/name via str.format().
    syntax_error = "Error: '{}' is not a valid command."
    name_error = "Error: '{}' is not defined."
    type_error = "Error: wrong type for '{}'"
    # Named placeholders filled via format(required_params=..., optional_params=..., params_given=...).
    invalid_parameter_error = "Error: {required_params} required parameters required and {optional_params} optional parameters needed but {params_given} given."
error_color = "red"
do_help_command = True
help_command = "help"
version = "1.5.2"
language_name = "Blitz"
author = "JoeLee"
clear_command = ["clear","clr"]
from inspect import signature as s, isfunction as f
from json import loads as parse, dumps as stringify
import config
colors = {
"white": "\033[0m",
"red": "\033[31m",
"green": "\033[32m",
"blue": "\033[34m",
"purple": "\033[35",
"cyan": "\033[36m",
"orange": "\033[33m"
}
def e(c):
    """Evaluate the Python expression string *c* and return its value.

    The REPL below uses this to resolve and invoke attributes of the
    ``config`` module, e.g. ``e("config.foo(1)")``.  Evaluating raw user
    input is this mini-interpreter's core feature by design, but note that
    it executes arbitrary code with full privileges.
    """
    # eval() returns the expression's value directly; the previous
    # exec('global i; i = ...') round trip through a module global is
    # unnecessary and clobbered the shared name 'i'.
    return eval(c)
try:
user_color = colors[user_color]
console_color = colors[console_color]
pointer_color = colors[pointer_color]
error_color = colors[error_color]
except:
print("\033[31mInvalid colors in configuration.\033[0m")
if do_help_command:
print("{} {} 2021 © Copyright \nAll Right Reserved By Joe Lee\n > https://github.com/joeleeofficial".format(language_name,version,author))
else:
print("{} {} 2021 © Copyright \nAll Right Reserved By Joe Lee\n> https://github.com/joeleeofficial".format(language_name,version,author))
help = '== Help ==\nHello There, I am Joe. A Programmer, Developer.'
while True:
x = input(pointer_color + pointer + console_color + " ")
if x.startswith(help_command + " ") and do_help_command:
x = x.split(help_command + " ")[1]
try:
if f(e("config." + x)):
print("== Help | " + x + " ==")
h = []
prm = [0,0]
co = 0
sig = s(e("config." + x.split(" ")[0]))
for key in list(dict(sig.parameters).keys()):
if str(dict(sig.parameters)[key]).startswith("{}=".format(key)):
prm[1] += 1
else:
prm[0] += 1
for i in str(s(e("config." + x)))[1:-1].split(", "):
if co <= prm[0]:
h.append("[" + i.split("=")[0] + "]")
else:
h.append("(" + i.split("=")[0] + ")")
co += 1
print("Usage: " + x + " " + ' '.join(h) + "\nParams: " + " | ".join(str(s(e("config." + x)))[1:-1].split(",")))
except:
print(error_color + error.syntax_error.format(x))
elif x in clear_command:
print("\033c",end="",flush=True)
if do_help_command:
print("{} {} 2021 © Copyright \nAll Right Reserved By Joe Lee\n > https://github.com/joeleeofficial Type Help For More Information".format(language_name,version,author))
else:
print("{} {} 2021 © Copyright \nAll Right Reserved By Joe Lee\n > https://github.com/joeleeofficial Type Help For Information".format(language_name,version,author))
elif x.strip() != "":
y = x.split(" ")
c = x.split(" ")[0]
del(y[0])
y = ','.join(y)
sig = ''
prm = [0,0]
try:
if f(e("config." + c)):
sig = s(e("config." + x.split(" ")[0]))
for key in list(dict(sig.parameters).keys()):
if str(dict(sig.parameters)[key]).startswith("{}=".format(key)):
prm[1] += 1
else:
prm[0] += 1
if (len(y.split(",")) == prm[0] or y.split(",") == ['']) or len(y.split(",")) <= (prm[0] + prm[1]):
try:
if not y == "":
e("config." + c + "(" + y + ")")
else:
try:
e("config." + c + "()")
except:
print("<[function] {}>".format(c))
except TypeError:
print(error_color + error.type_error.format(x))
except NameError:
print(error_color + error.name_error.format(x))
else:
print(error_color + error.invalid_parameter_error.format(required_params=prm[0],optional_params=prm[1],params_given=len(y.split(","))))
else:
raise AttributeError
except (AttributeError, SyntaxError):
print(error_color + error.syntax_error.format(x)) | en | 0.984118 | # {} is the command given by the user | 3.159999 | 3 |
pylibressl/utils.py | yl3dy/pylibressl | 2 | 6622528 | """Miscellaneous useful utilities from LibreSSL."""
from . import _libressl
def secure_compare(rhs, lhs):
    """Securely compare byte strings.

    Constant-time comparison of byte strings using ``CRYPTO_memcmp`` from
    LibreSSL. Note that when byte string lengths are different, exception is
    raised at once.

    :param rhs: first byte string operand.
    :param lhs: second byte string operand.
    :returns: True when the byte strings are equal, False otherwise.
    :raises ValueError: if an argument is not ``bytes`` or lengths differ.
    """
    ffi, clib = _libressl.ffi, _libressl.lib
    # isinstance() is the idiomatic type check and, unlike the previous
    # type(x) != type(b'') comparison, also accepts bytes subclasses.
    if not isinstance(rhs, bytes) or not isinstance(lhs, bytes):
        raise ValueError('Comparison arguments should be byte strings')
    if len(rhs) != len(lhs):
        # Deliberate early exit (documented above): the constant-time
        # guarantee only covers equal-length inputs.
        raise ValueError('Arguments should be of the same length')
    # Copy the Python bytes into C buffers for the FFI call.
    c_rhs = ffi.new('unsigned char[]', rhs)
    c_lhs = ffi.new('unsigned char[]', lhs)
    length = len(rhs)
    # CRYPTO_memcmp returns 0 when the buffers are identical.
    comp_result = clib.CRYPTO_memcmp(c_rhs, c_lhs, length)
    return comp_result == 0
| """Miscellaneous useful utilities from LibreSSL."""
from . import _libressl
def secure_compare(rhs, lhs):
    """Securely compare byte strings.

    Constant-time comparison of byte strings using ``CRYPTO_memcmp`` from
    LibreSSL. Note that when byte string lengths are different, exception is
    raised at once.

    :param rhs: first byte string operand.
    :param lhs: second byte string operand.
    :returns: True when the byte strings are equal, False otherwise.
    :raises ValueError: if an argument is not ``bytes`` or lengths differ.
    """
    ffi, clib = _libressl.ffi, _libressl.lib
    # NOTE(review): type() comparison rejects bytes subclasses;
    # isinstance(x, bytes) would be the idiomatic check.
    if type(rhs) != type(b'') or type(lhs) != type(b''):
        raise ValueError('Comparison arguments should be byte strings')
    # Length mismatch raises immediately; the constant-time property only
    # applies to equal-length inputs (documented above).
    if len(rhs) != len(lhs):
        raise ValueError('Arguments should be of the same length')
    # Copy the Python bytes into C buffers for the FFI call.
    c_rhs = ffi.new('unsigned char[]', rhs)
    c_lhs = ffi.new('unsigned char[]', lhs)
    length = len(rhs)
    # CRYPTO_memcmp returns 0 when the buffers are identical.
    comp_result = clib.CRYPTO_memcmp(c_rhs, c_lhs, length)
    return comp_result == 0
| en | 0.74522 | Miscellaneous useful utilities from LibreSSL. Securely compare byte strings. Constant-time comparison of byte strings using ``CRYPTO_memcmp`` from LibreSSL. Note that when byte string lengths are different, exception is raised at once. | 3.139888 | 3 |
VirtualBox-5.0.0/src/VBox/ValidationKit/testmanager/core/report.py | egraba/vbox_openbsd | 1 | 6622529 | # -*- coding: utf-8 -*-
# $Id: report.py $
"""
Test Manager - Report models.
"""
__copyright__ = \
"""
Copyright (C) 2012-2015 Oracle Corporation
This file is part of VirtualBox Open Source Edition (OSE), as
available from http://www.virtualbox.org. This file is free software;
you can redistribute it and/or modify it under the terms of the GNU
General Public License (GPL) as published by the Free Software
Foundation, in version 2 as it comes in the "COPYING" file of the
VirtualBox OSE distribution. VirtualBox OSE is distributed in the
hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
The contents of this file may alternatively be used under the terms
of the Common Development and Distribution License Version 1.0
(CDDL) only, as it comes in the "COPYING.CDDL" file of the
VirtualBox OSE distribution, in which case the provisions of the
CDDL are applicable instead of those of the GPL.
You may elect to license modified versions of this file under the
terms and conditions of either the GPL or the CDDL or both.
"""
__version__ = "$Revision: 101412 $"
# Validation Kit imports.
from testmanager.core.base import ModelLogicBase, TMExceptionBase;
from testmanager.core.build import BuildCategoryData;
from testmanager.core.dbobjcache import DatabaseObjCache;
from testmanager.core.testbox import TestBoxData;
from common import constants;
class ReportModelBase(ModelLogicBase): # pylint: disable=R0903
    """
    Something all report logic(/miner) classes inherit from.

    Holds the report window configuration (reference time, number of periods,
    hours per period) and the report subject, and produces the matching SQL
    FROM/WHERE fragments for the concrete report models.
    """
    ## @name Report subjects
    ## @{
    ksSubEverything = 'Everything';
    ksSubSchedGroup = 'SchedGroup';
    ksSubTestGroup = 'TestGroup';
    ksSubTestCase = 'TestCase';
    ksSubTestCaseArgs = 'TestCaseArgs';
    ksSubTestBox = 'TestBox';
    ksSubBuild = 'Build';
    ## @}
    kasSubjects = [ ksSubEverything, ksSubSchedGroup, ksSubTestGroup, ksSubTestCase, ksSubTestBox, ksSubBuild, ];
    ## @name TestStatus_T
    # @{
    ksTestStatus_Running = 'running';
    ksTestStatus_Success = 'success';
    ksTestStatus_Skipped = 'skipped';
    ksTestStatus_BadTestBox = 'bad-testbox';
    ksTestStatus_Aborted = 'aborted';
    ksTestStatus_Failure = 'failure';
    ksTestStatus_TimedOut = 'timed-out';
    ksTestStatus_Rebooted = 'rebooted';
    ## @}
    def __init__(self, oDb, tsNow, cPeriods, cHoursPerPeriod, sSubject, aidSubjects):
        """
        oDb             - Database connection (passed on to ModelLogicBase).
        tsNow           - Report reference timestamp; None means 'now'.
        cPeriods        - Number of periods the report covers.
        cHoursPerPeriod - Length of one period in hours.
        sSubject        - One of the ksSub* constants above.
        aidSubjects     - Database IDs of the subject entities.
        """
        ModelLogicBase.__init__(self, oDb);
        # Public so the report generator can easily access them.
        self.tsNow = tsNow; # (Can be None.)
        self.cPeriods = cPeriods;
        self.cHoursPerPeriod = cHoursPerPeriod;
        self.sSubject = sSubject;
        self.aidSubjects = aidSubjects;
    def getExtraSubjectTables(self):
        """
        Returns a string with any extra tables needed by the subject. Each
        table name is prefixed by a comma, so can be appended to a FROM
        statement.
        """
        # Only the scheduling-group subject needs to join TestBoxes in.
        if self.sSubject == self.ksSubSchedGroup:
            return ', TestBoxes';
        return '';
    def getExtraSubjectWhereExpr(self):
        """
        Returns additional WHERE expression relating to the report subject. It starts
        with an AND so that it can simply be appended to the WHERE clause.
        """
        if self.sSubject == self.ksSubEverything:
            return '';
        if self.sSubject == self.ksSubSchedGroup:
            # Select the TestBoxes row version that was valid when the test
            # set was created, i.e. tsEffective <= tsCreated < tsExpire.
            # Bugfix: the operators were inverted before (tsEffective >
            # tsCreated AND tsExpire <= tsCreated), which describes an empty
            # interval and can never match any row.
            sWhere = ' AND TestBoxes.idTestBox = TestSets.idTestBox\n' \
                     ' AND TestBoxes.tsEffective <= TestSets.tsCreated\n' \
                     ' AND TestBoxes.tsExpire > TestSets.tsCreated\n' \
                     ' AND TestBoxes.idSchedGroup';
        elif self.sSubject == self.ksSubTestGroup:
            sWhere = ' AND TestSets.idTestGroup';
        elif self.sSubject == self.ksSubTestCase:
            sWhere = ' AND TestSets.idTestCase';
        elif self.sSubject == self.ksSubTestCaseArgs:
            sWhere = ' AND TestSets.idTestCaseArgs';
        elif self.sSubject == self.ksSubTestBox:
            sWhere = ' AND TestSets.idTestBox';
        elif self.sSubject == self.ksSubBuild:
            sWhere = ' AND TestSets.idBuild';
        else:
            raise TMExceptionBase(self.sSubject);
        # Append '= id' for a single subject, 'IN (id, ...)' for several.
        if len(self.aidSubjects) == 1:
            sWhere += self._oDb.formatBindArgs(' = %s\n', (self.aidSubjects[0],));
        else:
            assert len(self.aidSubjects) > 0;
            sWhere += self._oDb.formatBindArgs(' IN (%s', (self.aidSubjects[0],));
            for i in range(1, len(self.aidSubjects)):
                sWhere += self._oDb.formatBindArgs(', %s', (self.aidSubjects[i],));
            sWhere += ')\n';
        return sWhere;
    def getExtraWhereExprForPeriod(self, iPeriod):
        """
        Returns additional WHERE expression for getting test sets for the
        specified period. It starts with an AND so that it can simply be
        appended to the WHERE clause.
        """
        if self.tsNow is None:
            sNow = 'CURRENT_TIMESTAMP';
        else:
            sNow = self._oDb.formatBindArgs('%s::TIMESTAMP', (self.tsNow,));
        # iPeriod 0 yields the window furthest back in time; the last period
        # ends at sNow (cHoursEnd == 0).
        cHoursStart = (self.cPeriods - iPeriod ) * self.cHoursPerPeriod;
        cHoursEnd = (self.cPeriods - iPeriod - 1) * self.cHoursPerPeriod;
        if cHoursEnd == 0:
            return ' AND TestSets.tsDone >= (%s - interval \'%u hours\')\n' \
                   ' AND TestSets.tsDone < %s\n' \
                   % (sNow, cHoursStart, sNow);
        return ' AND TestSets.tsDone >= (%s - interval \'%u hours\')\n' \
               ' AND TestSets.tsDone < (%s - interval \'%u hours\')\n' \
               % (sNow, cHoursStart, sNow, cHoursEnd);
    def getPeriodDesc(self, iPeriod):
        """
        Returns the period description, usually for graph data.
        """
        if iPeriod == 0:
            return 'now';
        # Use days when a period is exactly one day long, hours otherwise.
        if self.cHoursPerPeriod == 24:
            return '%dd ago' % (iPeriod, );
        return '%dh ago' % (iPeriod * self.cHoursPerPeriod, );
class ReportLazyModel(ReportModelBase): # pylint: disable=R0903
    """
    The 'lazy bird' report model class.
    We may want to have several classes, maybe one for each report even. But,
    I'm thinking that's a bit overkill so we'll start with this and split it
    if/when it becomes necessary.
    """
    ## Collapses the full TestStatus_T value set onto three simplified
    ## buckets (success / skipped / failure); 'running' maps to itself.
    kdsStatusSimplificationMap = {
        ReportModelBase.ksTestStatus_Running: ReportModelBase.ksTestStatus_Running,
        ReportModelBase.ksTestStatus_Success: ReportModelBase.ksTestStatus_Success,
        ReportModelBase.ksTestStatus_Skipped: ReportModelBase.ksTestStatus_Skipped,
        ReportModelBase.ksTestStatus_BadTestBox: ReportModelBase.ksTestStatus_Skipped,
        ReportModelBase.ksTestStatus_Aborted: ReportModelBase.ksTestStatus_Skipped,
        ReportModelBase.ksTestStatus_Failure: ReportModelBase.ksTestStatus_Failure,
        ReportModelBase.ksTestStatus_TimedOut: ReportModelBase.ksTestStatus_Failure,
        ReportModelBase.ksTestStatus_Rebooted: ReportModelBase.ksTestStatus_Failure,
    };
    def getSuccessRates(self):
        """
        Gets the success rates of the subject in the specified period.
        Returns an array of data per period (0 is the oldest, self.cPeriods-1 is
        the latest) where each entry is a status (TestStatus_T) dictionary with
        the number of occurences of each final status (i.e. not running).
        """
        # NOTE(review): the resulting order actually puts the most recent
        # period at index 0 (matching getPeriodDesc(0) == 'now'), which seems
        # to contradict the docstring above -- verify against the callers.
        aoAllPeriods = [];
        for iPeriod in range(self.cPeriods):
            # Count the finished test sets per status inside this window.
            sQuery = 'SELECT enmStatus, COUNT(TestSets.idTestSet)\n' \
                     'FROM TestSets' + self.getExtraSubjectTables() + '\n' \
                     'WHERE enmStatus <> \'running\'\n' \
                   + self.getExtraSubjectWhereExpr() \
                   + self.getExtraWhereExprForPeriod(iPeriod) \
                   + 'GROUP BY enmStatus\n';
            self._oDb.execute(sQuery);
            dStatusCounts = {
                self.ksTestStatus_Skipped: 0,
                self.ksTestStatus_Failure: 0,
                self.ksTestStatus_Success: 0,
            };
            for aoRow in self._oDb.fetchAll():
                sBucket = self.kdsStatusSimplificationMap[aoRow[0]]
                dStatusCounts[sBucket] = dStatusCounts.get(sBucket, 0) + aoRow[1];
            # The simplification map only ever yields the pre-seeded keys.
            assert len(dStatusCounts) == 3;
            aoAllPeriods.append(dStatusCounts);
        # The loop ran oldest-window first; reverse to reproduce the order
        # previously produced by insert(0, ...).
        aoAllPeriods.reverse();
        return aoAllPeriods;
class ReportGraphModel(ReportModelBase): # pylint: disable=R0903
    """
    Extended report model used when generating the more complicated graphs
    detailing results, time elapsed and values over time.
    """
    ## @name Subject ID types.
    ## These prefix the values in the aidSubjects array. The prefix is
    ## followed by a colon and then a list of string IDs. Following the prefix
    ## is one or more string table IDs separated by colons. These are used to
    ## drill down the exact test result we're looking for, by matching against
    ## TestResult::idStrName (in the db).
    ## @{
    ksTypeResult = 'result';
    ksTypeElapsed = 'elapsed';
    ## The last string table ID gives the name of the value.
    ksTypeValue = 'value';
    ## List of types.
    kasTypes = (ksTypeResult, ksTypeElapsed, ksTypeValue);
    ## @}
    class SampleSource(object):
        """ A sample source. """
        def __init__(self, sType, aidStrTests, idStrValue):
            # One of ReportGraphModel.kasTypes.
            self.sType = sType;
            # Test name string-table IDs, leaf first (see getTestResultConditions).
            self.aidStrTests = aidStrTests;
            # Value name string-table ID; only set for ksTypeValue sources.
            self.idStrValue = idStrValue;
        def getTestResultTables(self):
            """ Retrieves the list of TestResults tables to join with."""
            sRet = '';
            # One aliased TestResults table (TR0, TR1, ...) per test name level.
            for i in range(len(self.aidStrTests)):
                sRet += ' TestResults TR%u,\n' % (i,);
            return sRet;
        def getTestResultConditions(self):
            """ Retrieves the join conditions for the TestResults tables."""
            sRet = '';
            cItems = len(self.aidStrTests);
            # Chain TR0..TRn-1 parent/child so the aliases walk up the test
            # result tree; aidStrTests is matched in reverse (leaf = TR0).
            for i in range(cItems - 1):
                sRet += ' AND TR%u.idStrName = %u\n' \
                        ' AND TR%u.idTestResultParent = TR%u.idTestResult\n' \
                        % ( i, self.aidStrTests[cItems - i - 1], i, i + 1 );
            sRet += ' AND TR%u.idStrName = %u\n' % (cItems - 1, self.aidStrTests[0]);
            return sRet;
    class DataSeries(object):
        """ A data series. """
        def __init__(self, oCache, idBuildCategory, idTestBox, idTestCase, idTestCaseArgs, iUnit):
            _ = oCache;
            self.idBuildCategory = idBuildCategory;
            self.oBuildCategory = oCache.getBuildCategory(idBuildCategory);
            self.idTestBox = idTestBox;
            self.oTestBox = oCache.getTestBox(idTestBox);
            # Exactly one of idTestCase/idTestCaseArgs is set, depending on
            # whether testcase variations are separated (see fOnTestCase).
            self.idTestCase = idTestCase;
            self.idTestCaseArgs = idTestCaseArgs;
            if idTestCase is not None:
                self.oTestCase = oCache.getTestCase(idTestCase);
                self.oTestCaseArgs = None;
            else:
                self.oTestCaseArgs = oCache.getTestCaseArgs(idTestCaseArgs);
                self.oTestCase = oCache.getTestCase(self.oTestCaseArgs.idTestCase);
            self.iUnit = iUnit;
            # Six parallel arrays.
            self.aiRevisions = []; # The X values.
            self.aiValues = []; # The Y values.
            self.aiErrorBarBelow = []; # The Y value minimum errorbars, relative to the Y value (positive).
            self.aiErrorBarAbove = []; # The Y value maximum errorbars, relative to the Y value (positive).
            self.acSamples = []; # The number of samples at this X value.
            self.aoRevInfo = []; # VcsRevisionData objects for each revision. Empty/SQL-NULL objects if no info.
    class DataSeriesCollection(object):
        """ A collection of data series corresponding to one input sample source. """
        def __init__(self, oSampleSrc, asTests, sValue = None):
            self.sType = oSampleSrc.sType;
            self.aidStrTests = oSampleSrc.aidStrTests;
            self.asTests = list(asTests);
            self.idStrValue = oSampleSrc.idStrValue;
            self.sValue = sValue;
            self.aoSeries = [];
        def addDataSeries(self, oDataSeries):
            """ Appends a data series to the collection. """
            self.aoSeries.append(oDataSeries);
            return oDataSeries;
    def __init__(self, oDb, tsNow, cPeriods, cHoursPerPeriod, sSubject, aidSubjects, # pylint: disable=R0913
                 aidTestBoxes, aidBuildCats, aidTestCases, fSepTestVars):
        """
        In addition to the ReportModelBase parameters: aidTestBoxes,
        aidBuildCats and aidTestCases filter the data (empty = no filter);
        fSepTestVars separates testcase variations into their own series.
        Raises TMExceptionBase when an aidSubjects entry is malformed.
        """
        assert(sSubject == self.ksSubEverything); # dummy
        ReportModelBase.__init__(self, oDb, tsNow, cPeriods, cHoursPerPeriod, sSubject, aidSubjects);
        self.aidTestBoxes = aidTestBoxes;
        self.aidBuildCats = aidBuildCats;
        self.aidTestCases = aidTestCases;
        self.fOnTestCase = not fSepTestVars; # (Separates testcase variations into separate data series.)
        self.oCache = DatabaseObjCache(self._oDb, self.tsNow, None, self.cPeriods * self.cHoursPerPeriod);
        # Quickly validate and convert the subject "IDs".
        self.aoLookups = [];
        for sCur in self.aidSubjects:
            # Expected form: '<type>:<idStr1>[:<idStr2>...]'.
            asParts = sCur.split(':');
            if len(asParts) < 2:
                raise TMExceptionBase('Invalid graph value "%s"' % (sCur,));
            sType = asParts[0];
            if sType not in ReportGraphModel.kasTypes:
                raise TMExceptionBase('Invalid graph value type "%s" (full: "%s")' % (sType, sCur,));
            aidStrTests = [];
            for sIdStr in asParts[1:]:
                try: idStr = int(sIdStr);
                except: raise TMExceptionBase('Invalid graph value id "%s" (full: "%s")' % (sIdStr, sCur,));
                if idStr < 0:
                    raise TMExceptionBase('Invalid graph value id "%u" (full: "%s")' % (idStr, sCur,));
                aidStrTests.append(idStr);
            idStrValue = None;
            if sType == ReportGraphModel.ksTypeValue:
                # For value sources the last ID names the value itself.
                idStrValue = aidStrTests.pop();
            self.aoLookups.append(ReportGraphModel.SampleSource(sType, aidStrTests, idStrValue));
        # done
    def getExtraWhereExprForTotalPeriod(self, sTimestampField):
        """
        Returns additional WHERE expression for getting test sets for the
        specified period. It starts with an AND so that it can simply be
        appended to the WHERE clause.
        """
        return self.getExtraWhereExprForTotalPeriodEx(sTimestampField, sTimestampField, True);
    def getExtraWhereExprForTotalPeriodEx(self, sStartField = 'tsCreated', sEndField = 'tsDone', fLeadingAnd = True):
        """
        Returns additional WHERE expression for getting test sets for the
        specified period.
        """
        if self.tsNow is None:
            sNow = 'CURRENT_TIMESTAMP';
        else:
            sNow = self._oDb.formatBindArgs('%s::TIMESTAMP', (self.tsNow,));
        sRet = ' AND %s >= (%s - interval \'%u hours\')\n' \
               ' AND %s <= %s\n' \
               % ( sStartField, sNow, self.cPeriods * self.cHoursPerPeriod,
                   sEndField, sNow);
        if not fLeadingAnd:
            # Strip the leading 'AND ' (the assert pins the expected layout).
            assert sRet[8] == ' ' and sRet[7] == 'D';
            return sRet[9:];
        return sRet;
    def _getEligibleTestSetPeriod(self, sPrefix = 'TestSets.', fLeadingAnd = False):
        """
        Returns additional WHERE expression for getting TestSets rows
        potentially relevant for the selected period.
        """
        if self.tsNow is None:
            sNow = 'CURRENT_TIMESTAMP';
        else:
            sNow = self._oDb.formatBindArgs('%s::TIMESTAMP', (self.tsNow,));
        # The 2nd line is a performance hack on TestSets. It nudges postgresql
        # into useing the TestSetsCreatedDoneIdx index instead of doing a table
        # scan when we look for eligible bits there.
        # ASSUMES no relevant test runs longer than 7 days!
        sRet = ' AND %stsCreated <= %s\n' \
               ' AND %stsCreated >= (%s - interval \'%u hours\' - interval \'%u days\')\n' \
               ' AND %stsDone >= (%s - interval \'%u hours\')\n' \
               % ( sPrefix, sNow,
                   sPrefix, sNow, self.cPeriods * self.cHoursPerPeriod, 7,
                   sPrefix, sNow, self.cPeriods * self.cHoursPerPeriod, );
        if not fLeadingAnd:
            # Strip the leading 'AND ' (the assert pins the expected layout).
            assert sRet[8] == ' ' and sRet[7] == 'D';
            return sRet[9:];
        return sRet;
    def _getNameStrings(self, aidStrTests):
        """ Returns an array of names corresponding to the array of string table entries. """
        return [self.oCache.getTestResultString(idStr) for idStr in aidStrTests];
    def fetchGraphData(self):
        """
        Fetches the graph data.

        Returns a list of DataSeriesCollection instances, one per sample
        source in self.aoLookups, each holding one DataSeries per distinct
        (build category, testbox, testcase[/args], unit) combination.
        """
        sWantedTestCaseId = 'idTestCase' if self.fOnTestCase else 'idTestCaseArgs';
        aoRet = [];
        for oLookup in self.aoLookups:
            #
            # Set up the result collection.
            #
            if oLookup.sType == self.ksTypeValue:
                oCollection = self.DataSeriesCollection(oLookup, self._getNameStrings(oLookup.aidStrTests),
                                                        self.oCache.getTestResultString(oLookup.idStrValue));
            else:
                oCollection = self.DataSeriesCollection(oLookup, self._getNameStrings(oLookup.aidStrTests));
            #
            # Construct the query.
            #
            sQuery = 'SELECT Builds.iRevision,\n' \
                     ' TestSets.idBuildCategory,\n' \
                     ' TestSets.idTestBox,\n' \
                     ' TestSets.' + sWantedTestCaseId + ',\n';
            # Aggregate columns: unit, min, avg, max and sample count.
            if oLookup.sType == self.ksTypeValue:
                sQuery += ' TestResultValues.iUnit as iUnit,\n' \
                          ' MIN(TestResultValues.lValue),\n' \
                          ' CAST(ROUND(AVG(TestResultValues.lValue)) AS BIGINT),\n' \
                          ' MAX(TestResultValues.lValue),\n' \
                          ' COUNT(TestResultValues.lValue)\n';
            elif oLookup.sType == self.ksTypeElapsed:
                sQuery += ' %u as iUnit,\n' \
                          ' CAST((EXTRACT(EPOCH FROM MIN(TR0.tsElapsed)) * 1000) AS INTEGER),\n' \
                          ' CAST((EXTRACT(EPOCH FROM AVG(TR0.tsElapsed)) * 1000) AS INTEGER),\n' \
                          ' CAST((EXTRACT(EPOCH FROM MAX(TR0.tsElapsed)) * 1000) AS INTEGER),\n' \
                          ' COUNT(TR0.tsElapsed)\n' \
                          % (constants.valueunit.MS,);
            else:
                sQuery += ' %u as iUnit,\n'\
                          ' MIN(TR0.cErrors),\n' \
                          ' CAST(ROUND(AVG(TR0.cErrors)) AS INTEGER),\n' \
                          ' MAX(TR0.cErrors),\n' \
                          ' COUNT(TR0.cErrors)\n' \
                          % (constants.valueunit.OCCURRENCES,);
            if oLookup.sType == self.ksTypeValue:
                sQuery += 'FROM TestResultValues,\n';
                sQuery += ' TestSets,\n'
                sQuery += oLookup.getTestResultTables();
            else:
                sQuery += 'FROM ' + oLookup.getTestResultTables().lstrip();
                sQuery += ' TestSets,\n';
            sQuery += ' Builds\n';
            if oLookup.sType == self.ksTypeValue:
                sQuery += 'WHERE TestResultValues.idStrName = %u\n' % ( oLookup.idStrValue, );
                sQuery += self.getExtraWhereExprForTotalPeriod('TestResultValues.tsCreated');
                sQuery += ' AND TestResultValues.idTestSet = TestSets.idTestSet\n';
                sQuery += self._getEligibleTestSetPeriod(fLeadingAnd = True);
            else:
                sQuery += 'WHERE ' + (self.getExtraWhereExprForTotalPeriod('TR0.tsCreated').lstrip()[4:]).lstrip();
                sQuery += ' AND TR0.idTestSet = TestSets.idTestSet\n';
            # Optional testbox / build category / testcase filtering.
            if len(self.aidTestBoxes) == 1:
                sQuery += ' AND TestSets.idTestBox = %u\n' % (self.aidTestBoxes[0],);
            elif len(self.aidTestBoxes) > 0:
                sQuery += ' AND TestSets.idTestBox IN (' + ','.join([str(i) for i in self.aidTestBoxes]) + ')\n';
            if len(self.aidBuildCats) == 1:
                sQuery += ' AND TestSets.idBuildCategory = %u\n' % (self.aidBuildCats[0],);
            elif len(self.aidBuildCats) > 0:
                sQuery += ' AND TestSets.idBuildCategory IN (' + ','.join([str(i) for i in self.aidBuildCats]) + ')\n';
            if len(self.aidTestCases) == 1:
                sQuery += ' AND TestSets.idTestCase = %u\n' % (self.aidTestCases[0],);
            elif len(self.aidTestCases) > 0:
                sQuery += ' AND TestSets.idTestCase IN (' + ','.join([str(i) for i in self.aidTestCases]) + ')\n';
            if oLookup.sType == self.ksTypeElapsed:
                sQuery += ' AND TestSets.enmStatus = \'%s\'::TestStatus_T\n' % (self.ksTestStatus_Success,);
            if oLookup.sType == self.ksTypeValue:
                sQuery += ' AND TestResultValues.idTestResult = TR0.idTestResult\n'
                sQuery += self.getExtraWhereExprForTotalPeriod('TR0.tsCreated'); # For better index matching in some cases.
            if oLookup.sType != self.ksTypeResult:
                sQuery += ' AND TR0.enmStatus = \'%s\'::TestStatus_T\n' % (self.ksTestStatus_Success,);
            sQuery += oLookup.getTestResultConditions();
            sQuery += ' AND TestSets.idBuild = Builds.idBuild\n';
            sQuery += 'GROUP BY TestSets.idBuildCategory,\n' \
                      ' TestSets.idTestBox,\n' \
                      ' TestSets.' + sWantedTestCaseId + ',\n' \
                      ' iUnit,\n' \
                      ' Builds.iRevision\n';
            # ORDER BY mirrors GROUP BY so the row loop below can detect
            # series boundaries by comparing against the previous row.
            sQuery += 'ORDER BY TestSets.idBuildCategory,\n' \
                      ' TestSets.idTestBox,\n' \
                      ' TestSets.' + sWantedTestCaseId + ',\n' \
                      ' iUnit,\n' \
                      ' Builds.iRevision\n';
            #
            # Execute it and collect the result.
            #
            sCurRepository = None;
            dRevisions = {};
            oLastSeries = None;
            idLastBuildCat = -1;
            idLastTestBox = -1;
            idLastTestCase = -1;
            iLastUnit = -1;
            self._oDb.execute(sQuery);
            for aoRow in self._oDb.fetchAll(): # Fetching all here so we can make cache queries below.
                # A change in any key column starts a new data series.
                if aoRow[1] != idLastBuildCat \
                or aoRow[2] != idLastTestBox \
                or aoRow[3] != idLastTestCase \
                or aoRow[4] != iLastUnit:
                    idLastBuildCat = aoRow[1];
                    idLastTestBox = aoRow[2];
                    idLastTestCase = aoRow[3];
                    iLastUnit = aoRow[4];
                    if self.fOnTestCase:
                        oLastSeries = self.DataSeries(self.oCache, idLastBuildCat, idLastTestBox,
                                                      idLastTestCase, None, iLastUnit);
                    else:
                        oLastSeries = self.DataSeries(self.oCache, idLastBuildCat, idLastTestBox,
                                                      None, idLastTestCase, iLastUnit);
                    oCollection.addDataSeries(oLastSeries);
                    # Preload VCS info per repository batch.
                    if oLastSeries.oBuildCategory.sRepository != sCurRepository:
                        if sCurRepository is not None:
                            self.oCache.preloadVcsRevInfo(sCurRepository, dRevisions.keys());
                        sCurRepository = oLastSeries.oBuildCategory.sRepository
                        dRevisions = {};
                oLastSeries.aiRevisions.append(aoRow[0]);
                oLastSeries.aiValues.append(aoRow[6]);
                oLastSeries.aiErrorBarBelow.append(aoRow[6] - aoRow[5]);
                oLastSeries.aiErrorBarAbove.append(aoRow[7] - aoRow[6]);
                oLastSeries.acSamples.append(aoRow[8]);
                dRevisions[aoRow[0]] = 1;
            if sCurRepository is not None:
                self.oCache.preloadVcsRevInfo(sCurRepository, dRevisions.keys());
            del dRevisions;
            #
            # Look up the VCS revision details.
            #
            # NOTE(review): sCurRepository is whatever repository the *last*
            # series used; if the collection spans several repositories the
            # earlier series get info looked up in the wrong one -- verify.
            for oSeries in oCollection.aoSeries:
                for i in range(len(oSeries.aiRevisions)):
                    oSeries.aoRevInfo.append(self.oCache.getVcsRevInfo(sCurRepository, oSeries.aiRevisions[i]));
            aoRet.append(oCollection);
        return aoRet;
    def getEligibleTestBoxes(self):
        """
        Returns a list of TestBoxData objects with eligible testboxes for
        the total period of time defined for this graph.
        """
        # Taking the simple way out now, getting all active testboxes at the
        # time without filtering out on sample sources.
        # 1. Collect the relevant testbox generation IDs.
        self._oDb.execute('SELECT DISTINCT idTestBox, idGenTestBox\n'
                          'FROM TestSets\n'
                          'WHERE ' + self._getEligibleTestSetPeriod(fLeadingAnd = False) +
                          'ORDER BY idTestBox, idGenTestBox DESC');
        idPrevTestBox = -1;
        asIdGenTestBoxes = [];
        # Keep only the newest generation per testbox (DESC ordering above).
        for _ in range(self._oDb.getRowCount()):
            aoRow = self._oDb.fetchOne();
            if aoRow[0] != idPrevTestBox:
                idPrevTestBox = aoRow[0];
                asIdGenTestBoxes.append(str(aoRow[1]));
        # 2. Query all the testbox data in one go.
        aoRet = [];
        if len(asIdGenTestBoxes) > 0:
            self._oDb.execute('SELECT *\n'
                              'FROM TestBoxes\n'
                              'WHERE idGenTestBox in (' + ','.join(asIdGenTestBoxes) + ')\n'
                              'ORDER BY sName');
            for _ in range(self._oDb.getRowCount()):
                aoRet.append(TestBoxData().initFromDbRow(self._oDb.fetchOne()));
        return aoRet;
    def getEligibleBuildCategories(self):
        """
        Returns a list of BuildCategoryData objects with eligible build
        categories for the total period of time defined for this graph. In
        addition it will add any currently selected categories that aren't
        really relevant to the period, just to simplify the WUI code.
        """
        # Taking the simple way out now, getting all used build cat without
        # any testbox or testcase filtering.
        sSelectedBuildCats = '';
        if len(self.aidBuildCats) > 0:
            sSelectedBuildCats = ' OR idBuildCategory IN (' + ','.join([str(i) for i in self.aidBuildCats]) + ')\n';
        self._oDb.execute('SELECT DISTINCT *\n'
                          'FROM BuildCategories\n'
                          'WHERE idBuildCategory IN (\n'
                          ' SELECT DISTINCT idBuildCategory\n'
                          ' FROM TestSets\n'
                          ' WHERE ' + self._getEligibleTestSetPeriod(fLeadingAnd = False) +
                          ')\n'
                          + sSelectedBuildCats +
                          'ORDER BY sProduct,\n'
                          ' sBranch,\n'
                          ' asOsArches,\n'
                          ' sType\n');
        aoRet = [];
        for _ in range(self._oDb.getRowCount()):
            aoRet.append(BuildCategoryData().initFromDbRow(self._oDb.fetchOne()));
        return aoRet;
| # -*- coding: utf-8 -*-
# $Id: report.py $
"""
Test Manager - Report models.
"""
__copyright__ = \
"""
Copyright (C) 2012-2015 Oracle Corporation
This file is part of VirtualBox Open Source Edition (OSE), as
available from http://www.virtualbox.org. This file is free software;
you can redistribute it and/or modify it under the terms of the GNU
General Public License (GPL) as published by the Free Software
Foundation, in version 2 as it comes in the "COPYING" file of the
VirtualBox OSE distribution. VirtualBox OSE is distributed in the
hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
The contents of this file may alternatively be used under the terms
of the Common Development and Distribution License Version 1.0
(CDDL) only, as it comes in the "COPYING.CDDL" file of the
VirtualBox OSE distribution, in which case the provisions of the
CDDL are applicable instead of those of the GPL.
You may elect to license modified versions of this file under the
terms and conditions of either the GPL or the CDDL or both.
"""
__version__ = "$Revision: 101412 $"
# Validation Kit imports.
from testmanager.core.base import ModelLogicBase, TMExceptionBase;
from testmanager.core.build import BuildCategoryData;
from testmanager.core.dbobjcache import DatabaseObjCache;
from testmanager.core.testbox import TestBoxData;
from common import constants;
class ReportModelBase(ModelLogicBase): # pylint: disable=R0903
    """
    Something all report logic(/miner) classes inherit from.

    Holds the report window configuration (reference time, number of periods,
    hours per period) and the report subject, and produces the matching SQL
    FROM/WHERE fragments for the concrete report models.
    """
    ## @name Report subjects
    ## @{
    ksSubEverything = 'Everything';
    ksSubSchedGroup = 'SchedGroup';
    ksSubTestGroup = 'TestGroup';
    ksSubTestCase = 'TestCase';
    ksSubTestCaseArgs = 'TestCaseArgs';
    ksSubTestBox = 'TestBox';
    ksSubBuild = 'Build';
    ## @}
    kasSubjects = [ ksSubEverything, ksSubSchedGroup, ksSubTestGroup, ksSubTestCase, ksSubTestBox, ksSubBuild, ];
    ## @name TestStatus_T
    # @{
    ksTestStatus_Running = 'running';
    ksTestStatus_Success = 'success';
    ksTestStatus_Skipped = 'skipped';
    ksTestStatus_BadTestBox = 'bad-testbox';
    ksTestStatus_Aborted = 'aborted';
    ksTestStatus_Failure = 'failure';
    ksTestStatus_TimedOut = 'timed-out';
    ksTestStatus_Rebooted = 'rebooted';
    ## @}
    def __init__(self, oDb, tsNow, cPeriods, cHoursPerPeriod, sSubject, aidSubjects):
        """
        oDb             - Database connection (passed on to ModelLogicBase).
        tsNow           - Report reference timestamp; None means 'now'.
        cPeriods        - Number of periods the report covers.
        cHoursPerPeriod - Length of one period in hours.
        sSubject        - One of the ksSub* constants above.
        aidSubjects     - Database IDs of the subject entities.
        """
        ModelLogicBase.__init__(self, oDb);
        # Public so the report generator can easily access them.
        self.tsNow = tsNow; # (Can be None.)
        self.cPeriods = cPeriods;
        self.cHoursPerPeriod = cHoursPerPeriod;
        self.sSubject = sSubject;
        self.aidSubjects = aidSubjects;
    def getExtraSubjectTables(self):
        """
        Returns a string with any extra tables needed by the subject. Each
        table name is prefixed by a comma, so can be appended to a FROM
        statement.
        """
        # Only the scheduling-group subject needs to join TestBoxes in.
        if self.sSubject == self.ksSubSchedGroup:
            return ', TestBoxes';
        return '';
    def getExtraSubjectWhereExpr(self):
        """
        Returns additional WHERE expression relating to the report subject. It starts
        with an AND so that it can simply be appended to the WHERE clause.
        """
        if self.sSubject == self.ksSubEverything:
            return '';
        if self.sSubject == self.ksSubSchedGroup:
            # Select the TestBoxes row version that was valid when the test
            # set was created, i.e. tsEffective <= tsCreated < tsExpire.
            # Bugfix: the operators were inverted before (tsEffective >
            # tsCreated AND tsExpire <= tsCreated), which describes an empty
            # interval and can never match any row.
            sWhere = ' AND TestBoxes.idTestBox = TestSets.idTestBox\n' \
                     ' AND TestBoxes.tsEffective <= TestSets.tsCreated\n' \
                     ' AND TestBoxes.tsExpire > TestSets.tsCreated\n' \
                     ' AND TestBoxes.idSchedGroup';
        elif self.sSubject == self.ksSubTestGroup:
            sWhere = ' AND TestSets.idTestGroup';
        elif self.sSubject == self.ksSubTestCase:
            sWhere = ' AND TestSets.idTestCase';
        elif self.sSubject == self.ksSubTestCaseArgs:
            sWhere = ' AND TestSets.idTestCaseArgs';
        elif self.sSubject == self.ksSubTestBox:
            sWhere = ' AND TestSets.idTestBox';
        elif self.sSubject == self.ksSubBuild:
            sWhere = ' AND TestSets.idBuild';
        else:
            raise TMExceptionBase(self.sSubject);
        # Append '= id' for a single subject, 'IN (id, ...)' for several.
        if len(self.aidSubjects) == 1:
            sWhere += self._oDb.formatBindArgs(' = %s\n', (self.aidSubjects[0],));
        else:
            assert len(self.aidSubjects) > 0;
            sWhere += self._oDb.formatBindArgs(' IN (%s', (self.aidSubjects[0],));
            for i in range(1, len(self.aidSubjects)):
                sWhere += self._oDb.formatBindArgs(', %s', (self.aidSubjects[i],));
            sWhere += ')\n';
        return sWhere;
    def getExtraWhereExprForPeriod(self, iPeriod):
        """
        Returns additional WHERE expression for getting test sets for the
        specified period. It starts with an AND so that it can simply be
        appended to the WHERE clause.
        """
        if self.tsNow is None:
            sNow = 'CURRENT_TIMESTAMP';
        else:
            sNow = self._oDb.formatBindArgs('%s::TIMESTAMP', (self.tsNow,));
        # iPeriod 0 yields the window furthest back in time; the last period
        # ends at sNow (cHoursEnd == 0).
        cHoursStart = (self.cPeriods - iPeriod ) * self.cHoursPerPeriod;
        cHoursEnd = (self.cPeriods - iPeriod - 1) * self.cHoursPerPeriod;
        if cHoursEnd == 0:
            return ' AND TestSets.tsDone >= (%s - interval \'%u hours\')\n' \
                   ' AND TestSets.tsDone < %s\n' \
                   % (sNow, cHoursStart, sNow);
        return ' AND TestSets.tsDone >= (%s - interval \'%u hours\')\n' \
               ' AND TestSets.tsDone < (%s - interval \'%u hours\')\n' \
               % (sNow, cHoursStart, sNow, cHoursEnd);
    def getPeriodDesc(self, iPeriod):
        """
        Returns the period description, usually for graph data.
        """
        if iPeriod == 0:
            return 'now';
        # Use days when a period is exactly one day long, hours otherwise.
        if self.cHoursPerPeriod == 24:
            return '%dd ago' % (iPeriod, );
        return '%dh ago' % (iPeriod * self.cHoursPerPeriod, );
class ReportLazyModel(ReportModelBase): # pylint: disable=R0903
    """
    The 'lazy bird' report model class.
    We may want to have several classes, maybe one for each report even. But,
    I'm thinking that's a bit overkill so we'll start with this and split it
    if/when it becomes necessary.
    """
    # Collapses the full TestStatus_T value set onto three simplified
    # buckets (success / skipped / failure); 'running' maps to itself.
    kdsStatusSimplificationMap = {
        ReportModelBase.ksTestStatus_Running: ReportModelBase.ksTestStatus_Running,
        ReportModelBase.ksTestStatus_Success: ReportModelBase.ksTestStatus_Success,
        ReportModelBase.ksTestStatus_Skipped: ReportModelBase.ksTestStatus_Skipped,
        ReportModelBase.ksTestStatus_BadTestBox: ReportModelBase.ksTestStatus_Skipped,
        ReportModelBase.ksTestStatus_Aborted: ReportModelBase.ksTestStatus_Skipped,
        ReportModelBase.ksTestStatus_Failure: ReportModelBase.ksTestStatus_Failure,
        ReportModelBase.ksTestStatus_TimedOut: ReportModelBase.ksTestStatus_Failure,
        ReportModelBase.ksTestStatus_Rebooted: ReportModelBase.ksTestStatus_Failure,
    };
    def getSuccessRates(self):
        """
        Gets the success rates of the subject in the specified period.
        Returns an array of data per period (0 is the oldes, self.cPeriods-1 is
        the latest) where each entry is a status (TestStatus_T) dictionary with
        the number of occurences of each final status (i.e. not running).
        """
        adPeriods = [];
        for iPeriod in range(self.cPeriods):
            # Count the finished test sets per status inside this window.
            self._oDb.execute('SELECT enmStatus, COUNT(TestSets.idTestSet)\n'
                              'FROM TestSets' + self.getExtraSubjectTables() +'\n'
                              'WHERE enmStatus <> \'running\'\n'
                              + self.getExtraSubjectWhereExpr()
                              + self.getExtraWhereExprForPeriod(iPeriod)
                              +
                              'GROUP BY enmStatus\n');
            dRet = \
            {
                self.ksTestStatus_Skipped: 0,
                self.ksTestStatus_Failure: 0,
                self.ksTestStatus_Success: 0,
            };
            for aoRow in self._oDb.fetchAll():
                sKey = self.kdsStatusSimplificationMap[aoRow[0]]
                if sKey in dRet:
                    dRet[sKey] += aoRow[1];
                else:
                    dRet[sKey] = aoRow[1];
            # The simplification map only ever yields the pre-seeded keys.
            assert len(dRet) == 3;
            # NOTE(review): insert(0, ...) makes index 0 the *newest* period,
            # which seems to contradict the docstring above -- verify callers.
            adPeriods.insert(0, dRet);
        return adPeriods;
class ReportGraphModel(ReportModelBase): # pylint: disable=R0903
    """
    Extended report model used when generating the more complicated graphs
    detailing results, time elapsed and values over time.
    """

    ## @name Subject ID types.
    ## These prefix the values in the aidSubjects array. The prefix is
    ## followed by a colon and then a list of string IDs. Following the prefix
    ## is one or more string table IDs separated by colons. These are used to
    ## drill down the exact test result we're looking for, by matching against
    ## TestResult::idStrName (in the db).
    ## @{
    ksTypeResult = 'result';
    ksTypeElapsed = 'elapsed';
    ## The last string table ID gives the name of the value.
    ksTypeValue = 'value';
    ## List of types.
    kasTypes = (ksTypeResult, ksTypeElapsed, ksTypeValue);
    ## @}

    class SampleSource(object):
        """ A sample source. """
        def __init__(self, sType, aidStrTests, idStrValue):
            self.sType = sType;              # One of ReportGraphModel.kasTypes.
            self.aidStrTests = aidStrTests;  # Test name string IDs, outermost first (TR0 joins the last entry).
            self.idStrValue = idStrValue;    # Value name string ID (ksTypeValue only, otherwise None).

        def getTestResultTables(self):
            """ Retrieves the list of TestResults tables to join with."""
            sRet = '';
            for i in range(len(self.aidStrTests)):
                sRet += ' TestResults TR%u,\n' % (i,);
            return sRet;

        def getTestResultConditions(self):
            """ Retrieves the join conditions for the TestResults tables."""
            sRet = '';
            cItems = len(self.aidStrTests);
            # Chain TR0 (the innermost result) up through its parents; each
            # TRi must match the corresponding name string and parent TRi+1.
            for i in range(cItems - 1):
                sRet += ' AND TR%u.idStrName = %u\n' \
                        ' AND TR%u.idTestResultParent = TR%u.idTestResult\n' \
                        % ( i, self.aidStrTests[cItems - i - 1], i, i + 1 );
            sRet += ' AND TR%u.idStrName = %u\n' % (cItems - 1, self.aidStrTests[0]);
            return sRet;

    class DataSeries(object):
        """ A data series. """
        def __init__(self, oCache, idBuildCategory, idTestBox, idTestCase, idTestCaseArgs, iUnit):
            _ = oCache;  # (No-op; presumably leftover lint placation - oCache is used below.)
            self.idBuildCategory = idBuildCategory;
            self.oBuildCategory = oCache.getBuildCategory(idBuildCategory);
            self.idTestBox = idTestBox;
            self.oTestBox = oCache.getTestBox(idTestBox);
            self.idTestCase = idTestCase;
            self.idTestCaseArgs = idTestCaseArgs;
            # Either the testcase or a specific variation identifies the
            # series; resolve the testcase via the variation when needed.
            if idTestCase is not None:
                self.oTestCase = oCache.getTestCase(idTestCase);
                self.oTestCaseArgs = None;
            else:
                self.oTestCaseArgs = oCache.getTestCaseArgs(idTestCaseArgs);
                self.oTestCase = oCache.getTestCase(self.oTestCaseArgs.idTestCase);
            self.iUnit = iUnit;
            # Six parallel arrays.
            self.aiRevisions = []; # The X values.
            self.aiValues = []; # The Y values.
            self.aiErrorBarBelow = []; # The Y value minimum errorbars, relative to the Y value (positive).
            self.aiErrorBarAbove = []; # The Y value maximum errorbars, relative to the Y value (positive).
            self.acSamples = []; # The number of samples at this X value.
            self.aoRevInfo = []; # VcsRevisionData objects for each revision. Empty/SQL-NULL objects if no info.

    class DataSeriesCollection(object):
        """ A collection of data series corresponding to one input sample source. """
        def __init__(self, oSampleSrc, asTests, sValue = None):
            self.sType = oSampleSrc.sType;
            self.aidStrTests = oSampleSrc.aidStrTests;
            self.asTests = list(asTests);  # Resolved test name strings.
            self.idStrValue = oSampleSrc.idStrValue;
            self.sValue = sValue;          # Resolved value name (ksTypeValue only).
            self.aoSeries = [];

        def addDataSeries(self, oDataSeries):
            """ Appends a data series to the collection. """
            self.aoSeries.append(oDataSeries);
            return oDataSeries;
def __init__(self, oDb, tsNow, cPeriods, cHoursPerPeriod, sSubject, aidSubjects, # pylint: disable=R0913
             aidTestBoxes, aidBuildCats, aidTestCases, fSepTestVars):
    """
    Initializes the graph model, validating and converting the graph
    subject specifications in aidSubjects ('type:strid[:strid...]', see the
    ksType* docs) into SampleSource instances stored in self.aoLookups.

    Raises TMExceptionBase on malformed subject specifications.
    """
    assert(sSubject == self.ksSubEverything); # dummy
    ReportModelBase.__init__(self, oDb, tsNow, cPeriods, cHoursPerPeriod, sSubject, aidSubjects);
    self.aidTestBoxes = aidTestBoxes;
    self.aidBuildCats = aidBuildCats;
    self.aidTestCases = aidTestCases;
    self.fOnTestCase = not fSepTestVars; # (Separates testcase variations into separate data series.)
    self.oCache = DatabaseObjCache(self._oDb, self.tsNow, None, self.cPeriods * self.cHoursPerPeriod);

    # Quickly validate and convert the subject "IDs".
    self.aoLookups = [];
    for sCur in self.aidSubjects:
        asParts = sCur.split(':');
        if len(asParts) < 2:
            raise TMExceptionBase('Invalid graph value "%s"' % (sCur,));
        sType = asParts[0];
        if sType not in ReportGraphModel.kasTypes:
            raise TMExceptionBase('Invalid graph value type "%s" (full: "%s")' % (sType, sCur,));
        aidStrTests = [];
        for sIdStr in asParts[1:]:
            # Narrowed from a bare 'except:' so KeyboardInterrupt/SystemExit
            # are no longer swallowed and rethrown as a TM exception.
            try: idStr = int(sIdStr);
            except ValueError: raise TMExceptionBase('Invalid graph value id "%s" (full: "%s")' % (sIdStr, sCur,));
            if idStr < 0:
                raise TMExceptionBase('Invalid graph value id "%u" (full: "%s")' % (idStr, sCur,));
            aidStrTests.append(idStr);
        idStrValue = None;
        if sType == ReportGraphModel.ksTypeValue:
            # For 'value' subjects the trailing ID names the value itself.
            idStrValue = aidStrTests.pop();
        self.aoLookups.append(ReportGraphModel.SampleSource(sType, aidStrTests, idStrValue));
    # done
def getExtraWhereExprForTotalPeriod(self, sTimestampField):
    """
    Returns an additional WHERE expression restricting sTimestampField to
    the total period covered by this graph. Starts with an AND so that it
    can simply be appended to an existing WHERE clause.

    Convenience wrapper around getExtraWhereExprForTotalPeriodEx using the
    same field for both the start and the end bound.
    """
    return self.getExtraWhereExprForTotalPeriodEx(sTimestampField, sTimestampField, True);
def getExtraWhereExprForTotalPeriodEx(self, sStartField = 'tsCreated', sEndField = 'tsDone', fLeadingAnd = True):
    """
    Returns additional WHERE expression for getting test sets for the
    specified period.

    When fLeadingAnd is False, the leading ' AND ' is sliced off so the
    expression can open a WHERE clause.
    """
    if self.tsNow is None:
        sNow = 'CURRENT_TIMESTAMP';
    else:
        sNow = self._oDb.formatBindArgs('%s::TIMESTAMP', (self.tsNow,));

    sRet = ' AND %s >= (%s - interval \'%u hours\')\n' \
           ' AND %s <= %s\n' \
           % ( sStartField, sNow, self.cPeriods * self.cHoursPerPeriod,
               sEndField, sNow);

    if not fLeadingAnd:
        # NOTE(review): these fixed offsets assume the literal above starts
        # with exactly 5 spaces before 'AND'; verify the original leading
        # whitespace survived the reformatting of this file.
        assert sRet[8] == ' ' and sRet[7] == 'D';
        return sRet[9:];
    return sRet;
def _getEligibleTestSetPeriod(self, sPrefix = 'TestSets.', fLeadingAnd = False):
    """
    Returns additional WHERE expression for getting TestSets rows
    potentially relevant for the selected period.
    """
    if self.tsNow is None:
        sNow = 'CURRENT_TIMESTAMP';
    else:
        sNow = self._oDb.formatBindArgs('%s::TIMESTAMP', (self.tsNow,));

    # The 2nd line is a performance hack on TestSets. It nudges postgresql
    # into using the TestSetsCreatedDoneIdx index instead of doing a table
    # scan when we look for eligible bits there.
    # ASSUMES no relevant test runs longer than 7 days!
    sRet = ' AND %stsCreated <= %s\n' \
           ' AND %stsCreated >= (%s - interval \'%u hours\' - interval \'%u days\')\n' \
           ' AND %stsDone >= (%s - interval \'%u hours\')\n' \
           % ( sPrefix, sNow,
               sPrefix, sNow, self.cPeriods * self.cHoursPerPeriod, 7,
               sPrefix, sNow, self.cPeriods * self.cHoursPerPeriod, );

    if not fLeadingAnd:
        # Slice off the leading ' AND '.
        # NOTE(review): fixed offsets assume 5 spaces before 'AND' in the
        # literal above - confirm the original whitespace.
        assert sRet[8] == ' ' and sRet[7] == 'D';
        return sRet[9:];
    return sRet;
def _getNameStrings(self, aidStrTests):
    """ Returns an array of names corresponding to the array of string table entries. """
    return list(map(self.oCache.getTestResultString, aidStrTests));
def fetchGraphData(self):
    """
    Fetches the graph data for all the configured sample sources.

    Returns a list of DataSeriesCollection objects, one per entry in
    self.aoLookups, each holding one DataSeries per distinct
    (build category, testbox, testcase/variation, unit) combination.
    """
    sWantedTestCaseId = 'idTestCase' if self.fOnTestCase else 'idTestCaseArgs';

    aoRet = [];
    for oLookup in self.aoLookups:
        #
        # Set up the result collection.
        #
        if oLookup.sType == self.ksTypeValue:
            oCollection = self.DataSeriesCollection(oLookup, self._getNameStrings(oLookup.aidStrTests),
                                                    self.oCache.getTestResultString(oLookup.idStrValue));
        else:
            oCollection = self.DataSeriesCollection(oLookup, self._getNameStrings(oLookup.aidStrTests));

        #
        # Construct the query.
        #
        sQuery = 'SELECT Builds.iRevision,\n' \
                 ' TestSets.idBuildCategory,\n' \
                 ' TestSets.idTestBox,\n' \
                 ' TestSets.' + sWantedTestCaseId + ',\n';
        # The aggregate columns (unit, min, avg, max, sample count) depend on
        # the sample type: measured value, elapsed time, or error count.
        if oLookup.sType == self.ksTypeValue:
            sQuery += ' TestResultValues.iUnit as iUnit,\n' \
                      ' MIN(TestResultValues.lValue),\n' \
                      ' CAST(ROUND(AVG(TestResultValues.lValue)) AS BIGINT),\n' \
                      ' MAX(TestResultValues.lValue),\n' \
                      ' COUNT(TestResultValues.lValue)\n';
        elif oLookup.sType == self.ksTypeElapsed:
            # Elapsed times are converted to integral milliseconds.
            sQuery += ' %u as iUnit,\n' \
                      ' CAST((EXTRACT(EPOCH FROM MIN(TR0.tsElapsed)) * 1000) AS INTEGER),\n' \
                      ' CAST((EXTRACT(EPOCH FROM AVG(TR0.tsElapsed)) * 1000) AS INTEGER),\n' \
                      ' CAST((EXTRACT(EPOCH FROM MAX(TR0.tsElapsed)) * 1000) AS INTEGER),\n' \
                      ' COUNT(TR0.tsElapsed)\n' \
                      % (constants.valueunit.MS,);
        else:
            sQuery += ' %u as iUnit,\n'\
                      ' MIN(TR0.cErrors),\n' \
                      ' CAST(ROUND(AVG(TR0.cErrors)) AS INTEGER),\n' \
                      ' MAX(TR0.cErrors),\n' \
                      ' COUNT(TR0.cErrors)\n' \
                      % (constants.valueunit.OCCURRENCES,);
        if oLookup.sType == self.ksTypeValue:
            sQuery += 'FROM TestResultValues,\n';
            sQuery += ' TestSets,\n'
            sQuery += oLookup.getTestResultTables();
        else:
            sQuery += 'FROM ' + oLookup.getTestResultTables().lstrip();
            sQuery += ' TestSets,\n';
        sQuery += ' Builds\n';
        if oLookup.sType == self.ksTypeValue:
            sQuery += 'WHERE TestResultValues.idStrName = %u\n' % ( oLookup.idStrValue, );
            sQuery += self.getExtraWhereExprForTotalPeriod('TestResultValues.tsCreated');
            sQuery += ' AND TestResultValues.idTestSet = TestSets.idTestSet\n';
            sQuery += self._getEligibleTestSetPeriod(fLeadingAnd = True);
        else:
            # Strip the leading 'AND ' so the expression can follow WHERE.
            sQuery += 'WHERE ' + (self.getExtraWhereExprForTotalPeriod('TR0.tsCreated').lstrip()[4:]).lstrip();
            sQuery += ' AND TR0.idTestSet = TestSets.idTestSet\n';
        # Optional testbox / build category / testcase filtering.
        if len(self.aidTestBoxes) == 1:
            sQuery += ' AND TestSets.idTestBox = %u\n' % (self.aidTestBoxes[0],);
        elif len(self.aidTestBoxes) > 0:
            sQuery += ' AND TestSets.idTestBox IN (' + ','.join([str(i) for i in self.aidTestBoxes]) + ')\n';
        if len(self.aidBuildCats) == 1:
            sQuery += ' AND TestSets.idBuildCategory = %u\n' % (self.aidBuildCats[0],);
        elif len(self.aidBuildCats) > 0:
            sQuery += ' AND TestSets.idBuildCategory IN (' + ','.join([str(i) for i in self.aidBuildCats]) + ')\n';
        if len(self.aidTestCases) == 1:
            sQuery += ' AND TestSets.idTestCase = %u\n' % (self.aidTestCases[0],);
        elif len(self.aidTestCases) > 0:
            sQuery += ' AND TestSets.idTestCase IN (' + ','.join([str(i) for i in self.aidTestCases]) + ')\n';
        if oLookup.sType == self.ksTypeElapsed:
            # Elapsed-time samples only make sense for successful runs.
            sQuery += ' AND TestSets.enmStatus = \'%s\'::TestStatus_T\n' % (self.ksTestStatus_Success,);
        if oLookup.sType == self.ksTypeValue:
            sQuery += ' AND TestResultValues.idTestResult = TR0.idTestResult\n'
            sQuery += self.getExtraWhereExprForTotalPeriod('TR0.tsCreated'); # For better index matching in some cases.
        if oLookup.sType != self.ksTypeResult:
            sQuery += ' AND TR0.enmStatus = \'%s\'::TestStatus_T\n' % (self.ksTestStatus_Success,);
        sQuery += oLookup.getTestResultConditions();
        sQuery += ' AND TestSets.idBuild = Builds.idBuild\n';
        sQuery += 'GROUP BY TestSets.idBuildCategory,\n' \
                  ' TestSets.idTestBox,\n' \
                  ' TestSets.' + sWantedTestCaseId + ',\n' \
                  ' iUnit,\n' \
                  ' Builds.iRevision\n';
        sQuery += 'ORDER BY TestSets.idBuildCategory,\n' \
                  ' TestSets.idTestBox,\n' \
                  ' TestSets.' + sWantedTestCaseId + ',\n' \
                  ' iUnit,\n' \
                  ' Builds.iRevision\n';

        #
        # Execute it and collect the result.
        #
        sCurRepository = None;
        dRevisions = {};
        oLastSeries = None;
        idLastBuildCat = -1;
        idLastTestBox = -1;
        idLastTestCase = -1;
        iLastUnit = -1;
        self._oDb.execute(sQuery);
        for aoRow in self._oDb.fetchAll(): # Fetching all here so we can make cache queries below.
            # Start a new series whenever any grouping key changes (rows are
            # ordered by exactly these keys in the query above).
            if aoRow[1] != idLastBuildCat \
            or aoRow[2] != idLastTestBox \
            or aoRow[3] != idLastTestCase \
            or aoRow[4] != iLastUnit:
                idLastBuildCat = aoRow[1];
                idLastTestBox = aoRow[2];
                idLastTestCase = aoRow[3];
                iLastUnit = aoRow[4];
                if self.fOnTestCase:
                    oLastSeries = self.DataSeries(self.oCache, idLastBuildCat, idLastTestBox,
                                                  idLastTestCase, None, iLastUnit);
                else:
                    oLastSeries = self.DataSeries(self.oCache, idLastBuildCat, idLastTestBox,
                                                  None, idLastTestCase, iLastUnit);
                oCollection.addDataSeries(oLastSeries);
                # Preload VCS revision info one repository at a time to batch
                # the cache queries.
                if oLastSeries.oBuildCategory.sRepository != sCurRepository:
                    if sCurRepository is not None:
                        self.oCache.preloadVcsRevInfo(sCurRepository, dRevisions.keys());
                    sCurRepository = oLastSeries.oBuildCategory.sRepository
                    dRevisions = {};
            oLastSeries.aiRevisions.append(aoRow[0]);
            oLastSeries.aiValues.append(aoRow[6]);
            oLastSeries.aiErrorBarBelow.append(aoRow[6] - aoRow[5]);
            oLastSeries.aiErrorBarAbove.append(aoRow[7] - aoRow[6]);
            oLastSeries.acSamples.append(aoRow[8]);
            dRevisions[aoRow[0]] = 1;
        if sCurRepository is not None:
            self.oCache.preloadVcsRevInfo(sCurRepository, dRevisions.keys());
        del dRevisions;

        #
        # Look up the VCS revision details.
        #
        # NOTE(review): this uses sCurRepository (the last repository seen)
        # for every series in the collection; looks wrong if a collection
        # spans multiple repositories - confirm.
        for oSeries in oCollection.aoSeries:
            for i in range(len(oSeries.aiRevisions)):
                oSeries.aoRevInfo.append(self.oCache.getVcsRevInfo(sCurRepository, oSeries.aiRevisions[i]));
        aoRet.append(oCollection);

    return aoRet;
def getEligibleTestBoxes(self):
    """
    Returns a list of TestBoxData objects with eligible testboxes for
    the total period of time defined for this graph.
    """
    # Taking the simple way out now, getting all active testboxes at the
    # time without filtering out on sample sources.

    # 1. Collect the relevant testbox generation IDs.
    self._oDb.execute('SELECT DISTINCT idTestBox, idGenTestBox\n'
                      'FROM TestSets\n'
                      'WHERE ' + self._getEligibleTestSetPeriod(fLeadingAnd = False) +
                      'ORDER BY idTestBox, idGenTestBox DESC');
    idPrevTestBox = -1;
    asIdGenTestBoxes = [];
    for _ in range(self._oDb.getRowCount()):
        aoRow = self._oDb.fetchOne();
        if aoRow[0] != idPrevTestBox:
            # Keep only the newest generation of each box (DESC ordering).
            idPrevTestBox = aoRow[0];
            asIdGenTestBoxes.append(str(aoRow[1]));

    # 2. Query all the testbox data in one go.
    aoRet = [];
    if len(asIdGenTestBoxes) > 0:
        self._oDb.execute('SELECT *\n'
                          'FROM TestBoxes\n'
                          'WHERE idGenTestBox in (' + ','.join(asIdGenTestBoxes) + ')\n'
                          'ORDER BY sName');
        for _ in range(self._oDb.getRowCount()):
            aoRet.append(TestBoxData().initFromDbRow(self._oDb.fetchOne()));

    return aoRet;
def getEligibleBuildCategories(self):
    """
    Returns a list of BuildCategoryData objects with eligible build
    categories for the total period of time defined for this graph. In
    addition it will add any currently selected categories that aren't
    really relevant to the period, just to simplify the WUI code.
    """
    # Taking the simple way out now, getting all used build cat without
    # any testbox or testcase filtering.
    sSelectedBuildCats = '';
    if len(self.aidBuildCats) > 0:
        # Always include the user's current selection, relevant or not.
        sSelectedBuildCats = ' OR idBuildCategory IN (' + ','.join([str(i) for i in self.aidBuildCats]) + ')\n';

    self._oDb.execute('SELECT DISTINCT *\n'
                      'FROM BuildCategories\n'
                      'WHERE idBuildCategory IN (\n'
                      ' SELECT DISTINCT idBuildCategory\n'
                      ' FROM TestSets\n'
                      ' WHERE ' + self._getEligibleTestSetPeriod(fLeadingAnd = False) +
                      ')\n'
                      + sSelectedBuildCats +
                      'ORDER BY sProduct,\n'
                      ' sBranch,\n'
                      ' asOsArches,\n'
                      ' sType\n');
    aoRet = [];
    for _ in range(self._oDb.getRowCount()):
        aoRet.append(BuildCategoryData().initFromDbRow(self._oDb.fetchOne()));

    return aoRet;
| en | 0.806658 | # -*- coding: utf-8 -*- # $Id: report.py $ Test Manager - Report models. Copyright (C) 2012-2015 Oracle Corporation This file is part of VirtualBox Open Source Edition (OSE), as available from http://www.virtualbox.org. This file is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License (GPL) as published by the Free Software Foundation, in version 2 as it comes in the "COPYING" file of the VirtualBox OSE distribution. VirtualBox OSE is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY of any kind. The contents of this file may alternatively be used under the terms of the Common Development and Distribution License Version 1.0 (CDDL) only, as it comes in the "COPYING.CDDL" file of the VirtualBox OSE distribution, in which case the provisions of the CDDL are applicable instead of those of the GPL. You may elect to license modified versions of this file under the terms and conditions of either the GPL or the CDDL or both. # Validation Kit imports. # pylint: disable=R0903 Something all report logic(/miner) classes inherit from. ## @name Report subjects ## @{ ## @} ## @name TestStatus_T # @{ ## @} # Public so the report generator can easily access them. # (Can be None.) Returns a string with any extra tables needed by the subject. Each table name is prefixed by a comma, so can be appended to a FROM statement. Returns additional WHERE expression relating to the report subject. It starts with an AND so that it can simply be appended to the WHERE clause. Returns additional WHERE expression for getting test sets for the specified period. It starts with an AND so that it can simply be appended to the WHERE clause. Returns the period description, usually for graph data. # pylint: disable=R0903 The 'lazy bird' report model class. We may want to have several classes, maybe one for each report even. 
But, I'm thinking that's a bit overkill so we'll start with this and split it if/when it becomes necessary. Gets the success rates of the subject in the specified period. Returns an array of data per period (0 is the oldes, self.cPeriods-1 is the latest) where each entry is a status (TestStatus_T) dictionary with the number of occurences of each final status (i.e. not running). # pylint: disable=R0903 Extended report model used when generating the more complicated graphs detailing results, time elapsed and values over time. ## @name Subject ID types. ## These prefix the values in the aidSubjects array. The prefix is ## followed by a colon and then a list of string IDs. Following the prefix ## is one or more string table IDs separated by colons. These are used to ## drill down the exact test result we're looking for, by matching against ## TestResult::idStrName (in the db). ## @{ ## The last string table ID gives the name of the value. ## List of types. ## @} A sample source. Retrieves the list of TestResults tables to join with. Retrieves the join conditions for the TestResults tables. A data series. # Six parallel arrays. # The X values. # The Y values. # The Y value minimum errorbars, relative to the Y value (positive). # The Y value maximum errorbars, relative to the Y value (positive). # The number of samples at this X value. # VcsRevisionData objects for each revision. Empty/SQL-NULL objects if no info. A collection of data series corresponding to one input sample source. Appends a data series to the collection. # pylint: disable=R0913 # dummy # (Separates testcase variations into separate data series.) # Quickly validate and convert the subject "IDs". # done Returns additional WHERE expression for getting test sets for the specified period. It starts with an AND so that it can simply be appended to the WHERE clause. Returns additional WHERE expression for getting test sets for the specified period. 
Returns additional WHERE expression for getting TestSets rows potentially relevant for the selected period. # The 2nd line is a performance hack on TestSets. It nudges postgresql # into useing the TestSetsCreatedDoneIdx index instead of doing a table # scan when we look for eligible bits there. # ASSUMES no relevant test runs longer than 7 days! Returns an array of names corresponding to the array of string table entries. returns data # # Set up the result collection. # # # Construct the query. # # For better index matching in some cases. # # Execute it and collect the result. # # Fetching all here so we can make cache queries below. # # Look up the VCS revision details. # Returns a list of TestBoxData objects with eligible testboxes for the total period of time defined for this graph. # Taking the simple way out now, getting all active testboxes at the # time without filtering out on sample sources. # 1. Collect the relevant testbox generation IDs. # 2. Query all the testbox data in one go. Returns a list of BuildCategoryData objects with eligible build categories for the total period of time defined for this graph. In addition it will add any currently selected categories that aren't really relevant to the period, just to simplify the WUI code. # Taking the simple way out now, getting all used build cat without # any testbox or testcase filtering. | 1.573635 | 2 |
jplacer/rename_tree.py | johned0/EdwardsLab | 30 | 6622530 | <reponame>johned0/EdwardsLab<filename>jplacer/rename_tree.py
"""
Rename the internal nodes in the pplacer tree using my awesome NCBI taxonomy
"""
import os
import sys
import argparse
import json
import re
from ete3 import Tree
from ete3.parser.newick import NewickError
from taxon import get_taxonomy_db, get_taxonomy
def load_jplacer(jpf, verbose=False):
    """
    Read and parse a jplacer (JSON) file.

    :param jpf: path to the jplacer file
    :param verbose: unused; kept for interface symmetry with the other steps
    :return: the parsed jplacer data structure
    """
    with open(jpf, 'r') as jplacer_handle:
        return json.load(jplacer_handle)
def parse_jplacer_tree(data, verbose=False):
    """
    Extract the tree from the jplacer data structure and make it into an ete3 object
    :param data: the jplacer data structure
    :param verbose: unused
    :return: an ete3 Tree object
    """
    try:
        tree = Tree(data['tree'], quoted_node_names=True, format=1)
    except NewickError as n:
        # jplacer trees may carry {edge_number} annotations after branch
        # lengths which ete3 cannot parse; strip them
        # ("...:0.1{42}" -> "...:0.1", trailing "{42};" -> ";") and retry.
        tt = re.sub(r'(\:[\d\.]+){\d+}', r'\1', data['tree'])
        tt = re.sub(r'{\d+};$', ';', tt)
        tree = Tree(tt, quoted_node_names=True, format=1)
    return tree
def rename_nodes(tree, verbose=False):
    """
    Rename the internal nodes of the tree to the lowest taxonomic rank shared
    by all leaves beneath them (e.g. "Escherichia r_genus").

    :param tree: the ete3 tree whose leaf names embed NCBI taxids as [1234]
    :param verbose: report progress and problems on stderr
    :return: the tree with renamed internal nodes
    """
    # connect to the SQL database
    c = get_taxonomy_db()
    wanted_levels = ['superkingdom', 'phylum', 'class', 'order', 'family', 'genus', 'species', 'subspecies']
    wanted_levels.reverse() # too lazy to write in reverse :)
    taxonomy = {}
    # first get all the leaves and their parents. This is just to speed things up ... maybe
    for l in tree.get_leaves():
        m = re.search(r'\[(\d+)\]', l.name)  # raw string: avoid invalid-escape warnings
        if not m:
            if verbose:
                sys.stderr.write("No taxid in {}\n".format(l.name))
            continue
        tid = m.groups()[0]
        taxonomy[l.name] = {}
        t, n = get_taxonomy(tid, c)
        if not t:
            continue
        # Walk the lineage to the root, recording the wanted ranks.
        while t.parent != 1 and t.taxid != 1:
            if t.rank in wanted_levels:
                taxonomy[l.name][t.rank] = n.scientific_name
            t, n = get_taxonomy(t.parent, c)

    # now traverse every node that is not a leaf and see if we can come up with a
    # unique name for the node!
    sys.stderr.write("Traversing the tree\n")
    for n in tree.traverse("preorder"):
        if n.is_leaf():
            continue
        sys.stderr.write("Checking {}\n".format(n.name))
        # Per rank, the set of distinct names among this node's leaves.
        taxs = {w: set() for w in wanted_levels}
        for l in n.get_leaves():
            if l.name not in taxonomy:
                continue
            for w in wanted_levels:
                if w in taxonomy[l.name]:
                    taxs[w].add(taxonomy[l.name][w])

        # which is the LOWEST level with a single taxonomy
        for w in wanted_levels:
            if len(taxs[w]) == 1:
                # Fixed: the format string had an unmatched ')' ("{} r_{})").
                newname = "{} r_{}".format(taxs[w].pop(), w)
                # Fixed: 'if verbose: True' was a no-op, so the message was
                # always written; gate it on verbose as clearly intended.
                if verbose:
                    sys.stderr.write("Changing name from: {} to {}\n".format(n.name, newname))
                n.name = newname
                break
    return tree
def reroot_tree(tree):
    """
    Reroot the tree between bacteria and archaea.
    This will only work after renaming the leaves on the tree.
    :param tree: the tree
    :return: the rerooted tree
    """
    sys.stderr.write("rerooting\n")
    for n in tree.traverse("preorder"):
        childs = n.get_children()
        # Debug output: child count, node name and concatenated child names.
        cname = ""
        for c in childs:
            cname += "| {} |".format(c.name)
        sys.stderr.write("{}\t{}\t{}\n".format(len(childs), n.name, cname))
        if len(childs) == 2:
            # Archaea/Eukaryota split: root on this parent node.
            if ("Archaea r_superkingdom" in childs[0].name and "Eukaryota r_superkingdom" in childs[1].name) or ("Archaea r_superkingdom" in childs[1].name and "Eukaryota r_superkingdom" in childs[0].name):
                tree.set_outgroup(n)
                sys.stderr.write("Rerooted on {}\n".format(n.name))
                break
            # Bacteria/Archaea split: root on the bacterial child.
            if "Bacteria r_superkingdom" in childs[0].name and "Archaea r_superkingdom" in childs[1].name:
                tree.set_outgroup(childs[0])
                sys.stderr.write("Rerooted on {}\n".format(childs[0].name))
                break
            if "Bacteria r_superkingdom" in childs[1].name and "Archaea r_superkingdom" in childs[0].name:
                tree.set_outgroup(childs[1])
                sys.stderr.write("Rerooted on {}\n".format(childs[1].name))
                break
    return tree
def write_tree(tree, outputf):
    """
    Serialize the tree to a file in newick format 1.

    :param tree: the ete3 tree to write
    :param outputf: the output filename
    """
    tree.write(outfile=outputf, format=1)
if __name__ == '__main__':
    # Command line entry point: load the placements, build and relabel the
    # tree, reroot it between bacteria and archaea, and write the result.
    parser = argparse.ArgumentParser(description='Parse a jplacer file')
    parser.add_argument('-j', help='jplacer file', required=True)
    parser.add_argument('-o', help='output file to write the tree to', required=True)
    parser.add_argument('-v', help='verbose', action='store_true')
    args = parser.parse_args()

    data = load_jplacer(args.j, args.v)
    tree = parse_jplacer_tree(data, args.v)
    tree = rename_nodes(tree, args.v)
    tree = reroot_tree(tree)
    write_tree(tree, args.o)
| """
Rename the internal nodes in the pplacer tree using my awesome NCBI taxonomy
"""
import os
import sys
import argparse
import json
import re
from ete3 import Tree
from ete3.parser.newick import NewickError
from taxon import get_taxonomy_db, get_taxonomy
def load_jplacer(jpf, verbose=False):
"""
load the jplacer file and return the tree
:param jpf: The jplacer file
:return: the data structure of the tree
"""
with open(jpf, 'r') as f:
data = json.load(f)
return data
def parse_jplacer_tree(data, verbose=False):
"""
Extract the tree from the jplacer data structure and make it into an ete3 object
:param data: the jplacer data structure
:return:
"""
try:
tree = Tree(data['tree'], quoted_node_names=True, format=1)
except NewickError as n:
tt = re.sub(r'(\:[\d\.]+){\d+}', r'\1', data['tree'])
tt = re.sub(r'{\d+};$', ';', tt)
tree = Tree(tt, quoted_node_names=True, format=1)
return tree
def rename_nodes(tree, verbose=False):
"""
Rename the nodes based on everything below me
"""
# connect to the SQL dataabase
c = get_taxonomy_db()
wanted_levels = ['superkingdom', 'phylum', 'class', 'order', 'family', 'genus', 'species', 'subspecies']
wanted_levels.reverse() # too lazy to write in reverse :)
taxonomy = {}
# first get all the leaves and their parents. This is just to speed things up ... maybe
for l in tree.get_leaves():
m = re.search('\[(\d+)\]', l.name)
if not m:
if verbose:
sys.stderr.write("No taxid in {}\n".format(l.name))
continue
tid = m.groups()[0]
taxonomy[l.name] = {}
t,n = get_taxonomy(tid, c)
if not t:
continue
while t.parent != 1 and t.taxid != 1:
if t.rank in wanted_levels:
taxonomy[l.name][t.rank] = n.scientific_name
t,n = get_taxonomy(t.parent, c)
# now traverse every node that is not a leaf and see if we can some up with a
# unique name for the node!
sys.stderr.write("Traversing the tree\n")
for n in tree.traverse("preorder"):
if n.is_leaf():
continue
sys.stderr.write("Checking {}\n".format(n.name))
taxs = {w:set() for w in wanted_levels}
for l in n.get_leaves():
if l.name not in taxonomy:
continue
for w in wanted_levels:
if w in taxonomy[l.name]:
taxs[w].add(taxonomy[l.name][w])
# which is the LOWEST level with a single taxonomy
for w in wanted_levels:
if len(taxs[w]) == 1:
newname = "{} r_{})".format(taxs[w].pop(), w)
if verbose:
True
sys.stderr.write("Changing name from: {} to {}\n".format(n.name, newname))
n.name = newname
break
return tree
def reroot_tree(tree):
"""
Reroot the tree between bacteria and archaea.
This will only work after renaming the leaves on the tree.
:param tree: the tree
"""
sys.stderr.write("rerooting\n")
for n in tree.traverse("preorder"):
childs = n.get_children()
cname = ""
for c in childs:
cname += "| {} |".format(c.name)
sys.stderr.write("{}\t{}\t{}\n".format(len(childs), n.name, cname))
if len(childs) == 2:
if ("Archaea r_superkingdom" in childs[0].name and "Eukaryota r_superkingdom" in childs[1].name) or ("Archaea r_superkingdom" in childs[1].name and "Eukaryota r_superkingdom" in childs[0].name):
tree.set_outgroup(n)
sys.stderr.write("Rerooted on {}\n".format(n.name))
break
if "Bacteria r_superkingdom" in childs[0].name and "Archaea r_superkingdom" in childs[1].name:
tree.set_outgroup(childs[0])
sys.stderr.write("Rerooted on {}\n".format(childs[0].name))
break
if "Bacteria r_superkingdom" in childs[1].name and "Archaea r_superkingdom" in childs[0].name:
tree.set_outgroup(childs[1])
sys.stderr.write("Rerooted on {}\n".format(childs[1].name))
break
return tree
def write_tree(tree, outputf):
"""
Write the tree to a file.
:param tree: The tree to write
:param outputf: The output filename
:return:
"""
tree.write(outfile=outputf, format=1)
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Parse a jplacer file')
parser.add_argument('-j', help='jplacer file', required=True)
parser.add_argument('-o', help='output file to write the tree to', required=True)
parser.add_argument('-v', help='verbose', action='store_true')
args = parser.parse_args()
data = load_jplacer(args.j, args.v)
tree = parse_jplacer_tree(data, args.v)
tree = rename_nodes(tree, args.v)
tree = reroot_tree(tree)
write_tree(tree, args.o) | en | 0.889198 | Rename the internal nodes in the pplacer tree using my awesome NCBI taxonomy load the jplacer file and return the tree :param jpf: The jplacer file :return: the data structure of the tree Extract the tree from the jplacer data structure and make it into an ete3 object :param data: the jplacer data structure :return: Rename the nodes based on everything below me # connect to the SQL dataabase # too lazy to write in reverse :) # first get all the leaves and their parents. This is just to speed things up ... maybe # now traverse every node that is not a leaf and see if we can some up with a # unique name for the node! # which is the LOWEST level with a single taxonomy Reroot the tree between bacteria and archaea. This will only work after renaming the leaves on the tree. :param tree: the tree Write the tree to a file. :param tree: The tree to write :param outputf: The output filename :return: | 2.89513 | 3 |
daemon/core/gui/dialogs/alerts.py | b00ga/core | 0 | 6622531 | <filename>daemon/core/gui/dialogs/alerts.py
"""
check engine light
"""
import tkinter as tk
from tkinter import ttk
from typing import TYPE_CHECKING
from core.api.grpc.core_pb2 import ExceptionLevel
from core.gui.dialogs.dialog import Dialog
from core.gui.themes import PADX, PADY
from core.gui.widgets import CodeText
if TYPE_CHECKING:
from core.gui.app import Application
class AlertsDialog(Dialog):
def __init__(self, app: "Application"):
super().__init__(app, "Alerts")
self.tree = None
self.codetext = None
self.alarm_map = {}
self.draw()
def draw(self):
self.top.columnconfigure(0, weight=1)
self.top.rowconfigure(0, weight=1)
self.top.rowconfigure(1, weight=1)
frame = ttk.Frame(self.top)
frame.columnconfigure(0, weight=1)
frame.rowconfigure(0, weight=1)
frame.grid(sticky="nsew", pady=PADY)
self.tree = ttk.Treeview(
frame,
columns=("time", "level", "session_id", "node", "source"),
show="headings",
)
self.tree.grid(row=0, column=0, sticky="nsew")
self.tree.column("time", stretch=tk.YES)
self.tree.heading("time", text="Time")
self.tree.column("level", stretch=tk.YES, width=100)
self.tree.heading("level", text="Level")
self.tree.column("session_id", stretch=tk.YES, width=100)
self.tree.heading("session_id", text="Session ID")
self.tree.column("node", stretch=tk.YES, width=100)
self.tree.heading("node", text="Node")
self.tree.column("source", stretch=tk.YES, width=100)
self.tree.heading("source", text="Source")
self.tree.bind("<<TreeviewSelect>>", self.click_select)
for alarm in self.app.statusbar.core_alarms:
exception = alarm.exception_event
level_name = ExceptionLevel.Enum.Name(exception.level)
insert_id = self.tree.insert(
"",
tk.END,
text=exception.date,
values=(
exception.date,
level_name,
alarm.session_id,
exception.node_id,
exception.source,
),
tags=(level_name,),
)
self.alarm_map[insert_id] = alarm
error_name = ExceptionLevel.Enum.Name(ExceptionLevel.ERROR)
self.tree.tag_configure(error_name, background="#ff6666")
fatal_name = ExceptionLevel.Enum.Name(ExceptionLevel.FATAL)
self.tree.tag_configure(fatal_name, background="#d9d9d9")
warning_name = ExceptionLevel.Enum.Name(ExceptionLevel.WARNING)
self.tree.tag_configure(warning_name, background="#ffff99")
notice_name = ExceptionLevel.Enum.Name(ExceptionLevel.NOTICE)
self.tree.tag_configure(notice_name, background="#85e085")
yscrollbar = ttk.Scrollbar(frame, orient="vertical", command=self.tree.yview)
yscrollbar.grid(row=0, column=1, sticky="ns")
self.tree.configure(yscrollcommand=yscrollbar.set)
xscrollbar = ttk.Scrollbar(frame, orient="horizontal", command=self.tree.xview)
xscrollbar.grid(row=1, sticky="ew")
self.tree.configure(xscrollcommand=xscrollbar.set)
self.codetext = CodeText(self.top)
self.codetext.text.config(state=tk.DISABLED, height=11)
self.codetext.grid(sticky="nsew", pady=PADY)
frame = ttk.Frame(self.top)
frame.grid(sticky="ew")
frame.columnconfigure(0, weight=1)
frame.columnconfigure(1, weight=1)
button = ttk.Button(frame, text="Reset", command=self.reset_alerts)
button.grid(row=0, column=0, sticky="ew", padx=PADX)
button = ttk.Button(frame, text="Close", command=self.destroy)
button.grid(row=0, column=1, sticky="ew")
def reset_alerts(self):
    """Clear the detail pane, empty the alarm tree, and drop stored alarms."""
    # NOTE(review): the CodeText widget is configured state=DISABLED elsewhere;
    # Text.delete() on a disabled widget is a no-op — confirm the pane clears.
    self.codetext.text.delete("1.0", tk.END)
    for item in self.tree.get_children():
        self.tree.delete(item)
    # Also forget the alarms held by the application status bar.
    self.app.statusbar.core_alarms.clear()
def click_select(self, event: tk.Event):
    """Show the full exception text for the alarm selected in the tree."""
    current = self.tree.selection()[0]
    alarm = self.alarm_map[current]
    # Temporarily enable the read-only text widget so it can be rewritten.
    self.codetext.text.config(state=tk.NORMAL)
    self.codetext.text.delete("1.0", "end")
    self.codetext.text.insert("1.0", alarm.exception_event.text)
    self.codetext.text.config(state=tk.DISABLED)
| <filename>daemon/core/gui/dialogs/alerts.py
"""
check engine light
"""
import tkinter as tk
from tkinter import ttk
from typing import TYPE_CHECKING
from core.api.grpc.core_pb2 import ExceptionLevel
from core.gui.dialogs.dialog import Dialog
from core.gui.themes import PADX, PADY
from core.gui.widgets import CodeText
if TYPE_CHECKING:
from core.gui.app import Application
class AlertsDialog(Dialog):
    """Dialog listing core exception alarms ("check engine light") with a
    selectable tree of alarms and a read-only detail pane below it."""

    def __init__(self, app: "Application"):
        super().__init__(app, "Alerts")
        self.tree = None       # ttk.Treeview: one row per alarm
        self.codetext = None   # CodeText pane showing the selected alarm's text
        self.alarm_map = {}    # treeview item id -> alarm object
        self.draw()

    def draw(self):
        """Build all widgets and populate the tree from the app's stored alarms."""
        self.top.columnconfigure(0, weight=1)
        self.top.rowconfigure(0, weight=1)
        self.top.rowconfigure(1, weight=1)
        # Frame holding the alarm tree and its scrollbars.
        frame = ttk.Frame(self.top)
        frame.columnconfigure(0, weight=1)
        frame.rowconfigure(0, weight=1)
        frame.grid(sticky="nsew", pady=PADY)
        self.tree = ttk.Treeview(
            frame,
            columns=("time", "level", "session_id", "node", "source"),
            show="headings",
        )
        self.tree.grid(row=0, column=0, sticky="nsew")
        self.tree.column("time", stretch=tk.YES)
        self.tree.heading("time", text="Time")
        self.tree.column("level", stretch=tk.YES, width=100)
        self.tree.heading("level", text="Level")
        self.tree.column("session_id", stretch=tk.YES, width=100)
        self.tree.heading("session_id", text="Session ID")
        self.tree.column("node", stretch=tk.YES, width=100)
        self.tree.heading("node", text="Node")
        self.tree.column("source", stretch=tk.YES, width=100)
        self.tree.heading("source", text="Source")
        self.tree.bind("<<TreeviewSelect>>", self.click_select)
        # One row per stored alarm; rows are tagged with the level name so
        # they pick up the per-level background colors configured below.
        for alarm in self.app.statusbar.core_alarms:
            exception = alarm.exception_event
            level_name = ExceptionLevel.Enum.Name(exception.level)
            insert_id = self.tree.insert(
                "",
                tk.END,
                text=exception.date,
                values=(
                    exception.date,
                    level_name,
                    alarm.session_id,
                    exception.node_id,
                    exception.source,
                ),
                tags=(level_name,),
            )
            self.alarm_map[insert_id] = alarm
        # Background colors keyed by exception level name.
        error_name = ExceptionLevel.Enum.Name(ExceptionLevel.ERROR)
        self.tree.tag_configure(error_name, background="#ff6666")
        fatal_name = ExceptionLevel.Enum.Name(ExceptionLevel.FATAL)
        self.tree.tag_configure(fatal_name, background="#d9d9d9")
        warning_name = ExceptionLevel.Enum.Name(ExceptionLevel.WARNING)
        self.tree.tag_configure(warning_name, background="#ffff99")
        notice_name = ExceptionLevel.Enum.Name(ExceptionLevel.NOTICE)
        self.tree.tag_configure(notice_name, background="#85e085")
        yscrollbar = ttk.Scrollbar(frame, orient="vertical", command=self.tree.yview)
        yscrollbar.grid(row=0, column=1, sticky="ns")
        self.tree.configure(yscrollcommand=yscrollbar.set)
        xscrollbar = ttk.Scrollbar(frame, orient="horizontal", command=self.tree.xview)
        xscrollbar.grid(row=1, sticky="ew")
        self.tree.configure(xscrollcommand=xscrollbar.set)
        # Read-only detail pane; enabled only while being rewritten.
        self.codetext = CodeText(self.top)
        self.codetext.text.config(state=tk.DISABLED, height=11)
        self.codetext.grid(sticky="nsew", pady=PADY)
        # Bottom button row: Reset clears everything, Close dismisses.
        frame = ttk.Frame(self.top)
        frame.grid(sticky="ew")
        frame.columnconfigure(0, weight=1)
        frame.columnconfigure(1, weight=1)
        button = ttk.Button(frame, text="Reset", command=self.reset_alerts)
        button.grid(row=0, column=0, sticky="ew", padx=PADX)
        button = ttk.Button(frame, text="Close", command=self.destroy)
        button.grid(row=0, column=1, sticky="ew")

    def reset_alerts(self):
        """Clear the detail pane, empty the alarm tree, and drop stored alarms."""
        self.codetext.text.delete("1.0", tk.END)
        for item in self.tree.get_children():
            self.tree.delete(item)
        self.app.statusbar.core_alarms.clear()

    def click_select(self, event: tk.Event):
        """Show the full exception text for the alarm selected in the tree."""
        current = self.tree.selection()[0]
        alarm = self.alarm_map[current]
        self.codetext.text.config(state=tk.NORMAL)
        self.codetext.text.delete("1.0", "end")
        self.codetext.text.insert("1.0", alarm.exception_event.text)
        self.codetext.text.config(state=tk.DISABLED)
| en | 0.666544 | check engine light | 2.366579 | 2 |
qa/tasks/util/rados.py | rpratap-bot/ceph | 4 | 6622532 | <filename>qa/tasks/util/rados.py
import logging
from teuthology import misc as teuthology
log = logging.getLogger(__name__)
def rados(ctx, remote, cmd, wait=True, check_status=False):
    """Run a ``rados`` command on *remote* under the ulimit/coverage wrappers.

    :param ctx: teuthology context (used to locate the test directory)
    :param remote: remote host object to run the command on
    :param cmd: rados sub-command and arguments, as a list of strings
    :param wait: if True, block until completion and return the exit status;
        otherwise return the still-running process handle
    :param check_status: if True, raise when the command exits non-zero
    """
    testdir = teuthology.get_testdir(ctx)
    log.info("rados %s" % ' '.join(cmd))
    # Wrap the rados invocation so ulimits are adjusted and coverage collected.
    pre = [
        'adjust-ulimits',
        'ceph-coverage',
        '{tdir}/archive/coverage'.format(tdir=testdir),
        'rados',
        ];
    pre.extend(cmd)
    proc = remote.run(
        args=pre,
        check_status=check_status,
        wait=wait,
        )
    if wait:
        return proc.exitstatus
    else:
        return proc
def create_ec_pool(remote, name, profile_name, pgnum, profile=None, cluster_name="ceph", application=None):
    """Create an erasure-coded pool (and its EC profile) on *remote*.

    :param remote: remote host object to run commands on
    :param name: pool name
    :param profile_name: name for the erasure-code profile to create
    :param pgnum: placement-group count (used for both pg_num and pgp_num)
    :param profile: mapping of erasure-code-profile settings; ``None`` or
        ``{}`` means "use the default k=2/m=1 profile"
    :param cluster_name: ceph cluster name
    :param application: if given, enable this application on the pool
    """
    # Fix the shared-mutable-default pitfall (profile={}); None keeps the
    # historical "use defaults" meaning because cmd_erasure_code_profile
    # substitutes its defaults for an empty mapping.
    if profile is None:
        profile = {}
    remote.run(args=['sudo', 'ceph'] +
               cmd_erasure_code_profile(profile_name, profile) + ['--cluster', cluster_name])
    remote.run(args=[
        'sudo', 'ceph', 'osd', 'pool', 'create', name,
        str(pgnum), str(pgnum), 'erasure', profile_name, '--cluster', cluster_name
        ])
    if application:
        remote.run(args=[
            'sudo', 'ceph', 'osd', 'pool', 'application', 'enable', name, application, '--cluster', cluster_name
            ], check_status=False)  # may fail as EINVAL when run in jewel upgrade test
def create_replicated_pool(remote, name, pgnum, cluster_name="ceph", application=None):
    """Create a replicated pool on *remote*, optionally enabling an application.

    :param pgnum: placement-group count (used for both pg_num and pgp_num)
    :param application: when given, run "osd pool application enable" for it
        (status not checked: older releases reject the subcommand)
    """
    pg = str(pgnum)
    create_cmd = ['sudo', 'ceph', 'osd', 'pool', 'create', name, pg, pg,
                  '--cluster', cluster_name]
    remote.run(args=create_cmd)
    if application:
        enable_cmd = ['sudo', 'ceph', 'osd', 'pool', 'application', 'enable',
                      name, application, '--cluster', cluster_name]
        remote.run(args=enable_cmd, check_status=False)
def create_cache_pool(remote, base_name, cache_name, pgnum, size, cluster_name="ceph"):
    """Create *cache_name* and attach it as a cache tier over *base_name*.

    :param pgnum: placement-group count for the cache pool
    :param size: cache size passed to "osd tier add-cache"
    """
    remote.run(args=[
        'sudo', 'ceph', 'osd', 'pool', 'create', cache_name, str(pgnum), '--cluster', cluster_name
        ])
    remote.run(args=[
        'sudo', 'ceph', 'osd', 'tier', 'add-cache', base_name, cache_name,
        str(size), '--cluster', cluster_name
        ])
def cmd_erasure_code_profile(profile_name, profile):
    """Build the ceph CLI arguments that create an erasure-code profile.

    :param profile_name: a string matching [A-Za-z0-9-_.]+
    :param profile: map of plugin-specific settings; an empty map is replaced
        with ``{'k': '2', 'm': '1', 'crush-failure-domain': 'osd'}`` for
        backward compatibility with yaml files written when those values
        were hard-coded — the valid keys/values are the plugin's business,
        not teuthology's
    :returns: an argument list suitable for Remote.run
    """
    if profile == {}:
        profile = {
            'k': '2',
            'm': '1',
            'crush-failure-domain': 'osd'
        }
    args = ['osd', 'erasure-code-profile', 'set', profile_name]
    for key, value in profile.items():
        args.append('{}={}'.format(key, value))
    return args
| <filename>qa/tasks/util/rados.py
import logging
from teuthology import misc as teuthology
log = logging.getLogger(__name__)
def rados(ctx, remote, cmd, wait=True, check_status=False):
testdir = teuthology.get_testdir(ctx)
log.info("rados %s" % ' '.join(cmd))
pre = [
'adjust-ulimits',
'ceph-coverage',
'{tdir}/archive/coverage'.format(tdir=testdir),
'rados',
];
pre.extend(cmd)
proc = remote.run(
args=pre,
check_status=check_status,
wait=wait,
)
if wait:
return proc.exitstatus
else:
return proc
def create_ec_pool(remote, name, profile_name, pgnum, profile={}, cluster_name="ceph", application=None):
remote.run(args=['sudo', 'ceph'] +
cmd_erasure_code_profile(profile_name, profile) + ['--cluster', cluster_name])
remote.run(args=[
'sudo', 'ceph', 'osd', 'pool', 'create', name,
str(pgnum), str(pgnum), 'erasure', profile_name, '--cluster', cluster_name
])
if application:
remote.run(args=[
'sudo', 'ceph', 'osd', 'pool', 'application', 'enable', name, application, '--cluster', cluster_name
], check_status=False) # may fail as EINVAL when run in jewel upgrade test
def create_replicated_pool(remote, name, pgnum, cluster_name="ceph", application=None):
remote.run(args=[
'sudo', 'ceph', 'osd', 'pool', 'create', name, str(pgnum), str(pgnum), '--cluster', cluster_name
])
if application:
remote.run(args=[
'sudo', 'ceph', 'osd', 'pool', 'application', 'enable', name, application, '--cluster', cluster_name
], check_status=False)
def create_cache_pool(remote, base_name, cache_name, pgnum, size, cluster_name="ceph"):
remote.run(args=[
'sudo', 'ceph', 'osd', 'pool', 'create', cache_name, str(pgnum), '--cluster', cluster_name
])
remote.run(args=[
'sudo', 'ceph', 'osd', 'tier', 'add-cache', base_name, cache_name,
str(size), '--cluster', cluster_name
])
def cmd_erasure_code_profile(profile_name, profile):
"""
Return the shell command to run to create the erasure code profile
described by the profile parameter.
:param profile_name: a string matching [A-Za-z0-9-_.]+
:param profile: a map whose semantic depends on the erasure code plugin
:returns: a shell command as an array suitable for Remote.run
If profile is {}, it is replaced with
{ 'k': '2', 'm': '1', 'crush-failure-domain': 'osd'}
for backward compatibility. In previous versions of teuthology,
these values were hardcoded as function arguments and some yaml
files were designed with these implicit values. The teuthology
code should not know anything about the erasure code profile
content or semantic. The valid values and parameters are outside
its scope.
"""
if profile == {}:
profile = {
'k': '2',
'm': '1',
'crush-failure-domain': 'osd'
}
return [
'osd', 'erasure-code-profile', 'set',
profile_name
] + [ str(key) + '=' + str(value) for key, value in profile.items() ]
| en | 0.866852 | # may fail as EINVAL when run in jewel upgrade test Return the shell command to run to create the erasure code profile described by the profile parameter. :param profile_name: a string matching [A-Za-z0-9-_.]+ :param profile: a map whose semantic depends on the erasure code plugin :returns: a shell command as an array suitable for Remote.run If profile is {}, it is replaced with { 'k': '2', 'm': '1', 'crush-failure-domain': 'osd'} for backward compatibility. In previous versions of teuthology, these values were hardcoded as function arguments and some yaml files were designed with these implicit values. The teuthology code should not know anything about the erasure code profile content or semantic. The valid values and parameters are outside its scope. | 2.074795 | 2 |
python/toy-problems/help_fruit_guy.py | vinnyhoward/til | 0 | 6622533 | <reponame>vinnyhoward/til
# Our fruit guy has a bag of fruit (represented as an array of strings) where some fruits are rotten. He wants to replace all the rotten pieces of fruit with fresh ones. For example, given ["apple","rottenBanana","apple"] the replaced array should be ["apple","banana","apple"]. Your task is to implement a method that accepts an array of strings containing fruits should returns an array of strings where all the rotten fruits are replaced by good ones.
# Notes
# If the array is null/nil/None or empty you should return empty array ([]).
# The rotten fruit name will be in this camelcase (rottenFruit).
# The returned array should be in lowercase.
def remove_rotten(bag_of_fruits):
    """Swap every 'rottenX' entry for a fresh lowercase 'x'.

    Returns [] for a None bag; every returned fruit name is lowercase.
    """
    if bag_of_fruits is None:
        return []
    prefix = 'rotten'
    fresh = []
    for fruit in bag_of_fruits:
        if fruit.startswith(prefix):
            fruit = fruit[len(prefix):]
        fresh.append(fruit.lower())
    return fresh
remove_rotten([])
# def remove_rotten(bag_of_fruits):
# return [x.replace('rotten', '').lower() for x in bag_of_fruits] if bag_of_fruits else [] | # Our fruit guy has a bag of fruit (represented as an array of strings) where some fruits are rotten. He wants to replace all the rotten pieces of fruit with fresh ones. For example, given ["apple","rottenBanana","apple"] the replaced array should be ["apple","banana","apple"]. Your task is to implement a method that accepts an array of strings containing fruits should returns an array of strings where all the rotten fruits are replaced by good ones.
# Notes
# If the array is null/nil/None or empty you should return empty array ([]).
# The rotten fruit name will be in this camelcase (rottenFruit).
# The returned array should be in lowercase.
def remove_rotten(bag_of_fruits):
new_fruit_arr = []
if bag_of_fruits is None:
return []
else:
for x in bag_of_fruits:
if x[0:6] == 'rotten':
new_fruit_arr.append(x[6:].lower())
else:
new_fruit_arr.append(x.lower())
return new_fruit_arr
remove_rotten([])
# def remove_rotten(bag_of_fruits):
# return [x.replace('rotten', '').lower() for x in bag_of_fruits] if bag_of_fruits else [] | en | 0.823267 | # Our fruit guy has a bag of fruit (represented as an array of strings) where some fruits are rotten. He wants to replace all the rotten pieces of fruit with fresh ones. For example, given ["apple","rottenBanana","apple"] the replaced array should be ["apple","banana","apple"]. Your task is to implement a method that accepts an array of strings containing fruits should returns an array of strings where all the rotten fruits are replaced by good ones. # Notes # If the array is null/nil/None or empty you should return empty array ([]). # The rotten fruit name will be in this camelcase (rottenFruit). # The returned array should be in lowercase. # def remove_rotten(bag_of_fruits): # return [x.replace('rotten', '').lower() for x in bag_of_fruits] if bag_of_fruits else [] | 4.120198 | 4 |
src/cogs/memberlogs.py | Tocutoeltuco/hackweekbot | 4 | 6622534 | import objects.utils as utils
from discord.ext import commands
class MemberLogs(commands.Cog, name="Member Logs"):
    """Cog that posts configurable welcome/goodbye messages for a guild.

    Message templates support the placeholders {name}, {id}, {mention},
    {avatar}, {guild_name} and {guild_avatar}.
    """

    def __init__(self, bot):
        self.bot = bot

    @commands.Cog.listener()
    async def on_member_join(self, member):
        """Send the configured welcome message when a member joins."""
        config = await self.bot.get_guild_config(member.guild.id)
        # Only act when both a channel and a message template are configured.
        if config["welcome_channel"] != "" and config["welcome_message"] != "":
            channel = member.guild.get_channel(int(config["welcome_channel"]))
            if channel is not None:
                try:
                    await channel.send(config["welcome_message"].format(
                        name=member.name, id=member.id, mention=f"<@{member.id}>", avatar=str(member.avatar_url),
                        guild_name=member.guild.name, guild_avatar=str(member.guild.icon_url) if member.guild.icon_url else "no avatar"
                    ))
                except:
                    # Best-effort: ignore send failures (missing permissions,
                    # bad template keys, ...).  NOTE(review): a bare except
                    # also swallows CancelledError — consider `except Exception`.
                    pass

    @commands.Cog.listener()
    async def on_member_remove(self, member):
        """Send the configured goodbye message when a member leaves."""
        config = await self.bot.get_guild_config(member.guild.id)
        if config["goodbye_channel"] != "" and config["goodbye_message"] != "":
            channel = member.guild.get_channel(int(config["goodbye_channel"]))
            if channel is not None:
                try:
                    await channel.send(config["goodbye_message"].format(
                        name=member.name, id=member.id, mention=f"<@{member.id}>", avatar=str(member.avatar_url),
                        guild_name=member.guild.name, guild_avatar=str(member.guild.icon_url) if member.guild.icon_url else "no avatar"
                    ))
                except:
                    # Best-effort: see note in on_member_join.
                    pass
def setup(bot):
    # discord.py extension entry point: register this cog on the bot.
    bot.add_cog(MemberLogs(bot))
from discord.ext import commands
class MemberLogs(commands.Cog, name="Member Logs"):
def __init__(self, bot):
self.bot = bot
@commands.Cog.listener()
async def on_member_join(self, member):
config = await self.bot.get_guild_config(member.guild.id)
if config["welcome_channel"] != "" and config["welcome_message"] != "":
channel = member.guild.get_channel(int(config["welcome_channel"]))
if channel is not None:
try:
await channel.send(config["welcome_message"].format(
name=member.name, id=member.id, mention=f"<@{member.id}>", avatar=str(member.avatar_url),
guild_name=member.guild.name, guild_avatar=str(member.guild.icon_url) if member.guild.icon_url else "no avatar"
))
except:
pass
@commands.Cog.listener()
async def on_member_remove(self, member):
config = await self.bot.get_guild_config(member.guild.id)
if config["goodbye_channel"] != "" and config["goodbye_message"] != "":
channel = member.guild.get_channel(int(config["goodbye_channel"]))
if channel is not None:
try:
await channel.send(config["goodbye_message"].format(
name=member.name, id=member.id, mention=f"<@{member.id}>", avatar=str(member.avatar_url),
guild_name=member.guild.name, guild_avatar=str(member.guild.icon_url) if member.guild.icon_url else "no avatar"
))
except:
pass
def setup(bot):
bot.add_cog(MemberLogs(bot)) | none | 1 | 2.492384 | 2 | |
HumanModeling/openai_gym/advanced_engagement_gym_coach.py | nesl/EngagementService | 6 | 6622535 | <gh_stars>1-10
import math
import sys
import os
import numpy as np
from gym.spaces import Box
from .advanced_engagement_gym_base import AdvancedEngagementGymBase
from human_modeling_utils import utils
sys.path.append(os.path.join(os.path.dirname(__file__), '..', '..', 'Nurture', 'server', 'notification'))
from nurture.learning.state import State
class AdvancedEngagementGymCoach(AdvancedEngagementGymBase):
    """Engagement gym variant whose observation is a 15-dimensional vector of
    time, one-hot motion/location/ringer indicators, log notification-age,
    and screen status."""

    def get_observation_space(self):
        """Return the Box bounds matching the vector built by intepret_state."""
        return Box(
            low=np.array([
                0.,  # time of day
                0.,  # day of week
                0.,  # motion - stationary
                0.,  # motion - walking
                0.,  # motion - running
                0.,  # motion - driving
                0.,  # motion - biking
                0.,  # location - home
                0.,  # location - work
                0.,  # location - other
                0.,  # notificatoin time
                0.,  # ringer mode - silent
                0.,  # ringer mode - vibrate
                0.,  # ringer mode - normal
                0.,  # screen status
            ]),
            high=np.array([
                1.,  # time of day
                1.,  # day of week
                1.,  # motion - stationary
                1.,  # motion - walking
                1.,  # motion - running
                1.,  # motion - driving
                1.,  # motion - biking
                1.,  # location - home
                1.,  # location - work
                1.,  # location - other
                math.log(60.),  # notificatoin time (log of clipped minutes, see intepret_state)
                1.,  # ringer mode - silent
                1.,  # ringer mode - vibrate
                1.,  # ringer mode - normal
                1.,  # screen status
            ]),
        )

    # NOTE(review): "intepret_state" looks like a typo for "interpret_state",
    # but it presumably overrides a hook of the same name on
    # AdvancedEngagementGymBase — confirm before renaming.
    def intepret_state(self, state):
        """Encode a State object as the 15-element observation vector.

        Categorical fields (motion, location, ringer mode) become one-hot
        booleans; notification age is clipped to [5, 60] then log-scaled.
        """
        return np.array([
            state.timeOfDay,
            state.dayOfWeek,
            state.motion == State.MOTION_STATIONARY,
            state.motion == State.MOTION_WALKING,
            state.motion == State.MOTION_RUNNING,
            state.motion == State.MOTION_DRIVING,
            state.motion == State.MOTION_BIKING,
            state.location == State.LOCATION_HOME,
            state.location == State.LOCATION_WORK,
            state.location == State.LOCATION_OTHER,
            math.log(utils.clip(state.notificationTimeElapsed, 5.0, 60.0)),
            state.ringerMode == State.RINGER_MODE_SILENT,
            state.ringerMode == State.RINGER_MODE_VIBRATE,
            state.ringerMode == State.RINGER_MODE_NORMAL,
            state.screenStatus,
        ])
| import math
import sys
import os
import numpy as np
from gym.spaces import Box
from .advanced_engagement_gym_base import AdvancedEngagementGymBase
from human_modeling_utils import utils
sys.path.append(os.path.join(os.path.dirname(__file__), '..', '..', 'Nurture', 'server', 'notification'))
from nurture.learning.state import State
class AdvancedEngagementGymCoach(AdvancedEngagementGymBase):
def get_observation_space(self):
return Box(
low=np.array([
0., # time of day
0., # day of week
0., # motion - stationary
0., # motion - walking
0., # motion - running
0., # motion - driving
0., # motion - biking
0., # location - home
0., # location - work
0., # location - other
0., # notificatoin time
0., # ringer mode - silent
0., # ringer mode - vibrate
0., # ringer mode - normal
0., # screen status
]),
high=np.array([
1., # time of day
1., # day of week
1., # motion - stationary
1., # motion - walking
1., # motion - running
1., # motion - driving
1., # motion - biking
1., # location - home
1., # location - work
1., # location - other
math.log(60.), # notificatoin time
1., # ringer mode - silent
1., # ringer mode - vibrate
1., # ringer mode - normal
1., # screen status
]),
)
def intepret_state(self, state):
return np.array([
state.timeOfDay,
state.dayOfWeek,
state.motion == State.MOTION_STATIONARY,
state.motion == State.MOTION_WALKING,
state.motion == State.MOTION_RUNNING,
state.motion == State.MOTION_DRIVING,
state.motion == State.MOTION_BIKING,
state.location == State.LOCATION_HOME,
state.location == State.LOCATION_WORK,
state.location == State.LOCATION_OTHER,
math.log(utils.clip(state.notificationTimeElapsed, 5.0, 60.0)),
state.ringerMode == State.RINGER_MODE_SILENT,
state.ringerMode == State.RINGER_MODE_VIBRATE,
state.ringerMode == State.RINGER_MODE_NORMAL,
state.screenStatus,
]) | en | 0.698636 | # time of day # day of week # motion - stationary # motion - walking # motion - running # motion - driving # motion - biking # location - home # location - work # location - other # notificatoin time # ringer mode - silent # ringer mode - vibrate # ringer mode - normal # screen status # time of day # day of week # motion - stationary # motion - walking # motion - running # motion - driving # motion - biking # location - home # location - work # location - other # notificatoin time # ringer mode - silent # ringer mode - vibrate # ringer mode - normal # screen status | 2.393576 | 2 |
train_classification_fastai.py | Data-drone/cv_experiments | 0 | 6622536 | <filename>train_classification_fastai.py
### Fastai v2 training script
# built on fastai v2.2.2
# testing to see how the presets compare to my hand tuning
## So this is training better than my pytorch lightning...
from fastai.vision.all import *

# Root folder containing train/ and test/ image subfolders (CIFAR-10).
path = '../cv_data/cifar10'

### Setup Image transforms
# Per-item (CPU): to tensor, resize to 300x300, then a random 250x250 crop.
item_transforms = [ToTensor, Resize(size=(300,300)),
                   RandomCrop(size=(250,250))
                   ]
# Per-batch (GPU): dihedral flips/rotations plus normalization
# (Normalize() with no stats — presumably uses fastai's default
# batch-derived statistics; confirm).
batch_transforms = [Dihedral(), Normalize()]

### Setup Data Loaders
# device=1 pins batches to GPU 1; bs=164 is the batch size.
dls = ImageDataLoaders.from_folder(path, train='train',
                                   valid='test', device=1,
                                   item_tfms=item_transforms,
                                   batch_tfms=batch_transforms,
                                   bs=164)

### Setup CNN Learner
# ResNet-18 trained from scratch (pretrained=False), tracked by accuracy
# and top-k accuracy.
learn = cnn_learner(dls, resnet18, pretrained=False,
                    metrics=[accuracy, top_k_accuracy])
learn.fit(n_epoch=50)
### Fastai v2 training script
# built on fastai v2.2.2
# testing to see how the presets compare to my hand tuning
## So this is training better than my pytorch lightning...
from fastai.vision.all import *
path = '../cv_data/cifar10'
### Setup Image transforms
item_transforms = [ToTensor, Resize(size=(300,300)),
RandomCrop(size=(250,250))
]
batch_transforms = [Dihedral(), Normalize()]
### Setup Data Loaders
dls = ImageDataLoaders.from_folder(path, train='train',
valid='test', device=1,
item_tfms=item_transforms,
batch_tfms=batch_transforms,
bs=164)
### Setup CNN Learner
learn = cnn_learner(dls, resnet18, pretrained=False,
metrics=[accuracy, top_k_accuracy])
learn.fit(n_epoch=50) | en | 0.743165 | ### Fastai v2 training script # built on fastai v2.2.2 # testing to see how the presets compare to my hand tuning ## So this is training better than my pytorch lightning... ### Setup Image transforms ### Setup Data Loaders ### Setup CNN Learner | 2.267493 | 2 |
database_migrations/versions/6f1b895840a_change_report_parameters_to_varbinary.py | wikimedia/analytics-wikimetrics | 6 | 6622537 | <reponame>wikimedia/analytics-wikimetrics
"""change report parameters to varbinary
Revision ID: 6f1b895840a
Revises: <KEY>
Create Date: 2015-06-16 09:58:57.874087
"""
# revision identifiers, used by Alembic.
revision = '6f1b895840a'
down_revision = '<KEY>'
from alembic import op
from sqlalchemy.dialects.mysql import VARBINARY
from sqlalchemy import String
def upgrade():
    # Widen report.parameters from utf8 VARCHAR(4000) to VARBINARY(4000) so
    # arbitrary bytes (not just utf8 text) can be stored.
    op.alter_column('report', 'parameters', type_=VARBINARY(4000),
                    existing_type=String(4000, collation='utf8_general_ci'),
                    existing_nullable=True)
def downgrade():
    # Revert report.parameters to the original utf8 VARCHAR(4000) column.
    op.alter_column('report', 'parameters',
                    type_=String(4000, collation='utf8_general_ci'),
                    existing_type=VARBINARY(4000), existing_nullable=True)
| """change report parameters to varbinary
Revision ID: 6f1b895840a
Revises: <KEY>
Create Date: 2015-06-16 09:58:57.874087
"""
# revision identifiers, used by Alembic.
revision = '6f1b895840a'
down_revision = '<KEY>'
from alembic import op
from sqlalchemy.dialects.mysql import VARBINARY
from sqlalchemy import String
def upgrade():
op.alter_column('report', 'parameters', type_=VARBINARY(4000),
existing_type=String(4000, collation='utf8_general_ci'),
existing_nullable=True)
def downgrade():
op.alter_column('report', 'parameters',
type_=String(4000, collation='utf8_general_ci'),
existing_type=VARBINARY(4000), existing_nullable=True) | en | 0.408453 | change report parameters to varbinary Revision ID: 6f1b895840a Revises: <KEY> Create Date: 2015-06-16 09:58:57.874087 # revision identifiers, used by Alembic. | 0.984417 | 1 |
utils/path_util.py | wooings/ViT-pytorch-1 | 0 | 6622538 | from os.path import join
from os.path import normpath
import platform
def pjoin(path, *paths):
    """os.path.join that always yields forward-slash paths, even on Windows."""
    joined = join(path, *paths)
    if platform.system() != "Windows":
        return joined
    # Normalize first, then flip the backslashes Windows produces.
    return normpath(joined).replace('\\', '/')
| from os.path import join
from os.path import normpath
import platform
def pjoin(path, *paths):
p = join(path, *paths)
if platform.system() == "Windows":
return normpath(p).replace('\\','/')
else:
return p
| none | 1 | 2.982739 | 3 | |
python/scriptflow.py | tlamadon/balke-lamadon-2022-aer | 6 | 6622539 | #!/usr/bin/python3
# simple script flow
from email import policy
import os
import asyncio
import click
import numpy as np
import shutil
import glob
import asyncio
from time import sleep
from pathlib import Path
import scriptflowlib as sf
@click.group()
def cli():
    """Top-level click command group for the scriptflow CLI."""
    pass
# ----------------------------- TASKS -----------------------
def task_cf_vdec_level(i):
    """Task running cross-section variance-decomposition replication *i*."""
    target = "build/cf/vdec-level-noise0-rep{}.json".format(i)
    t = sf.Task(["python", "-c", "import results as cf; cf.cf_simulate_level_var_dec({},{},False)".format("\"{}\"".format(target), i)])
    t.output(target).uid("cf-vdec-level-{}".format(i)).add_deps("res_main_model.pkl")
    return t
def task_cf_vdec_growth(i):
    """Task running growth variance-decomposition replication *i*."""
    target = "build/cf/vdec-growth-noise0-weight0-rep{}.json".format(i)
    t = sf.Task(["python", "-c", "import results as cf; cf.cf_vardec_growth_rep_one({},{},200,20000,False,False)".format(i, "\"{}\"".format(target), i)])
    t.output(target).uid("cf-vdec-growth-{}".format(i)).add_deps("res_main_model.pkl")
    return t
"""
Task that solves the model once taking a given file as input and
generating an output file next to it
"""
def task_solve_model(input_file):
file = Path(input_file)
target = file.with_name( file.stem + '_moments.pkl')
t = sf.Task(["python", "main_model_eval_once.py", "-p", str(file) , "-s", str(target) ])
t.uid("model-once-{}".format(file.stem))
t.output(str(target))
t.add_deps(str(file))
return(t)
def task_python(code):
    # Convenience wrapper: a task that runs an inline python snippet.
    return sf.Task(["python", "-c", code])
# ----------------------------- FLOWS -----------------------
async def flow_model():
    """Solve the model at the optimal parameters, then first best, then fit."""
    # save the model
    t1 = sf.Task(["python", "-c", "import results as cf; cf.save_model_solution('../results/parameters_at_optimal.json')"])
    t1.output('res_main_model.pkl')
    t1.add_deps('../results/parameters_at_optimal.json')
    t1.uid("solve-model")
    await t1
    # save first best
    t2 = sf.Task(["python", "-c", "import results as cf; cf.save_model_solution_fb()"])
    t2.add_deps('res_main_model.pkl').output('res_main_model_fb.pkl')
    t2.uid("solve-first-best")
    await t2
    # save fit (moments table)
    t2 = sf.Task(["python", "-c", "import results as cf; cf.save_model_fit()"])
    t2.add_deps('res_main_model.pkl').output('../figures/table1-moments.tex')
    t2.uid("get-model-fit")
    await t2
"""
Flow that creates the table for cross-section decompositions
"""
async def flow_cf_vdec_level():
Path("build/cf").mkdir(parents=True, exist_ok=True)
tasks = [task_cf_vdec_level(i) for i in range(20)]
await asyncio.gather(*tasks)
# generate the table
table = sf.Task(["python", "-c", "import results; results.table_variance_decomposition_level()"])
table.uid("cf-vdec-level").output("../figures/table3-vdec-cs.tex")
table.add_deps(t.output_file for t in tasks)
await table
"""
Flow that creates the table for growth decompositions
"""
async def flow_cf_vdec_growth():
Path("build/cf").mkdir(parents=True, exist_ok=True)
tasks = [task_cf_vdec_growth(i) for i in range(20)]
await asyncio.gather(*tasks)
# # generate the table
table = sf.Task(["python", "-c", "import results; results.table_variance_decomposition_growth()"])
table.uid("cf-vdec-growth").output("../figures/table4-vdec-growth.tex")
table.add_deps(t.output_file for t in tasks)
await table
async def flow_model_to_life():
    """Produce the impulse-response figures (main model and first best)."""
    t1 = sf.Task(["python", "-c", "import results as cf; cf.cf_model_to_life(False)"])
    t1.uid("cf-model2life").output("../figures/figure3-ir-xshock.pdf")
    t2 = sf.Task(["python", "-c", "import results as cf; cf.cf_model_to_life(True)"])
    t2.uid("cf-model2life-fb").output("../figures/figurew5-ir-xshock-fb.pdf")
    await asyncio.gather(t1,t2)
async def flow_passthrough():
    """Run the pass-through analysis producing table 5."""
    t1 = sf.Task(["python", "-c", "import results as cf; cf.passthrough_analysis()"])
    # NOTE(review): the input string carries embedded quotes
    # ('"res_main_model.pkl"') unlike the plain names used elsewhere — verify
    # this is what sf.Task.input expects.
    t1.uid("cf-passthough").output('../figures/table5-passthrough.tex').input('"res_main_model.pkl"')
    await t1
async def flow_policy():
    """Generate policy-experiment inputs, solve each model, collect table 6."""
    # we start by creating all the model parameter files to solve
    policy_input1 = sf.Task(["python", "-c", "import results as cf; cf.cf_policy_gen_neutral()"])
    policy_input1.uid("policy-inputs-no-noise")
    policy_input1.output("build/policy/param_lhigh_direct_net0_nm.json")
    await policy_input1
    policy_input2 = sf.Task(["python", "-c", "import results as cf; cf.cf_policy_gen_neutral(True)"])
    policy_input2.uid("policy-inputs-with-noise")
    policy_input2.output("build/policy/param_lhigh_direct_net0.json")
    await policy_input2
    # solve the model once per generated input file, in parallel
    p = Path('build/policy')
    tasks = [task_solve_model(str(f)) for f in p.glob("param_*.json")]
    await asyncio.gather(*tasks)
    policy_collect = sf.Task(["python", "-c", "import results as cf; cf.cf_policy_collect()"])
    policy_collect.uid("policy-collect")
    policy_collect.output("../figures/table6-policy.tex")
    await policy_collect
async def flow_slices():
    """Generate objective-function slices, solve each point, collect a CSV."""
    Path('build/slices').mkdir(parents=True, exist_ok=True)
    gen_inputs = sf.Task(["python", "-c", "import results as cf; cf.cf_slices_gen(25)"])
    gen_inputs.uid("slices-inputs")
    gen_inputs.output("build/slices/param_slice_zcorr_0.json")
    gen_inputs.quiet=False
    await gen_inputs
    # solve the model at every generated slice point, in parallel
    p = Path('build/slices')
    tasks = [task_solve_model(str(f)) for f in p.glob("param_*.json")]
    await asyncio.gather(*tasks)
    slice_collect = sf.Task(["python", "-c", "import results as cf; cf.cf_slices_collect()"])
    slice_collect.uid("slices-collect")
    slice_collect.output("build/slices.csv")
    slice_collect.quiet=False
    await slice_collect
async def flow_bootstrap():
    """Bootstrap standard errors: generate draws, solve each, collect table 2."""
    bs_gen = sf.Task(["python", "-c", "import results as cf; cf.cf_bootstrap_gen()"])
    bs_gen.uid("bootstrap-gen")
    bs_gen.output("build/bootstrap/param_pb_r0_p0.json")
    await bs_gen
    # solve the model for every bootstrap draw, in parallel
    p = Path('build/bootstrap')
    tasks = [task_solve_model(str(f)) for f in p.glob("param_*.json")]
    await asyncio.gather(*tasks)
    bs_collect = sf.Task(["python", "-c", "import results as cf; cf.cf_bootstrap_collect()"])
    bs_collect.uid("bootstrap-collect")
    bs_collect.output("../figures/table2-parameters.tex")
    await bs_collect
async def flow_sensitivity_figures():
    # Compute the sensitivity measure (single task).
    t4 = task_python("import results as cf; cf.cf_sensitivity_measure()").uid("fig-sensitivity")
    await t4
async def flow_data_stats():
    # Generate the descriptive-statistics table.
    t4 = task_python("import results as cf; cf.table_stats()").uid("table-stats").output('../figures/tablew2-stats.tex')
    await t4
async def flow_pdfs():
    """Compile every generated .tex table/figure into a standalone PDF."""
    for f in Path("../figures").glob("*.tex"):
        latex_task = sf.Task(["python", "-c", "import results as cf; cf.generate_alone_pdf(\"{}\")".format(f)])
        latex_task.uid("latex_" + f.stem)
        await latex_task
async def flow_surrogate():
    """Generate surrogate-model parameter files, solve each, plot figure W1."""
    sur_gen = sf.Task(["python", "-c", "import results as cf; cf.surrogate_gen(100)"])
    sur_gen.uid("surrogate-gen").output('build/surrogate/param_slice_efcostce_0.json')
    await sur_gen
    # solve the model at every generated surrogate point, in parallel
    p = Path('build/surrogate')
    tasks = [task_solve_model(str(f)) for f in p.glob("param_*.json")]
    await asyncio.gather(*tasks)
    sur_collect = sf.Task(["python", "-c", "import results as cf; cf.surrogate_collect()"])
    # Bug fix: this task previously reused the uid "bootstrap-collect"
    # (copy-paste from flow_bootstrap), colliding with the real bootstrap
    # collect task in the scriptflow state.
    sur_collect.uid("surrogate-collect")
    sur_collect.output("../figures/figurew1-surrogate.pdf")
    await sur_collect
# -----------------------------
# OPTIMIZER
# -----------------------------
async def flow_optimize():
    """Iterative optimizer: alternate generating parameter sets and solving them."""
    opt_gen = sf.Task(["python", "-c", "import results as cf; cf.optimize_gen(1,100)"])
    opt_gen.uid("opt-step1").output('build/opt/step1/param_slice_efcostce_0.json')
    await opt_gen
    for step in range(1,16):
        # solve the parameter set generated for this step, in parallel
        p = Path('build/opt/step{}'.format(step))
        tasks = [task_solve_model(str(f)) for f in p.glob("param_*.json")]
        await asyncio.gather(*tasks)
        # generate the next step's parameters from this step's results
        # NOTE(review): uid/output are fixed at "opt-step1"/step1 for every
        # iteration — confirm scriptflow re-runs tasks with a repeated uid.
        opt_gen = sf.Task(["python", "-c", "import results as cf; cf.optimize_gen({},100)".format(step+1)])
        opt_gen.uid("opt-step1").output('build/opt/step1/param_slice_efcostce_0.json')
        await opt_gen
async def flow_slices_and_bootstrap():
    """Run the slice computation to completion, then the bootstrap."""
    # 1) we then compute slices, associated plots
    await flow_slices()
    # 2) we run bootstrap to get standard errors
    await flow_bootstrap()
async def flow_fast():
    """Reduced pipeline: model solution, then the counterfactual flows in
    parallel (no slices/bootstrap, unlike flow_part3), then PDF rendering."""
    # we start by solving the model and the first best
    await flow_model()
    # we compute the variance decomposition tables and impulse response plots
    await asyncio.gather(
        flow_cf_vdec_level(),
        flow_cf_vdec_growth(),
        flow_model_to_life(),
        flow_policy(),
        flow_passthrough(),
        flow_data_stats(),
        flow_surrogate())
    # create pdfs for all latex files
    await flow_pdfs()
async def flow_part3():
    """Full results pipeline: model solution, counterfactuals plus
    slices/bootstrap, sensitivity figures, then PDF rendering."""
    # 1) we start by solving the model and the first best
    await flow_model()
    # 2) we compute the variance decomposition tables and impulse response
    #    plots, together with slices/bootstrap and data stats, in parallel
    await asyncio.gather(
        flow_cf_vdec_level(),
        flow_cf_vdec_growth(),
        flow_model_to_life(),
        flow_policy(),
        flow_passthrough(),
        flow_slices_and_bootstrap(),
        flow_data_stats())
    # 3) final tables
    await flow_sensitivity_figures()
    # 4) create pdfs for all latex files
    await flow_pdfs()
async def flow_part2():
    """Optimizer pipeline (alias for flow_optimize)."""
    await flow_optimize()
# flow aliases
async def flow_all():
    """Run the optimizer (part2) and the results pipeline (part3) concurrently."""
    await asyncio.gather(
        flow_part2(),
        flow_part3()
    )
# flow aliases
async def flow_clean_all():
    """Delete all build artifacts and generated figures, then rerun everything."""
    # fix: make the clean idempotent — a missing build tree or parameter
    # file previously raised and aborted the rerun
    shutil.rmtree('build', ignore_errors=True)
    try:
        os.remove("../results/param_reoptimized.json")
    except FileNotFoundError:
        pass
    for f in glob.glob("../figures/*"):
        os.remove(f)
    await flow_all()
"""
Main flow
"""
async def main(func):
    """Start the maestro scheduling loop as a background task, ensure the
    scriptflow state directory exists, then await the requested flow."""
    asyncio.create_task(sf.get_main_maestro().loop())
    os.makedirs('.sf', exist_ok=True)
    await func()
@cli.command()
@click.argument('name')
@click.option('-n','--nodes', default=1)
def run(name, nodes):
    """Run the flow called NAME on `nodes` parallel workers."""
    # discover all flow_* coroutines defined at module level
    func_names = globals().keys()
    flows = [w.replace("flow_", "") for w in func_names if w.startswith("flow_")]
    if name not in flows:
        # fix: message typo ("values ares") and bare `return` instead of
        # `return()` (the empty tuple was never used)
        print("Flow {} is not available, values are: {}".format(name, ", ".join(flows)))
        return
    cr = sf.CommandRunner(nodes)
    sf.set_main_maestro(cr)
    func = globals()["flow_{}".format(name)]
    asyncio.run(main(func))
# script entry point: dispatch to the click CLI
if __name__ == '__main__':
    cli()
| #!/usr/bin/python3
# simple script flow
from email import policy
import os
import asyncio
import click
import numpy as np
import shutil
import glob
import asyncio
from time import sleep
from pathlib import Path
import scriptflowlib as sf
@click.group()
def cli():
pass
# ----------------------------- TASKS -----------------------
def task_cf_vdec_level(i):
target = "build/cf/vdec-level-noise0-rep{}.json".format(i)
t = sf.Task(["python", "-c", "import results as cf; cf.cf_simulate_level_var_dec({},{},False)".format("\"{}\"".format(target), i)])
t.output(target).uid("cf-vdec-level-{}".format(i)).add_deps("res_main_model.pkl")
return t
def task_cf_vdec_growth(i):
target = "build/cf/vdec-growth-noise0-weight0-rep{}.json".format(i)
t = sf.Task(["python", "-c", "import results as cf; cf.cf_vardec_growth_rep_one({},{},200,20000,False,False)".format(i, "\"{}\"".format(target), i)])
t.output(target).uid("cf-vdec-growth-{}".format(i)).add_deps("res_main_model.pkl")
return t
"""
Task that solves the model once taking a given file as input and
generating an output file next to it
"""
def task_solve_model(input_file):
file = Path(input_file)
target = file.with_name( file.stem + '_moments.pkl')
t = sf.Task(["python", "main_model_eval_once.py", "-p", str(file) , "-s", str(target) ])
t.uid("model-once-{}".format(file.stem))
t.output(str(target))
t.add_deps(str(file))
return(t)
def task_python(code):
return sf.Task(["python", "-c", code])
# ----------------------------- FLOWS -----------------------
async def flow_model():
# save the model
t1 = sf.Task(["python", "-c", "import results as cf; cf.save_model_solution('../results/parameters_at_optimal.json')"])
t1.output('res_main_model.pkl')
t1.add_deps('../results/parameters_at_optimal.json')
t1.uid("solve-model")
await t1
# save first best
t2 = sf.Task(["python", "-c", "import results as cf; cf.save_model_solution_fb()"])
t2.add_deps('res_main_model.pkl').output('res_main_model_fb.pkl')
t2.uid("solve-first-best")
await t2
# save fit
t2 = sf.Task(["python", "-c", "import results as cf; cf.save_model_fit()"])
t2.add_deps('res_main_model.pkl').output('../figures/table1-moments.tex')
t2.uid("get-model-fit")
await t2
"""
Flow that creates the table for cross-section decompositions
"""
async def flow_cf_vdec_level():
Path("build/cf").mkdir(parents=True, exist_ok=True)
tasks = [task_cf_vdec_level(i) for i in range(20)]
await asyncio.gather(*tasks)
# generate the table
table = sf.Task(["python", "-c", "import results; results.table_variance_decomposition_level()"])
table.uid("cf-vdec-level").output("../figures/table3-vdec-cs.tex")
table.add_deps(t.output_file for t in tasks)
await table
"""
Flow that creates the table for growth decompositions
"""
async def flow_cf_vdec_growth():
Path("build/cf").mkdir(parents=True, exist_ok=True)
tasks = [task_cf_vdec_growth(i) for i in range(20)]
await asyncio.gather(*tasks)
# # generate the table
table = sf.Task(["python", "-c", "import results; results.table_variance_decomposition_growth()"])
table.uid("cf-vdec-growth").output("../figures/table4-vdec-growth.tex")
table.add_deps(t.output_file for t in tasks)
await table
async def flow_model_to_life():
# # generate the table
t1 = sf.Task(["python", "-c", "import results as cf; cf.cf_model_to_life(False)"])
t1.uid("cf-model2life").output("../figures/figure3-ir-xshock.pdf")
t2 = sf.Task(["python", "-c", "import results as cf; cf.cf_model_to_life(True)"])
t2.uid("cf-model2life-fb").output("../figures/figurew5-ir-xshock-fb.pdf")
await asyncio.gather(t1,t2)
async def flow_passthrough():
t1 = sf.Task(["python", "-c", "import results as cf; cf.passthrough_analysis()"])
t1.uid("cf-passthough").output('../figures/table5-passthrough.tex').input('"res_main_model.pkl"')
await t1
async def flow_policy():
# we start by creating all the models to solve
policy_input1 = sf.Task(["python", "-c", "import results as cf; cf.cf_policy_gen_neutral()"])
policy_input1.uid("policy-inputs-no-noise")
policy_input1.output("build/policy/param_lhigh_direct_net0_nm.json")
await policy_input1
policy_input2 = sf.Task(["python", "-c", "import results as cf; cf.cf_policy_gen_neutral(True)"])
policy_input2.uid("policy-inputs-with-noise")
policy_input2.output("build/policy/param_lhigh_direct_net0.json")
await policy_input2
# get all input files that should be ran
p = Path('build/policy')
tasks = [task_solve_model(str(f)) for f in p.glob("param_*.json")]
await asyncio.gather(*tasks)
policy_collect = sf.Task(["python", "-c", "import results as cf; cf.cf_policy_collect()"])
policy_collect.uid("policy-collect")
policy_collect.output("../figures/table6-policy.tex")
await policy_collect
async def flow_slices():
Path('build/slices').mkdir(parents=True, exist_ok=True)
gen_inputs = sf.Task(["python", "-c", "import results as cf; cf.cf_slices_gen(25)"])
gen_inputs.uid("slices-inputs")
gen_inputs.output("build/slices/param_slice_zcorr_0.json")
gen_inputs.quiet=False
await gen_inputs
p = Path('build/slices')
tasks = [task_solve_model(str(f)) for f in p.glob("param_*.json")]
await asyncio.gather(*tasks)
slice_collect = sf.Task(["python", "-c", "import results as cf; cf.cf_slices_collect()"])
slice_collect.uid("slices-collect")
slice_collect.output("build/slices.csv")
slice_collect.quiet=False
await slice_collect
async def flow_bootstrap():
bs_gen = sf.Task(["python", "-c", "import results as cf; cf.cf_bootstrap_gen()"])
bs_gen.uid("bootstrap-gen")
bs_gen.output("build/bootstrap/param_pb_r0_p0.json")
await bs_gen
p = Path('build/bootstrap')
tasks = [task_solve_model(str(f)) for f in p.glob("param_*.json")]
await asyncio.gather(*tasks)
bs_collect = sf.Task(["python", "-c", "import results as cf; cf.cf_bootstrap_collect()"])
bs_collect.uid("bootstrap-collect")
bs_collect.output("../figures/table2-parameters.tex")
await bs_collect
async def flow_sensitivity_figures():
# we generate 3 tables
t4 = task_python("import results as cf; cf.cf_sensitivity_measure()").uid("fig-sensitivity")
await t4
async def flow_data_stats():
# we generate 3 tables
t4 = task_python("import results as cf; cf.table_stats()").uid("table-stats").output('../figures/tablew2-stats.tex')
await t4
async def flow_pdfs():
for f in Path("../figures").glob("*.tex"):
latex_task = sf.Task(["python", "-c", "import results as cf; cf.generate_alone_pdf(\"{}\")".format(f)])
latex_task.uid("latex_" + f.stem)
await latex_task
async def flow_surrogate():
sur_gen = sf.Task(["python", "-c", "import results as cf; cf.surrogate_gen(100)"])
sur_gen.uid("surrogate-gen").output('build/surrogate/param_slice_efcostce_0.json')
await sur_gen
p = Path('build/surrogate')
tasks = [task_solve_model(str(f)) for f in p.glob("param_*.json")]
await asyncio.gather(*tasks)
bs_collect = sf.Task(["python", "-c", "import results as cf; cf.surrogate_collect()"])
bs_collect.uid("bootstrap-collect")
bs_collect.output("../figures/figurew1-surrogate.pdf")
await bs_collect
# -----------------------------
# OPTIMIZER
# -----------------------------
async def flow_optimize():
opt_gen = sf.Task(["python", "-c", "import results as cf; cf.optimize_gen(1,100)"])
opt_gen.uid("opt-step1").output('build/opt/step1/param_slice_efcostce_0.json')
await opt_gen
for step in range(1,16):
# we generate the set of parameters
p = Path('build/opt/step{}'.format(step))
tasks = [task_solve_model(str(f)) for f in p.glob("param_*.json")]
await asyncio.gather(*tasks)
opt_gen = sf.Task(["python", "-c", "import results as cf; cf.optimize_gen({},100)".format(step+1)])
opt_gen.uid("opt-step1").output('build/opt/step1/param_slice_efcostce_0.json')
await opt_gen
async def flow_slices_and_bootstrap():
# 1) we then compute slices, associated plots
await flow_slices()
# 2) we run bootstrap to get standard errors
await flow_bootstrap()
async def flow_fast():
# we start by solving the model and the first best
await flow_model()
# we compute the variance decomposition tables and impulse response plots
await asyncio.gather(
flow_cf_vdec_level(),
flow_cf_vdec_growth(),
flow_model_to_life(),
flow_policy(),
flow_passthrough(),
flow_data_stats(),
flow_surrogate())
# reate pdfs for all latex files
await flow_pdfs()
async def flow_part3():
# 1) we start by solving the model and the first best
await flow_model()
# we compute the variance decomposition tables and impulse response plots
await asyncio.gather(
flow_cf_vdec_level(),
flow_cf_vdec_growth(),
flow_model_to_life(),
flow_policy(),
flow_passthrough(),
flow_slices_and_bootstrap(),
flow_data_stats())
# 4 final tables and pdf output
await flow_sensitivity_figures()
# 5 create pdfs for all latex files
await flow_pdfs()
async def flow_part2():
await flow_optimize()
# flow aliases
async def flow_all():
await asyncio.gather(
flow_part2(),
flow_part3()
)
# flow aliases
async def flow_clean_all():
shutil.rmtree('build')
os.remove("../results/param_reoptimized.json")
for f in glob.glob("../figures/*"):
os.remove(f)
await flow_all()
"""
Main flow
"""
async def main(func):
asyncio.create_task(sf.get_main_maestro().loop())
os.makedirs('.sf', exist_ok=True)
await func()
@cli.command()
@click.argument('name')
@click.option('-n','--nodes', default=1)
def run(name,nodes):
func_names = globals().keys()
flows = [w.replace("flow_","") for w in func_names if w.startswith("flow_")]
if name not in flows:
print("Flow {} is not available, values ares: {}".format(name, ", ".join(flows)))
return()
cr = sf.CommandRunner(nodes)
sf.set_main_maestro(cr)
func = globals()["flow_{}".format(name)]
asyncio.run(main(func))
if __name__ == '__main__':
cli()
| en | 0.64131 | #!/usr/bin/python3 # simple script flow # ----------------------------- TASKS ----------------------- Task that solves the model once taking a given file as input and generating an output file next to it # ----------------------------- FLOWS ----------------------- # save the model # save first best # save fit Flow that creates the table for cross-section decompositions # generate the table Flow that creates the table for growth decompositions # # generate the table # # generate the table # we start by creating all the models to solve # get all input files that should be ran # we generate 3 tables # we generate 3 tables # ----------------------------- # OPTIMIZER # ----------------------------- # we generate the set of parameters # 1) we then compute slices, associated plots # 2) we run bootstrap to get standard errors # we start by solving the model and the first best # we compute the variance decomposition tables and impulse response plots # reate pdfs for all latex files # 1) we start by solving the model and the first best # we compute the variance decomposition tables and impulse response plots # 4 final tables and pdf output # 5 create pdfs for all latex files # flow aliases # flow aliases Main flow | 2.24312 | 2 |
src_old/tests/examples/legendre.py | toddrme2178/pyccel | 0 | 6622540 | <gh_stars>0
# coding: utf-8
#$ header legendre(int)
def legendre(p):
    # Gauss-Legendre quadrature rule with k = p + 1 nodes on [-1, 1].
    # Returns (x, w): nodes x and weights w from hard-coded tables for
    # p = 1..6; for any other p the zero-initialised arrays are returned
    # unchanged (no error is raised).
    # NOTE: `zeros`/`double` are provided by the pyccel runtime, not numpy.
    k = p + 1
    x = zeros(k, double)
    w = zeros(k, double)
    if p == 1:  # 2-point rule
        x[0] = -0.577350269189625765
        x[1] = 0.577350269189625765
        w[0] = 1.0
        w[1] = 1.0
    elif p == 2:  # 3-point rule
        x[0] = -0.774596669241483377
        x[1] = 0.0
        x[2] = 0.774596669241483377
        w[0] = 0.55555555555555556
        w[1] = 0.888888888888888889
        w[2] = 0.55555555555555556
    elif p == 3:  # 4-point rule
        x[0] = -0.861136311594052575
        x[1] = -0.339981043584856265
        x[2] = 0.339981043584856265
        x[3] = 0.861136311594052575
        w[0] = 0.347854845137453853
        w[1] = 0.65214515486254615
        w[2] = 0.65214515486254614
        w[3] = 0.34785484513745386
    elif p == 4:  # 5-point rule
        x[0] = -0.906179845938663993
        x[1] = -0.538469310105683091
        x[2] = 0.0
        x[3] = 0.538469310105683091
        x[4] = 0.906179845938663993
        w[0] = 0.236926885056189088
        w[1] = 0.478628670499366468
        w[2] = 0.56888888888888889
        w[3] = 0.478628670499366468
        w[4] = 0.23692688505618909
    elif p == 5:  # 6-point rule
        x[0] = -0.932469514203152028
        x[1] = -0.661209386466264514
        x[2] = -0.238619186083196909
        x[3] = 0.238619186083196909
        x[4] = 0.661209386466264514
        x[5] = 0.932469514203152028
        w[0] = 0.171324492379170345
        w[1] = 0.360761573048138608
        w[2] = 0.467913934572691047
        w[3] = 0.467913934572691047
        w[4] = 0.36076157304813861
        w[5] = 0.171324492379170345
    elif p == 6:  # 7-point rule
        x[0] = -0.94910791234275852
        x[1] = -0.74153118559939444
        x[2] = -0.405845151377397167
        x[3] = 0.0
        x[4] = 0.405845151377397167
        x[5] = 0.74153118559939444
        x[6] = 0.949107912342758525
        w[0] = 0.129484966168869693
        w[1] = 0.27970539148927667
        w[2] = 0.381830050505118945
        w[3] = 0.417959183673469388
        w[4] = 0.38183005050511895
        w[5] = 0.279705391489276667
        w[6] = 0.129484966168869693
    return x,w
# demo: print the 4-point (p = 3) quadrature nodes and weights
m = 3
x,w = legendre(m)
print(x)
print(w)
| # coding: utf-8
#$ header legendre(int)
def legendre(p):
k = p + 1
x = zeros(k, double)
w = zeros(k, double)
if p == 1:
x[0] = -0.577350269189625765
x[1] = 0.577350269189625765
w[0] = 1.0
w[1] = 1.0
elif p == 2:
x[0] = -0.774596669241483377
x[1] = 0.0
x[2] = 0.774596669241483377
w[0] = 0.55555555555555556
w[1] = 0.888888888888888889
w[2] = 0.55555555555555556
elif p == 3:
x[0] = -0.861136311594052575
x[1] = -0.339981043584856265
x[2] = 0.339981043584856265
x[3] = 0.861136311594052575
w[0] = 0.347854845137453853
w[1] = 0.65214515486254615
w[2] = 0.65214515486254614
w[3] = 0.34785484513745386
elif p == 4:
x[0] = -0.906179845938663993
x[1] = -0.538469310105683091
x[2] = 0.0
x[3] = 0.538469310105683091
x[4] = 0.906179845938663993
w[0] = 0.236926885056189088
w[1] = 0.478628670499366468
w[2] = 0.56888888888888889
w[3] = 0.478628670499366468
w[4] = 0.23692688505618909
elif p == 5:
x[0] = -0.932469514203152028
x[1] = -0.661209386466264514
x[2] = -0.238619186083196909
x[3] = 0.238619186083196909
x[4] = 0.661209386466264514
x[5] = 0.932469514203152028
w[0] = 0.171324492379170345
w[1] = 0.360761573048138608
w[2] = 0.467913934572691047
w[3] = 0.467913934572691047
w[4] = 0.36076157304813861
w[5] = 0.171324492379170345
elif p == 6:
x[0] = -0.94910791234275852
x[1] = -0.74153118559939444
x[2] = -0.405845151377397167
x[3] = 0.0
x[4] = 0.405845151377397167
x[5] = 0.74153118559939444
x[6] = 0.949107912342758525
w[0] = 0.129484966168869693
w[1] = 0.27970539148927667
w[2] = 0.381830050505118945
w[3] = 0.417959183673469388
w[4] = 0.38183005050511895
w[5] = 0.279705391489276667
w[6] = 0.129484966168869693
return x,w
m = 3
x,w = legendre(m)
print(x)
print(w) | en | 0.285001 | # coding: utf-8 #$ header legendre(int) | 2.823787 | 3 |
lupa-core/src/test/resources/org/jetbrains/research/lupa/kotlinAnalysis/util/python/jupyter/data/out_5.py | JetBrains-Research/Lupa | 16 | 6622541 | import numpy as np
import plt.pyplot as plt
x = np.linspace(-2, 2, 9).reshape((3,3))
print(x / 2)
print('hello')
def foo(x: int = 1):
    # NOTE(review): the annotation/default (int, 1) contradict the assert,
    # which requires a numpy array — calling foo() with its default fails.
    # This file appears to be analyzer test fixture data, so left as-is.
    assert(isinstance(x, np.ndarray))
    return x * x / 2
print(foo(x))
class A:
    """Trivial class holding a single descriptive string attribute."""
    def __init__(self):
        self.string = "It's class A"
def create_a():
    """Factory helper returning a fresh A instance."""
    a = A()
    return a
lots_of_A = [create_a().string for _ in range(100)]
print(lots_of_A[3:5]) | import numpy as np
import plt.pyplot as plt
x = np.linspace(-2, 2, 9).reshape((3,3))
print(x / 2)
print('hello')
def foo(x: int = 1):
assert(isinstance(x, np.ndarray))
return x * x / 2
print(foo(x))
class A:
def __init__(self):
self.string = "It's class A"
def create_a():
a = A()
return a
lots_of_A = [create_a().string for _ in range(100)]
print(lots_of_A[3:5]) | none | 1 | 3.645863 | 4 | |
aries_cloudcontroller/api/multitenancy.py | didx-xyz/aries-cloudcontroller-pyton | 5 | 6622542 | from uplink import (
Consumer,
Path,
Query,
Body,
Header,
get,
post,
patch,
put,
delete,
returns,
json,
)
from typing import Dict, List, Optional, Union # noqa: F401
from aries_cloudcontroller.uplink_util import bool_query
from aries_cloudcontroller.model.create_wallet_request import CreateWalletRequest
from aries_cloudcontroller.model.create_wallet_response import CreateWalletResponse
from aries_cloudcontroller.model.create_wallet_token_request import (
CreateWalletTokenRequest,
)
from aries_cloudcontroller.model.create_wallet_token_response import (
CreateWalletTokenResponse,
)
from aries_cloudcontroller.model.remove_wallet_request import RemoveWalletRequest
from aries_cloudcontroller.model.update_wallet_request import UpdateWalletRequest
from aries_cloudcontroller.model.wallet_list import WalletList
from aries_cloudcontroller.model.wallet_record import WalletRecord
class MultitenancyApi(Consumer):
    """Uplink consumer for the /multitenancy admin endpoints: subwallet
    creation, lookup, query, update, removal, and auth-token retrieval.

    Each public coroutine forwards its keyword arguments to the matching
    private uplink-decorated request stub below.
    """
    async def create_wallet(
        self, *, body: Optional[CreateWalletRequest] = None
    ) -> CreateWalletResponse:
        """Create a subwallet"""
        return await self.__create_wallet(
            body=body,
        )
    async def delete_wallet(
        self, *, wallet_id: str, body: Optional[RemoveWalletRequest] = None
    ) -> Dict:
        """Remove a subwallet"""
        return await self.__delete_wallet(
            wallet_id=wallet_id,
            body=body,
        )
    async def get_auth_token(
        self, *, wallet_id: str, body: Optional[CreateWalletTokenRequest] = None
    ) -> CreateWalletTokenResponse:
        """Get auth token for a subwallet"""
        return await self.__get_auth_token(
            wallet_id=wallet_id,
            body=body,
        )
    async def get_wallet(self, *, wallet_id: str) -> WalletRecord:
        """Get a single subwallet"""
        return await self.__get_wallet(
            wallet_id=wallet_id,
        )
    async def get_wallets(self, *, wallet_name: Optional[str] = None) -> WalletList:
        """Query subwallets"""
        return await self.__get_wallets(
            wallet_name=wallet_name,
        )
    async def update_wallet(
        self, *, wallet_id: str, body: Optional[UpdateWalletRequest] = None
    ) -> WalletRecord:
        """Update a subwallet"""
        return await self.__update_wallet(
            wallet_id=wallet_id,
            body=body,
        )
    # --- internal uplink request definitions (implemented by the decorators) ---
    @returns.json
    @json
    @post("/multitenancy/wallet")
    def __create_wallet(
        self, *, body: Body(type=CreateWalletRequest) = {}
    ) -> CreateWalletResponse:
        """Internal uplink method for create_wallet"""
    @returns.json
    @json
    @post("/multitenancy/wallet/{wallet_id}/remove")
    def __delete_wallet(
        self, *, wallet_id: str, body: Body(type=RemoveWalletRequest) = {}
    ) -> Dict:
        """Internal uplink method for delete_wallet"""
    @returns.json
    @json
    @post("/multitenancy/wallet/{wallet_id}/token")
    def __get_auth_token(
        self, *, wallet_id: str, body: Body(type=CreateWalletTokenRequest) = {}
    ) -> CreateWalletTokenResponse:
        """Internal uplink method for get_auth_token"""
    @returns.json
    @get("/multitenancy/wallet/{wallet_id}")
    def __get_wallet(self, *, wallet_id: str) -> WalletRecord:
        """Internal uplink method for get_wallet"""
    @returns.json
    @get("/multitenancy/wallets")
    def __get_wallets(self, *, wallet_name: Query = None) -> WalletList:
        """Internal uplink method for get_wallets"""
    @returns.json
    @json
    @put("/multitenancy/wallet/{wallet_id}")
    def __update_wallet(
        self, *, wallet_id: str, body: Body(type=UpdateWalletRequest) = {}
    ) -> WalletRecord:
        """Internal uplink method for update_wallet"""
| from uplink import (
Consumer,
Path,
Query,
Body,
Header,
get,
post,
patch,
put,
delete,
returns,
json,
)
from typing import Dict, List, Optional, Union # noqa: F401
from aries_cloudcontroller.uplink_util import bool_query
from aries_cloudcontroller.model.create_wallet_request import CreateWalletRequest
from aries_cloudcontroller.model.create_wallet_response import CreateWalletResponse
from aries_cloudcontroller.model.create_wallet_token_request import (
CreateWalletTokenRequest,
)
from aries_cloudcontroller.model.create_wallet_token_response import (
CreateWalletTokenResponse,
)
from aries_cloudcontroller.model.remove_wallet_request import RemoveWalletRequest
from aries_cloudcontroller.model.update_wallet_request import UpdateWalletRequest
from aries_cloudcontroller.model.wallet_list import WalletList
from aries_cloudcontroller.model.wallet_record import WalletRecord
class MultitenancyApi(Consumer):
async def create_wallet(
self, *, body: Optional[CreateWalletRequest] = None
) -> CreateWalletResponse:
"""Create a subwallet"""
return await self.__create_wallet(
body=body,
)
async def delete_wallet(
self, *, wallet_id: str, body: Optional[RemoveWalletRequest] = None
) -> Dict:
"""Remove a subwallet"""
return await self.__delete_wallet(
wallet_id=wallet_id,
body=body,
)
async def get_auth_token(
self, *, wallet_id: str, body: Optional[CreateWalletTokenRequest] = None
) -> CreateWalletTokenResponse:
"""Get auth token for a subwallet"""
return await self.__get_auth_token(
wallet_id=wallet_id,
body=body,
)
async def get_wallet(self, *, wallet_id: str) -> WalletRecord:
"""Get a single subwallet"""
return await self.__get_wallet(
wallet_id=wallet_id,
)
async def get_wallets(self, *, wallet_name: Optional[str] = None) -> WalletList:
"""Query subwallets"""
return await self.__get_wallets(
wallet_name=wallet_name,
)
async def update_wallet(
self, *, wallet_id: str, body: Optional[UpdateWalletRequest] = None
) -> WalletRecord:
"""Update a subwallet"""
return await self.__update_wallet(
wallet_id=wallet_id,
body=body,
)
@returns.json
@json
@post("/multitenancy/wallet")
def __create_wallet(
self, *, body: Body(type=CreateWalletRequest) = {}
) -> CreateWalletResponse:
"""Internal uplink method for create_wallet"""
@returns.json
@json
@post("/multitenancy/wallet/{wallet_id}/remove")
def __delete_wallet(
self, *, wallet_id: str, body: Body(type=RemoveWalletRequest) = {}
) -> Dict:
"""Internal uplink method for delete_wallet"""
@returns.json
@json
@post("/multitenancy/wallet/{wallet_id}/token")
def __get_auth_token(
self, *, wallet_id: str, body: Body(type=CreateWalletTokenRequest) = {}
) -> CreateWalletTokenResponse:
"""Internal uplink method for get_auth_token"""
@returns.json
@get("/multitenancy/wallet/{wallet_id}")
def __get_wallet(self, *, wallet_id: str) -> WalletRecord:
"""Internal uplink method for get_wallet"""
@returns.json
@get("/multitenancy/wallets")
def __get_wallets(self, *, wallet_name: Query = None) -> WalletList:
"""Internal uplink method for get_wallets"""
@returns.json
@json
@put("/multitenancy/wallet/{wallet_id}")
def __update_wallet(
self, *, wallet_id: str, body: Body(type=UpdateWalletRequest) = {}
) -> WalletRecord:
"""Internal uplink method for update_wallet"""
| en | 0.643617 | # noqa: F401 Create a subwallet Remove a subwallet Get auth token for a subwallet Get a single subwallet Query subwallets Update a subwallet Internal uplink method for create_wallet Internal uplink method for delete_wallet Internal uplink method for get_auth_token Internal uplink method for get_wallet Internal uplink method for get_wallets Internal uplink method for update_wallet | 2.071766 | 2 |
python/testing/bboxtester_azure.py | pokotylo/ocrlayout | 4 | 6622543 | import io
import json
import logging
import os
import os.path
import sys
import types
import time
import requests
from enum import Enum
from pathlib import Path
# Azure CV Support
from azure.cognitiveservices.vision.computervision import ComputerVisionClient
from msrest.authentication import CognitiveServicesCredentials
from PIL import Image, ImageDraw, ImageFont
import cv2
import numpy as np
# OCRLAYOUT Import
try:
from ocrlayout.bboxhelper import BBOXOCRResponse,BBoxHelper,BBOXPoint
print("PyPI Package imported")
except ImportError:
print("Local Package imported")
from ocrlayout_pkg.ocrlayout.bboxhelper import BBOXOCRResponse,BBoxHelper,BBOXPoint
#
# Azure Specific
#
SUBSCRIPTION_KEY_ENV_NAME = os.environ.get("COMPUTERVISION_SUBSCRIPTION_KEY", None)
COMPUTERVISION_LOCATION = os.environ.get("COMPUTERVISION_LOCATION", "westeurope")
# Import
from .bboxtester_utils import OCREngine, OCRUtils
class AzureEngine(OCREngine):
    """Shared drawing helper for the Azure OCR/Read test engines."""

    def draw_boxes(self, image, polygon, color, padding=0):
        """Draw one bounding polygon on `image` and return the image.

        `polygon` is first normalised to BBOXPoint corners; it may be the
        Azure OCR string form, the Read API flat coordinate list (more
        than 4 entries), or a sequence of up to four point objects.
        """
        if isinstance(polygon, str):
            corners = BBOXPoint.from_azure_ocr(polygon, 1)
        elif len(polygon) > 4:
            corners = BBOXPoint.from_azure_read_2(polygon, 1)
        else:
            corners = [BBOXPoint.from_azure(pt) for pt in polygon]
        # flatten the first four corners into [x0, y0, ..., x3, y3],
        # each coordinate shifted by `padding` pixels
        coords = []
        for idx in range(4):
            coords.append(corners[idx].X + padding)
            coords.append(corners[idx].Y + padding)
        ImageDraw.Draw(image).polygon(coords, outline=color)
        return image
class AzureOCREngine(AzureEngine):
    """Test engine for the legacy Azure Computer Vision OCR endpoint."""

    def detect_text(self, filename=None, callOCR=True, verbose=False):
        """OCR one image with the Azure CV v3.0 OCR API and dump all results.

        Writes to RESULTS_FOLDER: the raw JSON response, the plain text,
        the bbox-helper JSON/text, and annotated before/after images.
        When callOCR is False and a cached *.azure.ocr.json exists, the
        cached response is reused instead of calling the service.
        """
        print("AZURE OCR Image Name {}".format(filename))
        p = Path(filename)
        (imgname, imgext) = os.path.splitext(p.name)
        # Only call the service when requested, or when no cache exists yet
        invokeOCR = callOCR
        if not callOCR:
            if not os.path.exists(os.path.join(self.RESULTS_FOLDER, imgname + ".azure.ocr.json")):
                invokeOCR = True
        ocrexception = False
        if invokeOCR:
            ocr_url = "https://" + COMPUTERVISION_LOCATION + ".api.cognitive.microsoft.com/vision/v3.0/ocr"
            # Binary upload: Content-Type must be octet-stream
            headers = {'Ocp-Apim-Subscription-Key': SUBSCRIPTION_KEY_ENV_NAME, 'Content-Type': 'application/octet-stream'}
            params = {'language': 'unk', 'detectOrientation': 'true'}
            # Azure Computer Vision OCR API Call
            with open(os.path.join(self.IMAGES_FOLDER, filename), "rb") as image_stream:
                response = requests.post(ocr_url, headers=headers, params=params, data=image_stream)
                image_analysis = response.json()
                # Cache the raw response next to the other results
                with open(os.path.join(self.RESULTS_FOLDER, imgname + ".azure.ocr.json"), 'w') as outfile:
                    outfile.write(response.content.decode("utf-8"))
                # Dump the recognized words as plain text, one per line
                with open(os.path.join(self.RESULTS_FOLDER, imgname + ".azure.ocr.txt"), 'w') as outfile:
                    if "regions" in image_analysis:
                        for region in image_analysis["regions"]:
                            for line in region["lines"]:
                                for word_info in line["words"]:
                                    outfile.write(word_info['text'])
                                    outfile.write('\n')
                    else:
                        # no "regions" key: treat as an error response
                        ocrexception = True
                ocrresponse = response.content.decode("utf-8")
        else:
            # Use local OCR cached response when available
            with open(os.path.join(self.RESULTS_FOLDER, imgname + ".azure.ocr.json"), 'r') as cachefile:
                ocrresponse = cachefile.read().replace('\n', '')
        if not ocrexception:
            # Create BBOX OCR Response from Azure CV string response
            bboxresponse = self.bboxhelper.processAzureOCRResponse(ocrresponse, verbose=verbose)
            # fix: guard against a None helper response before touching
            # __dict__, consistent with AzureReadEngine.detect_text
            if bboxresponse:
                print("BBOX Helper Response {}".format(bboxresponse.__dict__))
                # Write the improved ocr response
                with open(os.path.join(self.RESULTS_FOLDER, imgname + ".azure.ocr.bbox.json"), 'w') as outfile:
                    outfile.write(json.dumps(bboxresponse.__dict__, default=lambda o: o.__dict__, indent=4))
                # Write the improved ocr text
                with open(os.path.join(self.RESULTS_FOLDER, imgname + ".azure.ocr.bbox.txt"), 'w') as outfile:
                    outfile.write(bboxresponse.text)
            try:
                # fix: was `imgext not in '.pdf'`, a substring-membership test
                # (it would also skip e.g. a '.p' extension); equality is meant
                if imgext != '.pdf':
                    # Before image: Azure's own word (yellow) and line (red) boxes
                    imagefn = os.path.join(self.IMAGES_FOLDER, filename)
                    image = Image.open(imagefn)
                    bboximg = image.copy()
                    jsonres = json.loads(ocrresponse)
                    if "recognitionResults" in jsonres:
                        blocks = jsonres["recognitionResults"]
                    elif "analyzeResult" in jsonres:
                        blocks = jsonres["analyzeResult"]["readResults"]
                    elif "regions" in jsonres:
                        blocks = jsonres["regions"]
                    else:
                        blocks = {}
                    for block in blocks:
                        for line in block["lines"]:
                            for word in line["words"]:
                                image = self.draw_boxes(image, word["boundingBox"], 'yellow')
                            image = self.draw_boxes(image, line["boundingBox"], 'red', padding=1)
                    OCRUtils.save_boxed_image(image, os.path.join(self.RESULTS_FOLDER, imgname + ".azure.ocr" + imgext))
                    # After image: the bbox-helper output boxes
                    if bboxresponse:
                        OCRUtils.draw_bboxes(bboximg, bboxresponse, 'black', padding=1)
                        OCRUtils.save_boxed_image(bboximg, os.path.join(self.RESULTS_FOLDER, imgname + ".azure.ocr.bbox" + imgext))
            except Exception as ex:
                # image rendering is best-effort; never fail the whole run on it
                print(ex)
class AzureReadEngine(AzureEngine):
    # Test engine for the asynchronous Azure "Read" (Batch Read) API.
    def detect_text(self, filename=None,callOCR=True,verbose=False):
        """RecognizeTextUsingBatchReadAPI.
        This will recognize text of the given image using the Batch Read API.

        Submits the image, polls the operation until it leaves the
        notstarted/running states, then writes the raw JSON, plain text,
        bbox-helper JSON/text and annotated images to RESULTS_FOLDER.
        When callOCR is False and a cached *.azure.read.json exists, the
        cached response is reused instead of calling the service.
        """
        azure_client = ComputerVisionClient(
            endpoint="https://" + COMPUTERVISION_LOCATION + ".api.cognitive.microsoft.com/",
            credentials=CognitiveServicesCredentials(SUBSCRIPTION_KEY_ENV_NAME)
        )
        print("AZURE READ Image Name {}".format(filename))
        p = Path(filename)
        (imgname,imgext) = os.path.splitext(p.name)
        # Check if we have a cached ocr response already for this provider
        invokeOCR=callOCR
        if not callOCR:
            if not os.path.exists(os.path.join(self.RESULTS_FOLDER, imgname+".azure.read.json")):
                invokeOCR=True
        if invokeOCR:
            # Azure Computer Vision Call
            with open(os.path.join(self.IMAGES_FOLDER, filename), "rb") as image_stream:
                job = azure_client.read_in_stream(
                    image=image_stream,
                    raw=True
                )
                # the operation id is the last path segment of the header URL
                operation_id = job.headers['Operation-Location'].split('/')[-1]
                image_analysis = azure_client.get_read_result(operation_id,raw=True)
                # poll once per second until the async job settles
                while str.lower(image_analysis.output.status) in ['notstarted', 'running']:
                    time.sleep(1)
                    image_analysis = azure_client.get_read_result(operation_id=operation_id,raw=True)
                print("\tJob completion is: {}".format(image_analysis.output.status))
                print("\tRecognized {} page(s)".format(len(image_analysis.output.analyze_result.read_results)))
                # cache the raw response and dump the plain text
                with open(os.path.join(self.RESULTS_FOLDER, imgname+".azure.read.json"), 'w') as outfile:
                    outfile.write(image_analysis.response.content.decode("utf-8"))
                with open(os.path.join(self.RESULTS_FOLDER, imgname+".azure.read.txt"), 'w') as outfile:
                    for rec in image_analysis.output.analyze_result.read_results:
                        for line in rec.lines:
                            outfile.write(line.text)
                            outfile.write('\n')
                ocrresponse=image_analysis.response.content.decode("utf-8")
        else:
            # Use local OCR cached response when available
            with open(os.path.join(self.RESULTS_FOLDER, imgname+".azure.read.json"), 'r') as cachefile:
                ocrresponse = cachefile.read().replace('\n', '')
        # Create BBOX OCR Response from Azure CV string response
        bboxresponse=self.bboxhelper.processAzureOCRResponse(ocrresponse,verbose=verbose)
        if bboxresponse:
            print("BBOX Helper Response {}".format(bboxresponse.__dict__))
            # Write the improved ocr response
            with open(os.path.join(self.RESULTS_FOLDER, imgname+".azure.read.bbox.json"), 'w') as outfile:
                outfile.write(json.dumps(bboxresponse.__dict__, default = lambda o: o.__dict__, indent=4))
            # Write the improved ocr text
            with open(os.path.join(self.RESULTS_FOLDER, imgname+".azure.read.bbox.txt"), 'w') as outfile:
                outfile.write(bboxresponse.text)
        try:
            # NOTE(review): `imgext not in '.pdf'` is a substring test, not
            # `!=`; it happens to skip '.pdf' but would also skip e.g. '.p'
            if imgext not in '.pdf':
                # Create the Before and After images
                imagefn=os.path.join(self.IMAGES_FOLDER, filename)
                image = Image.open(imagefn)
                bboximg = image.copy()
                # Write the Azure OCR resulted boxes image
                jsonres = json.loads(ocrresponse)
                if "recognitionResults" in jsonres:
                    blocks=jsonres["recognitionResults"]
                elif "analyzeResult" in jsonres:
                    blocks=jsonres["analyzeResult"]["readResults"]
                elif "regions" in jsonres:
                    blocks=jsonres["regions"]
                else:
                    blocks={}
                for block in blocks:
                    for line in block["lines"]:
                        for word in line["words"]:
                            image = self.draw_boxes(image,word["boundingBox"],'yellow')
                        image = self.draw_boxes(image,line["boundingBox"],'red',padding=1)
                OCRUtils.save_boxed_image(image,os.path.join(self.RESULTS_FOLDER, imgname+".azure.read"+imgext))
                if bboxresponse:
                    # Write the BBOX resulted boxes image
                    OCRUtils.draw_bboxes(bboximg, bboxresponse, 'black',padding=1)
                    OCRUtils.save_boxed_image(bboximg,os.path.join(self.RESULTS_FOLDER, imgname+".azure.read.bbox"+imgext))
        except Exception as ex:
            print(ex)
            pass
| import io
import json
import logging
import os
import os.path
import sys
import types
import time
import requests
from enum import Enum
from pathlib import Path
# Azure CV Support
from azure.cognitiveservices.vision.computervision import ComputerVisionClient
from msrest.authentication import CognitiveServicesCredentials
from PIL import Image, ImageDraw, ImageFont
import cv2
import numpy as np
# OCRLAYOUT Import
try:
from ocrlayout.bboxhelper import BBOXOCRResponse,BBoxHelper,BBOXPoint
print("PyPI Package imported")
except ImportError:
print("Local Package imported")
from ocrlayout_pkg.ocrlayout.bboxhelper import BBOXOCRResponse,BBoxHelper,BBOXPoint
#
# Azure Specific
#
SUBSCRIPTION_KEY_ENV_NAME = os.environ.get("COMPUTERVISION_SUBSCRIPTION_KEY", None)
COMPUTERVISION_LOCATION = os.environ.get("COMPUTERVISION_LOCATION", "westeurope")
# Import
from .bboxtester_utils import OCREngine, OCRUtils
class AzureEngine(OCREngine):
def draw_boxes(self, image, polygon, color, padding=0):
"""Draw a border around the image using the hints in the vector list."""
draw = ImageDraw.Draw(image)
# Convert the given bounding box to BBOXPoint
if isinstance(polygon,str):
points = BBOXPoint.from_azure_ocr(polygon,1)
elif ( len(polygon) > 4 ):
points = BBOXPoint.from_azure_read_2(polygon,1)
else:
points = list(map(BBOXPoint.from_azure, [x for x in polygon]))
draw.polygon([
points[0].X+padding, points[0].Y+padding,
points[1].X+padding, points[1].Y+padding,
points[2].X+padding, points[2].Y+padding,
points[3].X+padding, points[3].Y+padding],
outline=color)
return image
class AzureOCREngine(AzureEngine):
def detect_text(self, filename=None,callOCR=True,verbose=False):
print("AZURE OCR Image Name {}".format(filename))
p = Path(filename)
(imgname,imgext) = os.path.splitext(p.name)
# Check if we have a cached ocr response already for this provider
invokeOCR=callOCR
if not callOCR:
if not os.path.exists(os.path.join(self.RESULTS_FOLDER, imgname+".azure.ocr.json")):
invokeOCR=True
ocrexception=False
if invokeOCR:
ocr_url="https://" + COMPUTERVISION_LOCATION + ".api.cognitive.microsoft.com/vision/v3.0/ocr"
# Set Content-Type to octet-stream
headers = {'Ocp-Apim-Subscription-Key': SUBSCRIPTION_KEY_ENV_NAME, 'Content-Type': 'application/octet-stream'}
params = {'language': 'unk', 'detectOrientation': 'true'}
# Azure Computer Vision OCR API Call
with open(os.path.join(self.IMAGES_FOLDER, filename), "rb") as image_stream:
response = requests.post(ocr_url, headers=headers, params=params, data = image_stream)
# response.raise_for_status()
image_analysis=response.json()
with open(os.path.join(self.RESULTS_FOLDER, imgname+".azure.ocr.json"), 'w') as outfile:
outfile.write(response.content.decode("utf-8"))
with open(os.path.join(self.RESULTS_FOLDER, imgname+".azure.ocr.txt"), 'w') as outfile:
if "regions" in image_analysis:
# Extract the word bounding boxes and text.
line_infos = [region["lines"] for region in image_analysis["regions"]]
for line in line_infos:
for word_metadata in line:
for word_info in word_metadata["words"]:
outfile.write(word_info['text'])
outfile.write('\n')
else:
ocrexception = True
ocrresponse=response.content.decode("utf-8")
else:
# Use local OCR cached response when available
with open(os.path.join(self.RESULTS_FOLDER, imgname+".azure.ocr.json"), 'r') as cachefile:
ocrresponse = cachefile.read().replace('\n', '')
if not ocrexception:
# Create BBOX OCR Response from Azure CV string response
bboxresponse=self.bboxhelper.processAzureOCRResponse(ocrresponse,verbose=verbose)
print("BBOX Helper Response {}".format(bboxresponse.__dict__))
# Write the improved ocr response
with open(os.path.join(self.RESULTS_FOLDER, imgname+".azure.ocr.bbox.json"), 'w') as outfile:
outfile.write(json.dumps(bboxresponse.__dict__, default = lambda o: o.__dict__, indent=4))
# Write the improved ocr text
with open(os.path.join(self.RESULTS_FOLDER, imgname+".azure.ocr.bbox.txt"), 'w') as outfile:
outfile.write(bboxresponse.text)
try:
if imgext not in '.pdf':
# Create the Before and After images
imagefn=os.path.join(self.IMAGES_FOLDER, filename)
image = Image.open(imagefn)
bboximg = image.copy()
# Write the Azure OCR resulted boxes image
jsonres = json.loads(ocrresponse)
if "recognitionResults" in jsonres:
blocks=jsonres["recognitionResults"]
elif "analyzeResult" in jsonres:
blocks=jsonres["analyzeResult"]["readResults"]
elif "regions" in jsonres:
blocks=jsonres["regions"]
else:
blocks={}
for block in blocks:
for line in block["lines"]:
for word in line["words"]:
image = self.draw_boxes(image,word["boundingBox"],'yellow')
image = self.draw_boxes(image,line["boundingBox"],'red',padding=1)
OCRUtils.save_boxed_image(image,os.path.join(self.RESULTS_FOLDER, imgname+".azure.ocr"+imgext))
# Write the BBOX resulted boxes image
OCRUtils.draw_bboxes(bboximg, bboxresponse, 'black',padding=1)
OCRUtils.save_boxed_image(bboximg,os.path.join(self.RESULTS_FOLDER, imgname+".azure.ocr.bbox"+imgext))
except Exception as ex:
print(ex)
pass
class AzureReadEngine(AzureEngine):
def detect_text(self, filename=None,callOCR=True,verbose=False):
"""RecognizeTextUsingBatchReadAPI.
This will recognize text of the given image using the Batch Read API.
"""
azure_client = ComputerVisionClient(
endpoint="https://" + COMPUTERVISION_LOCATION + ".api.cognitive.microsoft.com/",
credentials=CognitiveServicesCredentials(SUBSCRIPTION_KEY_ENV_NAME)
)
print("AZURE READ Image Name {}".format(filename))
p = Path(filename)
(imgname,imgext) = os.path.splitext(p.name)
# Check if we have a cached ocr response already for this provider
invokeOCR=callOCR
if not callOCR:
if not os.path.exists(os.path.join(self.RESULTS_FOLDER, imgname+".azure.read.json")):
invokeOCR=True
if invokeOCR:
# Azure Computer Vision Call
with open(os.path.join(self.IMAGES_FOLDER, filename), "rb") as image_stream:
job = azure_client.read_in_stream(
image=image_stream,
raw=True
)
operation_id = job.headers['Operation-Location'].split('/')[-1]
image_analysis = azure_client.get_read_result(operation_id,raw=True)
while str.lower(image_analysis.output.status) in ['notstarted', 'running']:
time.sleep(1)
image_analysis = azure_client.get_read_result(operation_id=operation_id,raw=True)
print("\tJob completion is: {}".format(image_analysis.output.status))
print("\tRecognized {} page(s)".format(len(image_analysis.output.analyze_result.read_results)))
with open(os.path.join(self.RESULTS_FOLDER, imgname+".azure.read.json"), 'w') as outfile:
outfile.write(image_analysis.response.content.decode("utf-8"))
with open(os.path.join(self.RESULTS_FOLDER, imgname+".azure.read.txt"), 'w') as outfile:
for rec in image_analysis.output.analyze_result.read_results:
for line in rec.lines:
outfile.write(line.text)
outfile.write('\n')
ocrresponse=image_analysis.response.content.decode("utf-8")
else:
# Use local OCR cached response when available
with open(os.path.join(self.RESULTS_FOLDER, imgname+".azure.read.json"), 'r') as cachefile:
ocrresponse = cachefile.read().replace('\n', '')
# Create BBOX OCR Response from Azure CV string response
bboxresponse=self.bboxhelper.processAzureOCRResponse(ocrresponse,verbose=verbose)
if bboxresponse:
print("BBOX Helper Response {}".format(bboxresponse.__dict__))
# Write the improved ocr response
with open(os.path.join(self.RESULTS_FOLDER, imgname+".azure.read.bbox.json"), 'w') as outfile:
outfile.write(json.dumps(bboxresponse.__dict__, default = lambda o: o.__dict__, indent=4))
# Write the improved ocr text
with open(os.path.join(self.RESULTS_FOLDER, imgname+".azure.read.bbox.txt"), 'w') as outfile:
outfile.write(bboxresponse.text)
try:
if imgext not in '.pdf':
# Create the Before and After images
imagefn=os.path.join(self.IMAGES_FOLDER, filename)
image = Image.open(imagefn)
bboximg = image.copy()
# Write the Azure OCR resulted boxes image
jsonres = json.loads(ocrresponse)
if "recognitionResults" in jsonres:
blocks=jsonres["recognitionResults"]
elif "analyzeResult" in jsonres:
blocks=jsonres["analyzeResult"]["readResults"]
elif "regions" in jsonres:
blocks=jsonres["regions"]
else:
blocks={}
for block in blocks:
for line in block["lines"]:
for word in line["words"]:
image = self.draw_boxes(image,word["boundingBox"],'yellow')
image = self.draw_boxes(image,line["boundingBox"],'red',padding=1)
OCRUtils.save_boxed_image(image,os.path.join(self.RESULTS_FOLDER, imgname+".azure.read"+imgext))
if bboxresponse:
# Write the BBOX resulted boxes image
OCRUtils.draw_bboxes(bboximg, bboxresponse, 'black',padding=1)
OCRUtils.save_boxed_image(bboximg,os.path.join(self.RESULTS_FOLDER, imgname+".azure.read.bbox"+imgext))
except Exception as ex:
print(ex)
pass
| en | 0.725845 | # Azure CV Support # OCRLAYOUT Import # # Azure Specific # # Import Draw a border around the image using the hints in the vector list. # Convert the given bounding box to BBOXPoint # Check if we have a cached ocr response already for this provider # Set Content-Type to octet-stream # Azure Computer Vision OCR API Call # response.raise_for_status() # Extract the word bounding boxes and text. # Use local OCR cached response when available # Create BBOX OCR Response from Azure CV string response # Write the improved ocr response # Write the improved ocr text # Create the Before and After images # Write the Azure OCR resulted boxes image # Write the BBOX resulted boxes image RecognizeTextUsingBatchReadAPI.
This will recognize text of the given image using the Batch Read API. # Check if we have a cached ocr response already for this provider # Azure Computer Vision Call # Use local OCR cached response when available # Create BBOX OCR Response from Azure CV string response # Write the improved ocr response # Write the improved ocr text # Create the Before and After images # Write the Azure OCR resulted boxes image # Write the BBOX resulted boxes image | 2.372848 | 2 |
tests/test_cli.py | benranderson/uhb | 0 | 6622544 | <filename>tests/test_cli.py
"""Tests for cli module."""
import pytest
import json
import click
from click.testing import CliRunner
from uhb import cli
test_inputs = {"D_o": 0.1731, "f": 0.6}
# def test_command_line_interface():
# """Test the CLI."""
# runner = CliRunner()
# with runner.isolated_filesystem():
# with open("inputs.json", "w") as f:
# json.dump(test_inputs, f)
# result = runner.invoke(cli.main, ["inputs.json"])
# assert result.exit_code == 0
# assert "Calculating soil stiffnesses..." in result.output
# help_result = runner.invoke(cli.main, ["--help"])
# assert help_result.exit_code == 0
# assert "--help Show this message and exit." in help_result.output
| <filename>tests/test_cli.py
"""Tests for cli module."""
import pytest
import json
import click
from click.testing import CliRunner
from uhb import cli
test_inputs = {"D_o": 0.1731, "f": 0.6}
# def test_command_line_interface():
# """Test the CLI."""
# runner = CliRunner()
# with runner.isolated_filesystem():
# with open("inputs.json", "w") as f:
# json.dump(test_inputs, f)
# result = runner.invoke(cli.main, ["inputs.json"])
# assert result.exit_code == 0
# assert "Calculating soil stiffnesses..." in result.output
# help_result = runner.invoke(cli.main, ["--help"])
# assert help_result.exit_code == 0
# assert "--help Show this message and exit." in help_result.output
| en | 0.575805 | Tests for cli module. # def test_command_line_interface(): # """Test the CLI.""" # runner = CliRunner() # with runner.isolated_filesystem(): # with open("inputs.json", "w") as f: # json.dump(test_inputs, f) # result = runner.invoke(cli.main, ["inputs.json"]) # assert result.exit_code == 0 # assert "Calculating soil stiffnesses..." in result.output # help_result = runner.invoke(cli.main, ["--help"]) # assert help_result.exit_code == 0 # assert "--help Show this message and exit." in help_result.output | 2.433151 | 2 |
lc0219_contains_duplicate_ii.py | bowen0701/python-algorithms-data-structures | 8 | 6622545 | """Leetcode 219. Contains Duplicate II
Easy
URL: https://leetcode.com/problems/contains-duplicate-ii/
Given an array of integers and an integer k,
find out whether there are two distinct indices i and j in the array such that
nums[i] = nums[j] and the absolute difference between i and j is at most k.
Example 1:
Input: nums = [1,2,3,1], k = 3
Output: true
Example 2:
Input: nums = [1,0,1,1], k = 1
Output: true
Example 3:
Input: nums = [1,2,3,1,2,3], k = 2
Output: false
"""
from typing import List
class SolutionNumIdxDict(object):
def containsNearbyDuplicate(self, nums: List[int], k: int) -> bool:
"""
:type nums: List[int]
:type k: int
:rtype: bool
Time complexity: O(n).
Space complexity: O(n).
"""
# Create a dict to collect number and their indices.
num_idx_d = dict()
for i, n in enumerate(nums):
if n in num_idx_d and i - num_idx_d[n] <= k:
return True
else:
num_idx_d[n] = i
return False
def main():
# Output: True
nums = [1,2,3,1]
k = 3
print(SolutionNumIdxDict().containsNearbyDuplicate(nums, k))
# Output: True
nums = [1,0,1,1]
k = 1
print(SolutionNumIdxDict().containsNearbyDuplicate(nums, k))
# Output: False
nums = [1,2,3,1,2,3]
k = 2
print(SolutionNumIdxDict().containsNearbyDuplicate(nums, k))
if __name__ == '__main__':
main()
| """Leetcode 219. Contains Duplicate II
Easy
URL: https://leetcode.com/problems/contains-duplicate-ii/
Given an array of integers and an integer k,
find out whether there are two distinct indices i and j in the array such that
nums[i] = nums[j] and the absolute difference between i and j is at most k.
Example 1:
Input: nums = [1,2,3,1], k = 3
Output: true
Example 2:
Input: nums = [1,0,1,1], k = 1
Output: true
Example 3:
Input: nums = [1,2,3,1,2,3], k = 2
Output: false
"""
from typing import List
class SolutionNumIdxDict(object):
def containsNearbyDuplicate(self, nums: List[int], k: int) -> bool:
"""
:type nums: List[int]
:type k: int
:rtype: bool
Time complexity: O(n).
Space complexity: O(n).
"""
# Create a dict to collect number and their indices.
num_idx_d = dict()
for i, n in enumerate(nums):
if n in num_idx_d and i - num_idx_d[n] <= k:
return True
else:
num_idx_d[n] = i
return False
def main():
# Output: True
nums = [1,2,3,1]
k = 3
print(SolutionNumIdxDict().containsNearbyDuplicate(nums, k))
# Output: True
nums = [1,0,1,1]
k = 1
print(SolutionNumIdxDict().containsNearbyDuplicate(nums, k))
# Output: False
nums = [1,2,3,1,2,3]
k = 2
print(SolutionNumIdxDict().containsNearbyDuplicate(nums, k))
if __name__ == '__main__':
main()
| en | 0.712616 | Leetcode 219. Contains Duplicate II Easy URL: https://leetcode.com/problems/contains-duplicate-ii/ Given an array of integers and an integer k, find out whether there are two distinct indices i and j in the array such that nums[i] = nums[j] and the absolute difference between i and j is at most k. Example 1: Input: nums = [1,2,3,1], k = 3 Output: true Example 2: Input: nums = [1,0,1,1], k = 1 Output: true Example 3: Input: nums = [1,2,3,1,2,3], k = 2 Output: false :type nums: List[int] :type k: int :rtype: bool Time complexity: O(n). Space complexity: O(n). # Create a dict to collect number and their indices. # Output: True # Output: True # Output: False | 3.542938 | 4 |
test/test_strict_lax.py | kurtsansom/netcdf-flattener | 0 | 6622546 | """Project: NetCDF Flattener
Copyright (c) 2020 EUMETSAT
License: Apache License 2.0
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
"""
from base_test import BaseTest
class Test(BaseTest):
def test_something(self):
"""Global test of most functionalities.
Flatten input file 'input1.cdl' and compare to reference 'reference1.cdl'.
"""
# Inputs
input_name = "input3.cdl"
reference_name = "reference3.cdl"
output_name = "output3.nc"
# Use strict mode, expect exception
self.flatten_and_compare(input_name, output_name, reference_name, lax_mode=False, expect_exception=True)
# User lax mode, expect success
self.flatten_and_compare(input_name, output_name, reference_name, lax_mode=True, expect_exception=False)
| """Project: NetCDF Flattener
Copyright (c) 2020 EUMETSAT
License: Apache License 2.0
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
"""
from base_test import BaseTest
class Test(BaseTest):
def test_something(self):
"""Global test of most functionalities.
Flatten input file 'input1.cdl' and compare to reference 'reference1.cdl'.
"""
# Inputs
input_name = "input3.cdl"
reference_name = "reference3.cdl"
output_name = "output3.nc"
# Use strict mode, expect exception
self.flatten_and_compare(input_name, output_name, reference_name, lax_mode=False, expect_exception=True)
# User lax mode, expect success
self.flatten_and_compare(input_name, output_name, reference_name, lax_mode=True, expect_exception=False)
| en | 0.827962 | Project: NetCDF Flattener Copyright (c) 2020 EUMETSAT License: Apache License 2.0 Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. Global test of most functionalities. Flatten input file 'input1.cdl' and compare to reference 'reference1.cdl'. # Inputs # Use strict mode, expect exception # User lax mode, expect success | 1.876339 | 2 |
uiUtilities/makeScheduleDFdisplayable.py | jaime-varela/boaAnalysisTool | 2 | 6622547 | <filename>uiUtilities/makeScheduleDFdisplayable.py
from .dropdownData import dayOfWeekToLetter, stringToEnumGrouping
from .dropdownData import stringToEnumTextProcess, dayOfWeekToName
from .dropdownData import dayOfWeekToLetter, EnumScheduleToString
import numpy as np
#FIXME: get rid of this copied var (this is a problem!)
SCHEDULE_COLUMNS = ['Description','Frequency','Avg. Cost','Schedule']
def scheduleToString(x):
if not isinstance(x,np.ndarray):
if x < 0:
v = -1 * x
return str(v)
else:
return dayOfWeekToName[x]
# daily handler
x.sort()
retStr = ""
for index in range(len(x)):
retStr += dayOfWeekToLetter[x[index]]
if index != len(x) -1:
retStr += ","
return retStr
def makeScheduleDFdisplayable(scheduledDF):
description = SCHEDULE_COLUMNS[0]
frequency = SCHEDULE_COLUMNS[1]
avgCost = SCHEDULE_COLUMNS[2]
sched = SCHEDULE_COLUMNS[3]
newDF = scheduledDF
newDF[frequency] = newDF[frequency].apply(lambda x: EnumScheduleToString[x])
newDF[avgCost] = newDF[avgCost].apply(lambda x: str( np.round(x,decimals = 2) ) )
newDF[sched] = newDF[sched].apply(lambda x: scheduleToString(x))
return newDF
| <filename>uiUtilities/makeScheduleDFdisplayable.py
from .dropdownData import dayOfWeekToLetter, stringToEnumGrouping
from .dropdownData import stringToEnumTextProcess, dayOfWeekToName
from .dropdownData import dayOfWeekToLetter, EnumScheduleToString
import numpy as np
#FIXME: get rid of this copied var (this is a problem!)
SCHEDULE_COLUMNS = ['Description','Frequency','Avg. Cost','Schedule']
def scheduleToString(x):
if not isinstance(x,np.ndarray):
if x < 0:
v = -1 * x
return str(v)
else:
return dayOfWeekToName[x]
# daily handler
x.sort()
retStr = ""
for index in range(len(x)):
retStr += dayOfWeekToLetter[x[index]]
if index != len(x) -1:
retStr += ","
return retStr
def makeScheduleDFdisplayable(scheduledDF):
description = SCHEDULE_COLUMNS[0]
frequency = SCHEDULE_COLUMNS[1]
avgCost = SCHEDULE_COLUMNS[2]
sched = SCHEDULE_COLUMNS[3]
newDF = scheduledDF
newDF[frequency] = newDF[frequency].apply(lambda x: EnumScheduleToString[x])
newDF[avgCost] = newDF[avgCost].apply(lambda x: str( np.round(x,decimals = 2) ) )
newDF[sched] = newDF[sched].apply(lambda x: scheduleToString(x))
return newDF
| en | 0.729998 | #FIXME: get rid of this copied var (this is a problem!) # daily handler | 2.598998 | 3 |
2-28-22.py | H-isaac23/LeetCode-Challenges | 0 | 6622548 | <filename>2-28-22.py
# You are given a sorted unique integer array nums.
#
# Return the smallest sorted list of ranges that cover all the numbers in the array exactly. That is, each element of nums
# is covered by exactly one of the ranges, and there is no integer x such that x is in one of the ranges but not in nums.
#
# Each range [a,b] in the list should be output as:
#
# "a->b" if a != b
# "a" if a == b
#
# Example 1:
# Input: nums = [0,1,2,4,5,7]
# Output: ["0->2","4->5","7"]
# Explanation: The ranges are:
# [0,2] --> "0->2"
# [4,5] --> "4->5"
# [7,7] --> "7"
#
# Example 2:
# Input: nums = [0,2,3,4,6,8,9]
# Output: ["0","2->4","6","8->9"]
# Explanation: The ranges are:
# [0,0] --> "0"
# [2,4] --> "2->4"
# [6,6] --> "6"
# [8,9] --> "8->9"
#
# Constraints:
#
# 0 <= nums.length <= 20
# -231 <= nums[i] <= 231 - 1
# All the values of nums are unique.
# nums is sorted in ascending order.
class Solution:
def summaryRanges(self, nums: List[int]) -> List[str]:
ranges = []
current_range = ""
for i in range(len(nums)):
if i == len(nums) - 1:
if len(current_range) == 0:
current_range += str(nums[i])
else:
current_range += f"->{nums[i]}"
ranges.append(current_range)
elif nums[i] + 1 == nums[i + 1]:
if len(current_range) == 0:
current_range += str(nums[i])
else:
if len(current_range) > 0 and nums[i] != int(current_range):
current_range += f"->{nums[i]}"
else:
current_range += str(nums[i])
ranges.append(current_range)
current_range = ""
return ranges
| <filename>2-28-22.py
# You are given a sorted unique integer array nums.
#
# Return the smallest sorted list of ranges that cover all the numbers in the array exactly. That is, each element of nums
# is covered by exactly one of the ranges, and there is no integer x such that x is in one of the ranges but not in nums.
#
# Each range [a,b] in the list should be output as:
#
# "a->b" if a != b
# "a" if a == b
#
# Example 1:
# Input: nums = [0,1,2,4,5,7]
# Output: ["0->2","4->5","7"]
# Explanation: The ranges are:
# [0,2] --> "0->2"
# [4,5] --> "4->5"
# [7,7] --> "7"
#
# Example 2:
# Input: nums = [0,2,3,4,6,8,9]
# Output: ["0","2->4","6","8->9"]
# Explanation: The ranges are:
# [0,0] --> "0"
# [2,4] --> "2->4"
# [6,6] --> "6"
# [8,9] --> "8->9"
#
# Constraints:
#
# 0 <= nums.length <= 20
# -231 <= nums[i] <= 231 - 1
# All the values of nums are unique.
# nums is sorted in ascending order.
class Solution:
def summaryRanges(self, nums: List[int]) -> List[str]:
ranges = []
current_range = ""
for i in range(len(nums)):
if i == len(nums) - 1:
if len(current_range) == 0:
current_range += str(nums[i])
else:
current_range += f"->{nums[i]}"
ranges.append(current_range)
elif nums[i] + 1 == nums[i + 1]:
if len(current_range) == 0:
current_range += str(nums[i])
else:
if len(current_range) > 0 and nums[i] != int(current_range):
current_range += f"->{nums[i]}"
else:
current_range += str(nums[i])
ranges.append(current_range)
current_range = ""
return ranges
| en | 0.768752 | # You are given a sorted unique integer array nums. # # Return the smallest sorted list of ranges that cover all the numbers in the array exactly. That is, each element of nums # is covered by exactly one of the ranges, and there is no integer x such that x is in one of the ranges but not in nums. # # Each range [a,b] in the list should be output as: # # "a->b" if a != b # "a" if a == b # # Example 1: # Input: nums = [0,1,2,4,5,7] # Output: ["0->2","4->5","7"] # Explanation: The ranges are: # [0,2] --> "0->2" # [4,5] --> "4->5" # [7,7] --> "7" # # Example 2: # Input: nums = [0,2,3,4,6,8,9] # Output: ["0","2->4","6","8->9"] # Explanation: The ranges are: # [0,0] --> "0" # [2,4] --> "2->4" # [6,6] --> "6" # [8,9] --> "8->9" # # Constraints: # # 0 <= nums.length <= 20 # -231 <= nums[i] <= 231 - 1 # All the values of nums are unique. # nums is sorted in ascending order. | 3.780117 | 4 |
src/armodel/parser/arxml_parser.py | melodypapa/py-armodel | 2 | 6622549 | <reponame>melodypapa/py-armodel
from ..models import AUTOSAR, ARPackage, ARObject, EcuAbstractionSwComponentType, AtomicSwComponentType, SwComponentType, CompositionSwComponentType
from ..models import SwcInternalBehavior, RunnableEntity, RTEEvent, VariableAccess, ServerCallPoint, OperationInvokedEvent
from ..models import RefType, AutosarVariableRef, ArVariableInImplementationDataInstanceRef, POperationInAtomicSwcInstanceRef, ROperationInAtomicSwcInstanceRef
from ..models import ImplementationDataType, SwDataDefProps, SwPointerTargetProps, DataTypeMappingSet, DataTypeMap, ImplementationDataTypeElement
from ..models import DataPrototype, RPortPrototype, PPortPrototype
from ..models import ReceiverComSpec, ClientComSpec, NonqueuedReceiverComSpec, QueuedReceiverComSpec
from ..models import SenderComSpec, NonqueuedSenderComSpec
from ..models import SenderReceiverInterface, ClientServerInterface, ClientServerOperation, ArgumentDataPrototype
from ..models import AutosarDataType, ARElement
from ..models import AssemblySwConnector, ProvidedPortPrototypeInstanceRef, RequiredPortPrototypeInstanceRef
from ..models import CompuMethod, CompuScale, Limit, CompuScales, Compu, CompuConst, CompuConstTextContent
from typing import List
import xml.etree.ElementTree as ET
import re
class ARXMLParser:
def __init__(self):
self.nsmap = {"xmlns": "http://autosar.org/schema/r4.0"}
def getPureTagName(self, tag):
return re.sub(r'\{[\w:\/.]+\}(\w+)', r'\1', tag)
def readChildElement(self, short_name: str, element, key: str) -> str:
child_element = element.find("./xmlns:%s" % key, self.nsmap)
if (child_element != None):
return child_element.text
raise ValueError("The attribute %s of <%s> has not been defined" % (key, short_name))
def readChildElementBooleanValue(self, short_name, element, key) -> bool:
value = self.readChildElement(short_name, element, key)
if (value == "true"):
return True
return False
def readChildOptionalElement(self, element, key) -> str:
child_element = element.find("./xmlns:%s" % key, self.nsmap)
if (child_element != None):
return child_element.text
return None
def readChildOptionElementBooleanValue(self, element, key) -> bool:
value = self.readChildElement("", element, key)
if (value == None):
return None
elif (value == "True"):
return True
else:
return False
def readChildLimitElement(self, element, key) -> Limit:
child_element = element.find("./xmlns:%s" % key, self.nsmap)
if (child_element != None):
limit = Limit()
limit.interval_type = child_element.attrib['INTERVAL-TYPE']
limit.value = child_element.text
return limit
return None
def readShortName(self, element) -> str:
return self.readChildElement("", element, "SHORT-NAME")
def readChildRefElement(self, element, key) -> RefType:
child_element = element.find("./xmlns:%s" % key, self.nsmap)
if (child_element != None):
ref = RefType()
ref.dest = child_element.attrib['DEST']
ref.value = child_element.text
return ref
return None
def readChildRefElementList(self, element, key) -> List[RefType]:
child_elements = element.findall("./xmlns:%s" % key, self.nsmap)
results = []
for child_element in child_elements:
ref = RefType()
ref.dest = child_element.attrib['DEST']
ref.value = child_element.text
results.append(ref)
return results
def readSwInternalBehavior(self, element, parent: AtomicSwComponentType):
for child_element in element.findall("./xmlns:INTERNAL-BEHAVIORS/xmlns:SWC-INTERNAL-BEHAVIOR", self.nsmap):
short_name = self.readShortName(child_element)
behavior = parent.createSwcInternalBehavior(short_name)
self.readRunnableEntities(child_element, behavior)
self.readOperationInvokedEvents(child_element, behavior)
self.readTimingEvents(child_element, behavior)
self.readInternalTriggerOccurredEvent(child_element, behavior)
self.readExplicitInterRunnableVariables(child_element, behavior)
def readAutosarVariableInImplDatatype(self, element, accessed_variable_ref: AutosarVariableRef):
child_element = element.find("./xmlns:ACCESSED-VARIABLE/xmlns:AUTOSAR-VARIABLE-IREF", self.nsmap)
if (child_element != None):
autosar_variable_in_impl_datatype = ArVariableInImplementationDataInstanceRef()
autosar_variable_in_impl_datatype.port_prototype_ref = self.readChildRefElement(child_element, "PORT-PROTOTYPE-REF")
autosar_variable_in_impl_datatype.target_data_prototype_ref = self.readChildRefElement(child_element, "TARGET-DATA-PROTOTYPE-REF")
accessed_variable_ref.autosar_variable_in_impl_datatype = autosar_variable_in_impl_datatype
def readLocalVariableRef(self, element, accessed_variable_ref: AutosarVariableRef):
child_element = element.find("./xmlns:ACCESSED-VARIABLE", self.nsmap)
if (child_element != None):
accessed_variable_ref.local_variable_ref = self.readChildRefElement(child_element, "LOCAL-VARIABLE-REF")
def _readVariableAccesses(self, element, parent: RunnableEntity, key: str):
for child_element in element.findall("./xmlns:%s/xmlns:VARIABLE-ACCESS" % key, self.nsmap):
short_name = self.readShortName(child_element)
if (key == "DATA-RECEIVE-POINT-BY-ARGUMENTS"):
variable_access = parent.createDataReceivePointByArgument(short_name)
self.readAutosarVariableInImplDatatype(child_element, variable_access.accessed_variable_ref)
elif (key == "DATA-RECEIVE-POINT-BY-VALUES"):
variable_access = parent.createDataReceivePointByValue(short_name)
self.readAutosarVariableInImplDatatype(child_element, variable_access.accessed_variable_ref)
elif (key == "DATA-READ-ACCESSS"):
variable_access = parent.createDataReadAccess(short_name)
self.readAutosarVariableInImplDatatype(child_element, variable_access.accessed_variable_ref)
elif (key == "DATA-SEND-POINTS"):
variable_access = parent.createDataSendPoint(short_name)
self.readAutosarVariableInImplDatatype(child_element, variable_access.accessed_variable_ref)
elif (key == "WRITTEN-LOCAL-VARIABLES"):
variable_access = parent.createWrittenLocalVariable(short_name)
self.readLocalVariableRef(child_element, variable_access.accessed_variable_ref)
elif (key == "READ-LOCAL-VARIABLES"):
variable_access = parent.createReadLocalVariable(short_name)
self.readLocalVariableRef(child_element, variable_access.accessed_variable_ref)
else:
raise ValueError("Invalid key type <%s>" % key)
    def readDataReceivePointByArguments(self, element, parent: RunnableEntity):
        """Read all DATA-RECEIVE-POINT-BY-ARGUMENTS accesses into *parent*."""
        self._readVariableAccesses(element, parent, "DATA-RECEIVE-POINT-BY-ARGUMENTS")
    def readDataReceivePointByValues(self, element, parent: RunnableEntity):
        """Read all DATA-RECEIVE-POINT-BY-VALUES accesses into *parent*."""
        self._readVariableAccesses(element, parent, "DATA-RECEIVE-POINT-BY-VALUES")
    def readDataReadAccesses(self, element, parent: RunnableEntity):
        # NOTE(review): triple-'S' spelling "DATA-READ-ACCESSS" — presumably
        # mirrors the schema tag used by this project's ARXML files; confirm.
        self._readVariableAccesses(element, parent, "DATA-READ-ACCESSS")
    def readDataSendPoints(self, element, parent: RunnableEntity):
        """Read all DATA-SEND-POINTS accesses into *parent*."""
        self._readVariableAccesses(element, parent, "DATA-SEND-POINTS")
    def readWrittenLocalVariables(self, element, parent: RunnableEntity):
        """Read all WRITTEN-LOCAL-VARIABLES accesses into *parent*."""
        self._readVariableAccesses(element, parent, "WRITTEN-LOCAL-VARIABLES")
    def readReadLocalVariables(self, element, parent: RunnableEntity):
        """Read all READ-LOCAL-VARIABLES accesses into *parent*."""
        self._readVariableAccesses(element, parent, "READ-LOCAL-VARIABLES")
def readROperationIRef(self, element, serverCallPoint: ServerCallPoint):
child_element = element.find("./xmlns:OPERATION-IREF", self.nsmap)
if (child_element != None):
operation_iref = ROperationInAtomicSwcInstanceRef()
operation_iref.context_r_port_ref = self.readChildRefElement(child_element, "CONTEXT-R-PORT-REF")
operation_iref.target_required_operation_ref = self.readChildRefElement(child_element, "TARGET-REQUIRED-OPERATION-REF")
serverCallPoint.operation_iref = operation_iref
def readSynchronousServerCallPoint(self, element, parent: RunnableEntity):
for child_element in element.findall("./xmlns:SERVER-CALL-POINTS/xmlns:SYNCHRONOUS-SERVER-CALL-POINT", self.nsmap):
short_name = self.readShortName(child_element)
serverCallPoint = parent.createSynchronousServerCallPoint(short_name)
serverCallPoint.timeout = self.readChildElement(short_name, child_element, "TIMEOUT")
self.readROperationIRef(child_element, serverCallPoint)
def readAsynchronousServerCallPoint(self, element, parent: RunnableEntity):
for child_element in element.findall("./xmlns:SERVER-CALL-POINTS/xmlns:ASYNCHRONOUS-SERVER-CALL-POINT", self.nsmap):
short_name = self.readShortName(child_element)
serverCallPoint = parent.createAsynchronousServerCallPoint(short_name)
serverCallPoint.timeout = self.readChildElement(short_name, child_element, "TIMEOUT")
self.readROperationIRef(child_element, serverCallPoint)
def readInternalTriggeringPoint(self, element, parent: RunnableEntity):
for child_element in element.findall("./xmlns:INTERNAL-TRIGGERING-POINTS/xmlns:INTERNAL-TRIGGERING-POINT", self.nsmap):
short_name = self.readShortName(child_element)
point = parent.createInternalTriggeringPoint(short_name)
point.sw_impl_policy = self.readChildOptionalElement(child_element, "SW-IMPL-POLICY")
def readRunnableEntities(self, element, parent: SwcInternalBehavior):
for child_element in element.findall("./xmlns:RUNNABLES/xmlns:RUNNABLE-ENTITY", self.nsmap):
short_name = self.readShortName(child_element)
runnable = parent.createRunnableEntity(short_name)
runnable.can_be_invoked_concurrently = self.readChildElement(short_name, child_element, "CAN-BE-INVOKED-CONCURRENTLY")
runnable.symbol = self.readChildElement(short_name, child_element, "SYMBOL")
self.readDataReceivePointByArguments(child_element, runnable)
self.readDataReceivePointByValues(child_element, runnable)
self.readDataReadAccesses(child_element, runnable)
self.readDataSendPoints(child_element, runnable)
self.readWrittenLocalVariables(child_element, runnable)
self.readReadLocalVariables(child_element, runnable)
self.readSynchronousServerCallPoint(child_element, runnable)
self.readAsynchronousServerCallPoint(child_element, runnable)
self.readInternalTriggeringPoint(child_element, runnable)
    def readRTEEvent(self, element, event: RTEEvent):
        """Read the START-ON-EVENT-REF shared by all RTE event kinds."""
        event.start_on_event_ref = self.readChildRefElement(element, "START-ON-EVENT-REF")
    def readOperationIRef(self, element, parent: OperationInvokedEvent):
        """Read the optional OPERATION-IREF (provided-operation instance ref)."""
        child_element = element.find("./xmlns:OPERATION-IREF", self.nsmap)
        if (child_element != None):
            parent.operation_iref = POperationInAtomicSwcInstanceRef()
            parent.operation_iref.context_p_port_ref = self.readChildRefElement(child_element, "CONTEXT-P-PORT-REF")
            parent.operation_iref.target_provided_operation_ref = self.readChildRefElement(child_element, "TARGET-PROVIDED-OPERATION-REF")
    def readOperationInvokedEvents(self, element, parent: SwcInternalBehavior):
        """Read every OPERATION-INVOKED-EVENT of *parent*."""
        for child_element in element.findall("./xmlns:EVENTS/xmlns:OPERATION-INVOKED-EVENT", self.nsmap):
            short_name = self.readShortName(child_element)
            event = parent.createOperationInvokedEvent(short_name)
            self.readOperationIRef(child_element, event)
            self.readRTEEvent(child_element, event)
def readExplicitInterRunnableVariables(self, element, parent: SwcInternalBehavior):
for child_element in element.findall("./xmlns:EXPLICIT-INTER-RUNNABLE-VARIABLES/xmlns:VARIABLE-DATA-PROTOTYPE", self.nsmap):
short_name = self.readShortName(child_element)
prototype = parent.createExplicitInterRunnableVariable(short_name)
self.readSwDataDefProps(child_element, prototype)
prototype.type_tref = self.readChildRefElement(child_element, "TYPE-TREF")
def readTimingEvents(self, element, parent: SwcInternalBehavior):
for child_element in element.findall("./xmlns:EVENTS/xmlns:TIMING-EVENT", self.nsmap):
short_name = self.readShortName(child_element)
event = parent.createTimingEvent(short_name)
self.readRTEEvent(child_element, event)
offset = self.readChildOptionalElement(child_element, "OFFSET")
if (offset != None):
event.offset = (float)(offset)
event.period = (float)(self.readChildElement(short_name, child_element, "PERIOD"))
def readInternalTriggerOccurredEvent(self, element, parent: SwcInternalBehavior):
for child_element in element.findall("./xmlns:EVENTS/xmlns:INTERNAL-TRIGGER-OCCURRED-EVENT", self.nsmap):
short_name = self.readShortName(child_element)
event = parent.createInternalTriggerOccurredEvent(short_name)
self.readRTEEvent(child_element, event)
event.event_source_ref = self.readChildRefElement(child_element, "EVENT-SOURCE-REF")
def readSwPointerTargetProps(self, element, parent: ARElement):
child_element = element.find(
"./xmlns:SW-POINTER-TARGET-PROPS", self.nsmap)
if (child_element != None):
sw_pointer_target_props = SwPointerTargetProps()
sw_pointer_target_props.target_category = self.readChildElement("", child_element, "TARGET-CATEGORY")
self.readSwDataDefProps(child_element, sw_pointer_target_props)
parent.sw_pointer_target_props = sw_pointer_target_props
    def readSwDataDefProps(self, element, parent: ARElement):
        """Read the first SW-DATA-DEF-PROPS-CONDITIONAL variant into *parent*.

        All references (base type, data constraint, compu method,
        implementation data type) are optional; readChildRefElement returns
        None when a child is absent.
        """
        child_element = element.find(
            "./xmlns:SW-DATA-DEF-PROPS/xmlns:SW-DATA-DEF-PROPS-VARIANTS/xmlns:SW-DATA-DEF-PROPS-CONDITIONAL", self.nsmap)
        if (child_element != None):
            sw_data_def_props = SwDataDefProps()
            sw_data_def_props.base_type_ref = self.readChildRefElement(child_element, "BASE-TYPE-REF")
            sw_data_def_props.data_constr_ref = self.readChildRefElement(child_element, "DATA-CONSTR-REF")
            sw_data_def_props.compu_method_ref = self.readChildRefElement(child_element, "COMPU-METHOD-REF")
            sw_data_def_props.implementation_data_type_ref = self.readChildRefElement(child_element, "IMPLEMENTATION-DATA-TYPE-REF")
            sw_data_def_props.sw_calibration_access = self.readChildOptionalElement(child_element, "SW-CALIBRATION-ACCESS")
            self.readSwPointerTargetProps(child_element, sw_data_def_props)
            parent.sw_data_def_props = sw_data_def_props
    def readApplicationPrimitiveDataTypes(self, element, parent: ARPackage):
        """Read every APPLICATION-PRIMITIVE-DATA-TYPE of the package."""
        for child_element in element.findall("./xmlns:ELEMENTS/xmlns:APPLICATION-PRIMITIVE-DATA-TYPE", self.nsmap):
            short_name = self.readShortName(child_element)
            data_type = parent.createApplicationPrimitiveDataType(short_name)
            data_type.category = self.readChildElement(short_name, child_element, "CATEGORY")
            self.readSwDataDefProps(child_element, data_type)
    def readApplicationRecordDataTypes(self, element, parent: ARPackage):
        """Read every APPLICATION-RECORD-DATA-TYPE of the package."""
        for child_element in element.findall("./xmlns:ELEMENTS/xmlns:APPLICATION-RECORD-DATA-TYPE", self.nsmap):
            short_name = self.readShortName(child_element)
            data_type = parent.createApplicationRecordDataType(short_name)
            data_type.category = self.readChildElement(short_name, child_element, "CATEGORY")
            self.readSwDataDefProps(child_element, data_type)
            # TODO: add read APPLICATION-RECORD-ELEMENT
def readImplementationDataTypeElements(self, element, parent: ARElement):
for child_element in element.findall("./xmlns:SUB-ELEMENTS/xmlns:IMPLEMENTATION-DATA-TYPE-ELEMENT", self.nsmap):
short_name = self.readShortName(child_element)
type_element = parent.createImplementationDataTypeElement(short_name) # type: ImplementationDataTypeElement
type_element.category = self.readChildElement(short_name, child_element, "CATEGORY")
type_element.array_size = self.readChildOptionalElement(child_element, "ARRAY-SIZE")
type_element.array_size_semantics = self.readChildOptionalElement(child_element, "ARRAY-SIZE-SEMANTICS")
self.readImplementationDataTypeElements(child_element, type_element)
self.readSwDataDefProps(child_element, type_element)
def readImplementationDataTypes(self, element, parent: ARPackage):
for child_element in element.findall("./xmlns:ELEMENTS/xmlns:IMPLEMENTATION-DATA-TYPE", self.nsmap):
short_name = self.readShortName(child_element)
data_type = parent.createImplementationDataType(short_name)
data_type.category = self.readChildElement(short_name, child_element, "CATEGORY")
self.readImplementationDataTypeElements(child_element, data_type)
self.readSwDataDefProps(child_element, data_type)
if (data_type.category == ImplementationDataType.CATEGORY_ARRAY):
if (len(data_type.getImplementationDataTypeElements()) < 1):
raise ValueError("Array Sub-Element of <%s> do not defined." % data_type.short_name)
array_sub_element = data_type.getImplementationDataTypeElements()[0]
if (array_sub_element.category == ImplementationDataType.CATEGORY_TYPE_REFERENCE):
data_type.setArrayElementType(array_sub_element.sw_data_def_props.implementation_data_type_ref.value)
else:
raise ValueError("The catetory <%s> of array sub-element does not support." % array_sub_element.category)
def readSwDataTypes(self, element, parent: ARPackage):
for child_element in element.findall("./xmlns:ELEMENTS/xmlns:SW-BASE-TYPE", self.nsmap):
short_name = self.readShortName(child_element)
data_type = parent.createSwBaseType(short_name)
def readClientComSpec(self, element, parent: RPortPrototype):
for child_element in element.findall("./xmlns:REQUIRED-COM-SPECS/xmlns:CLIENT-COM-SPEC", self.nsmap):
try:
com_spec = ClientComSpec()
com_spec.operation_ref = self.readChildRefElement(child_element, "OPERATION-REF")
parent.addRequiredComSpec(com_spec)
except ValueError as err:
print(parent.short_name + ": " + str(err))
    def readReceiverComSpec(self, element, com_spec: ReceiverComSpec):
        """Fill the fields shared by all receiver com-spec kinds."""
        #FIXME: readchildElement
        com_spec.data_element_ref = self.readChildRefElement(element, "DATA-ELEMENT-REF")
        # HANDLE-OUT-OF-RANGE is read as mandatory (empty short name in message).
        com_spec.handle_out_of_range = self.readChildElement("", element, "HANDLE-OUT-OF-RANGE")
        com_spec.uses_end_to_end_protection = self.readChildOptionElementBooleanValue(element, "USES-END-TO-END-PROTECTION")
    def readNonqueuedReceiverComSpec(self, element, parent: RPortPrototype):
        """Read every NONQUEUED-RECEIVER-COM-SPEC of an R-port.

        A ValueError from the detail fields is reported and the (partially
        filled) com-spec is still added to the port.
        """
        for child_element in element.findall("./xmlns:REQUIRED-COM-SPECS/xmlns:NONQUEUED-RECEIVER-COM-SPEC", self.nsmap):
            com_spec = NonqueuedReceiverComSpec()
            self.readReceiverComSpec(child_element, com_spec)
            try:
                # FIXME:
                com_spec.alive_timeout = float(self.readChildElement("", child_element, "ALIVE-TIMEOUT"))
                com_spec.enable_updated = self.readChildElementBooleanValue("", child_element, "ENABLE-UPDATE")
                com_spec.handle_never_received = self.readChildElementBooleanValue("", child_element, "HANDLE-NEVER-RECEIVED")
                # NOTE(review): 'handel_timeout_type' looks misspelled, but it
                # must match the attribute name on the model class — confirm
                # before renaming.
                com_spec.handel_timeout_type = self.readChildElement("", child_element, "HANDLE-TIMEOUT-TYPE")
            except ValueError as err:
                print(parent.short_name + ": " + str(err))
            parent.addRequiredComSpec(com_spec)
def readRPortPrototype(self, element, parent: AtomicSwComponentType):
for child_element in element.findall("./xmlns:PORTS/xmlns:R-PORT-PROTOTYPE", self.nsmap):
short_name = self.readShortName(child_element)
prototype = parent.createRPortPrototype(short_name)
prototype.required_interface_tref = self.readChildRefElement(
child_element, "REQUIRED-INTERFACE-TREF")
self.readClientComSpec(child_element, prototype)
self.readNonqueuedReceiverComSpec(child_element, prototype)
    def readSenderComSpec(self, element, com_spec: SenderComSpec):
        """Fill the fields shared by all sender com-spec kinds."""
        # FIXME:
        com_spec.data_element_ref = self.readChildRefElement(element, "DATA-ELEMENT-REF")
        com_spec.handle_out_of_range = self.readChildElement("", element, "HANDLE-OUT-OF-RANGE")
        com_spec.uses_end_to_end_protection = self.readChildOptionElementBooleanValue(element, "USES-END-TO-END-PROTECTION")
    def readNonqueuedSenderComSpec(self, element, parent: PPortPrototype):
        """Read every NONQUEUED-SENDER-COM-SPEC of a P-port."""
        for child_element in element.findall("./xmlns:PROVIDED-COM-SPECS/xmlns:NONQUEUED-SENDER-COM-SPEC", self.nsmap):
            com_spec = NonqueuedSenderComSpec()
            self.readSenderComSpec(child_element, com_spec)
            parent.addProvidedComSpec(com_spec)
def readPPortPrototype(self, element, parent: AtomicSwComponentType):
for child_element in element.findall("./xmlns:PORTS/xmlns:P-PORT-PROTOTYPE", self.nsmap):
short_name = self.readShortName(child_element)
prototype = parent.createPPortPrototype(short_name)
prototype.provided_interface_tref = self.readChildRefElement(
child_element, "PROVIDED-INTERFACE-TREF")
self.readNonqueuedSenderComSpec(child_element, prototype)
    def readSwComponentType(self, element, parent: SwComponentType):
        """Read the parts common to all component kinds: R- and P-ports."""
        self.readRPortPrototype(element, parent)
        self.readPPortPrototype(element, parent)
    def readAtomicSwComponentType(self, element, parent: AtomicSwComponentType):
        """Read ports plus the SWC-INTERNAL-BEHAVIOR of an atomic component."""
        self.readSwComponentType(element, parent)
        self.readSwInternalBehavior(element, parent)
    def readEcuAbstractionSwComponents(self, element, parent: ARPackage):
        """Read every ECU-ABSTRACTION-SW-COMPONENT-TYPE of the package."""
        for child_element in element.findall("./xmlns:ELEMENTS/xmlns:ECU-ABSTRACTION-SW-COMPONENT-TYPE", self.nsmap):
            short_name = self.readShortName(child_element)
            sw_component = parent.createEcuAbstractionSwComponentType(
                short_name)
            self.readAtomicSwComponentType(child_element, sw_component)
    def readApplicationSwComponentTypes(self, element, parent: ARPackage):
        """Read every APPLICATION-SW-COMPONENT-TYPE of the package."""
        for child_element in element.findall("./xmlns:ELEMENTS/xmlns:APPLICATION-SW-COMPONENT-TYPE", self.nsmap):
            short_name = self.readShortName(child_element)
            sw_component = parent.createApplicationSwComponentType(short_name)
            self.readAtomicSwComponentType(child_element, sw_component)
def readComplexDeviceDriverSwComponentTypes(self, element, parent: ARPackage):
for child_element in element.findall("./xmlns:ELEMENTS/xmlns:COMPLEX-DEVICE-DRIVER-SW-COMPONENT-TYPE", self.nsmap):
short_name = self.readShortName(child_element)
sw_component = parent.createApplicationSwComponentType(short_name)
self.readAtomicSwComponentType(child_element, sw_component)
def readServiceSwComponentTypes(self, element, parent: ARPackage):
for child_element in element.findall("./xmlns:ELEMENTS/xmlns:SERVICE-SW-COMPONENT-TYPE", self.nsmap):
short_name = self.readShortName(child_element)
sw_component = parent.createServiceSwComponentType(short_name)
self.readAtomicSwComponentType(child_element, sw_component)
def readAssemblySwConnectorProviderIRef(self, element, parent: AssemblySwConnector):
child_element = element.find("./xmlns:PROVIDER-IREF", self.nsmap)
if (child_element != None):
provider_iref = ProvidedPortPrototypeInstanceRef()
provider_iref.context_component_ref = self.readChildRefElement(
child_element, "CONTEXT-COMPONENT-REF")
provider_iref.target_p_port_ref = self.readChildRefElement(
child_element, "TARGET-P-PORT-REF")
parent.provider_iref = provider_iref
def readAssemblySwConnectorRequesterIRef(self, element, parent: AssemblySwConnector):
child_element = element.find("./xmlns:REQUESTER-IREF", self.nsmap)
if (child_element != None):
requester_iref = RequiredPortPrototypeInstanceRef()
requester_iref.context_component_ref = self.readChildRefElement(
child_element, "CONTEXT-COMPONENT-REF")
requester_iref.target_r_port_ref = self.readChildRefElement(
child_element, "TARGET-R-PORT-REF")
parent.requester_iref = requester_iref
    def readAssemblySwConnectors(self, element, parent: CompositionSwComponentType):
        """Read every ASSEMBLY-SW-CONNECTOR of a composition."""
        for child_element in element.findall("./xmlns:CONNECTORS/xmlns:ASSEMBLY-SW-CONNECTOR", self.nsmap):
            short_name = self.readShortName(child_element)
            connector = parent.createAssemblySwConnector(short_name)
            self.readAssemblySwConnectorProviderIRef(child_element, connector)
            self.readAssemblySwConnectorRequesterIRef(child_element, connector)
    def readSwComponentPrototypes(self, element, parent: CompositionSwComponentType):
        """Read every SW-COMPONENT-PROTOTYPE (component instance) of a composition."""
        for child_element in element.findall("./xmlns:COMPONENTS/xmlns:SW-COMPONENT-PROTOTYPE", self.nsmap):
            short_name = self.readShortName(child_element)
            prototype = parent.createSwComponentPrototype(short_name)
            prototype.type_tref = self.readChildRefElement(
                child_element, "TYPE-TREF")
def readCompositionSwComponentTypes(self, element, parent: ARPackage):
for child_element in element.findall("./xmlns:ELEMENTS/xmlns:COMPOSITION-SW-COMPONENT-TYPE", self.nsmap):
short_name = self.readShortName(child_element)
sw_component = parent.createCompositionSwComponentType(short_name)
self.readSwComponentType(child_element, sw_component)
self.readSwComponentPrototypes(child_element, sw_component)
self.readAssemblySwConnectors(child_element, sw_component)
    def readDataTypeMap(self, element, parent: DataTypeMappingSet):
        """Read every DATA-TYPE-MAP (application -> implementation type)."""
        for child_element in element.findall("./xmlns:DATA-TYPE-MAPS/xmlns:DATA-TYPE-MAP", self.nsmap):
            data_type_map = DataTypeMap()
            data_type_map.application_data_type_ref = self.readChildRefElement(
                child_element, "APPLICATION-DATA-TYPE-REF")
            data_type_map.implementation_data_type_ref = self.readChildRefElement(
                child_element, "IMPLEMENTATION-DATA-TYPE-REF")
            parent.addDataTypeMap(data_type_map)
            # add the data type map to global namespace
            AUTOSAR.getInstance().addDataTypeMap(data_type_map)
    def readDataTypeMappingSets(self, element, parent: ARPackage):
        """Read every DATA-TYPE-MAPPING-SET of the package."""
        for child_element in element.findall("./xmlns:ELEMENTS/xmlns:DATA-TYPE-MAPPING-SET", self.nsmap):
            short_name = self.readShortName(child_element)
            mapping_set = parent.createDataTypeMappingSet(short_name)
            self.readDataTypeMap(child_element, mapping_set)
    def readVariableDataPrototype(self, element, parent: SenderReceiverInterface):
        """Read every VARIABLE-DATA-PROTOTYPE (data element) of the interface."""
        for child_element in element.findall("./xmlns:DATA-ELEMENTS/xmlns:VARIABLE-DATA-PROTOTYPE", self.nsmap):
            short_name = self.readShortName(child_element)
            prototype = parent.createDataElement(short_name)
            self.readSwDataDefProps(child_element, prototype)
            prototype.type_tref = self.readChildRefElement(
                child_element, "TYPE-TREF")
    def readSenderReceiverInterfaces(self, element, parent: ARPackage):
        """Read every SENDER-RECEIVER-INTERFACE of the package."""
        for child_element in element.findall("./xmlns:ELEMENTS/xmlns:SENDER-RECEIVER-INTERFACE", self.nsmap):
            short_name = self.readShortName(child_element)
            sr_interface = parent.createSenderReceiverInterface(short_name)
            self.readVariableDataPrototype(child_element, sr_interface)
def readArgumentDataPrototypes(self, element, parent: ClientServerOperation):
for child_element in element.findall("./xmlns:ARGUMENTS/xmlns:ARGUMENT-DATA-PROTOTYPE", self.nsmap):
short_name = self.readShortName(child_element)
prototype = ArgumentDataPrototype(property, short_name)
prototype.type_tref = self.readChildRefElement(
child_element, "TYPE-TREF")
prototype.direction = self.readChildElement(
short_name, child_element, "DIRECTION")
parent.addArgumentDataPrototype(prototype)
    def readPossibleErrorRefs(self, element, parent: ClientServerOperation):
        """Read the optional POSSIBLE-ERROR-REFS list of an operation."""
        child_element = element.find("./xmlns:POSSIBLE-ERROR-REFS", self.nsmap)
        if child_element != None:
            for ref in self.readChildRefElementList(child_element, "POSSIBLE-ERROR-REF"):
                parent.addPossibleErrorRef(ref)
    def readOperations(self, element, parent: ClientServerInterface):
        """Read every CLIENT-SERVER-OPERATION with arguments and error refs."""
        for child_element in element.findall("./xmlns:OPERATIONS/xmlns:CLIENT-SERVER-OPERATION", self.nsmap):
            short_name = self.readShortName(child_element)
            operation = parent.createOperation(short_name)
            self.readArgumentDataPrototypes(child_element, operation)
            self.readPossibleErrorRefs(child_element, operation)
def readPossibleErrors(self, element, parent: ClientServerInterface):
for child_element in element.findall("./xmlns:POSSIBLE-ERRORS/xmlns:APPLICATION-ERROR", self.nsmap):
short_name = self.readShortName(child_element)
error = parent.createApplicationError(short_name)
error.error_code = int(self.readChildElement(
short_name, child_element, "ERROR-CODE"))
def readClientServerInterfaces(self, element, parent: ARPackage):
for child_element in element.findall("./xmlns:ELEMENTS/xmlns:CLIENT-SERVER-INTERFACE", self.nsmap):
short_name = self.readShortName(child_element)
cs_interface = parent.createClientServerInterface(short_name)
cs_interface.is_service = self.readChildElement(
short_name, child_element, "IS-SERVICE")
self.readOperations(child_element, cs_interface)
self.readPossibleErrors(child_element, cs_interface)
    def readCompuConstTextContent(self, element, parent: CompuConstTextContent):
        """Read the optional COMPU-CONST/VT text into *parent*."""
        child_element = element.find(
            "./xmlns:COMPU-CONST/xmlns:VT", self.nsmap)
        if (child_element != None):
            parent.vt = child_element.text
    def readCompuScales(self, element, parent: CompuScales):
        """Read every COMPU-SCALE (limits + inverse value) into *parent*."""
        for child_element in element.findall('./xmlns:COMPU-SCALES/xmlns:COMPU-SCALE', self.nsmap):
            compu_scale = CompuScale()
            compu_scale.lower_limit = self.readChildLimitElement(
                child_element, "LOWER-LIMIT")
            compu_scale.upper_limit = self.readChildLimitElement(
                child_element, "UPPER-LIMIT")
            compu_scale.compu_inverse_value = CompuConstTextContent()
            self.readCompuConstTextContent(
                child_element, compu_scale.compu_inverse_value)
            parent.addCompuScale(compu_scale)
    def readCompuInternalToPhys(self, element, parent: CompuMethod):
        """Read the optional COMPU-INTERNAL-TO-PHYS block of a compu method."""
        child_element = element.find(
            "./xmlns:COMPU-INTERNAL-TO-PHYS", self.nsmap)
        if (child_element != None):
            parent.compu_internal_to_phys = Compu()
            parent.compu_internal_to_phys.compu_content = CompuScales()
            self.readCompuScales(
                child_element, parent.compu_internal_to_phys.compu_content)
    def readCompuMethods(self, element, parent: ARPackage):
        """Read every COMPU-METHOD of the package."""
        for child_element in element.findall("./xmlns:ELEMENTS/xmlns:COMPU-METHOD", self.nsmap):
            short_name = self.readShortName(child_element)
            compu_method = parent.createCompuMethod(short_name)
            compu_method.category = self.readChildElement(
                short_name, child_element, "CATEGORY")
            self.readCompuInternalToPhys(child_element, compu_method)
    def readARPackages(self, element, parent):
        """Recursively read every AR-PACKAGE and all supported element kinds.

        Each package is created on *parent* and then every element reader is
        applied to it; nested AR-PACKAGES recurse through this method.
        """
        for child_element in element.findall("./xmlns:AR-PACKAGES/xmlns:AR-PACKAGE", self.nsmap):
            short_name = self.readShortName(child_element)
            ar_package = parent.createARPackage(short_name)
            self.readSenderReceiverInterfaces(child_element, ar_package)
            self.readClientServerInterfaces(child_element, ar_package)
            self.readDataTypeMappingSets(child_element, ar_package)
            self.readARPackages(child_element, ar_package)
            self.readApplicationPrimitiveDataTypes(child_element, ar_package)
            self.readApplicationRecordDataTypes(child_element, ar_package)
            self.readImplementationDataTypes(child_element, ar_package)
            self.readSwDataTypes(child_element, ar_package)
            self.readCompuMethods(child_element, ar_package)
            self.readEcuAbstractionSwComponents(child_element, ar_package)
            self.readApplicationSwComponentTypes(child_element, ar_package)
            self.readComplexDeviceDriverSwComponentTypes(
                child_element, ar_package)
            self.readServiceSwComponentTypes(child_element, ar_package)
            self.readCompositionSwComponentTypes(child_element, ar_package)
def load(self, filename, document: AUTOSAR):
tree = ET.parse(filename)
root = tree.getroot()
if (self.getPureTagName(root.tag) != "AUTOSAR"):
raise ValueError("Invalid ARXML file <%s>" % filename)
print("Load %s ..." % filename)
self.readARPackages(root, document)
| from ..models import AUTOSAR, ARPackage, ARObject, EcuAbstractionSwComponentType, AtomicSwComponentType, SwComponentType, CompositionSwComponentType
from ..models import SwcInternalBehavior, RunnableEntity, RTEEvent, VariableAccess, ServerCallPoint, OperationInvokedEvent
from ..models import RefType, AutosarVariableRef, ArVariableInImplementationDataInstanceRef, POperationInAtomicSwcInstanceRef, ROperationInAtomicSwcInstanceRef
from ..models import ImplementationDataType, SwDataDefProps, SwPointerTargetProps, DataTypeMappingSet, DataTypeMap, ImplementationDataTypeElement
from ..models import DataPrototype, RPortPrototype, PPortPrototype
from ..models import ReceiverComSpec, ClientComSpec, NonqueuedReceiverComSpec, QueuedReceiverComSpec
from ..models import SenderComSpec, NonqueuedSenderComSpec
from ..models import SenderReceiverInterface, ClientServerInterface, ClientServerOperation, ArgumentDataPrototype
from ..models import AutosarDataType, ARElement
from ..models import AssemblySwConnector, ProvidedPortPrototypeInstanceRef, RequiredPortPrototypeInstanceRef
from ..models import CompuMethod, CompuScale, Limit, CompuScales, Compu, CompuConst, CompuConstTextContent
from typing import List
import xml.etree.ElementTree as ET
import re
class ARXMLParser:
    """Reads an AUTOSAR 4 ARXML file into the project's in-memory model."""
    def __init__(self):
        # Namespace map used by every ElementTree XPath query in this class.
        self.nsmap = {"xmlns": "http://autosar.org/schema/r4.0"}
def getPureTagName(self, tag):
return re.sub(r'\{[\w:\/.]+\}(\w+)', r'\1', tag)
def readChildElement(self, short_name: str, element, key: str) -> str:
child_element = element.find("./xmlns:%s" % key, self.nsmap)
if (child_element != None):
return child_element.text
raise ValueError("The attribute %s of <%s> has not been defined" % (key, short_name))
def readChildElementBooleanValue(self, short_name, element, key) -> bool:
value = self.readChildElement(short_name, element, key)
if (value == "true"):
return True
return False
def readChildOptionalElement(self, element, key) -> str:
child_element = element.find("./xmlns:%s" % key, self.nsmap)
if (child_element != None):
return child_element.text
return None
def readChildOptionElementBooleanValue(self, element, key) -> bool:
value = self.readChildElement("", element, key)
if (value == None):
return None
elif (value == "True"):
return True
else:
return False
def readChildLimitElement(self, element, key) -> Limit:
child_element = element.find("./xmlns:%s" % key, self.nsmap)
if (child_element != None):
limit = Limit()
limit.interval_type = child_element.attrib['INTERVAL-TYPE']
limit.value = child_element.text
return limit
return None
    def readShortName(self, element) -> str:
        """Return the mandatory SHORT-NAME text of *element*."""
        return self.readChildElement("", element, "SHORT-NAME")
def readChildRefElement(self, element, key) -> RefType:
child_element = element.find("./xmlns:%s" % key, self.nsmap)
if (child_element != None):
ref = RefType()
ref.dest = child_element.attrib['DEST']
ref.value = child_element.text
return ref
return None
def readChildRefElementList(self, element, key) -> List[RefType]:
child_elements = element.findall("./xmlns:%s" % key, self.nsmap)
results = []
for child_element in child_elements:
ref = RefType()
ref.dest = child_element.attrib['DEST']
ref.value = child_element.text
results.append(ref)
return results
    def readSwInternalBehavior(self, element, parent: AtomicSwComponentType):
        """Read every SWC-INTERNAL-BEHAVIOR of an atomic component."""
        for child_element in element.findall("./xmlns:INTERNAL-BEHAVIORS/xmlns:SWC-INTERNAL-BEHAVIOR", self.nsmap):
            short_name = self.readShortName(child_element)
            behavior = parent.createSwcInternalBehavior(short_name)
            self.readRunnableEntities(child_element, behavior)
            self.readOperationInvokedEvents(child_element, behavior)
            self.readTimingEvents(child_element, behavior)
            self.readInternalTriggerOccurredEvent(child_element, behavior)
            self.readExplicitInterRunnableVariables(child_element, behavior)
    def readAutosarVariableInImplDatatype(self, element, accessed_variable_ref: AutosarVariableRef):
        """Read the optional ACCESSED-VARIABLE/AUTOSAR-VARIABLE-IREF into the ref."""
        child_element = element.find("./xmlns:ACCESSED-VARIABLE/xmlns:AUTOSAR-VARIABLE-IREF", self.nsmap)
        if (child_element != None):
            autosar_variable_in_impl_datatype = ArVariableInImplementationDataInstanceRef()
            autosar_variable_in_impl_datatype.port_prototype_ref = self.readChildRefElement(child_element, "PORT-PROTOTYPE-REF")
            autosar_variable_in_impl_datatype.target_data_prototype_ref = self.readChildRefElement(child_element, "TARGET-DATA-PROTOTYPE-REF")
            accessed_variable_ref.autosar_variable_in_impl_datatype = autosar_variable_in_impl_datatype
    def readLocalVariableRef(self, element, accessed_variable_ref: AutosarVariableRef):
        """Read the optional ACCESSED-VARIABLE/LOCAL-VARIABLE-REF into the ref."""
        child_element = element.find("./xmlns:ACCESSED-VARIABLE", self.nsmap)
        if (child_element != None):
            accessed_variable_ref.local_variable_ref = self.readChildRefElement(child_element, "LOCAL-VARIABLE-REF")
    def _readVariableAccesses(self, element, parent: RunnableEntity, key: str):
        """Read every VARIABLE-ACCESS grouped under *key* and attach it to *parent*.

        The *key* selects the factory used on the runnable and how the
        accessed variable reference is resolved (impl-datatype vs. local).

        Raises:
            ValueError: if *key* is not a recognized access group (only when
                at least one matching child exists).
        """
        for child_element in element.findall("./xmlns:%s/xmlns:VARIABLE-ACCESS" % key, self.nsmap):
            short_name = self.readShortName(child_element)
            if (key == "DATA-RECEIVE-POINT-BY-ARGUMENTS"):
                variable_access = parent.createDataReceivePointByArgument(short_name)
                self.readAutosarVariableInImplDatatype(child_element, variable_access.accessed_variable_ref)
            elif (key == "DATA-RECEIVE-POINT-BY-VALUES"):
                variable_access = parent.createDataReceivePointByValue(short_name)
                self.readAutosarVariableInImplDatatype(child_element, variable_access.accessed_variable_ref)
            elif (key == "DATA-READ-ACCESSS"):
                variable_access = parent.createDataReadAccess(short_name)
                self.readAutosarVariableInImplDatatype(child_element, variable_access.accessed_variable_ref)
            elif (key == "DATA-SEND-POINTS"):
                variable_access = parent.createDataSendPoint(short_name)
                self.readAutosarVariableInImplDatatype(child_element, variable_access.accessed_variable_ref)
            elif (key == "WRITTEN-LOCAL-VARIABLES"):
                variable_access = parent.createWrittenLocalVariable(short_name)
                self.readLocalVariableRef(child_element, variable_access.accessed_variable_ref)
            elif (key == "READ-LOCAL-VARIABLES"):
                variable_access = parent.createReadLocalVariable(short_name)
                self.readLocalVariableRef(child_element, variable_access.accessed_variable_ref)
            else:
                raise ValueError("Invalid key type <%s>" % key)
    def readDataReceivePointByArguments(self, element, parent: RunnableEntity):
        """Read all DATA-RECEIVE-POINT-BY-ARGUMENTS accesses into *parent*."""
        self._readVariableAccesses(element, parent, "DATA-RECEIVE-POINT-BY-ARGUMENTS")
    def readDataReceivePointByValues(self, element, parent: RunnableEntity):
        """Read all DATA-RECEIVE-POINT-BY-VALUES accesses into *parent*."""
        self._readVariableAccesses(element, parent, "DATA-RECEIVE-POINT-BY-VALUES")
    def readDataReadAccesses(self, element, parent: RunnableEntity):
        """Read all DATA-READ-ACCESSS accesses into *parent*."""
        self._readVariableAccesses(element, parent, "DATA-READ-ACCESSS")
    def readDataSendPoints(self, element, parent: RunnableEntity):
        """Read all DATA-SEND-POINTS accesses into *parent*."""
        self._readVariableAccesses(element, parent, "DATA-SEND-POINTS")
    def readWrittenLocalVariables(self, element, parent: RunnableEntity):
        """Read all WRITTEN-LOCAL-VARIABLES accesses into *parent*."""
        self._readVariableAccesses(element, parent, "WRITTEN-LOCAL-VARIABLES")
    def readReadLocalVariables(self, element, parent: RunnableEntity):
        """Read all READ-LOCAL-VARIABLES accesses into *parent*."""
        self._readVariableAccesses(element, parent, "READ-LOCAL-VARIABLES")
    def readROperationIRef(self, element, serverCallPoint: ServerCallPoint):
        """Read the optional OPERATION-IREF of a server call point."""
        child_element = element.find("./xmlns:OPERATION-IREF", self.nsmap)
        if (child_element != None):
            operation_iref = ROperationInAtomicSwcInstanceRef()
            operation_iref.context_r_port_ref = self.readChildRefElement(child_element, "CONTEXT-R-PORT-REF")
            operation_iref.target_required_operation_ref = self.readChildRefElement(child_element, "TARGET-REQUIRED-OPERATION-REF")
            serverCallPoint.operation_iref = operation_iref
    def readSynchronousServerCallPoint(self, element, parent: RunnableEntity):
        """Read every SYNCHRONOUS-SERVER-CALL-POINT of the runnable."""
        for child_element in element.findall("./xmlns:SERVER-CALL-POINTS/xmlns:SYNCHRONOUS-SERVER-CALL-POINT", self.nsmap):
            short_name = self.readShortName(child_element)
            serverCallPoint = parent.createSynchronousServerCallPoint(short_name)
            serverCallPoint.timeout = self.readChildElement(short_name, child_element, "TIMEOUT")
            self.readROperationIRef(child_element, serverCallPoint)
    def readAsynchronousServerCallPoint(self, element, parent: RunnableEntity):
        """Read every ASYNCHRONOUS-SERVER-CALL-POINT of the runnable."""
        for child_element in element.findall("./xmlns:SERVER-CALL-POINTS/xmlns:ASYNCHRONOUS-SERVER-CALL-POINT", self.nsmap):
            short_name = self.readShortName(child_element)
            serverCallPoint = parent.createAsynchronousServerCallPoint(short_name)
            serverCallPoint.timeout = self.readChildElement(short_name, child_element, "TIMEOUT")
            self.readROperationIRef(child_element, serverCallPoint)
    def readInternalTriggeringPoint(self, element, parent: RunnableEntity):
        """Read every INTERNAL-TRIGGERING-POINT of the runnable."""
        for child_element in element.findall("./xmlns:INTERNAL-TRIGGERING-POINTS/xmlns:INTERNAL-TRIGGERING-POINT", self.nsmap):
            short_name = self.readShortName(child_element)
            point = parent.createInternalTriggeringPoint(short_name)
            # SW-IMPL-POLICY is optional, hence the optional reader.
            point.sw_impl_policy = self.readChildOptionalElement(child_element, "SW-IMPL-POLICY")
def readRunnableEntities(self, element, parent: SwcInternalBehavior):
        """Parse all RUNNABLE-ENTITY elements of an internal behavior into *parent*.

        For each runnable, reads the concurrency flag and the C symbol name,
        then delegates to the per-aspect readers (data access points, local
        variables, server call points and triggering points).
        """
        for child_element in element.findall("./xmlns:RUNNABLES/xmlns:RUNNABLE-ENTITY", self.nsmap):
            short_name = self.readShortName(child_element)
            runnable = parent.createRunnableEntity(short_name)
            runnable.can_be_invoked_concurrently = self.readChildElement(short_name, child_element, "CAN-BE-INVOKED-CONCURRENTLY")
            runnable.symbol = self.readChildElement(short_name, child_element, "SYMBOL")
            self.readDataReceivePointByArguments(child_element, runnable)
            self.readDataReceivePointByValues(child_element, runnable)
            self.readDataReadAccesses(child_element, runnable)
            self.readDataSendPoints(child_element, runnable)
            self.readWrittenLocalVariables(child_element, runnable)
            self.readReadLocalVariables(child_element, runnable)
            self.readSynchronousServerCallPoint(child_element, runnable)
            self.readAsynchronousServerCallPoint(child_element, runnable)
            self.readInternalTriggeringPoint(child_element, runnable)
def readRTEEvent(self, element, event: RTEEvent):
        """Read the START-ON-EVENT-REF shared by every RTE event into *event*."""
        event.start_on_event_ref = self.readChildRefElement(element, "START-ON-EVENT-REF")
def readOperationIRef(self, element, parent: OperationInvokedEvent):
        """Parse the OPERATION-IREF of an operation-invoked event into *parent*.

        When present, fills the context P-port reference and the target
        provided-operation reference.
        """
        child_element = element.find("./xmlns:OPERATION-IREF", self.nsmap)
        # PEP 8: compare against None with "is not", not "!=".
        if child_element is not None:
            parent.operation_iref = POperationInAtomicSwcInstanceRef()
            parent.operation_iref.context_p_port_ref = self.readChildRefElement(child_element, "CONTEXT-P-PORT-REF")
            parent.operation_iref.target_provided_operation_ref = self.readChildRefElement(child_element, "TARGET-PROVIDED-OPERATION-REF")
def readOperationInvokedEvents(self, element, parent: SwcInternalBehavior):
        """Parse all OPERATION-INVOKED-EVENT entries into *parent*."""
        xpath = "./xmlns:EVENTS/xmlns:OPERATION-INVOKED-EVENT"
        for event_element in element.findall(xpath, self.nsmap):
            name = self.readShortName(event_element)
            event = parent.createOperationInvokedEvent(name)
            self.readOperationIRef(event_element, event)
            self.readRTEEvent(event_element, event)
def readExplicitInterRunnableVariables(self, element, parent: SwcInternalBehavior):
        """Parse all explicit inter-runnable VARIABLE-DATA-PROTOTYPE entries into *parent*."""
        xpath = "./xmlns:EXPLICIT-INTER-RUNNABLE-VARIABLES/xmlns:VARIABLE-DATA-PROTOTYPE"
        for variable_element in element.findall(xpath, self.nsmap):
            name = self.readShortName(variable_element)
            variable = parent.createExplicitInterRunnableVariable(name)
            self.readSwDataDefProps(variable_element, variable)
            variable.type_tref = self.readChildRefElement(variable_element, "TYPE-TREF")
def readTimingEvents(self, element, parent: SwcInternalBehavior):
        """Parse all TIMING-EVENT entries into *parent*.

        OFFSET is optional, PERIOD is mandatory; both are converted to float.
        """
        for child_element in element.findall("./xmlns:EVENTS/xmlns:TIMING-EVENT", self.nsmap):
            short_name = self.readShortName(child_element)
            event = parent.createTimingEvent(short_name)
            self.readRTEEvent(child_element, event)
            offset = self.readChildOptionalElement(child_element, "OFFSET")
            # Idiomatic float() calls and "is not None" instead of the original
            # C-style "(float)(x)" casts and "!= None" comparison.
            if offset is not None:
                event.offset = float(offset)
            event.period = float(self.readChildElement(short_name, child_element, "PERIOD"))
def readInternalTriggerOccurredEvent(self, element, parent: SwcInternalBehavior):
        """Parse all INTERNAL-TRIGGER-OCCURRED-EVENT entries into *parent*."""
        xpath = "./xmlns:EVENTS/xmlns:INTERNAL-TRIGGER-OCCURRED-EVENT"
        for event_element in element.findall(xpath, self.nsmap):
            name = self.readShortName(event_element)
            event = parent.createInternalTriggerOccurredEvent(name)
            self.readRTEEvent(event_element, event)
            event.event_source_ref = self.readChildRefElement(event_element, "EVENT-SOURCE-REF")
def readSwPointerTargetProps(self, element, parent: ARElement):
        """Parse SW-POINTER-TARGET-PROPS into *parent*, if present."""
        child_element = element.find(
            "./xmlns:SW-POINTER-TARGET-PROPS", self.nsmap)
        # PEP 8: compare against None with "is not", not "!=".
        if child_element is not None:
            sw_pointer_target_props = SwPointerTargetProps()
            sw_pointer_target_props.target_category = self.readChildElement("", child_element, "TARGET-CATEGORY")
            self.readSwDataDefProps(child_element, sw_pointer_target_props)
            parent.sw_pointer_target_props = sw_pointer_target_props
def readSwDataDefProps(self, element, parent: ARElement):
        """Parse the first SW-DATA-DEF-PROPS-CONDITIONAL variant into *parent*.

        Reads the base-type / data-constraint / compu-method / implementation
        data type references, the calibration access, and any nested pointer
        target properties.  Leaves *parent* untouched when absent.
        """
        child_element = element.find(
            "./xmlns:SW-DATA-DEF-PROPS/xmlns:SW-DATA-DEF-PROPS-VARIANTS/xmlns:SW-DATA-DEF-PROPS-CONDITIONAL", self.nsmap)
        # PEP 8: compare against None with "is not", not "!=".
        if child_element is not None:
            sw_data_def_props = SwDataDefProps()
            sw_data_def_props.base_type_ref = self.readChildRefElement(child_element, "BASE-TYPE-REF")
            sw_data_def_props.data_constr_ref = self.readChildRefElement(child_element, "DATA-CONSTR-REF")
            sw_data_def_props.compu_method_ref = self.readChildRefElement(child_element, "COMPU-METHOD-REF")
            sw_data_def_props.implementation_data_type_ref = self.readChildRefElement(child_element, "IMPLEMENTATION-DATA-TYPE-REF")
            sw_data_def_props.sw_calibration_access = self.readChildOptionalElement(child_element, "SW-CALIBRATION-ACCESS")
            self.readSwPointerTargetProps(child_element, sw_data_def_props)
            parent.sw_data_def_props = sw_data_def_props
def readApplicationPrimitiveDataTypes(self, element, parent: ARPackage):
        """Parse all APPLICATION-PRIMITIVE-DATA-TYPE elements into *parent*."""
        xpath = "./xmlns:ELEMENTS/xmlns:APPLICATION-PRIMITIVE-DATA-TYPE"
        for type_element in element.findall(xpath, self.nsmap):
            name = self.readShortName(type_element)
            data_type = parent.createApplicationPrimitiveDataType(name)
            data_type.category = self.readChildElement(name, type_element, "CATEGORY")
            self.readSwDataDefProps(type_element, data_type)
def readApplicationRecordDataTypes(self, element, parent: ARPackage):
        """Parse all APPLICATION-RECORD-DATA-TYPE elements into *parent*."""
        # TODO: add read APPLICATION-RECORD-ELEMENT
        xpath = "./xmlns:ELEMENTS/xmlns:APPLICATION-RECORD-DATA-TYPE"
        for type_element in element.findall(xpath, self.nsmap):
            name = self.readShortName(type_element)
            data_type = parent.createApplicationRecordDataType(name)
            data_type.category = self.readChildElement(name, type_element, "CATEGORY")
            self.readSwDataDefProps(type_element, data_type)
def readImplementationDataTypeElements(self, element, parent: ARElement):
        """Recursively parse IMPLEMENTATION-DATA-TYPE-ELEMENT sub-elements into *parent*."""
        xpath = "./xmlns:SUB-ELEMENTS/xmlns:IMPLEMENTATION-DATA-TYPE-ELEMENT"
        for sub_element in element.findall(xpath, self.nsmap):
            name = self.readShortName(sub_element)
            type_element = parent.createImplementationDataTypeElement(name)  # type: ImplementationDataTypeElement
            type_element.category = self.readChildElement(name, sub_element, "CATEGORY")
            type_element.array_size = self.readChildOptionalElement(sub_element, "ARRAY-SIZE")
            type_element.array_size_semantics = self.readChildOptionalElement(sub_element, "ARRAY-SIZE-SEMANTICS")
            # Nested sub-elements (e.g. arrays of structures) are read recursively.
            self.readImplementationDataTypeElements(sub_element, type_element)
            self.readSwDataDefProps(sub_element, type_element)
def readImplementationDataTypes(self, element, parent: ARPackage):
        """Parse all IMPLEMENTATION-DATA-TYPE elements into *parent*.

        For ARRAY-category types the first sub-element must be a
        TYPE-REFERENCE; its referenced implementation data type becomes the
        array element type.

        Raises:
            ValueError: if an ARRAY type has no sub-element, or its sub-element
                category is not TYPE-REFERENCE.
        """
        for child_element in element.findall("./xmlns:ELEMENTS/xmlns:IMPLEMENTATION-DATA-TYPE", self.nsmap):
            short_name = self.readShortName(child_element)
            data_type = parent.createImplementationDataType(short_name)
            data_type.category = self.readChildElement(short_name, child_element, "CATEGORY")
            self.readImplementationDataTypeElements(child_element, data_type)
            self.readSwDataDefProps(child_element, data_type)
            if data_type.category == ImplementationDataType.CATEGORY_ARRAY:
                # Hoist the repeated getter call; fix the broken English of the
                # original error messages.
                sub_elements = data_type.getImplementationDataTypeElements()
                if len(sub_elements) < 1:
                    raise ValueError("Array sub-element of <%s> is not defined." % data_type.short_name)
                array_sub_element = sub_elements[0]
                if array_sub_element.category == ImplementationDataType.CATEGORY_TYPE_REFERENCE:
                    data_type.setArrayElementType(array_sub_element.sw_data_def_props.implementation_data_type_ref.value)
                else:
                    raise ValueError("The category <%s> of the array sub-element is not supported." % array_sub_element.category)
def readSwDataTypes(self, element, parent: ARPackage):
        """Parse all SW-BASE-TYPE elements and register them on *parent*."""
        for child_element in element.findall("./xmlns:ELEMENTS/xmlns:SW-BASE-TYPE", self.nsmap):
            short_name = self.readShortName(child_element)
            # createSwBaseType registers the type on the package as a side
            # effect; the returned object was previously bound to an unused
            # local, which has been removed.
            parent.createSwBaseType(short_name)
def readClientComSpec(self, element, parent: RPortPrototype):
        """Parse all CLIENT-COM-SPEC entries of an R-port into *parent*."""
        xpath = "./xmlns:REQUIRED-COM-SPECS/xmlns:CLIENT-COM-SPEC"
        for spec_element in element.findall(xpath, self.nsmap):
            try:
                com_spec = ClientComSpec()
                com_spec.operation_ref = self.readChildRefElement(spec_element, "OPERATION-REF")
                parent.addRequiredComSpec(com_spec)
            except ValueError as err:
                # Best effort: report the port and keep parsing remaining specs.
                print(parent.short_name + ": " + str(err))
def readReceiverComSpec(self, element, com_spec: ReceiverComSpec):
        """Fill the fields shared by all receiver com-specs from *element*.

        Reads DATA-ELEMENT-REF, HANDLE-OUT-OF-RANGE and
        USES-END-TO-END-PROTECTION into *com_spec*.
        """
        #FIXME: readchildElement
        com_spec.data_element_ref = self.readChildRefElement(element, "DATA-ELEMENT-REF")
        com_spec.handle_out_of_range = self.readChildElement("", element, "HANDLE-OUT-OF-RANGE")
        com_spec.uses_end_to_end_protection = self.readChildOptionElementBooleanValue(element, "USES-END-TO-END-PROTECTION")
def readNonqueuedReceiverComSpec(self, element, parent: RPortPrototype):
        """Parse all NONQUEUED-RECEIVER-COM-SPEC entries of an R-port into *parent*.

        On a ValueError from a mandatory child element the error is printed and
        the partially-filled com-spec is still added to *parent* (best effort).
        """
        for child_element in element.findall("./xmlns:REQUIRED-COM-SPECS/xmlns:NONQUEUED-RECEIVER-COM-SPEC", self.nsmap):
            com_spec = NonqueuedReceiverComSpec()
            self.readReceiverComSpec(child_element, com_spec)
            try:
                # FIXME:
                com_spec.alive_timeout = float(self.readChildElement("", child_element, "ALIVE-TIMEOUT"))
                com_spec.enable_updated = self.readChildElementBooleanValue("", child_element, "ENABLE-UPDATE")
                com_spec.handle_never_received = self.readChildElementBooleanValue("", child_element, "HANDLE-NEVER-RECEIVED")
                # NOTE(review): "handel_timeout_type" (sic) is the attribute
                # name declared on the com-spec model class; rename there first.
                com_spec.handel_timeout_type = self.readChildElement("", child_element, "HANDLE-TIMEOUT-TYPE")
            except ValueError as err:
                print(parent.short_name + ": " + str(err))
            parent.addRequiredComSpec(com_spec)
def readRPortPrototype(self, element, parent: AtomicSwComponentType):
        """Parse all R-PORT-PROTOTYPE entries and their com-specs into *parent*."""
        xpath = "./xmlns:PORTS/xmlns:R-PORT-PROTOTYPE"
        for port_element in element.findall(xpath, self.nsmap):
            name = self.readShortName(port_element)
            prototype = parent.createRPortPrototype(name)
            prototype.required_interface_tref = self.readChildRefElement(port_element, "REQUIRED-INTERFACE-TREF")
            self.readClientComSpec(port_element, prototype)
            self.readNonqueuedReceiverComSpec(port_element, prototype)
def readSenderComSpec(self, element, com_spec: SenderComSpec):
        """Fill the fields shared by all sender com-specs from *element*.

        Mirrors readReceiverComSpec: DATA-ELEMENT-REF, HANDLE-OUT-OF-RANGE and
        USES-END-TO-END-PROTECTION.
        """
        # FIXME:
        com_spec.data_element_ref = self.readChildRefElement(element, "DATA-ELEMENT-REF")
        com_spec.handle_out_of_range = self.readChildElement("", element, "HANDLE-OUT-OF-RANGE")
        com_spec.uses_end_to_end_protection = self.readChildOptionElementBooleanValue(element, "USES-END-TO-END-PROTECTION")
def readNonqueuedSenderComSpec(self, element, parent: PPortPrototype):
        """Parse all NONQUEUED-SENDER-COM-SPEC entries of a P-port into *parent*."""
        xpath = "./xmlns:PROVIDED-COM-SPECS/xmlns:NONQUEUED-SENDER-COM-SPEC"
        for spec_element in element.findall(xpath, self.nsmap):
            com_spec = NonqueuedSenderComSpec()
            self.readSenderComSpec(spec_element, com_spec)
            parent.addProvidedComSpec(com_spec)
def readPPortPrototype(self, element, parent: AtomicSwComponentType):
        """Parse all P-PORT-PROTOTYPE entries and their com-specs into *parent*."""
        xpath = "./xmlns:PORTS/xmlns:P-PORT-PROTOTYPE"
        for port_element in element.findall(xpath, self.nsmap):
            name = self.readShortName(port_element)
            prototype = parent.createPPortPrototype(name)
            prototype.provided_interface_tref = self.readChildRefElement(port_element, "PROVIDED-INTERFACE-TREF")
            self.readNonqueuedSenderComSpec(port_element, prototype)
def readSwComponentType(self, element, parent: SwComponentType):
        """Parse the R- and P-port prototypes common to every SW component type."""
        self.readRPortPrototype(element, parent)
        self.readPPortPrototype(element, parent)
def readAtomicSwComponentType(self, element, parent: AtomicSwComponentType):
        """Parse the ports and internal behavior of an atomic SW component."""
        self.readSwComponentType(element, parent)
        self.readSwInternalBehavior(element, parent)
def readEcuAbstractionSwComponents(self, element, parent: ARPackage):
        """Parse all ECU-ABSTRACTION-SW-COMPONENT-TYPE elements into *parent*."""
        xpath = "./xmlns:ELEMENTS/xmlns:ECU-ABSTRACTION-SW-COMPONENT-TYPE"
        for component_element in element.findall(xpath, self.nsmap):
            name = self.readShortName(component_element)
            sw_component = parent.createEcuAbstractionSwComponentType(name)
            self.readAtomicSwComponentType(component_element, sw_component)
def readApplicationSwComponentTypes(self, element, parent: ARPackage):
        """Parse all APPLICATION-SW-COMPONENT-TYPE elements into *parent*."""
        xpath = "./xmlns:ELEMENTS/xmlns:APPLICATION-SW-COMPONENT-TYPE"
        for component_element in element.findall(xpath, self.nsmap):
            name = self.readShortName(component_element)
            sw_component = parent.createApplicationSwComponentType(name)
            self.readAtomicSwComponentType(component_element, sw_component)
def readComplexDeviceDriverSwComponentTypes(self, element, parent: ARPackage):
        """Parse all COMPLEX-DEVICE-DRIVER-SW-COMPONENT-TYPE elements into *parent*.

        Bug fix: the original called createApplicationSwComponentType (a
        copy-paste from readApplicationSwComponentTypes), registering complex
        device driver components under the wrong component type.
        """
        for child_element in element.findall("./xmlns:ELEMENTS/xmlns:COMPLEX-DEVICE-DRIVER-SW-COMPONENT-TYPE", self.nsmap):
            short_name = self.readShortName(child_element)
            # assumes ARPackage provides this factory analogous to the other
            # create*SwComponentType methods — TODO confirm against ARPackage.
            sw_component = parent.createComplexDeviceDriverSwComponentType(short_name)
            self.readAtomicSwComponentType(child_element, sw_component)
def readServiceSwComponentTypes(self, element, parent: ARPackage):
        """Parse all SERVICE-SW-COMPONENT-TYPE elements into *parent*."""
        xpath = "./xmlns:ELEMENTS/xmlns:SERVICE-SW-COMPONENT-TYPE"
        for component_element in element.findall(xpath, self.nsmap):
            name = self.readShortName(component_element)
            sw_component = parent.createServiceSwComponentType(name)
            self.readAtomicSwComponentType(component_element, sw_component)
def readAssemblySwConnectorProviderIRef(self, element, parent: AssemblySwConnector):
        """Parse the PROVIDER-IREF of an assembly connector into *parent*."""
        child_element = element.find("./xmlns:PROVIDER-IREF", self.nsmap)
        # PEP 8: compare against None with "is not", not "!=".
        if child_element is not None:
            provider_iref = ProvidedPortPrototypeInstanceRef()
            provider_iref.context_component_ref = self.readChildRefElement(
                child_element, "CONTEXT-COMPONENT-REF")
            provider_iref.target_p_port_ref = self.readChildRefElement(
                child_element, "TARGET-P-PORT-REF")
            parent.provider_iref = provider_iref
def readAssemblySwConnectorRequesterIRef(self, element, parent: AssemblySwConnector):
        """Parse the REQUESTER-IREF of an assembly connector into *parent*."""
        child_element = element.find("./xmlns:REQUESTER-IREF", self.nsmap)
        # PEP 8: compare against None with "is not", not "!=".
        if child_element is not None:
            requester_iref = RequiredPortPrototypeInstanceRef()
            requester_iref.context_component_ref = self.readChildRefElement(
                child_element, "CONTEXT-COMPONENT-REF")
            requester_iref.target_r_port_ref = self.readChildRefElement(
                child_element, "TARGET-R-PORT-REF")
            parent.requester_iref = requester_iref
def readAssemblySwConnectors(self, element, parent: CompositionSwComponentType):
        """Parse all ASSEMBLY-SW-CONNECTOR entries of a composition into *parent*."""
        xpath = "./xmlns:CONNECTORS/xmlns:ASSEMBLY-SW-CONNECTOR"
        for connector_element in element.findall(xpath, self.nsmap):
            name = self.readShortName(connector_element)
            connector = parent.createAssemblySwConnector(name)
            self.readAssemblySwConnectorProviderIRef(connector_element, connector)
            self.readAssemblySwConnectorRequesterIRef(connector_element, connector)
def readSwComponentPrototypes(self, element, parent: CompositionSwComponentType):
        """Parse all SW-COMPONENT-PROTOTYPE entries of a composition into *parent*."""
        xpath = "./xmlns:COMPONENTS/xmlns:SW-COMPONENT-PROTOTYPE"
        for prototype_element in element.findall(xpath, self.nsmap):
            name = self.readShortName(prototype_element)
            prototype = parent.createSwComponentPrototype(name)
            prototype.type_tref = self.readChildRefElement(prototype_element, "TYPE-TREF")
def readCompositionSwComponentTypes(self, element, parent: ARPackage):
        """Parse all COMPOSITION-SW-COMPONENT-TYPE elements into *parent*."""
        xpath = "./xmlns:ELEMENTS/xmlns:COMPOSITION-SW-COMPONENT-TYPE"
        for composition_element in element.findall(xpath, self.nsmap):
            name = self.readShortName(composition_element)
            sw_component = parent.createCompositionSwComponentType(name)
            self.readSwComponentType(composition_element, sw_component)
            self.readSwComponentPrototypes(composition_element, sw_component)
            self.readAssemblySwConnectors(composition_element, sw_component)
def readDataTypeMap(self, element, parent: DataTypeMappingSet):
        """Parse all DATA-TYPE-MAP entries into *parent* and the global registry."""
        xpath = "./xmlns:DATA-TYPE-MAPS/xmlns:DATA-TYPE-MAP"
        for map_element in element.findall(xpath, self.nsmap):
            data_type_map = DataTypeMap()
            data_type_map.application_data_type_ref = self.readChildRefElement(map_element, "APPLICATION-DATA-TYPE-REF")
            data_type_map.implementation_data_type_ref = self.readChildRefElement(map_element, "IMPLEMENTATION-DATA-TYPE-REF")
            parent.addDataTypeMap(data_type_map)
            # Also register the mapping in the global AUTOSAR namespace.
            AUTOSAR.getInstance().addDataTypeMap(data_type_map)
def readDataTypeMappingSets(self, element, parent: ARPackage):
        """Parse all DATA-TYPE-MAPPING-SET elements into *parent*."""
        xpath = "./xmlns:ELEMENTS/xmlns:DATA-TYPE-MAPPING-SET"
        for set_element in element.findall(xpath, self.nsmap):
            name = self.readShortName(set_element)
            mapping_set = parent.createDataTypeMappingSet(name)
            self.readDataTypeMap(set_element, mapping_set)
def readVariableDataPrototype(self, element, parent: SenderReceiverInterface):
        """Parse all VARIABLE-DATA-PROTOTYPE data elements into *parent*."""
        xpath = "./xmlns:DATA-ELEMENTS/xmlns:VARIABLE-DATA-PROTOTYPE"
        for prototype_element in element.findall(xpath, self.nsmap):
            name = self.readShortName(prototype_element)
            prototype = parent.createDataElement(name)
            self.readSwDataDefProps(prototype_element, prototype)
            prototype.type_tref = self.readChildRefElement(prototype_element, "TYPE-TREF")
def readSenderReceiverInterfaces(self, element, parent: ARPackage):
        """Parse all SENDER-RECEIVER-INTERFACE elements into *parent*."""
        xpath = "./xmlns:ELEMENTS/xmlns:SENDER-RECEIVER-INTERFACE"
        for interface_element in element.findall(xpath, self.nsmap):
            name = self.readShortName(interface_element)
            sr_interface = parent.createSenderReceiverInterface(name)
            self.readVariableDataPrototype(interface_element, sr_interface)
def readArgumentDataPrototypes(self, element, parent: ClientServerOperation):
        """Parse all ARGUMENT-DATA-PROTOTYPE entries of an operation into *parent*.

        Bug fix: the original passed the *builtin* ``property`` as the
        prototype's first constructor argument
        (``ArgumentDataPrototype(property, short_name)``); the owning
        operation ``parent`` is the intended value.
        """
        for child_element in element.findall("./xmlns:ARGUMENTS/xmlns:ARGUMENT-DATA-PROTOTYPE", self.nsmap):
            short_name = self.readShortName(child_element)
            prototype = ArgumentDataPrototype(parent, short_name)
            prototype.type_tref = self.readChildRefElement(
                child_element, "TYPE-TREF")
            prototype.direction = self.readChildElement(
                short_name, child_element, "DIRECTION")
            parent.addArgumentDataPrototype(prototype)
def readPossibleErrorRefs(self, element, parent: ClientServerOperation):
        """Parse the POSSIBLE-ERROR-REFS of an operation into *parent*."""
        child_element = element.find("./xmlns:POSSIBLE-ERROR-REFS", self.nsmap)
        # PEP 8: compare against None with "is not", not "!=".
        if child_element is not None:
            for ref in self.readChildRefElementList(child_element, "POSSIBLE-ERROR-REF"):
                parent.addPossibleErrorRef(ref)
def readOperations(self, element, parent: ClientServerInterface):
        """Parse all CLIENT-SERVER-OPERATION entries into *parent*."""
        xpath = "./xmlns:OPERATIONS/xmlns:CLIENT-SERVER-OPERATION"
        for operation_element in element.findall(xpath, self.nsmap):
            name = self.readShortName(operation_element)
            operation = parent.createOperation(name)
            self.readArgumentDataPrototypes(operation_element, operation)
            self.readPossibleErrorRefs(operation_element, operation)
def readPossibleErrors(self, element, parent: ClientServerInterface):
        """Parse all APPLICATION-ERROR entries into *parent*."""
        xpath = "./xmlns:POSSIBLE-ERRORS/xmlns:APPLICATION-ERROR"
        for error_element in element.findall(xpath, self.nsmap):
            name = self.readShortName(error_element)
            error = parent.createApplicationError(name)
            error.error_code = int(self.readChildElement(name, error_element, "ERROR-CODE"))
def readClientServerInterfaces(self, element, parent: ARPackage):
        """Parse all CLIENT-SERVER-INTERFACE elements into *parent*."""
        xpath = "./xmlns:ELEMENTS/xmlns:CLIENT-SERVER-INTERFACE"
        for interface_element in element.findall(xpath, self.nsmap):
            name = self.readShortName(interface_element)
            cs_interface = parent.createClientServerInterface(name)
            # NOTE(review): IS-SERVICE is stored as the raw text value, not a
            # bool — confirm downstream consumers expect that.
            cs_interface.is_service = self.readChildElement(name, interface_element, "IS-SERVICE")
            self.readOperations(interface_element, cs_interface)
            self.readPossibleErrors(interface_element, cs_interface)
def readCompuConstTextContent(self, element, parent: CompuConstTextContent):
        """Read the COMPU-CONST/VT text value into *parent*, if present."""
        child_element = element.find(
            "./xmlns:COMPU-CONST/xmlns:VT", self.nsmap)
        # PEP 8: compare against None with "is not", not "!=".
        if child_element is not None:
            parent.vt = child_element.text
def readCompuScales(self, element, parent: CompuScales):
        """Parse all COMPU-SCALE entries with their limits into *parent*."""
        xpath = './xmlns:COMPU-SCALES/xmlns:COMPU-SCALE'
        for scale_element in element.findall(xpath, self.nsmap):
            compu_scale = CompuScale()
            compu_scale.lower_limit = self.readChildLimitElement(scale_element, "LOWER-LIMIT")
            compu_scale.upper_limit = self.readChildLimitElement(scale_element, "UPPER-LIMIT")
            compu_scale.compu_inverse_value = CompuConstTextContent()
            self.readCompuConstTextContent(scale_element, compu_scale.compu_inverse_value)
            parent.addCompuScale(compu_scale)
def readCompuInternalToPhys(self, element, parent: CompuMethod):
        """Parse the COMPU-INTERNAL-TO-PHYS conversion of a compu method."""
        child_element = element.find(
            "./xmlns:COMPU-INTERNAL-TO-PHYS", self.nsmap)
        # PEP 8: compare against None with "is not", not "!=".
        if child_element is not None:
            parent.compu_internal_to_phys = Compu()
            parent.compu_internal_to_phys.compu_content = CompuScales()
            self.readCompuScales(
                child_element, parent.compu_internal_to_phys.compu_content)
def readCompuMethods(self, element, parent: ARPackage):
        """Parse all COMPU-METHOD elements into *parent*."""
        xpath = "./xmlns:ELEMENTS/xmlns:COMPU-METHOD"
        for method_element in element.findall(xpath, self.nsmap):
            name = self.readShortName(method_element)
            compu_method = parent.createCompuMethod(name)
            compu_method.category = self.readChildElement(name, method_element, "CATEGORY")
            self.readCompuInternalToPhys(method_element, compu_method)
def readARPackages(self, element, parent):
        """Recursively parse AR-PACKAGE elements and all their contained types.

        For every package, delegates to the element readers (interfaces, data
        type mapping sets, data types, compu methods, SW component types) and
        recurses into nested packages.
        """
        for child_element in element.findall("./xmlns:AR-PACKAGES/xmlns:AR-PACKAGE", self.nsmap):
            short_name = self.readShortName(child_element)
            ar_package = parent.createARPackage(short_name)
            self.readSenderReceiverInterfaces(child_element, ar_package)
            self.readClientServerInterfaces(child_element, ar_package)
            self.readDataTypeMappingSets(child_element, ar_package)
            self.readARPackages(child_element, ar_package)
            self.readApplicationPrimitiveDataTypes(child_element, ar_package)
            self.readApplicationRecordDataTypes(child_element, ar_package)
            self.readImplementationDataTypes(child_element, ar_package)
            self.readSwDataTypes(child_element, ar_package)
            self.readCompuMethods(child_element, ar_package)
            self.readEcuAbstractionSwComponents(child_element, ar_package)
            self.readApplicationSwComponentTypes(child_element, ar_package)
            self.readComplexDeviceDriverSwComponentTypes(
                child_element, ar_package)
            self.readServiceSwComponentTypes(child_element, ar_package)
            self.readCompositionSwComponentTypes(child_element, ar_package)
def load(self, filename, document: AUTOSAR):
        """Read an ARXML file and populate *document* with its AR packages.

        Raises:
            ValueError: if the root element of the file is not AUTOSAR.
        """
        tree = ET.parse(filename)
        root_element = tree.getroot()
        if self.getPureTagName(root_element.tag) != "AUTOSAR":
            raise ValueError("Invalid ARXML file <%s>" % filename)
        print("Load %s ..." % filename)
        self.readARPackages(root_element, document)
app/migrations/0008_alter_hood_biashara.py | KairigoShawn/neighbourhood | 0 | 6622550 | <filename>app/migrations/0008_alter_hood_biashara.py
# Generated by Django 4.0 on 2022-01-04 15:33
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('app', '0007_alter_hood_biashara'),
]
operations = [
migrations.AlterField(
model_name='hood',
name='biashara',
field=models.ManyToManyField(blank=True, related_name='biashara', to='app.Business'),
),
]
| <filename>app/migrations/0008_alter_hood_biashara.py
# Generated by Django 4.0 on 2022-01-04 15:33
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('app', '0007_alter_hood_biashara'),
]
operations = [
migrations.AlterField(
model_name='hood',
name='biashara',
field=models.ManyToManyField(blank=True, related_name='biashara', to='app.Business'),
),
]
| en | 0.83823 | # Generated by Django 4.0 on 2022-01-04 15:33 | 1.335996 | 1 |