code stringlengths 20 1.04M | apis list | extract_api stringlengths 75 9.94M |
|---|---|---|
import os
import pytest
# No CLI test due to sc-show
@pytest.mark.eda
@pytest.mark.quick
def test_py(setup_example_test):
    """Run the fibone example end-to-end and verify the exported GDS exists."""
    setup_example_test('fibone')
    import fibone
    fibone.main()
    gds = 'build/mkFibOne/job0/export/0/outputs/mkFibOne.gds'
    assert os.path.isfile(gds)
| [
"fibone.main",
"os.path.isfile"
] | [((181, 194), 'fibone.main', 'fibone.main', ([], {}), '()\n', (192, 194), False, 'import fibone\n'), ((207, 274), 'os.path.isfile', 'os.path.isfile', (['"""build/mkFibOne/job0/export/0/outputs/mkFibOne.gds"""'], {}), "('build/mkFibOne/job0/export/0/outputs/mkFibOne.gds')\n", (221, 274), False, 'import os\n')] |
from setuptools import setup
from setuptools.command.develop import develop
# Minimal packaging configuration: ship the single `retroprime` module.
setup(
    name='retroprime',
    py_modules=['retroprime'],
)
"setuptools.setup"
] | [((78, 129), 'setuptools.setup', 'setup', ([], {'name': '"""retroprime"""', 'py_modules': "['retroprime']"}), "(name='retroprime', py_modules=['retroprime'])\n", (83, 129), False, 'from setuptools import setup\n')] |
# import the necessary packages
from tensorflow.keras.models import load_model
from image_classification.data import DataDispatcher
from image_classification.utils import config
from image_classification.layers import Mish
import numpy as np
import argparse
# construct an argument parser to parse the command line arguments
ap = argparse.ArgumentParser()
ap.add_argument("-w", "--weights", required=True, help="path to model weights")
args = vars(ap.parse_args())
# load the dataset
dd = DataDispatcher()
test_ds = dd.get_test_data()
# load the model; Mish is a custom layer, so it must be registered explicitly
model = load_model(args["weights"], custom_objects={"Mish": Mish})
# evaluate the model
# BUG FIX: np.ceil returns a float, but Keras' `steps` argument expects an int
test_steps = int(np.ceil(dd.num_test_imgs / config.BS))
H = model.evaluate(x=test_ds, batch_size=config.BS, steps=test_steps)
# print the results (H[0] = loss, H[1] = first metric as compiled)
print(f"[INFO] test set loss: {np.round(H[0], 4)}")
print(f"[INFO] test set acc: {np.round(H[1], 4)}")
| [
"tensorflow.keras.models.load_model",
"image_classification.data.DataDispatcher",
"numpy.ceil",
"argparse.ArgumentParser",
"numpy.round"
] | [((331, 356), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (354, 356), False, 'import argparse\n'), ((491, 507), 'image_classification.data.DataDispatcher', 'DataDispatcher', ([], {}), '()\n', (505, 507), False, 'from image_classification.data import DataDispatcher\n'), ((563, 621), 'tensorflow.keras.models.load_model', 'load_model', (["args['weights']"], {'custom_objects': "{'Mish': Mish}"}), "(args['weights'], custom_objects={'Mish': Mish})\n", (573, 621), False, 'from tensorflow.keras.models import load_model\n'), ((695, 732), 'numpy.ceil', 'np.ceil', (['(dd.num_test_imgs / config.BS)'], {}), '(dd.num_test_imgs / config.BS)\n', (702, 732), True, 'import numpy as np\n'), ((855, 872), 'numpy.round', 'np.round', (['H[0]', '(4)'], {}), '(H[0], 4)\n', (863, 872), True, 'import numpy as np\n'), ((906, 923), 'numpy.round', 'np.round', (['H[1]', '(4)'], {}), '(H[1], 4)\n', (914, 923), True, 'import numpy as np\n')] |
import folium
from pathlib import Path
from sportgems import parse_fit_data, find_fastest_section
# Desired fastest sections to parse. Note: the largest must come first in
# order to be able to render the smaller sections on top of the larger ones.
sections = [5000, 3000, 2000, 1000]
colors = ["yellow", "blue", "green", "red"]

if __name__ == "__main__":
    fit_file = Path(".").parent / "tests" / "data" / "2019-09-14-17-22-05.fit"
    fit_data = parse_fit_data(str(fit_file))

    # Keep only coordinates with positive lat/lon (drops the (0, 0) fixes
    # emitted before a GPS lock). NOTE(review): assumes a track in the
    # northern/eastern hemisphere - verify for other data.
    coords = [(c[0], c[1]) for c in fit_data.coordinates if c[0] > 0 and c[1] > 0]

    trace = folium.PolyLine(coords, color="black")
    # Renamed from `map` to avoid shadowing the builtin.
    fmap = folium.Map(location=fit_data.coordinates[300], zoom_start=15)
    trace.add_to(fmap)

    # Draw each fastest section in its own color (largest first, see above).
    for section, color in zip(sections, colors):
        fs = find_fastest_section(section, fit_data.times, fit_data.coordinates)
        folium.PolyLine(coords[fs.start:fs.end], color=color).add_to(fmap)

    output_file = "map.html"
    fmap.save(output_file)
    print(f"saved map to {output_file}, can be viewed in browser")
"folium.PolyLine",
"pathlib.Path",
"sportgems.find_fastest_section",
"folium.Map"
] | [((658, 696), 'folium.PolyLine', 'folium.PolyLine', (['coords'], {'color': '"""black"""'}), "(coords, color='black')\n", (673, 696), False, 'import folium\n'), ((707, 768), 'folium.Map', 'folium.Map', ([], {'location': 'fit_data.coordinates[300]', 'zoom_start': '(15)'}), '(location=fit_data.coordinates[300], zoom_start=15)\n', (717, 768), False, 'import folium\n'), ((840, 911), 'sportgems.find_fastest_section', 'find_fastest_section', (['sections[i]', 'fit_data.times', 'fit_data.coordinates'], {}), '(sections[i], fit_data.times, fit_data.coordinates)\n', (860, 911), False, 'from sportgems import parse_fit_data, find_fastest_section\n'), ((974, 1017), 'folium.PolyLine', 'folium.PolyLine', (['fs_coords'], {'color': 'colors[i]'}), '(fs_coords, color=colors[i])\n', (989, 1017), False, 'import folium\n'), ((366, 375), 'pathlib.Path', 'Path', (['"""."""'], {}), "('.')\n", (370, 375), False, 'from pathlib import Path\n')] |
from flask import Flask, render_template, abort
from fauxsnow import ResortModel, ForecastAPILoader, ForecastModel
# Flask application instance shared by all route handlers below.
app = Flask(__name__)
# Global page-view counter; not referenced by any handler in this file.
view_count = 0
@app.route("/")
def welcome():
resort_model = ResortModel()
resorts = resort_model.get_all_resorts()
return render_template("welcome.html", resorts=resorts)
@app.route("/detail/<text_id>")
def detail(text_id):
try:
resort_model = ResortModel()
resort = resort_model.get_resort_by_id(text_id)
if resort:
return render_template("detail.html", resort=resort)
else:
abort(500)
except IndexError:
abort(404)
@app.route("/refresh")
def refresh():
rm = ResortModel()
fm = ForecastModel()
resorts = rm.get_all_resorts()
fAPI = ForecastAPILoader()
forecasts = fAPI.load_forecasts_from_api(resorts)
# if the api call returns None, fail gracefully.
message = ''
if forecasts:
fm.save_forecasts(forecasts)
message = 'Updated forecasts'
else:
message = 'could not update forecasts'
return render_template('refresh.html', message=message)
@app.route("/about")
def about():
rm = ResortModel()
resorts = rm.get_all_resorts()
num_resorts = len(resorts)
return render_template("about.html", resorts=resorts, num_resorts=num_resorts)
@app.errorhandler(404)
def page_not_found(error):
    """Render the custom 404 page."""
    return render_template('404.html', title='404 Not Found'), 404
@app.errorhandler(500)
def internal_server_error(error):
    """Render the error page for unexpected server failures (HTTP 500).

    Renamed from `page_not_found`: the original redefined the 404 handler's
    function name at module level (shadowing it), even though Flask had
    already registered both handlers via the decorator.
    """
    # Reuses the 404 template with a different title, as before.
    return render_template('404.html', title='Something went wrong'), 500
| [
"flask.Flask",
"fauxsnow.ForecastModel",
"flask.abort",
"fauxsnow.ForecastAPILoader",
"fauxsnow.ResortModel",
"flask.render_template"
] | [((122, 137), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (127, 137), False, 'from flask import Flask, render_template, abort\n'), ((207, 220), 'fauxsnow.ResortModel', 'ResortModel', ([], {}), '()\n', (218, 220), False, 'from fauxsnow import ResortModel, ForecastAPILoader, ForecastModel\n'), ((277, 325), 'flask.render_template', 'render_template', (['"""welcome.html"""'], {'resorts': 'resorts'}), "('welcome.html', resorts=resorts)\n", (292, 325), False, 'from flask import Flask, render_template, abort\n'), ((693, 706), 'fauxsnow.ResortModel', 'ResortModel', ([], {}), '()\n', (704, 706), False, 'from fauxsnow import ResortModel, ForecastAPILoader, ForecastModel\n'), ((716, 731), 'fauxsnow.ForecastModel', 'ForecastModel', ([], {}), '()\n', (729, 731), False, 'from fauxsnow import ResortModel, ForecastAPILoader, ForecastModel\n'), ((778, 797), 'fauxsnow.ForecastAPILoader', 'ForecastAPILoader', ([], {}), '()\n', (795, 797), False, 'from fauxsnow import ResortModel, ForecastAPILoader, ForecastModel\n'), ((1083, 1131), 'flask.render_template', 'render_template', (['"""refresh.html"""'], {'message': 'message'}), "('refresh.html', message=message)\n", (1098, 1131), False, 'from flask import Flask, render_template, abort\n'), ((1176, 1189), 'fauxsnow.ResortModel', 'ResortModel', ([], {}), '()\n', (1187, 1189), False, 'from fauxsnow import ResortModel, ForecastAPILoader, ForecastModel\n'), ((1267, 1338), 'flask.render_template', 'render_template', (['"""about.html"""'], {'resorts': 'resorts', 'num_resorts': 'num_resorts'}), "('about.html', resorts=resorts, num_resorts=num_resorts)\n", (1282, 1338), False, 'from flask import Flask, render_template, abort\n'), ((412, 425), 'fauxsnow.ResortModel', 'ResortModel', ([], {}), '()\n', (423, 425), False, 'from fauxsnow import ResortModel, ForecastAPILoader, ForecastModel\n'), ((1400, 1450), 'flask.render_template', 'render_template', (['"""404.html"""'], {'title': '"""404 Not Found"""'}), "('404.html', title='404 
Not Found')\n", (1415, 1450), False, 'from flask import Flask, render_template, abort\n'), ((1519, 1576), 'flask.render_template', 'render_template', (['"""404.html"""'], {'title': '"""Something went wrong"""'}), "('404.html', title='Something went wrong')\n", (1534, 1576), False, 'from flask import Flask, render_template, abort\n'), ((520, 565), 'flask.render_template', 'render_template', (['"""detail.html"""'], {'resort': 'resort'}), "('detail.html', resort=resort)\n", (535, 565), False, 'from flask import Flask, render_template, abort\n'), ((592, 602), 'flask.abort', 'abort', (['(500)'], {}), '(500)\n', (597, 602), False, 'from flask import Flask, render_template, abort\n'), ((634, 644), 'flask.abort', 'abort', (['(404)'], {}), '(404)\n', (639, 644), False, 'from flask import Flask, render_template, abort\n')] |
from PyQt5.QtCore import Qt, QSize
from PyQt5.QtWidgets import QToolButton
from PyQt5 import QtGui
import filedockstylesheet as style
class FolderButton(QToolButton):
    """Tool button representing one folder entry in the file dock.

    Shows a folder icon with the folder's base name underneath and fires the
    supplied `clicked` callback on release.
    """

    def __init__(self, folderNumber, layoutPosition, path, clicked, parent=None):
        super().__init__(parent)
        # Bookkeeping for the dock layout.
        self.folderNumber = folderNumber
        self.layoutPosition = layoutPosition
        self.path = path
        # Display name is the last path component.
        self.folderName = path.split("/")[-1]
        self.clickCount = 1
        # Widget appearance: icon above text, fixed icon size, named for styling.
        self.setObjectName('FolderButton')
        self.setToolButtonStyle(Qt.ToolButtonTextUnderIcon)
        self.setIconSize(QSize(50, 50))
        self.setIcon(QtGui.QIcon('assets/folderIcon.png'))
        self.setText(self.folderName)
        self.released.connect(clicked)
"PyQt5.QtGui.QIcon",
"PyQt5.QtCore.QSize"
] | [((621, 634), 'PyQt5.QtCore.QSize', 'QSize', (['(50)', '(50)'], {}), '(50, 50)\n', (626, 634), False, 'from PyQt5.QtCore import Qt, QSize\n'), ((656, 692), 'PyQt5.QtGui.QIcon', 'QtGui.QIcon', (['"""assets/folderIcon.png"""'], {}), "('assets/folderIcon.png')\n", (667, 692), False, 'from PyQt5 import QtGui\n')] |
import random
from typing import TypeVar, MutableSequence
T = TypeVar('T')
def sample_items_inplace(items: MutableSequence[T], sample_size: int, item_limit: int = None):
    """Move `sample_size` randomly sampled elements to the end of `items`, in place.

    Performs a partial Fisher-Yates shuffle over the first `item_limit`
    elements, so the sampled elements end up in the last `sample_size`
    positions of that range. When `sample_size == len(items)` this shuffles
    the whole list in place.

    :param items: mutable sequence to sample from (modified in place)
    :param sample_size: number of elements to sample
    :param item_limit: only the first `item_limit` elements are candidates;
        defaults to ``len(items)``
    :raises ValueError: if a size argument is negative, exceeds the list
        size, or ``sample_size > item_limit``
    """
    n = len(items)
    if item_limit is None:
        item_limit = n
    elif not 0 <= item_limit <= n:
        raise ValueError("Item limit is negative or larger than item list size")
    if not 0 <= sample_size <= n:
        raise ValueError("Sample size is negative or larger than items list")
    if sample_size > item_limit:
        raise ValueError("Sample size is greater than item limit")
    for i in range(sample_size):
        j = random.randrange(item_limit - i)
        current_index = item_limit - i - 1
        if current_index != j:
            # Idiomatic tuple swap instead of a temporary variable.
            items[j], items[current_index] = items[current_index], items[j]
| [
"typing.TypeVar",
"random.randrange"
] | [((63, 75), 'typing.TypeVar', 'TypeVar', (['"""T"""'], {}), "('T')\n", (70, 75), False, 'from typing import TypeVar, MutableSequence\n'), ((772, 804), 'random.randrange', 'random.randrange', (['(item_limit - i)'], {}), '(item_limit - i)\n', (788, 804), False, 'import random\n')] |
import re
def capture(input: str, regex: str, pattern_flags: int = 0, groupnum: int = 1, fail_gently: bool = False) -> str:
    """Return the text captured by group `groupnum` of `regex` within `input`.

    When the pattern does not match: returns None if `fail_gently` is set,
    otherwise raises a Warning describing the failed match.
    """
    match = re.search(regex, input, pattern_flags)
    if match is None:
        if fail_gently:
            return None
        raise Warning(f'Attempt to match {regex} on {input} at group {groupnum} failed.')
    return match.group(groupnum)
def almost_equal(str1: str, str2: str) -> bool:
    """Compare two strings ignoring case, surrounding whitespace and non-word characters.

    Two None values compare equal; None never equals an actual string.
    """
    if str1 is None or str2 is None:
        return str1 is None and str2 is None

    def _normalize(s: str) -> str:
        # Case-fold, trim, then strip every non-word character run.
        return re.sub(r'\W+', '', s.strip().casefold())

    return _normalize(str1) == _normalize(str2)
| [
"re.compile"
] | [((140, 172), 're.compile', 're.compile', (['regex', 'pattern_flags'], {}), '(regex, pattern_flags)\n', (150, 172), False, 'import re\n')] |
# -*- coding: utf-8 -*-
import io
import re
import demjson3
import pandas as pd
import requests
from zvt.api.utils import china_stock_code_to_id
from zvt.contract.api import df_to_db
from zvt.contract.recorder import Recorder
from zvt.domain import EtfStock, Etf
from zvt.recorders.consts import DEFAULT_SH_ETF_LIST_HEADER
from zvt.utils.time_utils import now_pd_timestamp
class ChinaETFListSpider(Recorder):
    """Fetch the ETF lists of the Shanghai (SH) and Shenzhen (SZ) stock
    exchanges and record every ETF together with its constituent stocks
    into the ``EtfStock`` schema via ``df_to_db``.
    """
    # Records produced by this recorder are EtfStock rows.
    data_schema = EtfStock
    def __init__(self, force_update=False, sleeping_time=10.0, provider='exchange') -> None:
        # Data-provider tag under which the records are stored.
        self.provider = provider
        super().__init__(force_update, sleeping_time)
    def run(self):
        """Download both exchanges' ETF lists and their constituent stocks."""
        # Fetch the SH (Shanghai) ETF list
        url = 'http://query.sse.com.cn/commonQuery.do?sqlId=COMMON_SSE_ZQPZ_ETFLB_L_NEW'
        response = requests.get(url, headers=DEFAULT_SH_ETF_LIST_HEADER)
        # The SSE endpoint returns non-strict JSON; demjson3 tolerates it.
        response_dict = demjson3.decode(response.text)
        df = pd.DataFrame(response_dict.get('result', []))
        self.persist_etf_list(df, exchange='sh')
        self.logger.info('沪市 ETF 列表抓取完成...')
        # Fetch the SH ETF constituent stocks
        self.download_sh_etf_component(df)
        self.logger.info('沪市 ETF 成分股抓取完成...')
        # Fetch the SZ (Shenzhen) ETF list (served as an xlsx download)
        url = 'http://www.szse.cn/api/report/ShowReport?SHOWTYPE=xlsx&CATALOGID=1945'
        response = requests.get(url)
        df = pd.read_excel(io.BytesIO(response.content), dtype=str)
        self.persist_etf_list(df, exchange='sz')
        self.logger.info('深市 ETF 列表抓取完成...')
        # Fetch the SZ ETF constituent stocks
        self.download_sz_etf_component(df)
        self.logger.info('深市 ETF 成分股抓取完成...')
    def persist_etf_list(self, df: pd.DataFrame, exchange: str):
        """Normalize an exchange's raw ETF list and save it as ``Etf`` records.

        :param df: raw ETF list as returned by the exchange; the relevant
            column names differ between the two exchanges
        :param exchange: 'sh' or 'sz'
        """
        if df is None:
            return
        df = df.copy()
        # Select code/name columns; the SZ list uses Chinese column headers.
        if exchange == 'sh':
            df = df[['FUND_ID', 'FUND_NAME']]
        elif exchange == 'sz':
            df = df[['证券代码', '证券简称']]
        df.columns = ['code', 'name']
        df['id'] = df['code'].apply(lambda code: f'etf_{exchange}_{code}')
        df['entity_id'] = df['id']
        df['exchange'] = exchange
        df['entity_type'] = 'etf'
        df['category'] = 'etf'
        # Drop incomplete rows and de-duplicate on the derived id.
        df = df.dropna(axis=0, how='any')
        df = df.drop_duplicates(subset='id', keep='last')
        df_to_db(df=df, data_schema=Etf, provider=self.provider, force_update=False)
    def download_sh_etf_component(self, df: pd.DataFrame):
        """
        ETF_CLASS => 1. single-market ETF  2. cross-market ETF  3. cross-border ETF
                     5. bond ETF  6. gold ETF
        Only classes 1 and 2 are fetched here (see the filter below).
        :param df: ETF list data
        :return: None
        """
        query_url = 'http://query.sse.com.cn/infodisplay/queryConstituentStockInfo.do?' \
                    'isPagination=false&type={}&etfClass={}'
        # Keep only single-market and cross-market ETFs.
        etf_df = df[(df['ETF_CLASS'] == '1') | (df['ETF_CLASS'] == '2')]
        etf_df = self.populate_sh_etf_type(etf_df)
        for _, etf in etf_df.iterrows():
            url = query_url.format(etf['ETF_TYPE'], etf['ETF_CLASS'])
            response = requests.get(url, headers=DEFAULT_SH_ETF_LIST_HEADER)
            response_dict = demjson3.decode(response.text)
            response_df = pd.DataFrame(response_dict.get('result', []))
            etf_code = etf['FUND_ID']
            etf_id = f'etf_sh_{etf_code}'
            # Keep only the constituent stock code/name and rename to schema columns.
            response_df = response_df[['instrumentId', 'instrumentName']].copy()
            response_df.rename(columns={'instrumentId': 'stock_code', 'instrumentName': 'stock_name'}, inplace=True)
            response_df['entity_id'] = etf_id
            response_df['entity_type'] = 'etf'
            response_df['exchange'] = 'sh'
            response_df['code'] = etf_code
            response_df['name'] = etf['FUND_NAME']
            response_df['timestamp'] = now_pd_timestamp()
            response_df['stock_id'] = response_df['stock_code'].apply(lambda code: china_stock_code_to_id(code))
            response_df['id'] = response_df['stock_id'].apply(
                lambda x: f'{etf_id}_{x}')
            df_to_db(data_schema=self.data_schema, df=response_df, provider=self.provider)
            self.logger.info(f'{etf["FUND_NAME"]} - {etf_code} 成分股抓取完成...')
            # Throttle between requests (Recorder.sleep).
            self.sleep()
    def download_sz_etf_component(self, df: pd.DataFrame):
        """Download each SZ ETF's constituents by scraping the newest component
        list of its underlying index from Sina finance.

        :param df: SZ ETF list data (columns include the ETF code, name and
            the tracked-index column '拟合指数')
        """
        query_url = 'http://vip.stock.finance.sina.com.cn/corp/go.php/vII_NewestComponent/indexid/{}.phtml'
        # Reduce the '拟合指数' column to the bare index code, in place.
        self.parse_sz_etf_underlying_index(df)
        for _, etf in df.iterrows():
            underlying_index = etf['拟合指数']
            etf_code = etf['证券代码']
            if len(underlying_index) == 0:
                # No A-share underlying index (message: "not an A-share index, skipping").
                self.logger.info(f'{etf["证券简称"]} - {etf_code} 非 A 股市场指数,跳过...')
                continue
            url = query_url.format(underlying_index)
            response = requests.get(url)
            # Sina serves these pages in GBK encoding.
            response.encoding = 'gbk'
            try:
                dfs = pd.read_html(response.text, header=1)
            except ValueError as error:
                self.logger.error(f'HTML parse error: {error}, response: {response.text}')
                continue
            # The component table is expected to be the fourth table on the page.
            if len(dfs) < 4:
                continue
            response_df = dfs[3].copy()
            response_df = response_df.dropna(axis=1, how='any')
            # Zero-pad stock codes to the canonical six digits.
            response_df['品种代码'] = response_df['品种代码'].apply(lambda x: f'{x:06d}')
            etf_id = f'etf_sz_{etf_code}'
            # Keep only the constituent code/name columns and rename to schema columns.
            response_df = response_df[['品种代码', '品种名称']].copy()
            response_df.rename(columns={'品种代码': 'stock_code', '品种名称': 'stock_name'}, inplace=True)
            response_df['entity_id'] = etf_id
            response_df['entity_type'] = 'etf'
            response_df['exchange'] = 'sz'
            response_df['code'] = etf_code
            response_df['name'] = etf['证券简称']
            response_df['timestamp'] = now_pd_timestamp()
            response_df['stock_id'] = response_df['stock_code'].apply(lambda code: china_stock_code_to_id(code))
            response_df['id'] = response_df['stock_id'].apply(
                lambda x: f'{etf_id}_{x}')
            df_to_db(data_schema=self.data_schema, df=response_df, provider=self.provider)
            self.logger.info(f'{etf["证券简称"]} - {etf_code} 成分股抓取完成...')
            # Throttle between requests (Recorder.sleep).
            self.sleep()
    @staticmethod
    def populate_sh_etf_type(df: pd.DataFrame):
        """
        Populate each SH ETF code's TYPE into the list data.
        :param df: ETF list data
        :return: list data including each ETF's TYPE
        """
        query_url = 'http://query.sse.com.cn/infodisplay/queryETFNewAllInfo.do?' \
                    'isPagination=false&type={}&pageHelp.pageSize=25'
        type_df = pd.DataFrame()
        for etf_class in [1, 2]:
            url = query_url.format(etf_class)
            response = requests.get(url, headers=DEFAULT_SH_ETF_LIST_HEADER)
            response_dict = demjson3.decode(response.text)
            response_df = pd.DataFrame(response_dict.get('result', []))
            response_df = response_df[['fundid1', 'etftype']]
            type_df = pd.concat([type_df, response_df])
        result_df = df.copy()
        # NOTE(review): this join is positional - both frames are sorted by fund
        # id and 'etftype' is assigned by row order, which assumes the two
        # endpoints return exactly the same set of funds. Verify upstream.
        result_df = result_df.sort_values(by='FUND_ID').reset_index(drop=True)
        type_df = type_df.sort_values(by='fundid1').reset_index(drop=True)
        result_df['ETF_TYPE'] = type_df['etftype']
        return result_df
    @staticmethod
    def parse_sz_etf_underlying_index(df: pd.DataFrame):
        """
        Parse the index code tracked by each SZ ETF (mutates the '拟合指数' column in place).
        :param df: ETF list data
        :return: list data with each ETF's underlying index code parsed
        """
        def parse_index(text):
            # Keep only the leading digit run (the index code); '' when absent.
            if len(text) == 0:
                return ''
            result = re.search(r"(\d+).*", text)
            if result is None:
                return ''
            else:
                return result.group(1)
        df['拟合指数'] = df['拟合指数'].apply(parse_index)
if __name__ == '__main__':
    spider = ChinaETFListSpider(provider='exchange')
    spider.run()

# the __all__ is generated
# (the redundant second definition of __all__ that preceded the main guard
# with the identical value has been removed)
__all__ = ['ChinaETFListSpider']
"pandas.DataFrame",
"pandas.read_html",
"io.BytesIO",
"zvt.utils.time_utils.now_pd_timestamp",
"zvt.contract.api.df_to_db",
"zvt.api.utils.china_stock_code_to_id",
"demjson3.decode",
"requests.get",
"re.search",
"pandas.concat"
] | [((772, 825), 'requests.get', 'requests.get', (['url'], {'headers': 'DEFAULT_SH_ETF_LIST_HEADER'}), '(url, headers=DEFAULT_SH_ETF_LIST_HEADER)\n', (784, 825), False, 'import requests\n'), ((850, 880), 'demjson3.decode', 'demjson3.decode', (['response.text'], {}), '(response.text)\n', (865, 880), False, 'import demjson3\n'), ((1276, 1293), 'requests.get', 'requests.get', (['url'], {}), '(url)\n', (1288, 1293), False, 'import requests\n'), ((2204, 2280), 'zvt.contract.api.df_to_db', 'df_to_db', ([], {'df': 'df', 'data_schema': 'Etf', 'provider': 'self.provider', 'force_update': '(False)'}), '(df=df, data_schema=Etf, provider=self.provider, force_update=False)\n', (2212, 2280), False, 'from zvt.contract.api import df_to_db\n'), ((6440, 6454), 'pandas.DataFrame', 'pd.DataFrame', ([], {}), '()\n', (6452, 6454), True, 'import pandas as pd\n'), ((1322, 1350), 'io.BytesIO', 'io.BytesIO', (['response.content'], {}), '(response.content)\n', (1332, 1350), False, 'import io\n'), ((2922, 2975), 'requests.get', 'requests.get', (['url'], {'headers': 'DEFAULT_SH_ETF_LIST_HEADER'}), '(url, headers=DEFAULT_SH_ETF_LIST_HEADER)\n', (2934, 2975), False, 'import requests\n'), ((3004, 3034), 'demjson3.decode', 'demjson3.decode', (['response.text'], {}), '(response.text)\n', (3019, 3034), False, 'import demjson3\n'), ((3656, 3674), 'zvt.utils.time_utils.now_pd_timestamp', 'now_pd_timestamp', ([], {}), '()\n', (3672, 3674), False, 'from zvt.utils.time_utils import now_pd_timestamp\n'), ((3908, 3986), 'zvt.contract.api.df_to_db', 'df_to_db', ([], {'data_schema': 'self.data_schema', 'df': 'response_df', 'provider': 'self.provider'}), '(data_schema=self.data_schema, df=response_df, provider=self.provider)\n', (3916, 3986), False, 'from zvt.contract.api import df_to_db\n'), ((4646, 4663), 'requests.get', 'requests.get', (['url'], {}), '(url)\n', (4658, 4663), False, 'import requests\n'), ((5648, 5666), 'zvt.utils.time_utils.now_pd_timestamp', 'now_pd_timestamp', ([], {}), '()\n', (5664, 
5666), False, 'from zvt.utils.time_utils import now_pd_timestamp\n'), ((5900, 5978), 'zvt.contract.api.df_to_db', 'df_to_db', ([], {'data_schema': 'self.data_schema', 'df': 'response_df', 'provider': 'self.provider'}), '(data_schema=self.data_schema, df=response_df, provider=self.provider)\n', (5908, 5978), False, 'from zvt.contract.api import df_to_db\n'), ((6557, 6610), 'requests.get', 'requests.get', (['url'], {'headers': 'DEFAULT_SH_ETF_LIST_HEADER'}), '(url, headers=DEFAULT_SH_ETF_LIST_HEADER)\n', (6569, 6610), False, 'import requests\n'), ((6639, 6669), 'demjson3.decode', 'demjson3.decode', (['response.text'], {}), '(response.text)\n', (6654, 6669), False, 'import demjson3\n'), ((6827, 6860), 'pandas.concat', 'pd.concat', (['[type_df, response_df]'], {}), '([type_df, response_df])\n', (6836, 6860), True, 'import pandas as pd\n'), ((7428, 7455), 're.search', 're.search', (['"""(\\\\d+).*"""', 'text'], {}), "('(\\\\d+).*', text)\n", (7437, 7455), False, 'import re\n'), ((4742, 4779), 'pandas.read_html', 'pd.read_html', (['response.text'], {'header': '(1)'}), '(response.text, header=1)\n', (4754, 4779), True, 'import pandas as pd\n'), ((3759, 3787), 'zvt.api.utils.china_stock_code_to_id', 'china_stock_code_to_id', (['code'], {}), '(code)\n', (3781, 3787), False, 'from zvt.api.utils import china_stock_code_to_id\n'), ((5751, 5779), 'zvt.api.utils.china_stock_code_to_id', 'china_stock_code_to_id', (['code'], {}), '(code)\n', (5773, 5779), False, 'from zvt.api.utils import china_stock_code_to_id\n')] |
# Copyright (c) 2020, <NAME>, Honda Research Institute Europe GmbH, and
# Technical University of Darmstadt.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. Neither the name of <NAME>, Honda Research Institute Europe GmbH,
# or Technical University of Darmstadt, nor the names of its contributors may
# be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL <NAME>, HONDA RESEARCH INSTITUTE EUROPE GMBH,
# OR TECHNICAL UNIVERSITY OF DARMSTADT BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
# OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER
# IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""
Script to plot the observations from rollouts as well as their mean and standard deviation
"""
import os
import os.path as osp
import numpy as np
import torch as to
from tabulate import tabulate
import pyrado
from pyrado.logger.experiment import ask_for_experiment, load_dict_from_yaml
from pyrado.utils.argparser import get_argparser
from pyrado.utils.experiments import load_rollouts_from_dir
if __name__ == "__main__":
# Parse command line arguments
args = get_argparser().parse_args()
# Get the experiment's directory to load from
ex_dir = ask_for_experiment(hparam_list=args.show_hparams) if args.dir is None else args.dir
# Load the rollouts
rollouts, names = load_rollouts_from_dir(ex_dir)
# load rollouts from the
hparam, settings = None, None
for file_name in os.listdir(ex_dir):
if file_name.startswith("hparam") and file_name.endswith(".yaml"):
hparam = load_dict_from_yaml(osp.join(ex_dir, file_name))
elif file_name == "settings.yaml":
settings = load_dict_from_yaml(osp.join(ex_dir, file_name))
if not hparam:
raise pyrado.PathErr(msg="No hyperparam file could be found.")
# get the number of real rollouts from the hyperparams dict
if hparam.get("algo_hparam", None) and hparam.get("algo_hparam").get("num_real_rollouts", None):
num_real_rollouts = hparam.get("algo_hparam").get("num_real_rollouts", None)
elif settings and settings.get("algo_hparam", None):
num_real_rollouts = settings.get("algo_hparam").get("num_real_rollouts", None)
else:
raise pyrado.ValueErr(msg="No `num_real_rollouts` argument was found.")
# get list of iteration numbers and sort them in ascending order
prefix = "iter_"
iter_idcs = [int(name[name.find(prefix) + len(prefix)]) for name in names]
sorted_idcs = np.argsort(iter_idcs)
# collect the rewards
rewards = to.stack([r.undiscounted_return() for r in rollouts])
table = []
mean_reward = []
std_reward = []
for i in sorted_idcs:
mean_reward = to.mean(rewards[i * num_real_rollouts : (i + 1) * num_real_rollouts])
std_reward = to.std(rewards[i * num_real_rollouts : (i + 1) * num_real_rollouts])
max_reward = to.max(rewards[i * num_real_rollouts : (i + 1) * num_real_rollouts])
table.append([iter_idcs[i], num_real_rollouts, mean_reward, std_reward, max_reward])
headers = ("iteration", "num real rollouts", "mean reward", "std reward", "max reward")
# Yehaa
print(tabulate(table, headers))
# Save the table in a latex file if requested
if args.save:
# Save the table for LaTeX
table_latex_str = tabulate(table, headers, tablefmt="latex")
with open(osp.join(ex_dir, f"real_rollouts_rewards.tex"), "w") as tab_file:
print(table_latex_str, file=tab_file)
| [
"torch.mean",
"pyrado.PathErr",
"pyrado.utils.argparser.get_argparser",
"numpy.argsort",
"pyrado.ValueErr",
"torch.std",
"torch.max",
"pyrado.utils.experiments.load_rollouts_from_dir",
"tabulate.tabulate",
"pyrado.logger.experiment.ask_for_experiment",
"os.path.join",
"os.listdir"
] | [((2432, 2462), 'pyrado.utils.experiments.load_rollouts_from_dir', 'load_rollouts_from_dir', (['ex_dir'], {}), '(ex_dir)\n', (2454, 2462), False, 'from pyrado.utils.experiments import load_rollouts_from_dir\n'), ((2548, 2566), 'os.listdir', 'os.listdir', (['ex_dir'], {}), '(ex_dir)\n', (2558, 2566), False, 'import os\n'), ((3592, 3613), 'numpy.argsort', 'np.argsort', (['iter_idcs'], {}), '(iter_idcs)\n', (3602, 3613), True, 'import numpy as np\n'), ((2301, 2350), 'pyrado.logger.experiment.ask_for_experiment', 'ask_for_experiment', ([], {'hparam_list': 'args.show_hparams'}), '(hparam_list=args.show_hparams)\n', (2319, 2350), False, 'from pyrado.logger.experiment import ask_for_experiment, load_dict_from_yaml\n'), ((2862, 2918), 'pyrado.PathErr', 'pyrado.PathErr', ([], {'msg': '"""No hyperparam file could be found."""'}), "(msg='No hyperparam file could be found.')\n", (2876, 2918), False, 'import pyrado\n'), ((3813, 3880), 'torch.mean', 'to.mean', (['rewards[i * num_real_rollouts:(i + 1) * num_real_rollouts]'], {}), '(rewards[i * num_real_rollouts:(i + 1) * num_real_rollouts])\n', (3820, 3880), True, 'import torch as to\n'), ((3904, 3970), 'torch.std', 'to.std', (['rewards[i * num_real_rollouts:(i + 1) * num_real_rollouts]'], {}), '(rewards[i * num_real_rollouts:(i + 1) * num_real_rollouts])\n', (3910, 3970), True, 'import torch as to\n'), ((3994, 4060), 'torch.max', 'to.max', (['rewards[i * num_real_rollouts:(i + 1) * num_real_rollouts]'], {}), '(rewards[i * num_real_rollouts:(i + 1) * num_real_rollouts])\n', (4000, 4060), True, 'import torch as to\n'), ((4272, 4296), 'tabulate.tabulate', 'tabulate', (['table', 'headers'], {}), '(table, headers)\n', (4280, 4296), False, 'from tabulate import tabulate\n'), ((4428, 4470), 'tabulate.tabulate', 'tabulate', (['table', 'headers'], {'tablefmt': '"""latex"""'}), "(table, headers, tablefmt='latex')\n", (4436, 4470), False, 'from tabulate import tabulate\n'), ((2208, 2223), 'pyrado.utils.argparser.get_argparser', 
'get_argparser', ([], {}), '()\n', (2221, 2223), False, 'from pyrado.utils.argparser import get_argparser\n'), ((3338, 3403), 'pyrado.ValueErr', 'pyrado.ValueErr', ([], {'msg': '"""No `num_real_rollouts` argument was found."""'}), "(msg='No `num_real_rollouts` argument was found.')\n", (3353, 3403), False, 'import pyrado\n'), ((2684, 2711), 'os.path.join', 'osp.join', (['ex_dir', 'file_name'], {}), '(ex_dir, file_name)\n', (2692, 2711), True, 'import os.path as osp\n'), ((4489, 4535), 'os.path.join', 'osp.join', (['ex_dir', 'f"""real_rollouts_rewards.tex"""'], {}), "(ex_dir, f'real_rollouts_rewards.tex')\n", (4497, 4535), True, 'import os.path as osp\n'), ((2799, 2826), 'os.path.join', 'osp.join', (['ex_dir', 'file_name'], {}), '(ex_dir, file_name)\n', (2807, 2826), True, 'import os.path as osp\n')] |
import sys
def min_candies(rating):
    """Return the minimum total candies for the classic "candy" problem.

    Each child gets at least one candy, and any child rated higher than an
    adjacent child must receive more candies than that neighbour. Solved with
    the standard two-pass (left-to-right, then right-to-left) sweep.

    :param rating: sequence of integer ratings, one per child
    :return: minimal total number of candies
    """
    n = len(rating)
    candies = [1] * n
    # Left-to-right: enforce the constraint against the left neighbour.
    for i in range(n - 1):
        if rating[i + 1] > rating[i]:
            candies[i + 1] = candies[i] + 1
    # Right-to-left: enforce the constraint against the right neighbour
    # without breaking the first pass's guarantees.
    for i in reversed(range(n - 1)):
        if rating[i] > rating[i + 1] and candies[i] <= candies[i + 1]:
            candies[i] = candies[i + 1] + 1
    return sum(candies)

if __name__ == '__main__':
    # Input format: first line N, then N lines with one rating each.
    N = int(sys.stdin.readline())
    rating = [int(sys.stdin.readline()) for _ in range(N)]
    print(min_candies(rating))
| [
"sys.stdin.readline"
] | [((51, 71), 'sys.stdin.readline', 'sys.stdin.readline', ([], {}), '()\n', (69, 71), False, 'import sys\n'), ((91, 111), 'sys.stdin.readline', 'sys.stdin.readline', ([], {}), '()\n', (109, 111), False, 'import sys\n')] |
import numpy as np
def one_step_lookahead(environment, state, V, discount_factor):
    """
    Helper function to calculate a state-value function.
    :param environment: Initialized OpenAI gym environment object.
    :param state: Agent's state to consider (integer).
    :param V: The value to use as an estimator. Vector of length nS.
    :param discount_factor: MDP discount factor.
    :return: A vector of length nA containing the expected value of each action.
    """
    n_actions = environment.nA
    expected_values = np.zeros(n_actions)
    for action in range(n_actions):
        # Sum over every transition (prob, next state, reward, done) for this action.
        for prob, next_state, reward, _terminated in environment.P[state][action]:
            expected_values[action] += prob * (reward + discount_factor * V[next_state])
    return expected_values
def policy_evaluation(policy, environment, discount_factor=1.0, theta=1e-9, max_iter=1e9):
    """
    Evaluate a policy on a deterministic environment.

    :param policy: nS x nA matrix; policy[s][a] is the probability of taking action a in state s.
    :param environment: Initialized OpenAI gym environment object.
    :param discount_factor: MDP discount factor in [0, 1].
    :param theta: Convergence threshold on the per-sweep value change.
    :param max_iter: Hard cap on sweeps, to avoid infinite loops.
    :return: Vector of length nS with the value of each state.
    """
    evaluation_iterations = 1
    V = np.zeros(environment.nS)
    for _ in range(int(max_iter)):
        delta = 0
        for state in range(environment.nS):
            updated = 0
            # Expected return over the policy's action distribution and the
            # environment's transition distribution.
            for action, action_probability in enumerate(policy[state]):
                for transition_probability, next_state, reward, _terminated in environment.P[state][action]:
                    updated += action_probability * transition_probability * \
                        (reward + discount_factor * V[next_state])
            delta = max(delta, np.abs(V[state] - updated))
            V[state] = updated
        evaluation_iterations += 1
        # Stop once no state's value moved by more than theta.
        if delta < theta:
            print(f'Policy evaluated in {evaluation_iterations} iterations.')
            return V
def policy_iteration(environment, discount_factor=1.0, max_iter=1e9):
    """
    Policy iteration algorithm to solve MDP.
    :param environment: Initialized OpenAI gym environment object.
    :param discount_factor: MDP discount factor. Float in range from 0 to 1.
    :param max_iter: Maximum number of iterations to prevent infinite loops.
    :return: tuple(policy, V), which consist of an optimal policy matrix and value function for each state.
    """
    # Start with a uniformly random policy (nS x nA).
    policy = np.ones([environment.nS, environment.nA]) / environment.nA
    # Counter of evaluated policies, for the convergence message.
    evaluated_policies = 1
    # Repeat until convergence or critical number of iterations reached.
    for i in range(int(max_iter)):
        stable_policy = True
        # Evaluate current policy.
        V = policy_evaluation(policy, environment, discount_factor=discount_factor)
        # Go through each state and try to improve the action taken there.
        for state in range(environment.nS):
            # Action currently chosen by the (deterministic view of the) policy.
            current_action = np.argmax(policy[state])
            # One-step lookahead over every possible action in this state.
            action_value = one_step_lookahead(environment, state, V, discount_factor)
            best_action = np.argmax(action_value)
            if current_action != best_action:
                # BUG FIX: the policy changed, so it is NOT stable yet.
                # (The original set stable_policy = True here, which made the
                # function return after a single improvement sweep with a
                # value function evaluated for the *previous* policy.)
                stable_policy = False
                # Greedy policy update.
                policy[state] = np.eye(environment.nA)[best_action]
        evaluated_policies += 1
        # Converged: the greedy improvement changed nothing, so the policy is optimal.
        if stable_policy:
            print(f'Evaluated {evaluated_policies} policies.')
            return policy, V
def value_iteration(environment, discount_factor=1.0, theta=1e-9, max_iterations=1e9):
    """
    Solve an MDP with the value iteration algorithm.

    :param environment: Initialized OpenAI environment object.
    :param discount_factor: MDP discount factor.
    :param theta: Stop once no state's value changes by more than theta in one sweep.
    :param max_iterations: Hard cap on sweeps, to prevent infinite loops.
    :return: tuple (policy, V) with the optimal deterministic policy and optimal value function.
    """
    # State-value function, initialized to zero for every state.
    V = np.zeros(environment.nS)
    for sweep in range(int(max_iterations)):
        delta = 0
        for state in range(environment.nS):
            # Best achievable one-step return from this state.
            best_value = np.max(one_step_lookahead(environment, state, V, discount_factor))
            delta = max(delta, np.abs(V[state] - best_value))
            V[state] = best_value
        if delta < theta:
            print(f'Value-iteration converged at iteration#{sweep}.')
            break
    # Extract the deterministic greedy policy implied by the optimal values.
    policy = np.zeros([environment.nS, environment.nA])
    for state in range(environment.nS):
        best_action = np.argmax(one_step_lookahead(environment, state, V, discount_factor))
        policy[state, best_action] = 1.0
    return policy, V
| [
"numpy.abs",
"numpy.argmax",
"numpy.zeros",
"numpy.ones",
"numpy.max",
"numpy.eye"
] | [((502, 526), 'numpy.zeros', 'np.zeros', (['environment.nA'], {}), '(environment.nA)\n', (510, 526), True, 'import numpy as np\n'), ((1553, 1577), 'numpy.zeros', 'np.zeros', (['environment.nS'], {}), '(environment.nS)\n', (1561, 1577), True, 'import numpy as np\n'), ((5437, 5461), 'numpy.zeros', 'np.zeros', (['environment.nS'], {}), '(environment.nS)\n', (5445, 5461), True, 'import numpy as np\n'), ((6362, 6404), 'numpy.zeros', 'np.zeros', (['[environment.nS, environment.nA]'], {}), '([environment.nS, environment.nA])\n', (6370, 6404), True, 'import numpy as np\n'), ((3315, 3356), 'numpy.ones', 'np.ones', (['[environment.nS, environment.nA]'], {}), '([environment.nS, environment.nA])\n', (3322, 3356), True, 'import numpy as np\n'), ((6689, 6712), 'numpy.argmax', 'np.argmax', (['action_value'], {}), '(action_value)\n', (6698, 6712), True, 'import numpy as np\n'), ((3933, 3957), 'numpy.argmax', 'np.argmax', (['policy[state]'], {}), '(policy[state])\n', (3942, 3957), True, 'import numpy as np\n'), ((4252, 4275), 'numpy.argmax', 'np.argmax', (['action_value'], {}), '(action_value)\n', (4261, 4275), True, 'import numpy as np\n'), ((5906, 5926), 'numpy.max', 'np.max', (['action_value'], {}), '(action_value)\n', (5912, 5926), True, 'import numpy as np\n'), ((2461, 2481), 'numpy.abs', 'np.abs', (['(V[state] - v)'], {}), '(V[state] - v)\n', (2467, 2481), True, 'import numpy as np\n'), ((4462, 4484), 'numpy.eye', 'np.eye', (['environment.nA'], {}), '(environment.nA)\n', (4468, 4484), True, 'import numpy as np\n'), ((5999, 6035), 'numpy.abs', 'np.abs', (['(V[state] - best_action_value)'], {}), '(V[state] - best_action_value)\n', (6005, 6035), True, 'import numpy as np\n')] |
# BuildTarget: images/interfaceDefaultLightPlug.png
# BuildTarget: images/interfaceLightLinkSetupGraphEditor.png
# BuildTarget: images/interfaceLightSetGraphEditor.png
# BuildTarget: images/interfaceLightSetNodeEditor.png
# BuildTarget: images/interfaceLinkedLightsPlug.png
# BuildTarget: images/taskLightLinkingSetExpressionLocation.png
# BuildTarget: images/taskLightLinkingSetExpressionSet.png
import os
import subprocess32 as subprocess
import tempfile
import time
import imath
import IECore
import Gaffer
import GafferScene
import GafferUI
import GafferSceneUI
import GafferAppleseed
scriptWindow = GafferUI.ScriptWindow.acquire( script )
viewer = scriptWindow.getLayout().editors( GafferUI.Viewer )[0]
graphEditor = scriptWindow.getLayout().editors( GafferUI.GraphEditor )[0]
hierarchyView = scriptWindow.getLayout().editors( GafferSceneUI.HierarchyView )[0]
# Base graph
# Build a minimal scene: a sphere and a light grouped together, with a
# StandardAttributes node that links the sphere to the light via the
# linkedLights attribute.
script["Sphere"] = GafferScene.Sphere()
script["Group"] = GafferScene.Group()
script["Light"] = GafferAppleseed.AppleseedLight()
script["Group"]["in"]["in0"].setInput( script["Sphere"]["out"] )
script["Group"]["in"]["in1"].setInput( script["Light"]["out"] )
script["PathFilter"] = GafferScene.PathFilter()
script["PathFilter"]["paths"].setValue( IECore.StringVectorData( [ "/group/sphere" ] ) )
script["StandardAttributes"] = GafferScene.StandardAttributes()
script["StandardAttributes"]["in"].setInput( script["Group"]["out"] )
script["StandardAttributes"]["filter"].setInput( script["PathFilter"]["out"] )
script["StandardAttributes"]["attributes"]["linkedLights"]["enabled"].setValue( True )
script["StandardAttributes"]["attributes"]["linkedLights"]["value"].setValue( "/group/light" )
script.addChild( script["Sphere"] )
script.addChild( script["Light"] )
script.addChild( script["Group"] )
script.addChild( script["StandardAttributes"] )
script.addChild( script["PathFilter"] )
# Interface: the Default Light plug of a light node in the Node Editor
# TODO: "AppleseedLight" label clearly visible; figure out a way to fake "ArnoldLight" label
nodeEditorWindow = GafferUI.NodeEditor.acquire( script["Light"], floating = True )
nodeEditorWindow._qtWidget().setFocus()
# NOTE(review): acquire() is called for its side effect only — presumably it
# reveals the plug in the editor before the screengrab; confirm against the
# GafferUI.PlugValueWidget documentation.
GafferUI.PlugValueWidget.acquire( script["Light"]["defaultLight"] )
GafferUI.WidgetAlgo.grab( widget = nodeEditorWindow, imagePath = "images/interfaceDefaultLightPlug.png" )
nodeEditorWindow.parent().close()
del nodeEditorWindow
# Interface: the linkedLights attribute in the Scene Inspector
script.selection().clear()
script.selection().add( script["StandardAttributes"] )
__path = "/group/sphere"
__paths = IECore.PathMatcher( [ __path ] )
GafferSceneUI.ContextAlgo.setSelectedPaths( script.context(), __paths )
from GafferSceneUI.SceneInspector import __AttributesSection
for imageName, sectionClass in [
	( "LinkedLightsAttribute.png", __AttributesSection )
] :
	# Screengrab the Scene Inspector section that displays the linkedLights
	# attribute for the selected sphere location.
	section = sectionClass()
	section._Section__collapsible.setCollapsed( False )
	with GafferUI.Window( "Property" ) as window :
		sceneInspector = GafferSceneUI.SceneInspector( script, sections = [ section ] )
		sceneInspector.setNodeSet( Gaffer.StandardSet( [ script["StandardAttributes"] ] ) )
		sceneInspector.setTargetPaths( [ __path ] )
	window.resizeToFitChild()
	window.setVisible( True )
	GafferUI.WidgetAlgo.grab( widget = sceneInspector, imagePath = "images/interface" + imageName )
# Interface: a StandardAttributes node downstream of an object node
script.selection().clear()
graphEditor.frame( script.children( Gaffer.Node ) )
GafferUI.WidgetAlgo.grab( widget = graphEditor, imagePath = "images/interfaceLightLinkSetupGraphEditor.png" )
# Interface: the empty Linked Lights plug of a StandardAttributes node in the Node Editor
script["StandardAttributes"]["attributes"]["linkedLights"]["value"].setValue( "" )
nodeEditorWindow = GafferUI.NodeEditor.acquire( script["StandardAttributes"], floating = True )
nodeEditorWindow._qtWidget().setFocus()
GafferUI.PlugValueWidget.acquire( script["StandardAttributes"]["attributes"]["linkedLights"] )
GafferUI.WidgetAlgo.grab( widget = nodeEditorWindow, imagePath = "images/interfaceLinkedLightsPlug.png" )
nodeEditorWindow.parent().close()
del nodeEditorWindow
# Task: the light linking set expression with a location
script["StandardAttributes"]["attributes"]["linkedLights"]["value"].setValue( "/group/light" )
nodeEditorWindow = GafferUI.NodeEditor.acquire( script["StandardAttributes"], floating = True )
nodeEditorWindow._qtWidget().setFocus()
GafferUI.WidgetAlgo.grab( widget = nodeEditorWindow, imagePath = "images/taskLightLinkingSetExpressionLocation.png" )
nodeEditorWindow.parent().close()
del nodeEditorWindow
# Task: a Set node in the Node Editor
# Put the light into a named set ("myLights") so the linkedLights expression
# can reference a set instead of a location path.
script["Set"] = GafferScene.Set()
script["Set"]["in"].setInput( script["Light"]["out"] )
script["Set"]["name"].setValue( "myLights" )
script["Set"]["paths"].setValue( IECore.StringVectorData( [ "/light" ] ) )
script["Group"]["in"][1].setInput( script["Set"]["out"] )
script.addChild( script["Set"] )
nodeEditorWindow = GafferUI.NodeEditor.acquire( script["Set"], floating = True )
nodeEditorWindow._qtWidget().setFocus()
GafferUI.WidgetAlgo.grab( widget = nodeEditorWindow, imagePath = "images/interfaceLightSetNodeEditor.png" )
nodeEditorWindow.parent().close()
del nodeEditorWindow
# Task: a Set node downstream of a light node in the Graph Editor
graphGadget = GafferUI.GraphGadget( script )
graphGadget.getLayout().layoutNodes( graphGadget )
graphEditor.frame( Gaffer.StandardSet( [ script["Set"] ] ) )
GafferUI.WidgetAlgo.grab( widget = graphEditor, imagePath = "images/interfaceLightSetGraphEditor.png" )
# Task: the light linking set expression with a set
script["StandardAttributes"]["attributes"]["linkedLights"]["value"].setValue( "myLights" )
nodeEditorWindow = GafferUI.NodeEditor.acquire( script["StandardAttributes"], floating = True )
nodeEditorWindow._qtWidget().setFocus()
GafferUI.WidgetAlgo.grab( widget = nodeEditorWindow, imagePath = "images/taskLightLinkingSetExpressionSet.png" )
nodeEditorWindow.parent().close()
del nodeEditorWindow
| [
"GafferScene.Sphere",
"GafferUI.PlugValueWidget.acquire",
"GafferUI.WidgetAlgo.grab",
"GafferSceneUI.SceneInspector",
"GafferUI.ScriptWindow.acquire",
"GafferAppleseed.AppleseedLight",
"GafferScene.StandardAttributes",
"IECore.PathMatcher",
"GafferScene.Group",
"GafferUI.NodeEditor.acquire",
"Ga... | [((607, 644), 'GafferUI.ScriptWindow.acquire', 'GafferUI.ScriptWindow.acquire', (['script'], {}), '(script)\n', (636, 644), False, 'import GafferUI\n'), ((901, 921), 'GafferScene.Sphere', 'GafferScene.Sphere', ([], {}), '()\n', (919, 921), False, 'import GafferScene\n'), ((940, 959), 'GafferScene.Group', 'GafferScene.Group', ([], {}), '()\n', (957, 959), False, 'import GafferScene\n'), ((978, 1010), 'GafferAppleseed.AppleseedLight', 'GafferAppleseed.AppleseedLight', ([], {}), '()\n', (1008, 1010), False, 'import GafferAppleseed\n'), ((1163, 1187), 'GafferScene.PathFilter', 'GafferScene.PathFilter', ([], {}), '()\n', (1185, 1187), False, 'import GafferScene\n'), ((1308, 1340), 'GafferScene.StandardAttributes', 'GafferScene.StandardAttributes', ([], {}), '()\n', (1338, 1340), False, 'import GafferScene\n'), ((2050, 2109), 'GafferUI.NodeEditor.acquire', 'GafferUI.NodeEditor.acquire', (["script['Light']"], {'floating': '(True)'}), "(script['Light'], floating=True)\n", (2077, 2109), False, 'import GafferUI\n'), ((2154, 2219), 'GafferUI.PlugValueWidget.acquire', 'GafferUI.PlugValueWidget.acquire', (["script['Light']['defaultLight']"], {}), "(script['Light']['defaultLight'])\n", (2186, 2219), False, 'import GafferUI\n'), ((2222, 2326), 'GafferUI.WidgetAlgo.grab', 'GafferUI.WidgetAlgo.grab', ([], {'widget': 'nodeEditorWindow', 'imagePath': '"""images/interfaceDefaultLightPlug.png"""'}), "(widget=nodeEditorWindow, imagePath=\n 'images/interfaceDefaultLightPlug.png')\n", (2246, 2326), False, 'import GafferUI\n'), ((2564, 2592), 'IECore.PathMatcher', 'IECore.PathMatcher', (['[__path]'], {}), '([__path])\n', (2582, 2592), False, 'import IECore\n'), ((3468, 3576), 'GafferUI.WidgetAlgo.grab', 'GafferUI.WidgetAlgo.grab', ([], {'widget': 'graphEditor', 'imagePath': '"""images/interfaceLightLinkSetupGraphEditor.png"""'}), "(widget=graphEditor, imagePath=\n 'images/interfaceLightLinkSetupGraphEditor.png')\n", (3492, 3576), False, 'import GafferUI\n'), ((3771, 3843), 
'GafferUI.NodeEditor.acquire', 'GafferUI.NodeEditor.acquire', (["script['StandardAttributes']"], {'floating': '(True)'}), "(script['StandardAttributes'], floating=True)\n", (3798, 3843), False, 'import GafferUI\n'), ((3888, 3985), 'GafferUI.PlugValueWidget.acquire', 'GafferUI.PlugValueWidget.acquire', (["script['StandardAttributes']['attributes']['linkedLights']"], {}), "(script['StandardAttributes']['attributes']\n ['linkedLights'])\n", (3920, 3985), False, 'import GafferUI\n'), ((3983, 4087), 'GafferUI.WidgetAlgo.grab', 'GafferUI.WidgetAlgo.grab', ([], {'widget': 'nodeEditorWindow', 'imagePath': '"""images/interfaceLinkedLightsPlug.png"""'}), "(widget=nodeEditorWindow, imagePath=\n 'images/interfaceLinkedLightsPlug.png')\n", (4007, 4087), False, 'import GafferUI\n'), ((4316, 4388), 'GafferUI.NodeEditor.acquire', 'GafferUI.NodeEditor.acquire', (["script['StandardAttributes']"], {'floating': '(True)'}), "(script['StandardAttributes'], floating=True)\n", (4343, 4388), False, 'import GafferUI\n'), ((4433, 4549), 'GafferUI.WidgetAlgo.grab', 'GafferUI.WidgetAlgo.grab', ([], {'widget': 'nodeEditorWindow', 'imagePath': '"""images/taskLightLinkingSetExpressionLocation.png"""'}), "(widget=nodeEditorWindow, imagePath=\n 'images/taskLightLinkingSetExpressionLocation.png')\n", (4457, 4549), False, 'import GafferUI\n'), ((4661, 4678), 'GafferScene.Set', 'GafferScene.Set', ([], {}), '()\n', (4676, 4678), False, 'import GafferScene\n'), ((4964, 5021), 'GafferUI.NodeEditor.acquire', 'GafferUI.NodeEditor.acquire', (["script['Set']"], {'floating': '(True)'}), "(script['Set'], floating=True)\n", (4991, 5021), False, 'import GafferUI\n'), ((5066, 5172), 'GafferUI.WidgetAlgo.grab', 'GafferUI.WidgetAlgo.grab', ([], {'widget': 'nodeEditorWindow', 'imagePath': '"""images/interfaceLightSetNodeEditor.png"""'}), "(widget=nodeEditorWindow, imagePath=\n 'images/interfaceLightSetNodeEditor.png')\n", (5090, 5172), False, 'import GafferUI\n'), ((5310, 5338), 'GafferUI.GraphGadget', 
'GafferUI.GraphGadget', (['script'], {}), '(script)\n', (5330, 5338), False, 'import GafferUI\n'), ((5453, 5555), 'GafferUI.WidgetAlgo.grab', 'GafferUI.WidgetAlgo.grab', ([], {'widget': 'graphEditor', 'imagePath': '"""images/interfaceLightSetGraphEditor.png"""'}), "(widget=graphEditor, imagePath=\n 'images/interfaceLightSetGraphEditor.png')\n", (5477, 5555), False, 'import GafferUI\n'), ((5720, 5792), 'GafferUI.NodeEditor.acquire', 'GafferUI.NodeEditor.acquire', (["script['StandardAttributes']"], {'floating': '(True)'}), "(script['StandardAttributes'], floating=True)\n", (5747, 5792), False, 'import GafferUI\n'), ((5837, 5948), 'GafferUI.WidgetAlgo.grab', 'GafferUI.WidgetAlgo.grab', ([], {'widget': 'nodeEditorWindow', 'imagePath': '"""images/taskLightLinkingSetExpressionSet.png"""'}), "(widget=nodeEditorWindow, imagePath=\n 'images/taskLightLinkingSetExpressionSet.png')\n", (5861, 5948), False, 'import GafferUI\n'), ((1228, 1270), 'IECore.StringVectorData', 'IECore.StringVectorData', (["['/group/sphere']"], {}), "(['/group/sphere'])\n", (1251, 1270), False, 'import IECore\n'), ((3224, 3318), 'GafferUI.WidgetAlgo.grab', 'GafferUI.WidgetAlgo.grab', ([], {'widget': 'sceneInspector', 'imagePath': "('images/interface' + imageName)"}), "(widget=sceneInspector, imagePath=\n 'images/interface' + imageName)\n", (3248, 3318), False, 'import GafferUI\n'), ((4812, 4847), 'IECore.StringVectorData', 'IECore.StringVectorData', (["['/light']"], {}), "(['/light'])\n", (4835, 4847), False, 'import IECore\n'), ((5411, 5446), 'Gaffer.StandardSet', 'Gaffer.StandardSet', (["[script['Set']]"], {}), "([script['Set']])\n", (5429, 5446), False, 'import Gaffer\n'), ((2910, 2937), 'GafferUI.Window', 'GafferUI.Window', (['"""Property"""'], {}), "('Property')\n", (2925, 2937), False, 'import GafferUI\n'), ((2972, 3028), 'GafferSceneUI.SceneInspector', 'GafferSceneUI.SceneInspector', (['script'], {'sections': '[section]'}), '(script, sections=[section])\n', (3000, 3028), False, 'import 
GafferSceneUI\n'), ((3064, 3114), 'Gaffer.StandardSet', 'Gaffer.StandardSet', (["[script['StandardAttributes']]"], {}), "([script['StandardAttributes']])\n", (3082, 3114), False, 'import Gaffer\n')] |
import numpy as np
def hit_rate(array1, array2):
    """
    Fraction of cells flagged in ``array1`` that are also flagged in ``array2``
    (i.e. flooded in both boolean maps).
    """
    n_hits = np.sum(np.logical_and(array1, array2))
    n_observed = np.sum(array1)
    return float(n_hits) / float(n_observed)
def false_alarm_rate(array1, array2):
    """
    Fraction of cells flagged in ``array2`` that are not flagged in ``array1``
    (cells where array2 is True but array1 is not 1).
    """
    n_false = np.sum(np.logical_and(array2, array1 != 1))
    n_predicted = np.sum(array2)
    return float(n_false) / float(n_predicted)
def critical_success(array1, array2):
    """
    Critical success index: cells flagged in both boolean maps divided by
    cells flagged in either map.
    """
    n_both = np.sum(np.logical_and(array1, array2))
    n_any = np.sum(np.logical_or(array1, array2))
    return float(n_both) / float(n_any)
def contingency_map(array1, array2, threshold1=0., threshold2=0.):
    """
    Establish the contingency between array1 and array2.

    The returned map encodes, per cell:
      0 - neither array exceeds its threshold
      1 - only array2 exceeds threshold2
      2 - only array1 exceeds threshold1
      3 - both arrays exceed their thresholds

    :return: (array1 exceedance mask, array2 exceedance mask, contingency map)
    """
    exceed1 = array1 > threshold1
    exceed2 = array2 > threshold2
    # Accumulate into a float map (same shape as the inputs): array2 hits
    # contribute 1, array1 hits contribute 2, so both together give 3.
    contingency = np.zeros(array1.shape)
    contingency += np.int16(exceed2) + 2 * np.int16(exceed1)
    return exceed1, exceed2, contingency
def calc_contingency(bench_d, model_d, bench_thres, model_thres):
    """
    Determine hit rate, false alarm ratio, critical success index, and the
    contingency map for a benchmark (observed) and model (simulated) flood map.

    :param bench_d: rasterio-style dataset with the benchmark map (band 1 is read).
    :param model_d: rasterio-style dataset with the modelled map (band 1 is read).
    :param bench_thres: flood threshold applied to the benchmark map.
    :param model_thres: flood threshold applied to the model map.
    :return: tuple (hit_rate, false_alarm_rate, critical_success, contingency_array)
    """
    # NOTE: the original also read width/height/bounds into locals that were
    # never used; those dead reads have been removed.
    bench_data = bench_d.read(1)
    model_data = model_d.read(1)
    # Nodata cells must not count as flooded: zero them so they stay below
    # any positive flood threshold.
    bench_data[bench_data == bench_d.nodata] = 0.
    model_data[model_data == model_d.nodata] = 0.
    flood1, flood2, cont_arr = contingency_map(bench_data, model_data, threshold1=bench_thres, threshold2=model_thres)
    hr = hit_rate(flood1, flood2)
    far = false_alarm_rate(flood1, flood2)
    csi = critical_success(flood1, flood2)
    return hr, far, csi, cont_arr
"numpy.sum",
"numpy.logical_and",
"numpy.zeros",
"numpy.logical_or",
"numpy.int16"
] | [((280, 294), 'numpy.sum', 'np.sum', (['array1'], {}), '(array1)\n', (286, 294), True, 'import numpy as np\n'), ((665, 679), 'numpy.sum', 'np.sum', (['array2'], {}), '(array2)\n', (671, 679), True, 'import numpy as np\n'), ((1640, 1662), 'numpy.zeros', 'np.zeros', (['array1.shape'], {}), '(array1.shape)\n', (1648, 1662), True, 'import numpy as np\n'), ((1682, 1704), 'numpy.int16', 'np.int16', (['array2_thres'], {}), '(array2_thres)\n', (1690, 1704), True, 'import numpy as np\n'), ((236, 266), 'numpy.logical_and', 'np.logical_and', (['array1', 'array2'], {}), '(array1, array2)\n', (250, 266), True, 'import numpy as np\n'), ((612, 647), 'numpy.logical_and', 'np.logical_and', (['array2', '(array1 != 1)'], {}), '(array2, array1 != 1)\n', (626, 647), True, 'import numpy as np\n'), ((878, 908), 'numpy.logical_and', 'np.logical_and', (['array1', 'array2'], {}), '(array1, array2)\n', (892, 908), True, 'import numpy as np\n'), ((934, 963), 'numpy.logical_or', 'np.logical_or', (['array1', 'array2'], {}), '(array1, array2)\n', (947, 963), True, 'import numpy as np\n'), ((1724, 1746), 'numpy.int16', 'np.int16', (['array1_thres'], {}), '(array1_thres)\n', (1732, 1746), True, 'import numpy as np\n')] |
import apsis.actions
import apsis.lib.json
from apsis.lib.py import tupleize
from apsis.lib import email
#-------------------------------------------------------------------------------
# FIXME: jinja2?
# HTML template for the notification message body.  The {subject},
# {program} and {output} placeholders are filled in by EmailAction.__call__
# via str.format.
TEMPLATE = """<!doctype html>
<html>
<head>
<title>{subject}</title>
</head>
<body>
<p>
program: <code>{program}</code>
</p>
<pre>{output}</pre>
</body>
</html>
"""
class EmailAction:
    """
    Action that sends an HTML email summarizing a run's program and output.
    """

    def __init__(self, to=(), *, from_=None, condition=None):
        self.__to = tupleize(to)
        self.__from = from_
        self.__condition = condition

    @classmethod
    def from_jso(cls, jso):
        """Build the action from its JSON-ish dict representation."""
        with apsis.lib.json.check_schema(jso) as pop:
            recipients = pop("to", str)
            sender = pop("from", str)
            condition = pop("if", apsis.actions.Condition.from_jso, default=None)
        return cls(recipients, from_=sender, condition=condition)

    def to_jso(self):
        """Return the JSON-ish dict representation of this action."""
        jso = {
            "to": list(self.__to),
            "from": self.__from,
            "if": None,
        }
        if self.__condition is not None:
            jso["if"] = self.__condition.to_jso()
        return jso

    async def __call__(self, apsis, run):
        # Skip entirely when a condition is configured and doesn't match.
        if self.__condition is not None and not self.__condition(run):
            return
        subject = f"Apsis {run.run_id}: {run.inst}: {run.state.name}"
        program = str(run.program)
        output_meta = apsis.outputs.get_metadata(run.run_id)
        if "output" in output_meta:
            output = apsis.outputs.get_data(run.run_id, "output").decode()
        else:
            output = ""
        body = TEMPLATE.format(subject=subject, program=program, output=output)
        smtp_cfg = apsis.cfg.get("smtp", {})
        email.send_html(
            self.__to, subject, body, from_=self.__from, smtp_cfg=smtp_cfg)
| [
"apsis.lib.py.tupleize",
"apsis.lib.email.send_html"
] | [((554, 566), 'apsis.lib.py.tupleize', 'tupleize', (['to'], {}), '(to)\n', (562, 566), False, 'from apsis.lib.py import tupleize\n'), ((1730, 1809), 'apsis.lib.email.send_html', 'email.send_html', (['self.__to', 'subject', 'body'], {'from_': 'self.__from', 'smtp_cfg': 'smtp_cfg'}), '(self.__to, subject, body, from_=self.__from, smtp_cfg=smtp_cfg)\n', (1745, 1809), False, 'from apsis.lib import email\n')] |
# -*- coding:utf-8 -*-
"""
Asynchronous driven quantitative trading framework.
Author: HuangTao
Date: 2017/04/26
Email: <EMAIL>
"""
import signal
import asyncio
from quant.utils import logger
from quant.config import config
class Quant:
    """ Asynchronous driven quantitative trading framework.

    Owns the main asyncio event loop; `initialize()` wires up configuration,
    logging, the optional event center and the heartbeat, and `start()`
    runs the loop forever.
    """

    def __init__(self):
        self.loop = None          # main asyncio event loop, created lazily
        self.event_center = None  # RabbitMQ event center, only if configured

    def initialize(self, config_module=None):
        """ Initialize the framework.

        Args:
            config_module: config file path, normally a json file.
        """
        self._get_event_loop()
        self._load_settings(config_module)
        self._init_logger()
        self._init_event_center()
        self._do_heartbeat()

    def start(self):
        """Install a SIGINT handler and run the event loop forever."""
        def keyboard_interrupt(s, f):
            print("KeyboardInterrupt (ID: {}) has been caught. Cleaning up...".format(s))
            self.loop.stop()
        signal.signal(signal.SIGINT, keyboard_interrupt)
        logger.info("start io loop ...", caller=self)
        self.loop.run_forever()

    def stop(self):
        """Stop the event loop."""
        logger.info("stop io loop.", caller=self)
        self.loop.stop()

    def _get_event_loop(self):
        """Return the main io loop, creating it on first use."""
        if self.loop is None:
            self.loop = asyncio.get_event_loop()
        return self.loop

    def _load_settings(self, config_module):
        """Load the configuration file into the global `config` object."""
        config.loads(config_module)

    def _init_logger(self):
        """Configure the global logger from the config's `log` section."""
        log_settings = config.log
        to_console = log_settings.get("console", True)
        level = log_settings.get("level", "DEBUG")
        path = log_settings.get("path", "/tmp/logs/Quant")
        name = log_settings.get("name", "quant.log")
        clear = log_settings.get("clear", False)
        backup_count = log_settings.get("backup_count", 0)
        if to_console:
            logger.initLogger(level)
        else:
            logger.initLogger(level, path, name, clear, backup_count)

    def _init_event_center(self):
        """Connect the RabbitMQ event center when it is configured."""
        if not config.rabbitmq:
            return
        from quant.event import EventCenter
        self.event_center = EventCenter()
        self.loop.run_until_complete(self.event_center.connect())

    def _do_heartbeat(self):
        """Schedule the first server heartbeat tick."""
        from quant.heartbeat import heartbeat
        self.loop.call_later(0.5, heartbeat.ticker)


# Module-level singleton used across the framework.
quant = Quant()
| [
"asyncio.get_event_loop",
"quant.event.EventCenter",
"quant.utils.logger.initLogger",
"quant.utils.logger.info",
"quant.config.config.loads",
"quant.config.config.log.get",
"signal.signal"
] | [((955, 1003), 'signal.signal', 'signal.signal', (['signal.SIGINT', 'keyboard_interrupt'], {}), '(signal.SIGINT, keyboard_interrupt)\n', (968, 1003), False, 'import signal\n'), ((1013, 1058), 'quant.utils.logger.info', 'logger.info', (['"""start io loop ..."""'], {'caller': 'self'}), "('start io loop ...', caller=self)\n", (1024, 1058), False, 'from quant.utils import logger\n'), ((1155, 1196), 'quant.utils.logger.info', 'logger.info', (['"""stop io loop."""'], {'caller': 'self'}), "('stop io loop.', caller=self)\n", (1166, 1196), False, 'from quant.utils import logger\n'), ((1577, 1604), 'quant.config.config.loads', 'config.loads', (['config_module'], {}), '(config_module)\n', (1589, 1604), False, 'from quant.config import config\n'), ((1685, 1716), 'quant.config.config.log.get', 'config.log.get', (['"""console"""', '(True)'], {}), "('console', True)\n", (1699, 1716), False, 'from quant.config import config\n'), ((1733, 1765), 'quant.config.config.log.get', 'config.log.get', (['"""level"""', '"""DEBUG"""'], {}), "('level', 'DEBUG')\n", (1747, 1765), False, 'from quant.config import config\n'), ((1781, 1822), 'quant.config.config.log.get', 'config.log.get', (['"""path"""', '"""/tmp/logs/Quant"""'], {}), "('path', '/tmp/logs/Quant')\n", (1795, 1822), False, 'from quant.config import config\n'), ((1838, 1873), 'quant.config.config.log.get', 'config.log.get', (['"""name"""', '"""quant.log"""'], {}), "('name', 'quant.log')\n", (1852, 1873), False, 'from quant.config import config\n'), ((1890, 1920), 'quant.config.config.log.get', 'config.log.get', (['"""clear"""', '(False)'], {}), "('clear', False)\n", (1904, 1920), False, 'from quant.config import config\n'), ((1944, 1977), 'quant.config.config.log.get', 'config.log.get', (['"""backup_count"""', '(0)'], {}), "('backup_count', 0)\n", (1958, 1977), False, 'from quant.config import config\n'), ((1340, 1364), 'asyncio.get_event_loop', 'asyncio.get_event_loop', ([], {}), '()\n', (1362, 1364), False, 'import 
asyncio\n'), ((2010, 2034), 'quant.utils.logger.initLogger', 'logger.initLogger', (['level'], {}), '(level)\n', (2027, 2034), False, 'from quant.utils import logger\n'), ((2061, 2118), 'quant.utils.logger.initLogger', 'logger.initLogger', (['level', 'path', 'name', 'clear', 'backup_count'], {}), '(level, path, name, clear, backup_count)\n', (2078, 2118), False, 'from quant.utils import logger\n'), ((2301, 2314), 'quant.event.EventCenter', 'EventCenter', ([], {}), '()\n', (2312, 2314), False, 'from quant.event import EventCenter\n')] |
"""
extract configuration
"""
import re
_prompt = "ConfigExtractor > "
def _printConfig(str):
print('%s%s' % (_prompt,str))
def _Service(str):
tmpSvcNew = None
tmpSvcOld = None
# get new service
try:
svcMgr = theApp.serviceMgr()
tmpSvcNew = getattr(svcMgr,str)
except Exception:
pass
# get old service
try:
tmpSvcOld = Service(str)
except Exception:
pass
# return old one for 12.0.6
if tmpSvcOld is not None:
return tmpSvcOld
return tmpSvcNew
def _Algorithm(str):
try:
return Algorithm(str)
except Exception:
return None
######################
# input
# Determine the job's primary input source (tag collection / POOL /
# ByteStream) and echo a machine-parseable line via _printConfig.
EventSelector = _Service( "EventSelector" )
if hasattr(EventSelector,'InputCollections') and hasattr(EventSelector.InputCollections,'__len__') \
       and len(EventSelector.InputCollections):
    # POOL
    if hasattr(EventSelector,"CollectionType") and hasattr(EventSelector.CollectionType,'__len__') \
           and len(EventSelector.CollectionType) and EventSelector.CollectionType == "ExplicitROOT":
        # tag collection
        _printConfig('Input=COLL')
        # reference
        try:
            if EventSelector.RefName is not None:
                _printConfig('Input=COLLREF %s' % EventSelector.RefName)
        except Exception:
            pass
        # query
        try:
            if EventSelector.Query is not None:
                _printConfig('Input=COLLQUERY %s' % EventSelector.Query)
        except Exception:
            pass
    else:
        # normal POOL
        _printConfig('Input=POOL')
        # file list
        str = 'InputFiles '
        for file in EventSelector.InputCollections:
            str += '%s ' % file.split('/')[-1]
        _printConfig(str)
else:
    # ByteStream
    noInputFlag = True
    # both _Service and Service need to be checked due to Configurable
    compList = []
    try:
        compList.append(_Service( "ByteStreamInputSvc" ))
    except Exception:
        pass
    try:
        compList.append(Service( "ByteStreamInputSvc" ))
    except Exception:
        pass
    for ByteStreamInputSvc in compList:
        if (hasattr(ByteStreamInputSvc,'FullFileName') and hasattr(ByteStreamInputSvc.FullFileName,'__len__')
            and len(ByteStreamInputSvc.FullFileName)) or \
           (hasattr(ByteStreamInputSvc,'FilePrefix') and hasattr(ByteStreamInputSvc.FilePrefix,'__len__')
            and len(ByteStreamInputSvc.FilePrefix)):
            _printConfig('Input=BS')
            noInputFlag = False
            break
    if noInputFlag:
        _printConfig('No Input')
# back navigation
if hasattr(EventSelector,'BackNavigation') and EventSelector.BackNavigation == True:
    _printConfig('BackNavigation=ON')
# minimum bias
minBiasEventSelector = _Service( "minBiasEventSelector" )
if hasattr(minBiasEventSelector,'InputCollections') and hasattr(minBiasEventSelector.InputCollections,'__len__') \
       and len(minBiasEventSelector.InputCollections):
    _printConfig('Input=MINBIAS')
# cavern
cavernEventSelector = _Service( "cavernEventSelector" )
if hasattr(cavernEventSelector,'InputCollections') and hasattr(cavernEventSelector.InputCollections,'__len__') \
       and len(cavernEventSelector.InputCollections):
    _printConfig('Input=CAVERN')
# beam gas
BeamGasEventSelector = _Service( "BeamGasEventSelector" )
if hasattr(BeamGasEventSelector,'InputCollections') and hasattr(BeamGasEventSelector.InputCollections,'__len__') \
       and len(BeamGasEventSelector.InputCollections):
    _printConfig('Input=BEAMGAS')
# beam halo
BeamHaloEventSelector = _Service( "BeamHaloEventSelector" )
if hasattr(BeamHaloEventSelector,'InputCollections') and hasattr(BeamHaloEventSelector.InputCollections,'__len__') \
       and len(BeamHaloEventSelector.InputCollections):
    _printConfig('Input=BEAMHALO')
# condition files
CondProxyProvider = _Service( "CondProxyProvider" )
if hasattr(CondProxyProvider,'InputCollections') and hasattr(CondProxyProvider.InputCollections,'__len__') \
       and len(CondProxyProvider.InputCollections):
    condStr = ''
    # collect physical condition files; LFN: entries are skipped here
    for fName in CondProxyProvider.InputCollections:
        if not fName.startswith('LFN:'):
            condStr += "%s," % fName
    if condStr != '':
        retStr = "CondInput %s" % condStr
        # drop the trailing comma
        retStr = retStr[:-1]
        _printConfig(retStr)
######################
# configurable
# Collect the full names of all Configurables reachable through the job's
# AlgSequence(s); everything is wrapped in try/except because older Athena
# releases may not provide the configurable machinery at all.
_configs = []
seqList = []
try:
    # get all Configurable names
    from AthenaCommon.AlgSequence import AlgSequence
    tmpKeys = AlgSequence().allConfigurables.keys()
    # get AlgSequences
    seqList = [AlgSequence()]
    try:
        for key in tmpKeys:
            # check if it is available via AlgSequence
            if not hasattr(AlgSequence(),key.split('/')[-1]):
                continue
            # get full name
            tmpConf = getattr(AlgSequence(),key.split('/')[-1])
            if hasattr(tmpConf,'getFullName'):
                tmpFullName = tmpConf.getFullName()
                # append AthSequencer
                if tmpFullName.startswith('AthSequencer/'):
                    seqList.append(tmpConf)
    except Exception:
        pass
    # loop over all sequences
    for tmpAlgSequence in seqList:
        # loop over keys
        for key in tmpKeys:
            # check if it is available via AlgSequence
            if not hasattr(tmpAlgSequence,key.split('/')[-1]):
                continue
            # get fullname
            if key.find('/') == -1:
                if hasattr(tmpAlgSequence,key):
                    tmpAlg = getattr(tmpAlgSequence,key)
                    if hasattr(tmpAlg,'getFullName'):
                        _configs.append(getattr(tmpAlgSequence,key).getFullName())
                    elif hasattr(tmpAlg,'getName') and hasattr(tmpAlg,'getType'):
                        # ServiceHandle
                        _configs.append('%s/%s' % (tmpAlg.getType(),tmpAlg.getName()))
                    else:
                        # use short name if it doesn't have getFullName
                        _configs.append(key)
            else:
                _configs.append(key)
except Exception:
    pass
def _getConfig(key):
if seqList == []:
from AthenaCommon.AlgSequence import AlgSequence
return getattr(AlgSequence(),key.split('/')[-1])
else:
for tmpAlgSequence in seqList:
if hasattr(tmpAlgSequence,key.split('/')[-1]):
return getattr(tmpAlgSequence,key.split('/')[-1])
######################
# output
# hist
HistogramPersistencySvc=_Service("HistogramPersistencySvc")
if hasattr(HistogramPersistencySvc,'OutputFile') and hasattr(HistogramPersistencySvc.OutputFile,'__len__') \
and len(HistogramPersistencySvc.OutputFile):
_printConfig('Output=HIST')
_printConfig(' Name: %s' % HistogramPersistencySvc.OutputFile)
# ntuple
NTupleSvc = _Service( "NTupleSvc" )
if hasattr(NTupleSvc,'Output') and hasattr(NTupleSvc.Output,'__len__') and len(NTupleSvc.Output):
# look for streamname
for item in NTupleSvc.Output:
match = re.search("(\S+)\s+DATAFILE",item)
if match is not None:
sName = item.split()[0]
_printConfig('Output=NTUPLE %s' % sName)
# extract name
fmatch = re.search("DATAFILE=(\S+)\s",item)
if fmatch is not None:
fName = fmatch.group(1)
fName = re.sub('[\"\']','',fName)
fName = fName.split('/')[-1]
_printConfig(' Name: %s'% fName)
streamOutputFiles = {}
ignoreMetaFiles = []
# RDO
foundStreamRD0 = False
key = "AthenaOutputStream/StreamRDO"
if key in _configs:
StreamRDO = _getConfig( key )
else:
StreamRDO = _Algorithm( key.split('/')[-1] )
if hasattr(StreamRDO,'OutputFile') and hasattr(StreamRDO.OutputFile,'__len__') and len(StreamRDO.OutputFile):
streamOutputFiles[key.split('/')[-1]] = StreamRDO.OutputFile
_printConfig('Output=RDO %s' % StreamRDO.OutputFile)
_printConfig(' Name: %s'% StreamRDO.OutputFile)
foundStreamRD0 = True
ignoreMetaFiles.append(StreamRDO.OutputFile)
# ESD
foundStreamESD = False
key = "AthenaOutputStream/StreamESD"
if key in _configs:
StreamESD = _getConfig( key )
else:
StreamESD = _Algorithm( key.split('/')[-1] )
if hasattr(StreamESD,'OutputFile') and hasattr(StreamESD.OutputFile,'__len__') and len(StreamESD.OutputFile):
streamOutputFiles[key.split('/')[-1]] = StreamESD.OutputFile
_printConfig('Output=ESD %s' % StreamESD.OutputFile)
_printConfig(' Name: %s'% StreamESD.OutputFile)
foundStreamESD = True
ignoreMetaFiles.append(StreamESD.OutputFile)
# AOD
foundStreamAOD = False
key = "AthenaOutputStream/StreamAOD"
if key in _configs:
StreamAOD = _getConfig( key )
else:
StreamAOD = _Algorithm( key.split('/')[-1] )
if hasattr(StreamAOD,'OutputFile') and hasattr(StreamAOD.OutputFile,'__len__') and len(StreamAOD.OutputFile):
streamOutputFiles[key.split('/')[-1]] = StreamAOD.OutputFile
_printConfig('Output=AOD %s' % StreamAOD.OutputFile)
_printConfig(' Name: %s'% StreamAOD.OutputFile)
foundStreamAOD = True
ignoreMetaFiles.append(StreamAOD.OutputFile)
# TAG
keys = ["AthenaOutputStream/StreamTAG","RegistrationStream/StreamTAG"]
foundKey = False
for key in keys:
if key in _configs:
StreamTAG = _getConfig( key )
foundKey = True
break
if not foundKey:
StreamTAG = _Algorithm( key.split('/')[-1] )
if hasattr(StreamTAG,'OutputCollection') and hasattr(StreamTAG.OutputCollection,'__len__') and \
len(StreamTAG.OutputCollection):
_printConfig('Output=TAG')
_printConfig(' Name: %s'% StreamTAG.OutputCollection)
# TAGCOM
keys = ["AthenaOutputStream/StreamTAGCOM","RegistrationStream/StreamTAGCOM"]
foundKey = False
for key in keys:
if key in _configs:
StreamTAGX = _getConfig( key )
foundKey = True
break
if not foundKey:
StreamTAGX = _Algorithm( key.split('/')[-1] )
if hasattr(StreamTAGX,'OutputCollection') and hasattr(StreamTAGX.OutputCollection,'__len__') and \
len(StreamTAGX.OutputCollection):
_printConfig('Output=TAGX %s %s' % (StreamTAGX.name(),StreamTAGX.OutputCollection))
_printConfig(' Name: %s'% StreamTAGX.OutputCollection)
# AANT
aantStream = []
appStList = []
for alg in theApp.TopAlg+_configs:
if alg.startswith("AANTupleStream" ):
aName = alg.split('/')[-1]
if alg in _configs:
AANTupleStream = _getConfig(alg)
else:
AANTupleStream = Algorithm(aName)
if hasattr(AANTupleStream.OutputName,'__len__') and len(AANTupleStream.OutputName):
fName = AANTupleStream.OutputName
# look for streamname
THistSvc = _Service( "THistSvc" )
if hasattr(THistSvc.Output,'__len__') and len(THistSvc.Output):
for item in THistSvc.Output:
if re.search(fName,item):
sName = item.split()[0]
# check stream name
if hasattr(AANTupleStream,'StreamName'):
if AANTupleStream.StreamName != sName:
continue
aantStream.append(sName)
tmpAantKey = (aName,sName,fName)
if tmpAantKey not in appStList:
_printConfig('Output=AANT %s %s %s' % (aName,sName,fName))
_printConfig(' Name: %s'% fName)
appStList.append(tmpAantKey)
break
# Stream1
key = "AthenaOutputStream/Stream1"
if key in _configs:
Stream1 = _getConfig( key )
elif hasattr(theApp._streams,key.split('/')[-1]):
Stream1 = getattr(theApp._streams,key.split('/')[-1])
else:
Stream1 = _Algorithm( key.split('/')[-1] )
if hasattr(Stream1,'OutputFile') and hasattr(Stream1.OutputFile,'__len__') and len(Stream1.OutputFile):
if (hasattr(Stream1,'Enable') and Stream1.Enable) or (not hasattr(Stream1,'Enable')):
streamOutputFiles[key.split('/')[-1]] = Stream1.OutputFile
_printConfig('Output=STREAM1 %s' % Stream1.OutputFile)
_printConfig(' Name: %s'% Stream1.OutputFile)
ignoreMetaFiles.append(Stream1.OutputFile)
# Stream2
key = "AthenaOutputStream/Stream2"
if key in _configs:
Stream2 = _getConfig( key )
elif hasattr(theApp._streams,key.split('/')[-1]):
Stream2 = getattr(theApp._streams,key.split('/')[-1])
else:
Stream2 = _Algorithm( key.split('/')[-1] )
if hasattr(Stream2,'OutputFile') and hasattr(Stream2.OutputFile,'__len__') and len(Stream2.OutputFile):
if (hasattr(Stream2,'Enable') and Stream2.Enable) or (not hasattr(Stream2,'Enable')):
streamOutputFiles[key.split('/')[-1]] = Stream2.OutputFile
_printConfig('Output=STREAM2 %s' % Stream2.OutputFile)
_printConfig(' Name: %s'% Stream2.OutputFile)
ignoreMetaFiles.append(Stream2.OutputFile)
# General Stream
strGenFName = ''
strGenStream = ''
strMetaStream = ''
ignoredStreamList = ['Stream1','Stream2','StreamBS','StreamBSFileOutput']
if foundStreamRD0:
# for old releases where StreamRDO was an algorithm
ignoredStreamList += ['StreamRDO']
if foundStreamESD:
# for streamESD defined as an algorithm
ignoredStreamList += ['StreamESD']
if foundStreamAOD:
# for streamAOD defined as an algorithm
ignoredStreamList += ['StreamAOD']
desdStreams = {}
try:
metaStreams = []
for genStream in theApp._streams.getAllChildren()+AlgSequence().getAllChildren():
# check name
fullName = genStream.getFullName()
if (fullName.split('/')[0] == 'AthenaOutputStream' or fullName.split('/')[0] == 'Athena::RootNtupleOutputStream') \
and (not fullName.split('/')[-1] in ignoredStreamList):
if hasattr(genStream,'OutputFile') and hasattr(genStream.OutputFile,'__len__') and len(genStream.OutputFile):
if (hasattr(genStream,'Enable') and genStream.Enable) or (not hasattr(genStream,'Enable')):
# keep meta data
if genStream.OutputFile.startswith("ROOTTREE:") or \
(hasattr(genStream,'WriteOnFinalize') and genStream.WriteOnFinalize):
metaStreams.append(genStream)
elif fullName.split('/')[-1].startswith('StreamDESD'):
# ignore StreamDESD to treat it as multiple-streams later
continue
else:
strGenStream += '%s:%s,' % (fullName.split('/')[-1],genStream.OutputFile)
streamOutputFiles[fullName.split('/')[-1]] = genStream.OutputFile
strGenFName = genStream.OutputFile
ignoreMetaFiles.append(genStream.OutputFile)
# associate meta stream
for mStream in metaStreams:
metaOutName = mStream.OutputFile.split(':')[-1]
assStream = None
# look for associated stream
for stName in streamOutputFiles:
stOut = streamOutputFiles[stName]
if metaOutName == stOut:
assStream = stName
break
# ignore meta stream since renaming is used instead of changing jobO
if metaOutName in ignoreMetaFiles:
continue
# print meta stream
if assStream is not None:
_printConfig('Output=META %s %s' % (mStream.getFullName().split('/')[1],assStream))
_printConfig(' Name: %s'% metaOutName)
except Exception:
pass
if strGenStream != '':
strGenStream = strGenStream[:-1]
_printConfig('Output=STREAMG %s' % strGenStream)
_printConfig(' Name: %s'% strGenFName)
if desdStreams != {}:
for tmpStreamName in desdStreams:
tmpOutFileName = desdStreams[tmpStreamName]
_printConfig('Output=DESD %s' % tmpStreamName)
_printConfig(' Name: %s'% tmpOutFileName)
# THIST
userDataSvcStream = {}
usedTHistStreams = []
THistSvc = _Service( "THistSvc" )
if hasattr(THistSvc.Output,'__len__') and len(THistSvc.Output):
for item in THistSvc.Output:
sName = item.split()[0]
if sName not in aantStream:
# extract name
fmatch = re.search("DATAFILE=(\S+)\s",item)
fName = None
if fmatch is not None:
fName = fmatch.group(1)
fName = re.sub('[\"\']','',fName)
fName = fName.split('/')[-1]
# keep output of UserDataSvc
if sName in ['userdataoutputstream'] or sName.startswith('userdataoutputstream'):
userDataSvcStream[sName] = fName
continue
# skip if defined in StreamG
if strGenFName != '' and fName == strGenFName:
continue
_printConfig('Output=THIST %s' % sName)
if fmatch is not None:
_printConfig(' Name: %s'% fName)
# ROOT outputs for interactive Athena
import ROOT
fList = ROOT.gROOT.GetListOfFiles()
for index in range(fList.GetSize()):
if fList[index].GetOption() == 'CREATE':
_printConfig('Output=IROOT %s' % fList[index].GetName())
_printConfig(' Name: %s'% fList[index].GetName())
# BS
ByteStreamCnvSvc = _Service("ByteStreamCnvSvc")
if hasattr(ByteStreamCnvSvc,'ByteStreamOutputSvc') and \
ByteStreamCnvSvc.ByteStreamOutputSvc=="ByteStreamEventStorageOutputSvc":
_printConfig('Output=BS')
elif hasattr(ByteStreamCnvSvc,'ByteStreamOutputSvcList') and \
'ByteStreamEventStorageOutputSvc' in ByteStreamCnvSvc.ByteStreamOutputSvcList:
_printConfig('Output=BS')
# selected BS
BSESOutputSvc = _Service("BSESOutputSvc")
if hasattr(BSESOutputSvc,'SimpleFileName'):
_printConfig('Output=SelBS %s' % BSESOutputSvc.SimpleFileName)
_printConfig(' Name: %s'% BSESOutputSvc.SimpleFileName)
# MultipleStream
try:
from OutputStreamAthenaPool.MultipleStreamManager import MSMgr
for tmpStream in MSMgr.StreamList:
# avoid duplication
if not tmpStream.Name in streamOutputFiles.keys():
# remove prefix
tmpFileBaseName = tmpStream.Stream.OutputFile.split(':')[-1]
_printConfig('Output=MS %s %s' % (tmpStream.Name,tmpFileBaseName))
_printConfig(' Name: %s'% tmpFileBaseName)
except Exception:
pass
# UserDataSvc
if userDataSvcStream != {}:
for userStName in userDataSvcStream:
userFileName = userDataSvcStream[userStName]
findStream = False
# look for associated stream
for stName in streamOutputFiles:
stOut = streamOutputFiles[stName]
if userFileName == stOut:
_printConfig('Output=USERDATA %s' % stName)
findStream = True
break
# use THIST if not found
if not findStream:
_printConfig('Output=THIST %s' % userStName)
_printConfig(' Name: %s'% userFileName)
######################
# random number
AtRndmGenSvc = _Service( "AtRndmGenSvc" )
if hasattr(AtRndmGenSvc,'Seeds') and hasattr(AtRndmGenSvc.Seeds,'__len__') and len(AtRndmGenSvc.Seeds):
# random seeds
for item in AtRndmGenSvc.Seeds:
_printConfig('RndmStream %s' % item.split()[0])
import types
if hasattr(AtRndmGenSvc,'ReadFromFile') and isinstance(AtRndmGenSvc.ReadFromFile,types.BooleanType) and AtRndmGenSvc.ReadFromFile:
# read from file
rndFileName = "AtRndmGenSvc.out"
if hasattr(AtRndmGenSvc.FileToRead,'__len__') and len(AtRndmGenSvc.FileToRead):
rndFileName = AtRndmGenSvc.FileToRead
_printConfig('RndmGenFile %s' % rndFileName)
# G4 random seed
try:
if hasattr(SimFlags,'SeedsG4'):
_printConfig('G4RandomSeeds')
except Exception:
pass
| [
"re.search",
"AthenaCommon.AlgSequence.AlgSequence",
"ROOT.gROOT.GetListOfFiles",
"re.sub"
] | [((17136, 17163), 'ROOT.gROOT.GetListOfFiles', 'ROOT.gROOT.GetListOfFiles', ([], {}), '()\n', (17161, 17163), False, 'import ROOT\n'), ((4625, 4638), 'AthenaCommon.AlgSequence.AlgSequence', 'AlgSequence', ([], {}), '()\n', (4636, 4638), False, 'from AthenaCommon.AlgSequence import AlgSequence\n'), ((7126, 7163), 're.search', 're.search', (['"""(\\\\S+)\\\\s+DATAFILE"""', 'item'], {}), "('(\\\\S+)\\\\s+DATAFILE', item)\n", (7135, 7163), False, 'import re\n'), ((6313, 6326), 'AthenaCommon.AlgSequence.AlgSequence', 'AlgSequence', ([], {}), '()\n', (6324, 6326), False, 'from AthenaCommon.AlgSequence import AlgSequence\n'), ((7328, 7365), 're.search', 're.search', (['"""DATAFILE=(\\\\S+)\\\\s"""', 'item'], {}), "('DATAFILE=(\\\\S+)\\\\s', item)\n", (7337, 7365), False, 'import re\n'), ((16377, 16414), 're.search', 're.search', (['"""DATAFILE=(\\\\S+)\\\\s"""', 'item'], {}), "('DATAFILE=(\\\\S+)\\\\s', item)\n", (16386, 16414), False, 'import re\n'), ((4549, 4562), 'AthenaCommon.AlgSequence.AlgSequence', 'AlgSequence', ([], {}), '()\n', (4560, 4562), False, 'from AthenaCommon.AlgSequence import AlgSequence\n'), ((4877, 4890), 'AthenaCommon.AlgSequence.AlgSequence', 'AlgSequence', ([], {}), '()\n', (4888, 4890), False, 'from AthenaCommon.AlgSequence import AlgSequence\n'), ((7462, 7488), 're.sub', 're.sub', (['"""["\']"""', '""""""', 'fName'], {}), '(\'["\\\']\', \'\', fName)\n', (7468, 7488), False, 'import re\n'), ((13648, 13661), 'AthenaCommon.AlgSequence.AlgSequence', 'AlgSequence', ([], {}), '()\n', (13659, 13661), False, 'from AthenaCommon.AlgSequence import AlgSequence\n'), ((16536, 16562), 're.sub', 're.sub', (['"""["\']"""', '""""""', 'fName'], {}), '(\'["\\\']\', \'\', fName)\n', (16542, 16562), False, 'import re\n'), ((4759, 4772), 'AthenaCommon.AlgSequence.AlgSequence', 'AlgSequence', ([], {}), '()\n', (4770, 4772), False, 'from AthenaCommon.AlgSequence import AlgSequence\n'), ((10983, 11005), 're.search', 're.search', (['fName', 'item'], {}), '(fName, 
item)\n', (10992, 11005), False, 'import re\n')] |
# Project: File Volume Indexer
# Author: <NAME>
# Date Started: February 28, 2019
# Copyright: (c) Copyright 2019 <NAME>
# Module: FrameScroller
# Purpose: View for managing scans of volumes and sub volumes.
# Development:
# Instructions for use:
# Since the content of a scrollableFrame must have the scrollable Frame as its parent, the scrollable
# Frame must be obtained from the scroller Frame and the pacing or gridding of the content Frame
# makes it visible in the scroller Frame. This is categorically outside of the control of this module,
# unless the user passes a json frame definition in and it is created here. If so, then the user must
# still retrieve the constructed frame to make it the parent of the components it contains. They could
# also pass in an entire json framem definition including all components, in which case this module
# will have a method to obtain reverences to any of the components constructed by passing their names in.
# The standard name path can be used for the name to make this entirely general for complex nesting structures
# with repeated component names at different levels.
#
# 2021-08-25:
# Copied from VolumeIndexer project.
#
from tkinter import Frame, LabelFrame, Tk, Text, Scrollbar, Label, \
BOTTOM, W, RIGHT, X, Y, VERTICAL, HORIZONTAL, BOTH, INSERT
class FrameScroller(LabelFrame):
    """A LabelFrame that hosts a scrollable Frame.

    The scrollable area is implemented with a Text widget holding an
    embedded Frame window; vertical and horizontal Scrollbars are wired to
    the Text widget.  Client code must obtain the inner Frame via
    getScrollerFrame() and use it as the parent of any content it wants
    scrolled (see the module header for usage notes).
    """

    def __init__(self, container, name: str, **keyWordArguments):
        """Build the scroller inside *container*.

        Keyword arguments:
            minimize (bool): when True, the Text scroller is packed at its
                natural size and the embedded frame does not stretch;
                otherwise the scroller fills all available space.
        """
        LabelFrame.__init__(self, container, name=name)
        if "minimize" in keyWordArguments and isinstance( keyWordArguments["minimize"], bool ) and keyWordArguments["minimize"]:
            self.minimize = keyWordArguments["minimize"]
            self.stretch = False
        else:
            self.stretch = True
            self.minimize = False
        self.textScroller = Text(self, name="textScroller")
        self.scrollerFrame = Frame(self.textScroller, name="scrollerFrame")
        # Embed the frame in the Text widget so the Text's scrolling
        # machinery scrolls the frame's content.
        self.textScroller.window_create(INSERT, window=self.scrollerFrame, stretch=self.stretch, align=BOTTOM)
        self.scrollbarVert = Scrollbar(self, name="scrollbarVert", orient=VERTICAL)
        self.scrollbarHorz = Scrollbar(self, name="scrollbarHorz", orient=HORIZONTAL)
        self.scrollbarHorz.pack(side=BOTTOM, anchor=W, fill=X)
        self.scrollbarVert.pack(side=RIGHT, fill=Y)
        # Two-way wiring: the Text drives the scrollbar thumbs, the
        # scrollbars drive the Text's view.
        self.textScroller.config(yscrollcommand=self.scrollbarVert.set)
        self.textScroller.config(xscrollcommand=self.scrollbarHorz.set)
        self.scrollbarVert.config(command=self.textScroller.yview)
        self.scrollbarHorz.config(command=self.textScroller.xview)
        if self.minimize:
            self.textScroller.pack()
        else:
            self.textScroller.pack(fill=BOTH, expand=True)

    def getScrollerFrame(self):
        """Return the inner Frame that content widgets should use as parent."""
        return self.scrollerFrame
if __name__ == "__main__":
    # Manual smoke test: show a FrameScroller containing one long Label.
    print("FrameScroller running")
    root = Tk()
    root.geometry("300x400+300+100")
    scroller = FrameScroller(root, "frameScroller")
    demoText = 'Since the content of a scrollableFrame must have the scrollable Frame as its parent, the scrollable Frame must be obtained from the scroller Frame and the pacing or gridding of the content Frame makes it visible in the scroller Frame. This is categorically outside of the control of this module,'
    demoLabel = Label(scroller.getScrollerFrame(), name="label", width=100,
                      text=demoText)
    demoLabel.pack()
    scroller.pack()
    root.mainloop()
mainView.mainloop() | [
"tkinter.Text",
"tkinter.LabelFrame.__init__",
"tkinter.Scrollbar",
"tkinter.Frame",
"tkinter.Tk"
] | [((3122, 3126), 'tkinter.Tk', 'Tk', ([], {}), '()\n', (3124, 3126), False, 'from tkinter import Frame, LabelFrame, Tk, Text, Scrollbar, Label, BOTTOM, W, RIGHT, X, Y, VERTICAL, HORIZONTAL, BOTH, INSERT\n'), ((1595, 1642), 'tkinter.LabelFrame.__init__', 'LabelFrame.__init__', (['self', 'container'], {'name': 'name'}), '(self, container, name=name)\n', (1614, 1642), False, 'from tkinter import Frame, LabelFrame, Tk, Text, Scrollbar, Label, BOTTOM, W, RIGHT, X, Y, VERTICAL, HORIZONTAL, BOTH, INSERT\n'), ((1987, 2018), 'tkinter.Text', 'Text', (['self'], {'name': '"""textScroller"""'}), "(self, name='textScroller')\n", (1991, 2018), False, 'from tkinter import Frame, LabelFrame, Tk, Text, Scrollbar, Label, BOTTOM, W, RIGHT, X, Y, VERTICAL, HORIZONTAL, BOTH, INSERT\n'), ((2053, 2099), 'tkinter.Frame', 'Frame', (['self.textScroller'], {'name': '"""scrollerFrame"""'}), "(self.textScroller, name='scrollerFrame')\n", (2058, 2099), False, 'from tkinter import Frame, LabelFrame, Tk, Text, Scrollbar, Label, BOTTOM, W, RIGHT, X, Y, VERTICAL, HORIZONTAL, BOTH, INSERT\n'), ((2242, 2296), 'tkinter.Scrollbar', 'Scrollbar', (['self'], {'name': '"""scrollbarVert"""', 'orient': 'VERTICAL'}), "(self, name='scrollbarVert', orient=VERTICAL)\n", (2251, 2296), False, 'from tkinter import Frame, LabelFrame, Tk, Text, Scrollbar, Label, BOTTOM, W, RIGHT, X, Y, VERTICAL, HORIZONTAL, BOTH, INSERT\n'), ((2326, 2382), 'tkinter.Scrollbar', 'Scrollbar', (['self'], {'name': '"""scrollbarHorz"""', 'orient': 'HORIZONTAL'}), "(self, name='scrollbarHorz', orient=HORIZONTAL)\n", (2335, 2382), False, 'from tkinter import Frame, LabelFrame, Tk, Text, Scrollbar, Label, BOTTOM, W, RIGHT, X, Y, VERTICAL, HORIZONTAL, BOTH, INSERT\n')] |
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Tests for the API /deploy_templates/ methods.
"""
import datetime
from http import client as http_client
from unittest import mock
from urllib import parse as urlparse
from oslo_config import cfg
from oslo_utils import timeutils
from oslo_utils import uuidutils
from ironic.api.controllers import base as api_base
from ironic.api.controllers import v1 as api_v1
from ironic.api.controllers.v1 import notification_utils
from ironic.common import exception
from ironic import objects
from ironic.objects import fields as obj_fields
from ironic.tests.unit.api import base as test_api_base
from ironic.tests.unit.api import utils as test_api_utils
from ironic.tests.unit.objects import utils as obj_utils
def _obj_to_api_step(obj_step):
"""Convert a deploy step in 'object' form to one in 'API' form."""
return {
'interface': obj_step['interface'],
'step': obj_step['step'],
'args': obj_step['args'],
'priority': obj_step['priority'],
}
class BaseDeployTemplatesAPITest(test_api_base.BaseApiTest):
    # Request the newest supported API microversion, so deploy-template
    # endpoints are available to the tests.
    headers = {api_base.Version.string: str(api_v1.max_version())}
    # Named "invalid" because it should be rejected by these endpoints --
    # presumably 1.54 predates deploy-template support (verify against
    # the API version history).
    invalid_version_headers = {api_base.Version.string: '1.54'}
class TestListDeployTemplates(BaseDeployTemplatesAPITest):
    """Tests for GET /deploy_templates and GET /deploy_templates/<ident>.

    Covers listing (plain, detail, custom fields, sorting, pagination
    links) and single-template retrieval (by UUID, with .json suffix,
    field selection, API-version gating, sensitive-data masking).
    """
    def test_empty(self):
        data = self.get_json('/deploy_templates', headers=self.headers)
        self.assertEqual([], data['deploy_templates'])
    def test_one(self):
        template = obj_utils.create_test_deploy_template(self.context)
        data = self.get_json('/deploy_templates', headers=self.headers)
        self.assertEqual(1, len(data['deploy_templates']))
        self.assertEqual(template.uuid, data['deploy_templates'][0]['uuid'])
        self.assertEqual(template.name, data['deploy_templates'][0]['name'])
        # The non-detail listing omits steps and extra.
        self.assertNotIn('steps', data['deploy_templates'][0])
        self.assertNotIn('extra', data['deploy_templates'][0])
    def test_get_one(self):
        template = obj_utils.create_test_deploy_template(self.context)
        data = self.get_json('/deploy_templates/%s' % template.uuid,
                             headers=self.headers)
        self.assertEqual(template.uuid, data['uuid'])
        self.assertEqual(template.name, data['name'])
        self.assertEqual(template.extra, data['extra'])
        for t_dict_step, t_step in zip(data['steps'], template.steps):
            self.assertEqual(t_dict_step['interface'], t_step['interface'])
            self.assertEqual(t_dict_step['step'], t_step['step'])
            self.assertEqual(t_dict_step['args'], t_step['args'])
            self.assertEqual(t_dict_step['priority'], t_step['priority'])
    def test_get_one_with_json(self):
        template = obj_utils.create_test_deploy_template(self.context)
        data = self.get_json('/deploy_templates/%s.json' % template.uuid,
                             headers=self.headers)
        self.assertEqual(template.uuid, data['uuid'])
    def test_get_one_with_suffix(self):
        template = obj_utils.create_test_deploy_template(self.context,
                                                         name='CUSTOM_DT1')
        data = self.get_json('/deploy_templates/%s' % template.uuid,
                             headers=self.headers)
        self.assertEqual(template.uuid, data['uuid'])
    def test_get_one_custom_fields(self):
        template = obj_utils.create_test_deploy_template(self.context)
        fields = 'name,steps'
        data = self.get_json(
            '/deploy_templates/%s?fields=%s' % (template.uuid, fields),
            headers=self.headers)
        # We always append "links"
        self.assertCountEqual(['name', 'steps', 'links'], data)
    def test_get_collection_custom_fields(self):
        fields = 'uuid,steps'
        for i in range(3):
            obj_utils.create_test_deploy_template(
                self.context,
                uuid=uuidutils.generate_uuid(),
                name='CUSTOM_DT%s' % i)
        data = self.get_json(
            '/deploy_templates?fields=%s' % fields,
            headers=self.headers)
        self.assertEqual(3, len(data['deploy_templates']))
        for template in data['deploy_templates']:
            # We always append "links"
            self.assertCountEqual(['uuid', 'steps', 'links'], template)
    def test_get_custom_fields_invalid_fields(self):
        template = obj_utils.create_test_deploy_template(self.context)
        fields = 'uuid,spongebob'
        response = self.get_json(
            '/deploy_templates/%s?fields=%s' % (template.uuid, fields),
            headers=self.headers, expect_errors=True)
        self.assertEqual(http_client.BAD_REQUEST, response.status_int)
        self.assertEqual('application/json', response.content_type)
        self.assertIn('spongebob', response.json['error_message'])
    def test_get_all_invalid_api_version(self):
        obj_utils.create_test_deploy_template(self.context)
        response = self.get_json('/deploy_templates',
                                 headers=self.invalid_version_headers,
                                 expect_errors=True)
        self.assertEqual(http_client.NOT_FOUND, response.status_int)
    def test_get_one_invalid_api_version(self):
        template = obj_utils.create_test_deploy_template(self.context)
        response = self.get_json(
            '/deploy_templates/%s' % (template.uuid),
            headers=self.invalid_version_headers,
            expect_errors=True)
        self.assertEqual(http_client.NOT_FOUND, response.status_int)
    def test_detail_query(self):
        template = obj_utils.create_test_deploy_template(self.context)
        data = self.get_json('/deploy_templates?detail=True',
                             headers=self.headers)
        self.assertEqual(template.uuid, data['deploy_templates'][0]['uuid'])
        self.assertIn('name', data['deploy_templates'][0])
        self.assertIn('steps', data['deploy_templates'][0])
        self.assertIn('extra', data['deploy_templates'][0])
    def test_detail_query_false(self):
        obj_utils.create_test_deploy_template(self.context)
        # detail=False must match the default (no detail parameter).
        data1 = self.get_json('/deploy_templates', headers=self.headers)
        data2 = self.get_json(
            '/deploy_templates?detail=False', headers=self.headers)
        self.assertEqual(data1['deploy_templates'], data2['deploy_templates'])
    def test_detail_using_query_false_and_fields(self):
        obj_utils.create_test_deploy_template(self.context)
        data = self.get_json(
            '/deploy_templates?detail=False&fields=steps',
            headers=self.headers)
        self.assertIn('steps', data['deploy_templates'][0])
        self.assertNotIn('uuid', data['deploy_templates'][0])
        self.assertNotIn('extra', data['deploy_templates'][0])
    def test_detail_using_query_and_fields(self):
        # detail=True combined with fields is rejected.
        obj_utils.create_test_deploy_template(self.context)
        response = self.get_json(
            '/deploy_templates?detail=True&fields=name', headers=self.headers,
            expect_errors=True)
        self.assertEqual(http_client.BAD_REQUEST, response.status_int)
    def test_many(self):
        templates = []
        for id_ in range(5):
            template = obj_utils.create_test_deploy_template(
                self.context, uuid=uuidutils.generate_uuid(),
                name='CUSTOM_DT%s' % id_)
            templates.append(template.uuid)
        data = self.get_json('/deploy_templates', headers=self.headers)
        self.assertEqual(len(templates), len(data['deploy_templates']))
        uuids = [n['uuid'] for n in data['deploy_templates']]
        self.assertCountEqual(templates, uuids)
    def test_links(self):
        uuid = uuidutils.generate_uuid()
        obj_utils.create_test_deploy_template(self.context, uuid=uuid)
        data = self.get_json('/deploy_templates/%s' % uuid,
                             headers=self.headers)
        self.assertIn('links', data)
        self.assertEqual(2, len(data['links']))
        self.assertIn(uuid, data['links'][0]['href'])
        for link in data['links']:
            bookmark = link['rel'] == 'bookmark'
            self.assertTrue(self.validate_link(link['href'], bookmark=bookmark,
                                               headers=self.headers))
    def test_collection_links(self):
        templates = []
        for id_ in range(5):
            template = obj_utils.create_test_deploy_template(
                self.context, uuid=uuidutils.generate_uuid(),
                name='CUSTOM_DT%s' % id_)
            templates.append(template.uuid)
        data = self.get_json('/deploy_templates/?limit=3',
                             headers=self.headers)
        self.assertEqual(3, len(data['deploy_templates']))
        next_marker = data['deploy_templates'][-1]['uuid']
        self.assertIn(next_marker, data['next'])
    def test_collection_links_default_limit(self):
        # With no explicit limit, the configured max_limit caps the page.
        cfg.CONF.set_override('max_limit', 3, 'api')
        templates = []
        for id_ in range(5):
            template = obj_utils.create_test_deploy_template(
                self.context, uuid=uuidutils.generate_uuid(),
                name='CUSTOM_DT%s' % id_)
            templates.append(template.uuid)
        data = self.get_json('/deploy_templates', headers=self.headers)
        self.assertEqual(3, len(data['deploy_templates']))
        next_marker = data['deploy_templates'][-1]['uuid']
        self.assertIn(next_marker, data['next'])
    def test_collection_links_custom_fields(self):
        cfg.CONF.set_override('max_limit', 3, 'api')
        templates = []
        fields = 'uuid,steps'
        for i in range(5):
            template = obj_utils.create_test_deploy_template(
                self.context,
                uuid=uuidutils.generate_uuid(),
                name='CUSTOM_DT%s' % i)
            templates.append(template.uuid)
        data = self.get_json('/deploy_templates?fields=%s' % fields,
                             headers=self.headers)
        self.assertEqual(3, len(data['deploy_templates']))
        next_marker = data['deploy_templates'][-1]['uuid']
        self.assertIn(next_marker, data['next'])
        # The next-page URL carries the fields selection forward.
        self.assertIn('fields', data['next'])
    def test_get_collection_pagination_no_uuid(self):
        # Pagination works even when uuid is not among the requested fields.
        fields = 'name'
        limit = 2
        templates = []
        for id_ in range(3):
            template = obj_utils.create_test_deploy_template(
                self.context,
                uuid=uuidutils.generate_uuid(),
                name='CUSTOM_DT%s' % id_)
            templates.append(template)
        data = self.get_json(
            '/deploy_templates?fields=%s&limit=%s' % (fields, limit),
            headers=self.headers)
        self.assertEqual(limit, len(data['deploy_templates']))
        self.assertIn('marker=%s' % templates[limit - 1].uuid, data['next'])
    def test_sort_key(self):
        templates = []
        for id_ in range(3):
            template = obj_utils.create_test_deploy_template(
                self.context,
                uuid=uuidutils.generate_uuid(),
                name='CUSTOM_DT%s' % id_)
            templates.append(template.uuid)
        data = self.get_json('/deploy_templates?sort_key=uuid',
                             headers=self.headers)
        uuids = [n['uuid'] for n in data['deploy_templates']]
        self.assertEqual(sorted(templates), uuids)
    def test_sort_key_invalid(self):
        invalid_keys_list = ['extra', 'foo', 'steps']
        for invalid_key in invalid_keys_list:
            path = '/deploy_templates?sort_key=%s' % invalid_key
            response = self.get_json(path, expect_errors=True,
                                     headers=self.headers)
            self.assertEqual(http_client.BAD_REQUEST, response.status_int)
            self.assertEqual('application/json', response.content_type)
            self.assertIn(invalid_key, response.json['error_message'])
    def _test_sort_key_allowed(self, detail=False):
        # Create in reverse name order, then check the listing sorts by name.
        template_uuids = []
        for id_ in range(3, 0, -1):
            template = obj_utils.create_test_deploy_template(
                self.context,
                uuid=uuidutils.generate_uuid(),
                name='CUSTOM_DT%s' % id_)
            template_uuids.append(template.uuid)
        template_uuids.reverse()
        url = '/deploy_templates?sort_key=name&detail=%s' % str(detail)
        data = self.get_json(url, headers=self.headers)
        data_uuids = [p['uuid'] for p in data['deploy_templates']]
        self.assertEqual(template_uuids, data_uuids)
    def test_sort_key_allowed(self):
        self._test_sort_key_allowed()
    def test_detail_sort_key_allowed(self):
        self._test_sort_key_allowed(detail=True)
    def test_sensitive_data_masked(self):
        template = obj_utils.get_test_deploy_template(self.context)
        template.steps[0]['args']['password'] = '<PASSWORD>'
        template.create()
        data = self.get_json('/deploy_templates/%s' % template.uuid,
                             headers=self.headers)
        # Step arguments named like secrets are masked in API responses.
        self.assertEqual("******", data['steps'][0]['args']['password'])
@mock.patch.object(objects.DeployTemplate, 'save', autospec=True)
class TestPatch(BaseDeployTemplatesAPITest):
    def setUp(self):
        """Create a deploy template fixture used by every PATCH test."""
        super(TestPatch, self).setUp()
        self.template = obj_utils.create_test_deploy_template(
            self.context, name='CUSTOM_DT1')
def _test_update_ok(self, mock_save, patch):
response = self.patch_json('/deploy_templates/%s' % self.template.uuid,
patch, headers=self.headers)
self.assertEqual('application/json', response.content_type)
self.assertEqual(http_client.OK, response.status_code)
mock_save.assert_called_once_with(mock.ANY)
return response
def _test_update_bad_request(self, mock_save, patch, error_msg=None):
response = self.patch_json('/deploy_templates/%s' % self.template.uuid,
patch, expect_errors=True,
headers=self.headers)
self.assertEqual('application/json', response.content_type)
self.assertEqual(http_client.BAD_REQUEST, response.status_code)
self.assertTrue(response.json['error_message'])
if error_msg:
self.assertIn(error_msg, response.json['error_message'])
self.assertFalse(mock_save.called)
return response
@mock.patch.object(notification_utils, '_emit_api_notification',
autospec=True)
def test_update_by_id(self, mock_notify, mock_save):
name = 'CUSTOM_DT2'
patch = [{'path': '/name', 'value': name, 'op': 'add'}]
response = self._test_update_ok(mock_save, patch)
self.assertEqual(name, response.json['name'])
mock_notify.assert_has_calls([mock.call(mock.ANY, mock.ANY, 'update',
obj_fields.NotificationLevel.INFO,
obj_fields.NotificationStatus.START),
mock.call(mock.ANY, mock.ANY, 'update',
obj_fields.NotificationLevel.INFO,
obj_fields.NotificationStatus.END)])
def test_update_by_name(self, mock_save):
steps = [{
'interface': 'bios',
'step': 'apply_configuration',
'args': {'foo': 'bar'},
'priority': 42
}]
patch = [{'path': '/steps', 'value': steps, 'op': 'replace'}]
response = self.patch_json('/deploy_templates/%s' % self.template.name,
patch, headers=self.headers)
self.assertEqual('application/json', response.content_type)
self.assertEqual(http_client.OK, response.status_code)
mock_save.assert_called_once_with(mock.ANY)
self.assertEqual(steps, response.json['steps'])
def test_update_by_name_with_json(self, mock_save):
interface = 'bios'
path = '/deploy_templates/%s.json' % self.template.name
response = self.patch_json(path,
[{'path': '/steps/0/interface',
'value': interface,
'op': 'replace'}],
headers=self.headers)
self.assertEqual('application/json', response.content_type)
self.assertEqual(http_client.OK, response.status_code)
self.assertEqual(interface, response.json['steps'][0]['interface'])
def test_update_name_standard_trait(self, mock_save):
name = 'HW_CPU_X86_VMX'
patch = [{'path': '/name', 'value': name, 'op': 'replace'}]
response = self._test_update_ok(mock_save, patch)
self.assertEqual(name, response.json['name'])
def test_update_name_custom_trait(self, mock_save):
name = 'CUSTOM_DT2'
patch = [{'path': '/name', 'value': name, 'op': 'replace'}]
response = self._test_update_ok(mock_save, patch)
self.assertEqual(name, response.json['name'])
def test_update_invalid_name(self, mock_save):
self._test_update_bad_request(
mock_save,
[{'path': '/name', 'value': 'aa:bb_cc', 'op': 'replace'}],
"'aa:bb_cc' does not match '^CUSTOM_[A-Z0-9_]+$'")
def test_update_by_id_invalid_api_version(self, mock_save):
name = 'CUSTOM_DT2'
headers = self.invalid_version_headers
response = self.patch_json('/deploy_templates/%s' % self.template.uuid,
[{'path': '/name',
'value': name,
'op': 'add'}],
headers=headers,
expect_errors=True)
self.assertEqual(http_client.METHOD_NOT_ALLOWED, response.status_int)
self.assertFalse(mock_save.called)
def test_update_by_name_old_api_version(self, mock_save):
name = 'CUSTOM_DT2'
response = self.patch_json('/deploy_templates/%s' % self.template.name,
[{'path': '/name',
'value': name,
'op': 'add'}],
expect_errors=True)
self.assertEqual(http_client.METHOD_NOT_ALLOWED, response.status_int)
self.assertFalse(mock_save.called)
def test_update_not_found(self, mock_save):
name = 'CUSTOM_DT2'
uuid = uuidutils.generate_uuid()
response = self.patch_json('/deploy_templates/%s' % uuid,
[{'path': '/name',
'value': name,
'op': 'add'}],
expect_errors=True,
headers=self.headers)
self.assertEqual('application/json', response.content_type)
self.assertEqual(http_client.NOT_FOUND, response.status_int)
self.assertTrue(response.json['error_message'])
self.assertFalse(mock_save.called)
@mock.patch.object(notification_utils, '_emit_api_notification',
autospec=True)
def test_replace_name_already_exist(self, mock_notify, mock_save):
name = 'CUSTOM_DT2'
obj_utils.create_test_deploy_template(self.context,
uuid=uuidutils.generate_uuid(),
name=name)
mock_save.side_effect = exception.DeployTemplateAlreadyExists(
uuid=self.template.uuid)
response = self.patch_json('/deploy_templates/%s' % self.template.uuid,
[{'path': '/name',
'value': name,
'op': 'replace'}],
expect_errors=True,
headers=self.headers)
self.assertEqual('application/json', response.content_type)
self.assertEqual(http_client.CONFLICT, response.status_code)
self.assertTrue(response.json['error_message'])
mock_save.assert_called_once_with(mock.ANY)
mock_notify.assert_has_calls([mock.call(mock.ANY, mock.ANY, 'update',
obj_fields.NotificationLevel.INFO,
obj_fields.NotificationStatus.START),
mock.call(mock.ANY, mock.ANY, 'update',
obj_fields.NotificationLevel.ERROR,
obj_fields.NotificationStatus.ERROR)])
def test_replace_invalid_name_too_long(self, mock_save):
name = 'CUSTOM_' + 'X' * 249
patch = [{'path': '/name', 'op': 'replace', 'value': name}]
self._test_update_bad_request(
mock_save, patch, "'%s' is too long" % name)
def test_replace_invalid_name_not_a_trait(self, mock_save):
name = 'not-a-trait'
patch = [{'path': '/name', 'op': 'replace', 'value': name}]
self._test_update_bad_request(
mock_save, patch,
"'not-a-trait' does not match '^CUSTOM_[A-Z0-9_]+$'")
def test_replace_invalid_name_none(self, mock_save):
patch = [{'path': '/name', 'op': 'replace', 'value': None}]
self._test_update_bad_request(
mock_save, patch, "None is not of type 'string'")
def test_replace_duplicate_step(self, mock_save):
# interface & step combination must be unique.
steps = [
{
'interface': 'raid',
'step': 'create_configuration',
'args': {'foo': '%d' % i},
'priority': i,
}
for i in range(2)
]
patch = [{'path': '/steps', 'op': 'replace', 'value': steps}]
self._test_update_bad_request(
mock_save, patch, "Duplicate deploy steps")
def test_replace_invalid_step_interface_fail(self, mock_save):
step = {
'interface': 'foo',
'step': 'apply_configuration',
'args': {'foo': 'bar'},
'priority': 42
}
patch = [{'path': '/steps/0', 'op': 'replace', 'value': step}]
self._test_update_bad_request(
mock_save, patch, "'foo' is not one of")
def test_replace_non_existent_step_fail(self, mock_save):
step = {
'interface': 'bios',
'step': 'apply_configuration',
'args': {'foo': 'bar'},
'priority': 42
}
patch = [{'path': '/steps/1', 'op': 'replace', 'value': step}]
self._test_update_bad_request(mock_save, patch)
def test_replace_empty_step_list_fail(self, mock_save):
patch = [{'path': '/steps', 'op': 'replace', 'value': []}]
self._test_update_bad_request(
mock_save, patch, '[] is too short')
def _test_remove_not_allowed(self, mock_save, field, error_msg=None):
patch = [{'path': '/%s' % field, 'op': 'remove'}]
self._test_update_bad_request(mock_save, patch, error_msg)
def test_remove_uuid(self, mock_save):
self._test_remove_not_allowed(
mock_save, 'uuid',
"Cannot patch /uuid")
def test_remove_name(self, mock_save):
self._test_remove_not_allowed(
mock_save, 'name',
"'name' is a required property")
def test_remove_steps(self, mock_save):
self._test_remove_not_allowed(
mock_save, 'steps',
"'steps' is a required property")
def test_remove_foo(self, mock_save):
self._test_remove_not_allowed(mock_save, 'foo')
def test_replace_step_invalid_interface(self, mock_save):
patch = [{'path': '/steps/0/interface', 'op': 'replace',
'value': 'foo'}]
self._test_update_bad_request(
mock_save, patch, "'foo' is not one of")
def test_replace_multi(self, mock_save):
steps = [
{
'interface': 'raid',
'step': 'create_configuration%d' % i,
'args': {},
'priority': 10,
}
for i in range(3)
]
template = obj_utils.create_test_deploy_template(
self.context, uuid=uuidutils.generate_uuid(), name='CUSTOM_DT2',
steps=steps)
# mutate steps so we replace all of them
for step in steps:
step['priority'] = step['priority'] + 1
patch = []
for i, step in enumerate(steps):
patch.append({'path': '/steps/%s' % i,
'value': step,
'op': 'replace'})
response = self.patch_json('/deploy_templates/%s' % template.uuid,
patch, headers=self.headers)
self.assertEqual('application/json', response.content_type)
self.assertEqual(http_client.OK, response.status_code)
self.assertEqual(steps, response.json['steps'])
mock_save.assert_called_once_with(mock.ANY)
def test_remove_multi(self, mock_save):
steps = [
{
'interface': 'raid',
'step': 'create_configuration%d' % i,
'args': {},
'priority': 10,
}
for i in range(3)
]
template = obj_utils.create_test_deploy_template(
self.context, uuid=uuidutils.generate_uuid(), name='CUSTOM_DT2',
steps=steps)
# Removing one step from the collection
steps.pop(1)
response = self.patch_json('/deploy_templates/%s' % template.uuid,
[{'path': '/steps/1',
'op': 'remove'}],
headers=self.headers)
self.assertEqual('application/json', response.content_type)
self.assertEqual(http_client.OK, response.status_code)
self.assertEqual(steps, response.json['steps'])
mock_save.assert_called_once_with(mock.ANY)
def test_remove_non_existent_property_fail(self, mock_save):
patch = [{'path': '/non-existent', 'op': 'remove'}]
self._test_update_bad_request(mock_save, patch)
def test_remove_non_existent_step_fail(self, mock_save):
patch = [{'path': '/steps/1', 'op': 'remove'}]
self._test_update_bad_request(mock_save, patch)
def test_remove_only_step_fail(self, mock_save):
patch = [{'path': '/steps/0', 'op': 'remove'}]
self._test_update_bad_request(
mock_save, patch, "[] is too short")
def test_remove_non_existent_step_property_fail(self, mock_save):
patch = [{'path': '/steps/0/non-existent', 'op': 'remove'}]
self._test_update_bad_request(mock_save, patch)
def test_add_root_non_existent(self, mock_save):
patch = [{'path': '/foo', 'value': 'bar', 'op': 'add'}]
self._test_update_bad_request(
mock_save, patch,
"Cannot patch /foo")
def test_add_too_high_index_step_fail(self, mock_save):
step = {
'interface': 'bios',
'step': 'apply_configuration',
'args': {'foo': 'bar'},
'priority': 42
}
patch = [{'path': '/steps/2', 'op': 'add', 'value': step}]
self._test_update_bad_request(mock_save, patch)
def test_add_multi(self, mock_save):
steps = [
{
'interface': 'raid',
'step': 'create_configuration%d' % i,
'args': {},
'priority': 10,
}
for i in range(3)
]
patch = []
for i, step in enumerate(steps):
patch.append({'path': '/steps/%d' % i,
'value': step,
'op': 'add'})
response = self.patch_json('/deploy_templates/%s' % self.template.uuid,
patch, headers=self.headers)
self.assertEqual('application/json', response.content_type)
self.assertEqual(http_client.OK, response.status_code)
self.assertEqual(steps, response.json['steps'][:-1])
self.assertEqual(_obj_to_api_step(self.template.steps[0]),
response.json['steps'][-1])
mock_save.assert_called_once_with(mock.ANY)
class TestPost(BaseDeployTemplatesAPITest):
    """Tests for POST /deploy_templates."""

    @mock.patch.object(notification_utils, '_emit_api_notification',
                       autospec=True)
    @mock.patch.object(timeutils, 'utcnow', autospec=True)
    def test_create(self, mock_utcnow, mock_notify):
        # Successful create: 201, Location header, timestamps, and
        # start/end notifications.
        tdict = test_api_utils.post_get_test_deploy_template()
        test_time = datetime.datetime(2000, 1, 1, 0, 0)
        mock_utcnow.return_value = test_time
        response = self.post_json('/deploy_templates', tdict,
                                  headers=self.headers)
        self.assertEqual(http_client.CREATED, response.status_int)
        result = self.get_json('/deploy_templates/%s' % tdict['uuid'],
                               headers=self.headers)
        self.assertEqual(tdict['uuid'], result['uuid'])
        self.assertFalse(result['updated_at'])
        return_created_at = timeutils.parse_isotime(
            result['created_at']).replace(tzinfo=None)
        self.assertEqual(test_time, return_created_at)
        # Check location header
        self.assertIsNotNone(response.location)
        expected_location = '/v1/deploy_templates/%s' % tdict['uuid']
        self.assertEqual(expected_location,
                         urlparse.urlparse(response.location).path)
        mock_notify.assert_has_calls([mock.call(mock.ANY, mock.ANY, 'create',
                                    obj_fields.NotificationLevel.INFO,
                                    obj_fields.NotificationStatus.START),
                                      mock.call(mock.ANY, mock.ANY, 'create',
                                    obj_fields.NotificationLevel.INFO,
                                    obj_fields.NotificationStatus.END)])

    def test_create_invalid_api_version(self):
        # POST is rejected for API versions without deploy-template support.
        tdict = test_api_utils.post_get_test_deploy_template()
        response = self.post_json(
            '/deploy_templates', tdict, headers=self.invalid_version_headers,
            expect_errors=True)
        self.assertEqual(http_client.METHOD_NOT_ALLOWED, response.status_int)

    def test_create_doesnt_contain_id(self):
        # The DB layer must not receive a client-supplied 'id'.
        with mock.patch.object(
                self.dbapi, 'create_deploy_template',
                wraps=self.dbapi.create_deploy_template) as mock_create:
            tdict = test_api_utils.post_get_test_deploy_template()
            self.post_json('/deploy_templates', tdict, headers=self.headers)
            self.get_json('/deploy_templates/%s' % tdict['uuid'],
                          headers=self.headers)
            mock_create.assert_called_once_with(mock.ANY)
            # Check that 'id' is not in first arg of positional args
            self.assertNotIn('id', mock_create.call_args[0][0])

    @mock.patch.object(notification_utils.LOG, 'exception', autospec=True)
    @mock.patch.object(notification_utils.LOG, 'warning', autospec=True)
    def test_create_generate_uuid(self, mock_warn, mock_except):
        # Omitting 'uuid' makes the server generate one, without logging
        # warnings or exceptions.
        tdict = test_api_utils.post_get_test_deploy_template()
        del tdict['uuid']
        response = self.post_json('/deploy_templates', tdict,
                                  headers=self.headers)
        result = self.get_json('/deploy_templates/%s' % response.json['uuid'],
                               headers=self.headers)
        self.assertTrue(uuidutils.is_uuid_like(result['uuid']))
        self.assertFalse(mock_warn.called)
        self.assertFalse(mock_except.called)

    @mock.patch.object(notification_utils, '_emit_api_notification',
                       autospec=True)
    @mock.patch.object(objects.DeployTemplate, 'create', autospec=True)
    def test_create_error(self, mock_create, mock_notify):
        # A failure during create emits start then error notifications.
        mock_create.side_effect = Exception()
        tdict = test_api_utils.post_get_test_deploy_template()
        self.post_json('/deploy_templates', tdict, headers=self.headers,
                       expect_errors=True)
        mock_notify.assert_has_calls([mock.call(mock.ANY, mock.ANY, 'create',
                                    obj_fields.NotificationLevel.INFO,
                                    obj_fields.NotificationStatus.START),
                                      mock.call(mock.ANY, mock.ANY, 'create',
                                    obj_fields.NotificationLevel.ERROR,
                                    obj_fields.NotificationStatus.ERROR)])

    def _test_create_ok(self, tdict):
        # Helper: POST *tdict* and expect HTTP 201.
        response = self.post_json('/deploy_templates', tdict,
                                  headers=self.headers)
        self.assertEqual(http_client.CREATED, response.status_int)

    def _test_create_bad_request(self, tdict, error_msg):
        # Helper: POST *tdict* and expect HTTP 400 containing *error_msg*.
        response = self.post_json('/deploy_templates', tdict,
                                  expect_errors=True, headers=self.headers)
        self.assertEqual(http_client.BAD_REQUEST, response.status_int)
        self.assertEqual('application/json', response.content_type)
        self.assertTrue(response.json['error_message'])
        self.assertIn(error_msg, response.json['error_message'])

    def test_create_long_name(self):
        # 'CUSTOM_' + 248 chars is exactly at the maximum length.
        name = 'CUSTOM_' + 'X' * 248
        tdict = test_api_utils.post_get_test_deploy_template(name=name)
        self._test_create_ok(tdict)

    def test_create_standard_trait_name(self):
        name = 'HW_CPU_X86_VMX'
        tdict = test_api_utils.post_get_test_deploy_template(name=name)
        self._test_create_ok(tdict)

    def test_create_name_invalid_too_long(self):
        # One character over the maximum length is rejected.
        name = 'CUSTOM_' + 'X' * 249
        tdict = test_api_utils.post_get_test_deploy_template(name=name)
        self._test_create_bad_request(
            tdict, "'%s' is too long" % name)

    def test_create_name_invalid_not_a_trait(self):
        name = 'not-a-trait'
        tdict = test_api_utils.post_get_test_deploy_template(name=name)
        self._test_create_bad_request(
            tdict, "'not-a-trait' does not match '^CUSTOM_[A-Z0-9_]+$'")

    def test_create_steps_invalid_duplicate(self):
        # interface & step combination must be unique across steps.
        steps = [
            {
                'interface': 'raid',
                'step': 'create_configuration',
                'args': {'foo': '%d' % i},
                'priority': i,
            }
            for i in range(2)
        ]
        tdict = test_api_utils.post_get_test_deploy_template(steps=steps)
        self._test_create_bad_request(tdict, "Duplicate deploy steps")

    def _test_create_no_mandatory_field(self, field):
        # Helper: omitting a mandatory top-level field yields HTTP 400.
        tdict = test_api_utils.post_get_test_deploy_template()
        del tdict[field]
        self._test_create_bad_request(tdict, "is a required property")

    def test_create_no_mandatory_field_name(self):
        self._test_create_no_mandatory_field('name')

    def test_create_no_mandatory_field_steps(self):
        self._test_create_no_mandatory_field('steps')

    def _test_create_no_mandatory_step_field(self, field):
        # Helper: omitting a mandatory step field yields HTTP 400.
        tdict = test_api_utils.post_get_test_deploy_template()
        del tdict['steps'][0][field]
        self._test_create_bad_request(tdict, "is a required property")

    def test_create_no_mandatory_step_field_interface(self):
        self._test_create_no_mandatory_step_field('interface')

    def test_create_no_mandatory_step_field_step(self):
        self._test_create_no_mandatory_step_field('step')

    def test_create_no_mandatory_step_field_args(self):
        self._test_create_no_mandatory_step_field('args')

    def test_create_no_mandatory_step_field_priority(self):
        self._test_create_no_mandatory_step_field('priority')

    def _test_create_invalid_field(self, field, value, error_msg):
        # Helper: an invalid top-level field value yields HTTP 400.
        tdict = test_api_utils.post_get_test_deploy_template()
        tdict[field] = value
        self._test_create_bad_request(tdict, error_msg)

    def test_create_invalid_field_name(self):
        self._test_create_invalid_field(
            'name', 42, "42 is not of type 'string'")

    def test_create_invalid_field_name_none(self):
        self._test_create_invalid_field(
            'name', None, "None is not of type 'string'")

    def test_create_invalid_field_steps(self):
        self._test_create_invalid_field(
            'steps', {}, "{} is not of type 'array'")

    def test_create_invalid_field_empty_steps(self):
        self._test_create_invalid_field(
            'steps', [], "[] is too short")

    def test_create_invalid_field_extra(self):
        self._test_create_invalid_field(
            'extra', 42, "42 is not of type 'object'")

    def test_create_invalid_field_foo(self):
        self._test_create_invalid_field(
            'foo', 'bar',
            "Additional properties are not allowed ('foo' was unexpected)")

    def _test_create_invalid_step_field(self, field, value, error_msg=None):
        # Helper: an invalid step field value yields HTTP 400.
        tdict = test_api_utils.post_get_test_deploy_template()
        tdict['steps'][0][field] = value
        if error_msg is None:
            error_msg = "Deploy template invalid: "
        self._test_create_bad_request(tdict, error_msg)

    def test_create_invalid_step_field_interface1(self):
        self._test_create_invalid_step_field(
            'interface', [3], "[3] is not of type 'string'")

    def test_create_invalid_step_field_interface2(self):
        self._test_create_invalid_step_field(
            'interface', 'foo', "'foo' is not one of")

    def test_create_invalid_step_field_step(self):
        self._test_create_invalid_step_field(
            'step', 42, "42 is not of type 'string'")

    def test_create_invalid_step_field_args1(self):
        self._test_create_invalid_step_field(
            'args', 'not a dict', "'not a dict' is not of type 'object'")

    def test_create_invalid_step_field_args2(self):
        self._test_create_invalid_step_field(
            'args', [], "[] is not of type 'object'")

    def test_create_invalid_step_field_priority(self):
        self._test_create_invalid_step_field(
            'priority', 'not a number',
            "'not a number' is not of type 'integer'")

    def test_create_invalid_step_field_negative_priority(self):
        self._test_create_invalid_step_field(
            'priority', -1, "-1 is less than the minimum of 0")

    def test_create_invalid_step_field_foo(self):
        self._test_create_invalid_step_field(
            'foo', 'bar',
            "Additional properties are not allowed ('foo' was unexpected)")

    def test_create_step_string_priority(self):
        # A numeric string priority is coerced and accepted.
        tdict = test_api_utils.post_get_test_deploy_template()
        tdict['steps'][0]['priority'] = '42'
        self._test_create_ok(tdict)

    def test_create_complex_step_args(self):
        # Nested structures are valid step arguments.
        tdict = test_api_utils.post_get_test_deploy_template()
        tdict['steps'][0]['args'] = {'foo': [{'bar': 'baz'}]}
        self._test_create_ok(tdict)
@mock.patch.object(objects.DeployTemplate, 'destroy', autospec=True)
class TestDelete(BaseDeployTemplatesAPITest):
    """Tests for DELETE /deploy_templates/<ident>.

    ``DeployTemplate.destroy`` is mocked for the whole class, so each
    test receives the mock as its last decorator-injected argument.
    """

    def setUp(self):
        super(TestDelete, self).setUp()
        self.template = obj_utils.create_test_deploy_template(self.context)

    @mock.patch.object(notification_utils, '_emit_api_notification',
                       autospec=True)
    def test_delete_by_uuid(self, mock_notify, mock_destroy):
        # Deleting by UUID destroys the object and emits start/end
        # notifications.
        url = '/deploy_templates/%s' % self.template.uuid
        self.delete(url, headers=self.headers)
        mock_destroy.assert_called_once_with(mock.ANY)
        start_call = mock.call(mock.ANY, mock.ANY, 'delete',
                               obj_fields.NotificationLevel.INFO,
                               obj_fields.NotificationStatus.START)
        end_call = mock.call(mock.ANY, mock.ANY, 'delete',
                             obj_fields.NotificationLevel.INFO,
                             obj_fields.NotificationStatus.END)
        mock_notify.assert_has_calls([start_call, end_call])

    def test_delete_by_uuid_with_json(self, mock_destroy):
        # A trailing '.json' suffix on the UUID is accepted.
        url = '/deploy_templates/%s.json' % self.template.uuid
        self.delete(url, headers=self.headers)
        mock_destroy.assert_called_once_with(mock.ANY)

    def test_delete_by_name(self, mock_destroy):
        url = '/deploy_templates/%s' % self.template.name
        self.delete(url, headers=self.headers)
        mock_destroy.assert_called_once_with(mock.ANY)

    def test_delete_by_name_with_json(self, mock_destroy):
        url = '/deploy_templates/%s.json' % self.template.name
        self.delete(url, headers=self.headers)
        mock_destroy.assert_called_once_with(mock.ANY)

    def test_delete_invalid_api_version(self, mock_dpt):
        # DELETE is rejected for API versions without deploy-template
        # support.
        url = '/deploy_templates/%s' % self.template.uuid
        response = self.delete(url, expect_errors=True,
                               headers=self.invalid_version_headers)
        self.assertEqual(http_client.METHOD_NOT_ALLOWED, response.status_int)

    def test_delete_old_api_version(self, mock_dpt):
        # Names like CUSTOM_1 were not valid in API 1.1, but the check should
        # go after the microversion check.
        url = '/deploy_templates/%s' % self.template.name
        response = self.delete(url, expect_errors=True)
        self.assertEqual(http_client.METHOD_NOT_ALLOWED, response.status_int)

    def test_delete_by_name_non_existent(self, mock_dpt):
        url = '/deploy_templates/%s' % 'blah'
        res = self.delete(url, expect_errors=True, headers=self.headers)
        self.assertEqual(http_client.NOT_FOUND, res.status_code)
| [
"unittest.mock.patch.object",
"ironic.tests.unit.api.utils.post_get_test_deploy_template",
"ironic.common.exception.DeployTemplateAlreadyExists",
"ironic.api.controllers.v1.max_version",
"oslo_utils.uuidutils.generate_uuid",
"oslo_utils.timeutils.parse_isotime",
"ironic.tests.unit.objects.utils.create_t... | [((13665, 13729), 'unittest.mock.patch.object', 'mock.patch.object', (['objects.DeployTemplate', '"""save"""'], {'autospec': '(True)'}), "(objects.DeployTemplate, 'save', autospec=True)\n", (13682, 13729), False, 'from unittest import mock\n'), ((39676, 39743), 'unittest.mock.patch.object', 'mock.patch.object', (['objects.DeployTemplate', '"""destroy"""'], {'autospec': '(True)'}), "(objects.DeployTemplate, 'destroy', autospec=True)\n", (39693, 39743), False, 'from unittest import mock\n'), ((14979, 15057), 'unittest.mock.patch.object', 'mock.patch.object', (['notification_utils', '"""_emit_api_notification"""'], {'autospec': '(True)'}), "(notification_utils, '_emit_api_notification', autospec=True)\n", (14996, 15057), False, 'from unittest import mock\n'), ((19689, 19767), 'unittest.mock.patch.object', 'mock.patch.object', (['notification_utils', '"""_emit_api_notification"""'], {'autospec': '(True)'}), "(notification_utils, '_emit_api_notification', autospec=True)\n", (19706, 19767), False, 'from unittest import mock\n'), ((29036, 29114), 'unittest.mock.patch.object', 'mock.patch.object', (['notification_utils', '"""_emit_api_notification"""'], {'autospec': '(True)'}), "(notification_utils, '_emit_api_notification', autospec=True)\n", (29053, 29114), False, 'from unittest import mock\n'), ((29143, 29196), 'unittest.mock.patch.object', 'mock.patch.object', (['timeutils', '"""utcnow"""'], {'autospec': '(True)'}), "(timeutils, 'utcnow', autospec=True)\n", (29160, 29196), False, 'from unittest import mock\n'), ((31698, 31767), 'unittest.mock.patch.object', 'mock.patch.object', (['notification_utils.LOG', '"""exception"""'], {'autospec': '(True)'}), "(notification_utils.LOG, 'exception', autospec=True)\n", (31715, 31767), False, 'from unittest import mock\n'), ((31773, 31840), 'unittest.mock.patch.object', 'mock.patch.object', (['notification_utils.LOG', '"""warning"""'], {'autospec': '(True)'}), "(notification_utils.LOG, 
'warning', autospec=True)\n", (31790, 31840), False, 'from unittest import mock\n'), ((32403, 32481), 'unittest.mock.patch.object', 'mock.patch.object', (['notification_utils', '"""_emit_api_notification"""'], {'autospec': '(True)'}), "(notification_utils, '_emit_api_notification', autospec=True)\n", (32420, 32481), False, 'from unittest import mock\n'), ((32510, 32576), 'unittest.mock.patch.object', 'mock.patch.object', (['objects.DeployTemplate', '"""create"""'], {'autospec': '(True)'}), "(objects.DeployTemplate, 'create', autospec=True)\n", (32527, 32576), False, 'from unittest import mock\n'), ((39934, 40012), 'unittest.mock.patch.object', 'mock.patch.object', (['notification_utils', '"""_emit_api_notification"""'], {'autospec': '(True)'}), "(notification_utils, '_emit_api_notification', autospec=True)\n", (39951, 40012), False, 'from unittest import mock\n'), ((2011, 2062), 'ironic.tests.unit.objects.utils.create_test_deploy_template', 'obj_utils.create_test_deploy_template', (['self.context'], {}), '(self.context)\n', (2048, 2062), True, 'from ironic.tests.unit.objects import utils as obj_utils\n'), ((2522, 2573), 'ironic.tests.unit.objects.utils.create_test_deploy_template', 'obj_utils.create_test_deploy_template', (['self.context'], {}), '(self.context)\n', (2559, 2573), True, 'from ironic.tests.unit.objects import utils as obj_utils\n'), ((3269, 3320), 'ironic.tests.unit.objects.utils.create_test_deploy_template', 'obj_utils.create_test_deploy_template', (['self.context'], {}), '(self.context)\n', (3306, 3320), True, 'from ironic.tests.unit.objects import utils as obj_utils\n'), ((3560, 3630), 'ironic.tests.unit.objects.utils.create_test_deploy_template', 'obj_utils.create_test_deploy_template', (['self.context'], {'name': '"""CUSTOM_DT1"""'}), "(self.context, name='CUSTOM_DT1')\n", (3597, 3630), True, 'from ironic.tests.unit.objects import utils as obj_utils\n'), ((3924, 3975), 'ironic.tests.unit.objects.utils.create_test_deploy_template', 
'obj_utils.create_test_deploy_template', (['self.context'], {}), '(self.context)\n', (3961, 3975), True, 'from ironic.tests.unit.objects import utils as obj_utils\n'), ((4928, 4979), 'ironic.tests.unit.objects.utils.create_test_deploy_template', 'obj_utils.create_test_deploy_template', (['self.context'], {}), '(self.context)\n', (4965, 4979), True, 'from ironic.tests.unit.objects import utils as obj_utils\n'), ((5437, 5488), 'ironic.tests.unit.objects.utils.create_test_deploy_template', 'obj_utils.create_test_deploy_template', (['self.context'], {}), '(self.context)\n', (5474, 5488), True, 'from ironic.tests.unit.objects import utils as obj_utils\n'), ((5804, 5855), 'ironic.tests.unit.objects.utils.create_test_deploy_template', 'obj_utils.create_test_deploy_template', (['self.context'], {}), '(self.context)\n', (5841, 5855), True, 'from ironic.tests.unit.objects import utils as obj_utils\n'), ((6148, 6199), 'ironic.tests.unit.objects.utils.create_test_deploy_template', 'obj_utils.create_test_deploy_template', (['self.context'], {}), '(self.context)\n', (6185, 6199), True, 'from ironic.tests.unit.objects import utils as obj_utils\n'), ((6617, 6668), 'ironic.tests.unit.objects.utils.create_test_deploy_template', 'obj_utils.create_test_deploy_template', (['self.context'], {}), '(self.context)\n', (6654, 6668), True, 'from ironic.tests.unit.objects import utils as obj_utils\n'), ((6985, 7036), 'ironic.tests.unit.objects.utils.create_test_deploy_template', 'obj_utils.create_test_deploy_template', (['self.context'], {}), '(self.context)\n', (7022, 7036), True, 'from ironic.tests.unit.objects import utils as obj_utils\n'), ((7404, 7455), 'ironic.tests.unit.objects.utils.create_test_deploy_template', 'obj_utils.create_test_deploy_template', (['self.context'], {}), '(self.context)\n', (7441, 7455), True, 'from ironic.tests.unit.objects import utils as obj_utils\n'), ((8257, 8282), 'oslo_utils.uuidutils.generate_uuid', 'uuidutils.generate_uuid', ([], {}), '()\n', (8280, 
8282), False, 'from oslo_utils import uuidutils\n'), ((8291, 8353), 'ironic.tests.unit.objects.utils.create_test_deploy_template', 'obj_utils.create_test_deploy_template', (['self.context'], {'uuid': 'uuid'}), '(self.context, uuid=uuid)\n', (8328, 8353), True, 'from ironic.tests.unit.objects import utils as obj_utils\n'), ((9457, 9501), 'oslo_config.cfg.CONF.set_override', 'cfg.CONF.set_override', (['"""max_limit"""', '(3)', '"""api"""'], {}), "('max_limit', 3, 'api')\n", (9478, 9501), False, 'from oslo_config import cfg\n'), ((10064, 10108), 'oslo_config.cfg.CONF.set_override', 'cfg.CONF.set_override', (['"""max_limit"""', '(3)', '"""api"""'], {}), "('max_limit', 3, 'api')\n", (10085, 10108), False, 'from oslo_config import cfg\n'), ((13332, 13380), 'ironic.tests.unit.objects.utils.get_test_deploy_template', 'obj_utils.get_test_deploy_template', (['self.context'], {}), '(self.context)\n', (13366, 13380), True, 'from ironic.tests.unit.objects import utils as obj_utils\n'), ((13860, 13930), 'ironic.tests.unit.objects.utils.create_test_deploy_template', 'obj_utils.create_test_deploy_template', (['self.context'], {'name': '"""CUSTOM_DT1"""'}), "(self.context, name='CUSTOM_DT1')\n", (13897, 13930), True, 'from ironic.tests.unit.objects import utils as obj_utils\n'), ((19085, 19110), 'oslo_utils.uuidutils.generate_uuid', 'uuidutils.generate_uuid', ([], {}), '()\n', (19108, 19110), False, 'from oslo_utils import uuidutils\n'), ((20117, 20179), 'ironic.common.exception.DeployTemplateAlreadyExists', 'exception.DeployTemplateAlreadyExists', ([], {'uuid': 'self.template.uuid'}), '(uuid=self.template.uuid)\n', (20154, 20179), False, 'from ironic.common import exception\n'), ((29266, 29312), 'ironic.tests.unit.api.utils.post_get_test_deploy_template', 'test_api_utils.post_get_test_deploy_template', ([], {}), '()\n', (29310, 29312), True, 'from ironic.tests.unit.api import utils as test_api_utils\n'), ((29333, 29368), 'datetime.datetime', 'datetime.datetime', (['(2000)', '(1)', 
'(1)', '(0)', '(0)'], {}), '(2000, 1, 1, 0, 0)\n', (29350, 29368), False, 'import datetime\n'), ((30768, 30814), 'ironic.tests.unit.api.utils.post_get_test_deploy_template', 'test_api_utils.post_get_test_deploy_template', ([], {}), '()\n', (30812, 30814), True, 'from ironic.tests.unit.api import utils as test_api_utils\n'), ((31922, 31968), 'ironic.tests.unit.api.utils.post_get_test_deploy_template', 'test_api_utils.post_get_test_deploy_template', ([], {}), '()\n', (31966, 31968), True, 'from ironic.tests.unit.api import utils as test_api_utils\n'), ((32698, 32744), 'ironic.tests.unit.api.utils.post_get_test_deploy_template', 'test_api_utils.post_get_test_deploy_template', ([], {}), '()\n', (32742, 32744), True, 'from ironic.tests.unit.api import utils as test_api_utils\n'), ((34089, 34144), 'ironic.tests.unit.api.utils.post_get_test_deploy_template', 'test_api_utils.post_get_test_deploy_template', ([], {'name': 'name'}), '(name=name)\n', (34133, 34144), True, 'from ironic.tests.unit.api import utils as test_api_utils\n'), ((34277, 34332), 'ironic.tests.unit.api.utils.post_get_test_deploy_template', 'test_api_utils.post_get_test_deploy_template', ([], {'name': 'name'}), '(name=name)\n', (34321, 34332), True, 'from ironic.tests.unit.api import utils as test_api_utils\n'), ((34472, 34527), 'ironic.tests.unit.api.utils.post_get_test_deploy_template', 'test_api_utils.post_get_test_deploy_template', ([], {'name': 'name'}), '(name=name)\n', (34516, 34527), True, 'from ironic.tests.unit.api import utils as test_api_utils\n'), ((34711, 34766), 'ironic.tests.unit.api.utils.post_get_test_deploy_template', 'test_api_utils.post_get_test_deploy_template', ([], {'name': 'name'}), '(name=name)\n', (34755, 34766), True, 'from ironic.tests.unit.api import utils as test_api_utils\n'), ((35192, 35249), 'ironic.tests.unit.api.utils.post_get_test_deploy_template', 'test_api_utils.post_get_test_deploy_template', ([], {'steps': 'steps'}), '(steps=steps)\n', (35236, 35249), True, 'from 
ironic.tests.unit.api import utils as test_api_utils\n'), ((35392, 35438), 'ironic.tests.unit.api.utils.post_get_test_deploy_template', 'test_api_utils.post_get_test_deploy_template', ([], {}), '()\n', (35436, 35438), True, 'from ironic.tests.unit.api import utils as test_api_utils\n'), ((35823, 35869), 'ironic.tests.unit.api.utils.post_get_test_deploy_template', 'test_api_utils.post_get_test_deploy_template', ([], {}), '()\n', (35867, 35869), True, 'from ironic.tests.unit.api import utils as test_api_utils\n'), ((36540, 36586), 'ironic.tests.unit.api.utils.post_get_test_deploy_template', 'test_api_utils.post_get_test_deploy_template', ([], {}), '()\n', (36584, 36586), True, 'from ironic.tests.unit.api import utils as test_api_utils\n'), ((37674, 37720), 'ironic.tests.unit.api.utils.post_get_test_deploy_template', 'test_api_utils.post_get_test_deploy_template', ([], {}), '()\n', (37718, 37720), True, 'from ironic.tests.unit.api import utils as test_api_utils\n'), ((39338, 39384), 'ironic.tests.unit.api.utils.post_get_test_deploy_template', 'test_api_utils.post_get_test_deploy_template', ([], {}), '()\n', (39382, 39384), True, 'from ironic.tests.unit.api import utils as test_api_utils\n'), ((39528, 39574), 'ironic.tests.unit.api.utils.post_get_test_deploy_template', 'test_api_utils.post_get_test_deploy_template', ([], {}), '()\n', (39572, 39574), True, 'from ironic.tests.unit.api import utils as test_api_utils\n'), ((39876, 39927), 'ironic.tests.unit.objects.utils.create_test_deploy_template', 'obj_utils.create_test_deploy_template', (['self.context'], {}), '(self.context)\n', (39913, 39927), True, 'from ironic.tests.unit.objects import utils as obj_utils\n'), ((1665, 1685), 'ironic.api.controllers.v1.max_version', 'api_v1.max_version', ([], {}), '()\n', (1683, 1685), True, 'from ironic.api.controllers import v1 as api_v1\n'), ((31097, 31198), 'unittest.mock.patch.object', 'mock.patch.object', (['self.dbapi', '"""create_deploy_template"""'], {'wraps': 
'self.dbapi.create_deploy_template'}), "(self.dbapi, 'create_deploy_template', wraps=self.dbapi.\n create_deploy_template)\n", (31114, 31198), False, 'from unittest import mock\n'), ((31263, 31309), 'ironic.tests.unit.api.utils.post_get_test_deploy_template', 'test_api_utils.post_get_test_deploy_template', ([], {}), '()\n', (31307, 31309), True, 'from ironic.tests.unit.api import utils as test_api_utils\n'), ((32269, 32307), 'oslo_utils.uuidutils.is_uuid_like', 'uuidutils.is_uuid_like', (["result['uuid']"], {}), "(result['uuid'])\n", (32291, 32307), False, 'from oslo_utils import uuidutils\n'), ((15381, 15496), 'unittest.mock.call', 'mock.call', (['mock.ANY', 'mock.ANY', '"""update"""', 'obj_fields.NotificationLevel.INFO', 'obj_fields.NotificationStatus.START'], {}), "(mock.ANY, mock.ANY, 'update', obj_fields.NotificationLevel.INFO,\n obj_fields.NotificationStatus.START)\n", (15390, 15496), False, 'from unittest import mock\n'), ((15608, 15721), 'unittest.mock.call', 'mock.call', (['mock.ANY', 'mock.ANY', '"""update"""', 'obj_fields.NotificationLevel.INFO', 'obj_fields.NotificationStatus.END'], {}), "(mock.ANY, mock.ANY, 'update', obj_fields.NotificationLevel.INFO,\n obj_fields.NotificationStatus.END)\n", (15617, 15721), False, 'from unittest import mock\n'), ((20001, 20026), 'oslo_utils.uuidutils.generate_uuid', 'uuidutils.generate_uuid', ([], {}), '()\n', (20024, 20026), False, 'from oslo_utils import uuidutils\n'), ((20830, 20945), 'unittest.mock.call', 'mock.call', (['mock.ANY', 'mock.ANY', '"""update"""', 'obj_fields.NotificationLevel.INFO', 'obj_fields.NotificationStatus.START'], {}), "(mock.ANY, mock.ANY, 'update', obj_fields.NotificationLevel.INFO,\n obj_fields.NotificationStatus.START)\n", (20839, 20945), False, 'from unittest import mock\n'), ((21057, 21173), 'unittest.mock.call', 'mock.call', (['mock.ANY', 'mock.ANY', '"""update"""', 'obj_fields.NotificationLevel.ERROR', 'obj_fields.NotificationStatus.ERROR'], {}), "(mock.ANY, mock.ANY, 'update', 
obj_fields.NotificationLevel.ERROR,\n obj_fields.NotificationStatus.ERROR)\n", (21066, 21173), False, 'from unittest import mock\n'), ((24918, 24943), 'oslo_utils.uuidutils.generate_uuid', 'uuidutils.generate_uuid', ([], {}), '()\n', (24941, 24943), False, 'from oslo_utils import uuidutils\n'), ((26064, 26089), 'oslo_utils.uuidutils.generate_uuid', 'uuidutils.generate_uuid', ([], {}), '()\n', (26087, 26089), False, 'from oslo_utils import uuidutils\n'), ((29854, 29899), 'oslo_utils.timeutils.parse_isotime', 'timeutils.parse_isotime', (["result['created_at']"], {}), "(result['created_at'])\n", (29877, 29899), False, 'from oslo_utils import timeutils\n'), ((30208, 30244), 'urllib.parse.urlparse', 'urlparse.urlparse', (['response.location'], {}), '(response.location)\n', (30225, 30244), True, 'from urllib import parse as urlparse\n'), ((30289, 30404), 'unittest.mock.call', 'mock.call', (['mock.ANY', 'mock.ANY', '"""create"""', 'obj_fields.NotificationLevel.INFO', 'obj_fields.NotificationStatus.START'], {}), "(mock.ANY, mock.ANY, 'create', obj_fields.NotificationLevel.INFO,\n obj_fields.NotificationStatus.START)\n", (30298, 30404), False, 'from unittest import mock\n'), ((30516, 30629), 'unittest.mock.call', 'mock.call', (['mock.ANY', 'mock.ANY', '"""create"""', 'obj_fields.NotificationLevel.INFO', 'obj_fields.NotificationStatus.END'], {}), "(mock.ANY, mock.ANY, 'create', obj_fields.NotificationLevel.INFO,\n obj_fields.NotificationStatus.END)\n", (30525, 30629), False, 'from unittest import mock\n'), ((32899, 33014), 'unittest.mock.call', 'mock.call', (['mock.ANY', 'mock.ANY', '"""create"""', 'obj_fields.NotificationLevel.INFO', 'obj_fields.NotificationStatus.START'], {}), "(mock.ANY, mock.ANY, 'create', obj_fields.NotificationLevel.INFO,\n obj_fields.NotificationStatus.START)\n", (32908, 33014), False, 'from unittest import mock\n'), ((33126, 33242), 'unittest.mock.call', 'mock.call', (['mock.ANY', 'mock.ANY', '"""create"""', 'obj_fields.NotificationLevel.ERROR', 
'obj_fields.NotificationStatus.ERROR'], {}), "(mock.ANY, mock.ANY, 'create', obj_fields.NotificationLevel.ERROR,\n obj_fields.NotificationStatus.ERROR)\n", (33135, 33242), False, 'from unittest import mock\n'), ((40298, 40413), 'unittest.mock.call', 'mock.call', (['mock.ANY', 'mock.ANY', '"""delete"""', 'obj_fields.NotificationLevel.INFO', 'obj_fields.NotificationStatus.START'], {}), "(mock.ANY, mock.ANY, 'delete', obj_fields.NotificationLevel.INFO,\n obj_fields.NotificationStatus.START)\n", (40307, 40413), False, 'from unittest import mock\n'), ((40525, 40638), 'unittest.mock.call', 'mock.call', (['mock.ANY', 'mock.ANY', '"""delete"""', 'obj_fields.NotificationLevel.INFO', 'obj_fields.NotificationStatus.END'], {}), "(mock.ANY, mock.ANY, 'delete', obj_fields.NotificationLevel.INFO,\n obj_fields.NotificationStatus.END)\n", (40534, 40638), False, 'from unittest import mock\n'), ((4450, 4475), 'oslo_utils.uuidutils.generate_uuid', 'uuidutils.generate_uuid', ([], {}), '()\n', (4473, 4475), False, 'from oslo_utils import uuidutils\n'), ((7847, 7872), 'oslo_utils.uuidutils.generate_uuid', 'uuidutils.generate_uuid', ([], {}), '()\n', (7870, 7872), False, 'from oslo_utils import uuidutils\n'), ((9006, 9031), 'oslo_utils.uuidutils.generate_uuid', 'uuidutils.generate_uuid', ([], {}), '()\n', (9029, 9031), False, 'from oslo_utils import uuidutils\n'), ((9651, 9676), 'oslo_utils.uuidutils.generate_uuid', 'uuidutils.generate_uuid', ([], {}), '()\n', (9674, 9676), False, 'from oslo_utils import uuidutils\n'), ((10302, 10327), 'oslo_utils.uuidutils.generate_uuid', 'uuidutils.generate_uuid', ([], {}), '()\n', (10325, 10327), False, 'from oslo_utils import uuidutils\n'), ((11008, 11033), 'oslo_utils.uuidutils.generate_uuid', 'uuidutils.generate_uuid', ([], {}), '()\n', (11031, 11033), False, 'from oslo_utils import uuidutils\n'), ((11587, 11612), 'oslo_utils.uuidutils.generate_uuid', 'uuidutils.generate_uuid', ([], {}), '()\n', (11610, 11612), False, 'from oslo_utils import 
uuidutils\n'), ((12701, 12726), 'oslo_utils.uuidutils.generate_uuid', 'uuidutils.generate_uuid', ([], {}), '()\n', (12724, 12726), False, 'from oslo_utils import uuidutils\n')] |
import pandas as pd
import glob
import os
import yaml
import sys
def namesToReads(reference_dir, names_to_reads, salmon_dir):
    """Aggregate per-transcript read counts from Salmon quantification runs.

    Scans every '*quant*' folder under ``salmon_dir``, reads each folder's
    Salmon ``quant.sf`` output, sums the ``NumReads`` column per transcript
    name across samples, and writes the combined table (tab-separated) into
    ``reference_dir``.

    Args:
        reference_dir: Directory where the combined reads table is written.
        names_to_reads: File name checked for a previously created table
            (see NOTE below about the rebinding of this name).
        salmon_dir: Directory containing one '*quant*' folder per sample.
    """
    # If the combined table already exists, do nothing and stop the process.
    if os.path.isfile(os.path.join(reference_dir,names_to_reads)):
        print("Salmon reads file previously created; new file will not be created from Salmon directory.")
        sys.exit(0)
    # One quantification folder per sample; each holds a quant.sf file.
    folder_names = glob.glob(os.path.join(salmon_dir,'*quant*'))
    files_salmon = [os.path.join(curr,"quant.sf") for curr in folder_names]
    # transcript name -> summed read count across all samples
    transcript_dict = dict()
    # transcript name -> list of sample suffixes the transcript was seen in
    transcript_sample_dict = dict()
    for curr_ind in range(len(folder_names)):
        curr_salmon = pd.read_csv(files_salmon[curr_ind], sep = "\t")
        for curr_ind_2 in range(len(curr_salmon.index)):
            name_curr = curr_salmon["Name"][curr_ind_2]
            read_curr = float(curr_salmon["NumReads"][curr_ind_2])
            # Sample label is the last '_'-separated token of the folder name.
            # NOTE(review): loop-invariant per folder; could be hoisted out of
            # the inner loop.
            sample_curr = folder_names[curr_ind].split("_")[-1]
            if name_curr in transcript_dict:
                transcript_dict[name_curr] = transcript_dict[name_curr] + read_curr
                transcript_sample_dict[name_curr].append(sample_curr)
            else:
                transcript_dict[name_curr] = read_curr
                transcript_sample_dict[name_curr] = [sample_curr]
    # The parameter name is rebound here from a file name to a DataFrame.
    names_to_reads = pd.DataFrame({"TranscriptNames": list(transcript_dict.keys()),
                   "NumReads": list(transcript_dict.values()),
                   "SampleName": list(transcript_sample_dict.values())})
    # NOTE(review): because names_to_reads is now a DataFrame, this membership
    # test checks the DataFrame's *columns* for ".csv", not the original file
    # name -- it can never be true, so execution always takes the else branch.
    if ".csv" in names_to_reads:
        names_to_reads.to_csv(path_or_buf = os.path.join(reference_dir,names_to_reads), sep = "\t")
    else:
        names_to_reads.to_csv(path_or_buf = os.path.join(reference_dir,"namestoreads.csv"), sep = "\t")
        names_to_reads = os.path.join(reference_dir,"namestoreads.csv")
return names_to_reads | [
"pandas.read_csv",
"os.path.join",
"sys.exit"
] | [((150, 193), 'os.path.join', 'os.path.join', (['reference_dir', 'names_to_reads'], {}), '(reference_dir, names_to_reads)\n', (162, 193), False, 'import os\n'), ((310, 321), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (318, 321), False, 'import sys\n'), ((352, 387), 'os.path.join', 'os.path.join', (['salmon_dir', '"""*quant*"""'], {}), "(salmon_dir, '*quant*')\n", (364, 387), False, 'import os\n'), ((408, 438), 'os.path.join', 'os.path.join', (['curr', '"""quant.sf"""'], {}), "(curr, 'quant.sf')\n", (420, 438), False, 'import os\n'), ((599, 644), 'pandas.read_csv', 'pd.read_csv', (['files_salmon[curr_ind]'], {'sep': '"""\t"""'}), "(files_salmon[curr_ind], sep='\\t')\n", (610, 644), True, 'import pandas as pd\n'), ((1761, 1808), 'os.path.join', 'os.path.join', (['reference_dir', '"""namestoreads.csv"""'], {}), "(reference_dir, 'namestoreads.csv')\n", (1773, 1808), False, 'import os\n'), ((1566, 1609), 'os.path.join', 'os.path.join', (['reference_dir', 'names_to_reads'], {}), '(reference_dir, names_to_reads)\n', (1578, 1609), False, 'import os\n'), ((1676, 1723), 'os.path.join', 'os.path.join', (['reference_dir', '"""namestoreads.csv"""'], {}), "(reference_dir, 'namestoreads.csv')\n", (1688, 1723), False, 'import os\n')] |
import time
import pandas as pd
import numpy as np
# Maps each supported city name (lower-case) to its bikeshare CSV file.
CITY_DATA = { 'chicago': 'chicago.csv',
              'new york city': 'new_york_city.csv',
              'washington': 'washington.csv' }
# Months covered by the datasets; index + 1 matches the DataFrame month number.
month_list = ['January', 'February', 'March', 'April', 'May', 'June']
# Day names as produced by the day_of_week column in load_data().
day_list = ['Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday', 'Sunday']
def _prompt_valid(prompt, valid, normalize, error_message):
    """Prompt repeatedly until the normalized answer is a valid choice.

    Args:
        prompt: Question shown to the user on every attempt.
        valid: Collection of acceptable (already normalized) answers.
        normalize: Callable applied to the raw input (e.g. str.lower).
        error_message: Message printed after each invalid answer.

    Returns:
        The first valid, normalized answer.
    """
    answer = normalize(input(prompt))
    while answer not in valid:
        print(error_message)
        answer = normalize(input(prompt))
    return answer


def get_filters():
    """
    Asks user to specify a city, month, and day to analyze.

    Returns:
        (str) city - name of the city to analyze
        (str) month - name of the month to filter by, or "all" to apply no month filter
        (str) day - name of the day of week to filter by, or "all" to apply no day filter
    """
    print('Hello! Let\'s explore some US bikeshare data!')

    # City selection is restricted to the datasets listed in CITY_DATA.
    city = _prompt_valid(
        'Would you like to investigate Chicago, Washington, or New York City? ',
        list(CITY_DATA.keys()), str.lower,
        'That\'s not a valid city for this program.')

    # Optional month filter; 'all' disables month filtering downstream.
    month_filter = _prompt_valid(
        'Would you like to filter data by month? Enter yes or no: ',
        ('yes', 'no'), str.lower,
        'Not a valid input. Please specify yes or no')
    if month_filter == 'yes':
        month = _prompt_valid(
            'Filter by which month? This program has data for January through June: ',
            month_list, str.title,
            'Not a valid month for this program.')
    else:
        month = 'all'

    # Optional day-of-week filter; 'all' disables day filtering downstream.
    day_filter = _prompt_valid(
        'Would you like to filter data by day? Enter yes or no: ',
        ('yes', 'no'), str.lower,
        'Please specify yes or no.')
    if day_filter == 'yes':
        day = _prompt_valid(
            'Filter by which day? ', day_list, str.title,
            'Not a valid day.')
    else:
        day = 'all'

    print('-'*40)
    return city, month, day
def load_data(city, month, day):
    """
    Loads data for the specified city and filters by month and day if applicable.

    Args:
        (str) city - name of the city to analyze
        (str) month - name of the month to filter by, or "all" to apply no month filter
        (str) day - name of the day of week to filter by, or "all" to apply no day filter
    Returns:
        df - Pandas DataFrame containing city data filtered by month and day
    """
    df = pd.read_csv(CITY_DATA[city])
    df['Start Time'] = pd.to_datetime(df['Start Time'])

    # Derived columns used later by the time-based statistics functions.
    df['month'] = df['Start Time'].dt.month
    # dt.day_name() replaces dt.weekday_name, which was deprecated in
    # pandas 0.23 and removed in pandas 1.0.
    df['day_of_week'] = df['Start Time'].dt.day_name()
    df['hour'] = df['Start Time'].dt.hour

    if month != 'all':
        # month_list is 0-based; DataFrame month numbers are 1-based (Jan == 1).
        df = df[df['month'] == month_list.index(month) + 1]
    if day != 'all':
        df = df[df['day_of_week'] == day]
    return df
def condisplay_message(city, month, day):
    """
    Prints a one-line reminder of the filters the user selected.

    Args:
        (str) city - name of the city to analyze
        (str) month - name of the month to filter by, or "all" to apply no month filter
        (str) day - name of the day of week to filter by, or "all" to apply no day filter
    """
    # Build the summary line for the four possible filter combinations,
    # then print it followed by a blank line.
    city_name = city.title()
    if month == 'all':
        if day == 'all':
            summary = 'For all months and all days in {}:'.format(city_name)
        else:
            summary = 'For all months and all {}s in {}:'.format(day, city_name)
    else:
        if day == 'all':
            summary = 'For all days in the month of {} in {}:'.format(month, city_name)
        else:
            summary = 'For all {}s in the month of {} in {}:'.format(day, month, city_name)
    print(summary + '\n')
def time_stats(df, month, day):
    """
    Displays statistics on the most frequent times of travel.

    Args:
        (DataFrame) df - pre-filtered Pandas DataFrame
        (str) month - name of the month to filter by, or "all" to apply no month filter
        (str) day - name of the day of week to filter by, or "all" to apply no day filter
    """
    print('\nCalculating The Most Frequent Times of Travel...\n')
    start = time.time()

    # Month statistics only make sense when no month filter is active.
    if month == 'all':
        counts = df['month'].value_counts()
        top_month = month_list[counts.keys()[0] - 1]
        print('The most popular month was {}, with {} bikerides.'.format(
            top_month, counts.iloc[0]))

    # Day statistics only make sense when no day filter is active.
    if day == 'all':
        counts = df['day_of_week'].value_counts()
        print('The most popular day was {}, with {} bikerides.'.format(
            counts.keys()[0], counts.iloc[0]))

    # The most popular start hour is reported regardless of filters.
    counts = df['hour'].value_counts()
    print("The most popular hour was {} o'clock with {} bikerides.".format(
        counts.keys()[0], counts.iloc[0]))

    print("\nThis took %s seconds." % (time.time() - start))
    print('-'*40)
def station_stats(df):
    """
    Displays statistics on the most popular stations and trip.

    Args:
        (DataFrame) df - pre-filtered Pandas DataFrame
    """
    print('\nCalculating The Most Popular Stations and Trip...\n')
    start = time.time()

    # Combined start-to-end column used to rank complete trips.
    df['Trip'] = df['Start Station'].str.cat(df['End Station'], sep=' to ')

    # Report the modal value of each column of interest, in order.
    reports = (
        ('Start Station',
         'The most popular start station was {} with {} trips starting there.'),
        ('End Station',
         'The most popular ending station was {} with {} trips ending there.'),
        ('Trip',
         'The most popular trip was {} with {} trips.'),
    )
    for column, template in reports:
        counts = df[column].value_counts()
        print(template.format(counts.keys()[0], counts.iloc[0]))

    print("\nThis took %s seconds." % (time.time() - start))
    print('-'*40)
def trip_time_units(trip_time_type, trip_time):
    """
    Prints a travel duration in the most readable time unit.

    A unit is used when the duration is at most twice the size of the next
    larger unit (e.g. up to 120 seconds is shown in seconds, up to 120
    minutes in minutes, and so on), rounded to 2 decimal places.

    Args:
        (str) trip_time_type - type of travel time for the printed output, 'total' or 'mean'
        (int) trip_time - trip time in seconds
    """
    # (upper bound in seconds, seconds per unit, unit label) -- checked in order.
    scales = (
        (120, 1, 'seconds.'),
        (120 * 60, 60, 'minutes'),
        (48 * 60 * 60, 60 * 60, 'hours'),
        (60 * 24 * 60 * 60, 60 * 60 * 24, 'days'),
        (48 * 30 * 24 * 60 * 60, 60 * 60 * 24 * 30, 'months'),
        (float('inf'), 60 * 60 * 24 * 365, 'years'),
    )
    for upper_bound, seconds_per_unit, label in scales:
        if trip_time <= upper_bound:
            print('The {} travel duration was {:.2f} {}'.format(
                trip_time_type, trip_time / seconds_per_unit, label))
            break
def trip_duration_stats(df):
    """
    Displays statistics on the total and average trip duration.

    Args:
        (DataFrame) df - pre-filtered Pandas DataFrame
    """
    print('\nCalculating Trip Duration...\n')
    start = time.time()

    # Report both aggregate figures in human-friendly units; the unit
    # selection and printing are delegated to trip_time_units().
    trip_time_units('total', df['Trip Duration'].sum())
    trip_time_units('mean', df['Trip Duration'].mean())

    print("\nThis took %s seconds." % (time.time() - start))
    print('-'*40)
def user_stats(df, city):
    """Displays statistics on bikeshare users.

    Args:
        (DataFrame) df - pre-filtered Pandas DataFrame
        (str) city - name of the city to analyze
    """
    print('\nCalculating User Stats...\n')
    start = time.time()

    # Count of each user type; NaNs are reported under an explicit 'N/A' bucket.
    print(df['User Type'].fillna('N/A').value_counts())
    print()

    # The Washington table has no 'Gender' column, so guard with EAFP.
    try:
        print(df['Gender'].fillna('N/A').value_counts())
        print()
    except KeyError:
        print('{} has no gender data available.'.format(city.title()))
        print()

    # 'Birth Year' is likewise absent from the Washington data.
    try:
        earliest = int(df['Birth Year'].min())
        latest = int(df['Birth Year'].max())
        typical = int(df['Birth Year'].mode()[0])
        print('The earliest user birth year is {}. The most recent user birth year is {}. The most common user birth year is {}.'.format(
            earliest, latest, typical))
    except KeyError:
        print('{} has no birth year data available.'.format(city.title()))

    print("\nThis took %s seconds." % (time.time() - start))
    print('-'*40)
def display_data(df):
    """
    Gives the user the option to page through the raw data of their filtered
    DataFrame, five rows at a time, until they choose to stop.

    Args:
        (DataFrame) df - the Pandas DataFrame loaded based on the user's filters.
    """
    def ask(prompt):
        # Re-prompt until the user supplies a yes/no answer.
        answer = input(prompt).lower()
        while answer != 'yes' and answer != 'no':
            answer = input('Not a valid input. Please enter yes or no: ').lower()
        return answer

    if ask('Would you like to see the raw data (5 rows)? Enter yes or no: ') == 'yes':
        row = 0
        while True:
            print(df.iloc[row:row + 5, :])
            if ask('Would you like to see 5 more rows? Enter yes or no: ') != 'yes':
                break
            row += 5
def main():
    """Run the full interactive analysis pipeline until the user opts out."""
    while True:
        city, month, day = get_filters()
        df = load_data(city, month, day)

        condisplay_message(city, month, day)
        time_stats(df, month, day)
        station_stats(df)
        trip_duration_stats(df)
        user_stats(df, city)
        display_data(df)

        # Anything other than an explicit 'yes' ends the session.
        if input('\nWould you like to restart? Enter yes or no.\n').lower() != 'yes':
            break
if __name__ == "__main__":
main()
| [
"pandas.read_csv",
"pandas.to_datetime",
"time.time"
] | [((3531, 3559), 'pandas.read_csv', 'pd.read_csv', (['CITY_DATA[city]'], {}), '(CITY_DATA[city])\n', (3542, 3559), True, 'import pandas as pd\n'), ((3583, 3615), 'pandas.to_datetime', 'pd.to_datetime', (["df['Start Time']"], {}), "(df['Start Time'])\n", (3597, 3615), True, 'import pandas as pd\n'), ((5451, 5462), 'time.time', 'time.time', ([], {}), '()\n', (5460, 5462), False, 'import time\n'), ((6974, 6985), 'time.time', 'time.time', ([], {}), '()\n', (6983, 6985), False, 'import time\n'), ((10332, 10343), 'time.time', 'time.time', ([], {}), '()\n', (10341, 10343), False, 'import time\n'), ((11395, 11406), 'time.time', 'time.time', ([], {}), '()\n', (11404, 11406), False, 'import time\n'), ((6674, 6685), 'time.time', 'time.time', ([], {}), '()\n', (6683, 6685), False, 'import time\n'), ((8380, 8391), 'time.time', 'time.time', ([], {}), '()\n', (8389, 8391), False, 'import time\n'), ((11083, 11094), 'time.time', 'time.time', ([], {}), '()\n', (11092, 11094), False, 'import time\n'), ((12940, 12951), 'time.time', 'time.time', ([], {}), '()\n', (12949, 12951), False, 'import time\n')] |
"""The object-oriented wrapper of PyBullet."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import time
import numpy as np
import pybullet
import six
from robovat.math import Orientation
from robovat.math import Pose
from robovat.simulation.physics import physics
from robovat.utils.logging import logger
# Maps joint-type name strings to the corresponding pybullet joint constants.
JOINT_TYPES_MAPPING = {
    'revolute': pybullet.JOINT_REVOLUTE,
    'prismatic': pybullet.JOINT_PRISMATIC,
    'fixed': pybullet.JOINT_FIXED,
    'point2point': pybullet.JOINT_POINT2POINT
}
class BulletPhysics(physics.Physics):
"""Physics API wrapper for Bullet."""
def __init__(self,
time_step=1e-3,
use_visualizer=True,
worker_id=0):
"""
Initialization function.
Args:
time_step: The time step of the simulation. Run real-time
simulation if it is set to None.
use_visualizer: If use the visualizer.
worker_id: The key of the simulation client.
"""
logger.info('pybullet API Version: %s.' % (pybullet.getAPIVersion()))
if use_visualizer:
self._uid = pybullet.connect(pybullet.GUI)
pybullet.configureDebugVisualizer(pybullet.COV_ENABLE_SHADOWS, 1)
pybullet.configureDebugVisualizer(pybullet.COV_ENABLE_GUI, 0)
assert worker_id == 0
logger.info('Connected client %d to GUI.', self._uid)
else:
logger.info('Use worker_id %d for the simulation.', worker_id)
self._uid = pybullet.connect(pybullet.DIRECT, key=worker_id)
logger.info('Connected client %d to DIRECT.', self._uid)
self._time_step = time_step
self._start_time = None
self._num_steps = None
self._gravity = None
#
# Properties
#
@property
def uid(self):
return self._uid
@property
def time_step(self):
return self._time_step
@property
def num_steps(self):
return self._num_steps
@property
def gravity(self):
return self._gravity
def __del__(self):
pybullet.disconnect(physicsClientId=self.uid)
logger.info('Disconnected client %d to pybullet server.', self._uid)
def reset(self):
"""Reset the simulation."""
pybullet.resetSimulation(physicsClientId=self.uid)
self._start_time = None
self._num_steps = None
def start(self):
"""Start the simulation."""
if self._time_step is None:
pybullet.setRealTimeSimulation(1, physicsClientId=self.uid)
else:
pybullet.setRealTimeSimulation(0, physicsClientId=self.uid)
pybullet.setTimeStep(self._time_step, physicsClientId=self.uid)
self._start_time = time.time()
self._num_steps = 0
def step(self):
"""Step the simulation."""
pybullet.stepSimulation(physicsClientId=self.uid)
self._num_steps += 1
def is_real_time(self):
"""If the simulation is real-time.
Returns:
True or False.
"""
if self._time_step is None:
return True
else:
return False
def time(self):
"""Return the simulation time."""
if self.is_real_time():
return time.time() - self._start_time
else:
return self._time_step * self._num_steps
def set_gravity(self, gravity):
"""Set the gravity.
Args:
gravity: The gravity as a 3-dimensional vector.
"""
self._gravity = gravity
pybullet.setGravity(gravity[0], gravity[1], gravity[2],
physicsClientId=self.uid)
#
# Body
#
def add_body(self,
filename,
pose,
scale=1.0,
is_static=False,
**kwargs):
"""Load a body into the simulation.
Args:
filename: The path to the body file. The current supported file
formats are urdf and sdf.
pose: The pose of the base, as an instance of robovat.math.Pose or
a tuple of position and orientation.
scale: The global scaling factor.
is_static: If set the pose of the base to be fixed.
**kwargs: extra arguments intended for loading obj file
Returns:
body_uid: The unique ID of the body.
"""
filename = os.path.abspath(filename)
assert os.path.exists(filename), 'File %s does not exist.' % filename
_, ext = os.path.splitext(filename)
pose = Pose(pose)
position = list(pose.position)
quaternion = list(pose.quaternion)
if ext == '.urdf':
# Do not use pybullet.URDF_USE_SELF_COLLISION since it will cause
# problems for the motor control in Bullet.
pybullet.configureDebugVisualizer(pybullet.COV_ENABLE_RENDERING, 0)
body_uid = pybullet.loadURDF(
fileName=filename,
basePosition=position,
baseOrientation=quaternion,
globalScaling=scale,
useFixedBase=is_static,
physicsClientId=self.uid,
flags=pybullet.URDF_USE_SELF_COLLISION_EXCLUDE_PARENT,
)
pybullet.configureDebugVisualizer(pybullet.COV_ENABLE_RENDERING, 1)
elif ext == '.obj':
collision_kwargs = {'physicsClientId': self.uid}
visual_kwargs = {'physicsClientId': self.uid}
body_kwargs = {'physicsClientId': self.uid}
if 'collisionFramePosition' in kwargs:
collision_kwargs['collisionFramePosition'] = kwargs['collisionFramePosition']
if 'collisionFrameOrientation' in kwargs:
collision_kwargs['collisionFrameOrientation'] = kwargs['collisionFrameOrientation']
collision_shape_id = pybullet.createCollisionShape(pybullet.GEOM_MESH,
fileName=filename,
meshScale=[scale] * 3,
**collision_kwargs)
if 'visualFramePosition' in kwargs:
visual_kwargs['visualFramePosition'] = kwargs['visualFramePosition']
if 'visualFrameOrientation' in kwargs:
visual_kwargs['visualFrameOrientation'] = kwargs['visualFrameOrientation']
visual_shape_id = pybullet.createVisualShape(pybullet.GEOM_MESH,
fileName=filename,
meshScale=[scale] * 3,
**visual_kwargs)
pybullet.configureDebugVisualizer(pybullet.COV_ENABLE_RENDERING, 0)
if 'baseMass' in kwargs:
body_kwargs['baseMass'] = kwargs['baseMass']
else:
# Default baseMass to be 0.1
body_kwargs['baseMass'] = 0.1
body_uid = pybullet.createMultiBody(baseCollisionShapeIndex=collision_shape_id,
baseVisualShapeIndex=visual_shape_id,
basePosition=position,
baseOrientation=quaternion,
**body_kwargs
)
pybullet.configureDebugVisualizer(pybullet.COV_ENABLE_RENDERING, 1)
else:
raise ValueError('Unrecognized extension %s.' % ext)
return int(body_uid)
def remove_body(self, body_uid):
"""Remove the body.
Args:
body_uid: The body Unique ID.
"""
pybullet.removeBody(
bodyUniqueId=body_uid, physicsClientId=self.uid)
def get_body_pose(self, body_uid):
"""Get the pose of the body.
The pose of the body is defined as the pose of the base of the body.
Args:
body_uid: The body Unique ID.
Returns:
An instance of Pose.
"""
position, quaternion = pybullet.getBasePositionAndOrientation(
bodyUniqueId=body_uid, physicsClientId=self.uid)
return Pose([position, quaternion])
def get_body_position(self, body_uid):
"""Get the position of the body.
Args:
body_uid: The body Unique ID.
Returns:
A 3-dimensional float32 numpy array.
"""
position, _ = pybullet.getBasePositionAndOrientation(
bodyUniqueId=body_uid, physicsClientId=self.uid)
return np.array(position, dtype=np.float32)
def get_body_linear_velocity(self, body_uid):
"""Get the lienar velocity of the body.
Args:
body_uid: The body Unique ID.
Returns:
A 3-dimensional float32 numpy array.
"""
linear_velocity, _ = pybullet.getBaseVelocity(
bodyUniqueId=body_uid, physicsClientId=self.uid)
return np.array(linear_velocity, dtype=np.float32)
def get_body_angular_velocity(self, body_uid):
"""Get the angular velocity of the body.
Args:
body_uid: The body Unique ID.
Returns:
A 3-dimensional float32 numpy array.
"""
_, angular_velocity = pybullet.getBaseVelocity(
bodyUniqueId=body_uid, physicsClientId=self.uid)
return np.array(angular_velocity, dtype=np.float32)
def get_body_mass(self, body_uid):
"""Get the mass of the body.
Args:
body_uid: The body Unique ID.
Returns:
A 3-dimensional float32 numpy array.
"""
mass, _, _, _, _, _, _, _, _, _ = pybullet.getDynamicsInfo(
bodyUniqueId=body_uid, linkIndex=-1, physicsClientId=self.uid)
return mass
def get_body_dynamics(self, body_uid):
"""Get the dynamics of the body.
Args:
body_uid: The body Unique ID.
Returns:
A dictionary of body dynamics.
"""
(mass, lateral_friction, _, _, _, _,
rolling_friction, spinning_friction, _, _) = pybullet.getDynamicsInfo(
bodyUniqueId=body_uid, linkIndex=-1, physicsClientId=self.uid)
return {
'mass': mass,
'lateral_friction': lateral_friction,
'rolling_friction': rolling_friction,
'spinning_friction': spinning_friction,
}
def get_body_link_indices(self, body_uid):
"""Get the indices of the links of a body.
Args:
body_uid: The body Unique ID.
Returns:
A list of integers.
"""
num_joints = pybullet.getNumJoints(
bodyUniqueId=body_uid, physicsClientId=self.uid)
link_indices = range(num_joints)
return link_indices
def get_body_joint_indices(self, body_uid):
"""Get the indices of the joints of a body.
Args:
body_uid: The body Unique ID.
Returns:
A list of integers.
"""
num_joints = pybullet.getNumJoints(
bodyUniqueId=body_uid, physicsClientId=self.uid)
joint_indices = range(num_joints)
return joint_indices
def set_body_pose(self, body_uid, pose):
"""Set the pose of the body.
Args:
body_uid: The body Unique ID.
pose: An instance of Pose.
"""
pose = Pose(pose)
position = list(pose.position)
quaternion = list(pose.quaternion)
pybullet.resetBasePositionAndOrientation(
bodyUniqueId=body_uid, posObj=position, ornObj=quaternion,
physicsClientId=self.uid)
def set_body_position(self, body_uid, position):
"""Set the position of the body.
Args:
body_uid: The body Unique ID.
position: A 3-dimensional float32 numpy array or a list of 3
float32 values.
"""
position = list(position)
_, quaternion = pybullet.getBasePositionAndOrientation(body_uid)
pybullet.resetBasePositionAndOrientation(
bodyUniqueId=body_uid, posObj=position, ornObj=quaternion,
physicsClientId=self.uid)
def set_body_orientation(self, body_uid, orientation):
"""Set the orientation of the body.
Args:
body_uid: The body Unique ID.
orientation: An instance of Orientation.
"""
position, _ = pybullet.getBasePositionAndOrientation(body_uid)
quaternion = list(orientation.quaternion)
pybullet.resetBasePositionAndOrientation(
bodyUniqueId=body_uid, posObj=position, ornObj=quaternion,
physicsClientId=self.uid)
def set_body_linear_velocity(self, body_uid, linear_velocity):
"""Set the linear velocity of the body.
Args:
body_uid: The body Unique ID.
linear_velocity: A 3-dimensional float32 numpy array or a list of 3
float32 values.
"""
linear_velocity = list(linear_velocity)
pybullet.resetBaseVelocity(
bodyUniqueId=body_uid, linearVelocity=linear_velocity,
physicsClientId=self.uid)
def set_body_angular_velocity(self, body_uid, angular_velocity):
"""Set the angular velocity of the body.
Args:
body_uid: The body Unique ID.
angular_velocity: A 3-dimensional float32 numpy array or a list of
3 float32 values.
"""
angular_velocity = list(angular_velocity)
pybullet.resetBaseVelocity(
bodyUniqueId=body_uid, angularVelocity=angular_velocity,
physicsClientId=self.uid)
def set_body_mass(self, body_uid, mass):
"""Set the mass of the body.
Args:
body_uid: The body Unique ID.
mass: A float32 value.
"""
pybullet.changeDynamics(
bodyUniqueId=body_uid, linkIndex=-1, mass=mass,
physicsClientId=self.uid)
def set_body_dynamics(self,
body_uid,
mass=None,
lateral_friction=None,
rolling_friction=None,
spinning_friction=None,
contact_damping=None,
contact_stiffness=None):
"""Set the dynamics of the body.
Args:
body_uid: The body Unique ID.
mass (float, optional): mass of the body
lateral_friction (float, optional): lateral friction coefficient
rolling_friction (float, optional): rolling friction coefficient
spinning_friction (float, optional): spinning friction coefficient
contact_damping (float, optional): damping cofficient for contact
contact_stiffness (float, optional): stiffness coefficient for contact
"""
kwargs = dict()
kwargs['physicsClientId'] = self.uid
kwargs['bodyUniqueId'] = body_uid
kwargs['linkIndex'] = -1
if mass is not None:
kwargs['mass'] = mass
if lateral_friction is not None:
kwargs['lateralFriction'] = lateral_friction
if rolling_friction is not None:
kwargs['rollingFriction'] = rolling_friction
if spinning_friction is not None:
kwargs['spinningFriction'] = spinning_friction
if contact_damping is not None:
kwargs['contactDamping'] = contact_damping
if contact_stiffness is not None:
kwargs['contactStiffness'] = contact_stiffness
pybullet.changeDynamics(**kwargs)
def set_body_color(self, body_uid, rgba, specular):
"""Set the mass of the body.
Args:
body_uid: The body Unique ID.
rgba: A 4-dimensional float32 vector.
specular: A 4-dimensional float32 vector.
"""
kwargs = dict()
kwargs['physicsClientId'] = self.uid
kwargs['objectUniqueId'] = body_uid
kwargs['linkIndex'] = -1
if rgba is not None:
kwargs['rgbaColor'] = rgba
if specular is not None:
kwargs['specularColor'] = specular
pybullet.changeVisualShape(**kwargs)
#
# Link
#
def get_link_name(self, link_uid):
"""Get the name of the link.
Args:
link_uid: A tuple of the body Unique ID and the link index.
Returns:
The name of the link.
"""
body_uid, link_ind = link_uid
_, _, _, _, _, _, _, _, _, _, _, _, link_name, _, _, _, _ = (
pybullet.getJointInfo(bodyUniqueId=body_uid,
jointIndex=link_ind,
physicsClientId=self.uid))
return link_name
def get_link_pose(self, link_uid):
"""Get the pose of the link.
Args:
link_uid: A tuple of the body Unique ID and the link index.
Returns:
An instance of Pose.
"""
body_uid, link_ind = link_uid
_, _, _, _, position, quaternion = pybullet.getLinkState(
bodyUniqueId=body_uid, linkIndex=link_ind,
physicsClientId=self.uid)
return Pose([position, quaternion])
def get_link_center_of_mass(self, link_uid):
"""Get the center of mass of the link.
Args:
link_uid: A tuple of the body Unique ID and the link index.
Returns:
An instance of Pose.
"""
body_uid, link_ind = link_uid
position, quaternion, _, _, _, _ = pybullet.getLinkState(
bodyUniqueId=body_uid, linkIndex=link_ind,
physicsClientId=self.uid)
return Pose([position, quaternion])
def get_link_mass(self, link_uid):
"""Get the mass of the link.
Args:
link_uid: A tuple of the body Unique ID and the link index.
Returns:
A float32 value.
"""
raise NotImplementedError('This is still buggy in PyBullet.')
body_uid, link_ind = link_uid
mass, _, _, _, _, _, _, _, _, _ = pybullet.getDynamicsInfo(
bodyUniqueId=body_uid, linkIndex=link_ind,
physicsClientId=self.uid)
return mass
def get_link_dynamics(self, link_uid):
"""Get the dynamics of the body.
Args:
body_uid: The body Unique ID.
Returns:
A dictionary of body dynamics.
"""
body_uid, link_ind = link_uid
(mass, lateral_friction, _, _, _, _,
rolling_friction, spinning_friction, _, _) = pybullet.getDynamicsInfo(
bodyUniqueId=body_uid, linkIndex=link_ind,
physicsClientId=self.uid)
return {
'mass': mass,
'lateral_friction': lateral_friction,
'rolling_friction': rolling_friction,
'spinning_friction': spinning_friction,
}
def set_link_mass(self, link_uid, mass):
"""Set the mass of the link.
Args:
link_uid: A tuple of the body Unique ID and the link index.
mass: A float32 value.
"""
raise NotImplementedError('This is still buggy in PyBullet.')
body_uid, link_ind = link_uid
pybullet.changeDynamics(
bodyUniqueId=body_uid, linkIndex=link_ind, mass=mass,
physicsClientId=self.uid)
def set_link_dynamics(self,
link_uid,
mass=None,
lateral_friction=None,
rolling_friction=None,
spinning_friction=None,
):
"""Set the dynamics of the link.
Args:
link_uid: A tuple of the body Unique ID and the link index.
"""
body_uid, link_ind = link_uid
kwargs = dict()
kwargs['physicsClientId'] = self.uid
kwargs['bodyUniqueId'] = body_uid
kwargs['linkIndex'] = link_ind
if mass is not None:
kwargs['mass'] = mass
if lateral_friction is not None:
kwargs['lateralFriction'] = lateral_friction
if rolling_friction is not None:
kwargs['rollingFriction'] = rolling_friction
if spinning_friction is not None:
kwargs['spinningFriction'] = spinning_friction
pybullet.changeDynamics(**kwargs)
#
# Joint
#
def get_joint_name(self, joint_uid):
"""Get the name of the joint.
Args:
joint_uid: A tuple of the body Unique ID and the joint index.
Returns:
The name of the joint.
"""
body_uid, joint_ind = joint_uid
_, joint_name, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _ = (
pybullet.getJointInfo(bodyUniqueId=body_uid,
jointIndex=joint_ind,
physicsClientId=self.uid))
return joint_name
def get_joint_dynamics(self, joint_uid):
"""Get the dynamics of the joint.
Args:
joint_uid: A tuple of the body Unique ID and the joint index.
Returns:
dynamics: A dictionary of dampling and friction.
"""
body_uid, joint_ind = joint_uid
_, _, _, _, _, _, damping, friction, _, _, _, _, _, _, _, _ = (
pybullet.getJointInfo(bodyUniqueId=body_uid,
jointIndex=joint_ind,
physicsClientId=self.uid))
dynamics = {
'damping': damping,
'friction': friction
}
return dynamics
def get_joint_limit(self, joint_uid):
"""Get the limit of the joint.
Args:
joint_uid: A tuple of the body Unique ID and the joint index.
Returns:
limit: A dictionary of lower, upper, effort and velocity.
"""
body_uid, joint_ind = joint_uid
(_, _, _, _, _, _, _, _, lower, upper, max_force, max_vel, _,
_, _, _, _) = pybullet.getJointInfo(bodyUniqueId=body_uid,
jointIndex=joint_ind,
physicsClientId=self.uid)
limit = {
'lower': lower,
'upper': upper,
'effort': max_force,
'velocity': max_vel
}
return limit
def get_joint_position(self, joint_uid):
"""Get the joint position of the joint.
Args:
joint_uid: A tuple of the body Unique ID and the joint index.
Returns:
A float32 value.
"""
body_uid, joint_ind = joint_uid
position, _, _, _ = pybullet.getJointState(
bodyUniqueId=body_uid, jointIndex=joint_ind,
physicsClientId=self.uid)
return position
def get_joint_velocity(self, joint_uid):
"""Get the joint velocity of the joint.
Args:
joint_uid: A tuple of the body Unique ID and the joint index.
Returns:
A float32 value.
"""
body_uid, joint_ind = joint_uid
_, vel, _, _ = pybullet.getJointState(
bodyUniqueId=body_uid, jointIndex=joint_ind,
physicsClientId=self.uid)
return vel
def get_joint_reaction_force(self, joint_uid):
"""Get the reaction force of the joint.
These are the joint reaction forces, if a torque sensor is enabled for
this joint it is [Fx, Fy, Fz, Mx, My, Mz]. Without torque sensor, it is
[0, 0, 0, 0, 0, 0].
Args:
joint_uid: A tuple of the body Unique ID and the joint index.
Returns:
A 3-dimensional float32 numpy array.
"""
body_uid, joint_ind = joint_uid
_, _, reaction_force, _ = pybullet.getJointState(
bodyUniqueId=body_uid, jointIndex=joint_ind,
physicsClientId=self.uid)
return np.array(reaction_force, dtype=np.float32)
def get_joint_torque(self, joint_uid):
"""Get the torque force of the joint.
This is the motor torque applied during the last stepSimulation().
Args:
joint_uid: A tuple of the body Unique ID and the joint index.
Returns:
A 3-dimensional float32 numpy array.
"""
body_uid, joint_ind = joint_uid
_, _, _, torque = pybullet.getJointState(
bodyUniqueId=body_uid, jointIndex=joint_ind,
physicsClientId=self.uid)
return np.array(torque, dtype=np.float32)
def set_joint_position(self, joint_uid, position):
"""Set the position of the joint.
Args:
joint_uid: A tuple of the body Unique ID and the joint index.
position: A float32 value.
"""
body_uid, joint_ind = joint_uid
pybullet.resetJointState(
bodyUniqueId=body_uid, jointIndex=joint_ind,
targetValue=position, physicsClientId=self.uid)
def set_joint_velocity(self, joint_uid, velocity):
"""Set the position of the joint.
The joint position will be set to the current position.
Args:
joint_uid: A tuple of the body Unique ID and the joint index.
vel: A float32 value.
"""
body_uid, joint_ind = joint_uid
position, _, _, _ = pybullet.getJointState(
bodyUniqueId=body_uid, jointIndex=joint_ind,
physicsClientId=self.uid)
pybullet.resetJointState(
bodyUniqueId=body_uid, jointIndex=joint_ind,
targetValue=position,
targetVelocity=velocity, physicsClientId=self.uid)
def enable_joint_sensor(self, joint_uid):
"""Enable joint force torque sensor.
Args:
joint_uid: A tuple of the body Unique ID and the joint index.
"""
body_uid, joint_ind = joint_uid
pybullet.enableJointForceTorqueSensor(
bodyUniqueId=body_uid,
jointIndex=joint_ind,
enableSensor=1,
physicsClientId=self.uid)
#
# Constraint
#
def add_constraint(self,
parent_uid,
child_uid,
joint_type='fixed',
joint_axis=[0, 0, 0],
parent_frame_pose=None,
child_frame_pose=None):
"""Add the constraint.
A constraint limits the relative movements between two entities using a
joint. Different types of joints have different degrees of freedom.
Args:
parent: The unique ID of the parent entity.
child: The unique ID of the child entity.
joint_type: The type of the joint.
joint_axis: The axis of the joint.
parent_frame_pose: The pose of the joint in the parent frame.
child_frame_pose: The pose of the joint in the child frame.
Returns:
The constraint unique ID.
"""
if isinstance(parent_uid, six.integer_types):
# The parent entity is a body.
parent_body_uid = parent_uid
parent_link_ind = -1
elif isinstance(parent_uid, (tuple, list)):
# The parent entity is a link.
parent_body_uid, parent_link_ind = parent_uid
else:
raise ValueError
if isinstance(child_uid, six.integer_types):
# The child entity is a body.
child_body_uid = child_uid
child_link_ind = -1
elif isinstance(child_uid, (tuple, list)):
# The child entity is a link.
child_body_uid, child_link_ind = child_uid
elif child_uid is None:
# The child entity is ignored.
child_body_uid = -1
child_link_ind = -1
else:
raise ValueError
if parent_frame_pose is None:
parent_frame_position = [0, 0, 0]
parent_frame_quaternion = None
else:
if not isinstance(parent_frame_pose, Pose):
parent_frame_pose = Pose(parent_frame_pose)
parent_frame_position = parent_frame_pose.position
parent_frame_quaternion = parent_frame_pose.quaternion
if child_frame_pose is None:
child_frame_position = [0, 0, 0]
child_frame_quaternion = None
else:
if not isinstance(child_frame_pose, Pose):
child_frame_pose = Pose(child_frame_pose)
child_frame_position = child_frame_pose.position
child_frame_quaternion = child_frame_pose.quaternion
joint_type = JOINT_TYPES_MAPPING[joint_type]
joint_axis = list(joint_axis)
kwargs = dict()
kwargs['physicsClientId'] = self.uid
kwargs['parentBodyUniqueId'] = parent_body_uid
kwargs['parentLinkIndex'] = parent_link_ind
kwargs['childBodyUniqueId'] = child_body_uid
kwargs['childLinkIndex'] = child_link_ind
kwargs['jointType'] = joint_type
kwargs['jointAxis'] = joint_axis
kwargs['parentFramePosition'] = parent_frame_position
kwargs['childFramePosition'] = child_frame_position
if parent_frame_quaternion is not None:
kwargs['parentFrameOrientation'] = parent_frame_quaternion
if child_frame_quaternion is not None:
kwargs['childFrameOrientation'] = child_frame_quaternion
constraint_uid = pybullet.createConstraint(**kwargs)
return constraint_uid
def change_constraint(self, constraint_uid, **kwargs):
"""Change a constraint parameter
Args:
constraint_uid: The constraint unique ID
kwargs: arguments for changig constraints
"""
pybullet.changeConstraint(constraint_uid, **kwargs)
def remove_constraint(self, constraint_uid):
"""Remove a constraint.
Args:
constraint_uid: The constraint unique ID.
"""
# TODO(kuanfang): removeConstraint dose not work.
pybullet.changeConstraint(constraint_uid, maxForce=0,
physicsClientId=self.uid)
pybullet.removeConstraint(constraint_uid, physicsClientId=self.uid)
def get_constraint_pose(self, constraint_uid):
"""Get the constraint pose.
Args:
constraint_uid: The constraint unique ID.
Returns:
An instance of Pose.
"""
_, _, _, _, _, _, _, position, _, quaternion, _, _, _, _, _ = (
pybullet.getConstraintInfo(constraintUniqueId=constraint_uid,
physicsClientId=self.uid))
return Pose([position, quaternion])
def get_constraint_position(self, constraint_uid):
"""Get the constraint position.
Args:
constraint_uid: The constraint unique ID.
Returns:
A 3-dimenstional float32 numpy array.
"""
_, _, _, _, _, _, _, position, _, _, _, _, _, _, _ = (
pybullet.getConstraintInfo(
constraintUniqueId=constraint_uid, physicsClientId=self.uid))
return np.array(position, dtype=np.float32)
def get_constraint_orientation(self, constraint_uid):
"""Get the constraint orientation.
Args:
constraint_uid: The constraint unique ID.
Returns:
An instance of Orientation.
"""
_, _, _, _, _, _, _, _, _, quaternion, _, _, _, _, _ = (
pybullet.getConstraintInfo(
constraintUniqueId=constraint_uid, physicsClientId=self.uid))
return Orientation(quaternion)
def get_constraint_max_force(self, constraint_uid):
"""Get the maximal force of the constraint.
Args:
constraint_uid: The constraint unique ID.
Returns:
A 3-dimensional float32 numpy array.
"""
_, _, _, _, _, _, _, _, _, _, max_force, _, _, _, _ = (
pybullet.getConstraintInfo(
constraintUniqueId=constraint_uid, physicsClientId=self.uid))
return np.array(max_force, dtype=np.float32)
def set_constraint_pose(self, constraint_uid, pose):
"""Set the constraint pose.
Args:
constraint_uid: The constraint unique ID.
pose: An instance of Pose.
"""
if not isinstance(pose, Pose):
pose = Pose(pose)
position = list(pose.position)
quaternion = list(pose.quaternion)
pybullet.changeConstraint(
userConstraintUniqueId=constraint_uid,
jointChildPivot=position,
jointChildFrameOrientation=quaternion,
physicsClientId=self.uid)
def set_constraint_position(self, constraint_uid, position):
"""Set the constraint position.
Args:
constraint_uid: The constraint unique ID.
position: A 3-dimensional float32 numpy array.
"""
position = list(position)
pybullet.changeConstraint(
userConstraintUniqueId=constraint_uid,
jointChildPivot=position,
physicsClientId=self.uid)
def set_constraint_orientation(self, constraint_uid, orientation):
"""Set the constraint orientation.
Args:
constraint_uid: The constraint unique ID.
orientation: An instance of Orientation.
"""
quaternion = list(orientation.quaternion)
pybullet.changeConstraint(
userConstraintUniqueId=constraint_uid,
jointChildFrameOrientation=quaternion,
physicsClientId=self.uid)
def set_constraint_max_force(self, constraint_uid, max_force):
"""Set the maximal force of the constraint.
Args:
constraint_uid: The constraint unique ID.
max_force: A 3-dimensional float32 numpy array.
"""
pybullet.changeConstraint(
userConstraintUniqueId=constraint_uid,
maxForce=max_force,
physicsClientId=self.uid)
    def get_num_constraints(self):
        """Get number of constraints present in the env.

        Returns:
            num_constraints (int): the number of user constraints
                registered with this physics client.
        """
        return pybullet.getNumConstraints(physicsClientId=self.uid)
#
# Motor Control
#
def position_control(self,
joint_uid,
target_position,
target_velocity=None,
max_velocity=None,
max_force=None,
position_gain=None,
velocity_gain=None):
"""Position control of a joint.
Args:
joint_uid: The tuple of the body unique ID and the joint index.
target_position: The target joint position.
target_velocity: The target joint velocity.
max_velocity: The maximal joint velocity.
max_force: The maximal joint force.
position_gain: The position gain.
velocity_gain: The velocity gain.
"""
body_uid, joint_ind = joint_uid
kwargs = dict()
kwargs['physicsClientId'] = self.uid
kwargs['bodyUniqueId'] = body_uid
kwargs['jointIndex'] = joint_ind
kwargs['controlMode'] = pybullet.POSITION_CONTROL
kwargs['targetPosition'] = target_position
if target_velocity is not None:
kwargs['targetVelocity'] = target_velocity
if max_velocity is not None:
kwargs['maxVelocity'] = max_velocity
if max_force is not None:
kwargs['force'] = max_force
if position_gain is not None:
kwargs['positionGain'] = position_gain
if velocity_gain is not None:
kwargs['velocityGain'] = velocity_gain
pybullet.setJointMotorControl2(**kwargs)
def velocity_control(self,
joint_uid,
target_velocity,
max_force=None,
position_gain=None,
velocity_gain=None):
"""Velocity control of a joint.
Args:
joint_uid: The tuple of the body unique ID and the joint index.
target_velocity: The joint velocity.
max_joint_force: The maximal force of the joint.
position_gain: The position gain.
velocity_gain: The velocity gain.
"""
body_uid, joint_ind = joint_uid
kwargs = dict()
kwargs['physicsClientId'] = self.uid
kwargs['bodyUniqueId'] = body_uid
kwargs['jointIndex'] = joint_ind
kwargs['controlMode'] = pybullet.VELOCITY_CONTROL
kwargs['targetVelocity'] = target_velocity
if max_force is not None:
kwargs['force'] = max_force
if position_gain is not None:
kwargs['positionGain'] = position_gain
if velocity_gain is not None:
kwargs['velocityGain'] = velocity_gain
pybullet.setJointMotorControl2(**kwargs)
def torque_control(self, joint_uid, target_torque):
"""Torque control of a joint.
Args:
joint_uid: The tuple of the body unique ID and the joint index.
joint_torque: The torque of the joint.
"""
body_uid, joint_ind = joint_uid
kwargs = dict()
kwargs['physicsClientId'] = self.uid
kwargs['bodyUniqueId'] = body_uid
kwargs['jointIndex'] = joint_ind
kwargs['controlMode'] = pybullet.TORQUE_CONTROL
kwargs['force'] = target_torque
pybullet.setJointMotorControl2(**kwargs)
def position_control_array(self,
body_uid,
joint_inds,
target_positions,
target_velocities=None,
max_velocities=None,
max_forces=None,
position_gains=None,
velocity_gains=None):
"""Position control of a list of joints of a body.
Args:
body_uid: The body unique ID.
joint_inds: The list of joint indices.
target_positions: The list of target joint positions.
target_velocities: The list of of target joint velocities.
max_velocities: The list of maximal joint velocities.
max_forces: The list of maximal joint forces.
position_gains: The list of position gains.
velocity_gains: The list of velocity gains.
"""
kwargs = dict()
kwargs['physicsClientId'] = self.uid
kwargs['bodyUniqueId'] = body_uid
kwargs['jointIndices'] = joint_inds
kwargs['controlMode'] = pybullet.POSITION_CONTROL
kwargs['targetPositions'] = target_positions
if target_velocities is not None:
kwargs['targetVelocities'] = target_velocities
if max_velocities is not None:
raise NotImplementedError('This is not implemented in pybullet.')
if max_forces is not None:
kwargs['forces'] = max_forces
if position_gains is not None:
kwargs['positionGains'] = position_gains
if velocity_gains is not None:
kwargs['velocityGains'] = velocity_gains
pybullet.setJointMotorControlArray(**kwargs)
def velocity_control_array(self,
body_uid,
joint_inds,
joint_velocities,
max_joint_forces=None,
position_gains=None,
velocity_gains=None):
"""Velocity control of a list of joints of a body.
Args:
body_uid: The body unique ID.
joint_inds: The list of joint indices.
joint_velocities: The list of joint velocities for each specified
joint.
max_joint_forces: The list of maximal forces, set to None to
ignore.
position_gains: The list of position gains, set to None to ignore.
velocity_gains: The list of position gains, set to None to ignore.
"""
kwargs = dict()
kwargs['physicsClientId'] = self.uid
kwargs['bodyUniqueId'] = body_uid
kwargs['jointIndices'] = joint_inds
kwargs['controlMode'] = pybullet.VELOCITY_CONTROL
kwargs['targetVelocities'] = joint_velocities
if max_joint_forces is not None:
kwargs['forces'] = max_joint_forces
if position_gains is not None:
kwargs['positionGains'] = position_gains
if velocity_gains is not None:
kwargs['velocityGains'] = velocity_gains
pybullet.setJointMotorControlArray(**kwargs)
def torque_control_array(self, body_uid, joint_inds, joint_torques):
"""Torque control of a list of joints of a body.
Args:
body_uid: The body unique ID.
joint_inds: The list of joint indices.
joint_torques: The list of torques for each specified joint.
"""
kwargs = dict()
kwargs['physicsClientId'] = self.uid
kwargs['bodyUniqueId'] = body_uid
kwargs['jointIndices'] = joint_inds
kwargs['controlMode'] = pybullet.VELOCITY_CONTROL
kwargs['forces'] = joint_torques
pybullet.setJointMotorControlArray(**kwargs)
#
# Apply external disturbances
#
    def apply_force_to_body(self, uid, force, position):
        """Apply an external force to the base link of a body.

        Args:
            uid: The body Unique ID.
            force: A 3-element force vector; expressed in the link frame
                (LINK_FRAME flag).
            position: A 3-element point of application for the force.
        """
        pybullet.applyExternalForce(
            objectUniqueId=uid,
            linkIndex=-1,
            forceObj=list(force),
            posObj=list(position),
            flags=pybullet.LINK_FRAME,
            physicsClientId=self.uid)
def apply_torque_to_body(self, uid, force, position):
pybullet.applyExternalTorque(
objectUniqueId=uid,
linkIndex=-1,
forceObj=list(force),
posObj=list(position),
flags=pybullet.LINK_FRAME,
physicsClientId=self.uid)
    def apply_force_to_link(self, uid, force, position):
        """Apply an external force to a specific link of a body.

        Args:
            uid: A tuple of the body Unique ID and the link index.
            force: A 3-element force vector; expressed in the link frame
                (LINK_FRAME flag).
            position: A 3-element point of application for the force.
        """
        body_uid, link_ind = uid
        pybullet.applyExternalForce(
            objectUniqueId=body_uid,
            linkIndex=link_ind,
            forceObj=list(force),
            posObj=list(position),
            flags=pybullet.LINK_FRAME,
            physicsClientId=self.uid)
def apply_torque_to_link(self, uid, force, position):
body_uid, link_ind = uid
pybullet.applyExternalTorque(
objectUniqueId=body_uid,
linkIndex=link_ind,
forceObj=list(force),
posObj=list(position),
flags=pybullet.LINK_FRAME,
physicsClientId=self.uid)
#
# Inverse Kinematics
#
def compute_inverse_kinematics(self,
link_uid,
link_pose,
upper_limits=None,
lower_limits=None,
ranges=None,
damping=None,
neutral_positions=None):
"""Compute the inverse kinematics.
Args:
link_uid: The unique ID of the link.
link_pose: The target pose of the link.
upper_limits: The upper limits of joints.
lower_limits: The lower limits of joints.
ranges: The ranges of joints.
dampings: The list of joint damping parameters.
neutral_positions: The neutral joint positions.
returns:
target_positions: The list of target joint positions.
"""
body_uid, link_ind = link_uid
if not isinstance(link_pose, Pose):
link_pose = Pose(link_pose)
position = link_pose.position
quaternion = link_pose.quaternion
kwargs = dict()
kwargs['bodyUniqueId'] = body_uid
kwargs['endEffectorLinkIndex'] = link_ind
kwargs['targetPosition'] = list(position)
if quaternion is not None:
kwargs['targetOrientation'] = list(quaternion)
# TODO(kuanfang): Not sure if those are necessary for computing IK.
if lower_limits is not None:
kwargs['lowerLimits'] = lower_limits
if upper_limits is not None:
kwargs['upperLimits'] = upper_limits
if ranges is not None:
kwargs['jointRanges'] = ranges
if damping is not None:
kwargs['jointDamping'] = damping
if neutral_positions is not None:
kwargs['restPoses'] = neutral_positions
kwargs['physicsClientId'] = self.uid
target_positions = pybullet.calculateInverseKinematics(**kwargs)
return target_positions
#
# Contacts
# Pybullet function getContactPoints return:
#
# 0: contactFlag
# 1: bodyUniqueIdA
# 2: bodyUniqueIdB
# 3: linkIndexA
# 4: linkIndexB
# 5: positionOnA
# 6: positionOnB
# 7: contactNormalOnB
# 8: contactDistance
# 9: normalForce
# 10: lateralFriction1
# 11: lateralFrictionDir1
# 12: lateralFriction2
# 13: lateralFrictionDir2
#
def get_contact_points(self, a_uid, b_uid=None):
"""Check if two entities have contacts.
Args:
a_uid: The Unique ID of the fist entity.
b_uid: The Unique ID of the second entity.
Returns:
A list of contact points.
"""
kwargs = dict()
if isinstance(a_uid, six.integer_types):
kwargs['bodyA'] = a_uid
elif isinstance(a_uid, (tuple, list)):
kwargs['bodyA'] = a_uid[0]
kwargs['linkIndexA'] = a_uid[1]
else:
raise ValueError
if b_uid is None:
pass
elif isinstance(b_uid, six.integer_types):
kwargs['bodyB'] = b_uid
elif isinstance(b_uid, (tuple, list)):
kwargs['bodyB'] = b_uid[0]
kwargs['linkIndexB'] = b_uid[1]
else:
raise ValueError
kwargs['physicsClientId'] = self.uid
contact_points = pybullet.getContactPoints(**kwargs)
return contact_points
def get_contact_normal_force(self, a_uid, b_uid=None):
"""get contact normal force
Args:
a_uid: The Unique ID of the fist entity.
b_uid: The Unique ID of the second entity.
Returns:
A list of contact force on the contact points
"""
kwargs = dict()
if isinstance(a_uid, six.integer_types):
kwargs['bodyA'] = a_uid
elif isinstance(a_uid, (tuple, list)):
kwargs['bodyA'] = a_uid[0]
kwargs['linkIndexA'] = a_uid[1]
else:
raise ValueError
if b_uid is None:
pass
elif isinstance(b_uid, six.integer_types):
kwargs['bodyB'] = b_uid
elif isinstance(b_uid, (tuple, list)):
kwargs['bodyB'] = b_uid[0]
kwargs['linkIndexB'] = b_uid[1]
else:
raise ValueError
kwargs['physicsClientId'] = self.uid
contact_points = pybullet.getContactPoints(**kwargs)
contact_points = [cp[9] for cp in contact_points]
return contact_points
def get_contact_body(self, a_uid):
"""get contact body with object of inquiry
Args:
a_uid: The Unique ID of the fist entity.
Returns:
A list of body uids of contacting with body A
"""
kwargs = dict()
if isinstance(a_uid, six.integer_types):
kwargs['bodyA'] = a_uid
elif isinstance(a_uid, (tuple, list)):
kwargs['bodyA'] = a_uid[0]
kwargs['linkIndexA'] = a_uid[1]
else:
raise ValueError
kwargs['physicsClientId'] = self.uid
contact_points = pybullet.getContactPoints(**kwargs)
contact_points = [cp[2] for cp in contact_points]
return contact_points
#
# Debug Visualizer
#
def get_debug_visualizer_info(self, info_args):
"""Return a dictionary of info with keys from info_args
Args:
info_args: A list of strings for debug visualizer information
"""
width, height, view_matrix, projection_matrix, camera_up, camera_forward, horizontal, vertical, yaw, pitch, dist, target = pybullet.getDebugVisualizerCamera(self.uid)
info = {'width': width,
'height': height,
'viewMatrix': view_matrix,
'projectionMatrix': projection_matrix,
'cameraUp': camera_up,
'cameraForward': camera_forward,
'horizontal': horizontal,
'vertical': vertical,
'yaw': yaw,
'pitch': pitch,
'dist': dist,
'target': target
}
visualizer_info = {}
for arg in info_args:
visualizer_info[arg] = info[arg]
return visualizer_info
def reset_debug_visualizer(self, camera_distance, camera_yaw, camera_pitch, camera_target_position):
"""
Args:
camera_distance (float): distance from eye to camera target position
camera_yaw (float): camera yaw angle (in degrees) left/right
camera_pitch (float): camera pitch angle (in degrees) up/down
camera_target_position (list or tuple of 3 floats): the camera focus point
"""
pybullet.resetDebugVisualizerCamera(camera_distance,
camera_yaw,
camera_pitch,
camera_target_position,
self.uid)
| [
"pybullet.resetSimulation",
"pybullet.getAPIVersion",
"pybullet.createVisualShape",
"pybullet.calculateInverseKinematics",
"pybullet.resetDebugVisualizerCamera",
"pybullet.resetBaseVelocity",
"pybullet.setJointMotorControl2",
"pybullet.getBaseVelocity",
"pybullet.connect",
"robovat.math.Orientatio... | [((2187, 2232), 'pybullet.disconnect', 'pybullet.disconnect', ([], {'physicsClientId': 'self.uid'}), '(physicsClientId=self.uid)\n', (2206, 2232), False, 'import pybullet\n'), ((2241, 2309), 'robovat.utils.logging.logger.info', 'logger.info', (['"""Disconnected client %d to pybullet server."""', 'self._uid'], {}), "('Disconnected client %d to pybullet server.', self._uid)\n", (2252, 2309), False, 'from robovat.utils.logging import logger\n'), ((2376, 2426), 'pybullet.resetSimulation', 'pybullet.resetSimulation', ([], {'physicsClientId': 'self.uid'}), '(physicsClientId=self.uid)\n', (2400, 2426), False, 'import pybullet\n'), ((2846, 2857), 'time.time', 'time.time', ([], {}), '()\n', (2855, 2857), False, 'import time\n'), ((2950, 2999), 'pybullet.stepSimulation', 'pybullet.stepSimulation', ([], {'physicsClientId': 'self.uid'}), '(physicsClientId=self.uid)\n', (2973, 2999), False, 'import pybullet\n'), ((3661, 3747), 'pybullet.setGravity', 'pybullet.setGravity', (['gravity[0]', 'gravity[1]', 'gravity[2]'], {'physicsClientId': 'self.uid'}), '(gravity[0], gravity[1], gravity[2], physicsClientId=\n self.uid)\n', (3680, 3747), False, 'import pybullet\n'), ((4544, 4569), 'os.path.abspath', 'os.path.abspath', (['filename'], {}), '(filename)\n', (4559, 4569), False, 'import os\n'), ((4585, 4609), 'os.path.exists', 'os.path.exists', (['filename'], {}), '(filename)\n', (4599, 4609), False, 'import os\n'), ((4665, 4691), 'os.path.splitext', 'os.path.splitext', (['filename'], {}), '(filename)\n', (4681, 4691), False, 'import os\n'), ((4708, 4718), 'robovat.math.Pose', 'Pose', (['pose'], {}), '(pose)\n', (4712, 4718), False, 'from robovat.math import Pose\n'), ((7972, 8040), 'pybullet.removeBody', 'pybullet.removeBody', ([], {'bodyUniqueId': 'body_uid', 'physicsClientId': 'self.uid'}), '(bodyUniqueId=body_uid, physicsClientId=self.uid)\n', (7991, 8040), False, 'import pybullet\n'), ((8364, 8455), 'pybullet.getBasePositionAndOrientation', 
'pybullet.getBasePositionAndOrientation', ([], {'bodyUniqueId': 'body_uid', 'physicsClientId': 'self.uid'}), '(bodyUniqueId=body_uid,\n physicsClientId=self.uid)\n', (8402, 8455), False, 'import pybullet\n'), ((8484, 8512), 'robovat.math.Pose', 'Pose', (['[position, quaternion]'], {}), '([position, quaternion])\n', (8488, 8512), False, 'from robovat.math import Pose\n'), ((8756, 8847), 'pybullet.getBasePositionAndOrientation', 'pybullet.getBasePositionAndOrientation', ([], {'bodyUniqueId': 'body_uid', 'physicsClientId': 'self.uid'}), '(bodyUniqueId=body_uid,\n physicsClientId=self.uid)\n', (8794, 8847), False, 'import pybullet\n'), ((8872, 8908), 'numpy.array', 'np.array', (['position'], {'dtype': 'np.float32'}), '(position, dtype=np.float32)\n', (8880, 8908), True, 'import numpy as np\n'), ((9173, 9246), 'pybullet.getBaseVelocity', 'pybullet.getBaseVelocity', ([], {'bodyUniqueId': 'body_uid', 'physicsClientId': 'self.uid'}), '(bodyUniqueId=body_uid, physicsClientId=self.uid)\n', (9197, 9246), False, 'import pybullet\n'), ((9275, 9318), 'numpy.array', 'np.array', (['linear_velocity'], {'dtype': 'np.float32'}), '(linear_velocity, dtype=np.float32)\n', (9283, 9318), True, 'import numpy as np\n'), ((9586, 9659), 'pybullet.getBaseVelocity', 'pybullet.getBaseVelocity', ([], {'bodyUniqueId': 'body_uid', 'physicsClientId': 'self.uid'}), '(bodyUniqueId=body_uid, physicsClientId=self.uid)\n', (9610, 9659), False, 'import pybullet\n'), ((9688, 9732), 'numpy.array', 'np.array', (['angular_velocity'], {'dtype': 'np.float32'}), '(angular_velocity, dtype=np.float32)\n', (9696, 9732), True, 'import numpy as np\n'), ((9988, 10079), 'pybullet.getDynamicsInfo', 'pybullet.getDynamicsInfo', ([], {'bodyUniqueId': 'body_uid', 'linkIndex': '(-1)', 'physicsClientId': 'self.uid'}), '(bodyUniqueId=body_uid, linkIndex=-1,\n physicsClientId=self.uid)\n', (10012, 10079), False, 'import pybullet\n'), ((10423, 10514), 'pybullet.getDynamicsInfo', 'pybullet.getDynamicsInfo', ([], {'bodyUniqueId': 
'body_uid', 'linkIndex': '(-1)', 'physicsClientId': 'self.uid'}), '(bodyUniqueId=body_uid, linkIndex=-1,\n physicsClientId=self.uid)\n', (10447, 10514), False, 'import pybullet\n'), ((10968, 11038), 'pybullet.getNumJoints', 'pybullet.getNumJoints', ([], {'bodyUniqueId': 'body_uid', 'physicsClientId': 'self.uid'}), '(bodyUniqueId=body_uid, physicsClientId=self.uid)\n', (10989, 11038), False, 'import pybullet\n'), ((11362, 11432), 'pybullet.getNumJoints', 'pybullet.getNumJoints', ([], {'bodyUniqueId': 'body_uid', 'physicsClientId': 'self.uid'}), '(bodyUniqueId=body_uid, physicsClientId=self.uid)\n', (11383, 11432), False, 'import pybullet\n'), ((11723, 11733), 'robovat.math.Pose', 'Pose', (['pose'], {}), '(pose)\n', (11727, 11733), False, 'from robovat.math import Pose\n'), ((11824, 11954), 'pybullet.resetBasePositionAndOrientation', 'pybullet.resetBasePositionAndOrientation', ([], {'bodyUniqueId': 'body_uid', 'posObj': 'position', 'ornObj': 'quaternion', 'physicsClientId': 'self.uid'}), '(bodyUniqueId=body_uid, posObj=\n position, ornObj=quaternion, physicsClientId=self.uid)\n', (11864, 11954), False, 'import pybullet\n'), ((12302, 12350), 'pybullet.getBasePositionAndOrientation', 'pybullet.getBasePositionAndOrientation', (['body_uid'], {}), '(body_uid)\n', (12340, 12350), False, 'import pybullet\n'), ((12359, 12489), 'pybullet.resetBasePositionAndOrientation', 'pybullet.resetBasePositionAndOrientation', ([], {'bodyUniqueId': 'body_uid', 'posObj': 'position', 'ornObj': 'quaternion', 'physicsClientId': 'self.uid'}), '(bodyUniqueId=body_uid, posObj=\n position, ornObj=quaternion, physicsClientId=self.uid)\n', (12399, 12489), False, 'import pybullet\n'), ((12758, 12806), 'pybullet.getBasePositionAndOrientation', 'pybullet.getBasePositionAndOrientation', (['body_uid'], {}), '(body_uid)\n', (12796, 12806), False, 'import pybullet\n'), ((12865, 12995), 'pybullet.resetBasePositionAndOrientation', 'pybullet.resetBasePositionAndOrientation', ([], {'bodyUniqueId': 'body_uid', 
'posObj': 'position', 'ornObj': 'quaternion', 'physicsClientId': 'self.uid'}), '(bodyUniqueId=body_uid, posObj=\n position, ornObj=quaternion, physicsClientId=self.uid)\n', (12905, 12995), False, 'import pybullet\n'), ((13369, 13481), 'pybullet.resetBaseVelocity', 'pybullet.resetBaseVelocity', ([], {'bodyUniqueId': 'body_uid', 'linearVelocity': 'linear_velocity', 'physicsClientId': 'self.uid'}), '(bodyUniqueId=body_uid, linearVelocity=\n linear_velocity, physicsClientId=self.uid)\n', (13395, 13481), False, 'import pybullet\n'), ((13861, 13975), 'pybullet.resetBaseVelocity', 'pybullet.resetBaseVelocity', ([], {'bodyUniqueId': 'body_uid', 'angularVelocity': 'angular_velocity', 'physicsClientId': 'self.uid'}), '(bodyUniqueId=body_uid, angularVelocity=\n angular_velocity, physicsClientId=self.uid)\n', (13887, 13975), False, 'import pybullet\n'), ((14191, 14292), 'pybullet.changeDynamics', 'pybullet.changeDynamics', ([], {'bodyUniqueId': 'body_uid', 'linkIndex': '(-1)', 'mass': 'mass', 'physicsClientId': 'self.uid'}), '(bodyUniqueId=body_uid, linkIndex=-1, mass=mass,\n physicsClientId=self.uid)\n', (14214, 14292), False, 'import pybullet\n'), ((15939, 15972), 'pybullet.changeDynamics', 'pybullet.changeDynamics', ([], {}), '(**kwargs)\n', (15962, 15972), False, 'import pybullet\n'), ((16545, 16581), 'pybullet.changeVisualShape', 'pybullet.changeVisualShape', ([], {}), '(**kwargs)\n', (16571, 16581), False, 'import pybullet\n'), ((16954, 17049), 'pybullet.getJointInfo', 'pybullet.getJointInfo', ([], {'bodyUniqueId': 'body_uid', 'jointIndex': 'link_ind', 'physicsClientId': 'self.uid'}), '(bodyUniqueId=body_uid, jointIndex=link_ind,\n physicsClientId=self.uid)\n', (16975, 17049), False, 'import pybullet\n'), ((17448, 17542), 'pybullet.getLinkState', 'pybullet.getLinkState', ([], {'bodyUniqueId': 'body_uid', 'linkIndex': 'link_ind', 'physicsClientId': 'self.uid'}), '(bodyUniqueId=body_uid, linkIndex=link_ind,\n physicsClientId=self.uid)\n', (17469, 17542), False, 'import 
pybullet\n'), ((17579, 17607), 'robovat.math.Pose', 'Pose', (['[position, quaternion]'], {}), '([position, quaternion])\n', (17583, 17607), False, 'from robovat.math import Pose\n'), ((17936, 18030), 'pybullet.getLinkState', 'pybullet.getLinkState', ([], {'bodyUniqueId': 'body_uid', 'linkIndex': 'link_ind', 'physicsClientId': 'self.uid'}), '(bodyUniqueId=body_uid, linkIndex=link_ind,\n physicsClientId=self.uid)\n', (17957, 18030), False, 'import pybullet\n'), ((18067, 18095), 'robovat.math.Pose', 'Pose', (['[position, quaternion]'], {}), '([position, quaternion])\n', (18071, 18095), False, 'from robovat.math import Pose\n'), ((18469, 18566), 'pybullet.getDynamicsInfo', 'pybullet.getDynamicsInfo', ([], {'bodyUniqueId': 'body_uid', 'linkIndex': 'link_ind', 'physicsClientId': 'self.uid'}), '(bodyUniqueId=body_uid, linkIndex=link_ind,\n physicsClientId=self.uid)\n', (18493, 18566), False, 'import pybullet\n'), ((18960, 19057), 'pybullet.getDynamicsInfo', 'pybullet.getDynamicsInfo', ([], {'bodyUniqueId': 'body_uid', 'linkIndex': 'link_ind', 'physicsClientId': 'self.uid'}), '(bodyUniqueId=body_uid, linkIndex=link_ind,\n physicsClientId=self.uid)\n', (18984, 19057), False, 'import pybullet\n'), ((19617, 19725), 'pybullet.changeDynamics', 'pybullet.changeDynamics', ([], {'bodyUniqueId': 'body_uid', 'linkIndex': 'link_ind', 'mass': 'mass', 'physicsClientId': 'self.uid'}), '(bodyUniqueId=body_uid, linkIndex=link_ind, mass=\n mass, physicsClientId=self.uid)\n', (19640, 19725), False, 'import pybullet\n'), ((20731, 20764), 'pybullet.changeDynamics', 'pybullet.changeDynamics', ([], {}), '(**kwargs)\n', (20754, 20764), False, 'import pybullet\n'), ((21147, 21243), 'pybullet.getJointInfo', 'pybullet.getJointInfo', ([], {'bodyUniqueId': 'body_uid', 'jointIndex': 'joint_ind', 'physicsClientId': 'self.uid'}), '(bodyUniqueId=body_uid, jointIndex=joint_ind,\n physicsClientId=self.uid)\n', (21168, 21243), False, 'import pybullet\n'), ((21727, 21823), 'pybullet.getJointInfo', 
'pybullet.getJointInfo', ([], {'bodyUniqueId': 'body_uid', 'jointIndex': 'joint_ind', 'physicsClientId': 'self.uid'}), '(bodyUniqueId=body_uid, jointIndex=joint_ind,\n physicsClientId=self.uid)\n', (21748, 21823), False, 'import pybullet\n'), ((22413, 22509), 'pybullet.getJointInfo', 'pybullet.getJointInfo', ([], {'bodyUniqueId': 'body_uid', 'jointIndex': 'joint_ind', 'physicsClientId': 'self.uid'}), '(bodyUniqueId=body_uid, jointIndex=joint_ind,\n physicsClientId=self.uid)\n', (22434, 22509), False, 'import pybullet\n'), ((23076, 23173), 'pybullet.getJointState', 'pybullet.getJointState', ([], {'bodyUniqueId': 'body_uid', 'jointIndex': 'joint_ind', 'physicsClientId': 'self.uid'}), '(bodyUniqueId=body_uid, jointIndex=joint_ind,\n physicsClientId=self.uid)\n', (23098, 23173), False, 'import pybullet\n'), ((23524, 23621), 'pybullet.getJointState', 'pybullet.getJointState', ([], {'bodyUniqueId': 'body_uid', 'jointIndex': 'joint_ind', 'physicsClientId': 'self.uid'}), '(bodyUniqueId=body_uid, jointIndex=joint_ind,\n physicsClientId=self.uid)\n', (23546, 23621), False, 'import pybullet\n'), ((24192, 24289), 'pybullet.getJointState', 'pybullet.getJointState', ([], {'bodyUniqueId': 'body_uid', 'jointIndex': 'joint_ind', 'physicsClientId': 'self.uid'}), '(bodyUniqueId=body_uid, jointIndex=joint_ind,\n physicsClientId=self.uid)\n', (24214, 24289), False, 'import pybullet\n'), ((24326, 24368), 'numpy.array', 'np.array', (['reaction_force'], {'dtype': 'np.float32'}), '(reaction_force, dtype=np.float32)\n', (24334, 24368), True, 'import numpy as np\n'), ((24769, 24866), 'pybullet.getJointState', 'pybullet.getJointState', ([], {'bodyUniqueId': 'body_uid', 'jointIndex': 'joint_ind', 'physicsClientId': 'self.uid'}), '(bodyUniqueId=body_uid, jointIndex=joint_ind,\n physicsClientId=self.uid)\n', (24791, 24866), False, 'import pybullet\n'), ((24903, 24937), 'numpy.array', 'np.array', (['torque'], {'dtype': 'np.float32'}), '(torque, dtype=np.float32)\n', (24911, 24937), True, 'import 
numpy as np\n'), ((25224, 25345), 'pybullet.resetJointState', 'pybullet.resetJointState', ([], {'bodyUniqueId': 'body_uid', 'jointIndex': 'joint_ind', 'targetValue': 'position', 'physicsClientId': 'self.uid'}), '(bodyUniqueId=body_uid, jointIndex=joint_ind,\n targetValue=position, physicsClientId=self.uid)\n', (25248, 25345), False, 'import pybullet\n'), ((25733, 25830), 'pybullet.getJointState', 'pybullet.getJointState', ([], {'bodyUniqueId': 'body_uid', 'jointIndex': 'joint_ind', 'physicsClientId': 'self.uid'}), '(bodyUniqueId=body_uid, jointIndex=joint_ind,\n physicsClientId=self.uid)\n', (25755, 25830), False, 'import pybullet\n'), ((25860, 26006), 'pybullet.resetJointState', 'pybullet.resetJointState', ([], {'bodyUniqueId': 'body_uid', 'jointIndex': 'joint_ind', 'targetValue': 'position', 'targetVelocity': 'velocity', 'physicsClientId': 'self.uid'}), '(bodyUniqueId=body_uid, jointIndex=joint_ind,\n targetValue=position, targetVelocity=velocity, physicsClientId=self.uid)\n', (25884, 26006), False, 'import pybullet\n'), ((26281, 26410), 'pybullet.enableJointForceTorqueSensor', 'pybullet.enableJointForceTorqueSensor', ([], {'bodyUniqueId': 'body_uid', 'jointIndex': 'joint_ind', 'enableSensor': '(1)', 'physicsClientId': 'self.uid'}), '(bodyUniqueId=body_uid, jointIndex=\n joint_ind, enableSensor=1, physicsClientId=self.uid)\n', (26318, 26410), False, 'import pybullet\n'), ((29854, 29889), 'pybullet.createConstraint', 'pybullet.createConstraint', ([], {}), '(**kwargs)\n', (29879, 29889), False, 'import pybullet\n'), ((30170, 30221), 'pybullet.changeConstraint', 'pybullet.changeConstraint', (['constraint_uid'], {}), '(constraint_uid, **kwargs)\n', (30195, 30221), False, 'import pybullet\n'), ((30455, 30534), 'pybullet.changeConstraint', 'pybullet.changeConstraint', (['constraint_uid'], {'maxForce': '(0)', 'physicsClientId': 'self.uid'}), '(constraint_uid, maxForce=0, physicsClientId=self.uid)\n', (30480, 30534), False, 'import pybullet\n'), ((30577, 30644), 
'pybullet.removeConstraint', 'pybullet.removeConstraint', (['constraint_uid'], {'physicsClientId': 'self.uid'}), '(constraint_uid, physicsClientId=self.uid)\n', (30602, 30644), False, 'import pybullet\n'), ((30949, 31040), 'pybullet.getConstraintInfo', 'pybullet.getConstraintInfo', ([], {'constraintUniqueId': 'constraint_uid', 'physicsClientId': 'self.uid'}), '(constraintUniqueId=constraint_uid,\n physicsClientId=self.uid)\n', (30975, 31040), False, 'import pybullet\n'), ((31092, 31120), 'robovat.math.Pose', 'Pose', (['[position, quaternion]'], {}), '([position, quaternion])\n', (31096, 31120), False, 'from robovat.math import Pose\n'), ((31441, 31532), 'pybullet.getConstraintInfo', 'pybullet.getConstraintInfo', ([], {'constraintUniqueId': 'constraint_uid', 'physicsClientId': 'self.uid'}), '(constraintUniqueId=constraint_uid,\n physicsClientId=self.uid)\n', (31467, 31532), False, 'import pybullet\n'), ((31562, 31598), 'numpy.array', 'np.array', (['position'], {'dtype': 'np.float32'}), '(position, dtype=np.float32)\n', (31570, 31598), True, 'import numpy as np\n'), ((31917, 32008), 'pybullet.getConstraintInfo', 'pybullet.getConstraintInfo', ([], {'constraintUniqueId': 'constraint_uid', 'physicsClientId': 'self.uid'}), '(constraintUniqueId=constraint_uid,\n physicsClientId=self.uid)\n', (31943, 32008), False, 'import pybullet\n'), ((32038, 32061), 'robovat.math.Orientation', 'Orientation', (['quaternion'], {}), '(quaternion)\n', (32049, 32061), False, 'from robovat.math import Orientation\n'), ((32395, 32486), 'pybullet.getConstraintInfo', 'pybullet.getConstraintInfo', ([], {'constraintUniqueId': 'constraint_uid', 'physicsClientId': 'self.uid'}), '(constraintUniqueId=constraint_uid,\n physicsClientId=self.uid)\n', (32421, 32486), False, 'import pybullet\n'), ((32516, 32553), 'numpy.array', 'np.array', (['max_force'], {'dtype': 'np.float32'}), '(max_force, dtype=np.float32)\n', (32524, 32553), True, 'import numpy as np\n'), ((32929, 33092), 
'pybullet.changeConstraint', 'pybullet.changeConstraint', ([], {'userConstraintUniqueId': 'constraint_uid', 'jointChildPivot': 'position', 'jointChildFrameOrientation': 'quaternion', 'physicsClientId': 'self.uid'}), '(userConstraintUniqueId=constraint_uid,\n jointChildPivot=position, jointChildFrameOrientation=quaternion,\n physicsClientId=self.uid)\n', (32954, 33092), False, 'import pybullet\n'), ((33422, 33542), 'pybullet.changeConstraint', 'pybullet.changeConstraint', ([], {'userConstraintUniqueId': 'constraint_uid', 'jointChildPivot': 'position', 'physicsClientId': 'self.uid'}), '(userConstraintUniqueId=constraint_uid,\n jointChildPivot=position, physicsClientId=self.uid)\n', (33447, 33542), False, 'import pybullet\n'), ((33883, 34016), 'pybullet.changeConstraint', 'pybullet.changeConstraint', ([], {'userConstraintUniqueId': 'constraint_uid', 'jointChildFrameOrientation': 'quaternion', 'physicsClientId': 'self.uid'}), '(userConstraintUniqueId=constraint_uid,\n jointChildFrameOrientation=quaternion, physicsClientId=self.uid)\n', (33908, 34016), False, 'import pybullet\n'), ((34319, 34434), 'pybullet.changeConstraint', 'pybullet.changeConstraint', ([], {'userConstraintUniqueId': 'constraint_uid', 'maxForce': 'max_force', 'physicsClientId': 'self.uid'}), '(userConstraintUniqueId=constraint_uid, maxForce=\n max_force, physicsClientId=self.uid)\n', (34344, 34434), False, 'import pybullet\n'), ((34637, 34689), 'pybullet.getNumConstraints', 'pybullet.getNumConstraints', ([], {'physicsClientId': 'self.uid'}), '(physicsClientId=self.uid)\n', (34663, 34689), False, 'import pybullet\n'), ((36262, 36302), 'pybullet.setJointMotorControl2', 'pybullet.setJointMotorControl2', ([], {}), '(**kwargs)\n', (36292, 36302), False, 'import pybullet\n'), ((37456, 37496), 'pybullet.setJointMotorControl2', 'pybullet.setJointMotorControl2', ([], {}), '(**kwargs)\n', (37486, 37496), False, 'import pybullet\n'), ((38044, 38084), 'pybullet.setJointMotorControl2', 
'pybullet.setJointMotorControl2', ([], {}), '(**kwargs)\n', (38074, 38084), False, 'import pybullet\n'), ((39827, 39871), 'pybullet.setJointMotorControlArray', 'pybullet.setJointMotorControlArray', ([], {}), '(**kwargs)\n', (39861, 39871), False, 'import pybullet\n'), ((41286, 41330), 'pybullet.setJointMotorControlArray', 'pybullet.setJointMotorControlArray', ([], {}), '(**kwargs)\n', (41320, 41330), False, 'import pybullet\n'), ((41918, 41962), 'pybullet.setJointMotorControlArray', 'pybullet.setJointMotorControlArray', ([], {}), '(**kwargs)\n', (41952, 41962), False, 'import pybullet\n'), ((45298, 45343), 'pybullet.calculateInverseKinematics', 'pybullet.calculateInverseKinematics', ([], {}), '(**kwargs)\n', (45333, 45343), False, 'import pybullet\n'), ((46748, 46783), 'pybullet.getContactPoints', 'pybullet.getContactPoints', ([], {}), '(**kwargs)\n', (46773, 46783), False, 'import pybullet\n'), ((47781, 47816), 'pybullet.getContactPoints', 'pybullet.getContactPoints', ([], {}), '(**kwargs)\n', (47806, 47816), False, 'import pybullet\n'), ((48509, 48544), 'pybullet.getContactPoints', 'pybullet.getContactPoints', ([], {}), '(**kwargs)\n', (48534, 48544), False, 'import pybullet\n'), ((49020, 49063), 'pybullet.getDebugVisualizerCamera', 'pybullet.getDebugVisualizerCamera', (['self.uid'], {}), '(self.uid)\n', (49053, 49063), False, 'import pybullet\n'), ((50142, 50258), 'pybullet.resetDebugVisualizerCamera', 'pybullet.resetDebugVisualizerCamera', (['camera_distance', 'camera_yaw', 'camera_pitch', 'camera_target_position', 'self.uid'], {}), '(camera_distance, camera_yaw,\n camera_pitch, camera_target_position, self.uid)\n', (50177, 50258), False, 'import pybullet\n'), ((1213, 1243), 'pybullet.connect', 'pybullet.connect', (['pybullet.GUI'], {}), '(pybullet.GUI)\n', (1229, 1243), False, 'import pybullet\n'), ((1256, 1321), 'pybullet.configureDebugVisualizer', 'pybullet.configureDebugVisualizer', (['pybullet.COV_ENABLE_SHADOWS', '(1)'], {}), 
'(pybullet.COV_ENABLE_SHADOWS, 1)\n', (1289, 1321), False, 'import pybullet\n'), ((1334, 1395), 'pybullet.configureDebugVisualizer', 'pybullet.configureDebugVisualizer', (['pybullet.COV_ENABLE_GUI', '(0)'], {}), '(pybullet.COV_ENABLE_GUI, 0)\n', (1367, 1395), False, 'import pybullet\n'), ((1442, 1495), 'robovat.utils.logging.logger.info', 'logger.info', (['"""Connected client %d to GUI."""', 'self._uid'], {}), "('Connected client %d to GUI.', self._uid)\n", (1453, 1495), False, 'from robovat.utils.logging import logger\n'), ((1522, 1584), 'robovat.utils.logging.logger.info', 'logger.info', (['"""Use worker_id %d for the simulation."""', 'worker_id'], {}), "('Use worker_id %d for the simulation.', worker_id)\n", (1533, 1584), False, 'from robovat.utils.logging import logger\n'), ((1609, 1657), 'pybullet.connect', 'pybullet.connect', (['pybullet.DIRECT'], {'key': 'worker_id'}), '(pybullet.DIRECT, key=worker_id)\n', (1625, 1657), False, 'import pybullet\n'), ((1670, 1726), 'robovat.utils.logging.logger.info', 'logger.info', (['"""Connected client %d to DIRECT."""', 'self._uid'], {}), "('Connected client %d to DIRECT.', self._uid)\n", (1681, 1726), False, 'from robovat.utils.logging import logger\n'), ((2596, 2655), 'pybullet.setRealTimeSimulation', 'pybullet.setRealTimeSimulation', (['(1)'], {'physicsClientId': 'self.uid'}), '(1, physicsClientId=self.uid)\n', (2626, 2655), False, 'import pybullet\n'), ((2682, 2741), 'pybullet.setRealTimeSimulation', 'pybullet.setRealTimeSimulation', (['(0)'], {'physicsClientId': 'self.uid'}), '(0, physicsClientId=self.uid)\n', (2712, 2741), False, 'import pybullet\n'), ((2754, 2817), 'pybullet.setTimeStep', 'pybullet.setTimeStep', (['self._time_step'], {'physicsClientId': 'self.uid'}), '(self._time_step, physicsClientId=self.uid)\n', (2774, 2817), False, 'import pybullet\n'), ((4975, 5042), 'pybullet.configureDebugVisualizer', 'pybullet.configureDebugVisualizer', (['pybullet.COV_ENABLE_RENDERING', '(0)'], {}), 
'(pybullet.COV_ENABLE_RENDERING, 0)\n', (5008, 5042), False, 'import pybullet\n'), ((5066, 5293), 'pybullet.loadURDF', 'pybullet.loadURDF', ([], {'fileName': 'filename', 'basePosition': 'position', 'baseOrientation': 'quaternion', 'globalScaling': 'scale', 'useFixedBase': 'is_static', 'physicsClientId': 'self.uid', 'flags': 'pybullet.URDF_USE_SELF_COLLISION_EXCLUDE_PARENT'}), '(fileName=filename, basePosition=position, baseOrientation\n =quaternion, globalScaling=scale, useFixedBase=is_static,\n physicsClientId=self.uid, flags=pybullet.\n URDF_USE_SELF_COLLISION_EXCLUDE_PARENT)\n', (5083, 5293), False, 'import pybullet\n'), ((5423, 5490), 'pybullet.configureDebugVisualizer', 'pybullet.configureDebugVisualizer', (['pybullet.COV_ENABLE_RENDERING', '(1)'], {}), '(pybullet.COV_ENABLE_RENDERING, 1)\n', (5456, 5490), False, 'import pybullet\n'), ((32826, 32836), 'robovat.math.Pose', 'Pose', (['pose'], {}), '(pose)\n', (32830, 32836), False, 'from robovat.math import Pose\n'), ((44366, 44381), 'robovat.math.Pose', 'Pose', (['link_pose'], {}), '(link_pose)\n', (44370, 44381), False, 'from robovat.math import Pose\n'), ((1134, 1158), 'pybullet.getAPIVersion', 'pybullet.getAPIVersion', ([], {}), '()\n', (1156, 1158), False, 'import pybullet\n'), ((3371, 3382), 'time.time', 'time.time', ([], {}), '()\n', (3380, 3382), False, 'import time\n'), ((6029, 6144), 'pybullet.createCollisionShape', 'pybullet.createCollisionShape', (['pybullet.GEOM_MESH'], {'fileName': 'filename', 'meshScale': '([scale] * 3)'}), '(pybullet.GEOM_MESH, fileName=filename,\n meshScale=[scale] * 3, **collision_kwargs)\n', (6058, 6144), False, 'import pybullet\n'), ((6652, 6762), 'pybullet.createVisualShape', 'pybullet.createVisualShape', (['pybullet.GEOM_MESH'], {'fileName': 'filename', 'meshScale': '([scale] * 3)'}), '(pybullet.GEOM_MESH, fileName=filename, meshScale\n =[scale] * 3, **visual_kwargs)\n', (6678, 6762), False, 'import pybullet\n'), ((6941, 7008), 'pybullet.configureDebugVisualizer', 
'pybullet.configureDebugVisualizer', (['pybullet.COV_ENABLE_RENDERING', '(0)'], {}), '(pybullet.COV_ENABLE_RENDERING, 0)\n', (6974, 7008), False, 'import pybullet\n'), ((7253, 7433), 'pybullet.createMultiBody', 'pybullet.createMultiBody', ([], {'baseCollisionShapeIndex': 'collision_shape_id', 'baseVisualShapeIndex': 'visual_shape_id', 'basePosition': 'position', 'baseOrientation': 'quaternion'}), '(baseCollisionShapeIndex=collision_shape_id,\n baseVisualShapeIndex=visual_shape_id, basePosition=position,\n baseOrientation=quaternion, **body_kwargs)\n', (7277, 7433), False, 'import pybullet\n'), ((7643, 7710), 'pybullet.configureDebugVisualizer', 'pybullet.configureDebugVisualizer', (['pybullet.COV_ENABLE_RENDERING', '(1)'], {}), '(pybullet.COV_ENABLE_RENDERING, 1)\n', (7676, 7710), False, 'import pybullet\n'), ((28485, 28508), 'robovat.math.Pose', 'Pose', (['parent_frame_pose'], {}), '(parent_frame_pose)\n', (28489, 28508), False, 'from robovat.math import Pose\n'), ((28868, 28890), 'robovat.math.Pose', 'Pose', (['child_frame_pose'], {}), '(child_frame_pose)\n', (28872, 28890), False, 'from robovat.math import Pose\n')] |
#!/usr/bin/env python3
##
## Copyright (c) Facebook, Inc. and its affiliates.
## This source code is licensed under the MIT license found in the
## LICENSE file in the root directory of this source tree.
##
import argparse
import subprocess
def gen(out_name, opt):
    """Generate per-GPU launcher scripts plus one batch script that runs them all.

    Writes ``<out_name>_<i>.sh`` for each GPU index ``i`` in
    ``0..opt['num_gpus']-1`` containing the training command, and
    ``<out_name>.sh`` which removes old job files and launches every per-GPU
    script in the background. Finally marks the batch script executable.

    Args:
        out_name (str): Base name (without ``.sh``) for the generated scripts.
        opt (dict): Options; uses ``num_gpus`` (int) and ``slurm`` (bool) to
            choose between an srun-based and a CUDA_VISIBLE_DEVICES-based
            launch command.
    """
    # 'with' guarantees every script is flushed and closed even if a write
    # fails (the original code leaked open file handles).
    with open('{}.sh'.format(out_name), 'w') as fout_batch:
        fout_batch.write('chmod +x *.sh\n')
        fout_batch.write('rm job-out-*\n')
        fout_batch.write('rm job-in-*\n')
        for i in range(opt['num_gpus']):
            sh_name = '{}_{}.sh'.format(out_name, i)
            with open(sh_name, 'w') as fout:
                if opt['slurm']:
                    fout.write(
                        "srun -o checkpoint/slurm-gpu-job-%j.out --error=checkpoint/slurm-gpu-job-%j.err --gres=gpu:1 python3 train.py --job_num {}\n".format(
                            i
                        )
                    )
                else:
                    fout.write(
                        "CUDA_VISIBLE_DEVICES={} python3 train.py --job_num {}\n".format(i, i)
                    )
            fout_batch.write("./{} &\n".format(sh_name))
    subprocess.call("chmod +x {}.sh".format(out_name).split())
if __name__ == '__main__':
    # Command-line entry point: parse GPU/slurm options and emit the scripts.
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument(
        '--num_gpus', type=int, default=1, help='number of GPUs to use'
    )
    arg_parser.add_argument(
        '--slurm', action='store_true', default=False, help='whether use slurm or not'
    )
    gen('batch_holder', vars(arg_parser.parse_args()))
| [
"argparse.ArgumentParser"
] | [((1154, 1179), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (1177, 1179), False, 'import argparse\n')] |
import os
from abc import ABCMeta
class JWEAlgorithmBase(metaclass=ABCMeta):
    """Base interface for all JWE key-management (``alg``) algorithms.

    Subclasses set ``name``/``description`` and implement ``prepare_key``.
    (The explicit ``object`` base was redundant in Python 3 and removed.)
    """
    # Extra JOSE header parameters the algorithm understands, if any.
    EXTRA_HEADERS = None

    name = None
    description = None
    algorithm_type = 'JWE'
    algorithm_location = 'alg'

    def prepare_key(self, raw_data):
        """Turn raw key material into the key object the algorithm works with.

        Must be implemented by subclasses.
        """
        raise NotImplementedError
class JWEAlgorithm(JWEAlgorithmBase, metaclass=ABCMeta):
    """Interface for a JWE key-management algorithm per RFC 7518 (JWA).

    Concrete ``alg`` implementations from the JWA specification subclass
    this and provide ``wrap``/``unwrap`` to protect and recover the
    content encryption key (CEK).
    """
    def wrap(self, enc_alg, headers, key):
        # Produce the encrypted key (and any derived header values) for the
        # given content-encryption algorithm. Implemented by subclasses.
        raise NotImplementedError

    def unwrap(self, enc_alg, ek, headers, key):
        # Recover the content-encryption key from the encrypted key `ek`.
        # Implemented by subclasses.
        raise NotImplementedError
class JWEAlgorithmWithTagAwareKeyAgreement(JWEAlgorithmBase, metaclass=ABCMeta):
    """Interface for JWE algorithms with tag-aware key agreement.

    Used in key-agreement-with-key-wrapping mode, where key wrapping may
    depend on the authentication tag (ECDH-1PU is an example). Unlike plain
    ``JWEAlgorithm``, every operation also takes the sender's key.
    """
    def generate_keys_and_prepare_headers(self, enc_alg, key, sender_key):
        # First phase: generate the ephemeral key material and the header
        # values derived from it. Implemented by subclasses.
        raise NotImplementedError

    def agree_upon_key_and_wrap_cek(self, enc_alg, headers, key, sender_key, epk, cek, tag):
        # Second phase: derive the shared key (using the ephemeral key `epk`)
        # and wrap the content-encryption key; `tag` makes the wrap tag-aware.
        raise NotImplementedError

    def wrap(self, enc_alg, headers, key, sender_key):
        # One-shot wrap for modes that do not need the two-phase flow above.
        raise NotImplementedError

    def unwrap(self, enc_alg, ek, headers, key, sender_key, tag=None):
        # Recover the CEK; `tag` is optional because not all modes bind the
        # unwrap to the authentication tag.
        raise NotImplementedError
class JWEEncAlgorithm(object):
    """Interface for a JWE content-encryption (``enc``) algorithm.

    Subclasses define ``CEK_SIZE`` and ``IV_SIZE`` (both in bits) and
    implement ``encrypt``/``decrypt``. Key and IV generation use the OS
    CSPRNG via ``os.urandom``.
    """
    name = None
    description = None
    algorithm_type = 'JWE'
    algorithm_location = 'enc'

    IV_SIZE = None
    CEK_SIZE = None

    def generate_cek(self):
        # Sizes are tracked in bits; urandom wants bytes.
        return os.urandom(self.CEK_SIZE // 8)

    def generate_iv(self):
        return os.urandom(self.IV_SIZE // 8)

    def check_iv(self, iv):
        # Reject IVs whose bit length does not match the algorithm's spec.
        bit_length = len(iv) * 8
        if bit_length != self.IV_SIZE:
            raise ValueError('Invalid "iv" size')

    def encrypt(self, msg, aad, iv, key):
        """Encrypt ``msg``.

        :param msg: text to be encrypt in bytes
        :param aad: additional authenticated data in bytes
        :param iv: initialization vector in bytes
        :param key: encrypted key in bytes
        :return: (ciphertext, iv, tag)
        """
        raise NotImplementedError

    def decrypt(self, ciphertext, aad, iv, tag, key):
        """Decrypt ``ciphertext``.

        :param ciphertext: ciphertext in bytes
        :param aad: additional authenticated data in bytes
        :param iv: initialization vector in bytes
        :param tag: authentication tag in bytes
        :param key: encrypted key in bytes
        :return: message
        """
        raise NotImplementedError
class JWEZipAlgorithm:
    """Interface for a JWE compression (``zip``) algorithm.

    Subclasses implement ``compress``/``decompress`` over the plaintext
    before encryption / after decryption. (The explicit ``(object)`` base
    was redundant in Python 3 and removed.)
    """
    name = None
    description = None
    algorithm_type = 'JWE'
    algorithm_location = 'zip'

    def compress(self, s):
        """Compress the byte string ``s``. Implemented by subclasses."""
        raise NotImplementedError

    def decompress(self, s):
        """Decompress the byte string ``s``. Implemented by subclasses."""
        raise NotImplementedError
| [
"os.urandom"
] | [((1619, 1649), 'os.urandom', 'os.urandom', (['(self.CEK_SIZE // 8)'], {}), '(self.CEK_SIZE // 8)\n', (1629, 1649), False, 'import os\n'), ((1693, 1722), 'os.urandom', 'os.urandom', (['(self.IV_SIZE // 8)'], {}), '(self.IV_SIZE // 8)\n', (1703, 1722), False, 'import os\n')] |
import os
import getpass
import hashlib
if __name__ == "__main__":
    # Compose the quiz answer from the SLURM CPU allocation, the login
    # name, and an MD5 digest of that name.
    cpu_count = int(os.environ['SLURM_CPUS_PER_TASK'])
    user = getpass.getuser()
    digest = hashlib.md5(user.encode()).hexdigest()
    answer = '{}_{}_{}'.format(user, cpu_count, digest)
    print('Your result for the webCampus Quiz is:', answer)
| [
"getpass.getuser"
] | [((187, 204), 'getpass.getuser', 'getpass.getuser', ([], {}), '()\n', (202, 204), False, 'import getpass\n')] |
"""
Module for the Window class which handles everything related to window drawing and updating.
Game wide objects and settings should be set here.
"""
import pygame
from pyggui.gui.page import Page
class Window:
    """Owns per-frame rendering: clears the display, updates and draws the
    active page plus the overlay page, then flips the screen.

    Game-wide objects and settings should be set here.
    """

    def __init__(self, game: 'Game'):
        """
        Args:
            game (Game): Main Game object used.
        """
        self.game = game
        self.display = game.display
        self.overlay_page = game.controller.overlay_page

    def update(self) -> None:
        """Update and draw the current page and overlay, then refresh the screen."""
        self.display.fill((0, 0, 0))  # clear to black each frame
        current = self.game.controller.current_page
        # Update both layers first, then draw them so the overlay sits on top.
        for page in (current, self.overlay_page):
            page.update()
        for page in (current, self.overlay_page):
            page.draw()
        pygame.display.update()
| [
"pygame.display.update"
] | [((890, 913), 'pygame.display.update', 'pygame.display.update', ([], {}), '()\n', (911, 913), False, 'import pygame\n')] |
#!/usr/bin/env python
# -*- encoding: latin-1 -*-
"""SQLatorD - the SQLator daemon - Twisted version of Async
This server is not really part of SQLator, it is a version
just for fun to test twisted."""
import xmlrpclib
from twisted.internet import reactor, protocol
from twisted.protocols import basic
from sqlatord_async import LayerServer
class ClientProtocol(basic.LineReceiver):
    """Per-connection protocol: each received line is executed against the
    factory's shared storage layer and the result echoed back to the client.
    """
    def lineReceived(self, data):
        # Run the incoming line through the shared LayerServer and reply
        # with its result, CRLF-terminated (classic line-based protocol).
        # NOTE(review): Python 2 era code (file imports xmlrpclib); on
        # Python 3 transport.write would need bytes — confirm target version.
        ret = self.factory.storage.execute(data)
        self.transport.write(ret + '\r\n')
class ConnectionFactory(protocol.ServerFactory):
    """Server factory that hands every connection a ClientProtocol.

    ``storage`` is a class-level attribute: a single LayerServer instance
    is created at import time and shared by all client connections (each
    protocol reaches it via ``self.factory.storage``).
    """
    protocol = ClientProtocol
    storage = LayerServer()
def main(port=5888):
    """Start the TCP server and run the Twisted reactor (blocks forever).

    Args:
        port (int): TCP port to listen on. Defaults to 5888 (the previously
            hard-coded value), so existing callers are unaffected.
    """
    reactor.listenTCP(port, ConnectionFactory())
    reactor.run()


if __name__ == '__main__':
    main()
| [
"twisted.internet.reactor.run",
"sqlatord_async.LayerServer"
] | [((609, 622), 'sqlatord_async.LayerServer', 'LayerServer', ([], {}), '()\n', (620, 622), False, 'from sqlatord_async import LayerServer\n'), ((689, 702), 'twisted.internet.reactor.run', 'reactor.run', ([], {}), '()\n', (700, 702), False, 'from twisted.internet import reactor, protocol\n')] |
import unittest
import numpy as np
import methods
# Fixture data for the fitting tests below — presumably the classic
# Wilkinson (1961) enzyme-kinetics example (x: substrate concentration,
# y: reaction rate); TODO confirm provenance against the original paper.
wilkinson_x = np.array([0.138, 0.220, 0.291, 0.560, 0.766, 1.460])
wilkinson_y = np.array([0.148, 0.171, 0.234, 0.324, 0.390, 0.493])
class TestMethods(unittest.TestCase):
    """Regression tests for the fitting methods using the Wilkinson data set."""

    def test_lineweaver_burk_with_wilkinson(self):
        """Lineweaver-Burk fit on the Wilkinson data reproduces known V and Km."""
        # Removed the unused `expected_results = ''` local (dead code).
        results = methods.lineweaver_burk(wilkinson_x, wilkinson_y)
        assert results.name == 'Lineweaver-Burk'
        assert results.error is None
        assert results.SS == 0.0
        assert results.v_hat is None
        assert round(results.V, 3) == 0.585
        assert round(results.Km, 3) == 0.441


if __name__ == '__main__':
    unittest.main()
| [
"unittest.main",
"methods.lineweaver_burk",
"numpy.array"
] | [((65, 114), 'numpy.array', 'np.array', (['[0.138, 0.22, 0.291, 0.56, 0.766, 1.46]'], {}), '([0.138, 0.22, 0.291, 0.56, 0.766, 1.46])\n', (73, 114), True, 'import numpy as np\n'), ((132, 183), 'numpy.array', 'np.array', (['[0.148, 0.171, 0.234, 0.324, 0.39, 0.493]'], {}), '([0.148, 0.171, 0.234, 0.324, 0.39, 0.493])\n', (140, 183), True, 'import numpy as np\n'), ((653, 668), 'unittest.main', 'unittest.main', ([], {}), '()\n', (666, 668), False, 'import unittest\n'), ((322, 371), 'methods.lineweaver_burk', 'methods.lineweaver_burk', (['wilkinson_x', 'wilkinson_y'], {}), '(wilkinson_x, wilkinson_y)\n', (345, 371), False, 'import methods\n')] |
from sqlalchemy import Column, text, ForeignKey
from jet_bridge_base import status
from jet_bridge_base.db import get_mapped_base, get_engine, reload_mapped_base
from jet_bridge_base.exceptions.not_found import NotFound
from jet_bridge_base.exceptions.validation_error import ValidationError
from jet_bridge_base.permissions import HasProjectPermissions
from jet_bridge_base.responses.json import JSONResponse
from jet_bridge_base.serializers.table import TableColumnSerializer
from jet_bridge_base.utils.db_types import map_to_sql_type, db_to_sql_type
from jet_bridge_base.views.base.api import APIView
from jet_bridge_base.views.model_description import map_column
from sqlalchemy.sql.ddl import AddConstraint
def map_dto_column(column, metadata=None):
    """Build a SQLAlchemy ``Column`` from a column DTO (serialized dict).

    Resolves the SQL type (from ``db_field`` when present, else ``field``),
    translates the DTO's default specification into a server default or
    autoincrement flag, applies type params (e.g. ``length``), and — when
    ``params.related_model`` is given — looks up the referenced table in
    ``metadata`` to copy its primary-key column type and attach a ForeignKey.

    The steps are order-dependent: params are consulted twice, once before
    the type is instantiated (for type kwargs) and once after (for the
    foreign-key override, which replaces the type entirely).
    """
    args = []
    column_kwargs = {}
    autoincrement = False
    server_default = None
    # Prefer the raw database type name over the abstract field type.
    column_type = db_to_sql_type(column['db_field']) if 'db_field' in column else map_to_sql_type(column['field'])
    if column.get('primary_key', False):
        autoincrement = True
    if 'default_type' in column:
        if column['default_type'] == 'value':
            server_default = column['default_value']
            if isinstance(server_default, bool):
                # Server defaults are textual; booleans become '1'/'0'.
                server_default = '1' if server_default else '0'
        elif column['default_type'] == 'datetime_now':
            server_default = text('NOW()')
        elif column['default_type'] == 'uuid':
            # Requires the uuid-ossp extension (PostgreSQL).
            server_default = text("uuid_generate_v4()")
        elif column['default_type'] == 'sequence':
            server_default = text("nextval({})".format(column['default_value']))
        elif column['default_type'] == 'auto_increment':
            autoincrement = True
    params = column.get('params')
    if params:
        if 'length' in params:
            column_kwargs['length'] = params['length']
    if callable(column_type):
        try:
            column_type = column_type(**column_kwargs)
        except TypeError:
            # Type does not accept these kwargs (e.g. no length) — use as-is.
            pass
    if params:
        if 'related_model' in params:
            model = params['related_model'].get('model')
            try:
                # Find the referenced table by name; IndexError below means
                # either the table or the referenced column was not found,
                # in which case the FK is silently skipped.
                table = list(filter(lambda x: x.name == model, metadata.tables.values()))[0]
                table_primary_keys = table.primary_key.columns.keys()
                table_primary_key = table_primary_keys[0] if len(table_primary_keys) > 0 else None
                related_column_name = params.get('custom_primary_key') or table_primary_key
                related_column = [x for x in table.columns if x.name == related_column_name][0]
                # FK columns take the referenced column's exact type.
                column_type = related_column.type
                foreign_key = ForeignKey(related_column)
                args.append(foreign_key)
            except IndexError:
                pass
    return Column(
        *args,
        name=column['name'],
        type_=column_type,
        autoincrement=autoincrement,
        primary_key=column.get('primary_key', False),
        nullable=column.get('null', False),
        server_default=server_default
    )
class TableColumnView(APIView):
permission_classes = (HasProjectPermissions,)
def get_db(self, request):
MappedBase = get_mapped_base(request)
engine = get_engine(request)
return MappedBase.metadata, engine
def update_base(self, request):
MappedBase = get_mapped_base(request)
reload_mapped_base(MappedBase)
def get_table(self, request):
metadata, engine = self.get_db(request)
table_name = request.path_kwargs['table']
try:
obj = list(filter(lambda x: x.name == table_name, metadata.tables.values()))[0]
except IndexError:
raise NotFound
self.check_object_permissions(request, obj)
return obj
def get_object(self, request):
metadata, engine = self.get_db(request)
table_name = request.path_kwargs['table']
try:
table = list(filter(lambda x: x.name == table_name, metadata.tables.values()))[0]
except IndexError:
raise NotFound
pk = request.path_kwargs['pk']
obj = table.columns.get(pk)
if obj is None:
raise NotFound
self.check_object_permissions(request, obj)
return obj
def list(self, request, *args, **kwargs):
table = self.get_table(request)
columns = list(map(lambda x: map_column(x, True), table.columns))
return JSONResponse(columns)
def retrieve(self, request, *args, **kwargs):
instance = self.get_object(request)
return JSONResponse(map_column(instance, True))
def create(self, request, *args, **kwargs):
serializer = TableColumnSerializer(data=request.data)
serializer.is_valid(raise_exception=True)
try:
self.perform_create(request, serializer)
except Exception as e:
raise ValidationError(str(e))
return JSONResponse(serializer.representation_data, status=status.HTTP_201_CREATED)
def perform_create(self, request, serializer):
metadata, engine = self.get_db(request)
table = self.get_table(request)
column = map_dto_column(serializer.validated_data, metadata=metadata)
column._set_parent(table)
ddl_compiler = engine.dialect.ddl_compiler(engine.dialect, None)
column_specification = ddl_compiler.get_column_specification(column)
table_name = ddl_compiler.preparer.format_table(table)
engine.execute('''ALTER TABLE {0} ADD COLUMN {1}'''.format(table_name, column_specification))
for foreign_key in column.foreign_keys:
if not foreign_key.constraint:
foreign_key._set_table(column, table)
engine.execute(AddConstraint(foreign_key.constraint))
metadata.remove(table)
metadata.reflect(bind=engine, only=[table.name])
self.update_base(request)
def destroy(self, request, *args, **kwargs):
instance = self.get_object(request)
self.perform_destroy(request, instance)
return JSONResponse(status=status.HTTP_204_NO_CONTENT)
def perform_destroy(self, request, column):
metadata, engine = self.get_db(request)
table = self.get_table(request)
ddl_compiler = engine.dialect.ddl_compiler(engine.dialect, None)
table_name = ddl_compiler.preparer.format_table(table)
column_name = ddl_compiler.preparer.format_column(column)
engine.execute('''ALTER TABLE {0} DROP COLUMN {1} '''.format(table_name, column_name))
metadata.remove(table)
metadata.reflect(bind=engine, only=[table.name])
self.update_base(request)
def update(self, request, *args, **kwargs):
partial = kwargs.pop('partial', False)
instance = self.get_object(request)
serializer = TableColumnSerializer(data=request.data, partial=partial)
serializer.is_valid(raise_exception=True)
try:
self.perform_update(request, instance, serializer)
except Exception as e:
raise ValidationError(str(e))
return JSONResponse(serializer.representation_data)
def perform_update(self, request, existing_column, serializer):
metadata, engine = self.get_db(request)
table = self.get_table(request)
existing_data = map_column(existing_column, True)
existing_dto = {
'name': existing_data['name'],
'field': existing_data['field'],
'primary_key': existing_column.table.primary_key.columns[0].name == existing_data['name']
}
if 'length' in existing_data:
existing_dto['length'] = existing_data['length']
column = map_dto_column({
**existing_dto,
**serializer.validated_data
}, metadata=metadata)
column._set_parent(table)
ddl_compiler = engine.dialect.ddl_compiler(engine.dialect, None)
table_name = ddl_compiler.preparer.format_table(table)
column_name = ddl_compiler.preparer.format_column(column)
existing_column_name = ddl_compiler.preparer.format_column(existing_column)
column_type = column.type.compile(engine.dialect)
engine.execute('''ALTER TABLE {0} ALTER COLUMN {1} TYPE {2}'''.format(table_name, existing_column_name, column_type))
# engine.execute('ALTER TABLE {0} ALTER COLUMN {1} TYPE {2} USING {1}::integer'.format(table_name, existing_column_name, column_type))
if column.nullable:
engine.execute('''ALTER TABLE {0} ALTER COLUMN {1} DROP NOT NULL'''.format(table_name, existing_column_name))
else:
engine.execute('''ALTER TABLE {0} ALTER COLUMN {1} SET NOT NULL'''.format(table_name, existing_column_name))
default = ddl_compiler.get_column_default_string(column)
if default is not None:
engine.execute('''ALTER TABLE {0} ALTER COLUMN {1} SET DEFAULT {2}'''.format(table_name, existing_column_name, default))
else:
engine.execute('''ALTER TABLE {0} ALTER COLUMN {1} DROP DEFAULT'''.format(table_name, existing_column_name))
for foreign_key in column.foreign_keys:
if not foreign_key.constraint:
existing_foreign_keys = list(filter(lambda x: x.target_fullname == foreign_key.target_fullname, existing_column.foreign_keys))
if len(existing_foreign_keys):
continue
foreign_key._set_table(column, table)
engine.execute(AddConstraint(foreign_key.constraint))
if column_name != existing_column_name:
engine.execute('''ALTER TABLE {0} RENAME COLUMN {1} TO {2}'''.format(table_name, existing_column_name, column_name))
metadata.remove(table)
metadata.reflect(bind=engine, only=[table.name])
self.update_base(request)
def partial_update(self, *args, **kwargs):
kwargs['partial'] = True
return self.update(*args, **kwargs)
| [
"jet_bridge_base.db.get_engine",
"sqlalchemy.sql.ddl.AddConstraint",
"jet_bridge_base.db.get_mapped_base",
"jet_bridge_base.utils.db_types.map_to_sql_type",
"jet_bridge_base.utils.db_types.db_to_sql_type",
"sqlalchemy.ForeignKey",
"sqlalchemy.text",
"jet_bridge_base.responses.json.JSONResponse",
"je... | [((865, 899), 'jet_bridge_base.utils.db_types.db_to_sql_type', 'db_to_sql_type', (["column['db_field']"], {}), "(column['db_field'])\n", (879, 899), False, 'from jet_bridge_base.utils.db_types import map_to_sql_type, db_to_sql_type\n'), ((929, 961), 'jet_bridge_base.utils.db_types.map_to_sql_type', 'map_to_sql_type', (["column['field']"], {}), "(column['field'])\n", (944, 961), False, 'from jet_bridge_base.utils.db_types import map_to_sql_type, db_to_sql_type\n'), ((3169, 3193), 'jet_bridge_base.db.get_mapped_base', 'get_mapped_base', (['request'], {}), '(request)\n', (3184, 3193), False, 'from jet_bridge_base.db import get_mapped_base, get_engine, reload_mapped_base\n'), ((3211, 3230), 'jet_bridge_base.db.get_engine', 'get_engine', (['request'], {}), '(request)\n', (3221, 3230), False, 'from jet_bridge_base.db import get_mapped_base, get_engine, reload_mapped_base\n'), ((3332, 3356), 'jet_bridge_base.db.get_mapped_base', 'get_mapped_base', (['request'], {}), '(request)\n', (3347, 3356), False, 'from jet_bridge_base.db import get_mapped_base, get_engine, reload_mapped_base\n'), ((3365, 3395), 'jet_bridge_base.db.reload_mapped_base', 'reload_mapped_base', (['MappedBase'], {}), '(MappedBase)\n', (3383, 3395), False, 'from jet_bridge_base.db import get_mapped_base, get_engine, reload_mapped_base\n'), ((4435, 4456), 'jet_bridge_base.responses.json.JSONResponse', 'JSONResponse', (['columns'], {}), '(columns)\n', (4447, 4456), False, 'from jet_bridge_base.responses.json import JSONResponse\n'), ((4678, 4718), 'jet_bridge_base.serializers.table.TableColumnSerializer', 'TableColumnSerializer', ([], {'data': 'request.data'}), '(data=request.data)\n', (4699, 4718), False, 'from jet_bridge_base.serializers.table import TableColumnSerializer\n'), ((4925, 5001), 'jet_bridge_base.responses.json.JSONResponse', 'JSONResponse', (['serializer.representation_data'], {'status': 'status.HTTP_201_CREATED'}), '(serializer.representation_data, status=status.HTTP_201_CREATED)\n', 
(4937, 5001), False, 'from jet_bridge_base.responses.json import JSONResponse\n'), ((6067, 6114), 'jet_bridge_base.responses.json.JSONResponse', 'JSONResponse', ([], {'status': 'status.HTTP_204_NO_CONTENT'}), '(status=status.HTTP_204_NO_CONTENT)\n', (6079, 6114), False, 'from jet_bridge_base.responses.json import JSONResponse\n'), ((6834, 6891), 'jet_bridge_base.serializers.table.TableColumnSerializer', 'TableColumnSerializer', ([], {'data': 'request.data', 'partial': 'partial'}), '(data=request.data, partial=partial)\n', (6855, 6891), False, 'from jet_bridge_base.serializers.table import TableColumnSerializer\n'), ((7108, 7152), 'jet_bridge_base.responses.json.JSONResponse', 'JSONResponse', (['serializer.representation_data'], {}), '(serializer.representation_data)\n', (7120, 7152), False, 'from jet_bridge_base.responses.json import JSONResponse\n'), ((7334, 7367), 'jet_bridge_base.views.model_description.map_column', 'map_column', (['existing_column', '(True)'], {}), '(existing_column, True)\n', (7344, 7367), False, 'from jet_bridge_base.views.model_description import map_column\n'), ((4580, 4606), 'jet_bridge_base.views.model_description.map_column', 'map_column', (['instance', '(True)'], {}), '(instance, True)\n', (4590, 4606), False, 'from jet_bridge_base.views.model_description import map_column\n'), ((1364, 1377), 'sqlalchemy.text', 'text', (['"""NOW()"""'], {}), "('NOW()')\n", (1368, 1377), False, 'from sqlalchemy import Column, text, ForeignKey\n'), ((2642, 2668), 'sqlalchemy.ForeignKey', 'ForeignKey', (['related_column'], {}), '(related_column)\n', (2652, 2668), False, 'from sqlalchemy import Column, text, ForeignKey\n'), ((1454, 1480), 'sqlalchemy.text', 'text', (['"""uuid_generate_v4()"""'], {}), "('uuid_generate_v4()')\n", (1458, 1480), False, 'from sqlalchemy import Column, text, ForeignKey\n'), ((4383, 4402), 'jet_bridge_base.views.model_description.map_column', 'map_column', (['x', '(True)'], {}), '(x, True)\n', (4393, 4402), False, 'from 
jet_bridge_base.views.model_description import map_column\n'), ((5748, 5785), 'sqlalchemy.sql.ddl.AddConstraint', 'AddConstraint', (['foreign_key.constraint'], {}), '(foreign_key.constraint)\n', (5761, 5785), False, 'from sqlalchemy.sql.ddl import AddConstraint\n'), ((9525, 9562), 'sqlalchemy.sql.ddl.AddConstraint', 'AddConstraint', (['foreign_key.constraint'], {}), '(foreign_key.constraint)\n', (9538, 9562), False, 'from sqlalchemy.sql.ddl import AddConstraint\n')] |
import re
import io
class FrameDecorator:
_decorators = {}
@staticmethod
def get_frame(name):
if name in FrameDecorator._decorators:
return FrameDecorator._decorators[name]
return None
@staticmethod
def set_frame(name, frame):
FrameDecorator._decorators[name] = frame
@staticmethod
def remove_frame(name):
if name not in FrameDecorator._decorators:
return
del FrameDecorator._decorators[name]
@staticmethod
def decorator_route(route, fp):
for name, frame in FrameDecorator._decorators.items():
if frame.is_inherit_route(route):
fp = frame.render(route, fp)
fp.seek(0)
return fp
return fp
class BaseFrame:
START_TAG = '{%'
END_TAG = '%}'
def add_routes(self, *args):
inherit_routes = getattr(self, 'inherit_routes', None)
if inherit_routes is None:
inherit_routes = []
setattr(self, 'inherit_routes', inherit_routes)
for r in args:
if r not in inherit_routes:
inherit_routes.append(r)
def remove_routes(self, *args):
inherit_routes = getattr(self, 'inherit_routes', None)
if inherit_routes is None:
return
for r in args:
if r in inherit_routes:
inherit_routes.remove(r)
def is_inherit_route(self, route):
inherit_routes = getattr(self, 'inherit_routes', None)
if inherit_routes is None:
return False
return route in inherit_routes
def render(self, route, route_fp, encode='utf8'):
raise NotImplementedError('class \'{0}\' has not implemented function \'render\''
.format(self.__class__.__name__))
@staticmethod
def _render(frame_content, route_content, keyword, start_tag='\{\%', end_tag='\%\}'):
pattern = '{0}[\s]*{1}[\s]*{2}'.format(start_tag, keyword, end_tag)
render_content = re.sub(pattern, route_content, frame_content)
return render_content
class SimpleFrame(BaseFrame):
def __init__(self, title=''):
self.title = title
self.keywords = {}
def add_keyword(self, keyword, r):
self.keywords[keyword] = r
def render(self, route, route_fp, encode='utf8'):
render_content = route_fp.read().decode(encode)
for k, r in self.keywords.items():
r = self._to_js_code(r)
render_content = BaseFrame._render(render_content, r, k)
fp = io.BytesIO(render_content.encode(encode))
return fp
def _to_js_code(self, r):
if isinstance(r, str) or isinstance(r, int) or isinstance(r, float):
return r
elif isinstance(r, list):
s = '['
for v in r:
if isinstance(v, str):
v = '\"{0}\"'.format(v)
else:
v = self._to_js_code(v)
s += '{0}, '.format(v)
s = s[:-2] + ']'
return s
elif isinstance(r, dict):
s = '{'
for k, v in r.items():
if isinstance(v, str):
v = '\"{0}\"'.format(v)
else:
v = self._to_js_code(v)
s += '{0}: {1}, '.format(k, v)
s = s[:-2] + '}'
return s
| [
"re.sub"
] | [((2038, 2083), 're.sub', 're.sub', (['pattern', 'route_content', 'frame_content'], {}), '(pattern, route_content, frame_content)\n', (2044, 2083), False, 'import re\n')] |
'''
MoreTransitions
======
Usage:
Import the transitions and use them with the ScreenManager class.
from kivy.garden.moretransitions import PixelTransition,RippleTransition,
BlurTransition,RVBTransition
screenManager = ScreenManager(transition=PixelTransition())
or
screenManager.transition = RVBTransition(duration=2.0)
'''
__all__ = (
'PixelTransition',
'RippleTransition',
'BlurTransition',
'RVBTransition',
'RotateTransition',
'TileTransition',
'FastSlideTransition',
'PageCurlTransition',
)
from kivy.uix.screenmanager import ShaderTransition
from kivy.properties import StringProperty, OptionProperty, NumericProperty, ColorProperty
from kivy.resources import resource_find, resource_add_path
from os.path import dirname
resource_add_path(dirname(__file__))
class TileTransition(ShaderTransition):
with open(resource_find('tile_transition.glsl')) as f:
TILE_TRANSITION_FS = f.read()
fs = StringProperty(TILE_TRANSITION_FS)
class PixelTransition(ShaderTransition):
PIXEL_TRANSITION_FS = '''$HEADER$
uniform float t;
uniform sampler2D tex_in;
uniform sampler2D tex_out;
vec2 myround(vec2 x) {
return vec2(floor(x.x + .5), floor(x.y + .5));
}
void main (void) {
float pixels;
float t2;
if (t < 0.5)
t2 = 1.0 - t * 2.0;
else
t2 = (t - 0.5) * 2.0;
pixels = 5.0 + 1000.0 * t2 * t2;
vec2 new = myround(tex_coord0.st * vec2(pixels,pixels)) /
vec2(pixels,pixels);
vec4 c1 = vec4(texture2D(tex_out, new));
vec4 c2 = vec4(texture2D(tex_in, tex_coord0.st));
float a = min(1.0, max(0.0, (t - 0.4) / 0.2));
gl_FragColor = c1 + vec4(a,a,a,a)*(c2-c1);
}
'''
fs = StringProperty(PIXEL_TRANSITION_FS)
class RippleTransition(ShaderTransition):
RIPPLE_TRANSITION_FS = '''$HEADER$
uniform float t;
uniform sampler2D tex_in;
uniform sampler2D tex_out;
void main (void) {
float frequency = 20.0;
float speed = 10.0;
float amplitude = 0.05;
vec2 center = vec2(0.5,0.5);
vec2 toUV = tex_coord0.st - center;
float distanceFromCenter = length(toUV);
vec2 normToUV = toUV / distanceFromCenter;
float wave = cos(frequency * distanceFromCenter - speed * t);
float offset1 = t * wave * amplitude;
float offset2 = (1.0 - t) * wave * amplitude;
vec2 newUV1 = center + normToUV * vec2(distanceFromCenter+offset1,
distanceFromCenter + offset1);
vec2 newUV2 = center + normToUV * vec2(distanceFromCenter+offset2,
distanceFromCenter + offset2);
vec4 c1 = vec4(texture2D(tex_out, newUV1));
vec4 c2 = vec4(texture2D(tex_in, newUV2));
gl_FragColor = c1 + vec4(t,t,t,t)*(c2 - c1);
}
'''
fs = StringProperty(RIPPLE_TRANSITION_FS)
class BlurTransition(ShaderTransition):
BLUR_TRANSITION_FS = '''$HEADER$
uniform float t;
uniform sampler2D tex_in;
uniform sampler2D tex_out;
void main (void) {
vec2 center = vec2(0.5,0.5);
vec2 toUV = tex_coord0.st - center;
vec4 c1 = vec4(0,0,0,0);
int count = 24;
float s = t * 0.02;
for(int i=0; i<count; i++)
c1 += texture2D(
tex_out,
tex_coord0.st - toUV * vec2(s,s) * vec2(i,i)
);
c1 /= vec4(count,count,count,count);
vec4 c2 = vec4(texture2D(tex_in, tex_coord0.st));
gl_FragColor = c1 + t*(c2 - c1);
}
'''
fs = StringProperty(BLUR_TRANSITION_FS)
class RVBTransition(ShaderTransition):
RVB_TRANSITION_FS = '''$HEADER$
uniform float t;
uniform sampler2D tex_in;
uniform sampler2D tex_out;
uniform vec2 resolution;
void main(void)
{
vec2 uv = vec2(gl_FragCoord.x / resolution.x, gl_FragCoord.y /
resolution.y);
float amount = 0.0;
amount = (1.0 + sin(t*6.0)) * 0.5;
amount *= 1.0 + sin(t*16.0) * 0.5;
amount *= 1.0 + sin(t*19.0) * 0.5;
amount *= 1.0 + sin(t*27.0) * 0.5;
amount = pow(amount, 3.0);
amount *= 0.03;
vec3 col;
col.r = texture2D( tex_out, vec2(uv.x+amount,uv.y) ).r * (1.0-t)
+ texture2D( tex_in, vec2(uv.x+amount,uv.y) ).r * t;
col.g = texture2D( tex_out, uv ).g * (1.0-t)
+ texture2D( tex_in, uv ).g * t;
col.b = texture2D( tex_out, vec2(uv.x-amount,uv.y) ).b * (1.0-t)
+ texture2D( tex_in, vec2(uv.x-amount,uv.y) ).b * t;
col = vec3(col.r*(1.0 - amount * 0.5),
col.g*(1.0 - amount * 0.5),
col.b*(1.0 - amount * 0.5));
gl_FragColor = vec4(col.r,col.g,col.b,1.0);
}
'''
fs = StringProperty(RVB_TRANSITION_FS)
def on_progress(self, progress):
self.render_ctx['resolution'] = [float(wh) for wh in self.screen_out.size]
super(RVBTransition, self).on_progress(progress)
class RotateTransition(ShaderTransition):
'''Rotate transition.
'''
direction = OptionProperty('left', options=('left', 'right', 'up', 'down'))
'''Direction of the transition.
:data:`direction` is an :class:`~kivy.properties.OptionProperty`, default
to left. Can be one of 'left', 'right', 'up' or 'down'.
'''
ROTATE_TRANSITION_HEADER = '''
$HEADER$
uniform float t;
uniform sampler2D tex_in;
uniform sampler2D tex_out;
const vec4 shadow = vec4(0.0, 0.0, 0.0, 1.0);
const float shadow_pow = 0.5;
void main(void) {
'''
ROTATE_TRANSITION_FOOTER = '''
vec4 cnew = cout;
float light = pow(1.0-tt, shadow_pow);
if ( tt + pos > 1.0) {
cnew = cin;
light=pow(tt, shadow_pow);
}
gl_FragColor = cnew*light*frag_color;
}'''
ROTATE_TRANSITION_LEFT = ROTATE_TRANSITION_HEADER + '''
float tt = t;
float pos = tex_coord0.x;
vec4 cin = texture2D(tex_in,
vec2(1.0-(1.0-tex_coord0.x)/tt, tex_coord0.y));
vec4 cout = texture2D(tex_out,
vec2(tex_coord0.x/(1.0-tt), tex_coord0.y));
''' + ROTATE_TRANSITION_FOOTER
ROTATE_TRANSITION_RIGHT = ROTATE_TRANSITION_HEADER + '''
float tt = 1.0 - t;
float pos = tex_coord0.x;
vec4 cin = texture2D(tex_out,
vec2(1.0-(1.0-tex_coord0.x)/tt, tex_coord0.y));
vec4 cout = texture2D(tex_in,
vec2(tex_coord0.x/(1.0-tt), tex_coord0.y));
''' + ROTATE_TRANSITION_FOOTER
ROTATE_TRANSITION_UP = ROTATE_TRANSITION_HEADER + '''
float tt = t;
float pos = tex_coord0.y;
vec4 cin = texture2D(tex_in,
vec2(tex_coord0.x, 1.0-(1.0-tex_coord0.y)/tt));
vec4 cout = texture2D(tex_out,
vec2(tex_coord0.x, tex_coord0.y/(1.0-tt)));
''' + ROTATE_TRANSITION_FOOTER
ROTATE_TRANSITION_DOWN = ROTATE_TRANSITION_HEADER + '''
float tt = 1.0 - t;
float pos = tex_coord0.y;
vec4 cin = texture2D(tex_out,
vec2(tex_coord0.x, 1.0-(1.0-tex_coord0.y)/tt));
vec4 cout = texture2D(tex_in,
vec2(tex_coord0.x, tex_coord0.y/(1.0-tt)));
''' + ROTATE_TRANSITION_FOOTER
fs = StringProperty(ROTATE_TRANSITION_LEFT)
def __init__(self, **kwargs):
self.on_direction(kwargs.get('direction', 'left'))
super(RotateTransition, self).__init__(**kwargs)
def on_direction(self, *largs):
if largs[0] == 'left':
self.fs = self.ROTATE_TRANSITION_LEFT
if largs[0] == 'right':
self.fs = self.ROTATE_TRANSITION_RIGHT
if largs[0] == 'up':
self.fs = self.ROTATE_TRANSITION_UP
if largs[0] == 'down':
self.fs = self.ROTATE_TRANSITION_DOWN
class FastSlideTransition(ShaderTransition):
direction = OptionProperty('left', options=('left', 'right', 'up', 'down'))
'''Direction of the transition.
:data:`direction` is an :class:`~kivy.properties.OptionProperty`, default
to left. Can be one of 'left', 'right', 'up' or 'down'.
'''
FAST_SLIDE_TRANSITION_UP = '''
$HEADER$
uniform float t;
uniform sampler2D tex_in;
uniform sampler2D tex_out;
uniform vec2 resolution;
float y2, n;
float BLURMAX = 50.;
float T = smoothstep(0., 1., t);
void main(void){
vec4 c = vec4(0., 0., 0., 0.);
if (tex_coord0.y < 1. - T) {
float squash = mix(.95, 1., pow(1. - t, 2.));
float x = .5 + (tex_coord0.x - .5) / squash;
float y = tex_coord0.y + T;
if (0. < x && x < 1.) {
for (n=0.; n < BLURMAX; n+=1.) {
y2 = y - n / resolution.y;
if (0. <= y2 && y2 <= 1.)
c += texture2D(tex_out, vec2(x, y2)) / BLURMAX;
}
gl_FragColor = mix(c, texture2D(tex_out, vec2(x, y)), pow(1. - t, 5.));
} else
gl_FragColor = vec4(0, 0, 0, 0);
} else {
float squash = mix(.95, 1., pow(t, 2.));
float x = .5 + (tex_coord0.x - .5) / squash;
float y = tex_coord0.y - 1. + T;
if (0. < x && x < 1.) {
for (n=0.; n < BLURMAX; n+=1.) {
y2 = y - n / resolution.y;
if (0. < y2 && y2 < 1.)
c += texture2D(tex_in, vec2(x, y2)) / BLURMAX;
}
gl_FragColor = mix(c, texture2D(tex_in, vec2(x, y)), pow(t, 5.));
} else
gl_FragColor = vec4(0, 0, 0, 0);
}
}
''' # noqa
FAST_SLIDE_TRANSITION_LEFT = '''
$HEADER$
uniform float t;
uniform sampler2D tex_in;
uniform sampler2D tex_out;
uniform vec2 resolution;
float x2, n;
float BLURMAX = 50.;
float T = smoothstep(0., 1., t);
void main(void){
vec4 c = vec4(0., 0., 0., 0.);
if (tex_coord0.x < 1. - T) {
float squash = mix(.95, 1., pow(1. - t, 2.));
float y = .5 + (tex_coord0.y - .5) / squash;
float x = tex_coord0.x + T;
if (0. < y && y < 1.) {
for (n=0.; n < BLURMAX; n+=1.) {
x2 = x - n / resolution.x;
if (0. <= x2 && x2 <= 1.)
c += texture2D(tex_out, vec2(x2, y)) / BLURMAX;
}
gl_FragColor = mix(c, texture2D(tex_out, vec2(x, y)), pow(1. - t, 5.));
} else
gl_FragColor = vec4(0, 0, 0, 0);
} else {
float squash = mix(.95, 1., pow(t, 2.));
float y = .5 + (tex_coord0.y - .5) / squash;
float x = tex_coord0.x - 1. + T;
if (0. < y && y < 1.) {
for (n=0.; n < BLURMAX; n+=1.) {
x2 = x - n / resolution.x;
if (0. < x2 && x2 < 1.)
c += texture2D(tex_in, vec2(x2, y)) / BLURMAX;
}
gl_FragColor = mix(c, texture2D(tex_in, vec2(x, y)), pow(t, 5.));
} else
gl_FragColor = vec4(0, 0, 0, 0);
}
}
''' # noqa
FAST_SLIDE_TRANSITION_DOWN = '''
$HEADER$
uniform float t;
uniform sampler2D tex_in;
uniform sampler2D tex_out;
uniform vec2 resolution;
float y2, n;
float T = smoothstep(1., 0., t);
float BLURMAX = 50.;
void main(void){
vec4 c = vec4(0., 0., 0., 0.);
if (tex_coord0.y < 1. - T) {
float squash = mix(.95, 1., pow(t, 2.));
float x = .5 + (tex_coord0.x - .5) / squash;
float y = tex_coord0.y + T;
if (0. < x && x < 1.) {
for (n=0.; n < BLURMAX; n+=1.) {
y2 = y - n / resolution.y;
if (0. <= y2 && y2 <= 1.)
c += texture2D(tex_in, vec2(x, y2)) / BLURMAX;
}
gl_FragColor = mix(c, texture2D(tex_in, vec2(x, y)), pow(t, 5.));
} else
gl_FragColor = vec4(0, 0, 0, 0);
} else {
float squash = mix(.95, 1., pow(1. - t, 2.));
float x = .5 + (tex_coord0.x - .5) / squash;
float y = tex_coord0.y - 1. + T;
if (0. < x && x < 1.) {
for (n=0.; n < BLURMAX; n+=1.) {
y2 = y - n / resolution.y;
if (0. <= y2 && y2 <= 1.)
c += texture2D(tex_out, vec2(x, y2)) / BLURMAX;
}
gl_FragColor = mix(c, texture2D(tex_out, vec2(x, y)), pow(1. - t, 5.));
} else
gl_FragColor = vec4(0, 0, 0, 0);
}
}
''' # noqa
FAST_SLIDE_TRANSITION_RIGHT = '''
$HEADER$
uniform float t;
uniform sampler2D tex_in;
uniform sampler2D tex_out;
uniform vec2 resolution;
float x2, n;
float T = smoothstep(1., 0., t);
float BLURMAX = 50.;
void main(void){
vec4 c = vec4(0., 0., 0., 0.);
if (tex_coord0.x < 1. - T) {
float squash = mix(.95, 1., pow(t, 2.));
float y = .5 + (tex_coord0.y - .5) / squash;
float x = tex_coord0.x + T;
if (0. < y && y < 1.) {
for (n=0.; n < BLURMAX; n+=1.) {
x2 = x - n / resolution.x;
if (0. <= x2 && x2 <= 1.)
c += texture2D(tex_in, vec2(x2, y)) / BLURMAX;
}
gl_FragColor = mix(c, texture2D(tex_in, vec2(x, y)), pow(t, 5.));
} else
gl_FragColor = vec4(0, 0, 0, 0);
} else {
float squash = mix(.95, 1., pow(1. - t, 2.));
float y = .5 + (tex_coord0.y - .5) / squash;
float x = tex_coord0.x - 1. + T;
if (0. < y && y < 1.) {
for (n=0.; n < BLURMAX; n+=1.) {
x2 = x - n / resolution.x;
if (0. <= x2 && x2 <= 1.)
c += texture2D(tex_out, vec2(x2, y)) / BLURMAX;
}
gl_FragColor = mix(c, texture2D(tex_out, vec2(x, y)), pow(1. - t, 5.));
} else
gl_FragColor = vec4(0, 0, 0, 0);
}
}
''' # noqa
fs = StringProperty()
def __init__(self, **kwargs):
self.on_direction(self, kwargs.get('direction', 'down'))
super(FastSlideTransition, self).__init__(**kwargs)
def on_progress(self, progress):
self.render_ctx['resolution'] = [float(wh) for wh in self.screen_out.size]
super(FastSlideTransition, self).on_progress(progress)
def on_direction(self, *largs):
if largs[1] == 'left':
self.fs = self.FAST_SLIDE_TRANSITION_LEFT
elif largs[1] == 'right':
self.fs = self.FAST_SLIDE_TRANSITION_RIGHT
elif largs[1] == 'up':
self.fs = self.FAST_SLIDE_TRANSITION_UP
elif largs[1] == 'down':
self.fs = self.FAST_SLIDE_TRANSITION_DOWN
class ShatterTransition(ShaderTransition):
direction = OptionProperty('left', options=('left', 'right', 'up', 'down'))
'''Direction of the transition.
:data:`direction` is an :class:`~kivy.properties.OptionProperty`, default
to left. Can be one of 'left', 'right', 'up' or 'down'.
'''
rows = NumericProperty(10)
cols = NumericProperty(10)
SHATTER_TRANSITION_UP = '''
$HEADER$
uniform float t;
uniform sampler2D tex_in;
uniform sampler2D tex_out;
uniform vec2 resolution;
uniform float rows, cols;
void main(void){
float X, Y;
X = floor(coords.x / cols);
Y = floor(coords.y / rows);
}
'''
def on_cols(self, *largs):
self.render_ctx['cols'] = self.cols
def on_rows(self, *largs):
self.render_ctx['rows'] = self.rows
def on_direction(self, *largs):
if largs[1] == 'left':
self.fs = self.SHATTER_TRANSITION_UP
elif largs[1] == 'right':
self.fs = self.SHATTER_TRANSITION_UP
elif largs[1] == 'up':
self.fs = self.SHATTER_TRANSITION_UP
elif largs[1] == 'down':
self.fs = self.SHATTER_TRANSITION_UP
class PageCurlTransition(ShaderTransition):
direction = OptionProperty("bottom-top", options=["bottom-top", "top-bottom"])
PAGE_CURL_TRANSITION_FS = """
$HEADER$
#define pi 3.14159265359
#define radius .1
uniform float t;
uniform float direction;
uniform float aspect;
uniform vec2 resolution;
uniform sampler2D tex_in;
uniform sampler2D tex_out;
//IDK why but need to remap it to work, if something doesnt works try remap xD
float map(float value)
{
float low_map_from = 0., high_map_from = 1., low_map_to = 0.075, high_map_to = -1.15;
return low_map_to + (value - low_map_from) * (high_map_to - low_map_to) / (high_map_from - low_map_from);
}
void main( void )
{
float aspect_ratio = 0.0;
if (aspect == 1.0) {aspect_ratio = resolution.x / resolution.y;}
else {aspect_ratio = resolution.y / resolution.x; }
vec2 uv = gl_FragCoord.xy/resolution.xy;
vec2 dir = vec2(0.15,-1.0);
vec2 origin = vec2(0.0,0.0);
float move = 0.;
if (direction == 1.0) {move = map(t);}
else {move = map(1.0 - t);}
float proj = dot(uv - origin, dir);
float dist = proj - move ;
vec2 linePoint = uv - dist * dir ;
if (dist > radius)
{
if (direction == 1.0) {gl_FragColor = texture2D(tex_in, uv);}
else{gl_FragColor = texture2D(tex_out, uv);}
gl_FragColor.rgb *= pow(clamp(dist - radius, 0., 1.) * 1.5, .2);
}
else if (dist >= 0.)
{
float theta = asin(dist / radius);
vec2 p2 = linePoint + dir * (pi - theta) * radius;
vec2 p1 = linePoint + dir * theta * radius;
uv = (p2.x <= aspect_ratio && p2.y <= 1. && p2.x > 0. && p2.y > 0.) ? p2 : p1;
if (direction == 1.0) {gl_FragColor = texture2D(tex_out, uv);}
else {gl_FragColor = texture2D(tex_in, uv);}
gl_FragColor.rgb *= pow(clamp((radius - dist) / radius, 0., 1.), .2);
}
else
{
vec2 p = linePoint + dir * (abs(dist) + pi * radius) ;
uv = (p.x <= aspect_ratio && p.y <= 1. && p.x > 0. && p.y > 0.) ? p : uv;
if (direction == 1.0) {gl_FragColor = texture2D(tex_out, uv);}
else {gl_FragColor = texture2D(tex_in, uv);}
}
}
"""
fs = StringProperty(PAGE_CURL_TRANSITION_FS)
clearcolor = ColorProperty([0, 0, 0, 0])
def add_screen(self, screen):
super().add_screen(screen)
self.render_ctx["resolution"] = list(map(float, screen.size))
aspect_ratio = screen.size[0]/screen.size[1]
self.render_ctx["aspect"] = 1.0 * (aspect_ratio > 1.0) + 2.0 * (1.0 >= aspect_ratio)
if self.direction == "bottom-top":
self.render_ctx["direction"] = 1.0
else:
self.render_ctx["direction"] = 2.0
KV = '''
FloatLayout:
ScreenManager:
id: sm
Screen:
name: '0'
Image:
source: '../examples/demo/pictures/images/Bubbles.jpg'
allow_stretch: True
Screen:
name: '1'
Image:
source: '../examples/demo/pictures/images/faust_github.jpg'
allow_stretch: True
GridLayout:
id: box
size_hint: .5, .2
pos_hint: {'center_x': .5, 'y': 0}
cols: 2
Button:
text: 'previous'
size_hint: None, None
size: 100, 48
pos_hint: {'center_y': .5}
on_press:
# if hasattr(sm.transition, 'direction'): sm.transition.direction = 'left'
sm.current = sm.previous()
Button:
text: 'next'
size_hint: None, None
size: 100, 48
pos_hint: {'center_y': .5, 'right': 1}
on_press:
# if hasattr(sm.transition, 'direction'): sm.transition.direction = 'right'
sm.current = sm.next()
''' # noqa
if __name__ == '__main__':
transitions = {
k: v(duration=.5) for k, v in globals().items()
if isinstance(v, type) and
issubclass(v, ShaderTransition)
}
from kivy.lang import Builder
from kivy.base import runTouchApp
from kivy.factory import Factory
root = Builder.load_string(KV)
def update_transition(button):
root.ids.sm.transition = transitions[button.text]
for k, v in transitions.items():
btn = Factory.Button(
text=k,
on_press=update_transition
)
root.ids.box.add_widget(btn)
runTouchApp(root)
| [
"kivy.properties.OptionProperty",
"kivy.lang.Builder.load_string",
"kivy.factory.Factory.Button",
"os.path.dirname",
"kivy.properties.StringProperty",
"kivy.properties.ColorProperty",
"kivy.properties.NumericProperty",
"kivy.base.runTouchApp",
"kivy.resources.resource_find"
] | [((844, 861), 'os.path.dirname', 'dirname', (['__file__'], {}), '(__file__)\n', (851, 861), False, 'from os.path import dirname\n'), ((1011, 1045), 'kivy.properties.StringProperty', 'StringProperty', (['TILE_TRANSITION_FS'], {}), '(TILE_TRANSITION_FS)\n', (1025, 1045), False, 'from kivy.properties import StringProperty, OptionProperty, NumericProperty, ColorProperty\n'), ((1943, 1978), 'kivy.properties.StringProperty', 'StringProperty', (['PIXEL_TRANSITION_FS'], {}), '(PIXEL_TRANSITION_FS)\n', (1957, 1978), False, 'from kivy.properties import StringProperty, OptionProperty, NumericProperty, ColorProperty\n'), ((3141, 3177), 'kivy.properties.StringProperty', 'StringProperty', (['RIPPLE_TRANSITION_FS'], {}), '(RIPPLE_TRANSITION_FS)\n', (3155, 3177), False, 'from kivy.properties import StringProperty, OptionProperty, NumericProperty, ColorProperty\n'), ((3938, 3972), 'kivy.properties.StringProperty', 'StringProperty', (['BLUR_TRANSITION_FS'], {}), '(BLUR_TRANSITION_FS)\n', (3952, 3972), False, 'from kivy.properties import StringProperty, OptionProperty, NumericProperty, ColorProperty\n'), ((5289, 5322), 'kivy.properties.StringProperty', 'StringProperty', (['RVB_TRANSITION_FS'], {}), '(RVB_TRANSITION_FS)\n', (5303, 5322), False, 'from kivy.properties import StringProperty, OptionProperty, NumericProperty, ColorProperty\n'), ((5596, 5659), 'kivy.properties.OptionProperty', 'OptionProperty', (['"""left"""'], {'options': "('left', 'right', 'up', 'down')"}), "('left', options=('left', 'right', 'up', 'down'))\n", (5610, 5659), False, 'from kivy.properties import StringProperty, OptionProperty, NumericProperty, ColorProperty\n'), ((7922, 7960), 'kivy.properties.StringProperty', 'StringProperty', (['ROTATE_TRANSITION_LEFT'], {}), '(ROTATE_TRANSITION_LEFT)\n', (7936, 7960), False, 'from kivy.properties import StringProperty, OptionProperty, NumericProperty, ColorProperty\n'), ((8534, 8597), 'kivy.properties.OptionProperty', 'OptionProperty', (['"""left"""'], {'options': 
"('left', 'right', 'up', 'down')"}), "('left', options=('left', 'right', 'up', 'down'))\n", (8548, 8597), False, 'from kivy.properties import StringProperty, OptionProperty, NumericProperty, ColorProperty\n'), ((14952, 14968), 'kivy.properties.StringProperty', 'StringProperty', ([], {}), '()\n', (14966, 14968), False, 'from kivy.properties import StringProperty, OptionProperty, NumericProperty, ColorProperty\n'), ((15755, 15818), 'kivy.properties.OptionProperty', 'OptionProperty', (['"""left"""'], {'options': "('left', 'right', 'up', 'down')"}), "('left', options=('left', 'right', 'up', 'down'))\n", (15769, 15818), False, 'from kivy.properties import StringProperty, OptionProperty, NumericProperty, ColorProperty\n'), ((16013, 16032), 'kivy.properties.NumericProperty', 'NumericProperty', (['(10)'], {}), '(10)\n', (16028, 16032), False, 'from kivy.properties import StringProperty, OptionProperty, NumericProperty, ColorProperty\n'), ((16044, 16063), 'kivy.properties.NumericProperty', 'NumericProperty', (['(10)'], {}), '(10)\n', (16059, 16063), False, 'from kivy.properties import StringProperty, OptionProperty, NumericProperty, ColorProperty\n'), ((16956, 17022), 'kivy.properties.OptionProperty', 'OptionProperty', (['"""bottom-top"""'], {'options': "['bottom-top', 'top-bottom']"}), "('bottom-top', options=['bottom-top', 'top-bottom'])\n", (16970, 17022), False, 'from kivy.properties import StringProperty, OptionProperty, NumericProperty, ColorProperty\n'), ((19337, 19376), 'kivy.properties.StringProperty', 'StringProperty', (['PAGE_CURL_TRANSITION_FS'], {}), '(PAGE_CURL_TRANSITION_FS)\n', (19351, 19376), False, 'from kivy.properties import StringProperty, OptionProperty, NumericProperty, ColorProperty\n'), ((19394, 19421), 'kivy.properties.ColorProperty', 'ColorProperty', (['[0, 0, 0, 0]'], {}), '([0, 0, 0, 0])\n', (19407, 19421), False, 'from kivy.properties import StringProperty, OptionProperty, NumericProperty, ColorProperty\n'), ((21252, 21275), 
'kivy.lang.Builder.load_string', 'Builder.load_string', (['KV'], {}), '(KV)\n', (21271, 21275), False, 'from kivy.lang import Builder\n'), ((21549, 21566), 'kivy.base.runTouchApp', 'runTouchApp', (['root'], {}), '(root)\n', (21560, 21566), False, 'from kivy.base import runTouchApp\n'), ((21422, 21472), 'kivy.factory.Factory.Button', 'Factory.Button', ([], {'text': 'k', 'on_press': 'update_transition'}), '(text=k, on_press=update_transition)\n', (21436, 21472), False, 'from kivy.factory import Factory\n'), ((919, 956), 'kivy.resources.resource_find', 'resource_find', (['"""tile_transition.glsl"""'], {}), "('tile_transition.glsl')\n", (932, 956), False, 'from kivy.resources import resource_find, resource_add_path\n')] |
from unittest.mock import patch
from django.test import TestCase
from sidekick.tasks import add_label_to_turn_conversation, archive_turn_conversation
from sidekick.tests.utils import create_org
class AddLabelToTurnConversationTests(TestCase):
    """Tests for the add_label_to_turn_conversation task."""

    def setUp(self):
        self.org = create_org()

    @patch("sidekick.tasks.label_whatsapp_message")
    @patch("sidekick.tasks.get_whatsapp_contact_messages")
    def test_inbound_filtering_and_sorting(self, get_messages, label_message):
        """
        It should only look at inbound messages, and pick the latest one
        """

        def msg(direction, message_id, timestamp):
            # Minimal Turn message payload with just the fields the task reads.
            return {
                "_vnd": {"v1": {"direction": direction}},
                "id": message_id,
                "timestamp": timestamp,
            }

        get_messages.return_value = {
            "messages": [
                msg("outbound", "ignore-outbound", "1"),
                msg("inbound", "first-inbound", "2"),
                msg("inbound", "second-inbound", "3"),
            ]
        }
        label_message.return_value = {}

        add_label_to_turn_conversation(self.org.id, "27820001001", ["label1", "label2"])

        get_messages.assert_called_once_with(self.org, "27820001001")
        label_message.assert_called_once_with(
            self.org, "second-inbound", ["label1", "label2"]
        )
class ArchiveTurnConversationTests(TestCase):
    """Tests for the archive_turn_conversation task."""

    def setUp(self):
        self.org = create_org()

    @patch("sidekick.tasks.archive_whatsapp_conversation")
    @patch("sidekick.tasks.get_whatsapp_contact_messages")
    def test_get_last_message(self, get_messages, archive):
        """
        It should archive the conversation to the message with the greatest timestamp
        """

        def msg(direction, message_id, timestamp):
            # Minimal Turn message payload with just the fields the task reads.
            return {
                "_vnd": {"v1": {"direction": direction}},
                "id": message_id,
                "timestamp": timestamp,
            }

        get_messages.return_value = {
            "messages": [
                msg("outbound", "ignore-outbound", "1"),
                msg("inbound", "first-inbound", "2"),
                msg("inbound", "second-inbound", "3"),
                msg("outbound", "ignore-outbound-2", "4"),
            ]
        }
        archive.return_value = {}

        archive_turn_conversation(self.org.id, "27820001001", "Test reason")

        get_messages.assert_called_once_with(self.org, "27820001001")
        archive.assert_called_once_with(
            self.org, "27820001001", "second-inbound", "Test reason"
        )
| [
"unittest.mock.patch",
"sidekick.tests.utils.create_org",
"sidekick.tasks.add_label_to_turn_conversation",
"sidekick.tasks.archive_turn_conversation"
] | [((306, 352), 'unittest.mock.patch', 'patch', (['"""sidekick.tasks.label_whatsapp_message"""'], {}), "('sidekick.tasks.label_whatsapp_message')\n", (311, 352), False, 'from unittest.mock import patch\n'), ((358, 411), 'unittest.mock.patch', 'patch', (['"""sidekick.tasks.get_whatsapp_contact_messages"""'], {}), "('sidekick.tasks.get_whatsapp_contact_messages')\n", (363, 411), False, 'from unittest.mock import patch\n'), ((1646, 1699), 'unittest.mock.patch', 'patch', (['"""sidekick.tasks.archive_whatsapp_conversation"""'], {}), "('sidekick.tasks.archive_whatsapp_conversation')\n", (1651, 1699), False, 'from unittest.mock import patch\n'), ((1705, 1758), 'unittest.mock.patch', 'patch', (['"""sidekick.tasks.get_whatsapp_contact_messages"""'], {}), "('sidekick.tasks.get_whatsapp_contact_messages')\n", (1710, 1758), False, 'from unittest.mock import patch\n'), ((287, 299), 'sidekick.tests.utils.create_org', 'create_org', ([], {}), '()\n', (297, 299), False, 'from sidekick.tests.utils import create_org\n'), ((1269, 1354), 'sidekick.tasks.add_label_to_turn_conversation', 'add_label_to_turn_conversation', (['self.org.id', '"""27820001001"""', "['label1', 'label2']"], {}), "(self.org.id, '27820001001', ['label1', 'label2']\n )\n", (1299, 1354), False, 'from sidekick.tasks import add_label_to_turn_conversation, archive_turn_conversation\n'), ((1627, 1639), 'sidekick.tests.utils.create_org', 'create_org', ([], {}), '()\n', (1637, 1639), False, 'from sidekick.tests.utils import create_org\n'), ((2789, 2857), 'sidekick.tasks.archive_turn_conversation', 'archive_turn_conversation', (['self.org.id', '"""27820001001"""', '"""Test reason"""'], {}), "(self.org.id, '27820001001', 'Test reason')\n", (2814, 2857), False, 'from sidekick.tasks import add_label_to_turn_conversation, archive_turn_conversation\n')] |
import torch
from model import CutPasteNet
import math
from anomaly_detection import AnomalyDetection
from dataset import MVTecAD
from torch.utils.data import DataLoader
import os
from tqdm import tqdm
from PIL import Image
import math
import numpy as np
from scipy import signal
import torchvision.transforms as transforms
import cv2
import shutil
class Localize:
    """Patch-wise anomaly localization using dense CutPaste embeddings.

    Slides a window of size ``kernel_dim`` over the input image with the given
    ``stride``, embeds every patch with the CutPaste encoder, and scores the
    patch embeddings with a Gaussian density estimator (GDE).
    """

    def __init__(self, model_weights, kernel_dim=(32, 32), stride=4, batch_size=128,
                 device='cuda', image_size=(256, 256)):
        self.kernel_dim = kernel_dim
        self.stride = stride
        self.batch_size = batch_size
        self.anomaly = AnomalyDetection(model_weights, batch_size)
        self.device = device
        self.transform = transforms.Compose([transforms.Resize(image_size),
                                             transforms.ToTensor(),
                                             transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])
                                             ])

    def extract_image_patches(self, image):
        """Unfold *image* (a 1xCxHxW tensor) into dense overlapping patches.

        Returns a tuple of batches, each of shape (<=batch_size, 3, *kernel_dim).
        """
        unfold = torch.nn.Unfold(self.kernel_dim, stride=self.stride)
        # NOTE(review): Unfold yields shape (C*kh*kw, L) after squeeze; reshaping
        # straight to (-1, 3, kh, kw) assumes this layout matches patch-major
        # ordering — confirm against the training pipeline.
        image_patches = unfold(image).squeeze(0).reshape(-1, 3, *self.kernel_dim)
        batched_patches = torch.split(image_patches, self.batch_size)
        return batched_patches

    def extract_patch_embeddings(self, image):
        """Embed every patch of *image*; returns a (D, D, C) embedding grid."""
        patches = self.extract_image_patches(image)
        patch_embeddings = []
        with torch.no_grad():
            for patch in patches:
                logits, patch_embed = self.anomaly.cutpaste_model(patch.to(self.device))
                patch_embeddings.append(patch_embed.to('cpu'))
                del logits, patch
        embeddings = torch.cat(patch_embeddings)
        # Bug fix: use the true number of patches rather than
        # len(patches) * batch_size — the last batch produced by torch.split()
        # is usually smaller than batch_size, so the old estimate over-counted
        # and could make the reshape below fail (e.g. few patches, one batch).
        patch_dim = int(round(math.sqrt(embeddings.shape[0])))
        patch_matrix = embeddings.reshape(patch_dim, patch_dim, -1)
        return patch_matrix

    def patch_GDE_fit(self, train_images, aligned_obj=False):
        """Fit a Gaussian density estimator on patch embeddings of the training set."""
        dataset = MVTecAD(train_images, mode='test')
        dataloader = DataLoader(dataset, batch_size=1)
        embeds = []
        for img, _ in tqdm(dataloader):
            patch_matrix = self.extract_patch_embeddings(img)
            # TODO: some MvTech classes were fitted into separate models for each location based on paper
            if aligned_obj:
                pass
            else:
                w, h, c = patch_matrix.shape
                flat = patch_matrix.reshape(w * h, c)
                embeds.append(flat)
        GDE_model = self.anomaly.GDE_fit(torch.cat(embeds))
        return GDE_model

    def patch_scores(self, path_to_trian, test_image_pil):
        """Score every patch of a test image; returns a (1, 1, W, H) score map.

        NOTE(review): this re-fits the GDE on every call — cache the fitted
        model if many images are scored against the same training set.
        """
        GDE_model = self.patch_GDE_fit(path_to_trian)
        image = Image.open(test_image_pil)
        test_image = self.transform(image).unsqueeze(0)
        patch_matrix = self.extract_patch_embeddings(test_image)
        w, h, c = patch_matrix.shape
        flat = patch_matrix.reshape(w * h, c)
        score = self.anomaly.GDE_scores(flat, GDE_model)
        score_matrix = score.reshape(1, 1, w, h)
        return score_matrix
class Gaussian_smoothing:
    """
    The class does receptive field upsampling via Gaussian smoothing which
    essentially applies the transposed convolution with the stride of 4, the same stride
    that is used for dense feature extraction, using a single convolution kernel of size 32x32
    whose weights are determined by a Gaussian distribution.

    Gaussian kernel generation function is taken from https://github.com/liznerski/fcdd.
    """

    def __init__(self, kernel_size=32, stride=4, std=None, device=None):
        self.kernel_size = kernel_size
        self.stride = stride
        # NOTE(review): `not std` also treats std=0 as "unset"; a zero std is
        # meaningless for a Gaussian window, so this is acceptable.
        self.std = self.kernel_size_to_std() if not std else std
        if device:
            self.device = device
        else:
            self.device = 'cuda' if torch.cuda.is_available() else 'cpu'

    def kernel_size_to_std(self):
        """ Returns a standard deviation value for a Gaussian kernel based on its size """
        return np.log10(0.45 * self.kernel_size + 1) + 0.25 if self.kernel_size < 32 else 10

    def gkern(self):
        """Returns a 2D Gaussian kernel array with given kernel size k and self.std std """
        # Compatibility fix: scipy.signal.gaussian() was removed in SciPy 1.13;
        # the canonical location is scipy.signal.windows.gaussian (available
        # since SciPy 1.1), so prefer it and fall back on very old releases.
        gaussian = signal.windows.gaussian if hasattr(signal, 'windows') else signal.gaussian
        if self.kernel_size % 2 == 0:
            # if kernel size is even, signal.gaussian returns center values sampled from gaussian at x=-1 and x=1
            # which is much less than 1.0 (depending on std). Instead, sample with kernel size k-1 and duplicate center
            # value, which is 1.0. Then divide whole signal by 2, because the duplicate results in a too high signal.
            gkern1d = gaussian(self.kernel_size - 1, std=self.std).reshape(self.kernel_size - 1, 1)
            gkern1d = np.insert(gkern1d, (self.kernel_size - 1) // 2, gkern1d[(self.kernel_size - 1) // 2]) / 2
        else:
            gkern1d = gaussian(self.kernel_size, std=self.std).reshape(self.kernel_size, 1)
        gkern2d = np.outer(gkern1d, gkern1d)
        return gkern2d

    def upsample(self, X):
        """Upsample score map *X* (numpy, NCHW) via a Gaussian transposed conv."""
        tconv = torch.nn.ConvTranspose2d(1, 1, kernel_size=self.kernel_size, stride=self.stride)
        # Replace the learned weights with the fixed Gaussian kernel.
        tconv.weight.data = torch.from_numpy(self.gkern()).unsqueeze(0).unsqueeze(0).float()
        tconv.to(self.device)
        X = torch.from_numpy(X).float().to(self.device)
        out = tconv(X).detach().cpu().numpy()
        return out
def heatmap_on_image(image, hmap):
    """Blend an anomaly heat map over the image stored at path *image*.

    *hmap* is a (1, 1, H, W) array; the overlay is returned as a BGR image.
    """
    base = cv2.resize(cv2.imread(image), (256, 256), interpolation=cv2.INTER_AREA)
    # Drop the two leading singleton axes, then add a trailing channel axis.
    heat = np.uint8(np.expand_dims(hmap.squeeze(0).squeeze(0), axis=2))
    colored = cv2.applyColorMap(heat, cv2.COLORMAP_JET)
    return cv2.addWeighted(colored, 0.7, base, 0.3, 0)
def save_anomaly_map(image, hmap, save_path):
    """Write the source image and its heat-map overlay into *save_path*.

    Produces ``<name>.jpg`` (a copy of the input image) and
    ``<name>_amap.jpg`` (the heat map blended on top of it).
    """
    imposed_image = heatmap_on_image(image, hmap)
    # os.path handles both '/' and '\\' separators and keeps interior dots in
    # the stem (the old split('/') / split('.') parsing turned 'img.v2.png'
    # into 'img' and broke on Windows paths).
    file_name = os.path.splitext(os.path.basename(image))[0]
    shutil.copy(image, os.path.join(save_path, f'{file_name}.jpg'))
    cv2.imwrite(os.path.join(save_path, f'{file_name}_amap.jpg'), imposed_image)
# Example usage (MVTec "bottle" class):
# L = Localize('./weights-bottle.ckpt')
# sp = L.patch_scores('./bottle/train/', './bottle/test/broken_large/004.png')
# GS = Gaussian_smoothing()
# up = GS.upsample(sp)
# heatmap_on_image('./bottle/test/broken_large/004.png', up)

L = Localize('/home/lilityolyan/stuff/cutpaste/tb_logs_3way/carpet/version_0/checkpoints/weights.ckpt')
sp = L.patch_scores('/media/lilityolyan/DATA/damage/mvtec/carpet/train', '/media/lilityolyan/DATA/damage/mvtec/carpet/test/hole/008.png')
GS = Gaussian_smoothing()
up = GS.upsample(sp)
# Bug fix: heatmap_on_image() takes (image_path, heat_map); the training
# directory was mistakenly passed as an extra first argument, which raised a
# TypeError before any visualization happened.
heatmap_on_image('/media/lilityolyan/DATA/damage/mvtec/carpet/test/hole/008.png', up)
| [
"torch.cat",
"torchvision.transforms.Normalize",
"torch.no_grad",
"os.path.join",
"torch.utils.data.DataLoader",
"numpy.insert",
"numpy.log10",
"cv2.resize",
"tqdm.tqdm",
"numpy.uint8",
"dataset.MVTecAD",
"torch.split",
"torch.nn.Unfold",
"cv2.addWeighted",
"torch.cuda.is_available",
"... | [((5419, 5436), 'cv2.imread', 'cv2.imread', (['image'], {}), '(image)\n', (5429, 5436), False, 'import cv2\n'), ((5447, 5504), 'cv2.resize', 'cv2.resize', (['img', '(256, 256)'], {'interpolation': 'cv2.INTER_AREA'}), '(img, (256, 256), interpolation=cv2.INTER_AREA)\n', (5457, 5504), False, 'import cv2\n'), ((5555, 5583), 'numpy.expand_dims', 'np.expand_dims', (['hmap'], {'axis': '(2)'}), '(hmap, axis=2)\n', (5569, 5583), True, 'import numpy as np\n'), ((5595, 5609), 'numpy.uint8', 'np.uint8', (['hmap'], {}), '(hmap)\n', (5603, 5609), True, 'import numpy as np\n'), ((5628, 5669), 'cv2.applyColorMap', 'cv2.applyColorMap', (['hmap', 'cv2.COLORMAP_JET'], {}), '(hmap, cv2.COLORMAP_JET)\n', (5645, 5669), False, 'import cv2\n'), ((5680, 5726), 'cv2.addWeighted', 'cv2.addWeighted', (['heatmap_img', '(0.7)', 'img', '(0.3)', '(0)'], {}), '(heatmap_img, 0.7, img, 0.3, 0)\n', (5695, 5726), False, 'import cv2\n'), ((625, 668), 'anomaly_detection.AnomalyDetection', 'AnomalyDetection', (['model_weights', 'batch_size'], {}), '(model_weights, batch_size)\n', (641, 668), False, 'from anomaly_detection import AnomalyDetection\n'), ((1064, 1116), 'torch.nn.Unfold', 'torch.nn.Unfold', (['self.kernel_dim'], {'stride': 'self.stride'}), '(self.kernel_dim, stride=self.stride)\n', (1079, 1116), False, 'import torch\n'), ((1226, 1269), 'torch.split', 'torch.split', (['image_patches', 'self.batch_size'], {}), '(image_patches, self.batch_size)\n', (1237, 1269), False, 'import torch\n'), ((1948, 1982), 'dataset.MVTecAD', 'MVTecAD', (['train_images'], {'mode': '"""test"""'}), "(train_images, mode='test')\n", (1955, 1982), False, 'from dataset import MVTecAD\n'), ((2004, 2037), 'torch.utils.data.DataLoader', 'DataLoader', (['dataset'], {'batch_size': '(1)'}), '(dataset, batch_size=1)\n', (2014, 2037), False, 'from torch.utils.data import DataLoader\n'), ((2080, 2096), 'tqdm.tqdm', 'tqdm', (['dataloader'], {}), '(dataloader)\n', (2084, 2096), False, 'from tqdm import tqdm\n'), ((2679, 
2705), 'PIL.Image.open', 'Image.open', (['test_image_pil'], {}), '(test_image_pil)\n', (2689, 2705), False, 'from PIL import Image\n'), ((4950, 4976), 'numpy.outer', 'np.outer', (['gkern1d', 'gkern1d'], {}), '(gkern1d, gkern1d)\n', (4958, 4976), True, 'import numpy as np\n'), ((5048, 5133), 'torch.nn.ConvTranspose2d', 'torch.nn.ConvTranspose2d', (['(1)', '(1)'], {'kernel_size': 'self.kernel_size', 'stride': 'self.stride'}), '(1, 1, kernel_size=self.kernel_size, stride=self.stride\n )\n', (5072, 5133), False, 'import torch\n'), ((5914, 5957), 'os.path.join', 'os.path.join', (['save_path', 'f"""{file_name}.jpg"""'], {}), "(save_path, f'{file_name}.jpg')\n", (5926, 5957), False, 'import os\n'), ((5975, 6023), 'os.path.join', 'os.path.join', (['save_path', 'f"""{file_name}_amap.jpg"""'], {}), "(save_path, f'{file_name}_amap.jpg')\n", (5987, 6023), False, 'import os\n'), ((1443, 1458), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (1456, 1458), False, 'import torch\n'), ((2506, 2523), 'torch.cat', 'torch.cat', (['embeds'], {}), '(embeds)\n', (2515, 2523), False, 'import torch\n'), ((744, 773), 'torchvision.transforms.Resize', 'transforms.Resize', (['image_size'], {}), '(image_size)\n', (761, 773), True, 'import torchvision.transforms as transforms\n'), ((820, 841), 'torchvision.transforms.ToTensor', 'transforms.ToTensor', ([], {}), '()\n', (839, 841), True, 'import torchvision.transforms as transforms\n'), ((888, 954), 'torchvision.transforms.Normalize', 'transforms.Normalize', (['[0.485, 0.456, 0.406]', '[0.229, 0.224, 0.225]'], {}), '([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])\n', (908, 954), True, 'import torchvision.transforms as transforms\n'), ((1764, 1791), 'torch.cat', 'torch.cat', (['patch_embeddings'], {}), '(patch_embeddings)\n', (1773, 1791), False, 'import torch\n'), ((3828, 3853), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (3851, 3853), False, 'import torch\n'), ((4015, 4052), 'numpy.log10', 'np.log10', (['(0.45 * 
self.kernel_size + 1)'], {}), '(0.45 * self.kernel_size + 1)\n', (4023, 4052), True, 'import numpy as np\n'), ((4729, 4818), 'numpy.insert', 'np.insert', (['gkern1d', '((self.kernel_size - 1) // 2)', 'gkern1d[(self.kernel_size - 1) // 2]'], {}), '(gkern1d, (self.kernel_size - 1) // 2, gkern1d[(self.kernel_size -\n 1) // 2])\n', (4738, 4818), True, 'import numpy as np\n'), ((4622, 4673), 'scipy.signal.gaussian', 'signal.gaussian', (['(self.kernel_size - 1)'], {'std': 'self.std'}), '(self.kernel_size - 1, std=self.std)\n', (4637, 4673), False, 'from scipy import signal\n'), ((4855, 4902), 'scipy.signal.gaussian', 'signal.gaussian', (['self.kernel_size'], {'std': 'self.std'}), '(self.kernel_size, std=self.std)\n', (4870, 4902), False, 'from scipy import signal\n'), ((5263, 5282), 'torch.from_numpy', 'torch.from_numpy', (['X'], {}), '(X)\n', (5279, 5282), False, 'import torch\n')] |
# import the required modules.
import os, stat, pdb
from util import *
# runfile: write the run script string to a file.
#
def runfile(filename):
    """Write the global run script string to *filename* and mark it executable."""
    # write the output file; 'with' guarantees the handle is closed even if
    # the write raises.
    with open(filename, 'w') as f:
        f.write(runstr)
    # set the owner-executable bit on the file.
    st = os.stat(filename)
    os.chmod(filename, st.st_mode | stat.S_IEXEC)
# fasta: write the sequence of a structure to a fasta file.
#
def fasta(struct, filename):
    """Write the one-letter residue sequence of *struct* in FASTA format."""
    # build a dictionary mapping residue number to name, then translate to
    # one-letter codes (oneletter comes from util).
    smap = {atom['resSeq']: atom['resName'] for atom in struct.models[0]}
    seq = [oneletter[name] for name in smap.values()]
    # header line names the source structure.
    lines = ['> {}'.format(struct.filename)]
    # wrap the sequence at 50 residues per line.
    for i in range(0, len(seq), 50):
        lines.append(''.join(seq[i:i + 50]))
    # include a couple blank lines at the end of the file.
    lines.extend(['', ''])
    # build the output string and write it to file; 'with' closes the handle
    # even on error.
    with open(filename, 'w') as f:
        f.write('\n'.join(lines))
# restraints: write a restraint file from a pdb structure.
#
def restraints(struct, filename):
    """Write base, dihedral and distance restraints for *struct* to *filename*."""
    # 'with' closes the file even if one of the writers raises.
    with open(filename, 'w') as f:
        rbase(struct, f)
        rdihed(struct, f)
        rdist(struct, f)
# rbase: write a set of base restraints from a pdb structure.
#
def rbase(s, f):
    """Write per-residue base restraints plus C-terminal restraints to file f."""
    # residue numbering range of the first model.
    rmin, rmax = minmax([atom['resSeq'] for atom in s.models[0]])
    nres = rmax - rmin + 1
    out = ['', '{* base restraints *}']
    # one N-CA-C-HA dihedral restraint per residue (1-based indices).
    out.extend(dihed.format(i, 'N', i, 'CA', i, 'C', i, 'HA', -119, 0)
               for i in range(1, nres + 1))
    # the C-terminal residue gets two extra dihedral restraints.
    out.append(dihed.format(nres, 'HA', nres, 'C', nres, 'CA', nres, 'O', 128.5, 0))
    out.append(dihed.format(nres, 'CA', nres, 'O', nres, 'C', nres, 'O2', 180, 0))
    out.append('')
    f.write('\n'.join(out))
# rdihed: write a set of dihedral restraints from a pdb structure.
#
def rdihed(s, f):
    """Write phi/psi/omega dihedral restraints for every residue to file f.

    NOTE(review): 'pdb' here is a project-local module providing dihed()/dist()
    — the stdlib debugger has no such functions; confirm the import resolves
    to the local module.
    """
    # get the start and end residue numbers.
    (rmin, rmax) = minmax([atom['resSeq'] for atom in s.models[0]])
    # loop over the residues.
    lines = []
    for resid in range(rmin, rmax + 1):
        # get the residue indices (h = previous, i = current, j = next; 1-based).
        h = resid - rmin
        i = resid - rmin + 1
        j = resid - rmin + 2
        # print a heading.
        lines.append('')
        lines.append('{{* resid {} *}}'.format(i))
        # get the relevant atom positions; s.select() may return a falsy value
        # at chain boundaries, which skips the corresponding dihedral below.
        Ch = s.select(resid - 1, 'C')
        Ni = s.select(resid, 'N')
        CAi = s.select(resid, 'CA')
        Ci = s.select(resid, 'C')
        Nj = s.select(resid + 1, 'N')
        CAj = s.select(resid + 1, 'CA')
        # compute the phi dihedral (C[-1]-N-CA-C).
        if Ch and Ni and CAi and Ci:
            phi = pdb.dihed(Ch, Ni, CAi, Ci)
            (phi0, dphi) = midrange(phi)
            line = dihed.format(h, 'C', i, 'N', i, 'CA', i, 'C', phi0, dphi)
            lines.append(line)
        # compute the psi dihedral (N-CA-C-N[+1]).
        if Ni and CAi and Ci and Nj:
            psi = pdb.dihed(Ni, CAi, Ci, Nj)
            (psi0, dpsi) = midrange(psi)
            line = dihed.format(i, 'N', i, 'CA', i, 'C', j, 'N', psi0, dpsi)
            lines.append(line)
        # compute the omega dihedral (CA-C-N[+1]-CA[+1]), wrapped by fixomega().
        if CAi and Ci and Nj and CAj:
            omega = pdb.dihed(CAi, Ci, Nj, CAj)
            (omega0, domega) = midrange(fixomega(omega))
            line = dihed.format(i, 'CA', i, 'C', j, 'N', j, 'CA', omega0, domega)
            lines.append(line)
    # build the output string and write it to the file.
    lines.append('')
    f.write('\n'.join(lines))
# rdist: write a set of distance restraints from a pdb structure.
#
def rdist(s, f):
    """Write amide-proton distance restraints for residue pairs >= 5 apart."""
    # get the start and end residue numbers.
    (rmin, rmax) = minmax([atom['resSeq'] for atom in s.models[0]])
    # print a heading.
    lines = ['', '{* distance restraints *}']
    # loop over the residues.
    for resid1 in range(rmin, rmax + 1):
        # get the first residue index and amide proton.
        i1 = resid1 - rmin + 1
        H1 = s.select(resid1, 'H')
        if not H1:
            continue
        # only pair with residues at least five positions downstream.
        for resid2 in range(resid1 + 5, rmax + 1):
            # get the second residue index and amide proton.
            i2 = resid2 - rmin + 1
            H2 = s.select(resid2, 'H')
            if not H2:
                continue
            # compute the distances; keep the pair if any comes within 6 A.
            d = pdb.dist(H1, H2)
            # any() short-circuits; the old len([True for ...]) built a full
            # list just to test for a single match.
            if any(di <= 6 for di in d):
                (d0, da, db) = meanbounds(d)
                line = dist.format(i1, 'H1', i2, 'H1', d0, da, db)
                lines.append(line)
    # build the output string and write it to the file.
    lines.append('')
    f.write('\n'.join(lines))
# params: write a parameter file from a pdb structure.
#
def params(struct, filename):
    """Write a parameter file with bond/angle statistics measured on *struct*.

    Each rule maps parameter-file atom types to PDB atom names (plus residue
    offsets); the measured mean and bounds are formatted into 'bond'/'angle'
    lines, followed by the contents of the base file 'protein.par'.
    """
    # define a rule set for bonds: (param atom types) -> (atom names, offset).
    bfmt = 'bond {:<4s} {:<4s} 1.0 {:>.9f} ! [{:.4f}, {:.4f}]'
    bonds = ((('NH1', 'H'), ('N', 'H', 0)),
             (('NH2', 'H'), ('N', 'H', 0)),
             (('NH1', 'CH1E'), ('N', 'CA', 0)),
             (('NH2', 'CH1E'), ('N', 'CA', 0)),
             (('CH1E', 'HA'), ('CA', 'HA', 0)),
             (('CH1E', 'C'), ('CA', 'C', 0)),
             (('C', 'NH1'), ('C', 'N', 1)),
             (('C', 'NH2'), ('C', 'N', 1)),
             (('C', 'O'), ('C', 'O', 0)),
             (('C', 'OC'), ('C', 'O', 0)))

    # define a rule set for angles.
    afmt = 'angle {:<4s} {:<4s} {:<4s} 1.0 {:>.9f} ! [{:.4f}, {:.4f}]'
    angles = ((('NH1', 'CH1E', 'C'), ('N', 'CA', 'C', 0, 0)),
              (('NH2', 'CH1E', 'C'), ('N', 'CA', 'C', 0, 0)),
              (('NH1', 'CH1E', 'HA'), ('N', 'CA', 'HA', 0, 0)),
              (('CH1E', 'C', 'NH1'), ('CA', 'C', 'N', 0, 1)),
              (('CH1E', 'C', 'O'), ('CA', 'C', 'O', 0, 0)),
              (('CH1E', 'C', 'OC'), ('CA', 'C', 'O', 0, 0)),
              (('CH1E', 'NH1', 'H'), ('CA', 'N', 'H', 0, 0)),
              (('C', 'CH1E', 'HA'), ('C', 'CA', 'HA', 0, 0)),
              (('C', 'NH1', 'CH1E'), ('C', 'N', 'CA', 1, 1)),
              (('C', 'NH1', 'H'), ('C', 'N', 'H', 0, 0)),
              (('O', 'C', 'NH1'), ('O', 'C', 'N', 0, 0)),
              (('NH1', 'CH1E', 'HA'), ('N', 'CA', 'HA', 0, 0)),
              (('NH2', 'CH1E', 'HA'), ('N', 'CA', 'HA', 0, 0)))

    # initialize the lines of output.
    lines = []

    # output the bonds.
    lines = lines + ['', '{* bonds *}']
    for key, val in bonds:
        d = struct.bondStats(*val)
        lines.append(bfmt.format(*(key + d)))

    # output the angles.
    lines = lines + ['', '{* angles *}']
    for key, val in angles:
        theta = struct.angleStats(*val)
        lines.append(afmt.format(*(key + theta)))

    # append the base parameter file; 'with' guarantees the handles close
    # even if reading or formatting raises.
    with open('protein.par') as f:
        lines = lines + [line.strip() for line in f.readlines()]

    # build the output string and write it to the file.
    lines.append('')
    with open(filename, 'w') as f:
        f.write('\n'.join(lines))
| [
"pdb.dihed",
"os.chmod",
"pdb.dist",
"os.stat"
] | [((271, 288), 'os.stat', 'os.stat', (['filename'], {}), '(filename)\n', (278, 288), False, 'import os, stat, pdb\n'), ((291, 336), 'os.chmod', 'os.chmod', (['filename', '(st.st_mode | stat.S_IEXEC)'], {}), '(filename, st.st_mode | stat.S_IEXEC)\n', (299, 336), False, 'import os, stat, pdb\n'), ((2928, 2954), 'pdb.dihed', 'pdb.dihed', (['Ch', 'Ni', 'CAi', 'Ci'], {}), '(Ch, Ni, CAi, Ci)\n', (2937, 2954), False, 'import os, stat, pdb\n'), ((3164, 3190), 'pdb.dihed', 'pdb.dihed', (['Ni', 'CAi', 'Ci', 'Nj'], {}), '(Ni, CAi, Ci, Nj)\n', (3173, 3190), False, 'import os, stat, pdb\n'), ((3405, 3432), 'pdb.dihed', 'pdb.dihed', (['CAi', 'Ci', 'Nj', 'CAj'], {}), '(CAi, Ci, Nj, CAj)\n', (3414, 3432), False, 'import os, stat, pdb\n'), ((4433, 4449), 'pdb.dist', 'pdb.dist', (['H1', 'H2'], {}), '(H1, H2)\n', (4441, 4449), False, 'import os, stat, pdb\n')] |
#!/usr/bin/python3
import pefile
import string
import os, sys
def tamperUpx(outfile):
    """Corrupt UPX metadata in a packed PE file so it cannot be unpacked.

    Renames UPX* section headers, wipes the "UPX!" magic and the version
    banner preceding it, and zeroes the PackHeader fields. Returns True if
    anything UPX-related was found and patched, False otherwise.
    """
    pe = pefile.PE(outfile)

    # Innocuous-looking replacement names for the UPX0/UPX1/... sections.
    newSectionNames = (
        '.text',
        '.data',
        '.rdata',
        '.idata',
        '.pdata',
    )

    num = 0
    sectnum = 0
    # The section table starts immediately after the optional header.
    section_table_offset = (pe.DOS_HEADER.e_lfanew + 4 +
        pe.FILE_HEADER.sizeof() + pe.FILE_HEADER.SizeOfOptionalHeader)

    found = 0

    print('Step 1. Renaming UPX sections...')

    for sect in pe.sections:
        # Each IMAGE_SECTION_HEADER entry is 0x28 bytes long.
        section_offset = section_table_offset + sectnum * 0x28
        sectnum += 1

        if sect.Name.decode().lower().startswith('upx'):
            found += 1
            # Section names are fixed 8-byte fields, zero-padded.
            newname = newSectionNames[num].encode() + ((8 - len(newSectionNames[num])) * b'\x00')

            print('\tRenamed UPX section ({}) => ({})'.format(
                sect.Name.decode(), newSectionNames[num]
            ))

            num += 1
            pe.set_bytes_at_offset(section_offset, newname)

    print('\nStep 2. Removing obvious indicators...')

    pos = pe.__data__.find(b'UPX!')
    if pos != -1:
        found += 1
        print('\tRemoved "UPX!" (UPX_MAGIC_LE32) magic value...')
        pe.set_bytes_at_offset(pos, b'\x00' * 4)

        # The printable version banner (e.g. "3.96") sits just before the magic.
        prev = pe.__data__[pos-5:pos-1]
        if all(chr(c) in string.printable for c in prev):
            print('\tRemoved "{}" indicator...'.format(prev.decode()))
            pe.set_bytes_at_offset(pos-5, b'\x00' * 4)

        # Bug fix: Step 3 previously ran even when b'UPX!' was not found
        # (pos == -1), reading and zeroing bytes near offset 3 inside the DOS
        # header. The PackHeader only exists relative to the magic, so it is
        # now patched only when the magic was located.
        print('\nStep 3. Corrupting PackHeader...')
        version = pe.__data__[pos + 4]
        _format = pe.__data__[pos + 5]
        method = pe.__data__[pos + 6]
        level = pe.__data__[pos + 7]
        print('\tOverwriting metadata (version={}, format={}, method={}, level={})...'.format(
            version, _format, method, level
        ))
        pe.set_bytes_at_offset(pos + 4, b'\x00')
        pe.set_bytes_at_offset(pos + 5, b'\x00')
        pe.set_bytes_at_offset(pos + 6, b'\x00')
        pe.set_bytes_at_offset(pos + 7, b'\x00')

        #
        # Src:
        #   https://github.com/upx/upx/blob/36670251fdbbf72f6ce165148875d369cae8f415/src/packhead.cpp#L187
        #   https://github.com/upx/upx/blob/36670251fdbbf72f6ce165148875d369cae8f415/src/stub/src/include/header.S#L33
        #
        u_adler = pe.get_dword_from_data(pe.__data__, pos + 8)
        c_adler = pe.get_dword_from_data(pe.__data__, pos + 12)
        u_len = pe.get_dword_from_data(pe.__data__, pos + 16)
        c_len = pe.get_dword_from_data(pe.__data__, pos + 20)
        origsize = pe.get_dword_from_data(pe.__data__, pos + 24)
        filter_id = pe.__data__[pos + 28]
        filter_cto = pe.__data__[pos + 29]
        unused = pe.__data__[pos + 30]
        header_chksum = pe.__data__[pos + 31]

        print('\tCorrupting stored lengths and sizes:')
        print('\t\t- uncompressed_adler (u_adler): ({} / 0x{:x}) => (0)'.format(u_adler, u_adler))
        pe.set_dword_at_offset(pos + 8, 0)

        print('\t\t- compressed_adler (c_adler): ({} / 0x{:x}) => (0)'.format(c_adler, c_adler))
        pe.set_dword_at_offset(pos + 12, 0)

        print('\t\t- uncompressed_len (u_len): ({} / 0x{:x}) => (0)'.format(u_len, u_len))
        pe.set_dword_at_offset(pos + 16, 0)

        print('\t\t- compressed_len (c_len): ({} / 0x{:x}) => (0)'.format(c_len, c_len))
        pe.set_dword_at_offset(pos + 20, 0)

        print('\t\t- original file size: ({} / 0x{:x}) => (0)'.format(origsize, origsize))
        pe.set_dword_at_offset(pos + 24, 0)

        print('\t\t- filter id: ({} / 0x{:x}) => (0)'.format(filter_id, filter_id))
        pe.set_bytes_at_offset(pos + 28, b'\x00')

        print('\t\t- filter cto: ({} / 0x{:x}) => (0)'.format(filter_cto, filter_cto))
        pe.set_bytes_at_offset(pos + 29, b'\x00')

        print('\t\t- unused: ({} / 0x{:x}) => (0)'.format(unused, unused))
        pe.set_bytes_at_offset(pos + 30, b'\x00')

        print('\t\t- header checksum: ({} / 0x{:x}) => (0)'.format(header_chksum, header_chksum))
        pe.set_bytes_at_offset(pos + 31, b'\x00')

    if found > 0:
        # Re-parse with the renamed section headers before writing back.
        pe.parse_sections(section_table_offset)
        pe.write(outfile)
        print('\n[+] UPX-protected executable corrupted: ' + outfile)
        return True
    else:
        print('\n[-] Input file does not resemble UPX packed executable (or it was already corrupted)')
        return False
def main(argv):
    """CLI entry point: copies infile to outfile (if given) and corrupts it.

    Returns 0 on success, 1 on usage error or missing input file.
    """
    print('''
    :: tamperUpx - a small utility that corrupts UPX-packed executables,
    making them much harder to be decompressed & restored.

    <NAME>. / mgeeky, '21
''')

    if len(argv) < 2:
        print('Usage: ./tamperUpx.py <infile> [outfile]')
        # Bug fix: execution previously fell through here and argv[1] raised
        # an IndexError; bail out with an error status instead.
        return 1

    infile = argv[1]
    outfile = ''

    if len(argv) >= 3:
        outfile = argv[2]

    if not os.path.isfile(infile):
        print('[!] Input file does not exist.')
        return 1

    if len(outfile) > 0:
        # Work on a copy so the original file is left intact.
        with open(outfile, 'wb') as f:
            with open(infile, 'rb') as g:
                f.write(g.read())
    else:
        outfile = infile

    if tamperUpx(outfile):
        print('[+] Success. UPX should have some issues decompressing output artifact now.')
    return 0
if __name__ == '__main__':
    # Propagate main()'s status code to the shell (previously the result was
    # discarded, so failures still exited with status 0).
    sys.exit(main(sys.argv))
"pefile.PE",
"os.path.isfile"
] | [((98, 116), 'pefile.PE', 'pefile.PE', (['outfile'], {}), '(outfile)\n', (107, 116), False, 'import pefile\n'), ((4773, 4795), 'os.path.isfile', 'os.path.isfile', (['infile'], {}), '(infile)\n', (4787, 4795), False, 'import os, sys\n')] |
# Lakeshore 332 Temperature Controller Driver
# <NAME> <<EMAIL>>
# This file is a driver for the Lakeshore 332 series temperature controller.
# Some sort of license information goes here.
from lantz import Feat, Action, DictFeat
from lantz.messagebased import MessageBasedDriver
from time import sleep
class Lakeshore332(MessageBasedDriver):
    """
    Lakeshore 332 Temperature controller.

    This class, based off of the Lantz MessageBasedDriver class, implements a
    set of basic controls for the Lakeshore 332 series temperature controller.
    It is essentially a port of a nice driver written for QtLab by <NAME>.

    Full documentation of the device is available at:
    http://www.lakeshore.com/ObsoleteAndResearchDocs/332_Manual.pdf
    """

    # These defaults assume that you have set the IEEE Term setting to: Lf Cr
    DEFAULTS = {'COMMON': {'write_termination': '\n',
                           'read_termination': ''}}

    # Placeholders for GPIB connection info (not referenced elsewhere in this
    # class).
    GPIB_name = None
    GPIB_address = -1
    # Sensor input channels and control loop identifiers accepted by the
    # DictFeats below.
    channels = ['a', 'b']
    loops = ['1', '2']
    # Human-readable name -> instrument code maps used as Feat value tables.
    heater_range_vals = {'off': 0, 'low': 1, 'medium': 2, 'high': 3}
    heater_status_vals = {'no error': 0, 'open load': 1, 'short': 2}
    controller_modes = {'local': 0, 'remote': 1, 'remote, local lockout': 2}
    cmodes = {'manual PID': 1, 'zone': 2, 'open loop': 3, 'AutoTune PID': 4,
              'AutoTune PI': 5, 'AutoTune P': 6}
    # Limits (Kelvin) for the measured-temperature and setpoint Feats.
    T_min = 0
    T_max = 350
    T_min_set = 1.8
    T_max_set = 350

    _verbose = True

    @Feat()
    def idn(self):
        """
        Returns the instrument identification.
        """
        print('getting IDN')
        return self.query('*IDN?')

    @Action()
    def reset(self):
        """
        Resets the Lakeshore 332 temperature controller.
        """
        self.write('*RST')

    @DictFeat(limits=(T_min, T_max), keys=channels)
    def kelvin_meas(self, channel):
        """
        Returns measured temperature reading from specified channel in Kelvin.
        """
        return float(self.query('KRDG?{}'.format(channel)))

    @DictFeat(keys=channels)
    def sensor(self, channel):
        """
        Returns sensor reading from specified channel.
        """
        return float(self.query('SRDG?{}'.format(channel)))

    @Feat(values=heater_status_vals)
    def heater_status(self):
        """
        Returns the heater status.
        """
        return int(self.query('HTRST?'))

    @Feat(values=heater_range_vals)
    def heater_range(self):
        """
        Returns the current heater range setting.
        """
        return int(self.query('RANGE?'))

    @heater_range.setter
    def heater_range(self, heater_setting):
        """
        Sets heater range to heater_setting.

        heater_setting must be an integer between 0 and 3 inclusive.
        """
        return self.write('RANGE {}'.format(heater_setting))

    @Feat()
    def heater_output_1(self):
        """
        Returns Loop 1 heater output in percent (%).
        """
        return float(self.query('HTR?'))

    @Feat()
    def heater_output_2(self):
        """
        Returns Loop 2 heater output in percent (%).
        """
        return float(self.query('AOUT?'))

    @Feat(values=controller_modes)
    def mode(self):
        """
        Reads the mode setting of the controller.
        """
        return int(self.query('MODE?'))

    @mode.setter
    def mode(self, mode):
        """
        Sets controller mode, valid mode inputs are:
            local (0)
            remote (1)
            remote, local lockout (2)
        """
        # NOTE(review): this uses query() with no space before the argument,
        # unlike heater_range's write('RANGE {}') — confirm against the
        # instrument manual that both forms are accepted.
        return self.query('MODE{}'.format(mode))

    @DictFeat(keys=loops)
    def pid(self, loop):
        """
        Get parameters for PID loop.
        """
        return self.query('PID?{}'.format(loop))

    @pid.setter
    def pid(self, loop, pid):
        """
        Set parameters for PID loop from a (P, I, D) sequence.
        """
        p = pid[0]
        i = pid[1]
        d = pid[2]
        return self.query('PID{},{},{},{}'.format(loop, p, i, d))

    @DictFeat(limits=(T_min_set, T_max_set), keys=loops)
    def setpoint(self, channel):
        """
        Return the temperature controller setpoint.
        """
        return float(self.query('SETP?{}'.format(channel)))

    @setpoint.setter
    def setpoint(self, loop, T_set):
        """
        Sets the setpoint of channel channel to value value
        """
        self.query('SETP{},{}'.format(loop, T_set))
        # brief pause, presumably to let the controller process the new
        # setpoint before the next command — verify against the manual.
        sleep(0.05)
        return

    @DictFeat(limits=(0, 100), keys=loops)
    def mout(self, loop):
        """
        Returns loop manual heater power output.
        """
        return self.query('MOUT?{}'.format(loop))

    @mout.setter
    def mout(self, loop, percent):
        """
        Sets loop manual heater power output in percent.
        """
        return self.query('MOUT{},{}'.format(loop, percent))

    @DictFeat(values=cmodes, keys=loops)
    def cmode(self, loop):
        """
        Returns the control mode according to the following table.
            'manual PID' (1)
            'zone'(2)
            'open loop' (3)
            'AutoTune PID' (4)
            'AutoTune PI' (5)
            'AutoTune P' (6)
        """
        return int(self.query('CMODE?{}'.format(loop)))

    @cmode.setter
    def cmode(self, loop, value):
        """
        Sets the control mode according to the following table.
            'manual PID' (1)
            'zone'(2)
            'open loop' (3)
            'AutoTune PID' (4)
            'AutoTune PI' (5)
            'AutoTune P' (6)
        """
        return self.query('CMODE{},{}'.format(loop, value))
if __name__ == '__main__':
    # Manual smoke test: connect to a Lakeshore 332 over GPIB and exercise
    # each driver feature in turn, printing the results.
    with Lakeshore332('GPIB0::16::INSTR') as inst:
        print('The instrument identification is ' + inst.idn)
        print('resetting...')
        # NOTE(review): bare attribute access — if `reset` is an Action it
        # probably needs to be called as inst.reset(); confirm.
        inst.reset
        print('reset.')
        # Testing mode switching functionality
        print('The current mode is ' + inst.mode + '.')
        inst.mode = 'remote, local lockout'
        print('Now the mode is ' + inst.mode + '.')
        inst.mode = 'remote'
        print('Now the mode is ' + inst.mode + '.')
        # Testing Kelvin read functionality
        print('Current temperature on channel a is ' +
              str(inst.kelvin_meas['a']) + ' Kelvin')
        print('Current temperature on channel b is ' +
              str(inst.kelvin_meas['b']) + ' Kelvin')
        # Testing sensor reading functionality
        print('Sensor reading on channel a is ' + str(inst.sensor['a']))
        print('Sensor reading on channel b is ' + str(inst.sensor['b']))
        # Testing heater status
        print('Heater status is ' + str(inst.heater_status))
        # Testing heater range
        print('Heater range is ' + str(inst.heater_range))
        inst.heater_range = 'low'
        print('Heater range is ' + str(inst.heater_range))
        inst.heater_range = 'off'
        print('Heater range is ' + str(inst.heater_range))
        # Testing heater output
        print('Loop 1 heater output ' + str(inst.heater_output_1) + '%')
        print('Loop 2 heater output ' + str(inst.heater_output_2) + '%')
        # Testing manual output
        print('Loop 1 manual output ' + str(inst.mout['1']))
        print('Loop 2 manual output ' + str(inst.mout['2']))
        inst.mout['1'] = 50
        inst.mout['2'] = 50
        print('Loop 1 manual output ' + str(inst.mout['1']))
        print('Loop 2 manual output ' + str(inst.mout['2']))
        inst.mout['1'] = 0
        inst.mout['2'] = 0
        print('Loop 1 manual output ' + str(inst.mout['1']))
        print('Loop 2 manual output ' + str(inst.mout['2']))
        # Testing cmode
        print('Loop 1 Command Mode: ' + str(inst.cmode['1']))
        inst.cmode['1'] = 'open loop'
        print('Loop 1 Command Mode: ' + str(inst.cmode['1']))
        inst.cmode['1'] = 'AutoTune P'
        print('Loop 1 Command Mode: ' + str(inst.cmode['1']))
        print('Loop 2 Command Mode: ' + str(inst.cmode['2']))
        # Testing setpoint
        inst.setpoint['1'] = 25
        print('Loop 1 setpoint is ' + str(inst.setpoint['1']))
        inst.setpoint['1'] = 50
        print('Loop 1 setpoint is ' + str(inst.setpoint['1']))
        inst.setpoint['1'] = 300
        print('Loop 1 setpoint is ' + str(inst.setpoint['1']))
        inst.setpoint['2'] = 300
        print('Loop 2 setpoint is ' + str(inst.setpoint['2']))
        # Testing PID
        inst.pid['1'] = list([10.0, 10.0, 10.0])
        print('Loop 1 PID parameters:' + str(inst.pid['1']))
        inst.pid['1'] = list([50.0, 20.0, 1.0])
        print('Loop 1 PID parameters:' + str(inst.pid['1']))
        print('Loop 2 PID parameters:' + str(inst.pid['2']))
| [
"lantz.Feat",
"time.sleep",
"lantz.DictFeat",
"lantz.Action"
] | [((1478, 1484), 'lantz.Feat', 'Feat', ([], {}), '()\n', (1482, 1484), False, 'from lantz import Feat, Action, DictFeat\n'), ((1645, 1653), 'lantz.Action', 'Action', ([], {}), '()\n', (1651, 1653), False, 'from lantz import Feat, Action, DictFeat\n'), ((1789, 1835), 'lantz.DictFeat', 'DictFeat', ([], {'limits': '(T_min, T_max)', 'keys': 'channels'}), '(limits=(T_min, T_max), keys=channels)\n', (1797, 1835), False, 'from lantz import Feat, Action, DictFeat\n'), ((2041, 2064), 'lantz.DictFeat', 'DictFeat', ([], {'keys': 'channels'}), '(keys=channels)\n', (2049, 2064), False, 'from lantz import Feat, Action, DictFeat\n'), ((2241, 2272), 'lantz.Feat', 'Feat', ([], {'values': 'heater_status_vals'}), '(values=heater_status_vals)\n', (2245, 2272), False, 'from lantz import Feat, Action, DictFeat\n'), ((2408, 2438), 'lantz.Feat', 'Feat', ([], {'values': 'heater_range_vals'}), '(values=heater_range_vals)\n', (2412, 2438), False, 'from lantz import Feat, Action, DictFeat\n'), ((2948, 2954), 'lantz.Feat', 'Feat', ([], {}), '()\n', (2952, 2954), False, 'from lantz import Feat, Action, DictFeat\n'), ((3110, 3116), 'lantz.Feat', 'Feat', ([], {}), '()\n', (3114, 3116), False, 'from lantz import Feat, Action, DictFeat\n'), ((3273, 3302), 'lantz.Feat', 'Feat', ([], {'values': 'controller_modes'}), '(values=controller_modes)\n', (3277, 3302), False, 'from lantz import Feat, Action, DictFeat\n'), ((3684, 3704), 'lantz.DictFeat', 'DictFeat', ([], {'keys': 'loops'}), '(keys=loops)\n', (3692, 3704), False, 'from lantz import Feat, Action, DictFeat\n'), ((4076, 4127), 'lantz.DictFeat', 'DictFeat', ([], {'limits': '(T_min_set, T_max_set)', 'keys': 'loops'}), '(limits=(T_min_set, T_max_set), keys=loops)\n', (4084, 4127), False, 'from lantz import Feat, Action, DictFeat\n'), ((4533, 4570), 'lantz.DictFeat', 'DictFeat', ([], {'limits': '(0, 100)', 'keys': 'loops'}), '(limits=(0, 100), keys=loops)\n', (4541, 4570), False, 'from lantz import Feat, Action, DictFeat\n'), ((4921, 4956), 
'lantz.DictFeat', 'DictFeat', ([], {'values': 'cmodes', 'keys': 'loops'}), '(values=cmodes, keys=loops)\n', (4929, 4956), False, 'from lantz import Feat, Action, DictFeat\n'), ((4500, 4511), 'time.sleep', 'sleep', (['(0.05)'], {}), '(0.05)\n', (4505, 4511), False, 'from time import sleep\n')] |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import with_statement
import contextlib
import tempfile
import os.path
from .basecase import cql, cqlsh, cqlshlog, TEST_HOST, TEST_PORT, rundir, policy, quote_name
from .run_cqlsh import run_cqlsh, call_cqlsh
# CQL script used to initialise each freshly created test keyspace.
test_keyspace_init = os.path.join(rundir, 'test_keyspace_init.cql')
def get_cassandra_connection(cql_version=cqlsh.DEFAULT_CQLVER):
    """Open a CQL connection to the test host, pinning the CQL version."""
    if cql_version is None:
        cql_version = cqlsh.DEFAULT_CQLVER
    connection = cql((TEST_HOST,), TEST_PORT, cql_version=cql_version,
                     load_balancing_policy=policy)
    # until the cql lib does this for us
    connection.cql_version = cql_version
    return connection
def get_cassandra_cursor(cql_version=cqlsh.DEFAULT_CQLVER):
    """Return a cursor from a fresh connection to the test cluster."""
    connection = get_cassandra_connection(cql_version=cql_version)
    return connection.cursor()
# Stack of keyspace names created by the tests, most recent last.
TEST_KEYSPACES_CREATED = []

def get_keyspace():
    """Return the most recently created test keyspace, or None if none exist."""
    if not TEST_KEYSPACES_CREATED:
        return None
    return TEST_KEYSPACES_CREATED[-1]
def make_ks_name():
    """Generate a quick random-ish keyspace name (abuses tempfile.mktemp)."""
    random_path = tempfile.mktemp(prefix='CqlshTests_')
    return os.path.basename(random_path)
def create_keyspace(cursor):
    """Create a uniquely named keyspace, USE it, and record it on the stack."""
    ksname = make_ks_name()
    qksname = quote_name(ksname)
    cursor.execute('''
        CREATE KEYSPACE %s WITH replication =
            {'class': 'SimpleStrategy', 'replication_factor': 1};
    ''' % qksname)
    cursor.execute('USE %s;' % qksname)
    TEST_KEYSPACES_CREATED.append(ksname)
    return ksname
def split_cql_commands(source):
    """Split ``source`` into its individual CQL statements (original text)."""
    ruleset = cql_rule_set()
    statements, endtoken_escaped = ruleset.cql_split_statements(source)
    if endtoken_escaped:
        raise ValueError("CQL source ends unexpectedly")
    extracted = []
    for toks in statements:
        if toks:
            extracted.append(ruleset.cql_extract_orig(toks, source))
    return extracted
def execute_cql_commands(cursor, source, logprefix='INIT: '):
    """Execute each CQL statement found in ``source`` on ``cursor``.

    Every statement is logged with ``logprefix`` before execution.

    Fix: the loop variable was previously named ``cql``, shadowing the
    ``cql`` connection factory imported at module level; it is renamed.
    """
    for statement in split_cql_commands(source):
        cqlshlog.debug(logprefix + statement)
        cursor.execute(statement)
def execute_cql_file(cursor, fname):
    """Run every CQL statement contained in the file ``fname``."""
    with open(fname) as f:
        source = f.read()
    return execute_cql_commands(cursor, source)
def create_db():
    """Create and initialise a fresh test keyspace; return its name."""
    with cassandra_cursor(ks=None) as cursor:
        ksname = create_keyspace(cursor)
        execute_cql_file(cursor, test_keyspace_init)
    return ksname
def remove_db():
    """Drop the most recently created test keyspace and pop it off the stack."""
    with cassandra_cursor(ks=None) as cursor:
        latest = TEST_KEYSPACES_CREATED.pop(-1)
        cursor.execute('DROP KEYSPACE %s' % quote_name(latest))
@contextlib.contextmanager
def cassandra_connection(cql_version=cqlsh.DEFAULT_CQLVER):
    """
    Make a Cassandra CQL connection with the given CQL version.

    The connection is the context manager's value, and it is closed
    when the context exits.
    """
    connection = get_cassandra_connection(cql_version=cql_version)
    try:
        yield connection
    finally:
        connection.close()
@contextlib.contextmanager
def cassandra_cursor(cql_version=None, ks=''):
    """
    Make a Cassandra CQL connection with the given CQL version and get a cursor
    for it, and optionally connect to a given keyspace. If ks is the empty
    string (default), connect to the last test keyspace created. If ks is None,
    do not connect to any keyspace. Otherwise, attempt to connect to the
    keyspace named.

    The cursor is returned as the context manager's value, and the connection
    will be closed when the context exits.
    """
    if ks == '':
        ks = get_keyspace()
    conn = get_cassandra_connection(cql_version=cql_version)
    try:
        c = conn.connect(ks)
        # increase default timeout to fix flacky tests, see CASSANDRA-12481
        c.default_timeout = 60.0
        # if ks is not None:
        #     c.execute('USE %s;' % quote_name(c, ks))
        yield c
    finally:
        conn.shutdown()
def cql_rule_set():
    """Return cqlsh's CQL3 statement-tokenisation rule set."""
    return cqlsh.cql3handling.CqlRuleSet
class DEFAULTVAL: pass

def testrun_cqlsh(keyspace=DEFAULTVAL, **kwargs):
    """Launch an interactive cqlsh, defaulting to the current test keyspace."""
    # use a positive default sentinel so that keyspace=None can be used
    # to override the default behavior
    if keyspace is DEFAULTVAL:
        keyspace = get_keyspace()
    return run_cqlsh(keyspace=keyspace, **kwargs)
def testcall_cqlsh(keyspace=None, **kwargs):
    """Invoke cqlsh non-interactively, defaulting to the current test keyspace."""
    keyspace = get_keyspace() if keyspace is None else keyspace
    return call_cqlsh(keyspace=keyspace, **kwargs)
| [
"tempfile.mktemp"
] | [((1767, 1804), 'tempfile.mktemp', 'tempfile.mktemp', ([], {'prefix': '"""CqlshTests_"""'}), "(prefix='CqlshTests_')\n", (1782, 1804), False, 'import tempfile\n')] |
from arguments import get_args
from ddpg_agent import ddpg_agent
import mujoco_py
import gym
if __name__ == '__main__':
    # Parse CLI options, build the Gym environment named on the command
    # line, then train a DDPG agent on it.
    args = get_args()
    env = gym.make(args.env_name)
    ddpg_trainer = ddpg_agent(args, env)
    ddpg_trainer.learn()
| [
"ddpg_agent.ddpg_agent",
"arguments.get_args",
"gym.make"
] | [((132, 142), 'arguments.get_args', 'get_args', ([], {}), '()\n', (140, 142), False, 'from arguments import get_args\n'), ((153, 176), 'gym.make', 'gym.make', (['args.env_name'], {}), '(args.env_name)\n', (161, 176), False, 'import gym\n'), ((196, 217), 'ddpg_agent.ddpg_agent', 'ddpg_agent', (['args', 'env'], {}), '(args, env)\n', (206, 217), False, 'from ddpg_agent import ddpg_agent\n')] |
# Copyright (C) 2016 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
"""Module for Assessment object"""
from sqlalchemy.orm import validates
from ggrc import db
from ggrc.models import mixins_reminderable
from ggrc.models import mixins_statusable
from ggrc.models import reflection
from ggrc.models.audit import Audit
from ggrc.models.comment import Commentable
from ggrc.models.mixin_autostatuschangable import AutoStatusChangable
from ggrc.models.mixins import BusinessObject
from ggrc.models.mixins import CustomAttributable
from ggrc.models.mixins import FinishedDate
from ggrc.models.mixins import TestPlanned
from ggrc.models.mixins import Timeboxed
from ggrc.models.mixins import VerifiedDate
from ggrc.models.mixins import deferred
from ggrc.models.mixins_assignable import Assignable
from ggrc.models.object_document import Documentable
from ggrc.models.object_person import Personable
from ggrc.models.reflection import PublishOnly
from ggrc.models.relationship import Relatable
from ggrc.models.relationship import Relationship
from ggrc.models.track_object_state import HasObjectState
from ggrc.models.track_object_state import track_state_for_class
class AuditRelationship(object):
  """Mixin for mandatory link to an Audit via Relationships."""

  # Import/export alias: the "Audit" column is mandatory and is resolved
  # through the custom _filter_by_audit filter below.
  _aliases = {
      "audit": {
          "display_name": "Audit",
          "mandatory": True,
          "filter_by": "_filter_by_audit",
          "ignore_on_update": True,
          "type": reflection.AttributeInfo.Type.MAPPING,
      },
  }

  @classmethod
  def _filter_by_audit(cls, predicate):
    """Get filter for objects related to an Audit.

    Builds an EXISTS clause for relationships in either direction
    (object -> Audit or Audit -> object) whose Audit slug satisfies
    ``predicate``; the two directions are OR-ed together.
    """
    return Relationship.query.filter(
        Relationship.source_type == cls.__name__,
        Relationship.source_id == cls.id,
        Relationship.destination_type == Audit.__name__,
    ).join(Audit, Relationship.destination_id == Audit.id).filter(
        predicate(Audit.slug)
    ).exists() | Relationship.query.filter(
        Relationship.destination_type == cls.__name__,
        Relationship.destination_id == cls.id,
        Relationship.source_type == Audit.__name__,
    ).join(Audit, Relationship.source_id == Audit.id).filter(
        predicate(Audit.slug)
    ).exists()
class Assessment(mixins_statusable.Statusable, AuditRelationship,
                 AutoStatusChangable, Assignable, HasObjectState, TestPlanned,
                 CustomAttributable, Documentable, Commentable, Personable,
                 mixins_reminderable.Reminderable, Timeboxed,
                 Relatable, FinishedDate, VerifiedDate,
                 BusinessObject, db.Model):
  """Class representing Assessment.

  Assessment is an object representing an individual assessment performed on
  a specific object during an audit to ascertain whether or not
  certain conditions were met for that object.
  """
  __tablename__ = 'assessments'
  _title_uniqueness = False

  # Roles under which a person can be assigned to an assessment.
  ASSIGNEE_TYPES = (u"Creator", u"Assessor", u"Verifier")

  # Reminder configuration: while in the start state or "In Progress",
  # remind the people holding the Assessor role.
  REMINDERABLE_HANDLERS = {
      "statusToPerson": {
          "handler":
              mixins_reminderable.Reminderable.handle_state_to_person_reminder,
          "data": {
              mixins_statusable.Statusable.START_STATE: "Assessor",
              "In Progress": "Assessor"
          },
          "reminders": {"assessment_assessor_reminder", }
      }
  }

  # Conclusion fields, validated against VALID_CONCLUSIONS below.
  design = deferred(db.Column(db.String), "Assessment")
  operationally = deferred(db.Column(db.String), "Assessment")

  object = {}  # we add this for the sake of client side error checking
  audit = {}

  VALID_CONCLUSIONS = frozenset([
      "Effective",
      "Ineffective",
      "Needs improvement",
      "Not Applicable"
  ])

  # REST properties
  _publish_attrs = [
      'design',
      'operationally',
      PublishOnly('audit'),
      PublishOnly('object')
  ]

  # Attribute changes that feed the AutoStatusChangable machinery.
  _tracked_attrs = {
      'contact_id',
      'description',
      'design',
      'notes',
      'operationally',
      'reference_url',
      'secondary_contact_id',
      'test_plan',
      'title',
      'url',
      'start_date',
      'end_date'
  }

  # Import/export column aliases.
  _aliases = {
      "owners": None,
      "assessment_object": {
          "display_name": "Object",
          "mandatory": True,
          "ignore_on_update": True,
          "filter_by": "_ignore_filter",
          "type": reflection.AttributeInfo.Type.MAPPING,
          "description": ("A single object that will be mapped to the audit.\n"
                          "Example:\n\nControl: Control-slug-1\n"
                          "Market : MARKET-55"),
      },
      "assessment_template": {
          "display_name": "Template",
          "ignore_on_update": True,
          "filter_by": "_ignore_filter",
          "type": reflection.AttributeInfo.Type.MAPPING,
      },
      "url": "Assessment URL",
      "design": "Conclusion: Design",
      "operationally": "Conclusion: Operation",
      "related_creators": {
          "display_name": "Creator",
          "mandatory": True,
          "filter_by": "_filter_by_related_creators",
          "type": reflection.AttributeInfo.Type.MAPPING,
      },
      "related_assessors": {
          "display_name": "Assessor",
          "mandatory": True,
          "filter_by": "_filter_by_related_assessors",
          "type": reflection.AttributeInfo.Type.MAPPING,
      },
      "related_verifiers": {
          "display_name": "Verifier",
          "filter_by": "_filter_by_related_verifiers",
          "type": reflection.AttributeInfo.Type.MAPPING,
      },
  }

  def validate_conclusion(self, value):
    # Anything outside VALID_CONCLUSIONS is silently blanked out.
    return value if value in self.VALID_CONCLUSIONS else ""

  @validates("operationally")
  def validate_opperationally(self, key, value):
    """Validate the 'operationally' conclusion field."""
    # pylint: disable=unused-argument
    return self.validate_conclusion(value)

  @validates("design")
  def validate_design(self, key, value):
    """Validate the 'design' conclusion field."""
    # pylint: disable=unused-argument
    return self.validate_conclusion(value)

  @classmethod
  def _filter_by_related_creators(cls, predicate):
    return cls._get_relate_filter(predicate, "Creator")

  @classmethod
  def _filter_by_related_assessors(cls, predicate):
    return cls._get_relate_filter(predicate, "Assessor")

  @classmethod
  def _filter_by_related_verifiers(cls, predicate):
    return cls._get_relate_filter(predicate, "Verifier")

  @classmethod
  def _ignore_filter(cls, predicate):
    # Columns that are handled elsewhere during import produce no filter.
    return None
# Register Assessment with the object-state tracking machinery.
track_state_for_class(Assessment)
| [
"ggrc.models.track_object_state.track_state_for_class",
"ggrc.db.Column",
"ggrc.models.relationship.Relationship.query.filter",
"ggrc.models.reflection.PublishOnly",
"sqlalchemy.orm.validates"
] | [((6366, 6399), 'ggrc.models.track_object_state.track_state_for_class', 'track_state_for_class', (['Assessment'], {}), '(Assessment)\n', (6387, 6399), False, 'from ggrc.models.track_object_state import track_state_for_class\n'), ((5618, 5644), 'sqlalchemy.orm.validates', 'validates', (['"""operationally"""'], {}), "('operationally')\n", (5627, 5644), False, 'from sqlalchemy.orm import validates\n'), ((5779, 5798), 'sqlalchemy.orm.validates', 'validates', (['"""design"""'], {}), "('design')\n", (5788, 5798), False, 'from sqlalchemy.orm import validates\n'), ((3377, 3397), 'ggrc.db.Column', 'db.Column', (['db.String'], {}), '(db.String)\n', (3386, 3397), False, 'from ggrc import db\n'), ((3440, 3460), 'ggrc.db.Column', 'db.Column', (['db.String'], {}), '(db.String)\n', (3449, 3460), False, 'from ggrc import db\n'), ((3779, 3799), 'ggrc.models.reflection.PublishOnly', 'PublishOnly', (['"""audit"""'], {}), "('audit')\n", (3790, 3799), False, 'from ggrc.models.reflection import PublishOnly\n'), ((3807, 3828), 'ggrc.models.reflection.PublishOnly', 'PublishOnly', (['"""object"""'], {}), "('object')\n", (3818, 3828), False, 'from ggrc.models.reflection import PublishOnly\n'), ((1675, 1834), 'ggrc.models.relationship.Relationship.query.filter', 'Relationship.query.filter', (['(Relationship.source_type == cls.__name__)', '(Relationship.source_id == cls.id)', '(Relationship.destination_type == Audit.__name__)'], {}), '(Relationship.source_type == cls.__name__, \n Relationship.source_id == cls.id, Relationship.destination_type ==\n Audit.__name__)\n', (1700, 1834), False, 'from ggrc.models.relationship import Relationship\n'), ((1965, 2129), 'ggrc.models.relationship.Relationship.query.filter', 'Relationship.query.filter', (['(Relationship.destination_type == cls.__name__)', '(Relationship.destination_id == cls.id)', '(Relationship.source_type == Audit.__name__)'], {}), '(Relationship.destination_type == cls.__name__, \n Relationship.destination_id == cls.id, 
Relationship.source_type ==\n Audit.__name__)\n', (1990, 2129), False, 'from ggrc.models.relationship import Relationship\n')] |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
__author__ = 'chengzhi'
from tqsdk import TqApi, TqSim
# Create the API instance. A trading account must be specified; passing
# TqSim selects the API's built-in simulation account.
api = TqApi(TqSim())
# Get a quote reference for SHFE cu1906; the fields in `quote` are updated
# whenever the market data changes.
quote = api.get_quote("SHFE.cu1906")
# Print cu1906's latest quote time and last price.
print(quote["datetime"], quote["last_price"])
# Close the API and release its resources.
api.close()
| [
"tqsdk.TqSim"
] | [((163, 170), 'tqsdk.TqSim', 'TqSim', ([], {}), '()\n', (168, 170), False, 'from tqsdk import TqApi, TqSim\n')] |
from django.test import TestCase
from .models import User, Friendship
from faker import Faker
class FriendshipTest(TestCase):
    """Exercises creation of User pairs and reciprocal Friendship rows."""

    def setUp(self):
        # NOTE(review): setUp is also called explicitly inside the test
        # below, so the fixtures are created twice per test run.
        fake = Faker()
        self.user1 = User.objects.create(
            username=fake.name(),
            dob=fake.date()
        )
        self.user2 = User.objects.create(
            username=fake.name(),
            dob=fake.date()
        )
        # Friendships are stored in both directions (1->2 and 2->1).
        self.friendship1 = Friendship.objects.create(
            profile_1=self.user1,
            profile_2=self.user2
        )
        self.friendship2 = Friendship.objects.create(
            profile_1=self.user2,
            profile_2=self.user1
        )
        return self.user1, self.user2, self.friendship1, self.friendship2

    def test_friendship_creation(self):
        u1, u2, f1, f2 = self.setUp()
        self.assertTrue(isinstance(u1, User))
        self.assertTrue(isinstance(u2, User))
        self.assertTrue(isinstance(f1, Friendship))
        self.assertTrue(isinstance(f2, Friendship))
        # NOTE(review): users are created with `username`, but these asserts
        # read `.name`; the second line also compares u1's string form with
        # u2's attribute. Both look like copy-paste mistakes — confirm the
        # User model's __str__ and field names before fixing.
        self.assertEqual(u1.__str__(), u1.name)
        self.assertEqual(u1.__str__(), u2.name)

    # def test_friend_of_friend_works(self):
    #     u1, u2, f1, f2 = self.setUp()
    #     fof1 = u1.friend_of_friends().values_list('user_id', flat=True)
    #     fof2 = u2.friend_of_friends().values_list('user_id', flat=True)
    #     # self.assertEqual(u1 not in fof2, u2 not in fof1)
| [
"faker.Faker"
] | [((165, 172), 'faker.Faker', 'Faker', ([], {}), '()\n', (170, 172), False, 'from faker import Faker\n')] |
import os
import re
from pathlib import Path
from typing import Union, List, TypeVar, Generic, Callable
def esc_file_path(path: Union[str, Path]):
    """Backslash-escape shell-special characters in a normalised path.

    The path is first passed through os.path.normpath; then spaces, '!',
    parentheses, square brackets and both quote characters are escaped.
    """
    normalized = os.path.normpath(str(path))
    return re.sub(r"([ !([\])'\"])", r"\\\1", normalized)
def noop(*args, **kwargs):
    """Do nothing; accepts and ignores any arguments."""
    return None
def get_boolean_from_string(val: str) -> bool:
    """Return True for '1', 'true' or 't' (case-insensitive, whitespace
    around the value is ignored); False for everything else."""
    return val.strip().lower() in ("1", "true", "t")
def get_bool_from_any(val) -> bool:
    """Coerce an arbitrary value to bool.

    None          -> False
    str           -> get_boolean_from_string(val)
    bool          -> unchanged
    int / float   -> True iff the value equals 1
    anything else -> False

    Fix: plain ints previously fell through to the catch-all False branch
    while floats were compared against 1; both numeric types are now
    handled the same way.
    """
    if val is None:
        return False
    if isinstance(val, str):
        return get_boolean_from_string(val)
    if isinstance(val, bool):
        return val
    if isinstance(val, (int, float)):
        # bool is handled above, so only genuine numerics reach here.
        return val == 1
    return False
T = TypeVar("T")


class IterableInterface(Generic[T]):
    """Minimal list-backed container with iteration, lookup and filtering.

    Items live in ``__data__``.  ``__curr__`` is the cursor used by the
    legacy ``__next__`` protocol, retained for callers that call ``next()``
    on the object directly.

    Fix: ``__iter__`` used to return ``self``, so nested or concurrent
    iterations shared one cursor and interfered with each other; it now
    returns an independent iterator over the underlying list.
    """

    __data__: List[T]
    __curr__: int = 0

    def __init__(self, *args, **kwargs):
        self.__data__ = []

    def __iter__(self):
        # Fresh iterator per call: nested loops no longer share state.
        return iter(self.__data__)

    def __next__(self) -> T:
        # Legacy shared-cursor protocol, kept for backwards compatibility.
        if self.__curr__ < len(self.__data__):
            self.__curr__ += 1
            return self.__data__[self.__curr__ - 1]
        self.__curr__ = 0
        raise StopIteration

    def __getitem__(self, item):
        return self.__data__[item]

    def __setitem__(self, key, value):
        self.__data__[key] = value

    def __len__(self):
        return self.__data__.__len__()

    def __sizeof__(self):
        return self.__data__.__sizeof__()

    def find(self, predicate: Callable[[T], bool]) -> T:
        """Return the first item satisfying ``predicate``, or None."""
        for item in self.__data__:
            if predicate(item):
                return item
        return None

    def filter(self, predicate: Callable[[T], bool]) -> List[T]:
        """Return a list of all items satisfying ``predicate``."""
        return [item for item in self.__data__ if predicate(item)]
class Singleton(type):
    """Metaclass caching one instance per class.

    The first call constructs the instance normally; later calls return the
    cached instance.  If the class defines a ``__reinit__`` method it is
    invoked with the call's arguments on every call, letting singletons
    refresh their state.

    Fix: the original looked up ``__reinit__`` with a two-argument
    ``getattr``, which raised AttributeError for any class that did not
    define it; a ``None`` default is now supplied and the call is skipped
    when the hook is absent.
    """

    _instances = {}

    def __call__(cls, *args, **kwargs):
        if cls not in cls._instances:
            cls._instances[cls] = super(Singleton, cls).__call__(*args, **kwargs)
        instance = cls._instances[cls]
        reinit = getattr(instance, "__reinit__", None)
        if callable(reinit):
            reinit(*args, **kwargs)
        return instance
| [
"typing.TypeVar"
] | [((746, 758), 'typing.TypeVar', 'TypeVar', (['"""T"""'], {}), "('T')\n", (753, 758), False, 'from typing import Union, List, TypeVar, Generic, Callable\n')] |
"""
Copyright (c) Contributors to the Open 3D Engine Project.
For complete copyright and license terms please see the LICENSE at the root of this distribution.
SPDX-License-Identifier: Apache-2.0 OR MIT
"""
import os
import pytest
import ly_test_tools.environment.file_system as file_system
from ly_test_tools.o3de.editor_test import EditorSingleTest, EditorSharedTest, EditorParallelTest, EditorTestSuite
@pytest.mark.xfail(reason="Optimized tests are experimental, we will enable xfail and monitor them temporarily.")
@pytest.mark.SUITE_main
@pytest.mark.parametrize("launcher_platform", ['windows_editor'])
@pytest.mark.parametrize("project", ["AutomatedTesting"])
class TestAutomationNoAutoTestMode(EditorTestSuite):
    """Editor tests that must run without -autotest_mode or -BatchMode."""

    # Disable -autotest_mode and -BatchMode. Tests cannot run in -BatchMode due to UI interactions, and these tests
    # interact with modal dialogs
    global_extra_cmdline_args = []

    class test_BasicEditorWorkflows_LevelEntityComponentCRUD(EditorSingleTest):

        # Custom teardown to remove slice asset created during test
        def teardown(self, request, workspace, editor, editor_test_results, launcher_platform):
            file_system.delete([os.path.join(workspace.paths.engine_root(), "AutomatedTesting", "Levels", "tmp_level")],
                               True, True)

        from .EditorScripts import BasicEditorWorkflows_LevelEntityComponentCRUD as test_module

    @pytest.mark.REQUIRES_gpu
    class test_BasicEditorWorkflows_GPU_LevelEntityComponentCRUD(EditorSingleTest):
        # Disable null renderer
        use_null_renderer = False

        # Custom teardown to remove slice asset created during test
        def teardown(self, request, workspace, editor, editor_test_results, launcher_platform):
            file_system.delete([os.path.join(workspace.paths.engine_root(), "AutomatedTesting", "Levels", "tmp_level")],
                               True, True)

        from .EditorScripts import BasicEditorWorkflows_LevelEntityComponentCRUD as test_module

    class test_InputBindings_Add_Remove_Input_Events(EditorSharedTest):
        from .EditorScripts import InputBindings_Add_Remove_Input_Events as test_module

    @pytest.mark.skip(reason="Crashes Editor: ATOM-15493")
    class test_AssetPicker_UI_UX(EditorSharedTest):
        from .EditorScripts import AssetPicker_UI_UX as test_module
@pytest.mark.xfail(reason="Optimized tests are experimental, we will enable xfail and monitor them temporarily.")
@pytest.mark.SUITE_main
@pytest.mark.parametrize("launcher_platform", ['windows_editor'])
@pytest.mark.parametrize("project", ["AutomatedTesting"])
class TestAutomationAutoTestMode(EditorTestSuite):
    """Editor tests that run with -autotest_mode but not -BatchMode."""

    # Enable only -autotest_mode for these tests. Tests cannot run in -BatchMode due to UI interactions
    global_extra_cmdline_args = ["-autotest_mode"]

    class test_AssetBrowser_TreeNavigation(EditorSharedTest):
        from .EditorScripts import AssetBrowser_TreeNavigation as test_module

    @pytest.mark.skip(reason="Crashes Editor: ATOM-15493")
    class test_AssetBrowser_SearchFiltering(EditorSharedTest):
        from .EditorScripts import AssetBrowser_SearchFiltering as test_module

    class test_ComponentCRUD_Add_Delete_Components(EditorSharedTest):
        from .EditorScripts import ComponentCRUD_Add_Delete_Components as test_module

    class test_Menus_ViewMenuOptions_Work(EditorSharedTest):
        from .EditorScripts import Menus_ViewMenuOptions as test_module

    @pytest.mark.skip(reason="Times out due to dialogs failing to dismiss: LYN-4208")
    class test_Menus_FileMenuOptions_Work(EditorSharedTest):
        from .EditorScripts import Menus_FileMenuOptions as test_module
| [
"pytest.mark.parametrize",
"pytest.mark.skip",
"pytest.mark.xfail"
] | [((412, 534), 'pytest.mark.xfail', 'pytest.mark.xfail', ([], {'reason': '"""Optimized tests are experimental, we will enable xfail and monitor them temporarily."""'}), "(reason=\n 'Optimized tests are experimental, we will enable xfail and monitor them temporarily.'\n )\n", (429, 534), False, 'import pytest\n'), ((550, 614), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""launcher_platform"""', "['windows_editor']"], {}), "('launcher_platform', ['windows_editor'])\n", (573, 614), False, 'import pytest\n'), ((616, 672), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""project"""', "['AutomatedTesting']"], {}), "('project', ['AutomatedTesting'])\n", (639, 672), False, 'import pytest\n'), ((2367, 2489), 'pytest.mark.xfail', 'pytest.mark.xfail', ([], {'reason': '"""Optimized tests are experimental, we will enable xfail and monitor them temporarily."""'}), "(reason=\n 'Optimized tests are experimental, we will enable xfail and monitor them temporarily.'\n )\n", (2384, 2489), False, 'import pytest\n'), ((2505, 2569), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""launcher_platform"""', "['windows_editor']"], {}), "('launcher_platform', ['windows_editor'])\n", (2528, 2569), False, 'import pytest\n'), ((2571, 2627), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""project"""', "['AutomatedTesting']"], {}), "('project', ['AutomatedTesting'])\n", (2594, 2627), False, 'import pytest\n'), ((2190, 2243), 'pytest.mark.skip', 'pytest.mark.skip', ([], {'reason': '"""Crashes Editor: ATOM-15493"""'}), "(reason='Crashes Editor: ATOM-15493')\n", (2206, 2243), False, 'import pytest\n'), ((2982, 3035), 'pytest.mark.skip', 'pytest.mark.skip', ([], {'reason': '"""Crashes Editor: ATOM-15493"""'}), "(reason='Crashes Editor: ATOM-15493')\n", (2998, 3035), False, 'import pytest\n'), ((3475, 3560), 'pytest.mark.skip', 'pytest.mark.skip', ([], {'reason': '"""Times out due to dialogs failing to dismiss: LYN-4208"""'}), "(reason='Times out due to 
dialogs failing to dismiss: LYN-4208'\n )\n", (3491, 3560), False, 'import pytest\n')] |
# -*- coding: utf-8 -*-
#
# Author: <NAME>, Finland 2014-2018
#
# This file is part of Kunquat.
#
# CC0 1.0 Universal, http://creativecommons.org/publicdomain/zero/1.0/
#
# To the extent possible under law, Kunquat Affirmers have waived all
# copyright and related or neighboring rights to Kunquat.
#
import json
import types
from kunquat.kunquat.events import trigger_events_by_name, trigger_events_with_name_spec
from kunquat.kunquat.events import EVENT_ARG_MAYBE_STRING, EVENT_ARG_MAYBE_REALTIME
from kunquat.kunquat.limits import *
from .grid import Grid
from .trigger import Trigger
from .triggerposition import TriggerPosition
from . import tstamp
# Trigger types that can be converted into a sibling type; the update()
# call below makes the mapping symmetric (each value maps back to its key).
CONVERTIBLE_TRIGGERS = {
    'n+': '/p',
    '.f': '/f',
    '.s': '/s',
    'a.s': 'a/s',
    'm.t': 'm/t',
    'm.v': 'm/v',
}
CONVERTIBLE_TRIGGERS.update(dict((v, k) for (k, v) in CONVERTIBLE_TRIGGERS.items()))
class SheetManager():
    """Mediates sheet (pattern editor) state between the UI, the session
    and the module data store."""

    def __init__(self):
        # Collaborators are injected later via set_controller()/set_ui_model().
        self._controller = None
        self._session = None
        self._updater = None
        self._store = None
        self._ui_model = None
    def set_controller(self, controller):
        """Attach the controller and cache its session, updater and store."""
        self._controller = controller
        self._session = controller.get_session()
        self._updater = controller.get_updater()
        self._store = controller.get_store()
    def set_ui_model(self, ui_model):
        """Attach the UI model used for selection/module/history access."""
        self._ui_model = ui_model
    def get_help(self):
        """Return the help text for the sheet view from the shared data."""
        share = self._controller.get_share()
        return share.get_help_text('sheet')
    def flush_latest_column(self):
        """Invalidate the cached column used by get_column_at_location()."""
        self._session.set_last_column(None)
    def get_column_at_location(self, location):
        """Return the column at ``location``, or None if it does not exist.

        The most recently returned column is cached in the session so that
        repeated lookups at the same position stay cheap.
        """
        module = self._ui_model.get_module()
        album = module.get_album()
        if album and album.get_track_count() > 0:
            song = album.get_song_by_track(location.get_track())
            pinst = song.get_pattern_instance(location.get_system())
            column = pinst.get_column(location.get_col_num())
            cached = self._session.get_last_column()
            if cached and (cached == column):
                return cached
            self._session.set_last_column(column)
            return column
        return None
    def get_clamped_location(self, location):
        """Return ``location`` with its trigger index clamped to a valid value.

        Returns None when the location does not resolve to an existing column.
        """
        column = self.get_column_at_location(location)
        if not column:
            return None
        row_ts = location.get_row_ts()
        if column.has_trigger(row_ts, 0):
            # Row has triggers: keep the index, but not past the row's end.
            new_trigger_index = min(
                    location.get_trigger_index(),
                    column.get_trigger_count_at_row(row_ts))
        else:
            new_trigger_index = 0
        new_location = TriggerPosition(
                location.get_track(),
                location.get_system(),
                location.get_col_num(),
                location.get_row_ts(),
                new_trigger_index)
        return new_location
    def get_inferred_active_control_id_at_location(self, location):
        """Infer which control (audio unit) is active at ``location``.

        Looks for the closest preceding '.a' (set audio unit) trigger in the
        same column; falls back to the channel default when none is found.
        """
        ret_id = 'control_00'
        clamped_loc = self.get_clamped_location(location)
        if not clamped_loc:
            return ret_id

        # Use channel default if we don't get any better ideas
        module = self._ui_model.get_module()
        ch_defs = module.get_channel_defaults()
        ret_id = ch_defs.get_default_control_id(clamped_loc.get_col_num())

        # See if there is an audio unit set event at the selected row
        column = self.get_column_at_location(clamped_loc)
        if column.has_trigger(clamped_loc.get_row_ts(), 0):
            # Only triggers before the selected index count.
            triggers = [column.get_trigger(clamped_loc.get_row_ts(), i)
                    for i in range(clamped_loc.get_trigger_index())]
            for trigger in reversed(triggers):
                if trigger.get_type() == '.a':
                    try:
                        control_num = int(trigger.get_argument())
                        ret_id = 'control_{:02x}'.format(control_num)
                        return ret_id
                    except ValueError:
                        # Malformed argument: ignore and keep scanning.
                        pass

        # See if there is an audio unit set event above the selection
        trow_positions = column.get_trigger_row_positions_in_range(
                tstamp.Tstamp(0), clamped_loc.get_row_ts())
        for row_ts in reversed(trow_positions):
            tr_count = column.get_trigger_count_at_row(row_ts)
            triggers = [column.get_trigger(row_ts, i) for i in range(tr_count)]
            for trigger in reversed(triggers):
                if trigger.get_type() == '.a':
                    try:
                        control_num = int(trigger.get_argument())
                        ret_id = 'control_{:02x}'.format(control_num)
                        return ret_id
                    except ValueError:
                        pass

        return ret_id
    def set_chord_mode(self, enabled):
        """Enter or leave chord entry mode.

        Entering records the current selection as the chord start; leaving
        moves the selection past the chord start (when still in the same
        column) and commits the pending undo step.
        """
        was_enabled = self.get_chord_mode()
        self._session.set_chord_mode(enabled)
        if enabled:
            if self._session.get_chord_start() == None:
                selection = self._ui_model.get_selection()
                location = selection.get_location()
                self._session.set_chord_start(location)
        else:
            chord_start = self._session.get_chord_start()
            if chord_start != None:
                selection = self._ui_model.get_selection()
                cur_location = selection.get_location()
                if cur_location.get_col_num() == chord_start.get_col_num():
                    new_location = cur_location
                else:
                    # Selection drifted to another column: step just past the
                    # chord start instead.
                    chord_next = TriggerPosition(
                            chord_start.get_track(),
                            chord_start.get_system(),
                            chord_start.get_col_num(),
                            chord_start.get_row_ts(),
                            chord_start.get_trigger_index() + 1)
                    new_location = self.get_clamped_location(chord_next)
                selection.set_location(new_location)
                self._session.set_chord_start(None)

            # Finish our undo step
            if was_enabled:
                history = self._ui_model.get_sheet_history()
                history.commit()
def get_chord_mode(self):
return self._session.get_chord_mode()
def allow_note_autorepeat(self):
return (self.is_editing_enabled() and
(not self.get_chord_mode()) and
self.is_grid_enabled())
def is_at_trigger(self):
selection = self._ui_model.get_selection()
location = selection.get_location()
if not location:
return False
cur_column = self.get_column_at_location(location)
if not cur_column:
return False
row_ts = location.get_row_ts()
index = location.get_trigger_index()
return cur_column.has_trigger(row_ts, index)
def is_at_trigger_row(self):
selection = self._ui_model.get_selection()
location = selection.get_location()
if not location:
return False
cur_column = self.get_column_at_location(location)
if not cur_column:
return False
row_ts = location.get_row_ts()
return row_ts in cur_column.get_trigger_row_positions()
def get_selected_trigger(self):
selection = self._ui_model.get_selection()
location = selection.get_location()
cur_column = self.get_column_at_location(location)
row_ts = location.get_row_ts()
index = location.get_trigger_index()
return cur_column.get_trigger(row_ts, index)
    def _add_transaction(self, transaction, add_location=True, commit=None):
        """Record `transaction` in the sheet undo history and apply it to the store.

        When `commit` is None, the undo step is committed unless chord mode is
        active; chord edits are merged into one step and committed later by
        set_chord_mode.
        """
        location = None
        if add_location:
            location = self._ui_model.get_selection().get_location()
        history = self._ui_model.get_sheet_history()
        if commit == None:
            commit = not self.get_chord_mode()
        history.add_step(transaction, location, commit)
        self._store.put(transaction)
    def add_trigger(self, trigger, commit=None):
        """Insert (or, in replace mode, overwrite) `trigger` at the selection.

        Afterwards the selection advances: in chord mode it moves to the next
        column, otherwise to the next trigger index on the same row.
        """
        if not self.is_editing_enabled():
            return
        selection = self._ui_model.get_selection()
        location = selection.get_location()
        if not location:
            return
        cur_column = self.get_column_at_location(location)
        if not cur_column:
            return
        row_ts = location.get_row_ts()
        index = location.get_trigger_index()
        if self.get_replace_mode():
            transaction = cur_column.get_edit_replace_or_insert_trigger(
                    row_ts, index, trigger)
        else:
            transaction = cur_column.get_edit_insert_trigger(row_ts, index, trigger)
        self._add_transaction(transaction, commit=commit)
        # This needs to be done before updating our location below
        self._on_column_update(location)
        cur_col_num = location.get_col_num()
        if self.get_chord_mode() and (cur_col_num < COLUMNS_MAX - 1):
            # Chord mode: place the next note in the following column
            new_col_num = cur_col_num + 1
            new_location = TriggerPosition(
                    location.get_track(),
                    location.get_system(),
                    new_col_num,
                    location.get_row_ts(),
                    location.get_trigger_index())
        else:
            # Step past the inserted trigger, clamped to the row's trigger count
            new_trigger_count = cur_column.get_trigger_count_at_row(row_ts)
            new_trigger_index = min(new_trigger_count, location.get_trigger_index() + 1)
            new_location = TriggerPosition(
                    location.get_track(),
                    location.get_system(),
                    location.get_col_num(),
                    location.get_row_ts(),
                    new_trigger_index)
        selection.set_location(new_location)
def _get_edit_try_remove_trigger(self):
if not self.is_editing_enabled():
return {}
selection = self._ui_model.get_selection()
location = selection.get_location()
if not location:
return {}
cur_column = self.get_column_at_location(location)
if not cur_column:
return {}
row_ts = location.get_row_ts()
index = location.get_trigger_index()
if not cur_column.has_trigger(row_ts, index):
return {}
transaction = cur_column.get_edit_remove_trigger(row_ts, index)
return transaction
def try_remove_trigger(self):
transaction = self._get_edit_try_remove_trigger()
if transaction:
self._add_transaction(transaction)
location = self._ui_model.get_selection().get_location()
self._on_column_update(location)
    def try_remove_area(self):
        """Delete the contents of the selected area.

        Supports both a trigger-row slice (part of one row) and a rectangular
        area spanning whole rows in one or more columns.  Clears the area
        selection afterwards.
        """
        selection = self._ui_model.get_selection()
        if not selection.has_area():
            return
        top_left = selection.get_area_top_left()
        bottom_right = selection.get_area_bottom_right()
        if selection.has_trigger_row_slice():
            # Horizontal slice within a single trigger row
            start_index = top_left.get_trigger_index()
            stop_index = bottom_right.get_trigger_index()
            cur_column = self.get_column_at_location(top_left)
            transaction = cur_column.get_edit_remove_trigger_row_slice(
                    top_left.get_row_ts(), start_index, stop_index)
            self._add_transaction(transaction)
            selection.set_location(top_left)
            self._on_column_update(top_left)
        elif selection.has_rect_area():
            # Rectangle: remove whole trigger rows from each affected column
            start_col = top_left.get_col_num()
            stop_col = bottom_right.get_col_num() + 1
            start_ts = top_left.get_row_ts()
            stop_ts = bottom_right.get_row_ts()
            transaction = {}
            for col_num in range(start_col, stop_col):
                cur_location = TriggerPosition(
                        top_left.get_track(), top_left.get_system(), col_num, start_ts, 0)
                cur_column = self.get_column_at_location(cur_location)
                edit = cur_column.get_edit_remove_trigger_rows(start_ts, stop_ts)
                transaction.update(edit)
                self._on_column_update(cur_location)
            self._add_transaction(transaction)
        else:
            assert False
        selection.clear_area()
def _get_convertible_set_or_slide_trigger(self):
if not self.is_at_trigger():
return None
selection = self._ui_model.get_selection()
location = selection.get_location()
cur_column = self.get_column_at_location(location)
row_ts = location.get_row_ts()
index = location.get_trigger_index()
trigger = cur_column.get_trigger(row_ts, index)
return trigger if trigger.get_event_type() in CONVERTIBLE_TRIGGERS else None
def is_at_convertible_set_or_slide_trigger(self):
return self._get_convertible_set_or_slide_trigger() != None
    def convert_set_or_slide_trigger(self):
        """Replace the selected trigger with its set<->slide counterpart event.

        Only the event name part of the trigger type is swapped (via the
        CONVERTIBLE_TRIGGERS mapping); any ":name" specifier suffix and the
        trigger argument are preserved.
        """
        trigger = self._get_convertible_set_or_slide_trigger()
        assert trigger
        selection = self._ui_model.get_selection()
        location = selection.get_location()
        cur_column = self.get_column_at_location(location)
        row_ts = location.get_row_ts()
        index = location.get_trigger_index()
        tr_type = trigger.get_type().split(':')
        new_tr_type = list(tr_type)
        new_tr_type[0] = CONVERTIBLE_TRIGGERS[tr_type[0]]
        new_type = ':'.join(new_tr_type)
        new_trigger = Trigger(new_type, trigger.get_argument(), location)
        transaction = cur_column.get_edit_replace_or_insert_trigger(
                row_ts, index, new_trigger)
        self._add_transaction(transaction, commit=True)
        self._on_column_update(location)
@staticmethod
def get_serialised_area_type():
return 'application/json'
    def _get_col_key(self, col_index):
        """Return the serialisation key ("col_NN", NN in hex) for a column index."""
        assert 0 <= col_index < COLUMNS_MAX
        return 'col_{:02x}'.format(col_index)
    def get_serialised_area(self):
        """Serialise the selected area into a JSON string for the clipboard.

        For a trigger-row slice the triggers are stored directly; for a
        rectangle each column is stored under a "col_NN" key mapping
        relative-timestamp strings to trigger lists.
        """
        selection = self._ui_model.get_selection()
        assert selection.has_area()
        top_left = selection.get_area_top_left()
        bottom_right = selection.get_area_bottom_right()
        area_info = {}
        if selection.has_trigger_row_slice():
            area_info['type'] = 'trow_slice'
            start_index = top_left.get_trigger_index()
            stop_index = bottom_right.get_trigger_index()
            column = self.get_column_at_location(top_left)
            row_ts = top_left.get_row_ts()
            triggers = (column.get_trigger(row_ts, i)
                    for i in range(start_index, stop_index))
            trigger_tuples = [(t.get_type(), t.get_argument()) for t in triggers]
            area_info['triggers'] = trigger_tuples
        elif selection.has_rect_area():
            area_info['type'] = 'rect'
            start_col = top_left.get_col_num()
            stop_col = bottom_right.get_col_num() + 1
            start_ts = top_left.get_row_ts()
            stop_ts = bottom_right.get_row_ts()
            area_info['width'] = stop_col - start_col
            area_info['height'] = tuple(stop_ts - start_ts)
            # Extract triggers with relative locations
            for col_index in range(start_col, stop_col):
                cur_location = TriggerPosition(
                        top_left.get_track(), top_left.get_system(), col_index, start_ts, 0)
                cur_column = self.get_column_at_location(cur_location)
                col_area_data = {}
                for row_ts in cur_column.get_trigger_row_positions_in_range(
                        start_ts, stop_ts):
                    trigger_count = cur_column.get_trigger_count_at_row(row_ts)
                    triggers = []
                    for trigger_index in range(trigger_count):
                        trigger = cur_column.get_trigger(row_ts, trigger_index)
                        triggers.append((trigger.get_type(), trigger.get_argument()))
                    rel_ts = row_ts - start_ts
                    col_area_data[str(tuple(rel_ts))] = triggers
                rel_col_index = col_index - start_col
                area_info[self._get_col_key(rel_col_index)] = col_area_data
        else:
            assert False
        return json.dumps(area_info)
    def _is_trigger_valid(self, unsafe_trigger):
        """Validate a deserialised [type, argument] trigger pair from clipboard data.

        Returns True only if the pair is well-formed, the event name is known,
        any ":name" specifier is legal, and a None argument is only used for
        events whose argument is optional.
        """
        if (type(unsafe_trigger) != list) or (len(unsafe_trigger) != 2):
            return False
        tr_type, tr_arg = unsafe_trigger
        event_name = tr_type
        if type(tr_type) != str:
            return False
        type_parts = tr_type.split(':')
        if len(type_parts) > 2:
            return False
        elif len(type_parts) == 2:
            # "event:name" form: only allowed for events with a name specifier
            event_name, name_spec = type_parts
            if event_name not in trigger_events_with_name_spec:
                return False
            if ((not name_spec) or
                    (len(name_spec) > DEVICE_EVENT_NAME_MAX) or
                    (not all(ch in DEVICE_EVENT_CHARS for ch in name_spec))):
                return False
        else:
            if tr_type not in trigger_events_by_name:
                return False
        if type(tr_arg) not in (str, type(None)):
            return False
        # None argument only allowed where the event's arg type is optional
        none_arg_types = (None, EVENT_ARG_MAYBE_STRING, EVENT_ARG_MAYBE_REALTIME)
        if ((type(tr_arg) == type(None)) and
                (trigger_events_by_name[event_name]['arg_type'] not in none_arg_types)):
            return False
        return True
def _unpack_tstamp_str(self, ts_str):
parts = ts_str.strip('()').split(',')
try:
beats_str, rem_str = parts
beats = int(beats_str)
rem = int(rem_str)
if beats < 0:
return None
if not 0 <= rem < tstamp.BEAT:
return None
return tstamp.Tstamp(beats, rem)
except ValueError:
return None
assert False
    def _get_validated_area_info(self, unsafe_area_info):
        """Validate deserialised area data; return a safe copy or None if invalid.

        Mirrors the structure written by get_serialised_area: either a
        'trow_slice' with a trigger list, or a 'rect' with width, height and
        per-column trigger-row mappings.
        """
        area_info = {}
        try:
            if unsafe_area_info['type'] == 'trow_slice':
                area_info['type'] = unsafe_area_info['type']
                triggers = unsafe_area_info['triggers']
                if type(triggers) != list:
                    return None
                if not all(self._is_trigger_valid(t) for t in triggers):
                    return None
                area_info['triggers'] = [Trigger(t[0], t[1]) for t in triggers]
            elif unsafe_area_info['type'] == 'rect':
                area_info['type'] = unsafe_area_info['type']
                width = unsafe_area_info['width']
                if (type(width) != int) or not (1 <= width <= COLUMNS_MAX):
                    return None
                height = unsafe_area_info['height']
                if (type(height) != list) or (len(height) != 2):
                    return None
                if not all(type(n) == int for n in height):
                    return None
                height_beats, height_rem = height
                if height_beats < 0:
                    return None
                if not 0 <= height_rem < tstamp.BEAT:
                    return None
                area_info['width'] = width
                area_info['height'] = tstamp.Tstamp(height_beats, height_rem)
                # NOTE(review): this loop requires a "col_NN" key for every
                # column up to COLUMNS_MAX, but the serialiser only writes
                # `width` of them — verify whether this should iterate
                # range(width) (or break instead of returning None).
                for col_index in range(COLUMNS_MAX):
                    col_key = self._get_col_key(col_index)
                    if col_key in unsafe_area_info:
                        col_area_data = unsafe_area_info[col_key]
                        col_data = {}
                        for ts_str, triggers in col_area_data.items():
                            row_ts = self._unpack_tstamp_str(ts_str)
                            if row_ts == None:
                                return None
                            if row_ts >= area_info['height']:
                                return None
                            if type(triggers) != list:
                                return None
                            if not all(self._is_trigger_valid(t) for t in triggers):
                                return None
                            col_data[row_ts] = [Trigger(t[0], t[1]) for t in triggers]
                        area_info[col_key] = col_data
                    else:
                        return None
            else:
                return None
        except KeyError:
            return None
        return area_info
def is_area_data_valid(self, unsafe_area_data):
unsafe_area_info = json.loads(unsafe_area_data)
area_info = self._get_validated_area_info(unsafe_area_info)
return area_info != None
    def try_paste_serialised_area(self, unsafe_area_data):
        """Paste previously serialised area data at the current selection.

        Invalid or malformed data is ignored silently.
        """
        selection = self._ui_model.get_selection()
        location = selection.get_location()
        if not location:
            return
        unsafe_area_info = json.loads(unsafe_area_data)
        area_info = self._get_validated_area_info(unsafe_area_info)
        if area_info == None:
            return
        if area_info['type'] == 'trow_slice':
            column = self.get_column_at_location(location)
            triggers = area_info['triggers']
            if selection.has_trigger_row_slice():
                # Replace the selected slice with the pasted triggers
                top_left = selection.get_area_top_left()
                bottom_right = selection.get_area_bottom_right()
                start_index = top_left.get_trigger_index()
                stop_index = bottom_right.get_trigger_index()
                transaction = column.get_edit_replace_trigger_row_slice(
                        location.get_row_ts(), start_index, stop_index, triggers)
            else:
                # Insert the pasted triggers at the cursor position
                start_index = location.get_trigger_index()
                transaction = column.get_edit_insert_trigger_row_slice(
                        location.get_row_ts(), start_index, triggers)
            new_location = TriggerPosition(
                    location.get_track(),
                    location.get_system(),
                    location.get_col_num(),
                    location.get_row_ts(),
                    start_index + len(triggers))
            self._add_transaction(transaction)
            selection.set_location(new_location)
            self._on_column_update(location)
        elif area_info['type'] == 'rect':
            width = area_info['width']
            height = area_info['height']
            start_ts = location.get_row_ts()
            stop_ts = start_ts + height
            transaction = {}
            for rel_col_num in range(width):
                col_num = location.get_col_num() + rel_col_num
                if col_num >= COLUMNS_MAX:
                    # Columns that would fall off the right edge are dropped
                    break
                cur_location = TriggerPosition(
                        location.get_track(), location.get_system(), col_num, start_ts, 0)
                cur_column = self.get_column_at_location(cur_location)
                edit = cur_column.get_edit_replace_trigger_rows(
                        start_ts, stop_ts, area_info[self._get_col_key(rel_col_num)])
                transaction.update(edit)
                self._on_column_update(cur_location)
            self._add_transaction(transaction)
        else:
            assert False
def set_pattern_length(self, pattern, new_length, is_final):
transaction = pattern.get_edit_set_length(new_length)
self._add_transaction(transaction, add_location=False, commit=is_final)
def set_pattern_base_grid_pattern_id(self, pattern, gp_id, is_final):
transaction = pattern.get_edit_set_base_grid_pattern_id(gp_id)
self._add_transaction(transaction, add_location=False, commit=is_final)
def set_overlay_grid(
self, pinst, start_col, stop_col, start_ts, stop_ts, gp_id, offset):
for col_num in range(start_col, stop_col):
column = pinst.get_column(col_num)
transaction = column.get_edit_set_overlay_grid(
start_ts, stop_ts, gp_id, offset)
self._add_transaction(transaction, commit=(col_num == stop_col - 1))
def clear_overlay_grids(self, pinst, start_col, stop_col):
for col_num in range(start_col, stop_col):
column = pinst.get_column(col_num)
transaction = column.get_edit_clear_overlay_grids()
self._add_transaction(transaction, commit=(col_num == stop_col - 1))
    def _on_column_update(self, location):
        """Register an edit of the column at `location`.

        Queues an update record for views, flushes the column's own cache,
        emits the update signal and drops the session's cached column data.
        """
        track_num = location.get_track()
        system_num = location.get_system()
        col_num = location.get_col_num()
        self._session.add_column_update(track_num, system_num, col_num)
        column = self.get_column_at_location(location)
        column.flush_cache()
        self._updater.signal_update('signal_column_updated')
        # Clear cached column data
        self._session.set_last_column(None)
def get_and_clear_column_updates(self):
updates = self._session.get_column_updates()
self._session.clear_column_updates()
return updates
def set_zoom(self, zoom):
old_zoom = self._session.get_sheet_zoom()
self._session.set_sheet_zoom(zoom)
changed = (self._session.get_sheet_zoom() != old_zoom)
return changed
def set_zoom_range(self, minimum, maximum):
old_zoom = self._session.get_sheet_zoom()
self._session.set_sheet_zoom_range(minimum, maximum)
zoom_changed = (self._session.get_sheet_zoom() != old_zoom)
return zoom_changed
def get_zoom(self):
return self._session.get_sheet_zoom()
def get_zoom_range(self):
return self._session.get_sheet_zoom_range()
def set_column_width(self, width):
old_width = self._session.get_sheet_column_width()
self._session.set_sheet_column_width(width)
changed = (self._session.get_sheet_column_width() != old_width)
return changed
def set_column_width_range(self, minimum, maximum):
old_width = self._session.get_sheet_column_width()
self._session.set_sheet_column_width_range(minimum, maximum)
width_changed = (self._session.get_sheet_column_width() != old_width)
return width_changed
def get_column_width(self):
return self._session.get_sheet_column_width()
def get_column_width_range(self):
return self._session.get_sheet_column_width_range()
def set_edit_mode(self, enabled):
self._session.set_edit_mode(enabled)
self._updater.signal_update('signal_edit_mode')
def get_edit_mode(self):
return self._session.get_edit_mode()
def set_typewriter_connected(self, connected):
self._session.set_typewriter_connected(connected)
self._updater.signal_update('signal_edit_mode')
if not connected and self.get_replace_mode():
self.set_replace_mode(False)
def get_typewriter_connected(self):
return self._session.get_typewriter_connected()
def allow_editing(self):
playback_mgr = self._ui_model.get_playback_manager()
return (not playback_mgr.follow_playback_cursor() or playback_mgr.is_recording())
def is_editing_enabled(self):
return (self.get_edit_mode() and
self.get_typewriter_connected() and
self.allow_editing())
def set_replace_mode(self, enabled):
self._session.set_replace_mode(enabled)
self._updater.signal_update('signal_replace_mode')
if enabled and not self.get_typewriter_connected():
self.set_typewriter_connected(True)
def get_replace_mode(self):
return self._session.get_replace_mode()
    def set_grid_enabled(self, enabled):
        """Enable or disable the sheet grid and persist the choice.

        When enabling, None is stored so the key falls back to its default
        (see is_grid_default_enabled); when disabling, False is stored
        explicitly.  The store entry does not mark the project modified.
        """
        self._session.set_grid_enabled(enabled)
        value = None if enabled else False
        self._store.put({ 'i_grid_enabled.json': value }, mark_modified=False)
def is_grid_enabled(self):
return self._session.is_grid_enabled()
def is_grid_default_enabled(self):
return self._store.get('i_grid_enabled.json', True)
def get_grid(self):
grid = Grid()
grid.set_controller(self._controller)
grid.set_ui_model(self._ui_model)
return grid
| [
"json.loads",
"json.dumps"
] | [((16211, 16232), 'json.dumps', 'json.dumps', (['area_info'], {}), '(area_info)\n', (16221, 16232), False, 'import json\n'), ((20420, 20448), 'json.loads', 'json.loads', (['unsafe_area_data'], {}), '(unsafe_area_data)\n', (20430, 20448), False, 'import json\n'), ((20777, 20805), 'json.loads', 'json.loads', (['unsafe_area_data'], {}), '(unsafe_area_data)\n', (20787, 20805), False, 'import json\n')] |
# coding: utf-8
"""Module that does all the ML trained model prediction heavy lifting."""
from os.path import normpath, join, dirname
import numpy as np
import pandas as pd
from sklearn.externals import joblib
def full_path(filename):
    """Return the normalised absolute path of `filename` relative to this module's directory."""
    base_dir = dirname(__file__)
    return normpath(join(base_dir, filename))
# Module-level cache for the trained model and its training columns;
# both stay None until init() loads them from disk.
model = None
columns = None
def ready():
    """Return True once init() has populated the global `model`."""
    model_loaded = model is not None
    return model_loaded
def init():
    """Loads the ML trained model (plus ancillary files) from file.

    Idempotent: does nothing if the model has already been loaded.
    Populates the module globals `model` and `columns`.
    """
    global model, columns
    if not ready():
        model = joblib.load(full_path("models/XGBClassifier.pkl"))
        columns = joblib.load(full_path("models/columns.pkl"))
def run(data):
    """Makes a prediction using the trained ML model.

    `data` may be a DataFrame or a dict-like single record; it is one-hot
    encoded and reindexed to the training columns before prediction.
    Requires init() to have been called first (globals `model`/`columns`).
    """
    test = data if isinstance(data, pd.DataFrame) else pd.DataFrame(data, index=[0])
    test = pd.get_dummies(test)
    test = test.reindex(columns=columns, fill_value=0)
    prediction = model.predict(test)
    # Normalise numpy output to a plain Python list (e.g. for JSON responses)
    if isinstance(prediction, np.ndarray):
        prediction = prediction.tolist()
    return prediction
def sample():
    """Returns a sample input vector as a dictionary."""
    sample_record = {
        "age": 43,
        "workclass": "Private",
        "fnlwgt": 100000,
        "education": "Bachelors",
        "education-num": 13,
        "marital-status": "Married-civ-spouse",
        "occupation": "Sales",
        "relationship": "Husband",
        "race": "White",
        "sex": "Male",
        "capital-gain": 0,
        "capital-loss": 0,
        "hours-per-week": 40,
        "native-country": "Spain",
    }
    return sample_record
if __name__ == "__main__":
    # Smoke test: load the model, then predict on the built-in sample vector.
    init()
    print(sample())
    print(run(sample()))
| [
"pandas.DataFrame",
"pandas.get_dummies",
"os.path.dirname"
] | [((981, 1001), 'pandas.get_dummies', 'pd.get_dummies', (['test'], {}), '(test)\n', (995, 1001), True, 'import pandas as pd\n'), ((940, 969), 'pandas.DataFrame', 'pd.DataFrame', (['data'], {'index': '[0]'}), '(data, index=[0])\n', (952, 969), True, 'import pandas as pd\n'), ((366, 383), 'os.path.dirname', 'dirname', (['__file__'], {}), '(__file__)\n', (373, 383), False, 'from os.path import normpath, join, dirname\n')] |
from pylightnix.utils import ( tryread, tryread_def, trywrite, tryreadjson,
tryreadjson_def, readstr, writestr, readjson )
from stagedml.imports.sys import ( find_executable, Popen, json_load, islink,
PIPE, STDOUT, fsync, OrderedDict, listdir, re_search, remove, join )
from stagedml.types import ( List, Any, Optional, Dict, Iterable, Path )
import sys
import logging
def listloggers()->List[Any]:
  """Return every logger currently registered with the logging module."""
  registry=logging.root.manager.loggerDict # type:ignore
  return [logging.getLogger(logger_name) for logger_name in registry]
def json_read(filename:str)->dict:
  """Read and parse a JSON document from `filename`."""
  with open(filename,"r") as stream:
    return json_load(stream)
def get_executable(name:str, not_found_message:str)->str:
  """Locate executable `name` on PATH, failing with `not_found_message`."""
  resolved=find_executable(name)
  assert resolved is not None, not_found_message
  return resolved
def assert_link(name:str, not_found_message:str)->None:
  """Fail with `not_found_message` unless `name` is a symbolic link."""
  assert islink(name), not_found_message
def system(cmd:List[str], cwd:Optional[str]=None, env:Optional[dict]=None,
           check_retcode:bool=True)->None:
  """Run `cmd` as a subprocess, optionally asserting a zero exit status."""
  popen_kwargs:Dict[str,Any]={}
  if cwd is not None:
    popen_kwargs['cwd']=cwd
  if env is not None:
    popen_kwargs['env']=env
  proc=Popen(cmd, **popen_kwargs)
  retcode=proc.wait()
  if check_retcode:
    assert retcode==0, f"Retcode is not zero, but {retcode}"
def system_log(cmd:List[str], log_file:Optional[Path]=None,
               cwd:Optional[str]=None, env:Optional[dict]=None,
               assert_retcode:Optional[int]=0)->None:
  """Run `cmd`, echoing its combined stdout/stderr and optionally logging it.

  The last ~10k output lines are kept in a ring buffer which is rewritten
  to `log_file` after every line (inefficient; see FIXME notes below).
  Fails with an AssertionError unless the exit code equals `assert_retcode`
  (pass None to skip the check).

  FIXME: either redesign or remove
  """
  args:Dict[str,Any]={}
  if cwd is not None:
    args.update({'cwd':cwd})
  if env is not None:
    args.update({'env':env})
  # stderr is merged into stdout so a single stream is echoed and logged
  p=Popen(cmd, stdout=PIPE, stderr=STDOUT, bufsize=1, **args)
  lastlines=10*1024
  logbuf=[bytes() for _ in range(lastlines)]
  for i,line in enumerate(p.stdout): # type:ignore
    try:
      sys.stdout.write(line.decode('utf-8'))
      sys.stdout.flush()
    except UnicodeDecodeError:
      print('<Undecodable>')
    if log_file is not None:
      # Ring buffer: overwrite the oldest slot and dump the buffer to disk
      logbuf[i%len(logbuf)]=line
      with open(log_file,'wb') as f:
        for i2 in range(min(i,len(logbuf))):
          f.write(logbuf[(i-i2)%len(logbuf)])
        # ^ FIXME: reverse log order
        # ^ FIXME: copy and move rather than copy
        # ^ FIXME: don't write full file every time
  retcode=p.wait()
  if assert_retcode is not None:
    assert retcode==assert_retcode, \
        f"Expected {assert_retcode} retcode, but had {retcode}"
  return
def readlines(filepath:str, tostrip:str='\n')->Iterable[str]:
  """Yield the lines of `filepath` with trailing `tostrip` characters removed."""
  with open(filepath,'r') as stream:
    for raw_line in stream:
      stripped=raw_line.rstrip(tostrip)
      assert tostrip not in stripped
      yield stripped
def writelines(filepath:str, lines:Iterable[str])->None:
  """Write `lines` to `filepath`, one per line; lines must not contain EOLs."""
  with open(filepath,'w') as stream:
    for text in lines:
      assert '\n' not in text, f"Can't save line '{text}' because it contains EOL"
      stream.write(text + '\n')
def flines(p:str, newline:str='\n')->int:
  """Count the number of lines in file `p`.

  Returns 0 for an empty file; the original implementation raised
  UnboundLocalError in that case because the loop variable was never bound.
  The file is closed deterministically via a context manager.
  """
  line_count=0
  with open(p,'r',newline=newline) as f:
    for line_count,_ in enumerate(f, start=1):
      pass
  return line_count
def dpurge(dir, pattern, debug=True):
  """Delete every file in `dir` whose name matches the regex `pattern`."""
  for entry in listdir(dir):
    if not re_search(pattern, entry):
      continue
    if debug:
      print('Removing', entry, 'from', dir)
    remove(join(dir, entry))
| [
"stagedml.imports.sys.listdir",
"stagedml.imports.sys.join",
"stagedml.imports.sys.Popen",
"stagedml.imports.sys.find_executable",
"stagedml.imports.sys.re_search",
"sys.stdout.flush",
"stagedml.imports.sys.json_load",
"stagedml.imports.sys.islink",
"logging.getLogger"
] | [((663, 684), 'stagedml.imports.sys.find_executable', 'find_executable', (['name'], {}), '(name)\n', (678, 684), False, 'from stagedml.imports.sys import find_executable, Popen, json_load, islink, PIPE, STDOUT, fsync, OrderedDict, listdir, re_search, remove, join\n'), ((1103, 1121), 'stagedml.imports.sys.Popen', 'Popen', (['cmd'], {}), '(cmd, **args)\n', (1108, 1121), False, 'from stagedml.imports.sys import find_executable, Popen, json_load, islink, PIPE, STDOUT, fsync, OrderedDict, listdir, re_search, remove, join\n'), ((1585, 1642), 'stagedml.imports.sys.Popen', 'Popen', (['cmd'], {'stdout': 'PIPE', 'stderr': 'STDOUT', 'bufsize': '(1)'}), '(cmd, stdout=PIPE, stderr=STDOUT, bufsize=1, **args)\n', (1590, 1642), False, 'from stagedml.imports.sys import find_executable, Popen, json_load, islink, PIPE, STDOUT, fsync, OrderedDict, listdir, re_search, remove, join\n'), ((3037, 3049), 'stagedml.imports.sys.listdir', 'listdir', (['dir'], {}), '(dir)\n', (3044, 3049), False, 'from stagedml.imports.sys import find_executable, Popen, json_load, islink, PIPE, STDOUT, fsync, OrderedDict, listdir, re_search, remove, join\n'), ((469, 492), 'logging.getLogger', 'logging.getLogger', (['name'], {}), '(name)\n', (486, 492), False, 'import logging\n'), ((587, 599), 'stagedml.imports.sys.json_load', 'json_load', (['f'], {}), '(f)\n', (596, 599), False, 'from stagedml.imports.sys import find_executable, Popen, json_load, islink, PIPE, STDOUT, fsync, OrderedDict, listdir, re_search, remove, join\n'), ((804, 816), 'stagedml.imports.sys.islink', 'islink', (['name'], {}), '(name)\n', (810, 816), False, 'from stagedml.imports.sys import find_executable, Popen, json_load, islink, PIPE, STDOUT, fsync, OrderedDict, listdir, re_search, remove, join\n'), ((3058, 3079), 'stagedml.imports.sys.re_search', 're_search', (['pattern', 'f'], {}), '(pattern, f)\n', (3067, 3079), False, 'from stagedml.imports.sys import find_executable, Popen, json_load, islink, PIPE, STDOUT, fsync, OrderedDict, 
listdir, re_search, remove, join\n'), ((1819, 1837), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (1835, 1837), False, 'import sys\n'), ((3152, 3164), 'stagedml.imports.sys.join', 'join', (['dir', 'f'], {}), '(dir, f)\n', (3156, 3164), False, 'from stagedml.imports.sys import find_executable, Popen, json_load, islink, PIPE, STDOUT, fsync, OrderedDict, listdir, re_search, remove, join\n')] |
""""""
import importlib
class StructGenerator:
    """Translate a C++ struct header into a Python struct-description module.

    Reads the header line by line and writes ``{prefix}_{sub_name}_struct.py``
    containing one dict per struct, mapping member names to the Python type
    names loaded from the ``{prefix}_typedef`` module.
    """

    def __init__(self, filename: str, prefix: str, sub_name: str):
        """Store the input header path and naming parameters, and load typedefs.

        Args:
            filename: path of the C++ header to translate.
            prefix: API prefix (e.g. "da"); also selects the typedef module.
            sub_name: logical section name used in the output file name.
        """
        self.filename = filename
        self.prefix = prefix
        self.sub_name = sub_name
        self.typedefs = {}
        self.load_constant()

    def load_constant(self):
        """Load C-type to Python-type mappings from the ``{prefix}_typedef`` module."""
        module_name = f"{self.prefix}_typedef"
        module = importlib.import_module(module_name)
        for name in dir(module):
            if "__" not in name:
                self.typedefs[name] = getattr(module, name)

    def run(self):
        """Run the generation, producing ``{prefix}_{sub_name}_struct.py``."""
        # Use context managers so both files are closed even when a line
        # fails to parse (the original left them open on error).
        with open(self.filename, "r") as f_cpp, \
                open(f"{self.prefix}_{self.sub_name}_struct.py", "w") as f_struct:
            self.f_cpp = f_cpp
            self.f_struct = f_struct
            for line in self.f_cpp:
                self.process_line(line)
        print("Struct generate success ")

    def process_line(self, line: str):
        """Strip terminators from one header line and dispatch it to a handler."""
        line = line.replace(";", "")
        line = line.replace("\n", "")
        if line.startswith("typedef"):
            self.process_typedef(line)
        elif line.startswith("struct"):
            self.process_declare(line)
        elif line.startswith("{"):
            self.process_start(line)
        elif line.startswith("}"):
            self.process_end(line)
        elif "\t" in line and "///" not in line:
            # Tab-indented, non-doc-comment lines are struct member declarations
            self.process_member(line)

    def process_typedef(self, line: str):
        """Emit ``<alias> = <name>`` for a ``typedef <name> <alias>`` line."""
        line = line.replace("\t", " ")
        words = line.split(" ")
        name = words[1]
        value = words[2]
        new_line = f"{value} = {name}\n\n"
        self.f_struct.write(new_line)

    def process_declare(self, line: str):
        """Open a dict literal for a ``struct <Name>`` declaration."""
        words = line.split(" ")
        name = words[1]
        end = "{"
        new_line = f"{name} = {end}\n"
        self.f_struct.write(new_line)

    def process_start(self, line: str):
        """Opening brace of a struct body: nothing to emit."""
        pass

    def process_end(self, line: str):
        """Close the dict literal at the end of a struct body."""
        new_line = "}\n\n"
        self.f_struct.write(new_line)

    def process_member(self, line: str):
        """Emit a ``"name": "py_type"`` entry for a tab-separated member line."""
        words = line.split("\t")
        words = [word.replace(" ", "") for word in words if word]
        py_type = self.typedefs[words[0]]
        name = words[1]
        new_line = f" \"{name}\": \"{py_type}\",\n"
        self.f_struct.write(new_line)
if __name__ == "__main__":
    # Generate Python struct modules for the future, stock and market headers.
    generator_future = StructGenerator(
        "../include/da/DAFutureStruct.h", "da", "future")
    generator_future.run()
    generator_stock = StructGenerator(
        "../include/da/DAStockStruct.h", "da", "stock")
    generator_stock.run()
    generator_market = StructGenerator(
        "../include/da/DAMarketStruct.h", "da", "market")
    generator_market.run()
| [
"importlib.import_module"
] | [((430, 466), 'importlib.import_module', 'importlib.import_module', (['module_name'], {}), '(module_name)\n', (453, 466), False, 'import importlib\n')] |
# -*- coding: utf-8 -*-
#
# Copyright 2018-2021 - Swiss Data Science Center (SDSC)
# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and
# Eidgenössische Technische Hochschule Zürich (ETHZ).
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Represent a Git commit."""
import os
import urllib
import weakref
from collections import OrderedDict
from pathlib import Path, posixpath
import attr
from git import NULL_TREE
from marshmallow import EXCLUDE
from renku.core.management.command_builder.command import inject
from renku.core.models import jsonld
from renku.core.models.calamus import Nested, fields, oa, prov, rdfs, renku, wfprov
from renku.core.models.cwl.annotation import AnnotationSchema
from renku.core.models.entities import (
Collection,
CommitMixin,
Entity,
OldCollectionSchema,
OldCommitMixinSchema,
OldEntitySchema,
)
from renku.core.models.refs import LinkReference
from renku.core.models.workflow.run import Run
from renku.core.utils.scm import git_unicode_unescape
from ..workflow.parameters import RunParameter, RunParameterSchema
from .agents import OldPersonSchema, OldSoftwareAgentSchema, Person, renku_agent
from .qualified import Association, AssociationSchema, Generation, GenerationSchema, Usage, UsageSchema
def _nodes(output, parent=None):
    """Yield nodes from entities.

    Recursively yields the members of Collection entities (each wrapped in
    the same qualified relation type — Generation or Usage — as `output`),
    followed by `output` itself.
    """
    # NOTE refactor so all outputs behave the same
    entity = getattr(output, "entity", output)
    if isinstance(entity, Collection):
        for member in entity.members:
            if parent is not None:
                member = attr.evolve(member, parent=parent)
            if entity.client:
                _set_entity_client_commit(member, entity.client, None)
            # Preserve the wrapper type and role of the parent output
            if isinstance(output, Generation):
                child = Generation(
                    activity=output.activity, entity=member, role=entity.role if hasattr(entity, "role") else None
                )
            elif isinstance(output, Usage):
                child = Usage(
                    activity=output.activity, entity=member, role=entity.role if hasattr(entity, "role") else None
                )
            else:
                child = member
            yield from _nodes(child)
    yield output
def _set_entity_client_commit(entity, client, commit):
"""Set the client and commit of an entity."""
if client and not entity.client:
entity.client = client
if not entity.commit:
revision = "UNCOMMITTED"
if entity._label:
revision = entity._label.rsplit("@", maxsplit=1)[-1]
if revision == "UNCOMMITTED":
commit = commit
elif client:
commit = client.repo.commit(revision)
entity.commit = commit
@attr.s(eq=False, order=False)
class Activity(CommitMixin):
    """Represent an activity in the repository."""

    _id = attr.ib(default=None, kw_only=True)  # identifier; None until assigned
    _message = attr.ib(kw_only=True)  # free-text message for the activity
    _was_informed_by = attr.ib(kw_only=True)  # prov:wasInformedBy links
    part_of = attr.ib(default=None, kw_only=True)  # enclosing composite run, if any
    _collections = attr.ib(default=attr.Factory(OrderedDict), init=False, kw_only=True)  # path -> Collection cache built by _get_activity_entity
    generated = attr.ib(kw_only=True, default=None)  # Generation records of outputs
    invalidated = attr.ib(kw_only=True, default=None)  # entities removed by this activity
    influenced = attr.ib(kw_only=True)  # default: collections touched (see default_influenced)
    started_at_time = attr.ib(kw_only=True)
    ended_at_time = attr.ib(kw_only=True)
    agents = attr.ib(kw_only=True)  # responsible persons/software agents
    _metadata_path = attr.ib(default=None, init=False)
    def default_generated(self):
        """Create default ``generated``.

        Builds a Generation record for every output path of this run.
        NOTE(review): unlike default_influenced below, this is not decorated
        with ``@generated.default`` — presumably invoked explicitly; verify.
        """
        generated = []
        for path in self.get_output_paths():
            entity = self._get_activity_entity(path)
            generated.append(Generation(activity=self, entity=entity, role=None))
        return generated
    def get_output_paths(self):
        """Gets all output paths generated by this run.

        Diffs the activity's commit against its parents (backwards, so
        deletions show up as additions) and collects the changed paths as
        strings.  A dataset file that no longer exists on disk is mapped to
        its dataset's metadata file instead.
        """
        index = set()
        commit = self.commit
        if not self.commit:
            if not self.client:
                return index
            commit = self.client.repo.head.commit
        for file_ in commit.diff(commit.parents or NULL_TREE):
            # ignore deleted files (note they appear as ADDED)
            # in this backwards diff
            if file_.change_type == "A":
                continue
            path_ = Path(git_unicode_unescape(file_.a_path))
            is_dataset = any(
                [
                    path_.resolve() == (self.client.path / f.entity.path).resolve()
                    for d in self.client.datasets.values()
                    for f in d.files
                ]
            )
            not_refs = LinkReference.REFS not in str(path_)
            does_not_exists = not path_.exists()
            if all([is_dataset, not_refs, does_not_exists]):
                # Attribute the change to the owning dataset's metadata file
                dataset = next(
                    d
                    for d in self.client.datasets.values()
                    for f in d.files
                    if path_.resolve() == (self.client.path / f.entity.path).resolve()
                )
                path_ = self.client.path / dataset.path / self.client.METADATA
            index.add(str(path_))
        return index
def _get_activity_entity(self, path, deleted=False):
"""Gets the entity associated with this Activity and path."""
client, commit, path = self.client.resolve_in_submodules(self.commit, path)
path = str(path)
output_path = client.path / path
parents = list(output_path.relative_to(client.path).parents)
collection = None
members = []
for parent in reversed(parents[:-1]):
if str(parent) in self._collections:
collection = self._collections[str(parent)]
else:
collection = Collection(client=client, commit=commit, path=str(parent), members=[], parent=collection)
members.append(collection)
self._collections[str(parent)] = collection
members = collection.members
entity_cls = Entity
if (self.client.path / path).is_dir():
entity_cls = Collection
entity = entity_cls(commit=commit, client=client, path=path, parent=collection)
if collection:
collection.members.append(entity)
return entity
def default_invalidated(self):
"""Entities invalidated by this Action."""
results = []
for path in self.removed_paths:
entity = self._get_activity_entity(path, deleted=True)
results.append(entity)
return results
@influenced.default
def default_influenced(self):
"""Calculate default values."""
return list(self._collections.values())
@property
def parents(self):
"""Return parent commits."""
if self.commit:
return list(self.commit.parents)
@property
def removed_paths(self):
"""Return all paths removed in the commit."""
index = set()
if not self.commit:
return index
for file_ in self.commit.diff(self.commit.parents or NULL_TREE):
# only process deleted files (note they appear as ADDED)
# in this backwards diff
if file_.change_type != "A":
continue
path_ = Path(git_unicode_unescape(file_.a_path))
index.add(str(path_))
return index
@property
def paths(self):
"""Return all paths in the commit."""
index = set()
for file_ in self.commit.diff(self.commit.parents or NULL_TREE):
# ignore deleted files (note they appear as ADDED)
# in this backwards diff
if file_.change_type == "A":
continue
path_ = Path(git_unicode_unescape(file_.a_path))
is_dataset = any(
[
path_.resolve() == (self.client.path / f.entity.path).resolve()
for d in self.client.datasets.values()
for f in d.files
]
)
not_refs = LinkReference.REFS not in str(path_)
does_not_exists = not (path_.exists() or (path_.is_symlink() and os.path.lexists(path_)))
if all([is_dataset, not_refs, does_not_exists]):
dataset = next(
d
for d in self.client.datasets
for f in d.files
if path_.resolve() == (self.client.path / f.entity.path).resolve()
)
path_ = self.client.path / dataset.path / self.client.METADATA
index.add(str(path_))
return index
@classmethod
def generate_id(cls, commitsha):
"""Calculate action ID."""
host = "localhost"
if hasattr(cls, "client"):
host = cls.client.remote.get("host") or host
host = os.environ.get("RENKU_DOMAIN") or host
return urllib.parse.urljoin(
"https://{host}".format(host=host),
posixpath.join("/activities", "commit/{commit}".format(commit=commitsha)),
)
def default_id(self):
"""Configure calculated ID."""
if self.commit:
return self.generate_id(self.commit.hexsha)
return self.generate_id("UNCOMMITTED")
@_message.default
def default_message(self):
"""Generate a default message."""
if self.commit:
return self.commit.message
@_was_informed_by.default
def default_was_informed_by(self):
"""List parent actions."""
if self.commit:
return [self.generate_id(parent) for parent in self.commit.parents]
@started_at_time.default
def default_started_at_time(self):
"""Configure calculated properties."""
if self.commit:
return self.commit.authored_datetime
@ended_at_time.default
def default_ended_at_time(self):
"""Configure calculated properties."""
if self.commit:
return self.commit.committed_datetime
@agents.default
def default_agents(self):
"""Set person agent to be the author of the commit."""
if self.commit:
return [Person.from_commit(self.commit), renku_agent]
return [renku_agent]
@property
def nodes(self):
"""Return topologically sorted nodes."""
collections = OrderedDict()
def _parents(node):
if node.parent:
yield from _parents(node.parent)
yield node.parent
for output in self.generated:
for parent in _parents(output.entity):
collections[parent.path] = parent
yield from _nodes(output)
for removed in self.invalidated:
for parent in _parents(removed):
collections[parent.path] = parent
yield from _nodes(removed)
yield from reversed(collections.values())
def __attrs_post_init__(self):
"""Sets ``generated`` and ``invalidated`` default values if needed."""
super().__attrs_post_init__()
if not self._id:
self._id = self.default_id()
if not self.generated:
self.generated = self.default_generated()
for g in self.generated:
_set_entity_client_commit(g.entity, self.client, self.commit)
if not self.invalidated:
self.invalidated = self.default_invalidated()
if self.generated:
for g in self.generated:
g._activity = weakref.ref(self)
@classmethod
def from_yaml(cls, path, client=None, commit=None):
"""Return an instance from a YAML file."""
data = jsonld.read_yaml(path)
self = cls.from_jsonld(data=data, client=client, commit=commit)
self._metadata_path = path
return self
def to_yaml(self, path=None):
"""Write an instance to the referenced YAML file."""
self._metadata_path = path or self._metadata_path
data = ActivitySchema(flattened=True).dump(self)
jsonld.write_yaml(path=self._metadata_path, data=data)
@classmethod
def from_jsonld(cls, data, client=None, commit=None):
"""Create an instance from JSON-LD data."""
if isinstance(data, cls):
return data
if not isinstance(data, list):
raise ValueError(data)
schema = ActivitySchema
if any(str(wfprov.WorkflowRun) in d["@type"] for d in data):
schema = WorkflowRunSchema
elif any(str(wfprov.ProcessRun) in d["@type"] for d in data):
schema = ProcessRunSchema
return schema(client=client, commit=commit, flattened=True).load(data)
def as_jsonld(self):
"""Create JSON-LD."""
return ActivitySchema(flattened=True).dump(self)
@attr.s(eq=False, order=False)
class ProcessRun(Activity):
"""A process run is a particular execution of a Process description."""
__association_cls__ = Run
generated = attr.ib(kw_only=True, default=None)
association = attr.ib(default=None, kw_only=True)
annotations = attr.ib(kw_only=True, default=None)
qualified_usage = attr.ib(kw_only=True, default=None)
run_parameter = attr.ib(kw_only=True, default=None)
def __attrs_post_init__(self):
"""Calculate properties."""
super().__attrs_post_init__()
commit_not_set = not self.commit or self.commit.hexsha in self._id
if commit_not_set and self.client and Path(self.path).exists():
self.commit = self.client.find_previous_commit(self.path)
if not self.annotations:
self.annotations = self.plugin_annotations()
if self.association:
self.association.plan._activity = weakref.ref(self)
plan = self.association.plan
if not plan.commit:
if self.client:
plan.client = self.client
if self.commit:
plan.commit = self.commit
if plan.inputs:
for i in plan.inputs:
_set_entity_client_commit(i.consumes, self.client, self.commit)
if plan.outputs:
for o in plan.outputs:
_set_entity_client_commit(o.produces, self.client, self.commit)
if self.qualified_usage and self.client and self.commit:
usages = []
revision = "{0}".format(self.commit)
for usage in self.qualified_usage:
if not usage.commit and "@UNCOMMITTED" in usage._label:
usages.append(
Usage.from_revision(
client=self.client, path=usage.path, role=usage.role, revision=revision, id=usage._id
)
)
else:
if not usage.client:
usage.entity.set_client(self.client)
if not usage.commit:
revision = usage._label.rsplit("@", maxsplit=1)[-1]
usage.entity.commit = self.client.repo.commit(revision)
usages.append(usage)
self.qualified_usage = usages
def default_generated(self):
"""Create default ``generated``."""
generated = []
if not self.association or not self.association.plan:
return generated
for output in self.association.plan.outputs:
entity = Entity.from_revision(
self.client, output.produces.path, revision=self.commit, parent=output.produces.parent
)
generation = Generation(activity=self, role=output.sanitized_id, entity=entity)
generated.append(generation)
return generated
def add_annotations(self, annotations):
"""Adds annotations from an external tool."""
self.annotations.extend(annotations)
def plugin_annotations(self):
"""Adds ``Annotation``s from plugins to a ``ProcessRun``."""
from renku.core.plugins.pluginmanager import get_plugin_manager
pm = get_plugin_manager()
results = pm.hook.process_run_annotations(run=self)
return [a for r in results for a in r]
@classmethod
@inject.params(client="LocalClient")
def from_run(cls, run, client, path, commit=None, subprocess_index=None, update_commits=False):
"""Convert a ``Run`` to a ``ProcessRun``."""
from .agents import SoftwareAgent
if not commit:
commit = client.repo.head.commit
usages = []
id_ = ProcessRun.generate_id(commit)
if subprocess_index is not None:
id_ = f"{id_}/steps/step_{subprocess_index}"
for input_ in run.inputs:
usage_id = f"{id_}/{input_.sanitized_id}"
input_path = input_.consumes.path
entity = input_.consumes
if update_commits:
revision = client.find_previous_commit(input_path, revision=commit.hexsha)
entity = Entity.from_revision(client, input_path, revision)
dependency = Usage(entity=entity, role=input_.sanitized_id, id=usage_id)
usages.append(dependency)
agent = SoftwareAgent.from_commit(commit)
association = Association(agent=agent, id=id_ + "/association", plan=run)
run_parameter = []
for parameter in run.run_parameters:
parameter_id = f"{id_}/{parameter.name}"
run_parameter.append(RunParameter(name=parameter.name, value=parameter.value, id=parameter_id))
process_run = cls(
id=id_,
qualified_usage=usages,
association=association,
client=client,
commit=commit,
path=path,
run_parameter=run_parameter,
)
generated = []
for output in run.outputs:
entity = Entity.from_revision(client, output.produces.path, revision=commit, parent=output.produces.parent)
generation = Generation(activity=process_run, role=output.sanitized_id, entity=entity)
generated.append(generation)
process_run.generated = generated
return process_run
@property
def parents(self):
"""Return parent commits."""
return [member.commit for usage in self.qualified_usage for member in usage.entity.entities] + super().parents
@property
def nodes(self):
"""Return topologically sorted nodes."""
# Outputs go first
yield from super().nodes
# Activity itself
yield self.association.plan
def to_yaml(self, path=None):
"""Write an instance to the referenced YAML file."""
self._metadata_path = path or self._metadata_path
data = ProcessRunSchema(flattened=True).dump(self)
jsonld.write_yaml(path=self._metadata_path, data=data)
@classmethod
@inject.params(client="LocalClient")
def from_jsonld(cls, data, client=None, commit=None):
"""Create an instance from JSON-LD data."""
if isinstance(data, cls):
return data
if not isinstance(data, list):
raise ValueError(data)
return ProcessRunSchema(client=client, commit=commit, flattened=True).load(data)
def as_jsonld(self):
"""Create JSON-LD."""
return ProcessRunSchema(flattened=True).dump(self)
@attr.s(eq=False, order=False)
class WorkflowRun(ProcessRun):
"""A workflow run typically contains several subprocesses."""
__association_cls__ = Run
_processes = attr.ib(kw_only=True, default=attr.Factory(list))
@property
def subprocesses(self):
"""Subprocesses of this ``WorkflowRun``."""
return {i: p for i, p in enumerate(self._processes)}
@classmethod
@inject.params(client="LocalClient")
def from_run(cls, run, client, path, commit=None, subprocess_index=None, update_commits=False):
"""Convert a ``Run`` to a ``WorkflowRun``."""
from .agents import SoftwareAgent
if not commit:
commit = client.repo.head.commit
processes = []
generated = []
for s in run.subprocesses:
proc_run = ProcessRun.from_run(s.process, client, path, commit, s.index, update_commits)
processes.append(proc_run)
generated.extend(proc_run.generated)
usages = []
id_ = cls.generate_id(commit)
input_index = 1
for input_ in run.inputs:
usage_id = f"{id_}/inputs/{input_index}"
dependency = Usage.from_revision(
client=client, path=input_.consumes.path, role=input_.sanitized_id, revision=commit, id=usage_id
)
usages.append(dependency)
input_index += 1
agent = SoftwareAgent.from_commit(commit)
association = Association(agent=agent, id=id_ + "/association", plan=run)
all_generated = []
# fix generations in folders
for generation in generated:
all_generated.append(generation)
entity = generation.entity
if not isinstance(entity, Collection) or not entity.commit:
continue
for e in entity.entities:
if e.commit is not entity.commit or any(g.entity._id == e._id for g in all_generated):
continue
all_generated.append(Generation(activity=generation.activity, entity=e, role=None))
wf_run = WorkflowRun(
id=id_,
processes=processes,
generated=all_generated,
qualified_usage=usages,
association=association,
client=client,
commit=commit,
path=path,
)
return wf_run
@property
def nodes(self):
"""Yield all graph nodes."""
for subprocess in reversed(self._processes):
if subprocess.path is None:
# skip nodes connecting directory to file
continue
for n in subprocess.nodes:
# if self.client and not n.commit and isinstance(n, Entity):
# _set_entity_client_commit(n, self.client, self.commit)
# n._activity = weakref.ref(subprocess)
yield n
yield subprocess.association.plan
def __attrs_post_init__(self):
"""Attrs post initializations."""
if not self._id:
self._id = self.default_id()
if not self._processes:
self._processes = []
for subprocess in self.association.plan.subprocesses:
run = subprocess.process
process_run = ProcessRun.from_run(
run=run,
client=self.client,
path=self.path,
commit=self.commit,
subprocess_index=subprocess.index,
update_commits=True,
)
self._processes.append(process_run)
if self.client:
for s in self._processes:
s.client = self.client
s.commit = self.commit
s.__attrs_post_init__()
s.part_of = self
super().__attrs_post_init__()
def to_yaml(self, path=None):
"""Write an instance to the referenced YAML file."""
self._metadata_path = path or self._metadata_path
data = WorkflowRunSchema(flattened=True).dump(self)
jsonld.write_yaml(path=self._metadata_path, data=data)
@classmethod
@inject.params(client="LocalClient")
def from_jsonld(cls, data, client=None, commit=None):
"""Create an instance from JSON-LD data."""
if isinstance(data, cls):
return data
if not isinstance(data, list):
raise ValueError(data)
return WorkflowRunSchema(client=client, commit=commit, flattened=True).load(data)
def as_jsonld(self):
"""Create JSON-LD."""
return WorkflowRunSchema(flattened=True).dump(self)
class ActivitySchema(OldCommitMixinSchema):
"""Activity schema."""
class Meta:
"""Meta class."""
rdf_type = prov.Activity
model = Activity
unknown = EXCLUDE
_message = fields.String(rdfs.comment, init_name="message", missing=None)
_was_informed_by = fields.List(prov.wasInformedBy, fields.IRI(), init_name="was_informed_by")
generated = Nested(prov.activity, GenerationSchema, reverse=True, many=True, missing=None)
invalidated = Nested(
prov.wasInvalidatedBy, [OldEntitySchema, OldCollectionSchema], reverse=True, many=True, missing=None
)
influenced = Nested(prov.influenced, OldCollectionSchema, many=True)
started_at_time = fields.DateTime(prov.startedAtTime, add_value_types=True)
ended_at_time = fields.DateTime(prov.endedAtTime, add_value_types=True)
agents = Nested(prov.wasAssociatedWith, [OldPersonSchema, OldSoftwareAgentSchema], many=True)
class ProcessRunSchema(ActivitySchema):
"""ProcessRun schema."""
class Meta:
"""Meta class."""
rdf_type = wfprov.ProcessRun
model = ProcessRun
unknown = EXCLUDE
association = Nested(prov.qualifiedAssociation, AssociationSchema)
annotations = Nested(oa.hasTarget, AnnotationSchema, reverse=True, many=True)
qualified_usage = Nested(prov.qualifiedUsage, UsageSchema, many=True)
run_parameter = Nested(renku.hasRunParameter, RunParameterSchema, many=True)
class WorkflowRunSchema(ProcessRunSchema):
"""WorkflowRun schema."""
class Meta:
"""Meta class."""
rdf_type = wfprov.WorkflowRun
model = WorkflowRun
unknown = EXCLUDE
_processes = Nested(wfprov.wasPartOfWorkflowRun, ProcessRunSchema, reverse=True, many=True, init_name="processes")
| [
"renku.core.models.calamus.Nested",
"renku.core.models.calamus.fields.IRI",
"attr.evolve",
"attr.Factory",
"attr.s",
"renku.core.models.calamus.fields.String",
"pathlib.Path",
"weakref.ref",
"renku.core.models.jsonld.read_yaml",
"os.path.lexists",
"renku.core.models.entities.Entity.from_revision... | [((3254, 3283), 'attr.s', 'attr.s', ([], {'eq': '(False)', 'order': '(False)'}), '(eq=False, order=False)\n', (3260, 3283), False, 'import attr\n'), ((13339, 13368), 'attr.s', 'attr.s', ([], {'eq': '(False)', 'order': '(False)'}), '(eq=False, order=False)\n', (13345, 13368), False, 'import attr\n'), ((19965, 19994), 'attr.s', 'attr.s', ([], {'eq': '(False)', 'order': '(False)'}), '(eq=False, order=False)\n', (19971, 19994), False, 'import attr\n'), ((3375, 3410), 'attr.ib', 'attr.ib', ([], {'default': 'None', 'kw_only': '(True)'}), '(default=None, kw_only=True)\n', (3382, 3410), False, 'import attr\n'), ((3426, 3447), 'attr.ib', 'attr.ib', ([], {'kw_only': '(True)'}), '(kw_only=True)\n', (3433, 3447), False, 'import attr\n'), ((3471, 3492), 'attr.ib', 'attr.ib', ([], {'kw_only': '(True)'}), '(kw_only=True)\n', (3478, 3492), False, 'import attr\n'), ((3508, 3543), 'attr.ib', 'attr.ib', ([], {'default': 'None', 'kw_only': '(True)'}), '(default=None, kw_only=True)\n', (3515, 3543), False, 'import attr\n'), ((3649, 3684), 'attr.ib', 'attr.ib', ([], {'kw_only': '(True)', 'default': 'None'}), '(kw_only=True, default=None)\n', (3656, 3684), False, 'import attr\n'), ((3704, 3739), 'attr.ib', 'attr.ib', ([], {'kw_only': '(True)', 'default': 'None'}), '(kw_only=True, default=None)\n', (3711, 3739), False, 'import attr\n'), ((3758, 3779), 'attr.ib', 'attr.ib', ([], {'kw_only': '(True)'}), '(kw_only=True)\n', (3765, 3779), False, 'import attr\n'), ((3803, 3824), 'attr.ib', 'attr.ib', ([], {'kw_only': '(True)'}), '(kw_only=True)\n', (3810, 3824), False, 'import attr\n'), ((3846, 3867), 'attr.ib', 'attr.ib', ([], {'kw_only': '(True)'}), '(kw_only=True)\n', (3853, 3867), False, 'import attr\n'), ((3882, 3903), 'attr.ib', 'attr.ib', ([], {'kw_only': '(True)'}), '(kw_only=True)\n', (3889, 3903), False, 'import attr\n'), ((3926, 3959), 'attr.ib', 'attr.ib', ([], {'default': 'None', 'init': '(False)'}), '(default=None, 
init=False)\n', (3933, 3959), False, 'import attr\n'), ((13521, 13556), 'attr.ib', 'attr.ib', ([], {'kw_only': '(True)', 'default': 'None'}), '(kw_only=True, default=None)\n', (13528, 13556), False, 'import attr\n'), ((13576, 13611), 'attr.ib', 'attr.ib', ([], {'default': 'None', 'kw_only': '(True)'}), '(default=None, kw_only=True)\n', (13583, 13611), False, 'import attr\n'), ((13631, 13666), 'attr.ib', 'attr.ib', ([], {'kw_only': '(True)', 'default': 'None'}), '(kw_only=True, default=None)\n', (13638, 13666), False, 'import attr\n'), ((13690, 13725), 'attr.ib', 'attr.ib', ([], {'kw_only': '(True)', 'default': 'None'}), '(kw_only=True, default=None)\n', (13697, 13725), False, 'import attr\n'), ((13747, 13782), 'attr.ib', 'attr.ib', ([], {'kw_only': '(True)', 'default': 'None'}), '(kw_only=True, default=None)\n', (13754, 13782), False, 'import attr\n'), ((16808, 16843), 'renku.core.management.command_builder.command.inject.params', 'inject.params', ([], {'client': '"""LocalClient"""'}), "(client='LocalClient')\n", (16821, 16843), False, 'from renku.core.management.command_builder.command import inject\n'), ((19479, 19514), 'renku.core.management.command_builder.command.inject.params', 'inject.params', ([], {'client': '"""LocalClient"""'}), "(client='LocalClient')\n", (19492, 19514), False, 'from renku.core.management.command_builder.command import inject\n'), ((20370, 20405), 'renku.core.management.command_builder.command.inject.params', 'inject.params', ([], {'client': '"""LocalClient"""'}), "(client='LocalClient')\n", (20383, 20405), False, 'from renku.core.management.command_builder.command import inject\n'), ((24151, 24186), 'renku.core.management.command_builder.command.inject.params', 'inject.params', ([], {'client': '"""LocalClient"""'}), "(client='LocalClient')\n", (24164, 24186), False, 'from renku.core.management.command_builder.command import inject\n'), ((24853, 24915), 'renku.core.models.calamus.fields.String', 'fields.String', (['rdfs.comment'], 
{'init_name': '"""message"""', 'missing': 'None'}), "(rdfs.comment, init_name='message', missing=None)\n", (24866, 24915), False, 'from renku.core.models.calamus import Nested, fields, oa, prov, rdfs, renku, wfprov\n'), ((25030, 25108), 'renku.core.models.calamus.Nested', 'Nested', (['prov.activity', 'GenerationSchema'], {'reverse': '(True)', 'many': '(True)', 'missing': 'None'}), '(prov.activity, GenerationSchema, reverse=True, many=True, missing=None)\n', (25036, 25108), False, 'from renku.core.models.calamus import Nested, fields, oa, prov, rdfs, renku, wfprov\n'), ((25127, 25239), 'renku.core.models.calamus.Nested', 'Nested', (['prov.wasInvalidatedBy', '[OldEntitySchema, OldCollectionSchema]'], {'reverse': '(True)', 'many': '(True)', 'missing': 'None'}), '(prov.wasInvalidatedBy, [OldEntitySchema, OldCollectionSchema],\n reverse=True, many=True, missing=None)\n', (25133, 25239), False, 'from renku.core.models.calamus import Nested, fields, oa, prov, rdfs, renku, wfprov\n'), ((25267, 25322), 'renku.core.models.calamus.Nested', 'Nested', (['prov.influenced', 'OldCollectionSchema'], {'many': '(True)'}), '(prov.influenced, OldCollectionSchema, many=True)\n', (25273, 25322), False, 'from renku.core.models.calamus import Nested, fields, oa, prov, rdfs, renku, wfprov\n'), ((25345, 25402), 'renku.core.models.calamus.fields.DateTime', 'fields.DateTime', (['prov.startedAtTime'], {'add_value_types': '(True)'}), '(prov.startedAtTime, add_value_types=True)\n', (25360, 25402), False, 'from renku.core.models.calamus import Nested, fields, oa, prov, rdfs, renku, wfprov\n'), ((25423, 25478), 'renku.core.models.calamus.fields.DateTime', 'fields.DateTime', (['prov.endedAtTime'], {'add_value_types': '(True)'}), '(prov.endedAtTime, add_value_types=True)\n', (25438, 25478), False, 'from renku.core.models.calamus import Nested, fields, oa, prov, rdfs, renku, wfprov\n'), ((25492, 25580), 'renku.core.models.calamus.Nested', 'Nested', (['prov.wasAssociatedWith', '[OldPersonSchema, 
OldSoftwareAgentSchema]'], {'many': '(True)'}), '(prov.wasAssociatedWith, [OldPersonSchema, OldSoftwareAgentSchema],\n many=True)\n', (25498, 25580), False, 'from renku.core.models.calamus import Nested, fields, oa, prov, rdfs, renku, wfprov\n'), ((25801, 25853), 'renku.core.models.calamus.Nested', 'Nested', (['prov.qualifiedAssociation', 'AssociationSchema'], {}), '(prov.qualifiedAssociation, AssociationSchema)\n', (25807, 25853), False, 'from renku.core.models.calamus import Nested, fields, oa, prov, rdfs, renku, wfprov\n'), ((25872, 25935), 'renku.core.models.calamus.Nested', 'Nested', (['oa.hasTarget', 'AnnotationSchema'], {'reverse': '(True)', 'many': '(True)'}), '(oa.hasTarget, AnnotationSchema, reverse=True, many=True)\n', (25878, 25935), False, 'from renku.core.models.calamus import Nested, fields, oa, prov, rdfs, renku, wfprov\n'), ((25958, 26009), 'renku.core.models.calamus.Nested', 'Nested', (['prov.qualifiedUsage', 'UsageSchema'], {'many': '(True)'}), '(prov.qualifiedUsage, UsageSchema, many=True)\n', (25964, 26009), False, 'from renku.core.models.calamus import Nested, fields, oa, prov, rdfs, renku, wfprov\n'), ((26030, 26090), 'renku.core.models.calamus.Nested', 'Nested', (['renku.hasRunParameter', 'RunParameterSchema'], {'many': '(True)'}), '(renku.hasRunParameter, RunParameterSchema, many=True)\n', (26036, 26090), False, 'from renku.core.models.calamus import Nested, fields, oa, prov, rdfs, renku, wfprov\n'), ((26320, 26426), 'renku.core.models.calamus.Nested', 'Nested', (['wfprov.wasPartOfWorkflowRun', 'ProcessRunSchema'], {'reverse': '(True)', 'many': '(True)', 'init_name': '"""processes"""'}), "(wfprov.wasPartOfWorkflowRun, ProcessRunSchema, reverse=True, many=\n True, init_name='processes')\n", (26326, 26426), False, 'from renku.core.models.calamus import Nested, fields, oa, prov, rdfs, renku, wfprov\n'), ((10889, 10902), 'collections.OrderedDict', 'OrderedDict', ([], {}), '()\n', (10900, 10902), False, 'from collections import OrderedDict\n'), 
((12207, 12229), 'renku.core.models.jsonld.read_yaml', 'jsonld.read_yaml', (['path'], {}), '(path)\n', (12223, 12229), False, 'from renku.core.models import jsonld\n'), ((12578, 12632), 'renku.core.models.jsonld.write_yaml', 'jsonld.write_yaml', ([], {'path': 'self._metadata_path', 'data': 'data'}), '(path=self._metadata_path, data=data)\n', (12595, 12632), False, 'from renku.core.models import jsonld\n'), ((16656, 16676), 'renku.core.plugins.pluginmanager.get_plugin_manager', 'get_plugin_manager', ([], {}), '()\n', (16674, 16676), False, 'from renku.core.plugins.pluginmanager import get_plugin_manager\n'), ((19401, 19455), 'renku.core.models.jsonld.write_yaml', 'jsonld.write_yaml', ([], {'path': 'self._metadata_path', 'data': 'data'}), '(path=self._metadata_path, data=data)\n', (19418, 19455), False, 'from renku.core.models import jsonld\n'), ((24073, 24127), 'renku.core.models.jsonld.write_yaml', 'jsonld.write_yaml', ([], {'path': 'self._metadata_path', 'data': 'data'}), '(path=self._metadata_path, data=data)\n', (24090, 24127), False, 'from renku.core.models import jsonld\n'), ((24971, 24983), 'renku.core.models.calamus.fields.IRI', 'fields.IRI', ([], {}), '()\n', (24981, 24983), False, 'from renku.core.models.calamus import Nested, fields, oa, prov, rdfs, renku, wfprov\n'), ((3580, 3605), 'attr.Factory', 'attr.Factory', (['OrderedDict'], {}), '(OrderedDict)\n', (3592, 3605), False, 'import attr\n'), ((9391, 9421), 'os.environ.get', 'os.environ.get', (['"""RENKU_DOMAIN"""'], {}), "('RENKU_DOMAIN')\n", (9405, 9421), False, 'import os\n'), ((14277, 14294), 'weakref.ref', 'weakref.ref', (['self'], {}), '(self)\n', (14288, 14294), False, 'import weakref\n'), ((16024, 16137), 'renku.core.models.entities.Entity.from_revision', 'Entity.from_revision', (['self.client', 'output.produces.path'], {'revision': 'self.commit', 'parent': 'output.produces.parent'}), '(self.client, output.produces.path, revision=self.\n commit, parent=output.produces.parent)\n', (16044, 16137), 
False, 'from renku.core.models.entities import Collection, CommitMixin, Entity, OldCollectionSchema, OldCommitMixinSchema, OldEntitySchema\n'), ((18467, 18570), 'renku.core.models.entities.Entity.from_revision', 'Entity.from_revision', (['client', 'output.produces.path'], {'revision': 'commit', 'parent': 'output.produces.parent'}), '(client, output.produces.path, revision=commit, parent=\n output.produces.parent)\n', (18487, 18570), False, 'from renku.core.models.entities import Collection, CommitMixin, Entity, OldCollectionSchema, OldCommitMixinSchema, OldEntitySchema\n'), ((20171, 20189), 'attr.Factory', 'attr.Factory', (['list'], {}), '(list)\n', (20183, 20189), False, 'import attr\n'), ((2092, 2126), 'attr.evolve', 'attr.evolve', (['member'], {'parent': 'parent'}), '(member, parent=parent)\n', (2103, 2126), False, 'import attr\n'), ((4808, 4842), 'renku.core.utils.scm.git_unicode_unescape', 'git_unicode_unescape', (['file_.a_path'], {}), '(file_.a_path)\n', (4828, 4842), False, 'from renku.core.utils.scm import git_unicode_unescape\n'), ((7802, 7836), 'renku.core.utils.scm.git_unicode_unescape', 'git_unicode_unescape', (['file_.a_path'], {}), '(file_.a_path)\n', (7822, 7836), False, 'from renku.core.utils.scm import git_unicode_unescape\n'), ((8264, 8298), 'renku.core.utils.scm.git_unicode_unescape', 'git_unicode_unescape', (['file_.a_path'], {}), '(file_.a_path)\n', (8284, 8298), False, 'from renku.core.utils.scm import git_unicode_unescape\n'), ((12049, 12066), 'weakref.ref', 'weakref.ref', (['self'], {}), '(self)\n', (12060, 12066), False, 'import weakref\n'), ((17593, 17643), 'renku.core.models.entities.Entity.from_revision', 'Entity.from_revision', (['client', 'input_path', 'revision'], {}), '(client, input_path, revision)\n', (17613, 17643), False, 'from renku.core.models.entities import Collection, CommitMixin, Entity, OldCollectionSchema, OldCommitMixinSchema, OldEntitySchema\n'), ((14014, 14029), 'pathlib.Path', 'Path', (['self.path'], {}), 
'(self.path)\n', (14018, 14029), False, 'from pathlib import Path, posixpath\n'), ((8698, 8720), 'os.path.lexists', 'os.path.lexists', (['path_'], {}), '(path_)\n', (8713, 8720), False, 'import os\n')] |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
VSAN Management SDK sample exercising VSAN rebalance operation
Please use with extereme caution as this can potentially impact
the performance of your existing workload
Usage:
python vsan-rebalance-sample.py -s 192.168.1.51 \
-u '<EMAIL>' -p 'VMware1!' \
-c VSAN-Cluster --operation get
python vsan-rebalance-sample.py -s 192.168.1.51 \
-u '<EMAIL>' -p 'VMware1!' \
-c VSAN-Cluster --operation start
python vsan-rebalance-sample.py -s 192.168.1.51 \
-u '<EMAIL>' -p 'VMware1!' \
-c VSAN-Cluster --operation stop
"""
__author__ = 'VMware, Inc'
from pyVim.connect import SmartConnect, Disconnect
import sys
import ssl
import atexit
import argparse
import getpass
# import the VSAN API python bindings
import vsanmgmtObjects
import vsanapiutils
def GetArgs():
"""
Supports the command-line arguments listed below.
"""
parser = argparse.ArgumentParser(
description='Process args for VSAN SDK sample application')
parser.add_argument('-s', '--host', required=True, action='store',
help='Remote host to connect to')
parser.add_argument('-o', '--port', type=int, default=443, action='store',
help='Port to connect on')
parser.add_argument('-u', '--user', required=True, action='store',
help='User name to use when connecting to host')
parser.add_argument('-p', '--password', required=False, action='store',
help='Password to use when connecting to host')
parser.add_argument('-c', '--cluster', dest='clusterName', required=True,
action='store')
parser.add_argument('-op', '--operation', dest='operation', required=True,
action='store')
args = parser.parse_args()
return args
def getClusterInstance(clusterName, serviceInstance):
content = serviceInstance.RetrieveContent()
searchIndex = content.searchIndex
datacenters = content.rootFolder.childEntity
for datacenter in datacenters:
cluster = searchIndex.FindChild(datacenter.hostFolder, clusterName)
if cluster is not None:
return cluster
return None
def isRebalancing(vchs, clusterName):
return vchs.VsanHealthIsRebalanceRunning(cluster=clusterName)
# Start program
def main():
args = GetArgs()
if args.password:
password = args.password
else:
password = getpass.getpass(prompt='Enter password for host %s and '
'user %s: ' % (args.host, args.user))
# For python 2.7.9 and later, the defaul SSL conext has more strict
# connection handshaking rule. We may need turn of the hostname checking
# and client side cert verification
context = None
if sys.version_info[:3] > (2, 7, 8):
context = ssl.create_default_context()
context.check_hostname = False
context.verify_mode = ssl.CERT_NONE
si = SmartConnect(host=args.host,
user=args.user,
pwd=password,
port=int(args.port),
sslContext=context)
atexit.register(Disconnect, si)
# for detecting whether the host is VC or ESXi
aboutInfo = si.content.about
if aboutInfo.apiType == 'VirtualCenter':
majorApiVersion = aboutInfo.apiVersion.split('.')[0]
if int(majorApiVersion) < 6:
print('The Virtual Center with version %s (lower than 6.0) \
is not supported.' % aboutInfo.apiVersion)
return -1
# Here is an example of how to access VC side VSAN Health Service API
vcMos = vsanapiutils.GetVsanVcMos(si._stub, context=context)
vchs = vcMos['vsan-cluster-health-system']
cluster = getClusterInstance(args.clusterName, si)
if cluster is None:
print("Cluster %s is not found for %s" % (args.clusterName,
args.host))
return -1
if args.operation == "get":
results = isRebalancing(vchs, cluster)
print("%s rebalancing: %s \n" % (args.clusterName, results))
elif args.operation == "start":
if not isRebalancing(vchs, cluster):
print("Starting rebalancing operation on %s cluster ..."
% args.clusterName)
vsanTask = vchs.VsanRebalanceCluster(cluster=cluster)
vcTask = vsanapiutils.ConvertVsanTaskToVcTask(vsanTask,
si._stub)
vsanapiutils.WaitForTasks([vcTask], si)
else:
print("Rebalancing operation is already currently in progress")
elif args.operation == "stop":
if isRebalancing(vchs, cluster):
print("Stopping rebalancing operation on %s cluster ..."
% args.clusterName)
vsanTask = vchs.VsanStopRebalanceCluster(cluster=cluster)
vcTask = vsanapiutils.ConvertVsanTaskToVcTask(vsanTask,
si._stub)
vsanapiutils.WaitForTasks([vcTask], si)
else:
print("The rebalancing operation is currently not running")
# Start program
if __name__ == "__main__":
main()
| [
"atexit.register",
"argparse.ArgumentParser",
"vsanapiutils.ConvertVsanTaskToVcTask",
"vsanapiutils.GetVsanVcMos",
"ssl.create_default_context",
"getpass.getpass",
"vsanapiutils.WaitForTasks"
] | [((960, 1048), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Process args for VSAN SDK sample application"""'}), "(description=\n 'Process args for VSAN SDK sample application')\n", (983, 1048), False, 'import argparse\n'), ((3220, 3251), 'atexit.register', 'atexit.register', (['Disconnect', 'si'], {}), '(Disconnect, si)\n', (3235, 3251), False, 'import atexit\n'), ((2507, 2603), 'getpass.getpass', 'getpass.getpass', ([], {'prompt': "('Enter password for host %s and user %s: ' % (args.host, args.user))"}), "(prompt='Enter password for host %s and user %s: ' % (args.\n host, args.user))\n", (2522, 2603), False, 'import getpass\n'), ((2905, 2933), 'ssl.create_default_context', 'ssl.create_default_context', ([], {}), '()\n', (2931, 2933), False, 'import ssl\n'), ((3733, 3785), 'vsanapiutils.GetVsanVcMos', 'vsanapiutils.GetVsanVcMos', (['si._stub'], {'context': 'context'}), '(si._stub, context=context)\n', (3758, 3785), False, 'import vsanapiutils\n'), ((4511, 4567), 'vsanapiutils.ConvertVsanTaskToVcTask', 'vsanapiutils.ConvertVsanTaskToVcTask', (['vsanTask', 'si._stub'], {}), '(vsanTask, si._stub)\n', (4547, 4567), False, 'import vsanapiutils\n'), ((4646, 4685), 'vsanapiutils.WaitForTasks', 'vsanapiutils.WaitForTasks', (['[vcTask]', 'si'], {}), '([vcTask], si)\n', (4671, 4685), False, 'import vsanapiutils\n'), ((5082, 5138), 'vsanapiutils.ConvertVsanTaskToVcTask', 'vsanapiutils.ConvertVsanTaskToVcTask', (['vsanTask', 'si._stub'], {}), '(vsanTask, si._stub)\n', (5118, 5138), False, 'import vsanapiutils\n'), ((5217, 5256), 'vsanapiutils.WaitForTasks', 'vsanapiutils.WaitForTasks', (['[vcTask]', 'si'], {}), '([vcTask], si)\n', (5242, 5256), False, 'import vsanapiutils\n')] |
import os
for x in os.walk('/etc'):
print(x)
| [
"os.walk"
] | [((20, 35), 'os.walk', 'os.walk', (['"""/etc"""'], {}), "('/etc')\n", (27, 35), False, 'import os\n')] |
from __future__ import absolute_import, unicode_literals
from json import dumps, loads
from django.contrib import messages
from django.contrib.auth.models import User
from django.contrib.auth.views import login, password_change
from django.contrib.contenttypes.models import ContentType
from django.conf import settings
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect
from django.shortcuts import redirect, render_to_response
from django.template import RequestContext
from django.utils.http import urlencode
from django.utils.translation import ugettext_lazy as _
from django.views.generic import FormView
from django.views.generic.edit import CreateView, DeleteView, UpdateView
from django.views.generic.list import ListView
from .forms import (
ChoiceForm, EmailAuthenticationForm, LicenseForm, LocaleProfileForm,
LocaleProfileForm_view, UserForm, UserForm_view
)
from .mixins import (
ExtraContextMixin, ObjectListPermissionFilterMixin,
ObjectPermissionCheckMixin, RedirectionMixin, ViewPermissionCheckMixin
)
from .settings import LOGIN_METHOD
def multi_object_action_view(request):
"""
Proxy view called first when using a multi object action, which
then redirects to the appropiate specialized view
"""
next = request.POST.get('next', request.GET.get('next', request.META.get('HTTP_REFERER', reverse('main:home'))))
action = request.GET.get('action', None)
id_list = ','.join([key[3:] for key in request.GET.keys() if key.startswith('pk_')])
items_property_list = [loads(key[11:]) for key in request.GET.keys() if key.startswith('properties_')]
if not action:
messages.error(request, _('No action selected.'))
return HttpResponseRedirect(request.META.get('HTTP_REFERER', reverse('main:home')))
if not id_list and not items_property_list:
messages.error(request, _('Must select at least one item.'))
return HttpResponseRedirect(request.META.get('HTTP_REFERER', reverse('main:home')))
# Separate redirects to keep backwards compatibility with older
# functions that don't expect a properties_list parameter
if items_property_list:
return HttpResponseRedirect('%s?%s' % (
action,
urlencode({'items_property_list': dumps(items_property_list), 'next': next}))
)
else:
return HttpResponseRedirect('%s?%s' % (
action,
urlencode({'id_list': id_list, 'next': next}))
)
def get_obj_from_content_type_string(string):
model, pk = string.split(',')
ct = ContentType.objects.get(model=model)
return ct.get_object_for_this_type(pk=pk)
def assign_remove(request, left_list, right_list, add_method, remove_method, left_list_title=None, right_list_title=None, decode_content_type=False, extra_context=None, grouped=False):
left_list_name = 'left_list'
right_list_name = 'right_list'
if request.method == 'POST':
if '%s-submit' % left_list_name in request.POST.keys():
unselected_list = ChoiceForm(request.POST,
prefix=left_list_name,
choices=left_list())
if unselected_list.is_valid():
for selection in unselected_list.cleaned_data['selection']:
if grouped:
flat_list = []
for group in left_list():
flat_list.extend(group[1])
else:
flat_list = left_list()
label = dict(flat_list)[selection]
if decode_content_type:
selection_obj = get_obj_from_content_type_string(selection)
else:
selection_obj = selection
try:
add_method(selection_obj)
except:
if settings.DEBUG:
raise
else:
messages.error(request, _('Unable to remove %(selection)s.') % {
'selection': label, 'right_list_title': right_list_title})
elif '%s-submit' % right_list_name in request.POST.keys():
selected_list = ChoiceForm(request.POST,
prefix=right_list_name,
choices=right_list())
if selected_list.is_valid():
for selection in selected_list.cleaned_data['selection']:
if grouped:
flat_list = []
for group in right_list():
flat_list.extend(group[1])
else:
flat_list = right_list()
label = dict(flat_list)[selection]
if decode_content_type:
selection = get_obj_from_content_type_string(selection)
try:
remove_method(selection)
except:
if settings.DEBUG:
raise
else:
messages.error(request, _('Unable to add %(selection)s.') % {
'selection': label, 'right_list_title': right_list_title})
unselected_list = ChoiceForm(prefix=left_list_name, choices=left_list())
selected_list = ChoiceForm(prefix=right_list_name, choices=right_list())
context = {
'subtemplates_list': [
{
'name': 'main/generic_form_subtemplate.html',
'grid': 12,
'context': {
'form': unselected_list,
'title': left_list_title or ' ',
'submit_label': _('Add'),
'submit_icon_famfam': 'add'
}
},
{
'name': 'main/generic_form_subtemplate.html',
'grid': 12,
'grid_clear': True,
'context': {
'form': selected_list,
'title': right_list_title or ' ',
'submit_label': _('Remove'),
'submit_icon_famfam': 'delete'
}
},
],
}
if extra_context:
context.update(extra_context)
return render_to_response('main/generic_form.html', context,
context_instance=RequestContext(request))
def current_user_details(request):
"""
Display the current user's details
"""
form = UserForm_view(instance=request.user)
return render_to_response(
'main/generic_form.html', {
'form': form,
'title': _('Current user details'),
'read_only': True,
},
context_instance=RequestContext(request))
def current_user_locale_profile_details(request):
"""
Display the current user's locale profile details
"""
form = LocaleProfileForm_view(instance=request.user.locale_profile)
return render_to_response(
'main/generic_form.html', {
'form': form,
'title': _('Current user locale profile details'),
'read_only': True,
},
context_instance=RequestContext(request))
def current_user_edit(request):
"""
Allow an user to edit his own details
"""
next = request.POST.get('next', request.GET.get('next', request.META.get('HTTP_REFERER', reverse('common:current_user_details'))))
if request.method == 'POST':
form = UserForm(instance=request.user, data=request.POST)
if form.is_valid():
if User.objects.filter(email=form.cleaned_data['email']).exclude(pk=request.user.pk).count():
messages.error(request, _('E-mail conflict, another user has that same email.'))
else:
form.save()
messages.success(request, _('Current user\'s details updated.'))
return HttpResponseRedirect(next)
else:
form = UserForm(instance=request.user)
return render_to_response(
'main/generic_form.html', {
'form': form,
'next': next,
'title': _('Edit current user details'),
},
context_instance=RequestContext(request))
def current_user_locale_profile_edit(request):
"""
Allow an user to edit his own locale profile
"""
next = request.POST.get('next', request.GET.get('next', request.META.get('HTTP_REFERER', reverse('common:current_user_locale_profile_details'))))
if request.method == 'POST':
form = LocaleProfileForm(instance=request.user.locale_profile, data=request.POST)
if form.is_valid():
form.save()
if hasattr(request, 'session'):
request.session['django_language'] = form.cleaned_data['language']
request.session['django_timezone'] = form.cleaned_data['timezone']
else:
request.set_cookie(settings.LANGUAGE_COOKIE_NAME, form.cleaned_data['language'])
messages.success(request, _('Current user\'s locale profile details updated.'))
return HttpResponseRedirect(next)
else:
form = LocaleProfileForm(instance=request.user.locale_profile)
return render_to_response(
'main/generic_form.html', {
'form': form,
'next': next,
'title': _('Edit current user locale profile details'),
},
context_instance=RequestContext(request))
def login_view(request):
"""
Control how the use is to be authenticated, options are 'email' and
'username'
"""
kwargs = {'template_name': 'main/login.html'}
if LOGIN_METHOD == 'email':
kwargs['authentication_form'] = EmailAuthenticationForm
if not request.user.is_authenticated():
context = {'web_theme_view_type': 'plain'}
else:
context = {}
return login(request, extra_context=context, **kwargs)
def license_view(request):
"""
Display the included LICENSE file from the about menu
"""
form = LicenseForm()
return render_to_response(
'main/generic_detail.html', {
'form': form,
'title': _('License'),
},
context_instance=RequestContext(request))
def password_change_view(request):
"""
Password change wrapper for better control
"""
context = {'title': _('Current user password change')}
return password_change(
request,
extra_context=context,
template_name='main/password_change_form.html',
post_change_redirect=reverse('common:password_change_done'),
)
def password_change_done(request):
"""
View called when the new user password has been accepted
"""
messages.success(request, _('Your password has been successfully changed.'))
return redirect('common:current_user_details')
class SingleObjectEditView(ViewPermissionCheckMixin, ObjectPermissionCheckMixin, ExtraContextMixin, RedirectionMixin, UpdateView):
template_name = 'main/generic_form.html'
def form_invalid(self, form):
result = super(SingleObjectEditView, self).form_invalid(form)
try:
messages.error(self.request, _('Error saving %s details.') % self.extra_context['object_name'])
except KeyError:
messages.error(self.request, _('Error saving details.'))
return result
def form_valid(self, form):
result = super(SingleObjectEditView, self).form_valid(form)
try:
messages.success(self.request, _('%s details saved successfully.') % self.extra_context['object_name'].capitalize())
except KeyError:
messages.success(self.request, _('Details saved successfully.'))
return result
class SingleObjectCreateView(ViewPermissionCheckMixin, ExtraContextMixin, RedirectionMixin, CreateView):
template_name = 'main/generic_form.html'
def form_invalid(self, form):
result = super(SingleObjectCreateView, self).form_invalid(form)
try:
messages.error(self.request, _('Error creating new %s.') % self.extra_context['object_name'])
except KeyError:
messages.error(self.request, _('Error creating object.'))
return result
def form_valid(self, form):
result = super(SingleObjectCreateView, self).form_valid(form)
try:
messages.success(self.request, _('%s created successfully.') % self.extra_context['object_name'].capitalize())
except KeyError:
messages.success(self.request, _('New object created successfully.'))
return result
class SingleObjectDeleteView(ViewPermissionCheckMixin, ObjectPermissionCheckMixin, ExtraContextMixin, RedirectionMixin, DeleteView):
template_name = 'main/generic_confirm.html'
def get_context_data(self, **kwargs):
context = super(SingleObjectDeleteView, self).get_context_data(**kwargs)
context.update({'delete_view': True})
return context
def delete(self, request, *args, **kwargs):
try:
result = super(SingleObjectDeleteView, self).delete(request, *args, **kwargs)
except Exception as exception:
try:
messages.error(self.request, _('Error deleting %s.') % self.extra_context['object_name'])
except KeyError:
messages.error(self.request, _('Error deleting object.'))
raise exception
else:
try:
messages.success(self.request, _('%s deleted successfully.') % self.extra_context['object_name'].capitalize())
except KeyError:
messages.success(self.request, _('Object deleted successfully.'))
return result
class SingleObjectListView(ViewPermissionCheckMixin, ObjectListPermissionFilterMixin, ExtraContextMixin, RedirectionMixin, ListView):
template_name = 'main/generic_list.html'
class MultiFormView(FormView):
prefixes = {}
prefix = None
def get_form_kwargs(self, form_name):
kwargs = {}
kwargs.update({'initial': self.get_initial(form_name)})
kwargs.update({'prefix': self.get_prefix(form_name)})
if self.request.method in ('POST', 'PUT'):
kwargs.update({
'data': self.request.POST,
'files': self.request.FILES,
})
return kwargs
def _create_form(self, form_name, klass):
form_kwargs = self.get_form_kwargs(form_name)
form_create_method = 'create_%s_form' % form_name
if hasattr(self, form_create_method):
form = getattr(self, form_create_method)(**form_kwargs)
else:
form = klass(**form_kwargs)
return form
def get_forms(self, form_classes):
return dict([(key, self._create_form(key, klass)) for key, klass in form_classes.items()])
def get_initial(self, form_name):
initial_method = 'get_%s_initial' % form_name
if hasattr(self, initial_method):
return getattr(self, initial_method)()
else:
return self.initial.copy()
def get_prefix(self, form_name):
return self.prefixes.get(form_name, self.prefix)
def get(self, request, *args, **kwargs):
form_classes = self.get_form_classes()
forms = self.get_forms(form_classes)
return self.render_to_response(self.get_context_data(forms=forms))
def forms_valid(self, forms):
for form_name, form in forms.items():
form_valid_method = '%s_form_valid' % form_name
if hasattr(self, form_valid_method):
return getattr(self, form_valid_method)(form)
self.all_forms_valid(forms)
return HttpResponseRedirect(self.get_success_url())
def forms_invalid(self, forms):
return self.render_to_response(self.get_context_data(forms=forms))
def post(self, request, *args, **kwargs):
form_classes = self.get_form_classes()
forms = self.get_forms(form_classes)
if all([form.is_valid() for form in forms.values()]):
return self.forms_valid(forms)
else:
return self.forms_invalid(forms)
| [
"json.loads",
"django.contrib.auth.views.login",
"django.contrib.contenttypes.models.ContentType.objects.get",
"django.shortcuts.redirect",
"django.core.urlresolvers.reverse",
"django.contrib.auth.models.User.objects.filter",
"json.dumps",
"django.utils.http.urlencode",
"django.http.HttpResponseRedi... | [((2594, 2630), 'django.contrib.contenttypes.models.ContentType.objects.get', 'ContentType.objects.get', ([], {'model': 'model'}), '(model=model)\n', (2617, 2630), False, 'from django.contrib.contenttypes.models import ContentType\n'), ((10087, 10134), 'django.contrib.auth.views.login', 'login', (['request'], {'extra_context': 'context'}), '(request, extra_context=context, **kwargs)\n', (10092, 10134), False, 'from django.contrib.auth.views import login, password_change\n'), ((11028, 11067), 'django.shortcuts.redirect', 'redirect', (['"""common:current_user_details"""'], {}), "('common:current_user_details')\n", (11036, 11067), False, 'from django.shortcuts import redirect, render_to_response\n'), ((1569, 1584), 'json.loads', 'loads', (['key[11:]'], {}), '(key[11:])\n', (1574, 1584), False, 'from json import dumps, loads\n'), ((10578, 10611), 'django.utils.translation.ugettext_lazy', '_', (['"""Current user password change"""'], {}), "('Current user password change')\n", (10579, 10611), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((10966, 11015), 'django.utils.translation.ugettext_lazy', '_', (['"""Your password has been successfully changed."""'], {}), "('Your password has been successfully changed.')\n", (10967, 11015), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((1701, 1725), 'django.utils.translation.ugettext_lazy', '_', (['"""No action selected."""'], {}), "('No action selected.')\n", (1702, 1725), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((1900, 1935), 'django.utils.translation.ugettext_lazy', '_', (['"""Must select at least one item."""'], {}), "('Must select at least one item.')\n", (1901, 1935), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((6566, 6589), 'django.template.RequestContext', 'RequestContext', (['request'], {}), '(request)\n', (6580, 6589), False, 'from django.template import RequestContext\n'), ((6846, 6871), 
'django.utils.translation.ugettext_lazy', '_', (['"""Current user details"""'], {}), "('Current user details')\n", (6847, 6871), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((6940, 6963), 'django.template.RequestContext', 'RequestContext', (['request'], {}), '(request)\n', (6954, 6963), False, 'from django.template import RequestContext\n'), ((7274, 7314), 'django.utils.translation.ugettext_lazy', '_', (['"""Current user locale profile details"""'], {}), "('Current user locale profile details')\n", (7275, 7314), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((7383, 7406), 'django.template.RequestContext', 'RequestContext', (['request'], {}), '(request)\n', (7397, 7406), False, 'from django.template import RequestContext\n'), ((8342, 8372), 'django.utils.translation.ugettext_lazy', '_', (['"""Edit current user details"""'], {}), "('Edit current user details')\n", (8343, 8372), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((8410, 8433), 'django.template.RequestContext', 'RequestContext', (['request'], {}), '(request)\n', (8424, 8433), False, 'from django.template import RequestContext\n'), ((9314, 9340), 'django.http.HttpResponseRedirect', 'HttpResponseRedirect', (['next'], {}), '(next)\n', (9334, 9340), False, 'from django.http import HttpResponseRedirect\n'), ((9563, 9608), 'django.utils.translation.ugettext_lazy', '_', (['"""Edit current user locale profile details"""'], {}), "('Edit current user locale profile details')\n", (9564, 9608), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((9646, 9669), 'django.template.RequestContext', 'RequestContext', (['request'], {}), '(request)\n', (9660, 9669), False, 'from django.template import RequestContext\n'), ((10379, 10391), 'django.utils.translation.ugettext_lazy', '_', (['"""License"""'], {}), "('License')\n", (10380, 10391), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((10429, 10452), 
'django.template.RequestContext', 'RequestContext', (['request'], {}), '(request)\n', (10443, 10452), False, 'from django.template import RequestContext\n'), ((10775, 10813), 'django.core.urlresolvers.reverse', 'reverse', (['"""common:password_change_done"""'], {}), "('common:password_change_done')\n", (10782, 10813), False, 'from django.core.urlresolvers import reverse\n'), ((1383, 1403), 'django.core.urlresolvers.reverse', 'reverse', (['"""main:home"""'], {}), "('main:home')\n", (1390, 1403), False, 'from django.core.urlresolvers import reverse\n'), ((1796, 1816), 'django.core.urlresolvers.reverse', 'reverse', (['"""main:home"""'], {}), "('main:home')\n", (1803, 1816), False, 'from django.core.urlresolvers import reverse\n'), ((2006, 2026), 'django.core.urlresolvers.reverse', 'reverse', (['"""main:home"""'], {}), "('main:home')\n", (2013, 2026), False, 'from django.core.urlresolvers import reverse\n'), ((7594, 7632), 'django.core.urlresolvers.reverse', 'reverse', (['"""common:current_user_details"""'], {}), "('common:current_user_details')\n", (7601, 7632), False, 'from django.core.urlresolvers import reverse\n'), ((8117, 8143), 'django.http.HttpResponseRedirect', 'HttpResponseRedirect', (['next'], {}), '(next)\n', (8137, 8143), False, 'from django.http import HttpResponseRedirect\n'), ((8643, 8696), 'django.core.urlresolvers.reverse', 'reverse', (['"""common:current_user_locale_profile_details"""'], {}), "('common:current_user_locale_profile_details')\n", (8650, 8696), False, 'from django.core.urlresolvers import reverse\n'), ((9241, 9292), 'django.utils.translation.ugettext_lazy', '_', (['"""Current user\'s locale profile details updated."""'], {}), '("Current user\'s locale profile details updated.")\n', (9242, 9292), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((2446, 2491), 'django.utils.http.urlencode', 'urlencode', (["{'id_list': id_list, 'next': next}"], {}), "({'id_list': id_list, 'next': next})\n", (2455, 2491), False, 'from 
django.utils.http import urlencode\n'), ((5885, 5893), 'django.utils.translation.ugettext_lazy', '_', (['"""Add"""'], {}), "('Add')\n", (5886, 5893), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((6278, 6289), 'django.utils.translation.ugettext_lazy', '_', (['"""Remove"""'], {}), "('Remove')\n", (6279, 6289), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((7910, 7965), 'django.utils.translation.ugettext_lazy', '_', (['"""E-mail conflict, another user has that same email."""'], {}), "('E-mail conflict, another user has that same email.')\n", (7911, 7965), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((8055, 8091), 'django.utils.translation.ugettext_lazy', '_', (['"""Current user\'s details updated."""'], {}), '("Current user\'s details updated.")\n', (8056, 8091), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((11406, 11435), 'django.utils.translation.ugettext_lazy', '_', (['"""Error saving %s details."""'], {}), "('Error saving %s details.')\n", (11407, 11435), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((11539, 11565), 'django.utils.translation.ugettext_lazy', '_', (['"""Error saving details."""'], {}), "('Error saving details.')\n", (11540, 11565), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((11748, 11783), 'django.utils.translation.ugettext_lazy', '_', (['"""%s details saved successfully."""'], {}), "('%s details saved successfully.')\n", (11749, 11783), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((11902, 11934), 'django.utils.translation.ugettext_lazy', '_', (['"""Details saved successfully."""'], {}), "('Details saved successfully.')\n", (11903, 11934), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((12273, 12300), 'django.utils.translation.ugettext_lazy', '_', (['"""Error creating new %s."""'], {}), "('Error creating new %s.')\n", (12274, 12300), True, 'from 
django.utils.translation import ugettext_lazy as _\n'), ((12404, 12431), 'django.utils.translation.ugettext_lazy', '_', (['"""Error creating object."""'], {}), "('Error creating object.')\n", (12405, 12431), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((12615, 12644), 'django.utils.translation.ugettext_lazy', '_', (['"""%s created successfully."""'], {}), "('%s created successfully.')\n", (12616, 12644), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((12763, 12800), 'django.utils.translation.ugettext_lazy', '_', (['"""New object created successfully."""'], {}), "('New object created successfully.')\n", (12764, 12800), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((13725, 13754), 'django.utils.translation.ugettext_lazy', '_', (['"""%s deleted successfully."""'], {}), "('%s deleted successfully.')\n", (13726, 13754), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((13881, 13914), 'django.utils.translation.ugettext_lazy', '_', (['"""Object deleted successfully."""'], {}), "('Object deleted successfully.')\n", (13882, 13914), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((2302, 2328), 'json.dumps', 'dumps', (['items_property_list'], {}), '(items_property_list)\n', (2307, 2328), False, 'from json import dumps, loads\n'), ((7779, 7832), 'django.contrib.auth.models.User.objects.filter', 'User.objects.filter', ([], {'email': "form.cleaned_data['email']"}), "(email=form.cleaned_data['email'])\n", (7798, 7832), False, 'from django.contrib.auth.models import User\n'), ((13454, 13477), 'django.utils.translation.ugettext_lazy', '_', (['"""Error deleting %s."""'], {}), "('Error deleting %s.')\n", (13455, 13477), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((13589, 13616), 'django.utils.translation.ugettext_lazy', '_', (['"""Error deleting object."""'], {}), "('Error deleting object.')\n", (13590, 13616), True, 'from django.utils.translation 
import ugettext_lazy as _\n'), ((4102, 4138), 'django.utils.translation.ugettext_lazy', '_', (['"""Unable to remove %(selection)s."""'], {}), "('Unable to remove %(selection)s.')\n", (4103, 4138), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((5287, 5320), 'django.utils.translation.ugettext_lazy', '_', (['"""Unable to add %(selection)s."""'], {}), "('Unable to add %(selection)s.')\n", (5288, 5320), True, 'from django.utils.translation import ugettext_lazy as _\n')] |
import pytest
from .common import * # NOQA
namespace = {"p_client": None, "ns": None, "cluster": None, "project": None}
random_password = random_test_name("<PASSWORD>")
PROJECT_ISOLATION = os.environ.get('RANCHER_PROJECT_ISOLATION', "disabled")
def test_connectivity_between_pods():
p_client = namespace["p_client"]
ns = namespace["ns"]
cluster = namespace["cluster"]
con = [{"name": "test1",
"image": TEST_IMAGE,
}]
name = random_test_name("default")
schedulable_node_count = len(get_schedulable_nodes(cluster))
# Check connectivity between pods in the same namespace
workload = p_client.create_workload(name=name,
containers=con,
namespaceId=ns.id,
daemonSetConfig={})
validate_workload(p_client, workload, "daemonSet", ns.name,
schedulable_node_count)
check_connectivity_between_workload_pods(p_client, workload)
# Create another namespace in the same project
# Deploy workloads in this namespace
# Check that pods belonging to different namespace within the
# same project can communicate
c_client = get_cluster_client_for_token(cluster, USER_TOKEN)
ns1 = create_ns(c_client, cluster, namespace["project"])
workload1 = p_client.create_workload(name=name,
containers=con,
namespaceId=ns1.id,
daemonSetConfig={})
validate_workload(p_client, workload1, "daemonSet", ns1.name,
schedulable_node_count)
check_connectivity_between_workload_pods(p_client, workload1)
check_connectivity_between_workloads(p_client, workload, p_client,
workload1)
# Create new project in the same cluster
# Create namespace and deploy workloads
# Check communication between pods belonging to different namespace across
# different projects
p2, ns2 = create_project_and_ns(USER_TOKEN, cluster)
p2_client = get_project_client_for_token(p2, USER_TOKEN)
workload2 = p2_client.create_workload(name=name,
containers=con,
namespaceId=ns2.id,
daemonSetConfig={})
validate_workload(p2_client, workload2, "daemonSet", ns2.name,
schedulable_node_count)
check_connectivity_between_workload_pods(p2_client, workload2)
allow_connectivity = True
if PROJECT_ISOLATION == "enabled":
allow_connectivity = False
check_connectivity_between_workloads(
p_client, workload, p2_client, workload2,
allow_connectivity=allow_connectivity)
@pytest.fixture(scope='module', autouse="True")
def create_project_client(request):
client, cluster = get_user_client_and_cluster()
create_kubeconfig(cluster)
p, ns = create_project_and_ns(USER_TOKEN, cluster, "testnp")
p_client = get_project_client_for_token(p, USER_TOKEN)
namespace["p_client"] = p_client
namespace["ns"] = ns
namespace["cluster"] = cluster
namespace["project"] = p
def fin():
client = get_user_client()
client.delete(namespace["project"])
request.addfinalizer(fin)
| [
"pytest.fixture"
] | [((2859, 2905), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""module"""', 'autouse': '"""True"""'}), "(scope='module', autouse='True')\n", (2873, 2905), False, 'import pytest\n')] |
import unittest
import os
from requests import get
import json
class ParticipantQueryParticipantsTest(unittest.TestCase):
host = '127.0.0.1'
port = 40075
login_endpoint = '/api/participant/login'
participants_endpoint = '/api/participant/participants'
def setUp(self):
pass
def tearDown(self):
pass
def _make_url(self, hostname, port, endpoint):
return 'https://' + hostname + ':' + str(port) + endpoint
def _get_token_with_login_http_auth(self, username, password):
url = self._make_url(self.host, self.port, self.login_endpoint)
auth_response = get(url=url, verify=False, auth=(username, password))
# HTTP AUTH REQUIRED TO GET TOKEN
self.assertEqual(auth_response.status_code, 200)
self.assertEqual(auth_response.headers['Content-Type'], 'application/json')
json_auth = auth_response.json()
self.assertTrue(json_auth.__contains__('participant_token'))
return json_auth['participant_token']
def _get_base_token_with_login_http_auth(self, username, password):
url = self._make_url(self.host, self.port, self.login_endpoint)
auth_response = get(url=url, verify=False, auth=(username, password))
# HTTP AUTH REQUIRED TO GET TOKEN
self.assertEqual(auth_response.status_code, 200)
self.assertEqual(auth_response.headers['Content-Type'], 'application/json')
json_auth = auth_response.json()
self.assertTrue(json_auth.__contains__('base_token'))
return json_auth['base_token']
def _request_with_http_auth(self, username, password, payload=None):
if payload is None:
payload = {}
url = self._make_url(self.host, self.port, self.participants_endpoint)
return get(url=url, verify=False, auth=(username, password), params=payload)
def _request_with_token_auth(self, token, payload=None):
if payload is None:
payload = {}
url = self._make_url(self.host, self.port, self.participants_endpoint)
request_headers = {'Authorization': 'OpenTera ' + token}
return get(url=url, verify=False, headers=request_headers, params=payload)
def _request_with_no_auth(self, payload=None):
if payload is None:
payload = {}
url = self._make_url(self.host, self.port, self.participants_endpoint)
return get(url=url, verify=False, params=payload)
def test_query_invalid_http_auth(self):
response = self._request_with_http_auth('invalid', 'invalid')
self.assertEqual(response.status_code, 401)
def test_query_invalid_token_auth(self):
response = self._request_with_token_auth('invalid')
self.assertEqual(response.status_code, 401)
def test_query_http_auth_no_params(self):
response = self._request_with_http_auth('participant1', 'opentera')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.headers['Content-Type'], 'application/json')
json_data = response.json()
self.assertGreater(len(json_data), 0)
self.assertTrue(json_data.__contains__('id_participant'))
self.assertTrue(json_data.__contains__('id_participant_group'))
self.assertTrue(json_data.__contains__('id_project'))
self.assertTrue(json_data.__contains__('participant_email'))
self.assertTrue(json_data.__contains__('participant_enabled'))
# self.assertTrue(json_data.__contains__('participant_lastonline'))
# self.assertTrue(json_data.__contains__('participant_login_enabled'))
self.assertTrue(json_data.__contains__('participant_name'))
# self.assertTrue(json_data.__contains__('participant_username'))
# self.assertTrue(json_data.__contains__('participant_uuid'))
self.assertFalse(json_data.__contains__('participant_password'))
def test_query_token_auth_no_params(self):
# HTTP AUTH REQUIRED TO GET TOKEN
token = self._get_token_with_login_http_auth('participant1', 'opentera')
response = self._request_with_token_auth(token)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.headers['Content-Type'], 'application/json')
json_data = response.json()
self.assertGreater(len(json_data), 0)
self.assertTrue(json_data.__contains__('id_participant'))
self.assertTrue(json_data.__contains__('id_participant_group'))
self.assertTrue(json_data.__contains__('id_project'))
self.assertTrue(json_data.__contains__('participant_email'))
self.assertTrue(json_data.__contains__('participant_enabled'))
# self.assertTrue(json_data.__contains__('participant_lastonline'))
# self.assertTrue(json_data.__contains__('participant_login_enabled'))
self.assertTrue(json_data.__contains__('participant_name'))
# self.assertTrue(json_data.__contains__('participant_username'))
# self.assertTrue(json_data.__contains__('participant_uuid'))
self.assertFalse(json_data.__contains__('participant_password'))
def test_query_http_auth_invalid_params(self):
params = {
'invalid': 1
}
response = self._request_with_http_auth('participant1', 'opentera', params)
self.assertEqual(response.status_code, 400)
def test_query_http_auth_list_param(self):
params = {
'list': True
}
response = self._request_with_http_auth('participant1', 'opentera', params)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.headers['Content-Type'], 'application/json')
json_data = response.json()
self.assertGreater(len(json_data), 0)
def test_query_base_token(self):
token = self._get_base_token_with_login_http_auth('participant1', 'opentera')
response = self._request_with_token_auth(token)
# Now allowed to get participant information with token
self.assertEqual(response.status_code, 200)
| [
"requests.get"
] | [((626, 679), 'requests.get', 'get', ([], {'url': 'url', 'verify': '(False)', 'auth': '(username, password)'}), '(url=url, verify=False, auth=(username, password))\n', (629, 679), False, 'from requests import get\n'), ((1188, 1241), 'requests.get', 'get', ([], {'url': 'url', 'verify': '(False)', 'auth': '(username, password)'}), '(url=url, verify=False, auth=(username, password))\n', (1191, 1241), False, 'from requests import get\n'), ((1788, 1857), 'requests.get', 'get', ([], {'url': 'url', 'verify': '(False)', 'auth': '(username, password)', 'params': 'payload'}), '(url=url, verify=False, auth=(username, password), params=payload)\n', (1791, 1857), False, 'from requests import get\n'), ((2132, 2199), 'requests.get', 'get', ([], {'url': 'url', 'verify': '(False)', 'headers': 'request_headers', 'params': 'payload'}), '(url=url, verify=False, headers=request_headers, params=payload)\n', (2135, 2199), False, 'from requests import get\n'), ((2399, 2441), 'requests.get', 'get', ([], {'url': 'url', 'verify': '(False)', 'params': 'payload'}), '(url=url, verify=False, params=payload)\n', (2402, 2441), False, 'from requests import get\n')] |
import pygame
from pygame.locals import *
#Generic projectile template
class ProjectileObject(pygame.sprite.Sprite):
def __init__(self, width, height, proj_image, speed, screen, owner_x, owner_y):
pygame.sprite.Sprite.__init__(self)
self.width = width
self.height = height
self.speed = speed
self.screen = screen
self.owner_x = owner_x
self.owner_y = owner_y
self.image_file = proj_image
self.proj_image = pygame.image.load(self.image_file)
self.start()
#Move projectile upward
def start(self):
self.image = pygame.transform.scale(self.proj_image, (self.width, self.height))
self.rect = self.image.get_rect()
self.proj_plain = pygame.sprite.RenderPlain(self)
self.rect.midbottom = self.owner_x
self.rect.y = self.screen.get_height() - self.owner_y
#Signal if collided with rocket (only relevant in EnemyProjectiles)
def rocket_collided(self, rocket):
return False
def render(self):
self.rect.move_ip(0, 0 - self.speed)
self.proj_plain.draw(self.screen) | [
"pygame.image.load",
"pygame.transform.scale",
"pygame.sprite.RenderPlain",
"pygame.sprite.Sprite.__init__"
] | [((210, 245), 'pygame.sprite.Sprite.__init__', 'pygame.sprite.Sprite.__init__', (['self'], {}), '(self)\n', (239, 245), False, 'import pygame\n'), ((495, 529), 'pygame.image.load', 'pygame.image.load', (['self.image_file'], {}), '(self.image_file)\n', (512, 529), False, 'import pygame\n'), ((635, 701), 'pygame.transform.scale', 'pygame.transform.scale', (['self.proj_image', '(self.width, self.height)'], {}), '(self.proj_image, (self.width, self.height))\n', (657, 701), False, 'import pygame\n'), ((770, 801), 'pygame.sprite.RenderPlain', 'pygame.sprite.RenderPlain', (['self'], {}), '(self)\n', (795, 801), False, 'import pygame\n')] |
from django.shortcuts import render, redirect, reverse, get_object_or_404
from django.contrib.auth import login
from django.contrib.auth.decorators import login_required
from django.utils import timezone, translation
from django.utils.translation import gettext as _, get_language
from django.views import View
from django.contrib import messages
from django.db import IntegrityError
from django.http import HttpResponse
from .forms import NotebookForm, NoteForm, UserSettingsForm, ProfileSettingsForm, UserAccountForm, AddNoteVersionForm
from .models import Notebook, Note, PublicSharedNote, Profile, ActivationToken, Reminder, NoteVersion
from xhtml2pdf import pisa
from tempfile import TemporaryFile
from .tasks import send_reminder_task
from .utils import serialize_notebooks_with_notes, redirect_back
from datetime import datetime
from simple_notes.celery import app
def index(request):
if request.user.is_authenticated:
ctx = general_context(request)
return render(request, 'notes/home.html', ctx)
return render(request, 'notes/index.html')
@login_required
def share_note(request, notebook_title, note_title):
notebook = get_object_or_404(Notebook, user=request.user, title=notebook_title)
note = get_object_or_404(Note, notebook=notebook, title=note_title)
try:
PublicSharedNote.objects.create(user=request.user, note=note)
messages.success(request, _('Public link for {note_title} was created successfully.').format(
note_title=note_title
))
except IntegrityError:
pass
return redirect(reverse('notes:view-shared-note', args=[
PublicSharedNote.objects.get(user=request.user, note=note).unique_secret
]))
@login_required
def remove_notebook(request, notebook_title):
get_object_or_404(Notebook, user=request.user, title=notebook_title).delete()
messages.success(request, _('{notebook_title} was removed successfully.').format(
notebook_title=notebook_title
))
return redirect('notes:index')
@login_required
def remove_note(request, notebook_title, note_title):
notebook = get_object_or_404(Notebook, user=request.user, title=notebook_title)
get_object_or_404(Note, notebook=notebook, title=note_title).delete()
messages.success(request, _('{note_title} was removed successfully.').format(note_title=note_title))
return redirect(reverse('notes:view-notes', args=[notebook_title]))
@login_required
def remove_shared_note(request, unique_secret):
get_object_or_404(PublicSharedNote, unique_secret=unique_secret).delete()
messages.success(request, _('Shared note was removed successfully.'))
return redirect('notes:index')
@login_required
def edit_notebook(request, notebook_title):
notebook = Notebook.objects.get(title=notebook_title)
if request.method == 'POST':
form = NotebookForm(request.POST)
if form.is_valid():
notebook.title = form.data['title']
notebook.save()
msg = _('Changes in {notebook_title} were saved successfully.').format(
notebook_title=notebook_title
)
messages.success(request, msg)
else:
messages.error(request, _('Check your input!'))
return redirect(reverse('notes:view-notes', args=[notebook.title]))
@login_required
def edit_note(request, notebook_title, note_title):
notebook = get_object_or_404(Notebook, user=request.user, title=notebook_title)
note = get_object_or_404(Note, notebook=notebook, title=note_title)
form = NoteForm(instance=note)
share = request.GET.get('share')
if request.method == 'POST':
# we need this crutch because there are two input fields for mobile and desktop
titles = request.POST.getlist('title')
new_title = titles[0] if titles[0] != note.title else titles[1]
request.POST._mutable = True
request.POST['title'] = new_title
form = NoteForm(request.POST, instance=note)
if form.is_valid():
note.title = form.data.getlist('title')[0]
note.content = form.data.getlist('content')[0]
note.modified_at = timezone.now()
note.save()
if not share:
messages.success(request, _('Changes in {note_title} were saved successfully.').format(
note_title=note_title
))
else:
messages.error(request, _('The title field should NOT be empty!'))
if form.is_valid() and share:
return redirect(reverse('notes:share-note', args=[notebook.title, note.title]))
ctx = general_context(request, {
'form': form,
'notebook_title': notebook.title,
'note_title': note.title,
'note_edit_date': note.modified_at,
'reminder_set': Reminder.objects.filter(note=note).exists(),
'language': get_language(),
})
return render(request, 'notes/edit-note.html', ctx)
@login_required
def settings(request):
profile = Profile.objects.get(user=request.user)
user_settings_form = UserSettingsForm(instance=request.user)
profile_settings_form = ProfileSettingsForm(instance=profile)
if request.method == 'POST':
user_settings_form = UserSettingsForm({
'username': request.POST.get('username'),
'email': request.POST.get('email')
}, instance=request.user)
profile_settings_form = ProfileSettingsForm({
'language': request.POST.get('language'),
'theme': request.POST.get('theme'),
}, instance=profile)
if user_settings_form.is_valid() and profile_settings_form.is_valid():
user_settings_form.save()
profile_settings_form.save()
language = profile_settings_form.cleaned_data['language']
translation.activate(language)
messages.success(request, _('Your settings were saved successfully.'))
return redirect_back(request)
return redirect('notes:index', {
'user_settings_form': user_settings_form,
'profile_settings_form': profile_settings_form,
})
@login_required
def view_notes(request, notebook_title):
notebook = get_object_or_404(Notebook, user=request.user, title=notebook_title)
notes = Note.objects.filter(notebook=notebook)
ctx = general_context(request, {
'notes': notes,
'notebook_title': notebook_title,
})
return render(request, 'notes/view-notes.html', ctx)
def view_shared_note(request, unique_secret):
shared_note = get_object_or_404(PublicSharedNote, unique_secret=unique_secret)
ctx = {
'shared_note': shared_note,
}
ctx = general_context(request, ctx) if request.user.is_authenticated else ctx
return render(request, 'notes/view-shared-note.html', ctx)
@login_required
def create_note(request, title):
form = NoteForm(initial={'title': '', 'content': ''})
if request.method == 'POST':
# we need this crutch because there are two input fields for mobile and desktop
titles = request.POST.getlist('title')
new_title = titles[0] if titles[0] else titles[1]
request.POST._mutable = True
request.POST['title'] = new_title
form = NoteForm(request.POST)
if form.is_valid():
notebook = get_object_or_404(Notebook, user=request.user, title=title)
note = form.save(commit=False)
note.notebook = notebook
note.modified_at = timezone.now()
try:
note.save()
except IntegrityError:
messages.error(request, _('A note with such name already exists!'))
return render(request, _('notes/create_note.html'), general_context(request, {
'form': form,
'notebook_title': title,
}))
messages.success(request, _('{note_title} was created successfully.').format(note_title=note.title))
return redirect(reverse('notes:edit-note', args=[notebook.title, note.title]))
else:
messages.error(request, _('The title field should NOT be empty!'))
ctx = general_context(request, {
'form': form,
'notebook_title': title,
})
return render(request, 'notes/create_note.html', ctx)
@login_required
def create_notebook(request):
if request.method == 'POST':
form = NotebookForm(request.POST)
if form.is_valid():
notebook = form.save(commit=False)
if Notebook.objects.filter(user=request.user, title=notebook.title).exists():
messages.error(request, _('Notebook with this name already exists!'))
return redirect('notes:index')
notebook.user = request.user
notebook.save()
messages.success(request, _('{notebook_title} was created successfully.').format(
notebook_title=notebook.title
))
return redirect(reverse('notes:view-notes', args=[notebook.title]))
else:
messages.error(request, form.errors['title'])
return redirect('notes:index')
class SignUp(View):
def get(self, request):
if request.user.is_authenticated:
return redirect('notes:index')
form = UserAccountForm(initial={
'username': '',
'email': ''
})
return render(request, 'notes/sign-up.html', {'form': form})
def post(self, request):
form = UserAccountForm(request.POST, initial={
'username': '',
'email': ''
}, request=request)
if form.is_valid():
user = form.save()
login(request, user)
return redirect('notes:index')
return render(request, 'notes/sign-up.html', {'form': form})
def general_context(request, context=None):
notebooks = Notebook.objects \
.filter(user=request.user) \
.only('title')
shared_notes = PublicSharedNote.objects \
.filter(user=request.user) \
.only('unique_secret',
'note__title',
'note__notebook')
theme = Profile.objects \
.only('theme') \
.get(user=request.user) \
.theme
user_settings_form = UserSettingsForm(instance=request.user)
profile_settings_form = ProfileSettingsForm(instance=Profile.objects.get(user=request.user))
data = {
'notebooks': notebooks,
'shared_notes': shared_notes,
'theme': theme,
'user_settings_form': user_settings_form,
'profile_settings_form': profile_settings_form,
'search_data': serialize_notebooks_with_notes(request)
}
notes = Note.objects \
.filter(notebook__user=request.user) \
.only('notebook__title') \
.values('notebook__title')
for notebook in data['notebooks']:
notebook_title = notebook.title
count = len([item for item in notes if item['notebook__title'] == notebook_title])
notebook.notes_count = count
if context:
if 'notes' not in context and 'notebook_title' in context:
notebook = Notebook.objects \
.only('title') \
.get(user=request.user,
title=context['notebook_title'])
context.update({
'notes': Note.objects
.filter(notebook=notebook)
.only('notebook__title', 'title')
})
context.update(data)
return context
return data
@login_required
def export_to_pdf(request, notebook_title, note_title):
notebook = Notebook.objects.get(user=request.user, title=notebook_title)
note = Note.objects.get(notebook=notebook, title=note_title)
tmpFile = TemporaryFile(mode='w+b')
pisa.CreatePDF(note.content.replace('\n', '<br>'), tmpFile)
tmpFile.seek(0)
pdf = tmpFile.read()
content_disposition = f'attachment; filename="{note_title}.pdf"'
response = HttpResponse(pdf, content_type='application/pdf')
response['Content-Disposition'] = content_disposition
tmpFile.close()
return response
def activate_account(request, token):
activation_token = get_object_or_404(ActivationToken, token=token)
activation_token.profile.is_activated = True
activation_token.profile.save()
activation_token.delete()
return render(request, 'notes/account-verification-done.html')
def set_reminder(request, notebook_title, note_title):
try:
date = datetime.strptime(
request.POST.get('remind-me-date'),
'%m/%d/%Y %H:%M'
).astimezone()
except:
date = None
if not date:
return redirect(reverse('notes:edit-note', args=[notebook_title, note_title]))
result = send_reminder_task.apply_async((
request.user.username,
request.user.email,
notebook_title,
note_title,
request.build_absolute_uri(reverse('notes:edit-note', args=[notebook_title, note_title])),
), eta=date)
Reminder.objects.create(
task_id=result.task_id,
note=Note.objects.get(
notebook__user=request.user,
notebook__title=notebook_title,
title=note_title,
),
)
messages.success(request, message=_('Reminder was set successfully!'))
return redirect(reverse('notes:edit-note', args=[notebook_title, note_title]))
def remove_reminder(request, notebook_title, note_title):
note = Note.objects.get(notebook__title=notebook_title, title=note_title)
reminder = Reminder.objects.get(note=note)
app.control.revoke(task_id=reminder.task_id)
reminder.delete()
messages.success(request, message=_('Reminder was canceled successfully!'))
return redirect(reverse('notes:edit-note', args=[notebook_title, note_title]))
def view_note_versions(request, notebook_title, note_title):
note = Note.objects.get(notebook__title=notebook_title, title=note_title, notebook__user=request.user)
versions = NoteVersion.objects.filter(note=note, user=request.user)
ctx = general_context(request, {
'note_versions': versions,
'notebook_title': notebook_title,
'note_title': note_title
})
return render(request, 'notes/note-versions-list.html', ctx)
def add_note_version(request, notebook_title, note_title):
note = Note.objects \
.get(notebook__title=notebook_title,
title=note_title,
notebook__user=request.user)
form = AddNoteVersionForm(request.POST, request=request, note=note)
if form.is_valid():
form.save()
messages.success(request, _('Version was added successfully.'))
return redirect_back(request)
def remove_note_version(request, notebook_title, note_id):
get_object_or_404(
NoteVersion,
id=note_id
).delete()
messages.success(request, _('Version was removed successfully.'))
return redirect_back(request)
def restore_note_version(request, notebook_title, note_id):
note_version = get_object_or_404(
NoteVersion,
id=note_id
)
note = note_version.note
note.title = note_version.title
note.content = note_version.content
note.modified_at = note_version.created_at
note.save()
messages.success(request, _('Version was restored successfully.'))
return redirect(reverse('notes:edit-note', args=(notebook_title, note.title)))
def view_note_version(request, notebook_title, note_id):
note_version = get_object_or_404(
NoteVersion,
id=note_id
)
return render(request, 'notes/view-note-version.html', general_context(request, {
'note_version': note_version,
'notebook_title': note_version.note.notebook.title,
'note_title': note_version.note.title,
}))
| [
"django.utils.translation.activate",
"django.http.HttpResponse",
"django.utils.translation.gettext",
"django.shortcuts.redirect",
"django.utils.timezone.now",
"django.contrib.messages.error",
"django.contrib.auth.login",
"simple_notes.celery.app.control.revoke",
"django.shortcuts.get_object_or_404",... | [((1043, 1078), 'django.shortcuts.render', 'render', (['request', '"""notes/index.html"""'], {}), "(request, 'notes/index.html')\n", (1049, 1078), False, 'from django.shortcuts import render, redirect, reverse, get_object_or_404\n'), ((1165, 1233), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Notebook'], {'user': 'request.user', 'title': 'notebook_title'}), '(Notebook, user=request.user, title=notebook_title)\n', (1182, 1233), False, 'from django.shortcuts import render, redirect, reverse, get_object_or_404\n'), ((1245, 1305), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Note'], {'notebook': 'notebook', 'title': 'note_title'}), '(Note, notebook=notebook, title=note_title)\n', (1262, 1305), False, 'from django.shortcuts import render, redirect, reverse, get_object_or_404\n'), ((2015, 2038), 'django.shortcuts.redirect', 'redirect', (['"""notes:index"""'], {}), "('notes:index')\n", (2023, 2038), False, 'from django.shortcuts import render, redirect, reverse, get_object_or_404\n'), ((2126, 2194), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Notebook'], {'user': 'request.user', 'title': 'notebook_title'}), '(Notebook, user=request.user, title=notebook_title)\n', (2143, 2194), False, 'from django.shortcuts import render, redirect, reverse, get_object_or_404\n'), ((2679, 2702), 'django.shortcuts.redirect', 'redirect', (['"""notes:index"""'], {}), "('notes:index')\n", (2687, 2702), False, 'from django.shortcuts import render, redirect, reverse, get_object_or_404\n'), ((3424, 3492), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Notebook'], {'user': 'request.user', 'title': 'notebook_title'}), '(Notebook, user=request.user, title=notebook_title)\n', (3441, 3492), False, 'from django.shortcuts import render, redirect, reverse, get_object_or_404\n'), ((3504, 3564), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Note'], {'notebook': 'notebook', 'title': 
'note_title'}), '(Note, notebook=notebook, title=note_title)\n', (3521, 3564), False, 'from django.shortcuts import render, redirect, reverse, get_object_or_404\n'), ((4936, 4980), 'django.shortcuts.render', 'render', (['request', '"""notes/edit-note.html"""', 'ctx'], {}), "(request, 'notes/edit-note.html', ctx)\n", (4942, 4980), False, 'from django.shortcuts import render, redirect, reverse, get_object_or_404\n'), ((6020, 6139), 'django.shortcuts.redirect', 'redirect', (['"""notes:index"""', "{'user_settings_form': user_settings_form, 'profile_settings_form':\n profile_settings_form}"], {}), "('notes:index', {'user_settings_form': user_settings_form,\n 'profile_settings_form': profile_settings_form})\n", (6028, 6139), False, 'from django.shortcuts import render, redirect, reverse, get_object_or_404\n'), ((6233, 6301), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Notebook'], {'user': 'request.user', 'title': 'notebook_title'}), '(Notebook, user=request.user, title=notebook_title)\n', (6250, 6301), False, 'from django.shortcuts import render, redirect, reverse, get_object_or_404\n'), ((6476, 6521), 'django.shortcuts.render', 'render', (['request', '"""notes/view-notes.html"""', 'ctx'], {}), "(request, 'notes/view-notes.html', ctx)\n", (6482, 6521), False, 'from django.shortcuts import render, redirect, reverse, get_object_or_404\n'), ((6588, 6652), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['PublicSharedNote'], {'unique_secret': 'unique_secret'}), '(PublicSharedNote, unique_secret=unique_secret)\n', (6605, 6652), False, 'from django.shortcuts import render, redirect, reverse, get_object_or_404\n'), ((6801, 6852), 'django.shortcuts.render', 'render', (['request', '"""notes/view-shared-note.html"""', 'ctx'], {}), "(request, 'notes/view-shared-note.html', ctx)\n", (6807, 6852), False, 'from django.shortcuts import render, redirect, reverse, get_object_or_404\n'), ((8314, 8360), 'django.shortcuts.render', 'render', (['request', 
'"""notes/create_note.html"""', 'ctx'], {}), "(request, 'notes/create_note.html', ctx)\n", (8320, 8360), False, 'from django.shortcuts import render, redirect, reverse, get_object_or_404\n'), ((9176, 9199), 'django.shortcuts.redirect', 'redirect', (['"""notes:index"""'], {}), "('notes:index')\n", (9184, 9199), False, 'from django.shortcuts import render, redirect, reverse, get_object_or_404\n'), ((11840, 11865), 'tempfile.TemporaryFile', 'TemporaryFile', ([], {'mode': '"""w+b"""'}), "(mode='w+b')\n", (11853, 11865), False, 'from tempfile import TemporaryFile\n'), ((12060, 12109), 'django.http.HttpResponse', 'HttpResponse', (['pdf'], {'content_type': '"""application/pdf"""'}), "(pdf, content_type='application/pdf')\n", (12072, 12109), False, 'from django.http import HttpResponse\n'), ((12273, 12320), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['ActivationToken'], {'token': 'token'}), '(ActivationToken, token=token)\n', (12290, 12320), False, 'from django.shortcuts import render, redirect, reverse, get_object_or_404\n'), ((12449, 12504), 'django.shortcuts.render', 'render', (['request', '"""notes/account-verification-done.html"""'], {}), "(request, 'notes/account-verification-done.html')\n", (12455, 12504), False, 'from django.shortcuts import render, redirect, reverse, get_object_or_404\n'), ((13682, 13726), 'simple_notes.celery.app.control.revoke', 'app.control.revoke', ([], {'task_id': 'reminder.task_id'}), '(task_id=reminder.task_id)\n', (13700, 13726), False, 'from simple_notes.celery import app\n'), ((14323, 14376), 'django.shortcuts.render', 'render', (['request', '"""notes/note-versions-list.html"""', 'ctx'], {}), "(request, 'notes/note-versions-list.html', ctx)\n", (14329, 14376), False, 'from django.shortcuts import render, redirect, reverse, get_object_or_404\n'), ((15133, 15175), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['NoteVersion'], {'id': 'note_id'}), '(NoteVersion, id=note_id)\n', (15150, 15175), False, 'from 
django.shortcuts import render, redirect, reverse, get_object_or_404\n'), ((15601, 15643), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['NoteVersion'], {'id': 'note_id'}), '(NoteVersion, id=note_id)\n', (15618, 15643), False, 'from django.shortcuts import render, redirect, reverse, get_object_or_404\n'), ((991, 1030), 'django.shortcuts.render', 'render', (['request', '"""notes/home.html"""', 'ctx'], {}), "(request, 'notes/home.html', ctx)\n", (997, 1030), False, 'from django.shortcuts import render, redirect, reverse, get_object_or_404\n'), ((2396, 2446), 'django.shortcuts.reverse', 'reverse', (['"""notes:view-notes"""'], {'args': '[notebook_title]'}), "('notes:view-notes', args=[notebook_title])\n", (2403, 2446), False, 'from django.shortcuts import render, redirect, reverse, get_object_or_404\n'), ((2623, 2665), 'django.utils.translation.gettext', '_', (['"""Shared note was removed successfully."""'], {}), "('Shared note was removed successfully.')\n", (2624, 2665), True, 'from django.utils.translation import gettext as _, get_language\n'), ((3287, 3337), 'django.shortcuts.reverse', 'reverse', (['"""notes:view-notes"""'], {'args': '[notebook.title]'}), "('notes:view-notes', args=[notebook.title])\n", (3294, 3337), False, 'from django.shortcuts import render, redirect, reverse, get_object_or_404\n'), ((9456, 9509), 'django.shortcuts.render', 'render', (['request', '"""notes/sign-up.html"""', "{'form': form}"], {}), "(request, 'notes/sign-up.html', {'form': form})\n", (9462, 9509), False, 'from django.shortcuts import render, redirect, reverse, get_object_or_404\n'), ((9828, 9881), 'django.shortcuts.render', 'render', (['request', '"""notes/sign-up.html"""', "{'form': form}"], {}), "(request, 'notes/sign-up.html', {'form': form})\n", (9834, 9881), False, 'from django.shortcuts import render, redirect, reverse, get_object_or_404\n'), ((13430, 13491), 'django.shortcuts.reverse', 'reverse', (['"""notes:edit-note"""'], {'args': '[notebook_title, 
note_title]'}), "('notes:edit-note', args=[notebook_title, note_title])\n", (13437, 13491), False, 'from django.shortcuts import render, redirect, reverse, get_object_or_404\n'), ((13851, 13912), 'django.shortcuts.reverse', 'reverse', (['"""notes:edit-note"""'], {'args': '[notebook_title, note_title]'}), "('notes:edit-note', args=[notebook_title, note_title])\n", (13858, 13912), False, 'from django.shortcuts import render, redirect, reverse, get_object_or_404\n'), ((14977, 15015), 'django.utils.translation.gettext', '_', (['"""Version was removed successfully."""'], {}), "('Version was removed successfully.')\n", (14978, 15015), True, 'from django.utils.translation import gettext as _, get_language\n'), ((15398, 15437), 'django.utils.translation.gettext', '_', (['"""Version was restored successfully."""'], {}), "('Version was restored successfully.')\n", (15399, 15437), True, 'from django.utils.translation import gettext as _, get_language\n'), ((15460, 15521), 'django.shortcuts.reverse', 'reverse', (['"""notes:edit-note"""'], {'args': '(notebook_title, note.title)'}), "('notes:edit-note', args=(notebook_title, note.title))\n", (15467, 15521), False, 'from django.shortcuts import render, redirect, reverse, get_object_or_404\n'), ((1793, 1861), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Notebook'], {'user': 'request.user', 'title': 'notebook_title'}), '(Notebook, user=request.user, title=notebook_title)\n', (1810, 1861), False, 'from django.shortcuts import render, redirect, reverse, get_object_or_404\n'), ((2199, 2259), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Note'], {'notebook': 'notebook', 'title': 'note_title'}), '(Note, notebook=notebook, title=note_title)\n', (2216, 2259), False, 'from django.shortcuts import render, redirect, reverse, get_object_or_404\n'), ((2518, 2582), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['PublicSharedNote'], {'unique_secret': 'unique_secret'}), '(PublicSharedNote, 
unique_secret=unique_secret)\n', (2535, 2582), False, 'from django.shortcuts import render, redirect, reverse, get_object_or_404\n'), ((3161, 3191), 'django.contrib.messages.success', 'messages.success', (['request', 'msg'], {}), '(request, msg)\n', (3177, 3191), False, 'from django.contrib import messages\n'), ((4185, 4199), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (4197, 4199), False, 'from django.utils import timezone, translation\n'), ((4568, 4630), 'django.shortcuts.reverse', 'reverse', (['"""notes:share-note"""'], {'args': '[notebook.title, note.title]'}), "('notes:share-note', args=[notebook.title, note.title])\n", (4575, 4630), False, 'from django.shortcuts import render, redirect, reverse, get_object_or_404\n'), ((4901, 4915), 'django.utils.translation.get_language', 'get_language', ([], {}), '()\n', (4913, 4915), False, 'from django.utils.translation import gettext as _, get_language\n'), ((5850, 5880), 'django.utils.translation.activate', 'translation.activate', (['language'], {}), '(language)\n', (5870, 5880), False, 'from django.utils import timezone, translation\n'), ((7358, 7417), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Notebook'], {'user': 'request.user', 'title': 'title'}), '(Notebook, user=request.user, title=title)\n', (7375, 7417), False, 'from django.shortcuts import render, redirect, reverse, get_object_or_404\n'), ((7529, 7543), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (7541, 7543), False, 'from django.utils import timezone, translation\n'), ((9118, 9163), 'django.contrib.messages.error', 'messages.error', (['request', "form.errors['title']"], {}), "(request, form.errors['title'])\n", (9132, 9163), False, 'from django.contrib import messages\n'), ((9311, 9334), 'django.shortcuts.redirect', 'redirect', (['"""notes:index"""'], {}), "('notes:index')\n", (9319, 9334), False, 'from django.shortcuts import render, redirect, reverse, get_object_or_404\n'), ((9747, 9767), 
'django.contrib.auth.login', 'login', (['request', 'user'], {}), '(request, user)\n', (9752, 9767), False, 'from django.contrib.auth import login\n'), ((9788, 9811), 'django.shortcuts.redirect', 'redirect', (['"""notes:index"""'], {}), "('notes:index')\n", (9796, 9811), False, 'from django.shortcuts import render, redirect, reverse, get_object_or_404\n'), ((12779, 12840), 'django.shortcuts.reverse', 'reverse', (['"""notes:edit-note"""'], {'args': '[notebook_title, note_title]'}), "('notes:edit-note', args=[notebook_title, note_title])\n", (12786, 12840), False, 'from django.shortcuts import render, redirect, reverse, get_object_or_404\n'), ((13372, 13407), 'django.utils.translation.gettext', '_', (['"""Reminder was set successfully!"""'], {}), "('Reminder was set successfully!')\n", (13373, 13407), True, 'from django.utils.translation import gettext as _, get_language\n'), ((13788, 13828), 'django.utils.translation.gettext', '_', (['"""Reminder was canceled successfully!"""'], {}), "('Reminder was canceled successfully!')\n", (13789, 13828), True, 'from django.utils.translation import gettext as _, get_language\n'), ((14734, 14770), 'django.utils.translation.gettext', '_', (['"""Version was added successfully."""'], {}), "('Version was added successfully.')\n", (14735, 14770), True, 'from django.utils.translation import gettext as _, get_language\n'), ((14872, 14914), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['NoteVersion'], {'id': 'note_id'}), '(NoteVersion, id=note_id)\n', (14889, 14914), False, 'from django.shortcuts import render, redirect, reverse, get_object_or_404\n'), ((1902, 1949), 'django.utils.translation.gettext', '_', (['"""{notebook_title} was removed successfully."""'], {}), "('{notebook_title} was removed successfully.')\n", (1903, 1949), True, 'from django.utils.translation import gettext as _, get_language\n'), ((2300, 2343), 'django.utils.translation.gettext', '_', (['"""{note_title} was removed successfully."""'], {}), 
"('{note_title} was removed successfully.')\n", (2301, 2343), True, 'from django.utils.translation import gettext as _, get_language\n'), ((3242, 3264), 'django.utils.translation.gettext', '_', (['"""Check your input!"""'], {}), "('Check your input!')\n", (3243, 3264), True, 'from django.utils.translation import gettext as _, get_language\n'), ((4466, 4507), 'django.utils.translation.gettext', '_', (['"""The title field should NOT be empty!"""'], {}), "('The title field should NOT be empty!')\n", (4467, 4507), True, 'from django.utils.translation import gettext as _, get_language\n'), ((5920, 5963), 'django.utils.translation.gettext', '_', (['"""Your settings were saved successfully."""'], {}), "('Your settings were saved successfully.')\n", (5921, 5963), True, 'from django.utils.translation import gettext as _, get_language\n'), ((8046, 8107), 'django.shortcuts.reverse', 'reverse', (['"""notes:edit-note"""'], {'args': '[notebook.title, note.title]'}), "('notes:edit-note', args=[notebook.title, note.title])\n", (8053, 8107), False, 'from django.shortcuts import render, redirect, reverse, get_object_or_404\n'), ((8159, 8200), 'django.utils.translation.gettext', '_', (['"""The title field should NOT be empty!"""'], {}), "('The title field should NOT be empty!')\n", (8160, 8200), True, 'from django.utils.translation import gettext as _, get_language\n'), ((8761, 8784), 'django.shortcuts.redirect', 'redirect', (['"""notes:index"""'], {}), "('notes:index')\n", (8769, 8784), False, 'from django.shortcuts import render, redirect, reverse, get_object_or_404\n'), ((9040, 9090), 'django.shortcuts.reverse', 'reverse', (['"""notes:view-notes"""'], {'args': '[notebook.title]'}), "('notes:view-notes', args=[notebook.title])\n", (9047, 9090), False, 'from django.shortcuts import render, redirect, reverse, get_object_or_404\n'), ((13027, 13088), 'django.shortcuts.reverse', 'reverse', (['"""notes:edit-note"""'], {'args': '[notebook_title, note_title]'}), "('notes:edit-note', 
args=[notebook_title, note_title])\n", (13034, 13088), False, 'from django.shortcuts import render, redirect, reverse, get_object_or_404\n'), ((1421, 1480), 'django.utils.translation.gettext', '_', (['"""Public link for {note_title} was created successfully."""'], {}), "('Public link for {note_title} was created successfully.')\n", (1422, 1480), True, 'from django.utils.translation import gettext as _, get_language\n'), ((3023, 3080), 'django.utils.translation.gettext', '_', (['"""Changes in {notebook_title} were saved successfully."""'], {}), "('Changes in {notebook_title} were saved successfully.')\n", (3024, 3080), True, 'from django.utils.translation import gettext as _, get_language\n'), ((8691, 8735), 'django.utils.translation.gettext', '_', (['"""Notebook with this name already exists!"""'], {}), "('Notebook with this name already exists!')\n", (8692, 8735), True, 'from django.utils.translation import gettext as _, get_language\n'), ((7664, 7706), 'django.utils.translation.gettext', '_', (['"""A note with such name already exists!"""'], {}), "('A note with such name already exists!')\n", (7665, 7706), True, 'from django.utils.translation import gettext as _, get_language\n'), ((7748, 7775), 'django.utils.translation.gettext', '_', (['"""notes/create_note.html"""'], {}), "('notes/create_note.html')\n", (7749, 7775), True, 'from django.utils.translation import gettext as _, get_language\n'), ((7942, 7985), 'django.utils.translation.gettext', '_', (['"""{note_title} was created successfully."""'], {}), "('{note_title} was created successfully.')\n", (7943, 7985), True, 'from django.utils.translation import gettext as _, get_language\n'), ((8894, 8941), 'django.utils.translation.gettext', '_', (['"""{notebook_title} was created successfully."""'], {}), "('{notebook_title} was created successfully.')\n", (8895, 8941), True, 'from django.utils.translation import gettext as _, get_language\n'), ((4293, 4346), 'django.utils.translation.gettext', '_', (['"""Changes 
in {note_title} were saved successfully."""'], {}), "('Changes in {note_title} were saved successfully.')\n", (4294, 4346), True, 'from django.utils.translation import gettext as _, get_language\n')] |
import matplotlib
import matplotlib.pyplot as plt
import numpy as np
import pylab
from awrams.utils.metatypes import *
import awrams.utils.datetools as dt
from awrams.utils.extents import *
from awrams.utils.ts.processing import *
o = ObjectDict
MIN_FIG_WIDTH = 800.0
MIN_FIG_HEIGHT = 600.0
NAN_FILTER_MAP = matplotlib.colors.ListedColormap([[0.5,0.5,0.5],[1,0,0]])
NAN_FILTER_MAP.set_bad((0.1,0.4,0.8))
def _get_ax(kwargs):
    """Pop the 'ax' entry from *kwargs* and return it, defaulting to the
    current pyplot axes when no axes were supplied.

    Bug fix: the original tested ``'ax' is None`` (a string literal, which is
    never None), so a missing 'ax' key returned None instead of ``plt.gca()``.
    """
    ax = _get_kwd('ax', kwargs)
    if ax is None:
        ax = plt.gca()
    return ax
def _get_kwd(k,kwargs):
if k in kwargs:
kwd = kwargs[k]
del kwargs[k]
return kwd
else:
return None
def plot_single_grid(data, extent, show_bar=True, **kwargs):
    '''
    Plot a single 2D grid using the graph coordinates supplied by *extent*.

    Recognised keyword options (consumed from kwargs): units, cmap, norm,
    clim, interpolation, ax.  Any remaining keywords are applied via
    ``ax.set`` on a best-effort basis (unknown properties are ignored).
    '''
    units = _get_kwd('units', kwargs)
    cmap = _get_kwd('cmap', kwargs)
    norm = _get_kwd('norm', kwargs)
    clim = _get_kwd('clim', kwargs)
    interpolation = _get_kwd('interpolation', kwargs) or 'none'
    ax = _get_ax(kwargs)
    im = ax.imshow(data, extent=extent.get_graph_extent(),
                   interpolation=interpolation, cmap=cmap, norm=norm, clim=clim)
    if show_bar:
        from mpl_toolkits.axes_grid1 import make_axes_locatable
        # Attach the colorbar in its own axes so the image is not squeezed.
        divider = make_axes_locatable(ax)
        cax = divider.append_axes("right", "5%", pad="3%")
        cb = plt.colorbar(im, cax=cax)
        cb.set_label(units)
    # Best-effort: forward leftover keywords as axes properties.
    for k, v in kwargs.items():
        try:
            ax.set(**{k: v})
        except Exception:  # was a bare except; keep KeyboardInterrupt/SystemExit alive
            pass
def plot_single_gridMinMax(data, extent, ax=None, labels=None, show_bar=True):
    '''
    Plot a single 2D grid with optional fixed colour limits.

    Recognised *labels* keys: title, ylabel, units, cmap, minval, maxval and
    clim (a (lo, hi) pair).  When *show_bar* is True the colour limits are set
    to (minval, maxval) and a labelled colorbar is attached.
    '''
    if labels is None:
        labels = {}
    title = get_label(labels, 'title')
    ylabel = get_label(labels, 'ylabel')
    units = get_label(labels, 'units')
    cmap = get_label(labels, 'cmap')
    minval = get_label(labels, 'minval')
    maxval = get_label(labels, 'maxval')
    if ax is None:  # fix: was ``ax == None``
        ax = plt.gca()
    # Plain (non-scientific) tick formatting so lat/lon values read naturally.
    formatter = matplotlib.ticker.ScalarFormatter(useOffset=False)
    ax.yaxis.set_major_formatter(formatter)
    ax.xaxis.set_major_formatter(formatter)
    ax.set_title(title)
    im = ax.imshow(data, extent=extent.get_graph_extent(), interpolation="none", cmap=cmap)
    plt.ylabel(ylabel)
    if 'clim' in labels:
        im.set_clim(labels['clim'][0], labels['clim'][1])
    if show_bar:
        # NOTE: overrides any 'clim' set above, matching the original ordering.
        im.set_clim(minval, maxval)
        fig = ax.figure
        cb = fig.colorbar(im)
        cb.set_label(units)
def highlight_map(bg_colour, hl_colour, mask_colour=(1, 1, 1)):
    '''
    Build a three-colour map: zero values render as *bg_colour*, one values
    as *hl_colour*, and masked values as *mask_colour*.
    '''
    colour_map = matplotlib.colors.ListedColormap([bg_colour, hl_colour])
    colour_map.set_bad(mask_colour)
    return colour_map
def gen_title(title_pattern, subs_map, sub_source):
    '''
    Substitute values looked up from *sub_source* (one per key in *subs_map*)
    into the %-style *title_pattern*.
    '''
    return title_pattern % tuple(sub_source[key] for key in subs_map)
def set_figsize_inches(width,height):
    # Set the default matplotlib figure size (in inches) for subsequent figures.
    pylab.rcParams['figure.figsize'] = (width,height)
def set_figsize_pixels(w, h):
    """Set the default figure size, specified in pixels, using the current
    figure's DPI to convert to inches."""
    dpi = float(plt.gcf().get_dpi())
    pylab.rcParams['figure.figsize'] = (w / dpi, h / dpi)
def fig_params(w, h, dpi=100.0):
    """Return a keyword dict (figsize in inches, plus dpi) for plt.figure,
    converting the supplied pixel dimensions at the given DPI."""
    return {"figsize": (w / dpi, h / dpi), "dpi": dpi}
def get_label(labels, key):
    """Return labels[key], or the empty string when the key is absent."""
    return labels.get(key, '')
class View(object):
    """Abstract base for drawable views; subclasses must implement draw()."""
    def __init__(self):
        pass
    def draw(self, ax):
        """Render this view onto the matplotlib axes *ax*.

        Bug fix: the original raised a bare ``BaseException``, which bypasses
        ``except Exception`` handlers and carries no message; the conventional
        abstract-method error is ``NotImplementedError``.
        """
        raise NotImplementedError("View subclasses must implement draw()")
class SpatialView(View):
    # Wraps a spatial data object and renders it via plot_single_grid.
    def __init__(self,data_ob,**ctl_dict):
        # data_ob: object exposing ._data (the grid array) and .extent.
        # ctl_dict: keyword options forwarded to plot_single_grid.
        self.data = data_ob
        self.ctl_dict = ctl_dict
    def draw(self,ax):
        # Render the wrapped grid onto *ax*, remembering the axes used.
        self.ax = ax
        plot_single_grid(self.data._data,self.data.extent,ax=ax,**self.ctl_dict)
class NoBarView(View):
    """Spatial view drawn without a colorbar."""
    def __init__(self, data, labels=None):
        # data: object exposing .data (the grid array) and .extent.
        self.data = data
        self.labels = {} if labels is None else labels
    def draw(self, ax):
        """Render the wrapped grid onto *ax*, suppressing the colorbar."""
        plot_single_grid(self.data.data, self.data.extent, ax=ax,
                         labels=self.labels, show_bar=False)
class TimeSeriesView(View):
    """View that plots a wrapped time-series object via its ``.plot`` method."""
    def __init__(self, data, **ctl_dict):
        # data: object exposing ._data with a .plot(label=...) method.
        # ctl_dict: 'legend' is consumed on draw; the rest go to ax.set.
        self.data = data
        self.ctl_dict = ctl_dict
    def draw(self, ax):
        """Plot the series onto *ax*; add a legend when a label was given."""
        if ax is None:  # fix: was ``ax == None``
            ax = plt.gca()
        self.ax = ax
        legend = _get_kwd('legend', self.ctl_dict)
        self.data._data.plot(label=legend)
        if legend != '':
            plt.legend()
        # Best-effort: forward remaining keywords as axes properties.
        for k, v in self.ctl_dict.items():
            try:
                ax.set(**{k: v})
            except Exception:  # was a bare except
                pass
class GridView(View):
    """A rows x cols arrangement of child View objects drawn into one figure."""
    def __init__(self, rows, cols, x_scale=1.0, y_scale=1.0):
        self.rows = rows
        self.cols = cols
        self.children = np.empty(shape=(rows, cols), dtype=View)
        self.axes = plt.GridSpec(rows, cols)
        self.x_scale = x_scale
        self.y_scale = y_scale
    def __getitem__(self, idx):
        return self.children[idx]
    def __setitem__(self, idx, val):
        self.children[idx] = val
    def draw(self):
        """Create a figure and draw every populated child into its grid cell."""
        # Plain (non-scientific) tick labels so lat/lon read naturally.
        formatter = matplotlib.ticker.ScalarFormatter(useOffset=False)
        width = max(self.cols * 400 * self.x_scale, MIN_FIG_WIDTH)
        height = max(self.rows * 400 * self.y_scale, MIN_FIG_HEIGHT)
        fig = plt.figure(**fig_params(width, height, 96.0))
        for row in range(self.rows):
            for col in range(self.cols):
                child = self.children[row, col]
                if child:
                    cell_ax = plt.subplot(self.axes[row, col])
                    cell_ax.yaxis.set_major_formatter(formatter)
                    cell_ax.xaxis.set_major_formatter(formatter)
                    child.draw(cell_ax)
        fig.tight_layout()
# +++
# Hardcoding max number of columns
MAX_COLUMNS = 3
class GridLayout(object):
    """Generates a GridView layout based on field lookups against a source.

    The values found in ``source.keys[row_field]`` / ``source.keys[col_field]``
    define the grid rows and columns; each cell is populated with an instance
    of *subview* built from the single matching query result.
    """
    def __init__(self, rows, cols, subview, x_scale=1.0, y_scale=1.0):
        'Specify fields of the source object which will be used to assign layout'
        self.row_field = rows
        self.col_field = cols
        self.subview_class = subview
        self.x_scale = x_scale
        self.y_scale = y_scale
    def generate_view(self, source):
        """Build and return a GridView populated from *source* queries.

        Raises if any (row, col) query returns zero or multiple results.
        """
        if self.row_field:
            row_values = source.keys[self.row_field]
            n_rvals = len(row_values)
        else:
            row_values = [None]
            n_rvals = 1
        if self.col_field:
            col_values = source.keys[self.col_field]
            n_cvals = len(col_values)
        else:
            # Bug fix: col_values was left unbound here, causing a NameError
            # in the loop below whenever no column field was specified
            # (mirrors the row branch above).
            col_values = [None]
            n_cvals = 1
        # Wrap onto extra rows when there are more columns than MAX_COLUMNS.
        total_rows = n_rvals * int(np.ceil(n_cvals / float(MAX_COLUMNS)))
        n_cols = MAX_COLUMNS if n_cvals > MAX_COLUMNS else n_cvals
        view = GridView(total_rows, n_cols, self.x_scale, self.y_scale)
        row_i = 0
        col_i = 0
        q = {}
        for row_val in row_values:
            if row_val:
                q.update({self.row_field: row_val})
            for col_val in col_values:
                if col_val:
                    q.update({self.col_field: col_val})
                if col_i == MAX_COLUMNS:
                    # Overflowed the column budget: continue on the next row.
                    col_i = 0
                    row_i += 1
                results = source.query(q)
                if len(results) == 0:
                    raise Exception
                elif len(results) > 1:
                    raise Exception
                else:
                    cur_var = results.items[0]
                    labels = self.gen_labels(cur_var)
                    if col_i > 0:
                        # Only the first column in a row keeps its y-label.
                        labels['ylabel'] = ''
                    view[row_i, col_i] = self.subview_class(cur_var, **labels)
                col_i += 1
            col_i = 0
            row_i += 1
        return view
class DefaultSpatialGridLayout(GridLayout):
    '''
    Field-lookup grid layout producing SpatialView cells with default labels
    (title from the column field, ylabel from the datum period).
    '''
    def __init__(self, rows, cols, subview=SpatialView, **kwds):
        super(DefaultSpatialGridLayout, self).__init__(rows, cols, subview, 1.6, 1.2)
        self.kwds = kwds
    def gen_labels(self, datum):
        """Assemble plotting keywords for one datum cell."""
        labels = {
            "title": datum[self.col_field],
            "ylabel": "%s" % (dt.pretty_print_period(datum['period'])),
            "units": datum['units'],
            "cmap": 'RdYlBu',
        }
        if datum.source.name is not None:
            labels['title'] += " " + datum.source.name
        labels.update(self.kwds)
        labels.update(datum)
        # The raw grid payload must not leak into the label keywords.
        for unwanted in ('data', 'extent'):
            if unwanted in labels:
                del labels[unwanted]
        return labels
class ShowNaNGridLayout(GridLayout):
    '''
    Field-lookup grid layout that renders cells with the NaN-highlighting
    colour map (NAN_FILTER_MAP).
    '''
    def __init__(self, rows, cols, subview=NoBarView):
        super(ShowNaNGridLayout, self).__init__(rows, cols, subview, 1.2, 0.8)
    def gen_labels(self, datum):
        """Title/ylabel come straight from the layout fields."""
        return {
            "title": datum[self.col_field],
            "ylabel": datum[self.row_field],
            "cmap": NAN_FILTER_MAP,
        }
class SpatialAggregateLayout(GridLayout):
    '''
    Layout of time-series views: rows keyed by variable, columns by extent,
    each cell a TimeSeriesView of a spatially aggregated series.
    '''
    def __init__(self, **kwds):
        # Fields of the source object used to assign the layout.
        super(SpatialAggregateLayout, self).__init__('variable', 'extent',
                                                     TimeSeriesView, 3.5, 1.0)
        self.kwds = kwds
    def gen_labels(self, datum):
        """Build title/ylabel/legend strings for one aggregated series."""
        built = {
            'title': "%s over area (%s)" % (datum['method'], datum['extent']),
            'ylabel': "%s" % datum['units'],
            'legend': "%s" % datum['variable'],
        }
        built.update(self.kwds)
        return built
| [
"mpl_toolkits.axes_grid1.make_axes_locatable",
"matplotlib.pyplot.subplot",
"numpy.empty",
"matplotlib.pyplot.legend",
"matplotlib.pyplot.colorbar",
"awrams.utils.datetools.pretty_print_period",
"matplotlib.pyplot.GridSpec",
"matplotlib.pyplot.gca",
"matplotlib.pyplot.ylabel",
"matplotlib.pyplot.g... | [((311, 373), 'matplotlib.colors.ListedColormap', 'matplotlib.colors.ListedColormap', (['[[0.5, 0.5, 0.5], [1, 0, 0]]'], {}), '([[0.5, 0.5, 0.5], [1, 0, 0]])\n', (343, 373), False, 'import matplotlib\n'), ((2001, 2051), 'matplotlib.ticker.ScalarFormatter', 'matplotlib.ticker.ScalarFormatter', ([], {'useOffset': '(False)'}), '(useOffset=False)\n', (2034, 2051), False, 'import matplotlib\n'), ((2259, 2277), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['ylabel'], {}), '(ylabel)\n', (2269, 2277), True, 'import matplotlib.pyplot as plt\n'), ((2697, 2753), 'matplotlib.colors.ListedColormap', 'matplotlib.colors.ListedColormap', (['[bg_colour, hl_colour]'], {}), '([bg_colour, hl_colour])\n', (2729, 2753), False, 'import matplotlib\n'), ((3130, 3139), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (3137, 3139), True, 'import matplotlib.pyplot as plt\n'), ((494, 503), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (501, 503), True, 'import matplotlib.pyplot as plt\n'), ((1243, 1266), 'mpl_toolkits.axes_grid1.make_axes_locatable', 'make_axes_locatable', (['ax'], {}), '(ax)\n', (1262, 1266), False, 'from mpl_toolkits.axes_grid1 import make_axes_locatable\n'), ((1351, 1376), 'matplotlib.pyplot.colorbar', 'plt.colorbar', (['im'], {'cax': 'cax'}), '(im, cax=cax)\n', (1363, 1376), True, 'import matplotlib.pyplot as plt\n'), ((1974, 1983), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (1981, 1983), True, 'import matplotlib.pyplot as plt\n'), ((4803, 4843), 'numpy.empty', 'np.empty', ([], {'shape': '(rows, cols)', 'dtype': 'View'}), '(shape=(rows, cols), dtype=View)\n', (4811, 4843), True, 'import numpy as np\n'), ((4863, 4887), 'matplotlib.pyplot.GridSpec', 'plt.GridSpec', (['rows', 'cols'], {}), '(rows, cols)\n', (4875, 4887), True, 'import matplotlib.pyplot as plt\n'), ((5200, 5250), 'matplotlib.ticker.ScalarFormatter', 'matplotlib.ticker.ScalarFormatter', ([], {'useOffset': '(False)'}), '(useOffset=False)\n', (5233, 5250), False, 
'import matplotlib\n'), ((4338, 4347), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (4345, 4347), True, 'import matplotlib.pyplot as plt\n'), ((4501, 4513), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', (4511, 4513), True, 'import matplotlib.pyplot as plt\n'), ((8260, 8299), 'awrams.utils.datetools.pretty_print_period', 'dt.pretty_print_period', (["datum['period']"], {}), "(datum['period'])\n", (8282, 8299), True, 'import awrams.utils.datetools as dt\n'), ((5606, 5642), 'matplotlib.pyplot.subplot', 'plt.subplot', (['self.axes[row_i, col_i]'], {}), '(self.axes[row_i, col_i])\n', (5617, 5642), True, 'import matplotlib.pyplot as plt\n')] |
"""
Author: <NAME>
Date Created: Apr 22, 2019
Last Edited: Apr 22, 2019
Description:
"""
from __future__ import division
import rospy
import numpy as np
from functools import reduce
class RadarUtilities:
    """Helper routines for filtering and binning 2D/3D radar returns."""

    def __init__(self):
        pass

    ## filtering of 2D and 3D radar data
    def AIRE_filtering(self, data_AIRE, thresholds):
        """Filter radar targets on Azimuth / Intensity / Range / Elevation.

        Parameters
        ----------
        data_AIRE : (N, 4) array with columns
            azimuth [rad], intensity [dB], range [m], elevation [rad]
            (elevation is all-NaN for 2D radar data).
        thresholds : sequence of 4 values
            [azimuth [deg], intensity [dB], range [m], elevation [deg]].

        Returns
        -------
        Indices of targets with |azimuth| < azimuth_thres,
        intensity > intensity_thres, range > range_thres and — for 3D
        data — |elevation| < elevation_thres.
        """
        ## unpack data (into row vectors)
        radar_azimuth = data_AIRE[:, 0]    # [rad]
        radar_intensity = data_AIRE[:, 1]  # [dB]
        radar_range = data_AIRE[:, 2]      # [m]
        radar_elevation = data_AIRE[:, 3]  # [rad]

        azimuth_thres = thresholds[0]      # [deg]
        intensity_thres = thresholds[1]    # [dB]
        range_thres = thresholds[2]        # [m]
        elevation_thres = thresholds[3]    # [deg]

        idx_azimuth = np.nonzero(np.abs(np.rad2deg(radar_azimuth)) < azimuth_thres)
        idx_intensity = np.nonzero(radar_intensity > intensity_thres)
        idx_range = np.nonzero(radar_range > range_thres)

        ## combine the individual filters via set intersection
        if np.all(np.isnan(radar_elevation)):
            ## 2D radar data: no elevation information to filter on
            idx_AIRE = reduce(np.intersect1d, (idx_azimuth, idx_intensity, idx_range))
        else:
            ## 3D radar data: additionally apply the elevation gate
            idx_elevation = np.nonzero(np.abs(np.rad2deg(radar_elevation)) < elevation_thres)
            idx_AIRE = reduce(np.intersect1d,
                              (idx_azimuth, idx_intensity, idx_range, idx_elevation))
        return idx_AIRE

    def getNumAzimuthBins(self, radar_azimuth):
        """Count distinct azimuth bins, merging unique angles that lie within
        ``bin_thres`` of the first angle of the current group.

        Returns the number of merged bins.
        """
        bin_thres = 0.009  # [rad] - empirically determined
        azimuth_bins = np.unique(radar_azimuth)
        bins = []
        current_bin = azimuth_bins[0]
        begin_idx = 0
        for i in range(azimuth_bins.shape[0]):
            if np.abs(current_bin - azimuth_bins[i]) > bin_thres:
                ## Close off the previous group. Bug fix: the slice previously
                ## stopped at end_idx (exclusive), dropping the group's last
                ## element and producing NaN (mean of an empty slice) for
                ## single-element groups; use an inclusive end instead.
                end_idx = i - 1
                bins.append(np.mean(azimuth_bins[begin_idx:end_idx + 1]))
                ## start a new group at the current angle
                begin_idx = i
                current_bin = azimuth_bins[i]
            if i == azimuth_bins.shape[0] - 1:
                ## close off the final group (inclusive of the last element)
                bins.append(np.mean(azimuth_bins[begin_idx:i + 1]))
        return np.array(bins).size
| [
"numpy.abs",
"numpy.isnan",
"numpy.nonzero",
"numpy.rad2deg",
"numpy.mean",
"numpy.array",
"functools.reduce",
"numpy.unique"
] | [((1069, 1114), 'numpy.nonzero', 'np.nonzero', (['(radar_intensity > intensity_thres)'], {}), '(radar_intensity > intensity_thres)\n', (1079, 1114), True, 'import numpy as np\n'), ((1140, 1177), 'numpy.nonzero', 'np.nonzero', (['(radar_range > range_thres)'], {}), '(radar_range > range_thres)\n', (1150, 1177), True, 'import numpy as np\n'), ((1767, 1791), 'numpy.unique', 'np.unique', (['radar_azimuth'], {}), '(radar_azimuth)\n', (1776, 1791), True, 'import numpy as np\n'), ((2737, 2751), 'numpy.array', 'np.array', (['bins'], {}), '(bins)\n', (2745, 2751), True, 'import numpy as np\n'), ((1225, 1250), 'numpy.isnan', 'np.isnan', (['radar_elevation'], {}), '(radar_elevation)\n', (1233, 1250), True, 'import numpy as np\n'), ((1305, 1368), 'functools.reduce', 'reduce', (['np.intersect1d', '(idx_azimuth, idx_intensity, idx_range)'], {}), '(np.intersect1d, (idx_azimuth, idx_intensity, idx_range))\n', (1311, 1368), False, 'from functools import reduce\n'), ((1528, 1606), 'functools.reduce', 'reduce', (['np.intersect1d', '(idx_azimuth, idx_intensity, idx_range, idx_elevation)'], {}), '(np.intersect1d, (idx_azimuth, idx_intensity, idx_range, idx_elevation))\n', (1534, 1606), False, 'from functools import reduce\n'), ((1938, 1975), 'numpy.abs', 'np.abs', (['(current_bin - azimuth_bins[i])'], {}), '(current_bin - azimuth_bins[i])\n', (1944, 1975), True, 'import numpy as np\n'), ((2169, 2209), 'numpy.mean', 'np.mean', (['azimuth_bins[begin_idx:end_idx]'], {}), '(azimuth_bins[begin_idx:end_idx])\n', (2176, 2209), True, 'import numpy as np\n'), ((2638, 2678), 'numpy.mean', 'np.mean', (['azimuth_bins[begin_idx:end_idx]'], {}), '(azimuth_bins[begin_idx:end_idx])\n', (2645, 2678), True, 'import numpy as np\n'), ((1000, 1025), 'numpy.rad2deg', 'np.rad2deg', (['radar_azimuth'], {}), '(radar_azimuth)\n', (1010, 1025), True, 'import numpy as np\n'), ((1456, 1483), 'numpy.rad2deg', 'np.rad2deg', (['radar_elevation'], {}), '(radar_elevation)\n', (1466, 1483), True, 'import numpy as 
np\n')] |
import granatum_sdk
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
from deepimpute.deepImpute import deepImpute
def main():
    """Impute dropout (zero) entries of a Granatum assay with DeepImpute,
    render before/after heatmaps, and export the imputed assay."""
    gn = granatum_sdk.Granatum()
    assay = gn.get_import("assay")
    # Transpose: DeepImpute receives cells as rows, genes as columns.
    data = np.array(assay.get("matrix")).T
    seed = gn.get_arg("seed")  # NOTE(review): unused — the MultiNet call below is commented out
    checkbox = gn.get_arg("use_auto_limit")
    cell_subset = gn.get_arg("cell_subset")
    # "auto" when the auto-limit box is ticked, the explicit NN_lim argument
    # otherwise; any other checkbox value falls back to True.
    NN_lim = {False: gn.get_arg("NN_lim"), True: "auto"}.get(checkbox, True)
    #model = MultiNet(n_cores="all", seed=seed)
    #model.fit(data, NN_lim=NN_lim, cell_subset=cell_subset)
    frameddata = pd.DataFrame(data)
    imputed, model = deepImpute(frameddata, NN_lim=NN_lim, cell_subset=cell_subset)
    LABELS_PARAMS = {"fontsize": 14, "fontweight": "bold", "fontname": "serif"}
    # Shared colour ceiling: 99th percentile of log-counts of the raw matrix.
    vmax = np.percentile(np.log10(1 + data.flatten()), 99)
    print("Generating Heatmap")
    fig, ax = plt.subplots(1, 2)
    ax[0].imshow(np.log10(1 + data), aspect="auto", vmax=vmax)
    ax[1].imshow(np.log10(1 + imputed), aspect="auto", vmax=vmax)
    ax[0].set_xlabel("Genes", **LABELS_PARAMS)
    ax[1].set_xlabel("Genes", **LABELS_PARAMS)
    ax[0].set_ylabel("Cells", **LABELS_PARAMS)
    ax[0].set_title("raw (log)", **LABELS_PARAMS)
    ax[1].set_title("imputed (log)", **LABELS_PARAMS)
    gn.add_current_figure_to_results("Heatmaps")
    nb_genes = len(set(model.targets.flatten()))
    #nb_genes = np.sum([len(net.targetGenes) for net in model.targets])
    def calc_dropout(matrix):
        # Fraction of zero entries, relative to the size of the *raw* matrix.
        return np.sum((np.array(matrix) == 0)) * 1. / data.size
    r, p = model.score(frameddata)  # r is reported below as the masked-data correlation
    rows, cols = frameddata.shape
    message = "\n".join(
        [
            " - Data frame number of rows: **{0}**",
            " - Data frame number of columns: **{1}**",
            " - Number of imputed genes: **{2}**",
            " - Percentage of dropout entries *before* imputation: **{3:.2f}%**",
            " - Percentage of dropout entries *after* imputation: **{4:.2f}%**",
            " - Accuracy (correlation) on masked data: **{5:.2f}**"
        ]
    ).format(
        rows,
        cols,
        nb_genes,
        calc_dropout(data) * 100,
        calc_dropout(imputed.to_numpy()) * 100,
        r
    )
    gn.add_result(message, data_type="markdown")
    # Write the imputed matrix back in the assay's original orientation.
    assay["matrix"] = imputed.T.to_numpy().tolist()
    gn.export_statically(assay, "Imputed assay")
    gn.commit()
# Script entry point.
if __name__ == "__main__":
    main()
| [
"pandas.DataFrame",
"deepimpute.deepImpute.deepImpute",
"granatum_sdk.Granatum",
"numpy.array",
"numpy.log10",
"matplotlib.pyplot.subplots"
] | [((160, 183), 'granatum_sdk.Granatum', 'granatum_sdk.Granatum', ([], {}), '()\n', (181, 183), False, 'import granatum_sdk\n'), ((588, 606), 'pandas.DataFrame', 'pd.DataFrame', (['data'], {}), '(data)\n', (600, 606), True, 'import pandas as pd\n'), ((628, 690), 'deepimpute.deepImpute.deepImpute', 'deepImpute', (['frameddata'], {'NN_lim': 'NN_lim', 'cell_subset': 'cell_subset'}), '(frameddata, NN_lim=NN_lim, cell_subset=cell_subset)\n', (638, 690), False, 'from deepimpute.deepImpute import deepImpute\n'), ((879, 897), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(1)', '(2)'], {}), '(1, 2)\n', (891, 897), True, 'import matplotlib.pyplot as plt\n'), ((915, 933), 'numpy.log10', 'np.log10', (['(1 + data)'], {}), '(1 + data)\n', (923, 933), True, 'import numpy as np\n'), ((978, 999), 'numpy.log10', 'np.log10', (['(1 + imputed)'], {}), '(1 + imputed)\n', (986, 999), True, 'import numpy as np\n'), ((1497, 1513), 'numpy.array', 'np.array', (['matrix'], {}), '(matrix)\n', (1505, 1513), True, 'import numpy as np\n')] |
# Copyright 2017 The dm_control Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Planar Stacker domain."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
from dm_control.utils.inverse_kinematics import qpos_from_site_pose
from dm_control import mujoco
from dm_env import specs
from dm_control.rl import control
from dm_control.suite import base
from dm_control.suite import common
from dm_control.utils import containers
from dm_control.utils import rewards
from dm_control.utils import xml_tools
import os
from imageio import imsave
from PIL import Image,ImageColor
from lxml import etree
import numpy as np
import math
_TOL = 1e-13
_CLOSE = .01 # (Meters) Distance below which a thing is considered close.
_CONTROL_TIMESTEP = .02 # (Seconds)
_TIME_LIMIT = 30 # (Seconds)
CORNER_INDEX_ACTION=['B0_0','B0_8','B8_0','B8_8']
CORNER_INDEX_GEOM=['G0_0','G0_8','G8_0','G8_8']
SUITE = containers.TaggedTasks()
def get_model_and_assets():
  """Return the cloth-gripper model XML string together with the common assets."""
  xml_string = common.read_model('cloth_gripper.xml')
  return xml_string, common.ASSETS
# NOTE(review): the function is named `easy` but is registered under the
# 'hard' tag, and the docstring mentions boxes although the model loaded is
# cloth_gripper.xml — confirm the intended naming.
@SUITE.add('hard')
def easy(fully_observable=True, time_limit=_TIME_LIMIT, random=None,
         environment_kwargs=None):
  """Returns stacker task with 2 boxes."""
  physics=Physics.from_xml_string(*get_model_and_assets())
  task = Stack(randomize_gains=False,random=random)
  environment_kwargs = environment_kwargs or {}
  # Wrap physics+task in an RL control Environment with the fixed timestep.
  return control.Environment(
      physics, task, control_timestep=_CONTROL_TIMESTEP,special_task=True,time_limit=time_limit,
      **environment_kwargs)
# Currently adds no behaviour beyond mujoco.Physics; kept as an extension
# point for domain-specific helpers.
class Physics(mujoco.Physics):
  """Physics with additional features for the Planar Manipulator domain."""
class Stack(base.Task):
  """A Stack `Task`: stack the boxes.

  NOTE(review): despite the name/docstring, the reward below measures how
  close the cloth grid geoms (G0_0..G8_8) are to a flat target layout —
  confirm the intended description.
  """
  def __init__(self, randomize_gains, random=None):
    """Initialize an instance of `PointMass`.
    Args:
      randomize_gains: A `bool`, whether to randomize the actuator gains.
      random: Optional, either a `numpy.random.RandomState` instance, an
        integer seed for creating a new `RandomState`, or None to select a seed
        automatically (default).
    """
    self._randomize_gains = randomize_gains
    # Image-space (row, col) pick location, refreshed in get_observation.
    self.current_loc = np.zeros((2,))
    super(Stack, self).__init__(random=random)
  def initialize_episode(self, physics):
    # Push the two centre body points downward and jolt the cloth corners
    # with a random force to randomise the starting configuration.
    physics.named.data.xfrc_applied['B3_4', :3] = np.array([0, 0, -2])
    physics.named.data.xfrc_applied['B4_4', :3] = np.array([0, 0, -2])
    physics.named.data.xfrc_applied[CORNER_INDEX_ACTION, :3] = np.random.uniform(-.3, .3, size=3)
    super(Stack, self).initialize_episode(physics)
  def action_spec(self, physics):
    """Returns a `BoundedArray` matching the `physics` actuators."""
    # Action is a 3-vector displacement in [-1, 1]^3, scaled in before_step.
    # NOTE(review): np.float is deprecated in modern NumPy — confirm pinning.
    return specs.BoundedArray(
        shape=(3,), dtype=np.float, minimum=[-1.0,-1.0,-1.0] , maximum=[1.0,1.0,1.0])
  def before_step(self, action, physics):
    """Sets the control signal for the actuators to values in `action`."""
    # Support legacy internal code.
    # clear previous xfrc_force
    physics.named.data.xfrc_applied[:, :3] = np.zeros((3,))
    # scale the position to be a normal range
    goal_position = action[:3]
    location = self.current_loc
    goal_position = goal_position * 0.1
    # computing the mapping from geom_xpos to location in image
    # (pinhole projection using camera 0's intrinsics and extrinsics,
    # 64x64 render as in sample_location)
    cam_fovy = physics.model.cam_fovy[0]
    f = 0.5 * 64 / math.tan(cam_fovy * math.pi / 360)
    cam_matrix = np.array([[f, 0, 64 / 2], [0, f, 64 / 2], [0, 0, 1]])
    cam_mat = physics.data.cam_xmat[0].reshape((3, 3))
    cam_pos = physics.data.cam_xpos[0].reshape((3, 1))
    cam = np.concatenate([cam_mat, cam_pos], axis=1)
    cam_pos_all = np.zeros((86, 3, 1))
    for i in range(86):
      geom_xpos_added = np.concatenate([physics.data.geom_xpos[i], np.array([1])]).reshape((4, 1))
      cam_pos_all[i] = cam_matrix.dot(cam.dot(geom_xpos_added)[:3])
    # cam_pos_xy=cam_pos_all[5:,:]
    # perspective divide, then flip the vertical axis to image coordinates
    cam_pos_xy=np.rint(cam_pos_all[:,:2].reshape((86,2))/cam_pos_all[:,2])
    cam_pos_xy=cam_pos_xy.astype(int)
    cam_pos_xy[:,1]=64-cam_pos_xy[:,1]
    # hyperparameter epsilon=3(selecting 3 nearest joint) and select the point
    epsilon=3
    possible_index=[]
    possible_z=[]
    for i in range(86):
      #flipping the x and y to make sure it corresponds to the real location
      # (geoms with index <= 4 are skipped — presumably non-cloth geoms; confirm)
      if abs(cam_pos_xy[i][0]-location[0,1])<epsilon and abs(cam_pos_xy[i][1]-location[0,0])<epsilon and i>4:
        possible_index.append(i)
        possible_z.append(physics.data.geom_xpos[i,2])
    if possible_index != [] :
      # pick the topmost (largest z) candidate geom under the pick location
      index=possible_index[possible_z.index(max(possible_z))]
      corner_action = index-4
      corner_geom = index
      # apply consecutive force to move the point to the target position
      position=goal_position+physics.named.data.geom_xpos[corner_geom]
      dist = position-physics.named.data.geom_xpos[corner_geom]
      loop=0
      while np.linalg.norm(dist)>0.025:
        loop+=1
        if loop >40:
          # give up after 40 physics steps
          break
        physics.named.data.xfrc_applied[corner_action,:3]=dist*20
        physics.step()
        self.after_step(physics)
        dist=position-physics.named.data.geom_xpos[corner_geom]
  def get_observation(self, physics):
    """Returns either features or only sensors (to be used with pixels)."""
    obs = collections.OrderedDict()
    # Sample a fresh pick location and expose it (tiled) as the observation.
    self.current_loc = self.sample_location(physics)
    obs['force_location'] = np.tile(self.current_loc, 50).reshape(-1).astype('float32')
    return obs
  def sample_location(self, physics):
    # Render camera 0 at 64x64 and pick a random non-background pixel
    # (a pixel where not all channels exceed 100).
    # obs=self.get_observation(physics)
    render_kwargs={}
    render_kwargs['camera_id']=0
    render_kwargs['width'] = 64
    render_kwargs['height'] = 64
    image=physics.render(**render_kwargs)
    location_range = np.transpose(np.where(~np.all(image > 100, axis=2)))
    num_loc=np.shape(location_range)[0]
    index=np.random.randint(num_loc,size=1)
    location=location_range[index]
    return location
  def get_reward(self, physics):
    # Negative mean distance of the 9x9 cloth geoms from a flat 0.03-spaced
    # target grid centred near the origin.
    dist_sum=0
    for i in range(9):
      for j in range(9):
        index='G'+str(i)+'_'+str(j)
        geom_dist=np.sum(abs(physics.named.data.geom_xpos[index]-np.array([-0.09+0.03*i,-0.15+0.03*j,0])))
        dist_sum += geom_dist
    dist_sum = dist_sum/81
    return -dist_sum, dict()
| [
"dm_control.suite.common.read_model",
"numpy.random.uniform",
"dm_control.rl.control.Environment",
"dm_control.utils.containers.TaggedTasks",
"math.tan",
"numpy.zeros",
"numpy.all",
"dm_env.specs.BoundedArray",
"numpy.shape",
"numpy.random.randint",
"numpy.array",
"numpy.linalg.norm",
"numpy... | [((1562, 1586), 'dm_control.utils.containers.TaggedTasks', 'containers.TaggedTasks', ([], {}), '()\n', (1584, 1586), False, 'from dm_control.utils import containers\n'), ((2097, 2235), 'dm_control.rl.control.Environment', 'control.Environment', (['physics', 'task'], {'control_timestep': '_CONTROL_TIMESTEP', 'special_task': '(True)', 'time_limit': 'time_limit'}), '(physics, task, control_timestep=_CONTROL_TIMESTEP,\n special_task=True, time_limit=time_limit, **environment_kwargs)\n', (2116, 2235), False, 'from dm_control.rl import control\n'), ((1703, 1741), 'dm_control.suite.common.read_model', 'common.read_model', (['"""cloth_gripper.xml"""'], {}), "('cloth_gripper.xml')\n", (1720, 1741), False, 'from dm_control.suite import common\n'), ((2863, 2877), 'numpy.zeros', 'np.zeros', (['(2,)'], {}), '((2,))\n', (2871, 2877), True, 'import numpy as np\n'), ((3018, 3038), 'numpy.array', 'np.array', (['[0, 0, -2]'], {}), '([0, 0, -2])\n', (3026, 3038), True, 'import numpy as np\n'), ((3089, 3109), 'numpy.array', 'np.array', (['[0, 0, -2]'], {}), '([0, 0, -2])\n', (3097, 3109), True, 'import numpy as np\n'), ((3173, 3209), 'numpy.random.uniform', 'np.random.uniform', (['(-0.3)', '(0.3)'], {'size': '(3)'}), '(-0.3, 0.3, size=3)\n', (3190, 3209), True, 'import numpy as np\n'), ((3376, 3479), 'dm_env.specs.BoundedArray', 'specs.BoundedArray', ([], {'shape': '(3,)', 'dtype': 'np.float', 'minimum': '[-1.0, -1.0, -1.0]', 'maximum': '[1.0, 1.0, 1.0]'}), '(shape=(3,), dtype=np.float, minimum=[-1.0, -1.0, -1.0],\n maximum=[1.0, 1.0, 1.0])\n', (3394, 3479), False, 'from dm_env import specs\n'), ((3713, 3727), 'numpy.zeros', 'np.zeros', (['(3,)'], {}), '((3,))\n', (3721, 3727), True, 'import numpy as np\n'), ((4057, 4110), 'numpy.array', 'np.array', (['[[f, 0, 64 / 2], [0, f, 64 / 2], [0, 0, 1]]'], {}), '([[f, 0, 64 / 2], [0, f, 64 / 2], [0, 0, 1]])\n', (4065, 4110), True, 'import numpy as np\n'), ((4231, 4273), 'numpy.concatenate', 'np.concatenate', (['[cam_mat, 
cam_pos]'], {'axis': '(1)'}), '([cam_mat, cam_pos], axis=1)\n', (4245, 4273), True, 'import numpy as np\n'), ((4292, 4312), 'numpy.zeros', 'np.zeros', (['(86, 3, 1)'], {}), '((86, 3, 1))\n', (4300, 4312), True, 'import numpy as np\n'), ((5948, 5973), 'collections.OrderedDict', 'collections.OrderedDict', ([], {}), '()\n', (5971, 5973), False, 'import collections\n'), ((6493, 6527), 'numpy.random.randint', 'np.random.randint', (['num_loc'], {'size': '(1)'}), '(num_loc, size=1)\n', (6510, 6527), True, 'import numpy as np\n'), ((4005, 4039), 'math.tan', 'math.tan', (['(cam_fovy * math.pi / 360)'], {}), '(cam_fovy * math.pi / 360)\n', (4013, 4039), False, 'import math\n'), ((6455, 6479), 'numpy.shape', 'np.shape', (['location_range'], {}), '(location_range)\n', (6463, 6479), True, 'import numpy as np\n'), ((5542, 5562), 'numpy.linalg.norm', 'np.linalg.norm', (['dist'], {}), '(dist)\n', (5556, 5562), True, 'import numpy as np\n'), ((6413, 6440), 'numpy.all', 'np.all', (['(image > 100)'], {'axis': '(2)'}), '(image > 100, axis=2)\n', (6419, 6440), True, 'import numpy as np\n'), ((6055, 6084), 'numpy.tile', 'np.tile', (['self.current_loc', '(50)'], {}), '(self.current_loc, 50)\n', (6062, 6084), True, 'import numpy as np\n'), ((4406, 4419), 'numpy.array', 'np.array', (['[1]'], {}), '([1])\n', (4414, 4419), True, 'import numpy as np\n'), ((6800, 6849), 'numpy.array', 'np.array', (['[-0.09 + 0.03 * i, -0.15 + 0.03 * j, 0]'], {}), '([-0.09 + 0.03 * i, -0.15 + 0.03 * j, 0])\n', (6808, 6849), True, 'import numpy as np\n')] |
import logging
import typing
from collections import defaultdict
from dataclasses import dataclass
from dataclasses import field as dataclass_field
from typing import Any, Dict, Iterable, Iterator, List, Optional, Set, Tuple, Union
from urllib.parse import urlparse
import yaml
from pydantic import validator
from datahub.configuration.common import ConfigurationError
from datahub.emitter import mce_builder
from datahub.emitter.mcp import MetadataChangeProposalWrapper
from datahub.ingestion.api.common import PipelineContext
from datahub.ingestion.api.source import Source, SourceReport
from datahub.ingestion.api.workunit import MetadataWorkUnit
from datahub.ingestion.source.aws.aws_common import AwsSourceConfig, make_s3_urn
from datahub.metadata.com.linkedin.pegasus2avro.common import Status
from datahub.metadata.com.linkedin.pegasus2avro.metadata.snapshot import DatasetSnapshot
from datahub.metadata.com.linkedin.pegasus2avro.mxe import MetadataChangeEvent
from datahub.metadata.com.linkedin.pegasus2avro.schema import (
MySqlDDL,
SchemaField,
SchemaMetadata,
)
from datahub.metadata.schema_classes import (
ChangeTypeClass,
DataFlowInfoClass,
DataFlowSnapshotClass,
DataJobInfoClass,
DataJobInputOutputClass,
DataJobSnapshotClass,
DatasetLineageTypeClass,
DatasetPropertiesClass,
MetadataChangeEventClass,
OwnerClass,
OwnershipClass,
OwnershipTypeClass,
UpstreamClass,
UpstreamLineageClass,
)
from datahub.utilities.hive_schema_to_avro import get_schema_fields_for_hive_column
logger = logging.getLogger(__name__)
class GlueSourceConfig(AwsSourceConfig):
    # When True, Glue job scripts are fetched and parsed for transform DAGs.
    extract_transforms: Optional[bool] = True
    # Report datasets under this platform name instead of "glue" (e.g. "athena").
    underlying_platform: Optional[str] = None
    ignore_unsupported_connectors: Optional[bool] = True
    # Emit lineage edges between Glue tables and their backing S3 locations.
    emit_s3_lineage: bool = False
    # Direction of the S3 lineage edge; validated/normalised below.
    glue_s3_lineage_direction: str = "upstream"
    @property
    def glue_client(self):
        # boto3 Glue client built from the inherited AWS connection settings.
        return self.get_glue_client()
    @property
    def s3_client(self):
        # boto3 S3 client built from the inherited AWS connection settings.
        return self.get_s3_client()
    @validator("glue_s3_lineage_direction")
    def check_direction(cls, v: str) -> str:
        # Normalise to lowercase; reject anything but upstream/downstream.
        if v.lower() not in ["upstream", "downstream"]:
            raise ConfigurationError(
                "glue_s3_lineage_direction must be either upstream or downstream"
            )
        return v.lower()
@dataclass
class GlueSourceReport(SourceReport):
    # Count of Glue tables inspected during the run.
    tables_scanned = 0
    # Names of tables skipped by filtering.
    filtered: List[str] = dataclass_field(default_factory=list)
    def report_table_scanned(self) -> None:
        """Record that one more table was scanned."""
        self.tables_scanned += 1
    def report_table_dropped(self, table: str) -> None:
        """Record a table that was filtered out of ingestion."""
        self.filtered.append(table)
class GlueSource(Source):
source_config: GlueSourceConfig
report = GlueSourceReport()
    def __init__(self, config: GlueSourceConfig, ctx: PipelineContext):
        """Capture the parsed config and pre-build the boto3 Glue/S3 clients."""
        super().__init__(ctx)
        self.source_config = config
        self.report = GlueSourceReport()
        self.glue_client = config.glue_client
        self.s3_client = config.s3_client
        self.extract_transforms = config.extract_transforms
        self.underlying_platform = config.underlying_platform
        self.env = config.env
    @classmethod
    def create(cls, config_dict, ctx):
        """Factory: parse a raw config dict into GlueSourceConfig and build the source."""
        config = GlueSourceConfig.parse_obj(config_dict)
        return cls(config, ctx)
def get_underlying_platform(self):
if self.underlying_platform in ["athena"]:
return self.underlying_platform
return "glue"
def get_all_jobs(self):
"""
List all jobs in Glue.
"""
jobs = []
# see https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/glue.html#Glue.Client.get_jobs
paginator = self.glue_client.get_paginator("get_jobs")
for page in paginator.paginate():
jobs += page["Jobs"]
return jobs
def get_dataflow_graph(self, script_path: str) -> Optional[Dict[str, Any]]:
"""
Get the DAG of transforms and data sources/sinks for a job.
Parameters
----------
script_path:
S3 path to the job's Python script.
"""
# handle a bug in AWS where script path has duplicate prefixes
if script_path.lower().startswith("s3://s3://"):
script_path = script_path[5:]
# catch any other cases where the script path is invalid
if not script_path.startswith("s3://"):
self.report.report_warning(
script_path,
f"Error parsing DAG for Glue job. The script {script_path} is not a valid S3 path.",
)
return None
# extract the script's bucket and key
url = urlparse(script_path, allow_fragments=False)
bucket = url.netloc
key = url.path[1:]
# download the script contents
# see https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/s3.html#S3.Client.get_object
obj = self.s3_client.get_object(Bucket=bucket, Key=key)
script = obj["Body"].read().decode("utf-8")
try:
# extract the job DAG from the script
# see https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/glue.html#Glue.Client.get_dataflow_graph
return self.glue_client.get_dataflow_graph(PythonScript=script)
# sometimes the Python script can be user-modified and the script is not valid for graph extraction
except self.glue_client.exceptions.InvalidInputException as e:
self.report.report_warning(
script_path,
f"Error parsing DAG for Glue job. The script {script_path} cannot be processed by Glue (this usually occurs when it has been user-modified): {e}",
)
return None
def get_s3_uri(self, node_args):
s3_uri = node_args.get("connection_options", {}).get("path")
# sometimes the path is a single element in a list rather than a single one
if s3_uri is None:
s3_uri = node_args.get("connection_options", {}).get("paths")[0]
return s3_uri
def get_dataflow_s3_names(
self, dataflow_graph: Dict[str, Any]
) -> Iterator[Tuple[str, Optional[str]]]:
# iterate through each node to populate processed nodes
for node in dataflow_graph["DagNodes"]:
node_type = node["NodeType"]
# for nodes representing datasets, we construct a dataset URN accordingly
if node_type in ["DataSource", "DataSink"]:
node_args = {
x["Name"]: yaml.safe_load(x["Value"]) for x in node["Args"]
}
# if data object is S3 bucket
if node_args.get("connection_type") == "s3":
s3_uri = self.get_s3_uri(node_args)
if s3_uri is None:
continue
extension = node_args.get("format")
yield s3_uri, extension
def process_dataflow_node(
self,
node: Dict[str, Any],
flow_urn: str,
new_dataset_ids: List[str],
new_dataset_mces: List[MetadataChangeEvent],
s3_formats: typing.DefaultDict[str, Set[Union[str, None]]],
) -> Optional[Dict[str, Any]]:
node_type = node["NodeType"]
# for nodes representing datasets, we construct a dataset URN accordingly
if node_type in ["DataSource", "DataSink"]:
node_args = {x["Name"]: yaml.safe_load(x["Value"]) for x in node["Args"]}
# if data object is Glue table
if "database" in node_args and "table_name" in node_args:
full_table_name = f"{node_args['database']}.{node_args['table_name']}"
# we know that the table will already be covered when ingesting Glue tables
node_urn = f"urn:li:dataset:(urn:li:dataPlatform:{self.get_underlying_platform()},{full_table_name},{self.env})"
# if data object is S3 bucket
elif node_args.get("connection_type") == "s3":
s3_uri = self.get_s3_uri(node_args)
if s3_uri is None:
self.report.report_warning(
f"{node['Nodetype']}-{node['Id']}",
f"Could not find script path for job {node['Nodetype']}-{node['Id']} in flow {flow_urn}. Skipping",
)
return None
# append S3 format if different ones exist
if len(s3_formats[s3_uri]) > 1:
node_urn = make_s3_urn(
s3_uri,
self.env,
suffix=node_args.get("format"),
)
else:
node_urn = make_s3_urn(s3_uri, self.env)
dataset_snapshot = DatasetSnapshot(
urn=node_urn,
aspects=[],
)
dataset_snapshot.aspects.append(Status(removed=False))
dataset_snapshot.aspects.append(
DatasetPropertiesClass(
customProperties={k: str(v) for k, v in node_args.items()},
tags=[],
)
)
new_dataset_mces.append(
MetadataChangeEvent(proposedSnapshot=dataset_snapshot)
)
new_dataset_ids.append(f"{node['NodeType']}-{node['Id']}")
else:
if self.source_config.ignore_unsupported_connectors:
logger.info(
flow_urn,
f"Unrecognized Glue data object type: {node_args}. Skipping.",
)
else:
raise ValueError(f"Unrecognized Glue data object type: {node_args}")
# otherwise, a node represents a transformation
else:
node_urn = mce_builder.make_data_job_urn_with_flow(
flow_urn, job_id=f'{node["NodeType"]}-{node["Id"]}'
)
return {
**node,
"urn": node_urn,
# to be filled in after traversing edges
"inputDatajobs": [],
"inputDatasets": [],
"outputDatasets": [],
}
def process_dataflow_graph(
self,
dataflow_graph: Dict[str, Any],
flow_urn: str,
s3_formats: typing.DefaultDict[str, Set[Union[str, None]]],
) -> Tuple[Dict[str, Dict[str, Any]], List[str], List[MetadataChangeEvent]]:
"""
Prepare a job's DAG for ingestion.
Parameters
----------
dataflow_graph:
Job DAG returned from get_dataflow_graph()
flow_urn:
URN of the flow (i.e. the AWS Glue job itself).
s3_formats:
Map from s3 URIs to formats used (for deduplication purposes)
"""
new_dataset_ids: List[str] = []
new_dataset_mces: List[MetadataChangeEvent] = []
nodes: dict = {}
# iterate through each node to populate processed nodes
for node in dataflow_graph["DagNodes"]:
processed_node = self.process_dataflow_node(
node, flow_urn, new_dataset_ids, new_dataset_mces, s3_formats
)
if processed_node is not None:
nodes[node["Id"]] = processed_node
# traverse edges to fill in node properties
for edge in dataflow_graph["DagEdges"]:
source_node = nodes[edge["Source"]]
target_node = nodes[edge["Target"]]
source_node_type = source_node["NodeType"]
target_node_type = target_node["NodeType"]
# note that source nodes can't be data sinks
if source_node_type == "DataSource":
target_node["inputDatasets"].append(source_node["urn"])
# keep track of input data jobs (as defined in schemas)
else:
target_node["inputDatajobs"].append(source_node["urn"])
# track output datasets (these can't be input datasets)
if target_node_type == "DataSink":
source_node["outputDatasets"].append(target_node["urn"])
return nodes, new_dataset_ids, new_dataset_mces
def get_dataflow_wu(self, flow_urn: str, job: Dict[str, Any]) -> MetadataWorkUnit:
"""
Generate a DataFlow workunit for a Glue job.
Parameters
----------
flow_urn:
URN for the flow
job:
Job object from get_all_jobs()
"""
region = self.source_config.aws_region
custom_props = {
"role": job["Role"],
}
if job.get("CreatedOn") is not None:
custom_props["created"] = str(job["CreatedOn"])
if job.get("LastModifiedOn") is not None:
custom_props["modified"] = str(job["LastModifiedOn"])
command = job.get("Command", {}).get("ScriptLocation")
if command is not None:
custom_props["command"] = command
mce = MetadataChangeEventClass(
proposedSnapshot=DataFlowSnapshotClass(
urn=flow_urn,
aspects=[
DataFlowInfoClass(
name=job["Name"],
description=job.get("Description"),
externalUrl=f"https://{region}.console.aws.amazon.com/gluestudio/home?region={region}#/editor/job/{job['Name']}/graph",
# specify a few Glue-specific properties
customProperties=custom_props,
),
],
)
)
return MetadataWorkUnit(id=job["Name"], mce=mce)
def get_datajob_wu(self, node: Dict[str, Any], job_name: str) -> MetadataWorkUnit:
"""
Generate a DataJob workunit for a component (node) in a Glue job.
Parameters
----------
node:
Node from process_dataflow_graph()
job:
Job object from get_all_jobs()
"""
region = self.source_config.aws_region
mce = MetadataChangeEventClass(
proposedSnapshot=DataJobSnapshotClass(
urn=node["urn"],
aspects=[
DataJobInfoClass(
name=f"{job_name}:{node['NodeType']}-{node['Id']}",
type="GLUE",
# there's no way to view an individual job node by link, so just show the graph
externalUrl=f"https://{region}.console.aws.amazon.com/gluestudio/home?region={region}#/editor/job/{job_name}/graph",
customProperties={
**{x["Name"]: x["Value"] for x in node["Args"]},
"transformType": node["NodeType"],
"nodeId": node["Id"],
},
),
DataJobInputOutputClass(
inputDatasets=node["inputDatasets"],
outputDatasets=node["outputDatasets"],
inputDatajobs=node["inputDatajobs"],
),
],
)
)
return MetadataWorkUnit(id=f'{job_name}-{node["Id"]}', mce=mce)
def get_all_tables(self) -> List[dict]:
def get_tables_from_database(database_name: str) -> List[dict]:
new_tables = []
# see https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/glue.html#Glue.Client.get_tables
paginator = self.glue_client.get_paginator("get_tables")
for page in paginator.paginate(DatabaseName=database_name):
new_tables += page["TableList"]
return new_tables
def get_database_names() -> List[str]:
database_names = []
# see https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/glue.html#Glue.Client.get_databases
paginator = self.glue_client.get_paginator("get_databases")
for page in paginator.paginate():
for db in page["DatabaseList"]:
if self.source_config.database_pattern.allowed(db["Name"]):
database_names.append(db["Name"])
return database_names
if self.source_config.database_pattern.is_fully_specified_allow_list():
database_names = self.source_config.database_pattern.get_allowed_list()
else:
database_names = get_database_names()
all_tables: List[dict] = []
for database in database_names:
all_tables += get_tables_from_database(database)
return all_tables
def get_lineage_if_enabled(
self, mce: MetadataChangeEventClass
) -> Optional[MetadataChangeProposalWrapper]:
if self.source_config.emit_s3_lineage:
# extract dataset properties aspect
dataset_properties: Optional[
DatasetPropertiesClass
] = mce_builder.get_aspect_if_available(mce, DatasetPropertiesClass)
if dataset_properties and "Location" in dataset_properties.customProperties:
location = dataset_properties.customProperties["Location"]
if location.startswith("s3://"):
s3_dataset_urn = make_s3_urn(location, self.source_config.env)
if self.source_config.glue_s3_lineage_direction == "upstream":
upstream_lineage = UpstreamLineageClass(
upstreams=[
UpstreamClass(
dataset=s3_dataset_urn,
type=DatasetLineageTypeClass.COPY,
)
]
)
mcp = MetadataChangeProposalWrapper(
entityType="dataset",
entityUrn=mce.proposedSnapshot.urn,
changeType=ChangeTypeClass.UPSERT,
aspectName="upstreamLineage",
aspect=upstream_lineage,
)
return mcp
else:
# Need to mint the s3 dataset with upstream lineage from it to glue
upstream_lineage = UpstreamLineageClass(
upstreams=[
UpstreamClass(
dataset=mce.proposedSnapshot.urn,
type=DatasetLineageTypeClass.COPY,
)
]
)
mcp = MetadataChangeProposalWrapper(
entityType="dataset",
entityUrn=s3_dataset_urn,
changeType=ChangeTypeClass.UPSERT,
aspectName="upstreamLineage",
aspect=upstream_lineage,
)
return mcp
return None
def get_workunits(self) -> Iterable[MetadataWorkUnit]:
tables = self.get_all_tables()
for table in tables:
database_name = table["DatabaseName"]
table_name = table["Name"]
full_table_name = f"{database_name}.{table_name}"
self.report.report_table_scanned()
if not self.source_config.database_pattern.allowed(
database_name
) or not self.source_config.table_pattern.allowed(full_table_name):
self.report.report_table_dropped(full_table_name)
continue
mce = self._extract_record(table, full_table_name)
workunit = MetadataWorkUnit(full_table_name, mce=mce)
self.report.report_workunit(workunit)
yield workunit
mcp = self.get_lineage_if_enabled(mce)
if mcp:
mcp_wu = MetadataWorkUnit(
id=f"{full_table_name}-upstreamLineage", mcp=mcp
)
self.report.report_workunit(mcp_wu)
yield mcp_wu
if self.extract_transforms:
dags = {}
flow_names: Dict[str, str] = {}
for job in self.get_all_jobs():
flow_urn = mce_builder.make_data_flow_urn(
self.get_underlying_platform(), job["Name"], self.env
)
flow_wu = self.get_dataflow_wu(flow_urn, job)
self.report.report_workunit(flow_wu)
yield flow_wu
job_script_location = job.get("Command", {}).get("ScriptLocation")
dag: Optional[Dict[str, Any]] = None
if job_script_location is not None:
dag = self.get_dataflow_graph(job_script_location)
dags[flow_urn] = dag
flow_names[flow_urn] = job["Name"]
# run a first pass to pick up s3 bucket names and formats
# in Glue, it's possible for two buckets to have files of different extensions
# if this happens, we append the extension in the URN so the sources can be distinguished
# see process_dataflow_node() for details
s3_formats: typing.DefaultDict[str, Set[Union[str, None]]] = defaultdict(
lambda: set()
)
for dag in dags.values():
if dag is not None:
for s3_name, extension in self.get_dataflow_s3_names(dag):
s3_formats[s3_name].add(extension)
# run second pass to generate node workunits
for flow_urn, dag in dags.items():
if dag is None:
continue
nodes, new_dataset_ids, new_dataset_mces = self.process_dataflow_graph(
dag, flow_urn, s3_formats
)
for node in nodes.values():
if node["NodeType"] not in ["DataSource", "DataSink"]:
job_wu = self.get_datajob_wu(node, flow_names[flow_urn])
self.report.report_workunit(job_wu)
yield job_wu
for dataset_id, dataset_mce in zip(new_dataset_ids, new_dataset_mces):
dataset_wu = MetadataWorkUnit(id=dataset_id, mce=dataset_mce)
self.report.report_workunit(dataset_wu)
yield dataset_wu
def _extract_record(self, table: Dict, table_name: str) -> MetadataChangeEvent:
def get_owner() -> OwnershipClass:
owner = table.get("Owner")
if owner:
owners = [
OwnerClass(
owner=f"urn:li:corpuser:{owner}",
type=OwnershipTypeClass.DATAOWNER,
)
]
else:
owners = []
return OwnershipClass(
owners=owners,
)
def get_dataset_properties() -> DatasetPropertiesClass:
return DatasetPropertiesClass(
description=table.get("Description"),
customProperties={
**table.get("Parameters", {}),
**{
k: str(v)
for k, v in table["StorageDescriptor"].items()
if k not in ["Columns", "Parameters"]
},
},
uri=table.get("Location"),
tags=[],
)
def get_schema_metadata(glue_source: GlueSource) -> SchemaMetadata:
schema = table["StorageDescriptor"]["Columns"]
fields: List[SchemaField] = []
for field in schema:
schema_fields = get_schema_fields_for_hive_column(
hive_column_name=field["Name"],
hive_column_type=field["Type"],
description=field.get("Comment"),
default_nullable=True,
)
assert schema_fields
fields.extend(schema_fields)
partition_keys = table.get("PartitionKeys", [])
for partition_key in partition_keys:
schema_fields = get_schema_fields_for_hive_column(
hive_column_name=partition_key["Name"],
hive_column_type=partition_key["Type"],
default_nullable=False,
)
assert schema_fields
fields.extend(schema_fields)
return SchemaMetadata(
schemaName=table_name,
version=0,
fields=fields,
platform=f"urn:li:dataPlatform:{self.get_underlying_platform()}",
hash="",
platformSchema=MySqlDDL(tableSchema=""),
)
dataset_snapshot = DatasetSnapshot(
urn=f"urn:li:dataset:(urn:li:dataPlatform:{self.get_underlying_platform()},{table_name},{self.env})",
aspects=[],
)
dataset_snapshot.aspects.append(Status(removed=False))
dataset_snapshot.aspects.append(get_owner())
dataset_snapshot.aspects.append(get_dataset_properties())
dataset_snapshot.aspects.append(get_schema_metadata(self))
metadata_record = MetadataChangeEvent(proposedSnapshot=dataset_snapshot)
return metadata_record
def get_report(self):
return self.report
def close(self):
pass
| [
"datahub.metadata.com.linkedin.pegasus2avro.metadata.snapshot.DatasetSnapshot",
"datahub.metadata.schema_classes.OwnershipClass",
"datahub.emitter.mce_builder.make_data_job_urn_with_flow",
"datahub.ingestion.api.workunit.MetadataWorkUnit",
"yaml.safe_load",
"datahub.metadata.schema_classes.UpstreamClass",... | [((1569, 1596), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1586, 1596), False, 'import logging\n'), ((2034, 2072), 'pydantic.validator', 'validator', (['"""glue_s3_lineage_direction"""'], {}), "('glue_s3_lineage_direction')\n", (2043, 2072), False, 'from pydantic import validator\n'), ((2433, 2470), 'dataclasses.field', 'dataclass_field', ([], {'default_factory': 'list'}), '(default_factory=list)\n', (2448, 2470), True, 'from dataclasses import field as dataclass_field\n'), ((4689, 4733), 'urllib.parse.urlparse', 'urlparse', (['script_path'], {'allow_fragments': '(False)'}), '(script_path, allow_fragments=False)\n', (4697, 4733), False, 'from urllib.parse import urlparse\n'), ((13778, 13819), 'datahub.ingestion.api.workunit.MetadataWorkUnit', 'MetadataWorkUnit', ([], {'id': "job['Name']", 'mce': 'mce'}), "(id=job['Name'], mce=mce)\n", (13794, 13819), False, 'from datahub.ingestion.api.workunit import MetadataWorkUnit\n'), ((15368, 15424), 'datahub.ingestion.api.workunit.MetadataWorkUnit', 'MetadataWorkUnit', ([], {'id': 'f"""{job_name}-{node[\'Id\']}"""', 'mce': 'mce'}), '(id=f"{job_name}-{node[\'Id\']}", mce=mce)\n', (15384, 15424), False, 'from datahub.ingestion.api.workunit import MetadataWorkUnit\n'), ((25652, 25706), 'datahub.metadata.com.linkedin.pegasus2avro.mxe.MetadataChangeEvent', 'MetadataChangeEvent', ([], {'proposedSnapshot': 'dataset_snapshot'}), '(proposedSnapshot=dataset_snapshot)\n', (25671, 25706), False, 'from datahub.metadata.com.linkedin.pegasus2avro.mxe import MetadataChangeEvent\n'), ((2192, 2282), 'datahub.configuration.common.ConfigurationError', 'ConfigurationError', (['"""glue_s3_lineage_direction must be either upstream or downstream"""'], {}), "(\n 'glue_s3_lineage_direction must be either upstream or downstream')\n", (2210, 2282), False, 'from datahub.configuration.common import ConfigurationError\n'), ((9972, 10069), 
'datahub.emitter.mce_builder.make_data_job_urn_with_flow', 'mce_builder.make_data_job_urn_with_flow', (['flow_urn'], {'job_id': 'f"""{node[\'NodeType\']}-{node[\'Id\']}"""'}), '(flow_urn, job_id=\n f"{node[\'NodeType\']}-{node[\'Id\']}")\n', (10011, 10069), False, 'from datahub.emitter import mce_builder\n'), ((17178, 17242), 'datahub.emitter.mce_builder.get_aspect_if_available', 'mce_builder.get_aspect_if_available', (['mce', 'DatasetPropertiesClass'], {}), '(mce, DatasetPropertiesClass)\n', (17213, 17242), False, 'from datahub.emitter import mce_builder\n'), ((20006, 20048), 'datahub.ingestion.api.workunit.MetadataWorkUnit', 'MetadataWorkUnit', (['full_table_name'], {'mce': 'mce'}), '(full_table_name, mce=mce)\n', (20022, 20048), False, 'from datahub.ingestion.api.workunit import MetadataWorkUnit\n'), ((23226, 23255), 'datahub.metadata.schema_classes.OwnershipClass', 'OwnershipClass', ([], {'owners': 'owners'}), '(owners=owners)\n', (23240, 23255), False, 'from datahub.metadata.schema_classes import ChangeTypeClass, DataFlowInfoClass, DataFlowSnapshotClass, DataJobInfoClass, DataJobInputOutputClass, DataJobSnapshotClass, DatasetLineageTypeClass, DatasetPropertiesClass, MetadataChangeEventClass, OwnerClass, OwnershipClass, OwnershipTypeClass, UpstreamClass, UpstreamLineageClass\n'), ((25416, 25437), 'datahub.metadata.com.linkedin.pegasus2avro.common.Status', 'Status', ([], {'removed': '(False)'}), '(removed=False)\n', (25422, 25437), False, 'from datahub.metadata.com.linkedin.pegasus2avro.common import Status\n'), ((7499, 7525), 'yaml.safe_load', 'yaml.safe_load', (["x['Value']"], {}), "(x['Value'])\n", (7513, 7525), False, 'import yaml\n'), ((20222, 20288), 'datahub.ingestion.api.workunit.MetadataWorkUnit', 'MetadataWorkUnit', ([], {'id': 'f"""{full_table_name}-upstreamLineage"""', 'mcp': 'mcp'}), "(id=f'{full_table_name}-upstreamLineage', mcp=mcp)\n", (20238, 20288), False, 'from datahub.ingestion.api.workunit import MetadataWorkUnit\n'), ((24572, 24713), 
'datahub.utilities.hive_schema_to_avro.get_schema_fields_for_hive_column', 'get_schema_fields_for_hive_column', ([], {'hive_column_name': "partition_key['Name']", 'hive_column_type': "partition_key['Type']", 'default_nullable': '(False)'}), "(hive_column_name=partition_key['Name'],\n hive_column_type=partition_key['Type'], default_nullable=False)\n", (24605, 24713), False, 'from datahub.utilities.hive_schema_to_avro import get_schema_fields_for_hive_column\n'), ((6591, 6617), 'yaml.safe_load', 'yaml.safe_load', (["x['Value']"], {}), "(x['Value'])\n", (6605, 6617), False, 'import yaml\n'), ((8866, 8907), 'datahub.metadata.com.linkedin.pegasus2avro.metadata.snapshot.DatasetSnapshot', 'DatasetSnapshot', ([], {'urn': 'node_urn', 'aspects': '[]'}), '(urn=node_urn, aspects=[])\n', (8881, 8907), False, 'from datahub.metadata.com.linkedin.pegasus2avro.metadata.snapshot import DatasetSnapshot\n'), ((17493, 17538), 'datahub.ingestion.source.aws.aws_common.make_s3_urn', 'make_s3_urn', (['location', 'self.source_config.env'], {}), '(location, self.source_config.env)\n', (17504, 17538), False, 'from datahub.ingestion.source.aws.aws_common import AwsSourceConfig, make_s3_urn\n'), ((22610, 22658), 'datahub.ingestion.api.workunit.MetadataWorkUnit', 'MetadataWorkUnit', ([], {'id': 'dataset_id', 'mce': 'dataset_mce'}), '(id=dataset_id, mce=dataset_mce)\n', (22626, 22658), False, 'from datahub.ingestion.api.workunit import MetadataWorkUnit\n'), ((22992, 23071), 'datahub.metadata.schema_classes.OwnerClass', 'OwnerClass', ([], {'owner': 'f"""urn:li:corpuser:{owner}"""', 'type': 'OwnershipTypeClass.DATAOWNER'}), "(owner=f'urn:li:corpuser:{owner}', type=OwnershipTypeClass.DATAOWNER)\n", (23002, 23071), False, 'from datahub.metadata.schema_classes import ChangeTypeClass, DataFlowInfoClass, DataFlowSnapshotClass, DataJobInfoClass, DataJobInputOutputClass, DataJobSnapshotClass, DatasetLineageTypeClass, DatasetPropertiesClass, MetadataChangeEventClass, OwnerClass, OwnershipClass, 
OwnershipTypeClass, UpstreamClass, UpstreamLineageClass\n'), ((25142, 25166), 'datahub.metadata.com.linkedin.pegasus2avro.schema.MySqlDDL', 'MySqlDDL', ([], {'tableSchema': '""""""'}), "(tableSchema='')\n", (25150, 25166), False, 'from datahub.metadata.com.linkedin.pegasus2avro.schema import MySqlDDL, SchemaField, SchemaMetadata\n'), ((8800, 8829), 'datahub.ingestion.source.aws.aws_common.make_s3_urn', 'make_s3_urn', (['s3_uri', 'self.env'], {}), '(s3_uri, self.env)\n', (8811, 8829), False, 'from datahub.ingestion.source.aws.aws_common import AwsSourceConfig, make_s3_urn\n'), ((9016, 9037), 'datahub.metadata.com.linkedin.pegasus2avro.common.Status', 'Status', ([], {'removed': '(False)'}), '(removed=False)\n', (9022, 9037), False, 'from datahub.metadata.com.linkedin.pegasus2avro.common import Status\n'), ((9351, 9405), 'datahub.metadata.com.linkedin.pegasus2avro.mxe.MetadataChangeEvent', 'MetadataChangeEvent', ([], {'proposedSnapshot': 'dataset_snapshot'}), '(proposedSnapshot=dataset_snapshot)\n', (9370, 9405), False, 'from datahub.metadata.com.linkedin.pegasus2avro.mxe import MetadataChangeEvent\n'), ((18025, 18212), 'datahub.emitter.mcp.MetadataChangeProposalWrapper', 'MetadataChangeProposalWrapper', ([], {'entityType': '"""dataset"""', 'entityUrn': 'mce.proposedSnapshot.urn', 'changeType': 'ChangeTypeClass.UPSERT', 'aspectName': '"""upstreamLineage"""', 'aspect': 'upstream_lineage'}), "(entityType='dataset', entityUrn=mce.\n proposedSnapshot.urn, changeType=ChangeTypeClass.UPSERT, aspectName=\n 'upstreamLineage', aspect=upstream_lineage)\n", (18054, 18212), False, 'from datahub.emitter.mcp import MetadataChangeProposalWrapper\n'), ((18936, 19113), 'datahub.emitter.mcp.MetadataChangeProposalWrapper', 'MetadataChangeProposalWrapper', ([], {'entityType': '"""dataset"""', 'entityUrn': 's3_dataset_urn', 'changeType': 'ChangeTypeClass.UPSERT', 'aspectName': '"""upstreamLineage"""', 'aspect': 'upstream_lineage'}), "(entityType='dataset', entityUrn=\n s3_dataset_urn, 
changeType=ChangeTypeClass.UPSERT, aspectName=\n 'upstreamLineage', aspect=upstream_lineage)\n", (18965, 19113), False, 'from datahub.emitter.mcp import MetadataChangeProposalWrapper\n'), ((14397, 14739), 'datahub.metadata.schema_classes.DataJobInfoClass', 'DataJobInfoClass', ([], {'name': 'f"""{job_name}:{node[\'NodeType\']}-{node[\'Id\']}"""', 'type': '"""GLUE"""', 'externalUrl': 'f"""https://{region}.console.aws.amazon.com/gluestudio/home?region={region}#/editor/job/{job_name}/graph"""', 'customProperties': "{**{x['Name']: x['Value'] for x in node['Args']}, 'transformType': node[\n 'NodeType'], 'nodeId': node['Id']}"}), '(name=f"{job_name}:{node[\'NodeType\']}-{node[\'Id\']}", type=\n \'GLUE\', externalUrl=\n f\'https://{region}.console.aws.amazon.com/gluestudio/home?region={region}#/editor/job/{job_name}/graph\'\n , customProperties={**{x[\'Name\']: x[\'Value\'] for x in node[\'Args\']},\n \'transformType\': node[\'NodeType\'], \'nodeId\': node[\'Id\']})\n', (14413, 14739), False, 'from datahub.metadata.schema_classes import ChangeTypeClass, DataFlowInfoClass, DataFlowSnapshotClass, DataJobInfoClass, DataJobInputOutputClass, DataJobSnapshotClass, DatasetLineageTypeClass, DatasetPropertiesClass, MetadataChangeEventClass, OwnerClass, OwnershipClass, OwnershipTypeClass, UpstreamClass, UpstreamLineageClass\n'), ((15076, 15217), 'datahub.metadata.schema_classes.DataJobInputOutputClass', 'DataJobInputOutputClass', ([], {'inputDatasets': "node['inputDatasets']", 'outputDatasets': "node['outputDatasets']", 'inputDatajobs': "node['inputDatajobs']"}), "(inputDatasets=node['inputDatasets'], outputDatasets\n =node['outputDatasets'], inputDatajobs=node['inputDatajobs'])\n", (15099, 15217), False, 'from datahub.metadata.schema_classes import ChangeTypeClass, DataFlowInfoClass, DataFlowSnapshotClass, DataJobInfoClass, DataJobInputOutputClass, DataJobSnapshotClass, DatasetLineageTypeClass, DatasetPropertiesClass, MetadataChangeEventClass, OwnerClass, OwnershipClass, 
OwnershipTypeClass, UpstreamClass, UpstreamLineageClass\n'), ((17759, 17831), 'datahub.metadata.schema_classes.UpstreamClass', 'UpstreamClass', ([], {'dataset': 's3_dataset_urn', 'type': 'DatasetLineageTypeClass.COPY'}), '(dataset=s3_dataset_urn, type=DatasetLineageTypeClass.COPY)\n', (17772, 17831), False, 'from datahub.metadata.schema_classes import ChangeTypeClass, DataFlowInfoClass, DataFlowSnapshotClass, DataJobInfoClass, DataJobInputOutputClass, DataJobSnapshotClass, DatasetLineageTypeClass, DatasetPropertiesClass, MetadataChangeEventClass, OwnerClass, OwnershipClass, OwnershipTypeClass, UpstreamClass, UpstreamLineageClass\n'), ((18660, 18747), 'datahub.metadata.schema_classes.UpstreamClass', 'UpstreamClass', ([], {'dataset': 'mce.proposedSnapshot.urn', 'type': 'DatasetLineageTypeClass.COPY'}), '(dataset=mce.proposedSnapshot.urn, type=\n DatasetLineageTypeClass.COPY)\n', (18673, 18747), False, 'from datahub.metadata.schema_classes import ChangeTypeClass, DataFlowInfoClass, DataFlowSnapshotClass, DataJobInfoClass, DataJobInputOutputClass, DataJobSnapshotClass, DatasetLineageTypeClass, DatasetPropertiesClass, MetadataChangeEventClass, OwnerClass, OwnershipClass, OwnershipTypeClass, UpstreamClass, UpstreamLineageClass\n')] |
""" Tests of utilities.
:Author: <NAME> <<EMAIL>>
:Date: 2016-11-10
:Copyright: 2016, Karr Lab
:License: MIT
"""
from wc_lang.core import (Model, Taxon, Environment, Submodel,
Compartment,
SpeciesType, Species, SpeciesCoefficient, DistributionInitConcentration,
Reaction, RateLaw, RateLawExpression, Parameter,
DfbaObjSpecies, DfbaObjReaction,
DfbaObjective, DfbaObjectiveExpression,
Observable, ObservableExpression,
Function, FunctionExpression,
StopCondition, StopConditionExpression,
Observation, ObservationSet, Evidence, Conclusion,
Reference, Author, Change, Identifier,
InitVolume, Ph, ChemicalStructure, FluxBounds, ObservationGenotype, ObservationEnv, Process,
)
from wc_lang import io
from wc_lang import util
from wc_onto import onto
from wc_utils.util.units import unit_registry
import obj_tables.sci.units
import os.path
import shutil
import tempfile
import unittest
class TestUtil(unittest.TestCase):
""" Test utilities """
    def setUp(self):
        """Build a small wc_lang model fixture:

        2 compartments, 8 species types/species (species 3 lives in comp_1,
        the rest in comp_0), 3 submodels with one reaction + rate law each,
        and 3 parameters each with one reference and one identifier.
        """
        self.model = mdl = Model()
        self.comp_0 = comp_0 = mdl.compartments.create(id='comp_0', name='compartment 0')
        self.comp_1 = comp_1 = mdl.compartments.create(id='comp_1', name='compartment 1')
        self.compartments = compartments = [comp_0, comp_1]
        self.species_types = species_types = []
        self.species = species = []
        for i in range(8):
            spec_type = mdl.species_types.create(id='spec_type_{}'.format(
                i), name='species type {}'.format(i), type=onto['WC:metabolite'])
            species_types.append(spec_type)
            # species 3 is placed in comp_1; all others in comp_0
            if i != 3:
                spec = Species(species_type=spec_type, compartment=comp_0)
            else:
                spec = Species(species_type=spec_type, compartment=comp_1)
            spec.id = spec.gen_id()
            spec.model = mdl
            species.append(spec)
            # every species gets an initial concentration distribution
            conc = DistributionInitConcentration(species=spec, mean=1)
            conc.id = conc.gen_id()
            conc.model = mdl
        # two SSA submodels and one dFBA submodel
        self.submdl_0 = submdl_0 = mdl.submodels.create(id='submdl_0', framework=onto['WC:stochastic_simulation_algorithm'])
        self.submdl_1 = submdl_1 = mdl.submodels.create(id='submdl_1', framework=onto['WC:stochastic_simulation_algorithm'])
        self.submdl_2 = submdl_2 = mdl.submodels.create(id='submdl_2', framework=onto['WC:dynamic_flux_balance_analysis'])
        self.submodels = [submdl_0, submdl_1, submdl_2]
        # rxn_0: 2 spec_0 + 3 spec_1 -> spec_2, Michaelis-Menten rate on species 5
        self.rxn_0 = rxn_0 = submdl_0.reactions.create(id='rxn_0', model=mdl)
        rxn_0.participants.create(species=species[0], coefficient=-2)
        rxn_0.participants.create(species=species[1], coefficient=-3)
        rxn_0.participants.create(species=species[2], coefficient=1)
        expression = RateLawExpression(
            expression='k_cat_0 * {0} / (k_m_0 + {0})'.format(species[5].get_primary_attribute()),
            species=species[5:6])
        expression.parameters.create(id='k_cat_0', value=2, model=mdl)
        expression.parameters.create(id='k_m_0', value=1, model=mdl)
        rate_law_0 = rxn_0.rate_laws.create(expression=expression, model=mdl)
        # rxn_1: 2 spec_0 + 3 spec_1 -> 2 spec_3, Michaelis-Menten rate on species 6
        self.rxn_1 = rxn_1 = submdl_1.reactions.create(id='rxn_1', model=mdl)
        rxn_1.participants.create(species=species[0], coefficient=-2)
        rxn_1.participants.create(species=species[1], coefficient=-3)
        rxn_1.participants.create(species=species[3], coefficient=2)
        expression = RateLawExpression(
            expression='k_cat_1 * {0} / (k_m_1 + {0})'.format(species[6].get_primary_attribute()),
            species=species[6:7])
        expression.parameters.create(id='k_cat_1', value=2, model=mdl)
        expression.parameters.create(id='k_m_1', value=1, model=mdl)
        rate_law_1 = rxn_1.rate_laws.create(expression=expression, model=mdl)
        # rxn_2: 2 spec_0 + 3 spec_1 -> spec_4, Michaelis-Menten rate on species 7
        self.rxn_2 = rxn_2 = submdl_2.reactions.create(id='rxn_2', model=mdl)
        rxn_2.participants.create(species=species[0], coefficient=-2)
        rxn_2.participants.create(species=species[1], coefficient=-3)
        rxn_2.participants.create(species=species[4], coefficient=1)
        expression = RateLawExpression(
            expression='k_cat_2 * {0} / (k_m_2 + {0})'.format(species[7].get_primary_attribute()),
            species=species[7:8])
        expression.parameters.create(id='k_cat_2', value=2, model=mdl)
        expression.parameters.create(id='k_m_2', value=1, model=mdl)
        rate_law_2 = rxn_2.rate_laws.create(expression=expression, model=mdl)
        self.reactions = [rxn_0, rxn_1, rxn_2]
        self.rate_laws = [rate_law_0, rate_law_1, rate_law_2]
        # three parameters, each with one reference carrying one identifier
        self.parameters = parameters = []
        self.references = references = []
        self.identifiers = identifiers = []
        for i in range(3):
            param = mdl.parameters.create(id='param_{}'.format(i))
            parameters.append(param)
            ref = param.references.create(id='ref_{}'.format(i), type=None)
            ref.model = mdl
            references.append(ref)
            x_ref = ref.identifiers.create(namespace='Y', id='x')
            identifiers.append(x_ref)
def test_get_model_size(self):
model = self.model
size = util.get_model_size(model)
self.assertEqual(3, size['submodels'])
self.assertEqual(8, size['species_types'])
self.assertEqual(8, size['species'])
self.assertEqual(3, size['reactions'])
self.assertEqual(2, size['compartments'])
self.assertEqual(9, size['parameters'])
self.assertEqual(3, size['references'])
def test_get_model_summary(self):
model = self.model
summary = util.get_model_summary(model)
self.assertIsInstance(summary, str)
def test_get_models(self):
non_inline_models = set([
Model, Taxon, Environment,
Submodel, Compartment, SpeciesType, Species, Observable, DistributionInitConcentration,
DfbaObjective,
Reaction, RateLaw, DfbaObjSpecies, DfbaObjReaction, Parameter, Function,
StopCondition, Observation, ObservationSet, Conclusion, Reference, Author, Change,
])
inline_models = set([
SpeciesCoefficient, RateLawExpression,
DfbaObjectiveExpression, FunctionExpression, StopConditionExpression, ObservableExpression,
Identifier, InitVolume, Ph, ChemicalStructure, FluxBounds, ObservationGenotype, ObservationEnv, Process,
Evidence,
])
self.assertEqual(set(util.get_models()), non_inline_models | inline_models)
self.assertEqual(set(util.get_models(inline=False)), non_inline_models)
def test_gen_ids(self):
    """``util.gen_ids`` should assign species ids of the form
    ``species_type[compartment]``."""
    model = Model()
    compartments = [model.compartments.create(id=c_id) for c_id in ('c_1', 'c_2')]
    species_types = [model.species_types.create(id=st_id) for st_id in ('st_1', 'st_2')]
    # Create one species per (species type, compartment) pair, species-type major,
    # matching the original creation order.
    for species_type in species_types:
        for compartment in compartments:
            model.species.create(species_type=species_type, compartment=compartment)
    util.gen_ids(model)
    expected_ids = ['st_1[c_1]', 'st_1[c_2]', 'st_2[c_1]', 'st_2[c_2]']
    for species, expected_id in zip(model.species, expected_ids):
        self.assertEqual(species.id, expected_id)
def test_get_obj_units(self):
    """Incrementally grow a model and check, after each addition, that
    ``obj_tables.sci.units.get_obj_units`` returns exactly the accumulated
    set of units of all objects in the model."""
    model = Model()
    # An empty model only contributes its own time units.
    units = set([model.time_units])
    self.assertEqual(set(obj_tables.sci.units.get_obj_units(model)), units)

    # Compartments contribute mass units and, when present, init-volume and pH units.
    model.compartments.create()
    model.compartments.create()
    for c in model.compartments:
        units.add(c.mass_units)
        if c.init_volume:
            units.add(c.init_volume.units)
        if c.ph:
            units.add(c.ph.units)
    self.assertEqual(set(obj_tables.sci.units.get_obj_units(model)), units)

    # Species types alone add no units.
    model.species_types.create()
    model.species_types.create()
    self.assertEqual(set(obj_tables.sci.units.get_obj_units(model)), units)

    # One species per (compartment, species type) pair; each species has units.
    for c in model.compartments:
        for s in model.species_types:
            model.species.create(compartment=c, species_type=s)
    for s in model.species:
        units.add(s.units)
    self.assertEqual(set(obj_tables.sci.units.get_obj_units(model)), units)

    # Initial concentrations with two different unit kinds.
    model.distribution_init_concentrations.create(species=model.species[0], units=unit_registry.parse_units('M'))
    model.distribution_init_concentrations.create(species=model.species[1], units=unit_registry.parse_units('molecule'))
    for o in model.distribution_init_concentrations:
        units.add(o.units)
    self.assertEqual(set(obj_tables.sci.units.get_obj_units(model)), units)

    # Parameters with three distinct base units.
    model.parameters.create(units=unit_registry.parse_units('g'))
    model.parameters.create(units=unit_registry.parse_units('l'))
    model.parameters.create(units=unit_registry.parse_units('s'))
    for p in model.parameters:
        units.add(p.units)
    self.assertEqual(set(obj_tables.sci.units.get_obj_units(model)), units)

    # Functions with three distinct compound units.
    model.functions.create(units=unit_registry.parse_units('g / l'))
    model.functions.create(units=unit_registry.parse_units('g / s'))
    model.functions.create(units=unit_registry.parse_units('l / s'))
    for f in model.functions:
        units.add(f.units)
    self.assertEqual(set(obj_tables.sci.units.get_obj_units(model)), units)
| [
"wc_lang.util.get_model_summary",
"wc_lang.core.Species",
"wc_lang.core.Model",
"wc_lang.core.DistributionInitConcentration",
"wc_lang.util.get_model_size",
"wc_lang.util.gen_ids",
"wc_lang.util.get_models",
"wc_utils.util.units.unit_registry.parse_units"
] | [((1308, 1315), 'wc_lang.core.Model', 'Model', ([], {}), '()\n', (1313, 1315), False, 'from wc_lang.core import Model, Taxon, Environment, Submodel, Compartment, SpeciesType, Species, SpeciesCoefficient, DistributionInitConcentration, Reaction, RateLaw, RateLawExpression, Parameter, DfbaObjSpecies, DfbaObjReaction, DfbaObjective, DfbaObjectiveExpression, Observable, ObservableExpression, Function, FunctionExpression, StopCondition, StopConditionExpression, Observation, ObservationSet, Evidence, Conclusion, Reference, Author, Change, Identifier, InitVolume, Ph, ChemicalStructure, FluxBounds, ObservationGenotype, ObservationEnv, Process\n'), ((5457, 5483), 'wc_lang.util.get_model_size', 'util.get_model_size', (['model'], {}), '(model)\n', (5476, 5483), False, 'from wc_lang import util\n'), ((5904, 5933), 'wc_lang.util.get_model_summary', 'util.get_model_summary', (['model'], {}), '(model)\n', (5926, 5933), False, 'from wc_lang import util\n'), ((6945, 6952), 'wc_lang.core.Model', 'Model', ([], {}), '()\n', (6950, 6952), False, 'from wc_lang.core import Model, Taxon, Environment, Submodel, Compartment, SpeciesType, Species, SpeciesCoefficient, DistributionInitConcentration, Reaction, RateLaw, RateLawExpression, Parameter, DfbaObjSpecies, DfbaObjReaction, DfbaObjective, DfbaObjectiveExpression, Observable, ObservableExpression, Function, FunctionExpression, StopCondition, StopConditionExpression, Observation, ObservationSet, Evidence, Conclusion, Reference, Author, Change, Identifier, InitVolume, Ph, ChemicalStructure, FluxBounds, ObservationGenotype, ObservationEnv, Process\n'), ((7545, 7564), 'wc_lang.util.gen_ids', 'util.gen_ids', (['model'], {}), '(model)\n', (7557, 7564), False, 'from wc_lang import util\n'), ((7852, 7859), 'wc_lang.core.Model', 'Model', ([], {}), '()\n', (7857, 7859), False, 'from wc_lang.core import Model, Taxon, Environment, Submodel, Compartment, SpeciesType, Species, SpeciesCoefficient, DistributionInitConcentration, Reaction, RateLaw, 
RateLawExpression, Parameter, DfbaObjSpecies, DfbaObjReaction, DfbaObjective, DfbaObjectiveExpression, Observable, ObservableExpression, Function, FunctionExpression, StopCondition, StopConditionExpression, Observation, ObservationSet, Evidence, Conclusion, Reference, Author, Change, Identifier, InitVolume, Ph, ChemicalStructure, FluxBounds, ObservationGenotype, ObservationEnv, Process\n'), ((2180, 2231), 'wc_lang.core.DistributionInitConcentration', 'DistributionInitConcentration', ([], {'species': 'spec', 'mean': '(1)'}), '(species=spec, mean=1)\n', (2209, 2231), False, 'from wc_lang.core import Model, Taxon, Environment, Submodel, Compartment, SpeciesType, Species, SpeciesCoefficient, DistributionInitConcentration, Reaction, RateLaw, RateLawExpression, Parameter, DfbaObjSpecies, DfbaObjReaction, DfbaObjective, DfbaObjectiveExpression, Observable, ObservableExpression, Function, FunctionExpression, StopCondition, StopConditionExpression, Observation, ObservationSet, Evidence, Conclusion, Reference, Author, Change, Identifier, InitVolume, Ph, ChemicalStructure, FluxBounds, ObservationGenotype, ObservationEnv, Process\n'), ((1917, 1968), 'wc_lang.core.Species', 'Species', ([], {'species_type': 'spec_type', 'compartment': 'comp_0'}), '(species_type=spec_type, compartment=comp_0)\n', (1924, 1968), False, 'from wc_lang.core import Model, Taxon, Environment, Submodel, Compartment, SpeciesType, Species, SpeciesCoefficient, DistributionInitConcentration, Reaction, RateLaw, RateLawExpression, Parameter, DfbaObjSpecies, DfbaObjReaction, DfbaObjective, DfbaObjectiveExpression, Observable, ObservableExpression, Function, FunctionExpression, StopCondition, StopConditionExpression, Observation, ObservationSet, Evidence, Conclusion, Reference, Author, Change, Identifier, InitVolume, Ph, ChemicalStructure, FluxBounds, ObservationGenotype, ObservationEnv, Process\n'), ((2010, 2061), 'wc_lang.core.Species', 'Species', ([], {'species_type': 'spec_type', 'compartment': 'comp_1'}), 
'(species_type=spec_type, compartment=comp_1)\n', (2017, 2061), False, 'from wc_lang.core import Model, Taxon, Environment, Submodel, Compartment, SpeciesType, Species, SpeciesCoefficient, DistributionInitConcentration, Reaction, RateLaw, RateLawExpression, Parameter, DfbaObjSpecies, DfbaObjReaction, DfbaObjective, DfbaObjectiveExpression, Observable, ObservableExpression, Function, FunctionExpression, StopCondition, StopConditionExpression, Observation, ObservationSet, Evidence, Conclusion, Reference, Author, Change, Identifier, InitVolume, Ph, ChemicalStructure, FluxBounds, ObservationGenotype, ObservationEnv, Process\n'), ((6765, 6782), 'wc_lang.util.get_models', 'util.get_models', ([], {}), '()\n', (6780, 6782), False, 'from wc_lang import util\n'), ((6849, 6878), 'wc_lang.util.get_models', 'util.get_models', ([], {'inline': '(False)'}), '(inline=False)\n', (6864, 6878), False, 'from wc_lang import util\n'), ((8875, 8905), 'wc_utils.util.units.unit_registry.parse_units', 'unit_registry.parse_units', (['"""M"""'], {}), "('M')\n", (8900, 8905), False, 'from wc_utils.util.units import unit_registry\n'), ((8993, 9030), 'wc_utils.util.units.unit_registry.parse_units', 'unit_registry.parse_units', (['"""molecule"""'], {}), "('molecule')\n", (9018, 9030), False, 'from wc_utils.util.units import unit_registry\n'), ((9239, 9269), 'wc_utils.util.units.unit_registry.parse_units', 'unit_registry.parse_units', (['"""g"""'], {}), "('g')\n", (9264, 9269), False, 'from wc_utils.util.units import unit_registry\n'), ((9309, 9339), 'wc_utils.util.units.unit_registry.parse_units', 'unit_registry.parse_units', (['"""l"""'], {}), "('l')\n", (9334, 9339), False, 'from wc_utils.util.units import unit_registry\n'), ((9379, 9409), 'wc_utils.util.units.unit_registry.parse_units', 'unit_registry.parse_units', (['"""s"""'], {}), "('s')\n", (9404, 9409), False, 'from wc_utils.util.units import unit_registry\n'), ((9595, 9629), 'wc_utils.util.units.unit_registry.parse_units', 
'unit_registry.parse_units', (['"""g / l"""'], {}), "('g / l')\n", (9620, 9629), False, 'from wc_utils.util.units import unit_registry\n'), ((9668, 9702), 'wc_utils.util.units.unit_registry.parse_units', 'unit_registry.parse_units', (['"""g / s"""'], {}), "('g / s')\n", (9693, 9702), False, 'from wc_utils.util.units import unit_registry\n'), ((9741, 9775), 'wc_utils.util.units.unit_registry.parse_units', 'unit_registry.parse_units', (['"""l / s"""'], {}), "('l / s')\n", (9766, 9775), False, 'from wc_utils.util.units import unit_registry\n')] |
#coding:utf-8
# K-means
import numpy as np
import sys
import matplotlib.pyplot as plt
argvs = sys.argv
if __name__ == "__main__":
    # Load a CSV file given on the command line and plot its second column,
    # saving the figure next to the input file as "<input>.png".
    data = np.genfromtxt(argvs[1], delimiter=",")
    print(data)
    plt.plot(data[:, 1])
    # BUG FIX: save BEFORE show(). With interactive backends, plt.show()
    # blocks and then closes/clears the current figure, so calling
    # plt.savefig() afterwards wrote a blank image.
    filename = argvs[1] + ".png"
    plt.savefig(filename)
    plt.show()
| [
"matplotlib.pyplot.savefig",
"matplotlib.pyplot.show",
"numpy.genfromtxt",
"matplotlib.pyplot.plot"
] | [((144, 182), 'numpy.genfromtxt', 'np.genfromtxt', (['argvs[1]'], {'delimiter': '""","""'}), "(argvs[1], delimiter=',')\n", (157, 182), True, 'import numpy as np\n'), ((282, 302), 'matplotlib.pyplot.plot', 'plt.plot', (['data[:, 1]'], {}), '(data[:, 1])\n', (290, 302), True, 'import matplotlib.pyplot as plt\n'), ((307, 317), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (315, 317), True, 'import matplotlib.pyplot as plt\n'), ((366, 387), 'matplotlib.pyplot.savefig', 'plt.savefig', (['filename'], {}), '(filename)\n', (377, 387), True, 'import matplotlib.pyplot as plt\n')] |
#!/usr/bin/env python
# coding: utf-8
# In[1]:
# src: https://machinelearningmastery.com/use-word-embedding-layers-deep-learning-keras/
# A word embedding is a class of approaches for representing words and documents,
# using a dense vector representation.
# Word embeddings can be learned from text data and reused among projects.
# They can also be learned as part of fitting a neural network on text data.
# Word embeddings provide a dense representation of words and their relative meanings.
# They are IMPROVED representations compared to used in BOW model
# Instead, in an embedding, words are represented by dense vectors where a
# vector represents the projection of the word into a continuous vector space.
# The position of a word within the vector space is learned from text and
# is based on the words that surround the word when it is used.
# The position of a word in the learned vector space is referred to as its embedding.
# Methods to obtain word embeddings:
# Word2Vec
# GloVe
# In[2]:
# Keras Embedding layer arguments:
# 1. input_dim:
# This is the size of the vocabulary in the text data.
# For example, if your data is integer encoded to values between 0-10,
# then the size of the vocabulary would be 11 words.
# 2. output_dim:
# This is the size of the vector space in which words will be embedded.
# It defines the size of the output vectors from this layer for each word.
# For example, it could be 32 or 100 or even larger.
# Test different values for your problem.
# 3. input_length:
# This is the length of input sequences, as you would define for any
# input layer of a Keras model.
# For example, if all of your input documents are comprised of 1000 words,
# this would be 1000.
# The Embedding layer has weights that are learned.
# The output of the Embedding layer is a 2D vector with one embedding
# for each word in the input sequence of words (input document).
# If you wish to connect a Dense layer directly to an Embedding layer, you
# must first flatten the 2D output matrix to a 1D vector using the Flatten layer.
# In[18]:
get_ipython().run_line_magic('matplotlib', 'inline')
import matplotlib.pyplot as plt
import numpy as np
from keras.models import Sequential
from keras.layers import Dense, Flatten
from keras.layers.embeddings import Embedding
from keras.preprocessing.text import one_hot
from keras.preprocessing.sequence import pad_sequences
# In[4]:
# Documents: five positive and five negative short reviews.
docs = [
    # Positive examples
    'Well done!',
    'Good work',
    'Great effort',
    'nice work',
    'Excellent!',
    # Negative examples
    'Weak',
    'Poor effort!',
    'not good',
    'poor work',
    'Could have done better.',
]

# Binary class labels (1 = positive, 0 = negative), aligned with `docs`.
labels = np.array([1, 1, 1, 1, 1, 0, 0, 0, 0, 0])


# In[7]:


# Integer-encode each document with Keras' hash-based one_hot encoding.
# A vocabulary size of 50 is much larger than needed, which lowers the
# probability of hash collisions between words.
vocab_size = 50
enc_docs = [one_hot(doc, vocab_size) for doc in docs]
display(enc_docs)


# In[10]:


# Pad every sequence to the length of the longest encoded document:
# Keras expects all inputs in a batch to share the same length.
max_length = max(len(seq) for seq in enc_docs)
padded_docs = pad_sequences(enc_docs, maxlen=max_length, padding='post')
display(padded_docs)
# In[11]:
# Define the Embedding layer as part of a tiny NN classifier.
def nn():
    """Return an uncompiled Keras model: Embedding -> Flatten -> Dense(sigmoid).

    Each of the `max_length` input tokens is embedded into an 8-dimensional
    vector; the flattened embeddings feed a single sigmoid output unit.
    """
    model = Sequential([
        Embedding(vocab_size, 8, input_length=max_length),
        Flatten(),
        Dense(1, activation='sigmoid'),
    ])
    return model


model = nn()
model.compile(optimizer='adam', loss='binary_crossentropy', metrics=['acc'])
# In[14]:
# Print the layer stack and parameter counts.
model.summary()


# In[16]:


# Fit the model on the padded, integer-encoded documents.
# NOTE(review): the dataset is only 10 documents, so 1000 epochs simply
# memorizes the training set — fine for this embedding demo.
history = model.fit(
    padded_docs, labels,
    epochs=1000,
    verbose=1
)


# In[19]:


# Overview fit history: training loss (left) and accuracy (right) per epoch.
fig, [ax_0, ax_1] = plt.subplots( 1, 2, figsize=(12, 6) )

ax_0.plot(history.history['loss'])
ax_0.set_title('loss')
ax_0.set_xlabel('epoch')
ax_0.set_ylabel('loss value')

# 'acc' key matches the metric name passed to model.compile above.
ax_1.plot(history.history['acc'])
ax_1.set_title('accuracy')
ax_1.set_xlabel('epoch')
ax_1.set_ylabel('accuracy value')

plt.show()


# In[20]:


# Evaluate on the (same) training data and report accuracy as a percentage.
loss, accuracy = model.evaluate(padded_docs, labels, verbose=0)
print('Accuracy: %f' % (accuracy*100))
| [
"keras.layers.embeddings.Embedding",
"matplotlib.pyplot.show",
"keras.preprocessing.sequence.pad_sequences",
"keras.layers.Flatten",
"keras.layers.Dense",
"numpy.array",
"keras.preprocessing.text.one_hot",
"keras.models.Sequential",
"matplotlib.pyplot.subplots"
] | [((2782, 2822), 'numpy.array', 'np.array', (['[1, 1, 1, 1, 1, 0, 0, 0, 0, 0]'], {}), '([1, 1, 1, 1, 1, 0, 0, 0, 0, 0])\n', (2790, 2822), True, 'import numpy as np\n'), ((3296, 3354), 'keras.preprocessing.sequence.pad_sequences', 'pad_sequences', (['enc_docs'], {'maxlen': 'max_length', 'padding': '"""post"""'}), "(enc_docs, maxlen=max_length, padding='post')\n", (3309, 3354), False, 'from keras.preprocessing.sequence import pad_sequences\n'), ((3934, 3969), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(1)', '(2)'], {'figsize': '(12, 6)'}), '(1, 2, figsize=(12, 6))\n', (3946, 3969), True, 'import matplotlib.pyplot as plt\n'), ((4208, 4218), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (4216, 4218), True, 'import matplotlib.pyplot as plt\n'), ((3028, 3052), 'keras.preprocessing.text.one_hot', 'one_hot', (['doc', 'vocab_size'], {}), '(doc, vocab_size)\n', (3035, 3052), False, 'from keras.preprocessing.text import one_hot\n'), ((3461, 3473), 'keras.models.Sequential', 'Sequential', ([], {}), '()\n', (3471, 3473), False, 'from keras.models import Sequential\n'), ((3494, 3543), 'keras.layers.embeddings.Embedding', 'Embedding', (['vocab_size', '(8)'], {'input_length': 'max_length'}), '(vocab_size, 8, input_length=max_length)\n', (3503, 3543), False, 'from keras.layers.embeddings import Embedding\n'), ((3561, 3570), 'keras.layers.Flatten', 'Flatten', ([], {}), '()\n', (3568, 3570), False, 'from keras.layers import Dense, Flatten\n'), ((3588, 3618), 'keras.layers.Dense', 'Dense', (['(1)'], {'activation': '"""sigmoid"""'}), "(1, activation='sigmoid')\n", (3593, 3618), False, 'from keras.layers import Dense, Flatten\n')] |
'''
Copyright 2022 Airbus SAS
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
'''
from copy import deepcopy
from gemseo.utils.compare_data_manager_tooling import compare_dict,\
delete_keys_from_dict
'''
mode: python; py-indent-offset: 4; tab-width: 4; coding: utf-8
'''
from sos_trades_core.execution_engine.execution_engine import ExecutionEngine
import unittest
import pandas as pd
import numpy as np
class TestParallelExecution(unittest.TestCase):
"""
Tests on parallel execution
"""
def setUp(self):
'''
Initialize third data needed for testing
'''
self.study_name = 'optim'
self.ns = f'{self.study_name}'
self.c_name = "SellarCoupling"
self.sc_name = "SellarOptimScenario"
self.repo = 'sos_trades_core.sos_processes.test'
self.proc_name = 'test_sellar_coupling'
dspace_dict = {'variable': ['x', 'z', 'y_1', 'y_2'],
'value': [1., [5., 2.], 1., 1.],
'lower_bnd': [0., [-10., 0.], -100., -100.],
'upper_bnd': [10., [10., 10.], 100., 100.],
'enable_variable': [True, True, True, True],
'activated_elem': [[True], [True, True], [True], [True]]}
self.dspace = pd.DataFrame(dspace_dict)
def test_01_parallel_execution_NR_2procs(self):
"""
1 proc
"""
n_proc = 1
exec_eng = ExecutionEngine(self.study_name)
factory = exec_eng.factory
builder = factory.get_builder_from_process(repo=self.repo,
mod_id='test_sellar_coupling')
exec_eng.factory.set_builders_to_coupling_builder(builder)
exec_eng.configure()
# Sellar inputs
local_dv = 10.
values_dict = {}
values_dict[f'{self.ns}.{self.c_name}.sub_mda_class'] = "MDANewtonRaphson"
values_dict[f'{self.ns}.{self.c_name}.x'] = 1.
values_dict[f'{self.ns}.{self.c_name}.y_1'] = 1.
values_dict[f'{self.ns}.{self.c_name}.y_2'] = 1.
values_dict[f'{self.ns}.{self.c_name}.z'] = np.array([1., 1.])
values_dict[f'{self.ns}.{self.c_name}.Sellar_Problem.local_dv'] = local_dv
values_dict[f'{self.ns}.{self.c_name}.n_processes'] = n_proc
exec_eng.load_study_from_input_dict(values_dict)
mda = exec_eng.root_process.sos_disciplines[0]
self.assertEqual(mda.n_processes, n_proc)
exec_eng.execute()
dm_dict_1 = deepcopy(exec_eng.get_anonimated_data_dict())
"""
2 procs
"""
n_proc = 2
exec_eng2 = ExecutionEngine(self.study_name)
factory = exec_eng2.factory
builder = factory.get_builder_from_process(repo=self.repo,
mod_id='test_sellar_coupling')
exec_eng2.factory.set_builders_to_coupling_builder(builder)
exec_eng2.configure()
# Sellar inputs
local_dv = 10.
values_dict = {}
values_dict[f'{self.ns}.{self.c_name}.sub_mda_class'] = "MDANewtonRaphson"
values_dict[f'{self.ns}.{self.c_name}.x'] = 1.
values_dict[f'{self.ns}.{self.c_name}.y_1'] = 1.
values_dict[f'{self.ns}.{self.c_name}.y_2'] = 1.
values_dict[f'{self.ns}.{self.c_name}.z'] = np.array([1., 1.])
values_dict[f'{self.ns}.{self.c_name}.Sellar_Problem.local_dv'] = local_dv
values_dict[f'{self.ns}.{self.c_name}.n_processes'] = n_proc
exec_eng2.load_study_from_input_dict(values_dict)
mda2 = exec_eng2.root_process.sos_disciplines[0]
self.assertEqual(mda2.n_processes, n_proc)
exec_eng2.execute()
dm_dict_2 = deepcopy(exec_eng2.get_anonimated_data_dict())
dict_error = {}
# to delete modelorigin and discipline dependencies which are not the
# same
delete_keys_from_dict(dm_dict_1)
delete_keys_from_dict(dm_dict_2)
compare_dict(dm_dict_1,
dm_dict_2, '', dict_error)
# The only different value is n_processes
self.assertDictEqual(dict_error, {
'.<study_ph>.SellarCoupling.n_processes.value': "1 and 2 don't match"})
for disc1, disc2 in zip(exec_eng.root_process.sos_disciplines[0].sos_disciplines, exec_eng2.root_process.sos_disciplines[0].sos_disciplines):
if disc1.jac is not None:
self.assertDictEqual(disc1.jac, disc2.jac)
def test_02_parallel_execution_NR_64procs(self):
"""
1 proc
"""
n_proc = 1
exec_eng = ExecutionEngine(self.study_name)
factory = exec_eng.factory
builder = factory.get_builder_from_process(repo=self.repo,
mod_id='test_sellar_coupling')
exec_eng.factory.set_builders_to_coupling_builder(builder)
exec_eng.configure()
# Sellar inputs
local_dv = 10.
values_dict = {}
values_dict[f'{self.ns}.{self.c_name}.sub_mda_class'] = "MDANewtonRaphson"
values_dict[f'{self.ns}.{self.c_name}.x'] = 1.
values_dict[f'{self.ns}.{self.c_name}.y_1'] = 1.
values_dict[f'{self.ns}.{self.c_name}.y_2'] = 1.
values_dict[f'{self.ns}.{self.c_name}.z'] = np.array([1., 1.])
values_dict[f'{self.ns}.{self.c_name}.Sellar_Problem.local_dv'] = local_dv
values_dict[f'{self.ns}.{self.c_name}.n_processes'] = n_proc
exec_eng.load_study_from_input_dict(values_dict)
mda = exec_eng.root_process.sos_disciplines[0]
self.assertEqual(mda.n_processes, n_proc)
exec_eng.execute()
dm_dict_1 = deepcopy(exec_eng.get_anonimated_data_dict())
"""
64 procs
"""
n_proc = 64
exec_eng2 = ExecutionEngine(self.study_name)
factory = exec_eng2.factory
builder = factory.get_builder_from_process(repo=self.repo,
mod_id='test_sellar_coupling')
exec_eng2.factory.set_builders_to_coupling_builder(builder)
exec_eng2.configure()
# Sellar inputs
local_dv = 10.
values_dict = {}
values_dict[f'{self.ns}.{self.c_name}.sub_mda_class'] = "MDANewtonRaphson"
values_dict[f'{self.ns}.{self.c_name}.x'] = 1.
values_dict[f'{self.ns}.{self.c_name}.y_1'] = 1.
values_dict[f'{self.ns}.{self.c_name}.y_2'] = 1.
values_dict[f'{self.ns}.{self.c_name}.z'] = np.array([1., 1.])
values_dict[f'{self.ns}.{self.c_name}.Sellar_Problem.local_dv'] = local_dv
values_dict[f'{self.ns}.{self.c_name}.n_processes'] = n_proc
exec_eng2.load_study_from_input_dict(values_dict)
mda2 = exec_eng2.root_process.sos_disciplines[0]
self.assertEqual(mda2.n_processes, n_proc)
exec_eng2.execute()
dm_dict_2 = deepcopy(exec_eng2.get_anonimated_data_dict())
dict_error = {}
# to delete modelorigin and discipline dependencies which are not the
# same
delete_keys_from_dict(dm_dict_1)
delete_keys_from_dict(dm_dict_2)
compare_dict(dm_dict_1,
dm_dict_2, '', dict_error)
# The only different value is n_processes
self.assertDictEqual(dict_error, {
'.<study_ph>.SellarCoupling.n_processes.value': "1 and 64 don't match"})
for disc1, disc2 in zip(exec_eng.root_process.sos_disciplines[0].sos_disciplines, exec_eng2.root_process.sos_disciplines[0].sos_disciplines):
if disc1.jac is not None:
self.assertDictEqual(disc1.jac, disc2.jac)
def test_03_parallel_execution_PureNR_2procs(self):
"""
1 proc
"""
n_proc = 1
exec_eng = ExecutionEngine(self.study_name)
factory = exec_eng.factory
builder = factory.get_builder_from_process(repo=self.repo,
mod_id='test_sellar_coupling')
exec_eng.factory.set_builders_to_coupling_builder(builder)
exec_eng.configure()
# Sellar inputs
local_dv = 10.
values_dict = {}
values_dict[f'{self.ns}.{self.c_name}.sub_mda_class'] = "PureNewtonRaphson"
values_dict[f'{self.ns}.{self.c_name}.x'] = 1.
values_dict[f'{self.ns}.{self.c_name}.y_1'] = 1.
values_dict[f'{self.ns}.{self.c_name}.y_2'] = 1.
values_dict[f'{self.ns}.{self.c_name}.z'] = np.array([1., 1.])
values_dict[f'{self.ns}.{self.c_name}.Sellar_Problem.local_dv'] = local_dv
values_dict[f'{self.ns}.{self.c_name}.n_processes'] = n_proc
exec_eng.load_study_from_input_dict(values_dict)
mda = exec_eng.root_process.sos_disciplines[0]
self.assertEqual(mda.n_processes, n_proc)
exec_eng.execute()
dm_dict_1 = deepcopy(exec_eng.get_anonimated_data_dict())
"""
2 procs
"""
n_proc = 2
exec_eng2 = ExecutionEngine(self.study_name)
factory = exec_eng2.factory
builder = factory.get_builder_from_process(repo=self.repo,
mod_id='test_sellar_coupling')
exec_eng2.factory.set_builders_to_coupling_builder(builder)
exec_eng2.configure()
# Sellar inputs
local_dv = 10.
values_dict = {}
values_dict[f'{self.ns}.{self.c_name}.sub_mda_class'] = "PureNewtonRaphson"
values_dict[f'{self.ns}.{self.c_name}.x'] = 1.
values_dict[f'{self.ns}.{self.c_name}.y_1'] = 1.
values_dict[f'{self.ns}.{self.c_name}.y_2'] = 1.
values_dict[f'{self.ns}.{self.c_name}.z'] = np.array([1., 1.])
values_dict[f'{self.ns}.{self.c_name}.Sellar_Problem.local_dv'] = local_dv
values_dict[f'{self.ns}.{self.c_name}.n_processes'] = n_proc
exec_eng2.load_study_from_input_dict(values_dict)
mda2 = exec_eng2.root_process.sos_disciplines[0]
self.assertEqual(mda2.n_processes, n_proc)
exec_eng2.execute()
dm_dict_2 = deepcopy(exec_eng2.get_anonimated_data_dict())
dict_error = {}
# to delete modelorigin and discipline dependencies which are not the
# same
delete_keys_from_dict(dm_dict_1)
delete_keys_from_dict(dm_dict_2)
compare_dict(dm_dict_1,
dm_dict_2, '', dict_error)
# The only different value is n_processes
self.assertDictEqual(dict_error, {
'.<study_ph>.SellarCoupling.n_processes.value': "1 and 2 don't match"})
for disc1, disc2 in zip(exec_eng.root_process.sos_disciplines[0].sos_disciplines, exec_eng2.root_process.sos_disciplines[0].sos_disciplines):
if disc1.jac is not None:
self.assertDictEqual(disc1.jac, disc2.jac)
def test_04_parallel_execution_pureNR_64procs(self):
"""
1 proc
"""
n_proc = 1
exec_eng = ExecutionEngine(self.study_name)
factory = exec_eng.factory
builder = factory.get_builder_from_process(repo=self.repo,
mod_id='test_sellar_coupling')
exec_eng.factory.set_builders_to_coupling_builder(builder)
exec_eng.configure()
# Sellar inputs
local_dv = 10.
values_dict = {}
values_dict[f'{self.ns}.{self.c_name}.sub_mda_class'] = "PureNewtonRaphson"
values_dict[f'{self.ns}.{self.c_name}.x'] = 1.
values_dict[f'{self.ns}.{self.c_name}.y_1'] = 1.
values_dict[f'{self.ns}.{self.c_name}.y_2'] = 1.
values_dict[f'{self.ns}.{self.c_name}.z'] = np.array([1., 1.])
values_dict[f'{self.ns}.{self.c_name}.Sellar_Problem.local_dv'] = local_dv
values_dict[f'{self.ns}.{self.c_name}.n_processes'] = n_proc
exec_eng.load_study_from_input_dict(values_dict)
mda = exec_eng.root_process.sos_disciplines[0]
self.assertEqual(mda.n_processes, n_proc)
exec_eng.execute()
dm_dict_1 = deepcopy(exec_eng.get_anonimated_data_dict())
"""
64 procs
"""
n_proc = 64
exec_eng2 = ExecutionEngine(self.study_name)
factory = exec_eng2.factory
builder = factory.get_builder_from_process(repo=self.repo,
mod_id='test_sellar_coupling')
exec_eng2.factory.set_builders_to_coupling_builder(builder)
exec_eng2.configure()
# Sellar inputs
local_dv = 10.
values_dict = {}
values_dict[f'{self.ns}.{self.c_name}.sub_mda_class'] = "PureNewtonRaphson"
values_dict[f'{self.ns}.{self.c_name}.x'] = 1.
values_dict[f'{self.ns}.{self.c_name}.y_1'] = 1.
values_dict[f'{self.ns}.{self.c_name}.y_2'] = 1.
values_dict[f'{self.ns}.{self.c_name}.z'] = np.array([1., 1.])
values_dict[f'{self.ns}.{self.c_name}.Sellar_Problem.local_dv'] = local_dv
values_dict[f'{self.ns}.{self.c_name}.n_processes'] = n_proc
exec_eng2.load_study_from_input_dict(values_dict)
mda2 = exec_eng2.root_process.sos_disciplines[0]
self.assertEqual(mda2.n_processes, n_proc)
exec_eng2.execute()
dm_dict_2 = deepcopy(exec_eng2.get_anonimated_data_dict())
dict_error = {}
# to delete modelorigin and discipline dependencies which are not the
# same
delete_keys_from_dict(dm_dict_1)
delete_keys_from_dict(dm_dict_2)
compare_dict(dm_dict_1,
dm_dict_2, '', dict_error)
# The only different value is n_processes
self.assertDictEqual(dict_error, {
'.<study_ph>.SellarCoupling.n_processes.value': "1 and 64 don't match"})
for disc1, disc2 in zip(exec_eng.root_process.sos_disciplines[0].sos_disciplines, exec_eng2.root_process.sos_disciplines[0].sos_disciplines):
if disc1.jac is not None:
self.assertDictEqual(disc1.jac, disc2.jac)
def test_05_parallel_execution_GSPureNR_2procs(self):
"""
1 proc
"""
n_proc = 1
exec_eng = ExecutionEngine(self.study_name)
factory = exec_eng.factory
builder = factory.get_builder_from_process(repo=self.repo,
mod_id='test_sellar_coupling')
exec_eng.factory.set_builders_to_coupling_builder(builder)
exec_eng.configure()
# Sellar inputs
local_dv = 10.
values_dict = {}
values_dict[f'{self.ns}.{self.c_name}.sub_mda_class'] = "GSPureNewtonMDA"
values_dict[f'{self.ns}.{self.c_name}.x'] = 1.
values_dict[f'{self.ns}.{self.c_name}.y_1'] = 1.
values_dict[f'{self.ns}.{self.c_name}.y_2'] = 1.
values_dict[f'{self.ns}.{self.c_name}.z'] = np.array([1., 1.])
values_dict[f'{self.ns}.{self.c_name}.Sellar_Problem.local_dv'] = local_dv
values_dict[f'{self.ns}.{self.c_name}.n_processes'] = n_proc
exec_eng.load_study_from_input_dict(values_dict)
mda = exec_eng.root_process.sos_disciplines[0]
self.assertEqual(mda.n_processes, n_proc)
exec_eng.execute()
dm_dict_1 = deepcopy(exec_eng.get_anonimated_data_dict())
"""
2 procs
"""
n_proc = 2
exec_eng2 = ExecutionEngine(self.study_name)
factory = exec_eng2.factory
builder = factory.get_builder_from_process(repo=self.repo,
mod_id='test_sellar_coupling')
exec_eng2.factory.set_builders_to_coupling_builder(builder)
exec_eng2.configure()
# Sellar inputs
local_dv = 10.
values_dict = {}
values_dict[f'{self.ns}.{self.c_name}.sub_mda_class'] = "GSPureNewtonMDA"
values_dict[f'{self.ns}.{self.c_name}.x'] = 1.
values_dict[f'{self.ns}.{self.c_name}.y_1'] = 1.
values_dict[f'{self.ns}.{self.c_name}.y_2'] = 1.
values_dict[f'{self.ns}.{self.c_name}.z'] = np.array([1., 1.])
values_dict[f'{self.ns}.{self.c_name}.Sellar_Problem.local_dv'] = local_dv
values_dict[f'{self.ns}.{self.c_name}.n_processes'] = n_proc
exec_eng2.load_study_from_input_dict(values_dict)
mda2 = exec_eng2.root_process.sos_disciplines[0]
self.assertEqual(mda2.n_processes, n_proc)
exec_eng2.execute()
dm_dict_2 = deepcopy(exec_eng2.get_anonimated_data_dict())
dict_error = {}
# to delete modelorigin and discipline dependencies which are not the
# same
delete_keys_from_dict(dm_dict_1)
delete_keys_from_dict(dm_dict_2)
compare_dict(dm_dict_1,
dm_dict_2, '', dict_error)
# The only different value is n_processes
self.assertDictEqual(dict_error, {
'.<study_ph>.SellarCoupling.n_processes.value': "1 and 2 don't match"})
for disc1, disc2 in zip(exec_eng.root_process.sos_disciplines[0].sos_disciplines, exec_eng2.root_process.sos_disciplines[0].sos_disciplines):
if disc1.jac is not None:
self.assertDictEqual(disc1.jac, disc2.jac)
if __name__ == '__main__':
    # Ad-hoc manual run of the first parallel-execution check.
    test_case = TestParallelExecution()
    test_case.setUp()
    test_case.test_01_parallel_execution_NR_2procs()
| [
"pandas.DataFrame",
"gemseo.utils.compare_data_manager_tooling.compare_dict",
"gemseo.utils.compare_data_manager_tooling.delete_keys_from_dict",
"numpy.array",
"sos_trades_core.execution_engine.execution_engine.ExecutionEngine"
] | [((1814, 1839), 'pandas.DataFrame', 'pd.DataFrame', (['dspace_dict'], {}), '(dspace_dict)\n', (1826, 1839), True, 'import pandas as pd\n'), ((1977, 2009), 'sos_trades_core.execution_engine.execution_engine.ExecutionEngine', 'ExecutionEngine', (['self.study_name'], {}), '(self.study_name)\n', (1992, 2009), False, 'from sos_trades_core.execution_engine.execution_engine import ExecutionEngine\n'), ((2687, 2707), 'numpy.array', 'np.array', (['[1.0, 1.0]'], {}), '([1.0, 1.0])\n', (2695, 2707), True, 'import numpy as np\n'), ((3210, 3242), 'sos_trades_core.execution_engine.execution_engine.ExecutionEngine', 'ExecutionEngine', (['self.study_name'], {}), '(self.study_name)\n', (3225, 3242), False, 'from sos_trades_core.execution_engine.execution_engine import ExecutionEngine\n'), ((3923, 3943), 'numpy.array', 'np.array', (['[1.0, 1.0]'], {}), '([1.0, 1.0])\n', (3931, 3943), True, 'import numpy as np\n'), ((4495, 4527), 'gemseo.utils.compare_data_manager_tooling.delete_keys_from_dict', 'delete_keys_from_dict', (['dm_dict_1'], {}), '(dm_dict_1)\n', (4516, 4527), False, 'from gemseo.utils.compare_data_manager_tooling import compare_dict, delete_keys_from_dict\n'), ((4537, 4569), 'gemseo.utils.compare_data_manager_tooling.delete_keys_from_dict', 'delete_keys_from_dict', (['dm_dict_2'], {}), '(dm_dict_2)\n', (4558, 4569), False, 'from gemseo.utils.compare_data_manager_tooling import compare_dict, delete_keys_from_dict\n'), ((4579, 4629), 'gemseo.utils.compare_data_manager_tooling.compare_dict', 'compare_dict', (['dm_dict_1', 'dm_dict_2', '""""""', 'dict_error'], {}), "(dm_dict_1, dm_dict_2, '', dict_error)\n", (4591, 4629), False, 'from gemseo.utils.compare_data_manager_tooling import compare_dict, delete_keys_from_dict\n'), ((5239, 5271), 'sos_trades_core.execution_engine.execution_engine.ExecutionEngine', 'ExecutionEngine', (['self.study_name'], {}), '(self.study_name)\n', (5254, 5271), False, 'from sos_trades_core.execution_engine.execution_engine import 
ExecutionEngine\n'), ((5949, 5969), 'numpy.array', 'np.array', (['[1.0, 1.0]'], {}), '([1.0, 1.0])\n', (5957, 5969), True, 'import numpy as np\n'), ((6472, 6504), 'sos_trades_core.execution_engine.execution_engine.ExecutionEngine', 'ExecutionEngine', (['self.study_name'], {}), '(self.study_name)\n', (6487, 6504), False, 'from sos_trades_core.execution_engine.execution_engine import ExecutionEngine\n'), ((7185, 7205), 'numpy.array', 'np.array', (['[1.0, 1.0]'], {}), '([1.0, 1.0])\n', (7193, 7205), True, 'import numpy as np\n'), ((7757, 7789), 'gemseo.utils.compare_data_manager_tooling.delete_keys_from_dict', 'delete_keys_from_dict', (['dm_dict_1'], {}), '(dm_dict_1)\n', (7778, 7789), False, 'from gemseo.utils.compare_data_manager_tooling import compare_dict, delete_keys_from_dict\n'), ((7799, 7831), 'gemseo.utils.compare_data_manager_tooling.delete_keys_from_dict', 'delete_keys_from_dict', (['dm_dict_2'], {}), '(dm_dict_2)\n', (7820, 7831), False, 'from gemseo.utils.compare_data_manager_tooling import compare_dict, delete_keys_from_dict\n'), ((7841, 7891), 'gemseo.utils.compare_data_manager_tooling.compare_dict', 'compare_dict', (['dm_dict_1', 'dm_dict_2', '""""""', 'dict_error'], {}), "(dm_dict_1, dm_dict_2, '', dict_error)\n", (7853, 7891), False, 'from gemseo.utils.compare_data_manager_tooling import compare_dict, delete_keys_from_dict\n'), ((8505, 8537), 'sos_trades_core.execution_engine.execution_engine.ExecutionEngine', 'ExecutionEngine', (['self.study_name'], {}), '(self.study_name)\n', (8520, 8537), False, 'from sos_trades_core.execution_engine.execution_engine import ExecutionEngine\n'), ((9216, 9236), 'numpy.array', 'np.array', (['[1.0, 1.0]'], {}), '([1.0, 1.0])\n', (9224, 9236), True, 'import numpy as np\n'), ((9739, 9771), 'sos_trades_core.execution_engine.execution_engine.ExecutionEngine', 'ExecutionEngine', (['self.study_name'], {}), '(self.study_name)\n', (9754, 9771), False, 'from sos_trades_core.execution_engine.execution_engine import 
ExecutionEngine\n'), ((10453, 10473), 'numpy.array', 'np.array', (['[1.0, 1.0]'], {}), '([1.0, 1.0])\n', (10461, 10473), True, 'import numpy as np\n'), ((11025, 11057), 'gemseo.utils.compare_data_manager_tooling.delete_keys_from_dict', 'delete_keys_from_dict', (['dm_dict_1'], {}), '(dm_dict_1)\n', (11046, 11057), False, 'from gemseo.utils.compare_data_manager_tooling import compare_dict, delete_keys_from_dict\n'), ((11067, 11099), 'gemseo.utils.compare_data_manager_tooling.delete_keys_from_dict', 'delete_keys_from_dict', (['dm_dict_2'], {}), '(dm_dict_2)\n', (11088, 11099), False, 'from gemseo.utils.compare_data_manager_tooling import compare_dict, delete_keys_from_dict\n'), ((11109, 11159), 'gemseo.utils.compare_data_manager_tooling.compare_dict', 'compare_dict', (['dm_dict_1', 'dm_dict_2', '""""""', 'dict_error'], {}), "(dm_dict_1, dm_dict_2, '', dict_error)\n", (11121, 11159), False, 'from gemseo.utils.compare_data_manager_tooling import compare_dict, delete_keys_from_dict\n'), ((11773, 11805), 'sos_trades_core.execution_engine.execution_engine.ExecutionEngine', 'ExecutionEngine', (['self.study_name'], {}), '(self.study_name)\n', (11788, 11805), False, 'from sos_trades_core.execution_engine.execution_engine import ExecutionEngine\n'), ((12484, 12504), 'numpy.array', 'np.array', (['[1.0, 1.0]'], {}), '([1.0, 1.0])\n', (12492, 12504), True, 'import numpy as np\n'), ((13007, 13039), 'sos_trades_core.execution_engine.execution_engine.ExecutionEngine', 'ExecutionEngine', (['self.study_name'], {}), '(self.study_name)\n', (13022, 13039), False, 'from sos_trades_core.execution_engine.execution_engine import ExecutionEngine\n'), ((13721, 13741), 'numpy.array', 'np.array', (['[1.0, 1.0]'], {}), '([1.0, 1.0])\n', (13729, 13741), True, 'import numpy as np\n'), ((14293, 14325), 'gemseo.utils.compare_data_manager_tooling.delete_keys_from_dict', 'delete_keys_from_dict', (['dm_dict_1'], {}), '(dm_dict_1)\n', (14314, 14325), False, 'from gemseo.utils.compare_data_manager_tooling 
import compare_dict, delete_keys_from_dict\n'), ((14335, 14367), 'gemseo.utils.compare_data_manager_tooling.delete_keys_from_dict', 'delete_keys_from_dict', (['dm_dict_2'], {}), '(dm_dict_2)\n', (14356, 14367), False, 'from gemseo.utils.compare_data_manager_tooling import compare_dict, delete_keys_from_dict\n'), ((14377, 14427), 'gemseo.utils.compare_data_manager_tooling.compare_dict', 'compare_dict', (['dm_dict_1', 'dm_dict_2', '""""""', 'dict_error'], {}), "(dm_dict_1, dm_dict_2, '', dict_error)\n", (14389, 14427), False, 'from gemseo.utils.compare_data_manager_tooling import compare_dict, delete_keys_from_dict\n'), ((15043, 15075), 'sos_trades_core.execution_engine.execution_engine.ExecutionEngine', 'ExecutionEngine', (['self.study_name'], {}), '(self.study_name)\n', (15058, 15075), False, 'from sos_trades_core.execution_engine.execution_engine import ExecutionEngine\n'), ((15752, 15772), 'numpy.array', 'np.array', (['[1.0, 1.0]'], {}), '([1.0, 1.0])\n', (15760, 15772), True, 'import numpy as np\n'), ((16275, 16307), 'sos_trades_core.execution_engine.execution_engine.ExecutionEngine', 'ExecutionEngine', (['self.study_name'], {}), '(self.study_name)\n', (16290, 16307), False, 'from sos_trades_core.execution_engine.execution_engine import ExecutionEngine\n'), ((16987, 17007), 'numpy.array', 'np.array', (['[1.0, 1.0]'], {}), '([1.0, 1.0])\n', (16995, 17007), True, 'import numpy as np\n'), ((17559, 17591), 'gemseo.utils.compare_data_manager_tooling.delete_keys_from_dict', 'delete_keys_from_dict', (['dm_dict_1'], {}), '(dm_dict_1)\n', (17580, 17591), False, 'from gemseo.utils.compare_data_manager_tooling import compare_dict, delete_keys_from_dict\n'), ((17601, 17633), 'gemseo.utils.compare_data_manager_tooling.delete_keys_from_dict', 'delete_keys_from_dict', (['dm_dict_2'], {}), '(dm_dict_2)\n', (17622, 17633), False, 'from gemseo.utils.compare_data_manager_tooling import compare_dict, delete_keys_from_dict\n'), ((17643, 17693), 
'gemseo.utils.compare_data_manager_tooling.compare_dict', 'compare_dict', (['dm_dict_1', 'dm_dict_2', '""""""', 'dict_error'], {}), "(dm_dict_1, dm_dict_2, '', dict_error)\n", (17655, 17693), False, 'from gemseo.utils.compare_data_manager_tooling import compare_dict, delete_keys_from_dict\n')] |
from datetime import datetime
from airflow import DAG
from airflow.operators.dummy_operator import DummyOperator
from airflow.contrib.sensors.file_sensor import FileSensor
with DAG(
dag_id="file_sensor_consume_new_data",
start_date=datetime(2020, 12, 1),
schedule_interval="0 * * * *"
) as dag:
# task 1
get_new_data = FileSensor(task_id="get_new_data",
filepath="../shop123/{{ ds_nodash }}/${hour}/data.json")
# task 2
parse_file = DummyOperator(task_id="parse_file")
# task 3
check_is_it_ne_customer = DummyOperator(task_id="check_is_it_ne_customer")
# task 4
create_new_customer = DummyOperator(task_id="create_new_customer")
# task 5
update_existed_customer = DummyOperator(task_id="update_existed_customer")
# task 6
get_new_data >> parse_file >> check_is_it_ne_customer >> [create_new_customer, update_existed_customer] | [
"airflow.contrib.sensors.file_sensor.FileSensor",
"airflow.operators.dummy_operator.DummyOperator",
"datetime.datetime"
] | [((351, 447), 'airflow.contrib.sensors.file_sensor.FileSensor', 'FileSensor', ([], {'task_id': '"""get_new_data"""', 'filepath': '"""../shop123/{{ ds_nodash }}/${hour}/data.json"""'}), "(task_id='get_new_data', filepath=\n '../shop123/{{ ds_nodash }}/${hour}/data.json')\n", (361, 447), False, 'from airflow.contrib.sensors.file_sensor import FileSensor\n'), ((509, 544), 'airflow.operators.dummy_operator.DummyOperator', 'DummyOperator', ([], {'task_id': '"""parse_file"""'}), "(task_id='parse_file')\n", (522, 544), False, 'from airflow.operators.dummy_operator import DummyOperator\n'), ((593, 641), 'airflow.operators.dummy_operator.DummyOperator', 'DummyOperator', ([], {'task_id': '"""check_is_it_ne_customer"""'}), "(task_id='check_is_it_ne_customer')\n", (606, 641), False, 'from airflow.operators.dummy_operator import DummyOperator\n'), ((686, 730), 'airflow.operators.dummy_operator.DummyOperator', 'DummyOperator', ([], {'task_id': '"""create_new_customer"""'}), "(task_id='create_new_customer')\n", (699, 730), False, 'from airflow.operators.dummy_operator import DummyOperator\n'), ((779, 827), 'airflow.operators.dummy_operator.DummyOperator', 'DummyOperator', ([], {'task_id': '"""update_existed_customer"""'}), "(task_id='update_existed_customer')\n", (792, 827), False, 'from airflow.operators.dummy_operator import DummyOperator\n'), ((243, 264), 'datetime.datetime', 'datetime', (['(2020)', '(12)', '(1)'], {}), '(2020, 12, 1)\n', (251, 264), False, 'from datetime import datetime\n')] |
import smtplib
import os
from email.mime.text import MIMEText
from email.mime.image import MIMEImage
from email.mime.multipart import MIMEMultipart
def report(text, img=None):
user = os.environ['EMAIL_USER']
password = os.environ['EMAIL_PASSWORD']
msg = MIMEMultipart()
msg['From'] = user
msg['To'] = user
msg['Subject'] = 'Behavioral Neuroevolution Reporting'
text = MIMEText(''.join(text))
msg.attach(text)
if img:
img_data = open(img, 'rb').read()
image = MIMEImage(img_data, name=os.path.basename(img))
msg.attach(image)
try:
with smtplib.SMTP('smtp.office365.com', 587, timeout=10) as server_ssl:
server_ssl.ehlo()
server_ssl.starttls()
server_ssl.login(user, password)
server_ssl.sendmail(msg['From'], msg['To'], msg.as_string())
except Exception as ex:
print(ex.__class__.__name__ + ": " + 'Email Not Sent!')
| [
"email.mime.multipart.MIMEMultipart",
"smtplib.SMTP",
"os.path.basename"
] | [((269, 284), 'email.mime.multipart.MIMEMultipart', 'MIMEMultipart', ([], {}), '()\n', (282, 284), False, 'from email.mime.multipart import MIMEMultipart\n'), ((613, 664), 'smtplib.SMTP', 'smtplib.SMTP', (['"""smtp.office365.com"""', '(587)'], {'timeout': '(10)'}), "('smtp.office365.com', 587, timeout=10)\n", (625, 664), False, 'import smtplib\n'), ((541, 562), 'os.path.basename', 'os.path.basename', (['img'], {}), '(img)\n', (557, 562), False, 'import os\n')] |
#!/usr/bin/env python3
# Copyright (c) 2020-2021, <NAME> <<EMAIL>>
# Copyright (c) 2021, <NAME> <<EMAIL>>
# Copyright (c) 2021, <NAME> <<EMAIL>>
# Copyright (c) 2021, <NAME> <<EMAIL>>
#
# SPDX-License-Identifier: MIT
from __future__ import annotations
import concurrent.futures
import datetime
import glob
import json
import multiprocessing
import os
import resource
import signal
import subprocess
import sys
import threading
import traceback
from argparse import ArgumentParser
from dataclasses import dataclass
from enum import Enum
from pathlib import Path
from typing import Any, Callable, Optional
from tqdm import tqdm
class TestResult(str, Enum):
PASSED = "PASSED"
FAILED = "FAILED"
SKIPPED = "SKIPPED"
METADATA_ERROR = "METADATA_ERROR"
HARNESS_ERROR = "HARNESS_ERROR"
TIMEOUT_ERROR = "TIMEOUT_ERROR"
PROCESS_ERROR = "PROCESS_ERROR"
RUNNER_EXCEPTION = "RUNNER_EXCEPTION"
TODO_ERROR = "TODO_ERROR"
@dataclass
class TestRun:
file: Path
result: TestResult
output: str | None
exit_code: int | None
strict_mode: bool | None
EMOJIS = {
TestResult.PASSED: "✅",
TestResult.FAILED: "❌",
TestResult.SKIPPED: "⚠️",
TestResult.METADATA_ERROR: "📄",
TestResult.HARNESS_ERROR: "⚙️",
TestResult.TIMEOUT_ERROR: "💀",
TestResult.PROCESS_ERROR: "💥️",
TestResult.RUNNER_EXCEPTION: "🐍",
TestResult.TODO_ERROR: "📝",
}
NON_FAIL_RESULTS = [TestResult.PASSED, TestResult.SKIPPED]
CPU_COUNT = multiprocessing.cpu_count()
BATCH_SIZE = 250
progress_mutex = threading.Lock()
def run_streaming_script(
libjs_test262_runner: Path,
test262_root: Path,
use_bytecode: bool,
parse_only: bool,
timeout: int,
memory_limit: int,
test_file_paths: list[Path],
) -> subprocess.CompletedProcess:
def limit_memory():
resource.setrlimit(
resource.RLIMIT_AS, (memory_limit * 1024 * 1024, resource.RLIM_INFINITY)
)
command = [
str(libjs_test262_runner),
*(["-b"] if use_bytecode else []),
*(["--parse-only"] if parse_only else []),
"--harness-location",
str((test262_root / "harness").resolve()),
"-t",
str(timeout),
]
return subprocess.run(
command,
input="\n".join(str(path) for path in test_file_paths),
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
check=True,
text=True,
preexec_fn=limit_memory,
errors="ignore", # strip invalid utf8 code points instead of throwing (to allow for invalid utf-8 tests)
)
def run_tests(
libjs_test262_runner: Path,
test262_root: Path,
test_file_paths: list[Path],
use_bytecode: bool,
parse_only: bool,
timeout: int,
memory_limit: int,
on_progress_change: Callable[[int], None] | None,
forward_stderr: Callable[[str], None] | None,
) -> list[TestRun]:
current_test = 0
results = []
def add_result(
result: TestResult,
output: str = "",
exit_code: int = 0,
strict_mode: bool = False,
) -> None:
results.append(
TestRun(
test_file_paths[current_test], result, output, exit_code, strict_mode
)
)
while current_test < len(test_file_paths):
start_count = current_test
process_failed = False
try:
process_result: Any = run_streaming_script(
libjs_test262_runner,
test262_root,
use_bytecode,
parse_only,
timeout,
memory_limit,
test_file_paths[current_test : current_test + BATCH_SIZE],
)
except subprocess.CalledProcessError as e:
process_failed = True
process_result = e
test_results = [
part.strip() for part in process_result.stdout.strip().split("\0")
]
have_stopping_result = False
while test_results:
if not test_results[0].startswith("RESULT "):
break
test_result_string = test_results.pop(0).removeprefix("RESULT ")
try:
test_result = json.loads(test_result_string, strict=False)
except json.decoder.JSONDecodeError:
raise Exception(f"Could not parse JSON from '{test_result_string}'")
file_name = Path(test_result["test"])
if file_name != test_file_paths[current_test]:
raise Exception(
f"Unexpected result from test {file_name} but expected result from {test_file_paths[current_test]}"
)
strict_mode = test_result.get("strict_mode", False)
test_result_state = TestResult.FAILED
result = test_result["result"]
if result == "harness_error":
test_result_state = TestResult.HARNESS_ERROR
elif result == "metadata_error":
test_result_state = TestResult.METADATA_ERROR
elif result == "timeout":
have_stopping_result = True
test_result_state = TestResult.TIMEOUT_ERROR
elif result == "assert_fail":
have_stopping_result = True
test_result_state = TestResult.PROCESS_ERROR
elif result == "passed":
test_result_state = TestResult.PASSED
elif result == "skipped":
test_result_state = TestResult.SKIPPED
elif result == "todo_error":
test_result_state = TestResult.TODO_ERROR
elif result != "failed":
raise Exception(f"Unknown error code: {result} from {test_result}")
if strict_output := test_result.get("strict_output"):
output = strict_output
elif non_strict_output := test_result.get("output"):
output = non_strict_output
else:
output = json.dumps(test_result, indent=2, ensure_ascii=False)
add_result(test_result_state, output, strict_mode=strict_mode)
current_test += 1
if process_failed and not have_stopping_result:
if forward_stderr is not None and process_result.stderr.strip() != "":
forward_stderr(
f"Last tests ran: {test_file_paths[current_test]} before failing with stderr output:\n\n"
+ process_result.stderr
)
add_result(
TestResult.PROCESS_ERROR,
"\n".join(test_results),
process_result.returncode,
)
current_test += 1
elif forward_stderr is not None and process_result.stderr.strip() != "":
forward_stderr(
"Process did not fail but still there is stderr output:\n\n"
+ process_result.stderr
)
if on_progress_change is not None:
on_progress_change(current_test - start_count)
return results
class Runner:
def __init__(
self,
libjs_test262_runner: Path,
test262_root: Path,
concurrency: int,
timeout: int,
memory_limit: int,
silent: bool = False,
verbose: bool = False,
use_bytecode: bool = False,
track_per_file: bool = False,
fail_only: bool = False,
parse_only: bool = False,
forward_stderr: bool = False,
summary: bool = False,
) -> None:
self.libjs_test262_runner = libjs_test262_runner
self.test262_root = test262_root
self.concurrency = concurrency
self.timeout = timeout
self.memory_limit = memory_limit
self.silent = silent
self.verbose = verbose
self.use_bytecode = use_bytecode
self.track_per_file = track_per_file
self.fail_only = fail_only
self.files: list[Path] = []
self.directory_result_map: dict[str, dict] = {}
self.file_result_map: dict[str, str] = {}
self.total_count = 0
self.duration = datetime.timedelta()
self.parse_only = parse_only
self.update_function: Callable[[int], None] | None = None
self.print_output: Callable[[Optional[Any]], Any] = print
self.forward_stderr_function: Callable[[str], None] | None
if forward_stderr:
if self.silent:
self.forward_stderr_function = lambda message: print(
message, file=sys.stderr
)
else:
self.forward_stderr_function = lambda message: tqdm.write(
message, file=sys.stderr
)
else:
self.forward_stderr_function = None
self.summary = summary
def log(self, message: str) -> None:
if not self.silent:
self.print_output(message)
def find_tests(self, pattern: str, ignore: str) -> None:
if Path(pattern).resolve().is_file():
self.files = [Path(pattern).resolve()]
else:
ignored_files = set(
glob.iglob(str(self.test262_root / ignore), recursive=True)
)
for path in glob.iglob(str(self.test262_root / pattern), recursive=True):
found_path = Path(path)
if (
found_path.is_dir()
or "_FIXTURE" in found_path.stem
or not found_path.exists()
or path in ignored_files
):
continue
self.files.append(found_path)
self.files.sort()
self.total_count = len(self.files)
self.log(f"Found {self.total_count}.")
if self.total_count == 0:
return
if not self.summary:
self.build_directory_result_map()
else:
root_folder = self.files[0].relative_to(self.test262_root).parent.parts[0]
self.directory_result_map = {
root_folder: {
"count": self.total_count,
"results": {result: 0 for result in TestResult},
"children": {},
}
}
def build_directory_result_map(self) -> None:
for file in self.files:
directory = file.relative_to(self.test262_root).parent
counter = self.directory_result_map
for segment in directory.parts:
if not segment in counter:
counter[segment] = {"count": 1, "results": {}, "children": {}}
for result in TestResult:
counter[segment]["results"][result] = 0
else:
counter[segment]["count"] += 1
counter = counter[segment]["children"]
def count_result(self, test_run: TestRun) -> None:
relative_file = test_run.file.relative_to(self.test262_root)
if self.track_per_file:
self.file_result_map[str(relative_file)] = test_run.result.name
directory = relative_file.parent
counter = self.directory_result_map
if self.summary:
counter[directory.parts[0]]["results"][test_run.result] += 1
return
for segment in directory.parts:
counter[segment]["results"][test_run.result] += 1
counter = counter[segment]["children"]
def report(self) -> None:
def print_tree(tree, path, level):
results = "[ "
for k, v in tree["results"].items():
if v > 0:
results += f"{EMOJIS[k]} {v:<5} "
results += "]"
count = tree["count"]
passed = tree["results"][TestResult.PASSED]
percentage = (passed / count) * 100
pad = " " * (80 - len(path))
self.print_output(
f"{path}{pad}{passed:>5}/{count:<5} ({percentage:6.2f}%) {results} "
)
if passed > 0:
for k, v in tree["children"].items():
print_tree(v, path + "/" + k, level + 1)
for k, v in self.directory_result_map.items():
print_tree(v, k, 0)
def process_list(self, files: list[Path]) -> list[TestRun]:
if not files:
return []
try:
return run_tests(
self.libjs_test262_runner,
self.test262_root,
files,
use_bytecode=self.use_bytecode,
parse_only=self.parse_only,
timeout=self.timeout,
memory_limit=self.memory_limit,
on_progress_change=self.update_function,
forward_stderr=self.forward_stderr_function,
)
except Exception as e:
return [
TestRun(
file,
result=TestResult.RUNNER_EXCEPTION
if i == 0
else TestResult.SKIPPED,
output=traceback.format_exc() if i == 0 else "",
exit_code=None,
strict_mode=None,
)
for i, file in enumerate(files)
]
def run(self) -> None:
if not self.files:
self.log("No tests to run.")
return
workers = self.concurrency
amount_of_work_lists = workers
if self.total_count > workers * workers * 4:
amount_of_work_lists = workers * 4
amount_of_work_lists = min(amount_of_work_lists, self.total_count)
work_lists: list[list[Path]] = [[] for _ in range(amount_of_work_lists)]
for index, test_path in enumerate(self.files):
work_lists[index % amount_of_work_lists].append(test_path)
if not self.silent:
progressbar = tqdm(
total=self.total_count, mininterval=1, unit="tests", smoothing=0.1
)
def update_progress(value):
progress_mutex.acquire()
try:
progressbar.update(value)
finally:
progress_mutex.release()
self.update_function = update_progress
def write_output(message: Any):
tqdm.write(message)
self.print_output = write_output
start = datetime.datetime.now()
with concurrent.futures.ThreadPoolExecutor(max_workers=workers) as executor:
futures = [
executor.submit(self.process_list, file_list)
for file_list in work_lists
]
for future in concurrent.futures.as_completed(futures):
test_runs = future.result()
for test_run in test_runs:
self.count_result(test_run)
if self.verbose or (
self.fail_only and test_run.result not in NON_FAIL_RESULTS
):
self.print_output(
f"{EMOJIS[test_run.result]} {test_run.file}"
f"{' (strict mode)' if test_run.strict_mode else ''}"
)
if test_run.output:
self.print_output("")
self.print_output(test_run.output)
self.print_output("")
if test_run.exit_code:
signalnum = test_run.exit_code * -1
if not test_run.output:
self.print_output("")
self.print_output(
f"{signal.strsignal(signalnum)}: {signalnum}"
)
self.print_output("")
if not self.silent:
progressbar.close()
end = datetime.datetime.now()
self.duration = end - start
self.log(f"Finished running tests in {self.duration}.")
def main() -> None:
parser = ArgumentParser(
description="Run the test262 ECMAScript test suite with SerenityOS's LibJS",
epilog=", ".join(f"{EMOJIS[result]} = {result.value}" for result in TestResult),
)
parser.add_argument(
"-j",
"--libjs-test262-runner",
required=True,
metavar="PATH",
help="path to the 'libjs-test262-runner' binary",
)
parser.add_argument(
"-b",
"--use-bytecode",
action="store_true",
help="Use the bytecode interpreter to run the tests",
)
parser.add_argument(
"-t",
"--test262-root",
required=True,
metavar="PATH",
help="path to the 'test262' directory",
)
parser.add_argument(
"-p",
"--pattern",
default="test/**/*.js",
help="glob pattern used for test file searching (defaults to test/**/*.js)",
)
parser.add_argument(
"-c",
"--concurrency",
default=CPU_COUNT,
type=int,
help="number of concurrent workers (defaults to number of CPU cores)",
)
parser.add_argument(
"--timeout",
default=10,
type=int,
help="timeout for each test run in seconds (defaults to 10)",
)
parser.add_argument(
"--memory-limit",
default=512,
type=int,
help="memory limit for each test run in megabytes (defaults to 512)",
)
parser.add_argument(
"--json", action="store_true", help="print the test results as JSON"
)
parser.add_argument(
"--per-file",
default=None,
type=str,
metavar="PATH",
help="output per-file results to file",
)
logging_group = parser.add_mutually_exclusive_group()
logging_group.add_argument(
"-s",
"--silent",
action="store_true",
help="don't print any progress information",
)
logging_group.add_argument(
"-v", "--verbose", action="store_true", help="print output of test runs"
)
parser.add_argument(
"-f", "--fail-only", action="store_true", help="only show failed tests"
)
parser.add_argument(
"--parse-only",
action="store_true",
help="only parse the test files and fail/pass based on that",
)
parser.add_argument(
"--ignore",
default="",
help="ignore any tests matching the glob",
)
parser.add_argument(
"--forward-stderr",
action="store_true",
help="forward all stderr output to the stderr of the script",
)
parser.add_argument(
"--summary",
action="store_true",
help="only show the top level results",
)
args = parser.parse_args()
runner = Runner(
Path(args.libjs_test262_runner).resolve(),
Path(args.test262_root).resolve(),
args.concurrency,
args.timeout,
args.memory_limit,
args.silent,
args.verbose,
args.use_bytecode,
args.per_file is not None,
args.fail_only,
args.parse_only,
args.forward_stderr,
args.summary,
)
runner.find_tests(args.pattern, args.ignore)
runner.run()
if args.json:
data = {
"duration": runner.duration.total_seconds(),
"results": runner.directory_result_map,
}
print(json.dumps(data))
else:
runner.report()
if args.per_file is not None:
data = {
"duration": runner.duration.total_seconds(),
"results": runner.file_result_map,
}
with open(args.per_file, "w") as per_file_file:
json.dump(data, per_file_file)
if __name__ == "__main__":
os.setpgrp()
try:
main()
except KeyboardInterrupt:
os.killpg(0, signal.SIGKILL)
| [
"json.dump",
"tqdm.tqdm",
"tqdm.tqdm.write",
"json.loads",
"os.killpg",
"resource.setrlimit",
"json.dumps",
"threading.Lock",
"pathlib.Path",
"datetime.timedelta",
"traceback.format_exc",
"signal.strsignal",
"os.setpgrp",
"datetime.datetime.now",
"multiprocessing.cpu_count"
] | [((1477, 1504), 'multiprocessing.cpu_count', 'multiprocessing.cpu_count', ([], {}), '()\n', (1502, 1504), False, 'import multiprocessing\n'), ((1540, 1556), 'threading.Lock', 'threading.Lock', ([], {}), '()\n', (1554, 1556), False, 'import threading\n'), ((19819, 19831), 'os.setpgrp', 'os.setpgrp', ([], {}), '()\n', (19829, 19831), False, 'import os\n'), ((1827, 1923), 'resource.setrlimit', 'resource.setrlimit', (['resource.RLIMIT_AS', '(memory_limit * 1024 * 1024, resource.RLIM_INFINITY)'], {}), '(resource.RLIMIT_AS, (memory_limit * 1024 * 1024,\n resource.RLIM_INFINITY))\n', (1845, 1923), False, 'import resource\n'), ((8098, 8118), 'datetime.timedelta', 'datetime.timedelta', ([], {}), '()\n', (8116, 8118), False, 'import datetime\n'), ((14392, 14415), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (14413, 14415), False, 'import datetime\n'), ((15943, 15966), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (15964, 15966), False, 'import datetime\n'), ((4400, 4425), 'pathlib.Path', 'Path', (["test_result['test']"], {}), "(test_result['test'])\n", (4404, 4425), False, 'from pathlib import Path\n'), ((13874, 13946), 'tqdm.tqdm', 'tqdm', ([], {'total': 'self.total_count', 'mininterval': '(1)', 'unit': '"""tests"""', 'smoothing': '(0.1)'}), "(total=self.total_count, mininterval=1, unit='tests', smoothing=0.1)\n", (13878, 13946), False, 'from tqdm import tqdm\n'), ((19469, 19485), 'json.dumps', 'json.dumps', (['data'], {}), '(data)\n', (19479, 19485), False, 'import json\n'), ((19755, 19785), 'json.dump', 'json.dump', (['data', 'per_file_file'], {}), '(data, per_file_file)\n', (19764, 19785), False, 'import json\n'), ((19894, 19922), 'os.killpg', 'os.killpg', (['(0)', 'signal.SIGKILL'], {}), '(0, signal.SIGKILL)\n', (19903, 19922), False, 'import os\n'), ((4196, 4240), 'json.loads', 'json.loads', (['test_result_string'], {'strict': '(False)'}), '(test_result_string, strict=False)\n', (4206, 4240), False, 'import json\n'), 
((9314, 9324), 'pathlib.Path', 'Path', (['path'], {}), '(path)\n', (9318, 9324), False, 'from pathlib import Path\n'), ((14309, 14328), 'tqdm.tqdm.write', 'tqdm.write', (['message'], {}), '(message)\n', (14319, 14328), False, 'from tqdm import tqdm\n'), ((18863, 18894), 'pathlib.Path', 'Path', (['args.libjs_test262_runner'], {}), '(args.libjs_test262_runner)\n', (18867, 18894), False, 'from pathlib import Path\n'), ((18914, 18937), 'pathlib.Path', 'Path', (['args.test262_root'], {}), '(args.test262_root)\n', (18918, 18937), False, 'from pathlib import Path\n'), ((5978, 6031), 'json.dumps', 'json.dumps', (['test_result'], {'indent': '(2)', 'ensure_ascii': '(False)'}), '(test_result, indent=2, ensure_ascii=False)\n', (5988, 6031), False, 'import json\n'), ((8625, 8661), 'tqdm.tqdm.write', 'tqdm.write', (['message'], {'file': 'sys.stderr'}), '(message, file=sys.stderr)\n', (8635, 8661), False, 'from tqdm import tqdm\n'), ((8976, 8989), 'pathlib.Path', 'Path', (['pattern'], {}), '(pattern)\n', (8980, 8989), False, 'from pathlib import Path\n'), ((9037, 9050), 'pathlib.Path', 'Path', (['pattern'], {}), '(pattern)\n', (9041, 9050), False, 'from pathlib import Path\n'), ((13048, 13070), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (13068, 13070), False, 'import traceback\n'), ((15736, 15763), 'signal.strsignal', 'signal.strsignal', (['signalnum'], {}), '(signalnum)\n', (15752, 15763), False, 'import signal\n')] |
import argparse
from utils.train_utils import add_flags_from_config
config_args = {
'training_config': {
'lr': (0.01, 'learning rate'),
'dropout': (0.0, 'dropout probability'),
'cuda': (-1, 'which cuda device to use (-1 for cpu training, 99 for auto gpu assign)'),
'epochs': (5000, 'maximum number of epochs to train for'),
'weight-decay': (0., 'l2 regularization strength'),
'optimizer': ('Adam', 'which optimizer to use, can be any of [Adam, RiemannianAdam]'),
'momentum': (0.999, 'momentum in optimizer'),
'patience': (100, 'patience for early stopping'),
'seed': (1234, 'seed for training'),
'log-freq': (1, 'how often to compute print train/val metrics (in epochs)'),
'eval-freq': (1, 'how often to compute val metrics (in epochs)'),
'save': (0, '1 to save model and logs and 0 otherwise'),
'save-dir': (None, 'path to save training logs and model weights (defaults to logs/task/date/run/)'),
'sweep-c': (0, ''),
'lr-reduce-freq': (None, 'reduce lr every lr-reduce-freq or None to keep lr constant'),
'gamma': (0.5, 'gamma for lr scheduler'),
'print-epoch': (True, ''),
'grad-clip': (None, 'max norm for gradient clipping, or None for no gradient clipping'),
'min-epochs': (100, 'do not early stop before min-epochs'),
'lambda-rec': (1.0, 'loss weight for reconstruction task.'),
'lambda-lp': (1.0, 'lp loss weight. Used with lambda_lp=0 for HNN + rec decoder without lp loss')
},
'model_config': {
'model': ('HGCAE', 'which encoder to use, can be any of [Shallow, MLP, HNN, GCN, GAT]'),
'hidden-dim': ('16', 'hidden layer feature dimension. , comma seprated number'),
'dim': (16, 'embedding dimension'),
'manifold': ('Euclidean', 'which manifold to use, can be any of [Euclidean, PoincareBall]'),
'c': (1.0, 'init hyperbolic radius'),
'c-trainable': (1, '1 for trainable curvature'),
'r': (2., 'fermi-dirac decoder parameter for lp'),
't': (1., 'fermi-dirac decoder parameter for lp'),
'pretrained-embeddings': (None, 'path to pretrained embeddings (.npy file) for Shallow node classification'),
'pos-weight': (0, 'whether to upweight positive class in node classification tasks'),
'num-layers': (2, 'number of hidden layers in encoder'),
'bias': (1, 'whether to use bias (1) or not (0)'),
'act': ('relu', 'which activation function to use (or None for no activation)'),
'n-heads': (4, 'number of attention heads for graph attention networks, must be a divisor dim'),
'alpha': (0.2, 'alpha for leakyrelu in graph attention networks'),
'use-att': (0, 'whether to use hyperbolic attention in model'),
'double-precision': ('0', 'whether to use double precision'),
'att-type': ('mlp', 'Specify Attention type, can bye any of [mlp, dist] for GAT.\
Also [dense_mlp, dense_adjmask_mlp, dense_adjmask_dist, sparse_adjmask_dist] for HGCAE'),
'att-logit': (None, 'Specify logit for attention, can be any of [exp, sigmoid, tanh, ... from torch.<loigt>]') ,
'beta': (0., 'coefficient of feature-distance in when --att-type dist') ,
'non-param-dec': ('fermidirac', 'Non-param decoder for link prediction. [fermidirac, innerproduct]') ,
'num-dec-layers': (2, 'number of hidden layers in encoder'),
'node-cluster': (0, 'Set test,val prop to 0 and adjust [log/eval]_freq and patience'),
'visualize-dim2': (0, 'If set, fix dim==2 for visualization.')
},
'data_config': {
'dataset': ('cora', 'which dataset to use'),
'val-prop': (0.05, 'proportion of validation edges for link prediction'),
'test-prop': (0.1, 'proportion of test edges for link prediction'),
'use-feats': (1, 'whether to use node features or not'),
'normalize-feats': (1, 'whether to normalize input node features'),
'normalize-adj': (1, 'whether to row-normalize the adjacency matrix'),
'split-seed': (1234, 'seed for data splits (train/test/val)'),
},
}
parser = argparse.ArgumentParser()
for _, config_dict in config_args.items():
parser = add_flags_from_config(parser, config_dict)
| [
"argparse.ArgumentParser",
"utils.train_utils.add_flags_from_config"
] | [((4176, 4201), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (4199, 4201), False, 'import argparse\n'), ((4258, 4300), 'utils.train_utils.add_flags_from_config', 'add_flags_from_config', (['parser', 'config_dict'], {}), '(parser, config_dict)\n', (4279, 4300), False, 'from utils.train_utils import add_flags_from_config\n')] |
# coding: utf-8
"""
Marketplace Insights API
<a href=\"https://developer.ebay.com/api-docs/static/versioning.html#limited\" target=\"_blank\"> <img src=\"/cms/img/docs/partners-api.svg\" class=\"legend-icon partners-icon\" title=\"Limited Release\" alt=\"Limited Release\" />(Limited Release)</a> The Marketplace Insights API provides the ability to search for sold items on eBay by keyword, GTIN, category, and product and returns the of sales history of those items. # noqa: E501
OpenAPI spec version: v1_beta.2.2
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
class AspectDistribution(object):
    """Swagger model: the value buckets for one item aspect (e.g. Brand).

    Originally produced by swagger-codegen; the public surface
    (swagger_types, attribute_map, properties, to_dict/to_str) follows the
    generator's conventions.
    """
    # Attribute name -> declared swagger type.
    swagger_types = {
        'aspect_value_distributions': 'list[AspectValueDistribution]',
        'localized_aspect_name': 'str'
    }
    # Attribute name -> JSON field name on the wire.
    attribute_map = {
        'aspect_value_distributions': 'aspectValueDistributions',
        'localized_aspect_name': 'localizedAspectName'
    }
    def __init__(self, aspect_value_distributions=None, localized_aspect_name=None):  # noqa: E501
        """AspectDistribution - a model defined in Swagger"""  # noqa: E501
        self._aspect_value_distributions = None
        self._localized_aspect_name = None
        self.discriminator = None
        # Route non-None constructor arguments through the property setters.
        if aspect_value_distributions is not None:
            self.aspect_value_distributions = aspect_value_distributions
        if localized_aspect_name is not None:
            self.localized_aspect_name = localized_aspect_name
    @property
    def aspect_value_distributions(self):
        """List of value buckets (match counts / refinement links) for this
        aspect, or None when not populated.

        :rtype: list[AspectValueDistribution]
        """
        return self._aspect_value_distributions
    @aspect_value_distributions.setter
    def aspect_value_distributions(self, aspect_value_distributions):
        """Set the list of value buckets for this aspect.

        :type: list[AspectValueDistribution]
        """
        self._aspect_value_distributions = aspect_value_distributions
    @property
    def localized_aspect_name(self):
        """Display name of the aspect, such as Brand or Color.

        :rtype: str
        """
        return self._localized_aspect_name
    @localized_aspect_name.setter
    def localized_aspect_name(self, localized_aspect_name):
        """Set the display name of the aspect.

        :type: str
        """
        self._localized_aspect_name = localized_aspect_name
    def to_dict(self):
        """Return the model's properties as a plain dict."""
        result = {}
        for name in self.swagger_types:
            value = getattr(self, name)
            if isinstance(value, list):
                result[name] = [item.to_dict() if hasattr(item, "to_dict") else item
                                for item in value]
            elif hasattr(value, "to_dict"):
                result[name] = value.to_dict()
            elif isinstance(value, dict):
                result[name] = {key: val.to_dict() if hasattr(val, "to_dict") else val
                                for key, val in value.items()}
            else:
                result[name] = value
        if issubclass(AspectDistribution, dict):
            for key, value in self.items():
                result[key] = value
        return result
    def to_str(self):
        """Return the pretty-printed string representation of the model."""
        return pprint.pformat(self.to_dict())
    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()
    def __eq__(self, other):
        """Equal when `other` is the same model type with equal state."""
        return isinstance(other, AspectDistribution) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        """Inverse of __eq__."""
        return not self == other
| [
"six.iteritems"
] | [((3946, 3979), 'six.iteritems', 'six.iteritems', (['self.swagger_types'], {}), '(self.swagger_types)\n', (3959, 3979), False, 'import six\n')] |
#!/usr/bin/python
from __future__ import absolute_import, print_function, unicode_literals
import sys
import os
import re
import signal
import tempfile
from flask import Flask
from flask import request,url_for,redirect,render_template,request,flash
__version__ = "0.0.1a2"
if __name__ == '__main__':
cdir = os.path.dirname( os.path.realpath(__file__) )
sys.path.insert(0, cdir+"/..")
import pypdx
from pypdx import dbconn
import pdxdisplay
from pdxdisplay import homepage
from pdxdisplay import partsmaster
from pdxdisplay import getitem
from pdxdisplay import upload
from pdxdisplay import bomtree
from pdxdisplay import getbomitem
# Only PDX XML uploads are accepted.
ALLOWED_EXTENSIONS = set( ['xml'] )
debug = False
# NOTE(review): os.environ values are always strings, so the `== 1`
# comparison below can never be true; this was probably meant to be == '1'.
if 'FLASK_DEBUG' in os.environ and os.environ['FLASK_DEBUG'] == 1:
    print("** DEBUG MODE ON **")
    debug = True
# Database DSN must be supplied via the PDX_DSN environment variable.
dsn = None
if 'PDX_DSN' in os.environ:
    dsn = os.environ['PDX_DSN']
if dsn == None:
    print("You need to set the database DSN via the environment variable PDX_DSN")
    # need to use this to exit when using "flask run"
    os.kill( os.getpid(), signal.SIGTERM)
    # sys.exit(1)
def get_db( dsn ):
    """Open a database connection for the given DSN string.

    Recognized DSN forms:
      * 'pg'          -> canned local PostgreSQL connection
      * '*.sqlite3'   -> SQLite database file (in-memory is rejected)
      * 'dbname=...'  -> explicit PostgreSQL DSN

    Returns (db, var) where db is a dbconn.DBconn (or None on failure) and
    var['error'] is None or a user-displayable message.
    """
    var = {}
    db = None
    var['error'] = None
    try:
        if dsn == 'pg':
            # Shorthand for the default local PostgreSQL instance.
            dsn = "dbname='pdx' user='pdxuser' host='localhost' port=5432"
            db = dbconn.DBconn(dsn, dbtype='pg', debug=debug)
            db.placeholder = '%s'
        elif dsn[-8:] == '.sqlite3':
            # Note: cannot use in-memory (:memory:) here
            db = dbconn.DBconn(dsn, dbtype='sqlite3', debug=debug)
            db.placeholder = '?'
        elif re.match(r'dbname\s*=', dsn) is not None:
            # Fix: raw string -- 'dbname\s*=' relied on '\s' passing through
            # unescaped, which raises a SyntaxWarning on modern Python.
            db = dbconn.DBconn(dsn, dbtype='pg', debug=debug)
            db.placeholder = '%s'
        elif dsn == ':memory:':
            var['error'] = "Sorry, can't use :memory: here"
        else:
            var['error'] = "Unrecognized DSN %s" % dsn
    except IOError as e:
        var['error'] = "Database connection failed " + e.__repr__()
    return db, var
# make sure we can connect before registering any routes
db, var = get_db(dsn)
if var['error'] != None or db == None:
    print(var['error'])
    # SIGTERM so this also terminates the process under "flask run".
    os.kill( os.getpid(), signal.SIGTERM)
# NOTE(review): if db is None, the next line raises AttributeError; in
# practice the SIGTERM above should end the process first -- confirm.
db.close()
def allowed_file(filename):
    """Return True when `filename` has an extension in ALLOWED_EXTENSIONS."""
    if '.' not in filename:
        return False
    extension = filename.rsplit('.', 1)[1].lower()
    return extension in ALLOWED_EXTENSIONS
# =========================================================================
# load flask
app = Flask(__name__)
# NOTE(review): hard-coded session secret key -- acceptable for an internal
# tool, but it should come from configuration/environment in any shared
# deployment.
app.secret_key = "yskl6z5itoomanysecrets"
# -------------------------------------------------------------------------
@app.route("/",methods=['GET','POST'])
def homepage_app():
db, var = get_db(dsn)
if var['error'] != None or db == None:
return render_template('error.html',var=var)
dbtype = db.dbtype
# proceeed....
# see if this is an upload
if request.method == 'POST':
cleardata = request.values.get('clearall')
cleardata = True if cleardata=='on' else False;
# print("try to upload")
xmlfile = ""
if 'pdxfile' in request.files:
xmldata = request.files['pdxfile']
if xmldata.filename == '':
flash("no file selected")
elif xmldata and allowed_file(xmldata.filename):
var = upload.process( xmldata, cleardata, dsn, dbtype, debug=debug )
if var['error'] != None:
return render_template('error.html',var=var)
else:
if cleardata:
flash("Old data removed by request.")
flash("XML data successfully uploaded.")
else:
flash("File type not allowed")
else:
print("** no file **")
pass
# continue with display of top level items
var = homepage.process(db)
db.close()
if var['error'] != None:
return render_template('error.html',var=var)
else:
return render_template('homepage.html', var=var)
# -------------------------------------------------------------------------
@app.route("/partsmaster",methods=['GET','POST'])
def partsmaster_app():
sortby = request.args.get('sort')
db,var = get_db(dsn)
if var['error'] != None or db == None:
return render_template('error.html',var=var)
var = partsmaster.process(db,sortby)
db.close()
if var['error'] != None:
return render_template('error.html',var=var)
else:
return render_template('partsmaster.html',var=var)
# -------------------------------------------------------------------------
@app.route("/getitem",methods=['GET','POST'])
def getitem_app():
uid = request.args.get('item')
# var = { 'item': uid }
db,var = get_db(dsn)
if var['error'] != None or db == None:
return render_template('error.html',var=var)
var = getitem.getitem(db, uid)
db.close()
if var['error'] == None:
return render_template('getitem.html',var=var)
else:
return render_template('error_insert.html',var=var)
# -------------------------------------------------------------------------
@app.route("/getbomitem",methods=['GET','POST'])
def getbomitem_app():
s_uid = request.values.get('source')
t_uid = request.values.get('target')
db,var = get_db(dsn)
if var['error'] != None or db == None:
return render_template('error.html',var=var)
var = getbomitem.getbomitem(db, s_uid, t_uid)
db.close()
if var['error'] == None:
return render_template('getbomitem.html',var=var)
else:
return render_template('error_insert.html',var=var)
# -------------------------------------------------------------------------
@app.route("/bom",methods=['GET','POST'])
def bomtree_app():
uid = request.args.get('item')
db,var = get_db(dsn)
if var['error'] != None or db == None:
return render_template('error.html',var=var)
var = bomtree.process(db, uid)
db.close()
if var['error'] == None:
return render_template('bomtree.html',var=var)
else:
return render_template('error.html',var=var)
# -------------------------------------------------------------------------
@app.errorhandler(404)
def custom_401(error):
    """Render unknown URLs with the standard error template."""
    return render_template('error.html', var={'error': error})
# --------------- static routes ------------------------------------------
@app.route("/img/<imagename>")
def load_img(imagename):
return redirect( url_for('static', filename=('img/%s' % imagename)) )
@app.route("/javascripts/<sname>")
def loadjs(sname):
return redirect( url_for('static', filename=('javascripts/%s' % sname)) )
@app.route("/stylesheets/<sname>")
def load_ss(sname):
return redirect( url_for('static', filename=('stylesheets/%s' % sname)) )
def main():
    """Run the Flask development server.

    Set PDX_EXTACCESS=1 in the environment to listen on all interfaces
    instead of localhost only.
    """
    extra_files = ['templates/*.html']
    external = os.environ.get('PDX_EXTACCESS') == '1'
    if external:
        print("* Allowing external access", external)
        app.run(host='0.0.0.0', extra_files=extra_files, debug=debug)
    else:
        app.run(extra_files=extra_files, debug=debug)
# =========================================================
if __name__ == '__main__':
    # Calls main() on the self-imported module copy (this file imports
    # itself as `pdxdisplay` in the __main__ bootstrap above).
    pdxdisplay.main()
| [
"flask.flash",
"os.getpid",
"flask.request.args.get",
"pdxdisplay.homepage.process",
"pdxdisplay.bomtree.process",
"flask.Flask",
"os.path.realpath",
"sys.path.insert",
"flask.request.values.get",
"re.match",
"pdxdisplay.getitem.getitem",
"pdxdisplay.main",
"flask.url_for",
"pypdx.dbconn.D... | [((2383, 2398), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (2388, 2398), False, 'from flask import Flask\n'), ((363, 395), 'sys.path.insert', 'sys.path.insert', (['(0)', "(cdir + '/..')"], {}), "(0, cdir + '/..')\n", (378, 395), False, 'import sys\n'), ((3811, 3831), 'pdxdisplay.homepage.process', 'homepage.process', (['db'], {}), '(db)\n', (3827, 3831), False, 'from pdxdisplay import homepage\n'), ((4159, 4183), 'flask.request.args.get', 'request.args.get', (['"""sort"""'], {}), "('sort')\n", (4175, 4183), False, 'from flask import request, url_for, redirect, render_template, request, flash\n'), ((4320, 4351), 'pdxdisplay.partsmaster.process', 'partsmaster.process', (['db', 'sortby'], {}), '(db, sortby)\n', (4339, 4351), False, 'from pdxdisplay import partsmaster\n'), ((4674, 4698), 'flask.request.args.get', 'request.args.get', (['"""item"""'], {}), "('item')\n", (4690, 4698), False, 'from flask import request, url_for, redirect, render_template, request, flash\n'), ((4863, 4887), 'pdxdisplay.getitem.getitem', 'getitem.getitem', (['db', 'uid'], {}), '(db, uid)\n', (4878, 4887), False, 'from pdxdisplay import getitem\n'), ((5217, 5245), 'flask.request.values.get', 'request.values.get', (['"""source"""'], {}), "('source')\n", (5235, 5245), False, 'from flask import request, url_for, redirect, render_template, request, flash\n'), ((5258, 5286), 'flask.request.values.get', 'request.values.get', (['"""target"""'], {}), "('target')\n", (5276, 5286), False, 'from flask import request, url_for, redirect, render_template, request, flash\n'), ((5428, 5467), 'pdxdisplay.getbomitem.getbomitem', 'getbomitem.getbomitem', (['db', 's_uid', 't_uid'], {}), '(db, s_uid, t_uid)\n', (5449, 5467), False, 'from pdxdisplay import getbomitem\n'), ((5788, 5812), 'flask.request.args.get', 'request.args.get', (['"""item"""'], {}), "('item')\n", (5804, 5812), False, 'from flask import request, url_for, redirect, render_template, request, flash\n'), 
((5949, 5973), 'pdxdisplay.bomtree.process', 'bomtree.process', (['db', 'uid'], {}), '(db, uid)\n', (5964, 5973), False, 'from pdxdisplay import bomtree\n'), ((6313, 6351), 'flask.render_template', 'render_template', (['"""error.html"""'], {'var': 'var'}), "('error.html', var=var)\n", (6328, 6351), False, 'from flask import request, url_for, redirect, render_template, request, flash\n'), ((7384, 7401), 'pdxdisplay.main', 'pdxdisplay.main', ([], {}), '()\n', (7399, 7401), False, 'import pdxdisplay\n'), ((330, 356), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (346, 356), False, 'import os\n'), ((1053, 1064), 'os.getpid', 'os.getpid', ([], {}), '()\n', (1062, 1064), False, 'import os\n'), ((2124, 2135), 'os.getpid', 'os.getpid', ([], {}), '()\n', (2133, 2135), False, 'import os\n'), ((2661, 2699), 'flask.render_template', 'render_template', (['"""error.html"""'], {'var': 'var'}), "('error.html', var=var)\n", (2676, 2699), False, 'from flask import request, url_for, redirect, render_template, request, flash\n'), ((2835, 2865), 'flask.request.values.get', 'request.values.get', (['"""clearall"""'], {}), "('clearall')\n", (2853, 2865), False, 'from flask import request, url_for, redirect, render_template, request, flash\n'), ((3891, 3929), 'flask.render_template', 'render_template', (['"""error.html"""'], {'var': 'var'}), "('error.html', var=var)\n", (3906, 3929), False, 'from flask import request, url_for, redirect, render_template, request, flash\n'), ((3954, 3995), 'flask.render_template', 'render_template', (['"""homepage.html"""'], {'var': 'var'}), "('homepage.html', var=var)\n", (3969, 3995), False, 'from flask import request, url_for, redirect, render_template, request, flash\n'), ((4267, 4305), 'flask.render_template', 'render_template', (['"""error.html"""'], {'var': 'var'}), "('error.html', var=var)\n", (4282, 4305), False, 'from flask import request, url_for, redirect, render_template, request, flash\n'), ((4415, 4453), 
'flask.render_template', 'render_template', (['"""error.html"""'], {'var': 'var'}), "('error.html', var=var)\n", (4430, 4453), False, 'from flask import request, url_for, redirect, render_template, request, flash\n'), ((4478, 4522), 'flask.render_template', 'render_template', (['"""partsmaster.html"""'], {'var': 'var'}), "('partsmaster.html', var=var)\n", (4493, 4522), False, 'from flask import request, url_for, redirect, render_template, request, flash\n'), ((4810, 4848), 'flask.render_template', 'render_template', (['"""error.html"""'], {'var': 'var'}), "('error.html', var=var)\n", (4825, 4848), False, 'from flask import request, url_for, redirect, render_template, request, flash\n'), ((4947, 4987), 'flask.render_template', 'render_template', (['"""getitem.html"""'], {'var': 'var'}), "('getitem.html', var=var)\n", (4962, 4987), False, 'from flask import request, url_for, redirect, render_template, request, flash\n'), ((5012, 5057), 'flask.render_template', 'render_template', (['"""error_insert.html"""'], {'var': 'var'}), "('error_insert.html', var=var)\n", (5027, 5057), False, 'from flask import request, url_for, redirect, render_template, request, flash\n'), ((5375, 5413), 'flask.render_template', 'render_template', (['"""error.html"""'], {'var': 'var'}), "('error.html', var=var)\n", (5390, 5413), False, 'from flask import request, url_for, redirect, render_template, request, flash\n'), ((5527, 5570), 'flask.render_template', 'render_template', (['"""getbomitem.html"""'], {'var': 'var'}), "('getbomitem.html', var=var)\n", (5542, 5570), False, 'from flask import request, url_for, redirect, render_template, request, flash\n'), ((5595, 5640), 'flask.render_template', 'render_template', (['"""error_insert.html"""'], {'var': 'var'}), "('error_insert.html', var=var)\n", (5610, 5640), False, 'from flask import request, url_for, redirect, render_template, request, flash\n'), ((5896, 5934), 'flask.render_template', 'render_template', (['"""error.html"""'], {'var': 
'var'}), "('error.html', var=var)\n", (5911, 5934), False, 'from flask import request, url_for, redirect, render_template, request, flash\n'), ((6033, 6073), 'flask.render_template', 'render_template', (['"""bomtree.html"""'], {'var': 'var'}), "('bomtree.html', var=var)\n", (6048, 6073), False, 'from flask import request, url_for, redirect, render_template, request, flash\n'), ((6098, 6136), 'flask.render_template', 'render_template', (['"""error.html"""'], {'var': 'var'}), "('error.html', var=var)\n", (6113, 6136), False, 'from flask import request, url_for, redirect, render_template, request, flash\n'), ((6504, 6552), 'flask.url_for', 'url_for', (['"""static"""'], {'filename': "('img/%s' % imagename)"}), "('static', filename='img/%s' % imagename)\n", (6511, 6552), False, 'from flask import request, url_for, redirect, render_template, request, flash\n'), ((6633, 6685), 'flask.url_for', 'url_for', (['"""static"""'], {'filename': "('javascripts/%s' % sname)"}), "('static', filename='javascripts/%s' % sname)\n", (6640, 6685), False, 'from flask import request, url_for, redirect, render_template, request, flash\n'), ((6767, 6819), 'flask.url_for', 'url_for', (['"""static"""'], {'filename': "('stylesheets/%s' % sname)"}), "('static', filename='stylesheets/%s' % sname)\n", (6774, 6819), False, 'from flask import request, url_for, redirect, render_template, request, flash\n'), ((1296, 1340), 'pypdx.dbconn.DBconn', 'dbconn.DBconn', (['dsn'], {'dbtype': '"""pg"""', 'debug': 'debug'}), "(dsn, dbtype='pg', debug=debug)\n", (1309, 1340), False, 'from pypdx import dbconn\n'), ((1485, 1534), 'pypdx.dbconn.DBconn', 'dbconn.DBconn', (['dsn'], {'dbtype': '"""sqlite3"""', 'debug': 'debug'}), "(dsn, dbtype='sqlite3', debug=debug)\n", (1498, 1534), False, 'from pypdx import dbconn\n'), ((3131, 3156), 'flask.flash', 'flash', (['"""no file selected"""'], {}), "('no file selected')\n", (3136, 3156), False, 'from flask import request, url_for, redirect, render_template, request, 
flash\n'), ((1580, 1608), 're.match', 're.match', (['"""dbname\\\\s*="""', 'dsn'], {}), "('dbname\\\\s*=', dsn)\n", (1588, 1608), False, 'import re\n'), ((1634, 1678), 'pypdx.dbconn.DBconn', 'dbconn.DBconn', (['dsn'], {'dbtype': '"""pg"""', 'debug': 'debug'}), "(dsn, dbtype='pg', debug=debug)\n", (1647, 1678), False, 'from pypdx import dbconn\n'), ((3240, 3300), 'pdxdisplay.upload.process', 'upload.process', (['xmldata', 'cleardata', 'dsn', 'dbtype'], {'debug': 'debug'}), '(xmldata, cleardata, dsn, dbtype, debug=debug)\n', (3254, 3300), False, 'from pdxdisplay import upload\n'), ((3643, 3673), 'flask.flash', 'flash', (['"""File type not allowed"""'], {}), "('File type not allowed')\n", (3648, 3673), False, 'from flask import request, url_for, redirect, render_template, request, flash\n'), ((3371, 3409), 'flask.render_template', 'render_template', (['"""error.html"""'], {'var': 'var'}), "('error.html', var=var)\n", (3386, 3409), False, 'from flask import request, url_for, redirect, render_template, request, flash\n'), ((3568, 3608), 'flask.flash', 'flash', (['"""XML data successfully uploaded."""'], {}), "('XML data successfully uploaded.')\n", (3573, 3608), False, 'from flask import request, url_for, redirect, render_template, request, flash\n'), ((3489, 3526), 'flask.flash', 'flash', (['"""Old data removed by request."""'], {}), "('Old data removed by request.')\n", (3494, 3526), False, 'from flask import request, url_for, redirect, render_template, request, flash\n')] |
import logging
import ibmsecurity.utilities.tools
import os.path
from ibmsecurity.utilities.tools import files_same, get_random_temp_dir
import shutil
logger = logging.getLogger(__name__)
def get_all(isamAppliance, instance_id, check_mode=False, force=False):
    """Retrieve the full (recursive) administration pages root listing."""
    uri = "/wga/reverseproxy/{0}/management_root?recursive=yes".format(instance_id)
    return isamAppliance.invoke_get(
        "Retrieving the current administration pages root contents", uri)
def get(isamAppliance, instance_id, id, check_mode=False, force=False):
    """Retrieve the contents of one file in the administration pages root."""
    uri = "/wga/reverseproxy/{0}/management_root/{1}".format(instance_id, id)
    return isamAppliance.invoke_get(
        "Retrieving the contents of a file in the administration pages root", uri)
def _check(isamAppliance, instance_id, id, name):
    """Resolve `name` under directory `id` to its file id (None if absent)."""
    listing = get_all(isamAppliance, instance_id)
    return _parse_id(listing['data'], os.path.join(id, name))
def _check_file(isamAppliance, instance_id, id):
    """Return True when file `id` exists on the appliance.

    :param isamAppliance: appliance connection object
    :param instance_id: reverse-proxy instance name
    :param id: path of the file under management_root
    :return: True/False
    """
    ret_obj = get(isamAppliance, instance_id, id)
    logger.info(ret_obj['data'])
    return ret_obj['rc'] == 0
def _parse_id(contents, file_name):
"""
Recursively parse and find the id for a given file name
:param contents:
:param file_name:
:return id:
"""
try:
split_file = file_name.split('/', 1)
cur_file = split_file[0]
rest_file = split_file[1]
except:
rest_file = ''
for file in contents:
if file['name'] == cur_file:
if rest_file == '':
if file['type'] == 'File':
return file['id']
else:
return None
else:
if len(file['children']) == 0:
return None
else:
return _parse_id(file['children'], rest_file)
return None
def create(isamAppliance, instance_id, id, name, contents=None, check_mode=False, force=False):
    """Create a file in the administration pages root.

    No-op (plain return object) when the file already exists and force is
    not set.

    :param isamAppliance: appliance connection object
    :param instance_id: reverse-proxy instance name
    :param id: directory path under management_root
    :param name: file name to create
    :param contents: initial file contents
    """
    if force is not True and _check(isamAppliance, instance_id, id, name) is not None:
        return isamAppliance.create_return_object()
    if check_mode is True:
        return isamAppliance.create_return_object(changed=True)
    return isamAppliance.invoke_post(
        "Creating a file in the administration pages root",
        "/wga/reverseproxy/{0}/management_root/{1}".format(instance_id, id),
        {
            'file_name': name,
            'type': 'file',
            'contents': contents
        })
def update(isamAppliance, instance_id, id, filename=None, contents=None, check_mode=False, force=False):
    """
    Update a file in the administration pages root

    Supply either `filename` (a local file to upload) or `contents`
    (inline data); `filename` takes precedence when both are given.

    :param isamAppliance:
    :param instance_id:
    :param id:
    :param filename:
    :param contents:
    :param check_mode:
    :param force:
    :return:
    """
    if force is True or _check_file(isamAppliance, instance_id, id) is True:
        if check_mode is True:
            return isamAppliance.create_return_object(changed=True)
        else:
            if filename is not None:
                return isamAppliance.invoke_put_files(
                    "Update a file in the administration page root",
                    "/wga/reverseproxy/{0}/management_root/{1}".format(instance_id, id),
                    [
                        {
                            'file_formfield': 'file',
                            'filename': filename,
                            'mimetype': 'application/octet-stream'
                        }
                    ],
                    {
                        'file': filename,
                        'type': 'file'
                    })
            elif contents is not None:
                return isamAppliance.invoke_put_files(
                    "Update a file in the administration page root",
                    "/wga/reverseproxy/{0}/management_root/{1}".format(instance_id, id),
                    {
                        'contents': contents,
                        'type': 'file'
                    })
            else:
                return isamAppliance.create_return_object(
                    warnings=["Either contents or filename parameter need to be provided. Skipping update request."])
    # Bug fix: previously fell off the end (implicitly returning None) when
    # the file did not exist and force was False; now returns a standard
    # result object like the sibling create()/delete()/rename() functions.
    return isamAppliance.create_return_object()
def delete(isamAppliance, instance_id, id, check_mode=False, force=False):
    """Delete a file in the administration pages root.

    No-op (plain return object) when the file does not exist and force is
    not set.

    :param isamAppliance: appliance connection object
    :param instance_id: reverse-proxy instance name
    :param id: path of the file under management_root
    """
    if force is True or _check(isamAppliance, instance_id, id, '') is not None:
        if check_mode is True:
            return isamAppliance.create_return_object(changed=True)
        return isamAppliance.invoke_delete(
            "Deleting a file in the administration pages root",
            "/wga/reverseproxy/{0}/management_root/{1}".format(instance_id, id))
    return isamAppliance.create_return_object()
def rename(isamAppliance, instance_id, id, new_name, check_mode=False, force=False):
    """Rename a file in the administration pages root.

    Note: when force is True the lookup is skipped, so the payload's 'id'
    is sent as None (preserving the original behaviour).

    :param isamAppliance: appliance connection object
    :param instance_id: reverse-proxy instance name
    :param id: current path of the file under management_root
    :param new_name: new file name
    """
    file_id = None
    if force is False:
        file_id = _check(isamAppliance, instance_id, id, '')
    if force is True or file_id is not None:
        if check_mode is True:
            return isamAppliance.create_return_object(changed=True)
        return isamAppliance.invoke_put(
            "Renaming a file in the administration pages root",
            "/wga/reverseproxy/{0}/management_root/{1}".format(instance_id, id),
            {
                'id': file_id,
                'new_name': new_name,
                'type': 'file'
            })
    return isamAppliance.create_return_object()
def export_file(isamAppliance, instance_id, id, filename, check_mode=False, force=False):
    """Download a management_root file to the local path `filename`.

    The download is skipped entirely in check mode.

    :param isamAppliance: appliance connection object
    :param instance_id: reverse-proxy instance name
    :param id: path of the file under management_root
    :param filename: local destination path
    """
    if force is True or _check(isamAppliance, instance_id, id, name='') is not None:
        if check_mode is False:
            return isamAppliance.invoke_get_file(
                "Exporting a file in the administration pages root",
                "/wga/reverseproxy/{0}/management_root/{1}?export=true".format(instance_id, id),
                filename)
    return isamAppliance.create_return_object()
def _check_import(isamAppliance, instance_id, id, filename, check_mode=False):
    """
    Checks if file on the Appliance (id) exists and if so, whether it is different from filename

    Returns True when an import is needed (file absent or different). Side
    effect: a differing remote file is deleted to prepare for the import.

    :param isamAppliance:
    :param instance_id:
    :param id:
    :param filename:
    :return:
    """
    # Download the current remote copy into a throwaway temp dir for diffing.
    tmpdir = get_random_temp_dir()
    tmp_original_file = os.path.join(tmpdir, os.path.basename(id))
    if _check(isamAppliance, instance_id, id, ''):
        export_file(isamAppliance, instance_id, id, tmp_original_file, check_mode=False, force=True)
        logger.debug("file already exists on appliance")
        if files_same(tmp_original_file, filename):
            logger.debug("files are the same, so we don't want to do anything")
            shutil.rmtree(tmpdir)
            return False
        else:
            logger.debug("files are different, so we delete existing file in preparation for import")
            delete(isamAppliance, instance_id, id, check_mode=check_mode, force=True)
            shutil.rmtree(tmpdir)
            return True
    else:
        logger.debug("file does not exist on appliance, so we'll want to import")
        shutil.rmtree(tmpdir)
        return True
def import_file(isamAppliance, instance_id, id, filename, check_mode=False, force=False):
    """Upload a local file into the administration pages root.

    _check_import() decides whether an upload is needed: the POST is
    skipped when an identical file is already present on the appliance.

    :param isamAppliance: appliance connection object
    :param instance_id: reverse-proxy instance name
    :param id: destination path under management_root
    :param filename: local file to upload
    """
    if force is True or _check_import(isamAppliance, instance_id, id, filename, check_mode=check_mode):
        if check_mode is True:
            return isamAppliance.create_return_object(changed=True)
        return isamAppliance.invoke_post_files(
            "Importing a file in the administration pages root",
            "/wga/reverseproxy/{0}/management_root/{1}".format(instance_id, id),
            [
                {
                    'file_formfield': 'file',
                    'filename': filename,
                    'mimetype': 'application/octet-stream'
                }
            ],
            {
                'type': 'file',
                'force': force
            })
    return isamAppliance.create_return_object()
def compare(isamAppliance1, isamAppliance2, instance_id):
    """Compare the administration pages root listings of two appliances."""
    listing1 = get_all(isamAppliance1, instance_id)
    listing2 = get_all(isamAppliance2, instance_id)
    return ibmsecurity.utilities.tools.json_compare(listing1, listing2, deleted_keys=[])
| [
"shutil.rmtree",
"ibmsecurity.utilities.tools.get_random_temp_dir",
"ibmsecurity.utilities.tools.files_same",
"logging.getLogger"
] | [((161, 188), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (178, 188), False, 'import logging\n'), ((7543, 7564), 'ibmsecurity.utilities.tools.get_random_temp_dir', 'get_random_temp_dir', ([], {}), '()\n', (7562, 7564), False, 'from ibmsecurity.utilities.tools import files_same, get_random_temp_dir\n'), ((7852, 7891), 'ibmsecurity.utilities.tools.files_same', 'files_same', (['tmp_original_file', 'filename'], {}), '(tmp_original_file, filename)\n', (7862, 7891), False, 'from ibmsecurity.utilities.tools import files_same, get_random_temp_dir\n'), ((8392, 8413), 'shutil.rmtree', 'shutil.rmtree', (['tmpdir'], {}), '(tmpdir)\n', (8405, 8413), False, 'import shutil\n'), ((7985, 8006), 'shutil.rmtree', 'shutil.rmtree', (['tmpdir'], {}), '(tmpdir)\n', (7998, 8006), False, 'import shutil\n'), ((8246, 8267), 'shutil.rmtree', 'shutil.rmtree', (['tmpdir'], {}), '(tmpdir)\n', (8259, 8267), False, 'import shutil\n')] |
#!/usr/bin/env python3
#! coding:utf-8
'''
Picomotor Power Control
MEIKO WATCH BOOT nino RPC-MCS
Usage:
pico_power_control.py [TARGET] [ON or OFF]
ex.
pico_power_controls.py TEST ON
'''
import sys
import getpass
import telnetlib
import time
import subprocess
from datetime import datetime
from datetime import timedelta
# Map of picomotor driver target name -> RPC-M2CS power strip address and
# outlet number. Commented entries are currently unused / out of service.
driverDict = {
    # Same happy_pico_start.py list.
    #"MCO" :{"IPADDR":"10.68.160.101", "OUTLET":1}, # MCF
    #"STM1" :{"IPADDR":"10.68.160.90", "OUTLET":1},
    #"STM2" :{"IPADDR":"10.68.160.90", "OUTLET":1},
    #"POM1" :{"IPADDR":"10.68.160.90", "OUTLET":1},
    "MCF" :{"IPADDR":"10.68.160.101", "OUTLET":1},
    "MCE" :{"IPADDR":"10.68.160.102", "OUTLET":1},
    "IMMT1" :{"IPADDR":"10.68.160.103", "OUTLET":1},
    "IMMT2" :{"IPADDR":"10.68.160.104", "OUTLET":1},
    "PR3_IM" :{"IPADDR":"10.68.160.105", "OUTLET":1},
    "PR3_BF" :{"IPADDR":"10.68.160.105", "OUTLET":2},
    "PR2_IM" :{"IPADDR":"10.68.160.106", "OUTLET":1},
    "PR2_BF" :{"IPADDR":"10.68.160.106", "OUTLET":2},
    "PRM_IM" :{"IPADDR":"10.68.160.107", "OUTLET":1},
    "PRM_BF" :{"IPADDR":"10.68.160.107", "OUTLET":2},
    "BS_IM" :{"IPADDR":"10.68.160.108", "OUTLET":1},
    "BS_BF" :{"IPADDR":"10.68.160.108", "OUTLET":2},
    "SR3_IM" :{"IPADDR":"10.68.160.109", "OUTLET":1},
    "SR3_BF" :{"IPADDR":"10.68.160.109", "OUTLET":2},
    "SR2_IM" :{"IPADDR":"10.68.160.110", "OUTLET":1},
    "SR2_BF" :{"IPADDR":"10.68.160.110", "OUTLET":2},
    "SRM_IM" :{"IPADDR":"10.68.160.111", "OUTLET":1},
    "SRM_BF" :{"IPADDR":"10.68.160.111", "OUTLET":2},
    "ETMX" :{"IPADDR":"10.68.160.112", "OUTLET":1},
    "ETMY" :{"IPADDR":"10.68.160.113", "OUTLET":1},
    "ITMX" :{"IPADDR":"10.68.160.114", "OUTLET":1},
    "ITMY" :{"IPADDR":"10.68.160.115", "OUTLET":1},
    "OMMT1" :{"IPADDR":"10.68.160.116", "OUTLET":1},
    "OMMT2" :{"IPADDR":"10.68.160.116", "OUTLET":2},
    "OSTM" :{"IPADDR":"10.68.160.117", "OUTLET":1},
    "PCAL_EX1" :{"IPADDR":"10.68.160.118", "OUTLET":1},
    "PCAL_EX2" :{"IPADDR":"10.68.160.118", "OUTLET":2},
    "PCAL_EY1" :{"IPADDR":"10.68.160.119", "OUTLET":1},
    "PCAL_EY2" :{"IPADDR":"10.68.160.119", "OUTLET":2},
    "POP" :{"IPADDR":"10.68.160.120", "OUTLET":1},
    "POS" :{"IPADDR":"10.68.160.121", "OUTLET":1},
    "TEST" :{"IPADDR":"10.68.150.90", "OUTLET":1},
    "TEST2" :{"IPADDR":"10.68.150.90", "OUTLET":2},
    #"AS_WFS" :{"IPADDR":"Use SmartPlag", "OUTLET":1},
    #"REFL_WFS" :{"IPADDR":"Use SmartPlag", "OUTLET":1},
    }
# Map of user-facing action keyword -> RPC-M2CS telnet command.
cmdListDic={
    "ON":'PON',
    "OFF":'POF',
    "REBOOT":'POR',
    }
# Telnet timeout in seconds for all driver connections.
TIMEOUT = 60
def print_driverList():
    """Print each known picomotor driver target with its IP address."""
    print("| --- Driver List ---")
    for target, info in driverDict.items():
        print("| {0:10s} : {1:14s} |".format(target, info['IPADDR']))
class rpcm2cs_telnet(object):
    def __init__(self,prefix,host,timeout):
        # prefix: target name, used only to name the per-target log file.
        self.prefix = prefix
        self.host = host
        self.port = 23  # standard telnet port
        self.timeout = timeout
        # Fixed log location on the shared filesystem.
        self.logfile = '/kagra/Dropbox/Subsystems/VIS/Scripts/PicoMotorPowerController/LogFiles/'+self.prefix+'.log'
        # Connect immediately; connect() terminates the script on failure.
        self.connect()
    def connect(self):
        # Open the telnet session; any failure is fatal for the script.
        try:
            print('Connecting to ',self.host)
            self.tn=telnetlib.Telnet(self.host,port=self.port, timeout=self.timeout)
            self.logOutput('Conecting to: '+self.host)
        except:
            print('! Not Connected %s' % self.host)
            self.logOutput('Not Conected to: '+self.host)
            quit()
    def read_until(self,readdata):
        # Wait (up to self.timeout) for `readdata` to appear on the telnet
        # stream; abort the whole script on failure.
        try:
            #print(readdata)
            return self.tn.read_until(readdata,self.timeout)
        except:
            print('Error: Not Read Data %s' % readdata)
            quit()
def write(self,writedata):
try:
#print(writedata)
self.tn.write(writedata)
except:
print('Error: Not Write Data %s' % writedata)
quit()
'''
Login
'''
def login(self):
user='pico'
password='<PASSWORD>'
self.read_until(b"220 RPC-M2CS (Noname) server ready.")
self.write(b'\r\n')
self.read_until(b"ID:")
self.write(user.encode('ascii')+b"\r\n")
self.read_until(b"Password:")
self.write(password.encode('ascii')+b'\r\n')
self.read_until(b">")
'''
Logout
Send command
Q : Logout telenet
'''
def logout(self):
self.write(b'Q\r\n')
self.logOutput('Logout of: '+self.host)
'''
Get Outret State
Send command
POS : Get Outret State
Result
Power State
00 : Outlet 1 and 2 power off
10 : Outlet 1 Power On and Outlet 2 power off
01 : Outlet 1 Power Off and Outlet 2 power on
11 : Outlet 1 and 2 power on
'''
def getpos(self):
self.write(b'POS\r\n')
self.read_until(b"POS ")
outlet = self.read_until(b"\r")
self.read_until(b">")
self.logOutput('Status:'+str(outlet[0:2]))
print("[deb]",outlet) #KKK
return outlet
#########################################
'''
Wait Outret State Change
Send command
XPOS : Get Outret State
'''
def getxpos(self):
self.write(b'XPOS\r\n')
self.read_until(b"XPOS ")
status = self.read_until(b"\r")
self.read_until(b">")
#self.logOutput('Status:'+str(outlet[0:2]))
# print("[deb]",status) #KKK
# print("[deb0:6]",status[0:6]) #KKK
# print("[deb7:13]",status[7:13]) #KKK
zero = '0'
# print("=",status[1,2],zero.encode()) #KKK
# print("[deb1:2]",status[1:2]) #KKK
print(status[1:2],zero.encode())
print(type(status[1:2]),type(zero.encode()))
print(bool(type(status[1:2]) == type(zero.encode())))
print("loop start")
print("status[0:1]=",status[0:1],"status[1:2]=",status[1:2],"status[2:6]=",status[2:6])
print("status[7:8]=",status[7:8],"status[8:9]=",status[8:9],"status[9:13]=",status[9:13])
cnt = 1
while status[1:2] != zero.encode():
self.write(b'XPOS\r\n')
self.read_until(b"XPOS ")
status = self.read_until(b"\r")
self.read_until(b">")
print("status[0:1]=",status[0:1],"status[1:2]=",status[1:2],"status[2:6]=",status[2:6])
time.sleep(1)
cnt = cnt + 1
if cnt >= 30:
print("loop time up",cnt)
break
cnt = 1
while status[8:9] != zero.encode():
self.write(b'XPOS\r\n')
self.read_until(b"XPOS ")
status = self.read_until(b"\r")
self.read_until(b">")
print("status[7:8]=",status[7:8],"status[8:9]=",status[8:9],"status[9:13]=",status[9:13])
time.sleep(1)
cnt = cnt + 1
if cnt >= 30:
print("loop time up",cnt)
break
return status
#########################################
'''
Turnon or off
Send command
PON1 or PON2 : Turn on Outret 1 or 2
POF1 or POF2 : Turn off Outret 1 or 2
'''
def putTurnOnOff(self,cmd,outlet):
cmd = cmd+str(outlet)
self.write(bytes(cmd+'\r\n', encoding='utf-8'))
self.logOutput('Send '+cmd)
self.read_until(b">")
'''
Log File Output
'''
def logOutput(self,message):
d = datetime.now()
with open(self.logfile,'a') as f:
f.write(d.strftime('%Y-%m-%d %H:%M:%S')+' '+message+'\n')
def main():
    """Validate argv (driver name + command) and execute the power command."""
    args = sys.argv
    print(sys.version_info)
    if len(args) != 3:
        print_driverList()
        quit()
    driver_name = args[1]
    command_name = args[2]
    if driver_name not in driverDict:
        print('! please check DRIVER_NAME %s' % driver_name)
        print_driverList()
        quit()
    if command_name not in cmdListDic:
        print('! please check Command %s' % command_name)
        print_driverList()
        quit()
    entry = driverDict[driver_name]
    drv = rpcm2cs_telnet(driver_name, entry['IPADDR'], TIMEOUT)
    drv.login()
    drv.getpos()  # record the current outlet state in the log
    # The turn-on command takes effect with a delay on the controller side.
    drv.putTurnOnOff(cmdListDic[command_name], entry['OUTLET'])
    drv.getxpos()  # wait until the outlet state settles
    drv.logout()
    # Pop up a desktop dialog reporting success.
    command = [
        'zenity',
        '--info',
        '--title=Result',
        '--text=\r\rRemote Power Command Successfully\r\r',
    ]
    print(command)
    subprocess.Popen(command)
if __name__== "__main__":
    main()
| [
"subprocess.Popen",
"datetime.datetime.now",
"telnetlib.Telnet",
"time.sleep"
] | [((8652, 8677), 'subprocess.Popen', 'subprocess.Popen', (['command'], {}), '(command)\n', (8668, 8677), False, 'import subprocess\n'), ((7642, 7656), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (7654, 7656), False, 'from datetime import datetime\n'), ((3329, 3394), 'telnetlib.Telnet', 'telnetlib.Telnet', (['self.host'], {'port': 'self.port', 'timeout': 'self.timeout'}), '(self.host, port=self.port, timeout=self.timeout)\n', (3345, 3394), False, 'import telnetlib\n'), ((6538, 6551), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (6548, 6551), False, 'import time\n'), ((6997, 7010), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (7007, 7010), False, 'import time\n')] |
import torch
import sys
import os
from PIL import Image
from chess_net_simple import SimpleChessNet
from chess_net_simple import transform
# Using a basic version of the chess net which only detects if there is a piece on a field or not
# to remove empty tiles
# Removes empty tiles from a given dataset to simplify labeling process
def sort_out_empty_tiles(model, file_path):
    """Delete every tile image in *file_path* that the model classifies as empty.

    Args:
        model: trained classifier; class index 0 is taken to mean "empty tile"
            (matches the original SimpleChessNet labeling -- confirm if reused).
        file_path: directory containing the tile images (.jpg/.jpeg).
    """
    for file in os.listdir(file_path):
        if not file.lower().endswith((".jpg", ".jpeg")):
            continue
        full_path = os.path.join(file_path, file)
        # Close the PIL handle before a potential os.remove: PIL loads lazily
        # and a still-open handle makes the delete fail on Windows.
        with Image.open(full_path) as pil_img:
            img = transform(pil_img)
        img = torch.unsqueeze(img, 0)
        with torch.no_grad():  # inference only -- no gradients needed
            out = model(img)
        _, prediction = torch.max(out, 1)
        if prediction == 0:
            print("Delete", file)
            os.remove(full_path)
def main():
    """Load the trained simple net and strip empty tiles from the val set."""
    weights_path = "../model/simple-net.pt"
    tiles_dir = "../data/chessboards/val_additional"
    if not os.path.exists(weights_path):
        print("Model weights were not found")
        sys.exit(1)
    net = SimpleChessNet()
    net.load_state_dict(torch.load(weights_path))
    net.eval()  # inference mode: freeze dropout / batch-norm behaviour
    sort_out_empty_tiles(net, tiles_dir)
if __name__ == "__main__":
    main()
| [
"chess_net_simple.transform",
"torch.load",
"os.path.exists",
"torch.max",
"torch.unsqueeze",
"os.path.join",
"os.listdir",
"sys.exit",
"chess_net_simple.SimpleChessNet"
] | [((392, 413), 'os.listdir', 'os.listdir', (['file_path'], {}), '(file_path)\n', (402, 413), False, 'import os\n'), ((1086, 1102), 'chess_net_simple.SimpleChessNet', 'SimpleChessNet', ([], {}), '()\n', (1100, 1102), False, 'from chess_net_simple import SimpleChessNet\n'), ((979, 1005), 'os.path.exists', 'os.path.exists', (['model_path'], {}), '(model_path)\n', (993, 1005), False, 'import os\n'), ((1061, 1072), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (1069, 1072), False, 'import sys\n'), ((1129, 1151), 'torch.load', 'torch.load', (['model_path'], {}), '(model_path)\n', (1139, 1151), False, 'import torch\n'), ((592, 606), 'chess_net_simple.transform', 'transform', (['img'], {}), '(img)\n', (601, 606), False, 'from chess_net_simple import transform\n'), ((625, 648), 'torch.unsqueeze', 'torch.unsqueeze', (['img', '(0)'], {}), '(img, 0)\n', (640, 648), False, 'import torch\n'), ((707, 729), 'torch.max', 'torch.max', (['out.data', '(1)'], {}), '(out.data, 1)\n', (716, 729), False, 'import torch\n'), ((543, 572), 'os.path.join', 'os.path.join', (['file_path', 'file'], {}), '(file_path, file)\n', (555, 572), False, 'import os\n'), ((827, 856), 'os.path.join', 'os.path.join', (['file_path', 'file'], {}), '(file_path, file)\n', (839, 856), False, 'import os\n')] |
# Sorting-hat quiz: asks a series of questions, tallies points per house and
# announces the winning house (prompting the student on a tie).
import sys
import time

name = input('What is your name? ')

# Ages must be compared numerically: the original compared strings, so for
# example "9" > "11" lexicographically and young students were mis-routed.
try:
    age = int(input(f"{name}, how old are you? "))
except ValueError:
    sys.exit('Please enter your age as a number.')

if age != 11:
    if 12 <= age <= 17:
        print(f"Go to your house table, {name.title()}, you are already sorted.")
        sys.exit('Already sorted.')
    elif age > 17:
        print(f"You are not attending Hogwarts any more, {name.title()}...Why are you here?")
        sys.exit('Out of school, not attending Hogwarts.')
    else:
        print(f"You are too young, {name.title()}. Come back when you are 11 years old.")
        sys.exit(0)

# Points accumulated per house over the quiz.
scores = {'Gryffindor': 0, 'Hufflepuff': 0, 'Slytherin': 0, 'Ravenclaw': 0}

def award(points):
    """Add a {house: points} mapping to the running totals."""
    for house, pts in points.items():
        scores[house] += pts

Q1 = input(f'{name.title()}, there are four houses. [R]avenclaw, [H]ufflepuff, [G]ryffindor, and [S]lytherin. Which of these do you like? ')
award({'R': {'Ravenclaw': 1}, 'H': {'Hufflepuff': 1},
       'G': {'Gryffindor': 1}, 'S': {'Slytherin': 1}}.get(Q1, {}))

if input(f'{name.title()}, is your name Hermione Granger? y/n ') == "y":
    sys.exit("Please leave the school, you are not allowed entrance. ")

Q3 = input("What do you like to do the most on a wand? A. Unforgiveable curses. B. Charms. C. Tranfiguration. D. A variety. ")
if Q3 == "A":
    award({'Slytherin': 2})
elif Q3 == "B":
    award({'Ravenclaw': 2, 'Hufflepuff': 2})
elif Q3 == "C":
    award({'Gryffindor': 2})
else:
    award({'Ravenclaw': 2})
time.sleep(0.5)

Q4 = input(f'{name.title()}, what is your favorite animal? [B]adger, [S]nake, [L]ion, [E]agle. ')
if Q4 == "B":
    award({'Hufflepuff': 2})
elif Q4 == "S":
    award({'Slytherin': 2})
elif Q4 == "L":
    award({'Gryffindor': 2})
else:
    award({'Ravenclaw': 2})

Q5 = input(f"{name.title()}, who is your favorite character in Harry potter out of these: A. Harry. B. Luna. C. Cedric. D. Draco ")
if Q5 == "A":
    award({'Gryffindor': 2})
elif Q5 == "B":
    award({'Ravenclaw': 2})
elif Q5 == "C":
    award({'Hufflepuff': 2})
else:
    award({'Slytherin': 2})

Q6 = input(f"{name.title()} Are you: A. ambitious, B. Loyal. C. Brave, or D. Smart? ")
if Q6 == "A":
    award({'Slytherin': 2})
elif Q6 == "B":
    award({'Hufflepuff': 2})
elif Q6 == "C":
    award({'Gryffindor': 2})
else:
    award({'Ravenclaw': 2})

Q7 = input(f"{name.title()}, what would you do if you were being chased by a werewolf? [F]ight, [R]un, or [P]anic? ")
if Q7 == "F":
    award({'Gryffindor': 5})
else:
    award({'Slytherin': 1, 'Hufflepuff': 1, 'Ravenclaw': 1})

Q8 = input(f'{name.title()}, would you rather: [G]et a bad grade, [S]urrender, [B]etray someone, or [N]ot get what you want ')
if Q8 == "G":
    award({'Ravenclaw': -2})
elif Q8 == "S":
    award({'Gryffindor': -2})
elif Q8 == "B":
    award({'Hufflepuff': -2})
else:
    award({'Slytherin': -2})

time.sleep(0.5)
print("That's the end! Give me a moment to calculate your results!")
time.sleep(1.5)

# Determine the winner(s). The original's long boolean chains mixed
# comparisons with bare truthiness (e.g. "a != b and c and d"), which
# evaluated incorrectly; taking the max score fixes the tie handling.
best = max(scores.values())
winners = [house for house, pts in scores.items() if pts == best]
if len(winners) == 1:
    print(f"Congratulations, you are in {winners[0]}!")
elif len(winners) == 4:
    print("You have been chosen for all of the houses. Choose which one yourself.")
else:
    options = ' or '.join(f'[{house[0]}]{house[1:]}' for house in winners)
    pick = input(f'You have been chosen for {len(winners)} houses {name}, which do you prefer, {options}? ')
    # First letters (G/H/S/R) are unique; fall back to the last tied house.
    chosen = next((house for house in winners if house.startswith(pick)), winners[-1])
    print(f'Congratulations, you are in {chosen}!')

if input(f"{name.title()}, thats the end, but would you like to know how many points you got in each house?") == 'y':
    for house in ('Gryffindor', 'Hufflepuff', 'Ravenclaw', 'Slytherin'):
        print(f"\t{house}: {scores[house]}")
        time.sleep(0.5)
"sys.exit",
"time.sleep"
] | [((1419, 1434), 'time.sleep', 'time.sleep', (['(0.5)'], {}), '(0.5)\n', (1429, 1434), False, 'import time\n'), ((2558, 2573), 'time.sleep', 'time.sleep', (['(0.5)'], {}), '(0.5)\n', (2568, 2573), False, 'import time\n'), ((2643, 2658), 'time.sleep', 'time.sleep', (['(1.5)'], {}), '(1.5)\n', (2653, 2658), False, 'import time\n'), ((1085, 1152), 'sys.exit', 'sys.exit', (['"""Please leave the school, you are not allowed entrance. """'], {}), "('Please leave the school, you are not allowed entrance. ')\n", (1093, 1152), False, 'import sys\n'), ((7410, 7425), 'time.sleep', 'time.sleep', (['(0.5)'], {}), '(0.5)\n', (7420, 7425), False, 'import time\n'), ((7465, 7480), 'time.sleep', 'time.sleep', (['(0.5)'], {}), '(0.5)\n', (7475, 7480), False, 'import time\n'), ((7518, 7533), 'time.sleep', 'time.sleep', (['(0.5)'], {}), '(0.5)\n', (7528, 7533), False, 'import time\n'), ((333, 360), 'sys.exit', 'sys.exit', (['"""Already sorted."""'], {}), "('Already sorted.')\n", (341, 360), False, 'import sys\n'), ((666, 677), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (674, 677), False, 'import sys\n'), ((496, 546), 'sys.exit', 'sys.exit', (['"""Out of school, not attending Hogwarts."""'], {}), "('Out of school, not attending Hogwarts.')\n", (504, 546), False, 'import sys\n')] |
from setuptools import setup, find_packages
from hitomi.version import __version__
from setuptools.command.install import install
import sys
import os
def read_file(fname):
    """Return the text contents of *fname*, resolved relative to setup.py.

    :param fname: path relative to this file (absolute paths pass through).
    :return: file contents as a string
    """
    path = os.path.join(os.path.dirname(__file__), fname)
    with open(path, "r") as handle:
        return handle.read()
class VerifyVersionCommand(install):
    """Custom command to verify that the git tag matches our version"""

    description = "verify that the git tag matches our version"

    def run(self):
        """Abort the build when the CI tag disagrees with __version__."""
        tag = os.getenv("CIRCLE_TAG")
        if tag == __version__:
            return
        sys.exit(
            "Git tag: {0} does not match the version of this app: {1}".format(
                tag, __version__
            )
        )
setup(
name="hitomi",
version=__version__,
license="MIT",
author="<NAME>",
author_email="<EMAIL>",
long_description=read_file("Readme.md") if os.path.isfile("Readme.md") else "",
packages=find_packages(exclude=["contrib", "docs", "tests"]),
install_requires=read_file("requirements.txt").split("\n"),
long_description_content_type="text/markdown",
entry_points="""
[console_scripts]
hitomi=hitomi.main:main
""",
include_package_data=True,
package_data={
"": ["*.json"],
},
cmdclass={"verify": VerifyVersionCommand},
url="https://github.com/cleanunicorn/hitomi",
)
| [
"setuptools.find_packages",
"os.path.dirname",
"os.path.isfile",
"os.getenv",
"sys.exit"
] | [((596, 619), 'os.getenv', 'os.getenv', (['"""CIRCLE_TAG"""'], {}), "('CIRCLE_TAG')\n", (605, 619), False, 'import os\n'), ((1030, 1081), 'setuptools.find_packages', 'find_packages', ([], {'exclude': "['contrib', 'docs', 'tests']"}), "(exclude=['contrib', 'docs', 'tests'])\n", (1043, 1081), False, 'from setuptools import setup, find_packages\n'), ((797, 811), 'sys.exit', 'sys.exit', (['info'], {}), '(info)\n', (805, 811), False, 'import sys\n'), ((980, 1007), 'os.path.isfile', 'os.path.isfile', (['"""Readme.md"""'], {}), "('Readme.md')\n", (994, 1007), False, 'import os\n'), ((314, 339), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (329, 339), False, 'import os\n')] |
from datetime import datetime
from quickbooks.objects.term import Term
from tests.integration.test_base import QuickbooksTestCase
class TermTest(QuickbooksTestCase):
    """Integration tests for creating and updating QuickBooks Term objects."""

    def setUp(self):
        super().setUp()
        # Time-stamped name so repeated runs do not collide on the sandbox.
        stamp = datetime.now().strftime('%d%H%M')
        self.name = "Term {0}".format(stamp)

    def test_create(self):
        """A saved Term can be fetched back with the same fields."""
        created = Term()
        created.Name = self.name
        created.DueDays = 10
        created.save(qb=self.qb_client)
        fetched = Term.get(created.Id, qb=self.qb_client)
        self.assertEqual(fetched.Id, created.Id)
        self.assertEqual(fetched.Name, self.name)
        self.assertEqual(fetched.DueDays, 10)

    def test_update(self):
        """Changing DueDays on an existing Term persists after save."""
        existing = Term.all(max_results=1, qb=self.qb_client)[0]
        existing.DueDays = 60
        existing.save(qb=self.qb_client)
        fetched = Term.get(existing.Id, qb=self.qb_client)
        self.assertEqual(fetched.Id, existing.Id)
        self.assertEqual(fetched.DueDays, 60)
| [
"datetime.datetime.now",
"quickbooks.objects.term.Term.all",
"quickbooks.objects.term.Term.get",
"quickbooks.objects.term.Term"
] | [((345, 351), 'quickbooks.objects.term.Term', 'Term', ([], {}), '()\n', (349, 351), False, 'from quickbooks.objects.term import Term\n'), ((467, 503), 'quickbooks.objects.term.Term.get', 'Term.get', (['term.Id'], {'qb': 'self.qb_client'}), '(term.Id, qb=self.qb_client)\n', (475, 503), False, 'from quickbooks.objects.term import Term\n'), ((830, 866), 'quickbooks.objects.term.Term.get', 'Term.get', (['term.Id'], {'qb': 'self.qb_client'}), '(term.Id, qb=self.qb_client)\n', (838, 866), False, 'from quickbooks.objects.term import Term\n'), ((699, 741), 'quickbooks.objects.term.Term.all', 'Term.all', ([], {'max_results': '(1)', 'qb': 'self.qb_client'}), '(max_results=1, qb=self.qb_client)\n', (707, 741), False, 'from quickbooks.objects.term import Term\n'), ((267, 281), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (279, 281), False, 'from datetime import datetime\n')] |
#!/usr/bin/env python
# -*- coding: latin-1 -*-
#
# !/usr/bin/env python
# -*- coding: utf-8 -*-
"""Tests for `certifiable.complex.certify_tuple` method."""
import unittest
from mock import Mock
from certifiable import CertifierValueError, make_certifier
from certifiable.complex import certify_dict_schema, certify_iterable_schema
from certifiable.errors import CertifierParamError
from tests import Doh
class ComplexIterableSchemaTestCase(unittest.TestCase):
    """Tests for `certifiable.complex.certify_iterable_schema` method."""
    def setUp(self):
        """Set up test fixtures, if any."""
    def tearDown(self):
        """Tear down test fixtures, if any."""
    def test_schema_none_provided(self):
        # An empty schema certifies an empty iterable and returns it unchanged.
        required = True
        value = []
        self.assertEqual(
            certify_iterable_schema(
                value=value,
                schema=[],
                required=required,
            ),
            value,
        )
    def test_schema_lengths_different_not_enough_values(self):
        # Fewer values than schema entries must raise CertifierValueError.
        value = []
        required = True
        schema = tuple([
            None,
            None,
            None
        ])
        try:
            certify_iterable_schema(
                value=value,
                schema=schema,
                required=required,
            )
        except CertifierValueError as e:
            self.assertEqual(e.message, 'encountered -3 extra items')
            self.assertEqual(e.required, required)
            self.assertEqual(e.value, value)
        else:
            assert False
    def test_schema_lengths_different_too_many_values(self):
        # More values than schema entries must raise CertifierValueError.
        value = [
            1,
            2,
            3,
            4,
            5,
            6,
        ]
        required = True
        schema = tuple([
            None,
            None,
        ])
        try:
            certify_iterable_schema(
                value=value,
                schema=schema,
                required=required,
            )
        except CertifierValueError as e:
            self.assertEqual(e.message, 'encountered 4 extra items')
            self.assertEqual(e.required, required)
            self.assertEqual(e.value, value)
        else:
            assert False
    def test_certifier_error_reraised(self):
        # An exception raised by a per-item certifier is surfaced as a
        # CertifierValueError naming the failing item index.
        value = [
            'abc',
            'xyz',
        ]
        required = True
        @make_certifier()
        def x(value, **kwargs):
            raise Doh('bang: {value}'.format(
                value=value,
            ))
        schema = tuple([
            lambda value: None,
            x,
        ])
        try:
            certify_iterable_schema(
                value=value,
                schema=schema,
                required=required,
            )
        except CertifierValueError as e:
            self.assertEqual(e.message, 'invalid value \'xyz\' for item 1')
            self.assertEqual(e.required, required)
            self.assertEqual(e.value, value)
        else:
            assert False
class ComplexDictSchemaTestCase(unittest.TestCase):
    """Tests for `certifiable.complex.certify_dict_schema` method."""
    def setUp(self):
        """Set up test fixtures, if any."""
    def tearDown(self):
        """Tear down test fixtures, if any."""
    def test_no_schema_or_certifiers(self):
        # With no schema entries or certifiers the value is returned unchanged.
        value = {'a': 1, 'b': 2}
        schema = {}
        allow_extra = True
        self.assertEqual(
            certify_dict_schema(
                value=value,
                schema=schema,
                key_certifier=None,
                value_certifier=None,
                required=True,
                allow_extra=allow_extra,
            ),
            value,
        )
    def test_invalid_schema(self):
        # A non-dict schema is a parameter error, not a value error.
        value = {'a': 1, 'b': 2}
        schema = 123
        allow_extra = True
        self.assertRaises(
            CertifierParamError,
            certify_dict_schema,
            value=value,
            schema=schema,
            key_certifier=None,
            value_certifier=None,
            required=True,
            allow_extra=allow_extra,
        )
    def test_key_certifier(self):
        # The key certifier is applied to dict keys; its exception propagates
        # and it is invoked exactly once before failing.
        value = {'a': 1, 'b': 2}
        schema = None
        m = Mock()
        @make_certifier()
        def key_certifier(value):
            m(value)
            raise Doh()
        allow_extra = True
        self.assertRaises(
            Doh,
            certify_dict_schema,
            value=value,
            schema=schema,
            key_certifier=key_certifier,
            value_certifier=None,
            required=True,
            allow_extra=allow_extra,
        )
        self.assertEqual(m.call_count, 1)
    def test_value_certifier(self):
        # Same as above, but the certifier is applied to dict values.
        value = {'a': 1, 'b': 2}
        schema = None
        m = Mock()
        @make_certifier()
        def key_certifier(value):
            m(value)
            raise Doh()
        allow_extra = True
        self.assertRaises(
            Doh,
            certify_dict_schema,
            value=value,
            schema=schema,
            key_certifier=None,
            value_certifier=key_certifier,
            required=True,
            allow_extra=allow_extra,
        )
        self.assertEqual(m.call_count, 1)
    def test_extra_key_not_allowed(self):
        # A key outside the schema with allow_extra=False raises and reports
        # the unexpected key set.
        @make_certifier()
        def key_certifier(value):
            m(value)
        required = True
        value = {'a': 1, 'b': 2}
        schema = {
            'a': key_certifier,
        }
        m = Mock()
        allow_extra = False
        try:
            certify_dict_schema(
                value=value,
                schema=schema,
                key_certifier=None,
                value_certifier=None,
                required=required,
                allow_extra=allow_extra,
            )
        except CertifierValueError as e:
            self.assertEqual(e.message, 'encountered unexpected keys: set([\'b\'])')
            self.assertEqual(e.required, required)
            self.assertIn(e.value, [set(['b']), set(['b'])])
        else:
            assert False
        self.assertEqual(m.call_count, 1)
    def test_key_not_in_schema(self):
        # A schema key missing from the value raises before any certifier runs.
        @make_certifier()
        def key_certifier(value):
            m(value)
        required = True
        value = {'b': 2}
        schema = {
            'a': key_certifier,
        }
        m = Mock()
        allow_extra = False
        try:
            certify_dict_schema(
                value=value,
                schema=schema,
                key_certifier=None,
                value_certifier=None,
                required=required,
                allow_extra=allow_extra,
            )
        except CertifierValueError as e:
            self.assertEqual(e.message, 'key \'a\' missing from dictionary')
            self.assertEqual(e.required, required)
        else:
            assert False
        m.assert_not_called()
    def test_schema_ok(self):
        # Happy path: each schema certifier accepts its value without raising.
        @make_certifier()
        def value_certifier_2(value):
            m2(value)
            assert value == 2
        @make_certifier()
        def value_certifier_3(value):
            m3(value)
            assert value == 3
        @make_certifier()
        def value_certifier_4(value):
            m4(value)
            assert value == 4
        required = True
        value = {
            'a': 2,
            'b': 3,
            'c': 4,
        }
        schema = {
            'a': value_certifier_2,
            'b': value_certifier_3,
            'c': value_certifier_4,
        }
        m2 = Mock()
        m3 = Mock()
        m4 = Mock()
        certify_dict_schema(
            value=value,
            schema=schema,
            key_certifier=None,
            value_certifier=None,
            required=required,
        )
    def test_schema_fail(self):
        # A schema certifier raising propagates; it ran exactly once.
        @make_certifier()
        def value_certifier(value):
            m(value)
            assert value == 2
            raise Doh()
        required = True
        value = {
            'a': 2,
        }
        schema = {
            'a': value_certifier,
        }
        m = Mock()
        self.assertRaises(
            Doh,
            certify_dict_schema,
            value=value,
            schema=schema,
            key_certifier=None,
            value_certifier=None,
            required=required,
        )
        self.assertEqual(m.call_count, 1)
if __name__ == '__main__':
    unittest.main()
| [
"unittest.main",
"certifiable.make_certifier",
"certifiable.complex.certify_iterable_schema",
"certifiable.complex.certify_dict_schema",
"mock.Mock",
"tests.Doh"
] | [((8448, 8463), 'unittest.main', 'unittest.main', ([], {}), '()\n', (8461, 8463), False, 'import unittest\n'), ((2395, 2411), 'certifiable.make_certifier', 'make_certifier', ([], {}), '()\n', (2409, 2411), False, 'from certifiable import CertifierValueError, make_certifier\n'), ((4232, 4238), 'mock.Mock', 'Mock', ([], {}), '()\n', (4236, 4238), False, 'from mock import Mock\n'), ((4249, 4265), 'certifiable.make_certifier', 'make_certifier', ([], {}), '()\n', (4263, 4265), False, 'from certifiable import CertifierValueError, make_certifier\n'), ((4798, 4804), 'mock.Mock', 'Mock', ([], {}), '()\n', (4802, 4804), False, 'from mock import Mock\n'), ((4815, 4831), 'certifiable.make_certifier', 'make_certifier', ([], {}), '()\n', (4829, 4831), False, 'from certifiable import CertifierValueError, make_certifier\n'), ((5312, 5328), 'certifiable.make_certifier', 'make_certifier', ([], {}), '()\n', (5326, 5328), False, 'from certifiable import CertifierValueError, make_certifier\n'), ((5515, 5521), 'mock.Mock', 'Mock', ([], {}), '()\n', (5519, 5521), False, 'from mock import Mock\n'), ((6190, 6206), 'certifiable.make_certifier', 'make_certifier', ([], {}), '()\n', (6204, 6206), False, 'from certifiable import CertifierValueError, make_certifier\n'), ((6385, 6391), 'mock.Mock', 'Mock', ([], {}), '()\n', (6389, 6391), False, 'from mock import Mock\n'), ((6971, 6987), 'certifiable.make_certifier', 'make_certifier', ([], {}), '()\n', (6985, 6987), False, 'from certifiable import CertifierValueError, make_certifier\n'), ((7088, 7104), 'certifiable.make_certifier', 'make_certifier', ([], {}), '()\n', (7102, 7104), False, 'from certifiable import CertifierValueError, make_certifier\n'), ((7205, 7221), 'certifiable.make_certifier', 'make_certifier', ([], {}), '()\n', (7219, 7221), False, 'from certifiable import CertifierValueError, make_certifier\n'), ((7575, 7581), 'mock.Mock', 'Mock', ([], {}), '()\n', (7579, 7581), False, 'from mock import Mock\n'), ((7595, 7601), 
'mock.Mock', 'Mock', ([], {}), '()\n', (7599, 7601), False, 'from mock import Mock\n'), ((7615, 7621), 'mock.Mock', 'Mock', ([], {}), '()\n', (7619, 7621), False, 'from mock import Mock\n'), ((7631, 7743), 'certifiable.complex.certify_dict_schema', 'certify_dict_schema', ([], {'value': 'value', 'schema': 'schema', 'key_certifier': 'None', 'value_certifier': 'None', 'required': 'required'}), '(value=value, schema=schema, key_certifier=None,\n value_certifier=None, required=required)\n', (7650, 7743), False, 'from certifiable.complex import certify_dict_schema, certify_iterable_schema\n'), ((7853, 7869), 'certifiable.make_certifier', 'make_certifier', ([], {}), '()\n', (7867, 7869), False, 'from certifiable import CertifierValueError, make_certifier\n'), ((8129, 8135), 'mock.Mock', 'Mock', ([], {}), '()\n', (8133, 8135), False, 'from mock import Mock\n'), ((804, 870), 'certifiable.complex.certify_iterable_schema', 'certify_iterable_schema', ([], {'value': 'value', 'schema': '[]', 'required': 'required'}), '(value=value, schema=[], required=required)\n', (827, 870), False, 'from certifiable.complex import certify_dict_schema, certify_iterable_schema\n'), ((1187, 1257), 'certifiable.complex.certify_iterable_schema', 'certify_iterable_schema', ([], {'value': 'value', 'schema': 'schema', 'required': 'required'}), '(value=value, schema=schema, required=required)\n', (1210, 1257), False, 'from certifiable.complex import certify_dict_schema, certify_iterable_schema\n'), ((1870, 1940), 'certifiable.complex.certify_iterable_schema', 'certify_iterable_schema', ([], {'value': 'value', 'schema': 'schema', 'required': 'required'}), '(value=value, schema=schema, required=required)\n', (1893, 1940), False, 'from certifiable.complex import certify_dict_schema, certify_iterable_schema\n'), ((2644, 2714), 'certifiable.complex.certify_iterable_schema', 'certify_iterable_schema', ([], {'value': 'value', 'schema': 'schema', 'required': 'required'}), '(value=value, schema=schema, 
required=required)\n', (2667, 2714), False, 'from certifiable.complex import certify_dict_schema, certify_iterable_schema\n'), ((3457, 3590), 'certifiable.complex.certify_dict_schema', 'certify_dict_schema', ([], {'value': 'value', 'schema': 'schema', 'key_certifier': 'None', 'value_certifier': 'None', 'required': '(True)', 'allow_extra': 'allow_extra'}), '(value=value, schema=schema, key_certifier=None,\n value_certifier=None, required=True, allow_extra=allow_extra)\n', (3476, 3590), False, 'from certifiable.complex import certify_dict_schema, certify_iterable_schema\n'), ((4339, 4344), 'tests.Doh', 'Doh', ([], {}), '()\n', (4342, 4344), False, 'from tests import Doh\n'), ((4905, 4910), 'tests.Doh', 'Doh', ([], {}), '()\n', (4908, 4910), False, 'from tests import Doh\n'), ((5577, 5714), 'certifiable.complex.certify_dict_schema', 'certify_dict_schema', ([], {'value': 'value', 'schema': 'schema', 'key_certifier': 'None', 'value_certifier': 'None', 'required': 'required', 'allow_extra': 'allow_extra'}), '(value=value, schema=schema, key_certifier=None,\n value_certifier=None, required=required, allow_extra=allow_extra)\n', (5596, 5714), False, 'from certifiable.complex import certify_dict_schema, certify_iterable_schema\n'), ((6447, 6584), 'certifiable.complex.certify_dict_schema', 'certify_dict_schema', ([], {'value': 'value', 'schema': 'schema', 'key_certifier': 'None', 'value_certifier': 'None', 'required': 'required', 'allow_extra': 'allow_extra'}), '(value=value, schema=schema, key_certifier=None,\n value_certifier=None, required=required, allow_extra=allow_extra)\n', (6466, 6584), False, 'from certifiable.complex import certify_dict_schema, certify_iterable_schema\n'), ((7975, 7980), 'tests.Doh', 'Doh', ([], {}), '()\n', (7978, 7980), False, 'from tests import Doh\n')] |
# coding=utf-8
# Copyright 2021 The Dopamine Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Atari 100k rainbow agent with support for data augmentation."""
import functools
from absl import logging
from dopamine.jax import networks
from dopamine.jax.agents.full_rainbow import full_rainbow_agent
import gin
import jax
import jax.numpy as jnp
import tensorflow as tf
############################ Data Augmentation ############################
@functools.partial(jax.vmap, in_axes=(0, 0, 0, None))
def _crop_with_indices(img, x, y, cropped_shape):
cropped_image = (jax.lax.dynamic_slice(img, [x, y, 0], cropped_shape[1:]))
return cropped_image
def _per_image_random_crop(key, img, cropped_shape):
"""Random crop an image."""
batch_size, width, height = cropped_shape[:-1]
key_x, key_y = jax.random.split(key, 2)
x = jax.random.randint(
key_x, shape=(batch_size,), minval=0, maxval=img.shape[1] - width)
y = jax.random.randint(
key_y, shape=(batch_size,), minval=0, maxval=img.shape[2] - height)
return _crop_with_indices(img, x, y, cropped_shape)
def _intensity_aug(key, x, scale=0.05):
"""Follows the code in Schwarzer et al. (2020) for intensity augmentation."""
r = jax.random.normal(key, shape=(x.shape[0], 1, 1, 1))
noise = 1.0 + (scale * jnp.clip(r, -2.0, 2.0))
return x * noise
@jax.jit
def drq_image_augmentation(key, obs, img_pad=4):
"""Padding and cropping for DrQ."""
flat_obs = obs.reshape(-1, *obs.shape[-3:])
paddings = [(0, 0), (img_pad, img_pad), (img_pad, img_pad), (0, 0)]
cropped_shape = flat_obs.shape
# The reference uses ReplicationPad2d in pytorch, but it is not available
# in Jax. Use 'edge' instead.
flat_obs = jnp.pad(flat_obs, paddings, 'edge')
key1, key2 = jax.random.split(key, num=2)
cropped_obs = _per_image_random_crop(key2, flat_obs, cropped_shape)
# cropped_obs = _random_crop(key2, flat_obs, cropped_shape)
aug_obs = _intensity_aug(key1, cropped_obs)
return aug_obs.reshape(*obs.shape)
def preprocess_inputs_with_augmentation(x, data_augmentation=False, rng=None):
"""Input normalization and if specified, data augmentation."""
out = x.astype(jnp.float32) / 255.
if data_augmentation:
if rng is None:
raise ValueError('Pass rng when using data augmentation')
out = drq_image_augmentation(rng, out)
return out
@gin.configurable
class Atari100kRainbowAgent(full_rainbow_agent.JaxFullRainbowAgent):
"""A compact implementation of agents for Atari 100k."""
def __init__(self,
num_actions,
data_augmentation=False,
summary_writer=None,
network=networks.FullRainbowNetwork,
seed=None):
"""Creates the Rainbow-based agent for the Atari 100k benchmark.
On Atari 100k, an agent is evaluated after 100k environment steps, which
corresponds to 2-3 hours of game play, for training.
Args:
num_actions: int, number of actions the agent can take at any state.
data_augmentation: bool, whether to use data augmentation.
summary_writer: SummaryWriter object, for outputting training statistics.
network: flax.linen Module, neural network used by the agent initialized
by shape in _create_network below. See
dopamine.jax.networks.RainbowNetwork as an example.
seed: int, a seed for Jax RNG and initialization.
"""
super().__init__(
num_actions=num_actions,
preprocess_fn=preprocess_inputs_with_augmentation,
summary_writer=summary_writer,
network=network,
seed=seed)
logging.info('\t data_augmentation: %s', data_augmentation)
self._data_augmentation = data_augmentation
logging.info('\t data_augmentation: %s', data_augmentation)
# Preprocessing during training and evaluation can be possibly different,
# for example, when using data augmentation during training.
self.train_preprocess_fn = functools.partial(
preprocess_inputs_with_augmentation,
data_augmentation=data_augmentation)
def _training_step_update(self):
"""Gradient update during every training step."""
self._sample_from_replay_buffer()
self._rng, rng1, rng2 = jax.random.split(self._rng, num=3)
states = self.train_preprocess_fn(self.replay_elements['state'], rng=rng1)
next_states = self.train_preprocess_fn(
self.replay_elements['next_state'], rng=rng2)
if self._replay_scheme == 'prioritized':
probs = self.replay_elements['sampling_probabilities']
# Weight the loss by the inverse priorities.
loss_weights = 1.0 / jnp.sqrt(probs + 1e-10)
loss_weights /= jnp.max(loss_weights)
else:
# Uniform weights if not using prioritized replay.
loss_weights = jnp.ones(states.shape[0])
(self.optimizer_state, self.online_params,
loss, mean_loss, self._rng) = full_rainbow_agent.train(
self.network_def, self.online_params, self.target_network_params,
self.optimizer, self.optimizer_state, states,
self.replay_elements['action'], next_states,
self.replay_elements['reward'], self.replay_elements['terminal'],
loss_weights, self._support, self.cumulative_gamma, self._double_dqn,
self._distributional, self._rng)
if self._replay_scheme == 'prioritized':
self._replay.set_priority(self.replay_elements['indices'],
jnp.sqrt(loss + 1e-10))
if self.summary_writer is not None:
summary = tf.compat.v1.Summary(value=[
tf.compat.v1.Summary.Value(
tag='CrossEntropyLoss', simple_value=mean_loss)
])
self.summary_writer.add_summary(summary, self.training_steps)
| [
"functools.partial",
"jax.random.normal",
"jax.numpy.pad",
"jax.lax.dynamic_slice",
"jax.numpy.max",
"absl.logging.info",
"dopamine.jax.agents.full_rainbow.full_rainbow_agent.train",
"jax.random.randint",
"tensorflow.compat.v1.Summary.Value",
"jax.numpy.clip",
"jax.numpy.ones",
"jax.numpy.sqrt... | [((961, 1013), 'functools.partial', 'functools.partial', (['jax.vmap'], {'in_axes': '(0, 0, 0, None)'}), '(jax.vmap, in_axes=(0, 0, 0, None))\n', (978, 1013), False, 'import functools\n'), ((1083, 1139), 'jax.lax.dynamic_slice', 'jax.lax.dynamic_slice', (['img', '[x, y, 0]', 'cropped_shape[1:]'], {}), '(img, [x, y, 0], cropped_shape[1:])\n', (1104, 1139), False, 'import jax\n'), ((1315, 1339), 'jax.random.split', 'jax.random.split', (['key', '(2)'], {}), '(key, 2)\n', (1331, 1339), False, 'import jax\n'), ((1346, 1436), 'jax.random.randint', 'jax.random.randint', (['key_x'], {'shape': '(batch_size,)', 'minval': '(0)', 'maxval': '(img.shape[1] - width)'}), '(key_x, shape=(batch_size,), minval=0, maxval=img.shape[1\n ] - width)\n', (1364, 1436), False, 'import jax\n'), ((1445, 1536), 'jax.random.randint', 'jax.random.randint', (['key_y'], {'shape': '(batch_size,)', 'minval': '(0)', 'maxval': '(img.shape[2] - height)'}), '(key_y, shape=(batch_size,), minval=0, maxval=img.shape[2\n ] - height)\n', (1463, 1536), False, 'import jax\n'), ((1721, 1772), 'jax.random.normal', 'jax.random.normal', (['key'], {'shape': '(x.shape[0], 1, 1, 1)'}), '(key, shape=(x.shape[0], 1, 1, 1))\n', (1738, 1772), False, 'import jax\n'), ((2209, 2244), 'jax.numpy.pad', 'jnp.pad', (['flat_obs', 'paddings', '"""edge"""'], {}), "(flat_obs, paddings, 'edge')\n", (2216, 2244), True, 'import jax.numpy as jnp\n'), ((2260, 2288), 'jax.random.split', 'jax.random.split', (['key'], {'num': '(2)'}), '(key, num=2)\n', (2276, 2288), False, 'import jax\n'), ((4089, 4148), 'absl.logging.info', 'logging.info', (['"""\t data_augmentation: %s"""', 'data_augmentation'], {}), "('\\t data_augmentation: %s', data_augmentation)\n", (4101, 4148), False, 'from absl import logging\n'), ((4201, 4260), 'absl.logging.info', 'logging.info', (['"""\t data_augmentation: %s"""', 'data_augmentation'], {}), "('\\t data_augmentation: %s', data_augmentation)\n", (4213, 4260), False, 'from absl import 
logging\n'), ((4435, 4531), 'functools.partial', 'functools.partial', (['preprocess_inputs_with_augmentation'], {'data_augmentation': 'data_augmentation'}), '(preprocess_inputs_with_augmentation, data_augmentation=\n data_augmentation)\n', (4452, 4531), False, 'import functools\n'), ((4701, 4735), 'jax.random.split', 'jax.random.split', (['self._rng'], {'num': '(3)'}), '(self._rng, num=3)\n', (4717, 4735), False, 'import jax\n'), ((5363, 5736), 'dopamine.jax.agents.full_rainbow.full_rainbow_agent.train', 'full_rainbow_agent.train', (['self.network_def', 'self.online_params', 'self.target_network_params', 'self.optimizer', 'self.optimizer_state', 'states', "self.replay_elements['action']", 'next_states', "self.replay_elements['reward']", "self.replay_elements['terminal']", 'loss_weights', 'self._support', 'self.cumulative_gamma', 'self._double_dqn', 'self._distributional', 'self._rng'], {}), "(self.network_def, self.online_params, self.\n target_network_params, self.optimizer, self.optimizer_state, states,\n self.replay_elements['action'], next_states, self.replay_elements[\n 'reward'], self.replay_elements['terminal'], loss_weights, self.\n _support, self.cumulative_gamma, self._double_dqn, self._distributional,\n self._rng)\n", (5387, 5736), False, 'from dopamine.jax.agents.full_rainbow import full_rainbow_agent\n'), ((1798, 1820), 'jax.numpy.clip', 'jnp.clip', (['r', '(-2.0)', '(2.0)'], {}), '(r, -2.0, 2.0)\n', (1806, 1820), True, 'import jax.numpy as jnp\n'), ((5144, 5165), 'jax.numpy.max', 'jnp.max', (['loss_weights'], {}), '(loss_weights)\n', (5151, 5165), True, 'import jax.numpy as jnp\n'), ((5254, 5279), 'jax.numpy.ones', 'jnp.ones', (['states.shape[0]'], {}), '(states.shape[0])\n', (5262, 5279), True, 'import jax.numpy as jnp\n'), ((5098, 5121), 'jax.numpy.sqrt', 'jnp.sqrt', (['(probs + 1e-10)'], {}), '(probs + 1e-10)\n', (5106, 5121), True, 'import jax.numpy as jnp\n'), ((5912, 5934), 'jax.numpy.sqrt', 'jnp.sqrt', (['(loss + 1e-10)'], {}), '(loss + 
1e-10)\n', (5920, 5934), True, 'import jax.numpy as jnp\n'), ((6032, 6106), 'tensorflow.compat.v1.Summary.Value', 'tf.compat.v1.Summary.Value', ([], {'tag': '"""CrossEntropyLoss"""', 'simple_value': 'mean_loss'}), "(tag='CrossEntropyLoss', simple_value=mean_loss)\n", (6058, 6106), True, 'import tensorflow as tf\n')] |
import json
import logging
import os
from django.core.exceptions import ObjectDoesNotExist
from django.http import HttpResponse
from django.conf import settings
from django.views.decorators.csrf import csrf_exempt
from ..models import DiscordRole, EmailRole, Registration
logger = logging.getLogger(__name__)
REGISTRATION_WEBHOOK_TOKEN = settings.REGISTRATION_WEBHOOK_TOKEN
@csrf_exempt
def webhook_handler(request):
if not REGISTRATION_WEBHOOK_TOKEN:
raise Exception("No webook token set")
if request.META.get("HTTP_AUTHORIZATION") != f"Bearer {REGISTRATION_WEBHOOK_TOKEN}":
return HttpResponse("Unauthorized", status=401)
payload = json.loads(request.body.decode("utf-8"))
logger.warning(payload)
default_roles = DiscordRole.objects.filter(assign_by_default=True)
try:
email_role = EmailRole.objects.get(email__iexact=payload["email"])
except ObjectDoesNotExist:
email_role = EmailRole(email=payload["email"].lower())
email_role.save()
logger.error(email_role)
email_role.discord_roles.add(*default_roles)
email_role.save()
registration = Registration(email=email_role, reference_id=payload["reference_id"])
registration.save()
return HttpResponse(status=201)
| [
"django.http.HttpResponse",
"logging.getLogger"
] | [((285, 312), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (302, 312), False, 'import logging\n'), ((1240, 1264), 'django.http.HttpResponse', 'HttpResponse', ([], {'status': '(201)'}), '(status=201)\n', (1252, 1264), False, 'from django.http import HttpResponse\n'), ((613, 653), 'django.http.HttpResponse', 'HttpResponse', (['"""Unauthorized"""'], {'status': '(401)'}), "('Unauthorized', status=401)\n", (625, 653), False, 'from django.http import HttpResponse\n')] |
# -*- coding: utf-8 -*-
"""
Provide asynchronous writers for stdout and stderr
Use code from [aioconsole][https://github.com/vxgmichel/aioconsole],
stream.py
"""
import asyncio
import os
import platform
import stat
from typing import Union
StringOfBytes = Union[str, bytes]
def is_pipe_transport_compatible(pipe) -> bool:
"""Check is pipe compatible with transport
Taken from aioconsole/stream.py
"""
if platform.system() == 'Windows':
return False
try:
file_no = pipe.fileno()
except OSError:
return False
mode = os.fstat(file_no).st_mode
is_char = stat.S_ISCHR(mode)
is_fifo = stat.S_ISFIFO(mode)
is_socket = stat.S_ISSOCK(mode)
if not (is_char or is_fifo or is_socket):
return False
return True
class DSNonFileStreamReader(object):
""" Asynchronous reader from stream
Based on aioconsole.stream.NonFileStreamReader
Use run_in_executor
"""
def __init__(
self,
stream,
*,
loop: asyncio.AbstractEventLoop = None
) -> None:
"""Create async stream reader."""
if loop is None:
loop = asyncio.get_event_loop()
self.loop = loop
self.stream = stream
self.eof = False
def at_eof(self) -> bool:
"""Is it at eof."""
return self.eof
async def readline(self) -> bytes:
"""Async read a line from stream."""
data = await self.loop.run_in_executor(None, self.stream.readline)
if isinstance(data, str):
data = data.encode()
self.eof = not data
return data
async def read(self, n: int = -1) -> bytes:
"""Async read n bytes from stream."""
data = await self.loop.run_in_executor(None, self.stream.read, n)
if isinstance(data, str):
data = data.encode()
self.eof = not data
return data
class DSNonFileStreamWriter(object):
""" Asynchronous writer to stream
Based on aioconsole.stream.NonFileStreamWriter
Use run_in_executor
"""
def __init__(
self,
stream,
*,
loop: asyncio.AbstractEventLoop = None
) -> None:
"""Create async stream writer."""
if loop is None:
loop = asyncio.get_event_loop()
self.loop = loop
self.stream = stream
def write(self, data: StringOfBytes) -> None:
"""Write data to stream."""
if isinstance(data, bytes):
data = data.decode()
self.stream.write(data)
async def drain(self) -> None:
"""Async flush stream in loop."""
if hasattr(self.stream, 'flush'):
await self.loop.run_in_executor(None, self.stream.flush)
else:
pass
def close(self):
pass
| [
"asyncio.get_event_loop",
"stat.S_ISCHR",
"stat.S_ISSOCK",
"stat.S_ISFIFO",
"platform.system",
"os.fstat"
] | [((613, 631), 'stat.S_ISCHR', 'stat.S_ISCHR', (['mode'], {}), '(mode)\n', (625, 631), False, 'import stat\n'), ((646, 665), 'stat.S_ISFIFO', 'stat.S_ISFIFO', (['mode'], {}), '(mode)\n', (659, 665), False, 'import stat\n'), ((682, 701), 'stat.S_ISSOCK', 'stat.S_ISSOCK', (['mode'], {}), '(mode)\n', (695, 701), False, 'import stat\n'), ((427, 444), 'platform.system', 'platform.system', ([], {}), '()\n', (442, 444), False, 'import platform\n'), ((573, 590), 'os.fstat', 'os.fstat', (['file_no'], {}), '(file_no)\n', (581, 590), False, 'import os\n'), ((1156, 1180), 'asyncio.get_event_loop', 'asyncio.get_event_loop', ([], {}), '()\n', (1178, 1180), False, 'import asyncio\n'), ((2271, 2295), 'asyncio.get_event_loop', 'asyncio.get_event_loop', ([], {}), '()\n', (2293, 2295), False, 'import asyncio\n')] |
import pytest
from apispec.exceptions import APISpecError
def test_undecorated_view(app, spec):
def gist_detail(gist_id):
'''
---
get:
responses:
200:
schema:
$ref: '#/definitions/Gist'
'''
pass
with pytest.raises(APISpecError):
spec.add_path(app=app, view=gist_detail)
def test_wrong_path(app, spec):
@app.route('/gists/{gist_id}', methods=['GET'])
def gist_detail(gist_id):
'''
---
get:
responses:
200:
schema:
$ref: '#/definitions/Gist'
'''
pass
with pytest.raises(APISpecError):
spec.add_path(app=app, view=gist_detail, path='/foo')
def test_add_missing_op(app, spec):
@app.route('/gists/{gist_id}', methods=['GET'])
def gist_detail(gist_id):
'''
---
get:
responses:
200:
schema:
$ref: '#/definitions/Gist'
'''
pass
with pytest.raises(APISpecError):
spec.add_path(app=app, view=gist_detail, path='/foo', operations={'delete': {}})
def test_no_match_docstring_ops_route_methods(app, spec):
@app.route('/gists/{gist_id}', methods=['GET'])
def gist_detail(gist_id):
'''
---
get:
responses:
200:
schema:
$ref: '#/definitions/Gist'
delete:
responses:
204:
schema:
$ref: '#/definitions/Empty'
'''
pass
spec.add_path(app=app, view=gist_detail)
assert '/gists/{gist_id}' in spec._paths
assert 'get' in spec._paths['/gists/{gist_id}']
assert spec._paths['/gists/{gist_id}']['get'] == {
'responses': {
200: {
'schema': {
'$ref': '#/definitions/Gist'
}
}
}
}
assert 'delete' not in spec._paths['/gists/{gist_id}']
| [
"pytest.raises"
] | [((328, 355), 'pytest.raises', 'pytest.raises', (['APISpecError'], {}), '(APISpecError)\n', (341, 355), False, 'import pytest\n'), ((722, 749), 'pytest.raises', 'pytest.raises', (['APISpecError'], {}), '(APISpecError)\n', (735, 749), False, 'import pytest\n'), ((1133, 1160), 'pytest.raises', 'pytest.raises', (['APISpecError'], {}), '(APISpecError)\n', (1146, 1160), False, 'import pytest\n')] |
from unittest import main
from ... import dpo7104
from .. import mock_dpo7104
from ...tests.server.test_dpo7104 import DPO7104Test
# Don't lose the real device.
real_DPO7104 = dpo7104.DPO7104
is_mock = DPO7104Test.mock
def setup():
# Run the tests with a fake device.
dpo7104.DPO7104 = mock_dpo7104.MockDPO7104
DPO7104Test.mock = True
def teardown():
# Restore the real device for any remaining tests.
dpo7104.DPO7104 = real_DPO7104
DPO7104Test.mock = is_mock
if __name__ == '__main__':
main()
| [
"unittest.main"
] | [((503, 509), 'unittest.main', 'main', ([], {}), '()\n', (507, 509), False, 'from unittest import main\n')] |
import pytest
import numpy as np
import pandas as pd
import pygdf
import dask
import dask_gdf as dgd
@pytest.mark.parametrize('by', ['a', 'b'])
@pytest.mark.parametrize('nelem', [10, 100, 1000])
@pytest.mark.parametrize('nparts', [1, 2, 5, 10])
def test_sort_values(nelem, nparts, by):
df = pygdf.DataFrame()
df['a'] = np.ascontiguousarray(np.arange(nelem)[::-1])
df['b'] = np.arange(100, nelem + 100)
ddf = dgd.from_pygdf(df, npartitions=nparts)
got = ddf.sort_values(by=by).compute().to_pandas()
expect = df.sort_values(by=by).to_pandas().reset_index(drop=True)
pd.util.testing.assert_frame_equal(got, expect)
def test_sort_values_binned():
np.random.seed(43)
nelem = 100
nparts = 5
by = 'a'
df = pygdf.DataFrame()
df['a'] = np.random.randint(1, 5, nelem)
ddf = dgd.from_pygdf(df, npartitions=nparts)
parts = ddf.sort_values_binned(by=by).to_delayed()
part_uniques = []
for i, p in enumerate(parts):
part = dask.compute(p)[0]
part_uniques.append(set(part.a.unique()))
# Partitions do not have intersecting keys
for i in range(len(part_uniques)):
for j in range(i + 1, len(part_uniques)):
assert not (part_uniques[i] & part_uniques[j]), \
"should have empty intersection"
| [
"pandas.util.testing.assert_frame_equal",
"numpy.random.seed",
"pygdf.DataFrame",
"numpy.random.randint",
"numpy.arange",
"dask.compute",
"pytest.mark.parametrize",
"dask_gdf.from_pygdf"
] | [((106, 147), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""by"""', "['a', 'b']"], {}), "('by', ['a', 'b'])\n", (129, 147), False, 'import pytest\n'), ((149, 198), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""nelem"""', '[10, 100, 1000]'], {}), "('nelem', [10, 100, 1000])\n", (172, 198), False, 'import pytest\n'), ((200, 248), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""nparts"""', '[1, 2, 5, 10]'], {}), "('nparts', [1, 2, 5, 10])\n", (223, 248), False, 'import pytest\n'), ((299, 316), 'pygdf.DataFrame', 'pygdf.DataFrame', ([], {}), '()\n', (314, 316), False, 'import pygdf\n'), ((390, 417), 'numpy.arange', 'np.arange', (['(100)', '(nelem + 100)'], {}), '(100, nelem + 100)\n', (399, 417), True, 'import numpy as np\n'), ((428, 466), 'dask_gdf.from_pygdf', 'dgd.from_pygdf', (['df'], {'npartitions': 'nparts'}), '(df, npartitions=nparts)\n', (442, 466), True, 'import dask_gdf as dgd\n'), ((597, 644), 'pandas.util.testing.assert_frame_equal', 'pd.util.testing.assert_frame_equal', (['got', 'expect'], {}), '(got, expect)\n', (631, 644), True, 'import pandas as pd\n'), ((682, 700), 'numpy.random.seed', 'np.random.seed', (['(43)'], {}), '(43)\n', (696, 700), True, 'import numpy as np\n'), ((754, 771), 'pygdf.DataFrame', 'pygdf.DataFrame', ([], {}), '()\n', (769, 771), False, 'import pygdf\n'), ((786, 816), 'numpy.random.randint', 'np.random.randint', (['(1)', '(5)', 'nelem'], {}), '(1, 5, nelem)\n', (803, 816), True, 'import numpy as np\n'), ((827, 865), 'dask_gdf.from_pygdf', 'dgd.from_pygdf', (['df'], {'npartitions': 'nparts'}), '(df, npartitions=nparts)\n', (841, 865), True, 'import dask_gdf as dgd\n'), ((352, 368), 'numpy.arange', 'np.arange', (['nelem'], {}), '(nelem)\n', (361, 368), True, 'import numpy as np\n'), ((993, 1008), 'dask.compute', 'dask.compute', (['p'], {}), '(p)\n', (1005, 1008), False, 'import dask\n')] |
# coding=utf-8
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import re
from tensorflow.python.platform import gfile
"""
Utilities for downloading data from WMT, tokenizing, vocabularies.
"""
# Special vocabulary symbols - we always put them at the start.
_PAD = b"_PAD"
_GO = b"_GO"
_EOS = b"_EOS"
_UNK = b"_UNK"
_START_VOCAB = [_PAD, _GO, _EOS, _UNK]
PAD_ID = 0
GO_ID = 1
EOS_ID = 2
UNK_ID = 3
# Regular expressions used to tokenize.
_WORD_SPLIT = re.compile(b"([.,!?\"':;)(])")
_DIGIT_RE = re.compile(br"\d")
def basic_tokenizer(sentence):
"""Very basic tokenizer: split the sentence into a list of tokens."""
words = []
for space_separated_fragment in sentence.strip().split():
if isinstance(space_separated_fragment, str): # if space_separated_fragment is a str, return True
# str.encode(encoding=”utf-8”, errors=”strict”), Return an encoded version of the string as a bytes
# object. Default encoding is 'utf-8'. errors may be given to set a different error handling scheme.
# The default for errors is 'strict', meaning that encoding errors raise a UnicodeError. Other
# possible values are 'ignore', 'replace', 'xmlcharrefreplace', 'backslashreplace'
word = str.encode(space_separated_fragment, encoding='utf-8', errors='ignore')
else:
word = space_separated_fragment
words.extend(re.split(_WORD_SPLIT, word))
return [w for w in words if w]
def create_vocabulary(vocabulary_path, data_path, max_vocabulary_size, tokenizer=None, normalize_digits=True):
if not gfile.Exists(vocabulary_path):
print("Creating vocabulary %s from %s" % (vocabulary_path, data_path))
vocab = {}
with gfile.GFile(data_path, mode="rb") as f:
counter = 0
for line in f:
counter += 1
if counter % 100000 == 0:
print(" processing line %d" % counter)
tokens = tokenizer(line) if tokenizer else basic_tokenizer(line)
for w in tokens:
word = re.sub(_DIGIT_RE, b"0", w) if normalize_digits else w # replace all digits with just one "0"
if word in vocab:
vocab[word] += 1
else:
vocab[word] = 1
# the sorted() function accepts any iterable
# e.g.: sorted({1: 'D', 2: 'B', 3: 'B', 4: 'E', 5: 'A'})
# out: [1, 2, 3, 4, 5]
# say, given a dict, it will return a list of sorted key (default), only compare the keys
# by setting key=vocab.get, it will compare by the values, and return the sorted keys.
vocab_list = _START_VOCAB + sorted(vocab, key=vocab.get, reverse=True)
print('>> Full Vocabulary Size: ', len(vocab_list))
if len(vocab_list) > max_vocabulary_size:
# cut down rare words (words who rank after max_vocabulary_size)
vocab_list = vocab_list[:max_vocabulary_size]
with gfile.GFile(vocabulary_path, mode="wb") as vocab_file:
for w in vocab_list:
vocab_file.write(w + b"\n")
def initialize_vocabulary(vocabulary_path):
if gfile.Exists(vocabulary_path):
rev_vocab = []
with gfile.GFile(vocabulary_path, mode="rb") as f:
rev_vocab.extend(f.readlines())
rev_vocab = [line.strip() for line in rev_vocab]
# enumerate will give a index and a value of rev_vocab
vocab = dict([(x, y) for (y, x) in enumerate(rev_vocab)])
return vocab, rev_vocab
else:
raise ValueError("Vocabulary file %s not found.", vocabulary_path)
def sentence_to_token_ids(sentence, vocabulary, tokenizer=None, normalize_digits=True):
"""convert a sentence to token ids"""
if tokenizer:
words = tokenizer(sentence)
else:
words = basic_tokenizer(sentence)
if not normalize_digits:
# obtain index of w in vocabulary, if not found in vocabulary, return UNK_ID
return [vocabulary.get(w, UNK_ID) for w in words]
# Normalize digits by 0 before looking words up in the vocabulary.
return [vocabulary.get(re.sub(_DIGIT_RE, b"0", w), UNK_ID) for w in words]
def data_to_token_ids(data_path, target_path, vocabulary_path, tokenizer=None, normalize_digits=True):
"""convert all sentences in dataset to token ids"""
if not gfile.Exists(target_path):
print("Tokenizing data in %s" % data_path)
vocab, _ = initialize_vocabulary(vocabulary_path)
with gfile.GFile(data_path, mode="rb") as data_file:
with gfile.GFile(target_path, mode="w") as tokens_file:
counter = 0
for line in data_file:
counter += 1
if counter % 100000 == 0:
print(" tokenizing line %d..." % counter)
token_ids = sentence_to_token_ids(line, vocab, tokenizer, normalize_digits)
tokens_file.write(" ".join([str(tok) for tok in token_ids]) + "\n")
def prepare_custom_data(working_directory, train_enc, train_dec, test_enc, test_dec, enc_vocabulary_size,
dec_vocabulary_size, tokenizer=None):
# Create vocabularies of the appropriate sizes.
enc_vocab_path = os.path.join(working_directory, "vocab%d.enc" % enc_vocabulary_size)
dec_vocab_path = os.path.join(working_directory, "vocab%d.dec" % dec_vocabulary_size)
create_vocabulary(enc_vocab_path, train_enc, enc_vocabulary_size, tokenizer)
create_vocabulary(dec_vocab_path, train_dec, dec_vocabulary_size, tokenizer)
# Create token ids for the training data.
enc_train_ids_path = train_enc + (".ids%d" % enc_vocabulary_size)
dec_train_ids_path = train_dec + (".ids%d" % dec_vocabulary_size)
data_to_token_ids(train_enc, enc_train_ids_path, enc_vocab_path, tokenizer)
data_to_token_ids(train_dec, dec_train_ids_path, dec_vocab_path, tokenizer)
# Create token ids for the development data.
enc_dev_ids_path = test_enc + (".ids%d" % enc_vocabulary_size)
dec_dev_ids_path = test_dec + (".ids%d" % dec_vocabulary_size)
data_to_token_ids(test_enc, enc_dev_ids_path, enc_vocab_path, tokenizer)
data_to_token_ids(test_dec, dec_dev_ids_path, dec_vocab_path, tokenizer)
return enc_train_ids_path, dec_train_ids_path, enc_dev_ids_path, dec_dev_ids_path, enc_vocab_path, dec_vocab_path
| [
"tensorflow.python.platform.gfile.GFile",
"re.split",
"tensorflow.python.platform.gfile.Exists",
"os.path.join",
"re.sub",
"re.compile"
] | [((528, 558), 're.compile', 're.compile', (['b\'([.,!?"\\\':;)(])\''], {}), '(b\'([.,!?"\\\':;)(])\')\n', (538, 558), False, 'import re\n'), ((571, 589), 're.compile', 're.compile', (["b'\\\\d'"], {}), "(b'\\\\d')\n", (581, 589), False, 'import re\n'), ((3326, 3355), 'tensorflow.python.platform.gfile.Exists', 'gfile.Exists', (['vocabulary_path'], {}), '(vocabulary_path)\n', (3338, 3355), False, 'from tensorflow.python.platform import gfile\n'), ((5423, 5491), 'os.path.join', 'os.path.join', (['working_directory', "('vocab%d.enc' % enc_vocabulary_size)"], {}), "(working_directory, 'vocab%d.enc' % enc_vocabulary_size)\n", (5435, 5491), False, 'import os\n'), ((5513, 5581), 'os.path.join', 'os.path.join', (['working_directory', "('vocab%d.dec' % dec_vocabulary_size)"], {}), "(working_directory, 'vocab%d.dec' % dec_vocabulary_size)\n", (5525, 5581), False, 'import os\n'), ((1666, 1695), 'tensorflow.python.platform.gfile.Exists', 'gfile.Exists', (['vocabulary_path'], {}), '(vocabulary_path)\n', (1678, 1695), False, 'from tensorflow.python.platform import gfile\n'), ((4518, 4543), 'tensorflow.python.platform.gfile.Exists', 'gfile.Exists', (['target_path'], {}), '(target_path)\n', (4530, 4543), False, 'from tensorflow.python.platform import gfile\n'), ((1478, 1505), 're.split', 're.split', (['_WORD_SPLIT', 'word'], {}), '(_WORD_SPLIT, word)\n', (1486, 1505), False, 'import re\n'), ((1808, 1841), 'tensorflow.python.platform.gfile.GFile', 'gfile.GFile', (['data_path'], {'mode': '"""rb"""'}), "(data_path, mode='rb')\n", (1819, 1841), False, 'from tensorflow.python.platform import gfile\n'), ((3393, 3432), 'tensorflow.python.platform.gfile.GFile', 'gfile.GFile', (['vocabulary_path'], {'mode': '"""rb"""'}), "(vocabulary_path, mode='rb')\n", (3404, 3432), False, 'from tensorflow.python.platform import gfile\n'), ((4294, 4320), 're.sub', 're.sub', (['_DIGIT_RE', "b'0'", 'w'], {}), "(_DIGIT_RE, b'0', w)\n", (4300, 4320), False, 'import re\n'), ((4667, 4700), 
'tensorflow.python.platform.gfile.GFile', 'gfile.GFile', (['data_path'], {'mode': '"""rb"""'}), "(data_path, mode='rb')\n", (4678, 4700), False, 'from tensorflow.python.platform import gfile\n'), ((3133, 3172), 'tensorflow.python.platform.gfile.GFile', 'gfile.GFile', (['vocabulary_path'], {'mode': '"""wb"""'}), "(vocabulary_path, mode='wb')\n", (3144, 3172), False, 'from tensorflow.python.platform import gfile\n'), ((4732, 4766), 'tensorflow.python.platform.gfile.GFile', 'gfile.GFile', (['target_path'], {'mode': '"""w"""'}), "(target_path, mode='w')\n", (4743, 4766), False, 'from tensorflow.python.platform import gfile\n'), ((2171, 2197), 're.sub', 're.sub', (['_DIGIT_RE', "b'0'", 'w'], {}), "(_DIGIT_RE, b'0', w)\n", (2177, 2197), False, 'import re\n')] |
# -*- coding: utf-8 -*-
# Copyright 2018 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import itertools
from six.moves import zip
import attr
from synapse.api.constants import EventTypes, JoinRules, Membership
from synapse.api.room_versions import RoomVersions
from synapse.event_auth import auth_types_for_event
from synapse.events import FrozenEvent
from synapse.state.v2 import lexicographical_topological_sort, resolve_events_with_store
from synapse.types import EventID
from tests import unittest
ALICE = "@alice:example.com"
BOB = "@bob:example.com"
CHARLIE = "@charlie:example.com"
EVELYN = "@evelyn:example.com"
ZARA = "@zara:example.com"
ROOM_ID = "!test:example.com"
MEMBERSHIP_CONTENT_JOIN = {"membership": Membership.JOIN}
MEMBERSHIP_CONTENT_BAN = {"membership": Membership.BAN}
ORIGIN_SERVER_TS = 0
class FakeEvent(object):
"""A fake event we use as a convenience.
NOTE: Again as a convenience we use "node_ids" rather than event_ids to
refer to events. The event_id has node_id as localpart and example.com
as domain.
"""
def __init__(self, id, sender, type, state_key, content):
self.node_id = id
self.event_id = EventID(id, "example.com").to_string()
self.sender = sender
self.type = type
self.state_key = state_key
self.content = content
def to_event(self, auth_events, prev_events):
"""Given the auth_events and prev_events, convert to a Frozen Event
Args:
auth_events (list[str]): list of event_ids
prev_events (list[str]): list of event_ids
Returns:
FrozenEvent
"""
global ORIGIN_SERVER_TS
ts = ORIGIN_SERVER_TS
ORIGIN_SERVER_TS = ORIGIN_SERVER_TS + 1
event_dict = {
"auth_events": [(a, {}) for a in auth_events],
"prev_events": [(p, {}) for p in prev_events],
"event_id": self.node_id,
"sender": self.sender,
"type": self.type,
"content": self.content,
"origin_server_ts": ts,
"room_id": ROOM_ID,
}
if self.state_key is not None:
event_dict["state_key"] = self.state_key
return FrozenEvent(event_dict)
# All graphs start with this set of events
INITIAL_EVENTS = [
FakeEvent(
id="CREATE",
sender=ALICE,
type=EventTypes.Create,
state_key="",
content={"creator": ALICE},
),
FakeEvent(
id="IMA",
sender=ALICE,
type=EventTypes.Member,
state_key=ALICE,
content=MEMBERSHIP_CONTENT_JOIN,
),
FakeEvent(
id="IPOWER",
sender=ALICE,
type=EventTypes.PowerLevels,
state_key="",
content={"users": {ALICE: 100}},
),
FakeEvent(
id="IJR",
sender=ALICE,
type=EventTypes.JoinRules,
state_key="",
content={"join_rule": JoinRules.PUBLIC},
),
FakeEvent(
id="IMB",
sender=BOB,
type=EventTypes.Member,
state_key=BOB,
content=MEMBERSHIP_CONTENT_JOIN,
),
FakeEvent(
id="IMC",
sender=CHARLIE,
type=EventTypes.Member,
state_key=CHARLIE,
content=MEMBERSHIP_CONTENT_JOIN,
),
FakeEvent(
id="IMZ",
sender=ZARA,
type=EventTypes.Member,
state_key=ZARA,
content=MEMBERSHIP_CONTENT_JOIN,
),
FakeEvent(
id="START", sender=ZARA, type=EventTypes.Message, state_key=None, content={}
),
FakeEvent(
id="END", sender=ZARA, type=EventTypes.Message, state_key=None, content={}
),
]
INITIAL_EDGES = ["START", "IMZ", "IMC", "IMB", "IJR", "IPOWER", "IMA", "CREATE"]
class StateTestCase(unittest.TestCase):
def test_ban_vs_pl(self):
events = [
FakeEvent(
id="PA",
sender=ALICE,
type=EventTypes.PowerLevels,
state_key="",
content={"users": {ALICE: 100, BOB: 50}},
),
FakeEvent(
id="MA",
sender=ALICE,
type=EventTypes.Member,
state_key=ALICE,
content={"membership": Membership.JOIN},
),
FakeEvent(
id="MB",
sender=ALICE,
type=EventTypes.Member,
state_key=BOB,
content={"membership": Membership.BAN},
),
FakeEvent(
id="PB",
sender=BOB,
type=EventTypes.PowerLevels,
state_key='',
content={"users": {ALICE: 100, BOB: 50}},
),
]
edges = [["END", "MB", "MA", "PA", "START"], ["END", "PB", "PA"]]
expected_state_ids = ["PA", "MA", "MB"]
self.do_check(events, edges, expected_state_ids)
def test_join_rule_evasion(self):
events = [
FakeEvent(
id="JR",
sender=ALICE,
type=EventTypes.JoinRules,
state_key="",
content={"join_rules": JoinRules.PRIVATE},
),
FakeEvent(
id="ME",
sender=EVELYN,
type=EventTypes.Member,
state_key=EVELYN,
content={"membership": Membership.JOIN},
),
]
edges = [["END", "JR", "START"], ["END", "ME", "START"]]
expected_state_ids = ["JR"]
self.do_check(events, edges, expected_state_ids)
def test_offtopic_pl(self):
events = [
FakeEvent(
id="PA",
sender=ALICE,
type=EventTypes.PowerLevels,
state_key="",
content={"users": {ALICE: 100, BOB: 50}},
),
FakeEvent(
id="PB",
sender=BOB,
type=EventTypes.PowerLevels,
state_key='',
content={"users": {ALICE: 100, BOB: 50, CHARLIE: 50}},
),
FakeEvent(
id="PC",
sender=CHARLIE,
type=EventTypes.PowerLevels,
state_key='',
content={"users": {ALICE: 100, BOB: 50, CHARLIE: 0}},
),
]
edges = [["END", "PC", "PB", "PA", "START"], ["END", "PA"]]
expected_state_ids = ["PC"]
self.do_check(events, edges, expected_state_ids)
def test_topic_basic(self):
events = [
FakeEvent(
id="T1", sender=ALICE, type=EventTypes.Topic, state_key="", content={}
),
FakeEvent(
id="PA1",
sender=ALICE,
type=EventTypes.PowerLevels,
state_key='',
content={"users": {ALICE: 100, BOB: 50}},
),
FakeEvent(
id="T2", sender=ALICE, type=EventTypes.Topic, state_key="", content={}
),
FakeEvent(
id="PA2",
sender=ALICE,
type=EventTypes.PowerLevels,
state_key='',
content={"users": {ALICE: 100, BOB: 0}},
),
FakeEvent(
id="PB",
sender=BOB,
type=EventTypes.PowerLevels,
state_key='',
content={"users": {ALICE: 100, BOB: 50}},
),
FakeEvent(
id="T3", sender=BOB, type=EventTypes.Topic, state_key="", content={}
),
]
edges = [["END", "PA2", "T2", "PA1", "T1", "START"], ["END", "T3", "PB", "PA1"]]
expected_state_ids = ["PA2", "T2"]
self.do_check(events, edges, expected_state_ids)
def test_topic_reset(self):
events = [
FakeEvent(
id="T1", sender=ALICE, type=EventTypes.Topic, state_key="", content={}
),
FakeEvent(
id="PA",
sender=ALICE,
type=EventTypes.PowerLevels,
state_key='',
content={"users": {ALICE: 100, BOB: 50}},
),
FakeEvent(
id="T2", sender=BOB, type=EventTypes.Topic, state_key="", content={}
),
FakeEvent(
id="MB",
sender=ALICE,
type=EventTypes.Member,
state_key=BOB,
content={"membership": Membership.BAN},
),
]
edges = [["END", "MB", "T2", "PA", "T1", "START"], ["END", "T1"]]
expected_state_ids = ["T1", "MB", "PA"]
self.do_check(events, edges, expected_state_ids)
def test_topic(self):
events = [
FakeEvent(
id="T1", sender=ALICE, type=EventTypes.Topic, state_key="", content={}
),
FakeEvent(
id="PA1",
sender=ALICE,
type=EventTypes.PowerLevels,
state_key='',
content={"users": {ALICE: 100, BOB: 50}},
),
FakeEvent(
id="T2", sender=ALICE, type=EventTypes.Topic, state_key="", content={}
),
FakeEvent(
id="PA2",
sender=ALICE,
type=EventTypes.PowerLevels,
state_key='',
content={"users": {ALICE: 100, BOB: 0}},
),
FakeEvent(
id="PB",
sender=BOB,
type=EventTypes.PowerLevels,
state_key='',
content={"users": {ALICE: 100, BOB: 50}},
),
FakeEvent(
id="T3", sender=BOB, type=EventTypes.Topic, state_key="", content={}
),
FakeEvent(
id="MZ1",
sender=ZARA,
type=EventTypes.Message,
state_key=None,
content={},
),
FakeEvent(
id="T4", sender=ALICE, type=EventTypes.Topic, state_key="", content={}
),
]
edges = [
["END", "T4", "MZ1", "PA2", "T2", "PA1", "T1", "START"],
["END", "MZ1", "T3", "PB", "PA1"],
]
expected_state_ids = ["T4", "PA2"]
self.do_check(events, edges, expected_state_ids)
def do_check(self, events, edges, expected_state_ids):
"""Take a list of events and edges and calculate the state of the
graph at END, and asserts it matches `expected_state_ids`
Args:
events (list[FakeEvent])
edges (list[list[str]]): A list of chains of event edges, e.g.
`[[A, B, C]]` are edges A->B and B->C.
expected_state_ids (list[str]): The expected state at END, (excluding
the keys that haven't changed since START).
"""
# We want to sort the events into topological order for processing.
graph = {}
# node_id -> FakeEvent
fake_event_map = {}
for ev in itertools.chain(INITIAL_EVENTS, events):
graph[ev.node_id] = set()
fake_event_map[ev.node_id] = ev
for a, b in pairwise(INITIAL_EDGES):
graph[a].add(b)
for edge_list in edges:
for a, b in pairwise(edge_list):
graph[a].add(b)
# event_id -> FrozenEvent
event_map = {}
# node_id -> state
state_at_event = {}
# We copy the map as the sort consumes the graph
graph_copy = {k: set(v) for k, v in graph.items()}
for node_id in lexicographical_topological_sort(graph_copy, key=lambda e: e):
fake_event = fake_event_map[node_id]
event_id = fake_event.event_id
prev_events = list(graph[node_id])
if len(prev_events) == 0:
state_before = {}
elif len(prev_events) == 1:
state_before = dict(state_at_event[prev_events[0]])
else:
state_d = resolve_events_with_store(
RoomVersions.V2.identifier,
[state_at_event[n] for n in prev_events],
event_map=event_map,
state_res_store=TestStateResolutionStore(event_map),
)
state_before = self.successResultOf(state_d)
state_after = dict(state_before)
if fake_event.state_key is not None:
state_after[(fake_event.type, fake_event.state_key)] = event_id
auth_types = set(auth_types_for_event(fake_event))
auth_events = []
for key in auth_types:
if key in state_before:
auth_events.append(state_before[key])
event = fake_event.to_event(auth_events, prev_events)
state_at_event[node_id] = state_after
event_map[event_id] = event
expected_state = {}
for node_id in expected_state_ids:
# expected_state_ids are node IDs rather than event IDs,
# so we have to convert
event_id = EventID(node_id, "example.com").to_string()
event = event_map[event_id]
key = (event.type, event.state_key)
expected_state[key] = event_id
start_state = state_at_event["START"]
end_state = {
key: value
for key, value in state_at_event["END"].items()
if key in expected_state or start_state.get(key) != value
}
self.assertEqual(expected_state, end_state)
class LexicographicalTestCase(unittest.TestCase):
def test_simple(self):
graph = {"l": {"o"}, "m": {"n", "o"}, "n": {"o"}, "o": set(), "p": {"o"}}
res = list(lexicographical_topological_sort(graph, key=lambda x: x))
self.assertEqual(["o", "l", "n", "m", "p"], res)
class SimpleParamStateTestCase(unittest.TestCase):
def setUp(self):
# We build up a simple DAG.
event_map = {}
create_event = FakeEvent(
id="CREATE",
sender=ALICE,
type=EventTypes.Create,
state_key="",
content={"creator": ALICE},
).to_event([], [])
event_map[create_event.event_id] = create_event
alice_member = FakeEvent(
id="IMA",
sender=ALICE,
type=EventTypes.Member,
state_key=ALICE,
content=MEMBERSHIP_CONTENT_JOIN,
).to_event([create_event.event_id], [create_event.event_id])
event_map[alice_member.event_id] = alice_member
join_rules = FakeEvent(
id="IJR",
sender=ALICE,
type=EventTypes.JoinRules,
state_key="",
content={"join_rule": JoinRules.PUBLIC},
).to_event(
auth_events=[create_event.event_id, alice_member.event_id],
prev_events=[alice_member.event_id],
)
event_map[join_rules.event_id] = join_rules
# Bob and Charlie join at the same time, so there is a fork
bob_member = FakeEvent(
id="IMB",
sender=BOB,
type=EventTypes.Member,
state_key=BOB,
content=MEMBERSHIP_CONTENT_JOIN,
).to_event(
auth_events=[create_event.event_id, join_rules.event_id],
prev_events=[join_rules.event_id],
)
event_map[bob_member.event_id] = bob_member
charlie_member = FakeEvent(
id="IMC",
sender=CHARLIE,
type=EventTypes.Member,
state_key=CHARLIE,
content=MEMBERSHIP_CONTENT_JOIN,
).to_event(
auth_events=[create_event.event_id, join_rules.event_id],
prev_events=[join_rules.event_id],
)
event_map[charlie_member.event_id] = charlie_member
self.event_map = event_map
self.create_event = create_event
self.alice_member = alice_member
self.join_rules = join_rules
self.bob_member = bob_member
self.charlie_member = charlie_member
self.state_at_bob = {
(e.type, e.state_key): e.event_id
for e in [create_event, alice_member, join_rules, bob_member]
}
self.state_at_charlie = {
(e.type, e.state_key): e.event_id
for e in [create_event, alice_member, join_rules, charlie_member]
}
self.expected_combined_state = {
(e.type, e.state_key): e.event_id
for e in [
create_event,
alice_member,
join_rules,
bob_member,
charlie_member,
]
}
def test_event_map_none(self):
# Test that we correctly handle passing `None` as the event_map
state_d = resolve_events_with_store(
RoomVersions.V2.identifier,
[self.state_at_bob, self.state_at_charlie],
event_map=None,
state_res_store=TestStateResolutionStore(self.event_map),
)
state = self.successResultOf(state_d)
self.assert_dict(self.expected_combined_state, state)
def pairwise(iterable):
"s -> (s0,s1), (s1,s2), (s2, s3), ..."
a, b = itertools.tee(iterable)
next(b, None)
return zip(a, b)
@attr.s
class TestStateResolutionStore(object):
event_map = attr.ib()
def get_events(self, event_ids, allow_rejected=False):
"""Get events from the database
Args:
event_ids (list): The event_ids of the events to fetch
allow_rejected (bool): If True return rejected events.
Returns:
Deferred[dict[str, FrozenEvent]]: Dict from event_id to event.
"""
return {eid: self.event_map[eid] for eid in event_ids if eid in self.event_map}
def get_auth_chain(self, event_ids):
"""Gets the full auth chain for a set of events (including rejected
events).
Includes the given event IDs in the result.
Note that:
1. All events must be state events.
2. For v1 rooms this may not have the full auth chain in the
presence of rejected events
Args:
event_ids (list): The event IDs of the events to fetch the auth
chain for. Must be state events.
Returns:
Deferred[list[str]]: List of event IDs of the auth chain.
"""
# Simple DFS for auth chain
result = set()
stack = list(event_ids)
while stack:
event_id = stack.pop()
if event_id in result:
continue
result.add(event_id)
event = self.event_map[event_id]
for aid in event.auth_event_ids():
stack.append(aid)
return list(result)
| [
"synapse.events.FrozenEvent",
"attr.ib",
"six.moves.zip",
"synapse.event_auth.auth_types_for_event",
"synapse.state.v2.lexicographical_topological_sort",
"itertools.tee",
"itertools.chain",
"synapse.types.EventID"
] | [((17883, 17906), 'itertools.tee', 'itertools.tee', (['iterable'], {}), '(iterable)\n', (17896, 17906), False, 'import itertools\n'), ((17936, 17945), 'six.moves.zip', 'zip', (['a', 'b'], {}), '(a, b)\n', (17939, 17945), False, 'from six.moves import zip\n'), ((18012, 18021), 'attr.ib', 'attr.ib', ([], {}), '()\n', (18019, 18021), False, 'import attr\n'), ((2743, 2766), 'synapse.events.FrozenEvent', 'FrozenEvent', (['event_dict'], {}), '(event_dict)\n', (2754, 2766), False, 'from synapse.events import FrozenEvent\n'), ((11661, 11700), 'itertools.chain', 'itertools.chain', (['INITIAL_EVENTS', 'events'], {}), '(INITIAL_EVENTS, events)\n', (11676, 11700), False, 'import itertools\n'), ((12222, 12283), 'synapse.state.v2.lexicographical_topological_sort', 'lexicographical_topological_sort', (['graph_copy'], {'key': '(lambda e: e)'}), '(graph_copy, key=lambda e: e)\n', (12254, 12283), False, 'from synapse.state.v2 import lexicographical_topological_sort, resolve_events_with_store\n'), ((14384, 14440), 'synapse.state.v2.lexicographical_topological_sort', 'lexicographical_topological_sort', (['graph'], {'key': '(lambda x: x)'}), '(graph, key=lambda x: x)\n', (14416, 14440), False, 'from synapse.state.v2 import lexicographical_topological_sort, resolve_events_with_store\n'), ((1697, 1723), 'synapse.types.EventID', 'EventID', (['id', '"""example.com"""'], {}), "(id, 'example.com')\n", (1704, 1723), False, 'from synapse.types import EventID\n'), ((13186, 13218), 'synapse.event_auth.auth_types_for_event', 'auth_types_for_event', (['fake_event'], {}), '(fake_event)\n', (13206, 13218), False, 'from synapse.event_auth import auth_types_for_event\n'), ((13741, 13772), 'synapse.types.EventID', 'EventID', (['node_id', '"""example.com"""'], {}), "(node_id, 'example.com')\n", (13748, 13772), False, 'from synapse.types import EventID\n')] |
from django.urls import path
from . import views
app_name='newsCatch'
urlpatterns = [
path('', views.index , name='index'),
path('politics/', views.politics , name='politics'),
path('culture/', views.culture , name='culture'),
path('society/', views.society , name='society'),
path('economy/', views.economy , name='economy'),
path('international/', views.international , name='international'),
path('sports/', views.sports , name='sports'),
path('etc/', views.etc , name='etc'),
path('about/', views.about, name='about'),
path('subscription/', views.subscription, name='subscription'),
path('unsubscription/', views.unsubscription, name='unsubscription'),
path('errorsubs/', views.errorsubs, name='errorsubs'),
path('errorunsubs/', views.errorunsubs, name='errorunsubs'),
]
| [
"django.urls.path"
] | [((90, 125), 'django.urls.path', 'path', (['""""""', 'views.index'], {'name': '"""index"""'}), "('', views.index, name='index')\n", (94, 125), False, 'from django.urls import path\n'), ((132, 182), 'django.urls.path', 'path', (['"""politics/"""', 'views.politics'], {'name': '"""politics"""'}), "('politics/', views.politics, name='politics')\n", (136, 182), False, 'from django.urls import path\n'), ((189, 236), 'django.urls.path', 'path', (['"""culture/"""', 'views.culture'], {'name': '"""culture"""'}), "('culture/', views.culture, name='culture')\n", (193, 236), False, 'from django.urls import path\n'), ((243, 290), 'django.urls.path', 'path', (['"""society/"""', 'views.society'], {'name': '"""society"""'}), "('society/', views.society, name='society')\n", (247, 290), False, 'from django.urls import path\n'), ((297, 344), 'django.urls.path', 'path', (['"""economy/"""', 'views.economy'], {'name': '"""economy"""'}), "('economy/', views.economy, name='economy')\n", (301, 344), False, 'from django.urls import path\n'), ((351, 416), 'django.urls.path', 'path', (['"""international/"""', 'views.international'], {'name': '"""international"""'}), "('international/', views.international, name='international')\n", (355, 416), False, 'from django.urls import path\n'), ((423, 467), 'django.urls.path', 'path', (['"""sports/"""', 'views.sports'], {'name': '"""sports"""'}), "('sports/', views.sports, name='sports')\n", (427, 467), False, 'from django.urls import path\n'), ((474, 509), 'django.urls.path', 'path', (['"""etc/"""', 'views.etc'], {'name': '"""etc"""'}), "('etc/', views.etc, name='etc')\n", (478, 509), False, 'from django.urls import path\n'), ((516, 557), 'django.urls.path', 'path', (['"""about/"""', 'views.about'], {'name': '"""about"""'}), "('about/', views.about, name='about')\n", (520, 557), False, 'from django.urls import path\n'), ((563, 625), 'django.urls.path', 'path', (['"""subscription/"""', 'views.subscription'], {'name': '"""subscription"""'}), 
"('subscription/', views.subscription, name='subscription')\n", (567, 625), False, 'from django.urls import path\n'), ((631, 699), 'django.urls.path', 'path', (['"""unsubscription/"""', 'views.unsubscription'], {'name': '"""unsubscription"""'}), "('unsubscription/', views.unsubscription, name='unsubscription')\n", (635, 699), False, 'from django.urls import path\n'), ((705, 758), 'django.urls.path', 'path', (['"""errorsubs/"""', 'views.errorsubs'], {'name': '"""errorsubs"""'}), "('errorsubs/', views.errorsubs, name='errorsubs')\n", (709, 758), False, 'from django.urls import path\n'), ((764, 823), 'django.urls.path', 'path', (['"""errorunsubs/"""', 'views.errorunsubs'], {'name': '"""errorunsubs"""'}), "('errorunsubs/', views.errorunsubs, name='errorunsubs')\n", (768, 823), False, 'from django.urls import path\n')] |
import matplotlib.pyplot as plt
from collections import defaultdict
import numpy as np
import pandas as pd
import seaborn as sns
import os
import pickle
from tqdm import tqdm
import networkx as nx
import torch
import os
import psutil
from sklearn.metrics import roc_auc_score, average_precision_score, accuracy_score, log_loss
from sklearn.metrics import precision_recall_curve
from IPython.display import clear_output
import sys
sys.path.append("..")
from opera_tools import plot_graphx, DISTANCE, scattering_estimation_loss
from sklearn.linear_model import TheilSenRegressor
from copy import deepcopy
from collections import Counter
from torch_geometric.data import Data
import heapq
from create_graph.create_graph import generate_distances
NUM_SHOWERS_IN_BRICK = 200
from math import fabs, sqrt, log
def rms_integral_root_closed_py(basetrack_left, basetrack_right):
EPS = 1e-6
dz = basetrack_right['features']['SZ'] - basetrack_left['features']['SZ']
dx = basetrack_left['features']['SX'] - (basetrack_right['features']['SX'] - basetrack_right['features']['TX'] * dz)
dy = basetrack_left['features']['SY'] - (basetrack_right['features']['SY'] - basetrack_right['features']['TY'] * dz)
dtx = (basetrack_left['features']['TX'] - basetrack_right['features']['TX'])
dty = (basetrack_left['features']['TY'] - basetrack_right['features']['TY'])
a = (dtx * dz) ** 2 + (dty * dz) ** 2
b = 2 * (dtx * dz * dx + dty * dz * dy)
c = dx ** 2 + dy ** 2
if a == 0.:
return fabs(sqrt(c))
discriminant = (b ** 2 - 4 * a * c)
log_denominator = 2 * sqrt(a) * sqrt(a + b + c) + 2 * a + b + EPS
log_numerator = 2 * sqrt(a) * sqrt(c) + b + EPS
first_part = ( (2 * a + b) * sqrt(a + b + c) - b * sqrt(c) ) / (4 * a)
if fabs(discriminant) < EPS:
return fabs(first_part)
else:
result = fabs((discriminant * log(log_numerator / log_denominator) / (8 * sqrt(a * a * a)) + first_part))
return result
def class_disbalance_graphx(graphx):
signal = []
for _, node in graphx.nodes(data=True):
signal.append(node['signal'])
return list(zip(*np.unique(signal, return_counts=True)))
def class_disbalance_graphx__(graphx):
signal = []
for _, node in graphx.nodes(data=True):
signal.append(node['signal'])
return np.unique(signal, return_counts=True)
#load data
from opera_tools import combine_mc_bg, gen_graphx, gen_x_y_dataset, load_bg, load_mc
def pmc_to_ship_format(pmc):
showers = []
scale = 10000
for idx in pmc.index:
shower = pmc.loc[idx]
showers.append(
{
'TX': shower['BT_X'] / scale,
'TY': shower['BT_Y'] / scale,
'TZ': shower['BT_Z'] / scale,
'PX': shower['BT_SX'],
'PY': shower['BT_SY'],
'PZ': np.ones_like(shower['BT_X']),
'ele_P': shower['ele_P'],
'ele_TX': shower['ele_x'] / scale,
'ele_TY': shower['ele_y'] / scale,
'ele_TZ': shower['ele_z'] / scale,
'ele_PX': shower['ele_sx'],
'ele_PY': shower['ele_sy'],
'ele_PZ': 1.
}
)
return showers
def main():
process = psutil.Process(os.getpid())
pmc = load_mc(filename='mcdata_taue2.root', step=1)
selected_showers = pmc_to_ship_format(pmc)
selected_showers = [selected_shower for selected_shower in selected_showers if len(selected_shower['PX']) > 70]
selected_showers = [selected_shower for selected_shower in selected_showers if len(selected_shower['PX']) < 3000]
bricks = []
NUM_SHOWERS_IN_BRICK = 200
scale = 10000
bricks = []
for i in range(len(selected_showers) // NUM_SHOWERS_IN_BRICK):
node_id = 0
graphx = nx.DiGraph()
nodes_to_add = []
showers_data = []
for j in range(NUM_SHOWERS_IN_BRICK):
selected_shower = selected_showers[i * NUM_SHOWERS_IN_BRICK + j]
showers_data.append(
{
'numtracks': len(selected_shower['PX']),
'signal': j,
'ele_P': selected_shower['ele_P'],
'ele_SX': selected_shower['ele_TX'] * scale,
'ele_SY': selected_shower['ele_TY'] * scale,
'ele_SZ': selected_shower['ele_TZ'] * scale,
'ele_TX': selected_shower['ele_PX'] / selected_shower['ele_PZ'],
'ele_TY': selected_shower['ele_PY'] / selected_shower['ele_PZ']
}
)
for k in range(len(selected_shower['PX'])):
nodes_to_add.append(
(
node_id,
{
'features': {
'SX': selected_shower['TX'][k] * scale,
'SY': selected_shower['TY'][k] * scale,
'SZ': selected_shower['TZ'][k] * scale,
'TX': selected_shower['PX'][k] / selected_shower['PZ'][k],
'TY': selected_shower['PY'][k] / selected_shower['PZ'][k],
},
'signal': j
}
)
)
node_id += 1
graphx.add_nodes_from(nodes_to_add)
graphx.graph['showers_data'] = showers_data
bricks.append(graphx)
print(len(bricks))
def run_gen_graphx(graphx, layers=2, threshold=250):
graphx_nodes = list(graphx.nodes(data=True))
edges = list(graphx.edges())
graphx.remove_edges_from(edges)
ebunch = generate_distances(graphx_nodes, layers=layers, threshold=threshold)
graphx.add_edges_from(ebunch)
return graphx, ebunch
#save graphx for clusterization saparetly
def brick_to_graph(data):
graphx, ebunch = run_gen_graphx(data, layers=5, threshold=400)
x = torch.FloatTensor(pd.DataFrame([graphx.nodes(data = True)[i]['features']
for i in range(len(graphx.nodes))]).values)
edges_from = [ebunch[i][0] for i in range(len(ebunch))]
edge_to = [ebunch[i][1] for i in range(len(ebunch))]
edges = np.vstack([edges_from, edge_to])
edge_index = torch.LongTensor(edges)
shower_data = torch.FloatTensor(pd.DataFrame(graphx.graph['showers_data']).values)
#numtracks signal ele_P ele_SX ele_SY ele_SZ ele_TX ele_TY
dist = [ebunch[i][2]['weight'] for i in range(len(ebunch))]
edge_attr = torch.log(torch.FloatTensor(dist).view(-1, 1))
y = torch.LongTensor([graphx.nodes(data = True)[i]['signal'] for i in range(len(graphx.nodes))])
shower = Data(
x=x,
edge_index=edge_index,
shower_data=shower_data,
pos=x,
edge_attr=edge_attr,
y=y
)
return graphx, shower
showers = []
graphs_for_clusterization = []
for i in tqdm(range(len(bricks))):
graphx, shower = brick_to_graph(bricks[i])
graphs_for_clusterization.append(graphx)
showers.append(shower)
output_file='./graphx.pt'
torch.save(showers, output_file)
output_file='./graph_for_clustering.pt'
torch.save(graphs_for_clusterization, output_file)
if __name__ == "__main__":
main()
| [
"sys.path.append",
"pandas.DataFrame",
"os.getpid",
"numpy.ones_like",
"opera_tools.load_mc",
"math.fabs",
"torch.LongTensor",
"math.sqrt",
"create_graph.create_graph.generate_distances",
"torch.FloatTensor",
"torch.save",
"math.log",
"torch_geometric.data.Data",
"numpy.vstack",
"network... | [((431, 452), 'sys.path.append', 'sys.path.append', (['""".."""'], {}), "('..')\n", (446, 452), False, 'import sys\n'), ((2342, 2379), 'numpy.unique', 'np.unique', (['signal'], {'return_counts': '(True)'}), '(signal, return_counts=True)\n', (2351, 2379), True, 'import numpy as np\n'), ((3336, 3381), 'opera_tools.load_mc', 'load_mc', ([], {'filename': '"""mcdata_taue2.root"""', 'step': '(1)'}), "(filename='mcdata_taue2.root', step=1)\n", (3343, 3381), False, 'from opera_tools import combine_mc_bg, gen_graphx, gen_x_y_dataset, load_bg, load_mc\n'), ((7309, 7341), 'torch.save', 'torch.save', (['showers', 'output_file'], {}), '(showers, output_file)\n', (7319, 7341), False, 'import torch\n'), ((7391, 7441), 'torch.save', 'torch.save', (['graphs_for_clusterization', 'output_file'], {}), '(graphs_for_clusterization, output_file)\n', (7401, 7441), False, 'import torch\n'), ((1790, 1808), 'math.fabs', 'fabs', (['discriminant'], {}), '(discriminant)\n', (1794, 1808), False, 'from math import fabs, sqrt, log\n'), ((1831, 1847), 'math.fabs', 'fabs', (['first_part'], {}), '(first_part)\n', (1835, 1847), False, 'from math import fabs, sqrt, log\n'), ((3312, 3323), 'os.getpid', 'os.getpid', ([], {}), '()\n', (3321, 3323), False, 'import os\n'), ((3851, 3863), 'networkx.DiGraph', 'nx.DiGraph', ([], {}), '()\n', (3861, 3863), True, 'import networkx as nx\n'), ((5739, 5807), 'create_graph.create_graph.generate_distances', 'generate_distances', (['graphx_nodes'], {'layers': 'layers', 'threshold': 'threshold'}), '(graphx_nodes, layers=layers, threshold=threshold)\n', (5757, 5807), False, 'from create_graph.create_graph import generate_distances\n'), ((6348, 6380), 'numpy.vstack', 'np.vstack', (['[edges_from, edge_to]'], {}), '([edges_from, edge_to])\n', (6357, 6380), True, 'import numpy as np\n'), ((6402, 6425), 'torch.LongTensor', 'torch.LongTensor', (['edges'], {}), '(edges)\n', (6418, 6425), False, 'import torch\n'), ((6845, 6940), 'torch_geometric.data.Data', 
'Data', ([], {'x': 'x', 'edge_index': 'edge_index', 'shower_data': 'shower_data', 'pos': 'x', 'edge_attr': 'edge_attr', 'y': 'y'}), '(x=x, edge_index=edge_index, shower_data=shower_data, pos=x, edge_attr=\n edge_attr, y=y)\n', (6849, 6940), False, 'from torch_geometric.data import Data\n'), ((1532, 1539), 'math.sqrt', 'sqrt', (['c'], {}), '(c)\n', (1536, 1539), False, 'from math import fabs, sqrt, log\n'), ((1685, 1692), 'math.sqrt', 'sqrt', (['c'], {}), '(c)\n', (1689, 1692), False, 'from math import fabs, sqrt, log\n'), ((1736, 1751), 'math.sqrt', 'sqrt', (['(a + b + c)'], {}), '(a + b + c)\n', (1740, 1751), False, 'from math import fabs, sqrt, log\n'), ((1758, 1765), 'math.sqrt', 'sqrt', (['c'], {}), '(c)\n', (1762, 1765), False, 'from math import fabs, sqrt, log\n'), ((2153, 2190), 'numpy.unique', 'np.unique', (['signal'], {'return_counts': '(True)'}), '(signal, return_counts=True)\n', (2162, 2190), True, 'import numpy as np\n'), ((2883, 2911), 'numpy.ones_like', 'np.ones_like', (["shower['BT_X']"], {}), "(shower['BT_X'])\n", (2895, 2911), True, 'import numpy as np\n'), ((6467, 6509), 'pandas.DataFrame', 'pd.DataFrame', (["graphx.graph['showers_data']"], {}), "(graphx.graph['showers_data'])\n", (6479, 6509), True, 'import pandas as pd\n'), ((1617, 1632), 'math.sqrt', 'sqrt', (['(a + b + c)'], {}), '(a + b + c)\n', (1621, 1632), False, 'from math import fabs, sqrt, log\n'), ((1675, 1682), 'math.sqrt', 'sqrt', (['a'], {}), '(a)\n', (1679, 1682), False, 'from math import fabs, sqrt, log\n'), ((6684, 6707), 'torch.FloatTensor', 'torch.FloatTensor', (['dist'], {}), '(dist)\n', (6701, 6707), False, 'import torch\n'), ((1607, 1614), 'math.sqrt', 'sqrt', (['a'], {}), '(a)\n', (1611, 1614), False, 'from math import fabs, sqrt, log\n'), ((1897, 1933), 'math.log', 'log', (['(log_numerator / log_denominator)'], {}), '(log_numerator / log_denominator)\n', (1900, 1933), False, 'from math import fabs, sqrt, log\n'), ((1941, 1956), 'math.sqrt', 'sqrt', (['(a * a * a)'], {}), 
'(a * a * a)\n', (1945, 1956), False, 'from math import fabs, sqrt, log\n')] |
#!/usr/bin/env python
####################
# Required Modules #
####################
# Generic/Built-in
# Libs
from flask import Blueprint
from flask_restx import Api
# Custom
from rest_rpc.connection.collaborations import ns_api as collab_ns
from rest_rpc.connection.projects import ns_api as project_ns
from rest_rpc.connection.experiments import ns_api as experiment_ns
from rest_rpc.connection.runs import ns_api as run_ns
from rest_rpc.connection.participants import ns_api as participant_ns
from rest_rpc.connection.registration import ns_api as registration_ns
from rest_rpc.connection.tags import ns_api as tag_ns
##################
# Configurations #
##################
blueprint = Blueprint('connections', __name__)
api = Api(
app=blueprint,
version="0.1.0",
title="Synergos Orchestrator REST-RPC Connection API",
description="API to facilitate metadata collection between TTP & participants for WS connection"
)
#################################
# Collaboration management APIs #
#################################
"""
Supported routes:
1) "/collaborations"
2) "/collaborations/<collab_id>"
3) "/collaborations/<collab_id>/registrations" ***
4) "/collaborations/<collab_id>/participants/<participant_id>/registrations ***
***: Imported
"""
api.add_namespace(collab_ns, path="/collaborations")
###########################
# Project management APIs #
###########################
"""
Supported routes:
1) "/collaborations/<collab_id>/projects"
2) "/collaborations/<collab_id>/projects/<project_id>"
3) "/collaborations/<collab_id>/projects/<project_id>/registrations" ***
4) "/collaborations/<collab_id>/projects/<project_id>/participants/<participant_id>/registration ***
5) "/collaborations/<collab_id>/projects/<project_id>/participants/<participant_id>/registration/tags" ***
***: imported
"""
api.add_namespace(project_ns, path="/collaborations/<collab_id>/projects")
##############################
# Experiment management APIs #
##############################
"""
Supported routes:
1) "/collaborations/<collab_id>/projects/<project_id>/experiments"
2) "/collaborations/<collab_id>/projects/<project_id>/experiments/<expt_id>"
"""
api.add_namespace(experiment_ns, path="/collaborations/<collab_id>/projects/<project_id>/experiments")
#######################
# Run management APIs #
#######################
"""
Supported routes:
1) "/collaborations/<collab_id>/projects/<project_id>/experiments/<expt_id>/run"
2) "/collaborations/<collab_id>/projects/<project_id>/experiments/<expt_id>/run/<run_id>"
"""
api.add_namespace(run_ns, path="/collaborations/<collab_id>/projects/<project_id>/experiments/<expt_id>/runs")
###############################
# Participant management APIs #
###############################
"""
Supported routes:
1) "/participants"
2) "/participants/<participant_id>"
3) "/participants/<participant_id>/registrations" ***
4) "/participants/<participant_id>/collaborations/<collab_id>/registrations" ***
5) "/participants/<participant_id>/collaborations/<collab_id>/projects/<project_id>/registration" ***
6) "/participants/<participant_id>/collaborations/<collab_id>/projects/<project_id>/registration/tags" ***
***: imported
"""
api.add_namespace(participant_ns, path="/participants")
################################
# Registration management APIs #
################################
"""
The resources defined under this section are used as imports into other
namespaces (i.e. projects & participants). Hence they do not support direct
routing. Instead, all supported routes can be found in their respective hosts.
"""
api.add_namespace(registration_ns)
api.add_namespace(tag_ns) | [
"flask_restx.Api",
"flask.Blueprint"
] | [((697, 731), 'flask.Blueprint', 'Blueprint', (['"""connections"""', '__name__'], {}), "('connections', __name__)\n", (706, 731), False, 'from flask import Blueprint\n'), ((739, 942), 'flask_restx.Api', 'Api', ([], {'app': 'blueprint', 'version': '"""0.1.0"""', 'title': '"""Synergos Orchestrator REST-RPC Connection API"""', 'description': '"""API to facilitate metadata collection between TTP & participants for WS connection"""'}), "(app=blueprint, version='0.1.0', title=\n 'Synergos Orchestrator REST-RPC Connection API', description=\n 'API to facilitate metadata collection between TTP & participants for WS connection'\n )\n", (742, 942), False, 'from flask_restx import Api\n')] |
import asyncio
from collections import deque
from datetime import datetime
from pathlib import Path
from re import Match, match
from typing import Dict, Iterable, List, Tuple, Union
from urllib import parse
import requests
from bs4 import BeautifulSoup, Tag
from more_itertools import unique_everseen
from pyppeteer import connect
from .browser import get_browser_connection_url
from .helpers import error_handler, pipe
from .requests_cache import CachedResponse, CachedSession
@error_handler
def get_csrf_token(session: CachedSession) -> str:
csrf_token_get_response: CachedResponse = session.get(
"https://foxford.ru/api/csrf_token",
headers={
"X-Requested-With": "XMLHttpRequest"
}
)
if csrf_token_get_response.status_code != 200:
return {"fatal_error": "CSRF token fetch has failed"}
if "token" not in csrf_token_get_response.json():
return {"fatal_error": "CSRF token structure is unknown"}
return csrf_token_get_response.json()["token"]
@error_handler
def login(email: str, password: str, session: CachedSession) -> CachedSession:
    """Authenticate the session against foxford.ru.

    Returns the same session object on success; reports a fatal error on
    missing credentials or a rejected login attempt.
    """
    if not (email and password):
        return {"fatal_error": "No credentials provided"}
    login_response: CachedResponse = session.post(
        "https://foxford.ru/user/login",
        headers={
            "X-CSRF-Token": get_csrf_token(session),
            "X-Requested-With": "XMLHttpRequest",
        },
        json={"user": {"email": email, "password": password}},
    )
    if login_response.status_code != 200:
        return {"fatal_error": "Wrong credentials"}
    return session
def get_user_courses(session: CachedSession) -> Tuple[Dict]:
    """Collect every bookmarked (non-archived) course of the logged-in user.

    Pages through /api/user/bookmarks until an empty page is returned.
    The walk is iterative — the previous recursive version consumed one
    stack frame per page — but the result tuple and the error reporting
    are unchanged.
    """
    @error_handler
    def collect_pages() -> Tuple[Dict]:
        courses = []
        page_num = 1
        while True:
            course_list_response: CachedResponse = session.get(
                f"https://foxford.ru/api/user/bookmarks?page={page_num}&archived=false",
                headers={
                    "X-CSRF-Token": get_csrf_token(session),
                    "X-Requested-With": "XMLHttpRequest"
                }
            )
            if course_list_response.status_code != 200:
                return {"fatal_error": "Course list fetch has failed"}
            payload = course_list_response.json()
            if "bookmarks" not in payload:
                return {"fatal_error": "Course list structure is unknown"}
            bookmarks = payload["bookmarks"]
            # An empty page marks the end of the listing.
            if not bookmarks:
                return tuple(courses)
            if not {"name", "subtitle", "resource_id"}.issubset(set(bookmarks[0])):
                return {"fatal_error": "Course structure is unknown"}
            courses.extend(bookmarks)
            page_num += 1
    return collect_pages()
class get_course_lessons():
    """Callable-style collector for every lesson of a course.

    "Instantiation" (``get_course_lessons(course_id, session)``) does not
    return an instance: ``__new__`` performs all the work and returns an
    iterable of extended lesson dicts instead.  Note that ``self`` in the
    methods below is the class object itself (methods are invoked as
    ``self.method(self, ...)``), so state is stored on the class —
    NOTE(review): this makes concurrent use with different courses
    unsafe; confirm single-threaded usage.
    """
    @error_handler
    def __new__(self, course_id: int, session: CachedSession) -> Iterable[Dict]:
        # Fetch one "window" of lessons; the payload also carries cursors
        # pointing at the pages before and after this window.
        lesson_list_at_somewhere_response: CachedResponse = session.get(
            f"https://foxford.ru/api/courses/{course_id}/lessons",
            headers={
                "X-Requested-With": "XMLHttpRequest"
            }
        )
        if lesson_list_at_somewhere_response.status_code != 200:
            return {"fatal_error": "Lesson list fetch has failed"}
        if not {"lessons", "cursors"}.issubset(set(lesson_list_at_somewhere_response.json())):
            return {"fatal_error": "Lesson list structure is unknown"}
        if "id" not in lesson_list_at_somewhere_response.json()["lessons"][0]:
            return {"fatal_error": "Lesson structure is unknown"}
        # Stored on the class object, not on an instance (see class docstring).
        self.course_id = course_id
        self.session = session
        # Walk the cursor chain in both directions around the initial
        # window, then lazily extend every lesson with its detail record.
        return pipe(
            lambda json: (
                *self.recursive_collection(
                    self,
                    "before",
                    json["cursors"]["before"]
                ),
                *json["lessons"],
                *self.recursive_collection(
                    self,
                    "after",
                    json["cursors"]["after"]
                )
            ),
            lambda lessons: map(
                lambda lesson: self.lesson_extension(self, lesson),
                lessons
            )
        )(lesson_list_at_somewhere_response.json())
    @error_handler
    def recursive_collection(self, direction: str, cursor: Union[int, None]) -> Tuple[Dict]:
        """Follow the ``before``/``after`` cursor chain until exhausted.

        Returns the collected lessons in document order regardless of the
        walking direction.
        """
        # A missing cursor marks the end of the chain.
        if not cursor:
            return ()
        lesson_list_at_direction_response: CachedResponse = self.session.get(
            f"https://foxford.ru/api/courses/{self.course_id}/lessons?{direction}={cursor}",
            headers={
                "X-Requested-With": "XMLHttpRequest"
            }
        )
        if lesson_list_at_direction_response.status_code != 200:
            return {"fatal_error": "Lesson list fetch has failed"}
        if not {"lessons", "cursors"}.issubset(set(lesson_list_at_direction_response.json())):
            return {"fatal_error": "Lesson list structure is unknown"}
        if "id" not in lesson_list_at_direction_response.json()["lessons"][0]:
            return {"fatal_error": "Lesson structure is unknown"}
        # Keep chronological order: "before" pages are prepended, "after"
        # pages appended, recursing further in the same direction.
        if direction == "before":
            return (
                *self.recursive_collection(
                    self,
                    direction,
                    lesson_list_at_direction_response
                    .json()["cursors"][direction]
                ),
                *lesson_list_at_direction_response.json()["lessons"]
            )
        else:
            return (
                *lesson_list_at_direction_response.json()["lessons"],
                *self.recursive_collection(
                    self,
                    direction,
                    lesson_list_at_direction_response
                    .json()["cursors"][direction]
                )
            )
    @error_handler
    def lesson_extension(self, lesson: Dict) -> Dict:
        """Fetch the detailed record (webinar id, lock state, ...) for one lesson."""
        lesson_extension_response: CachedResponse = self.session.get(
            f"https://foxford.ru/api/courses/{self.course_id}/lessons/{lesson['id']}",
            headers={
                "X-Requested-With": "XMLHttpRequest"
            }
        )
        if lesson_extension_response.status_code != 200:
            return {"fatal_error": "Lesson extension fetch has failed"}
        if not {"webinar_id", "access_state", "webinar_status", "is_locked"}.issubset(set(lesson_extension_response.json())):
            return {"fatal_error": "Lesson extension structure is unknown"}
        return lesson_extension_response.json()
class get_resources_for_lessons():
    """Callable-style collector of video/event-log URLs for webinars.

    ``__new__`` does all the work and returns a tuple of resource dicts,
    one per id drawn from ``webinar_ids``.  As with get_course_lessons,
    ``self`` in the methods is the class object itself and state lives on
    the class — not safe for concurrent use.
    """
    def __new__(self, course_id: int, webinar_ids: Iterable[int], session: CachedSession) -> Tuple[Dict]:
        self.course_id = course_id
        self.webinar_ids = webinar_ids
        self.session = session
        return self.recursive_collection(self)
    @error_handler
    def recursive_collection(self) -> Tuple[Dict]:
        """Consume ``webinar_ids`` one id per recursion step."""
        # The iterator is advanced in place; None signals exhaustion.
        webinar_id: Union[int, None] = next(self.webinar_ids, None)
        if not webinar_id:
            return ()
        video_source_response: CachedResponse = self.session.get(
            f"https://foxford.ru/groups/{webinar_id}"
        )
        if video_source_response.status_code != 200:
            return {"fatal_error": "Video source fetch has failed"}
        # Resolve this webinar's player iframe, derive its video/events
        # URLs, then recurse for the remaining ids.
        return (
            pipe(
                lambda res: self.retrieve_erly_iframe_src(self, res),
                lambda src: self.construct_resource_links(self, src)
            )(video_source_response),
            *self.recursive_collection(self)
        )
    @error_handler
    def retrieve_erly_iframe_src(self, video_source_response: CachedResponse) -> str:
        """Extract the src attribute of the webinar player iframe."""
        erly_iframe: Union[Tag, None] = pipe(
            lambda r_content: BeautifulSoup(
                r_content,
                "html.parser"
            ),
            lambda soup: soup.select_one(
                "div.full_screen > iframe"
            )
        )(video_source_response.content)
        if not erly_iframe:
            return {"fatal_error": ".full_screen > iframe wasn't found"}
        erly_iframe_src: Union[str, None] = erly_iframe.get("src")
        if not erly_iframe_src:
            return {"fatal_error": ".full_screen > iframe doesn't have src attribute"}
        return erly_iframe_src
    @error_handler
    def construct_resource_links(self, erly_iframe_src: str) -> Dict:
        """Turn the iframe src query string into storage download URLs."""
        # The src is expected to carry ?conf=webinar-<id>&access_token=<token>.
        search_params: Dict = dict(
            parse.parse_qsl(
                parse.urlparse(erly_iframe_src).query
            )
        )
        if not {"conf", "access_token"}.issubset(set(search_params)):
            return {"fatal_error": "Iframe src search params structure is unknown"}
        webinar_id_match: Union[Match, None] = match(
            r"^webinar-(\d+)$", search_params.get("conf")
        )
        if not webinar_id_match:
            return {"fatal_error": "Unable to extract webinar id"}
        # Storage service endpoints for the mp4 recording and the event log.
        return {
            "video": f"https://storage.netology-group.services/api/v1/buckets/ms.webinar.foxford.ru/sets/{webinar_id_match[1]}/objects/mp4?access_token={search_params.get('access_token')}",
            "events": f"https://storage.netology-group.services/api/v1/buckets/meta.webinar.foxford.ru/sets/{webinar_id_match[1]}/objects/events.json?access_token={search_params.get('access_token')}"
        }
def get_lesson_tasks(lesson_ids: Iterable[int], session: CachedSession) -> Iterable[List[Dict]]:
    """Lazily fetch the task list for every lesson id.

    Returns a map object yielding one list of task dicts per lesson.
    """
    @error_handler
    def fetch(lesson_id: int) -> List[Dict]:
        tasks_response: CachedResponse = session.get(
            f"https://foxford.ru/api/lessons/{lesson_id}/tasks",
            headers={
                "X-Requested-With": "XMLHttpRequest"
            }
        )
        if tasks_response.status_code != 200:
            return {"fatal_error": "Tasks fetch has failed"}
        tasks = tasks_response.json()
        # Guard the structure check: a lesson may legitimately have zero
        # tasks, and indexing [0] on an empty list raised IndexError before.
        if tasks and "id" not in tasks[0]:
            return {"fatal_error": "Task structure is unknown"}
        return tasks
    return map(fetch, lesson_ids)
def construct_task_urls(lesson_ids: Iterable[int], lesson_tasks: Iterable[List[Dict]]) -> Iterable[Iterable[str]]:
    """Build per-lesson iterables of task page URLs.

    Pairs every lesson id with its task list and yields, for each pair, a
    lazy iterable of "https://foxford.ru/lessons/<lesson>/tasks/<task>"
    URLs.
    """
    def urls_for_lesson(lesson_id: int, task_list: List[Dict]) -> Iterable[str]:
        # One URL per task; evaluation stays lazy, just like map() did.
        return (
            f"https://foxford.ru/lessons/{lesson_id}/tasks/{task['id']}"
            for task in task_list
        )
    return map(urls_for_lesson, lesson_ids, lesson_tasks)
def construct_conspect_urls(lesson_ids: Iterable[int], conspect_amount: Iterable[int]) -> Iterable[Tuple[str]]:
    """Build per-lesson tuples of conspect (lecture-notes) page URLs.

    For each lesson id paired with its conspect count, yields a tuple of
    "https://foxford.ru/lessons/<lesson>/conspects/<n>" for n = 1..count.
    """
    def lesson_conspects(lesson_id: int, amount: int) -> Tuple[str, ...]:
        # Iterative build: the previous tuple-unpacking recursion was
        # O(amount**2), used one stack frame per URL, and recursed forever
        # on a negative count.  A non-positive count yields ().
        return tuple(
            f"https://foxford.ru/lessons/{lesson_id}/conspects/{number}"
            for number in range(1, amount + 1)
        )
    return map(
        lesson_conspects,
        lesson_ids,
        conspect_amount
    )
def build_dir_hierarchy(course_name: str, course_subtitle: str, grade: str, lessons: Iterable[Dict]) -> Iterable[Path]:
    """Lazily create one output directory per lesson and yield its Path.

    Layout: <cwd>/(<grade>) <course> - <subtitle>/(<number>) <title>.
    Directories are created on demand as the returned map is consumed.
    """
    def sanitize_string(string: str) -> str:
        # Keep only letters, digits and spaces, then cap at 30 characters
        # so the resulting path stays filesystem-friendly.
        return pipe(
            lambda char_list: filter(
                lambda char: char.isalpha() or char.isdigit() or char == " ", char_list
            ),
            lambda iterable: "".join(iterable),
            lambda filtered_char_list: filtered_char_list[:30].strip()
        )(string)
    def create_dir(lesson: Dict) -> Path:
        # Course-level folder + lesson-level folder under the current
        # working directory.
        constructed_path: Path = Path(
            Path.cwd(),
            (
                f"({grade}) " +
                sanitize_string(course_name) +
                " - " +
                sanitize_string(course_subtitle)
            ).strip(),
            (
                f"({lesson['number']}) " +
                sanitize_string(lesson['title'])
            ).strip()
        )
        if not constructed_path.exists():
            constructed_path.mkdir(parents=True)
        return constructed_path
    return map(
        create_dir,
        lessons
    )
def download_resources(res_with_path: Dict, session: CachedSession) -> None:
    """Download a lesson's video, chat log and slide PDFs into its folder.

    ``res_with_path`` must carry "video" and "events" URLs plus a
    "destination" Path.  Every artefact is skipped when it already
    exists, so the function is safe to re-run.
    """
    @error_handler
    def download_url(url: str, dest: Path) -> None:
        # Stream to disk in 10 KiB chunks to keep memory bounded.
        with requests.get(url, stream=True) as r:
            if r.status_code != 200:
                return {"fatal_error": "Video fetch has failed"}
            with dest.open("wb") as f:
                # deque(..., 0) just drains the map for its write side effects.
                deque(
                    map(
                        lambda chunk: f.write(chunk),
                        filter(None, r.iter_content(10 * 1024))
                    ),
                    0
                )
    def save_video() -> None:
        # Skip when a previous run already fetched the recording.
        if res_with_path["destination"].joinpath("video.mp4").exists():
            return
        download_url(
            res_with_path["video"],
            res_with_path["destination"].joinpath("video.mp4")
        )
    @error_handler
    def parse_and_save_event_data() -> None:
        # Both the chat log and the slide decks are reconstructed from the
        # webinar event stream.
        if res_with_path["destination"].joinpath("message_log.txt").exists():
            return
        events_response: CachedResponse = session.get(
            res_with_path["events"]
        )
        if events_response.status_code != 200:
            return {"fatal_error": "Events fetch has failed"}
        if "meta" not in events_response.json()[0]:
            return {"fatal_error": "Events structure is unknown"}
        # Render "message" events as "[time] user: text" lines.
        with res_with_path["destination"].joinpath("message_log.txt").open("w", errors="replace") as f:
            pipe(
                lambda json: filter(
                    lambda obj: obj["meta"]["action"] == "message",
                    json
                ),
                lambda messages: map(
                    lambda msg: f"[{datetime.fromtimestamp(msg['meta']['time'])}] {msg['meta']['user_name']}: {parse.unquote(msg['meta']['body'])}",
                    messages
                ),
                lambda message_log: "\n".join(message_log),
                f.write
            )(events_response.json())
        # Download each distinct PDF shown during the webinar, numbered in
        # order of first appearance (1.pdf, 2.pdf, ...).
        pipe(
            lambda json: filter(
                lambda obj:
                (obj["meta"]["action"] == "add_tab" or
                 obj["meta"]["action"] == "change_tab") and
                obj["meta"]["content_type"] == "pdf",
                json
            ),
            lambda pdfs: map(
                lambda pdf: pdf["meta"]["url"],
                pdfs
            ),
            unique_everseen,
            lambda urls: enumerate(urls, 1),
            lambda enumed_urls: map(
                lambda item: download_url(
                    item[1],
                    res_with_path["destination"]
                    .joinpath(f"{item[0]}.pdf")
                ),
                enumed_urls
            ),
            lambda task_map: deque(task_map, 0)
        )(events_response.json())
    save_video()
    parse_and_save_event_data()
    # Progress indicator: green check mark once the lesson folder is done.
    print(
        f"-> {res_with_path['destination'].name}: \033[92m\u2713\033[0m"
    )
async def save_page(url: str, path: Path, folder: str, cookies: Iterable[Dict], semaphore: asyncio.Semaphore) -> None:
    """Render one lesson page (task or conspect) to PDF via headless Chrome.

    Skips URLs whose PDF already exists; ``semaphore`` bounds the number
    of concurrently open browser tabs.
    """
    async with semaphore:
        if not path.joinpath(folder).joinpath(url.split("/")[-1] + ".pdf").exists():
            browser_endpoint = await get_browser_connection_url()
            browser = await connect(browserWSEndpoint=browser_endpoint)
            page = await browser.newPage()
            await page.emulateMedia("screen")
            # Narrow mobile-sized viewport produces a single-column layout.
            await page.setViewport({"width": 411, "height": 823})
            await page.setCookie(*cookies)
            await page.goto(url, {"waitUntil": "domcontentloaded"})
            # Wait for MathJax to finish typesetting formulas before printing.
            if await page.waitForFunction("() => window.MathJax", timeout=10000):
                await asyncio.sleep(3.5)
                await page.evaluate("""
                    async function() {
                        await new Promise(function(resolve) {
                            window.MathJax.Hub.Register.StartupHook(
                                "End",
                                resolve
                            )
                        })
                    }
                """)
                await asyncio.sleep(0.1)
            # Expand collapsed spoiler blocks so their content gets printed.
            await page.evaluate("""
                document.querySelectorAll(".toggle_element > .toggle_content").forEach(el => el.style.display = "block")
            """, force_expr=True)
            await asyncio.sleep(0.1)
            # Remove the cookie-consent banner overlay.
            await page.evaluate("""
                document.querySelector("#cc_container").remove()
            """, force_expr=True)
            await asyncio.sleep(0.1)
            if not path.joinpath(folder).exists():
                path.joinpath(folder).mkdir()
            # touch() first so concurrent tasks see the file and skip it.
            path.joinpath(folder).joinpath(url.split("/")[-1] + ".pdf").touch()
            await page.pdf({
                "path": str(path.joinpath(folder).joinpath(url.split("/")[-1] + ".pdf")),
                "printBackground": True
            })
            await page.close()
            await browser.disconnect()
        print(
            f"-> {folder}/{url.split('/')[-3]}/{url.split('/')[-1]}: \033[92m\u2713\033[0m"
        )
| [
"urllib.parse.unquote",
"pyppeteer.connect",
"asyncio.sleep",
"collections.deque",
"requests.get",
"datetime.datetime.fromtimestamp",
"bs4.BeautifulSoup",
"pathlib.Path.cwd",
"urllib.parse.urlparse"
] | [((11539, 11549), 'pathlib.Path.cwd', 'Path.cwd', ([], {}), '()\n', (11547, 11549), False, 'from pathlib import Path\n'), ((12225, 12255), 'requests.get', 'requests.get', (['url'], {'stream': '(True)'}), '(url, stream=True)\n', (12237, 12255), False, 'import requests\n'), ((15306, 15349), 'pyppeteer.connect', 'connect', ([], {'browserWSEndpoint': 'browser_endpoint'}), '(browserWSEndpoint=browser_endpoint)\n', (15313, 15349), False, 'from pyppeteer import connect\n'), ((16380, 16398), 'asyncio.sleep', 'asyncio.sleep', (['(0.1)'], {}), '(0.1)\n', (16393, 16398), False, 'import asyncio\n'), ((16553, 16571), 'asyncio.sleep', 'asyncio.sleep', (['(0.1)'], {}), '(0.1)\n', (16566, 16571), False, 'import asyncio\n'), ((7817, 7856), 'bs4.BeautifulSoup', 'BeautifulSoup', (['r_content', '"""html.parser"""'], {}), "(r_content, 'html.parser')\n", (7830, 7856), False, 'from bs4 import BeautifulSoup, Tag\n'), ((8537, 8568), 'urllib.parse.urlparse', 'parse.urlparse', (['erly_iframe_src'], {}), '(erly_iframe_src)\n', (8551, 8568), False, 'from urllib import parse\n'), ((14787, 14805), 'collections.deque', 'deque', (['task_map', '(0)'], {}), '(task_map, 0)\n', (14792, 14805), False, 'from collections import deque\n'), ((15721, 15739), 'asyncio.sleep', 'asyncio.sleep', (['(3.5)'], {}), '(3.5)\n', (15734, 15739), False, 'import asyncio\n'), ((16151, 16169), 'asyncio.sleep', 'asyncio.sleep', (['(0.1)'], {}), '(0.1)\n', (16164, 16169), False, 'import asyncio\n'), ((13726, 13769), 'datetime.datetime.fromtimestamp', 'datetime.fromtimestamp', (["msg['meta']['time']"], {}), "(msg['meta']['time'])\n", (13748, 13769), False, 'from datetime import datetime\n'), ((13801, 13835), 'urllib.parse.unquote', 'parse.unquote', (["msg['meta']['body']"], {}), "(msg['meta']['body'])\n", (13814, 13835), False, 'from urllib import parse\n')] |
###
# (C) Copyright [2019-2020] Hewlett Packard Enterprise Development LP
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##
import unittest
from unittest import mock
from unittest.mock import call
from simplivity.connection import Connection
from simplivity import exceptions
from simplivity.resources import omnistack_clusters as clusters
class OmnistackClustersTest(unittest.TestCase):
    """Unit tests for the OmnistackClusters resource client.

    HTTP traffic is mocked at the Connection.get/post level, so these
    tests verify URL construction and response unwrapping only.
    """
    def setUp(self):
        # Fresh connection + resource client per test; the token is a dummy.
        self.connection = Connection('127.0.0.1')
        self.connection._access_token = "<PASSWORD>"
        self.clusters = clusters.OmnistackClusters(self.connection)
    @mock.patch.object(Connection, "get")
    def test_get_all_returns_resource_obj(self, mock_get):
        """get_all wraps each returned record in an OmnistackCluster."""
        url = "{}?case=sensitive&limit=500&offset=0&order=descending&sort=name".format(clusters.URL)
        resource_data = [{'id': '12345'}, {'id': '67890'}]
        mock_get.return_value = {clusters.DATA_FIELD: resource_data}
        objs = self.clusters.get_all()
        self.assertIsInstance(objs[0], clusters.OmnistackCluster)
        self.assertEqual(objs[0].data, resource_data[0])
        mock_get.assert_called_once_with(url)
    @mock.patch.object(Connection, "get")
    def test_get_by_name_found(self, mock_get):
        """get_by_name queries with a name filter and wraps the result."""
        name = "testname"
        url = "{}?case=sensitive&limit=500&name={}&offset=0&order=descending&sort=name".format(clusters.URL, name)
        resource_data = [{'id': '12345', 'name': name}]
        mock_get.return_value = {clusters.DATA_FIELD: resource_data}
        obj = self.clusters.get_by_name(name)
        self.assertIsInstance(obj, clusters.OmnistackCluster)
        mock_get.assert_called_once_with(url)
    @mock.patch.object(Connection, "get")
    def test_get_by_name_not_found(self, mock_get):
        """An empty result raises HPESimpliVityResourceNotFound."""
        name = "testname"
        resource_data = []
        mock_get.return_value = {clusters.DATA_FIELD: resource_data}
        with self.assertRaises(exceptions.HPESimpliVityResourceNotFound) as error:
            self.clusters.get_by_name(name)
        self.assertEqual(error.exception.msg, "Resource not found with the name {}".format(name))
    @mock.patch.object(Connection, "get")
    def test_get_by_id_found(self, mock_get):
        """get_by_id queries with an id filter and wraps the result."""
        resource_id = "12345"
        url = "{}?case=sensitive&id={}&limit=500&offset=0&order=descending&sort=name".format(clusters.URL, resource_id)
        resource_data = [{'id': resource_id}]
        mock_get.return_value = {clusters.DATA_FIELD: resource_data}
        obj = self.clusters.get_by_id(resource_id)
        self.assertIsInstance(obj, clusters.OmnistackCluster)
        mock_get.assert_called_once_with(url)
    @mock.patch.object(Connection, "get")
    def test_get_by_id_not_found(self, mock_get):
        """An empty result raises HPESimpliVityResourceNotFound."""
        resource_id = "12345"
        resource_data = []
        mock_get.return_value = {clusters.DATA_FIELD: resource_data}
        with self.assertRaises(exceptions.HPESimpliVityResourceNotFound) as error:
            self.clusters.get_by_id(resource_id)
        self.assertEqual(error.exception.msg, "Resource not found with the id {}".format(resource_id))
    def test_get_by_data(self):
        """get_by_data wraps a raw dict without any HTTP call."""
        resource_data = {'id': '12345'}
        obj = self.clusters.get_by_data(resource_data)
        self.assertIsInstance(obj, clusters.OmnistackCluster)
        self.assertEqual(obj.data, resource_data)
    @mock.patch.object(Connection, "get")
    def test_get_time_zones(self, mock_get):
        """get_time_zone_list passes the raw time-zone list through."""
        resource_data = [
            "America/Denver",
            "America/New_York"
        ]
        mock_get.return_value = resource_data
        time_zones = self.clusters.get_time_zone_list()
        self.assertEqual(time_zones, resource_data)
    @mock.patch.object(Connection, "get")
    def test_get_connected_clusters(self, mock_get):
        """get_connected_clusters fetches peers, then refreshes each by id."""
        # Two GETs are expected: the peer listing, then the id lookup.
        mock_get.side_effect = [{'omnistack_clusters': [{'id': '12345'}]}, {'omnistack_clusters': [{'id': '12345'}]}]
        cluster_data = {'name': 'name1', 'id': '12345'}
        cluster = self.clusters.get_by_data(cluster_data)
        obj = cluster.get_connected_clusters()
        self.assertIsInstance(obj[0], clusters.OmnistackCluster)
        mock_get.assert_has_calls([call('/omnistack_clusters/12345/connected_clusters'),
                                   call('/omnistack_clusters?case=sensitive&id=12345&limit=500&offset=0&order=descending&sort=name')])
    @mock.patch.object(Connection, "post")
    @mock.patch.object(Connection, "get")
    def test_set_time_zone(self, mock_get, mock_post):
        """set_time_zone POSTs the new zone and returns the refreshed cluster."""
        resource_data = {'id': '12345', 'name': 'name1', 'time_zone': 'Zulu'}
        mock_get.return_value = {'omnistack_cluster': {'id': '12345', 'name': 'name1', 'time_zone': 'Africa/Accra'}}
        mock_post.return_value = None, [{'object_id': '12345'}]
        cluster = self.clusters.get_by_data(resource_data)
        cluster_obj = cluster.set_time_zone("Africa/Accra")
        self.assertEqual(cluster_obj.data['time_zone'], "Africa/Accra")
        data = {'time_zone': 'Africa/Accra'}
        mock_post.assert_called_once_with('/omnistack_clusters/12345/set_time_zone', data, custom_headers=None)
if __name__ == '__main__':
    # Allow running this test module directly with the unittest runner.
    unittest.main()
| [
"unittest.main",
"unittest.mock.patch.object",
"simplivity.resources.omnistack_clusters.OmnistackClusters",
"simplivity.connection.Connection",
"unittest.mock.call"
] | [((1087, 1123), 'unittest.mock.patch.object', 'mock.patch.object', (['Connection', '"""get"""'], {}), "(Connection, 'get')\n", (1104, 1123), False, 'from unittest import mock\n'), ((1627, 1663), 'unittest.mock.patch.object', 'mock.patch.object', (['Connection', '"""get"""'], {}), "(Connection, 'get')\n", (1644, 1663), False, 'from unittest import mock\n'), ((2139, 2175), 'unittest.mock.patch.object', 'mock.patch.object', (['Connection', '"""get"""'], {}), "(Connection, 'get')\n", (2156, 2175), False, 'from unittest import mock\n'), ((2583, 2619), 'unittest.mock.patch.object', 'mock.patch.object', (['Connection', '"""get"""'], {}), "(Connection, 'get')\n", (2600, 2619), False, 'from unittest import mock\n'), ((3097, 3133), 'unittest.mock.patch.object', 'mock.patch.object', (['Connection', '"""get"""'], {}), "(Connection, 'get')\n", (3114, 3133), False, 'from unittest import mock\n'), ((3794, 3830), 'unittest.mock.patch.object', 'mock.patch.object', (['Connection', '"""get"""'], {}), "(Connection, 'get')\n", (3811, 3830), False, 'from unittest import mock\n'), ((4133, 4169), 'unittest.mock.patch.object', 'mock.patch.object', (['Connection', '"""get"""'], {}), "(Connection, 'get')\n", (4150, 4169), False, 'from unittest import mock\n'), ((4797, 4834), 'unittest.mock.patch.object', 'mock.patch.object', (['Connection', '"""post"""'], {}), "(Connection, 'post')\n", (4814, 4834), False, 'from unittest import mock\n'), ((4840, 4876), 'unittest.mock.patch.object', 'mock.patch.object', (['Connection', '"""get"""'], {}), "(Connection, 'get')\n", (4857, 4876), False, 'from unittest import mock\n'), ((5572, 5587), 'unittest.main', 'unittest.main', ([], {}), '()\n', (5585, 5587), False, 'import unittest\n'), ((936, 959), 'simplivity.connection.Connection', 'Connection', (['"""127.0.0.1"""'], {}), "('127.0.0.1')\n", (946, 959), False, 'from simplivity.connection import Connection\n'), ((1037, 1080), 'simplivity.resources.omnistack_clusters.OmnistackClusters', 
'clusters.OmnistackClusters', (['self.connection'], {}), '(self.connection)\n', (1063, 1080), True, 'from simplivity.resources import omnistack_clusters as clusters\n'), ((4603, 4655), 'unittest.mock.call', 'call', (['"""/omnistack_clusters/12345/connected_clusters"""'], {}), "('/omnistack_clusters/12345/connected_clusters')\n", (4607, 4655), False, 'from unittest.mock import call\n'), ((4691, 4798), 'unittest.mock.call', 'call', (['"""/omnistack_clusters?case=sensitive&id=12345&limit=500&offset=0&order=descending&sort=name"""'], {}), "(\n '/omnistack_clusters?case=sensitive&id=12345&limit=500&offset=0&order=descending&sort=name'\n )\n", (4695, 4798), False, 'from unittest.mock import call\n')] |
import sys, os, argparse
sys.path.insert(0, os.path.abspath('..'))
import warnings
warnings.filterwarnings("ignore")
import foolbox
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
import json
import matplotlib.pyplot as plt
import umap
import seaborn as sns
from torchvision import datasets, transforms
from torch.optim.lr_scheduler import ExponentialLR
from torch.utils.data import DataLoader
from torchvision.datasets.mnist import MNIST, FashionMNIST
from test.attacks import *
from advertorch.attacks import *
from MAD_VAE import *
# argument parser
def parse_args(argv=None):
    """Parse command line arguments for MAD-VAE training.

    Args:
        argv: Optional list of argument strings; defaults to
            ``sys.argv[1:]``.  Exposed as a parameter so the parser can be
            driven programmatically (e.g. from tests).

    Returns:
        argparse.Namespace with the parsed options.
    """
    def str2bool(value):
        # argparse's type=bool treats every non-empty string — including
        # "False" — as True; convert the usual spellings explicitly instead.
        if isinstance(value, bool):
            return value
        if value.lower() in ('true', 't', 'yes', 'y', '1'):
            return True
        if value.lower() in ('false', 'f', 'no', 'n', '0'):
            return False
        raise argparse.ArgumentTypeError('Boolean value expected, got %r' % value)
    desc = "MAD-VAE for adversarial defense"
    parser = argparse.ArgumentParser(description=desc)
    parser.add_argument('--batch_size', type=int, default=512, help='Training batch size')
    parser.add_argument('--epochs', type=int, default=500, help='Training epoch numbers')
    parser.add_argument('--h_dim', type=int, default=4096, help='Hidden dimensions')
    parser.add_argument('--z_dim', type=int, default=128, help='Latent dimensions for images')
    parser.add_argument('--image_channels', type=int, default=1, help='Image channels')
    parser.add_argument('--image_size', type=int, default=28, help='Image size (default to be squared images)')
    parser.add_argument('--num_classes', type=int, default=10, help='Number of image classes')
    parser.add_argument('--log_dir', type=str, default='v_logs', help='Logs directory')
    parser.add_argument('--lr', type=float, default=0.001, help='Learning rate for the Adam optimizer')
    parser.add_argument('--data_root', type=str, default='data', help='Data directory')
    parser.add_argument('--model_dir', type=str, default='pretrained_model', help='Pretrained model directory')
    parser.add_argument('--use_gpu', type=str2bool, default=True, help='If use GPU for training')
    parser.add_argument('--gpu_num', type=int, default=1, choices=range(0,5), help='GPU numbers available for parallel training')
    parser.add_argument('--experiment', type=int, default=0, choices=range(0,4))
    return parser.parse_args(argv)
'''
Classifier for generating the adversarial examples, (similar to the one in MagNet: https://arxiv.org/pdf/1705.09064.pdf)
MNIST accuracy: 0.993100
FashionMNIST accuracy: 0.926600
'''
class Classifier(nn.Module):
    """Small CNN classifier used to craft adversarial examples.

    Architecture: two convolutional blocks (each 2x conv3x3 + ReLU,
    followed by 2x2 max pooling) and two fully connected layers; the
    output is per-class log-probabilities.
    """
    def __init__(self, args):
        super(Classifier, self).__init__()
        self.name = 'Classifier'
        self.image_size = args.image_size
        self.image_channels = args.image_channels
        # Block 1: channels -> 32 -> 32, then pool.
        self.conv1 = nn.Conv2d(self.image_channels, 32, 3, 1, 1)
        self.conv2 = nn.Conv2d(32, 32, 3, 1, 1)
        self.pool1 = nn.MaxPool2d(2)
        # Block 2: 32 -> 64 -> 64, then pool.
        self.conv3 = nn.Conv2d(32, 64, 3, 1, 1)
        self.conv4 = nn.Conv2d(64, 64, 3, 1, 1)
        self.pool2 = nn.MaxPool2d(2)
        # 64 channels * 7 * 7 spatial positions = 3136 features for 28x28 input.
        self.fc1 = nn.Linear(3136, 200)
        self.fc2 = nn.Linear(200, 10)
    def forward(self, x):
        """Return log-softmax class scores for a batch of images."""
        self.batch_size = x.size(0)
        out = self.pool1(F.relu(self.conv2(F.relu(self.conv1(x)))))
        out = self.pool2(F.relu(self.conv4(F.relu(self.conv3(out)))))
        out = out.view(self.batch_size, -1)
        out = self.fc2(F.relu(self.fc1(out)))
        return F.log_softmax(out, dim=1)
if __name__ == "__main__":
# get arguments
args = parse_args()
sns.set(style='white', context='paper', rc={'figure.figsize':(14,10)})
data = np.load('../data/xs_mnist.npy')
data = data.reshape(data.shape[0],28*28)
sample = np.random.randint(data.shape[0], size=3000)
data = data[sample,:]
y_s = np.load('../data/ys_mnist.npy')
y_s = y_s[sample]
fit = umap.UMAP(random_state=42, n_components=2)
u = fit.fit_transform(data)
plt.scatter(u[:,0], u[:,1], c=y_s, cmap='Spectral',s=14)
plt.gca().set_aspect('equal', 'datalim')
clb = plt.colorbar(boundaries=np.arange(11)-0.5)
clb.set_ticks(np.arange(10))
clb.ax.tick_params(labelsize=18)
plt.xticks([])
plt.yticks([])
plt.title(f'UMAP embedding of MNIST', fontsize=24);
plt.savefig(f'img/MNIST.png', dpi=300)
plt.clf()
| [
"matplotlib.pyplot.title",
"numpy.load",
"argparse.ArgumentParser",
"matplotlib.pyplot.clf",
"numpy.random.randint",
"numpy.arange",
"matplotlib.pyplot.gca",
"os.path.abspath",
"matplotlib.pyplot.yticks",
"torch.nn.functional.log_softmax",
"torch.nn.Linear",
"matplotlib.pyplot.xticks",
"seab... | [((83, 116), 'warnings.filterwarnings', 'warnings.filterwarnings', (['"""ignore"""'], {}), "('ignore')\n", (106, 116), False, 'import warnings\n'), ((44, 65), 'os.path.abspath', 'os.path.abspath', (['""".."""'], {}), "('..')\n", (59, 65), False, 'import sys, os, argparse\n'), ((696, 737), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': 'desc'}), '(description=desc)\n', (719, 737), False, 'import sys, os, argparse\n'), ((3366, 3438), 'seaborn.set', 'sns.set', ([], {'style': '"""white"""', 'context': '"""paper"""', 'rc': "{'figure.figsize': (14, 10)}"}), "(style='white', context='paper', rc={'figure.figsize': (14, 10)})\n", (3373, 3438), True, 'import seaborn as sns\n'), ((3449, 3480), 'numpy.load', 'np.load', (['"""../data/xs_mnist.npy"""'], {}), "('../data/xs_mnist.npy')\n", (3456, 3480), True, 'import numpy as np\n'), ((3539, 3582), 'numpy.random.randint', 'np.random.randint', (['data.shape[0]'], {'size': '(3000)'}), '(data.shape[0], size=3000)\n', (3556, 3582), True, 'import numpy as np\n'), ((3620, 3651), 'numpy.load', 'np.load', (['"""../data/ys_mnist.npy"""'], {}), "('../data/ys_mnist.npy')\n", (3627, 3651), True, 'import numpy as np\n'), ((3685, 3727), 'umap.UMAP', 'umap.UMAP', ([], {'random_state': '(42)', 'n_components': '(2)'}), '(random_state=42, n_components=2)\n', (3694, 3727), False, 'import umap\n'), ((3765, 3824), 'matplotlib.pyplot.scatter', 'plt.scatter', (['u[:, 0]', 'u[:, 1]'], {'c': 'y_s', 'cmap': '"""Spectral"""', 's': '(14)'}), "(u[:, 0], u[:, 1], c=y_s, cmap='Spectral', s=14)\n", (3776, 3824), True, 'import matplotlib.pyplot as plt\n'), ((3994, 4008), 'matplotlib.pyplot.xticks', 'plt.xticks', (['[]'], {}), '([])\n', (4004, 4008), True, 'import matplotlib.pyplot as plt\n'), ((4013, 4027), 'matplotlib.pyplot.yticks', 'plt.yticks', (['[]'], {}), '([])\n', (4023, 4027), True, 'import matplotlib.pyplot as plt\n'), ((4032, 4082), 'matplotlib.pyplot.title', 'plt.title', (['f"""UMAP embedding of MNIST"""'], 
{'fontsize': '(24)'}), "(f'UMAP embedding of MNIST', fontsize=24)\n", (4041, 4082), True, 'import matplotlib.pyplot as plt\n'), ((4088, 4126), 'matplotlib.pyplot.savefig', 'plt.savefig', (['f"""img/MNIST.png"""'], {'dpi': '(300)'}), "(f'img/MNIST.png', dpi=300)\n", (4099, 4126), True, 'import matplotlib.pyplot as plt\n'), ((4131, 4140), 'matplotlib.pyplot.clf', 'plt.clf', ([], {}), '()\n', (4138, 4140), True, 'import matplotlib.pyplot as plt\n'), ((2558, 2601), 'torch.nn.Conv2d', 'nn.Conv2d', (['self.image_channels', '(32)', '(3)', '(1)', '(1)'], {}), '(self.image_channels, 32, 3, 1, 1)\n', (2567, 2601), True, 'import torch.nn as nn\n'), ((2623, 2649), 'torch.nn.Conv2d', 'nn.Conv2d', (['(32)', '(32)', '(3)', '(1)', '(1)'], {}), '(32, 32, 3, 1, 1)\n', (2632, 2649), True, 'import torch.nn as nn\n'), ((2671, 2686), 'torch.nn.MaxPool2d', 'nn.MaxPool2d', (['(2)'], {}), '(2)\n', (2683, 2686), True, 'import torch.nn as nn\n'), ((2708, 2734), 'torch.nn.Conv2d', 'nn.Conv2d', (['(32)', '(64)', '(3)', '(1)', '(1)'], {}), '(32, 64, 3, 1, 1)\n', (2717, 2734), True, 'import torch.nn as nn\n'), ((2756, 2782), 'torch.nn.Conv2d', 'nn.Conv2d', (['(64)', '(64)', '(3)', '(1)', '(1)'], {}), '(64, 64, 3, 1, 1)\n', (2765, 2782), True, 'import torch.nn as nn\n'), ((2804, 2819), 'torch.nn.MaxPool2d', 'nn.MaxPool2d', (['(2)'], {}), '(2)\n', (2816, 2819), True, 'import torch.nn as nn\n'), ((2839, 2859), 'torch.nn.Linear', 'nn.Linear', (['(3136)', '(200)'], {}), '(3136, 200)\n', (2848, 2859), True, 'import torch.nn as nn\n'), ((2879, 2897), 'torch.nn.Linear', 'nn.Linear', (['(200)', '(10)'], {}), '(200, 10)\n', (2888, 2897), True, 'import torch.nn as nn\n'), ((3265, 3288), 'torch.nn.functional.log_softmax', 'F.log_softmax', (['x'], {'dim': '(1)'}), '(x, dim=1)\n', (3278, 3288), True, 'import torch.nn.functional as F\n'), ((3938, 3951), 'numpy.arange', 'np.arange', (['(10)'], {}), '(10)\n', (3947, 3951), True, 'import numpy as np\n'), ((3826, 3835), 'matplotlib.pyplot.gca', 'plt.gca', ([], 
{}), '()\n', (3833, 3835), True, 'import matplotlib.pyplot as plt\n'), ((3901, 3914), 'numpy.arange', 'np.arange', (['(11)'], {}), '(11)\n', (3910, 3914), True, 'import numpy as np\n')] |
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Extending JAX's vmap to work like NumPY's gufuncs.
From the `example notebook <https://nbviewer.jupyter.org/github/google/jax/blob/master/notebooks/gufuncs.ipynb>`_ by `Stephan Hoyer <https://github.com/shoyer>`_.
What is a gufunc?
=================
`Generalized universal functions
<https://docs.scipy.org/doc/numpy-1.15.0/reference/c-api.generalized-ufuncs.html>`_
("gufuncs") are one of my favorite abstractions from NumPy. They generalize
NumPy's `broadcasting rules
<https://docs.scipy.org/doc/numpy-1.15.0/user/basics.broadcasting.html>`_ to
handle non-scalar operations. When a gufuncs is applied to arrays, there are:
* "core dimensions" over which an operation is defined.
* "broadcast dimensions" over which operations can be automatically vectorized.
A string `signature <https://docs.scipy.org/doc/numpy-1.15.0/reference/c-api.generalized-ufuncs.html#details-of-signature>`_
associated with each gufunc controls how this happens by indicating how core
dimensions are mapped between inputs and outputs. The syntax is easiest to
understand by looking at a few examples:
* Addition: `(),()->()`
* 1D inner product: `(i),(i)->()`
* 1D sum: `(i)->()`
* Matrix multiplication: `(m,n),(n,k)->(m,k)`
Why write gufuncs?
=====================
From a user perspective, gufuncs are nice because they're guaranteed to
vectorize in a consistent and general fashion. For example, by default gufuncs
use the last dimensions of arrays as core dimensions, but you can control that
explicitly with the ``axis`` or ``axes`` arguments.
From a developer perspective, gufuncs are nice because they simplify your work:
you only need to think about the core logic of your function, not how it
handles arbitrary dimensional input. You can just write that down in a simple,
declarative way.
JAX makes it easy to write high-level performant code
=====================================================
Unfortunately, writing NumPy gufuncs today is somewhat non-trivial. Your
options today are:
1. Write the inner loops yourself in C.
2. `np.vectorize <https://docs.scipy.org/doc/numpy/reference/generated/numpy.vectorize.html>`_ creates something kind of like a gufunc, but it's painfully slow: the outer loop is performed in Python.
3. `numba.guvectorize <https://numba.pydata.org/numba-doc/dev/user/vectorize.html>`_ can work well, if you don't need further code transformations like automatic differentiation.
JAX's ``vmap`` contains all the core functionality we need to write functions that work like gufuncs. JAX gufuncs play nicely with other transformations like ``grad`` and ``jit``.
A simple example
================
Consider a simple example from data preprocessing, centering an array.
Here's how we might write a vectorized version using NumPy::
def center(array, axis=-1):
# array can have any number of dimensions
bias = np.mean(array, axis=axis)
debiased = array - np.expand_dims(bias, axis)
return bias, debiased
And here's how we could write a vectorized version using JAX gufuncs::
@vectorize('(n)->(),(n)')
def center(array):
# array is always a 1D vector
bias = np.mean(array)
debiased = array - bias
return bias, debiased
See the difference?
* Instead of needing to think about broadcasting while writing the entire function, we can write the function assuming the input is always a vector.
* We get the ``axis`` argument automatically, without needing to write it ourselves.
* As a bonus, the decorator makes the function self-documenting: a reader immediately knows that it handles higher dimensional input and output correctly.
"""
from jax import grad, jit, vmap
import jax.numpy as jnp
import numpy as np
import re
# See http://docs.scipy.org/doc/numpy/reference/c-api.generalized-ufuncs.html
_DIMENSION_NAME = r'\w+'
_CORE_DIMENSION_LIST = '(?:{0:}(?:,{0:})*)?'.format(_DIMENSION_NAME)
_ARGUMENT = r'\({}\)'.format(_CORE_DIMENSION_LIST)
_ARGUMENT_LIST = '{0:}(?:,{0:})*'.format(_ARGUMENT)
_SIGNATURE = '^{0:}->{0:}$'.format(_ARGUMENT_LIST)
def _parse_gufunc_signature(signature):
"""Parse string signatures for a generalized universal function.
Args:
signature : string
Generalized universal function signature, e.g., ``(m,n),(n,p)->(m,p)``
for ``np.matmul``.
Returns:
Tuple of input and output core dimensions parsed from the signature, each
of the form List[Tuple[str, ...]].
"""
if not re.match(_SIGNATURE, signature):
raise ValueError(
'not a valid gufunc signature: {}'.format(signature))
return tuple([tuple(re.findall(_DIMENSION_NAME, arg))
for arg in re.findall(_ARGUMENT, arg_list)]
for arg_list in signature.split('->'))
def _update_dim_sizes(dim_sizes, arg, core_dims):
"""Incrementally check and update core dimension sizes for a single argument.
Args:
dim_sizes : Dict[str, int]
Sizes of existing core dimensions. Will be updated in-place.
arg : ndarray
Argument to examine.
core_dims : Tuple[str, ...]
Core dimensions for this argument.
"""
if not core_dims:
return
num_core_dims = len(core_dims)
if arg.ndim < num_core_dims:
raise ValueError(
'%d-dimensional argument does not have enough '
'dimensions for all core dimensions %r'
% (arg.ndim, core_dims))
core_shape = arg.shape[-num_core_dims:]
for dim, size in zip(core_dims, core_shape):
if dim in dim_sizes:
if size != dim_sizes[dim]:
raise ValueError(
'inconsistent size for core dimension %r: %r vs %r'
% (dim, size, dim_sizes[dim]))
else:
dim_sizes[dim] = size
def _parse_input_dimensions(args, input_core_dims):
"""Parse broadcast and core dimensions for vectorize with a signature.
Args:
args : Tuple[ndarray, ...]
Tuple of input arguments to examine.
input_core_dims : List[Tuple[str, ...]]
List of core dimensions corresponding to each input.
Returns:
broadcast_shape : Tuple[int, ...]
Common shape to broadcast all non-core dimensions to.
dim_sizes : Dict[str, int]
Common sizes for named core dimensions.
"""
broadcast_args = []
dim_sizes = {}
for arg, core_dims in zip(args, input_core_dims):
_update_dim_sizes(dim_sizes, arg, core_dims)
ndim = arg.ndim - len(core_dims)
dummy_array = np.lib.stride_tricks.as_strided(0, arg.shape[:ndim])
broadcast_args.append(dummy_array)
broadcast_shape = np.lib.stride_tricks._broadcast_shape(*broadcast_args)
return broadcast_shape, dim_sizes
def _calculate_shapes(broadcast_shape, dim_sizes, list_of_core_dims):
"""Helper for calculating broadcast shapes with core dimensions."""
return [broadcast_shape + tuple(dim_sizes[dim] for dim in core_dims)
for core_dims in list_of_core_dims]
# adapted from np.vectorize (again authored by shoyer@)
def broadcast_with_core_dims(args, input_core_dims, output_core_dims):
if len(args) != len(input_core_dims):
raise TypeError('wrong number of positional arguments: '
'expected %r, got %r'
% (len(input_core_dims), len(args)))
broadcast_shape, dim_sizes = _parse_input_dimensions(
args, input_core_dims)
input_shapes = _calculate_shapes(broadcast_shape, dim_sizes,
input_core_dims)
args = [jnp.broadcast_to(arg, shape)
for arg, shape in zip(args, input_shapes)]
return args
def verify_axis_is_supported(input_core_dims, output_core_dims):
all_core_dims = set()
for input_or_output_core_dims in [input_core_dims, output_core_dims]:
for core_dims in input_or_output_core_dims:
all_core_dims.update(core_dims)
if len(core_dims) > 1:
raise ValueError('only one gufuncs with one core dim support axis')
def reorder_inputs(args, axis, input_core_dims):
return tuple(jnp.moveaxis(arg, axis, -1) if core_dims else arg
for arg, core_dims in zip(args, input_core_dims))
def reorder_outputs(result, axis, output_core_dims):
if not isinstance(result, tuple):
result = (result,)
result = tuple(jnp.moveaxis(res, -1, axis) if core_dims else res
for res, core_dims in zip(result, output_core_dims))
if len(result) == 1:
(result,) = result
return result
import functools
def vectorize(signature):
"""Vectorize a function using JAX.
Turns an abritrary function into a numpy style "gufunc". Once
you specify the behavior of the core axis, the rest will be
broadcast naturally.
Args:
signature: an einsum style signature that defines how the core dimensions are mapped between inputs and outputs.
Returns:
The vectorized 'gufunc' that will automatically broadcast
while maintaining the specified core logic, the returned
function also has a new ``axis`` parameter for specifying
which axis should be treated as the core one.
"""
input_core_dims, output_core_dims = _parse_gufunc_signature(signature)
def decorator(func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
axis = kwargs.get('axis') # for python2 compat.
if axis is not None:
verify_axis_is_supported(input_core_dims, output_core_dims)
args = reorder_inputs(args, axis, input_core_dims)
broadcast_args = broadcast_with_core_dims(
args, input_core_dims, output_core_dims)
num_batch_dims = len(broadcast_args[0].shape) - len(input_core_dims[0])
vectorized_func = func
for _ in range(num_batch_dims):
vectorized_func = vmap(vectorized_func)
result = vectorized_func(*broadcast_args)
if axis is not None:
result = reorder_outputs(result, axis, output_core_dims)
return result
return wrapper
return decorator
| [
"jax.vmap",
"numpy.lib.stride_tricks._broadcast_shape",
"re.match",
"jax.numpy.moveaxis",
"numpy.lib.stride_tricks.as_strided",
"re.findall",
"functools.wraps",
"jax.numpy.broadcast_to"
] | [((7139, 7193), 'numpy.lib.stride_tricks._broadcast_shape', 'np.lib.stride_tricks._broadcast_shape', (['*broadcast_args'], {}), '(*broadcast_args)\n', (7176, 7193), True, 'import numpy as np\n'), ((4982, 5013), 're.match', 're.match', (['_SIGNATURE', 'signature'], {}), '(_SIGNATURE, signature)\n', (4990, 5013), False, 'import re\n'), ((7021, 7073), 'numpy.lib.stride_tricks.as_strided', 'np.lib.stride_tricks.as_strided', (['(0)', 'arg.shape[:ndim]'], {}), '(0, arg.shape[:ndim])\n', (7052, 7073), True, 'import numpy as np\n'), ((8039, 8067), 'jax.numpy.broadcast_to', 'jnp.broadcast_to', (['arg', 'shape'], {}), '(arg, shape)\n', (8055, 8067), True, 'import jax.numpy as jnp\n'), ((9679, 9700), 'functools.wraps', 'functools.wraps', (['func'], {}), '(func)\n', (9694, 9700), False, 'import functools\n'), ((8546, 8573), 'jax.numpy.moveaxis', 'jnp.moveaxis', (['arg', 'axis', '(-1)'], {}), '(arg, axis, -1)\n', (8558, 8573), True, 'import jax.numpy as jnp\n'), ((8792, 8819), 'jax.numpy.moveaxis', 'jnp.moveaxis', (['res', '(-1)', 'axis'], {}), '(res, -1, axis)\n', (8804, 8819), True, 'import jax.numpy as jnp\n'), ((10218, 10239), 'jax.vmap', 'vmap', (['vectorized_func'], {}), '(vectorized_func)\n', (10222, 10239), False, 'from jax import grad, jit, vmap\n'), ((5131, 5163), 're.findall', 're.findall', (['_DIMENSION_NAME', 'arg'], {}), '(_DIMENSION_NAME, arg)\n', (5141, 5163), False, 'import re\n'), ((5194, 5225), 're.findall', 're.findall', (['_ARGUMENT', 'arg_list'], {}), '(_ARGUMENT, arg_list)\n', (5204, 5225), False, 'import re\n')] |
# coding=utf-8
# Copyright 2019-present, the HuggingFace Inc. team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Dict, List, Optional, Union
import requests
from .utils.logging import get_logger
logger = get_logger(__name__)
ENDPOINT = "https://huggingface.co"
class ObjectInfo:
"""
Info about a public dataset or Metric accessible from our S3.
"""
def __init__(
self,
id: str,
key: str,
lastModified: Optional[str] = None,
description: Optional[str] = None,
citation: Optional[str] = None,
size: Optional[int] = None,
etag: Optional[str] = None,
siblings: List[Dict] = None,
author: str = None,
**kwargs,
):
self.id = id # id of dataset
self.key = key # S3 object key of config.json
self.lastModified = lastModified
self.description = description
self.citation = citation
self.size = size
self.etag = etag
self.siblings = siblings # list of files that constitute the dataset
self.author = author
for k, v in kwargs.items():
setattr(self, k, v)
def __repr__(self):
single_line_description = self.description.replace("\n", "") if self.description is not None else ""
return f"datasets.ObjectInfo(\n\tid='{self.id}',\n\tdescription='{single_line_description}',\n\tfiles={self.siblings}\n)"
class HfApi:
ALLOWED_FILE_TYPES = ["datasets", "metrics"]
def __init__(self, endpoint=None):
"""Create Api using a specific endpoint and also the file types ('datasets' or 'metrics')"""
self.endpoint = endpoint if endpoint is not None else ENDPOINT
def dataset_list(self, with_community_datasets=True, id_only=False) -> Union[List[ObjectInfo], List[str]]:
"""
Get the public list of all the datasets on huggingface, including the community datasets
"""
path = "{}/api/datasets".format(self.endpoint)
r = requests.get(path)
r.raise_for_status()
d = r.json()
datasets = [ObjectInfo(**x) for x in d]
if not with_community_datasets:
datasets = [d for d in datasets if "/" not in d.id]
if id_only:
datasets = [d.id for d in datasets]
return datasets
def metric_list(self, with_community_metrics=True, id_only=False) -> Union[List[ObjectInfo], List[str]]:
"""
Get the public list of all the metrics on huggingface, including the community metrics
"""
path = "{}/api/metrics".format(self.endpoint)
r = requests.get(path)
r.raise_for_status()
d = r.json()
metrics = [ObjectInfo(**x) for x in d]
if not with_community_metrics:
metrics = [m for m in metrics if "/" not in m.id]
if id_only:
metrics = [m.id for m in metrics]
return metrics
| [
"requests.get"
] | [((2518, 2536), 'requests.get', 'requests.get', (['path'], {}), '(path)\n', (2530, 2536), False, 'import requests\n'), ((3126, 3144), 'requests.get', 'requests.get', (['path'], {}), '(path)\n', (3138, 3144), False, 'import requests\n')] |
from __future__ import print_function
from TestBase import TestBase
from util import run_cmd, capture
class Compilers(TestBase):
error_message=""
def __init__(self):
pass
def setup(self):
pass
def name(self):
return "Check compilers"
def description(self):
return "Check compilers:"
def error(self):
print("\033[1;31m%s\033[0m" %(self.error_message))
def help(self):
print("\tPlease check your $PATH again, compilers are missing.\n",
"\tIf you unload the compilers on purpose, please ignore this test.\n")
def execute(self):
compilers=["gcc","g++","gfortran","icc","icpc","ifort","mpicc","mpicxx","mpif90"]
Flag=True
for compiler1 in compilers:
typecmd="type %s" %compiler1
output=capture(typecmd)
# print(output)
if "not found" in output:
print("\033[1;33m\tWarning: %s is not available right now.\033[0m" %compiler1)
# Flag=False
# self.error_message+="\tError: Compiler %s is not available at this time!\n" %compiler1
return Flag
| [
"util.capture"
] | [((802, 818), 'util.capture', 'capture', (['typecmd'], {}), '(typecmd)\n', (809, 818), False, 'from util import run_cmd, capture\n')] |
#!/usr/bin/env python
import os
import re
import sys
from setuptools import setup, find_packages
version = re.compile(r'VERSION\s*=\s*\((.*?)\)')
def get_package_version():
"returns package version without importing it"
base = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(base, "flower/__init__.py")) as initf:
for line in initf:
m = version.match(line.strip())
if not m:
continue
return ".".join(m.groups()[0].split(", "))
def get_requirements(filename):
return open('requirements/' + filename).read().splitlines()
classes = """
Development Status :: 4 - Beta
Intended Audience :: Developers
License :: OSI Approved :: BSD License
Topic :: System :: Distributed Computing
Programming Language :: Python
Programming Language :: Python :: 2
Programming Language :: Python :: 2.6
Programming Language :: Python :: 2.7
Programming Language :: Python :: 3
Programming Language :: Python :: 3.3
Programming Language :: Python :: 3.4
Programming Language :: Python :: Implementation :: CPython
Programming Language :: Python :: Implementation :: PyPy
Operating System :: OS Independent
"""
classifiers = [s.strip() for s in classes.split('\n') if s]
install_requires = get_requirements('default.txt')
if sys.version_info < (3, 0):
install_requires.append('futures')
setup(
name='flower',
version=get_package_version(),
description='Celery Flower',
long_description=open('README.rst').read(),
author='<NAME>',
author_email='<EMAIL>',
url='https://github.com/mher/flower',
license='BSD',
classifiers=classifiers,
packages=find_packages(exclude=['tests', 'tests.*']),
install_requires=install_requires,
test_suite="tests",
tests_require=get_requirements('test.txt'),
package_data={'flower': ['templates/*', 'static/**/*', 'static/*.*']},
entry_points={
'console_scripts': [
'flower = flower.__main__:main',
],
'celery.commands': [
'flower = flower.command:FlowerCommand',
],
},
)
| [
"os.path.dirname",
"os.path.join",
"setuptools.find_packages",
"re.compile"
] | [((110, 151), 're.compile', 're.compile', (['"""VERSION\\\\s*=\\\\s*\\\\((.*?)\\\\)"""'], {}), "('VERSION\\\\s*=\\\\s*\\\\((.*?)\\\\)')\n", (120, 151), False, 'import re\n'), ((256, 281), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (271, 281), False, 'import os\n'), ((1724, 1767), 'setuptools.find_packages', 'find_packages', ([], {'exclude': "['tests', 'tests.*']"}), "(exclude=['tests', 'tests.*'])\n", (1737, 1767), False, 'from setuptools import setup, find_packages\n'), ((297, 337), 'os.path.join', 'os.path.join', (['base', '"""flower/__init__.py"""'], {}), "(base, 'flower/__init__.py')\n", (309, 337), False, 'import os\n')] |
#!/usr/bin/python
# Copyright 2015 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This module is for management of the Timesketch application."""
from __future__ import unicode_literals
import os
import pwd
import sys
import uuid
import yaml
from flask import current_app
from flask_migrate import MigrateCommand
from flask_script import Command
from flask_script import Manager
from flask_script import Server
from flask_script import Option
from flask_script import prompt_bool
from flask_script import prompt_pass
from sqlalchemy.exc import IntegrityError
from werkzeug.exceptions import Forbidden
from timesketch import create_app
from timesketch.lib.datastores.elastic import ElasticsearchDataStore
from timesketch.models import db_session
from timesketch.models import drop_all
from timesketch.models.user import Group
from timesketch.models.user import User
from timesketch.models.sketch import SearchIndex
from timesketch.models.sketch import SearchTemplate
from timesketch.models.sketch import Sketch
from timesketch.models.sketch import Timeline
class DropDataBaseTables(Command):
"""Drop all database tables."""
def __init__(self):
super(DropDataBaseTables, self).__init__()
# pylint: disable=method-hidden
def run(self):
"""Drop all tables after user ha verified."""
verified = prompt_bool(
u'Do you really want to drop all the database tables?')
if verified:
sys.stdout.write(u'All tables dropped. Database is now empty.\n')
drop_all()
class AddUser(Command):
"""Create a new Timesketch user."""
option_list = (
Option(u'--username', u'-u', dest=u'username', required=True),
Option(u'--password', u'-p', dest=u'password', required=False), )
def __init__(self):
super(AddUser, self).__init__()
def get_password_from_prompt(self):
"""Get password from the command line prompt."""
first_password = prompt_pass(u'Enter password')
second_password = prompt_pass(u'Enter password again')
if first_password != second_password:
sys.stderr.write(u'Passwords don\'t match, try again.\n')
self.get_password_from_prompt()
return first_password
# pylint: disable=arguments-differ, method-hidden
def run(self, username, password):
"""Creates the user."""
if not password:
password = self.get_password_from_prompt()
password = unicode(password.decode(encoding=u'utf-8'))
username = unicode(username.decode(encoding=u'utf-8'))
user = User.get_or_create(username=username)
user.set_password(plaintext=password)
db_session.add(user)
db_session.commit()
sys.stdout.write(u'User {0:s} created/updated\n'.format(username))
class AddGroup(Command):
"""Create a new Timesketch group."""
option_list = (Option(u'--name', u'-n', dest=u'name', required=True), )
def __init__(self):
super(AddGroup, self).__init__()
# pylint: disable=arguments-differ, method-hidden
def run(self, name):
"""Creates the group."""
name = unicode(name.decode(encoding=u'utf-8'))
group = Group.get_or_create(name=name)
db_session.add(group)
db_session.commit()
sys.stdout.write(u'Group {0:s} created\n'.format(name))
class GroupManager(Command):
"""Manage group memberships."""
option_list = (
Option(
u'--remove',
u'-r',
dest=u'remove',
action=u'store_true',
required=False,
default=False),
Option(u'--group', u'-g', dest=u'group_name', required=True),
Option(u'--user', u'-u', dest=u'user_name', required=True), )
def __init__(self):
super(GroupManager, self).__init__()
# pylint: disable=arguments-differ, method-hidden
def run(self, remove, group_name, user_name):
"""Add the user to the group."""
group_name = unicode(group_name.decode(encoding=u'utf-8'))
user_name = unicode(user_name.decode(encoding=u'utf-8'))
group = Group.query.filter_by(name=group_name).first()
user = User.query.filter_by(username=user_name).first()
# Add or remove user from group
if remove:
try:
user.groups.remove(group)
sys.stdout.write(u'{0:s} removed from group {1:s}\n'.format(
user_name, group_name))
db_session.commit()
except ValueError:
sys.stdout.write(u'{0:s} is not a member of group {1:s}\n'.
format(user_name, group_name))
else:
user.groups.append(group)
try:
db_session.commit()
sys.stdout.write(u'{0:s} added to group {1:s}\n'.format(
user_name, group_name))
except IntegrityError:
sys.stdout.write(u'{0:s} is already a member of group {1:s}\n'.
format(user_name, group_name))
class AddSearchIndex(Command):
"""Create a new Timesketch searchindex."""
option_list = (
Option(u'--name', u'-n', dest=u'name', required=True),
Option(u'--index', u'-i', dest=u'index', required=True),
Option(u'--user', u'-u', dest=u'username', required=True), )
def __init__(self):
super(AddSearchIndex, self).__init__()
# pylint: disable=arguments-differ, method-hidden
def run(self, name, index, username):
"""Create the SearchIndex."""
es = ElasticsearchDataStore(
host=current_app.config[u'ELASTIC_HOST'],
port=current_app.config[u'ELASTIC_PORT'])
user = User.query.filter_by(username=username).first()
if not user:
sys.stderr.write(u'User does not exist\n')
sys.exit(1)
if not es.client.indices.exists(index=index):
sys.stderr.write(u'Index does not exist in the datastore\n')
sys.exit(1)
if SearchIndex.query.filter_by(name=name, index_name=index).first():
sys.stderr.write(
u'Index with this name already exist in Timesketch\n')
sys.exit(1)
searchindex = SearchIndex(
name=name, description=name, user=user, index_name=index)
searchindex.grant_permission(u'read')
db_session.add(searchindex)
db_session.commit()
sys.stdout.write(u'Search index {0:s} created\n'.format(name))
class PurgeTimeline(Command):
"""Delete timeline permanently from Timesketch and Elasticsearch."""
option_list = (Option(
u'--index', u'-i', dest=u'index_name', required=True), )
def __init__(self):
super(PurgeTimeline, self).__init__()
# pylint: disable=arguments-differ, method-hidden
def run(self, index_name):
"""Delete timeline in both Timesketch and Elasticsearch.
Args:
index_name: The name of the index in Elasticsearch
"""
index_name = unicode(index_name.decode(encoding=u'utf-8'))
searchindex = SearchIndex.query.filter_by(
index_name=index_name).first()
if not searchindex:
sys.stdout.write(u'No such index\n')
sys.exit()
es = ElasticsearchDataStore(
host=current_app.config[u'ELASTIC_HOST'],
port=current_app.config[u'ELASTIC_PORT'])
timelines = Timeline.query.filter_by(searchindex=searchindex).all()
sketches = [
t.sketch for t in timelines
if t.sketch and t.sketch.get_status.status != u'deleted'
]
if sketches:
sys.stdout.write(u'WARNING: This timeline is in use by:\n')
for sketch in sketches:
sys.stdout.write(u' * {0:s}\n'.format(sketch.name))
sys.stdout.flush()
really_delete = prompt_bool(
u'Are you sure you want to delete this timeline?')
if really_delete:
for timeline in timelines:
db_session.delete(timeline)
db_session.delete(searchindex)
db_session.commit()
es.client.indices.delete(index=index_name)
class SearchTemplateManager(Command):
"""Command Module to manipulate Search templates."""
option_list = (
Option(u'--import', u'-i', dest=u'import_location', required=False),
Option(u'--export', u'-e', dest=u'export_location', required=False),
)
# pylint: disable=arguments-differ, method-hidden
def run(self, import_location, export_location):
"""Export/Import search templates to/from file.
Args:
import_location: Path to the yaml file to import templates.
export_location: Path to the yaml file to export templates.
"""
if export_location:
search_templates = []
for search_template in SearchTemplate.query.all():
labels = []
for label in search_template.labels:
if label.label.startswith(u'supported_os:'):
labels.append(label.label.replace(
u'supported_os:', u''))
search_templates.append({
u'name': search_template.name,
u'query_string': search_template.query_string,
u'query_dsl': search_template.query_dsl,
u'supported_os': labels
})
with open(export_location, 'w') as fh:
yaml.safe_dump(search_templates, stream=fh)
if import_location:
try:
with open(import_location, 'rb') as fh:
search_templates = yaml.safe_load(fh)
except IOError as e:
sys.stdout.write(u'Unable to open file: {0:s}\n'.format(e))
sys.exit(1)
for search_template in search_templates:
name = search_template[u'name']
query_string = search_template[u'query_string'],
query_dsl = search_template[u'query_dsl']
# Skip search template if already exits.
if SearchTemplate.query.filter_by(name=name).first():
continue
imported_template = SearchTemplate(
name=name,
user=User(None),
query_string=query_string,
query_dsl=query_dsl)
# Add supported_os labels.
for supported_os in search_template[u'supported_os']:
label_name = u'supported_os:{0:s}'.format(supported_os)
label = SearchTemplate.Label.get_or_create(
label=label_name, user=None)
imported_template.labels.append(label)
# Set flag to identify local vs import templates.
remote_flag = SearchTemplate.Label.get_or_create(
label=u'remote_template', user=None)
imported_template.labels.append(remote_flag)
db_session.add(imported_template)
db_session.commit()
class ImportTimeline(Command):
"""Create a new Timesketch timeline from a file."""
option_list = (
Option('--file', '-f', dest='file_path', required=True),
Option('--sketch_id', '-s', dest='sketch_id', required=False),
Option('--username', '-u', dest='username', required=False),
Option('--timeline_name', '-n', dest='timeline_name',
required=False),
)
def __init__(self):
super(ImportTimeline, self).__init__()
# pylint: disable=arguments-differ, method-hidden
def run(self, file_path, sketch_id, username, timeline_name):
"""This is the run method."""
file_path = os.path.realpath(file_path)
file_path_no_extension, extension = os.path.splitext(file_path)
extension = extension.lstrip('.')
filename = os.path.basename(file_path_no_extension)
supported_extensions = ('plaso', 'csv', 'jsonl')
if not os.path.isfile(file_path):
sys.exit('No such file: {0:s}'.format(file_path))
if extension not in supported_extensions:
sys.exit(
'Extension {0:s} is not supported. '
'(supported extensions are: {1:s})'.format(
extension, ', '.join(supported_extensions)))
user = None
if not username:
username = pwd.getpwuid(os.stat(file_path).st_uid).pw_name
if username is not 'root':
user = User.query.filter_by(username=unicode(username)).first()
if not user:
sys.exit('Cannot determine user for file: {0:s}'.format(file_path))
sketch = None
# If filename starts with <number> then use that as sketch_id.
# E.g: 42_file_name.plaso means sketch_id is 42.
sketch_id_from_filename = filename.split('_')[0]
if not sketch_id and sketch_id_from_filename.isdigit():
sketch_id = sketch_id_from_filename
if sketch_id:
try:
sketch = Sketch.query.get_with_acl(sketch_id, user=user)
except Forbidden:
pass
if not timeline_name:
timeline_name = unicode(filename.replace('_', ' '))
# Remove sketch ID if present in the filename.
timeline_parts = timeline_name.split()
if timeline_parts[0].isdigit():
timeline_name = ' '.join(timeline_name.split()[1:])
if not sketch:
# Create a new sketch.
sketch_name = 'Sketch for: {0:s}'.format(timeline_name)
sketch = Sketch(
name=sketch_name, description=sketch_name, user=user)
# Need to commit here to be able to set permissions later.
db_session.add(sketch)
db_session.commit()
sketch.grant_permission(permission='read', user=user)
sketch.grant_permission(permission='write', user=user)
sketch.grant_permission(permission='delete', user=user)
sketch.status.append(sketch.Status(user=None, status=u'new'))
db_session.add(sketch)
db_session.commit()
index_name = unicode(uuid.uuid4().hex)
searchindex = SearchIndex.get_or_create(
name=timeline_name,
description=timeline_name,
user=user,
index_name=index_name)
searchindex.grant_permission(permission='read', user=user)
searchindex.grant_permission(permission='write', user=user)
searchindex.grant_permission(permission='delete', user=user)
searchindex.set_status('processing')
db_session.add(searchindex)
db_session.commit()
if sketch and sketch.has_permission(user, 'write'):
timeline = Timeline(
name=searchindex.name,
description=searchindex.description,
sketch=sketch,
user=user,
searchindex=searchindex)
timeline.set_status('processing')
sketch.timelines.append(timeline)
db_session.add(timeline)
db_session.commit()
# Start Celery pipeline for indexing and analysis.
# Import here to avoid circular imports.
from timesketch.lib import tasks
pipeline = tasks.build_index_pipeline(
file_path, timeline_name, index_name, extension, sketch.id)
pipeline.apply_async(task_id=index_name)
print('Imported {0:s} to sketch: {1:d} ({2:s})'.format(
file_path, sketch.id, sketch.name))
def main():
# Setup Flask-script command manager and register commands.
shell_manager = Manager(create_app)
shell_manager.add_command('add_user', AddUser())
shell_manager.add_command('add_group', AddGroup())
shell_manager.add_command('manage_group', GroupManager())
shell_manager.add_command('add_index', AddSearchIndex())
shell_manager.add_command('db', MigrateCommand)
shell_manager.add_command('drop_db', DropDataBaseTables())
shell_manager.add_command('purge', PurgeTimeline())
shell_manager.add_command('search_template', SearchTemplateManager())
shell_manager.add_command('import', ImportTimeline())
shell_manager.add_command('runserver',
Server(host='127.0.0.1', port=5000))
shell_manager.add_option(
'-c',
'--config',
dest='config',
default='/etc/timesketch.conf',
required=False)
shell_manager.run()
if __name__ == '__main__':
main()
| [
"sys.stdout.write",
"yaml.safe_dump",
"flask_script.Manager",
"os.path.isfile",
"timesketch.models.user.User.get_or_create",
"sys.stdout.flush",
"yaml.safe_load",
"timesketch.lib.tasks.build_index_pipeline",
"timesketch.models.sketch.SearchTemplate.query.filter_by",
"flask_script.prompt_bool",
"... | [((16389, 16408), 'flask_script.Manager', 'Manager', (['create_app'], {}), '(create_app)\n', (16396, 16408), False, 'from flask_script import Manager\n'), ((1872, 1939), 'flask_script.prompt_bool', 'prompt_bool', (['u"""Do you really want to drop all the database tables?"""'], {}), "(u'Do you really want to drop all the database tables?')\n", (1883, 1939), False, 'from flask_script import prompt_bool\n'), ((2169, 2230), 'flask_script.Option', 'Option', (['u"""--username"""', 'u"""-u"""'], {'dest': 'u"""username"""', 'required': '(True)'}), "(u'--username', u'-u', dest=u'username', required=True)\n", (2175, 2230), False, 'from flask_script import Option\n'), ((2240, 2302), 'flask_script.Option', 'Option', (['u"""--password"""', 'u"""-p"""'], {'dest': 'u"""password"""', 'required': '(False)'}), "(u'--password', u'-p', dest=u'password', required=False)\n", (2246, 2302), False, 'from flask_script import Option\n'), ((2494, 2524), 'flask_script.prompt_pass', 'prompt_pass', (['u"""Enter password"""'], {}), "(u'Enter password')\n", (2505, 2524), False, 'from flask_script import prompt_pass\n'), ((2551, 2587), 'flask_script.prompt_pass', 'prompt_pass', (['u"""Enter password again"""'], {}), "(u'Enter password again')\n", (2562, 2587), False, 'from flask_script import prompt_pass\n'), ((3125, 3162), 'timesketch.models.user.User.get_or_create', 'User.get_or_create', ([], {'username': 'username'}), '(username=username)\n', (3143, 3162), False, 'from timesketch.models.user import User\n'), ((3217, 3237), 'timesketch.models.db_session.add', 'db_session.add', (['user'], {}), '(user)\n', (3231, 3237), False, 'from timesketch.models import db_session\n'), ((3246, 3265), 'timesketch.models.db_session.commit', 'db_session.commit', ([], {}), '()\n', (3263, 3265), False, 'from timesketch.models import db_session\n'), ((3428, 3481), 'flask_script.Option', 'Option', (['u"""--name"""', 'u"""-n"""'], {'dest': 'u"""name"""', 'required': '(True)'}), "(u'--name', u'-n', dest=u'name', 
required=True)\n", (3434, 3481), False, 'from flask_script import Option\n'), ((3735, 3765), 'timesketch.models.user.Group.get_or_create', 'Group.get_or_create', ([], {'name': 'name'}), '(name=name)\n', (3754, 3765), False, 'from timesketch.models.user import Group\n'), ((3774, 3795), 'timesketch.models.db_session.add', 'db_session.add', (['group'], {}), '(group)\n', (3788, 3795), False, 'from timesketch.models import db_session\n'), ((3804, 3823), 'timesketch.models.db_session.commit', 'db_session.commit', ([], {}), '()\n', (3821, 3823), False, 'from timesketch.models import db_session\n'), ((3983, 4083), 'flask_script.Option', 'Option', (['u"""--remove"""', 'u"""-r"""'], {'dest': 'u"""remove"""', 'action': 'u"""store_true"""', 'required': '(False)', 'default': '(False)'}), "(u'--remove', u'-r', dest=u'remove', action=u'store_true', required=\n False, default=False)\n", (3989, 4083), False, 'from flask_script import Option\n'), ((4161, 4221), 'flask_script.Option', 'Option', (['u"""--group"""', 'u"""-g"""'], {'dest': 'u"""group_name"""', 'required': '(True)'}), "(u'--group', u'-g', dest=u'group_name', required=True)\n", (4167, 4221), False, 'from flask_script import Option\n'), ((4231, 4289), 'flask_script.Option', 'Option', (['u"""--user"""', 'u"""-u"""'], {'dest': 'u"""user_name"""', 'required': '(True)'}), "(u'--user', u'-u', dest=u'user_name', required=True)\n", (4237, 4289), False, 'from flask_script import Option\n'), ((5724, 5777), 'flask_script.Option', 'Option', (['u"""--name"""', 'u"""-n"""'], {'dest': 'u"""name"""', 'required': '(True)'}), "(u'--name', u'-n', dest=u'name', required=True)\n", (5730, 5777), False, 'from flask_script import Option\n'), ((5787, 5842), 'flask_script.Option', 'Option', (['u"""--index"""', 'u"""-i"""'], {'dest': 'u"""index"""', 'required': '(True)'}), "(u'--index', u'-i', dest=u'index', required=True)\n", (5793, 5842), False, 'from flask_script import Option\n'), ((5852, 5909), 'flask_script.Option', 'Option', 
(['u"""--user"""', 'u"""-u"""'], {'dest': 'u"""username"""', 'required': '(True)'}), "(u'--user', u'-u', dest=u'username', required=True)\n", (5858, 5909), False, 'from flask_script import Option\n'), ((6133, 6244), 'timesketch.lib.datastores.elastic.ElasticsearchDataStore', 'ElasticsearchDataStore', ([], {'host': "current_app.config[u'ELASTIC_HOST']", 'port': "current_app.config[u'ELASTIC_PORT']"}), "(host=current_app.config[u'ELASTIC_HOST'], port=\n current_app.config[u'ELASTIC_PORT'])\n", (6155, 6244), False, 'from timesketch.lib.datastores.elastic import ElasticsearchDataStore\n'), ((6803, 6872), 'timesketch.models.sketch.SearchIndex', 'SearchIndex', ([], {'name': 'name', 'description': 'name', 'user': 'user', 'index_name': 'index'}), '(name=name, description=name, user=user, index_name=index)\n', (6814, 6872), False, 'from timesketch.models.sketch import SearchIndex\n'), ((6940, 6967), 'timesketch.models.db_session.add', 'db_session.add', (['searchindex'], {}), '(searchindex)\n', (6954, 6967), False, 'from timesketch.models import db_session\n'), ((6976, 6995), 'timesketch.models.db_session.commit', 'db_session.commit', ([], {}), '()\n', (6993, 6995), False, 'from timesketch.models import db_session\n'), ((7191, 7251), 'flask_script.Option', 'Option', (['u"""--index"""', 'u"""-i"""'], {'dest': 'u"""index_name"""', 'required': '(True)'}), "(u'--index', u'-i', dest=u'index_name', required=True)\n", (7197, 7251), False, 'from flask_script import Option\n'), ((7852, 7963), 'timesketch.lib.datastores.elastic.ElasticsearchDataStore', 'ElasticsearchDataStore', ([], {'host': "current_app.config[u'ELASTIC_HOST']", 'port': "current_app.config[u'ELASTIC_PORT']"}), "(host=current_app.config[u'ELASTIC_HOST'], port=\n current_app.config[u'ELASTIC_PORT'])\n", (7874, 7963), False, 'from timesketch.lib.datastores.elastic import ElasticsearchDataStore\n'), ((8457, 8519), 'flask_script.prompt_bool', 'prompt_bool', (['u"""Are you sure you want to delete this timeline?"""'], {}), 
"(u'Are you sure you want to delete this timeline?')\n", (8468, 8519), False, 'from flask_script import prompt_bool\n'), ((8897, 8964), 'flask_script.Option', 'Option', (['u"""--import"""', 'u"""-i"""'], {'dest': 'u"""import_location"""', 'required': '(False)'}), "(u'--import', u'-i', dest=u'import_location', required=False)\n", (8903, 8964), False, 'from flask_script import Option\n'), ((8974, 9041), 'flask_script.Option', 'Option', (['u"""--export"""', 'u"""-e"""'], {'dest': 'u"""export_location"""', 'required': '(False)'}), "(u'--export', u'-e', dest=u'export_location', required=False)\n", (8980, 9041), False, 'from flask_script import Option\n'), ((11872, 11927), 'flask_script.Option', 'Option', (['"""--file"""', '"""-f"""'], {'dest': '"""file_path"""', 'required': '(True)'}), "('--file', '-f', dest='file_path', required=True)\n", (11878, 11927), False, 'from flask_script import Option\n'), ((11937, 11998), 'flask_script.Option', 'Option', (['"""--sketch_id"""', '"""-s"""'], {'dest': '"""sketch_id"""', 'required': '(False)'}), "('--sketch_id', '-s', dest='sketch_id', required=False)\n", (11943, 11998), False, 'from flask_script import Option\n'), ((12008, 12067), 'flask_script.Option', 'Option', (['"""--username"""', '"""-u"""'], {'dest': '"""username"""', 'required': '(False)'}), "('--username', '-u', dest='username', required=False)\n", (12014, 12067), False, 'from flask_script import Option\n'), ((12077, 12146), 'flask_script.Option', 'Option', (['"""--timeline_name"""', '"""-n"""'], {'dest': '"""timeline_name"""', 'required': '(False)'}), "('--timeline_name', '-n', dest='timeline_name', required=False)\n", (12083, 12146), False, 'from flask_script import Option\n'), ((12421, 12448), 'os.path.realpath', 'os.path.realpath', (['file_path'], {}), '(file_path)\n', (12437, 12448), False, 'import os\n'), ((12493, 12520), 'os.path.splitext', 'os.path.splitext', (['file_path'], {}), '(file_path)\n', (12509, 12520), False, 'import os\n'), ((12582, 12622), 
'os.path.basename', 'os.path.basename', (['file_path_no_extension'], {}), '(file_path_no_extension)\n', (12598, 12622), False, 'import os\n'), ((14943, 15053), 'timesketch.models.sketch.SearchIndex.get_or_create', 'SearchIndex.get_or_create', ([], {'name': 'timeline_name', 'description': 'timeline_name', 'user': 'user', 'index_name': 'index_name'}), '(name=timeline_name, description=timeline_name,\n user=user, index_name=index_name)\n', (14968, 15053), False, 'from timesketch.models.sketch import SearchIndex\n'), ((15358, 15385), 'timesketch.models.db_session.add', 'db_session.add', (['searchindex'], {}), '(searchindex)\n', (15372, 15385), False, 'from timesketch.models import db_session\n'), ((15394, 15413), 'timesketch.models.db_session.commit', 'db_session.commit', ([], {}), '()\n', (15411, 15413), False, 'from timesketch.models import db_session\n'), ((16029, 16119), 'timesketch.lib.tasks.build_index_pipeline', 'tasks.build_index_pipeline', (['file_path', 'timeline_name', 'index_name', 'extension', 'sketch.id'], {}), '(file_path, timeline_name, index_name, extension,\n sketch.id)\n', (16055, 16119), False, 'from timesketch.lib import tasks\n'), ((17016, 17051), 'flask_script.Server', 'Server', ([], {'host': '"""127.0.0.1"""', 'port': '(5000)'}), "(host='127.0.0.1', port=5000)\n", (17022, 17051), False, 'from flask_script import Server\n'), ((1986, 2051), 'sys.stdout.write', 'sys.stdout.write', (['u"""All tables dropped. Database is now empty.\n"""'], {}), "(u'All tables dropped. 
Database is now empty.\\n')\n", (2002, 2051), False, 'import sys\n'), ((2064, 2074), 'timesketch.models.drop_all', 'drop_all', ([], {}), '()\n', (2072, 2074), False, 'from timesketch.models import drop_all\n'), ((2646, 2702), 'sys.stderr.write', 'sys.stderr.write', (['u"""Passwords don\'t match, try again.\n"""'], {}), '(u"Passwords don\'t match, try again.\\n")\n', (2662, 2702), False, 'import sys\n'), ((6361, 6403), 'sys.stderr.write', 'sys.stderr.write', (['u"""User does not exist\n"""'], {}), "(u'User does not exist\\n')\n", (6377, 6403), False, 'import sys\n'), ((6416, 6427), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (6424, 6427), False, 'import sys\n'), ((6494, 6554), 'sys.stderr.write', 'sys.stderr.write', (['u"""Index does not exist in the datastore\n"""'], {}), "(u'Index does not exist in the datastore\\n')\n", (6510, 6554), False, 'import sys\n'), ((6567, 6578), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (6575, 6578), False, 'import sys\n'), ((6668, 6739), 'sys.stderr.write', 'sys.stderr.write', (['u"""Index with this name already exist in Timesketch\n"""'], {}), "(u'Index with this name already exist in Timesketch\\n')\n", (6684, 6739), False, 'import sys\n'), ((6769, 6780), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (6777, 6780), False, 'import sys\n'), ((7778, 7814), 'sys.stdout.write', 'sys.stdout.write', (['u"""No such index\n"""'], {}), "(u'No such index\\n')\n", (7794, 7814), False, 'import sys\n'), ((7827, 7837), 'sys.exit', 'sys.exit', ([], {}), '()\n', (7835, 7837), False, 'import sys\n'), ((8234, 8293), 'sys.stdout.write', 'sys.stdout.write', (['u"""WARNING: This timeline is in use by:\n"""'], {}), "(u'WARNING: This timeline is in use by:\\n')\n", (8250, 8293), False, 'import sys\n'), ((8654, 8684), 'timesketch.models.db_session.delete', 'db_session.delete', (['searchindex'], {}), '(searchindex)\n', (8671, 8684), False, 'from timesketch.models import db_session\n'), ((8697, 8716), 'timesketch.models.db_session.commit', 
'db_session.commit', ([], {}), '()\n', (8714, 8716), False, 'from timesketch.models import db_session\n'), ((9482, 9508), 'timesketch.models.sketch.SearchTemplate.query.all', 'SearchTemplate.query.all', ([], {}), '()\n', (9506, 9508), False, 'from timesketch.models.sketch import SearchTemplate\n'), ((12697, 12722), 'os.path.isfile', 'os.path.isfile', (['file_path'], {}), '(file_path)\n', (12711, 12722), False, 'import os\n'), ((14315, 14375), 'timesketch.models.sketch.Sketch', 'Sketch', ([], {'name': 'sketch_name', 'description': 'sketch_name', 'user': 'user'}), '(name=sketch_name, description=sketch_name, user=user)\n', (14321, 14375), False, 'from timesketch.models.sketch import Sketch\n'), ((14476, 14498), 'timesketch.models.db_session.add', 'db_session.add', (['sketch'], {}), '(sketch)\n', (14490, 14498), False, 'from timesketch.models import db_session\n'), ((14511, 14530), 'timesketch.models.db_session.commit', 'db_session.commit', ([], {}), '()\n', (14528, 14530), False, 'from timesketch.models import db_session\n'), ((14818, 14840), 'timesketch.models.db_session.add', 'db_session.add', (['sketch'], {}), '(sketch)\n', (14832, 14840), False, 'from timesketch.models import db_session\n'), ((14853, 14872), 'timesketch.models.db_session.commit', 'db_session.commit', ([], {}), '()\n', (14870, 14872), False, 'from timesketch.models import db_session\n'), ((15498, 15622), 'timesketch.models.sketch.Timeline', 'Timeline', ([], {'name': 'searchindex.name', 'description': 'searchindex.description', 'sketch': 'sketch', 'user': 'user', 'searchindex': 'searchindex'}), '(name=searchindex.name, description=searchindex.description, sketch\n =sketch, user=user, searchindex=searchindex)\n', (15506, 15622), False, 'from timesketch.models.sketch import Timeline\n'), ((15803, 15827), 'timesketch.models.db_session.add', 'db_session.add', (['timeline'], {}), '(timeline)\n', (15817, 15827), False, 'from timesketch.models import db_session\n'), ((15840, 15859), 
'timesketch.models.db_session.commit', 'db_session.commit', ([], {}), '()\n', (15857, 15859), False, 'from timesketch.models import db_session\n'), ((4657, 4695), 'timesketch.models.user.Group.query.filter_by', 'Group.query.filter_by', ([], {'name': 'group_name'}), '(name=group_name)\n', (4678, 4695), False, 'from timesketch.models.user import Group\n'), ((4719, 4759), 'timesketch.models.user.User.query.filter_by', 'User.query.filter_by', ([], {'username': 'user_name'}), '(username=user_name)\n', (4739, 4759), False, 'from timesketch.models.user import User\n'), ((5024, 5043), 'timesketch.models.db_session.commit', 'db_session.commit', ([], {}), '()\n', (5041, 5043), False, 'from timesketch.models import db_session\n'), ((5300, 5319), 'timesketch.models.db_session.commit', 'db_session.commit', ([], {}), '()\n', (5317, 5319), False, 'from timesketch.models import db_session\n'), ((6280, 6319), 'timesketch.models.user.User.query.filter_by', 'User.query.filter_by', ([], {'username': 'username'}), '(username=username)\n', (6300, 6319), False, 'from timesketch.models.user import User\n'), ((6590, 6646), 'timesketch.models.sketch.SearchIndex.query.filter_by', 'SearchIndex.query.filter_by', ([], {'name': 'name', 'index_name': 'index'}), '(name=name, index_name=index)\n', (6617, 6646), False, 'from timesketch.models.sketch import SearchIndex\n'), ((7665, 7715), 'timesketch.models.sketch.SearchIndex.query.filter_by', 'SearchIndex.query.filter_by', ([], {'index_name': 'index_name'}), '(index_name=index_name)\n', (7692, 7715), False, 'from timesketch.models.sketch import SearchIndex\n'), ((8005, 8054), 'timesketch.models.sketch.Timeline.query.filter_by', 'Timeline.query.filter_by', ([], {'searchindex': 'searchindex'}), '(searchindex=searchindex)\n', (8029, 8054), False, 'from timesketch.models.sketch import Timeline\n'), ((8414, 8432), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (8430, 8432), False, 'import sys\n'), ((8614, 8641), 
'timesketch.models.db_session.delete', 'db_session.delete', (['timeline'], {}), '(timeline)\n', (8631, 8641), False, 'from timesketch.models import db_session\n'), ((10119, 10162), 'yaml.safe_dump', 'yaml.safe_dump', (['search_templates'], {'stream': 'fh'}), '(search_templates, stream=fh)\n', (10133, 10162), False, 'import yaml\n'), ((11514, 11585), 'timesketch.models.sketch.SearchTemplate.Label.get_or_create', 'SearchTemplate.Label.get_or_create', ([], {'label': 'u"""remote_template"""', 'user': 'None'}), "(label=u'remote_template', user=None)\n", (11548, 11585), False, 'from timesketch.models.sketch import SearchTemplate\n'), ((11685, 11718), 'timesketch.models.db_session.add', 'db_session.add', (['imported_template'], {}), '(imported_template)\n', (11699, 11718), False, 'from timesketch.models import db_session\n'), ((11735, 11754), 'timesketch.models.db_session.commit', 'db_session.commit', ([], {}), '()\n', (11752, 11754), False, 'from timesketch.models import db_session\n'), ((13751, 13798), 'timesketch.models.sketch.Sketch.query.get_with_acl', 'Sketch.query.get_with_acl', (['sketch_id'], {'user': 'user'}), '(sketch_id, user=user)\n', (13776, 13798), False, 'from timesketch.models.sketch import Sketch\n'), ((14903, 14915), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (14913, 14915), False, 'import uuid\n'), ((10304, 10322), 'yaml.safe_load', 'yaml.safe_load', (['fh'], {}), '(fh)\n', (10318, 10322), False, 'import yaml\n'), ((10448, 10459), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (10456, 10459), False, 'import sys\n'), ((11269, 11332), 'timesketch.models.sketch.SearchTemplate.Label.get_or_create', 'SearchTemplate.Label.get_or_create', ([], {'label': 'label_name', 'user': 'None'}), '(label=label_name, user=None)\n', (11303, 11332), False, 'from timesketch.models.sketch import SearchTemplate\n'), ((10762, 10803), 'timesketch.models.sketch.SearchTemplate.query.filter_by', 'SearchTemplate.query.filter_by', ([], {'name': 'name'}), '(name=name)\n', (10792, 
10803), False, 'from timesketch.models.sketch import SearchTemplate\n'), ((10951, 10961), 'timesketch.models.user.User', 'User', (['None'], {}), '(None)\n', (10955, 10961), False, 'from timesketch.models.user import User\n'), ((13119, 13137), 'os.stat', 'os.stat', (['file_path'], {}), '(file_path)\n', (13126, 13137), False, 'import os\n')] |
from __future__ import absolute_import
from __future__ import print_function
from collections import defaultdict
from copy import deepcopy
import datetime
import json
import os
import os.path
import re
import sys
from xml.sax.saxutils import escape
import glob
import yaml
from .build_cpe import CPEDoesNotExist, parse_platform_definition
from .constants import XCCDF_REFINABLE_PROPERTIES, SCE_SYSTEM, ocil_cs, ocil_namespace, xhtml_namespace, xsi_namespace, timestamp
from .rules import get_rule_dir_id, get_rule_dir_yaml, is_rule_dir
from .rule_yaml import parse_prodtype
from .cce import is_cce_format_valid, is_cce_value_valid
from .yaml import DocumentationNotComplete, open_and_expand, open_and_macro_expand
from .utils import required_key, mkdir_p
from .xml import ElementTree as ET
from .shims import unicode_func
def dump_yaml_preferably_in_original_order(dictionary, file_object):
    """
    Write *dictionary* to *file_object* as YAML, keeping the insertion
    order of the keys when the installed PyYAML supports it.

    Falls back to the default (sorted) key order on old PyYAML releases
    that do not understand the ``sort_keys`` keyword argument.
    """
    try:
        return yaml.dump(dictionary, file_object, indent=4, sort_keys=False)
    except TypeError as exc:
        # Old libyaml/PyYAML rejects the sort_keys kwarg with a TypeError;
        # any other TypeError is a genuine error and must propagate.
        if "sort_keys" not in str(exc):
            raise
        return yaml.dump(dictionary, file_object, indent=4)
def add_sub_element(parent, tag, data):
    """
    Append a new child element named *tag* to *parent*, with *data* as its
    content. *data* is parsed as an XML fragment, so it may itself contain
    sub-elements, which end up as children of the new element.

    If data should not be parsed as an XML tree, either escape the contents
    before passing it into this function, or use ElementTree.SubElement().

    Returns the newly created subelement of type tag.
    """
    # Our YAML data contain XML and XHTML fragments. ET.SubElement() would
    # escape < and > to &lt; and &gt; and therefore could not create child
    # elements, so we wrap the payload and re-parse it instead.
    # TODO: Remove this function after we move to Markdown everywhere in SSG
    wrapped = unicode_func("<{0}>{1}</{0}>").format(tag, data)
    try:
        child = ET.fromstring(wrapped.encode("utf-8"))
    except Exception:
        raise RuntimeError(
            "Error adding subelement to an element '{0}' from string: '{1}'"
            .format(parent.tag, wrapped))
    parent.append(child)
    return child
def reorder_according_to_ordering(unordered, ordering, regex=None):
    """
    Return the items of *unordered* arranged so that entries matching
    *regex* (by default, one alternative per item of *ordering*) come
    first, grouped by the order of the substrings in *ordering*; all
    remaining entries follow in sorted order.
    """
    if regex is None:
        regex = "|".join("({0})".format(entry) for entry in ordering)
    pattern = re.compile(regex)

    # Candidates for prioritized placement, in their original order.
    prioritized = [item for item in unordered if pattern.match(item)]
    remaining = set(unordered)

    result = []
    for priority in ordering:
        # Note: substring containment (not the regex) decides the grouping.
        for candidate in prioritized:
            if priority in candidate and candidate in remaining:
                remaining.remove(candidate)
                result.append(candidate)

    result.extend(sorted(remaining))
    return result
def add_warning_elements(element, warnings):
    """
    Append one <warning category="..."> child to *element* per entry
    of *warnings*.

    Warnings are modeled as a list of single-pair dicts
    ([{dict}, {dict}]) rather than one mapping, so that several warnings
    may legitimately share the same category, e.g.:

        warnings:
            - general: Some general warning
            - general: Some other general warning

    Each of the dicts is expected to hold exactly one key/value pair.
    """
    for entry in warnings:
        categories = list(entry.keys())
        messages = list(entry.values())
        warning_el = add_sub_element(element, "warning", messages[0])
        warning_el.set("category", categories[0])
def add_nondata_subelements(element, subelement, attribute, attr_data):
    """
    Append several empty child elements that carry an attribute but no text,
    one per value in *attr_data*.

    For example, <requires id="my_required_id"/>
    """
    for value in attr_data:
        child = ET.SubElement(element, subelement)
        child.set(attribute, value)
def check_warnings(xccdf_structure):
    """
    Validate the ``warnings`` attribute of *xccdf_structure*: every entry
    must be a dict with exactly one key/value pair.

    Raises ValueError on the first malformed entry.
    """
    for entry in xccdf_structure.warnings:
        if len(entry) == 1:
            continue
        raise ValueError(
            "Only one key/value pair should exist for each warnings dictionary")
class SelectionHandler(object):
    """
    Mix-in that keeps track of profile selections: selected and unselected
    rule IDs, variable assignments, and per-rule property refinements.
    """

    def __init__(self):
        self.refine_rules = defaultdict(list)
        self.variables = dict()
        self.unselected = []
        self.selected = []

    @property
    def selections(self):
        """Render the current state back into a flat list of selection strings."""
        result = [str(entry) for entry in self.selected]
        result.extend("!" + str(entry) for entry in self.unselected)
        for varname in self.variables.keys():
            result.append(varname + "=" + self.variables.get(varname))
        for rule_id, refinements in self.refine_rules.items():
            for prop_name, prop_value in refinements:
                result.append(
                    "{rule}.{property}={value}"
                    .format(rule=rule_id, property=prop_name, value=prop_value))
        return result

    @selections.setter
    def selections(self, entries):
        for entry in entries:
            self.apply_selection(entry)

    def apply_selection(self, item):
        """
        Interpret one selection string:

        - "rule.prop=value" refines a property of a rule,
        - "var=value" assigns a variable selector,
        - "!rule" unselects a rule,
        - anything else selects a rule.
        """
        if "." in item:
            rule, refinement = item.split(".", 1)
            property_, value = refinement.split("=", 1)
            if property_ not in XCCDF_REFINABLE_PROPERTIES:
                msg = ("Property '{property_}' cannot be refined. "
                       "Rule properties that can be refined are {refinables}. "
                       "Fix refinement '{rule_id}.{property_}={value}' in profile '{profile}'."
                       .format(property_=property_, refinables=XCCDF_REFINABLE_PROPERTIES,
                               rule_id=rule, value=value, profile=self.id_)
                       )
                raise ValueError(msg)
            self.refine_rules[rule].append((property_, value))
            return
        if "=" in item:
            varname, value = item.split("=", 1)
            self.variables[varname] = value
            return
        if item.startswith("!"):
            self.unselected.append(item[1:])
            return
        self.selected.append(item)

    def _subtract_refinements(self, extended_refinements):
        """
        Given a dict of rule refinements from the extended profile,
        "undo" every refinement prefixed with '!' in this profile.
        """
        for rule, refinements in list(self.refine_rules.items()):
            if not rule.startswith("!"):
                continue
            for prop_name, prop_value in refinements:
                extended_refinements[rule[1:]].remove((prop_name, prop_value))
            del self.refine_rules[rule]
        return extended_refinements

    def update_with(self, rhs):
        """Merge selections inherited from *rhs* (an extended profile)."""
        extra = set(rhs.selected).difference(self.selected)
        self.selected.extend(list(extra))

        # Our own variable assignments win over the extended profile's.
        merged_variables = dict(rhs.variables)
        merged_variables.update(self.variables)
        self.variables = merged_variables

        inherited = self._subtract_refinements(deepcopy(rhs.refine_rules))
        inherited.update(self.refine_rules)
        self.refine_rules = inherited
class XCCDFEntity(object):
    """
    Base class for content entities that can load themselves from a YAML
    file with Jinja macros, and that can also save themselves to YAML.

    It is supposed to work with the content in the project,
    when entities are defined in the benchmark tree,
    and they are compiled into flat YAMLs to the build directory.

    Subclasses parameterize the behavior by overriding:

    - KEYS: maps attribute names to zero-argument factories producing defaults.
    - MANDATORY_KEYS: keys that must be present in the input YAML.
    - GENERIC_FILENAME: filename (e.g. "rule.yml") for which the entity ID
      is derived from the parent directory rather than the file basename.
    - ID_LABEL: the env_yaml key under which the entity ID is exposed
      during macro expansion.
    """
    KEYS = dict(
        id_=lambda: "",
        definition_location=lambda: "",
    )

    MANDATORY_KEYS = set()

    GENERIC_FILENAME = ""
    ID_LABEL = "id"

    def __init__(self, id_):
        super(XCCDFEntity, self).__init__()
        self._assign_defaults()
        self.id_ = id_

    def _assign_defaults(self):
        """Initialize every KEYS attribute to its factory's default value."""
        for key, default in self.KEYS.items():
            default_val = default()
            # A factory returning a RuntimeError instance marks "no usable
            # default"; such attributes start out as None.
            if isinstance(default_val, RuntimeError):
                default_val = None
            setattr(self, key, default_val)

    @classmethod
    def get_instance_from_full_dict(cls, data):
        """
        Given a defining dictionary, produce an instance
        by treating all dict elements as attributes.

        Extend this if you want tight control over the instance creation process.
        """
        entity = cls(data["id_"])
        for key, value in data.items():
            setattr(entity, key, value)
        return entity

    @classmethod
    def process_input_dict(cls, input_contents, env_yaml):
        """
        Take the contents of the definition as a dictionary, and
        add defaults or raise errors if a required member is not present.

        Recognized keys are removed from input_contents as they are
        consumed, so the caller can detect unparsed leftovers.

        Extend this if you want to add, remove or alter the result
        that will constitute the new instance.
        """
        data = dict()

        for key, default in cls.KEYS.items():
            if key in input_contents:
                data[key] = input_contents[key]
                del input_contents[key]
                continue

            if key not in cls.MANDATORY_KEYS:
                data[key] = cls.KEYS[key]()
            else:
                msg = (
                    "Key '{key}' is mandatory for definition of '{class_name}'."
                    .format(key=key, class_name=cls.__name__))
                raise ValueError(msg)

        return data

    @classmethod
    def parse_yaml_into_processed_dict(cls, yaml_file, env_yaml=None):
        """
        Given yaml filename and environment info, produce a dictionary
        that defines the instance to be created.
        This wraps :meth:`process_input_dict` and it adds generic keys on the top:

        - `id_` as the entity ID that is deduced either from the filename,
          or from the parent directory name.
        - `definition_location` as the original location where the entity got defined.
        """
        file_basename = os.path.basename(yaml_file)
        entity_id = file_basename.split(".")[0]
        if file_basename == cls.GENERIC_FILENAME:
            entity_id = os.path.basename(os.path.dirname(yaml_file))

        if env_yaml:
            env_yaml[cls.ID_LABEL] = entity_id
        yaml_data = open_and_macro_expand(yaml_file, env_yaml)

        try:
            processed_data = cls.process_input_dict(yaml_data, env_yaml)
        except ValueError as exc:
            msg = (
                "Error processing {yaml_file}: {exc}"
                .format(yaml_file=yaml_file, exc=str(exc)))
            raise ValueError(msg)

        if yaml_data:
            # process_input_dict consumes every recognized key, so anything
            # left over is a typo or an unsupported key in the definition.
            msg = (
                "Unparsed YAML data in '{yaml_file}': {keys}"
                .format(yaml_file=yaml_file, keys=list(yaml_data.keys())))
            raise RuntimeError(msg)

        if not processed_data.get("definition_location", ""):
            processed_data["definition_location"] = yaml_file

        processed_data["id_"] = entity_id

        return processed_data

    @classmethod
    def from_yaml(cls, yaml_file, env_yaml=None):
        """Load an entity from yaml_file, macro-expanding with env_yaml."""
        yaml_file = os.path.normpath(yaml_file)

        local_env_yaml = None
        if env_yaml:
            local_env_yaml = dict()
            local_env_yaml.update(env_yaml)

        try:
            data_dict = cls.parse_yaml_into_processed_dict(yaml_file, local_env_yaml)
        except DocumentationNotComplete:
            raise
        except Exception as exc:
            # Fix: the template previously read "from (unknown)" although
            # the filename was passed to format() (and silently ignored);
            # the filename is essential for diagnosing a broken definition.
            msg = (
                "Error loading a {class_name} from {filename}: {error}"
                .format(class_name=cls.__name__, filename=yaml_file, error=str(exc)))
            raise RuntimeError(msg)

        result = cls.get_instance_from_full_dict(data_dict)

        return result

    def represent_as_dict(self):
        """
        Produce a dict representation of the class.

        Extend this method if you need the representation to be different from the object.
        """
        data = dict()
        for key in self.KEYS:
            # Fix: the previous "if value or True" condition was always true,
            # so every key is represented unconditionally.
            data[key] = getattr(self, key)
        del data["id_"]
        return data

    def dump_yaml(self, file_name, documentation_complete=True):
        """Serialize the entity to file_name as a flat YAML document."""
        to_dump = self.represent_as_dict()
        to_dump["documentation_complete"] = documentation_complete
        with open(file_name, "w+") as f:
            dump_yaml_preferably_in_original_order(to_dump, f)

    def to_xml_element(self):
        # Subclasses must provide their own XCCDF serialization.
        raise NotImplementedError()
class Profile(XCCDFEntity, SelectionHandler):
    """Represents an XCCDF profile: a named set of rule selections,
    variable assignments and rule refinements, optionally extending
    another profile and restricted to a set of platforms (CPE names).
    """
    KEYS = dict(
        title=lambda: "",
        description=lambda: "",
        extends=lambda: "",
        metadata=lambda: None,
        reference=lambda: None,
        selections=lambda: list(),
        platforms=lambda: set(),
        cpe_names=lambda: set(),
        platform=lambda: None,
        filter_rules=lambda: "",
        ** XCCDFEntity.KEYS
    )

    MANDATORY_KEYS = {
        "title",
        "description",
        "selections",
    }

    @classmethod
    def process_input_dict(cls, input_contents, env_yaml):
        """
        On top of the generic processing, fold the single "platform" key
        into the "platforms" set, and translate every platform into a CPE
        name via the product CPEs from env_yaml.

        Raises CPEDoesNotExist for a platform the product does not know.
        """
        input_contents = super(Profile, cls).process_input_dict(input_contents, env_yaml)

        platform = input_contents.get("platform")
        if platform is not None:
            input_contents["platforms"].add(platform)

        if env_yaml:
            for platform in input_contents["platforms"]:
                try:
                    new_cpe_name = env_yaml["product_cpes"].get_cpe_name(platform)
                    input_contents["cpe_names"].add(new_cpe_name)
                except CPEDoesNotExist:
                    msg = (
                        "Unsupported platform '{platform}' in a profile."
                        .format(platform=platform))
                    raise CPEDoesNotExist(msg)

        return input_contents

    @property
    def rule_filter(self):
        """
        Return a rule predicate used during resolution: built from the
        "filter_rules" expression when one is defined, otherwise a filter
        that accepts every rule.
        """
        if self.filter_rules:
            return rule_filter_from_def(self.filter_rules)
        else:
            return noop_rule_filterfunc

    def to_xml_element(self):
        """Serialize the profile into an XCCDF <Profile> XML element."""
        element = ET.Element('Profile')
        element.set("id", self.id_)
        if self.extends:
            element.set("extends", self.extends)
        title = add_sub_element(element, "title", self.title)
        title.set("override", "true")
        desc = add_sub_element(element, "description", self.description)
        desc.set("override", "true")

        if self.reference:
            add_sub_element(element, "reference", escape(self.reference))

        # One <platform> element per resolved CPE name.
        for cpe_name in self.cpe_names:
            plat = ET.SubElement(element, "platform")
            plat.set("idref", cpe_name)

        for selection in self.selected:
            select = ET.Element("select")
            select.set("idref", selection)
            select.set("selected", "true")
            element.append(select)

        for selection in self.unselected:
            unselect = ET.Element("select")
            unselect.set("idref", selection)
            unselect.set("selected", "false")
            element.append(unselect)

        for value_id, selector in self.variables.items():
            refine_value = ET.Element("refine-value")
            refine_value.set("idref", value_id)
            refine_value.set("selector", selector)
            element.append(refine_value)

        for refined_rule, refinement_list in self.refine_rules.items():
            refine_rule = ET.Element("refine-rule")
            refine_rule.set("idref", refined_rule)
            for refinement in refinement_list:
                # refinement is a (property, value) pair, e.g. ("severity", "high").
                refine_rule.set(refinement[0], refinement[1])
            element.append(refine_rule)
        return element

    def get_rule_selectors(self):
        # Both explicitly selected and explicitly unselected rule IDs.
        return self.selected + self.unselected

    def get_variable_selectors(self):
        # Maps variable ID to the selector this profile chose for it.
        return self.variables

    def validate_refine_rules(self, rules):
        """
        Raise ValueError when a rule refinement targets a rule that does
        not exist in the benchmark, or one the profile does not select.
        """
        existing_rule_ids = [r.id_ for r in rules]
        for refine_rule, refinement_list in self.refine_rules.items():
            # Take the first refinement to illustrate where the error is;
            # all refinements in the list are invalid, so it doesn't really matter.
            a_refinement = refinement_list[0]

            if refine_rule not in existing_rule_ids:
                msg = (
                    "You are trying to refine a rule that doesn't exist. "
                    "Rule '{rule_id}' was not found in the benchmark. "
                    "Please check all rule refinements for rule: '{rule_id}', for example: "
                    "- {rule_id}.{property_}={value}' in profile {profile_id}."
                    .format(rule_id=refine_rule, profile_id=self.id_,
                            property_=a_refinement[0], value=a_refinement[1])
                    )
                raise ValueError(msg)

            if refine_rule not in self.get_rule_selectors():
                msg = ("- {rule_id}.{property_}={value}' in profile '{profile_id}' is refining "
                       "a rule that is not selected by it. The refinement will not have any "
                       "noticeable effect. Either select the rule or remove the rule refinement."
                       .format(rule_id=refine_rule, property_=a_refinement[0],
                               value=a_refinement[1], profile_id=self.id_)
                       )
                raise ValueError(msg)

    def validate_variables(self, variables):
        """
        Raise ValueError when the profile assigns an unknown variable,
        or uses a selector the variable definition does not offer.
        """
        variables_by_id = dict()
        for var in variables:
            variables_by_id[var.id_] = var

        for var_id, our_val in self.variables.items():
            if var_id not in variables_by_id:
                all_vars_list = [" - %s" % v for v in variables_by_id.keys()]
                msg = (
                    "Value '{var_id}' in profile '{profile_name}' is not known. "
                    "We know only variables:\n{var_names}"
                    .format(
                        var_id=var_id, profile_name=self.id_,
                        var_names="\n".join(sorted(all_vars_list)))
                )
                raise ValueError(msg)

            allowed_selectors = [str(s) for s in variables_by_id[var_id].options.keys()]
            if our_val not in allowed_selectors:
                msg = (
                    "Value '{var_id}' in profile '{profile_name}' "
                    "uses the selector '{our_val}'. "
                    "This is not possible, as only selectors {all_selectors} are available. "
                    "Either change the selector used in the profile, or "
                    "add the selector-value pair to the variable definition."
                    .format(
                        var_id=var_id, profile_name=self.id_, our_val=our_val,
                        all_selectors=allowed_selectors,
                    )
                )
                raise ValueError(msg)

    def validate_rules(self, rules, groups):
        """
        Raise ValueError when the profile selects a group, or selects a
        rule that does not exist in the benchmark.
        """
        existing_rule_ids = [r.id_ for r in rules]
        rule_selectors = self.get_rule_selectors()
        for id_ in rule_selectors:
            if id_ in groups:
                msg = (
                    "You have selected a group '{group_id}' instead of a "
                    "rule. Groups have no effect in the profile and are not "
                    "allowed to be selected. Please remove '{group_id}' "
                    "from profile '{profile_id}' before proceeding."
                    .format(group_id=id_, profile_id=self.id_)
                )
                raise ValueError(msg)
            if id_ not in existing_rule_ids:
                msg = (
                    "Rule '{rule_id}' was not found in the benchmark. Please "
                    "remove rule '{rule_id}' from profile '{profile_id}' "
                    "before proceeding."
                    .format(rule_id=id_, profile_id=self.id_)
                )
                raise ValueError(msg)

    def __sub__(self, other):
        """
        Return a new Profile with the metadata of self but containing only
        the selections, unselections and variable assignments of self that
        are not already present in *other*.
        """
        profile = Profile(self.id_)
        profile.title = self.title
        profile.description = self.description
        profile.extends = self.extends
        profile.platforms = self.platforms
        profile.platform = self.platform
        profile.selected = list(set(self.selected) - set(other.selected))
        profile.selected.sort()
        profile.unselected = list(set(self.unselected) - set(other.unselected))
        profile.variables = dict ((k, v) for (k, v) in self.variables.items()
                                  if k not in other.variables or v != other.variables[k])
        return profile
class ResolvableProfile(Profile):
    """
    A Profile whose "extends" chain can be flattened and whose selections
    can be checked against the set of available rules.
    """

    def __init__(self, * args, ** kwargs):
        super(ResolvableProfile, self).__init__(* args, ** kwargs)
        # Guards resolve() against repeated work on shared ancestors.
        self.resolved = False

    def _controls_ids_to_controls(self, controls_manager, policy_id, control_id_list):
        """Translate control IDs of one policy into control objects."""
        return [
            controls_manager.get_control(policy_id, control_id)
            for control_id in control_id_list
        ]

    def resolve_controls(self, controls_manager):
        # Hook for subclasses that understand policy controls; the base
        # implementation intentionally does nothing.
        pass

    def extend_by(self, extended_profile):
        """Inherit selections from the profile this one extends."""
        self.update_with(extended_profile)

    def resolve_selections_with_rules(self, rules_by_id):
        """Keep only selected rule IDs that exist and pass the rule filter."""
        kept = {
            rule_id for rule_id in self.selected
            if rule_id in rules_by_id and self.rule_filter(rules_by_id[rule_id])
        }
        self.selected = list(kept)

    def resolve(self, all_profiles, rules_by_id, controls_manager=None):
        """
        Flatten the extension chain and normalize the selections;
        idempotent thanks to the ``resolved`` flag.
        """
        if self.resolved:
            return

        if controls_manager:
            self.resolve_controls(controls_manager)

        self.resolve_selections_with_rules(rules_by_id)

        if self.extends:
            if self.extends not in all_profiles:
                raise RuntimeError(
                    "Profile {name} extends profile {extended}, but "
                    "only profiles {known_profiles} are available for resolution."
                    .format(name=self.id_, extended=self.extends,
                            known_profiles=list(all_profiles.keys())))
            parent = all_profiles[self.extends]
            # Resolve the parent first (recursively), then inherit from it.
            parent.resolve(all_profiles, rules_by_id, controls_manager)
            self.extend_by(parent)

        # Unselections beat selections; afterwards both lists are normalized.
        self.selected = sorted(set(self.selected).difference(self.unselected))
        self.unselected = []
        self.extends = None

        for rule_id in self.selected:
            if rule_id in rules_by_id:
                continue
            raise ValueError(
                "Rule {rid} is selected by {profile}, but the rule is not available. "
                "This may be caused by a discrepancy of prodtypes."
                .format(rid=rule_id, profile=self.id_))

        self.resolved = True
class ProfileWithInlinePolicies(ResolvableProfile):
    """A resolvable profile whose selections may reference policy controls.

    Selections of the form ``policy_id:control_id`` (containing no ``=``)
    are collected per policy and expanded into rule selections during
    resolve_controls().
    """
    def __init__(self, * args, ** kwargs):
        super(ProfileWithInlinePolicies, self).__init__(* args, ** kwargs)
        # Maps policy ID -> list of control selectors found in selections.
        self.controls_by_policy = defaultdict(list)

    def apply_selection(self, item):
        # ":" is the delimiter for controls but not when the item is a variable
        # (variable assignments look like "var=value" and may contain ":").
        if ":" in item and "=" not in item:
            policy_id, control_id = item.split(":", 1)
            self.controls_by_policy[policy_id].append(control_id)
        else:
            super(ProfileWithInlinePolicies, self).apply_selection(item)

    def _process_controls_ids_into_controls(self, controls_manager, policy_id, controls_ids):
        """Expand control selectors into control objects.

        Supported selector forms: ``<control_id>`` (one control),
        ``all:<level>`` (all controls of a level), ``all`` (every control
        of the policy).
        """
        controls = []
        for cid in controls_ids:
            if not cid.startswith("all"):
                controls.extend(
                    self._controls_ids_to_controls(controls_manager, policy_id, [cid]))
            elif ":" in cid:
                _, level_id = cid.split(":", 1)
                controls.extend(
                    controls_manager.get_all_controls_of_level(policy_id, level_id))
            else:
                controls.extend(
                    controls_manager.get_all_controls(policy_id))
        return controls

    def resolve_controls(self, controls_manager):
        # Merge the selections/settings of every referenced control into
        # this profile.
        for policy_id, controls_ids in self.controls_by_policy.items():
            controls = self._process_controls_ids_into_controls(
                controls_manager, policy_id, controls_ids)
            for c in controls:
                self.update_with(c)
class Value(XCCDFEntity):
    """Represents an XCCDF Value: a tunable variable with typed options
    selectable by profiles.
    """
    KEYS = dict(
        title=lambda: "",
        description=lambda: "",
        type=lambda: "",
        operator=lambda: "equals",
        interactive=lambda: False,
        options=lambda: dict(),
        warnings=lambda: list(),
        ** XCCDFEntity.KEYS
    )

    MANDATORY_KEYS = {
        "title",
        "description",
        "type",
    }

    @classmethod
    def process_input_dict(cls, input_contents, env_yaml):
        """Normalize raw YAML contents and validate the operator.

        Raises ValueError when the operator is not an allowed XCCDF value.
        """
        # YAML hands us a string; normalize it to a real boolean.
        input_contents["interactive"] = (
            input_contents.get("interactive", "false").lower() == "true")

        data = super(Value, cls).process_input_dict(input_contents, env_yaml)

        possible_operators = ["equals", "not equal", "greater than",
                              "less than", "greater than or equal",
                              "less than or equal", "pattern match"]

        if data["operator"] not in possible_operators:
            raise ValueError(
                "Found an invalid operator value '%s'. "
                "Expected one of: %s"
                % (data["operator"], ", ".join(possible_operators))
            )

        return data

    @classmethod
    def from_yaml(cls, yaml_file, env_yaml=None):
        value = super(Value, cls).from_yaml(yaml_file, env_yaml)
        check_warnings(value)
        return value

    def to_xml_element(self):
        """Build the <Value> XML element with one <value> child per
        selector/option pair."""
        value = ET.Element('Value')
        value.set('id', self.id_)
        value.set('type', self.type)
        if self.operator != "equals":  # equals is the default
            value.set('operator', self.operator)
        if self.interactive:  # False is the default
            value.set('interactive', 'true')
        title = ET.SubElement(value, 'title')
        title.text = self.title
        add_sub_element(value, 'description', self.description)
        add_warning_elements(value, self.warnings)

        for selector, option in self.options.items():
            # do not confuse Value with big V with value with small v
            # value is child element of Value
            value_small = ET.SubElement(value, 'value')
            # by XCCDF spec, default value is value without selector
            if selector != "default":
                value_small.set('selector', str(selector))
            value_small.text = str(option)

        return value

    def to_file(self, file_name):
        """Serialize this Value to *file_name* as XML."""
        root = self.to_xml_element()
        tree = ET.ElementTree(root)
        tree.write(file_name)
class Benchmark(XCCDFEntity):
    """Represents an XCCDF Benchmark: the root entity aggregating profiles,
    values, groups and rules.
    """
    KEYS = dict(
        title=lambda: "",
        status=lambda: "",
        description=lambda: "",
        notice_id=lambda: "",
        notice_description=lambda: "",
        front_matter=lambda: "",
        rear_matter=lambda: "",
        cpes=lambda: list(),
        version=lambda: "0",
        profiles=lambda: list(),
        values=lambda: dict(),
        groups=lambda: dict(),
        rules=lambda: dict(),
        product_cpe_names=lambda: list(),
        ** XCCDFEntity.KEYS
    )

    MANDATORY_KEYS = {
        "title",
        "status",
        "description",
        "front_matter",
        "rear_matter",
        "version",
    }

    GENERIC_FILENAME = "benchmark.yml"

    def load_entities(self, rules_by_id, values_by_id, groups_by_id):
        """Replace empty rule/value/group placeholders with real objects."""
        for rid, val in self.rules.items():
            if not val:
                self.rules[rid] = rules_by_id[rid]

        for vid, val in self.values.items():
            if not val:
                self.values[vid] = values_by_id[vid]

        for gid, val in self.groups.items():
            if not val:
                self.groups[gid] = groups_by_id[gid]

    @classmethod
    def process_input_dict(cls, input_contents, env_yaml):
        """Translate dashed YAML keys to attribute names and pull the notice
        id/description pair out of the nested "notice" mapping."""
        input_contents["front_matter"] = input_contents["front-matter"]
        del input_contents["front-matter"]
        input_contents["rear_matter"] = input_contents["rear-matter"]
        del input_contents["rear-matter"]

        data = super(Benchmark, cls).process_input_dict(input_contents, env_yaml)

        notice_contents = required_key(input_contents, "notice")
        del input_contents["notice"]

        data["notice_id"] = required_key(notice_contents, "id")
        del notice_contents["id"]

        data["notice_description"] = required_key(notice_contents, "description")
        del notice_contents["description"]

        # YAML may parse the version as a number; keep it a string.
        data["version"] = str(data["version"])

        return data

    def represent_as_dict(self):
        """Inverse of process_input_dict(): restore the dashed YAML keys."""
        # Bug fix: this is an instance method, so super() must be given
        # ``self`` -- the previous ``super(Benchmark, cls)`` raised NameError.
        data = super(Benchmark, self).represent_as_dict()
        data["rear-matter"] = data["rear_matter"]
        del data["rear_matter"]

        data["front-matter"] = data["front_matter"]
        del data["front_matter"]
        return data

    @classmethod
    def from_yaml(cls, yaml_file, env_yaml=None, benchmark_id="product-name"):
        """Build a Benchmark and, when env_yaml is given, attach the
        product's CPE names and platform specification."""
        benchmark = super(Benchmark, cls).from_yaml(yaml_file, env_yaml)
        if env_yaml:
            benchmark.product_cpe_names = env_yaml["product_cpes"].get_product_cpe_names()
            benchmark.cpe_platform_spec = env_yaml["product_cpes"].cpe_platform_specification
        benchmark.id_ = benchmark_id

        return benchmark

    def add_profiles_from_dir(self, dir_, env_yaml):
        """Load every ``*.profile`` file in *dir_* as a profile.

        Files with other extensions are skipped with a stderr warning;
        profiles whose documentation is incomplete are silently ignored.
        Raises RuntimeError when a profile file fails to parse.
        """
        for dir_item in sorted(os.listdir(dir_)):
            dir_item_path = os.path.join(dir_, dir_item)
            if not os.path.isfile(dir_item_path):
                continue

            _, ext = os.path.splitext(os.path.basename(dir_item_path))
            if ext != '.profile':
                sys.stderr.write(
                    "Encountered file '%s' while looking for profiles, "
                    "extension '%s' is unknown. Skipping..\n"
                    % (dir_item, ext)
                )
                continue

            try:
                new_profile = ProfileWithInlinePolicies.from_yaml(dir_item_path, env_yaml)
            except DocumentationNotComplete:
                continue
            except Exception as exc:
                msg = ("Error building profile from '{fname}': '{error}'"
                       .format(fname=dir_item_path, error=str(exc)))
                raise RuntimeError(msg)
            if new_profile is None:
                continue

            self.profiles.append(new_profile)

    def to_xml_element(self):
        """Build the <Benchmark> XML element with all child entities."""
        root = ET.Element('Benchmark')
        root.set('xmlns:xsi', 'http://www.w3.org/2001/XMLSchema-instance')
        root.set('xmlns:xhtml', 'http://www.w3.org/1999/xhtml')
        root.set('xmlns:dc', 'http://purl.org/dc/elements/1.1/')
        root.set('xmlns:cpe-lang', 'http://cpe.mitre.org/language/2.0')
        # Placeholder id; the real benchmark id is substituted later in the
        # build pipeline.
        root.set('id', 'product-name')
        root.set('xsi:schemaLocation',
                 'http://checklists.nist.gov/xccdf/1.1 xccdf-1.1.4.xsd')
        root.set('style', 'SCAP_1.1')
        root.set('resolved', 'false')
        root.set('xml:lang', 'en-US')
        status = ET.SubElement(root, 'status')
        status.set('date', datetime.date.today().strftime("%Y-%m-%d"))
        status.text = self.status
        add_sub_element(root, "title", self.title)
        add_sub_element(root, "description", self.description)
        notice = add_sub_element(root, "notice", self.notice_description)
        notice.set('id', self.notice_id)
        add_sub_element(root, "front-matter", self.front_matter)
        add_sub_element(root, "rear-matter", self.rear_matter)

        # if there are no platforms, do not output platform-specification at all
        if len(self.cpe_platform_spec.platforms) > 0:
            root.append(self.cpe_platform_spec.to_xml_element())

        # The Benchmark applicability is determined by the CPEs
        # defined in the product.yml
        for cpe_name in self.product_cpe_names:
            plat = ET.SubElement(root, "platform")
            plat.set("idref", cpe_name)

        version = ET.SubElement(root, 'version')
        version.text = self.version
        ET.SubElement(root, "metadata")

        for profile in self.profiles:
            root.append(profile.to_xml_element())

        for value in self.values.values():
            root.append(value.to_xml_element())

        groups_in_bench = list(self.groups.keys())
        priority_order = ["system", "services"]
        groups_in_bench = reorder_according_to_ordering(groups_in_bench, priority_order)

        # Make system group the first, followed by services group
        for group_id in groups_in_bench:
            group = self.groups.get(group_id)
            # Products using application benchmark don't have system or services group
            if group is not None:
                root.append(group.to_xml_element())

        for rule in self.rules.values():
            root.append(rule.to_xml_element())

        return root

    def to_file(self, file_name):
        """Serialize the benchmark to *file_name* as XML."""
        root = self.to_xml_element()
        tree = ET.ElementTree(root)
        tree.write(file_name)

    def add_value(self, value):
        if value is None:
            return
        self.values[value.id_] = value

    # The benchmark is also considered a group, so this function signature needs to match
    # Group()'s add_group()
    def add_group(self, group, env_yaml=None):
        if group is None:
            return
        self.groups[group.id_] = group

    def add_rule(self, rule):
        if rule is None:
            return
        self.rules[rule.id_] = rule

    def to_xccdf(self):
        """We can easily extend this script to generate a valid XCCDF instead
        of SSG SHORTHAND.
        """
        raise NotImplementedError

    def __str__(self):
        return self.id_
class Group(XCCDFEntity):
    """Represents an XCCDF Group: a container for rules, values and nested
    groups, with careful ordering of children in the XML output.
    """
    # Attributes propagated onto child groups/rules when they are added and
    # their own value is None.
    ATTRIBUTES_TO_PASS_ON = (
        "platforms",
        "cpe_platform_names",
    )

    GENERIC_FILENAME = "group.yml"

    KEYS = dict(
        prodtype=lambda: "all",
        title=lambda: "",
        description=lambda: "",
        warnings=lambda: list(),
        requires=lambda: list(),
        conflicts=lambda: list(),
        values=lambda: dict(),
        groups=lambda: dict(),
        rules=lambda: dict(),
        platform=lambda: "",
        platforms=lambda: set(),
        cpe_platform_names=lambda: set(),
        ** XCCDFEntity.KEYS
    )

    # NOTE(review): status/front_matter/rear_matter/version do not appear in
    # KEYS above and look copied from Benchmark -- confirm whether Group
    # should require only title/description.
    MANDATORY_KEYS = {
        "title",
        "status",
        "description",
        "front_matter",
        "rear_matter",
        "version",
    }

    @classmethod
    def process_input_dict(cls, input_contents, env_yaml):
        """Normalize YAML contents: turn rule/group/value ID lists into
        placeholder dicts and register CPE platforms for all platforms."""
        data = super(Group, cls).process_input_dict(input_contents, env_yaml)
        if data["rules"]:
            rule_ids = data["rules"]
            data["rules"] = {rid: None for rid in rule_ids}

        if data["groups"]:
            group_ids = data["groups"]
            data["groups"] = {gid: None for gid in group_ids}

        if data["values"]:
            value_ids = data["values"]
            data["values"] = {vid: None for vid in value_ids}

        if data["platform"]:
            data["platforms"].add(data["platform"])

        # parse platform definition and get CPEAL platform
        if data["platforms"]:
            for platform in data["platforms"]:
                cpe_platform = parse_platform_definition(platform, env_yaml["product_cpes"])
                data["cpe_platform_names"].add(cpe_platform.id)
                # add platform to platform specification
                env_yaml["product_cpes"].cpe_platform_specification.add_platform(cpe_platform)
        return data

    def load_entities(self, rules_by_id, values_by_id, groups_by_id):
        """Replace empty rule/value/group placeholders with real objects."""
        for rid, val in self.rules.items():
            if not val:
                self.rules[rid] = rules_by_id[rid]

        for vid, val in self.values.items():
            if not val:
                self.values[vid] = values_by_id[vid]

        for gid, val in self.groups.items():
            if not val:
                self.groups[gid] = groups_by_id[gid]

    def represent_as_dict(self):
        """Inverse of process_input_dict(): emit child IDs as sorted lists."""
        yaml_contents = super(Group, self).represent_as_dict()

        if self.rules:
            yaml_contents["rules"] = sorted(list(self.rules.keys()))
        if self.groups:
            yaml_contents["groups"] = sorted(list(self.groups.keys()))
        if self.values:
            yaml_contents["values"] = sorted(list(self.values.keys()))

        return yaml_contents

    def validate_prodtype(self, yaml_file):
        """Raise ValueError when the comma-separated prodtype contains
        whitespace around any item."""
        for ptype in self.prodtype.split(","):
            if ptype.strip() != ptype:
                msg = (
                    "Comma-separated '{prodtype}' prodtype "
                    "in {yaml_file} contains whitespace."
                    .format(prodtype=self.prodtype, yaml_file=yaml_file))
                raise ValueError(msg)

    def to_xml_element(self):
        """Build the <Group> XML element; child rules and groups are emitted
        in a remediation-friendly priority order (see comments below)."""
        group = ET.Element('Group')
        group.set('id', self.id_)
        if self.prodtype != "all":
            group.set("prodtype", self.prodtype)
        title = ET.SubElement(group, 'title')
        title.text = self.title
        add_sub_element(group, 'description', self.description)
        add_warning_elements(group, self.warnings)
        add_nondata_subelements(group, "requires", "id", self.requires)
        add_nondata_subelements(group, "conflicts", "id", self.conflicts)

        for cpe_platform_name in self.cpe_platform_names:
            platform_el = ET.SubElement(group, "platform")
            platform_el.set("idref", "#"+cpe_platform_name)

        for _value in self.values.values():
            group.append(_value.to_xml_element())

        # Rules that install or remove packages affect remediation
        # of other rules.
        # When packages installed/removed rules come first:
        # The Rules are ordered in more logical way, and
        # remediation order is natural, first the package is installed, then configured.
        rules_in_group = list(self.rules.keys())
        regex = (r'(package_.*_(installed|removed))|' +
                 r'(service_.*_(enabled|disabled))|' +
                 r'install_smartcard_packages$')
        priority_order = ["installed", "install_smartcard_packages", "removed",
                          "enabled", "disabled"]
        rules_in_group = reorder_according_to_ordering(rules_in_group, priority_order, regex)

        # Add rules in priority order, first all packages installed, then removed,
        # followed by services enabled, then disabled
        for rule_id in rules_in_group:
            group.append(self.rules.get(rule_id).to_xml_element())

        # Add the sub groups after any current level group rules.
        # As package installed/removed and service enabled/disabled rules are usuallly in
        # top level group, this ensures groups that further configure a package or service
        # are after rules that install or remove it.
        groups_in_group = list(self.groups.keys())
        priority_order = [
            # Make sure rpm_verify_(hashes|permissions|ownership) are run before any other rule.
            # Due to conflicts between rules rpm_verify_* rules and any rule that configures
            # stricter settings, like file_permissions_grub2_cfg and sudo_dedicated_group,
            # the rules deviating from the system default should be evaluated later.
            # So that in the end the system has contents, permissions and ownership reset, and
            # any deviations or stricter settings are applied by the rules in the profile.
            "software", "integrity", "integrity-software", "rpm_verification",

            # The account group has to precede audit group because
            # the rule package_screen_installed is desired to be executed before the rule
            # audit_rules_privileged_commands, othervise the rule
            # does not catch newly installed screen binary during remediation
            # and report fail
            "accounts", "auditing",


            # The FIPS group should come before Crypto,
            # if we want to set a different (stricter) Crypto Policy than FIPS.
            "fips", "crypto",

            # The firewalld_activation must come before ruleset_modifications, othervise
            # remediations for ruleset_modifications won't work
            "firewalld_activation", "ruleset_modifications",

            # Rules from group disabling_ipv6 must precede rules from configuring_ipv6,
            # otherwise the remediation prints error although it is successful
            "disabling_ipv6", "configuring_ipv6"
        ]
        groups_in_group = reorder_according_to_ordering(groups_in_group, priority_order)
        for group_id in groups_in_group:
            _group = self.groups[group_id]
            group.append(_group.to_xml_element())

        return group

    def to_file(self, file_name):
        """Serialize this group to *file_name* as XML."""
        root = self.to_xml_element()
        tree = ET.ElementTree(root)
        tree.write(file_name)

    def add_value(self, value):
        if value is None:
            return
        self.values[value.id_] = value

    def add_group(self, group, env_yaml=None):
        """Add a child group, propagating platforms and registering its CPE
        platforms with the product's platform specification."""
        if group is None:
            return
        if self.platforms and not group.platforms:
            group.platforms = self.platforms
        self.groups[group.id_] = group
        self._pass_our_properties_on_to(group)

        # Once the group has inherited properties, update cpe_names
        if env_yaml:
            for platform in group.platforms:
                cpe_platform = parse_platform_definition(
                    platform, env_yaml["product_cpes"])
                group.cpe_platform_names.add(cpe_platform.id)
                env_yaml["product_cpes"].cpe_platform_specification.add_platform(
                    cpe_platform)

    def _pass_our_properties_on_to(self, obj):
        # Only fill in attributes the child does not have set yet (None).
        for attr in self.ATTRIBUTES_TO_PASS_ON:
            if hasattr(obj, attr) and getattr(obj, attr) is None:
                setattr(obj, attr, getattr(self, attr))

    def add_rule(self, rule, env_yaml=None):
        """Add a child rule, propagating platforms and registering its CPE
        platforms with the product's platform specification."""
        if rule is None:
            return
        if self.platforms and not rule.platforms:
            rule.platforms = self.platforms
        self.rules[rule.id_] = rule
        self._pass_our_properties_on_to(rule)

        # Once the rule has inherited properties, update cpe_platform_names
        if env_yaml:
            for platform in rule.platforms:
                cpe_platform = parse_platform_definition(
                    platform, env_yaml["product_cpes"])
                rule.cpe_platform_names.add(cpe_platform.id)
                env_yaml["product_cpes"].cpe_platform_specification.add_platform(
                    cpe_platform)

    def __str__(self):
        return self.id_
def noop_rule_filterfunc(rule):
    """Default rule filter: accept every rule unconditionally."""
    return True
def rule_filter_from_def(filterdef):
    """Compile a profile's rule-filter expression into a predicate.

    An empty or missing definition yields a filter that accepts everything.
    The expression is evaluated with builtins disabled and only the rule's
    own attributes in scope.
    """
    if filterdef is None or filterdef == "":
        return noop_rule_filterfunc

    def rule_matches(rule):
        # Remove globals for security and only expose
        # variables relevant to the rule
        return eval(filterdef, {"__builtins__": None}, rule.__dict__)
    return rule_matches
class Rule(XCCDFEntity):
    """Represents an XCCDF Rule: a single check with identifiers,
    references, OCIL text and optional OVAL/SCE check content.
    """
    KEYS = dict(
        prodtype=lambda: "all",
        title=lambda: "",
        description=lambda: "",
        rationale=lambda: "",
        severity=lambda: "",
        references=lambda: dict(),
        identifiers=lambda: dict(),
        ocil_clause=lambda: None,
        ocil=lambda: None,
        oval_external_content=lambda: None,
        warnings=lambda: list(),
        conflicts=lambda: list(),
        requires=lambda: list(),
        platform=lambda: None,
        platforms=lambda: set(),
        inherited_platforms=lambda: list(),
        template=lambda: None,
        cpe_platform_names=lambda: set(),
        ** XCCDFEntity.KEYS
    )

    MANDATORY_KEYS = {
        "title",
        "description",
        "rationale",
        "severity",
    }

    GENERIC_FILENAME = "rule.yml"
    ID_LABEL = "rule_id"

    # Reference labels that may carry a product qualifier ("stigid@rhel8").
    PRODUCT_REFERENCES = ("stigid", "cis",)
    # Reference labels that must never be product-qualified.
    GLOBAL_REFERENCES = ("srg", "vmmsrg", "disa", "cis-csc",)
    def __init__(self, id_):
        super(Rule, self).__init__(id_)
        # SCE check metadata for this rule; populated in from_yaml() when
        # an SCE check exists for the rule.
        self.sce_metadata = None
    def __deepcopy__(self, memo):
        """Deep-copy the rule, sharing the ``template`` and
        ``local_env_yaml`` attributes with the original instead of
        copying them."""
        cls = self.__class__
        result = cls.__new__(cls)
        memo[id(self)] = result
        for k, v in self.__dict__.items():
            # These are difficult to deep copy, so let's just re-use them.
            if k != "template" and k != "local_env_yaml":
                setattr(result, k, deepcopy(v, memo))
            else:
                setattr(result, k, v)
        return result
    @classmethod
    def from_yaml(cls, yaml_file, env_yaml=None, sce_metadata=None):
        """Build a Rule from a rule.yml file.

        When *env_yaml* is given and the rule's prodtype matches the
        product (or is "all"), platform names are translated to CPE
        platforms and registered with the product's platform specification.
        When *sce_metadata* has an entry for the rule, it is attached with
        a path relative to the product's SCE check directory.
        Raises ValueError when prodtype/identifier/reference validation
        fails.
        """
        rule = super(Rule, cls).from_yaml(yaml_file, env_yaml)

        # platforms are read as list from the yaml file
        # we need them to convert to set again
        rule.platforms = set(rule.platforms)

        # rule.platforms.update(set(rule.inherited_platforms))
        check_warnings(rule)

        # ensure that content of rule.platform is in rule.platforms as
        # well
        if rule.platform is not None:
            rule.platforms.add(rule.platform)

        # Convert the platform names to CPE names
        # But only do it if an env_yaml was specified (otherwise there would be no product CPEs
        # to lookup), and the rule's prodtype matches the product being built
        if (
                env_yaml and env_yaml["product"] in parse_prodtype(rule.prodtype)
                or env_yaml and rule.prodtype == "all"):
            # parse platform definition and get CPEAL platform
            for platform in rule.platforms:
                cpe_platform = parse_platform_definition(
                    platform, env_yaml["product_cpes"])
                rule.cpe_platform_names.add(cpe_platform.id)
                # add platform to platform specification
                env_yaml["product_cpes"].cpe_platform_specification.add_platform(
                    cpe_platform)

        if sce_metadata and rule.id_ in sce_metadata:
            rule.sce_metadata = sce_metadata[rule.id_]
            rule.sce_metadata["relative_path"] = os.path.join(
                env_yaml["product"], "checks/sce", rule.sce_metadata['filename'])

        rule.validate_prodtype(yaml_file)
        rule.validate_identifiers(yaml_file)
        rule.validate_references(yaml_file)
        return rule
def _verify_stigid_format(self, product):
stig_id = self.references.get("stigid", None)
if not stig_id:
return
if "," in stig_id:
raise ValueError("Rules can not have multiple STIG IDs.")
    def _verify_disa_cci_format(self):
        """Validate that every comma-separated "disa" reference matches the
        CCI-NNNNNN pattern; raises ValueError otherwise."""
        cci_id = self.references.get("disa", None)
        if not cci_id:
            return
        cci_ex = re.compile(r'^CCI-[0-9]{6}$')
        for cci in cci_id.split(","):
            if not cci_ex.match(cci):
                raise ValueError("CCI '{}' is in the wrong format! "
                                 "Format should be similar to: "
                                 "CCI-XXXXXX".format(cci))
        # Effectively a no-op (cci_id was read from this very key); kept in
        # case normalization is ever reintroduced above.
        self.references["disa"] = cci_id
    def normalize(self, product):
        """Make references, identifiers and template parameters specific to
        *product*, wrapping any failure into a RuntimeError that names the
        rule."""
        try:
            self.make_refs_and_identifiers_product_specific(product)
            self.make_template_product_specific(product)
        except Exception as exc:
            msg = (
                "Error normalizing '{rule}': {msg}"
                .format(rule=self.id_, msg=str(exc))
            )
            raise RuntimeError(msg)
def _get_product_only_references(self):
product_references = dict()
for ref in Rule.PRODUCT_REFERENCES:
start = "{0}@".format(ref)
for gref, gval in self.references.items():
if ref == gref or gref.startswith(start):
product_references[gref] = gval
return product_references
    def make_template_product_specific(self, product):
        """Resolve product-qualified template vars and backends for
        *product*; qualified entries ("name@product") override unqualified
        ones."""
        product_suffix = "@{0}".format(product)

        if not self.template:
            return

        not_specific_vars = self.template.get("vars", dict())
        specific_vars = self._make_items_product_specific(
            not_specific_vars, product_suffix, True)
        self.template["vars"] = specific_vars

        not_specific_backends = self.template.get("backends", dict())
        specific_backends = self._make_items_product_specific(
            not_specific_backends, product_suffix, True)
        self.template["backends"] = specific_backends
    def make_refs_and_identifiers_product_specific(self, product):
        """Resolve product-qualified identifiers and references for
        *product*.

        Product-qualified entries ("stigid@<product>") replace their bare
        form. A bare PRODUCT_REFERENCES label is an error (those must
        always be qualified). Also validates the resulting disa and stigid
        reference formats. Raises ValueError on any violation.
        """
        product_suffix = "@{0}".format(product)

        product_references = self._get_product_only_references()
        general_references = self.references.copy()
        for todel in product_references:
            general_references.pop(todel)
        for ref in Rule.PRODUCT_REFERENCES:
            if ref in general_references:
                msg = "Unexpected reference identifier ({0}) without "
                msg += "product qualifier ({0}@{1}) while building rule "
                msg += "{2}"
                msg = msg.format(ref, product, self.id_)
                raise ValueError(msg)

        to_set = dict(
            identifiers=(self.identifiers, False),
            general_references=(general_references, True),
            product_references=(product_references, False),
        )
        for name, (dic, allow_overwrites) in to_set.items():
            try:
                new_items = self._make_items_product_specific(
                    dic, product_suffix, allow_overwrites)
            except ValueError as exc:
                msg = (
                    "Error processing {what} for rule '{rid}': {msg}"
                    .format(what=name, rid=self.id_, msg=str(exc))
                )
                raise ValueError(msg)
            dic.clear()
            dic.update(new_items)

        # Validate disa CCIs before product references are merged back in,
        # then merge and validate the stigid format.
        self.references = general_references
        self._verify_disa_cci_format()
        self.references.update(product_references)
        self._verify_stigid_format(product)
    def _make_items_product_specific(self, items_dict, product_suffix, allow_overwrites=False):
        """Collapse product-qualified entries onto their bare labels.

        Entries ending with *product_suffix* win over the unqualified entry
        of the same label; unless *allow_overwrites*, a conflicting value is
        an error. Qualifying a GLOBAL_REFERENCES label is always an error.
        Returns the new, product-specific dict.
        """
        new_items = dict()
        for full_label, value in items_dict.items():
            if "@" not in full_label and full_label not in new_items:
                new_items[full_label] = value
                continue

            label = full_label.split("@")[0]

            # this test should occur before matching product_suffix with the product qualifier
            # present in the reference, so it catches problems even for products that are not
            # being built at the moment
            if label in Rule.GLOBAL_REFERENCES:
                msg = (
                    "You cannot use product-qualified for the '{item_u}' reference. "
                    "Please remove the product-qualifier and merge values with the "
                    "existing reference if there is any. Original line: {item_q}: {value_q}"
                    .format(item_u=label, item_q=full_label, value_q=value)
                )
                raise ValueError(msg)

            if not full_label.endswith(product_suffix):
                continue

            if label in items_dict and not allow_overwrites and value != items_dict[label]:
                msg = (
                    "There is a product-qualified '{item_q}' item, "
                    "but also an unqualified '{item_u}' item "
                    "and those two differ in value - "
                    "'{value_q}' vs '{value_u}' respectively."
                    .format(item_q=full_label, item_u=label,
                            value_q=value, value_u=items_dict[label])
                )
                raise ValueError(msg)

            new_items[label] = value
        return new_items
    def validate_identifiers(self, yaml_file):
        """Check that identifiers are non-empty strings; CCE identifiers
        must additionally have a valid format and checksum. Raises
        ValueError on any violation."""
        if self.identifiers is None:
            raise ValueError("Empty identifier section in file %s" % yaml_file)

        # Validate all identifiers are non-empty:
        for ident_type, ident_val in self.identifiers.items():
            if not isinstance(ident_type, str) or not isinstance(ident_val, str):
                raise ValueError("Identifiers and values must be strings: %s in file %s"
                                 % (ident_type, yaml_file))
            if ident_val.strip() == "":
                raise ValueError("Identifiers must not be empty: %s in file %s"
                                 % (ident_type, yaml_file))
            if ident_type[0:3] == 'cce':
                if not is_cce_format_valid(ident_val):
                    raise ValueError("CCE Identifier format must be valid: invalid format '%s' for CEE '%s'"
                                     " in file '%s'" % (ident_val, ident_type, yaml_file))
                if not is_cce_value_valid("CCE-" + ident_val):
                    raise ValueError("CCE Identifier value is not a valid checksum: invalid value '%s' for CEE '%s'"
                                     " in file '%s'" % (ident_val, ident_type, yaml_file))
    def validate_references(self, yaml_file):
        """Check that references are non-empty strings and that
        comma-separated reference values contain no stray whitespace.
        Raises ValueError on any violation."""
        if self.references is None:
            raise ValueError("Empty references section in file %s" % yaml_file)

        for ref_type, ref_val in self.references.items():
            if not isinstance(ref_type, str) or not isinstance(ref_val, str):
                raise ValueError("References and values must be strings: %s in file %s"
                                 % (ref_type, yaml_file))
            if ref_val.strip() == "":
                raise ValueError("References must not be empty: %s in file %s"
                                 % (ref_type, yaml_file))

        for ref_type, ref_val in self.references.items():
            for ref in ref_val.split(","):
                if ref.strip() != ref:
                    msg = (
                        "Comma-separated '{ref_type}' reference "
                        "in {yaml_file} contains whitespace."
                        .format(ref_type=ref_type, yaml_file=yaml_file))
                    raise ValueError(msg)
def validate_prodtype(self, yaml_file):
for ptype in self.prodtype.split(","):
if ptype.strip() != ptype:
msg = (
"Comma-separated '{prodtype}' prodtype "
"in {yaml_file} contains whitespace."
.format(prodtype=self.prodtype, yaml_file=yaml_file))
raise ValueError(msg)
    def to_xml_element(self):
        """Build the <Rule> XML element: identifiers, references, check
        content (SCE and/or OVAL), OCIL linkage and platforms."""
        rule = ET.Element('Rule')
        rule.set('id', self.id_)
        if self.prodtype != "all":
            rule.set("prodtype", self.prodtype)
        rule.set('severity', self.severity)
        add_sub_element(rule, 'title', self.title)
        add_sub_element(rule, 'description', self.description)
        add_sub_element(rule, 'rationale', self.rationale)

        main_ident = ET.Element('ident')
        for ident_type, ident_val in self.identifiers.items():
            # This is not true if items were normalized
            if '@' in ident_type:
                # the ident is applicable only on some product
                # format : 'policy@product', eg. 'stigid@product'
                # for them, we create a separate <ref> element
                policy, product = ident_type.split('@')
                ident = ET.SubElement(rule, 'ident')
                ident.set(policy, ident_val)
                ident.set('prodtype', product)
            else:
                main_ident.set(ident_type, ident_val)

        if main_ident.attrib:
            rule.append(main_ident)

        main_ref = ET.Element('ref')
        for ref_type, ref_val in self.references.items():
            # This is not true if items were normalized
            if '@' in ref_type:
                # the reference is applicable only on some product
                # format : 'policy@product', eg. 'stigid@product'
                # for them, we create a separate <ref> element
                policy, product = ref_type.split('@')
                ref = ET.SubElement(rule, 'ref')
                ref.set(policy, ref_val)
                ref.set('prodtype', product)
            else:
                main_ref.set(ref_type, ref_val)

        if main_ref.attrib:
            rule.append(main_ref)

        ocil_parent = rule
        check_parent = rule

        if self.sce_metadata:
            # TODO: This is pretty much another hack, just like the previous OVAL
            # one. However, we avoided the external SCE content as I'm not sure it
            # is generally useful (unlike say, CVE checking with external OVAL)
            #
            # Additionally, we build the content (check subelement) here rather
            # than in xslt due to the nature of our SCE metadata.
            #
            # Finally, before we begin, we might have an element with both SCE
            # and OVAL. We have no way of knowing (right here) whether that is
            # the case (due to a variety of issues, most notably, that linking
            # hasn't yet occurred). So we must rely on the content author's
            # good will, by annotating SCE content with a complex-check tag
            # if necessary.
            if 'complex-check' in self.sce_metadata:
                # Here we have an issue: XCCDF allows EITHER one or more check
                # elements OR a single complex-check. While we have an explicit
                # case handling the OVAL-and-SCE interaction, OCIL entries have
                # (historically) been alongside OVAL content and been in an
                # "OR" manner -- preferring OVAL to SCE. In order to accomplish
                # this, we thus need to add _yet another parent_ when OCIL data
                # is present, and add update ocil_parent accordingly.
                if self.ocil or self.ocil_clause:
                    ocil_parent = ET.SubElement(ocil_parent, "complex-check")
                    ocil_parent.set('operator', 'OR')

                check_parent = ET.SubElement(ocil_parent, "complex-check")
                check_parent.set('operator', self.sce_metadata['complex-check'])

            # Now, add the SCE check element to the tree.
            check = ET.SubElement(check_parent, "check")
            check.set("system", SCE_SYSTEM)

            if 'check-import' in self.sce_metadata:
                # Normalize a single string into a one-element list.
                if isinstance(self.sce_metadata['check-import'], str):
                    self.sce_metadata['check-import'] = [self.sce_metadata['check-import']]
                for entry in self.sce_metadata['check-import']:
                    check_import = ET.SubElement(check, 'check-import')
                    check_import.set('import-name', entry)
                    check_import.text = None

            if 'check-export' in self.sce_metadata:
                # Normalize a single string into a one-element list.
                if isinstance(self.sce_metadata['check-export'], str):
                    self.sce_metadata['check-export'] = [self.sce_metadata['check-export']]
                for entry in self.sce_metadata['check-export']:
                    export, value = entry.split('=')
                    check_export = ET.SubElement(check, 'check-export')
                    check_export.set('value-id', value)
                    check_export.set('export-name', export)
                    check_export.text = None

            check_ref = ET.SubElement(check, "check-content-ref")
            href = self.sce_metadata['relative_path']
            check_ref.set("href", href)

        if self.oval_external_content:
            check = ET.SubElement(check_parent, 'check')
            check.set("system", "http://oval.mitre.org/XMLSchema/oval-definitions-5")
            external_content = ET.SubElement(check, "check-content-ref")
            external_content.set("href", self.oval_external_content)
        else:
            # TODO: This is pretty much a hack, oval ID will be the same as rule ID
            # and we don't want the developers to have to keep them in sync.
            # Therefore let's just add an OVAL ref of that ID.
            oval_ref = ET.SubElement(check_parent, "oval")
            oval_ref.set("id", self.id_)

        if self.ocil or self.ocil_clause:
            ocil_check = ET.SubElement(check_parent, "check")
            ocil_check.set("system", ocil_cs)
            ocil_check_ref = ET.SubElement(ocil_check, "check-content-ref")
            ocil_check_ref.set("href", "ocil-unlinked.xml")
            ocil_check_ref.set("name", self.id_ + "_ocil")

        add_warning_elements(rule, self.warnings)
        add_nondata_subelements(rule, "requires", "id", self.requires)
        add_nondata_subelements(rule, "conflicts", "id", self.conflicts)

        for cpe_platform_name in self.cpe_platform_names:
            platform_el = ET.SubElement(rule, "platform")
            platform_el.set("idref", "#"+cpe_platform_name)

        return rule
def to_file(self, file_name):
root = self.to_xml_element()
tree = ET.ElementTree(root)
tree.write(file_name)
def to_ocil(self):
if not self.ocil and not self.ocil_clause:
raise ValueError("Rule {0} doesn't have OCIL".format(self.id_))
# Create <questionnaire> for the rule
questionnaire = ET.Element("questionnaire", id=self.id_ + "_ocil")
title = ET.SubElement(questionnaire, "title")
title.text = self.title
actions = ET.SubElement(questionnaire, "actions")
test_action_ref = ET.SubElement(actions, "test_action_ref")
test_action_ref.text = self.id_ + "_action"
# Create <boolean_question_test_action> for the rule
action = ET.Element(
"boolean_question_test_action",
id=self.id_ + "_action",
question_ref=self.id_ + "_question")
when_true = ET.SubElement(action, "when_true")
result = ET.SubElement(when_true, "result")
result.text = "PASS"
when_true = ET.SubElement(action, "when_false")
result = ET.SubElement(when_true, "result")
result.text = "FAIL"
# Create <boolean_question>
boolean_question = ET.Element(
"boolean_question", id=self.id_ + "_question")
# TODO: The contents of <question_text> element used to be broken in
# the legacy XSLT implementation. The following code contains hacks
# to get the same results as in the legacy XSLT implementation.
# This enabled us a smooth transition to new OCIL generator
# without a need to mass-edit rule YAML files.
# We need to solve:
# TODO: using variables (aka XCCDF Values) in OCIL content
# TODO: using HTML formating tags eg. <pre> in OCIL content
#
# The "ocil" key in compiled rules contains HTML and XML elements
# but OCIL question texts shouldn't contain HTML or XML elements,
# therefore removing them.
if self.ocil is not None:
ocil_without_tags = re.sub(r"</?[^>]+>", "", self.ocil)
else:
ocil_without_tags = ""
# The "ocil" key in compiled rules contains XML entities which would
# be escaped by ET.Subelement() so we need to use add_sub_element()
# instead because we don't want to escape them.
question_text = add_sub_element(
boolean_question, "question_text", ocil_without_tags)
# The "ocil_clause" key in compiled rules also contains HTML and XML
# elements but unlike the "ocil" we want to escape the '<' and '>'
# characters.
# The empty ocil_clause causing broken question is in line with the
# legacy XSLT implementation.
ocil_clause = self.ocil_clause if self.ocil_clause else ""
question_text.text = (
"{0}\n Is it the case that {1}?\n ".format(
question_text.text if question_text.text is not None else "",
ocil_clause))
return (questionnaire, action, boolean_question)
def __hash__(self):
""" Controls are meant to be unique, so using the
ID should suffice"""
return hash(self.id_)
def __eq__(self, other):
return isinstance(other, self.__class__) and self.id_ == other.id_
def __ne__(self, other):
return not self != other
def __lt__(self, other):
return self.id_ < other.id_
def __str__(self):
return self.id_
class DirectoryLoader(object):
def __init__(self, profiles_dir, env_yaml):
self.benchmark_file = None
self.group_file = None
self.loaded_group = None
self.rule_files = []
self.value_files = []
self.subdirectories = []
self.all_values = dict()
self.all_rules = dict()
self.all_groups = dict()
self.profiles_dir = profiles_dir
self.env_yaml = env_yaml
self.product = env_yaml["product"]
self.parent_group = None
def _collect_items_to_load(self, guide_directory):
for dir_item in sorted(os.listdir(guide_directory)):
dir_item_path = os.path.join(guide_directory, dir_item)
_, extension = os.path.splitext(dir_item)
if extension == '.var':
self.value_files.append(dir_item_path)
elif dir_item == "benchmark.yml":
if self.benchmark_file:
raise ValueError("Multiple benchmarks in one directory")
self.benchmark_file = dir_item_path
elif dir_item == "group.yml":
if self.group_file:
raise ValueError("Multiple groups in one directory")
self.group_file = dir_item_path
elif extension == '.rule':
self.rule_files.append(dir_item_path)
elif is_rule_dir(dir_item_path):
self.rule_files.append(get_rule_dir_yaml(dir_item_path))
elif dir_item != "tests":
if os.path.isdir(dir_item_path):
self.subdirectories.append(dir_item_path)
else:
sys.stderr.write(
"Encountered file '%s' while recursing, extension '%s' "
"is unknown. Skipping..\n"
% (dir_item, extension)
)
def load_benchmark_or_group(self, guide_directory):
"""
Loads a given benchmark or group from the specified benchmark_file or
group_file, in the context of guide_directory, profiles_dir and env_yaml.
Returns the loaded group or benchmark.
"""
group = None
if self.group_file and self.benchmark_file:
raise ValueError("A .benchmark file and a .group file were found in "
"the same directory '%s'" % (guide_directory))
# we treat benchmark as a special form of group in the following code
if self.benchmark_file:
group = Benchmark.from_yaml(
self.benchmark_file, self.env_yaml, 'product-name'
)
if self.profiles_dir:
group.add_profiles_from_dir(self.profiles_dir, self.env_yaml)
if self.group_file:
group = Group.from_yaml(self.group_file, self.env_yaml)
self.all_groups[group.id_] = group
return group
def _load_group_process_and_recurse(self, guide_directory):
self.loaded_group = self.load_benchmark_or_group(guide_directory)
if self.loaded_group:
if self.parent_group:
self.parent_group.add_group(self.loaded_group, env_yaml=self.env_yaml)
self._process_values()
self._recurse_into_subdirs()
self._process_rules()
def process_directory_tree(self, start_dir, extra_group_dirs=None):
self._collect_items_to_load(start_dir)
if extra_group_dirs:
self.subdirectories += extra_group_dirs
self._load_group_process_and_recurse(start_dir)
def process_directory_trees(self, directories):
start_dir = directories[0]
extra_group_dirs = directories[1:]
return self.process_directory_tree(start_dir, extra_group_dirs)
def _recurse_into_subdirs(self):
for subdir in self.subdirectories:
loader = self._get_new_loader()
loader.parent_group = self.loaded_group
loader.process_directory_tree(subdir)
self.all_values.update(loader.all_values)
self.all_rules.update(loader.all_rules)
self.all_groups.update(loader.all_groups)
def _get_new_loader(self):
raise NotImplementedError()
def _process_values(self):
raise NotImplementedError()
def _process_rules(self):
raise NotImplementedError()
def save_all_entities(self, base_dir):
destdir = os.path.join(base_dir, "rules")
mkdir_p(destdir)
if self.all_rules:
self.save_entities(self.all_rules.values(), destdir)
destdir = os.path.join(base_dir, "groups")
mkdir_p(destdir)
if self.all_groups:
self.save_entities(self.all_groups.values(), destdir)
destdir = os.path.join(base_dir, "values")
mkdir_p(destdir)
if self.all_values:
self.save_entities(self.all_values.values(), destdir)
def save_entities(self, entities, destdir):
if not entities:
return
for entity in entities:
basename = entity.id_ + ".yml"
dest_filename = os.path.join(destdir, basename)
entity.dump_yaml(dest_filename)
class BuildLoader(DirectoryLoader):
def __init__(self, profiles_dir, env_yaml,
sce_metadata_path=None):
super(BuildLoader, self).__init__(profiles_dir, env_yaml)
self.sce_metadata = None
if sce_metadata_path and os.path.getsize(sce_metadata_path):
self.sce_metadata = json.load(open(sce_metadata_path, 'r'))
def _process_values(self):
for value_yaml in self.value_files:
value = Value.from_yaml(value_yaml, self.env_yaml)
self.all_values[value.id_] = value
self.loaded_group.add_value(value)
def _process_rules(self):
for rule_yaml in self.rule_files:
try:
rule = Rule.from_yaml(rule_yaml, self.env_yaml, self.sce_metadata)
except DocumentationNotComplete:
# Happens on non-debug build when a rule is "documentation-incomplete"
continue
prodtypes = parse_prodtype(rule.prodtype)
if "all" not in prodtypes and self.product not in prodtypes:
continue
self.all_rules[rule.id_] = rule
self.loaded_group.add_rule(rule, env_yaml=self.env_yaml)
if self.loaded_group.platforms:
rule.inherited_platforms += self.loaded_group.platforms
rule.normalize(self.env_yaml["product"])
def _get_new_loader(self):
loader = BuildLoader(
self.profiles_dir, self.env_yaml)
# Do it this way so we only have to parse the SCE metadata once.
loader.sce_metadata = self.sce_metadata
return loader
def export_group_to_file(self, filename):
return self.loaded_group.to_file(filename)
class LinearLoader(object):
def __init__(self, env_yaml, resolved_path):
self.resolved_rules_dir = os.path.join(resolved_path, "rules")
self.rules = dict()
self.resolved_profiles_dir = os.path.join(resolved_path, "profiles")
self.profiles = dict()
self.resolved_groups_dir = os.path.join(resolved_path, "groups")
self.groups = dict()
self.resolved_values_dir = os.path.join(resolved_path, "values")
self.values = dict()
self.benchmark = None
self.env_yaml = env_yaml
def find_first_groups_ids(self, start_dir):
group_files = glob.glob(os.path.join(start_dir, "*", "group.yml"))
group_ids = [fname.split(os.path.sep)[-2] for fname in group_files]
return group_ids
def load_entities_by_id(self, filenames, destination, cls):
for fname in filenames:
entity = cls.from_yaml(fname, self.env_yaml)
destination[entity.id_] = entity
def load_benchmark(self, directory):
self.benchmark = Benchmark.from_yaml(
os.path.join(directory, "benchmark.yml"), self.env_yaml, "product-name")
self.benchmark.add_profiles_from_dir(self.resolved_profiles_dir, self.env_yaml)
benchmark_first_groups = self.find_first_groups_ids(directory)
for gid in benchmark_first_groups:
self.benchmark.add_group(self.groups[gid], self.env_yaml)
def load_compiled_content(self):
filenames = glob.glob(os.path.join(self.resolved_rules_dir, "*.yml"))
self.load_entities_by_id(filenames, self.rules, Rule)
filenames = glob.glob(os.path.join(self.resolved_groups_dir, "*.yml"))
self.load_entities_by_id(filenames, self.groups, Group)
filenames = glob.glob(os.path.join(self.resolved_profiles_dir, "*.yml"))
self.load_entities_by_id(filenames, self.profiles, Profile)
filenames = glob.glob(os.path.join(self.resolved_values_dir, "*.yml"))
self.load_entities_by_id(filenames, self.values, Value)
for g in self.groups.values():
g.load_entities(self.rules, self.values, self.groups)
def export_benchmark_to_file(self, filename):
return self.benchmark.to_file(filename)
def export_ocil_to_file(self, filename):
root = ET.Element('ocil')
root.set('xmlns:xsi', xsi_namespace)
root.set("xmlns", ocil_namespace)
root.set("xmlns:xhtml", xhtml_namespace)
tree = ET.ElementTree(root)
generator = ET.SubElement(root, "generator")
product_name = ET.SubElement(generator, "product_name")
product_name.text = "build_shorthand.py from SCAP Security Guide"
product_version = ET.SubElement(generator, "product_version")
product_version.text = "ssg: " + self.env_yaml["ssg_version_str"]
schema_version = ET.SubElement(generator, "schema_version")
schema_version.text = "2.0"
timestamp_el = ET.SubElement(generator, "timestamp")
timestamp_el.text = timestamp
questionnaires = ET.SubElement(root, "questionnaires")
test_actions = ET.SubElement(root, "test_actions")
questions = ET.SubElement(root, "questions")
for rule in self.rules.values():
if not rule.ocil and not rule.ocil_clause:
continue
questionnaire, action, boolean_question = rule.to_ocil()
questionnaires.append(questionnaire)
test_actions.append(action)
questions.append(boolean_question)
tree.write(filename)
| [
"copy.deepcopy",
"os.path.basename",
"os.path.isdir",
"os.path.getsize",
"os.path.dirname",
"yaml.dump",
"datetime.date.today",
"xml.sax.saxutils.escape",
"collections.defaultdict",
"os.path.isfile",
"os.path.normpath",
"os.path.splitext",
"sys.stderr.write",
"os.path.join",
"os.listdir"... | [((2532, 2549), 're.compile', 're.compile', (['regex'], {}), '(regex)\n', (2542, 2549), False, 'import re\n'), ((922, 983), 'yaml.dump', 'yaml.dump', (['dictionary', 'file_object'], {'indent': '(4)', 'sort_keys': '(False)'}), '(dictionary, file_object, indent=4, sort_keys=False)\n', (931, 983), False, 'import yaml\n'), ((4229, 4246), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (4240, 4246), False, 'from collections import defaultdict\n'), ((6986, 7012), 'copy.deepcopy', 'deepcopy', (['rhs.refine_rules'], {}), '(rhs.refine_rules)\n', (6994, 7012), False, 'from copy import deepcopy\n'), ((9948, 9975), 'os.path.basename', 'os.path.basename', (['yaml_file'], {}), '(yaml_file)\n', (9964, 9975), False, 'import os\n'), ((11067, 11094), 'os.path.normpath', 'os.path.normpath', (['yaml_file'], {}), '(yaml_file)\n', (11083, 11094), False, 'import os\n'), ((23080, 23097), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (23091, 23097), False, 'from collections import defaultdict\n'), ((47218, 47246), 're.compile', 're.compile', (['"""^CCI-[0-9]{6}$"""'], {}), "('^CCI-[0-9]{6}$')\n", (47228, 47246), False, 'import re\n'), ((69320, 69351), 'os.path.join', 'os.path.join', (['base_dir', '"""rules"""'], {}), "(base_dir, 'rules')\n", (69332, 69351), False, 'import os\n'), ((69488, 69520), 'os.path.join', 'os.path.join', (['base_dir', '"""groups"""'], {}), "(base_dir, 'groups')\n", (69500, 69520), False, 'import os\n'), ((69659, 69691), 'os.path.join', 'os.path.join', (['base_dir', '"""values"""'], {}), "(base_dir, 'values')\n", (69671, 69691), False, 'import os\n'), ((71912, 71948), 'os.path.join', 'os.path.join', (['resolved_path', '"""rules"""'], {}), "(resolved_path, 'rules')\n", (71924, 71948), False, 'import os\n'), ((72015, 72054), 'os.path.join', 'os.path.join', (['resolved_path', '"""profiles"""'], {}), "(resolved_path, 'profiles')\n", (72027, 72054), False, 'import os\n'), ((72122, 72159), 'os.path.join', 
'os.path.join', (['resolved_path', '"""groups"""'], {}), "(resolved_path, 'groups')\n", (72134, 72159), False, 'import os\n'), ((72225, 72262), 'os.path.join', 'os.path.join', (['resolved_path', '"""values"""'], {}), "(resolved_path, 'values')\n", (72237, 72262), False, 'import os\n'), ((1163, 1207), 'yaml.dump', 'yaml.dump', (['dictionary', 'file_object'], {'indent': '(4)'}), '(dictionary, file_object, indent=4)\n', (1172, 1207), False, 'import yaml\n'), ((29726, 29742), 'os.listdir', 'os.listdir', (['dir_'], {}), '(dir_)\n', (29736, 29742), False, 'import os\n'), ((29773, 29801), 'os.path.join', 'os.path.join', (['dir_', 'dir_item'], {}), '(dir_, dir_item)\n', (29785, 29801), False, 'import os\n'), ((46579, 46657), 'os.path.join', 'os.path.join', (["env_yaml['product']", '"""checks/sce"""', "rule.sce_metadata['filename']"], {}), "(env_yaml['product'], 'checks/sce', rule.sce_metadata['filename'])\n", (46591, 46657), False, 'import os\n'), ((63442, 63476), 're.sub', 're.sub', (['"""</?[^>]+>"""', '""""""', 'self.ocil'], {}), "('</?[^>]+>', '', self.ocil)\n", (63448, 63476), False, 'import re\n'), ((65494, 65521), 'os.listdir', 'os.listdir', (['guide_directory'], {}), '(guide_directory)\n', (65504, 65521), False, 'import os\n'), ((65552, 65591), 'os.path.join', 'os.path.join', (['guide_directory', 'dir_item'], {}), '(guide_directory, dir_item)\n', (65564, 65591), False, 'import os\n'), ((65619, 65645), 'os.path.splitext', 'os.path.splitext', (['dir_item'], {}), '(dir_item)\n', (65635, 65645), False, 'import os\n'), ((70007, 70038), 'os.path.join', 'os.path.join', (['destdir', 'basename'], {}), '(destdir, basename)\n', (70019, 70038), False, 'import os\n'), ((70343, 70377), 'os.path.getsize', 'os.path.getsize', (['sce_metadata_path'], {}), '(sce_metadata_path)\n', (70358, 70377), False, 'import os\n'), ((72437, 72478), 'os.path.join', 'os.path.join', (['start_dir', '"""*"""', '"""group.yml"""'], {}), "(start_dir, '*', 'group.yml')\n", (72449, 72478), False, 'import 
os\n'), ((72880, 72920), 'os.path.join', 'os.path.join', (['directory', '"""benchmark.yml"""'], {}), "(directory, 'benchmark.yml')\n", (72892, 72920), False, 'import os\n'), ((73295, 73341), 'os.path.join', 'os.path.join', (['self.resolved_rules_dir', '"""*.yml"""'], {}), "(self.resolved_rules_dir, '*.yml')\n", (73307, 73341), False, 'import os\n'), ((73436, 73483), 'os.path.join', 'os.path.join', (['self.resolved_groups_dir', '"""*.yml"""'], {}), "(self.resolved_groups_dir, '*.yml')\n", (73448, 73483), False, 'import os\n'), ((73580, 73629), 'os.path.join', 'os.path.join', (['self.resolved_profiles_dir', '"""*.yml"""'], {}), "(self.resolved_profiles_dir, '*.yml')\n", (73592, 73629), False, 'import os\n'), ((73730, 73777), 'os.path.join', 'os.path.join', (['self.resolved_values_dir', '"""*.yml"""'], {}), "(self.resolved_values_dir, '*.yml')\n", (73742, 73777), False, 'import os\n'), ((10115, 10141), 'os.path.dirname', 'os.path.dirname', (['yaml_file'], {}), '(yaml_file)\n', (10130, 10141), False, 'import os\n'), ((14510, 14532), 'xml.sax.saxutils.escape', 'escape', (['self.reference'], {}), '(self.reference)\n', (14516, 14532), False, 'from xml.sax.saxutils import escape\n'), ((29821, 29850), 'os.path.isfile', 'os.path.isfile', (['dir_item_path'], {}), '(dir_item_path)\n', (29835, 29850), False, 'import os\n'), ((29916, 29947), 'os.path.basename', 'os.path.basename', (['dir_item_path'], {}), '(dir_item_path)\n', (29932, 29947), False, 'import os\n'), ((29999, 30139), 'sys.stderr.write', 'sys.stderr.write', (['("Encountered file \'%s\' while looking for profiles, extension \'%s\' is unknown. Skipping..\\n"\n % (dir_item, ext))'], {}), '(\n """Encountered file \'%s\' while looking for profiles, extension \'%s\' is unknown. 
Skipping..\n"""\n % (dir_item, ext))\n', (30015, 30139), False, 'import sys\n'), ((31425, 31446), 'datetime.date.today', 'datetime.date.today', ([], {}), '()\n', (31444, 31446), False, 'import datetime\n'), ((44926, 44943), 'copy.deepcopy', 'deepcopy', (['v', 'memo'], {}), '(v, memo)\n', (44934, 44943), False, 'from copy import deepcopy\n'), ((66420, 66448), 'os.path.isdir', 'os.path.isdir', (['dir_item_path'], {}), '(dir_item_path)\n', (66433, 66448), False, 'import os\n'), ((66554, 66686), 'sys.stderr.write', 'sys.stderr.write', (['("Encountered file \'%s\' while recursing, extension \'%s\' is unknown. Skipping..\\n"\n % (dir_item, extension))'], {}), '(\n "Encountered file \'%s\' while recursing, extension \'%s\' is unknown. Skipping..\\n"\n % (dir_item, extension))\n', (66570, 66686), False, 'import sys\n')] |
# Generated by Django 3.1 on 2020-11-09 11:29
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("selfswab", "0004_auto_20201029_1046"),
]
operations = [
migrations.AlterField(
model_name="selfswabtest",
name="barcode",
field=models.CharField(max_length=255, unique=True),
),
]
| [
"django.db.models.CharField"
] | [((342, 387), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(255)', 'unique': '(True)'}), '(max_length=255, unique=True)\n', (358, 387), False, 'from django.db import migrations, models\n')] |
from wsgiref.simple_server import make_server
from framework import settings
from framework import wsgi
from framework.consts import SERVER_RUNNING_BANNER
def run():
banner = SERVER_RUNNING_BANNER.format(host=settings.HOST, port=settings.PORT)
with make_server(settings.HOST, settings.PORT, wsgi.application) as httpd:
print(banner)
try:
httpd.serve_forever()
except KeyboardInterrupt:
print("\n! stopping server\n")
finally:
httpd.shutdown()
print("\n--- server has been shut down ---\n\n")
if __name__ == "__main__":
run()
| [
"wsgiref.simple_server.make_server",
"framework.consts.SERVER_RUNNING_BANNER.format"
] | [((182, 250), 'framework.consts.SERVER_RUNNING_BANNER.format', 'SERVER_RUNNING_BANNER.format', ([], {'host': 'settings.HOST', 'port': 'settings.PORT'}), '(host=settings.HOST, port=settings.PORT)\n', (210, 250), False, 'from framework.consts import SERVER_RUNNING_BANNER\n'), ((260, 319), 'wsgiref.simple_server.make_server', 'make_server', (['settings.HOST', 'settings.PORT', 'wsgi.application'], {}), '(settings.HOST, settings.PORT, wsgi.application)\n', (271, 319), False, 'from wsgiref.simple_server import make_server\n')] |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# @Author: <NAME> (<EMAIL>)
# @Date: 2020-09-01
# @Filename: test_command.py
# @License: BSD 3-clause (http://www.opensource.org/licenses/BSD-3-Clause)
import asyncio
import pytest
from clu import Command, CommandError, CommandStatus
from clu.exceptions import CluWarning
@pytest.fixture
def command(mocker):
yield Command(command_string="say-hello", command_id=100, actor=mocker.MagicMock())
def test_command(command):
assert command.body == "say-hello"
def test_set_status(command):
command.set_status(CommandStatus.DONE)
assert command.status.is_done
assert command.status == CommandStatus.DONE
assert command.done()
def test_set_done_command(command):
command.set_status(CommandStatus.DONE)
with pytest.warns(CluWarning):
command.set_status(CommandStatus.DONE)
def test_set_status_fails(command):
with pytest.raises(TypeError):
command.set_status({})
def test_set_status_int(command):
command.set_status(CommandStatus.FAILED.value)
assert command.status.is_done
assert command.status.did_fail
assert command.done()
def test_set_status_str(command):
command.status = "TIMEDOUT"
assert command.status.is_done
assert command.status.did_fail
assert command.status == CommandStatus.TIMEDOUT
assert command.done()
def test_set_status_str_fails(command):
with pytest.raises(TypeError):
command.set_status("AAAAA")
def test_child_command(command):
child = Command(command_string="new-command", parent=command)
assert child.parent == command
def test_child_command_write(command):
command.command_id = 666
child = Command(command_string="new-command", parent=command)
child.write("i", "hello")
command.actor.write.assert_called_with(
"i",
message={"text": "hello"},
command=command,
broadcast=False,
silent=False,
**{},
)
def test_child_command_finished(command):
child = Command(command_string="new-command", parent=command)
child.finish(text="Finished")
command.actor.write.assert_called_with(
"i",
message={},
text="Finished",
command=command,
broadcast=False,
silent=False,
**{},
)
assert child.status.did_succeed
def test_child_command_running(command):
child = Command(command_string="new-command", parent=command)
child.set_status("RUNNING")
command.actor.write.assert_not_called()
def test_child_command_failed(command):
child = Command(command_string="new-command", parent=command)
child.fail(error="Failed")
command.actor.write.assert_called_with(
"e",
message={},
error="Failed",
command=command,
broadcast=False,
silent=False,
**{},
)
assert child.status.did_fail
def test_write_str(command):
command.write("i", "hello")
command.actor.write.assert_called_with(
"i",
message={"text": "hello"},
command=command,
broadcast=False,
silent=False,
**{},
)
def test_write_dict(command):
command.write("i", {"key": "hello"})
command.actor.write.assert_called_with(
"i",
message={"key": "hello"},
command=command,
broadcast=False,
silent=False,
**{},
)
def test_write_bad_message(command):
with pytest.raises(ValueError):
command.write("i", 100)
def test_write_no_actor(command):
command.actor = None
with pytest.raises(CommandError):
command.write("i", "hi")
@pytest.mark.asyncio
async def test_wait_for_status(command, event_loop):
async def mark_cancelled():
await asyncio.sleep(0.01)
command.set_status(CommandStatus.CANCELLED)
event_loop.create_task(mark_cancelled())
await command.wait_for_status(CommandStatus.CANCELLED)
assert True
@pytest.mark.asyncio
async def test_status_callback(command):
global result
result = 0
def callback(status):
global result
result = result + 1
assert isinstance(status, CommandStatus)
command.callbacks.append(callback)
command.finish()
await asyncio.sleep(0.01)
assert result
@pytest.mark.asyncio
async def test_time_limit(event_loop):
command = Command(command_string="new-command", time_limit=0.5)
await asyncio.sleep(0.6)
assert command.status == CommandStatus.TIMEDOUT
assert command.done()
| [
"pytest.warns",
"pytest.raises",
"asyncio.sleep",
"clu.Command"
] | [((1535, 1588), 'clu.Command', 'Command', ([], {'command_string': '"""new-command"""', 'parent': 'command'}), "(command_string='new-command', parent=command)\n", (1542, 1588), False, 'from clu import Command, CommandError, CommandStatus\n'), ((1707, 1760), 'clu.Command', 'Command', ([], {'command_string': '"""new-command"""', 'parent': 'command'}), "(command_string='new-command', parent=command)\n", (1714, 1760), False, 'from clu import Command, CommandError, CommandStatus\n'), ((2033, 2086), 'clu.Command', 'Command', ([], {'command_string': '"""new-command"""', 'parent': 'command'}), "(command_string='new-command', parent=command)\n", (2040, 2086), False, 'from clu import Command, CommandError, CommandStatus\n'), ((2409, 2462), 'clu.Command', 'Command', ([], {'command_string': '"""new-command"""', 'parent': 'command'}), "(command_string='new-command', parent=command)\n", (2416, 2462), False, 'from clu import Command, CommandError, CommandStatus\n'), ((2595, 2648), 'clu.Command', 'Command', ([], {'command_string': '"""new-command"""', 'parent': 'command'}), "(command_string='new-command', parent=command)\n", (2602, 2648), False, 'from clu import Command, CommandError, CommandStatus\n'), ((4383, 4436), 'clu.Command', 'Command', ([], {'command_string': '"""new-command"""', 'time_limit': '(0.5)'}), "(command_string='new-command', time_limit=0.5)\n", (4390, 4436), False, 'from clu import Command, CommandError, CommandStatus\n'), ((796, 820), 'pytest.warns', 'pytest.warns', (['CluWarning'], {}), '(CluWarning)\n', (808, 820), False, 'import pytest\n'), ((917, 941), 'pytest.raises', 'pytest.raises', (['TypeError'], {}), '(TypeError)\n', (930, 941), False, 'import pytest\n'), ((1425, 1449), 'pytest.raises', 'pytest.raises', (['TypeError'], {}), '(TypeError)\n', (1438, 1449), False, 'import pytest\n'), ((3462, 3487), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (3475, 3487), False, 'import pytest\n'), ((3593, 3620), 'pytest.raises', 
'pytest.raises', (['CommandError'], {}), '(CommandError)\n', (3606, 3620), False, 'import pytest\n'), ((4267, 4286), 'asyncio.sleep', 'asyncio.sleep', (['(0.01)'], {}), '(0.01)\n', (4280, 4286), False, 'import asyncio\n'), ((4447, 4465), 'asyncio.sleep', 'asyncio.sleep', (['(0.6)'], {}), '(0.6)\n', (4460, 4465), False, 'import asyncio\n'), ((3777, 3796), 'asyncio.sleep', 'asyncio.sleep', (['(0.01)'], {}), '(0.01)\n', (3790, 3796), False, 'import asyncio\n')] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.