code
stringlengths 22
1.05M
| apis
listlengths 1
3.31k
| extract_api
stringlengths 75
3.25M
|
|---|---|---|
from flask import request, render_template, redirect, url_for, session, jsonify, abort
from flask_babel import _
import os.path
import pandas as pd
from transparentai import sustainable
from ..models import Project
from ..models.modules import ModuleSustainable
from .services.projects import format_project, control_project, load_modules, init_anwsers
from .services.modules import sustainable as sustainable_module
from .services.commons import get_header_attributes
from .controller_class import Controller
from ..utils import add_in_db, exists_in_db
from ..src import get_questions
# Generic CRUD controller for Project entities: formatting, validation and
# module loading are delegated to the project service helpers.
project_controller = Controller(component=Project,
                                format_fn=format_project,
                                control_fn=control_project,
                                module_fn=load_modules)
# CRUD controller for the per-project sustainability module (CO2 estimation).
sustainable_controller = Controller(
    component=ModuleSustainable,
    format_fn=sustainable_module.format_module,
    control_fn=sustainable_module.control_module)
def index():
    """Render the listing page with every known project."""
    return render_template(
        "projects/index.html",
        title=_('Projects'),
        session=session,
        projects=project_controller.index(),
        header=get_header_attributes())
def get_all_instances_json():
    """Return every project serialized as a JSON array of dicts."""
    serialized = [project.to_dict() for project in project_controller.index()]
    return jsonify(serialized)
def new():
    """Show the project-creation form; on POST, create and redirect."""
    if request.method == 'POST':
        project = project_controller.create()
        init_anwsers(project)
        if project is not None:
            return redirect(url_for('projects.get_instance',
                                    name=project.name))
    # On GET, or when creation failed, re-render the form with the
    # previously submitted values (empty on a plain GET).
    return render_template("projects/new.html",
                           title=_('Create a new project'),
                           session=session,
                           header=get_header_attributes(),
                           previous=request.form)
def edit(name):
    """Show the edit form for project *name*; on POST, apply the update.

    Fix: previously an unknown *name* crashed with AttributeError on
    ``None.to_dict()``; now it redirects to the index, mirroring
    ``get_instance``.
    """
    title = _('Edit ') + name
    header = get_header_attributes()
    project = project_controller.get_instance(name)
    if project is None:
        return redirect(url_for('projects.index'))
    previous = project.to_dict()
    # Temporary until the form can handle lists natively.
    previous['members'] = ', '.join(previous['members'])
    if request.method == 'POST':
        previous = request.form
        project = project_controller.update(name)
        if project is not None:
            return redirect(url_for('projects.get_instance',
                                    name=project.name))
    return render_template("projects/edit.html",
                           title=title,
                           session=session,
                           header=header,
                           previous=previous,
                           project=project)
def get_instance(name):
    """Render the detail page of project *name*; unknown names go to the index."""
    project = project_controller.get_instance(name)
    if project is None:
        return redirect(url_for('projects.index'))
    header = get_header_attributes()
    header['current_project'] = name
    return render_template("projects/instance.html",
                           session=session,
                           project=project,
                           header=header,
                           title=name,
                           questions=get_questions())
def get_instance_json(name):
    """Return project *name* as JSON; 404 when it does not exist.

    Fix: an unknown *name* previously raised AttributeError on
    ``None.to_dict()`` (HTTP 500); now it aborts with 404, consistent with
    ``estimate_co2``'s handling of missing resources.
    """
    project = project_controller.get_instance(name)
    if project is None:
        return abort(404)
    return jsonify(project.to_dict())
def create():
    """Create a project from the submitted form, then return to the index."""
    created = project_controller.create()
    init_anwsers(created)
    return redirect(url_for('projects.index'))
def update(name):
    """Apply the submitted form to project *name* and show its detail page."""
    project_controller.update(name)
    target = url_for('projects.get_instance', name=name)
    return redirect(target)
def delete(name):
    """Remove project *name* and return to the index."""
    project_controller.delete(name)
    target = url_for('projects.index')
    return redirect(target)
def post_instance(name):
    """Dispatch a POSTed form on its ``_method`` field (method-override pattern).

    HTML forms only support GET/POST, so the real verb is tunnelled in a
    hidden ``_method`` field; anything else falls back to the detail page.
    """
    method = request.form.get('_method')
    if method == 'POST':
        return create()
    if method == 'PUT':
        return update(name)
    if method == 'DELETE':
        return delete(name)
    return redirect(url_for('projects.get_instance', name=name))
def estimate_co2(name):
    """Render the CO2-estimation form of project *name*; on POST, recompute.

    Fix: only the header assignment was guarded against a missing project,
    so an unknown *name* crashed on ``None.module_sustainable`` right after
    the guard; now it aborts with 404 up front.
    """
    project = project_controller.get_instance(name)
    if project is None:
        return abort(404)
    header = get_header_attributes()
    header['current_project'] = project.name
    module = project.module_sustainable
    if module is None:
        return abort(404)
    previous = module.to_dict()
    if request.method == 'POST':
        previous = request.form
        module = sustainable_controller.update(module.id, id_col='id')
        if module is not None:
            sustainable_module.compute_co2_estimation(project)
            # Redirect so a refresh does not re-submit the form.
            return redirect(url_for('projects.estimate_co2', name=name))
    # Country/region choices come from the transparentai energy dataset.
    locations = list(sustainable.get_energy_data().keys())
    return render_template("modules/estimate_co2.html",
                           session=session,
                           previous=previous,
                           header=header,
                           title=_('Estimate CO2: ') + name,
                           project=project,
                           locations=locations)
def modules(name):
    """Render the analytics-libraries page for project *name*."""
    header = get_header_attributes()
    header['current_project'] = name
    project = project_controller.get_instance(name)
    dataset = project.dataset
    model = None if dataset is None else dataset.model
    return render_template("projects/modules/index.html",
                           title=_('Analytics libraries'),
                           session=session,
                           header=header,
                           project=project,
                           dataset=dataset,
                           model=model)
def components(name):
    """Render the components page (dataset + model) for project *name*."""
    header = get_header_attributes()
    header['current_project'] = name
    project = project_controller.get_instance(name)
    dataset = project.dataset
    model = None if dataset is None else dataset.model
    return render_template("projects/components/index.html",
                           title=_('Project components'),
                           session=session,
                           header=header,
                           project=project,
                           dataset=dataset,
                           model=model)
def evaluation(name):
    """Render the trusting-evaluation questionnaire page for project *name*."""
    header = get_header_attributes()
    header['current_project'] = name
    project = project_controller.get_instance(name)
    dataset = project.dataset
    model = None if dataset is None else dataset.model
    return render_template("projects/evaluation/index.html",
                           title=_('Trusting evaluation'),
                           session=session,
                           header=header,
                           project=project,
                           dataset=dataset,
                           model=model,
                           questions=get_questions())
def model(name):
    """Shortcut: forward to the model detail page of project *name*."""
    target = url_for('models.get_instance', name=name)
    return redirect(target)
def dataset(name):
    """Shortcut: forward to the dataset detail page of project *name*."""
    target = url_for('datasets.get_instance', name=name)
    return redirect(target)
|
[
"flask.abort",
"flask.jsonify",
"flask.url_for",
"flask.render_template",
"transparentai.sustainable.get_energy_data",
"flask_babel._"
] |
[((1011, 1024), 'flask_babel._', '_', (['"""Projects"""'], {}), "('Projects')\n", (1012, 1024), False, 'from flask_babel import _\n'), ((1116, 1222), 'flask.render_template', 'render_template', (['"""projects/index.html"""'], {'title': 'title', 'session': 'session', 'projects': 'projects', 'header': 'header'}), "('projects/index.html', title=title, session=session,\n projects=projects, header=header)\n", (1131, 1222), False, 'from flask import request, render_template, redirect, url_for, session, jsonify, abort\n'), ((1465, 1482), 'flask.jsonify', 'jsonify', (['projects'], {}), '(projects)\n', (1472, 1482), False, 'from flask import request, render_template, redirect, url_for, session, jsonify, abort\n'), ((1508, 1533), 'flask_babel._', '_', (['"""Create a new project"""'], {}), "('Create a new project')\n", (1509, 1533), False, 'from flask_babel import _\n'), ((1871, 1976), 'flask.render_template', 'render_template', (['"""projects/new.html"""'], {'title': 'title', 'session': 'session', 'header': 'header', 'previous': 'previous'}), "('projects/new.html', title=title, session=session, header=\n header, previous=previous)\n", (1886, 1976), False, 'from flask import request, render_template, redirect, url_for, session, jsonify, abort\n'), ((2628, 2751), 'flask.render_template', 'render_template', (['"""projects/edit.html"""'], {'title': 'title', 'session': 'session', 'header': 'header', 'previous': 'previous', 'project': 'project'}), "('projects/edit.html', title=title, session=session, header=\n header, previous=previous, project=project)\n", (2643, 2751), False, 'from flask import request, render_template, redirect, url_for, session, jsonify, abort\n'), ((3172, 3300), 'flask.render_template', 'render_template', (['"""projects/instance.html"""'], {'session': 'session', 'project': 'project', 'header': 'header', 'title': 'title', 'questions': 'questions'}), "('projects/instance.html', session=session, project=project,\n header=header, title=title, 
questions=questions)\n", (3187, 3300), False, 'from flask import request, render_template, redirect, url_for, session, jsonify, abort\n'), ((5063, 5214), 'flask.render_template', 'render_template', (['"""modules/estimate_co2.html"""'], {'session': 'session', 'previous': 'previous', 'header': 'header', 'title': 'title', 'project': 'project', 'locations': 'locations'}), "('modules/estimate_co2.html', session=session, previous=\n previous, header=header, title=title, project=project, locations=locations)\n", (5078, 5214), False, 'from flask import request, render_template, redirect, url_for, session, jsonify, abort\n'), ((5405, 5429), 'flask_babel._', '_', (['"""Analytics libraries"""'], {}), "('Analytics libraries')\n", (5406, 5429), False, 'from flask_babel import _\n'), ((5658, 5800), 'flask.render_template', 'render_template', (['"""projects/modules/index.html"""'], {'title': 'title', 'session': 'session', 'header': 'header', 'project': 'project', 'dataset': 'dataset', 'model': 'model'}), "('projects/modules/index.html', title=title, session=session,\n header=header, project=project, dataset=dataset, model=model)\n", (5673, 5800), False, 'from flask import request, render_template, redirect, url_for, session, jsonify, abort\n'), ((5995, 6018), 'flask_babel._', '_', (['"""Project components"""'], {}), "('Project components')\n", (5996, 6018), False, 'from flask_babel import _\n'), ((6247, 6393), 'flask.render_template', 'render_template', (['"""projects/components/index.html"""'], {'title': 'title', 'session': 'session', 'header': 'header', 'project': 'project', 'dataset': 'dataset', 'model': 'model'}), "('projects/components/index.html', title=title, session=\n session, header=header, project=project, dataset=dataset, model=model)\n", (6262, 6393), False, 'from flask import request, render_template, redirect, url_for, session, jsonify, abort\n'), ((6587, 6611), 'flask_babel._', '_', (['"""Trusting evaluation"""'], {}), "('Trusting evaluation')\n", (6588, 6611), 
False, 'from flask_babel import _\n'), ((6873, 7044), 'flask.render_template', 'render_template', (['"""projects/evaluation/index.html"""'], {'title': 'title', 'session': 'session', 'header': 'header', 'project': 'project', 'dataset': 'dataset', 'model': 'model', 'questions': 'questions'}), "('projects/evaluation/index.html', title=title, session=\n session, header=header, project=project, dataset=dataset, model=model,\n questions=questions)\n", (6888, 7044), False, 'from flask import request, render_template, redirect, url_for, session, jsonify, abort\n'), ((2110, 2120), 'flask_babel._', '_', (['"""Edit """'], {}), "('Edit ')\n", (2111, 2120), False, 'from flask_babel import _\n'), ((3658, 3683), 'flask.url_for', 'url_for', (['"""projects.index"""'], {}), "('projects.index')\n", (3665, 3683), False, 'from flask import request, render_template, redirect, url_for, session, jsonify, abort\n'), ((3761, 3804), 'flask.url_for', 'url_for', (['"""projects.get_instance"""'], {'name': 'name'}), "('projects.get_instance', name=name)\n", (3768, 3804), False, 'from flask import request, render_template, redirect, url_for, session, jsonify, abort\n'), ((3882, 3907), 'flask.url_for', 'url_for', (['"""projects.index"""'], {}), "('projects.index')\n", (3889, 3907), False, 'from flask import request, render_template, redirect, url_for, session, jsonify, abort\n'), ((4286, 4329), 'flask.url_for', 'url_for', (['"""projects.get_instance"""'], {'name': 'name'}), "('projects.get_instance', name=name)\n", (4293, 4329), False, 'from flask import request, render_template, redirect, url_for, session, jsonify, abort\n'), ((4369, 4388), 'flask_babel._', '_', (['"""Estimate CO2: """'], {}), "('Estimate CO2: ')\n", (4370, 4388), False, 'from flask_babel import _\n'), ((4642, 4652), 'flask.abort', 'abort', (['(404)'], {}), '(404)\n', (4647, 4652), False, 'from flask import request, render_template, redirect, url_for, session, jsonify, abort\n'), ((7264, 7305), 'flask.url_for', 'url_for', 
(['"""models.get_instance"""'], {'name': 'name'}), "('models.get_instance', name=name)\n", (7271, 7305), False, 'from flask import request, render_template, redirect, url_for, session, jsonify, abort\n'), ((7348, 7391), 'flask.url_for', 'url_for', (['"""datasets.get_instance"""'], {'name': 'name'}), "('datasets.get_instance', name=name)\n", (7355, 7391), False, 'from flask import request, render_template, redirect, url_for, session, jsonify, abort\n'), ((3008, 3033), 'flask.url_for', 'url_for', (['"""projects.index"""'], {}), "('projects.index')\n", (3015, 3033), False, 'from flask import request, render_template, redirect, url_for, session, jsonify, abort\n'), ((4024, 4067), 'flask.url_for', 'url_for', (['"""projects.get_instance"""'], {'name': 'name'}), "('projects.get_instance', name=name)\n", (4031, 4067), False, 'from flask import request, render_template, redirect, url_for, session, jsonify, abort\n'), ((1770, 1821), 'flask.url_for', 'url_for', (['"""projects.get_instance"""'], {'name': 'project.name'}), "('projects.get_instance', name=project.name)\n", (1777, 1821), False, 'from flask import request, render_template, redirect, url_for, session, jsonify, abort\n'), ((2527, 2578), 'flask.url_for', 'url_for', (['"""projects.get_instance"""'], {'name': 'project.name'}), "('projects.get_instance', name=project.name)\n", (2534, 2578), False, 'from flask import request, render_template, redirect, url_for, session, jsonify, abort\n'), ((4946, 4989), 'flask.url_for', 'url_for', (['"""projects.estimate_co2"""'], {'name': 'name'}), "('projects.estimate_co2', name=name)\n", (4953, 4989), False, 'from flask import request, render_template, redirect, url_for, session, jsonify, abort\n'), ((5013, 5042), 'transparentai.sustainable.get_energy_data', 'sustainable.get_energy_data', ([], {}), '()\n', (5040, 5042), False, 'from transparentai import sustainable\n')]
|
import pygame as pg

# Side of one square tile, in pixels.
TILE_D = 32
# Comment in for small or big screen
# HD screen
SCREEN_TW, SCREEN_TH = 50, 30
# Low res screen
# NOTE: this second assignment wins; the HD values above are the toggle.
SCREEN_TW, SCREEN_TH = 35, 20
# Screen dimensions in pixels, derived from the tile count.
SCREEN_W_PX = SCREEN_TW * TILE_D
SCREEN_H_PX = SCREEN_TH * TILE_D
SCREEN_SIZE = (SCREEN_W_PX, SCREEN_H_PX)
# Layout in tiles: the map takes 70% of the width and 80% of the height;
# the stat panel sits below the map, the log panel fills the right column.
MAP_VIEW_TW = int(SCREEN_TW * 0.7)
MAP_VIEW_TH = int(SCREEN_TH * 0.8)
STAT_VIEW_TW = MAP_VIEW_TW
STAT_VIEW_TH = SCREEN_TH - MAP_VIEW_TH
LOG_VIEW_TW = SCREEN_TW - MAP_VIEW_TW
LOG_VIEW_TH = SCREEN_TH
# The same three panels, in pixels.
MAP_DIM = (TILE_D * MAP_VIEW_TW, TILE_D * MAP_VIEW_TH)
STAT_DIM = (TILE_D * STAT_VIEW_TW, TILE_D * STAT_VIEW_TH)
LOG_DIM = (TILE_D * LOG_VIEW_TW, TILE_D * LOG_VIEW_TH)
# Panel rectangles (x, y, w, h) used for blitting.
MAP_POS = pg.Rect(0, 0, MAP_DIM[0], MAP_DIM[1])
STAT_POS = pg.Rect(0, TILE_D * MAP_VIEW_TH, STAT_DIM[0], STAT_DIM[1])
LOG_POS = pg.Rect(TILE_D * MAP_VIEW_TW, 0, LOG_DIM[0], LOG_DIM[1])
# Compass direction -> (dx, dy) tile offset; y grows downward.
DIRECTIONS = {'NW': (-1, -1),
              'N': (0, -1),
              'NE': (1, -1),
              'W': (-1, 0),
              'E': (1, 0),
              'SW': (-1, 1),
              'S': (0, 1),
              'SE': (1, 1)}
# Cardinal-only subset of DIRECTIONS.
FOUR_DIRECTIONS = {'N': (0, -1),
                   'W': (-1, 0),
                   'E': (1, 0),
                   'S': (0, 1)}
# Font
LOG_FONTSIZE = TILE_D // 2
# Main Menu
MAINMENU_ITEM_LABELS = ["Resume",
                        "Editor",
                        "Quit",
                        ]
# One info string per label above, shown for the selected entry.
MAINMENU_ITEM_INFO = ["Resume the game.",
                      "Switch between 'Game' and 'Editor' mode. This is a future feature not yet implemented.",
                      "Quit the game. Without saving.",
                      ]
MAINMENU_FONTSIZE = TILE_D * 2
MAINMENU_BGCOL = pg.color.Color("black")
MAINMENU_DEFAULT_COL = pg.color.Color("antiquewhite")
MAINMENU_SELECTED_COL = pg.color.Color("goldenrod")
|
[
"pygame.Rect",
"pygame.color.Color"
] |
[((641, 678), 'pygame.Rect', 'pg.Rect', (['(0)', '(0)', 'MAP_DIM[0]', 'MAP_DIM[1]'], {}), '(0, 0, MAP_DIM[0], MAP_DIM[1])\n', (648, 678), True, 'import pygame as pg\n'), ((690, 748), 'pygame.Rect', 'pg.Rect', (['(0)', '(TILE_D * MAP_VIEW_TH)', 'STAT_DIM[0]', 'STAT_DIM[1]'], {}), '(0, TILE_D * MAP_VIEW_TH, STAT_DIM[0], STAT_DIM[1])\n', (697, 748), True, 'import pygame as pg\n'), ((757, 813), 'pygame.Rect', 'pg.Rect', (['(TILE_D * MAP_VIEW_TW)', '(0)', 'LOG_DIM[0]', 'LOG_DIM[1]'], {}), '(TILE_D * MAP_VIEW_TW, 0, LOG_DIM[0], LOG_DIM[1])\n', (764, 813), True, 'import pygame as pg\n'), ((1624, 1647), 'pygame.color.Color', 'pg.color.Color', (['"""black"""'], {}), "('black')\n", (1638, 1647), True, 'import pygame as pg\n'), ((1671, 1701), 'pygame.color.Color', 'pg.color.Color', (['"""antiquewhite"""'], {}), "('antiquewhite')\n", (1685, 1701), True, 'import pygame as pg\n'), ((1726, 1753), 'pygame.color.Color', 'pg.color.Color', (['"""goldenrod"""'], {}), "('goldenrod')\n", (1740, 1753), True, 'import pygame as pg\n')]
|
"""Global settings and imports"""
import sys
sys.path.append("../../")
import os
import numpy as np
import zipfile
from tqdm import tqdm
import scrapbook as sb
from tempfile import TemporaryDirectory
import tensorflow as tf
tf.get_logger().setLevel('ERROR') # only show error messages
from reco_utils.recommender.deeprec.deeprec_utils import download_deeprec_resources
from reco_utils.recommender.newsrec.newsrec_utils import prepare_hparams
from reco_utils.recommender.newsrec.models.nrms import NRMSModel
from reco_utils.recommender.newsrec.io.mind_iterator import MINDIterator
from reco_utils.recommender.newsrec.newsrec_utils import get_mind_data_set
print("System version: {}".format(sys.version))
print("Tensorflow version: {}".format(tf.__version__))
"""Prepare parameters"""
epochs = 5
seed = 42
batch_size = 32
# Options: demo, small, large
MIND_type = 'demo'
"""Download and load data"""
tmpdir = TemporaryDirectory()
data_path = tmpdir.name
train_news_file = os.path.join(data_path, 'train', r'news.tsv')
train_behaviors_file = os.path.join(data_path, 'train', r'behaviors.tsv')
valid_news_file = os.path.join(data_path, 'valid', r'news.tsv')
valid_behaviors_file = os.path.join(data_path, 'valid', r'behaviors.tsv')
wordEmb_file = os.path.join(data_path, "utils", "embedding.npy")
userDict_file = os.path.join(data_path, "utils", "uid2index.pkl")
wordDict_file = os.path.join(data_path, "utils", "word_dict.pkl")
yaml_file = os.path.join(data_path, "utils", r'nrms.yaml')
mind_url, mind_train_dataset, mind_dev_dataset, mind_utils = get_mind_data_set(MIND_type)
# Fetch the train/valid splits and the utils bundle only when missing locally.
if not os.path.exists(train_news_file):
    download_deeprec_resources(mind_url, os.path.join(data_path, 'train'), mind_train_dataset)
if not os.path.exists(valid_news_file):
    download_deeprec_resources(mind_url, \
                               os.path.join(data_path, 'valid'), mind_dev_dataset)
if not os.path.exists(yaml_file):
    # The model config lives on a separate host from the dataset splits.
    download_deeprec_resources(r'https://recodatasets.z20.web.core.windows.net/newsrec/', \
                               os.path.join(data_path, 'utils'), mind_utils)
"""Create hyper-parameters"""
hparams = prepare_hparams(yaml_file,
wordEmb_file=wordEmb_file,
wordDict_file=wordDict_file,
userDict_file=userDict_file,
batch_size=batch_size,
epochs=epochs,
show_step=10)
print(hparams)
"""Train the NRMS model"""
iterator = MINDIterator
model = NRMSModel(hparams, iterator, seed=seed)
print(model.run_eval(valid_news_file, valid_behaviors_file))
model.fit(train_news_file, train_behaviors_file, valid_news_file, valid_behaviors_file)
res_syn = model.run_eval(valid_news_file, valid_behaviors_file)
print(res_syn)
sb.glue("res_syn", res_syn)
"""Save the model"""
model_path = os.path.join(data_path, "model")
os.makedirs(model_path, exist_ok=True)
model.model.save_weights(os.path.join(model_path, "nrms_ckpt"))
"""Output Predcition File"""
group_impr_indexes, group_labels, group_preds = model.run_fast_eval(valid_news_file, valid_behaviors_file)
with open(os.path.join(data_path, 'prediction.txt'), 'w') as f:
for impr_index, preds in tqdm(zip(group_impr_indexes, group_preds)):
impr_index += 1
pred_rank = (np.argsort(np.argsort(preds)[::-1]) + 1).tolist()
pred_rank = '[' + ','.join([str(i) for i in pred_rank]) + ']'
f.write(' '.join([str(impr_index), pred_rank])+ '\n')
f = zipfile.ZipFile(os.path.join(data_path, 'prediction.zip'), 'w', zipfile.ZIP_DEFLATED)
f.write(os.path.join(data_path, 'prediction.txt'), arcname='prediction.txt')
f.close()
|
[
"sys.path.append",
"reco_utils.recommender.newsrec.newsrec_utils.get_mind_data_set",
"tempfile.TemporaryDirectory",
"os.makedirs",
"scrapbook.glue",
"os.path.exists",
"numpy.argsort",
"reco_utils.recommender.newsrec.models.nrms.NRMSModel",
"os.path.join",
"reco_utils.recommender.newsrec.newsrec_utils.prepare_hparams",
"tensorflow.get_logger"
] |
[((45, 70), 'sys.path.append', 'sys.path.append', (['"""../../"""'], {}), "('../../')\n", (60, 70), False, 'import sys\n'), ((912, 932), 'tempfile.TemporaryDirectory', 'TemporaryDirectory', ([], {}), '()\n', (930, 932), False, 'from tempfile import TemporaryDirectory\n'), ((976, 1020), 'os.path.join', 'os.path.join', (['data_path', '"""train"""', '"""news.tsv"""'], {}), "(data_path, 'train', 'news.tsv')\n", (988, 1020), False, 'import os\n'), ((1045, 1094), 'os.path.join', 'os.path.join', (['data_path', '"""train"""', '"""behaviors.tsv"""'], {}), "(data_path, 'train', 'behaviors.tsv')\n", (1057, 1094), False, 'import os\n'), ((1114, 1158), 'os.path.join', 'os.path.join', (['data_path', '"""valid"""', '"""news.tsv"""'], {}), "(data_path, 'valid', 'news.tsv')\n", (1126, 1158), False, 'import os\n'), ((1183, 1232), 'os.path.join', 'os.path.join', (['data_path', '"""valid"""', '"""behaviors.tsv"""'], {}), "(data_path, 'valid', 'behaviors.tsv')\n", (1195, 1232), False, 'import os\n'), ((1249, 1298), 'os.path.join', 'os.path.join', (['data_path', '"""utils"""', '"""embedding.npy"""'], {}), "(data_path, 'utils', 'embedding.npy')\n", (1261, 1298), False, 'import os\n'), ((1315, 1364), 'os.path.join', 'os.path.join', (['data_path', '"""utils"""', '"""uid2index.pkl"""'], {}), "(data_path, 'utils', 'uid2index.pkl')\n", (1327, 1364), False, 'import os\n'), ((1381, 1430), 'os.path.join', 'os.path.join', (['data_path', '"""utils"""', '"""word_dict.pkl"""'], {}), "(data_path, 'utils', 'word_dict.pkl')\n", (1393, 1430), False, 'import os\n'), ((1443, 1488), 'os.path.join', 'os.path.join', (['data_path', '"""utils"""', '"""nrms.yaml"""'], {}), "(data_path, 'utils', 'nrms.yaml')\n", (1455, 1488), False, 'import os\n'), ((1552, 1580), 'reco_utils.recommender.newsrec.newsrec_utils.get_mind_data_set', 'get_mind_data_set', (['MIND_type'], {}), '(MIND_type)\n', (1569, 1580), False, 'from reco_utils.recommender.newsrec.newsrec_utils import get_mind_data_set\n'), ((2128, 2300), 
'reco_utils.recommender.newsrec.newsrec_utils.prepare_hparams', 'prepare_hparams', (['yaml_file'], {'wordEmb_file': 'wordEmb_file', 'wordDict_file': 'wordDict_file', 'userDict_file': 'userDict_file', 'batch_size': 'batch_size', 'epochs': 'epochs', 'show_step': '(10)'}), '(yaml_file, wordEmb_file=wordEmb_file, wordDict_file=\n wordDict_file, userDict_file=userDict_file, batch_size=batch_size,\n epochs=epochs, show_step=10)\n', (2143, 2300), False, 'from reco_utils.recommender.newsrec.newsrec_utils import prepare_hparams\n'), ((2524, 2563), 'reco_utils.recommender.newsrec.models.nrms.NRMSModel', 'NRMSModel', (['hparams', 'iterator'], {'seed': 'seed'}), '(hparams, iterator, seed=seed)\n', (2533, 2563), False, 'from reco_utils.recommender.newsrec.models.nrms import NRMSModel\n'), ((2792, 2819), 'scrapbook.glue', 'sb.glue', (['"""res_syn"""', 'res_syn'], {}), "('res_syn', res_syn)\n", (2799, 2819), True, 'import scrapbook as sb\n'), ((2855, 2887), 'os.path.join', 'os.path.join', (['data_path', '"""model"""'], {}), "(data_path, 'model')\n", (2867, 2887), False, 'import os\n'), ((2888, 2926), 'os.makedirs', 'os.makedirs', (['model_path'], {'exist_ok': '(True)'}), '(model_path, exist_ok=True)\n', (2899, 2926), False, 'import os\n'), ((1589, 1620), 'os.path.exists', 'os.path.exists', (['train_news_file'], {}), '(train_news_file)\n', (1603, 1620), False, 'import os\n'), ((1725, 1756), 'os.path.exists', 'os.path.exists', (['valid_news_file'], {}), '(valid_news_file)\n', (1739, 1756), False, 'import os\n'), ((1891, 1916), 'os.path.exists', 'os.path.exists', (['yaml_file'], {}), '(yaml_file)\n', (1905, 1916), False, 'import os\n'), ((2953, 2990), 'os.path.join', 'os.path.join', (['model_path', '"""nrms_ckpt"""'], {}), "(model_path, 'nrms_ckpt')\n", (2965, 2990), False, 'import os\n'), ((3513, 3554), 'os.path.join', 'os.path.join', (['data_path', '"""prediction.zip"""'], {}), "(data_path, 'prediction.zip')\n", (3525, 3554), False, 'import os\n'), ((3591, 3632), 'os.path.join', 
'os.path.join', (['data_path', '"""prediction.txt"""'], {}), "(data_path, 'prediction.txt')\n", (3603, 3632), False, 'import os\n'), ((224, 239), 'tensorflow.get_logger', 'tf.get_logger', ([], {}), '()\n', (237, 239), True, 'import tensorflow as tf\n'), ((1663, 1695), 'os.path.join', 'os.path.join', (['data_path', '"""train"""'], {}), "(data_path, 'train')\n", (1675, 1695), False, 'import os\n'), ((1832, 1864), 'os.path.join', 'os.path.join', (['data_path', '"""valid"""'], {}), "(data_path, 'valid')\n", (1844, 1864), False, 'import os\n'), ((2041, 2073), 'os.path.join', 'os.path.join', (['data_path', '"""utils"""'], {}), "(data_path, 'utils')\n", (2053, 2073), False, 'import os\n'), ((3139, 3180), 'os.path.join', 'os.path.join', (['data_path', '"""prediction.txt"""'], {}), "(data_path, 'prediction.txt')\n", (3151, 3180), False, 'import os\n'), ((3322, 3339), 'numpy.argsort', 'np.argsort', (['preds'], {}), '(preds)\n', (3332, 3339), True, 'import numpy as np\n')]
|
#!/bin/env python
import os
import scipy as sp
import matplotlib.pyplot as pl
from mpl_toolkits.basemap.cm import sstanom, s3pcpn_l
from matplotlib import dates
from g5lib import field
# Read validation data set
obs = {}
path = os.environ['NOBACKUP'] + '/verification/stress_mon_clim'
# NOTE(review): execfile is Python 2 only; ctl.py is expected to define
# `ctl` in this namespace.
execfile(path + '/ctl.py')
obs['ctl'] = ctl
# Monthly climatologies of the wind-stress components; the factor of 10 is
# presumably a unit conversion — confirm against the ctl dataset.
tx = ctl.fromfile('taux', kind=0).clim(12); tx.data *= 10
ty = ctl.fromfile('tauy', kind=0).clim(12); ty.data *= 10
# Re-center both grids on 30E and wrap longitudes below 29 by +360 so the
# longitude axis is monotonic across the Pacific.
tx.shiftgrid(30.);
tx.grid['lon'] = sp.where(tx.grid['lon'] < 29., tx.grid['lon'] + 360,\
                         tx.grid['lon'])
ty.shiftgrid(30.);
ty.grid['lon'] = sp.where(ty.grid['lon'] < 29., ty.grid['lon'] + 360,\
                         ty.grid['lon'])
# Combine the components into one complex-valued stress field.
var = field.cmplx(tx, ty); var.name = ctl.name + ' TAU'
# Seasonal and annual means along the time axis (axis 0).
ind = [0, 1, 11]; obs['djf'] = var.subset(tind=ind).ave(0); obs['djf'].name += ', DJF'
ind = [5, 6, 7]; obs['jja'] = var.subset(tind=ind).ave(0); obs['jja'].name += ', JJA'
obs['am'] = var.ave(0); obs['am'].name += ', Annual Mean'
# Calculate equatorial profile
# Tropical Pacific box: 130E-280E, 2.1S-2.0N.
lonind = sp.logical_and(var.grid['lon'][0] >= 130.0, var.grid['lon'][0] <= 280.0)
latind = sp.logical_and(var.grid['lat'][:, 0] >= -2.1, var.grid['lat'][:, 0] <= 2.0)
obs['eqprof'] = obs['am'].subset(iind=lonind, jind=latind).ave(2)
# Equatorial Annual Cycle
obs['eqac'] = var.subset(iind=lonind, jind=latind).ave(2)
# Remove the time mean to leave only the annual-cycle anomaly.
obs['eqac'].data -= obs['eqac'].ave(0).data
obs['eqac'].name = var.name + ', Eq. Annual Cycle'
# Plots
path = os.environ['NOBACKUP'] + '/verification/stress_mon_clim/pics'
# Filled-contour options shared by the three seasonal maps.
copts1 = {}
copts1['levels'] = (0., 0.2, 0.4, 0.6, 0.8, 1., 1.5, 2, 2.5, 3)
copts1['cmap'] = s3pcpn_l
def plot_map(figure, F, copts):
    """Draw |F| as a filled map in the given figure, overlaid with a quiver of F."""
    quiver_step = 10
    magnitude = field.absolute(F)
    pl.figure(figure)
    pl.clf()
    magnitude.copts = copts
    magnitude.plot_map()
    F.plot_quiver(quiver_step)
    pl.show()
# One map per season, all saved under the pics directory.
# DJF
season = 'djf'
plot_map(1, obs[season], copts1)
pl.savefig(path + '/tau_' + season + '_qscat.png')
# JJA
season = 'jja'
plot_map(1, obs[season], copts1)
pl.savefig(path + '/tau_' + season + '_qscat.png')
# AM
season = 'am'
plot_map(1, obs[season], copts1)
pl.savefig(path + '/tau_' + season + '_qscat.png')
# Plot Equatorial Annual Cycle
pl.figure(2); pl.clf()
# Filled anomaly field, month labels on the time axis.
obs['eqac'].copts = {'levels': sp.arange(-0.2, 0.21, 0.02),\
                     'cmap': sstanom,\
                     'timefmt': dates.DateFormatter('%b')}
obs['eqac'].plot2d()
# Overlay black contour lines at a coarser interval on the same field.
obs['eqac'].copts = {'func': pl.contour,\
                     'colors': 'black',\
                     'levels': sp.arange(-0.2, 0.21, 0.04),\
                     'timefmt': dates.DateFormatter('%b')}
obs['eqac'].plot2d()
ax = pl.gca(); ax.yaxis.set_major_locator(dates.MonthLocator())
ax.set_title(obs['ctl'].name + ' Eq. Annual cycle')
pl.grid(); pl.show()
pl.savefig(path + '/taux_eq_ac_qscat.png')
|
[
"scipy.where",
"matplotlib.dates.MonthLocator",
"matplotlib.pyplot.show",
"scipy.arange",
"matplotlib.pyplot.clf",
"g5lib.field.absolute",
"scipy.logical_and",
"matplotlib.pyplot.figure",
"matplotlib.dates.DateFormatter",
"matplotlib.pyplot.gca",
"g5lib.field.cmplx",
"matplotlib.pyplot.grid",
"matplotlib.pyplot.savefig"
] |
[((463, 532), 'scipy.where', 'sp.where', (["(tx.grid['lon'] < 29.0)", "(tx.grid['lon'] + 360)", "tx.grid['lon']"], {}), "(tx.grid['lon'] < 29.0, tx.grid['lon'] + 360, tx.grid['lon'])\n", (471, 532), True, 'import scipy as sp\n'), ((589, 658), 'scipy.where', 'sp.where', (["(ty.grid['lon'] < 29.0)", "(ty.grid['lon'] + 360)", "ty.grid['lon']"], {}), "(ty.grid['lon'] < 29.0, ty.grid['lon'] + 360, ty.grid['lon'])\n", (597, 658), True, 'import scipy as sp\n'), ((685, 704), 'g5lib.field.cmplx', 'field.cmplx', (['tx', 'ty'], {}), '(tx, ty)\n', (696, 704), False, 'from g5lib import field\n'), ((981, 1053), 'scipy.logical_and', 'sp.logical_and', (["(var.grid['lon'][0] >= 130.0)", "(var.grid['lon'][0] <= 280.0)"], {}), "(var.grid['lon'][0] >= 130.0, var.grid['lon'][0] <= 280.0)\n", (995, 1053), True, 'import scipy as sp\n'), ((1056, 1131), 'scipy.logical_and', 'sp.logical_and', (["(var.grid['lat'][:, 0] >= -2.1)", "(var.grid['lat'][:, 0] <= 2.0)"], {}), "(var.grid['lat'][:, 0] >= -2.1, var.grid['lat'][:, 0] <= 2.0)\n", (1070, 1131), True, 'import scipy as sp\n'), ((1740, 1790), 'matplotlib.pyplot.savefig', 'pl.savefig', (["(path + '/tau_' + season + '_qscat.png')"], {}), "(path + '/tau_' + season + '_qscat.png')\n", (1750, 1790), True, 'import matplotlib.pyplot as pl\n'), ((1836, 1886), 'matplotlib.pyplot.savefig', 'pl.savefig', (["(path + '/tau_' + season + '_qscat.png')"], {}), "(path + '/tau_' + season + '_qscat.png')\n", (1846, 1886), True, 'import matplotlib.pyplot as pl\n'), ((1930, 1980), 'matplotlib.pyplot.savefig', 'pl.savefig', (["(path + '/tau_' + season + '_qscat.png')"], {}), "(path + '/tau_' + season + '_qscat.png')\n", (1940, 1980), True, 'import matplotlib.pyplot as pl\n'), ((2007, 2019), 'matplotlib.pyplot.figure', 'pl.figure', (['(2)'], {}), '(2)\n', (2016, 2019), True, 'import matplotlib.pyplot as pl\n'), ((2020, 2028), 'matplotlib.pyplot.clf', 'pl.clf', ([], {}), '()\n', (2026, 2028), True, 'import matplotlib.pyplot as pl\n'), ((2419, 2427), 
'matplotlib.pyplot.gca', 'pl.gca', ([], {}), '()\n', (2425, 2427), True, 'import matplotlib.pyplot as pl\n'), ((2528, 2537), 'matplotlib.pyplot.grid', 'pl.grid', ([], {}), '()\n', (2535, 2537), True, 'import matplotlib.pyplot as pl\n'), ((2539, 2548), 'matplotlib.pyplot.show', 'pl.show', ([], {}), '()\n', (2546, 2548), True, 'import matplotlib.pyplot as pl\n'), ((2549, 2591), 'matplotlib.pyplot.savefig', 'pl.savefig', (["(path + '/taux_eq_ac_qscat.png')"], {}), "(path + '/taux_eq_ac_qscat.png')\n", (2559, 2591), True, 'import matplotlib.pyplot as pl\n'), ((1568, 1585), 'g5lib.field.absolute', 'field.absolute', (['F'], {}), '(F)\n', (1582, 1585), False, 'from g5lib import field\n'), ((1590, 1607), 'matplotlib.pyplot.figure', 'pl.figure', (['figure'], {}), '(figure)\n', (1599, 1607), True, 'import matplotlib.pyplot as pl\n'), ((1609, 1617), 'matplotlib.pyplot.clf', 'pl.clf', ([], {}), '()\n', (1615, 1617), True, 'import matplotlib.pyplot as pl\n'), ((1679, 1688), 'matplotlib.pyplot.show', 'pl.show', ([], {}), '()\n', (1686, 1688), True, 'import matplotlib.pyplot as pl\n'), ((2058, 2085), 'scipy.arange', 'sp.arange', (['(-0.2)', '(0.21)', '(0.02)'], {}), '(-0.2, 0.21, 0.02)\n', (2067, 2085), True, 'import scipy as sp\n'), ((2154, 2179), 'matplotlib.dates.DateFormatter', 'dates.DateFormatter', (['"""%b"""'], {}), "('%b')\n", (2173, 2179), False, 'from matplotlib import dates\n'), ((2310, 2337), 'scipy.arange', 'sp.arange', (['(-0.2)', '(0.21)', '(0.04)'], {}), '(-0.2, 0.21, 0.04)\n', (2319, 2337), True, 'import scipy as sp\n'), ((2368, 2393), 'matplotlib.dates.DateFormatter', 'dates.DateFormatter', (['"""%b"""'], {}), "('%b')\n", (2387, 2393), False, 'from matplotlib import dates\n'), ((2456, 2476), 'matplotlib.dates.MonthLocator', 'dates.MonthLocator', ([], {}), '()\n', (2474, 2476), False, 'from matplotlib import dates\n')]
|
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Helper classes for twisted.test.test_ssl.
They are in a separate module so they will not prevent test_ssl importing if
pyOpenSSL is unavailable.
"""
from __future__ import division, absolute_import
from twisted.python.compat import nativeString
from twisted.internet import ssl
from twisted.python.filepath import FilePath
from OpenSSL import SSL
# Path to the PEM file (private key + certificate) shipped next to this
# module; used as the default credential for ServerTLSContext.
certPath = nativeString(FilePath(__file__.encode("utf-8")
                                 ).sibling(b"server.pem").path)
class ClientTLSContext(ssl.ClientContextFactory):
    """Client-side context factory used by the SSL tests."""

    isClient = 1

    def getContext(self):
        """Return a fresh TLSv1 context with no credentials attached."""
        context = SSL.Context(SSL.TLSv1_METHOD)
        return context
class ServerTLSContext:
isClient = 0
def __init__(self, filename=certPath):
self.filename = filename
def getContext(self):
ctx = SSL.Context(SSL.TLSv1_METHOD)
ctx.use_certificate_file(self.filename)
ctx.use_privatekey_file(self.filename)
return ctx
|
[
"OpenSSL.SSL.Context"
] |
[((671, 700), 'OpenSSL.SSL.Context', 'SSL.Context', (['SSL.TLSv1_METHOD'], {}), '(SSL.TLSv1_METHOD)\n', (682, 700), False, 'from OpenSSL import SSL\n'), ((870, 899), 'OpenSSL.SSL.Context', 'SSL.Context', (['SSL.TLSv1_METHOD'], {}), '(SSL.TLSv1_METHOD)\n', (881, 899), False, 'from OpenSSL import SSL\n')]
|
from drl.envs.testing import LockstepEnv
from drl.envs.wrappers.stateless.clip_reward import ClipRewardWrapper
def test_clip_reward():
env = LockstepEnv()
wrapped = ClipRewardWrapper(env, low=0.0, high=0.5, key='extrinsic')
_ = wrapped.reset()
o_tp1, r_t, d_t, i_t = wrapped.step(0)
assert r_t['extrinsic'] == 0.5
wrapped2 = ClipRewardWrapper(wrapped, low=0.0, high=0.25, key='extrinsic')
_ = wrapped2.reset()
o_tp1, r_t, d_t, i_t = wrapped2.step(0)
assert r_t['extrinsic'] == 0.25
|
[
"drl.envs.testing.LockstepEnv",
"drl.envs.wrappers.stateless.clip_reward.ClipRewardWrapper"
] |
[((147, 160), 'drl.envs.testing.LockstepEnv', 'LockstepEnv', ([], {}), '()\n', (158, 160), False, 'from drl.envs.testing import LockstepEnv\n'), ((176, 234), 'drl.envs.wrappers.stateless.clip_reward.ClipRewardWrapper', 'ClipRewardWrapper', (['env'], {'low': '(0.0)', 'high': '(0.5)', 'key': '"""extrinsic"""'}), "(env, low=0.0, high=0.5, key='extrinsic')\n", (193, 234), False, 'from drl.envs.wrappers.stateless.clip_reward import ClipRewardWrapper\n'), ((353, 416), 'drl.envs.wrappers.stateless.clip_reward.ClipRewardWrapper', 'ClipRewardWrapper', (['wrapped'], {'low': '(0.0)', 'high': '(0.25)', 'key': '"""extrinsic"""'}), "(wrapped, low=0.0, high=0.25, key='extrinsic')\n", (370, 416), False, 'from drl.envs.wrappers.stateless.clip_reward import ClipRewardWrapper\n')]
|
from tkinter import *
from PIL import ImageTk, Image
from GameEngine.Vector import *
class Application:
def __init__(self, title, size, fps):
self.root = Tk()
self.root.title(title)
self.width, self.height = size
self.root.geometry(f"{self.width}x{self.height}")
self.root.bind("<Key>", self.__handle_key_press)
self.fps = fps
self.__widgets = []
self.__task_list = []
self.__game_objects = []
self.__action_listeners = []
self.root.after(int((1 / self.fps) * 1000), self.new_frame)
def render_window(self):
for slave in self.root.slaves():
slave.destroy()
for widget in self.__widgets:
widget[0].place(x=widget[1], y=widget[2])
self.execute_task_list()
self.root.after(int((1 / self.fps) * 1000), self.new_frame)
def new_frame(self):
sprites = []
for obj in self.__game_objects:
render_result = obj.render()
if render_result is not None:
sprites.append((render_result, obj.position, obj.rotation))
frame = Frame((self.width, self.height))
for sprite in sprites:
frame.write_sprite(sprite)
frame = ImageTk.PhotoImage(frame.image)
self.__widgets = [
[
Label(self.root, image=frame),
0, 0
]
]
self.__widgets[0][0].image = frame
self.render_window()
def execute_task_list(self):
for task in self.__task_list:
task()
def add_object(self, object):
self.__game_objects.append(object)
self.__task_list.append(object.update)
def add_action_listener(self, obj):
self.__action_listeners.append(obj)
def __handle_key_press(self, event):
for action_listener in self.__action_listeners:
if action_listener.key == event.char:
action_listener.action()
def bind_special_key(self, key_name, f):
self.root.bind(key_name, f)
class Frame:
def __init__(self, size):
self.width, self.height = size
self.image = Image.new("RGBA", size=(self.width, self.height))
self.image.putdata([(0, 0, 0, 255) for i in range(0, self.width * self.height)])
def write_sprite(self, sprite_data):
sprite, position, rotation = sprite_data
pixels_ = self.image.getdata()
pixels = []
p_counter = 0
for y in range(0, self.image.height):
pixels.append([])
for x in range(0, self.image.width):
pixels[y].append(pixels_[p_counter])
p_counter += 1
sprite_pixels_ = sprite.getdata()
sprite_pixels = []
p_counter = 0
for y in range(0, sprite.height):
sprite_pixels.append([])
for x in range(0, sprite.width):
sprite_pixels[y].append(sprite_pixels_[p_counter])
p_counter += 1
p_counter = 0
write_position = position
pixel_position = Vector()
for y in range(0, len(sprite_pixels)):
for x in range(0, len(sprite_pixels[pixel_position.y])):
pixels[write_position.y][write_position.x] = sprite_pixels[pixel_position.y][pixel_position.x]
p_counter += 1
write_position += Vector(1, 0)
pixel_position += Vector(1, 0)
pixel_position.x = 0
pixel_position.y += 1
write_position.x = position.x
write_position.y += 1
new_data = []
for y in range(0, len(pixels)):
for x in range(0, len(pixels[y])):
new_data.append(pixels[y][x])
self.image.putdata(new_data)
|
[
"PIL.Image.new",
"PIL.ImageTk.PhotoImage"
] |
[((1262, 1293), 'PIL.ImageTk.PhotoImage', 'ImageTk.PhotoImage', (['frame.image'], {}), '(frame.image)\n', (1280, 1293), False, 'from PIL import ImageTk, Image\n'), ((2179, 2228), 'PIL.Image.new', 'Image.new', (['"""RGBA"""'], {'size': '(self.width, self.height)'}), "('RGBA', size=(self.width, self.height))\n", (2188, 2228), False, 'from PIL import ImageTk, Image\n')]
|
#%%
from datetime import datetime
import xarray as xr
from cfxarray.profile import depthcoords, profiledataset
from cfxarray.base import dataarraybydepth
# %%
temperature1 = dataarraybydepth(
name="temperature",
standard_name="sea_water_temperature",
long_name="Sea water temperature",
units="degree_Celsius",
data=[10, 15],
).assign_coords(
depthcoords(
depth=[1, 2],
time=datetime.fromisoformat("1970-01-01T00:00:00"),
latitude=59.95,
longitude=10.75,
)
)
# %%
ds1 = profiledataset([temperature1], "profile1", "title", "summary", ["keyword"])
# %%
temperature2 = dataarraybydepth(
name="temperature",
standard_name="sea_water_temperature",
long_name="Sea water temperature",
units="degree_Celsius",
data=[20, 255, 2000, 100],
).assign_coords(
depthcoords(
depth=[1, 2, 10, 20],
time=datetime.fromisoformat("1980-01-01T00:00:00"),
latitude=59.95,
longitude=10.75,
)
)
# %%
ds2 = profiledataset([temperature2], "profile2", "title", "summary", ["keyword"])
# %%
ds = xr.concat([ds1, ds2], dim="profile_name")
# %%
ds.temperature.sel(profile_name="profile1").plot.line("o")
# %%
|
[
"cfxarray.base.dataarraybydepth",
"datetime.datetime.fromisoformat",
"cfxarray.profile.profiledataset",
"xarray.concat"
] |
[((533, 608), 'cfxarray.profile.profiledataset', 'profiledataset', (['[temperature1]', '"""profile1"""', '"""title"""', '"""summary"""', "['keyword']"], {}), "([temperature1], 'profile1', 'title', 'summary', ['keyword'])\n", (547, 608), False, 'from cfxarray.profile import depthcoords, profiledataset\n'), ((1005, 1080), 'cfxarray.profile.profiledataset', 'profiledataset', (['[temperature2]', '"""profile2"""', '"""title"""', '"""summary"""', "['keyword']"], {}), "([temperature2], 'profile2', 'title', 'summary', ['keyword'])\n", (1019, 1080), False, 'from cfxarray.profile import depthcoords, profiledataset\n'), ((1091, 1132), 'xarray.concat', 'xr.concat', (['[ds1, ds2]'], {'dim': '"""profile_name"""'}), "([ds1, ds2], dim='profile_name')\n", (1100, 1132), True, 'import xarray as xr\n'), ((177, 330), 'cfxarray.base.dataarraybydepth', 'dataarraybydepth', ([], {'name': '"""temperature"""', 'standard_name': '"""sea_water_temperature"""', 'long_name': '"""Sea water temperature"""', 'units': '"""degree_Celsius"""', 'data': '[10, 15]'}), "(name='temperature', standard_name='sea_water_temperature',\n long_name='Sea water temperature', units='degree_Celsius', data=[10, 15])\n", (193, 330), False, 'from cfxarray.base import dataarraybydepth\n'), ((629, 799), 'cfxarray.base.dataarraybydepth', 'dataarraybydepth', ([], {'name': '"""temperature"""', 'standard_name': '"""sea_water_temperature"""', 'long_name': '"""Sea water temperature"""', 'units': '"""degree_Celsius"""', 'data': '[20, 255, 2000, 100]'}), "(name='temperature', standard_name='sea_water_temperature',\n long_name='Sea water temperature', units='degree_Celsius', data=[20, \n 255, 2000, 100])\n", (645, 799), False, 'from cfxarray.base import dataarraybydepth\n'), ((417, 462), 'datetime.datetime.fromisoformat', 'datetime.fromisoformat', (['"""1970-01-01T00:00:00"""'], {}), "('1970-01-01T00:00:00')\n", (439, 462), False, 'from datetime import datetime\n'), ((889, 934), 'datetime.datetime.fromisoformat', 
'datetime.fromisoformat', (['"""1980-01-01T00:00:00"""'], {}), "('1980-01-01T00:00:00')\n", (911, 934), False, 'from datetime import datetime\n')]
|
from chromedriver_py import binary_path as driver_path
from selenium.webdriver import DesiredCapabilities
from selenium.webdriver import Chrome, ChromeOptions # TODO: Combine these two dependencies. Leaving it for now since it touches too many sites atm.
from selenium.webdriver.chrome.options import Options
from selenium.webdriver.common.action_chains import ActionChains
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions as ec
from selenium.webdriver.support.wait import WebDriverWait
from utils import create_msg
import random, re, requests, string, threading
# https://github.com/Hari-Nagarajan/nvidia-bot/blob/master/utils/selenium_utils.py
options = Options()
options.add_experimental_option(
"excludeSwitches", ["enable-automation", "enable-logging"]
)
options.add_experimental_option("useAutomationExtension", False)
class AnyEc:
"""Use with WebDriverWait to combine expected_conditions
in an OR.
"""
def __init__(self, *args):
self.ecs = args
def __call__(self, driver):
for fn in self.ecs:
try:
if fn(driver):
return True
except:
pass
def no_amazon_image():
prefs = {"profile.managed_default_content_settings.images": 2}
options.add_experimental_option("prefs", prefs)
def yes_amazon_image():
prefs = {"profile.managed_default_content_settings.images": 0}
options.add_experimental_option("prefs", prefs)
def wait_for_element(d, e_id, time=30):
"""
Uses webdriver(d) to wait for page title(title) to become visible
"""
return WebDriverWait(d, time).until(ec.presence_of_element_located((By.ID, e_id)))
def wait_for_element_by_xpath(d, e_path, time=30):
return WebDriverWait(d, time).until(
ec.presence_of_element_located((By.XPATH, e_path))
)
def wait_for_element_by_class(d, e_class, time=30):
"""
Uses webdriver(d) to wait for page title(title) to become visible
"""
return WebDriverWait(d, time).until(
ec.presence_of_element_located((By.CLASS_NAME, e_class))
)
def wait_for_title(d, title, path):
"""
Uses webdriver(d) to navigate to get(path) until it equals title(title)
"""
while d.title != title:
d.get(path)
WebDriverWait(d, 1000)
def wait_for_page(d, title, time=30):
"""
Uses webdriver(d) to wait for page title(title) to become visible
"""
WebDriverWait(d, time).until(ec.title_is(title))
def wait_for_either_title(d, title1, title2, time=30):
"""
Uses webdriver(d) to wait for page title(title1 or title2) to become visible
"""
try:
WebDriverWait(d, time).until(AnyEc(ec.title_is(title1), ec.title_is(title2)))
except Exception:
pass
def wait_for_any_title(d, titles, time=30):
"""
Uses webdriver(d) to wait for page title(any in the list of titles) to become visible
"""
WebDriverWait(d, time).until(AnyEc(*[ec.title_is(title) for title in titles]))
def button_click_using_xpath(d, xpath):
"""
Uses webdriver(d) to click a button using an XPath(xpath)
"""
button_menu = WebDriverWait(d, 10).until(
ec.element_to_be_clickable((By.XPATH, xpath))
)
action = ActionChains(d)
action.move_to_element(button_menu).pause(1).click().perform()
def field_send_keys(d, field, keys):
"""
Uses webdriver(d) to fiend a field(field), clears it and sends keys(keys)
"""
elem = d.find_element_by_name(field)
elem.clear()
elem.send_keys(keys)
def has_class(element, class_name):
classes = element.get_attribute("class")
return class_name in classes
def add_cookies_to_session_from_driver(driver, session):
cookies = driver.get_cookies()
[
session.cookies.set_cookie(
requests.cookies.create_cookie(
domain=cookie["domain"],
name=cookie["name"],
value=cookie["value"],
)
)
for cookie in cookies
]
def enable_headless():
options.add_argument("--headless")
options.add_argument("--no-sandbox")
options.add_argument("--disable-dev-shm-usage")
# https://stackoverflow.com/questions/33225947/can-a-website-detect-when-you-are-using-selenium-with-chromedriver
def change_driver(status_signal, loc):
fin = open(loc, 'rb')
data = fin.read()
val = "$" + "".join(random.choices(string.ascii_lowercase, k=3)) + "_" + \
"".join(random.choices(string.ascii_letters + string.digits, k=22)) + "_"
result = re.search(b"[$][a-z]{3}_[a-zA-Z0-9]{22}_", data)
if result is not None:
status_signal.emit(create_msg("Changing value in Chromedriver", "normal"))
data = data.replace(result.group(0), val.encode())
fin.close()
fin = open(loc, 'wb')
fin.truncate()
fin.write(data)
fin.close()
else:
fin.close()
def open_browser(link, cookies):
threading.Thread(target=start_browser, args=(link, cookies)).start()
def start_browser(link, cookies):
caps = DesiredCapabilities().CHROME
caps["pageLoadStrategy"] = "eager"
chrome_options = ChromeOptions()
chrome_options.add_experimental_option("excludeSwitches", ["enable-automation"])
chrome_options.add_experimental_option("useAutomationExtension", False)
driver = Chrome(desired_capabilities=caps, executable_path=driver_path, options=chrome_options)
driver.execute_cdp_cmd(
"Page.addScriptToEvaluateOnNewDocument",
{
"source": """
Object.defineProperty(window, 'navigator', {
value: new Proxy(navigator, {
has: (target, key) => (key === 'webdriver' ? false : key in target),
get: (target, key) =>
key === 'webdriver'
? undefined
: typeof target[key] === 'function'
? target[key].bind(target)
: target[key]
})
})
"""
},
)
driver.get(link)
for cookie in cookies:
driver.add_cookie({
"name": cookie["name"],
"value": cookie["value"],
"domain": cookie["domain"]
})
driver.get(link)
|
[
"selenium.webdriver.support.expected_conditions.presence_of_element_located",
"selenium.webdriver.chrome.options.Options",
"threading.Thread",
"selenium.webdriver.support.expected_conditions.element_to_be_clickable",
"selenium.webdriver.common.action_chains.ActionChains",
"random.choices",
"requests.cookies.create_cookie",
"selenium.webdriver.support.wait.WebDriverWait",
"selenium.webdriver.support.expected_conditions.title_is",
"selenium.webdriver.ChromeOptions",
"selenium.webdriver.Chrome",
"utils.create_msg",
"selenium.webdriver.DesiredCapabilities",
"re.search"
] |
[((712, 721), 'selenium.webdriver.chrome.options.Options', 'Options', ([], {}), '()\n', (719, 721), False, 'from selenium.webdriver.chrome.options import Options\n'), ((3282, 3297), 'selenium.webdriver.common.action_chains.ActionChains', 'ActionChains', (['d'], {}), '(d)\n', (3294, 3297), False, 'from selenium.webdriver.common.action_chains import ActionChains\n'), ((4595, 4643), 're.search', 're.search', (["b'[$][a-z]{3}_[a-zA-Z0-9]{22}_'", 'data'], {}), "(b'[$][a-z]{3}_[a-zA-Z0-9]{22}_', data)\n", (4604, 4643), False, 'import random, re, requests, string, threading\n'), ((5205, 5220), 'selenium.webdriver.ChromeOptions', 'ChromeOptions', ([], {}), '()\n', (5218, 5220), False, 'from selenium.webdriver import Chrome, ChromeOptions\n'), ((5395, 5486), 'selenium.webdriver.Chrome', 'Chrome', ([], {'desired_capabilities': 'caps', 'executable_path': 'driver_path', 'options': 'chrome_options'}), '(desired_capabilities=caps, executable_path=driver_path, options=\n chrome_options)\n', (5401, 5486), False, 'from selenium.webdriver import Chrome, ChromeOptions\n'), ((1678, 1723), 'selenium.webdriver.support.expected_conditions.presence_of_element_located', 'ec.presence_of_element_located', (['(By.ID, e_id)'], {}), '((By.ID, e_id))\n', (1708, 1723), True, 'from selenium.webdriver.support import expected_conditions as ec\n'), ((1827, 1877), 'selenium.webdriver.support.expected_conditions.presence_of_element_located', 'ec.presence_of_element_located', (['(By.XPATH, e_path)'], {}), '((By.XPATH, e_path))\n', (1857, 1877), True, 'from selenium.webdriver.support import expected_conditions as ec\n'), ((2073, 2129), 'selenium.webdriver.support.expected_conditions.presence_of_element_located', 'ec.presence_of_element_located', (['(By.CLASS_NAME, e_class)'], {}), '((By.CLASS_NAME, e_class))\n', (2103, 2129), True, 'from selenium.webdriver.support import expected_conditions as ec\n'), ((2322, 2344), 'selenium.webdriver.support.wait.WebDriverWait', 'WebDriverWait', (['d', '(1000)'], {}), 
'(d, 1000)\n', (2335, 2344), False, 'from selenium.webdriver.support.wait import WebDriverWait\n'), ((2504, 2522), 'selenium.webdriver.support.expected_conditions.title_is', 'ec.title_is', (['title'], {}), '(title)\n', (2515, 2522), True, 'from selenium.webdriver.support import expected_conditions as ec\n'), ((3217, 3262), 'selenium.webdriver.support.expected_conditions.element_to_be_clickable', 'ec.element_to_be_clickable', (['(By.XPATH, xpath)'], {}), '((By.XPATH, xpath))\n', (3243, 3262), True, 'from selenium.webdriver.support import expected_conditions as ec\n'), ((5116, 5137), 'selenium.webdriver.DesiredCapabilities', 'DesiredCapabilities', ([], {}), '()\n', (5135, 5137), False, 'from selenium.webdriver import DesiredCapabilities\n'), ((1649, 1671), 'selenium.webdriver.support.wait.WebDriverWait', 'WebDriverWait', (['d', 'time'], {}), '(d, time)\n', (1662, 1671), False, 'from selenium.webdriver.support.wait import WebDriverWait\n'), ((1789, 1811), 'selenium.webdriver.support.wait.WebDriverWait', 'WebDriverWait', (['d', 'time'], {}), '(d, time)\n', (1802, 1811), False, 'from selenium.webdriver.support.wait import WebDriverWait\n'), ((2035, 2057), 'selenium.webdriver.support.wait.WebDriverWait', 'WebDriverWait', (['d', 'time'], {}), '(d, time)\n', (2048, 2057), False, 'from selenium.webdriver.support.wait import WebDriverWait\n'), ((2475, 2497), 'selenium.webdriver.support.wait.WebDriverWait', 'WebDriverWait', (['d', 'time'], {}), '(d, time)\n', (2488, 2497), False, 'from selenium.webdriver.support.wait import WebDriverWait\n'), ((2964, 2986), 'selenium.webdriver.support.wait.WebDriverWait', 'WebDriverWait', (['d', 'time'], {}), '(d, time)\n', (2977, 2986), False, 'from selenium.webdriver.support.wait import WebDriverWait\n'), ((3181, 3201), 'selenium.webdriver.support.wait.WebDriverWait', 'WebDriverWait', (['d', '(10)'], {}), '(d, 10)\n', (3194, 3201), False, 'from selenium.webdriver.support.wait import WebDriverWait\n'), ((3847, 3950), 
'requests.cookies.create_cookie', 'requests.cookies.create_cookie', ([], {'domain': "cookie['domain']", 'name': "cookie['name']", 'value': "cookie['value']"}), "(domain=cookie['domain'], name=cookie['name'],\n value=cookie['value'])\n", (3877, 3950), False, 'import random, re, requests, string, threading\n'), ((4699, 4753), 'utils.create_msg', 'create_msg', (['"""Changing value in Chromedriver"""', '"""normal"""'], {}), "('Changing value in Chromedriver', 'normal')\n", (4709, 4753), False, 'from utils import create_msg\n'), ((5000, 5060), 'threading.Thread', 'threading.Thread', ([], {'target': 'start_browser', 'args': '(link, cookies)'}), '(target=start_browser, args=(link, cookies))\n', (5016, 5060), False, 'import random, re, requests, string, threading\n'), ((2695, 2717), 'selenium.webdriver.support.wait.WebDriverWait', 'WebDriverWait', (['d', 'time'], {}), '(d, time)\n', (2708, 2717), False, 'from selenium.webdriver.support.wait import WebDriverWait\n'), ((2730, 2749), 'selenium.webdriver.support.expected_conditions.title_is', 'ec.title_is', (['title1'], {}), '(title1)\n', (2741, 2749), True, 'from selenium.webdriver.support import expected_conditions as ec\n'), ((2751, 2770), 'selenium.webdriver.support.expected_conditions.title_is', 'ec.title_is', (['title2'], {}), '(title2)\n', (2762, 2770), True, 'from selenium.webdriver.support import expected_conditions as ec\n'), ((4515, 4573), 'random.choices', 'random.choices', (['(string.ascii_letters + string.digits)'], {'k': '(22)'}), '(string.ascii_letters + string.digits, k=22)\n', (4529, 4573), False, 'import random, re, requests, string, threading\n'), ((3001, 3019), 'selenium.webdriver.support.expected_conditions.title_is', 'ec.title_is', (['title'], {}), '(title)\n', (3012, 3019), True, 'from selenium.webdriver.support import expected_conditions as ec\n'), ((4442, 4485), 'random.choices', 'random.choices', (['string.ascii_lowercase'], {'k': '(3)'}), '(string.ascii_lowercase, k=3)\n', (4456, 4485), False, 
'import random, re, requests, string, threading\n')]
|
"""add ingredient availability table
Revision ID: f0ddbf9cdd26
Revises: 7cf38c4ce08a
Create Date: 2019-06-28 21:34:49.780023
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'f0ddbf9cdd26'
down_revision = '<PASSWORD>'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('INGREDIENT_AVAILABILITY',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('ingredient_id', sa.Integer(), nullable=False),
sa.Column('month', sa.Integer(), nullable=False),
sa.Column('availability', sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(['ingredient_id'], ['INGREDIENT.id'], ),
sa.PrimaryKeyConstraint('id')
)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table('INGREDIENT_AVAILABILITY')
# ### end Alembic commands ###
|
[
"alembic.op.drop_table",
"sqlalchemy.Integer",
"sqlalchemy.PrimaryKeyConstraint",
"sqlalchemy.ForeignKeyConstraint"
] |
[((906, 946), 'alembic.op.drop_table', 'op.drop_table', (['"""INGREDIENT_AVAILABILITY"""'], {}), "('INGREDIENT_AVAILABILITY')\n", (919, 946), False, 'from alembic import op\n'), ((677, 738), 'sqlalchemy.ForeignKeyConstraint', 'sa.ForeignKeyConstraint', (["['ingredient_id']", "['INGREDIENT.id']"], {}), "(['ingredient_id'], ['INGREDIENT.id'])\n", (700, 738), True, 'import sqlalchemy as sa\n'), ((746, 775), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', (['"""id"""'], {}), "('id')\n", (769, 775), True, 'import sqlalchemy as sa\n'), ((465, 477), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (475, 477), True, 'import sqlalchemy as sa\n'), ((527, 539), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (537, 539), True, 'import sqlalchemy as sa\n'), ((581, 593), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (591, 593), True, 'import sqlalchemy as sa\n'), ((642, 654), 'sqlalchemy.Integer', 'sa.Integer', ([], {}), '()\n', (652, 654), True, 'import sqlalchemy as sa\n')]
|
#imports
import Cluster
import math
class Leader():
def __init__(self, i, height, leader=None):
# represents local level id
self.identity = i
# Leader of this leader (if any)
self.leader = leader
# height of this leader in the tiers
self.height = height
# contains cluster for leader;
# either a specific cluster or multiple other subleaders
self.cluster = []
# T/F value to signify if this leader has a Cluster or not
self.represents_cluster = False
# Max capacity for leader - relevant when creating subleads;
# '1' for leaders representing a cluster
self.max_capacity = 1
#Current size for leader;
# '1' for leaders representing a cluster
# > 1 for leaders representing other sub-leaders
self.size = 1
def sub_clustering(self, identity, per_lead, target_leader_size, hi_pct, height, l):
tmp_leads = []
tmp_clusters = []
if l > 1:
# set max capacity to target_size + the added extra 'Hi' percent
self.max_capacity = math.ceil((target_leader_size/100)*(100+hi_pct))
# set size to target
self.size = per_lead
for i in range(per_lead):
leader = Leader(identity, height, leader=self)
identity += 1
self.cluster.append(leader)
tmp_leads.append(leader)
leads, clusters, c_id = leader.sub_clustering(identity, per_lead, target_leader_size, hi_pct, height-1, (l-1))
identity = c_id
tmp_leads.extend(leads)
tmp_clusters.extend(clusters)
return tmp_leads, tmp_clusters, identity
else:
self.represents_cluster = True
cluster = Cluster.Cluster(leader=self)
self.cluster.append(cluster)
return tmp_leads, self.cluster, identity
# Used on cluster creation time to eliminate extra leaders.
# Can fail badly if used incorrectly.
def elim_leader(self, i):
#find index of said leader
idx = [ix for ix, x in enumerate(self.cluster) if x.identity == i]
#remove him
del self.cluster[idx[0]]
self.size = self.size -1
# Used to insert into the Cluster from this leader
def insert_into_cluster(self):
if self.represents_cluster == True:
self.cluster[0].insert()
else:
raise RuntimeError("Tried calling 'insert_into_cluster' on a leader which does not represent a Cluster")
def __str__(self):
return "Leader: " + \
"Cluster: " + str(self.cluster)
|
[
"Cluster.Cluster",
"math.ceil"
] |
[((1162, 1214), 'math.ceil', 'math.ceil', (['(target_leader_size / 100 * (100 + hi_pct))'], {}), '(target_leader_size / 100 * (100 + hi_pct))\n', (1171, 1214), False, 'import math\n'), ((1895, 1923), 'Cluster.Cluster', 'Cluster.Cluster', ([], {'leader': 'self'}), '(leader=self)\n', (1910, 1923), False, 'import Cluster\n')]
|
from distutils.core import setup
from distutils.extension import Extension
from Cython.Distutils import build_ext
setup(
name="ricecomp-cfitsio",
version="1.0",
description="Rice compression and decompression for Python.",
long_description="Rice comression and decompression using the routines in the cfitsio library.",
author="<NAME>",
author_email="<EMAIL>",
url="https://github.com/tclarke/ricecomp-cfitsio-python.git",
license="BSD License",
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: System :: Archiving :: Compression',
],
keywords = ('compression','rice','lossless','module'),
requires = ["Cython (>=0.2)","numpy (>=1.7.0)"],
cmdclass = {'build_ext':build_ext},
ext_modules = [Extension("ricecomp", ["ricecomp.pyx"],
libraries=["cfitsio"])]
)
|
[
"distutils.extension.Extension"
] |
[((1028, 1090), 'distutils.extension.Extension', 'Extension', (['"""ricecomp"""', "['ricecomp.pyx']"], {'libraries': "['cfitsio']"}), "('ricecomp', ['ricecomp.pyx'], libraries=['cfitsio'])\n", (1037, 1090), False, 'from distutils.extension import Extension\n')]
|
import os
import os.path as osp
import gym
import time
import datetime
import joblib
import logging
import numpy as np
import tensorflow as tf
from baselines import logger
from baselines.common import set_global_seeds, explained_variance
from baselines.common.vec_env.subproc_vec_env import SubprocVecEnv
from baselines.common.atari_wrappers import wrap_deepmind
from baselines.common import tf_util
from baselines.a2c.utils import discount_with_dones
from baselines.a2c.utils import Scheduler, make_path, find_trainable_variables
from baselines.a2c.utils import cat_entropy, mse
class Model(object):
def __init__(self, policy, ob_space, ac_space, nenvs, nsteps,
ent_coef=0.01, vf_coef=0.5, max_grad_norm=0.5, lr=7e-4,
alpha=0.99, epsilon=1e-5, total_timesteps=int(80e6), lrschedule='linear'):
sess = tf_util.make_session()
nact = ac_space.n
nbatch = nenvs*nsteps
A = tf.placeholder(tf.int32, [nbatch])
ADV = tf.placeholder(tf.float32, [nbatch])
R = tf.placeholder(tf.float32, [nbatch])
LR = tf.placeholder(tf.float32, [])
# Defines step_model function and train_model functions
# Pass each model a copy of 'sess'
print("Constructing model... STEP_MODEL & TRAIN_MODEL: constructing step_model policy | " + str(policy))
step_model = policy(sess, ob_space, ac_space, nenvs, 1, reuse=False)
# train_model takes in the mini-batch produced by 5 step_models, NOTE: reuse = true
train_model = policy(sess, ob_space, ac_space, nenvs*nsteps, nsteps, reuse=True)
# var init: this neglogpac is still somewhat unknown,
# looks like it does softmax over policy layer of training model
neglogpac = tf.nn.sparse_softmax_cross_entropy_with_logits(logits=train_model.pi, labels=A)
print("MAIN: neglocpac = sparse_softmax_cross_entropy_with_logits() inputs: ")
print("MAIN: train_model_pi: " + str(train_model.pi))
print("MAIN: labels: " + str(A))
# var init: policy gradient loss determined by average of all advantage * neglogpac
pg_loss = tf.reduce_mean(ADV * neglogpac)
# value function loss is mse(tf.squeeze(train_model.vf), R)
# ^ in english, mse(model value prediction, actual Reward)
# mse == means squared error, defined in a2c/utils.py
vf_loss = tf.reduce_mean(mse(tf.squeeze(train_model.vf), R))
# entropy of policy
entropy = tf.reduce_mean(cat_entropy(train_model.pi))
# total loss calculation?
# todo: is this the loss function definition??? check with a3c paper
loss = pg_loss - entropy*ent_coef + vf_loss * vf_coef
# params gets trainable variables from model (weights of network?)
params = find_trainable_variables("model")
# computes gradients (change of weights, or direction of weights) using 'loss' and 'params' above
# computes 'symbolic derivatives of sum 'loss' w.r.t 'params'
# from tflow docs: 'gradients() adds ops to the graph to output the derivs of 'params'
grads = tf.gradients(loss, params)
if max_grad_norm is not None:
grads, grad_norm = tf.clip_by_global_norm(grads, max_grad_norm)
# TODO: how many gradients are computed here, should be 16
grads = list(zip(grads, params))
# RMSProp optimizes learning rate , check thesis notes
trainer = tf.train.RMSPropOptimizer(learning_rate=LR, decay=alpha, epsilon=epsilon)
# RMSProp pushes back new gradients over trainable variables to change weights
_train = trainer.apply_gradients(grads)
lr = Scheduler(v=lr, nvalues=total_timesteps, schedule=lrschedule)
writer = tf.summary.FileWriter("/tmp/helloTensorBoard.txt")
writer.add_graph(sess.graph)
# Trains the model,
# TODO: What is 'masks' input param
# TODO: How often does train_model (steps thru train_model) get run vs. step_model
# A: I think it does a 'train_model' for each mini-batch, which is currently 5 steps
# Does a sess.run with train_model
def train(obs, states, rewards, masks, actions, values):
advs = rewards - values
for step in range(len(obs)):
cur_lr = lr.value()
# td_map hooks up all inputs for train model?
td_map = {train_model.X:obs, A:actions, ADV:advs, R:rewards, LR:cur_lr}
if states is not None:
td_map[train_model.S] = states
td_map[train_model.M] = masks
# Policy Loss, Value Loss, and Policy Entropy calculations
# Propagates losses backwards through the neural network?
policy_loss, value_loss, policy_entropy, _ = sess.run(
[pg_loss, vf_loss, entropy, _train],
td_map
)
return policy_loss, value_loss, policy_entropy
def save(save_path):
path = logger.get_dir() + "/model.pkl"
print("Logger dir: " + logger.get_dir())
print("MODEL SAVED TO : " + str(path))
ps = sess.run(params)
#make_path(osp.dirname(save_path))
joblib.dump(ps, path)
def load(load_path):
loaded_params = joblib.load(load_path)
restores = []
for p, loaded_p in zip(params, loaded_params):
restores.append(p.assign(loaded_p))
ps = sess.run(restores)
self.train = train
self.train_model = train_model
self.step_model = step_model
self.step = step_model.step
self.value = step_model.value
self.initial_state = step_model.initial_state
self.save = save
self.load = load
tf.global_variables_initializer().run(session=sess)
class Runner(object):
    """Collects `nsteps`-long rollouts from a vectorized environment.

    Each call to run() steps the model for `nsteps` actions in every
    environment and returns flattened mini-batch arrays ready for
    Model.train().
    """
    def __init__(self, env, model, nsteps=5, gamma=0.99):
        """
        env    : vectorized environment (exposes num_envs, observation_space,
                 reset() and step()).
        model  : policy model providing step(), value() and initial_state.
        nsteps : rollout length per environment between updates.
        gamma  : discount factor for bootstrapped returns.
        """
        self.env = env
        self.model = model
        nh, nw, nc = env.observation_space.shape
        nenv = env.num_envs
        # Shape of one flattened mini-batch of observations.
        self.batch_ob_shape = (nenv*nsteps, nh, nw, nc)
        self.obs = np.zeros((nenv, nh, nw, nc), dtype=np.uint8)
        self.nc = nc
        # NOTE(review): the reset observations are discarded, so the first
        # rollout starts from all-zero observations -- confirm intended.
        obs = env.reset()
        self.gamma = gamma
        self.nsteps = nsteps
        self.states = model.initial_state
        self.dones = [False for _ in range(nenv)]
    def run(self):
        """Roll out nsteps actions per env; return flattened batch arrays.

        Returns (obs, states, rewards, masks, actions, values), each
        flattened over the (env, step) axes.
        """
        mb_obs, mb_rewards, mb_actions, mb_values, mb_dones = [], [], [], [], []
        mb_states = self.states
        # Step the model (no learning here) through each environment.
        for n in range(self.nsteps):
            actions, values, states, _ = self.model.step(self.obs, self.states, self.dones)
            print("#######************###### a2c::::: run() iter: " + str(n))
            print("action(s): " + str(actions))
            print("values(s): " + str(values))
            # Record what the model saw and predicted before acting.
            mb_obs.append(np.copy(self.obs))
            mb_actions.append(actions)
            mb_values.append(values)
            mb_dones.append(self.dones)
            # Execute the predicted actions in the environment.
            obs, rewards, dones, _ = self.env.step(actions)
            print("a2c::::: run(): rewards: " + str(rewards))
            self.states = states
            self.dones = dones
            for n, done in enumerate(dones):
                if done:
                    self.obs[n] = self.obs[n]*0
            self.obs = obs
            mb_rewards.append(rewards)
            mb_dones.append(self.dones)
        # Batch of steps -> batch of rollouts: stack, env axis first, flatten.
        mb_obs = np.asarray(mb_obs, dtype=np.uint8).swapaxes(1, 0).reshape(self.batch_ob_shape)
        mb_rewards = np.asarray(mb_rewards, dtype=np.float32).swapaxes(1, 0)
        mb_actions = np.asarray(mb_actions, dtype=np.int32).swapaxes(1, 0)
        mb_values = np.asarray(mb_values, dtype=np.float32).swapaxes(1, 0)
        # BUGFIX: np.bool (a deprecated alias of the builtin) was removed in
        # NumPy 1.24; plain `bool` is the documented replacement.
        mb_dones = np.asarray(mb_dones, dtype=bool).swapaxes(1, 0)
        mb_masks = mb_dones[:, :-1]
        mb_dones = mb_dones[:, 1:]
        last_values = self.model.value(self.obs, self.states, self.dones).tolist()
        # Discount / bootstrap off the value function.
        for n, (rewards, dones, value) in enumerate(zip(mb_rewards, mb_dones, last_values)):
            rewards = rewards.tolist()
            dones = dones.tolist()
            if dones[-1] == 0:
                # Episode still running: bootstrap the tail with the critic.
                rewards = discount_with_dones(rewards+[value], dones+[0], self.gamma)[:-1]
            else:
                rewards = discount_with_dones(rewards, dones, self.gamma)
            mb_rewards[n] = rewards
        mb_rewards = mb_rewards.flatten()
        mb_actions = mb_actions.flatten()
        mb_values = mb_values.flatten()
        mb_masks = mb_masks.flatten()
        return mb_obs, mb_states, mb_rewards, mb_masks, mb_actions, mb_values
def learn(policy, env, seed, nsteps=5, total_timesteps=int(80e6), vf_coef=0.5, ent_coef=0.01, max_grad_norm=0.5, lr=7e-4, lrschedule='linear', epsilon=1e-5, alpha=0.99, gamma=0.99, log_interval=100):
    """Top-level A2C training loop.

    Builds a Model and a Runner, then alternates rollout collection and
    gradient updates for total_timesteps // (nenvs * nsteps) iterations,
    logging every `log_interval` updates and saving whenever the logged
    average reward improves.
    """
    tf.reset_default_graph()
    set_global_seeds(seed)
    nenvs = env.num_envs
    print('rockin ' + str(nenvs))
    ob_space = env.observation_space
    ac_space = env.action_space
    print('observation space: ' + str(ob_space))
    print('action space: ' + str(ac_space))
    # Initializes model with all arguments obtained from run_atari
    # Model DOES NOT GET the env stack object
    model = Model(policy=policy, ob_space=ob_space, ac_space=ac_space, nenvs=nenvs, nsteps=nsteps, ent_coef=ent_coef, vf_coef=vf_coef,
        max_grad_norm=max_grad_norm, lr=lr, alpha=alpha, epsilon=epsilon, total_timesteps=total_timesteps, lrschedule=lrschedule)
    # Intializes a runner using the above model, an environment, and nsteps to run '5'
    # env is the VectorFrameStack object created in run_atari, holds 16 environments
    # Runner DOES GET the env stack object
    # Runner DOES get the model, which lacks the env stack object
    runner = Runner(env, model, nsteps=nsteps, gamma=gamma)
    # NOTE(review): `file` shadows the builtin and is never closed if an
    # exception escapes the loop -- consider `with` / try-finally.
    file = open("testOutput.txt", "w")
    file.write(str(datetime.datetime.now()))
    nbatch = nenvs*nsteps
    tstart = time.time()
    maxAvgReward = 0
    # Todo: Figure out how frequently this is: loop 1 to 137,501
    for update in range(1, total_timesteps//nbatch+1):
        # print("__________ LEARN control loop: " + str(update) + " ------> " + str(total_timesteps//nbatch+1))
        # runner.run(), steps model, returns observations, states, rewards, masks, actions, values for all agents?
        obs, states, rewards, masks, actions, values = runner.run()
        # 80 observations, 16 envs * 5 steps
        # print("LEARNING FROM: len(obs): " + str(len(obs)))
        # Printing states: TypeError: object of type 'NoneType' has no len()
        #print("len(states): " + str(len(states)))
        # print("LEARNING FROM: len(rewards): " + str(len(rewards)))
        # model.train(), trains model, takes all that above data, processes it through train_model
        policy_loss, value_loss, policy_entropy = model.train(obs, states, rewards, masks, actions, values)
        nseconds = time.time()-tstart
        fps = int((update*nbatch)/nseconds)
        if update % log_interval == 0 or update == 1:
            # Average the discounted rewards of this batch for logging.
            avgReward = 0
            rewardCount = 0
            for reward in rewards:
                # Prints 80 reward values? (5 training steps * 16 nenvs) = 80 reward values
                print("a2c::::: learn() reward(s): " + str(reward))
                avgReward += reward
                rewardCount += 1
            avgReward = avgReward / rewardCount
            ev = explained_variance(values, rewards)
            logger.record_tabular("nupdates", update)
            logger.record_tabular("total_timesteps", update*nbatch)
            logger.record_tabular("fps", fps)
            logger.record_tabular("policy_entropy", float(policy_entropy))
            logger.record_tabular("value_loss", float(value_loss))
            logger.record_tabular("avgReward", float(avgReward))
            logger.record_tabular("explained_variance", float(ev))
            logger.dump_tabular()
        # If avg reward of this batch is greater than previous avg reward, save model
        # NOTE(review): between logging intervals this compares the stale
        # avgReward computed at the last logged update (defined at update==1,
        # so no NameError); and save() ignores its "modelName" argument --
        # confirm both are intended.
        if avgReward > maxAvgReward:
            logger.log("Saving model due to mean reward increase: {} -> {}".format(
                maxAvgReward, avgReward))
            # Save model
            model.save("modelName")
            # Set prevAvgReward = avgReward
            maxAvgReward = avgReward
    file.close()
    env.close()
|
[
"baselines.a2c.utils.Scheduler",
"tensorflow.reset_default_graph",
"tensorflow.train.RMSPropOptimizer",
"joblib.dump",
"baselines.a2c.utils.find_trainable_variables",
"tensorflow.clip_by_global_norm",
"numpy.copy",
"tensorflow.placeholder",
"tensorflow.summary.FileWriter",
"tensorflow.squeeze",
"tensorflow.gradients",
"baselines.a2c.utils.discount_with_dones",
"datetime.datetime.now",
"baselines.a2c.utils.cat_entropy",
"tensorflow.global_variables_initializer",
"numpy.asarray",
"tensorflow.reduce_mean",
"baselines.logger.dump_tabular",
"baselines.logger.record_tabular",
"baselines.common.tf_util.make_session",
"baselines.logger.get_dir",
"numpy.zeros",
"baselines.common.set_global_seeds",
"time.time",
"baselines.common.explained_variance",
"joblib.load",
"tensorflow.nn.sparse_softmax_cross_entropy_with_logits"
] |
[((9842, 9866), 'tensorflow.reset_default_graph', 'tf.reset_default_graph', ([], {}), '()\n', (9864, 9866), True, 'import tensorflow as tf\n'), ((9871, 9893), 'baselines.common.set_global_seeds', 'set_global_seeds', (['seed'], {}), '(seed)\n', (9887, 9893), False, 'from baselines.common import set_global_seeds, explained_variance\n'), ((10969, 10980), 'time.time', 'time.time', ([], {}), '()\n', (10978, 10980), False, 'import time\n'), ((842, 864), 'baselines.common.tf_util.make_session', 'tf_util.make_session', ([], {}), '()\n', (862, 864), False, 'from baselines.common import tf_util\n'), ((934, 968), 'tensorflow.placeholder', 'tf.placeholder', (['tf.int32', '[nbatch]'], {}), '(tf.int32, [nbatch])\n', (948, 968), True, 'import tensorflow as tf\n'), ((983, 1019), 'tensorflow.placeholder', 'tf.placeholder', (['tf.float32', '[nbatch]'], {}), '(tf.float32, [nbatch])\n', (997, 1019), True, 'import tensorflow as tf\n'), ((1032, 1068), 'tensorflow.placeholder', 'tf.placeholder', (['tf.float32', '[nbatch]'], {}), '(tf.float32, [nbatch])\n', (1046, 1068), True, 'import tensorflow as tf\n'), ((1082, 1112), 'tensorflow.placeholder', 'tf.placeholder', (['tf.float32', '[]'], {}), '(tf.float32, [])\n', (1096, 1112), True, 'import tensorflow as tf\n'), ((1749, 1828), 'tensorflow.nn.sparse_softmax_cross_entropy_with_logits', 'tf.nn.sparse_softmax_cross_entropy_with_logits', ([], {'logits': 'train_model.pi', 'labels': 'A'}), '(logits=train_model.pi, labels=A)\n', (1795, 1828), True, 'import tensorflow as tf\n'), ((2130, 2161), 'tensorflow.reduce_mean', 'tf.reduce_mean', (['(ADV * neglogpac)'], {}), '(ADV * neglogpac)\n', (2144, 2161), True, 'import tensorflow as tf\n'), ((2788, 2821), 'baselines.a2c.utils.find_trainable_variables', 'find_trainable_variables', (['"""model"""'], {}), "('model')\n", (2812, 2821), False, 'from baselines.a2c.utils import Scheduler, make_path, find_trainable_variables\n'), ((3110, 3136), 'tensorflow.gradients', 'tf.gradients', (['loss', 'params'], {}), 
'(loss, params)\n', (3122, 3136), True, 'import tensorflow as tf\n'), ((3441, 3514), 'tensorflow.train.RMSPropOptimizer', 'tf.train.RMSPropOptimizer', ([], {'learning_rate': 'LR', 'decay': 'alpha', 'epsilon': 'epsilon'}), '(learning_rate=LR, decay=alpha, epsilon=epsilon)\n', (3466, 3514), True, 'import tensorflow as tf\n'), ((3664, 3725), 'baselines.a2c.utils.Scheduler', 'Scheduler', ([], {'v': 'lr', 'nvalues': 'total_timesteps', 'schedule': 'lrschedule'}), '(v=lr, nvalues=total_timesteps, schedule=lrschedule)\n', (3673, 3725), False, 'from baselines.a2c.utils import Scheduler, make_path, find_trainable_variables\n'), ((3745, 3795), 'tensorflow.summary.FileWriter', 'tf.summary.FileWriter', (['"""/tmp/helloTensorBoard.txt"""'], {}), "('/tmp/helloTensorBoard.txt')\n", (3766, 3795), True, 'import tensorflow as tf\n'), ((6198, 6242), 'numpy.zeros', 'np.zeros', (['(nenv, nh, nw, nc)'], {'dtype': 'np.uint8'}), '((nenv, nh, nw, nc), dtype=np.uint8)\n', (6206, 6242), True, 'import numpy as np\n'), ((2491, 2518), 'baselines.a2c.utils.cat_entropy', 'cat_entropy', (['train_model.pi'], {}), '(train_model.pi)\n', (2502, 2518), False, 'from baselines.a2c.utils import cat_entropy, mse\n'), ((3206, 3250), 'tensorflow.clip_by_global_norm', 'tf.clip_by_global_norm', (['grads', 'max_grad_norm'], {}), '(grads, max_grad_norm)\n', (3228, 3250), True, 'import tensorflow as tf\n'), ((5225, 5246), 'joblib.dump', 'joblib.dump', (['ps', 'path'], {}), '(ps, path)\n', (5236, 5246), False, 'import joblib\n'), ((5305, 5327), 'joblib.load', 'joblib.load', (['load_path'], {}), '(load_path)\n', (5316, 5327), False, 'import joblib\n'), ((10903, 10926), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (10924, 10926), False, 'import datetime\n'), ((11961, 11972), 'time.time', 'time.time', ([], {}), '()\n', (11970, 11972), False, 'import time\n'), ((12463, 12498), 'baselines.common.explained_variance', 'explained_variance', (['values', 'rewards'], {}), '(values, rewards)\n', (12481, 
12498), False, 'from baselines.common import set_global_seeds, explained_variance\n'), ((12512, 12553), 'baselines.logger.record_tabular', 'logger.record_tabular', (['"""nupdates"""', 'update'], {}), "('nupdates', update)\n", (12533, 12553), False, 'from baselines import logger\n'), ((12566, 12623), 'baselines.logger.record_tabular', 'logger.record_tabular', (['"""total_timesteps"""', '(update * nbatch)'], {}), "('total_timesteps', update * nbatch)\n", (12587, 12623), False, 'from baselines import logger\n'), ((12634, 12667), 'baselines.logger.record_tabular', 'logger.record_tabular', (['"""fps"""', 'fps'], {}), "('fps', fps)\n", (12655, 12667), False, 'from baselines import logger\n'), ((12955, 12976), 'baselines.logger.dump_tabular', 'logger.dump_tabular', ([], {}), '()\n', (12974, 12976), False, 'from baselines import logger\n'), ((2397, 2423), 'tensorflow.squeeze', 'tf.squeeze', (['train_model.vf'], {}), '(train_model.vf)\n', (2407, 2423), True, 'import tensorflow as tf\n'), ((4994, 5010), 'baselines.logger.get_dir', 'logger.get_dir', ([], {}), '()\n', (5008, 5010), False, 'from baselines import logger\n'), ((5791, 5824), 'tensorflow.global_variables_initializer', 'tf.global_variables_initializer', ([], {}), '()\n', (5822, 5824), True, 'import tensorflow as tf\n'), ((7258, 7275), 'numpy.copy', 'np.copy', (['self.obs'], {}), '(self.obs)\n', (7265, 7275), True, 'import numpy as np\n'), ((8281, 8321), 'numpy.asarray', 'np.asarray', (['mb_rewards'], {'dtype': 'np.float32'}), '(mb_rewards, dtype=np.float32)\n', (8291, 8321), True, 'import numpy as np\n'), ((8358, 8396), 'numpy.asarray', 'np.asarray', (['mb_actions'], {'dtype': 'np.int32'}), '(mb_actions, dtype=np.int32)\n', (8368, 8396), True, 'import numpy as np\n'), ((8432, 8471), 'numpy.asarray', 'np.asarray', (['mb_values'], {'dtype': 'np.float32'}), '(mb_values, dtype=np.float32)\n', (8442, 8471), True, 'import numpy as np\n'), ((8506, 8541), 'numpy.asarray', 'np.asarray', (['mb_dones'], {'dtype': 'np.bool'}), 
'(mb_dones, dtype=np.bool)\n', (8516, 8541), True, 'import numpy as np\n'), ((9205, 9252), 'baselines.a2c.utils.discount_with_dones', 'discount_with_dones', (['rewards', 'dones', 'self.gamma'], {}), '(rewards, dones, self.gamma)\n', (9224, 9252), False, 'from baselines.a2c.utils import discount_with_dones\n'), ((5062, 5078), 'baselines.logger.get_dir', 'logger.get_dir', ([], {}), '()\n', (5076, 5078), False, 'from baselines import logger\n'), ((9096, 9159), 'baselines.a2c.utils.discount_with_dones', 'discount_with_dones', (['(rewards + [value])', '(dones + [0])', 'self.gamma'], {}), '(rewards + [value], dones + [0], self.gamma)\n', (9115, 9159), False, 'from baselines.a2c.utils import discount_with_dones\n'), ((8181, 8215), 'numpy.asarray', 'np.asarray', (['mb_obs'], {'dtype': 'np.uint8'}), '(mb_obs, dtype=np.uint8)\n', (8191, 8215), True, 'import numpy as np\n')]
|
# -*- coding: utf-8 -*-
# BioSTEAM: The Biorefinery Simulation and Techno-Economic Analysis Modules
# Copyright (C) 2020, <NAME> <<EMAIL>>
#
# This module is under the UIUC open-source license. See
# github.com/BioSTEAMDevelopmentGroup/biosteam/blob/master/LICENSE.txt
# for license details.
"""
"""
import numpy as np
import biosteam as bst
from .. import Unit
from ._mixer import Mixer
from ._hx import HXutility
from ._flash import Evaporator_PV, Evaporator_PQ
from .design_tools import (
compute_vacuum_system_power_and_cost,
compute_heat_transfer_area
)
from thermosteam import MultiStream, Stream, settings
import flexsolve as flx
from warnings import warn
from .design_tools import heat_transfer as ht
__all__ = ('MultiEffectEvaporator',)
# Local aliases keep the cost correlations below readable.
log = np.log
exp = np.exp
# Table 22.32 Product process and design (pg 592)
# Name: ('Area range (m2)', 'Cost(A) (USD)', 'U (kJ/(hr*m2*K)))', 'Material')
# Each entry maps an evaporator type to (valid area range, purchase-cost
# correlation f(A, CE), overall heat-transfer coefficient U, material).
evaporators = {'Horizontal tube':
                    ((9.29, 743.224),
                     lambda A, CE: CE*2.304*A**0.53,
                     4906.02,
                     'Carbon steel'),
               'Long-tube vertical':
                    ((9.29, 743.224),
                     lambda A, CE: CE*3.086*A**0.55,
                     8176.699,
                     'Carbon steel'),
               'Forced circulation':
                    ((13.935, 8000),
                     lambda A, CE: CE/500*exp(8.2986 + 0.5329*log(A*0.0929)-0.000196*log(A*0.0929)**2),
                     10731.918,
                     'Carbon steel'),
               'Falling film':
                    ((13.935, 371.612),
                     lambda A, CE: CE*7.416*A**0.55,
                     10220.874,
                     'Stainless steel tubes/Carbon steel shell')}
class MultiEffectEvaporator(Unit):
    """
    Creates evaporators with pressures given by P (a list of pressures).
    Adjusts first evaporator vapor fraction to satisfy an overall fraction
    evaporated. All evaporators after the first have zero duty. Condenses
    the vapor coming out of the last evaporator. Pumps all liquid streams
    to prevent back flow in later parts. All liquid evaporated is ultimately
    recondensed. Cost is based on required heat transfer area. Vacuum system
    is based on air leakage. Air leakage is based on volume, as given by
    residence time `tau` and flow rate to each evaporator.
    Parameters
    ----------
    ins : stream
        Inlet.
    outs : stream sequence
        * [0] Solid-rich stream.
        * [1] Condensate stream.
    component : str
        Component being evaporated.
    P : tuple[float]
        Pressures describing each evaporator (Pa).
    V : float
        Overall molar fraction of component evaporated.
    P_liq : tuple
        Liquid pressure after pumping (Pa).
    """
    line = 'Multi-Effect Evaporator'
    # Units reported in design_results.
    _units = {'Area': 'm^2',
              'Volume': 'm^3'}
    # Bare-module factors applied to purchase costs.
    _BM = {'Evaporators': 2.45,
           'Liquid-ring pump': 1.0,
           'Condenser': 3.17}
    _N_outs = 2
    _N_heat_utilities = 2
    #: Residence time (hr)
    tau = 0.30
    # Evaporator type
    _Type = 'Forced circulation'
    # Data for simulation and costing
    _evap_data = evaporators[_Type]
    @property
    def Type(self):
        """Evaporation type."""
        return self._Type
    @Type.setter
    def Type(self, evap_type):
        # Look up cost data for the requested type; reject unknown names.
        try:
            self._evap_data = evaporators[evap_type]
        except KeyError:
            dummy = str(evaporators.keys())[11:-2]
            raise ValueError(f"Type must be one of the following: {dummy}")
        self._Type = evap_type
    def __init__(self, ID='', ins=None, outs=(), thermo=None, *, P, V):
        """Build the internal evaporator train, condenser and mixer."""
        Unit.__init__(self, ID, ins, outs, thermo)
        # Unpack
        out_wt_solids, liq = self.outs
        self.V = V #: [float] Overall molar fraction of component evaporated.
        # Initial guess for the first effect's vapor fraction.
        self._V1 = V/2.
        # Create components
        self._N_evap = n = len(P) # Number of evaporators
        first_evaporator = Evaporator_PV(None, outs=(None, None), P=P[0])
        # Put liquid first, then vapor side stream
        # NOTE(review): this local list shadows the module-level `evaporators`
        # cost-data dict inside this method.
        evaporators = [first_evaporator]
        for i in range(1, n):
            evap = Evaporator_PQ(None, outs=(None, None, None), P=P[i], Q=0)
            evaporators.append(evap)
        condenser = HXutility(None, outs=Stream(None), V=0)
        self.heat_utilities = (first_evaporator.heat_utilities[0],
                               condenser.heat_utilities[0])
        mixer = Mixer(None, outs=Stream(None))
        self.components = {'evaporators': evaporators,
                           'condenser': condenser,
                           'mixer': mixer}
    def _run(self):
        """Solve the evaporator train so the overall vapor fraction is V."""
        out_wt_solids, liq = self.outs
        ins = self.ins
        n = self._N_evap # Number of evaporators
        # Set-up components
        components = self.components
        evaporators = components['evaporators']
        first_evaporator, *other_evaporators = evaporators
        first_evaporator.ins[:] = [i.copy() for i in ins]
        condenser = components['condenser']
        mixer = components['mixer']
        # Put liquid first, then vapor side stream
        ins = [first_evaporator.outs[1], first_evaporator.outs[0]]
        for evap in other_evaporators:
            evap.ins[:] = ins
            ins = [evap.outs[1], evap.outs[0]]
        def compute_overall_vapor_fraction(v1):
            # Residual function: overall evaporated fraction minus target V.
            v_overall = v1
            first_evaporator.V = v1
            first_evaporator._run()
            for evap in other_evaporators:
                evap._run()
                v_overall += (1-v_overall) * evap.V
            return v_overall - self.V
        x0 = 0.0001
        x1 = 0.9990
        y0 = compute_overall_vapor_fraction(x0)
        y1 = compute_overall_vapor_fraction(x1)
        # Solve the first effect's vapor fraction by bounded interpolation.
        self._V1 = flx.IQ_interpolation(compute_overall_vapor_fraction,
                                        x0, x1, y0, y1, self._V1,
                                        xtol=0.0001, ytol=0.001,
                                        checkiter=False)
        # Condensing vapor from last effector
        outs_vap = evaporators[-1].outs[0]
        condenser.ins[:] = [outs_vap]
        condenser._run()
        outs_liq = [condenser.outs[0]] # list containing all output liquids
        # Unpack other output streams
        out_wt_solids.copy_like(evaporators[-1].outs[1])
        for i in range(1, n):
            evap = evaporators[i]
            outs_liq.append(evap.outs[2])
        # Mix liquid streams
        mixer.ins[:] = outs_liq
        mixer._run()
        liq.copy_like(mixer.outs[0])
        # Back-calculate a consistent overall VLE split for the final outputs.
        mixed_stream = MultiStream(thermo=self.thermo)
        mixed_stream.copy_flow(self.ins[0])
        mixed_stream.vle(P=evaporators[-1].P, V=self.V)
        out_wt_solids.mol = mixed_stream.imol['l']
        liq.mol = mixed_stream.imol['g']
    def _design(self):
        """Size the evaporator train and fill in purchase costs."""
        # This functions also finds the cost
        A_range, C_func, U, _ = self._evap_data
        components = self.components
        evaporators = components['evaporators']
        Design = self.design_results
        Cost = self.purchase_costs
        CE = bst.CE
        # First effect: duty from the enthalpy balance drives its area.
        first_evaporator = evaporators[0]
        hu = first_evaporator.heat_utilities[0]
        duty = first_evaporator.H_out - first_evaporator.H_in
        Q = abs(duty)
        Tci = first_evaporator.ins[0].T
        Tco = first_evaporator.outs[0].T
        hu(duty, Tci, Tco)
        Th = hu.inlet_utility_stream.T
        LMTD = ht.compute_LMTD(Th, Th, Tci, Tco)
        # Heat-exchange correction factor (assumed 1) -- TODO confirm.
        ft = 1
        A = abs(compute_heat_transfer_area(LMTD, U, Q, ft))
        self._evap_costs = evap_costs = [C_func(A, CE)]
        # Find condenser requirements
        condenser = components['condenser']
        condenser._design()
        condenser._cost()
        Cost['Condenser'] = condenser.purchase_cost
        # Find area and cost of evaporators
        As = [A]
        A_min, A_max = A_range
        for evap in evaporators[1:]:
            Q = evap.design_results['Heat transfer']
            Tc = evap.outs[0].T
            Th = evap.outs[2].T
            LMTD = Th - Tc
            A = compute_heat_transfer_area(LMTD, U, Q, ft)
            As.append(A)
            if settings.debug and not A_min < A < A_max:
                warn(f'area requirement ({A}) is out of range, {A_range}')
            evap_costs.append(C_func(A, CE))
        self._As = As
        Design['Area'] = A = sum(As)
        Design['Volume'] = total_volume = self._N_evap * self.tau * self.ins[0].F_vol
        Cost['Evaporators'] = sum(evap_costs)
        # Calculate power
        # NOTE(review): `evap` is the loop variable from above; with a
        # single-effect train the loop never runs and this raises NameError
        # -- confirm n >= 2 is guaranteed upstream.
        power, cost = compute_vacuum_system_power_and_cost(
            F_mass=0, F_vol=0, P_suction=evap.outs[0].P,
            vessel_volume=total_volume,
            vacuum_system_preference='Liquid-ring pump')
        Cost['Liquid-ring pump'] = cost
        self.power_utility(power)
|
[
"thermosteam.MultiStream",
"warnings.warn",
"flexsolve.IQ_interpolation",
"thermosteam.Stream"
] |
[((5921, 6046), 'flexsolve.IQ_interpolation', 'flx.IQ_interpolation', (['compute_overall_vapor_fraction', 'x0', 'x1', 'y0', 'y1', 'self._V1'], {'xtol': '(0.0001)', 'ytol': '(0.001)', 'checkiter': '(False)'}), '(compute_overall_vapor_fraction, x0, x1, y0, y1, self.\n _V1, xtol=0.0001, ytol=0.001, checkiter=False)\n', (5941, 6046), True, 'import flexsolve as flx\n'), ((6755, 6786), 'thermosteam.MultiStream', 'MultiStream', ([], {'thermo': 'self.thermo'}), '(thermo=self.thermo)\n', (6766, 6786), False, 'from thermosteam import MultiStream, Stream, settings\n'), ((4393, 4405), 'thermosteam.Stream', 'Stream', (['None'], {}), '(None)\n', (4399, 4405), False, 'from thermosteam import MultiStream, Stream, settings\n'), ((4572, 4584), 'thermosteam.Stream', 'Stream', (['None'], {}), '(None)\n', (4578, 4584), False, 'from thermosteam import MultiStream, Stream, settings\n'), ((8427, 8485), 'warnings.warn', 'warn', (['f"""area requirement ({A}) is out of range, {A_range}"""'], {}), "(f'area requirement ({A}) is out of range, {A_range}')\n", (8431, 8485), False, 'from warnings import warn\n')]
|
import datetime
from django.contrib import messages
from django.http import HttpResponse, HttpResponseRedirect
from django.shortcuts import render
from django.urls import reverse
from django.views.decorators.csrf import csrf_exempt
from django.core.files.storage import FileSystemStorage
from school_management_app.models import Students, Courses, Subjects, CustomUser, Attendance, AttendanceReport, \
LeaveReportStudent, FeedBackStudent, NotificationStudent, StudentResult, SessionYearModel, News, SComment
def student_home(request):
    """Student dashboard: attendance totals plus per-subject present/absent chart data."""
    # One lookup is enough -- the original fetched the same Students row three
    # times (student_obj / student / student_notifcation).
    student = Students.objects.get(admin=request.user.id)
    attendance_total = AttendanceReport.objects.filter(student_id=student).count()
    attendance_present = AttendanceReport.objects.filter(student_id=student, status=True).count()
    attendance_absent = AttendanceReport.objects.filter(student_id=student, status=False).count()
    # BUGFIX: removed `SessionYearModel.object.get(...)` -- `object` is not a
    # Django manager (the default manager is `objects`), so that line raised
    # AttributeError, and its result was never used anyway.
    subject_data = Subjects.objects.filter(course_id=student.course_id)
    subjects = subject_data.count()
    notifications = NotificationStudent.objects.filter(student_id=student.id)
    # Per-subject attendance series for the dashboard charts.
    subject_name = []
    data_present = []
    data_absent = []
    for subject in subject_data:
        attendance = Attendance.objects.filter(subject_id=subject.id)
        present_count = AttendanceReport.objects.filter(attendance_id__in=attendance, status=True, student_id=student.id).count()
        absent_count = AttendanceReport.objects.filter(attendance_id__in=attendance, status=False, student_id=student.id).count()
        subject_name.append(subject.subject_name)
        data_present.append(present_count)
        data_absent.append(absent_count)
    return render(request, "student_template/student_home_template.html",
                  {"notifications": notifications, "total_attendance": attendance_total,
                   "attendance_absent": attendance_absent, "attendance_present": attendance_present,
                   "subjects": subjects, "data_name": subject_name, "data1": data_present,
                   "data2": data_absent, "student": student})
def student_view_attendance(request):
    """Show the subject list so the student can request an attendance report."""
    # One query instead of three identical Students lookups.
    student = Students.objects.get(admin=request.user.id)
    subjects = Subjects.objects.filter(course_id=student.course_id)
    notifications = NotificationStudent.objects.filter(student_id=student.id)
    return render(request, "student_template/student_view_attendance.html",
                  {"subjects": subjects, "student": student, "notifications": notifications})
def student_view_attendance_post(request):
    """Return attendance reports for one subject within a posted date range."""
    subject_id = request.POST.get("subject")
    start_date = request.POST.get("start_date")
    end_date = request.POST.get("end_date")
    # Parse the ISO dates posted by the form.
    start_date_parsed = datetime.datetime.strptime(start_date, "%Y-%m-%d").date()
    end_date_parsed = datetime.datetime.strptime(end_date, "%Y-%m-%d").date()
    subject_obj = Subjects.objects.get(id=subject_id)
    # One query instead of four identical Students/CustomUser lookups.
    student = Students.objects.get(admin=request.user.id)
    notifications = NotificationStudent.objects.filter(student_id=student.id)
    attendance = Attendance.objects.filter(attendance_date__range=(start_date_parsed, end_date_parsed),
                                          subject_id=subject_obj)
    attendance_reports = AttendanceReport.objects.filter(attendance_id__in=attendance, student_id=student)
    return render(request, "student_template/student_attendance_data.html",
                  {"attendance_reports": attendance_reports, "student": student, "notifications": notifications})
def student_apply_leave(request):
    """Show the leave-request form plus the student's past requests."""
    # One query instead of three identical Students lookups.
    student = Students.objects.get(admin=request.user.id)
    leave_data = LeaveReportStudent.objects.filter(student_id=student)
    notifications = NotificationStudent.objects.filter(student_id=student.id)
    return render(request, "student_template/student_apply_leave.html",
                  {"leave_data": leave_data, "student": student, "notifications": notifications})
def student_apply_leave_save(request):
    """Persist a leave request posted from the apply-leave form."""
    if request.method != "POST":
        return HttpResponseRedirect(reverse("student_apply_leave"))
    leave_start_date = request.POST.get("leave_start_date")
    leave_end_date = request.POST.get("leave_end_date")
    leave_msg = request.POST.get("leave_msg")
    student_obj = Students.objects.get(admin=request.user.id)
    try:
        leave_report = LeaveReportStudent(student_id=student_obj,
                                           leave_start_date=leave_start_date,
                                           leave_end_date=leave_end_date,
                                           leave_message=leave_msg,
                                           leave_status=0)
        leave_report.save()
        messages.success(request, "Чөлөөний хүсэлт амжилттай илгээлээ")
    # BUGFIX: a bare `except:` also swallows SystemExit/KeyboardInterrupt;
    # `except Exception` keeps the intended "show an error" behavior.
    except Exception:
        messages.error(request, "Хүсэлт илгээхэд алдаа гарлаа")
    return HttpResponseRedirect(reverse("student_apply_leave"))
def student_feedback(request):
    """List the student's past feedback messages and their replies."""
    # One query instead of four (the original's `staff_id` was in fact a
    # Students row identical to the later lookups).
    student = Students.objects.get(admin=request.user.id)
    feedback_data = FeedBackStudent.objects.filter(student_id=student)
    notifications = NotificationStudent.objects.filter(student_id=student.id)
    return render(request, "student_template/student_feedback.html",
                  {"feedback_data": feedback_data, "student": student, "notifications": notifications})
def student_feedback_save(request):
    """Persist a feedback message posted by the student."""
    if request.method != "POST":
        return HttpResponseRedirect(reverse("student_feedback"))
    feedback_msg = request.POST.get("feedback_msg")
    student_obj = Students.objects.get(admin=request.user.id)
    try:
        feedback = FeedBackStudent(student_id=student_obj, feedback=feedback_msg, feedback_reply="")
        feedback.save()
        messages.success(request, "Санал хүсэлт амжилттай илгээлээ")
    # BUGFIX: narrowed the bare `except:` to Exception.
    except Exception:
        messages.error(request, "Санал хүсэлт илгээхэд алдаа гарлаа")
    return HttpResponseRedirect(reverse("student_feedback"))
def student_profile(request):
    """Show the profile page for the logged-in student."""
    user = CustomUser.objects.get(id=request.user.id)
    student = Students.objects.get(admin=user)
    # Reuse the row above instead of two more identical Students queries.
    notifications = NotificationStudent.objects.filter(student_id=student.id)
    return render(request, "student_template/student_profile.html",
                  {"user": user, "student": student, "notifications": notifications})
def student_profile_save(request):
    """Update the logged-in student's name, password, address and photo."""
    if request.method != "POST":
        return HttpResponseRedirect(reverse("student_profile"))
    first_name = request.POST.get("first_name")
    last_name = request.POST.get("last_name")
    password = request.POST.get("password")
    address = request.POST.get("address")
    # Store the uploaded picture (if any) and remember its public URL.
    if request.FILES.get('profile_pic', False):
        profile_pic = request.FILES['profile_pic']
        fs = FileSystemStorage()
        filename = fs.save(profile_pic.name, profile_pic)
        profile_pic_url = fs.url(filename)
    else:
        profile_pic_url = None
    try:
        customuser = CustomUser.objects.get(id=request.user.id)
        customuser.first_name = first_name
        customuser.last_name = last_name
        # Only change the password when a non-empty one was submitted
        # (same truth table as the original != None / != "" pair).
        if password:
            customuser.set_password(password)
        customuser.save()
        student = Students.objects.get(admin=customuser)
        student.address = address
        if profile_pic_url is not None:
            student.profile_pic = profile_pic_url
        student.save()
        messages.success(request, "Мэдээлэл шинэчлэгдлээ")
    # BUGFIX: narrowed the bare `except:` so system-exiting exceptions are
    # not swallowed; the user-facing error message is unchanged.
    except Exception:
        messages.error(request, "Мэдээлэл шинэчлэхэд алдаа гарлаа")
    return HttpResponseRedirect(reverse("student_profile"))
def student_news(request):
    """Show all news items, newest first."""
    news = News.objects.all().order_by('-ndate')
    # One query instead of three identical Students lookups.
    student = Students.objects.get(admin=request.user.id)
    notifications = NotificationStudent.objects.filter(student_id=student.id)
    return render(request, "student_template/student_news.html",
                  {"news": news, "student": student, "notifications": notifications})
def view_student_news(request, news_id):
    """Show one news item with its top-level comments."""
    news = News.objects.get(id=news_id)
    # `user` and the original `staff` were the same CustomUser row fetched
    # twice; fetch once and pass it under both template keys.
    user = CustomUser.objects.get(id=request.user.id)
    student = Students.objects.get(admin=user)
    comment = SComment.objects.filter(News=news_id, reply=None).order_by('-id')
    notifications = NotificationStudent.objects.filter(student_id=student.id)
    # Total of the per-comment counters (each top-level comment stores `count`).
    comments_count = sum(b.count for b in comment)
    return render(request, "student_template/view_student_news.html",
                  {"news": news, "notifications": notifications, "student": student,
                   "comment": comment, "comment_count": comments_count, "staff": user})
# Comment
def view_student_news_comment_save(request):
    """Save a new comment (or a reply to an existing one) on a news item."""
    staff = CustomUser.objects.get(id=request.user.id)
    if request.method != "POST":
        return HttpResponseRedirect(reverse("student_news"))
    # Renamed the local from `News` -- it shadowed the imported News model.
    news_id = request.POST.get("News_id")
    body = request.POST.get("body")
    reply_id = request.POST.get('comment_id')
    parent_comment = None
    if reply_id:
        parent_comment = SComment.objects.get(id=reply_id)
    try:
        comment = SComment(News_id=news_id, staff_id=staff, body=body, count=1, reply=parent_comment)
        comment.save()
        messages.success(request, "Сэтгэгдэл нэмэгдлээ!")
    # BUGFIX: narrowed the bare `except:` to Exception.
    except Exception:
        messages.error(request, "Сэтгэгдэл нэмэхэд алдаа гарлаа")
    return HttpResponseRedirect(reverse("view_student_news", kwargs={"news_id": news_id}))
# EDIT
def view_student_news_comment_edit_save(request):
    """Apply an edit to an existing comment."""
    staff = CustomUser.objects.get(id=request.user.id)
    if request.method != "POST":
        messages.error(request, "Method not allowed!")
        return HttpResponseRedirect(reverse("student_news"))
    comment_id = request.POST.get("comment_id")
    # Renamed the local from `News` -- it shadowed the imported News model.
    news_id = request.POST.get("News_id")
    body = request.POST.get("body")
    try:
        comment = SComment.objects.get(id=comment_id)
        comment.News_id = news_id
        comment.staff_id = staff
        comment.body = body
        comment.count = 1
        comment.save()
        messages.success(request, "Сэтгэгдэл засагдлаа!")
    # BUGFIX: narrowed the bare `except:` to Exception.
    except Exception:
        messages.error(request, "Сэтгэгдэл засахад алдаа гарлаа")
    return HttpResponseRedirect(reverse("view_student_news", kwargs={"news_id": news_id}))
def delete_scomment(request, comment_id, news_id):
    """Delete one student comment and redirect back to the news page.

    NOTE(review): deleting state via GET is CSRF-prone; the URLconf appears
    to link this as a plain GET, so the method is flagged here rather than
    changed to POST — confirm before hardening.
    """
    if request.method != "GET":
        return HttpResponse("<h2>Method Not Allowed</h2>")
    try:
        comment = SComment.objects.get(id=comment_id)
        comment.delete()
        messages.success(request, "Сэтгэгдэл амжилттай устгалаа")
    except Exception:  # was a bare `except:`; missing comment just flashes an error
        messages.error(request, "Сэтгэгдэл устгахад алдаа гарлаа")
    return HttpResponseRedirect(reverse("view_student_news", kwargs={"news_id": news_id}))
@csrf_exempt
def student_fcmtoken_save(request):
    """Store the posted FCM push-notification token on the student's record.

    Returns the plain strings ``"True"``/``"False"`` so the client script can
    check success. CSRF is exempted — presumably because the token is posted
    by a background/service-worker script; confirm against the caller.
    """
    token = request.POST.get("token")
    try:
        student = Students.objects.get(admin=request.user.id)
        student.fcm_token = token
        student.save()
        return HttpResponse("True")
    except Exception:  # was a bare `except:`; any failure reports "False"
        return HttpResponse("False")
def student_all_notification(request):
    """Render every notification addressed to the logged-in student."""
    # One lookup instead of the original's three redundant queries
    # (Students fetched twice plus an intermediate CustomUser fetch,
    # all resolving the same logged-in user).
    student = Students.objects.get(admin=request.user.id)
    notifications = NotificationStudent.objects.filter(student_id=student.id)
    return render(request, "student_template/all_notification.html",
                  {"notifications": notifications, "student": student})
def student_view_result(request):
    """Render the logged-in student's results together with their notifications."""
    # Collapsed the original's four redundant user/student lookups
    # (Students fetched three times, CustomUser once) into a single query.
    student = Students.objects.get(admin=request.user.id)
    studentresult = StudentResult.objects.filter(student_id=student.id)
    notifications = NotificationStudent.objects.filter(student_id=student.id)
    return render(request, "student_template/student_result.html",
                  {"studentresult": studentresult, "student": student,
                   "notifications": notifications})
def scovid19(request):
    """Render the COVID-19 information page for the logged-in student."""
    # Collapsed the original's three redundant lookups (CustomUser plus two
    # Students fetches for the same user) into a single query.
    student = Students.objects.get(admin=request.user.id)
    notifications = NotificationStudent.objects.filter(student_id=student.id)
    return render(request, "student_template/covid19.html",
                  {"student": student, "notifications": notifications})
|
[
"school_management_app.models.News.objects.all",
"school_management_app.models.StudentResult.objects.filter",
"school_management_app.models.SComment.objects.filter",
"school_management_app.models.LeaveReportStudent",
"school_management_app.models.SComment",
"django.contrib.messages.error",
"school_management_app.models.Subjects.objects.get",
"school_management_app.models.Students.objects.get",
"school_management_app.models.FeedBackStudent.objects.filter",
"school_management_app.models.FeedBackStudent",
"school_management_app.models.Courses.objects.get",
"school_management_app.models.Attendance.objects.filter",
"school_management_app.models.LeaveReportStudent.objects.filter",
"school_management_app.models.Subjects.objects.filter",
"django.http.HttpResponse",
"django.shortcuts.render",
"school_management_app.models.News.objects.get",
"django.urls.reverse",
"datetime.datetime.strptime",
"school_management_app.models.NotificationStudent.objects.filter",
"school_management_app.models.AttendanceReport.objects.filter",
"django.core.files.storage.FileSystemStorage",
"school_management_app.models.SessionYearModel.object.get",
"school_management_app.models.SComment.objects.get",
"django.contrib.messages.success",
"school_management_app.models.CustomUser.objects.get"
] |
[((559, 602), 'school_management_app.models.Students.objects.get', 'Students.objects.get', ([], {'admin': 'request.user.id'}), '(admin=request.user.id)\n', (579, 602), False, 'from school_management_app.models import Students, Courses, Subjects, CustomUser, Attendance, AttendanceReport, LeaveReportStudent, FeedBackStudent, NotificationStudent, StudentResult, SessionYearModel, News, SComment\n'), ((897, 945), 'school_management_app.models.Courses.objects.get', 'Courses.objects.get', ([], {'id': 'student_obj.course_id.id'}), '(id=student_obj.course_id.id)\n', (916, 945), False, 'from school_management_app.models import Students, Courses, Subjects, CustomUser, Attendance, AttendanceReport, LeaveReportStudent, FeedBackStudent, NotificationStudent, StudentResult, SessionYearModel, News, SComment\n'), ((1027, 1068), 'school_management_app.models.Subjects.objects.filter', 'Subjects.objects.filter', ([], {'course_id': 'course'}), '(course_id=course)\n', (1050, 1068), False, 'from school_management_app.models import Students, Courses, Subjects, CustomUser, Attendance, AttendanceReport, LeaveReportStudent, FeedBackStudent, NotificationStudent, StudentResult, SessionYearModel, News, SComment\n'), ((1085, 1147), 'school_management_app.models.SessionYearModel.object.get', 'SessionYearModel.object.get', ([], {'id': 'student_obj.session_year_id.id'}), '(id=student_obj.session_year_id.id)\n', (1112, 1147), False, 'from school_management_app.models import Students, Courses, Subjects, CustomUser, Attendance, AttendanceReport, LeaveReportStudent, FeedBackStudent, NotificationStudent, StudentResult, SessionYearModel, News, SComment\n'), ((1157, 1199), 'school_management_app.models.CustomUser.objects.get', 'CustomUser.objects.get', ([], {'id': 'request.user.id'}), '(id=request.user.id)\n', (1179, 1199), False, 'from school_management_app.models import Students, Courses, Subjects, CustomUser, Attendance, AttendanceReport, LeaveReportStudent, FeedBackStudent, NotificationStudent, 
StudentResult, SessionYearModel, News, SComment\n'), ((1212, 1244), 'school_management_app.models.Students.objects.get', 'Students.objects.get', ([], {'admin': 'user'}), '(admin=user)\n', (1232, 1244), False, 'from school_management_app.models import Students, Courses, Subjects, CustomUser, Attendance, AttendanceReport, LeaveReportStudent, FeedBackStudent, NotificationStudent, StudentResult, SessionYearModel, News, SComment\n'), ((1270, 1313), 'school_management_app.models.Students.objects.get', 'Students.objects.get', ([], {'admin': 'request.user.id'}), '(admin=request.user.id)\n', (1290, 1313), False, 'from school_management_app.models import Students, Courses, Subjects, CustomUser, Attendance, AttendanceReport, LeaveReportStudent, FeedBackStudent, NotificationStudent, StudentResult, SessionYearModel, News, SComment\n'), ((1332, 1401), 'school_management_app.models.NotificationStudent.objects.filter', 'NotificationStudent.objects.filter', ([], {'student_id': 'student_notifcation.id'}), '(student_id=student_notifcation.id)\n', (1366, 1401), False, 'from school_management_app.models import Students, Courses, Subjects, CustomUser, Attendance, AttendanceReport, LeaveReportStudent, FeedBackStudent, NotificationStudent, StudentResult, SessionYearModel, News, SComment\n'), ((1479, 1535), 'school_management_app.models.Subjects.objects.filter', 'Subjects.objects.filter', ([], {'course_id': 'student_obj.course_id'}), '(course_id=student_obj.course_id)\n', (1502, 1535), False, 'from school_management_app.models import Students, Courses, Subjects, CustomUser, Attendance, AttendanceReport, LeaveReportStudent, FeedBackStudent, NotificationStudent, StudentResult, SessionYearModel, News, SComment\n'), ((2087, 2434), 'django.shortcuts.render', 'render', (['request', '"""student_template/student_home_template.html"""', "{'notifications': notifications, 'total_attendance': attendance_total,\n 'attendance_absent': attendance_absent, 'attendance_present':\n attendance_present, 
'subjects': subjects, 'data_name': subject_name,\n 'data1': data_present, 'data2': data_absent, 'student': student}"], {}), "(request, 'student_template/student_home_template.html', {\n 'notifications': notifications, 'total_attendance': attendance_total,\n 'attendance_absent': attendance_absent, 'attendance_present':\n attendance_present, 'subjects': subjects, 'data_name': subject_name,\n 'data1': data_present, 'data2': data_absent, 'student': student})\n", (2093, 2434), False, 'from django.shortcuts import render\n'), ((2450, 2493), 'school_management_app.models.Students.objects.get', 'Students.objects.get', ([], {'admin': 'request.user.id'}), '(admin=request.user.id)\n', (2470, 2493), False, 'from school_management_app.models import Students, Courses, Subjects, CustomUser, Attendance, AttendanceReport, LeaveReportStudent, FeedBackStudent, NotificationStudent, StudentResult, SessionYearModel, News, SComment\n'), ((2536, 2577), 'school_management_app.models.Subjects.objects.filter', 'Subjects.objects.filter', ([], {'course_id': 'course'}), '(course_id=course)\n', (2559, 2577), False, 'from school_management_app.models import Students, Courses, Subjects, CustomUser, Attendance, AttendanceReport, LeaveReportStudent, FeedBackStudent, NotificationStudent, StudentResult, SessionYearModel, News, SComment\n'), ((2587, 2629), 'school_management_app.models.CustomUser.objects.get', 'CustomUser.objects.get', ([], {'id': 'request.user.id'}), '(id=request.user.id)\n', (2609, 2629), False, 'from school_management_app.models import Students, Courses, Subjects, CustomUser, Attendance, AttendanceReport, LeaveReportStudent, FeedBackStudent, NotificationStudent, StudentResult, SessionYearModel, News, SComment\n'), ((2642, 2674), 'school_management_app.models.Students.objects.get', 'Students.objects.get', ([], {'admin': 'user'}), '(admin=user)\n', (2662, 2674), False, 'from school_management_app.models import Students, Courses, Subjects, CustomUser, Attendance, AttendanceReport, 
LeaveReportStudent, FeedBackStudent, NotificationStudent, StudentResult, SessionYearModel, News, SComment\n'), ((2699, 2742), 'school_management_app.models.Students.objects.get', 'Students.objects.get', ([], {'admin': 'request.user.id'}), '(admin=request.user.id)\n', (2719, 2742), False, 'from school_management_app.models import Students, Courses, Subjects, CustomUser, Attendance, AttendanceReport, LeaveReportStudent, FeedBackStudent, NotificationStudent, StudentResult, SessionYearModel, News, SComment\n'), ((2761, 2830), 'school_management_app.models.NotificationStudent.objects.filter', 'NotificationStudent.objects.filter', ([], {'student_id': 'student_notifcation.id'}), '(student_id=student_notifcation.id)\n', (2795, 2830), False, 'from school_management_app.models import Students, Courses, Subjects, CustomUser, Attendance, AttendanceReport, LeaveReportStudent, FeedBackStudent, NotificationStudent, StudentResult, SessionYearModel, News, SComment\n'), ((2842, 2987), 'django.shortcuts.render', 'render', (['request', '"""student_template/student_view_attendance.html"""', "{'subjects': subjects, 'student': student, 'notifications': notifications}"], {}), "(request, 'student_template/student_view_attendance.html', {\n 'subjects': subjects, 'student': student, 'notifications': notifications})\n", (2848, 2987), False, 'from django.shortcuts import render\n'), ((3320, 3355), 'school_management_app.models.Subjects.objects.get', 'Subjects.objects.get', ([], {'id': 'subject_id'}), '(id=subject_id)\n', (3340, 3355), False, 'from school_management_app.models import Students, Courses, Subjects, CustomUser, Attendance, AttendanceReport, LeaveReportStudent, FeedBackStudent, NotificationStudent, StudentResult, SessionYearModel, News, SComment\n'), ((3372, 3414), 'school_management_app.models.CustomUser.objects.get', 'CustomUser.objects.get', ([], {'id': 'request.user.id'}), '(id=request.user.id)\n', (3394, 3414), False, 'from school_management_app.models import Students, Courses, 
Subjects, CustomUser, Attendance, AttendanceReport, LeaveReportStudent, FeedBackStudent, NotificationStudent, StudentResult, SessionYearModel, News, SComment\n'), ((3428, 3467), 'school_management_app.models.Students.objects.get', 'Students.objects.get', ([], {'admin': 'user_object'}), '(admin=user_object)\n', (3448, 3467), False, 'from school_management_app.models import Students, Courses, Subjects, CustomUser, Attendance, AttendanceReport, LeaveReportStudent, FeedBackStudent, NotificationStudent, StudentResult, SessionYearModel, News, SComment\n'), ((3477, 3519), 'school_management_app.models.CustomUser.objects.get', 'CustomUser.objects.get', ([], {'id': 'request.user.id'}), '(id=request.user.id)\n', (3499, 3519), False, 'from school_management_app.models import Students, Courses, Subjects, CustomUser, Attendance, AttendanceReport, LeaveReportStudent, FeedBackStudent, NotificationStudent, StudentResult, SessionYearModel, News, SComment\n'), ((3532, 3564), 'school_management_app.models.Students.objects.get', 'Students.objects.get', ([], {'admin': 'user'}), '(admin=user)\n', (3552, 3564), False, 'from school_management_app.models import Students, Courses, Subjects, CustomUser, Attendance, AttendanceReport, LeaveReportStudent, FeedBackStudent, NotificationStudent, StudentResult, SessionYearModel, News, SComment\n'), ((3589, 3632), 'school_management_app.models.Students.objects.get', 'Students.objects.get', ([], {'admin': 'request.user.id'}), '(admin=request.user.id)\n', (3609, 3632), False, 'from school_management_app.models import Students, Courses, Subjects, CustomUser, Attendance, AttendanceReport, LeaveReportStudent, FeedBackStudent, NotificationStudent, StudentResult, SessionYearModel, News, SComment\n'), ((3651, 3720), 'school_management_app.models.NotificationStudent.objects.filter', 'NotificationStudent.objects.filter', ([], {'student_id': 'student_notifcation.id'}), '(student_id=student_notifcation.id)\n', (3685, 3720), False, 'from 
school_management_app.models import Students, Courses, Subjects, CustomUser, Attendance, AttendanceReport, LeaveReportStudent, FeedBackStudent, NotificationStudent, StudentResult, SessionYearModel, News, SComment\n'), ((3737, 3849), 'school_management_app.models.Attendance.objects.filter', 'Attendance.objects.filter', ([], {'attendance_date__range': '(start_data_parse, end_data_parse)', 'subject_id': 'subject_obj'}), '(attendance_date__range=(start_data_parse,\n end_data_parse), subject_id=subject_obj)\n', (3762, 3849), False, 'from school_management_app.models import Students, Courses, Subjects, CustomUser, Attendance, AttendanceReport, LeaveReportStudent, FeedBackStudent, NotificationStudent, StudentResult, SessionYearModel, News, SComment\n'), ((3867, 3954), 'school_management_app.models.AttendanceReport.objects.filter', 'AttendanceReport.objects.filter', ([], {'attendance_id__in': 'attendance', 'student_id': 'stud_obj'}), '(attendance_id__in=attendance, student_id=\n stud_obj)\n', (3898, 3954), False, 'from school_management_app.models import Students, Courses, Subjects, CustomUser, Attendance, AttendanceReport, LeaveReportStudent, FeedBackStudent, NotificationStudent, StudentResult, SessionYearModel, News, SComment\n'), ((3960, 4129), 'django.shortcuts.render', 'render', (['request', '"""student_template/student_attendance_data.html"""', "{'attendance_reports': attendance_reports, 'student': student,\n 'notifications': notifications}"], {}), "(request, 'student_template/student_attendance_data.html', {\n 'attendance_reports': attendance_reports, 'student': student,\n 'notifications': notifications})\n", (3966, 4129), False, 'from django.shortcuts import render\n'), ((4167, 4210), 'school_management_app.models.Students.objects.get', 'Students.objects.get', ([], {'admin': 'request.user.id'}), '(admin=request.user.id)\n', (4187, 4210), False, 'from school_management_app.models import Students, Courses, Subjects, CustomUser, Attendance, AttendanceReport, 
LeaveReportStudent, FeedBackStudent, NotificationStudent, StudentResult, SessionYearModel, News, SComment\n'), ((4226, 4283), 'school_management_app.models.LeaveReportStudent.objects.filter', 'LeaveReportStudent.objects.filter', ([], {'student_id': 'student_obj'}), '(student_id=student_obj)\n', (4259, 4283), False, 'from school_management_app.models import Students, Courses, Subjects, CustomUser, Attendance, AttendanceReport, LeaveReportStudent, FeedBackStudent, NotificationStudent, StudentResult, SessionYearModel, News, SComment\n'), ((4293, 4335), 'school_management_app.models.CustomUser.objects.get', 'CustomUser.objects.get', ([], {'id': 'request.user.id'}), '(id=request.user.id)\n', (4315, 4335), False, 'from school_management_app.models import Students, Courses, Subjects, CustomUser, Attendance, AttendanceReport, LeaveReportStudent, FeedBackStudent, NotificationStudent, StudentResult, SessionYearModel, News, SComment\n'), ((4348, 4380), 'school_management_app.models.Students.objects.get', 'Students.objects.get', ([], {'admin': 'user'}), '(admin=user)\n', (4368, 4380), False, 'from school_management_app.models import Students, Courses, Subjects, CustomUser, Attendance, AttendanceReport, LeaveReportStudent, FeedBackStudent, NotificationStudent, StudentResult, SessionYearModel, News, SComment\n'), ((4405, 4448), 'school_management_app.models.Students.objects.get', 'Students.objects.get', ([], {'admin': 'request.user.id'}), '(admin=request.user.id)\n', (4425, 4448), False, 'from school_management_app.models import Students, Courses, Subjects, CustomUser, Attendance, AttendanceReport, LeaveReportStudent, FeedBackStudent, NotificationStudent, StudentResult, SessionYearModel, News, SComment\n'), ((4467, 4536), 'school_management_app.models.NotificationStudent.objects.filter', 'NotificationStudent.objects.filter', ([], {'student_id': 'student_notifcation.id'}), '(student_id=student_notifcation.id)\n', (4501, 4536), False, 'from school_management_app.models import 
Students, Courses, Subjects, CustomUser, Attendance, AttendanceReport, LeaveReportStudent, FeedBackStudent, NotificationStudent, StudentResult, SessionYearModel, News, SComment\n'), ((4548, 4692), 'django.shortcuts.render', 'render', (['request', '"""student_template/student_apply_leave.html"""', "{'leave_data': leave_data, 'student': student, 'notifications': notifications}"], {}), "(request, 'student_template/student_apply_leave.html', {'leave_data':\n leave_data, 'student': student, 'notifications': notifications})\n", (4554, 4692), False, 'from django.shortcuts import render\n'), ((5680, 5723), 'school_management_app.models.Students.objects.get', 'Students.objects.get', ([], {'admin': 'request.user.id'}), '(admin=request.user.id)\n', (5700, 5723), False, 'from school_management_app.models import Students, Courses, Subjects, CustomUser, Attendance, AttendanceReport, LeaveReportStudent, FeedBackStudent, NotificationStudent, StudentResult, SessionYearModel, News, SComment\n'), ((5742, 5793), 'school_management_app.models.FeedBackStudent.objects.filter', 'FeedBackStudent.objects.filter', ([], {'student_id': 'staff_id'}), '(student_id=staff_id)\n', (5772, 5793), False, 'from school_management_app.models import Students, Courses, Subjects, CustomUser, Attendance, AttendanceReport, LeaveReportStudent, FeedBackStudent, NotificationStudent, StudentResult, SessionYearModel, News, SComment\n'), ((5803, 5845), 'school_management_app.models.CustomUser.objects.get', 'CustomUser.objects.get', ([], {'id': 'request.user.id'}), '(id=request.user.id)\n', (5825, 5845), False, 'from school_management_app.models import Students, Courses, Subjects, CustomUser, Attendance, AttendanceReport, LeaveReportStudent, FeedBackStudent, NotificationStudent, StudentResult, SessionYearModel, News, SComment\n'), ((5858, 5890), 'school_management_app.models.Students.objects.get', 'Students.objects.get', ([], {'admin': 'user'}), '(admin=user)\n', (5878, 5890), False, 'from 
school_management_app.models import Students, Courses, Subjects, CustomUser, Attendance, AttendanceReport, LeaveReportStudent, FeedBackStudent, NotificationStudent, StudentResult, SessionYearModel, News, SComment\n'), ((5915, 5958), 'school_management_app.models.Students.objects.get', 'Students.objects.get', ([], {'admin': 'request.user.id'}), '(admin=request.user.id)\n', (5935, 5958), False, 'from school_management_app.models import Students, Courses, Subjects, CustomUser, Attendance, AttendanceReport, LeaveReportStudent, FeedBackStudent, NotificationStudent, StudentResult, SessionYearModel, News, SComment\n'), ((5977, 6046), 'school_management_app.models.NotificationStudent.objects.filter', 'NotificationStudent.objects.filter', ([], {'student_id': 'student_notifcation.id'}), '(student_id=student_notifcation.id)\n', (6011, 6046), False, 'from school_management_app.models import Students, Courses, Subjects, CustomUser, Attendance, AttendanceReport, LeaveReportStudent, FeedBackStudent, NotificationStudent, StudentResult, SessionYearModel, News, SComment\n'), ((6058, 6205), 'django.shortcuts.render', 'render', (['request', '"""student_template/student_feedback.html"""', "{'feedback_data': feedback_data, 'student': student, 'notifications':\n notifications}"], {}), "(request, 'student_template/student_feedback.html', {'feedback_data':\n feedback_data, 'student': student, 'notifications': notifications})\n", (6064, 6205), False, 'from django.shortcuts import render\n'), ((6940, 6982), 'school_management_app.models.CustomUser.objects.get', 'CustomUser.objects.get', ([], {'id': 'request.user.id'}), '(id=request.user.id)\n', (6962, 6982), False, 'from school_management_app.models import Students, Courses, Subjects, CustomUser, Attendance, AttendanceReport, LeaveReportStudent, FeedBackStudent, NotificationStudent, StudentResult, SessionYearModel, News, SComment\n'), ((6995, 7027), 'school_management_app.models.Students.objects.get', 'Students.objects.get', ([], {'admin': 
'user'}), '(admin=user)\n', (7015, 7027), False, 'from school_management_app.models import Students, Courses, Subjects, CustomUser, Attendance, AttendanceReport, LeaveReportStudent, FeedBackStudent, NotificationStudent, StudentResult, SessionYearModel, News, SComment\n'), ((7052, 7095), 'school_management_app.models.Students.objects.get', 'Students.objects.get', ([], {'admin': 'request.user.id'}), '(admin=request.user.id)\n', (7072, 7095), False, 'from school_management_app.models import Students, Courses, Subjects, CustomUser, Attendance, AttendanceReport, LeaveReportStudent, FeedBackStudent, NotificationStudent, StudentResult, SessionYearModel, News, SComment\n'), ((7114, 7183), 'school_management_app.models.NotificationStudent.objects.filter', 'NotificationStudent.objects.filter', ([], {'student_id': 'student_notifcation.id'}), '(student_id=student_notifcation.id)\n', (7148, 7183), False, 'from school_management_app.models import Students, Courses, Subjects, CustomUser, Attendance, AttendanceReport, LeaveReportStudent, FeedBackStudent, NotificationStudent, StudentResult, SessionYearModel, News, SComment\n'), ((7195, 7323), 'django.shortcuts.render', 'render', (['request', '"""student_template/student_profile.html"""', "{'user': user, 'student': student, 'notifications': notifications}"], {}), "(request, 'student_template/student_profile.html', {'user': user,\n 'student': student, 'notifications': notifications})\n", (7201, 7323), False, 'from django.shortcuts import render\n'), ((8810, 8852), 'school_management_app.models.CustomUser.objects.get', 'CustomUser.objects.get', ([], {'id': 'request.user.id'}), '(id=request.user.id)\n', (8832, 8852), False, 'from school_management_app.models import Students, Courses, Subjects, CustomUser, Attendance, AttendanceReport, LeaveReportStudent, FeedBackStudent, NotificationStudent, StudentResult, SessionYearModel, News, SComment\n'), ((8865, 8897), 'school_management_app.models.Students.objects.get', 'Students.objects.get', 
([], {'admin': 'user'}), '(admin=user)\n', (8885, 8897), False, 'from school_management_app.models import Students, Courses, Subjects, CustomUser, Attendance, AttendanceReport, LeaveReportStudent, FeedBackStudent, NotificationStudent, StudentResult, SessionYearModel, News, SComment\n'), ((8922, 8965), 'school_management_app.models.Students.objects.get', 'Students.objects.get', ([], {'admin': 'request.user.id'}), '(admin=request.user.id)\n', (8942, 8965), False, 'from school_management_app.models import Students, Courses, Subjects, CustomUser, Attendance, AttendanceReport, LeaveReportStudent, FeedBackStudent, NotificationStudent, StudentResult, SessionYearModel, News, SComment\n'), ((8984, 9053), 'school_management_app.models.NotificationStudent.objects.filter', 'NotificationStudent.objects.filter', ([], {'student_id': 'student_notifcation.id'}), '(student_id=student_notifcation.id)\n', (9018, 9053), False, 'from school_management_app.models import Students, Courses, Subjects, CustomUser, Attendance, AttendanceReport, LeaveReportStudent, FeedBackStudent, NotificationStudent, StudentResult, SessionYearModel, News, SComment\n'), ((9065, 9190), 'django.shortcuts.render', 'render', (['request', '"""student_template/student_news.html"""', "{'news': news, 'student': student, 'notifications': notifications}"], {}), "(request, 'student_template/student_news.html', {'news': news,\n 'student': student, 'notifications': notifications})\n", (9071, 9190), False, 'from django.shortcuts import render\n'), ((9232, 9260), 'school_management_app.models.News.objects.get', 'News.objects.get', ([], {'id': 'news_id'}), '(id=news_id)\n', (9248, 9260), False, 'from school_management_app.models import Students, Courses, Subjects, CustomUser, Attendance, AttendanceReport, LeaveReportStudent, FeedBackStudent, NotificationStudent, StudentResult, SessionYearModel, News, SComment\n'), ((9270, 9312), 'school_management_app.models.CustomUser.objects.get', 'CustomUser.objects.get', ([], {'id': 
'request.user.id'}), '(id=request.user.id)\n', (9292, 9312), False, 'from school_management_app.models import Students, Courses, Subjects, CustomUser, Attendance, AttendanceReport, LeaveReportStudent, FeedBackStudent, NotificationStudent, StudentResult, SessionYearModel, News, SComment\n'), ((9325, 9357), 'school_management_app.models.Students.objects.get', 'Students.objects.get', ([], {'admin': 'user'}), '(admin=user)\n', (9345, 9357), False, 'from school_management_app.models import Students, Courses, Subjects, CustomUser, Attendance, AttendanceReport, LeaveReportStudent, FeedBackStudent, NotificationStudent, StudentResult, SessionYearModel, News, SComment\n'), ((9446, 9488), 'school_management_app.models.CustomUser.objects.get', 'CustomUser.objects.get', ([], {'id': 'request.user.id'}), '(id=request.user.id)\n', (9468, 9488), False, 'from school_management_app.models import Students, Courses, Subjects, CustomUser, Attendance, AttendanceReport, LeaveReportStudent, FeedBackStudent, NotificationStudent, StudentResult, SessionYearModel, News, SComment\n'), ((9513, 9556), 'school_management_app.models.Students.objects.get', 'Students.objects.get', ([], {'admin': 'request.user.id'}), '(admin=request.user.id)\n', (9533, 9556), False, 'from school_management_app.models import Students, Courses, Subjects, CustomUser, Attendance, AttendanceReport, LeaveReportStudent, FeedBackStudent, NotificationStudent, StudentResult, SessionYearModel, News, SComment\n'), ((9575, 9644), 'school_management_app.models.NotificationStudent.objects.filter', 'NotificationStudent.objects.filter', ([], {'student_id': 'student_notifcation.id'}), '(student_id=student_notifcation.id)\n', (9609, 9644), False, 'from school_management_app.models import Students, Courses, Subjects, CustomUser, Attendance, AttendanceReport, LeaveReportStudent, FeedBackStudent, NotificationStudent, StudentResult, SessionYearModel, News, SComment\n'), ((9735, 9938), 'django.shortcuts.render', 'render', (['request', 
'"""student_template/view_student_news.html"""', "{'news': news, 'notifications': notifications, 'student': student,\n 'comment': comment, 'comment_count': comments_count, 'staff': staff}"], {}), "(request, 'student_template/view_student_news.html', {'news': news,\n 'notifications': notifications, 'student': student, 'comment': comment,\n 'comment_count': comments_count, 'staff': staff})\n", (9741, 9938), False, 'from django.shortcuts import render\n'), ((9995, 10037), 'school_management_app.models.CustomUser.objects.get', 'CustomUser.objects.get', ([], {'id': 'request.user.id'}), '(id=request.user.id)\n', (10017, 10037), False, 'from school_management_app.models import Students, Courses, Subjects, CustomUser, Attendance, AttendanceReport, LeaveReportStudent, FeedBackStudent, NotificationStudent, StudentResult, SessionYearModel, News, SComment\n'), ((10930, 10972), 'school_management_app.models.CustomUser.objects.get', 'CustomUser.objects.get', ([], {'id': 'request.user.id'}), '(id=request.user.id)\n', (10952, 10972), False, 'from school_management_app.models import Students, Courses, Subjects, CustomUser, Attendance, AttendanceReport, LeaveReportStudent, FeedBackStudent, NotificationStudent, StudentResult, SessionYearModel, News, SComment\n'), ((12762, 12805), 'school_management_app.models.Students.objects.get', 'Students.objects.get', ([], {'admin': 'request.user.id'}), '(admin=request.user.id)\n', (12782, 12805), False, 'from school_management_app.models import Students, Courses, Subjects, CustomUser, Attendance, AttendanceReport, LeaveReportStudent, FeedBackStudent, NotificationStudent, StudentResult, SessionYearModel, News, SComment\n'), ((12824, 12881), 'school_management_app.models.NotificationStudent.objects.filter', 'NotificationStudent.objects.filter', ([], {'student_id': 'student.id'}), '(student_id=student.id)\n', (12858, 12881), False, 'from school_management_app.models import Students, Courses, Subjects, CustomUser, Attendance, AttendanceReport, 
LeaveReportStudent, FeedBackStudent, NotificationStudent, StudentResult, SessionYearModel, News, SComment\n'), ((12891, 12933), 'school_management_app.models.CustomUser.objects.get', 'CustomUser.objects.get', ([], {'id': 'request.user.id'}), '(id=request.user.id)\n', (12913, 12933), False, 'from school_management_app.models import Students, Courses, Subjects, CustomUser, Attendance, AttendanceReport, LeaveReportStudent, FeedBackStudent, NotificationStudent, StudentResult, SessionYearModel, News, SComment\n'), ((12946, 12978), 'school_management_app.models.Students.objects.get', 'Students.objects.get', ([], {'admin': 'user'}), '(admin=user)\n', (12966, 12978), False, 'from school_management_app.models import Students, Courses, Subjects, CustomUser, Attendance, AttendanceReport, LeaveReportStudent, FeedBackStudent, NotificationStudent, StudentResult, SessionYearModel, News, SComment\n'), ((12990, 13105), 'django.shortcuts.render', 'render', (['request', '"""student_template/all_notification.html"""', "{'notifications': notifications, 'student': student}"], {}), "(request, 'student_template/all_notification.html', {'notifications':\n notifications, 'student': student})\n", (12996, 13105), False, 'from django.shortcuts import render\n'), ((13144, 13187), 'school_management_app.models.Students.objects.get', 'Students.objects.get', ([], {'admin': 'request.user.id'}), '(admin=request.user.id)\n', (13164, 13187), False, 'from school_management_app.models import Students, Courses, Subjects, CustomUser, Attendance, AttendanceReport, LeaveReportStudent, FeedBackStudent, NotificationStudent, StudentResult, SessionYearModel, News, SComment\n'), ((13206, 13257), 'school_management_app.models.StudentResult.objects.filter', 'StudentResult.objects.filter', ([], {'student_id': 'student.id'}), '(student_id=student.id)\n', (13234, 13257), False, 'from school_management_app.models import Students, Courses, Subjects, CustomUser, Attendance, AttendanceReport, LeaveReportStudent, 
FeedBackStudent, NotificationStudent, StudentResult, SessionYearModel, News, SComment\n'), ((13267, 13309), 'school_management_app.models.CustomUser.objects.get', 'CustomUser.objects.get', ([], {'id': 'request.user.id'}), '(id=request.user.id)\n', (13289, 13309), False, 'from school_management_app.models import Students, Courses, Subjects, CustomUser, Attendance, AttendanceReport, LeaveReportStudent, FeedBackStudent, NotificationStudent, StudentResult, SessionYearModel, News, SComment\n'), ((13322, 13354), 'school_management_app.models.Students.objects.get', 'Students.objects.get', ([], {'admin': 'user'}), '(admin=user)\n', (13342, 13354), False, 'from school_management_app.models import Students, Courses, Subjects, CustomUser, Attendance, AttendanceReport, LeaveReportStudent, FeedBackStudent, NotificationStudent, StudentResult, SessionYearModel, News, SComment\n'), ((13379, 13422), 'school_management_app.models.Students.objects.get', 'Students.objects.get', ([], {'admin': 'request.user.id'}), '(admin=request.user.id)\n', (13399, 13422), False, 'from school_management_app.models import Students, Courses, Subjects, CustomUser, Attendance, AttendanceReport, LeaveReportStudent, FeedBackStudent, NotificationStudent, StudentResult, SessionYearModel, News, SComment\n'), ((13441, 13510), 'school_management_app.models.NotificationStudent.objects.filter', 'NotificationStudent.objects.filter', ([], {'student_id': 'student_notifcation.id'}), '(student_id=student_notifcation.id)\n', (13475, 13510), False, 'from school_management_app.models import Students, Courses, Subjects, CustomUser, Attendance, AttendanceReport, LeaveReportStudent, FeedBackStudent, NotificationStudent, StudentResult, SessionYearModel, News, SComment\n'), ((13522, 13667), 'django.shortcuts.render', 'render', (['request', '"""student_template/student_result.html"""', "{'studentresult': studentresult, 'student': student, 'notifications':\n notifications}"], {}), "(request, 
'student_template/student_result.html', {'studentresult':\n studentresult, 'student': student, 'notifications': notifications})\n", (13528, 13667), False, 'from django.shortcuts import render\n'), ((13690, 13732), 'school_management_app.models.CustomUser.objects.get', 'CustomUser.objects.get', ([], {'id': 'request.user.id'}), '(id=request.user.id)\n', (13712, 13732), False, 'from school_management_app.models import Students, Courses, Subjects, CustomUser, Attendance, AttendanceReport, LeaveReportStudent, FeedBackStudent, NotificationStudent, StudentResult, SessionYearModel, News, SComment\n'), ((13745, 13777), 'school_management_app.models.Students.objects.get', 'Students.objects.get', ([], {'admin': 'user'}), '(admin=user)\n', (13765, 13777), False, 'from school_management_app.models import Students, Courses, Subjects, CustomUser, Attendance, AttendanceReport, LeaveReportStudent, FeedBackStudent, NotificationStudent, StudentResult, SessionYearModel, News, SComment\n'), ((13802, 13845), 'school_management_app.models.Students.objects.get', 'Students.objects.get', ([], {'admin': 'request.user.id'}), '(admin=request.user.id)\n', (13822, 13845), False, 'from school_management_app.models import Students, Courses, Subjects, CustomUser, Attendance, AttendanceReport, LeaveReportStudent, FeedBackStudent, NotificationStudent, StudentResult, SessionYearModel, News, SComment\n'), ((13864, 13933), 'school_management_app.models.NotificationStudent.objects.filter', 'NotificationStudent.objects.filter', ([], {'student_id': 'student_notifcation.id'}), '(student_id=student_notifcation.id)\n', (13898, 13933), False, 'from school_management_app.models import Students, Courses, Subjects, CustomUser, Attendance, AttendanceReport, LeaveReportStudent, FeedBackStudent, NotificationStudent, StudentResult, SessionYearModel, News, SComment\n'), ((13945, 14051), 'django.shortcuts.render', 'render', (['request', '"""student_template/covid19.html"""', "{'student': student, 'notifications': 
notifications}"], {}), "(request, 'student_template/covid19.html', {'student': student,\n 'notifications': notifications})\n", (13951, 14051), False, 'from django.shortcuts import render\n'), ((1588, 1636), 'school_management_app.models.Attendance.objects.filter', 'Attendance.objects.filter', ([], {'subject_id': 'subject.id'}), '(subject_id=subject.id)\n', (1613, 1636), False, 'from school_management_app.models import Students, Courses, Subjects, CustomUser, Attendance, AttendanceReport, LeaveReportStudent, FeedBackStudent, NotificationStudent, StudentResult, SessionYearModel, News, SComment\n'), ((5070, 5113), 'school_management_app.models.Students.objects.get', 'Students.objects.get', ([], {'admin': 'request.user.id'}), '(admin=request.user.id)\n', (5090, 5113), False, 'from school_management_app.models import Students, Courses, Subjects, CustomUser, Attendance, AttendanceReport, LeaveReportStudent, FeedBackStudent, NotificationStudent, StudentResult, SessionYearModel, News, SComment\n'), ((6413, 6456), 'school_management_app.models.Students.objects.get', 'Students.objects.get', ([], {'admin': 'request.user.id'}), '(admin=request.user.id)\n', (6433, 6456), False, 'from school_management_app.models import Students, Courses, Subjects, CustomUser, Attendance, AttendanceReport, LeaveReportStudent, FeedBackStudent, NotificationStudent, StudentResult, SessionYearModel, News, SComment\n'), ((11013, 11059), 'django.contrib.messages.error', 'messages.error', (['request', '"""Method not allowed!"""'], {}), "(request, 'Method not allowed!')\n", (11027, 11059), False, 'from django.contrib import messages\n'), ((11926, 11969), 'django.http.HttpResponse', 'HttpResponse', (['"""<h2>Method Not Allowed</h2>"""'], {}), "('<h2>Method Not Allowed</h2>')\n", (11938, 11969), False, 'from django.http import HttpResponse, HttpResponseRedirect\n'), ((12526, 12569), 'school_management_app.models.Students.objects.get', 'Students.objects.get', ([], {'admin': 'request.user.id'}), 
'(admin=request.user.id)\n', (12546, 12569), False, 'from school_management_app.models import Students, Courses, Subjects, CustomUser, Attendance, AttendanceReport, LeaveReportStudent, FeedBackStudent, NotificationStudent, StudentResult, SessionYearModel, News, SComment\n'), ((12640, 12660), 'django.http.HttpResponse', 'HttpResponse', (['"""True"""'], {}), "('True')\n", (12652, 12660), False, 'from django.http import HttpResponse, HttpResponseRedirect\n'), ((624, 679), 'school_management_app.models.AttendanceReport.objects.filter', 'AttendanceReport.objects.filter', ([], {'student_id': 'student_obj'}), '(student_id=student_obj)\n', (655, 679), False, 'from school_management_app.models import Students, Courses, Subjects, CustomUser, Attendance, AttendanceReport, LeaveReportStudent, FeedBackStudent, NotificationStudent, StudentResult, SessionYearModel, News, SComment\n'), ((711, 779), 'school_management_app.models.AttendanceReport.objects.filter', 'AttendanceReport.objects.filter', ([], {'student_id': 'student_obj', 'status': '(True)'}), '(student_id=student_obj, status=True)\n', (742, 779), False, 'from school_management_app.models import Students, Courses, Subjects, CustomUser, Attendance, AttendanceReport, LeaveReportStudent, FeedBackStudent, NotificationStudent, StudentResult, SessionYearModel, News, SComment\n'), ((809, 878), 'school_management_app.models.AttendanceReport.objects.filter', 'AttendanceReport.objects.filter', ([], {'student_id': 'student_obj', 'status': '(False)'}), '(student_id=student_obj, status=False)\n', (840, 878), False, 'from school_management_app.models import Students, Courses, Subjects, CustomUser, Attendance, AttendanceReport, LeaveReportStudent, FeedBackStudent, NotificationStudent, StudentResult, SessionYearModel, News, SComment\n'), ((959, 1000), 'school_management_app.models.Subjects.objects.filter', 'Subjects.objects.filter', ([], {'course_id': 'course'}), '(course_id=course)\n', (982, 1000), False, 'from 
school_management_app.models import Students, Courses, Subjects, CustomUser, Attendance, AttendanceReport, LeaveReportStudent, FeedBackStudent, NotificationStudent, StudentResult, SessionYearModel, News, SComment\n'), ((3173, 3223), 'datetime.datetime.strptime', 'datetime.datetime.strptime', (['start_date', '"""%Y-%m-%d"""'], {}), "(start_date, '%Y-%m-%d')\n", (3199, 3223), False, 'import datetime\n'), ((3249, 3297), 'datetime.datetime.strptime', 'datetime.datetime.strptime', (['end_date', '"""%Y-%m-%d"""'], {}), "(end_date, '%Y-%m-%d')\n", (3275, 3297), False, 'import datetime\n'), ((4789, 4819), 'django.urls.reverse', 'reverse', (['"""student_apply_leave"""'], {}), "('student_apply_leave')\n", (4796, 4819), False, 'from django.urls import reverse\n'), ((5152, 5311), 'school_management_app.models.LeaveReportStudent', 'LeaveReportStudent', ([], {'student_id': 'student_obj', 'leave_start_date': 'leave_start_date', 'leave_end_date': 'leave_end_date', 'leave_message': 'leave_msg', 'leave_status': '(0)'}), '(student_id=student_obj, leave_start_date=\n leave_start_date, leave_end_date=leave_end_date, leave_message=\n leave_msg, leave_status=0)\n', (5170, 5311), False, 'from school_management_app.models import Students, Courses, Subjects, CustomUser, Attendance, AttendanceReport, LeaveReportStudent, FeedBackStudent, NotificationStudent, StudentResult, SessionYearModel, News, SComment\n'), ((5342, 5405), 'django.contrib.messages.success', 'messages.success', (['request', '"""Чөлөөний хүсэлт амжилттай илгээлээ"""'], {}), "(request, 'Чөлөөний хүсэлт амжилттай илгээлээ')\n", (5358, 5405), False, 'from django.contrib import messages\n'), ((6299, 6326), 'django.urls.reverse', 'reverse', (['"""student_feedback"""'], {}), "('student_feedback')\n", (6306, 6326), False, 'from django.urls import reverse\n'), ((6491, 6576), 'school_management_app.models.FeedBackStudent', 'FeedBackStudent', ([], {'student_id': 'student_obj', 'feedback': 'feedback_msg', 'feedback_reply': '""""""'}), 
"(student_id=student_obj, feedback=feedback_msg,\n feedback_reply='')\n", (6506, 6576), False, 'from school_management_app.models import Students, Courses, Subjects, CustomUser, Attendance, AttendanceReport, LeaveReportStudent, FeedBackStudent, NotificationStudent, StudentResult, SessionYearModel, News, SComment\n'), ((6611, 6671), 'django.contrib.messages.success', 'messages.success', (['request', '"""Санал хүсэлт амжилттай илгээлээ"""'], {}), "(request, 'Санал хүсэлт амжилттай илгээлээ')\n", (6627, 6671), False, 'from django.contrib import messages\n'), ((7416, 7442), 'django.urls.reverse', 'reverse', (['"""student_profile"""'], {}), "('student_profile')\n", (7423, 7442), False, 'from django.urls import reverse\n'), ((7761, 7780), 'django.core.files.storage.FileSystemStorage', 'FileSystemStorage', ([], {}), '()\n', (7778, 7780), False, 'from django.core.files.storage import FileSystemStorage\n'), ((7968, 8010), 'school_management_app.models.CustomUser.objects.get', 'CustomUser.objects.get', ([], {'id': 'request.user.id'}), '(id=request.user.id)\n', (7990, 8010), False, 'from school_management_app.models import Students, Courses, Subjects, CustomUser, Attendance, AttendanceReport, LeaveReportStudent, FeedBackStudent, NotificationStudent, StudentResult, SessionYearModel, News, SComment\n'), ((8247, 8285), 'school_management_app.models.Students.objects.get', 'Students.objects.get', ([], {'admin': 'customuser'}), '(admin=customuser)\n', (8267, 8285), False, 'from school_management_app.models import Students, Courses, Subjects, CustomUser, Attendance, AttendanceReport, LeaveReportStudent, FeedBackStudent, NotificationStudent, StudentResult, SessionYearModel, News, SComment\n'), ((8451, 8501), 'django.contrib.messages.success', 'messages.success', (['request', '"""Мэдээлэл шинэчлэгдлээ"""'], {}), "(request, 'Мэдээлэл шинэчлэгдлээ')\n", (8467, 8501), False, 'from django.contrib import messages\n'), ((8763, 8781), 'school_management_app.models.News.objects.all', 
'News.objects.all', ([], {}), '()\n', (8779, 8781), False, 'from school_management_app.models import Students, Courses, Subjects, CustomUser, Attendance, AttendanceReport, LeaveReportStudent, FeedBackStudent, NotificationStudent, StudentResult, SessionYearModel, News, SComment\n'), ((9370, 9419), 'school_management_app.models.SComment.objects.filter', 'SComment.objects.filter', ([], {'News': 'news_id', 'reply': 'None'}), '(News=news_id, reply=None)\n', (9393, 9419), False, 'from school_management_app.models import Students, Courses, Subjects, CustomUser, Attendance, AttendanceReport, LeaveReportStudent, FeedBackStudent, NotificationStudent, StudentResult, SessionYearModel, News, SComment\n'), ((10106, 10129), 'django.urls.reverse', 'reverse', (['"""student_news"""'], {}), "('student_news')\n", (10113, 10129), False, 'from django.urls import reverse\n'), ((10346, 10379), 'school_management_app.models.SComment.objects.get', 'SComment.objects.get', ([], {'id': 'reply_id'}), '(id=reply_id)\n', (10366, 10379), False, 'from school_management_app.models import Students, Courses, Subjects, CustomUser, Attendance, AttendanceReport, LeaveReportStudent, FeedBackStudent, NotificationStudent, StudentResult, SessionYearModel, News, SComment\n'), ((10413, 10489), 'school_management_app.models.SComment', 'SComment', ([], {'News_id': 'News', 'staff_id': 'staff', 'body': 'body', 'count': 'a', 'reply': 'comment_qs'}), '(News_id=News, staff_id=staff, body=body, count=a, reply=comment_qs)\n', (10421, 10489), False, 'from school_management_app.models import Students, Courses, Subjects, CustomUser, Attendance, AttendanceReport, LeaveReportStudent, FeedBackStudent, NotificationStudent, StudentResult, SessionYearModel, News, SComment\n'), ((10529, 10578), 'django.contrib.messages.success', 'messages.success', (['request', '"""Сэтгэгдэл нэмэгдлээ!"""'], {}), "(request, 'Сэтгэгдэл нэмэгдлээ!')\n", (10545, 10578), False, 'from django.contrib import messages\n'), ((11096, 11119), 
'django.urls.reverse', 'reverse', (['"""student_news"""'], {}), "('student_news')\n", (11103, 11119), False, 'from django.urls import reverse\n'), ((11301, 11336), 'school_management_app.models.SComment.objects.get', 'SComment.objects.get', ([], {'id': 'comment_id'}), '(id=comment_id)\n', (11321, 11336), False, 'from school_management_app.models import Students, Courses, Subjects, CustomUser, Attendance, AttendanceReport, LeaveReportStudent, FeedBackStudent, NotificationStudent, StudentResult, SessionYearModel, News, SComment\n'), ((11502, 11551), 'django.contrib.messages.success', 'messages.success', (['request', '"""Сэтгэгдэл засагдлаа!"""'], {}), "(request, 'Сэтгэгдэл засагдлаа!')\n", (11518, 11551), False, 'from django.contrib import messages\n'), ((12007, 12042), 'school_management_app.models.SComment.objects.get', 'SComment.objects.get', ([], {'id': 'comment_id'}), '(id=comment_id)\n', (12027, 12042), False, 'from school_management_app.models import Students, Courses, Subjects, CustomUser, Attendance, AttendanceReport, LeaveReportStudent, FeedBackStudent, NotificationStudent, StudentResult, SessionYearModel, News, SComment\n'), ((12078, 12135), 'django.contrib.messages.success', 'messages.success', (['request', '"""Сэтгэгдэл амжилттай устгалаа"""'], {}), "(request, 'Сэтгэгдэл амжилттай устгалаа')\n", (12094, 12135), False, 'from django.contrib import messages\n'), ((12688, 12709), 'django.http.HttpResponse', 'HttpResponse', (['"""False"""'], {}), "('False')\n", (12700, 12709), False, 'from django.http import HttpResponse, HttpResponseRedirect\n'), ((1670, 1775), 'school_management_app.models.AttendanceReport.objects.filter', 'AttendanceReport.objects.filter', ([], {'attendance_id__in': 'attendance', 'status': '(True)', 'student_id': 'student_obj.id'}), '(attendance_id__in=attendance, status=True,\n student_id=student_obj.id)\n', (1701, 1775), False, 'from school_management_app.models import Students, Courses, Subjects, CustomUser, Attendance, 
AttendanceReport, LeaveReportStudent, FeedBackStudent, NotificationStudent, StudentResult, SessionYearModel, News, SComment\n'), ((1810, 1916), 'school_management_app.models.AttendanceReport.objects.filter', 'AttendanceReport.objects.filter', ([], {'attendance_id__in': 'attendance', 'status': '(False)', 'student_id': 'student_obj.id'}), '(attendance_id__in=attendance, status=False,\n student_id=student_obj.id)\n', (1841, 1916), False, 'from school_management_app.models import Students, Courses, Subjects, CustomUser, Attendance, AttendanceReport, LeaveReportStudent, FeedBackStudent, NotificationStudent, StudentResult, SessionYearModel, News, SComment\n'), ((5446, 5476), 'django.urls.reverse', 'reverse', (['"""student_apply_leave"""'], {}), "('student_apply_leave')\n", (5453, 5476), False, 'from django.urls import reverse\n'), ((5506, 5561), 'django.contrib.messages.error', 'messages.error', (['request', '"""Хүсэлт илгээхэд алдаа гарлаа"""'], {}), "(request, 'Хүсэлт илгээхэд алдаа гарлаа')\n", (5520, 5561), False, 'from django.contrib import messages\n'), ((6712, 6739), 'django.urls.reverse', 'reverse', (['"""student_feedback"""'], {}), "('student_feedback')\n", (6719, 6739), False, 'from django.urls import reverse\n'), ((6769, 6830), 'django.contrib.messages.error', 'messages.error', (['request', '"""Санал хүсэлт илгээхэд алдаа гарлаа"""'], {}), "(request, 'Санал хүсэлт илгээхэд алдаа гарлаа')\n", (6783, 6830), False, 'from django.contrib import messages\n'), ((8542, 8568), 'django.urls.reverse', 'reverse', (['"""student_profile"""'], {}), "('student_profile')\n", (8549, 8568), False, 'from django.urls import reverse\n'), ((8598, 8657), 'django.contrib.messages.error', 'messages.error', (['request', '"""Мэдээлэл шинэчлэхэд алдаа гарлаа"""'], {}), "(request, 'Мэдээлэл шинэчлэхэд алдаа гарлаа')\n", (8612, 8657), False, 'from django.contrib import messages\n'), ((10619, 10673), 'django.urls.reverse', 'reverse', (['"""view_student_news"""'], {'kwargs': "{'news_id': 
News}"}), "('view_student_news', kwargs={'news_id': News})\n", (10626, 10673), False, 'from django.urls import reverse\n'), ((10701, 10758), 'django.contrib.messages.error', 'messages.error', (['request', '"""Сэтгэгдэл нэмэхэд алдаа гарлаа"""'], {}), "(request, 'Сэтгэгдэл нэмэхэд алдаа гарлаа')\n", (10715, 10758), False, 'from django.contrib import messages\n'), ((11592, 11646), 'django.urls.reverse', 'reverse', (['"""view_student_news"""'], {'kwargs': "{'news_id': News}"}), "('view_student_news', kwargs={'news_id': News})\n", (11599, 11646), False, 'from django.urls import reverse\n'), ((11674, 11731), 'django.contrib.messages.error', 'messages.error', (['request', '"""Сэтгэгдэл засахад алдаа гарлаа"""'], {}), "(request, 'Сэтгэгдэл засахад алдаа гарлаа')\n", (11688, 11731), False, 'from django.contrib import messages\n'), ((12175, 12232), 'django.urls.reverse', 'reverse', (['"""view_student_news"""'], {'kwargs': "{'news_id': news_id}"}), "('view_student_news', kwargs={'news_id': news_id})\n", (12182, 12232), False, 'from django.urls import reverse\n'), ((12260, 12318), 'django.contrib.messages.error', 'messages.error', (['request', '"""Сэтгэгдэл устгахад алдаа гарлаа"""'], {}), "(request, 'Сэтгэгдэл устгахад алдаа гарлаа')\n", (12274, 12318), False, 'from django.contrib import messages\n'), ((5602, 5632), 'django.urls.reverse', 'reverse', (['"""student_apply_leave"""'], {}), "('student_apply_leave')\n", (5609, 5632), False, 'from django.urls import reverse\n'), ((6871, 6898), 'django.urls.reverse', 'reverse', (['"""student_feedback"""'], {}), "('student_feedback')\n", (6878, 6898), False, 'from django.urls import reverse\n'), ((8698, 8724), 'django.urls.reverse', 'reverse', (['"""student_profile"""'], {}), "('student_profile')\n", (8705, 8724), False, 'from django.urls import reverse\n'), ((10799, 10853), 'django.urls.reverse', 'reverse', (['"""view_student_news"""'], {'kwargs': "{'news_id': News}"}), "('view_student_news', kwargs={'news_id': News})\n", (10806, 
10853), False, 'from django.urls import reverse\n'), ((11772, 11826), 'django.urls.reverse', 'reverse', (['"""view_student_news"""'], {'kwargs': "{'news_id': News}"}), "('view_student_news', kwargs={'news_id': News})\n", (11779, 11826), False, 'from django.urls import reverse\n'), ((12358, 12415), 'django.urls.reverse', 'reverse', (['"""view_student_news"""'], {'kwargs': "{'news_id': news_id}"}), "('view_student_news', kwargs={'news_id': news_id})\n", (12365, 12415), False, 'from django.urls import reverse\n')]
|
"""HomeControl representation of ESPHome entities"""
from typing import TYPE_CHECKING, Any, Dict, Tuple
import voluptuous as vol
from homecontrol.dependencies.entity_types import Item
from homecontrol.dependencies.state_proxy import StateDef, StateProxy
from homecontrol.modules.switch.module import Switch
if TYPE_CHECKING:
from homecontrol.core import Core
from .module import ESPHomeDevice
from aioesphomeapi.model import (
BinarySensorInfo, BinarySensorState,
EntityInfo, EntityState,
FanInfo, FanState,
LightInfo, LightState,
SensorInfo, SensorState,
SwitchInfo, SwitchState)
class ESPHomeItem(Item):
    """HomeControl representation for esphome entities"""
    device: "ESPHomeDevice"
    entity: "EntityInfo"
    type: str = "esphome.ESPHomeItem"

    # pylint: disable=arguments-differ
    @classmethod
    async def constructor(
            cls, identifier: str, name: str,
            core: "Core", unique_identifier: str, device: "ESPHomeDevice",
            entity: "EntityInfo"
    ) -> "ESPHomeItem":
        """Builds an item instance wired to its esphome device and core."""
        item = cls()
        item.device = device
        item.entity = entity
        item.core = core
        item.identifier = identifier
        item.unique_identifier = unique_identifier
        item.name = name
        item.module = core.modules.esphome
        # Every attribute tagged with an ``action_name`` becomes an action
        item.actions = {
            member.action_name: member
            for member in (getattr(item, attr) for attr in dir(item))
            if hasattr(member, "action_name")
        }
        item.states = StateProxy(item, core)
        return item

    def update_state(self, state: "EntityState") -> None:
        """Callback for state updates from esphome"""
class SwitchItem(Switch, ESPHomeItem):
    """An esphome switch"""
    entity: "SwitchInfo"
    type: str = "esphome.SwitchItem"

    async def set_on(self, value: bool) -> Dict[str, Any]:
        """Relays the requested on/off value to the esphome device."""
        key = self.entity.key
        await self.device.api.switch_command(key, value)
        return {}

    def update_state(self, state: "SwitchState") -> None:
        """Mirrors the switch state reported by the device."""
        reported = state.state
        self.states.update("on", reported)
class BinarySensorItem(ESPHomeItem):
    """An esphome binary_sensor"""
    entity: "BinarySensorInfo"
    type: str = "esphome.BinarySensorItem"

    on = StateDef()

    def update_state(self, state: "BinarySensorState") -> None:
        """Publishes the sensor's boolean reading into the state proxy."""
        reading = state.state
        self.states.update("on", reading)
class SensorItem(ESPHomeItem):
    """An esphome sensor"""
    entity: "SensorInfo"
    type: str = "esphome.SensorItem"

    value = StateDef()

    def update_state(self, state: "SensorState") -> None:
        """Publishes the sensor's latest reading into the state proxy."""
        reading = state.state
        self.states.update("value", reading)
class FanItem(ESPHomeItem):
    """An esphome fan"""
    entity: "FanInfo"
    type: str = "esphome.FanItem"

    on = StateDef()
    oscillating = StateDef()
    speed = StateDef()

    def update_state(self, state: "FanState") -> None:
        """Pushes all reported fan attributes into the state proxy at once."""
        changes = {
            "on": state.state,
            "oscillating": state.oscillating,
            "speed": state.speed,
        }
        self.states.bulk_update(**changes)
class LightItem(ESPHomeItem):
    """An esphome light"""
    entity: "LightInfo"
    type: str = "esphome.LightItem"
    on = StateDef()
    brightness = StateDef()
    color_temperature = StateDef()
    rgb = StateDef()
    white = StateDef()
    @on.setter(vol.Schema(bool))
    async def set_on(self, value: bool) -> Dict[str, Any]:
        """Sets the on state"""
        await self.device.api.light_command(self.entity.key, value)
        return {}
    @brightness.setter(vol.Schema(vol.Coerce(float)))
    async def set_brightness(self, brightness: float) -> Dict[str, Any]:
        """Sets the brightness state"""
        # bool(brightness): a brightness of 0 also requests state=False
        await self.device.api.light_command(
            self.entity.key, bool(brightness), brightness=brightness)
        return {}
    @rgb.setter(vol.Schema(
        [vol.Coerce(float), vol.Coerce(float), vol.Coerce(float)]))
    async def set_rgb(self, rgb: Tuple[float, float, float]) -> Dict[str, Any]:
        """Sets the rgb state"""
        # ``all(rgb) or None``: state=True only when every channel is
        # non-zero; otherwise None is sent — presumably leaving the on/off
        # state untouched (TODO confirm against aioesphomeapi semantics)
        await self.device.api.light_command(
            self.entity.key, all(rgb) or None, rgb=rgb)
        return {}
    @white.setter(vol.Schema(vol.Coerce(float)))
    async def set_white(self, white: float) -> Dict[str, Any]:
        """Sets the white state"""
        # white == 0 maps to state=None (channel update without switching on)
        await self.device.api.light_command(
            self.entity.key, bool(white) or None, white=white)
        return {}
    def update_state(self, state: "LightState") -> None:
        """Callback for state updates from esphome"""
        self.states.bulk_update(
            on=state.state,
            brightness=state.brightness,
            rgb=(state.red, state.green, state.blue),
            white=state.white
        )
# Maps aioesphomeapi entity-info class names to their HomeControl item types
ENTITY_TYPES = dict(
    SwitchInfo=SwitchItem,
    BinarySensorInfo=BinarySensorItem,
    FanInfo=FanItem,
    LightInfo=LightItem,
    SensorInfo=SensorItem,
)
|
[
"homecontrol.dependencies.state_proxy.StateDef",
"voluptuous.Schema",
"homecontrol.dependencies.state_proxy.StateProxy",
"voluptuous.Coerce"
] |
[((2279, 2289), 'homecontrol.dependencies.state_proxy.StateDef', 'StateDef', ([], {}), '()\n', (2287, 2289), False, 'from homecontrol.dependencies.state_proxy import StateDef, StateProxy\n'), ((2537, 2547), 'homecontrol.dependencies.state_proxy.StateDef', 'StateDef', ([], {}), '()\n', (2545, 2547), False, 'from homecontrol.dependencies.state_proxy import StateDef, StateProxy\n'), ((2777, 2787), 'homecontrol.dependencies.state_proxy.StateDef', 'StateDef', ([], {}), '()\n', (2785, 2787), False, 'from homecontrol.dependencies.state_proxy import StateDef, StateProxy\n'), ((2806, 2816), 'homecontrol.dependencies.state_proxy.StateDef', 'StateDef', ([], {}), '()\n', (2814, 2816), False, 'from homecontrol.dependencies.state_proxy import StateDef, StateProxy\n'), ((2829, 2839), 'homecontrol.dependencies.state_proxy.StateDef', 'StateDef', ([], {}), '()\n', (2837, 2839), False, 'from homecontrol.dependencies.state_proxy import StateDef, StateProxy\n'), ((3169, 3179), 'homecontrol.dependencies.state_proxy.StateDef', 'StateDef', ([], {}), '()\n', (3177, 3179), False, 'from homecontrol.dependencies.state_proxy import StateDef, StateProxy\n'), ((3197, 3207), 'homecontrol.dependencies.state_proxy.StateDef', 'StateDef', ([], {}), '()\n', (3205, 3207), False, 'from homecontrol.dependencies.state_proxy import StateDef, StateProxy\n'), ((3232, 3242), 'homecontrol.dependencies.state_proxy.StateDef', 'StateDef', ([], {}), '()\n', (3240, 3242), False, 'from homecontrol.dependencies.state_proxy import StateDef, StateProxy\n'), ((3253, 3263), 'homecontrol.dependencies.state_proxy.StateDef', 'StateDef', ([], {}), '()\n', (3261, 3263), False, 'from homecontrol.dependencies.state_proxy import StateDef, StateProxy\n'), ((3276, 3286), 'homecontrol.dependencies.state_proxy.StateDef', 'StateDef', ([], {}), '()\n', (3284, 3286), False, 'from homecontrol.dependencies.state_proxy import StateDef, StateProxy\n'), ((1581, 1603), 'homecontrol.dependencies.state_proxy.StateProxy', 'StateProxy', 
(['item', 'core'], {}), '(item, core)\n', (1591, 1603), False, 'from homecontrol.dependencies.state_proxy import StateDef, StateProxy\n'), ((3303, 3319), 'voluptuous.Schema', 'vol.Schema', (['bool'], {}), '(bool)\n', (3313, 3319), True, 'import voluptuous as vol\n'), ((3533, 3550), 'voluptuous.Coerce', 'vol.Coerce', (['float'], {}), '(float)\n', (3543, 3550), True, 'import voluptuous as vol\n'), ((4158, 4175), 'voluptuous.Coerce', 'vol.Coerce', (['float'], {}), '(float)\n', (4168, 4175), True, 'import voluptuous as vol\n'), ((3837, 3854), 'voluptuous.Coerce', 'vol.Coerce', (['float'], {}), '(float)\n', (3847, 3854), True, 'import voluptuous as vol\n'), ((3856, 3873), 'voluptuous.Coerce', 'vol.Coerce', (['float'], {}), '(float)\n', (3866, 3873), True, 'import voluptuous as vol\n'), ((3875, 3892), 'voluptuous.Coerce', 'vol.Coerce', (['float'], {}), '(float)\n', (3885, 3892), True, 'import voluptuous as vol\n')]
|
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
#
# Copyright (c) SAS Institute, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
from __future__ import absolute_import
from __future__ import print_function
from os.path import dirname
from os.path import join
import io
from setuptools import setup
def read(*names, **kwargs):
    """Return the text contents of a file next to this setup script.

    *names* are path segments joined onto this file's directory; the
    ``encoding`` keyword argument (default ``'utf-8'``) selects the text
    encoding.
    """
    # Use a context manager so the file handle is closed deterministically
    # (the original left it to garbage collection).
    with io.open(join(dirname(__file__), *names),
                 encoding=kwargs.get('encoding', 'utf-8')) as handle:
        return handle.read()
# Single source of truth for the release number used below.
Version = "0.16.0"
# Runtime dependencies: six supplies the Python 2/3 compatibility shims
# (the classifiers below advertise support for both major versions).
install_requires = ["six"]
# Package metadata and entry points for the epdb debugger distribution.
setup(name="epdb",
      version=Version,
      description="Enhanced Python Debugger",
      long_description=read('README.rst'),
      author="SAS Institute, Inc.",
      author_email="<EMAIL>",
      url="https://github.com/sassoftware/epdb",
      packages=['epdb'],
      license='MIT',
      platforms='Posix; MacOS X; Windows',
      classifiers=['Operating System :: OS Independent',
                   'Intended Audience :: Developers',
                   'License :: OSI Approved :: MIT License',
                   'Topic :: Software Development :: Debuggers',
                   'Programming Language :: Python :: 2',
                   'Programming Language :: Python :: 3',
                   ],
      keywords='debugger pdb remote',
      entry_points={
          # Nose plugin: drops into epdb on test failures.
          'nose.plugins.0.10': [
              'epdb-debug = epdb.epdb_nose:Epdb',
          ],
          # Command-line client for attaching to a remote epdb session.
          'console_scripts': [
              'epdb = epdb.epdb_client:main',
          ],
      },
      install_requires=install_requires,
      )
|
[
"os.path.dirname"
] |
[((1370, 1387), 'os.path.dirname', 'dirname', (['__file__'], {}), '(__file__)\n', (1377, 1387), False, 'from os.path import dirname\n')]
|
import dbus
from .base import BluetoothBase
from .constants import *
class BluetoothMediaPlayer(BluetoothBase):
    """Wrapper for a BlueZ ``org.bluez.MediaPlayer1`` object on D-Bus.

    Provides AVRCP playback control (Play/Pause/Next/...) and typed access
    to the player's properties (status, position, track metadata,
    repeat/shuffle modes, ...).
    """

    # Metadata keys the player may report, mapped to the Python type each
    # D-Bus value is coerced to by the ``Track`` property.
    TRACK_TYPES = {"Title": str,
                   "Artist": str,
                   "Album": str,
                   "Genre": str,
                   "NumberOfTracks": int,
                   "TrackNumber": int,
                   "Duration": int,}
    # Valid values for the corresponding properties below.
    EQUALIZER_OFF = "off"
    EQUALIZER_ON = "on"
    REPEAT_OFF = "off"
    REPEAT_SINGLE_TRACK = "singletrack"
    REPEAT_ALL_TRACKS = "alltracks"
    REPEAT_GROUP = "group"
    SHUFFLE_OFF = "off"
    SHUFFLE_ALL_TRACKS = "alltracks"
    SHUFFLE_GROUP = "group"
    SCAN_OFF = "off"
    SCAN_ALL_TRACKS = "alltracks"
    SCAN_GROUP = "group"
    STATUS_PLAYING = "playing"
    STATUS_STOPPED = "stopped"
    STATUS_PAUSED = "paused"
    STATUS_FORWARD_SEEK = "forward-seek"
    STATUS_REVERSE_SEEK = "reverse-seek"
    STATUS_ERROR = "error"
    TYPE_AUDIO = "Audio"
    TYPE_VIDEO = "Video"
    TYPE_AUDIO_BROADCASTING = "Audio Broadcasting"
    TYPE_VIDEO_BROADCASTING = "Video Broadcasting"
    SUBTYPE_AUDIOBOOK = "Audio Book"
    SUBTYPE_PODCAST = "Podcast"

    def __init__(self, path):
        """Bind to the media player exported at *path* on the system bus."""
        super(BluetoothMediaPlayer, self).__init__()
        self.device = dbus.SystemBus().get_object(SERVICE_NAME, path)
        self.interface = dbus.Interface(self.device, PLAYER_IFACE)
        self.props = dbus.Interface(self.device, PROPERTIES_IFACE)
        # Shorthands for reading/writing properties on the player interface.
        self.get = lambda prop: self.props.Get(PLAYER_IFACE, prop)
        self.set = lambda prop, value: self.props.Set(PLAYER_IFACE, prop, value)
        # Returned (as a copy) by ``Track`` when no usable metadata exists.
        self.meta_template = {'Album': '',
                              'NumberOfTracks': 0,
                              'Title': '',
                              'Artist': '',
                              'Duration': 0,
                              'Genre': '',
                              'TrackNumber': 0}

    def __repr__(self):
        # Narrowed from a bare ``except:`` so KeyboardInterrupt/SystemExit
        # are not swallowed while formatting the repr.
        try:
            n = self.Name
        except Exception:
            n = "no name"
        return ("<pybtooth.media_player.BluetoothMediaPlayer "
                "(name='{n}')>").format(n=n)

    # Methods

    def Play(self):
        """Start or resume playback."""
        self.interface.Play()

    def Stop(self):
        """Stop playback."""
        self.interface.Stop()

    def Pause(self):
        """Pause playback."""
        self.interface.Pause()

    def Next(self):
        """Skip to the next track."""
        self.interface.Next()

    def Previous(self):
        """Skip to the previous track."""
        self.interface.Previous()

    def FastForward(self):
        """Fast-forward the current track."""
        self.interface.FastForward()

    def Rewind(self):
        """Rewind the current track."""
        self.interface.Rewind()

    # Properties

    @property
    def Equalizer(self):
        """Equalizer mode; one of the EQUALIZER_* constants."""
        return str(self.get("Equalizer"))

    @Equalizer.setter
    def Equalizer(self, value):
        self.set("Equalizer", value)

    @property
    def Repeat(self):
        """Repeat mode; one of the REPEAT_* constants."""
        return str(self.get("Repeat"))

    @Repeat.setter
    def Repeat(self, value):
        self.set("Repeat", value)

    @property
    def Shuffle(self):
        """Shuffle mode; one of the SHUFFLE_* constants."""
        return str(self.get("Shuffle"))

    @Shuffle.setter
    def Shuffle(self, value):
        self.set("Shuffle", value)

    @property
    def Scan(self):
        """Scan mode; one of the SCAN_* constants."""
        return str(self.get("Scan"))

    @Scan.setter
    def Scan(self, value):
        self.set("Scan", value)

    @property
    def Status(self):
        """Playback status; one of the STATUS_* constants."""
        return str(self.get("Status"))

    @property
    def Position(self):
        """Current playback position as reported by the player."""
        return int(self.get("Position"))

    @property
    def Track(self):
        """Current track metadata coerced to plain Python types.

        Falls back to a copy of ``meta_template`` when the player reports
        no (or malformed) metadata.
        """
        try:
            meta = self.get("Track")
            # Fix: use .items() instead of the Python-2-only .iteritems().
            # On Python 3 the old call raised AttributeError, which the
            # bare ``except:`` swallowed, so this property always returned
            # the empty template.
            return {str(k): self.TRACK_TYPES[str(k)](v)
                    for k, v in meta.items()}
        except Exception:
            return self.meta_template.copy()

    @property
    def Device(self):
        """The D-Bus object path of the device hosting this player."""
        return self.get("Device")

    @property
    def Type(self):
        """Player type; one of the TYPE_* constants."""
        return str(self.get("Type"))

    @property
    def Subtype(self):
        """Player subtype; one of the SUBTYPE_* constants."""
        return str(self.get("Subtype"))

    @property
    def Browsable(self):
        """Whether the player's media library can be browsed."""
        return bool(self.get("Browsable"))

    @property
    def Searchable(self):
        """Whether the player's media library can be searched."""
        return bool(self.get("Searchable"))

    @property
    def Playlist(self):
        """The player's current playlist object."""
        return self.get("Playlist")

    @property
    def Name(self):
        """The human-readable player name."""
        return str(self.get("Name"))

    @property
    def Metadata(self):
        """Alias for ``Track``."""
        return self.Track
|
[
"dbus.SystemBus",
"dbus.Interface"
] |
[((1318, 1359), 'dbus.Interface', 'dbus.Interface', (['self.device', 'PLAYER_IFACE'], {}), '(self.device, PLAYER_IFACE)\n', (1332, 1359), False, 'import dbus\n'), ((1381, 1426), 'dbus.Interface', 'dbus.Interface', (['self.device', 'PROPERTIES_IFACE'], {}), '(self.device, PROPERTIES_IFACE)\n', (1395, 1426), False, 'import dbus\n'), ((1245, 1261), 'dbus.SystemBus', 'dbus.SystemBus', ([], {}), '()\n', (1259, 1261), False, 'import dbus\n')]
|
import tensorflow as tf
from keras import Model
from keras.layers import Convolution2D, BatchNormalization, Activation, Add, Dense
from keras.models import load_model
from tensorforce.core.networks import Network
from keras.engine import Input
class PommNetwork(Network):
    """Tensorforce network that wraps a pre-trained Keras dense policy."""

    def tf_apply(self, x, internals, update, return_internals=False):
        """Builds the Keras graph over the observation and returns its output."""
        policy_out = self.create_network(x)
        # TODO maybe {} is not ok
        return (policy_out, {}) if return_internals else policy_out

    @staticmethod
    def create_network(board):
        """Assembles the dense policy head and loads the pre-trained weights."""
        inp = Input(tensor=board['board'])
        hidden = Activation('relu')(Dense(8)(inp))
        hidden = Activation('relu')(Dense(8)(hidden))
        out = Activation('softmax')(Dense(6)(hidden))
        # The Model wrapper exists only so the saved weights can be
        # restored; the symbolic output tensor is what callers consume.
        model = Model(inputs=inp, outputs=out)
        model.load_weights('./dqn/model/ddgp_dense_8_2/model.h4')
        return out
|
[
"keras.layers.Activation",
"keras.engine.Input",
"keras.Model",
"keras.layers.Dense"
] |
[((567, 595), 'keras.engine.Input', 'Input', ([], {'tensor': "board['board']"}), "(tensor=board['board'])\n", (572, 595), False, 'from keras.engine import Input\n'), ((806, 836), 'keras.Model', 'Model', ([], {'inputs': 'inp', 'outputs': 'out'}), '(inputs=inp, outputs=out)\n', (811, 836), False, 'from keras import Model\n'), ((611, 619), 'keras.layers.Dense', 'Dense', (['(8)'], {}), '(8)\n', (616, 619), False, 'from keras.layers import Convolution2D, BatchNormalization, Activation, Add, Dense\n'), ((638, 656), 'keras.layers.Activation', 'Activation', (['"""relu"""'], {}), "('relu')\n", (648, 656), False, 'from keras.layers import Convolution2D, BatchNormalization, Activation, Add, Dense\n'), ((673, 681), 'keras.layers.Dense', 'Dense', (['(8)'], {}), '(8)\n', (678, 681), False, 'from keras.layers import Convolution2D, BatchNormalization, Activation, Add, Dense\n'), ((698, 716), 'keras.layers.Activation', 'Activation', (['"""relu"""'], {}), "('relu')\n", (708, 716), False, 'from keras.layers import Convolution2D, BatchNormalization, Activation, Add, Dense\n'), ((735, 743), 'keras.layers.Dense', 'Dense', (['(6)'], {}), '(6)\n', (740, 743), False, 'from keras.layers import Convolution2D, BatchNormalization, Activation, Add, Dense\n'), ((762, 783), 'keras.layers.Activation', 'Activation', (['"""softmax"""'], {}), "('softmax')\n", (772, 783), False, 'from keras.layers import Convolution2D, BatchNormalization, Activation, Add, Dense\n')]
|
import cv2
import sys
import numpy as np
import pyperclip as ppc
from tkinter import filedialog
from tkinter import *
def record_click(event,x,y,flags,param):
global mouseX,mouseY
if event == cv2.EVENT_LBUTTONDBLCLK:
mouseX,mouseY = x,y
point = "[" + str(mouseX) + ", " + str(mouseY) + "]"
cv2.drawMarker(img, (x, y), (0, 0, 255), markerSize=10, thickness=1)
blank = np.zeros((64,172,3), np.uint8)
cv2.putText(blank, point, (2, 20), cv2.FONT_HERSHEY_TRIPLEX, 0.75, (0, 0, 255))
cv2.imshow("Point", blank)
k=cv2.waitKey(10) & 0XFF
failed = False
if len(sys.argv) == 2:
file = str(sys.argv[1])
img = cv2.imread(file)
if not img.any():
failed = True
if len(sys.argv) != 2 or failed:
root = Tk()
root.filename = filedialog.askopenfilename(initialdir = ".",title = "Select file",filetypes = (("png files","*.png"),("jpeg files","*.jpg"),("all files","*.*")))
file = root.filename
img = cv2.imread(file)
root.destroy()
height, width, layers = img.shape
cv2.namedWindow("Select Points")
cv2.namedWindow("Point")
cv2.moveWindow("Point", width+132, 38)
cv2.setMouseCallback("Select Points",record_click)
points = "["
coord = ""
while(1):
cv2.imshow("Select Points",img)
k = cv2.waitKey(20) & 0xFF
if k == ord('\r'):
break
elif k == ord('s'):
coord = "[" + str(mouseX) + ", " + str(mouseY) + "], "
points += coord
print(coord[:-2], " - saved")
elif k == ord('\b'):
points = points[:-len(coord)]
print(coord[:-2], " - removed")
if len(points) > 3:
points = points[:-2]
points += "]"
print(points)
ppc.copy(points)
|
[
"cv2.putText",
"cv2.waitKey",
"numpy.zeros",
"tkinter.filedialog.askopenfilename",
"cv2.drawMarker",
"cv2.imread",
"cv2.setMouseCallback",
"pyperclip.copy",
"cv2.moveWindow",
"cv2.imshow",
"cv2.namedWindow"
] |
[((1004, 1036), 'cv2.namedWindow', 'cv2.namedWindow', (['"""Select Points"""'], {}), "('Select Points')\n", (1019, 1036), False, 'import cv2\n'), ((1037, 1061), 'cv2.namedWindow', 'cv2.namedWindow', (['"""Point"""'], {}), "('Point')\n", (1052, 1061), False, 'import cv2\n'), ((1062, 1102), 'cv2.moveWindow', 'cv2.moveWindow', (['"""Point"""', '(width + 132)', '(38)'], {}), "('Point', width + 132, 38)\n", (1076, 1102), False, 'import cv2\n'), ((1101, 1152), 'cv2.setMouseCallback', 'cv2.setMouseCallback', (['"""Select Points"""', 'record_click'], {}), "('Select Points', record_click)\n", (1121, 1152), False, 'import cv2\n'), ((1601, 1617), 'pyperclip.copy', 'ppc.copy', (['points'], {}), '(points)\n', (1609, 1617), True, 'import pyperclip as ppc\n'), ((646, 662), 'cv2.imread', 'cv2.imread', (['file'], {}), '(file)\n', (656, 662), False, 'import cv2\n'), ((761, 912), 'tkinter.filedialog.askopenfilename', 'filedialog.askopenfilename', ([], {'initialdir': '"""."""', 'title': '"""Select file"""', 'filetypes': "(('png files', '*.png'), ('jpeg files', '*.jpg'), ('all files', '*.*'))"}), "(initialdir='.', title='Select file', filetypes=(\n ('png files', '*.png'), ('jpeg files', '*.jpg'), ('all files', '*.*')))\n", (787, 912), False, 'from tkinter import filedialog\n'), ((936, 952), 'cv2.imread', 'cv2.imread', (['file'], {}), '(file)\n', (946, 952), False, 'import cv2\n'), ((1191, 1223), 'cv2.imshow', 'cv2.imshow', (['"""Select Points"""', 'img'], {}), "('Select Points', img)\n", (1201, 1223), False, 'import cv2\n'), ((315, 383), 'cv2.drawMarker', 'cv2.drawMarker', (['img', '(x, y)', '(0, 0, 255)'], {'markerSize': '(10)', 'thickness': '(1)'}), '(img, (x, y), (0, 0, 255), markerSize=10, thickness=1)\n', (329, 383), False, 'import cv2\n'), ((397, 429), 'numpy.zeros', 'np.zeros', (['(64, 172, 3)', 'np.uint8'], {}), '((64, 172, 3), np.uint8)\n', (405, 429), True, 'import numpy as np\n'), ((433, 512), 'cv2.putText', 'cv2.putText', (['blank', 'point', '(2, 20)', 
'cv2.FONT_HERSHEY_TRIPLEX', '(0.75)', '(0, 0, 255)'], {}), '(blank, point, (2, 20), cv2.FONT_HERSHEY_TRIPLEX, 0.75, (0, 0, 255))\n', (444, 512), False, 'import cv2\n'), ((518, 544), 'cv2.imshow', 'cv2.imshow', (['"""Point"""', 'blank'], {}), "('Point', blank)\n", (528, 544), False, 'import cv2\n'), ((1231, 1246), 'cv2.waitKey', 'cv2.waitKey', (['(20)'], {}), '(20)\n', (1242, 1246), False, 'import cv2\n'), ((552, 567), 'cv2.waitKey', 'cv2.waitKey', (['(10)'], {}), '(10)\n', (563, 567), False, 'import cv2\n')]
|
# -*- coding: utf-8 -*-
import os
from textwrap import fill
def wrap(text):
filled = fill(str(text[1]), width=120, initial_indent='# ' + text[0] + ': ', subsequent_indent='# ')
return '\n' + filled + '\n'
class ToPython(object):
def process_item(self, item, spider):
if spider.path:
path = os.path.expanduser(spider.path)
if os.path.exists(path):
new_path = path + '/%i-%s.py' % (item.pop('number'), item.pop('title'))
with open(new_path, 'w+') as f:
f.write('# -*- coding: utf-8 -*-\n')
for formatted in [wrap(x) for x in item.items()]:
f.write(formatted)
print("\033[31mThis file doesn't exists: %s\033[m" % spider.path)
return item
|
[
"os.path.expanduser",
"os.path.exists"
] |
[((327, 358), 'os.path.expanduser', 'os.path.expanduser', (['spider.path'], {}), '(spider.path)\n', (345, 358), False, 'import os\n'), ((374, 394), 'os.path.exists', 'os.path.exists', (['path'], {}), '(path)\n', (388, 394), False, 'import os\n')]
|
from credentials import Credential
import unittest
class TestCredentials(unittest.TestCase):
"""
Class for testing credentials methods and behaviours
"""
def setUp(self):
"""
Create new instance of credential
"""
self.new_credential = Credential("Instagram", "victormainak", "password")
def test_init_(self):
"""
Test case to check if new credential has been properly instantiated
"""
self.assertEqual(self.new_credential.account_name, "Instagram")
self.assertEqual(self.new_credential.user_name, "victormainak")
self.assertEqual(self.new_credential.password, "password")
if __name__ == "__main__":
unittest.main()
|
[
"unittest.main",
"credentials.Credential"
] |
[((710, 725), 'unittest.main', 'unittest.main', ([], {}), '()\n', (723, 725), False, 'import unittest\n'), ((284, 335), 'credentials.Credential', 'Credential', (['"""Instagram"""', '"""victormainak"""', '"""password"""'], {}), "('Instagram', 'victormainak', 'password')\n", (294, 335), False, 'from credentials import Credential\n')]
|
from typing import Optional, Union
from aiohttp import ClientSession, ClientTimeout # type: ignore
from asgard import conf
default_http_client_timeout = ClientTimeout(
total=conf.ASGARD_HTTP_CLIENT_TOTAL_TIMEOUT,
connect=conf.ASGARD_HTTP_CLIENT_CONNECT_TIMEOUT,
)
class _HttpClient:
_session: Optional[ClientSession]
def __init__(
self,
session_class,
url: str,
method: str,
session_class_args=[],
session_class_kwargs={},
*args,
**kwargs,
) -> None:
self._session = None
self._session_class = session_class
self._url = url
self._args = args
self._kwargs = kwargs
self._method = method
self._session_class_args = session_class_args
self._session_class_kwargs = session_class_kwargs
async def __aenter__(self):
if not self._session:
self._session = self._session_class(
*self._session_class_args, **self._session_class_kwargs
)
return await self._return_session_method(self._session, self._method)(
self._url, *self._args, **self._kwargs
)
def _return_session_method(self, session, method_name):
return getattr(session, method_name.lower())
async def __aexit__(self, exc_type, exc_value, exc_tb):
await self._session.close()
class _HttpClientMaker:
def __init__(self, session_class, *args, **kwargs):
self._session_class = session_class
self.session = None
self._session_class_args = args
self._session_class_kwargs = kwargs
def get(self, url: str, *args, **kwargs):
return _HttpClient(
self._session_class,
url,
"GET",
self._session_class_args,
self._session_class_kwargs,
*args,
**kwargs,
)
def post(self, url: str, *args, **kwargs):
return _HttpClient(
self._session_class,
url,
"POST",
self._session_class_args,
self._session_class_kwargs,
*args,
**kwargs,
)
def put(self, url: str, *args, **kwargs):
return _HttpClient(
self._session_class,
url,
"PUT",
self._session_class_args,
self._session_class_kwargs,
*args,
**kwargs,
)
def delete(self, url: str, *args, **kwargs):
return _HttpClient(
self._session_class,
url,
"DELETE",
self._session_class_args,
self._session_class_kwargs,
*args,
**kwargs,
)
async def __aenter__(self):
if not self.session:
self.session = self._session_class(
timeout=default_http_client_timeout
)
return self.session
async def __aexit__(self, exc_type, exc_value, exc_tb):
await self.session.close()
http_client = _HttpClientMaker(
ClientSession, timeout=default_http_client_timeout
)
|
[
"aiohttp.ClientTimeout"
] |
[((157, 269), 'aiohttp.ClientTimeout', 'ClientTimeout', ([], {'total': 'conf.ASGARD_HTTP_CLIENT_TOTAL_TIMEOUT', 'connect': 'conf.ASGARD_HTTP_CLIENT_CONNECT_TIMEOUT'}), '(total=conf.ASGARD_HTTP_CLIENT_TOTAL_TIMEOUT, connect=conf.\n ASGARD_HTTP_CLIENT_CONNECT_TIMEOUT)\n', (170, 269), False, 'from aiohttp import ClientSession, ClientTimeout\n')]
|
## https://nowonbun.tistory.com/668
# 소켓을 사용하기 위해서는 socket을 import해야 한다.
import socket, threading
# binder함수는 서버에서 accept가 되면 생성되는 socket 인스턴스를 통해 client로 부터 데이터를 받으면 echo형태로 재송신하는 메소드이다.
def binder(client_socket, addr):
# 커넥션이 되면 접속 주소가 나온다.
print('Connected by', addr)
try:
# 접속 상태에서는 클라이언트로 부터 받을 데이터를 무한 대기한다.
# 만약 접속이 끊기게 된다면 except가 발생해서 접속이 끊기게 된다.
while True:
# socket의 recv함수는 연결된 소켓으로부터 데이터를 받을 대기하는 함수입니다. 최초 4바이트를 대기합니다.
data = client_socket.recv(4)
# 최초 4바이트는 전송할 데이터의 크기이다. 그 크기는 little 엔디언으로 byte에서 int형식으로 변환한다.
length = int.from_bytes(data, "little")
# 다시 데이터를 수신한다.
data = client_socket.recv(length)
# 수신된 데이터를 str형식으로 decode한다.
msg = data.decode()
# 수신된 메시지를 콘솔에 출력한다.
print('Received from', addr, msg)
# 수신된 메시지 앞에 「echo:」 라는 메시지를 붙힌다.
msg = "echo : " + msg
# 바이너리(byte)형식으로 변환한다.
data = msg.encode()
# 바이너리의 데이터 사이즈를 구한다.
length = len(data)
# 데이터 사이즈를 little 엔디언 형식으로 byte로 변환한 다음 전송한다.
client_socket.sendall(length.to_bytes(4, byteorder="little"))
# 데이터를 클라이언트로 전송한다.
client_socket.sendall(data)
except:
# 접속이 끊기면 except가 발생한다.
print("except : " , addr)
finally:
# 접속이 끊기면 socket 리소스를 닫는다.
client_socket.close()
# 소켓을 만든다.
server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
# 소켓 레벨과 데이터 형태를 설정한다.
server_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
# 서버는 복수 ip를 사용하는 pc의 경우는 ip를 지정하고 그렇지 않으면 None이 아닌 ''로 설정한다.
# 포트는 pc내에서 비어있는 포트를 사용한다. cmd에서 netstat -an | find "LISTEN"으로 확인할 수 있다.
server_socket.bind(('', 9999))
# server 설정이 완료되면 listen를 시작한다.
server_socket.listen()
try:
# 서버는 여러 클라이언트를 상대하기 때문에 무한 루프를 사용한다.
while True:
# client로 접속이 발생하면 accept가 발생한다.
# 그럼 client 소켓과 addr(주소)를 튜플로 받는다.
client_socket, addr = server_socket.accept()
# 쓰레드를 이용해서 client 접속 대기를 만들고 다시 accept로 넘어가서 다른 client를 대기한다.
th = threading.Thread(target=binder, args = (client_socket,addr))
th.start()
except:
print("server")
finally:
# 에러가 발생하면 서버 소켓을 닫는다.
server_socket.close()
|
[
"threading.Thread",
"socket.socket"
] |
[((1479, 1528), 'socket.socket', 'socket.socket', (['socket.AF_INET', 'socket.SOCK_STREAM'], {}), '(socket.AF_INET, socket.SOCK_STREAM)\n', (1492, 1528), False, 'import socket, threading\n'), ((2126, 2185), 'threading.Thread', 'threading.Thread', ([], {'target': 'binder', 'args': '(client_socket, addr)'}), '(target=binder, args=(client_socket, addr))\n', (2142, 2185), False, 'import socket, threading\n')]
|
import os
import flaskr
import unittest
import tempfile
class FlaskrTestCase(unittest.TestCase):
def setUp(self):
self.db_fd, flaskr.DATABASE = tempfile.mkstemp()
self.app = flaskr.app.test_client()
flaskr.init_db()
def tearDown(self):
os.close(self.db_fd)
os.unlink(flaskr.DATABASE)
if __name__ == '__main__':
unittest.main()
|
[
"unittest.main",
"flaskr.init_db",
"os.unlink",
"tempfile.mkstemp",
"flaskr.app.test_client",
"os.close"
] |
[((367, 382), 'unittest.main', 'unittest.main', ([], {}), '()\n', (380, 382), False, 'import unittest\n'), ((158, 176), 'tempfile.mkstemp', 'tempfile.mkstemp', ([], {}), '()\n', (174, 176), False, 'import tempfile\n'), ((196, 220), 'flaskr.app.test_client', 'flaskr.app.test_client', ([], {}), '()\n', (218, 220), False, 'import flaskr\n'), ((229, 245), 'flaskr.init_db', 'flaskr.init_db', ([], {}), '()\n', (243, 245), False, 'import flaskr\n'), ((279, 299), 'os.close', 'os.close', (['self.db_fd'], {}), '(self.db_fd)\n', (287, 299), False, 'import os\n'), ((308, 334), 'os.unlink', 'os.unlink', (['flaskr.DATABASE'], {}), '(flaskr.DATABASE)\n', (317, 334), False, 'import os\n')]
|
import os
import signal
import argparse
import platform
# PyQt5 doesn't play nicely with i3 and Ubuntu 18, PyQt6 is much more stable
# Unfortunately, PyQt6 doesn't install on Ubuntu 18. Thankfully both
# libraries are interchangeable, and we just need to swap them in this
# one spot, and pyqtgraph will pick up on it and store the library under
# pyqtgraph.Qt. So from PyQt5 import x becomes from pyqtgraph.Qt import x
if "18.04" in platform.version():
import PyQt5
else:
import PyQt6
import pyqtgraph
from pyqtgraph.dockarea import *
from pyqtgraph.Qt import QtCore, QtGui
from pyqtgraph.Qt.QtWidgets import QVBoxLayout, QWidget
from proto.import_all_protos import *
from software.networking import threaded_unix_sender
from software.thunderscope.arbitrary_plot.named_value_plotter import NamedValuePlotter
from software.thunderscope.field import (
obstacle_layer,
path_layer,
validation_layer,
world_layer,
)
from software.thunderscope.field.field import Field
from software.thunderscope.log.g3log_widget import g3logWidget
from software.thunderscope.robot_diagnostics.drive_and_dribbler_widget import (
DriveAndDribblerWidget,
)
from software.thunderscope.proto_receiver import ProtoReceiver
from software.thunderscope.play.playinfo_widget import playInfoWidget
from software.thunderscope.chicker.chicker import ChickerWidget
class Thunderscope(object):
""" Thunderscope is our main visualizer that can visualize our field,
obstacles, paths, performance metrics, logs, plots. Thunderscope also
provides tools to interact with the robots.
Thunderscope uses pyqtgraph, which is highly configurable during runtime.
Users can move docks (purple bar) around, double click to pop them out into
another window, etc.
The setup_* functions return docks. See configure_default_layout for an
example. The returned docks can be arranged differently based on the
use case (robot diagnostics, simulation, robocup, demo, etc..)
"""
def __init__(self, refresh_interval_ms=5):
# Setup MainApp and initialize DockArea
self.app = pyqtgraph.mkQApp("Thunderscope")
self.app.setStyleSheet(
"QMainWindow{background-color: black;border: 1px solid black;}"
)
signal.signal(signal.SIGINT, signal.SIG_DFL)
self.dock_area = DockArea()
self.window = QtGui.QMainWindow()
self.window.setCentralWidget(self.dock_area)
self.window.setWindowTitle("Thunderscope")
# Setup unix socket directory
try:
os.mkdir("/tmp/tbots")
except:
pass
self.proto_receiver = ProtoReceiver()
self.refresh_functions = []
def __refresh():
for refresh_func in self.refresh_functions:
refresh_func()
# Setup refresh Timer
self.refresh_timer = QtCore.QTimer()
self.refresh_timer.setTimerType(QtCore.Qt.TimerType.PreciseTimer)
self.refresh_timer.timeout.connect(__refresh)
self.refresh_timer.start(refresh_interval_ms) # Refresh at 200hz
def register_refresh_function(self, refresh_func):
"""Register the refresh functions to run at the refresh_interval_ms
passed into thunderscope.
:param refresh_func: The function to call at refresh_interval_ms
"""
self.refresh_functions.append(refresh_func)
def configure_default_layout(self):
"""Configure the default layout for thunderscope
"""
# Configure Docks
field_dock = self.setup_field_widget()
log_dock = self.setup_log_widget()
performance_dock = self.setup_performance_plot()
play_info_dock = self.setup_play_info()
self.dock_area.addDock(field_dock, "left")
self.dock_area.addDock(log_dock, "bottom", field_dock)
self.dock_area.addDock(performance_dock, "right", log_dock)
self.dock_area.addDock(play_info_dock, "right", performance_dock)
def setup_field_widget(self):
"""Setup the field widget with the constituent layers
:returns: The dock containing the field widget
"""
self.field = Field()
# Create layers
world = world_layer.WorldLayer()
obstacles = obstacle_layer.ObstacleLayer()
paths = path_layer.PathLayer()
validation = validation_layer.ValidationLayer()
# Add field layers to field
self.field.add_layer("Vision", world)
self.field.add_layer("Obstacles", obstacles)
self.field.add_layer("Paths", paths)
self.field.add_layer("Validation", validation)
# Register observers
self.proto_receiver.register_observer(World, world.world_buffer)
self.proto_receiver.register_observer(Obstacles, obstacles.obstacle_buffer)
self.proto_receiver.register_observer(
PathVisualization, paths.path_visualization_buffer
)
# Register refresh functions
self.register_refresh_function(self.field.refresh)
# Create and return dock
field_dock = Dock("Field", size=(500, 2000))
field_dock.addWidget(self.field)
return field_dock
def setup_log_widget(self):
"""Setup the wiget that receives logs from full system
:returns: The dock containing the log widget
"""
# Create layout
layout = QVBoxLayout()
widget = QWidget()
# Create widget
self.logs = g3logWidget()
# Register observer
self.proto_receiver.register_observer(RobotLog, self.logs.log_buffer)
# Register refresh function
self.register_refresh_function(self.logs.refresh)
# Setup Checkbox Widget
layout.addWidget(self.logs)
layout.addWidget(self.logs.checkbox_widget)
widget.setLayout(layout)
# Create and return dock
log_dock = Dock("Logs", size=(500, 100))
log_dock.addWidget(widget)
return log_dock
def setup_performance_plot(self):
"""Setup the performance plot
:returns: The performance plot setup in a dock
"""
# Create widget
self.named_value_plotter = NamedValuePlotter()
# Register observer
self.proto_receiver.register_observer(
NamedValue, self.named_value_plotter.named_value_buffer
)
# Register refresh function
self.register_refresh_function(self.named_value_plotter.refresh)
# Create and return dock
named_value_plotter_dock = Dock("Performance", size=(500, 100))
named_value_plotter_dock.addWidget(self.named_value_plotter.plot)
return named_value_plotter_dock
def setup_play_info(self):
"""Setup the play info widget
:returns: The play info widget setup in a dock
"""
play_info = playInfoWidget()
play_info_dock = Dock("playInfo", size=(500, 100))
play_info_dock.addWidget(play_info)
self.proto_receiver.register_observer(PlayInfo, play_info.log_buffer)
self.register_refresh_function(play_info.refresh)
return play_info_dock
def setup_chicker_widget(self):
"""Setup the chicker widget for robot diagnostics
:returns: The dock containing the chicker widget
"""
# Create widget
self.chicker_widget = ChickerWidget()
# Register refresh function
self.register_refresh_function(self.chicker_widget.refresh)
# Create and return dock
chicker_dock = Dock("Chicker", size=(100, 100))
chicker_dock.addWidget(self.chicker_widget)
return chicker_dock
def setup_drive_and_dribbler_widget(self):
drive_and_dribbler = DriveAndDribblerWidget()
drive_and_dribbler_dock = Dock("robot diagnostics", size=(50, 100))
drive_and_dribbler_dock.addWidget(drive_and_dribbler)
return drive_and_dribbler_dock
def show(self):
self.window.show()
pyqtgraph.exec()
def close(self):
QtCore.QTimer.singleShot(0, self.window.close)
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="Thunderscope")
parser.add_argument(
"--robot_diagnostics",
action="store_true",
help="Run thunderscope in the robot diagnostics configuration",
)
parser.add_argument(
"--run_simulator", action="store_true", help="Run the standalone simulator"
)
args = parser.parse_args()
if args.robot_diagnostics:
thunderscope = Thunderscope()
log_dock = thunderscope.setup_log_widget()
thunderscope.dock_area.addDock(log_dock)
drive_and_dribbler_dock = thunderscope.setup_drive_and_dribbler_widget()
thunderscope.dock_area.addDock(drive_and_dribbler_dock)
thunderscope.show()
elif args.run_simulator:
print(
"TODO #2050, this isn't implemented, just run the current standalone simulator"
)
else:
thunderscope = Thunderscope()
thunderscope.configure_default_layout()
thunderscope.show()
|
[
"os.mkdir",
"argparse.ArgumentParser",
"pyqtgraph.exec",
"software.thunderscope.chicker.chicker.ChickerWidget",
"software.thunderscope.field.path_layer.PathLayer",
"software.thunderscope.field.field.Field",
"pyqtgraph.Qt.QtWidgets.QVBoxLayout",
"pyqtgraph.Qt.QtWidgets.QWidget",
"pyqtgraph.Qt.QtGui.QMainWindow",
"software.thunderscope.play.playinfo_widget.playInfoWidget",
"software.thunderscope.field.validation_layer.ValidationLayer",
"software.thunderscope.field.world_layer.WorldLayer",
"pyqtgraph.mkQApp",
"platform.version",
"signal.signal",
"software.thunderscope.log.g3log_widget.g3logWidget",
"software.thunderscope.arbitrary_plot.named_value_plotter.NamedValuePlotter",
"software.thunderscope.proto_receiver.ProtoReceiver",
"software.thunderscope.field.obstacle_layer.ObstacleLayer",
"pyqtgraph.Qt.QtCore.QTimer",
"software.thunderscope.robot_diagnostics.drive_and_dribbler_widget.DriveAndDribblerWidget",
"pyqtgraph.Qt.QtCore.QTimer.singleShot"
] |
[((437, 455), 'platform.version', 'platform.version', ([], {}), '()\n', (453, 455), False, 'import platform\n'), ((8135, 8186), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Thunderscope"""'}), "(description='Thunderscope')\n", (8158, 8186), False, 'import argparse\n'), ((2117, 2149), 'pyqtgraph.mkQApp', 'pyqtgraph.mkQApp', (['"""Thunderscope"""'], {}), "('Thunderscope')\n", (2133, 2149), False, 'import pyqtgraph\n'), ((2276, 2320), 'signal.signal', 'signal.signal', (['signal.SIGINT', 'signal.SIG_DFL'], {}), '(signal.SIGINT, signal.SIG_DFL)\n', (2289, 2320), False, 'import signal\n'), ((2380, 2399), 'pyqtgraph.Qt.QtGui.QMainWindow', 'QtGui.QMainWindow', ([], {}), '()\n', (2397, 2399), False, 'from pyqtgraph.Qt import QtCore, QtGui\n'), ((2655, 2670), 'software.thunderscope.proto_receiver.ProtoReceiver', 'ProtoReceiver', ([], {}), '()\n', (2668, 2670), False, 'from software.thunderscope.proto_receiver import ProtoReceiver\n'), ((2880, 2895), 'pyqtgraph.Qt.QtCore.QTimer', 'QtCore.QTimer', ([], {}), '()\n', (2893, 2895), False, 'from pyqtgraph.Qt import QtCore, QtGui\n'), ((4178, 4185), 'software.thunderscope.field.field.Field', 'Field', ([], {}), '()\n', (4183, 4185), False, 'from software.thunderscope.field.field import Field\n'), ((4227, 4251), 'software.thunderscope.field.world_layer.WorldLayer', 'world_layer.WorldLayer', ([], {}), '()\n', (4249, 4251), False, 'from software.thunderscope.field import obstacle_layer, path_layer, validation_layer, world_layer\n'), ((4272, 4302), 'software.thunderscope.field.obstacle_layer.ObstacleLayer', 'obstacle_layer.ObstacleLayer', ([], {}), '()\n', (4300, 4302), False, 'from software.thunderscope.field import obstacle_layer, path_layer, validation_layer, world_layer\n'), ((4319, 4341), 'software.thunderscope.field.path_layer.PathLayer', 'path_layer.PathLayer', ([], {}), '()\n', (4339, 4341), False, 'from software.thunderscope.field import obstacle_layer, path_layer, validation_layer, 
world_layer\n'), ((4363, 4397), 'software.thunderscope.field.validation_layer.ValidationLayer', 'validation_layer.ValidationLayer', ([], {}), '()\n', (4395, 4397), False, 'from software.thunderscope.field import obstacle_layer, path_layer, validation_layer, world_layer\n'), ((5397, 5410), 'pyqtgraph.Qt.QtWidgets.QVBoxLayout', 'QVBoxLayout', ([], {}), '()\n', (5408, 5410), False, 'from pyqtgraph.Qt.QtWidgets import QVBoxLayout, QWidget\n'), ((5428, 5437), 'pyqtgraph.Qt.QtWidgets.QWidget', 'QWidget', ([], {}), '()\n', (5435, 5437), False, 'from pyqtgraph.Qt.QtWidgets import QVBoxLayout, QWidget\n'), ((5483, 5496), 'software.thunderscope.log.g3log_widget.g3logWidget', 'g3logWidget', ([], {}), '()\n', (5494, 5496), False, 'from software.thunderscope.log.g3log_widget import g3logWidget\n'), ((6201, 6220), 'software.thunderscope.arbitrary_plot.named_value_plotter.NamedValuePlotter', 'NamedValuePlotter', ([], {}), '()\n', (6218, 6220), False, 'from software.thunderscope.arbitrary_plot.named_value_plotter import NamedValuePlotter\n'), ((6865, 6881), 'software.thunderscope.play.playinfo_widget.playInfoWidget', 'playInfoWidget', ([], {}), '()\n', (6879, 6881), False, 'from software.thunderscope.play.playinfo_widget import playInfoWidget\n'), ((7370, 7385), 'software.thunderscope.chicker.chicker.ChickerWidget', 'ChickerWidget', ([], {}), '()\n', (7383, 7385), False, 'from software.thunderscope.chicker.chicker import ChickerWidget\n'), ((7739, 7763), 'software.thunderscope.robot_diagnostics.drive_and_dribbler_widget.DriveAndDribblerWidget', 'DriveAndDribblerWidget', ([], {}), '()\n', (7761, 7763), False, 'from software.thunderscope.robot_diagnostics.drive_and_dribbler_widget import DriveAndDribblerWidget\n'), ((7999, 8015), 'pyqtgraph.exec', 'pyqtgraph.exec', ([], {}), '()\n', (8013, 8015), False, 'import pyqtgraph\n'), ((8046, 8092), 'pyqtgraph.Qt.QtCore.QTimer.singleShot', 'QtCore.QTimer.singleShot', (['(0)', 'self.window.close'], {}), '(0, self.window.close)\n', (8070, 
8092), False, 'from pyqtgraph.Qt import QtCore, QtGui\n'), ((2568, 2590), 'os.mkdir', 'os.mkdir', (['"""/tmp/tbots"""'], {}), "('/tmp/tbots')\n", (2576, 2590), False, 'import os\n')]
|
import numpy as np
import matplotlib.pyplot as plt
from matplotlib import cm
from mpl_toolkits.mplot3d import Axes3D
pW = 0.48
pL = 1-pW
b_max = 500 #max bet ($)
def total_losses(b0, f, num_losses):
sum=0
for i in range(0, num_losses):
sum += f**i
return b0*sum
def net_winnings(b0, f, num_games):
# assumes you won on the last game and lost on all games prior
return b0*f**(num_games-1)-total_losses(b0, f, num_games-1)
def expected_outcome(b0, f, printN=False):
# print(b0)
# print(f)
N = np.int(np.log(b_max/b0)/np.log(f))+1
if printN:
print("N is {}".format(N))
sum = 0
for i in range(1, N+1):
sum += pL**(i-1)*net_winnings(b0, f, i)
expectation = pW*sum - pL**N*total_losses(b0, f, N)
return expectation
# b0 = 481.00
# f = 1.01
# print(expected_outcome(b0, f, 0))
b0 = np.arange(1, 500, 1) # initial bets
f = np.arange(1.01, 5, 0.1) # bet increase factor (=2 in typical Martingale System)
b0, f = np.meshgrid(b0, f)
results=[]
for b0i, fi in zip(b0.flatten(), f.flatten()):
# print("Expected outcome for b0 = {0}, f ={1:.2f} is {2}".format(b0i, fi, expected_outcome(b0i, fi)))
results.append(expected_outcome(b0i, fi))
results = np.asarray(results)
i_opt = np.argmax(results)
b_opt = b0.flatten()[i_opt]
f_opt = f.flatten()[i_opt]
result_opt = results[i_opt]
results=np.reshape(results, b0.shape)
for var in ['b_opt', 'f_opt', 'result_opt']:
print("{0} = {1:.04f}".format(var, eval(var)))
#plot
fig = plt.figure()
ax = fig.gca(projection='3d')
# Plot the surface.
surf = ax.plot_surface(b0, f, results, cmap=cm.coolwarm,
linewidth=0, antialiased=False)
ax.set_xlabel('Initial Bet ($)')
ax.set_ylabel('Bet Increase Factor')
ax.set_zlabel('Expected Outcome ($)')
# ax.set_zlim(0, ax.get_zlim()[1])
fig.suptitle('Expected Outcome Surface using Martingale System on Roulette')
plt.title('Min/Max Bet: \$1/\$500', fontsize=10)
plt.show()
print('bye')
|
[
"matplotlib.pyplot.title",
"numpy.meshgrid",
"matplotlib.pyplot.show",
"numpy.log",
"numpy.argmax",
"numpy.asarray",
"matplotlib.pyplot.figure",
"numpy.arange",
"numpy.reshape"
] |
[((859, 879), 'numpy.arange', 'np.arange', (['(1)', '(500)', '(1)'], {}), '(1, 500, 1)\n', (868, 879), True, 'import numpy as np\n'), ((900, 923), 'numpy.arange', 'np.arange', (['(1.01)', '(5)', '(0.1)'], {}), '(1.01, 5, 0.1)\n', (909, 923), True, 'import numpy as np\n'), ((989, 1007), 'numpy.meshgrid', 'np.meshgrid', (['b0', 'f'], {}), '(b0, f)\n', (1000, 1007), True, 'import numpy as np\n'), ((1231, 1250), 'numpy.asarray', 'np.asarray', (['results'], {}), '(results)\n', (1241, 1250), True, 'import numpy as np\n'), ((1259, 1277), 'numpy.argmax', 'np.argmax', (['results'], {}), '(results)\n', (1268, 1277), True, 'import numpy as np\n'), ((1370, 1399), 'numpy.reshape', 'np.reshape', (['results', 'b0.shape'], {}), '(results, b0.shape)\n', (1380, 1399), True, 'import numpy as np\n'), ((1510, 1522), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (1520, 1522), True, 'import matplotlib.pyplot as plt\n'), ((1906, 1956), 'matplotlib.pyplot.title', 'plt.title', (['"""Min/Max Bet: \\\\$1/\\\\$500"""'], {'fontsize': '(10)'}), "('Min/Max Bet: \\\\$1/\\\\$500', fontsize=10)\n", (1915, 1956), True, 'import matplotlib.pyplot as plt\n'), ((1956, 1966), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1964, 1966), True, 'import matplotlib.pyplot as plt\n'), ((543, 561), 'numpy.log', 'np.log', (['(b_max / b0)'], {}), '(b_max / b0)\n', (549, 561), True, 'import numpy as np\n'), ((560, 569), 'numpy.log', 'np.log', (['f'], {}), '(f)\n', (566, 569), True, 'import numpy as np\n')]
|
# -*- encoding: utf-8 -*-
# Copyright (c) 2017 ZTE Corporation
#
# Authors:<NAME> <<EMAIL>>
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import mock
from watcher.common import clients
from watcher.common import exception
from watcher.common import ironic_helper
from watcher.common import utils as w_utils
from watcher.tests import base
class TestIronicHelper(base.TestCase):
    """Unit tests for watcher.common.ironic_helper.IronicHelper."""

    def setUp(self):
        super(TestIronicHelper, self).setUp()
        # The ironic client on the OpenStack clients facade is patched out,
        # so every test talks to a mock instead of a real service.
        os_clients = clients.OpenStackClients()
        ironic_patcher = mock.patch.object(os_clients, 'ironic')
        ironic_patcher.start()
        self.addCleanup(ironic_patcher.stop)
        self.ironic_util = ironic_helper.IronicHelper(osc=os_clients)

    @staticmethod
    def fake_ironic_node():
        """Build a mock ironic node carrying a freshly generated uuid."""
        fake_node = mock.MagicMock()
        fake_node.uuid = w_utils.generate_uuid()
        return fake_node

    def test_get_ironic_node_list(self):
        """The helper passes the client's node listing straight through."""
        expected_node = self.fake_ironic_node()
        self.ironic_util.ironic.node.list.return_value = [expected_node]
        listed = self.ironic_util.get_ironic_node_list()
        self.assertEqual([expected_node], listed)

    def test_get_ironic_node_by_uuid_success(self):
        """Looking up an existing uuid returns the client's node."""
        expected_node = self.fake_ironic_node()
        self.ironic_util.ironic.node.get.return_value = expected_node
        looked_up = self.ironic_util.get_ironic_node_by_uuid(expected_node.uuid)
        self.assertEqual(expected_node, looked_up)

    def test_get_ironic_node_by_uuid_failure(self):
        """An unknown uuid raises IronicNodeNotFound with the node id."""
        self.ironic_util.ironic.node.get.return_value = None
        self.assertRaisesRegex(
            exception.IronicNodeNotFound,
            "The ironic node node1 could not be found",
            self.ironic_util.get_ironic_node_by_uuid, 'node1')
|
[
"mock.patch.object",
"watcher.common.ironic_helper.IronicHelper",
"watcher.common.clients.OpenStackClients",
"watcher.common.utils.generate_uuid",
"mock.MagicMock"
] |
[((963, 989), 'watcher.common.clients.OpenStackClients', 'clients.OpenStackClients', ([], {}), '()\n', (987, 989), False, 'from watcher.common import clients\n'), ((1009, 1041), 'mock.patch.object', 'mock.patch.object', (['osc', '"""ironic"""'], {}), "(osc, 'ironic')\n", (1026, 1041), False, 'import mock\n'), ((1133, 1168), 'watcher.common.ironic_helper.IronicHelper', 'ironic_helper.IronicHelper', ([], {'osc': 'osc'}), '(osc=osc)\n', (1159, 1168), False, 'from watcher.common import ironic_helper\n'), ((1231, 1247), 'mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (1245, 1247), False, 'import mock\n'), ((1268, 1291), 'watcher.common.utils.generate_uuid', 'w_utils.generate_uuid', ([], {}), '()\n', (1289, 1291), True, 'from watcher.common import utils as w_utils\n')]
|
from django.conf import settings
from django.contrib.postgres.fields import JSONField
from django.db import models
from constants import content_types
from db.models.abstract.diff import DiffModel
from db.models.abstract.nameable import NameableModel
class Search(DiffModel, NameableModel):
    """A saved search query.

    Persists a user's search query (as JSON) scoped to a project so it can be
    re-run later. Uniqueness is enforced per (user, project, name); the
    ``name`` field presumably comes from NameableModel — confirm.
    """
    # (value, label) choices restricting which entity type a search targets.
    search_content_types = (
        (content_types.PROJECT, content_types.PROJECT),
        (content_types.EXPERIMENT_GROUP, content_types.EXPERIMENT_GROUP),
        (content_types.EXPERIMENT, content_types.EXPERIMENT),
        (content_types.JOB, content_types.JOB),
        (content_types.BUILD_JOB, content_types.BUILD_JOB),
    )
    # Owning project; deleting the project cascades to its saved searches.
    project = models.ForeignKey(
        'db.Project',
        on_delete=models.CASCADE,
        related_name='searches')
    # Optional entity type the search applies to (one of search_content_types).
    content_type = models.CharField(
        choices=search_content_types,
        max_length=24,
        blank=True,
        null=True)
    # Owner of the saved search; '+' disables the reverse relation on the user.
    user = models.ForeignKey(
        settings.AUTH_USER_MODEL,
        on_delete=models.CASCADE,
        related_name='+')
    # The raw search query payload.
    query = JSONField()
    # Optional free-form metadata about the search.
    meta = JSONField(
        null=True,
        blank=True,
        default=dict
    )
    class Meta:
        app_label = 'db'
        unique_together = (('user', 'project', 'name'), )
|
[
"django.db.models.ForeignKey",
"django.contrib.postgres.fields.JSONField",
"django.db.models.CharField"
] |
[((676, 763), 'django.db.models.ForeignKey', 'models.ForeignKey', (['"""db.Project"""'], {'on_delete': 'models.CASCADE', 'related_name': '"""searches"""'}), "('db.Project', on_delete=models.CASCADE, related_name=\n 'searches')\n", (693, 763), False, 'from django.db import models\n'), ((803, 891), 'django.db.models.CharField', 'models.CharField', ([], {'choices': 'search_content_types', 'max_length': '(24)', 'blank': '(True)', 'null': '(True)'}), '(choices=search_content_types, max_length=24, blank=True,\n null=True)\n', (819, 891), False, 'from django.db import models\n'), ((932, 1023), 'django.db.models.ForeignKey', 'models.ForeignKey', (['settings.AUTH_USER_MODEL'], {'on_delete': 'models.CASCADE', 'related_name': '"""+"""'}), "(settings.AUTH_USER_MODEL, on_delete=models.CASCADE,\n related_name='+')\n", (949, 1023), False, 'from django.db import models\n'), ((1057, 1068), 'django.contrib.postgres.fields.JSONField', 'JSONField', ([], {}), '()\n', (1066, 1068), False, 'from django.contrib.postgres.fields import JSONField\n'), ((1080, 1126), 'django.contrib.postgres.fields.JSONField', 'JSONField', ([], {'null': '(True)', 'blank': '(True)', 'default': 'dict'}), '(null=True, blank=True, default=dict)\n', (1089, 1126), False, 'from django.contrib.postgres.fields import JSONField\n')]
|
# MIT License
#
# Copyright (c) 2020-2021 Parakoopa and the SkyTemple Contributors
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
import json
import logging
from typing import Dict, Tuple, List, Union, Optional, Iterable
logger = logging.getLogger(__name__)
class SourceMapPositionMark:
    """A position mark encoded in the source code of SSBScript / ExplorerScript.

    Coordinates are a whole-tile position (``x_relative`` / ``y_relative``)
    plus a sub-tile offset code (``x_offset`` / ``y_offset``); see
    skytemple_files.script.ssa.position for the encoding.
    """
    def __init__(self, line_number: int, column_number: int, end_line_number: int, end_column_number: int,
                 name: str, x_offset: int, y_offset: int, x_relative: int, y_relative: int):
        # Span of the mark in the source text.
        self.line_number = line_number
        self.column_number = column_number
        self.end_line_number = end_line_number
        self.end_column_number = end_column_number
        self.name = name
        # Sub-tile offset codes: 2/3 -> half a tile, >= 4 -> two tiles.
        self.x_offset = x_offset
        self.y_offset = y_offset
        # Whole-tile coordinates.
        self.x_relative = x_relative
        self.y_relative = y_relative

    @property
    def x_with_offset(self) -> Union[int, float]:
        """
        Returns the x position with offset, in tiles, as float or int
        See also skytemple_files.script.ssa.position.
        """
        offset = 0
        if self.x_offset in (2, 3):
            offset = 0.5
        elif self.x_offset >= 4:
            offset = 2
        return self.x_relative + offset

    @property
    def y_with_offset(self) -> Union[int, float]:
        """
        Returns the y position with offset, in tiles, as float or int
        See also skytemple_files.script.ssa.position.
        """
        # Fixed: the docstring above was a copy-paste saying "x position".
        offset = 0
        if self.y_offset in (2, 3):
            offset = 0.5
        elif self.y_offset >= 4:
            offset = 2
        return self.y_relative + offset

    def __str__(self):
        # NOTE(review): the trailing ")" after ">" looks accidental but is
        # preserved so the string representation stays stable.
        return f'SourceMapPositionMark<' \
               f'"{self.name}" @{self.line_number}:{self.column_number}->{self.end_line_number}:{self.end_column_number} - ' \
               f'{self.x_relative}:{self.x_offset}, {self.y_relative}:{self.y_offset}>)'

    def __repr__(self):
        return str(self)

    def __eq__(self, other):
        if not isinstance(other, SourceMapPositionMark):
            return False
        # serialize() covers exactly all fields, so comparing the serialized
        # forms is a field-by-field equality check.
        return self.serialize() == other.serialize()

    def serialize(self) -> list:
        """Return a JSON-serializable list of all fields (inverse: deserialize)."""
        return [
            self.line_number, self.column_number, self.end_line_number, self.end_column_number,
            self.name, self.x_offset, self.y_offset, self.x_relative, self.y_relative
        ]

    @classmethod
    def deserialize(cls, data_list) -> 'SourceMapPositionMark':
        """Rebuild a mark from the output of serialize()."""
        return SourceMapPositionMark(
            line_number=data_list[0], column_number=data_list[1],
            end_line_number=data_list[2], end_column_number=data_list[3],
            name=data_list[4], x_offset=data_list[5], y_offset=data_list[6],
            x_relative=data_list[7], y_relative=data_list[8]
        )
class SourceMapping:
    """A mapping of one opcode to its line/column in the main source file."""
    def __init__(self, line_number: int, column: int):
        self.line = line_number
        self.column = column

    def serialize(self) -> list:
        """Return a JSON-serializable list representation."""
        return [
            self.line, self.column
        ]

    @classmethod
    def deserialize(cls, data_list) -> 'SourceMapping':
        """Rebuild a mapping from the output of serialize()."""
        return SourceMapping(
            data_list[0], data_list[1]
        )


class MacroSourceMapping(SourceMapping):
    """A mapping of one opcode to its source position inside a macro."""
    def __init__(self, relpath_included_file: str, macro_name: str,
                 line_number: int, column: int,
                 called_in: Optional[Tuple[str, int, int]],
                 return_addr: Optional[int], parameter_mapping: Dict[str, Union[int, str]]):
        super().__init__(line_number, column)
        self.relpath_included_file = relpath_included_file
        self.macro_name = macro_name
        # If this is the first operation in a Macro, this field contains the line number and column
        # of the Macro call.
        # Tuple contains relative_included_file, line number, column number of the call file.
        self.called_in = called_in
        # The opcode address to jump to when stepping out of this macro.
        # (Fixed: this attribute was redundantly assigned twice in the
        # original __init__; a single assignment suffices.)
        self.return_addr = return_addr
        # The mapping of parameter values for the current macro context, only for informational
        # purposes. Contains the string representation or integer value.
        self.parameter_mapping = parameter_mapping

    def serialize(self) -> list:
        """Return a JSON-serializable list representation."""
        return [
            self.relpath_included_file, self.macro_name, self.line, self.column,
            self.called_in, self.return_addr, self.parameter_mapping
        ]

    @classmethod
    def deserialize(cls, data_list) -> 'MacroSourceMapping':
        """Rebuild a macro mapping from the output of serialize()."""
        return MacroSourceMapping(
            data_list[0], data_list[1], data_list[2], data_list[3], data_list[4], data_list[5], data_list[6]
        )
class SourceMap:
    """
    A source map for ExplorerScript and SSBScript back to the SSB binary opcodes.
    Takes a routine id and opcode index and returns the line in the source code, that this
    operation is at.
    The mapped addresses are the addresses relative to the first routine opcode address.
    Can be created in the following ways:
    - When loading SSB:
      - For SSBScript:
        - During decompilation.
      - For ExplorerScript:
        - Either during the decompilation, if no ExplorerScript exists yet.
        - From an existing source map file.
    - When compiling SSBScript or ExplorerScript the source map is also generated.
    This also provides information about position marks used in the source file.
    """
    def __init__(
            self,
            mappings: Dict[int, SourceMapping],
            position_marks: List[SourceMapPositionMark],
            mappings_macros: Dict[int, MacroSourceMapping],
            position_marks_macro: List[Tuple[Optional[str], str, SourceMapPositionMark]]
    ):
        """
        mappings: Actual main source mappings:
            Keys are opcode offsets, values are the source mapping
        position_marks: Encoded position marks
        mappings_macro: Source mappings in macros.
            Keys are opcode offsets, values are the macro source mapping
        position_marks_macro: Position marks encoded in macros. Values are tuple of:
            - relative file path, macro name, position mark
        """
        self._mappings = mappings
        self._position_marks = position_marks
        self._mappings_macros = mappings_macros
        self._position_marks_macro = position_marks_macro
    @property
    def is_empty(self):
        """True if there are no main-file mappings (macro mappings are not considered)."""
        return len(self._mappings) == 0
    def get_op_line_and_col(self, op_offset: int) -> Optional[SourceMapping]:
        """Resolve an opcode offset: main-file mappings first, then macro mappings.
        Implicitly returns None when the offset is unknown."""
        if op_offset in self._mappings:
            return self._mappings[op_offset]
        if op_offset in self._mappings_macros:
            return self._mappings_macros[op_offset]
    def get_op_line_and_col__direct(self, op_offset: int) -> Optional[SourceMapping]:
        """Resolve an opcode offset against the main-file mappings only (None if absent)."""
        if op_offset in self._mappings:
            return self._mappings[op_offset]
    def get_op_line_and_col__macros(self, op_offset: int) -> Optional[MacroSourceMapping]:
        """Resolve an opcode offset against the macro mappings only (None if absent)."""
        if op_offset in self._mappings_macros:
            return self._mappings_macros[op_offset]
    def get_position_marks__direct(self) -> List[SourceMapPositionMark]:
        """Position marks defined directly in the main source file."""
        return self._position_marks
    def get_position_marks__macros(self) -> List[Tuple[Optional[str], str, SourceMapPositionMark]]:
        """Position marks defined inside macros: (relative file path, macro name, mark)."""
        return self._position_marks_macro
    def __iter__(self) -> Iterable[Tuple[int, MacroSourceMapping]]:
        """
        Iterates over all source map entries, including the macro entries.
        If it's a macro entry, macro_name is a string.
        """
        for opcode_offset, entry in self._mappings.items():
            yield opcode_offset, entry
        for opcode_offset, entry in self._mappings_macros.items():
            yield opcode_offset, entry
    def collect_mappings__macros(self) -> Iterable[Tuple[int, MacroSourceMapping]]:
        """Yield only the macro entries as (opcode offset, mapping) pairs."""
        for opcode_offset, entry in self._mappings_macros.items():
            yield opcode_offset, entry
    def __eq__(self, other):
        # NOTE(review): macro mappings and macro position marks are not part of
        # the equality check — confirm whether that is intentional.
        if not isinstance(other, SourceMap):
            return False
        return self._mappings == other._mappings and self._position_marks == other._position_marks
    def __str__(self):
        # The string form is the compact JSON serialization.
        return self.serialize()
    def serialize(self, pretty=False) -> str:
        """Serialize the full map (including macro data) to a JSON string.
        With pretty=True the JSON is indented for readability."""
        return json.dumps({
            'map': {int(x): m.serialize() for x, m in self._mappings.items()},
            'pos_marks': [m.serialize() for m in self._position_marks],
            'macros': {
                'map': {int(x): m.serialize() for x, m in self._mappings_macros.items()},
                'pos_marks': [[y[0], y[1], y[2].serialize()] for y in self._position_marks_macro]
            }
        }, indent=2 if pretty else None)
    @classmethod
    def deserialize(cls, json_str: str) -> 'SourceMap':
        """Inverse of serialize(): rebuild a SourceMap from its JSON string."""
        json_d = json.loads(json_str)
        return SourceMap(
            {int(x): SourceMapping.deserialize(y) for x, y in json_d['map'].items()},
            [SourceMapPositionMark.deserialize(m) for m in json_d['pos_marks']],
            {int(x): MacroSourceMapping.deserialize(y) for x, y in json_d['macros']['map'].items()},
            [(y[0], y[1], SourceMapPositionMark.deserialize(y[2])) for y in json_d['macros']['pos_marks']]
        )
    @classmethod
    def create_empty(cls):
        """Create a SourceMap with no entries at all."""
        return cls({}, [], {}, [])
    def rewrite_offsets(self, new_mapping: Dict[int, int]):
        """
        Replace all opcode offsets (in mappings, macro mappings, macro return addresses) with new
        offsets. The parameter is a dict mapping old offsets to new offsets.
        """
        # It may happen, that the new mapping contains fewer opcodes than originally added (eg. if they were optimized)
        # but that's ok.
        self._mappings = {new_mapping[key]: val for key, val in self._mappings.items() if key in new_mapping}
        self._mappings_macros = {new_mapping[key]: val for key, val in self._mappings_macros.items() if key in new_mapping}
        max_old_offset = max(new_mapping.keys())
        # Remap each macro's return address; walk forward if its opcode was
        # optimized away, and drop it if we run past the last known old offset.
        for m in self._mappings_macros.values():
            if m.return_addr:
                addr = m.return_addr
                while addr not in new_mapping:
                    # if the return addr opcode was optimized away, we take the next index. TODO: Good idea?
                    addr += 1
                    if addr > max_old_offset:
                        addr = None
                        break
                if addr is not None:
                    m.return_addr = new_mapping[addr]
class SourceMapBuilder:
    """Incrementally collects opcode mappings and position marks during
    compilation and produces an immutable SourceMap via build()."""
    def __init__(self):
        # Main-file opcode offset -> SourceMapping.
        self._mappings = {}
        self._pos_marks = []
        # Macro opcode offset -> MacroSourceMapping.
        self._mappings_macros = {}
        self._pos_marks_macros = []
        # Set by next_macro_opcode_called_in(); consumed (and reset) by the
        # next add_macro_opcode(). Annotation fixed: a 3-tuple of
        # (relative file path, line, column) is stored here, not a SourceMapping.
        self._next_macro_called_in: Optional[Tuple[Optional[str], int, int]] = None
        # Stack of (return opcode offset, parameter mapping) for nested macro calls.
        self._macro_context__stack: List[Tuple[int, Dict[str, Union[int, str]]]] = []
        #logger.debug("<%d>: Init.", id(self))
    def add_opcode(self, op_offset, line_number, column):
        """Record that the opcode at op_offset originates from the given main-file position."""
        self._mappings[op_offset] = SourceMapping(line_number, column)
        #logger.debug("<%d>: Adding opcode: %d -> %d, %d", id(self), op_offset, line_number, column)
    def add_position_mark(self, position_mark: SourceMapPositionMark):
        """Record a position mark found directly in the main source file."""
        self._pos_marks.append(position_mark)
        #logger.debug("<%d>: Adding PositionMark: %s", id(self), position_mark)
    def macro_context__push(self, opcode_to_jump_to: int, parameter_mapping: Dict[str, Union[int, str]]):
        """
        Push a new macro return address and parameter mapping to the stack, all added macro ops will
        use what's on the top of the stack.
        """
        self._macro_context__stack.append((opcode_to_jump_to, parameter_mapping))
        #logger.debug("<%d>: -- PUSH MACRO CTX --> [%d, %s]", id(self), opcode_to_jump_to, parameter_mapping)
    def macro_context__pop(self):
        """
        Pop a macro context from the stack.
        """
        self._macro_context__stack.pop()
        #logger.debug("<%d>: <-- POP MACRO CTX", id(self))
    def next_macro_opcode_called_in(self, if_incl_rel_path: Optional[str], line_number, column):
        """Mark the next added macro opcode as being called in this line/column. This marks a macro call."""
        self._next_macro_called_in = (if_incl_rel_path, line_number, column)
        #logger.debug("<%d>: Marked next macro opcode as called in %s:%d, %d", id(self), str(if_incl_rel_path), line_number, column)
    def add_macro_opcode(self, op_offset, if_incl_rel_path: Optional[str], macro_name: str,
                         line_number, column):
        """
        Add an operation that has it's source code in a macro.
        If the macro is in a different file, if_incl_rel_path should contain the relative path to this file
        from the original source file that this source map is generated for.
        At least one macro return address entry has to be on the call stack!
        """
        if len(self._macro_context__stack) < 1:
            raise ValueError("There are no return addresses on the macro return address stack, "
                             "can not add macro opcode.")
        # Consume the pending "called in" marker (set by next_macro_opcode_called_in).
        called_in = None
        if self._next_macro_called_in is not None:
            called_in = self._next_macro_called_in
            self._next_macro_called_in = None
        return_addr, parameter_mapping = self._macro_context__stack[-1]
        #logger.debug("<%d>: Adding macro opcode: %s:%s:%d -> %d, %d", id(self), if_incl_rel_path, macro_name, op_offset, line_number, column)
        self._mappings_macros[op_offset] = MacroSourceMapping(if_incl_rel_path, macro_name,
                                                            line_number,
                                                            column, called_in, return_addr, parameter_mapping)
    def add_macro_position_mark(self, if_incl_rel_path: Optional[str], macro_name: str, position_mark: SourceMapPositionMark):
        """Add a position mark, that has it's source code in a macro. See notes for add_macro_opcode"""
        self._pos_marks_macros.append((if_incl_rel_path, macro_name, position_mark))
        #logger.debug("<%d>: Adding Macro PositionMark: %s:%s - %s", id(self), if_incl_rel_path, macro_name, position_mark)
    def build(self):
        """Assemble everything collected so far into a SourceMap."""
        return SourceMap(self._mappings, self._pos_marks,
                         self._mappings_macros, self._pos_marks_macros)
|
[
"json.loads",
"logging.getLogger"
] |
[((1259, 1286), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1276, 1286), False, 'import logging\n'), ((10501, 10521), 'json.loads', 'json.loads', (['json_str'], {}), '(json_str)\n', (10511, 10521), False, 'import json\n')]
|
# Copyright 2019 VMware, Inc.
# All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
from unittest import mock
from vmware_nsxlib.tests.unit.v3 import nsxlib_testcase
class TestNsxLibClusterManagement(nsxlib_testcase.NsxClientTestCase):

    def test_get_restore_status(self):
        """get_restore_status should GET the cluster restore status endpoint."""
        manager = self.nsxlib.cluster_management
        with mock.patch.object(self.nsxlib.client, 'get') as mocked_get:
            manager.get_restore_status()
        mocked_get.assert_called_with('cluster/restore/status')
|
[
"unittest.mock.patch.object"
] |
[((890, 934), 'unittest.mock.patch.object', 'mock.patch.object', (['self.nsxlib.client', '"""get"""'], {}), "(self.nsxlib.client, 'get')\n", (907, 934), False, 'from unittest import mock\n')]
|
import torch
import torch.nn as nn
import torchvision.models as models
class ResNet50_Mod(nn.Module):
    """ImageNet-pretrained ResNet-50 backbone with its stock head replaced
    by an average pool and a 2-way linear classifier."""
    def __init__(self, input_size=640):
        super().__init__()
        # Pretrained ResNet-50 with its original avgpool and fc (the last two
        # children) stripped off, keeping only the convolutional backbone.
        resnet50 = models.resnet50(pretrained=True)
        self.resnet = nn.Sequential(*(list(resnet50.children())[:-2]))
        self.avepool = nn.AvgPool2d(kernel_size=7)
        # NOTE(review): in_features = input_size//112 (5 for the default 640)
        # looks inconsistent — the ResNet-50 backbone emits 2048 channels, and
        # forward() applies no flatten before this Linear, so the Linear acts
        # on the last spatial dimension. Confirm the intended tensor shapes.
        self.fc = nn.Linear(int(input_size//112), 2)
    def forward(self, x):
        # backbone -> average pool -> linear head (no flatten in between —
        # see the NOTE in __init__).
        x = self.resnet(x)
        x = self.avepool(x)
        x = self.fc(x)
        return x
|
[
"torch.nn.AvgPool2d",
"torchvision.models.resnet50"
] |
[((190, 222), 'torchvision.models.resnet50', 'models.resnet50', ([], {'pretrained': '(True)'}), '(pretrained=True)\n', (205, 222), True, 'import torchvision.models as models\n'), ((317, 344), 'torch.nn.AvgPool2d', 'nn.AvgPool2d', ([], {'kernel_size': '(7)'}), '(kernel_size=7)\n', (329, 344), True, 'import torch.nn as nn\n')]
|
# -*- coding: utf-8 -*-
from django import forms
from django.contrib.auth.models import User
from django.contrib.auth import authenticate
from .models import CoreUser
class UserLoginForm(forms.ModelForm):
    """Login form backed by CoreUser.

    Fix: ``password`` and ``username`` were declared as bare widget instances
    (``forms.HiddenInput`` / ``forms.EmailInput``). Django's form metaclass
    only collects ``forms.Field`` instances, so those widgets were silently
    ignored and the form never actually carried credential fields. They are
    now proper ``CharField``s wrapping the same widgets.
    """
    # NOTE(review): a hidden password field with a fixed 'value' attr is
    # unusual — confirm this is intended (the '<PASSWORD>' placeholder looks
    # like an anonymization artifact).
    password = forms.CharField(
        widget=forms.HiddenInput(attrs={'value': '<PASSWORD>'}))
    username = forms.CharField(
        widget=forms.EmailInput(
            attrs={
                'class': 'form-control line-input',
                'placeholder': '<EMAIL>'
            }
        ))
    class Meta:
        model = CoreUser
        fields = (
            'type_login', 'pk_core_user', 'user_uf', 'user_city'
        )
        widgets = {
            'type_login': forms.HiddenInput(attrs={'value': 'MN'}),
            'pk_core_user': forms.TextInput(
                attrs={
                    'class': 'form-control line-input',
                    'placeholder': 'Doc. Identif.'
                }
            ),
            'user_uf': forms.Select(
                attrs={
                    'class': 'form-control line-input',
                    'choices': CoreUser.UF_CHOICES,
                }
            ),
            'user_city': forms.TextInput(
                attrs={
                    'class': 'form-control line-input',
                    'placeholder': 'Cidade'
                }
            )
        }
        labels = {
            'pk_core_user': 'C.P.F.',
            'user_uf': 'UF: ',
            'user_city': 'Cidade: ',
        }
    # Validate/authenticate the login fields.
    def clean(self):
        """Raise ValidationError unless the submitted credentials authenticate."""
        username = self.cleaned_data.get('username')
        password = self.cleaned_data.get('password')
        # Delegate to the shared helper so the check lives in one place
        # (previously duplicated here verbatim).
        self.authenticate_user(username, password)
        return self.cleaned_data
    def authenticate_user(self, username, password):
        """Return the authenticated, active user or raise ValidationError."""
        user = authenticate(username=username, password=password)
        if not user or not user.is_active:
            raise forms.ValidationError("Usuário ou senha inválidos.")
        return user
class UserRegistrationForm(forms.ModelForm):
    """Sign-up form for Django's built-in User model.

    The ``label`` values ('lock', 'person', 'email') are presumably Material
    icon names rendered by the template rather than captions — confirm.
    NOTE(review): there is no clean() comparing ``password`` and ``confirm``,
    and nothing here hashes the password on save; both must be handled by the
    caller/view — confirm.
    """
    password = forms.CharField(widget=forms.PasswordInput(
        attrs={'class': 'form-control line-input', 'placeholder': 'Senha'}), min_length=6, label='lock')
    confirm = forms.CharField(widget=forms.PasswordInput(
        attrs={'class': 'form-control line-input', 'placeholder': 'Confirme a senha'}), min_length=6, label='lock')
    username = forms.CharField(widget=forms.TextInput(
        attrs={'class': 'form-control line-input', 'placeholder': 'Nome de usuário'}), label='person')
    email = forms.CharField(widget=forms.EmailInput(attrs={
        'class': 'form-control line-input', 'placeholder': 'Email'}), label='email', required=False)
    class Meta:
        model = User
        fields = ('username', 'email', 'password',)
class PasswordResetForm(forms.Form):
    """Form asking for the account identifier that starts a password reset.

    Fix: this was declared as a ``ModelForm`` without a ``Meta``/``model``,
    which makes Django raise ``ValueError('ModelForm has no model class
    specified.')`` at instantiation. It is not bound to a model at all, so a
    plain ``forms.Form`` is correct (consistent with SetPasswordForm).
    """
    email_or_username = forms.CharField(widget=forms.TextInput(
        attrs={'class': 'form-control line-input', 'placeholder': 'Email/Usuário'}))
class SetPasswordForm(forms.Form):
    """Form collecting a new password and its confirmation.

    NOTE(review): no clean() compares ``new_password`` with
    ``new_password_confirm``; matching must be enforced elsewhere — confirm.
    """
    new_password = forms.CharField(widget=forms.PasswordInput(
        attrs={'class': 'form-control line-input', 'placeholder': 'Nova senha'}), min_length=6)
    new_password_confirm = forms.CharField(widget=forms.PasswordInput(
        attrs={'class': 'form-control line-input', 'placeholder': 'Confirmar a nova senha'}), min_length=6)
|
[
"django.forms.Select",
"django.forms.TextInput",
"django.forms.PasswordInput",
"django.forms.EmailInput",
"django.forms.ValidationError",
"django.contrib.auth.authenticate",
"django.forms.HiddenInput"
] |
[((225, 273), 'django.forms.HiddenInput', 'forms.HiddenInput', ([], {'attrs': "{'value': '<PASSWORD>'}"}), "(attrs={'value': '<PASSWORD>'})\n", (242, 273), False, 'from django import forms\n'), ((289, 379), 'django.forms.EmailInput', 'forms.EmailInput', ([], {'attrs': "{'class': 'form-control line-input', 'placeholder': '<EMAIL>'}"}), "(attrs={'class': 'form-control line-input', 'placeholder':\n '<EMAIL>'})\n", (305, 379), False, 'from django import forms\n'), ((1586, 1636), 'django.contrib.auth.authenticate', 'authenticate', ([], {'username': 'username', 'password': 'password'}), '(username=username, password=password)\n', (1598, 1636), False, 'from django.contrib.auth import authenticate\n'), ((1853, 1903), 'django.contrib.auth.authenticate', 'authenticate', ([], {'username': 'username', 'password': 'password'}), '(username=username, password=password)\n', (1865, 1903), False, 'from django.contrib.auth import authenticate\n'), ((606, 646), 'django.forms.HiddenInput', 'forms.HiddenInput', ([], {'attrs': "{'value': 'MN'}"}), "(attrs={'value': 'MN'})\n", (623, 646), False, 'from django import forms\n'), ((676, 771), 'django.forms.TextInput', 'forms.TextInput', ([], {'attrs': "{'class': 'form-control line-input', 'placeholder': 'Doc. Identif.'}"}), "(attrs={'class': 'form-control line-input', 'placeholder':\n 'Doc. 
Identif.'})\n", (691, 771), False, 'from django import forms\n'), ((880, 973), 'django.forms.Select', 'forms.Select', ([], {'attrs': "{'class': 'form-control line-input', 'choices': CoreUser.UF_CHOICES}"}), "(attrs={'class': 'form-control line-input', 'choices': CoreUser\n .UF_CHOICES})\n", (892, 973), False, 'from django import forms\n'), ((1084, 1172), 'django.forms.TextInput', 'forms.TextInput', ([], {'attrs': "{'class': 'form-control line-input', 'placeholder': 'Cidade'}"}), "(attrs={'class': 'form-control line-input', 'placeholder':\n 'Cidade'})\n", (1099, 1172), False, 'from django import forms\n'), ((1698, 1750), 'django.forms.ValidationError', 'forms.ValidationError', (['"""Usuário ou senha inválidos."""'], {}), "('Usuário ou senha inválidos.')\n", (1719, 1750), False, 'from django import forms\n'), ((1965, 2017), 'django.forms.ValidationError', 'forms.ValidationError', (['"""Usuário ou senha inválidos."""'], {}), "('Usuário ou senha inválidos.')\n", (1986, 2017), False, 'from django import forms\n'), ((2123, 2214), 'django.forms.PasswordInput', 'forms.PasswordInput', ([], {'attrs': "{'class': 'form-control line-input', 'placeholder': 'Senha'}"}), "(attrs={'class': 'form-control line-input',\n 'placeholder': 'Senha'})\n", (2142, 2214), False, 'from django import forms\n'), ((2286, 2388), 'django.forms.PasswordInput', 'forms.PasswordInput', ([], {'attrs': "{'class': 'form-control line-input', 'placeholder': 'Confirme a senha'}"}), "(attrs={'class': 'form-control line-input',\n 'placeholder': 'Confirme a senha'})\n", (2305, 2388), False, 'from django import forms\n'), ((2461, 2558), 'django.forms.TextInput', 'forms.TextInput', ([], {'attrs': "{'class': 'form-control line-input', 'placeholder': 'Nome de usuário'}"}), "(attrs={'class': 'form-control line-input', 'placeholder':\n 'Nome de usuário'})\n", (2476, 2558), False, 'from django import forms\n'), ((2616, 2704), 'django.forms.EmailInput', 'forms.EmailInput', ([], {'attrs': "{'class': 'form-control 
line-input', 'placeholder': 'Email'}"}), "(attrs={'class': 'form-control line-input', 'placeholder':\n 'Email'})\n", (2632, 2704), False, 'from django import forms\n'), ((2943, 3038), 'django.forms.TextInput', 'forms.TextInput', ([], {'attrs': "{'class': 'form-control line-input', 'placeholder': 'Email/Usuário'}"}), "(attrs={'class': 'form-control line-input', 'placeholder':\n 'Email/Usuário'})\n", (2958, 3038), False, 'from django import forms\n'), ((3124, 3220), 'django.forms.PasswordInput', 'forms.PasswordInput', ([], {'attrs': "{'class': 'form-control line-input', 'placeholder': 'Nova senha'}"}), "(attrs={'class': 'form-control line-input',\n 'placeholder': 'Nova senha'})\n", (3143, 3220), False, 'from django import forms\n'), ((3291, 3399), 'django.forms.PasswordInput', 'forms.PasswordInput', ([], {'attrs': "{'class': 'form-control line-input', 'placeholder': 'Confirmar a nova senha'}"}), "(attrs={'class': 'form-control line-input',\n 'placeholder': 'Confirmar a nova senha'})\n", (3310, 3399), False, 'from django import forms\n')]
|
#!/usr/bin/python3
import extractWasmExport
import extractComments
from io import StringIO
import json as JsonUtil
import sys
class FunctionDefinition:
    """One exported function: name, parameter/return types, and doc comment."""

    def __init__(self, item):
        self.parameters = item["params"]
        # First (and only) result type, or "void" for functions with no result.
        return_types = item["returnTypes"]
        self.returnType = return_types[0] if return_types else "void"
        self.name = item["name"]
        self.comment = item["comment"]

    def _signature(self):
        """Render the parameter list as 'name: type, ...'."""
        return ", ".join(f"{param['name']}: {param['type']}" for param in self.parameters)

    def toModuleDef(self):
        """Render as a module-level TypeScript 'export function' declaration."""
        return f"{self.comment}\nexport function {self.name}({self._signature()}): {self.returnType};"

    def toClassFunctionDef(self):
        """Render as a TypeScript class-member method declaration."""
        return f"{self.comment}\n{self.name}({self._signature()}): {self.returnType};"
class FunctionDefinitionFilter:
    """Narrows comment-derived definitions to actual wasm exports and
    rewrites their native types into TypeScript-compatible ones."""

    def __init__(self, comments):
        self.comments = comments

    def filterByExports(self, exports):
        """Drop every comment whose function name is not actually exported."""
        exported_names = {export["name"] for export in exports}
        self.comments = [
            comment for comment in self.comments
            if comment["name"] in exported_names
        ]

    def addFunctionTypeDefinition(self, exports):
        """Attach each export's wasm-level parameter/result types to its comment."""
        for comment in self.comments:
            matches = [
                export for export in exports
                if export["name"] == comment["name"]
            ]
            assert len(matches) == 1
            export = matches[0]
            comment["params"] = [
                {"name": native["name"], "type": native["type"], "exportType": wasm_type}
                for native, wasm_type in zip(comment["params"], export["params"])
            ]
            comment["returnTypes"] = export["results"]

    def transformType(self):
        """Collapse every parameter and return type down to 'number'."""
        for comment in self.comments:
            comment["params"] = [
                {"name": param["name"], "type": "number"}
                for param in comment["params"]
            ]
            comment["returnTypes"] = ["number"] * len(comment["returnTypes"])
def main(argv):
    """CLI entry point: emit a TypeScript declaration for a wasm module.

    argv layout: [script, wasm_file_path, source_file_path, *extra_args];
    the extra args are forwarded to the comment extractor (presumably
    compiler flags — confirm against extractComments).
    """
    path = argv[1]
    sourcePath = argv[2]
    args = argv[3:]
    # Collect documented function definitions (native names/types) from the source file.
    commentsExecutor = extractComments.Executor(extractComments.SimpleTreeWalker(), extractComments.DictionaryGeneratingVisitor())
    commentsExecutor.path = sourcePath
    commentsExecutor.args = args
    commentsExecutor.run()
    # Extract the export table (names + wasm-level signatures); its output is JSON.
    exportExecutor = extractWasmExport.Executor()
    exportExecutor.targetFilePath = path
    exportOutput = exportExecutor.run()
    commentsDefinition = commentsExecutor.generator.getDefinitions()
    exportDefinition = JsonUtil.loads(exportOutput)
    # Keep only commented functions that are actually exported, attach the
    # wasm signature info, then collapse all types to TypeScript's "number".
    merger = FunctionDefinitionFilter(commentsDefinition)
    merger.filterByExports(exportDefinition)
    merger.addFunctionTypeDefinition(exportDefinition)
    merger.transformType()
    classDefs = "\n".join([ FunctionDefinition(comment).toModuleDef() for comment in merger.comments ])
    # NOTE(review): despite its name, this join adds no indentation — it
    # re-emits each line unchanged. Confirm whether indentation was intended.
    indentedClassDefs = "\n".join([ f"{ line }" for line in classDefs.split("\n") ])
    # NOTE(review): "Auto-gererated" is a typo, but it is part of the emitted
    # output, so it is preserved here.
    print(f"""
/* Auto-gererated type definition. */
{ indentedClassDefs }
export const memory: WebAssembly.Memory;
""")
if __name__ == "__main__":
    main(sys.argv)
|
[
"extractWasmExport.Executor",
"extractComments.SimpleTreeWalker",
"json.loads",
"extractComments.DictionaryGeneratingVisitor"
] |
[((2501, 2529), 'extractWasmExport.Executor', 'extractWasmExport.Executor', ([], {}), '()\n', (2527, 2529), False, 'import extractWasmExport\n'), ((2704, 2732), 'json.loads', 'JsonUtil.loads', (['exportOutput'], {}), '(exportOutput)\n', (2718, 2732), True, 'import json as JsonUtil\n'), ((2297, 2331), 'extractComments.SimpleTreeWalker', 'extractComments.SimpleTreeWalker', ([], {}), '()\n', (2329, 2331), False, 'import extractComments\n'), ((2333, 2378), 'extractComments.DictionaryGeneratingVisitor', 'extractComments.DictionaryGeneratingVisitor', ([], {}), '()\n', (2376, 2378), False, 'import extractComments\n')]
|
import tensorflow as tf
import importlib
import pytest
from triplet_tools import triplet_batch_semihard_loss, triplet_batch_priming_loss, triplet_batch_hard_loss
try:
import keras
except ImportError:
pass
# NOTE(review): this decorator relies on `importlib.util` being reachable as
# an attribute after a bare `import importlib`; `import importlib.util` at the
# top of the file would make that guaranteed rather than incidental.
@pytest.mark.skipif(importlib.util.find_spec("keras") is None,
                   reason='Keras is not installed in this environment (not needed when testing tensorflow 2 )')
class TestLossFunctionsKeras:
    """Trains a tiny Keras model on MNIST with the custom triplet losses and
    checks that the loss decreases to an expected level."""
    def setup_method(self):
        # Runs before every test; caches the MNIST (train, test) tuples.
        self.train_data, self.test_data = self.load_mnist()
    def load_mnist(self):
        """Return ((x_train, y_train), (x_test, y_test)) from the Keras MNIST loader."""
        mnist = tf.keras.datasets.mnist
        return mnist.load_data()
    def make_model(self):
        """Tiny MLP producing 10-d outputs with a linear (raw) final activation."""
        model = keras.Sequential([
            keras.layers.Flatten(input_shape=(28, 28)),
            keras.layers.Dense(128, activation='relu'),
            keras.layers.Dense(10, activation='linear')
        ])
        return model
    def test_batch_priming_loss(self):
        """The priming loss should drop below 0.1 and decrease between epochs."""
        model = self.make_model()
        loss_func = triplet_batch_priming_loss()
        model.compile('adam', loss_func)
        hist = model.fit(self.train_data[0], self.train_data[1], epochs=2)
        loss_hist = hist.history['loss']
        assert loss_hist[-1] < 0.1
        assert loss_hist[0] > loss_hist[1]
    def test_batch_hard_loss(self):
        """Batch-hard loss should converge after priming the model."""
        model = self.make_model()
        # One epoch with the priming loss first — presumably to warm-start the
        # embeddings so batch-hard mining sees non-degenerate triplets; confirm.
        loss_func = triplet_batch_priming_loss()
        model.compile('adam', loss_func)
        hist = model.fit(self.train_data[0], self.train_data[1], epochs=1)
        bh_loss_func = triplet_batch_hard_loss()
        model.compile('adam', bh_loss_func)
        hist = model.fit(self.train_data[0], self.train_data[1], epochs=10, batch_size=100, shuffle=True)
        loss_hist = hist.history['loss']
        assert loss_hist[-1] < 0.5
        assert loss_hist[0] > loss_hist[1]
|
[
"importlib.util.find_spec",
"keras.layers.Flatten",
"keras.layers.Dense",
"triplet_tools.triplet_batch_hard_loss",
"triplet_tools.triplet_batch_priming_loss"
] |
[((967, 995), 'triplet_tools.triplet_batch_priming_loss', 'triplet_batch_priming_loss', ([], {}), '()\n', (993, 995), False, 'from triplet_tools import triplet_batch_semihard_loss, triplet_batch_priming_loss, triplet_batch_hard_loss\n'), ((1322, 1350), 'triplet_tools.triplet_batch_priming_loss', 'triplet_batch_priming_loss', ([], {}), '()\n', (1348, 1350), False, 'from triplet_tools import triplet_batch_semihard_loss, triplet_batch_priming_loss, triplet_batch_hard_loss\n'), ((1490, 1515), 'triplet_tools.triplet_batch_hard_loss', 'triplet_batch_hard_loss', ([], {}), '()\n', (1513, 1515), False, 'from triplet_tools import triplet_batch_semihard_loss, triplet_batch_priming_loss, triplet_batch_hard_loss\n'), ((236, 269), 'importlib.util.find_spec', 'importlib.util.find_spec', (['"""keras"""'], {}), "('keras')\n", (260, 269), False, 'import importlib\n'), ((685, 727), 'keras.layers.Flatten', 'keras.layers.Flatten', ([], {'input_shape': '(28, 28)'}), '(input_shape=(28, 28))\n', (705, 727), False, 'import keras\n'), ((741, 783), 'keras.layers.Dense', 'keras.layers.Dense', (['(128)'], {'activation': '"""relu"""'}), "(128, activation='relu')\n", (759, 783), False, 'import keras\n'), ((797, 840), 'keras.layers.Dense', 'keras.layers.Dense', (['(10)'], {'activation': '"""linear"""'}), "(10, activation='linear')\n", (815, 840), False, 'import keras\n')]
|
"""
..module:: crawl_dictionary
:synopsis: This module is designed to add a given parameter to a provided
dictionary under a designated parent. It searches for the parent
recursively in order to examine all possible levels of nested dictionaries.
If the parent is found, the parameter is added to the dictionary. The
entire dictionary is returned, along with a boolean flag to indicate
whether or not the insertion was successful.
..class:: ConfigGenerator
:synopsis: This class defines a PyQt widget that uses multiple methods to
collect user input in order to generate a .yaml config file needed by
../hlsp_to_xml.py. This will help to ensure that these config files are
properly formatted and include the necessary information. This form
includes functionality to add extra rows for unique parameter definitions,
load an existing .yaml file into the form, reset all changes made to the
form, save all inputs to a .yaml config file, or save a .yaml file and
immediately launch ../hlsp_to_xml.py with said file.
"""
import csv
import os
import sys
import yaml
from hlsp_to_xml import hlsp_to_xml
import lib.GUIbuttons as gb
import lib.HeaderKeyword as hk
from lib.MyError import MyError
from util.read_yaml import read_yaml
try:
from PyQt5.QtCore import *
from PyQt5.QtWidgets import *
from PyQt5.QtGui import *
except ImportError:
from PyQt4.QtCore import *
from PyQt4.QtGui import *
HEADER_KEYWORDS = "resources/hlsp_keywords.csv"
#--------------------
def crawl_dictionary(dictionary, parent, parameter, inserted=False):
    """ Recursively look for a given parent key within a (possibly nested)
    dictionary.  If the parent is found, insert the new parameter and set the
    'inserted' flag.  Return both the updated dictionary and the flag.

    :param dictionary: A dict object containing CAOM parameters.  Nested
    dictionaries are possible in this object.
    :type dictionary: dict

    :param parent: The key to search dictionary keys for.  May not be
    currently present.
    :type parent: str

    :param parameter: A single-key single-value dictionary, to be inserted
    into the existing dictionary under the parent key.
    :type parameter: dict

    :param inserted: A flag to keep track of whether or not parent has been
    found and the parameter inserted.
    :type inserted: bool
    """
    # If the requested parent already exists, either assign it the parameter
    # value if it is empty or update the current value.  Set the inserted flag.
    if parent in dictionary:
        if dictionary[parent] == "":
            dictionary[parent] = parameter
        else:
            dictionary[parent].update(parameter)
        inserted = True
    # If the requested parent cannot be found, recursively call
    # crawl_dictionary on any subdictionaries found within the current one.
    # Iterate over key/value pairs directly: the previous implementation
    # looked up the key with tuple.index(value), which returns the position
    # of the first *equal* value and therefore reassigned the wrong key
    # (aliasing two keys to one dict object) whenever two keys held equal
    # values.
    else:
        for key, value in list(dictionary.items()):
            if isinstance(value, dict):
                sub_dictionary, inserted = crawl_dictionary(value,
                                                           parent,
                                                           parameter,
                                                           inserted)
                dictionary[key] = sub_dictionary
    # Return both the dictionary and the inserted flag.
    return (dictionary, inserted)
#--------------------
class HeaderTypeBox(QComboBox):
    """ A QComboBox pre-populated with the valid FITS header type choices.
    """
    def __init__(self, parent):
        super().__init__(parent)
        self.header_types = ["STANDARD", "HST", "KEPLER"]
        for header_type in self.header_types:
            self.addItem(header_type)
    def setTo(self, target):
        """ Select *target* (case-insensitive) if it is a known header type,
        otherwise fall back to the first entry.
        """
        choice = target.upper()
        if choice in self.header_types:
            self.setCurrentIndex(self.findText(choice))
        else:
            self.setCurrentIndex(0)
#--------------------
class DataTypeBox(QComboBox):
    """ A QComboBox pre-populated with the valid CAOM dataProductType
    choices (plus an empty default entry).
    """
    def __init__(self, parent):
        super().__init__(parent)
        self.data_types = [
            "",
            "IMAGE",
            "SPECTRUM",
            "TIMESERIES",
            "VISIBILITY",
            "EVENTLIST",
            "CUBE",
            "CATALOG",
            "MEASUREMENTS",
        ]
        for data_type in self.data_types:
            self.addItem(data_type)
    def setTo(self, target):
        """ Select *target* (case-insensitive) if it is a known data type,
        otherwise fall back to the first (empty) entry.
        """
        choice = target.upper()
        if choice in self.data_types:
            self.setCurrentIndex(self.findText(choice))
        else:
            self.setCurrentIndex(0)
#--------------------
class CAOMKeywordBox(QComboBox):
    """ An editable QComboBox populated with valid CAOM parameter choices.
    Keywords already modified by the code are listed separately from those
    not currently in use, and each keyword maps to a default XML parent.
    """
    def __init__(self):
        super().__init__()
        self.setEditable(True)
        # Keywords the code already populates, mapped to their XML parents.
        self.inuse = {"algorithm": "metadataList",
                      "aperture_radius": "metadataList",
                      "collection": "metadataList",
                      "instrument_keywords": "metadataList",
                      "instrument_name": "metadataList",
                      "intent": "metadataList",
                      "name": "provenance",
                      "observationID": "metadataList",
                      "project": "provenance",
                      "targetPosition_coordinates_cval1": "metadataList",
                      "targetPosition_coordinates_cval2": "metadataList",
                      "targetPosition_coordsys": "metadataList",
                      "targetPosition_equinox": "metadataList",
                      "target_name": "metadataList",
                      "telescope_name": "metadataList",
                      "type": "metadataList",
                      "version": "provenance"
                      }
        # Valid CAOM keywords not currently used, with default parents.
        self.unused = {"dataRelease": "provenance",
                       "lastExecuted": "provenance",
                       "metaRelease": "metadataList",
                       "producer": "provenance",
                       "proposal_id": "provenance",
                       "proposal_pi": "provenance",
                       "proposal_title": "provenance",
                       "reference": "provenance",
                       "runID": "metadataList",
                       "sequenceNumber": "metadataList",
                       "target_keywords": "metadataList",
                       "target_moving": "metadataList",
                       "target_type": "metadataList"
                       }
        # Merged lookup table used by setTo() and getXMLParent().
        self.allvalues = dict(self.inuse)
        self.allvalues.update(self.unused)
        # Bold font marks the non-selectable category separator rows.
        bold = QFont()
        bold.setBold(True)
        # Unused keywords go at the top of the list.
        self.addItem("")
        self.addItem("Unused Keywords")
        unused_header = self.model().item(1)
        unused_header.setSelectable(False)
        unused_header.setFont(bold)
        for keyword in sorted(self.unused):
            self.addItem(keyword)
        # Then a divider, followed by the keywords already in use.
        self.addItem("---------------")
        self.addItem("Keywords In Use")
        divider_item = self.model().item(self.count() - 2)
        divider_item.setSelectable(False)
        inuse_header = self.model().item(self.count() - 1)
        inuse_header.setSelectable(False)
        inuse_header.setFont(bold)
        for keyword in sorted(self.inuse):
            self.addItem(keyword)
    def setTo(self, target):
        """ Set the combo box to a given CAOM keyword, or to the empty first
        entry if the keyword is not recognized.
        """
        if target in self.allvalues:
            self.setCurrentIndex(self.findText(target))
        else:
            self.setCurrentIndex(0)
    def getXMLParent(self, keyword):
        """ Return the default XML parent for a given CAOM keyword, or None
        if the keyword is not in the lookup table.
        """
        return self.allvalues.get(keyword, None)
#--------------------
class ConfigGenerator(QWidget):
""" This class builds a pyqt GUI for generating a properly-formatted YAML
config file to feed into the template XML generator.
"""
    def __init__(self):
        """ Initialize the widget and build the form via initUI(). """
        super().__init__()
        # Set by code outside this class; not used during construction here.
        self.file_types = None
        self.initUI()
    def initUI(self):
        """ Create a GUI with input fields for multiple parameters, which will
        be aggregated into a .yaml config file.

        Builds, in order: the filepath inputs, the overwrite radio buttons,
        the header/data type combo boxes, the expandable HLSP-unique
        parameters table, the expandable header-defaults table, and finally
        the master grid plus signal connections.
        """
        # Create some formatting items for use throughout.
        firstcol = 100
        space = QSpacerItem(50, 1)
        # Table of known FITS header keywords, keyed by header type.
        self.keywords = hk.read_header_keywords_table(HEADER_KEYWORDS)
        # Create a section for input of filepath variables. Includes lineedit
        # objects and buttons to launch file dialogs if the desired paths are
        # local.
        filepath_label = QLabel("Filepaths:", self)
        data_dir_label = QLabel("HLSP Data: ", filepath_label)
        data_dir_label.setAlignment(Qt.AlignRight)
        data_dir_label.setToolTip(("Enter the location of the HLSP data files "
                                   "to scan."))
        self.data_dir_edit = QLineEdit(data_dir_label)
        output_dir_label = QLabel("Output XML File: ", filepath_label)
        output_dir_label.setAlignment(Qt.AlignRight)
        output_dir_label.setToolTip(("Provide a file path and name for the XML"
                                     " result file."))
        self.output_dir_edit = QLineEdit(output_dir_label)
        browse_data_dir = QPushButton()
        browse_style = browse_data_dir.style()
        icon = browse_style.standardIcon(QStyle.SP_DirIcon)
        browse_data_dir.setIcon(icon)
        browse_data_dir.setIconSize(QSize(14,14))
        browse_data_dir.setMaximumWidth(26)
        browse_data_dir.setMaximumHeight(22)
        browse_output_dir = QPushButton()
        browse_output_dir.setIcon(icon)
        browse_output_dir.setIconSize(QSize(14,14))
        browse_output_dir.setMaximumWidth(26)
        browse_output_dir.setMaximumHeight(22)
        self.filepaths_grid = QGridLayout()
        self.filepaths_grid.addWidget(data_dir_label, 0, 0)
        self.filepaths_grid.addWidget(self.data_dir_edit, 0, 1)
        self.filepaths_grid.addWidget(browse_data_dir, 0, 2)
        self.filepaths_grid.addWidget(output_dir_label, 1, 0)
        self.filepaths_grid.addWidget(self.output_dir_edit, 1, 1)
        self.filepaths_grid.addWidget(browse_output_dir, 1, 2)
        # Set the boolean overwrite parameter with on/off radio button objects.
        overwrite_label = QLabel("Overwrite: ", self)
        overwrite_label.setMinimumWidth(firstcol)
        overwrite_label.setToolTip(("Allow hlsp_to_xml.py to overwrite an "
                                    "existing XML file."))
        self.overwrite_on = QRadioButton("On", overwrite_label)
        self.overwrite_on.setChecked(True)
        self.overwrite_off = QRadioButton("Off", overwrite_label)
        self.overwrite_grid = QGridLayout()
        self.overwrite_grid.addItem(space, 0, 0)
        self.overwrite_grid.addWidget(overwrite_label, 0, 1)
        self.overwrite_grid.addWidget(self.overwrite_on, 0, 2)
        self.overwrite_grid.addWidget(self.overwrite_off, 0, 3)
        # Set the type of .fits headers with a modified QComboBox object.
        headertype_label = QLabel("Header Type: ", self)
        headertype_label.setMinimumWidth(firstcol)
        headertype_label.setToolTip(("Select the FITS header type this HLSP "
                                     "uses."))
        self.headertype_box = HeaderTypeBox(headertype_label)
        self.headertype_box.setMinimumWidth(175)
        self.headertype_grid = QGridLayout()
        self.headertype_grid.addItem(space, 0, 0)
        self.headertype_grid.addWidget(headertype_label, 0, 1)
        self.headertype_grid.addWidget(self.headertype_box, 0, 2)
        # Select the most appropriate data type to apply to the observation
        # using a modified QComboBox.
        datatype_label = QLabel("Data Type: ", self)
        datatype_label.setMinimumWidth(firstcol)
        datatype_label.setToolTip(("Add special CAOM parameters for various "
                                   "data types."))
        self.datatype_box = DataTypeBox(datatype_label)
        self.datatype_box.setMinimumWidth(175)
        self.datatype_grid = QGridLayout()
        self.datatype_grid.addItem(space, 0, 0, -1, 1)
        self.datatype_grid.addWidget(datatype_label, 0, 1)
        self.datatype_grid.addWidget(self.datatype_box, 0, 2)
        # Create a layout for the HLSP-Unique Parameters title and new entry
        # button.
        uniques_label = QLabel("HLSP-Unique Parameters: ", self)
        uniques_label.setToolTip(("Define additional CAOM parameters to "
                                  "insert that are not defined in the FITS "
                                  "headers."))
        uniques_space = QSpacerItem(100, 1)
        add_parameter = gb.GreyButton("+ add a new parameter", 20)
        add_parameter.setMinimumWidth(125)
        add_parameter.setMaximumWidth(200)
        self.uniques_title_grid = QGridLayout()
        self.uniques_title_grid.addWidget(uniques_label, 0, 0)
        self.uniques_title_grid.addItem(uniques_space, 0, 1)
        self.uniques_title_grid.addWidget(add_parameter, 0, 2)
        # Create custom unique parameters to write into the yaml file. This
        # list is expandable. Custom parents can be defined in addition to
        # metadataList and provenance.
        parent_label = QLabel("XML Parent:", uniques_label)
        parent_label.setAlignment(Qt.AlignHCenter)
        caom_label = QLabel("CAOM Keyword:", uniques_label)
        caom_label.setAlignment(Qt.AlignHCenter)
        value_label = QLabel("Value:", uniques_label)
        value_label.setAlignment(Qt.AlignHCenter)
        parent_box = QComboBox(parent_label, editable=True)
        self.xml_parents = ["", "metadataList", "provenance"]
        for p in self.xml_parents:
            parent_box.addItem(p)
        caom_box = CAOMKeywordBox()
        value_edit = QLineEdit(value_label)
        self.uniques_grid = QGridLayout()
        self.uniques_grid.addWidget(caom_label, 0, 0)
        self.uniques_grid.addWidget(parent_label, 0, 1)
        self.uniques_grid.addWidget(value_label, 0, 2)
        self.uniques_grid.addWidget(caom_box, 1, 0)
        self.uniques_grid.addWidget(parent_box, 1, 1)
        self.uniques_grid.addWidget(value_edit, 1, 2)
        # Row bookkeeping: firstrow_* is the first data row, nextrow_* is
        # where addParameterClicked() will insert the next row.
        self.firstrow_uniques = 1
        self.nextrow_uniques = 2
        self.uniques_grid.setRowStretch(self.nextrow_uniques, 1)
        self.uniques_grid.setColumnStretch(0, 0)
        self.uniques_grid.setColumnStretch(1, 1)
        self.uniques_grid.setColumnStretch(2, 1)
        # Create a layout for the Update Header Defaults title and new entry
        # button.
        headerdefault_label = QLabel("Update Header Defaults: ", self)
        headerdefault_label.setToolTip(("Entries here will update default "
                                        "values for .fits headers if they "
                                        "exist or create new ones if they "
                                        "don't."))
        headerdefault_space = QSpacerItem(300, 1)
        add_headerdefault = gb.GreyButton("+ add a new keyword", 20)
        add_headerdefault.setMaximumWidth(200)
        self.headerdefault_title_grid = QGridLayout()
        self.headerdefault_title_grid.addWidget(headerdefault_label, 0, 0)
        self.headerdefault_title_grid.addItem(headerdefault_space, 0, 1)
        self.headerdefault_title_grid.addWidget(add_headerdefault, 0, 2)
        # Adjust .fits header keyword default values or add new header keywords
        # along with the necessary parameters to add to the template file.
        # This is an expandable list with fields that will automatically
        # populate based on a user's keyword selection.
        keyword_label = QLabel("FITS Keyword:", headerdefault_label)
        keyword_label.setAlignment(Qt.AlignHCenter)
        headcaom_label = QLabel("CAOM Keyword:", headerdefault_label)
        headcaom_label.setAlignment(Qt.AlignHCenter)
        xmlparent_label = QLabel("XML Parent:", headerdefault_label)
        xmlparent_label.setAlignment(Qt.AlignHCenter)
        extension_label = QLabel("Extension:", headerdefault_label)
        extension_label.setAlignment(Qt.AlignHCenter)
        default_label = QLabel("Default Value:", headerdefault_label)
        default_label.setAlignment(Qt.AlignHCenter)
        self.keyword_box = QComboBox(keyword_label, editable=True)
        self.keyword_box.addItem("")
        # The keyword list tracks the currently selected header type; it is
        # re-populated by headerTypeChanged() when the selection changes.
        initial_header_type = self.headertype_box.header_types[0].lower()
        self.header_keywords = self.keywords[initial_header_type]
        for k in self.header_keywords:
            self.keyword_box.addItem(k.keyword)
        headercaom_box = CAOMKeywordBox()
        xmlparent_box = QComboBox(xmlparent_label, editable=True)
        for p in self.xml_parents:
            xmlparent_box.addItem(p)
        extension_edit = QLineEdit(extension_label)
        default_edit = QLineEdit(default_label)
        self.headerdefault_grid = QGridLayout()
        self.headerdefault_grid.addWidget(keyword_label, 0, 0)
        self.headerdefault_grid.addWidget(headcaom_label, 0, 1)
        self.headerdefault_grid.addWidget(xmlparent_label, 0, 2)
        self.headerdefault_grid.addWidget(extension_label, 0, 3)
        self.headerdefault_grid.addWidget(default_label, 0, 4)
        self.headerdefault_grid.addWidget(self.keyword_box, 1, 0)
        self.headerdefault_grid.addWidget(headercaom_box, 1, 1)
        self.headerdefault_grid.addWidget(xmlparent_box, 1, 2)
        self.headerdefault_grid.addWidget(extension_edit, 1, 3)
        self.headerdefault_grid.addWidget(default_edit, 1, 4)
        # Same row bookkeeping as the uniques table above.
        self.firstrow_headers = 1
        self.nextrow_headers = 2
        self.headerdefault_grid.setRowStretch(self.nextrow_headers, 1)
        self.headerdefault_grid.setColumnStretch(0, 0)
        self.headerdefault_grid.setColumnStretch(1, 0)
        self.headerdefault_grid.setColumnStretch(2, 0)
        self.headerdefault_grid.setColumnStretch(3, 1)
        self.headerdefault_grid.setColumnStretch(4, 1)
        # Create a grid layout and add all the layouts and remaining widgets.
        self.meta_grid = QGridLayout()
        self.meta_grid.setColumnStretch(1, 1)
        self.meta_grid.setColumnStretch(2, 1)
        self.meta_grid.setColumnStretch(3, 0)
        self.meta_grid.setColumnStretch(4, 0)
        self.meta_grid.setColumnStretch(5, 0)
        self.meta_grid.setRowStretch(9, 0)
        self.meta_grid.setRowStretch(10, 1)
        self.meta_grid.addWidget(filepath_label, 0, 0)
        self.meta_grid.addLayout(self.overwrite_grid, 0, 4, 1, 2)
        self.meta_grid.addLayout(self.filepaths_grid, 1, 0, 2, 4)
        self.meta_grid.addLayout(self.headertype_grid, 1, 4)
        self.meta_grid.addLayout(self.datatype_grid, 2, 4, 1, 1)
        self.meta_grid.addLayout(self.uniques_title_grid, 3, 0, 1, -1)
        self.meta_grid.addLayout(self.uniques_grid, 4, 0, 4, -1)
        self.meta_grid.addLayout(self.headerdefault_title_grid, 8, 0, 1, -1)
        self.meta_grid.addLayout(self.headerdefault_grid, 9, 0, 4, -1)
        # Set the window layout and show it.
        self.setLayout(self.meta_grid)
        self.show()
        # Add button actions.
        browse_data_dir.clicked.connect(self.hlspClicked)
        browse_output_dir.clicked.connect(self.outputClicked)
        add_parameter.clicked.connect(self.addParameterClicked)
        add_headerdefault.clicked.connect(self.addKeywordClicked)
        caom_box.currentIndexChanged.connect(self.caomKeywordSelected)
        headercaom_box.currentIndexChanged.connect(self.caomKeywordSelected)
        self.headertype_box.currentIndexChanged.connect(self.headerTypeChanged)
        self.keyword_box.currentIndexChanged.connect(self.fitsKeywordSelected)
def hlspClicked(self):
""" Launch a file dialog to select a directory containing HLSP data.
"""
navigate = QFileDialog.getExistingDirectory(self,
"Select HLSP Directory",
".")
self.data_dir_edit.clear()
self.data_dir_edit.insert(navigate)
def outputClicked(self):
""" Launch a file dialog to define the XML output file name & path.
"""
navigate = QFileDialog.getSaveFileName(self,
"Save Output XML File",
".")
path = navigate[0]
self.output_dir_edit.clear()
self.output_dir_edit.insert(path)
def headerTypeChanged(self):
""" When the header_type is changed, set the header_keywords to the
new list. Re-populate any existing empty keyword menus. Skip any
rows that have already been populated.
"""
# Get the new header type and reset the header_keywords list
# accordingly.
new_type = self.headertype_box.currentText().lower()
self.header_keywords = self.keywords[new_type]
# Iterate through all rows in the headerdefault_grid. Only update the
# list choices for any rows that are totally empty.
for row in range(self.firstrow_headers, self.nextrow_headers):
key_widg = self.headerdefault_grid.itemAtPosition(row, 0).widget()
caom_widg = self.headerdefault_grid.itemAtPosition(row, 1).widget()
xml_widg = self.headerdefault_grid.itemAtPosition(row, 2).widget()
ext_widg = self.headerdefault_grid.itemAtPosition(row, 3).widget()
def_widg = self.headerdefault_grid.itemAtPosition(row, 4).widget()
caom_text = str(caom_widg.currentText())
xml_text = str(xml_widg.currentText())
ext_text = str(ext_widg.text())
def_text = str(def_widg.text())
if (caom_text == ""
and xml_text == ""
and ext_text == ""
and def_text == ""):
key_widg.clear()
key_widg.addItem("")
for key in self.header_keywords:
key_widg.addItem(key.keyword)
def caomKeywordSelected(self):
""" In the HLSP-Unique Parameters section, we want to update the XML
Parent value when a CAOM Keyword is selected from the CAOMKeywordBox.
"""
# Determine which section is sending the signal and get the position
# of the signal sender.
sender = self.sender()
uniques_index = self.uniques_grid.indexOf(sender)
headers_index = self.headerdefault_grid.indexOf(sender)
if uniques_index >= 0:
section = self.uniques_grid
pos = section.getItemPosition(uniques_index)
elif headers_index >= 0:
section = self.headerdefault_grid
pos = section.getItemPosition(headers_index)
else:
return
row = pos[0]
col = pos[1]
# Get the widgets at this position.
caom_key_box = section.itemAtPosition(row, col).widget()
xml_parent_box = section.itemAtPosition(row, col+1).widget()
# Get the new CAOM keyword and the associated XML Parent value.
new_caom_selected = caom_key_box.currentText()
new_xml_parent = caom_key_box.getXMLParent(new_caom_selected)
# If getXMLParent finds a match, look for this value in the
# contents of xml_parent_box.
if new_xml_parent:
n = xml_parent_box.findText(new_xml_parent)
# If the chosen XML parent already exists, set the QComboBox to
# that index. Otherwise, insert it as new text.
if n >= 0:
xml_parent_box.setCurrentIndex(n)
else:
xml_parent_box.setCurrentText(new_xml_parent)
# If no corresponding XML parent is found, set it to "".
else:
xml_parent_box.setCurrentIndex(0)
def addParameterClicked(self):
""" Add a new unique parameter entry row into the self.nextrow_uniques
position, then update self.nextrow_uniques.
"""
# Make a new 'Parent:' combo box and populate it with self.xml_parents.
new_parent = QComboBox(editable=True)
for p in self.xml_parents:
new_parent.addItem(p)
# Make new line edits for 'CAOM Keyword:' and 'Value:'.
new_caom = CAOMKeywordBox()
new_value = QLineEdit()
# Add the new widgets to the uniques_grid layout.
self.uniques_grid.addWidget(new_caom, self.nextrow_uniques, 0)
self.uniques_grid.addWidget(new_parent, self.nextrow_uniques, 1)
self.uniques_grid.addWidget(new_value, self.nextrow_uniques, 2)
self.uniques_grid.setRowStretch(self.nextrow_uniques, 0)
self.uniques_grid.setRowStretch(self.nextrow_uniques+1, 1)
# Update self.nextrow_uniques.
self.nextrow_uniques += 1
# Connect the new CAOMKeywordBox object to the module that will
# update the XML Parent depending on the value selected.
new_caom.currentIndexChanged.connect(self.caomKeywordSelected)
    def fitsKeywordSelected(self):
        """ When a user chooses a header keyword in a headerdefault_grid row,
        populate the CAOM Property, XML Parent, Extension, and Default Value
        (if applicable) fields based on the chosen keyword.
        """
        # Get the position of the signal sender.
        sender = self.sender()
        ind = self.headerdefault_grid.indexOf(sender)
        pos = self.headerdefault_grid.getItemPosition(ind)
        row = pos[0]
        # Get the sender widget and the new keyword chosen.
        this_keyword = self.headerdefault_grid.itemAtPosition(row, 0).widget()
        new_keyword = this_keyword.currentText()
        # The user may have entered a new header keyword, in which case we
        # simply return without populating anything.
        # NOTE(review): assumes lib.HeaderKeyword's .find() raises KeyError
        # for unknown keywords -- confirm against that helper.
        try:
            new_obj = self.header_keywords.find(new_keyword)
        except KeyError:
            return
        # Ignore any empty string entries.
        if new_obj is None:
            return
        # If the header already exists, populate the remaining row fields with
        # data from the HeaderKeyword object. The CAOMKeywordBox change will
        # update the XML parent selection automatically, so skip it here.
        this_caom = self.headerdefault_grid.itemAtPosition(row, 1).widget()
        this_caom.setTo(new_obj.caom)
        this_ext = self.headerdefault_grid.itemAtPosition(row, 3).widget()
        this_ext.setText(new_obj.headerName)
def addKeywordClicked(self):
""" Create a new row in the headerdefault_grid table for modifying
.fits header keyword properties.
"""
# Make a new keyword combo box and populate it with the current
# header_keywords list.
new_keyword_box = QComboBox(editable=True)
new_keyword_box.addItem("")
for header_key in self.header_keywords:
new_keyword_box.addItem(header_key.keyword)
# Connect the new keyword combo box to the fitsKeywordSelected action.
new_keyword_box.currentIndexChanged.connect(self.fitsKeywordSelected)
# Make a new 'Parent:' combo box and populate it with self.xml_parents.
new_xmlparent = QComboBox(editable=True)
for p in self.xml_parents:
new_xmlparent.addItem(p)
# Make new line edits for 'CAOM Property:', 'Extension:', and "Default
# value".
new_headcaom = CAOMKeywordBox()
new_headcaom.currentIndexChanged.connect(self.caomKeywordSelected)
new_extension = QLineEdit()
new_default = QLineEdit()
# Add the new widgets to the headerdefault_grid layout.
self.headerdefault_grid.addWidget(new_keyword_box,
self.nextrow_headers, 0)
self.headerdefault_grid.addWidget(new_headcaom,
self.nextrow_headers, 1)
self.headerdefault_grid.addWidget(new_xmlparent,
self.nextrow_headers, 2)
self.headerdefault_grid.addWidget(new_extension,
self.nextrow_headers, 3)
self.headerdefault_grid.addWidget(new_default,
self.nextrow_headers, 4)
self.headerdefault_grid.setRowStretch(self.nextrow_headers, 0)
self.headerdefault_grid.setRowStretch(self.nextrow_headers+1, 1)
# Update self.nextrow_headers.
self.nextrow_headers += 1
    def clearConfigPaths(self):
        """ Clear both filepath line edits. """
        self.data_dir_edit.clear()
        self.output_dir_edit.clear()
    def loadConfigPaths(self, paths_dict):
        """ Replace the filepath line edit contents with values from a dict.

        :param paths_dict: Must contain "InputDir" and "Output" keys.
        :type paths_dict: dict
        """
        self.clearConfigPaths()
        self.data_dir_edit.insert(paths_dict["InputDir"])
        self.output_dir_edit.insert(paths_dict["Output"])
    def setProductType(self, data_product_type):
        """ Set the data type combo box to the given CAOM dataProductType. """
        self.datatype_box.setTo(data_product_type)
    def setHeaderStandard(self, header_type):
        """ Set the header type combo box to the given FITS header standard. """
        self.headertype_box.setTo(header_type)
    def loadDictionaries(self, uniques):
        """ Recursively handles loading multi-level dictionaries to the unique
        parameters table, creating new rows via addParameterClicked() as
        needed.

        :param uniques: A dictionary containing CAOM parameters. May contain
        nested dictionaries.
        :type uniques: dict
        """
        if uniques is None:
            return
        parents = uniques.keys()
        for p in parents:
            sub_dictionary = uniques[p]
            # Working copy so entries can be deleted as they are consumed by
            # the recursive calls below.
            copy_dictionary = dict(sub_dictionary)
            # Look at the first row to see if you're loading into FIRST_ENTRY
            # or NEXT_ENTRY.
            first_parent = self.uniques_grid.itemAtPosition(
                self.firstrow_uniques,0)
            first_widget = first_parent.widget()
            for parameter in sub_dictionary.keys():
                value = sub_dictionary[parameter]
                # If the first widget text is empty, start loading there.
                # Otherwise, load to the self.nextrow_uniques position and
                # create a new set of widgets using addParameterClicked().
                if first_widget.currentText() == "":
                    row = self.firstrow_uniques
                else:
                    row = self.nextrow_uniques
                    self.addParameterClicked()
                # Get the widgets for the current row.
                caom_box = self.uniques_grid.itemAtPosition(row,0).widget()
                parent_box = self.uniques_grid.itemAtPosition(row,1).widget()
                value_box = self.uniques_grid.itemAtPosition(row,2).widget()
                # If the desired parent is already an option, set to that.
                # Otherwise add it as a new option in the combo box.
                if p in self.xml_parents:
                    parent_index = self.xml_parents.index(p)
                    parent_box.setCurrentIndex(parent_index)
                else:
                    parent_box.addItem(p)
                    parent_box.setCurrentIndex(parent_box.findText(p))
                    self.xml_parents.append(p)
                # Fill in the CAOM line edit box.
                caom_box.setTo(parameter)
                # If the next level is still a dictionary, repeat this process.
                # Otherwise, fill in the Value line edit box.
                if isinstance(sub_dictionary[parameter], dict):
                    self.loadDictionaries(copy_dictionary)
                else:
                    value_box.insert(sub_dictionary[parameter])
                    del copy_dictionary[parameter]
def loadFromYAML(self, filename):
""" Load configuration parameters to our ConfigGenerator form using a
YAML-formatted file.
:param filename: The location of the YAML-formatted config file.
:type filename: str
"""
# Read the YAML entries into a dictionary. select_files will also be
# opening the config file, so kill the redundant output.
yamlfile = read_yaml(filename, output=False)
# Clear any existing form values before loading the new data.
self.resetClicked()
# Get the 'filepaths' data out of the dictionary and write it into
# the appropriate lineedits
try:
filepaths = yamlfile["filepaths"]
self.data_dir_edit.insert(filepaths["hlsppath"])
self.output_dir_edit.insert(filepaths["output"])
except KeyError:
msg = "'filepaths' either missing or not formatted in config file"
raise MyError(msg)
# Get the 'overwrite' information out of the dictionary and set the
# radio button
try:
if filepaths["overwrite"]:
self.overwrite_on.setChecked(True)
else:
self.overwrite_off.setChecked(True)
except KeyError:
msg = "'overwrite' not provided in config file"
raise MyError(msg)
# Get the 'header_type' data out of the dictionary and set the
# QComboBox.
try:
header_type = yamlfile["header_type"].capitalize()
header_index = self.headertype_box.header_types.index(header_type)
self.headertype_box.setCurrentIndex(header_index)
except KeyError:
msg = "'header_type' not provided in config file"
raise MyError(msg)
# Get the 'data_type' data out of the dictionary and set the
# QComboBox.
try:
data_type = yamlfile["data_type"].upper()
dataType_index = self.datatype_box.data_types.index(data_type)
self.datatype_box.setCurrentIndex(dataType_index)
except KeyError:
msg = "'data_type' not provided in config file"
raise MyError(msg)
# Get the 'unique_parameters' data out of the dictionary using the
# loadDictionaries module and create new rows as needed. Error
# handling just does a pass since not all configs will have extra
# parameters.
try:
uniques = yamlfile["unique_parameters"]
self.loadDictionaries(uniques)
except KeyError:
pass
# Get the 'keyword_updates' data out of the dictionary. Error handling
# just returns since this is the last function and not all configs
# will set keyword values.
try:
keyword_updates = yamlfile["keyword_updates"]
except KeyError:
return
# Load the 'keyword_updates' data into the form and create new rows
# if necessary.
for key in sorted(keyword_updates.keys()):
values = keyword_updates[key]
# If nextrow_headers has not been moved, load into
# firstrow_headers. Otherwise, trigger an addKeywordClicked event
# and load into nextrow_headers.
if self.nextrow_headers == self.firstrow_headers + 1:
row = self.firstrow_headers
else:
row = self.nextrow_headers
self.addKeywordClicked()
load_key = self.headerdefault_grid.itemAtPosition(row, 0).widget()
load_caom = self.headerdefault_grid.itemAtPosition(row, 1).widget()
load_xml = self.headerdefault_grid.itemAtPosition(row, 2).widget()
load_ext = self.headerdefault_grid.itemAtPosition(row, 3).widget()
load_def = self.headerdefault_grid.itemAtPosition(row, 4).widget()
# Get the lists of available keyword and XML parent values that
# currently populate the two QComboBox items.
available_keys = [load_key.itemText(x)
for x in range(load_key.count())]
available_xml = [load_xml.itemText(y)
for y in range(load_xml.count())]
# If the keyword and XML parent values are already available,
# select them in the appropriate box. Otherwise, enter them as
# new values.
if key in available_keys:
load_key.setCurrentIndex(available_keys.index(key))
else:
load_key.setCurrentText(key)
load_caom.setTo(values["caom"])
if values["section"] in available_xml:
load_xml.setCurrentIndex(
available_xml.index(values["section"]))
else:
load_xml.setCurrentText(values["section"])
# Add the headerName and headerDefaultValue text to the lineedit
# objects.
load_ext.setText(values["headerName"])
load_def.setText(values["headerDefaultValue"])
def loadParamFile(self, filename):
    """ Load the available information from a .param file created by the
    previous metadata-checking steps of HLSP ingestion.  This will not
    completely fill out the .config file form.

    :param filename:  The filename for the YAML-formatted file to read
                      information from.
    :type filename:  str
    :raises MyError: if mandatory entries ('InputDir', a single 'fits'
                     product with Standard/ProductType) are missing.
    """
    # Read the YAML entries into a dictionary.  select_files will also be
    # opening the config file, so kill the redundant output.
    yamlfile = read_yaml(filename, output=False)
    # Clear any existing form values before loading the new data.
    self.resetClicked()
    # Get the 'filepaths' data out of the dictionary and write it into
    # the appropriate lineedits.
    try:
        datadir = yamlfile["InputDir"]
    except KeyError:
        msg = "'InputDir' either missing or not formatted in .param file"
        raise MyError(msg)
    else:
        self.data_dir_edit.insert(datadir)
    # Identify the single .fits entry defined in the .param file, if
    # present.  Exactly one product is expected at this stage.
    try:
        fits = yamlfile["fits"]
    except KeyError:
        msg = "No fits parameters found in .param file"
        raise MyError(msg)
    else:
        if len(fits) > 1:
            msg = "More than one .fits product found in .param file"
            raise MyError(msg)
        else:
            fits = fits[0]
    # If the single .fits entry is present, get the standard that was
    # used for metadata checking and select it in the combo box.
    try:
        new_head_type = fits["FileParams"]["Standard"].title()
    except KeyError:
        msg = "Could not find a .fits standard in .param file"
        raise MyError(msg)
    else:
        # Silently ignored when the standard is not a known header type.
        if new_head_type in self.headertype_box.header_types:
            n = self.headertype_box.header_types.index(new_head_type)
            self.headertype_box.setCurrentIndex(n)
    # If the single .fits entry is present, get the ProductType information
    # defined in the .param file and select it (fall back to index 0).
    try:
        new_data_type = fits["FileParams"]["ProductType"].upper()
    except KeyError:
        msg = "Could not find 'ProductType' parameter in .param file"
        raise MyError(msg)
    else:
        if new_data_type in self.datatype_box.data_types:
            n = self.datatype_box.data_types.index(new_data_type)
            self.datatype_box.setCurrentIndex(n)
        else:
            self.datatype_box.setCurrentIndex(0)
def resetClicked(self):
    """ Clear any changes to the form: reset the always-present widgets to
    their defaults, delete every dynamically-added table row, and rewind
    the next-row counters.
    """
    # Empty the immediately-available elements.
    self.clearConfigPaths()
    self.overwrite_on.setChecked(True)
    self.headertype_box.setCurrentIndex(0)
    self.datatype_box.setCurrentIndex(0)
    # Reset the three widgets of the first 'unique parameters' row
    # (parent combo, CAOM combo, value lineedit).
    p_one = self.uniques_grid.itemAtPosition(self.firstrow_uniques,0)
    p_one.widget().setCurrentIndex(0)
    c_one = self.uniques_grid.itemAtPosition(self.firstrow_uniques,1)
    c_one.widget().setCurrentIndex(0)
    v_one = self.uniques_grid.itemAtPosition(self.firstrow_uniques,2)
    v_one.widget().clear()
    # Reset the five widgets of the first 'header defaults' row.
    k_one = self.headerdefault_grid.itemAtPosition(self.firstrow_headers,0)
    k_one.widget().setCurrentIndex(0)
    h_one = self.headerdefault_grid.itemAtPosition(self.firstrow_headers,1)
    h_one.widget().setCurrentIndex(0)
    x_one = self.headerdefault_grid.itemAtPosition(self.firstrow_headers,2)
    x_one.widget().setCurrentIndex(0)
    e_one = self.headerdefault_grid.itemAtPosition(self.firstrow_headers,3)
    e_one.widget().clear()
    d_one = self.headerdefault_grid.itemAtPosition(self.firstrow_headers,4)
    d_one.widget().clear()
    # Delete any unique parameter entries beyond the first table row.
    # Iterate bottom-up so earlier deletions do not shift later rows.
    delete_these = range(self.nextrow_uniques - 1,
                         self.firstrow_uniques,
                         -1)
    for a in delete_these:
        test = self.uniques_grid.itemAtPosition(a, 0)
        if test is None:
            continue
        widgets_per_row = 3
        for b in range(widgets_per_row):
            c = self.uniques_grid.itemAtPosition(a, b).widget()
            # Detaching from the parent removes the widget from the grid.
            c.setParent(None)
    # Reset the nextrow_uniques variable.
    self.nextrow_uniques = self.firstrow_uniques + 1
    # Delete any header keyword entries beyond the first row.
    delete_these = range(self.nextrow_headers - 1,
                         self.firstrow_headers,
                         -1)
    for x in delete_these:
        test = self.headerdefault_grid.itemAtPosition(x,0)
        if test is None:
            continue
        widgets_per_row = 5
        for y in range(widgets_per_row):
            z = self.headerdefault_grid.itemAtPosition(x,y).widget()
            z.setParent(None)
    # Reset the nextrow_headers variable.
    self.nextrow_headers = self.firstrow_headers + 1
def collectInputs(self):
    """ Assemble everything the user has input to the form into a
    dictionary.

    :returns: dict with keys 'filepaths', 'header_type', 'data_type',
              'unique_parameters' and 'keyword_updates'.
    :raises MyError: if a mandatory form field is empty.
    """
    # Initialize dictionaries to populate.
    config = {}
    filepaths = {}
    # Get the HLSP data filepath.  Throw an error if it does not exist.
    hlsppath = self.data_dir_edit.text()
    if hlsppath == "":
        raise MyError("HLSP Data file path is missing!")
    else:
        filepaths["hlsppath"] = hlsppath
    # Get the output filepath from the line edit.  Throw an error if it is
    # empty.  Append with a '.xml' if not already there.  Get the overwrite
    # flag from the checkbox.
    out = self.output_dir_edit.text()
    if out == "":
        raise MyError("Output file path is missing!")
    if not out.endswith(".xml"):
        out = ".".join([out, "xml"])
    filepaths["output"] = out
    filepaths["overwrite"] = self.overwrite_on.isChecked()
    config["filepaths"] = filepaths
    # Grab the selected fits header type.
    config["header_type"] = self.headertype_box.currentText().lower()
    # Get the data type.  Throw an error if none selected.
    dt = self.datatype_box.currentText().lower()
    if dt == "":
        raise MyError("No data type selected!")
    else:
        config["data_type"] = dt
    # Collect all the unique parameters the user has entered.  Start at row
    # self.firstrow_uniques and search through all rows the user may have
    # added.
    uniques = {}
    for row in range(self.firstrow_uniques, self.nextrow_uniques):
        add_caom = self.uniques_grid.itemAtPosition(row, 0)
        add_parent = self.uniques_grid.itemAtPosition(row, 1)
        add_value = self.uniques_grid.itemAtPosition(row, 2)
        unique_parent = unique_caom = unique_value = None
        # Skip totally empty rows, empty values are okay for defining a new
        # parent.
        if add_parent is None and add_caom is None and add_value is None:
            continue
        if add_parent is not None:
            parent_widget = add_parent.widget()
            unique_parent = str(parent_widget.currentText())
        if add_caom is not None:
            caom_widget = add_caom.widget()
            unique_caom = str(caom_widget.currentText())
        if add_value is not None:
            value_widget = add_value.widget()
            unique_value = str(value_widget.text())
        # All-blank rows are skipped; a blank parent alone defaults to the
        # top-level "CompositeObservation" parent.
        if (unique_parent == ""
                and unique_caom == ""
                and unique_value == ""):
            continue
        elif unique_parent == "":
            unique_parent = "CompositeObservation"
        parameter = {}
        parameter[unique_caom] = unique_value
        # NOTE(review): crawl_dictionary is defined elsewhere in this file;
        # it inserts `parameter` under `unique_parent` if that parent
        # already exists somewhere in `uniques`.
        insert = crawl_dictionary(uniques, unique_parent, parameter)
        # crawl_dictionary returns a tuple:
        # (updated dictionary, inserted boolean flag)
        new_uniques, inserted = insert
        # If crawl_dictionary did not insert the new parameter, the defined
        # parent is not currently present in the dictionary, so create a
        # new entry.
        if inserted:
            uniques = new_uniques
        else:
            uniques[unique_parent] = parameter
    config["unique_parameters"] = uniques
    # Collect all header keyword entries the user may have provided.
    keywords = {}
    for row in range(self.firstrow_headers, self.nextrow_headers):
        add_key = self.headerdefault_grid.itemAtPosition(row, 0)
        add_caom = self.headerdefault_grid.itemAtPosition(row, 1)
        add_xml = self.headerdefault_grid.itemAtPosition(row, 2)
        add_ext = self.headerdefault_grid.itemAtPosition(row, 3)
        add_def = self.headerdefault_grid.itemAtPosition(row, 4)
        unique_keyword = None
        unique_caom = None
        unique_xmlparent = None
        unique_extension = None
        unique_default = None
        # Skip rows with any missing properties, otherwise load the info
        # into variables.
        if (add_key is None
                or add_caom is None
                or add_xml is None
                or add_ext is None
                or add_def is None):
            continue
        else:
            unique_keyword = str(add_key.widget().currentText())
            unique_caom = str(add_caom.widget().currentText())
            unique_xmlparent = str(add_xml.widget().currentText())
            unique_extension = str(add_ext.widget().text())
            unique_default = str(add_def.widget().text())
        # Skip the row if any of those variables are empty strings.
        # Otherwise, add the information to a dictionary of properties
        # stored under the given header keyword.
        if (unique_keyword == ""
                or unique_caom == ""
                or unique_xmlparent == ""
                or unique_extension == ""
                or unique_default == ""):
            continue
        else:
            new_entries = {}
            new_entries["caom"] = unique_caom
            new_entries["section"] = unique_xmlparent
            new_entries["headerName"] = unique_extension
            new_entries["headerDefaultValue"] = unique_default
            keywords[unique_keyword] = new_entries
    config["keyword_updates"] = keywords
    # Return the config dictionary
    return config
#--------------------
# Script entry point: start the Qt event loop with a ConfigGenerator widget.
# NOTE(review): the widget is constructed but never .show()n here —
# presumably ConfigGenerator displays itself in __init__; confirm.
if __name__=="__main__":
    app = QApplication(sys.argv)
    w = ConfigGenerator()
    sys.exit(app.exec_())
|
[
"lib.GUIbuttons.GreyButton",
"lib.MyError.MyError",
"lib.HeaderKeyword.read_header_keywords_table",
"util.read_yaml.read_yaml"
] |
[((8946, 8992), 'lib.HeaderKeyword.read_header_keywords_table', 'hk.read_header_keywords_table', (['HEADER_KEYWORDS'], {}), '(HEADER_KEYWORDS)\n', (8975, 8992), True, 'import lib.HeaderKeyword as hk\n'), ((13318, 13360), 'lib.GUIbuttons.GreyButton', 'gb.GreyButton', (['"""+ add a new parameter"""', '(20)'], {}), "('+ add a new parameter', 20)\n", (13331, 13360), True, 'import lib.GUIbuttons as gb\n'), ((15640, 15680), 'lib.GUIbuttons.GreyButton', 'gb.GreyButton', (['"""+ add a new keyword"""', '(20)'], {}), "('+ add a new keyword', 20)\n", (15653, 15680), True, 'import lib.GUIbuttons as gb\n'), ((32685, 32718), 'util.read_yaml.read_yaml', 'read_yaml', (['filename'], {'output': '(False)'}), '(filename, output=False)\n', (32694, 32718), False, 'from util.read_yaml import read_yaml\n'), ((37973, 38006), 'util.read_yaml.read_yaml', 'read_yaml', (['filename'], {'output': '(False)'}), '(filename, output=False)\n', (37982, 38006), False, 'from util.read_yaml import read_yaml\n'), ((42957, 42999), 'lib.MyError.MyError', 'MyError', (['"""HLSP Data file path is missing!"""'], {}), "('HLSP Data file path is missing!')\n", (42964, 42999), False, 'from lib.MyError import MyError\n'), ((43335, 43374), 'lib.MyError.MyError', 'MyError', (['"""Output file path is missing!"""'], {}), "('Output file path is missing!')\n", (43342, 43374), False, 'from lib.MyError import MyError\n'), ((43867, 43900), 'lib.MyError.MyError', 'MyError', (['"""No data type selected!"""'], {}), "('No data type selected!')\n", (43874, 43900), False, 'from lib.MyError import MyError\n'), ((33233, 33245), 'lib.MyError.MyError', 'MyError', (['msg'], {}), '(msg)\n', (33240, 33245), False, 'from lib.MyError import MyError\n'), ((33622, 33634), 'lib.MyError.MyError', 'MyError', (['msg'], {}), '(msg)\n', (33629, 33634), False, 'from lib.MyError import MyError\n'), ((34050, 34062), 'lib.MyError.MyError', 'MyError', (['msg'], {}), '(msg)\n', (34057, 34062), False, 'from lib.MyError import MyError\n'), ((34461, 
34473), 'lib.MyError.MyError', 'MyError', (['msg'], {}), '(msg)\n', (34468, 34473), False, 'from lib.MyError import MyError\n'), ((38395, 38407), 'lib.MyError.MyError', 'MyError', (['msg'], {}), '(msg)\n', (38402, 38407), False, 'from lib.MyError import MyError\n'), ((38714, 38726), 'lib.MyError.MyError', 'MyError', (['msg'], {}), '(msg)\n', (38721, 38726), False, 'from lib.MyError import MyError\n'), ((38866, 38878), 'lib.MyError.MyError', 'MyError', (['msg'], {}), '(msg)\n', (38873, 38878), False, 'from lib.MyError import MyError\n'), ((39231, 39243), 'lib.MyError.MyError', 'MyError', (['msg'], {}), '(msg)\n', (39238, 39243), False, 'from lib.MyError import MyError\n'), ((39772, 39784), 'lib.MyError.MyError', 'MyError', (['msg'], {}), '(msg)\n', (39779, 39784), False, 'from lib.MyError import MyError\n')]
|
from common import activities, prefix
from discord.ext import commands
class RemoveActivity(commands.Cog):
    """Cog exposing the owner-only ``rm-activity`` command, which removes a
    previously registered activity from the shared ``activities`` store.
    """

    def __init__(self, bot):
        self.bot = bot

    @commands.command(name="rm-activity")
    @commands.is_owner()
    async def remove_activity(self, ctx):
        """Remove the activity text pasted after the command.

        BUGFIX: the callback was defined without ``self``.  Commands
        declared inside a Cog are invoked with the cog instance as the
        first positional argument, so ``ctx`` used to receive the cog
        object and ``ctx.message`` raised AttributeError.
        """
        # Everything after "<prefix>rm-activity" is the activity text.
        activity = ctx.message.content[(
            len(prefix) + len("rm-activity")):].strip()
        if not activity:
            await ctx.send(
                "Please paste the activity you would like to"
                f" remove.\n```\n{prefix}rm-activity your activity text here\n```"
            )
            return
        removed = activities.remove(activity)
        if removed:
            await ctx.send(f"The activity `{activity}` was removed.")
        else:
            await ctx.send("The activity you mentioned does not exist.")
|
[
"discord.ext.commands.command",
"discord.ext.commands.is_owner",
"common.activities.remove"
] |
[((167, 203), 'discord.ext.commands.command', 'commands.command', ([], {'name': '"""rm-activity"""'}), "(name='rm-activity')\n", (183, 203), False, 'from discord.ext import commands\n'), ((209, 228), 'discord.ext.commands.is_owner', 'commands.is_owner', ([], {}), '()\n', (226, 228), False, 'from discord.ext import commands\n'), ((612, 639), 'common.activities.remove', 'activities.remove', (['activity'], {}), '(activity)\n', (629, 639), False, 'from common import activities, prefix\n')]
|
import datetime
import json
from lxml.etree import Element, fromstring, tostring
from passari.config import CONFIG, MUSEUMPLUS_URL
from passari.museumplus.settings import ZETCOM_NS
from passari.utils import retrieve_xml
async def get_object_field(session, object_id: int, name: str):
    """
    Get the value of a single Object field

    :param session: aiohttp.Session instance
    :param object_id: ID of the Object to retrieve
    :param name: Field to retrieve
    :returns: Value of the field as string if it exists, None otherwise
    """
    # Retrieving the entire document seems to be the only option
    xml = await retrieve_xml(
        session, f"{MUSEUMPLUS_URL}/module/Object/{object_id}"
    )
    try:
        # BUGFIX: the original concatenated the path steps without '/'
        # separators after the predicate expressions, producing an invalid
        # ElementPath string ("...moduleItem[@id='1']{ns}*...") that raises
        # SyntaxError instead of locating the element.
        return xml.find(
            f"{{{ZETCOM_NS}}}modules//"
            f"{{{ZETCOM_NS}}}moduleItem[@id='{object_id}']/"
            f"{{{ZETCOM_NS}}}*[@name='{name}']/"
            f"{{{ZETCOM_NS}}}value"
        ).text
    except AttributeError:
        # find() returned None -> the field does not exist on this Object.
        return None
UPDATE_FIELD_TEMPLATE = """
<?xml version="1.0" encoding="UTF-8"?>
<application xmlns="http://www.zetcom.com/ria/ws/module">
<modules>
<module name="Object">
<moduleItem id="{object_id}">
</moduleItem>
</module>
</modules>
</application>
"""[1:]
async def set_object_field(
        session, object_id: int, name: str, field_type: str,
        value: str):
    """
    Set the value of a single Object field, creating the field when it
    does not exist yet.

    :param session: aiohttp.Session instance
    :param object_id: ID of the Object to update
    :param name: Field name to set
    :param field_type: Field type (eg. "dataField")
    :param value: Value to set
    :returns: True on success; raises on a non-2xx HTTP response
    """
    # Start from the skeleton request body and locate the <moduleItem>
    # placeholder for this Object.
    document = fromstring(
        UPDATE_FIELD_TEMPLATE.format(object_id=object_id).encode("utf-8")
    )
    item_elem = document.find(
        f"{{{ZETCOM_NS}}}modules//{{{ZETCOM_NS}}}moduleItem"
    )
    # Build <fieldType name="..."><value>...</value></fieldType> and attach
    # it under the module item.
    value_elem = Element("value")
    value_elem.text = value
    field_elem = Element(field_type)
    field_elem.attrib["name"] = name
    field_elem.append(value_elem)
    item_elem.append(field_elem)
    payload = tostring(document, encoding="utf-8", xml_declaration=True)
    response = await session.put(
        f"{MUSEUMPLUS_URL}/module/Object/{object_id}/{name}",
        headers={"Content-Type": "application/xml"},
        data=payload
    )
    response.raise_for_status()
    return True
async def add_preservation_event(museum_package, status):
    """
    Add a preservation event to the MuseumPlus service

    The event is appended to a JSON list stored in an Object field whose
    name and type come from the 'museumplus' section of CONFIG.

    :param museum_package: package wrapper exposing ``session``,
        ``sip_filename`` and ``museum_object``
    :param status: preservation status string recorded in the event
    :raises ValueError: if the existing field content is not valid JSON
    """
    event = {
        "filename": museum_package.sip_filename,
        "status": status,
        "object_modify_date": museum_package.museum_object.modified_date.isoformat(),
        # Timezone-aware UTC timestamp of when the event was recorded.
        "date": datetime.datetime.now(datetime.timezone.utc).isoformat()
    }
    field_name = CONFIG["museumplus"]["object_preservation_field_name"]
    field_type = CONFIG["museumplus"]["object_preservation_field_type"]
    # Get the current events
    events = await get_object_field(
        session=museum_package.session,
        object_id=museum_package.museum_object.object_id,
        name=field_name
    )
    # An absent or empty field means no events have been recorded yet.
    if not events:
        events = "[]"
    try:
        events = json.loads(events)
    except json.decoder.JSONDecodeError as exc:
        raise ValueError(
            "Could not decode MuseumPlus preservation log entries. The "
            "preservation field's content might be corrupted."
        ) from exc
    events.append(event)
    # Update the preservation events
    await set_object_field(
        session=museum_package.session,
        object_id=museum_package.museum_object.object_id,
        name=field_name,
        field_type=field_type,
        value=json.dumps(events)
    )
|
[
"json.loads",
"lxml.etree.Element",
"json.dumps",
"lxml.etree.tostring",
"datetime.datetime.now",
"passari.utils.retrieve_xml"
] |
[((1939, 1958), 'lxml.etree.Element', 'Element', (['field_type'], {}), '(field_type)\n', (1946, 1958), False, 'from lxml.etree import Element, fromstring, tostring\n'), ((2014, 2030), 'lxml.etree.Element', 'Element', (['"""value"""'], {}), "('value')\n", (2021, 2030), False, 'from lxml.etree import Element, fromstring, tostring\n'), ((2141, 2195), 'lxml.etree.tostring', 'tostring', (['root'], {'encoding': '"""utf-8"""', 'xml_declaration': '(True)'}), "(root, encoding='utf-8', xml_declaration=True)\n", (2149, 2195), False, 'from lxml.etree import Element, fromstring, tostring\n'), ((633, 701), 'passari.utils.retrieve_xml', 'retrieve_xml', (['session', 'f"""{MUSEUMPLUS_URL}/module/Object/{object_id}"""'], {}), "(session, f'{MUSEUMPLUS_URL}/module/Object/{object_id}')\n", (645, 701), False, 'from passari.utils import retrieve_xml\n'), ((3213, 3231), 'json.loads', 'json.loads', (['events'], {}), '(events)\n', (3223, 3231), False, 'import json\n'), ((2741, 2785), 'datetime.datetime.now', 'datetime.datetime.now', (['datetime.timezone.utc'], {}), '(datetime.timezone.utc)\n', (2762, 2785), False, 'import datetime\n'), ((3721, 3739), 'json.dumps', 'json.dumps', (['events'], {}), '(events)\n', (3731, 3739), False, 'import json\n')]
|
import re
from bs4 import *
import requests
import random
import json
from hashlib import md5
# Baidu translate credentials: set the APPID and Appkey acquired from the
# Baidu translation API console.
appid = ''
appkey = ''
# Source and target languages.  For other language codes see:
# `https://api.fanyi.baidu.com/doc/21`
from_lang = 'en'
to_lang = 'zh'
# Full endpoint URL translation requests are sent to.
endpoint = 'http://api.fanyi.baidu.com'
path = '/api/trans/vip/translate'
url = endpoint + path
# Example query (the batch loop below builds its own queries).
query = 'Hello World! This is 1st paragraph.\nThis is 2nd paragraph.'
# Helper used to build the request signature expected by the Baidu API.
def make_md5(s, encoding='utf-8'):
    """Return the hex MD5 digest of *s* encoded with *encoding*."""
    digest = md5(s.encode(encoding))
    return digest.hexdigest()
# Everything about machine translation is above; everything below extracts
# the translatable strings from the XML files.
# Pull every double-quoted string out of the source XML and keep only the
# ones containing '=' (the key/value entries that need translation).
with open("totsk_troops.xml", "r", encoding='utf-8') as f:
    origin_content = f.read()
strings_src = re.findall(r'"(.*?)"', origin_content)
strings = list()
for string in strings_src:
    if '=' in string:
        strings.append(string)
# Open the template XML and locate its <strings> container element, where
# the translated entries will be appended.
with open("template.xml", "r", encoding='utf-8') as f:
    template = BeautifulSoup(f.read(), 'xml')
common = template.find('strings')
# Extract the original string id: the text between '=' and '}', with
# spaces and apostrophes normalised to underscores.
def get_id(_string):
    """Return the id portion of *_string* (between '=' and '}')."""
    end = _string.index('}')
    start = _string.index('=')
    raw = _string[start + 1:end]
    return raw.replace(" ", "_").replace("'", "_")
# Generate a new id from the display text (everything after '}'),
# replacing characters that are not id-safe with underscores.
def replace_id(_string):
    """Return the text after '}' with space/'/:/)/( turned into '_'."""
    tail = _string[_string.index('}') + 1:].replace(" ", "_")
    for bad_char in (" ", "'", ":", ")", "("):
        tail = tail.replace(bad_char, "_")
    return tail
def get_string(_string):
    """Return the display text that follows the closing '}'."""
    return _string[_string.index('}') + 1:]
def translate(_string):
    """Translate *_string* via the Baidu translate API and return the
    translated text.

    Relies on the module-level ``appid``, ``appkey``, ``from_lang``,
    ``to_lang`` and ``url`` settings; ``appid``/``appkey`` must be filled
    in or the request will fail.
    """
    print(_string)
    query = _string
    # Baidu requires a per-request random salt plus an MD5 signature over
    # appid + query + salt + appkey.
    salt = random.randint(32768, 65536)
    sign = make_md5(appid + query + str(salt) + appkey)
    # Build request
    headers = {'Content-Type': 'application/x-www-form-urlencoded'}
    payload = {'appid': appid, 'q': query, 'from': from_lang, 'to': to_lang, 'salt': salt, 'sign': sign}
    # Send request
    r = requests.post(url, params=payload, headers=headers)
    result = r.json()
    # NOTE(review): assumes the response always contains "trans_result";
    # API errors return an error_code payload instead — confirm handling.
    translate_result = result["trans_result"][0]["dst"]
    print(translate_result)
    return translate_result
    # Show response
    #print(json.dumps(result, indent=4, ensure_ascii=False))
# Translate each extracted string and append it to the template XML.
for string in strings:
    # enable this line to make new id from name
    #tag = template.new_tag(name='string', attrs={'id': replace_id(string).lower(), 'text': get_string(string)})
    original = get_string(string)
    translation = translate(original)
    # enable this line to read original String IDs
    tag = template.new_tag(name='string', attrs={'id': get_id(string), 'text': translation})
    common.append(tag)
# Write the populated template out as the translated XML file.
with open("totsk_troops_CNs.xml", "w+", encoding='utf-8') as f:
    f.write(template.prettify())
|
[
"requests.post",
"re.findall",
"random.randint"
] |
[((962, 999), 're.findall', 're.findall', (['""""(.*?)\\""""', 'origin_content'], {}), '(\'"(.*?)"\', origin_content)\n', (972, 999), False, 'import re\n'), ((1969, 1997), 'random.randint', 'random.randint', (['(32768)', '(65536)'], {}), '(32768, 65536)\n', (1983, 1997), False, 'import random\n'), ((2275, 2326), 'requests.post', 'requests.post', (['url'], {'params': 'payload', 'headers': 'headers'}), '(url, params=payload, headers=headers)\n', (2288, 2326), False, 'import requests\n')]
|
# 2 Using Manual threading in python
# Import Threading & Time
import threading
import time
# Start counting: wall-clock reference point used to report the total
# script runtime at the bottom of the file.
start = time.perf_counter()
# Create simple function that sleeps for 1 second.
def do_something():
    """Print a start message, sleep one second, then print a done message.

    Used as the target callable for both demo threads below.
    """
    print('Sleeping 1 second..')
    time.sleep(1)
    print('Done Sleeping..')
# Create two threads, start them, and wait for both to finish.  Because
# the two 1-second sleeps overlap, total runtime is roughly one second
# rather than two.
t1 = threading.Thread(target=do_something)
t2 = threading.Thread(target=do_something)
t1.start()
t2.start()
t1.join()
t2.join()
# Finish counting and show script runtime
finish = time.perf_counter()
print(f"Finished in {round(finish-start,2)} second(s)")
|
[
"threading.Thread",
"time.perf_counter",
"time.sleep"
] |
[((119, 138), 'time.perf_counter', 'time.perf_counter', ([], {}), '()\n', (136, 138), False, 'import time\n'), ((331, 368), 'threading.Thread', 'threading.Thread', ([], {'target': 'do_something'}), '(target=do_something)\n', (347, 368), False, 'import threading\n'), ((374, 411), 'threading.Thread', 'threading.Thread', ([], {'target': 'do_something'}), '(target=do_something)\n', (390, 411), False, 'import threading\n'), ((508, 527), 'time.perf_counter', 'time.perf_counter', ([], {}), '()\n', (525, 527), False, 'import time\n'), ((246, 259), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (256, 259), False, 'import time\n')]
|
import asyncio
import base64
import itertools
import json
import os
from enum import Enum
from pydantic import BaseModel
from pathlib import Path, PosixPath
from typing import Union, List, cast, Mapping, Callable, Iterable, Any
import aiohttp
from fastapi import HTTPException
from starlette.requests import Request
from youwol_utils import JSON
from youwol_utils.clients.types import DocDb
from youwol_utils.clients.utils import raise_exception_from_response, to_group_id, to_group_scope
flatten = itertools.chain.from_iterable
def find_platform_path():
    """Return the platform root path: everything before '/services' in
    this module's file path."""
    return Path(__file__.split('/services')[0])
def user_info(request: Request):
    """Return the user-info dict stored on ``request.state`` — presumably
    populated by the auth middleware; confirm against the app setup."""
    return request.state.user_info
def get_user_id(request: Request):
    """Return the OpenID subject ('sub') of the requesting user."""
    return user_info(request)['sub']
def private_group_id(user):
    """Return the id of the user's personal (private) group."""
    return f"private_{user['sub']}"
def is_authorized_write(request: Request, group_id):
    """Return True when the requesting user may write to *group_id*.

    The user must belong to the group; additionally, some scopes carry a
    hard-coded allow-list of usernames.
    """
    caller = user_info(request)
    # Membership check first: non-members are never authorised.
    if group_id not in get_user_group_ids(caller):
        return False
    # Per-scope allow-lists: only the listed usernames may write there.
    allow_lists = {
        '/youwol-users': ['<EMAIL>']
    }
    scope = to_group_scope(group_id)
    if scope not in allow_lists:
        return True
    return caller['preferred_username'] in allow_lists[scope]
def get_all_individual_groups(groups: List[str]) -> List[Union[str, None]]:
    """Expand *groups* with every ancestor path of each group.

    For '/a/b/c' the ancestors '/a' and '/a/b' are added; ``None``
    (the private-group marker) is always included.  Order of the result
    is unspecified (it goes through a set).

    :param groups: group paths such as '/youwol-users/devs'
    :returns: the input groups plus all ancestor paths plus ``None``
    """
    def ancestor_paths(elements: List[str]) -> List[str]:
        # '/a/b/c'.split('/') == ['', 'a', 'b', 'c']; joining the prefixes
        # of length 1..len-1 yields '', '/a', '/a/b' ('' is filtered below).
        return ['/'.join(elements[0:i]) for i in range(1, len(elements))]

    parts = [group.split('/') for group in groups if group]
    ancestors = [p for p in itertools.chain.from_iterable(
        ancestor_paths(part) for part in parts) if p]
    # BUGFIX: the original wrote ``cast(any, [None])``, passing the
    # *builtin* ``any`` where a type was expected; no cast is needed.
    return list(set(groups + ancestors + [None]))
def get_user_group_ids(user) -> List[Union[str, None]]:
    """Return the user's private group id followed by the ids of every
    group (including ancestor groups) from the token's ``memberof``."""
    expanded = get_all_individual_groups(user["memberof"])
    ids = [to_group_id(group) for group in expanded if group is not None]
    return [private_group_id(user)] + ids
def get_leaf_group_ids(user) -> List[Union[str, None]]:
    """Return the user's private group id plus the ids of the groups
    listed directly in ``memberof`` (no ancestor expansion)."""
    ids = [to_group_id(group) for group in user["memberof"] if group is not None]
    return [private_group_id(user)] + ids
def ensure_group_permission(request: Request, group_id: str):
    """Raise HTTP 401 unless the requesting user belongs to *group_id*."""
    caller = user_info(request)
    if group_id not in get_user_group_ids(caller):
        raise HTTPException(status_code=401, detail=f"User can not get/post resource")
def full_local_fake_user(request):
    """Build a fake user-info payload for local development, selected by
    the 'user-name' request header ('public', 'test', or arbitrary).

    # NOTE(review): the '<EMAIL>' values look like redaction artifacts of
    # this dump — the real source presumably holds actual addresses.
    """
    user_name = request.headers.get('user-name', "<EMAIL>")
    if user_name == "public":
        return {
            "sub": to_group_id(user_name), "email_verified": True, "name": "public account",
            "preferred_username": "public account", "email": "<EMAIL>",
            "memberof": [
                "/youwol-users"
            ],
        }
    if user_name == "test":
        return {
            "sub": to_group_id(user_name), "email_verified": True, "name": "test account",
            "preferred_username": "test account", "email": "<EMAIL>",
            "memberof": ["/youwol-users/postman-tester/subchildtest1",
                         "/youwol-users/postman-tester/subchildtest2",
                         "/youwol-users/youwol-devs",
                         ],
        }
    # Fallback: any other user-name gets a fixed 'sub' and a default set
    # of groups, with the header value used as username/email.
    return {
        "sub": "82bcba26-65d7-4072-afc4-a28bb58611c4",
        "email_verified": True,
        "name": "test account",
        "preferred_username": user_name,
        "memberof": [
            "/youwol-users/postman-tester/subchildtest1",
            "/youwol-users/postman-tester/subchildtest2",
            "/youwol-users/youwol-devs",
            "/youwol-users/arche"
        ],
        "email": user_name,
    }
async def get_access_token(client_id: str, client_secret: str, client_scope: str, openid_host: str):
    """Fetch an OAuth2 token from keycloak via the client-credentials grant.

    :param client_id: OAuth2 client id
    :param client_secret: OAuth2 client secret
    :param client_scope: requested scope string
    :param openid_host: hostname of the OpenID server
    :returns: the decoded token response JSON on HTTP 200
    """
    body = {
        "client_id": client_id,
        "grant_type": "client_credentials",
        "client_secret": client_secret,
        "scope": client_scope
    }
    url = f"https://{openid_host}/auth/realms/youwol/protocol/openid-connect/token"
    headers = {'Content-Type': 'application/x-www-form-urlencoded'}
    # NOTE(review): verify_ssl=False disables certificate validation —
    # confirm this is intended for the target deployment.
    async with aiohttp.ClientSession(connector=aiohttp.TCPConnector(verify_ssl=False)) as session:
        async with await session.post(url, data=body, headers=headers) as resp:
            if resp.status == 200:
                return await resp.json()
            # Non-200: convert the response into an exception.
            await raise_exception_from_response(resp)
async def get_headers_auth_admin_from_env():
    """Build an admin ``Authorization`` header using the AUTH_* environment
    variables (AUTH_CLIENT_ID, AUTH_CLIENT_SECRET, AUTH_CLIENT_SCOPE,
    AUTH_HOST)."""
    client_id = os.getenv("AUTH_CLIENT_ID")
    client_secret = os.getenv("AUTH_CLIENT_SECRET")
    client_scope = os.getenv("AUTH_CLIENT_SCOPE")
    openid_host = os.getenv("AUTH_HOST")
    resp = await get_access_token(client_id=client_id, client_secret=client_secret, client_scope=client_scope,
                                  openid_host=openid_host)
    access_token = resp['access_token']
    return {"Authorization": f"Bearer {access_token}"}
async def get_headers_auth_admin_from_secrets_file(file_path: Path, url_cluster: str, openid_host: str):
    """Build an admin ``Authorization`` header from a JSON secrets file
    keyed by cluster URL (entries provide clientId/clientSecret/scope)."""
    secret = json.loads(file_path.read_text())[url_cluster]
    resp = await get_access_token(secret["clientId"], secret["clientSecret"], secret["scope"], openid_host=openid_host)
    access_token = resp['access_token']
    return {"Authorization": f"Bearer {access_token}"}
def generate_headers_downstream(incoming_headers):
    """Return only the auth-related headers ('Authorization', 'user-name')
    from *incoming_headers*, for forwarding to downstream services."""
    forwarded = {}
    for header_name in ("Authorization", "user-name"):
        if header_name in incoming_headers:
            forwarded[header_name] = incoming_headers.get(header_name)
    return forwarded
def chunks(lst, n):
    """Yield successive n-sized chunks from lst."""
    offset = 0
    while offset < len(lst):
        yield lst[offset:offset + n]
        offset += n
async def get_group(primary_key: str, primary_value: Union[str, float, int, bool], groups: List[str], doc_db: DocDb,
                    headers: Mapping[str, str]):
    """Return the first group (in *groups* order) whose doc-db query for
    ``primary_key == primary_value`` returns at least one document, or
    None when no group matches.  All queries run concurrently.
    """
    requests = [doc_db.query(query_body=f"{primary_key}={primary_value}#1", owner=group, headers=headers)
                for group in groups]
    responses = await asyncio.gather(*requests)
    # Pick the first group whose query returned at least one document.
    group = next((g for i, g in enumerate(groups) if responses[i]["documents"]), None)
    return group
def check_permission_or_raise(target_group: Union[str, None], allowed_groups: List[Union[None, str]]):
    """Raise HTTP 401 unless *target_group* is a substring of at least one
    entry of *allowed_groups*.  A falsy target group is always allowed."""
    if not target_group:
        return
    matches = [g for g in allowed_groups if target_group in g]
    if not matches:
        raise HTTPException(status_code=401,
                            detail=f"scope '{target_group}' not included in user groups")
def get_content_type(file_name: str):
    """Guess an HTTP Content-Type from the dotted suffixes of *file_name*.

    Every suffix after the base name is considered; the first match in
    the priority order below wins.  Unknown names fall back to
    'application/octet-stream'.
    """
    mime_by_extension = (
        ("json", "application/json"),
        ("yaml", "application/yaml"),
        ("js", "application/javascript;charset=UTF-8"),
        ("css", "text/css"),
        ("woff2", "font/woff2"),
        ("svg", "image/svg+xml"),
        ("png", "image/png"),
        ("txt", "text/plain"),
    )
    suffixes = file_name.split('.')[1:]
    for extension, content_type in mime_by_extension:
        if extension in suffixes:
            return content_type
    return "application/octet-stream"
def get_content_encoding(file_name: str):
    """Return the HTTP content-encoding implied by the final suffix of
    *file_name* ('br' or 'gzip'), or '' when there is none."""
    last_suffix = file_name.split('.')[-1]
    return last_suffix if last_suffix in ("br", "gzip") else ""
async def retrieve_user_info(auth_token: str, openid_host: str):
    """Fetch the OpenID Connect userinfo document for *auth_token*.

    :param auth_token: bearer access token
    :param openid_host: hostname of the OpenID server
    :raises HTTPException: with the upstream status on a non-200 response
    """
    headers = {"authorization": f"Bearer {auth_token}"}
    url = f"https://{openid_host}/auth/realms/youwol/protocol/openid-connect/userinfo"
    # NOTE(review): certificate verification is disabled here — confirm
    # this is intentional for the target deployment.
    async with aiohttp.ClientSession(connector=aiohttp.TCPConnector(verify_ssl=False)) as session:
        async with await session.post(url=url, headers=headers) as resp:
            if resp.status != 200:
                raise HTTPException(status_code=resp.status, detail=await resp.read())
            resp = await resp.json()
            return resp
async def get_myself_auth_token(secret_path: Path, openid_host):
    """Obtain an access token via the resource-owner-password grant, using
    credentials read from the JSON secrets file at *secret_path*."""
    secret = json.loads(open(str(secret_path)).read())
    form = aiohttp.FormData()
    form.add_field("username", secret["myself"]["username"])
    # NOTE(review): the "<PASSWORD>" key looks like a redaction artifact of
    # this dump; the real source presumably reads secret["myself"]["password"].
    form.add_field("password", secret["<PASSWORD>"]["password"])
    form.add_field("client_id", secret["dev.platform.youwol.com"]["clientId"])
    form.add_field("grant_type", "password")
    form.add_field("client_secret", secret["dev.platform.youwol.com"]["clientSecret"])
    form.add_field("scope", "email profile youwol_dev")
    url = f"https://{openid_host}/auth/realms/youwol/protocol/openid-connect/token"
    async with aiohttp.ClientSession() as session:
        async with await session.post(url=url, data=form) as resp:
            resp = await resp.json()
            return resp['access_token']
def exception_message(error: Exception):
    """Return a human-readable message for *error*: the ``detail`` of an
    HTTPException, otherwise its string representation."""
    return error.detail if isinstance(error, HTTPException) else str(error)
def decode_id(asset_id) -> str:
    """Decode a urlsafe-base64 asset id back into the raw id string."""
    raw_bytes = base64.urlsafe_b64decode(asset_id.encode())
    return raw_bytes.decode()
def encode_id(raw_id) -> str:
    """Encode a raw id string as urlsafe base64."""
    return base64.urlsafe_b64encode(raw_id.encode()).decode()
def to_json(obj: BaseModel) -> JSON:
    """Convert a pydantic model to a plain JSON-serializable structure.

    Path-like values become strings, callables the literal "function",
    enums their name, and non-list/str iterables are materialized as
    lists.  The conversion is applied recursively, in place, to the dict
    produced by ``obj.dict()``.
    """
    def to_serializable(v):
        # Map a single leaf value onto a JSON-friendly equivalent.
        if isinstance(v, Path):
            return str(v)
        if isinstance(v, PosixPath):
            return str(v)
        if isinstance(v, Callable):
            return "function"
        if isinstance(v, Enum):
            return v.name
        if isinstance(v, Iterable) and not isinstance(v, list) and not isinstance(v, str):
            v = list(v)
        return v
    base = obj.dict()
    def to_json_rec(_obj: Any):
        # Walk dicts and lists, converting every non-container leaf.
        if isinstance(_obj, dict):
            for k, v in _obj.items():
                if not isinstance(v, dict) and not isinstance(v, list):
                    _obj[k] = to_serializable(v)
                if isinstance(v, dict):
                    to_json_rec(v)
                if isinstance(v, list):
                    for i, e in enumerate(v):
                        if not isinstance(e, dict) and not isinstance(e, list):
                            _obj[k][i] = to_serializable(e)
                        else:
                            to_json_rec(e)
    to_json_rec(base)
    return base
|
[
"asyncio.gather",
"youwol_utils.clients.utils.to_group_id",
"base64.urlsafe_b64encode",
"aiohttp.FormData",
"typing.cast",
"fastapi.HTTPException",
"aiohttp.ClientSession",
"youwol_utils.clients.utils.raise_exception_from_response",
"base64.urlsafe_b64decode",
"youwol_utils.clients.utils.to_group_scope",
"os.getenv",
"aiohttp.TCPConnector"
] |
[((1080, 1104), 'youwol_utils.clients.utils.to_group_scope', 'to_group_scope', (['group_id'], {}), '(group_id)\n', (1094, 1104), False, 'from youwol_utils.clients.utils import raise_exception_from_response, to_group_id, to_group_scope\n'), ((4456, 4483), 'os.getenv', 'os.getenv', (['"""AUTH_CLIENT_ID"""'], {}), "('AUTH_CLIENT_ID')\n", (4465, 4483), False, 'import os\n'), ((4504, 4535), 'os.getenv', 'os.getenv', (['"""AUTH_CLIENT_SECRET"""'], {}), "('AUTH_CLIENT_SECRET')\n", (4513, 4535), False, 'import os\n'), ((4555, 4585), 'os.getenv', 'os.getenv', (['"""AUTH_CLIENT_SCOPE"""'], {}), "('AUTH_CLIENT_SCOPE')\n", (4564, 4585), False, 'import os\n'), ((4604, 4626), 'os.getenv', 'os.getenv', (['"""AUTH_HOST"""'], {}), "('AUTH_HOST')\n", (4613, 4626), False, 'import os\n'), ((8085, 8103), 'aiohttp.FormData', 'aiohttp.FormData', ([], {}), '()\n', (8101, 8103), False, 'import aiohttp\n'), ((1654, 1671), 'typing.cast', 'cast', (['any', '[None]'], {}), '(any, [None])\n', (1658, 1671), False, 'from typing import Union, List, cast, Mapping, Callable, Iterable, Any\n'), ((1790, 1804), 'youwol_utils.clients.utils.to_group_id', 'to_group_id', (['g'], {}), '(g)\n', (1801, 1804), False, 'from youwol_utils.clients.utils import raise_exception_from_response, to_group_id, to_group_scope\n'), ((2000, 2014), 'youwol_utils.clients.utils.to_group_id', 'to_group_id', (['g'], {}), '(g)\n', (2011, 2014), False, 'from youwol_utils.clients.utils import raise_exception_from_response, to_group_id, to_group_scope\n'), ((2301, 2373), 'fastapi.HTTPException', 'HTTPException', ([], {'status_code': '(401)', 'detail': 'f"""User can not get/post resource"""'}), "(status_code=401, detail=f'User can not get/post resource')\n", (2314, 2373), False, 'from fastapi import HTTPException\n'), ((6058, 6083), 'asyncio.gather', 'asyncio.gather', (['*requests'], {}), '(*requests)\n', (6072, 6083), False, 'import asyncio\n'), ((6457, 6554), 'fastapi.HTTPException', 'HTTPException', ([], {'status_code': '(401)', 
'detail': 'f"""scope \'{target_group}\' not included in user groups"""'}), '(status_code=401, detail=\n f"scope \'{target_group}\' not included in user groups")\n', (6470, 6554), False, 'from fastapi import HTTPException\n'), ((8596, 8619), 'aiohttp.ClientSession', 'aiohttp.ClientSession', ([], {}), '()\n', (8617, 8619), False, 'import aiohttp\n'), ((2538, 2560), 'youwol_utils.clients.utils.to_group_id', 'to_group_id', (['user_name'], {}), '(user_name)\n', (2549, 2560), False, 'from youwol_utils.clients.utils import raise_exception_from_response, to_group_id, to_group_scope\n'), ((2839, 2861), 'youwol_utils.clients.utils.to_group_id', 'to_group_id', (['user_name'], {}), '(user_name)\n', (2850, 2861), False, 'from youwol_utils.clients.utils import raise_exception_from_response, to_group_id, to_group_scope\n'), ((8986, 9013), 'base64.urlsafe_b64decode', 'base64.urlsafe_b64decode', (['b'], {}), '(b)\n', (9010, 9013), False, 'import base64\n'), ((9093, 9120), 'base64.urlsafe_b64encode', 'base64.urlsafe_b64encode', (['b'], {}), '(b)\n', (9117, 9120), False, 'import base64\n'), ((4131, 4169), 'aiohttp.TCPConnector', 'aiohttp.TCPConnector', ([], {'verify_ssl': '(False)'}), '(verify_ssl=False)\n', (4151, 4169), False, 'import aiohttp\n'), ((4357, 4392), 'youwol_utils.clients.utils.raise_exception_from_response', 'raise_exception_from_response', (['resp'], {}), '(resp)\n', (4386, 4392), False, 'from youwol_utils.clients.utils import raise_exception_from_response, to_group_id, to_group_scope\n'), ((7643, 7681), 'aiohttp.TCPConnector', 'aiohttp.TCPConnector', ([], {'verify_ssl': '(False)'}), '(verify_ssl=False)\n', (7663, 7681), False, 'import aiohttp\n')]
|
#!/usr/bin/env python3
import logging
import sqlite3
import os
from json import load
from urllib.request import urlopen
from bs4 import BeautifulSoup
from re import compile
from datetime import date, datetime
def GetScriptPath():
    """Return the absolute directory containing this script.

    Uses ``os.path.dirname`` instead of manually splitting on '/', so the
    result is correct on every platform (the original broke on Windows,
    where ``os.path.abspath`` returns backslash-separated paths).
    """
    return os.path.dirname(os.path.abspath(__file__))
def GetConfig(path, fname):
    """Load the JSON config file *fname* from directory *path* and return it."""
    config_path = os.path.join(path, fname)
    with open(config_path, 'r') as config_file:
        return load(config_file)
def GetOptions(web_page, config):
    """Parse the quotes HTML page and return a dict of option records.

    :param web_page: file-like object returned by ``urlopen`` (read once here)
    :param config: config dict with ``opcje_call`` / ``opcje_put`` letter lists
    :return: dict mapping option name -> dict of parsed fields (type, wigp,
             exchange, change, pchange, open, max, min, date, term, tstamp)
    """
    def InputToFloat(inpt):
        # Normalize Polish-formatted numbers: strip spaces, non-breaking
        # spaces and '%', and use '.' as the decimal separator.
        inpt = inpt.replace(' ', '')
        inpt = inpt.replace(u'\xa0', u'')
        inpt = inpt.replace('%', '')
        inpt = inpt.replace(',', '.')
        return float(inpt)
    def GetOptionType(opt_name, config):
        # The 5th character of the instrument name encodes call/put
        # (letter sets come from the config file).
        if opt_name[4] in config['opcje_call']:
            opt_type = 'call'
        elif opt_name[4] in config['opcje_put']:
            opt_type = 'put'
        else:
            opt_type = 'undefined'
        return opt_type
    def GetTD(obj, pattern):
        # Find the first <td> whose class matches the regex pattern.
        return obj.find('td', {'class': compile(pattern)})
    def GetTDContent(obj, pattern):
        return GetTD(obj, pattern).contents[0].strip()
    def ISODate(date_str, format_str):
        return datetime.strptime(date_str, format_str).isoformat()
    opcje = {}
    # Each table row (<tr>) describes one option instrument.
    for walor in BeautifulSoup(web_page.read(), 'lxml').findAll('tr'):
        pole_nazwa = walor.find('td', {'class': compile('colWalor*')})
        if pole_nazwa is None:
            continue  # header/filler row without an instrument name
        nazwa = pole_nazwa.contents[0].strip().upper()
        opcje[nazwa] = {}
        opcje[nazwa]['type'] = GetOptionType(nazwa, config)
        # Last 4 characters of the name encode the strike level.
        opcje[nazwa]['wigp'] = float(nazwa[-4:])
        opcje[nazwa]['exchange'] = InputToFloat(GetTDContent(walor, 'colKurs*'))
        opcje[nazwa]['change'] = InputToFloat(GetTDContent(walor, 'colZmiana*'))
        opcje[nazwa]['pchange'] = InputToFloat(GetTDContent(walor, 'colZmianaProcentowa*'))
        opcje[nazwa]['open'] = InputToFloat(GetTDContent(walor, 'colOtwarcie*'))
        opcje[nazwa]['max'] = InputToFloat(GetTDContent(walor, 'calMaxi*'))
        opcje[nazwa]['min'] = InputToFloat(GetTDContent(walor, 'calMini*'))
        # The page gives only day.month + time; prepend the current year.
        date_str = str(date.today().year) + '.' + GetTDContent(walor, 'colAktualizacja*')
        opcje[nazwa]['date'] = ISODate(date_str, "%Y.%d.%m %H:%M")
        date_str = GetTDContent(walor, 'colTermin*')
        opcje[nazwa]['term'] = ISODate(date_str, "%Y-%m-%d")
        # Scrape timestamp, used as the primary key in the database.
        opcje[nazwa]['tstamp'] = datetime.now().isoformat()
    return opcje
def main():
    """Scrape option quotes and append them to per-instrument SQLite tables.

    Reads the config next to this script, downloads the quotes page, parses
    it with GetOptions() and inserts one row per instrument. Exits via
    SystemExit on download or database-connection failure.
    """
    path = GetScriptPath()
    cfg = GetConfig(path, 'gpw_opcje_db.conf')
    logging.basicConfig(
        format='%(asctime)s %(levelname)s:%(message)s',
        filename=os.path.join(path, cfg['logfile']),
        level=logging.DEBUG)
    db_dir = os.path.join(path, 'database')
    if not os.path.isdir(db_dir):
        logging.warning('Folder "database" does not exist - creating one.')
        os.makedirs(db_dir, mode=0o755)
    try:
        strona = urlopen(cfg['adres_opcje'])
    except Exception as e:
        logging.error(e)
        raise SystemExit()
    try:
        db = sqlite3.connect(os.path.join(db_dir, cfg['database']))
    except Exception as e:
        logging.error(e)
        raise SystemExit()
    try:
        cur = db.cursor()
        for key, value in GetOptions(strona, cfg).items():
            cols = value.keys()
            vals = value.values()
            # NOTE: the table name comes from the scraped instrument name;
            # SQLite cannot parameterize identifiers, so this relies on the
            # names being plain alphanumeric tickers.
            query = "CREATE TABLE IF NOT EXISTS {0} (type TEXT, wigp REAL," \
                    " exchange REAL, change REAL, pchange REAL, open REAL," \
                    "max REAL, min REAL, date TEXT, term TEXT," \
                    "tstamp TEXT PRIMARY KEY)".format(key)
            cur.execute(query)
            # Values go through placeholders, so they are safely escaped.
            query = "INSERT INTO {0} ({1}) VALUES ({2})".format(
                key, ', '.join(cols), ', '.join(['?'] * len(value)))
            cur.execute(query, list(vals))
        db.commit()
    finally:
        # Fix: the original leaked the connection if parsing or SQL failed.
        db.close()
if __name__ == "__main__":
    main()
|
[
"logging.error",
"json.load",
"os.path.abspath",
"logging.warning",
"urllib.request.urlopen",
"datetime.datetime.now",
"datetime.date.today",
"datetime.datetime.strptime",
"os.path.join",
"re.compile"
] |
[((405, 422), 'json.load', 'load', (['config_file'], {}), '(config_file)\n', (409, 422), False, 'from json import load\n'), ((2816, 2883), 'logging.warning', 'logging.warning', (['"""Folder "database" does not exist - creating one."""'], {}), '(\'Folder "database" does not exist - creating one.\')\n', (2831, 2883), False, 'import logging\n'), ((2974, 3001), 'urllib.request.urlopen', 'urlopen', (["cfg['adres_opcje']"], {}), "(cfg['adres_opcje'])\n", (2981, 3001), False, 'from urllib.request import urlopen\n'), ((340, 365), 'os.path.join', 'os.path.join', (['path', 'fname'], {}), '(path, fname)\n', (352, 365), False, 'import os\n'), ((2681, 2715), 'os.path.join', 'os.path.join', (['path', "cfg['logfile']"], {}), "(path, cfg['logfile'])\n", (2693, 2715), False, 'import os\n'), ((2775, 2805), 'os.path.join', 'os.path.join', (['path', '"""database"""'], {}), "(path, 'database')\n", (2787, 2805), False, 'import os\n'), ((2904, 2934), 'os.path.join', 'os.path.join', (['path', '"""database"""'], {}), "(path, 'database')\n", (2916, 2934), False, 'import os\n'), ((3037, 3053), 'logging.error', 'logging.error', (['e'], {}), '(e)\n', (3050, 3053), False, 'import logging\n'), ((3119, 3166), 'os.path.join', 'os.path.join', (['path', '"""database"""', "cfg['database']"], {}), "(path, 'database', cfg['database'])\n", (3131, 3166), False, 'import os\n'), ((3203, 3219), 'logging.error', 'logging.error', (['e'], {}), '(e)\n', (3216, 3219), False, 'import logging\n'), ((1031, 1047), 're.compile', 'compile', (['pattern'], {}), '(pattern)\n', (1038, 1047), False, 'from re import compile\n'), ((1199, 1238), 'datetime.datetime.strptime', 'datetime.strptime', (['date_str', 'format_str'], {}), '(date_str, format_str)\n', (1216, 1238), False, 'from datetime import date, datetime\n'), ((1387, 1407), 're.compile', 'compile', (['"""colWalor*"""'], {}), "('colWalor*')\n", (1394, 1407), False, 'from re import compile\n'), ((2443, 2457), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', 
(2455, 2457), False, 'from datetime import date, datetime\n'), ((253, 278), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (268, 278), False, 'import os\n'), ((2162, 2174), 'datetime.date.today', 'date.today', ([], {}), '()\n', (2172, 2174), False, 'from datetime import date, datetime\n')]
|
"""sync-my-tasks.
Usage:
sync-my-tasks (--from-asana --asana-workspace=<name> [--asana-token-file PATH]) (--to-mstodo)
sync-my-tasks (-h | --help)
sync-my-tasks --version
Options:
-h --help Show this screen.
--version Show version.
--from-asana Pull tasks from Asana.
--asana-workspace=<name> Name of workspace
--asana-token-file PATH Path to file containing the Asana Personal token. [default: ./asana-token]
--to-mstodo Push tasks to Microsoft To-Do.
"""
from docopt import docopt
from sync_my_tasks.provider_asana import AsanaProvider
from sync_my_tasks.provider_mstodo import MsTodoProvider
def main():
    """Entry point: export tasks from the source provider, import into the target."""
    args = docopt(__doc__, version='sync-my-tasks 0.1.0')

    # Configure the export side (Asana).
    if args['--from-asana']:
        with open(args['--asana-token-file']) as token_file:
            token = token_file.read()
        export_provider = AsanaProvider(token, args['--asana-workspace'])

    # Configure the import side (Microsoft To-Do).
    if args['--to-mstodo']:
        import_provider = MsTodoProvider()

    # Pull everything into memory, then push it to the target service.
    task_lists = export_provider.export_tasks()
    import_provider.import_tasks(task_lists)


if __name__ == '__main__':
    main()
|
[
"sync_my_tasks.provider_asana.AsanaProvider",
"sync_my_tasks.provider_mstodo.MsTodoProvider",
"docopt.docopt"
] |
[((731, 777), 'docopt.docopt', 'docopt', (['__doc__'], {'version': '"""sync-my-tasks 0.1.0"""'}), "(__doc__, version='sync-my-tasks 0.1.0')\n", (737, 777), False, 'from docopt import docopt\n'), ((1005, 1063), 'sync_my_tasks.provider_asana.AsanaProvider', 'AsanaProvider', (['asana_token', "arguments['--asana-workspace']"], {}), "(asana_token, arguments['--asana-workspace'])\n", (1018, 1063), False, 'from sync_my_tasks.provider_asana import AsanaProvider\n'), ((1181, 1197), 'sync_my_tasks.provider_mstodo.MsTodoProvider', 'MsTodoProvider', ([], {}), '()\n', (1195, 1197), False, 'from sync_my_tasks.provider_mstodo import MsTodoProvider\n')]
|
import torch
import torch.nn as nn
from torch.nn import functional as F
from .base import get_syncbn
from .base import ASPP
class dec_deeplabv3(nn.Module):
    """DeepLabV3 decoder: ASPP followed by a conv/BN/ReLU/dropout classifier head."""

    def __init__(self, in_planes, num_classes=19, inner_planes=256, sync_bn=False, dilations=(12, 24, 36)):
        super(dec_deeplabv3, self).__init__()
        # Use synchronized batch-norm across GPUs when requested.
        bn = get_syncbn() if sync_bn else nn.BatchNorm2d
        self.aspp = ASPP(in_planes, inner_planes=inner_planes, sync_bn=sync_bn, dilations=dilations)
        layers = [
            nn.Conv2d(self.aspp.get_outplanes(), 256, kernel_size=3, padding=1, dilation=1, bias=False),
            bn(256),
            nn.ReLU(inplace=True),
            nn.Dropout2d(0.1),
            nn.Conv2d(256, num_classes, kernel_size=1, stride=1, padding=0, bias=True),
        ]
        self.head = nn.Sequential(*layers)

    def forward(self, x):
        """Run ASPP over the feature map, then the classification head."""
        return self.head(self.aspp(x))
class dec_deeplabv3_plus(nn.Module):
    """DeepLabV3+ style decoder: ASPP on the deepest features, fused with a
    projected low-level feature map before the final 1x1 classifier."""
    def __init__(self, in_planes, num_classes=19, inner_planes=256, sync_bn=False, dilations=(12, 24, 36)):
        super(dec_deeplabv3_plus, self).__init__()
        norm_layer = get_syncbn() if sync_bn else nn.BatchNorm2d
        self.aspp = ASPP(in_planes, inner_planes=inner_planes, sync_bn=sync_bn, dilations=dilations)
        # head: project ASPP output down to 256 channels
        self.head = nn.Sequential(
            nn.Conv2d(self.aspp.get_outplanes(), 256, kernel_size=3, padding=1, dilation=1, bias=False),
            norm_layer(256),
            nn.ReLU(inplace=True),
            nn.Dropout2d(0.1))
        # final: 1x1 classifier applied after the low/high-level fusion
        self.final = nn.Conv2d(256, num_classes, kernel_size=1, stride=1, padding=0, bias=True)
        # tail: fuses the concatenated (low-level 256 + ASPP 256) = 512 channels
        self.tail = nn.Sequential(
            nn.Conv2d(512, 256, kernel_size=3, stride=1, padding=1, bias=True),
            norm_layer(256),
            nn.ReLU(inplace=True),
            nn.Dropout2d(0.1),
            nn.Conv2d(256, 256, kernel_size=3, stride=1, padding=1, bias=True),
            norm_layer(256),nn.ReLU(inplace=True),nn.Dropout2d(0.1))
        # low_conv: 1x1 projection of the low-level (stride-4) feature map
        self.low_conv = nn.Sequential(nn.Conv2d(256, 256, kernel_size=1),
                                        norm_layer(256),
                                        nn.ReLU(inplace=True))
    def forward(self, x):
        """Fuse low-level and ASPP features and classify.

        :param x: tuple of four backbone feature maps (x1 shallow ... x4 deep)
        :return: logits at the spatial resolution of x1
        """
        x1, x2, x3, x4 = x
        aspp_out = self.aspp(x4)
        low_feat = self.low_conv(x1)
        aspp_out = self.head(aspp_out)
        # upsample ASPP output to the low-level feature resolution
        h,w = low_feat.size()[-2:]
        aspp_out = F.interpolate(aspp_out,size=(h,w),mode='bilinear',align_corners=True)
        aspp_out = torch.cat((low_feat,aspp_out),dim=1)
        aspp_out = self.tail(aspp_out)
        res = self.final(aspp_out)
        return res
class Aux_Module(nn.Module):
    """Auxiliary segmentation head: 3x3 conv -> BN -> ReLU -> dropout -> 1x1 classifier."""

    def __init__(self, in_planes, num_classes=19, sync_bn=False):
        super(Aux_Module, self).__init__()
        # Synchronized batch-norm only when explicitly requested.
        bn = get_syncbn() if sync_bn else nn.BatchNorm2d
        layers = [
            nn.Conv2d(in_planes, 256, kernel_size=3, stride=1, padding=1),
            bn(256),
            nn.ReLU(inplace=True),
            nn.Dropout2d(0.1),
            nn.Conv2d(256, num_classes, kernel_size=1, stride=1, padding=0, bias=True),
        ]
        self.aux = nn.Sequential(*layers)

    def forward(self, x):
        """Apply the auxiliary head to feature map x."""
        return self.aux(x)
|
[
"torch.nn.Dropout2d",
"torch.nn.ReLU",
"torch.nn.Conv2d",
"torch.cat",
"torch.nn.functional.interpolate"
] |
[((1579, 1653), 'torch.nn.Conv2d', 'nn.Conv2d', (['(256)', 'num_classes'], {'kernel_size': '(1)', 'stride': '(1)', 'padding': '(0)', 'bias': '(True)'}), '(256, num_classes, kernel_size=1, stride=1, padding=0, bias=True)\n', (1588, 1653), True, 'import torch.nn as nn\n'), ((2367, 2440), 'torch.nn.functional.interpolate', 'F.interpolate', (['aspp_out'], {'size': '(h, w)', 'mode': '"""bilinear"""', 'align_corners': '(True)'}), "(aspp_out, size=(h, w), mode='bilinear', align_corners=True)\n", (2380, 2440), True, 'from torch.nn import functional as F\n'), ((2456, 2494), 'torch.cat', 'torch.cat', (['(low_feat, aspp_out)'], {'dim': '(1)'}), '((low_feat, aspp_out), dim=1)\n', (2465, 2494), False, 'import torch\n'), ((665, 686), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (672, 686), True, 'import torch.nn as nn\n'), ((700, 717), 'torch.nn.Dropout2d', 'nn.Dropout2d', (['(0.1)'], {}), '(0.1)\n', (712, 717), True, 'import torch.nn as nn\n'), ((731, 805), 'torch.nn.Conv2d', 'nn.Conv2d', (['(256)', 'num_classes'], {'kernel_size': '(1)', 'stride': '(1)', 'padding': '(0)', 'bias': '(True)'}), '(256, num_classes, kernel_size=1, stride=1, padding=0, bias=True)\n', (740, 805), True, 'import torch.nn as nn\n'), ((1503, 1524), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (1510, 1524), True, 'import torch.nn as nn\n'), ((1538, 1555), 'torch.nn.Dropout2d', 'nn.Dropout2d', (['(0.1)'], {}), '(0.1)\n', (1550, 1555), True, 'import torch.nn as nn\n'), ((1701, 1767), 'torch.nn.Conv2d', 'nn.Conv2d', (['(512)', '(256)'], {'kernel_size': '(3)', 'stride': '(1)', 'padding': '(1)', 'bias': '(True)'}), '(512, 256, kernel_size=3, stride=1, padding=1, bias=True)\n', (1710, 1767), True, 'import torch.nn as nn\n'), ((1810, 1831), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (1817, 1831), True, 'import torch.nn as nn\n'), ((1845, 1862), 'torch.nn.Dropout2d', 'nn.Dropout2d', (['(0.1)'], {}), '(0.1)\n', (1857, 
1862), True, 'import torch.nn as nn\n'), ((1876, 1942), 'torch.nn.Conv2d', 'nn.Conv2d', (['(256)', '(256)'], {'kernel_size': '(3)', 'stride': '(1)', 'padding': '(1)', 'bias': '(True)'}), '(256, 256, kernel_size=3, stride=1, padding=1, bias=True)\n', (1885, 1942), True, 'import torch.nn as nn\n'), ((1972, 1993), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (1979, 1993), True, 'import torch.nn as nn\n'), ((1994, 2011), 'torch.nn.Dropout2d', 'nn.Dropout2d', (['(0.1)'], {}), '(0.1)\n', (2006, 2011), True, 'import torch.nn as nn\n'), ((2051, 2085), 'torch.nn.Conv2d', 'nn.Conv2d', (['(256)', '(256)'], {'kernel_size': '(1)'}), '(256, 256, kernel_size=1)\n', (2060, 2085), True, 'import torch.nn as nn\n'), ((2128, 2149), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (2135, 2149), True, 'import torch.nn as nn\n'), ((2841, 2902), 'torch.nn.Conv2d', 'nn.Conv2d', (['in_planes', '(256)'], {'kernel_size': '(3)', 'stride': '(1)', 'padding': '(1)'}), '(in_planes, 256, kernel_size=3, stride=1, padding=1)\n', (2850, 2902), True, 'import torch.nn as nn\n'), ((2953, 2974), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (2960, 2974), True, 'import torch.nn as nn\n'), ((2992, 3009), 'torch.nn.Dropout2d', 'nn.Dropout2d', (['(0.1)'], {}), '(0.1)\n', (3004, 3009), True, 'import torch.nn as nn\n'), ((3027, 3101), 'torch.nn.Conv2d', 'nn.Conv2d', (['(256)', 'num_classes'], {'kernel_size': '(1)', 'stride': '(1)', 'padding': '(0)', 'bias': '(True)'}), '(256, num_classes, kernel_size=1, stride=1, padding=0, bias=True)\n', (3036, 3101), True, 'import torch.nn as nn\n')]
|
import cv2
import numpy as np
from skimage.segmentation import slic
from skimage import color
from skimage.measure import regionprops
from PIL import Image, ImageDraw
import moviepy.editor as mp
import random
import os
class GifMaker():
    """Builds an animated 'mosaic reveal' of an image by progressively
    uncovering SLIC superpixels, saved as both GIF and MP4."""
    def to_mosaic_gif(self, img_path, n_segments = 150, segments_per_frame = 3):
        """Create <name>.gif and <name>.mp4 next to the source image.

        :param img_path: path to the input image
        :param n_segments: approximate number of SLIC superpixels to generate
        :param segments_per_frame: how many new superpixels appear per frame
        """
        img = cv2.imread(img_path)
        # generate superpixels
        segments = slic(img, n_segments = n_segments, sigma = 5)
        # generate image with superpixels avg color
        superpixels_image = color.label2rgb(segments, img, kind='avg')
        superpixels_image = cv2.normalize(superpixels_image, None, alpha = 0,
            beta = 255, norm_type = cv2.NORM_MINMAX, dtype = cv2.CV_8U)
        mask = np.zeros(img.shape[:2], dtype = "uint8")
        # NOTE: n_segments is reused here as a countdown of segments still hidden
        n_segments = len(np.unique(segments))
        frames = []
        for (i, segVal) in enumerate(np.unique(segments)):
            # construct a mask for the segment
            # (the mask accumulates across iterations, so each frame reveals
            # one more superpixel on top of the previously revealed ones)
            mask[segments == segVal] = 255
            a = cv2.bitwise_and(superpixels_image, superpixels_image, mask = mask)
            a = np.uint8(a)
            a = cv2.cvtColor(a, cv2.COLOR_BGR2RGB)
            if i % segments_per_frame == 0:
                n_segments -= segments_per_frame
                frames.append(Image.fromarray(a))
        # make sure the fully revealed image ends the animation
        if n_segments > 0:
            frames.append(Image.fromarray(a))
        path_splitted = os.path.split(img_path)
        filename_with_extension = path_splitted[1]
        path = path_splitted[0]
        filename = filename_with_extension.split('.')[0]
        self.__save_gif(path, filename, frames)
        self.__to_mp4(path, filename)
    def __save_gif(self, path, filename, frames):
        """Write *frames* as an animated GIF named <filename>.gif in *path*."""
        filename = filename + '.gif'
        save_path = os.path.join(path, filename)
        frames[0].save(save_path,
            save_all=True, format='GIF', append_images=frames[1:],
            optimize=True, quality=20, duration=1, loop=0)
    def __to_mp4(self, path, filename):
        """Convert the previously written <filename>.gif to <filename>.mp4."""
        read_path = os.path.join(path, filename + '.gif')
        save_path = os.path.join(path, filename + '.mp4')
        clip = mp.VideoFileClip(read_path)
        clip.write_videofile(save_path)
if __name__ == "__main__":
    img_path = './data/prova_gif.jpg'
    g = GifMaker()
    g.to_mosaic_gif(img_path)
|
[
"numpy.uint8",
"skimage.color.label2rgb",
"moviepy.editor.VideoFileClip",
"cv2.bitwise_and",
"cv2.cvtColor",
"numpy.zeros",
"PIL.Image.fromarray",
"cv2.imread",
"cv2.normalize",
"skimage.segmentation.slic",
"os.path.split",
"os.path.join",
"numpy.unique"
] |
[((326, 346), 'cv2.imread', 'cv2.imread', (['img_path'], {}), '(img_path)\n', (336, 346), False, 'import cv2\n'), ((385, 426), 'skimage.segmentation.slic', 'slic', (['img'], {'n_segments': 'n_segments', 'sigma': '(5)'}), '(img, n_segments=n_segments, sigma=5)\n', (389, 426), False, 'from skimage.segmentation import slic\n'), ((499, 541), 'skimage.color.label2rgb', 'color.label2rgb', (['segments', 'img'], {'kind': '"""avg"""'}), "(segments, img, kind='avg')\n", (514, 541), False, 'from skimage import color\n'), ((564, 670), 'cv2.normalize', 'cv2.normalize', (['superpixels_image', 'None'], {'alpha': '(0)', 'beta': '(255)', 'norm_type': 'cv2.NORM_MINMAX', 'dtype': 'cv2.CV_8U'}), '(superpixels_image, None, alpha=0, beta=255, norm_type=cv2.\n NORM_MINMAX, dtype=cv2.CV_8U)\n', (577, 670), False, 'import cv2\n'), ((696, 734), 'numpy.zeros', 'np.zeros', (['img.shape[:2]'], {'dtype': '"""uint8"""'}), "(img.shape[:2], dtype='uint8')\n", (704, 734), True, 'import numpy as np\n'), ((1247, 1270), 'os.path.split', 'os.path.split', (['img_path'], {}), '(img_path)\n', (1260, 1270), False, 'import os\n'), ((1567, 1595), 'os.path.join', 'os.path.join', (['path', 'filename'], {}), '(path, filename)\n', (1579, 1595), False, 'import os\n'), ((1800, 1837), 'os.path.join', 'os.path.join', (['path', "(filename + '.gif')"], {}), "(path, filename + '.gif')\n", (1812, 1837), False, 'import os\n'), ((1852, 1889), 'os.path.join', 'os.path.join', (['path', "(filename + '.mp4')"], {}), "(path, filename + '.mp4')\n", (1864, 1889), False, 'import os\n'), ((1899, 1926), 'moviepy.editor.VideoFileClip', 'mp.VideoFileClip', (['read_path'], {}), '(read_path)\n', (1915, 1926), True, 'import moviepy.editor as mp\n'), ((759, 778), 'numpy.unique', 'np.unique', (['segments'], {}), '(segments)\n', (768, 778), True, 'import numpy as np\n'), ((826, 845), 'numpy.unique', 'np.unique', (['segments'], {}), '(segments)\n', (835, 845), True, 'import numpy as np\n'), ((928, 992), 'cv2.bitwise_and', 'cv2.bitwise_and', 
(['superpixels_image', 'superpixels_image'], {'mask': 'mask'}), '(superpixels_image, superpixels_image, mask=mask)\n', (943, 992), False, 'import cv2\n'), ((1002, 1013), 'numpy.uint8', 'np.uint8', (['a'], {}), '(a)\n', (1010, 1013), True, 'import numpy as np\n'), ((1021, 1055), 'cv2.cvtColor', 'cv2.cvtColor', (['a', 'cv2.COLOR_BGR2RGB'], {}), '(a, cv2.COLOR_BGR2RGB)\n', (1033, 1055), False, 'import cv2\n'), ((1208, 1226), 'PIL.Image.fromarray', 'Image.fromarray', (['a'], {}), '(a)\n', (1223, 1226), False, 'from PIL import Image, ImageDraw\n'), ((1147, 1165), 'PIL.Image.fromarray', 'Image.fromarray', (['a'], {}), '(a)\n', (1162, 1165), False, 'from PIL import Image, ImageDraw\n')]
|
from django.contrib import admin
from .models import Student
# Register your models here.
class StudentModelAdmin(admin.ModelAdmin):
    """Admin configuration for Student: the changelist shows the model's
    string representation."""
    list_display = ["__str__"]
    # NOTE: the original had an inner ``class Meta: model = Student`` here,
    # but ModelAdmin does not read an inner Meta (that is a Model/ModelForm
    # concept) -- it was inert and has been removed. The model is bound to
    # this admin class by the register() call below.
admin.site.register(Student, StudentModelAdmin)
|
[
"django.contrib.admin.site.register"
] |
[((215, 262), 'django.contrib.admin.site.register', 'admin.site.register', (['Student', 'StudentModelAdmin'], {}), '(Student, StudentModelAdmin)\n', (234, 262), False, 'from django.contrib import admin\n')]
|
from typing import List
from typing import Tuple
import numpy as np
import yaml
from music_genre_classifier import dataset
from music_genre_classifier import models
if __name__ == "__main__":
    import argparse

    # --- command line -----------------------------------------------------
    parser = argparse.ArgumentParser(prog="Music Genre Classifier")
    parser.add_argument("classifier_conf_path", help="path to yaml config for classifier")
    parser.add_argument(
        "--display_results",
        help="if should display results of training",
        action="store_true",
    )
    args = parser.parse_args()

    # --- configuration ------------------------------------------------------
    with open(args.classifier_conf_path) as classifier_conf_file:
        classifier_conf = yaml.load(classifier_conf_file, Loader=yaml.Loader)

    # --- data: build the GTZAN dataset and split it -------------------------
    full_ds: np.ndarray = dataset.create_gtzan_dataset(**classifier_conf["dataset"])
    train_ds, test_ds = dataset.split_dataset(full_ds)

    # --- models: one trainable per config entry -----------------------------
    model_trainables: List[models.ModelTrainable] = [
        models.build_from_config(conf, train_ds, test_ds)
        for conf in classifier_conf["models"]
    ]

    # tune, then train, then evaluate (TODO: parallelize these stages later)
    for model in model_trainables:
        model.tune()
    for model in model_trainables:
        model.train()
    results: List[Tuple[float, float]] = [model.test() for model in model_trainables]

    # --- report --------------------------------------------------------------
    if args.display_results:
        for model, result in zip(model_trainables, results):
            print(str(model), model._best_hyperparams.values, result)  # type: ignore
|
[
"yaml.load",
"argparse.ArgumentParser",
"music_genre_classifier.dataset.split_dataset",
"music_genre_classifier.models.build_from_config",
"music_genre_classifier.dataset.create_gtzan_dataset"
] |
[((231, 285), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'prog': '"""Music Genre Classifier"""'}), "(prog='Music Genre Classifier')\n", (254, 285), False, 'import argparse\n'), ((744, 802), 'music_genre_classifier.dataset.create_gtzan_dataset', 'dataset.create_gtzan_dataset', ([], {}), "(**classifier_conf['dataset'])\n", (772, 802), False, 'from music_genre_classifier import dataset\n'), ((871, 901), 'music_genre_classifier.dataset.split_dataset', 'dataset.split_dataset', (['full_ds'], {}), '(full_ds)\n', (892, 901), False, 'from music_genre_classifier import dataset\n'), ((644, 695), 'yaml.load', 'yaml.load', (['classifier_conf_file'], {'Loader': 'yaml.Loader'}), '(classifier_conf_file, Loader=yaml.Loader)\n', (653, 695), False, 'import yaml\n'), ((997, 1052), 'music_genre_classifier.models.build_from_config', 'models.build_from_config', (['model_conf', 'train_ds', 'test_ds'], {}), '(model_conf, train_ds, test_ds)\n', (1021, 1052), False, 'from music_genre_classifier import models\n')]
|
import MeCab

# Read one sentence from stdin, run MeCab morphological analysis, and print
# each surface form together with its third-from-last feature field.
text = input()
tagger = MeCab.Tagger()
for line in tagger.parse(text).split('\n'):
    fields = line.split(',')
    if fields[0] == "EOS":
        break  # end-of-sentence marker: nothing more to print
    print(fields[0], "\t", fields[-3])
|
[
"MeCab.Tagger"
] |
[((61, 75), 'MeCab.Tagger', 'MeCab.Tagger', ([], {}), '()\n', (73, 75), False, 'import MeCab\n')]
|
# game.py
import pygame
from field import GameField
from preview import Preview
from brick import Brick
from figure import generate_randomized_figures as FigureFactory
from control import Control
from score import Score
import colors
# --- Tunable game constants ---------------------------------------------
GAME_TITLE = "Shricktris"
START_FPS = 12  # initial frames per second of the control loop
START_GAME_STEPOVER = 8  # frames between automatic down-steps (lower = faster)
SCREEN_RESOLUTION = (600, 800)  # window size in pixels (width, height)
GRID_COLUMNS = 16  # play-field width in cells
GRID_ROWS = 30  # play-field height in cells
class Game:
    """Main Shricktris game object: owns the pygame window, the play field,
    the current/next figure, score keeping and the frame loop."""
    def __init__(self):
        """Set up pygame, all game components and the initial (paused) state."""
        # init pygame components
        pygame.init()
        pygame.display.set_caption(GAME_TITLE)
        self._screen = pygame.display.set_mode(SCREEN_RESOLUTION)
        self._background = pygame.Surface(self._screen.get_size()).convert()
        self._background.fill(colors.WHITE)
        self._font = pygame.font.SysFont(None, 24)
        self._set_message("Press PAUSE key to start!", colors.GREEN)
        # init game components
        rect_pixel_length = 20
        self._field = GameField(self._screen, GRID_COLUMNS, GRID_ROWS, rect_pixel_length)
        self._preview = Preview(self._screen, SCREEN_RESOLUTION[0] - 100, 20,
            Brick.DIMENSION, int(rect_pixel_length / 2))
        self._figure_factory = FigureFactory(self._field)
        self._figure = next(self._figure_factory)
        self._next_figure = next(self._figure_factory)
        self._score = Score()
        self._control = Control(START_FPS)
        # init speed and game state
        self._stepover = START_GAME_STEPOVER
        self._nostep = self._stepover
        self._looping = True
        self._was_started = False
        self._has_stopped = False
        self._is_paused = True
    def _set_message(self, text, color):
        """Render *text* into self._text_image (gray when color is None)."""
        self._text_image = self._font.render(text, True, colors.GRAY if color is None else color)
    def _display_score(self):
        """Refresh the on-screen message with the current (or final) score."""
        if self._has_stopped:
            score_text = self._score.get_final_score()
            self._set_message(score_text + " Game finished. Press Q to quit!", colors.RED)
        else:
            score_text = self._score.get_current_score()
            self._set_message(score_text, colors.CYAN)
        # echo to the console in both cases
        print(score_text)
    def _adjust_speed(self, delta):
        """Change the step-over delay by *delta* frames (never below 1)."""
        old_stepover = self._stepover
        self._stepover = max(self._stepover + delta, 1)
        if self._stepover != old_stepover:
            print("[DEBUG] game_stepover = " + str(self._stepover))
    def _check_states(self):
        """Apply speed/pause/quit inputs to the game state."""
        # game speed
        if self._control.speed_up():
            self._adjust_speed(-1)
        if self._control.speed_down():
            self._adjust_speed(+1)
        # game state
        if self._control.pause() and not self._has_stopped:
            self._is_paused = not self._is_paused
            if self._is_paused:
                self._set_message("Press PAUSE key to continue.", colors.BLUE)
            else:
                self._was_started = True
                self._set_message("Press PAUSE key to pause.", colors.BLUE)
        if self._control.quit():
            print("Quitting...")
            self._looping = False
    def _move_figure(self):
        """Forward movement/rotation inputs to the active figure."""
        if self._control.step_left():
            self._figure.step_left()
        if self._control.step_right():
            self._figure.step_right()
        if self._control.step_down():
            self._figure.step_down()
        if self._control.fall_down():
            self._figure.fall_down()
        if self._control.rotate():
            self._figure.rotate()
    def _resolve_lines(self):
        """Clear completed lines, update the score and speed up the game."""
        lines = self._field.resolve_lines()
        if lines:
            self._score.add_lines(lines)
            self._display_score()
            # increase game speed
            self._stepover = max(self._stepover - 1, 1)
            print("[DEBUG] game_stepover = " + str(self._stepover))
    def _spawn_new_figure(self):
        """Promote the preview figure once the active one freezes; detect game over."""
        if self._figure.is_freezed():
            self._figure = self._next_figure
            if self._field.collides(self._figure):
                # no room for the new figure: game over
                self._has_stopped = True
                self._display_score()
            else:
                self._next_figure = next(self._figure_factory)
                print("Next figure: " + self._next_figure.get_name())
    def _draw(self):
        """Repaint background, field, figures, preview and message, then flip."""
        self._screen.blit(self._background, (0, 0))
        self._field.draw_grid()
        if self._was_started:
            if not self._has_stopped:
                self._field.draw_figure(self._figure)
                self._preview.draw_figure(self._next_figure)
            else:
                # hack in some flickering
                self._nostep = (self._nostep + 1) % 3
                if not self._nostep:
                    self._field.draw_figure(self._figure, colors.GRAY)
        if self._text_image is not None:
            rect = self._text_image.get_rect()
            rect.topleft = (20, 20)
            self._screen.blit(self._text_image, rect)
        pygame.display.update()
    def loop(self):
        """Run the main frame loop until the player quits; shuts pygame down."""
        while self._looping:
            self._control.process_events()
            self._check_states()
            if not self._is_paused and not self._has_stopped:
                self._move_figure()
                # only every self._stepover-th frame advances the figure
                self._nostep = (self._nostep + 1) % self._stepover
                if not self._nostep:
                    # advance game
                    self._figure.step_down()
                    self._resolve_lines()
                    self._spawn_new_figure()
            self._draw()
        # print the final score unless _display_score already did on game over
        if not self._has_stopped:
            print(self._score.get_final_score())
        pygame.quit()
|
[
"pygame.quit",
"pygame.display.set_caption",
"pygame.font.SysFont",
"pygame.display.set_mode",
"control.Control",
"score.Score",
"pygame.init",
"figure.generate_randomized_figures",
"pygame.display.update",
"field.GameField"
] |
[((445, 458), 'pygame.init', 'pygame.init', ([], {}), '()\n', (456, 458), False, 'import pygame\n'), ((467, 505), 'pygame.display.set_caption', 'pygame.display.set_caption', (['GAME_TITLE'], {}), '(GAME_TITLE)\n', (493, 505), False, 'import pygame\n'), ((529, 571), 'pygame.display.set_mode', 'pygame.display.set_mode', (['SCREEN_RESOLUTION'], {}), '(SCREEN_RESOLUTION)\n', (552, 571), False, 'import pygame\n'), ((714, 743), 'pygame.font.SysFont', 'pygame.font.SysFont', (['None', '(24)'], {}), '(None, 24)\n', (733, 743), False, 'import pygame\n'), ((898, 965), 'field.GameField', 'GameField', (['self._screen', 'GRID_COLUMNS', 'GRID_ROWS', 'rect_pixel_length'], {}), '(self._screen, GRID_COLUMNS, GRID_ROWS, rect_pixel_length)\n', (907, 965), False, 'from field import GameField\n'), ((1132, 1158), 'figure.generate_randomized_figures', 'FigureFactory', (['self._field'], {}), '(self._field)\n', (1145, 1158), True, 'from figure import generate_randomized_figures as FigureFactory\n'), ((1286, 1293), 'score.Score', 'Score', ([], {}), '()\n', (1291, 1293), False, 'from score import Score\n'), ((1318, 1336), 'control.Control', 'Control', (['START_FPS'], {}), '(START_FPS)\n', (1325, 1336), False, 'from control import Control\n'), ((4889, 4912), 'pygame.display.update', 'pygame.display.update', ([], {}), '()\n', (4910, 4912), False, 'import pygame\n'), ((5578, 5591), 'pygame.quit', 'pygame.quit', ([], {}), '()\n', (5589, 5591), False, 'import pygame\n')]
|
# conda activate pymesh
import math
import numpy as np
import trimesh
import cv2
import os
import configs.config_loader as cfg_loader
import NDF_combine as NDF
def str2bool(inp):
    """Convert a string to bool: case-insensitive 'true' -> True, else False.

    Fixes the original's substring test (``inp.lower() in 'true'``), which
    wrongly returned True for inputs like 't', 'ru' and even ''.
    """
    return inp.lower() == 'true'
class Renderer():
    """Sphere-tracing style renderer: builds a camera/screen setup from the
    run configuration and marches rays against a neural distance field (NDF)."""
    def __init__(self):
        """Read the config and precompute the screen points and camera rays."""
        self.get_args()
        # NOTE(review): define_screen_points() calls
        # create_plane_points_from_bounds() again, so this first call is
        # redundant but harmless.
        self.create_plane_points_from_bounds()
        self.define_screen_points()
        self.define_unit_rays()
    def get_args(self):
        """
        Load the run configuration into self.args and make sure the output
        folder exists.
        """
        self.args = cfg_loader.get_config()
        # print(self.args.cam_position)
        # print(self.args.cam_orientation)
        os.makedirs(self.args.folder, exist_ok=True)
    def create_plane_points_from_bounds(self):
        """
        Creates a plane of points which acts as the screen for rendering.

        Builds a size x size grid over [-screen_bound, screen_bound]^2 in the
        xy plane, flattens it, fixes z at screen_depth, and stores the
        resulting (size*size, 3) array in self.points_list.
        """
        # create an xy plane
        x = np.linspace(-self.args.screen_bound, self.args.screen_bound, self.args.size)
        y = np.linspace(-self.args.screen_bound, self.args.screen_bound, self.args.size)
        X, Y = np.meshgrid(x, y, indexing='ij')
        X = X.reshape((np.prod(X.shape),))
        Y = Y.reshape((np.prod(Y.shape),))
        # append the third dimension coordinate to the xy plane
        points_list = np.column_stack((X, Y))
        points_list = np.insert(points_list, 2, self.args.screen_depth, axis=1)
        self.points_list = points_list
    def to_rotation_matrix(self):
        """
        Creates rotation matrix from the input euler angles.

        Angles are given in degrees in self.args.cam_orientation; the combined
        matrix R = Rz @ Ry @ Rx is stored in self.rot_matrix.
        """
        euler_angles = np.array(self.args.cam_orientation)
        # rotation about the x axis
        R_x = np.array([[1, 0, 0],
                        [0, math.cos(math.radians(euler_angles[0])), -math.sin(math.radians(euler_angles[0]))],
                        [0, math.sin(math.radians(euler_angles[0])), math.cos(math.radians(euler_angles[0]))]
                        ])
        # rotation about the y axis
        R_y = np.array([[math.cos(math.radians(euler_angles[1])), 0, math.sin(math.radians(euler_angles[1]))],
                        [0, 1, 0],
                        [-math.sin(math.radians(euler_angles[1])), 0, math.cos(math.radians(euler_angles[1]))]
                        ])
        # rotation about the z axis
        R_z = np.array([[math.cos(math.radians(euler_angles[2])), -math.sin(math.radians(euler_angles[2])), 0],
                        [math.sin(math.radians(euler_angles[2])), math.cos(math.radians(euler_angles[2])), 0],
                        [0, 0, 1]
                        ])
        R = np.dot(R_z, np.dot(R_y, R_x))
        self.rot_matrix = R
    def to_transf_matrix(self):
        """
        Creates a transformation matrix from rotation matrix and translation vector.

        Builds the 4x4 homogeneous camera transform: the 3x3 rotation padded
        to 4x4, with the camera position written into the translation column.
        Stored in self.trans_mat.
        """
        self.to_rotation_matrix()
        # pad the 3x3 rotation to homogeneous 4x4 form
        temp_trans = np.array([0, 0, 0])
        temp_trans = np.reshape(temp_trans, (1, 3))
        rot = np.concatenate((self.rot_matrix, temp_trans), axis=0)
        rot = np.concatenate((rot, np.reshape(np.array([0, 0, 0, 1]), (4, 1))), axis=1)
        # write the camera position into the last (translation) column
        inp_trans = np.reshape(self.args.cam_position, (3,))
        inp_trans = np.concatenate((inp_trans, [1]), axis=0)
        rot[:, 3] = inp_trans
        self.trans_mat = rot
def append_one(self, arr):
"""
:param arr:
:return:
"""
append = np.ones(arr.shape[0])
append = np.reshape(append, (append.shape[0], 1))
new_arr = np.concatenate((arr, append), axis=1)
return new_arr
    def define_screen_points(self):
        """
        Transforms the screen points and camera position using the camera translation and orientation information provided by the user.

        The camera origin (row 0) and all screen points are moved to world
        space with one homogeneous matrix multiply; the result is stored in
        self.screen_and_cam_transformed.
        """
        self.create_plane_points_from_bounds()
        self.to_transf_matrix()
        cam_loc = np.array([0, 0, 0])
        # stack the camera (row 0) on top of the screen points, then homogenize
        screen_and_cam = np.vstack((cam_loc, self.points_list))
        screen_and_cam_hom = self.append_one(screen_and_cam)
        # 4 X SIZE^2
        screen_and_cam_hom_T = np.transpose(screen_and_cam_hom, (1, 0))
        screen_and_cam_hom_T_transformed = np.matmul(self.trans_mat, screen_and_cam_hom_T)
        # SIZE^2 X 4
        screen_and_cam_hom_transformed = np.transpose(screen_and_cam_hom_T_transformed, (1, 0))
        # SIZE^2 X 3 -- drop the homogeneous coordinate
        self.screen_and_cam_transformed = screen_and_cam_hom_transformed[:, :3]
        if self.args.debug_mode:
            # dump the transformed setup as a point cloud for inspection
            trimesh.Trimesh(vertices=self.screen_and_cam_transformed, faces=[]).export('setup_camera_rot.off')
def define_unit_rays(self):
"""
Defines rays from camera to the screen along which
"""
# Separate screen points and camera point
points = self.screen_and_cam_transformed[1:, :]
self.cam_trans = np.reshape(self.screen_and_cam_transformed[0, :], (1, 3))
# Define ray paths from camera
ray_vector = (points - self.cam_trans)
# Normalize ray vectors
norm_ray = np.linalg.norm(ray_vector, ord=2, axis=1)
norm_ray = np.reshape(norm_ray, (self.args.size * self.args.size, 1))
self.unit_rays = ray_vector / norm_ray
def get_lgth_rays(self):
"""
:return:
"""
src_batch = np.repeat([self.args.light_position], self.args.size * self.args.size, axis=0)
rays = src_batch - self.final_points
norm_ray = np.linalg.norm(rays, ord=2, axis=1)
norm_ray = np.reshape(norm_ray, (self.args.size * self.args.size, 1))
self.ray_to_src = rays / norm_ray
    def run(self):
        """
        Runs the sphere-tracing (ray marching) algorithm against the loaded
        neural distance field (NDF), producing per-pixel depth
        (``self.depth_np``), surface points (``self.final_points``) and
        surface normals (``self.normals``).
        """
        print(self.args)
        NDF.loadNDF(
            mode = 'test', index = self.args.index,
            pointcloud_samples = self.args.pc_samples,
            exp_name = self.args.exp_name, data_dir = self.args.data_dir,
            split_file = self.args.split_file, sample_distribution = self.args.sample_ratio,
            sample_sigmas = self.args.sample_std_dev, res = self.args.input_res
        )
        # One depth value per pixel ray; every ray starts at the camera.
        depth = np.zeros((self.args.size * self.args.size, 1))
        cam_batch = np.repeat(self.cam_trans, self.args.size * self.args.size, axis=0)
        points = cam_batch.copy()
        # NOTE: 'iter' shadows the builtin; used only as a loop counter here.
        iter = 1
        ray = self.unit_rays.copy()
        indices_cont_all = list(range(self.args.size * self.args.size))
        # Sphere tracing: advance every active ray by (alpha * predicted
        # distance) until it is within epsilon of the surface or its
        # accumulated depth exceeds max_depth.
        while len(indices_cont_all) > 0:
            print('Iter:', iter)
            dists_points = NDF.predictRotNDF(points)
            dists_points = np.reshape(dists_points, (self.args.size * self.args.size, 1))
            # Rays that converged (close to the surface) or escaped (too deep).
            indices_stop = np.where(dists_points < self.args.epsilon)[0]
            indices_stop2 = np.where(depth > self.args.max_depth)[0]
            indices_stop_all = list(set(indices_stop).union(set(indices_stop2)))
            # print(len(indices_stop_all))
            # Zero the direction of finished rays so they stop moving.
            ray[indices_stop_all] = 0
            setA = set(range(self.args.size * self.args.size))
            setB = set(indices_stop_all)
            indices_cont_all = list(setA.difference(setB))
            # print(len(indices_cont_all))
            # Only still-active rays accumulate depth; finished rays have
            # zeroed directions so the position update leaves them in place.
            depth[indices_cont_all] = depth[indices_cont_all] + self.args.alpha * dists_points[indices_cont_all]
            points = points + (ray * (self.args.alpha * dists_points))
            iter = iter + 1
        # Step slightly back from the surface before evaluating gradients.
        points = points - (self.unit_rays * self.args.step_back)
        self.final_points = points.copy()
        ## NORMALS
        # Clamp depth of escaped rays to max_depth (used as background mask).
        self.depth_np = depth.copy()
        self.depth_np[self.depth_np > self.args.max_depth] = self.args.max_depth
        # NDF gradients at the surface points serve as surface normals.
        dists, gradients = NDF.predictRotGradientNDF(points)
        self.final_gradients = gradients.copy()
        self.normals = np.reshape(gradients, (self.args.size * self.args.size, 3))
    def save(self, image, name, size, normalize):
        """
        Saves a single rendered image to ``self.args.folder``.

        :param image: Input image as np array, flat (size*size, channels)
        :param name: Name of file to be stored
        :param size: number of channels per pixel (1 or 3, per the reshape below)
        :param normalize: whether to normalize all values to 0-1
        """
        if normalize:
            # Values assumed to lie in [-1, 1]; map them to [0, 1].
            image = (image + 1)/2
        image = np.reshape(image, (self.args.size, self.args.size, size))
        # Transpose/flip to convert (x, y) grid indexing into image row order.
        image = cv2.transpose(image)
        image = cv2.flip(image, 0)
        # NOTE(review): hard-coded vertical crop to rows 90:610 — presumably
        # tuned for one specific self.args.size; confirm before reusing with
        # other resolutions.
        image = image[90:610, :]
        cv2.imwrite(os.path.join(self.args.folder, name), np.uint8(255 * image))
    def save_images(self):
        """
        Saves images after completion of the rendering algorithm:
        headlight shading, optional light-source shading, normals and depth
        (each gated by the corresponding self.args flag).
        """
        # Shading with the light at the camera: dot of view ray and normal.
        shade = np.sum(np.multiply(-self.unit_rays, self.normals), axis=1)
        shade = np.reshape(shade, (shade.shape[0], 1))
        # Background pixels (rays that reached max depth) rendered white.
        shade[self.depth_np == self.args.max_depth] = 1
        self.save(shade, 'shade.jpg', 1, True)
        # SHADE WITH LIGhT SOURCE
        if self.args.shade:
            self.get_lgth_rays()
            # Dot of surface-to-light ray and normal.
            shd_lgth = np.sum(np.multiply(self.ray_to_src, self.normals), axis=1)
            shd_lgth = np.reshape(shd_lgth, (shd_lgth.shape[0], 1))
            shd_lgth[self.depth_np == self.args.max_depth ] = 1
            self.save(shd_lgth, 'shade_src.jpg', 1, True)
        if self.args.normal:
            RGB_normals = self.final_gradients.copy()
            inds = (self.depth_np == self.args.max_depth)
            for j in range(3):
                # NOTE(review): relies on np.reshape returning a *view* of
                # the column so that the assignment below writes through
                # into RGB_normals — confirm for the actual array layout.
                new_arr = np.reshape(RGB_normals[:, j], (self.args.size * self.args.size, 1))
                new_arr[inds] = 1
            # Pure-black pixels (zero gradient) are remapped to white.
            black_pixels_mask = np.all(RGB_normals == [0, 0, 0], axis=-1)
            RGB_normals[black_pixels_mask] = np.array([1, 1, 1])
            self.save(RGB_normals, 'normals.jpg', 3, True)
        if self.args.depth:
            # Depth normalised to [0, 1] by max_depth; saved unnormalised
            # (normalize=False) since it is already in range.
            depth_normalized = np.copy(self.depth_np / self.args.max_depth)
            self.save(depth_normalized, 'depth_final.jpg', 1, False)
if __name__ == "__main__":
    # Render the NDF and write the shading/normal/depth images to disk.
    renderer_instance = Renderer()
    renderer_instance.run()
    renderer_instance.save_images()
|
[
"numpy.ones",
"cv2.transpose",
"numpy.linalg.norm",
"configs.config_loader.get_config",
"os.path.join",
"numpy.prod",
"numpy.meshgrid",
"numpy.multiply",
"numpy.copy",
"math.radians",
"numpy.transpose",
"numpy.insert",
"NDF_combine.predictRotGradientNDF",
"numpy.reshape",
"numpy.linspace",
"NDF_combine.predictRotNDF",
"numpy.repeat",
"trimesh.Trimesh",
"numpy.uint8",
"cv2.flip",
"numpy.dot",
"numpy.vstack",
"numpy.concatenate",
"numpy.all",
"os.makedirs",
"numpy.zeros",
"numpy.where",
"numpy.array",
"numpy.matmul",
"numpy.column_stack",
"NDF_combine.loadNDF"
] |
[((484, 507), 'configs.config_loader.get_config', 'cfg_loader.get_config', ([], {}), '()\n', (505, 507), True, 'import configs.config_loader as cfg_loader\n'), ((600, 644), 'os.makedirs', 'os.makedirs', (['self.args.folder'], {'exist_ok': '(True)'}), '(self.args.folder, exist_ok=True)\n', (611, 644), False, 'import os\n'), ((836, 912), 'numpy.linspace', 'np.linspace', (['(-self.args.screen_bound)', 'self.args.screen_bound', 'self.args.size'], {}), '(-self.args.screen_bound, self.args.screen_bound, self.args.size)\n', (847, 912), True, 'import numpy as np\n'), ((925, 1001), 'numpy.linspace', 'np.linspace', (['(-self.args.screen_bound)', 'self.args.screen_bound', 'self.args.size'], {}), '(-self.args.screen_bound, self.args.screen_bound, self.args.size)\n', (936, 1001), True, 'import numpy as np\n'), ((1017, 1049), 'numpy.meshgrid', 'np.meshgrid', (['x', 'y'], {'indexing': '"""ij"""'}), "(x, y, indexing='ij')\n", (1028, 1049), True, 'import numpy as np\n'), ((1223, 1246), 'numpy.column_stack', 'np.column_stack', (['(X, Y)'], {}), '((X, Y))\n', (1238, 1246), True, 'import numpy as np\n'), ((1269, 1326), 'numpy.insert', 'np.insert', (['points_list', '(2)', 'self.args.screen_depth'], {'axis': '(1)'}), '(points_list, 2, self.args.screen_depth, axis=1)\n', (1278, 1326), True, 'import numpy as np\n'), ((1512, 1547), 'numpy.array', 'np.array', (['self.args.cam_orientation'], {}), '(self.args.cam_orientation)\n', (1520, 1547), True, 'import numpy as np\n'), ((2675, 2694), 'numpy.array', 'np.array', (['[0, 0, 0]'], {}), '([0, 0, 0])\n', (2683, 2694), True, 'import numpy as np\n'), ((2716, 2746), 'numpy.reshape', 'np.reshape', (['temp_trans', '(1, 3)'], {}), '(temp_trans, (1, 3))\n', (2726, 2746), True, 'import numpy as np\n'), ((2761, 2814), 'numpy.concatenate', 'np.concatenate', (['(self.rot_matrix, temp_trans)'], {'axis': '(0)'}), '((self.rot_matrix, temp_trans), axis=0)\n', (2775, 2814), True, 'import numpy as np\n'), ((2924, 2964), 'numpy.reshape', 'np.reshape', 
(['self.args.cam_position', '(3,)'], {}), '(self.args.cam_position, (3,))\n', (2934, 2964), True, 'import numpy as np\n'), ((2985, 3025), 'numpy.concatenate', 'np.concatenate', (['(inp_trans, [1])'], {'axis': '(0)'}), '((inp_trans, [1]), axis=0)\n', (2999, 3025), True, 'import numpy as np\n'), ((3197, 3218), 'numpy.ones', 'np.ones', (['arr.shape[0]'], {}), '(arr.shape[0])\n', (3204, 3218), True, 'import numpy as np\n'), ((3236, 3276), 'numpy.reshape', 'np.reshape', (['append', '(append.shape[0], 1)'], {}), '(append, (append.shape[0], 1))\n', (3246, 3276), True, 'import numpy as np\n'), ((3295, 3332), 'numpy.concatenate', 'np.concatenate', (['(arr, append)'], {'axis': '(1)'}), '((arr, append), axis=1)\n', (3309, 3332), True, 'import numpy as np\n'), ((3654, 3673), 'numpy.array', 'np.array', (['[0, 0, 0]'], {}), '([0, 0, 0])\n', (3662, 3673), True, 'import numpy as np\n'), ((3699, 3737), 'numpy.vstack', 'np.vstack', (['(cam_loc, self.points_list)'], {}), '((cam_loc, self.points_list))\n', (3708, 3737), True, 'import numpy as np\n'), ((3852, 3892), 'numpy.transpose', 'np.transpose', (['screen_and_cam_hom', '(1, 0)'], {}), '(screen_and_cam_hom, (1, 0))\n', (3864, 3892), True, 'import numpy as np\n'), ((3936, 3983), 'numpy.matmul', 'np.matmul', (['self.trans_mat', 'screen_and_cam_hom_T'], {}), '(self.trans_mat, screen_and_cam_hom_T)\n', (3945, 3983), True, 'import numpy as np\n'), ((4047, 4101), 'numpy.transpose', 'np.transpose', (['screen_and_cam_hom_T_transformed', '(1, 0)'], {}), '(screen_and_cam_hom_T_transformed, (1, 0))\n', (4059, 4101), True, 'import numpy as np\n'), ((4596, 4653), 'numpy.reshape', 'np.reshape', (['self.screen_and_cam_transformed[0, :]', '(1, 3)'], {}), '(self.screen_and_cam_transformed[0, :], (1, 3))\n', (4606, 4653), True, 'import numpy as np\n'), ((4793, 4834), 'numpy.linalg.norm', 'np.linalg.norm', (['ray_vector'], {'ord': '(2)', 'axis': '(1)'}), '(ray_vector, ord=2, axis=1)\n', (4807, 4834), True, 'import numpy as np\n'), ((4854, 4912), 
'numpy.reshape', 'np.reshape', (['norm_ray', '(self.args.size * self.args.size, 1)'], {}), '(norm_ray, (self.args.size * self.args.size, 1))\n', (4864, 4912), True, 'import numpy as np\n'), ((5052, 5130), 'numpy.repeat', 'np.repeat', (['[self.args.light_position]', '(self.args.size * self.args.size)'], {'axis': '(0)'}), '([self.args.light_position], self.args.size * self.args.size, axis=0)\n', (5061, 5130), True, 'import numpy as np\n'), ((5195, 5230), 'numpy.linalg.norm', 'np.linalg.norm', (['rays'], {'ord': '(2)', 'axis': '(1)'}), '(rays, ord=2, axis=1)\n', (5209, 5230), True, 'import numpy as np\n'), ((5250, 5308), 'numpy.reshape', 'np.reshape', (['norm_ray', '(self.args.size * self.args.size, 1)'], {}), '(norm_ray, (self.args.size * self.args.size, 1))\n', (5260, 5308), True, 'import numpy as np\n'), ((5469, 5777), 'NDF_combine.loadNDF', 'NDF.loadNDF', ([], {'mode': '"""test"""', 'index': 'self.args.index', 'pointcloud_samples': 'self.args.pc_samples', 'exp_name': 'self.args.exp_name', 'data_dir': 'self.args.data_dir', 'split_file': 'self.args.split_file', 'sample_distribution': 'self.args.sample_ratio', 'sample_sigmas': 'self.args.sample_std_dev', 'res': 'self.args.input_res'}), "(mode='test', index=self.args.index, pointcloud_samples=self.\n args.pc_samples, exp_name=self.args.exp_name, data_dir=self.args.\n data_dir, split_file=self.args.split_file, sample_distribution=self.\n args.sample_ratio, sample_sigmas=self.args.sample_std_dev, res=self.\n args.input_res)\n", (5480, 5777), True, 'import NDF_combine as NDF\n'), ((5915, 5961), 'numpy.zeros', 'np.zeros', (['(self.args.size * self.args.size, 1)'], {}), '((self.args.size * self.args.size, 1))\n', (5923, 5961), True, 'import numpy as np\n'), ((5983, 6049), 'numpy.repeat', 'np.repeat', (['self.cam_trans', '(self.args.size * self.args.size)'], {'axis': '(0)'}), '(self.cam_trans, self.args.size * self.args.size, axis=0)\n', (5992, 6049), True, 'import numpy as np\n'), ((7429, 7462), 
'NDF_combine.predictRotGradientNDF', 'NDF.predictRotGradientNDF', (['points'], {}), '(points)\n', (7454, 7462), True, 'import NDF_combine as NDF\n'), ((7534, 7593), 'numpy.reshape', 'np.reshape', (['gradients', '(self.args.size * self.args.size, 3)'], {}), '(gradients, (self.args.size * self.args.size, 3))\n', (7544, 7593), True, 'import numpy as np\n'), ((7971, 8028), 'numpy.reshape', 'np.reshape', (['image', '(self.args.size, self.args.size, size)'], {}), '(image, (self.args.size, self.args.size, size))\n', (7981, 8028), True, 'import numpy as np\n'), ((8046, 8066), 'cv2.transpose', 'cv2.transpose', (['image'], {}), '(image)\n', (8059, 8066), False, 'import cv2\n'), ((8083, 8101), 'cv2.flip', 'cv2.flip', (['image', '(0)'], {}), '(image, 0)\n', (8091, 8101), False, 'import cv2\n'), ((8425, 8463), 'numpy.reshape', 'np.reshape', (['shade', '(shade.shape[0], 1)'], {}), '(shade, (shade.shape[0], 1))\n', (8435, 8463), True, 'import numpy as np\n'), ((2427, 2443), 'numpy.dot', 'np.dot', (['R_y', 'R_x'], {}), '(R_y, R_x)\n', (2433, 2443), True, 'import numpy as np\n'), ((6312, 6337), 'NDF_combine.predictRotNDF', 'NDF.predictRotNDF', (['points'], {}), '(points)\n', (6329, 6337), True, 'import NDF_combine as NDF\n'), ((6365, 6427), 'numpy.reshape', 'np.reshape', (['dists_points', '(self.args.size * self.args.size, 1)'], {}), '(dists_points, (self.args.size * self.args.size, 1))\n', (6375, 6427), True, 'import numpy as np\n'), ((8156, 8192), 'os.path.join', 'os.path.join', (['self.args.folder', 'name'], {}), '(self.args.folder, name)\n', (8168, 8192), False, 'import os\n'), ((8194, 8215), 'numpy.uint8', 'np.uint8', (['(255 * image)'], {}), '(255 * image)\n', (8202, 8215), True, 'import numpy as np\n'), ((8357, 8399), 'numpy.multiply', 'np.multiply', (['(-self.unit_rays)', 'self.normals'], {}), '(-self.unit_rays, self.normals)\n', (8368, 8399), True, 'import numpy as np\n'), ((8769, 8813), 'numpy.reshape', 'np.reshape', (['shd_lgth', '(shd_lgth.shape[0], 1)'], {}), 
'(shd_lgth, (shd_lgth.shape[0], 1))\n', (8779, 8813), True, 'import numpy as np\n'), ((9270, 9311), 'numpy.all', 'np.all', (['(RGB_normals == [0, 0, 0])'], {'axis': '(-1)'}), '(RGB_normals == [0, 0, 0], axis=-1)\n', (9276, 9311), True, 'import numpy as np\n'), ((9357, 9376), 'numpy.array', 'np.array', (['[1, 1, 1]'], {}), '([1, 1, 1])\n', (9365, 9376), True, 'import numpy as np\n'), ((9496, 9540), 'numpy.copy', 'np.copy', (['(self.depth_np / self.args.max_depth)'], {}), '(self.depth_np / self.args.max_depth)\n', (9503, 9540), True, 'import numpy as np\n'), ((1073, 1089), 'numpy.prod', 'np.prod', (['X.shape'], {}), '(X.shape)\n', (1080, 1089), True, 'import numpy as np\n'), ((1116, 1132), 'numpy.prod', 'np.prod', (['Y.shape'], {}), '(Y.shape)\n', (1123, 1132), True, 'import numpy as np\n'), ((6456, 6498), 'numpy.where', 'np.where', (['(dists_points < self.args.epsilon)'], {}), '(dists_points < self.args.epsilon)\n', (6464, 6498), True, 'import numpy as np\n'), ((6530, 6567), 'numpy.where', 'np.where', (['(depth > self.args.max_depth)'], {}), '(depth > self.args.max_depth)\n', (6538, 6567), True, 'import numpy as np\n'), ((8694, 8736), 'numpy.multiply', 'np.multiply', (['self.ray_to_src', 'self.normals'], {}), '(self.ray_to_src, self.normals)\n', (8705, 8736), True, 'import numpy as np\n'), ((9135, 9202), 'numpy.reshape', 'np.reshape', (['RGB_normals[:, j]', '(self.args.size * self.args.size, 1)'], {}), '(RGB_normals[:, j], (self.args.size * self.args.size, 1))\n', (9145, 9202), True, 'import numpy as np\n'), ((2861, 2883), 'numpy.array', 'np.array', (['[0, 0, 0, 1]'], {}), '([0, 0, 0, 1])\n', (2869, 2883), True, 'import numpy as np\n'), ((4250, 4317), 'trimesh.Trimesh', 'trimesh.Trimesh', ([], {'vertices': 'self.screen_and_cam_transformed', 'faces': '[]'}), '(vertices=self.screen_and_cam_transformed, faces=[])\n', (4265, 4317), False, 'import trimesh\n'), ((1620, 1649), 'math.radians', 'math.radians', (['euler_angles[0]'], {}), '(euler_angles[0])\n', (1632, 1649), 
False, 'import math\n'), ((1732, 1761), 'math.radians', 'math.radians', (['euler_angles[0]'], {}), '(euler_angles[0])\n', (1744, 1761), False, 'import math\n'), ((1773, 1802), 'math.radians', 'math.radians', (['euler_angles[0]'], {}), '(euler_angles[0])\n', (1785, 1802), False, 'import math\n'), ((1867, 1896), 'math.radians', 'math.radians', (['euler_angles[1]'], {}), '(euler_angles[1])\n', (1879, 1896), False, 'import math\n'), ((1911, 1940), 'math.radians', 'math.radians', (['euler_angles[1]'], {}), '(euler_angles[1])\n', (1923, 1940), False, 'import math\n'), ((2058, 2087), 'math.radians', 'math.radians', (['euler_angles[1]'], {}), '(euler_angles[1])\n', (2070, 2087), False, 'import math\n'), ((2152, 2181), 'math.radians', 'math.radians', (['euler_angles[2]'], {}), '(euler_angles[2])\n', (2164, 2181), False, 'import math\n'), ((2264, 2293), 'math.radians', 'math.radians', (['euler_angles[2]'], {}), '(euler_angles[2])\n', (2276, 2293), False, 'import math\n'), ((2305, 2334), 'math.radians', 'math.radians', (['euler_angles[2]'], {}), '(euler_angles[2])\n', (2317, 2334), False, 'import math\n'), ((1662, 1691), 'math.radians', 'math.radians', (['euler_angles[0]'], {}), '(euler_angles[0])\n', (1674, 1691), False, 'import math\n'), ((2014, 2043), 'math.radians', 'math.radians', (['euler_angles[1]'], {}), '(euler_angles[1])\n', (2026, 2043), False, 'import math\n'), ((2194, 2223), 'math.radians', 'math.radians', (['euler_angles[2]'], {}), '(euler_angles[2])\n', (2206, 2223), False, 'import math\n')]
|
import os, sys
import numpy as np
from copy import deepcopy
from warnings import warn
from .Mesh import Mesh
from .GeometricPath import *
from Florence.Tensor import totuple, unique2d
__all__ = ['HarvesterPatch', 'SubdivisionArc', 'SubdivisionCircle', 'QuadBall',
'QuadBallSphericalArc']
"""
A series of custom meshes
"""
def HarvesterPatch(ndisc=20, nradial=4, show_plot=False):
    """Creates a custom mesh for an energy harvester patch. [Not to be modified]

        ndisc: [int] number of discretisation in c
        ndradial: [int] number of discretisation in radial directions for different
            components of harevester

    All coordinates below are hard-coded for one specific harvester design.
    """
    # Circle through p1/p2 with its centre slightly off the patch edge.
    center = np.array([30.6979,20.5])
    p1 = np.array([30.,20.])
    p2 = np.array([30.,21.])
    p1line = p1 - center
    p2line = p2 - center
    radius = np.linalg.norm(p1line)
    pp = np.array([center[0],center[1]+radius])
    y_line = pp - center
    # NOTE(review): the angle uses the norm of the *elementwise* product
    # (y_line*p1line) rather than a dot product — verify this is intended
    # before reusing the formula elsewhere.
    start_angle = -np.pi/2. - np.arccos(np.linalg.norm(y_line*p1line)/np.linalg.norm(y_line)/np.linalg.norm(p1line))
    end_angle = np.pi/2. + np.arccos(np.linalg.norm(y_line*p1line)/np.linalg.norm(y_line)/np.linalg.norm(p1line))
    points = np.array([p1,p2,center])
    # nradial = 4
    # Quad arc plus a triangular filler between the arc and the chord p1-p2.
    mesh = Mesh()
    mesh.Arc(element_type="quad", radius=radius, start_angle=start_angle,
        end_angle=end_angle, nrad=nradial, ncirc=ndisc, center=(center[0],center[1]), refinement=True)
    mesh1 = Mesh()
    mesh1.Triangle(element_type="quad",npoints=nradial, c1=totuple(center), c2=totuple(p1), c3=totuple(p2))
    mesh += mesh1
    # Hollow arc forming the curved band of the harvester patch.
    mesh_patch = Mesh()
    mesh_patch.HollowArc(ncirc=ndisc, nrad=nradial, center=(-7.818181,44.22727272),
        start_angle=np.arctan(44.22727272/-7.818181), end_angle=np.arctan(-24.22727272/37.818181),
        element_type="quad", inner_radius=43.9129782, outer_radius=44.9129782)
    # Second triangular filler joining the chord to the band's first point.
    mesh3 = Mesh()
    mesh3.Triangle(element_type="quad",npoints=nradial, c2=totuple(p1), c3=totuple(p2), c1=(mesh_patch.points[0,0], mesh_patch.points[0,1]))
    mesh += mesh3
    mesh += mesh_patch
    # Extrude the assembled 2D patch into a 3D hex mesh of length 40.
    mesh.Extrude(nlong=ndisc,length=40)
    if show_plot:
        mesh.SimplePlot()
    return mesh
def CurvedPlate(ncirc=2, nlong=20, show_plot=False):
    """Builds a custom hex mesh of a plate with circular (filleted) edges.

    ncirc:      discretisation around the circular fillets
    nlong:      discretisation along the plate length (X direction)
    show_plot:  if True, display the final mesh
    """
    # Quarter-circle fillet template, copied and positioned on both ends.
    fillet = Mesh()
    fillet.Arc(element_type="quad", nrad=ncirc, ncirc=ncirc, radius=5)
    fillet_right = deepcopy(fillet)
    fillet_right.points[:, 1] += 15
    fillet_right.points[:, 0] += 95
    fillet_left = deepcopy(fillet)
    fillet_left.points[:, 1] += 15
    # Mirror about the Y axis, then shift so the fillet sits at x = 5.
    fillet_left.points[:, 0] *= -1.
    fillet_left.points[:, 0] += 5.
    # Two stacked rectangular strips forming the plate body.
    strip_top = Mesh()
    strip_top.Rectangle(element_type="quad", lower_left_point=(5, 15),
        upper_right_point=(95, 20), ny=ncirc, nx=nlong)
    strip_bottom = deepcopy(strip_top)
    strip_bottom.points[:, 1] -= 5.
    # Square corner blocks beneath each fillet.
    corner_left = Mesh()
    corner_left.Square(element_type="quad", lower_left_point=(0, 10),
        side_length=5, nx=ncirc, ny=ncirc)
    corner_right = deepcopy(corner_left)
    corner_right.points[:, 0] += 95
    # Assembly order matches the original so node numbering is unchanged.
    plate = strip_top + strip_bottom + fillet_right + fillet_left + corner_left + corner_right
    # Extrude to a 0.5-thick solid, then stack a shifted copy on top.
    plate.Extrude(length=0.5, nlong=1)
    upper_layer = deepcopy(plate)
    upper_layer.points[:, 2] += 0.5
    plate += upper_layer
    if show_plot:
        plate.SimplePlot()
    return plate
def SubdivisionArc(center=(0.,0.), radius=1., nrad=16, ncirc=40,
    start_angle=0., end_angle=np.pi/2., element_type="tri", refinement=False, refinement_level=2):
    """Creates a mesh on circle using midpoint subdivision.
    This function is internally called from Mesh.Circle if
    'midpoint_subdivision' algorithm is selected

    center:             (tuple) centre of the arc
    radius:             (float) arc radius
    nrad:               (int) divisions in the radial direction
    ncirc:              (int) divisions in the circumferential direction
    start_angle/end_angle: only the default quarter circle (0 to pi/2) is supported
    element_type:       "tri" or "quad"
    refinement:         (bool) whether to refine the resulting mesh
    refinement_level:   (int) level of refinement
    """
    # BUGFIX: the original guard used 'and', so passing a single non-default
    # angle silently produced a quarter-circle mesh instead of raising.
    # 'or' rejects any deviation from the supported quarter circle.
    if start_angle != 0. or end_angle != np.pi/2.:
        raise ValueError("Subdivision based arc only produces meshes for a quarter-circle arc for now")

    r = float(radius)
    h_r = float(radius)/2.
    nx = int(ncirc/4.)
    ny = int(nrad/2.)

    if nx < 3:
        warn("Number of division in circumferential direction too low")

    # Base quad mesh on the bi-unit square; it is mapped onto the arc region.
    mesh = Mesh()
    mesh.Rectangle(element_type="quad", lower_left_point=(-1.,-1.),
        upper_right_point=(1.,1.), nx=nx, ny=ny)

    uv = np.array([
        [-1.,-1],
        [1.,-1],
        [1.,1],
        [-1.,1],
        ])

    t = np.pi/4.
    # Corner points of the mapped region (half-radius chord up to the arc).
    end_points = np.array([
        [0.,h_r*np.sin(t)],
        [h_r*np.cos(t),h_r*np.sin(t)],
        [r*np.cos(t),r*np.sin(t)],
        [0.,radius],
        ])

    new_end_points = []
    new_end_points.append(end_points[0,:])
    new_end_points.append(end_points[1,:])
    new_end_points.append(end_points[2,:])

    # Sample the physical arc between pi/4 and pi/2 for the curved boundary.
    tt = np.linspace(np.pi/4,np.pi/2,nx)
    x = r*np.cos(tt)
    y = r*np.sin(tt)
    interp_p = np.vstack((x,y)).T

    for i in range(1,len(x)-1):
        new_end_points.append([x[i], y[i]])
    new_end_points.append(end_points[3,:])
    new_end_points = np.array(new_end_points)

    new_uv = []
    new_uv.append(uv[0,:])
    new_uv.append(uv[1,:])
    new_uv.append(uv[2,:])

    # Total length of the sampled boundary, used to distribute uv
    # coordinates proportionally along the top edge of the parametric square.
    L = 0.
    for i in range(1,interp_p.shape[0]):
        L += np.linalg.norm(interp_p[i,:] - interp_p[i-1,:])

    interp_uv = []
    last_uv = uv[2,:]
    for i in range(1,interp_p.shape[0]-1):
        val = (uv[3,:] - uv[2,:])*np.linalg.norm(interp_p[i,:] - interp_p[i-1,:])/L + last_uv
        last_uv = np.copy(val)
        interp_uv.append(val)
    interp_uv = np.array(interp_uv)

    new_uv = np.array(new_uv)
    if interp_uv.shape[0] !=0:
        new_uv = np.vstack((new_uv,interp_uv))
    new_uv = np.vstack((new_uv,uv[3,:]))

    from Florence.FunctionSpace import MeanValueCoordinateMapping
    new_points = np.zeros_like(mesh.points)
    # All nodes barring the ones lying on the arc
    for i in range(mesh.nnode - nx - 1):
        point = MeanValueCoordinateMapping(mesh.points[i,:], new_uv, new_end_points)
        new_points[i,:] = point
    # The nodes on the arc are not exactly on the arc
    # so they need to be snapped/clipped
    tt = np.linspace(np.pi/4,np.pi/2,nx+1)[::-1]
    x = r*np.cos(tt)
    y = r*np.sin(tt)
    new_points[mesh.nnode-nx-1:,:] = np.vstack((x,y)).T
    mesh.points = new_points

    # Mirror the mapped patch about the 45-degree line to cover the arc.
    rmesh = deepcopy(mesh)
    rmesh.points = mesh.Rotate(angle=-np.pi/2., copy=True)
    rmesh.points[:,1] *= -1.
    mesh += rmesh

    mesh.LaplacianSmoothing(niter=10)

    # Inner square patch joining the two mapped patches at the centre.
    qmesh = Mesh()
    qmesh.Rectangle(element_type="quad", lower_left_point=(0.0,0.0),
        upper_right_point=(h_r*np.cos(t),h_r*np.sin(t)),
        nx=nx,
        ny=nx)
    mesh += qmesh

    NodeSliderSmootherArc(mesh, niter=20)

    mesh.points[:,0] += center[0]
    mesh.points[:,1] += center[1]

    if refinement:
        mesh.Refine(level=refinement_level)

    if element_type == "tri":
        # Suppress the verbose output of the conversion routine.
        sys.stdout = open(os.devnull, "w")
        mesh.ConvertQuadsToTris()
        sys.stdout = sys.__stdout__

    return mesh
def SubdivisionCircle(center=(0.,0.), radius=1., nrad=16, ncirc=40,
    element_type="tri", refinement=False, refinement_level=2):
    """Creates a mesh on circle using midpoint subdivision.
    This function is internally called from Mesh.Circle if
    'midpoint_subdivision' algorithm is selected

    center:             (tuple) centre of the circle
    radius:             (float) circle radius
    nrad:               (int) divisions in the radial direction
    ncirc:              (int) divisions in the circumferential direction
    element_type:       "tri" or "quad"
    refinement:         (bool) whether to refine the resulting mesh
    refinement_level:   (int) level of refinement

    Improvement over the original: the unused ``edge_points`` computation
    (dead code) has been removed; behavior is otherwise unchanged.
    """
    r = float(radius)
    h_r = float(radius)/2.
    nx = int(ncirc/4.)
    ny = int(nrad/2.)

    if nx < 3:
        warn("Number of division in circumferential direction too low")

    # Base quad mesh on the bi-unit square, later mapped onto one quadrant.
    mesh = Mesh()
    mesh.Rectangle(element_type="quad", lower_left_point=(-1.,-1.),
        upper_right_point=(1.,1.), nx=nx, ny=ny)

    uv = np.array([
        [-1.,-1],
        [1.,-1],
        [1.,1],
        [-1.,1],
        ])

    t = np.pi/4
    # Corner points of the mapped quadrant (half-radius chord up to the arc).
    end_points = np.array([
        [-h_r*np.cos(t),h_r*np.sin(t)],
        [h_r*np.cos(t),h_r*np.sin(t)],
        [r*np.cos(t),r*np.sin(t)],
        [-r*np.cos(t),r*np.sin(t)],
        ])

    new_end_points = []
    new_end_points.append(end_points[0,:])
    new_end_points.append(end_points[1,:])
    new_end_points.append(end_points[2,:])

    # Sample the physical arc between pi/4 and 3*pi/4 for the curved edge.
    tt = np.linspace(np.pi/4,3*np.pi/4,nx)
    x = r*np.cos(tt)
    y = r*np.sin(tt)
    interp_p = np.vstack((x,y)).T

    for i in range(1,len(x)-1):
        new_end_points.append([x[i], y[i]])
    new_end_points.append(end_points[3,:])
    new_end_points = np.array(new_end_points)

    new_uv = []
    new_uv.append(uv[0,:])
    new_uv.append(uv[1,:])
    new_uv.append(uv[2,:])

    # Total arc length, used to distribute uv coordinates proportionally
    # along the top edge of the parametric square.
    L = 0.
    for i in range(1,interp_p.shape[0]):
        L += np.linalg.norm(interp_p[i,:] - interp_p[i-1,:])

    interp_uv = []
    last_uv = uv[2,:]
    for i in range(1,interp_p.shape[0]-1):
        val = (uv[3,:] - uv[2,:])*np.linalg.norm(interp_p[i,:] - interp_p[i-1,:])/L + last_uv
        last_uv = np.copy(val)
        interp_uv.append(val)
    interp_uv = np.array(interp_uv)

    new_uv = np.array(new_uv)
    if interp_uv.shape[0] !=0:
        new_uv = np.vstack((new_uv,interp_uv))
    new_uv = np.vstack((new_uv,uv[3,:]))

    from Florence.FunctionSpace import MeanValueCoordinateMapping
    new_points = np.zeros_like(mesh.points)
    for i in range(mesh.nnode):
        point = MeanValueCoordinateMapping(mesh.points[i,:], new_uv, new_end_points)
        new_points[i,:] = point
    mesh.points = new_points

    # Replicate the mapped quadrant three more times by 90-degree rotations.
    rmesh = deepcopy(mesh)
    rmesh.points = mesh.Rotate(angle=np.pi/2., copy=True)
    mesh += rmesh
    rmesh.points = rmesh.Rotate(angle=np.pi/2., copy=True)
    mesh += rmesh
    rmesh.points = rmesh.Rotate(angle=np.pi/2., copy=True)
    mesh += rmesh

    mesh.LaplacianSmoothing(niter=10)

    # Central square patch joining the four quadrants.
    qmesh = Mesh()
    qmesh.Rectangle(element_type="quad", lower_left_point=(-h_r*np.cos(t),-h_r*np.sin(t)),
        upper_right_point=(h_r*np.cos(t),h_r*np.sin(t)),
        nx=nx,
        ny=nx)
    mesh += qmesh

    mesh.LaplacianSmoothing(niter=20)

    mesh.points[:,0] += center[0]
    mesh.points[:,1] += center[1]

    if refinement:
        mesh.Refine(level=refinement_level)

    if element_type == "tri":
        # Suppress the verbose output of the conversion routine.
        sys.stdout = open(os.devnull, "w")
        mesh.ConvertQuadsToTris()
        sys.stdout = sys.__stdout__

    return mesh
def QuadBall(center=(0.,0.,0.), radius=1., n=10, element_type="hex"):
    """Creates a fully hexahedral mesh on sphere using midpoint subdivision algorithm
    by creating a cube and spherifying it using PostMesh's projection schemes

    inputs:
        center:       (tuple of 3 floats) centre of the sphere
        radius:       (float) sphere radius
        n:            [int] number of divsion in every direction.
                      Given that this implementation is based on
                      high order bases different divisions in
                      different directions is not possible
        element_type: "hex" (default) or "tet"

    Raises ImportError if Florence is unavailable, ValueError for invalid
    n or center.

    Improvement over the original: stdout redirection and the temporary
    CAD file are now cleaned up in a try/finally, so an exception during
    the solve no longer leaks the devnull handle, leaves stdout redirected,
    or leaves 'sphere_cad_file.igs' behind.
    """
    try:
        from Florence import Mesh, BoundaryCondition, DisplacementFormulation, FEMSolver, LinearSolver
        from Florence import LinearElastic, NeoHookean
        from Florence.Tensor import prime_number_factorisation
    except ImportError:
        raise ImportError("This function needs Florence's core support")

    n = int(n)
    if n > 50:
        # Values beyond this result in >1M DoFs due to internal prime factoristaion splitting
        raise ValueError("The value of n={} (division in each direction) is too high".format(str(n)))

    if not isinstance(center,tuple):
        raise ValueError("The center of the circle should be given in a tuple with two elements (x,y,z)")
    if len(center) != 3:
        raise ValueError("The center of the circle should be given in a tuple with two elements (x,y,z)")

    # Split n into prime factors so the projection is applied at
    # successively lower polynomial orders (high p first).
    if n == 2 or n==3 or n==5 or n==7:
        ps = [n]
    else:
        def factorise_all(n):
            # Clamp to the smallest prime; bump n when factorisation is trivial.
            if n < 2:
                n = 2
            factors = prime_number_factorisation(n)
            if len(factors) == 1 and n > 2:
                n += 1
                factors = prime_number_factorisation(n)
            return factors

        factors = factorise_all(n)
        ps = []
        for factor in factors:
            ps += factorise_all(factor)

    # Do high ps first
    ps = np.sort(ps)[::-1].tolist()
    niter = len(ps)

    # IGS file for sphere with radius 1000.
    sphere_igs_file_content = SphereIGS()
    with open("sphere_cad_file.igs", "w") as f:
        f.write(sphere_igs_file_content)

    # Silence Florence's verbose output while solving; restored in 'finally'.
    devnull = open(os.devnull, "w")
    sys.stdout = devnull
    try:
        ndim = 3
        scale = 1000.
        condition = 1.e020

        mesh = Mesh()
        material = LinearElastic(ndim, mu=1., lamb=4.)
        # Keep the solver iterative for low memory consumption. All boundary points are Dirichlet BCs
        # so they will be exact anyway
        solver = LinearSolver(linear_solver="iterative", linear_solver_type="cg2",
            dont_switch_solver=True, iterative_solver_tolerance=1e-9)

        for it in range(niter):
            if it == 0:
                # Start from a unit cube and project it onto the sphere.
                mesh.Parallelepiped(element_type="hex", nx=1, ny=1, nz=1, lower_left_rear_point=(-0.5,-0.5,-0.5),
                    upper_right_front_point=(0.5,0.5,0.5))
            mesh.GetHighOrderMesh(p=ps[it], equally_spaced=True)

            boundary_condition = BoundaryCondition()
            boundary_condition.SetCADProjectionParameters(
                "sphere_cad_file.igs",
                scale=scale,condition=condition, project_on_curves=True, solve_for_planar_faces=True,
                modify_linear_mesh_on_projection=True, fix_dof_elsewhere=False
                )
            boundary_condition.GetProjectionCriteria(mesh)

            formulation = DisplacementFormulation(mesh)
            fem_solver = FEMSolver(
                number_of_load_increments=1,
                analysis_nature="linear",
                force_not_computing_mesh_qualities=True,
                report_log_level=0,
                optimise=True)

            # Solve the elasticity problem and apply the projection displacements.
            solution = fem_solver.Solve(formulation=formulation, mesh=mesh,
                material=material, boundary_condition=boundary_condition, solver=solver)
            mesh.points += solution.sol[:,:,-1]

        mesh = mesh.ConvertToLinearMesh()

        if not np.isclose(radius,1):
            mesh.points *= radius

        mesh.points[:,0] += center[0]
        mesh.points[:,1] += center[1]
        mesh.points[:,2] += center[2]

        if element_type == "tet":
            mesh.ConvertHexesToTets()
    finally:
        # Always restore stdout, close the devnull handle and remove the
        # temporary CAD file, even if the solve above fails.
        sys.stdout = sys.__stdout__
        devnull.close()
        if os.path.isfile("sphere_cad_file.igs"):
            os.remove("sphere_cad_file.igs")

    return mesh
def QuadBallSurface(center=(0.,0.,0.), radius=1., n=10, element_type="quad"):
    """Creates a surface quad mesh on sphere using midpoint subdivision algorithm
    by creating a cube and spherifying it using PostMesh's projection schemes.
    Unlike the volume QuadBall method there is no restriction on number of divisions
    here as no system of equations is solved

    inputs:
        n: [int] number of divsion in every direction.
            Given that this implementation is based on
            high order bases different divisions in
            different directions is not possible
    """
    try:
        from Florence import Mesh, BoundaryCondition, DisplacementFormulation, FEMSolver, LinearSolver
        from Florence import LinearElastic, NeoHookean
        from Florence.Tensor import prime_number_factorisation
    except ImportError:
        raise ImportError("This function needs Florence's core support")
    n = int(n)
    if not isinstance(center,tuple):
        raise ValueError("The center of the circle should be given in a tuple with two elements (x,y,z)")
    if len(center) != 3:
        raise ValueError("The center of the circle should be given in a tuple with two elements (x,y,z)")
    # Split n into prime factors; the CAD projection is applied once per
    # factor, at successively lower polynomial orders (high p first).
    if n == 2 or n==3 or n==5 or n==7:
        ps = [n]
    else:
        def factorise_all(n):
            # Clamp to the smallest prime; bump n when factorisation is trivial.
            if n < 2:
                n = 2
            factors = prime_number_factorisation(n)
            if len(factors) == 1 and n > 2:
                n += 1
                factors = prime_number_factorisation(n)
            return factors
        factors = factorise_all(n)
        ps = []
        for factor in factors:
            ps +=factorise_all(factor)
    # Do high ps first
    ps = np.sort(ps)[::-1].tolist()
    niter = len(ps)
    # Write the CAD description (sphere of radius 1000) to a temporary file.
    sphere_igs_file_content = SphereIGS()
    with open("sphere_cad_file.igs", "w") as f:
        f.write(sphere_igs_file_content)
    # NOTE(review): this devnull handle is never closed, and if an exception
    # is raised below, stdout stays redirected and the temp file remains.
    sys.stdout = open(os.devnull, "w")
    ndim = 3
    scale = 1000.
    condition = 1.e020
    mesh = Mesh()
    material = LinearElastic(ndim, mu=1., lamb=4.)
    for it in range(niter):
        if it == 0:
            # Start from a unit cube, take its surface, and project it.
            mesh.Parallelepiped(element_type="hex", nx=1, ny=1, nz=1, lower_left_rear_point=(-0.5,-0.5,-0.5),
                upper_right_front_point=(0.5,0.5,0.5))
            mesh = mesh.CreateSurface2DMeshfrom3DMesh()
            mesh.GetHighOrderMesh(p=ps[it], equally_spaced=True)
            mesh = mesh.CreateDummy3DMeshfrom2DMesh()
            formulation = DisplacementFormulation(mesh)
        else:
            mesh.GetHighOrderMesh(p=ps[it], equally_spaced=True)
            mesh = mesh.CreateDummy3DMeshfrom2DMesh()
        boundary_condition = BoundaryCondition()
        boundary_condition.SetCADProjectionParameters(
            "sphere_cad_file.igs",
            scale=scale,condition=condition,
            project_on_curves=True,
            solve_for_planar_faces=True,
            modify_linear_mesh_on_projection=True,
            fix_dof_elsewhere=False
            )
        boundary_condition.GetProjectionCriteria(mesh)
        # Apply the projection displacements directly to the mesh points;
        # no FEM solve is performed in this surface variant.
        nodesDBC, Dirichlet = boundary_condition.PostMeshWrapper(formulation, mesh, None, None, FEMSolver())
        mesh.points[nodesDBC.ravel(),:] += Dirichlet
        mesh = mesh.CreateSurface2DMeshfrom3DMesh()
    mesh = mesh.ConvertToLinearMesh()
    os.remove("sphere_cad_file.igs")
    if not np.isclose(radius,1):
        mesh.points *= radius
    mesh.points[:,0] += center[0]
    mesh.points[:,1] += center[1]
    mesh.points[:,2] += center[2]
    if element_type == "tri":
        mesh.ConvertQuadsToTris()
    sys.stdout = sys.__stdout__
    return mesh
def QuadBallSphericalArc(center=(0.,0.,0.), inner_radius=9., outer_radius=10., n=10, nthick=1,
    element_type="hex", cut_threshold=None, portion=1./8.):
    """Similar to QuadBall but hollow and creates only 1/8th or 1/4th or 1/2th of the sphere.
    Starting and ending angles are not supported. Radial division (nthick: to be consistent
    with SphericalArc method of Mesh class) is supported
    input:
        cut_threshold               [float] cutting threshold for element removal since this function is based
                                    QuadBall. Ideal value is zero, so prescribe a value as close to zero
                                    as possible, however that might not always be possible as the cut
                                    might take remove some wanted elements [default = -0.01]
        portion                     [float] portion of the sphere to take. Can only be 1/8., 1/4., 1/2.
    """
    assert inner_radius < outer_radius
    # Quad surface mesh of the full unit ball; cut down and scaled radially below
    mm = QuadBallSurface(n=n, element_type=element_type)
    # Bounding-box half-extent chosen safely larger than the sphere for the cuts
    offset = outer_radius*2.
    if cut_threshold is None:
        cut_threshold = -0.01
    # Remove the unwanted part of the ball surface with an axis-aligned box;
    # cut_threshold (slightly negative) keeps elements sitting on the cut planes
    if portion == 1./8.:
        mm.RemoveElements(np.array([ [ cut_threshold, cut_threshold, cut_threshold], [ offset, offset, offset]]))
    elif portion == 1./4.:
        mm.RemoveElements(np.array([ [ cut_threshold, cut_threshold, -offset], [ offset, offset, offset]]))
    elif portion == 1./2.:
        mm.RemoveElements(np.array([ [ cut_threshold, -offset, -offset], [ offset, offset, offset]]))
    else:
        raise ValueError("The value of portion can only be 1/8., 1/4. or 1/2.")
    # Radial stations from inner to outer radius: nthick layers of hexes
    radii = np.linspace(inner_radius, outer_radius, nthick+1)
    mesh = Mesh()
    mesh.element_type = "hex"
    mesh.nelem = 0
    mesh.nnode = 0
    for i in range(nthick):
        # Two copies of the cut surface scaled to two consecutive radii
        mm1, mm2 = deepcopy(mm), deepcopy(mm)
        if not np.isclose(radii[i],1):
            mm1.points *= radii[i]
        if not np.isclose(radii[i+1],1):
            mm2.points *= radii[i+1]
        if i == 0:
            # First layer: hex connectivity joins inner surface to the next surface,
            # with the outer copy's node numbers offset by the inner copy's node count
            elements = np.hstack((mm1.elements, mm1.nnode + mm2.elements)).astype(np.int64)
            mesh.elements = np.copy(elements)
            mesh.points = np.vstack((mm1.points, mm2.points))
        else:
            # Subsequent layers reuse the outer face (columns 4:) of the previous
            # layer's hexes as their inner face, so points are not duplicated
            elements = np.hstack((mesh.elements[(i-1)*mm2.nelem:i*mm2.nelem,4:],
                mesh.nnode + mm2.elements)).astype(np.int64)
            mesh.elements = np.vstack((mesh.elements, elements))
            mesh.points = np.vstack((mesh.points, mm2.points))
        mesh.nelem = mesh.elements.shape[0]
        mesh.nnode = mesh.points.shape[0]
    mesh.elements = np.ascontiguousarray(mesh.elements, dtype=np.int64)
    mesh.nelem = mesh.elements.shape[0]
    mesh.nnode = mesh.points.shape[0]
    mesh.GetBoundaryFaces()
    mesh.GetBoundaryEdges()
    # Translate the finished shell to the requested centre
    mesh.points[:,0] += center[0]
    mesh.points[:,1] += center[1]
    mesh.points[:,2] += center[2]
    return mesh
def Torus(show_plot=False):
    """Custom mesh for torus
    """
    raise NotImplementedError("Not fully implemented yet")
    # NOTE(review): everything below the raise above is unreachable draft code
    # kept for future work. As written the loop reads points[i+1,:] for
    # i in range(nlong+1), which would index past the end on the last
    # iteration — to be fixed before enabling this function.
    # MAKE TORUS WORK
    from copy import deepcopy
    from numpy.linalg import norm
    mesh = Mesh()
    # Quad disc cross-section of the torus tube
    mesh.Circle(element_type="quad", ncirc=2, nrad=2)
    tmesh = deepcopy(mesh)
    # Sweep path for the extrusion
    arc = GeometricArc(start=(10,10,8),end=(10,10,-8))
    # arc.GeometricArc()
    nlong = 10
    points = mesh.Extrude(path=arc, nlong=nlong)
    # mesh.SimplePlot()
    # print points
    # elem_nodes = tmesh.elements[0,:]
    # p1 = tmesh.points[elem_nodes[0],:]
    # p2 = tmesh.points[elem_nodes[1],:]
    # p3 = tmesh.points[elem_nodes[2],:]
    # p4 = tmesh.points[elem_nodes[3],:]
    # E1 = np.append(p2 - p1, 0.0)
    # E2 = np.append(p4 - p1, 0.0)
    # E3 = np.array([0,0,1.])
    # E1 /= norm(E1)
    # E2 /= norm(E2)
    # # print E1,E2,E3
    # elem_nodes = mesh.elements[0,:]
    # p1 = mesh.points[elem_nodes[0],:]
    # p2 = mesh.points[elem_nodes[1],:]
    # p3 = mesh.points[elem_nodes[2],:]
    # p4 = mesh.points[elem_nodes[3],:]
    # p5 = mesh.points[elem_nodes[4],:]
    # e1 = p2 - p1
    # e2 = p4 - p1
    # e3 = p5 - p1
    # e1 /= norm(e1)
    # e2 /= norm(e2)
    # e3 /= norm(e3)
    # # print e1,e2,e3
    # # TRANSFORMATION MATRIX
    # Q = np.array([
    #     [np.einsum('i,i',e1,E1), np.einsum('i,i',e1,E2), np.einsum('i,i',e1,E3)],
    #     [np.einsum('i,i',e2,E1), np.einsum('i,i',e2,E2), np.einsum('i,i',e2,E3)],
    #     [np.einsum('i,i',e3,E1), np.einsum('i,i',e3,E2), np.einsum('i,i',e3,E3)]
    #     ])
    # mesh.points = np.dot(mesh.points,Q.T)
    # points = np.dot(points,Q)
    # E1 = np.array([1,0,0.])
    E3 = np.array([0.,0.,1.])
    nnode_2D = tmesh.points.shape[0]
    # Orient each extruded cross-section along the local tangent of the path
    for i in range(nlong+1):
        # e1 = points[i,:][None,:]/norm(points[i,:])
        # Q = np.dot(E1[:,None],e1)
        # vpoints = np.dot(points,Q)
        e3 = points[i+1,:] - points[i,:]; e3 /= norm(e3)
        Q = np.dot(e3[:,None],E3[None,:])
        # print Q
        # print np.dot(Q,points[i,:][:,None])
        vpoints = np.dot(points,Q)
        # print current_points
        mesh.points[nnode_2D*i:nnode_2D*(i+1),:2] = tmesh.points + points[i,:2]
        mesh.points[nnode_2D*i:nnode_2D*(i+1), 2] = vpoints[i,2]
        # print Q
    # print tmesh.points
    # mesh = Mesh.HexahedralProjection()
    if show_plot:
        mesh.SimplePlot()
    return mesh
def NodeSliderSmootherArc(mesh, niter=10):
    """This is less than half-baked node slider smoother that only works
    for arc type meshes.

    Redistributes the nodes lying on the two straight edges of the arc
    (the x == 0 and y == 0 lines) uniformly from the origin to the outer
    radius, then applies ``niter`` passes of Laplacian smoothing. The mesh
    is modified in place.

    input:
        mesh                [Mesh] quad mesh of an arc; modified in place
        niter               [int] number of Laplacian smoothing iterations
                            [default = 10]

    raises RuntimeError if the mesh is not a quad mesh.
    """
    if mesh.element_type != "quad":
        raise RuntimeError("Only implemented for quads")
    # Outer radius of the arc, read off the mesh bounding box
    radius = mesh.Bounds[1,1]
    # Nodes on the x == 0 edge: redistribute uniformly along y
    idx = np.where(np.isclose(mesh.points[:,0], 0.0))[0]
    idx_sort = np.lexsort((mesh.points[idx,1],mesh.points[idx,0]))
    mesh.points[idx[idx_sort],1] = np.linspace(0.,radius, idx_sort.shape[0])
    # Nodes on the y == 0 edge: redistribute uniformly along x
    idx = np.where(np.isclose(mesh.points[:,1], 0.0))[0]
    idx_sort = np.lexsort((mesh.points[idx,0],mesh.points[idx,1]))
    mesh.points[idx[idx_sort],0] = np.linspace(0.,radius, idx_sort.shape[0])
    mesh.LaplacianSmoothing(niter)
# -----------------------------------------------------------------------------------------
def SphereIGS():
    """Return the text of an IGES (.igs) CAD file describing a sphere.

    The returned string is written to disk by the callers and fed to the
    Anserini-independent CAD projection machinery (PostMesh) as the exact
    geometry to project high-order mesh nodes onto.
    """
    # IGS file for sphere with radius 1000.
    # NOTE(review): IGES is a fixed-column format — the spacing inside this
    # literal is significant and must not be reflowed.
    sphere_igs_file_content ="""
 S0000001
,,31HOpen CASCADE IGES processor 6.7,13HFilename.iges, G0000001
16HOpen CASCADE 6.7,31HOpen CASCADE IGES processor 6.7,32,308,15,308,15,G0000002
,1.,6,1HM,1,0.00001,15H20150628.043945,1E-07,1.007104,5Hroman,,11,0, G0000003
15H20150628.043945,; G0000004
186 1 0 0 0 0 0 000000000D0000001
186 0 0 1 0 0D0000002
514 2 0 0 0 0 0 000010000D0000003
514 0 0 1 1 0D0000004
510 3 0 0 0 0 0 000010000D0000005
510 0 0 1 1 0D0000006
196 4 0 0 0 0 0 000010000D0000007
196 0 0 1 1 0D0000008
116 5 0 0 0 0 0 000010400D0000009
116 0 0 1 0 0D0000010
123 6 0 0 0 0 0 000010200D0000011
123 0 0 1 0 0D0000012
123 7 0 0 0 0 0 000010200D0000013
123 0 0 1 0 0D0000014
508 8 0 0 0 0 0 000010000D0000015
508 0 0 2 1 0D0000016
502 10 0 0 0 0 0 000010000D0000017
502 0 0 2 1 0D0000018
110 12 0 0 0 0 0 000010000D0000019
110 0 0 1 0 0D0000020
504 13 0 0 0 0 0 000010001D0000021
504 0 0 1 1 0D0000022
100 14 0 0 0 0 25 000010000D0000023
100 0 0 1 0 0D0000024
124 15 0 0 0 0 0 000000000D0000025
124 0 0 2 0 0D0000026
110 17 0 0 0 0 0 000010000D0000027
110 0 0 1 0 0D0000028
110 18 0 0 0 0 0 000010000D0000029
110 0 0 1 0 0D0000030
110 19 0 0 0 0 0 000010000D0000031
110 0 0 1 0 0D0000032
186,3,1,0; 0000001P0000001
514,1,5,1; 0000003P0000002
510,7,1,1,15; 0000005P0000003
196,9,1.,11,13; 0000007P0000004
116,0.,0.,0.,0; 0000009P0000005
123,0.,0.,1.; 0000011P0000006
123,1.,0.,-0.; 0000013P0000007
508,4,1,17,1,0,1,0,19,0,21,1,0,1,0,27,1,17,2,1,1,0,29,0,21,1,1, 0000015P0000008
1,0,31; 0000015P0000009
502,2,6.123233996E-17,-1.499759783E-32,1.,6.123233996E-17, 0000017P0000010
-1.499759783E-32,-1.; 0000017P0000011
110,360.,90.,0.,0.,90.,0.; 0000019P0000012
504,1,23,17,2,17,1; 0000021P0000013
100,0.,0.,0.,-1.836970199E-16,-1.,3.061616998E-16,1.; 0000023P0000014
124,1.,0.,-2.449293598E-16,0.,-2.449293598E-16,0.,-1.,0.,0.,1., 0000025P0000015
0.,0.; 0000025P0000016
110,0.,90.,-0.,0.,-90.,-0.; 0000027P0000017
110,0.,-90.,0.,360.,-90.,0.; 0000029P0000018
110,360.,-90.,0.,360.,90.,0.; 0000031P0000019
S 1G 4D 32P 19 T0000001
"""
    return sphere_igs_file_content
|
[
"os.remove",
"Florence.Tensor.totuple",
"numpy.isclose",
"numpy.sin",
"numpy.linalg.norm",
"Florence.LinearElastic",
"Florence.Mesh",
"numpy.unique",
"Florence.BoundaryCondition",
"Florence.Tensor.prime_number_factorisation",
"numpy.zeros_like",
"numpy.copy",
"numpy.linspace",
"copy.deepcopy",
"numpy.hstack",
"numpy.sort",
"numpy.cos",
"numpy.dot",
"numpy.arctan",
"Florence.DisplacementFormulation",
"numpy.vstack",
"Florence.LinearSolver",
"numpy.lexsort",
"Florence.FunctionSpace.MeanValueCoordinateMapping",
"numpy.array",
"Florence.FEMSolver",
"warnings.warn",
"numpy.ascontiguousarray"
] |
[((700, 725), 'numpy.array', 'np.array', (['[30.6979, 20.5]'], {}), '([30.6979, 20.5])\n', (708, 725), True, 'import numpy as np\n'), ((738, 760), 'numpy.array', 'np.array', (['[30.0, 20.0]'], {}), '([30.0, 20.0])\n', (746, 760), True, 'import numpy as np\n'), ((771, 793), 'numpy.array', 'np.array', (['[30.0, 21.0]'], {}), '([30.0, 21.0])\n', (779, 793), True, 'import numpy as np\n'), ((854, 876), 'numpy.linalg.norm', 'np.linalg.norm', (['p1line'], {}), '(p1line)\n', (868, 876), True, 'import numpy as np\n'), ((886, 927), 'numpy.array', 'np.array', (['[center[0], center[1] + radius]'], {}), '([center[0], center[1] + radius])\n', (894, 927), True, 'import numpy as np\n'), ((1196, 1222), 'numpy.array', 'np.array', (['[p1, p2, center]'], {}), '([p1, p2, center])\n', (1204, 1222), True, 'import numpy as np\n'), ((1251, 1257), 'Florence.Mesh', 'Mesh', ([], {}), '()\n', (1255, 1257), False, 'from Florence import Mesh, BoundaryCondition, DisplacementFormulation, FEMSolver, LinearSolver\n'), ((1448, 1454), 'Florence.Mesh', 'Mesh', ([], {}), '()\n', (1452, 1454), False, 'from Florence import Mesh, BoundaryCondition, DisplacementFormulation, FEMSolver, LinearSolver\n'), ((1600, 1606), 'Florence.Mesh', 'Mesh', ([], {}), '()\n', (1604, 1606), False, 'from Florence import Mesh, BoundaryCondition, DisplacementFormulation, FEMSolver, LinearSolver\n'), ((1882, 1888), 'Florence.Mesh', 'Mesh', ([], {}), '()\n', (1886, 1888), False, 'from Florence import Mesh, BoundaryCondition, DisplacementFormulation, FEMSolver, LinearSolver\n'), ((2433, 2439), 'Florence.Mesh', 'Mesh', ([], {}), '()\n', (2437, 2439), False, 'from Florence import Mesh, BoundaryCondition, DisplacementFormulation, FEMSolver, LinearSolver\n'), ((2528, 2546), 'copy.deepcopy', 'deepcopy', (['mesh_arc'], {}), '(mesh_arc)\n', (2536, 2546), False, 'from copy import deepcopy\n'), ((2627, 2645), 'copy.deepcopy', 'deepcopy', (['mesh_arc'], {}), '(mesh_arc)\n', (2635, 2645), False, 'from copy import deepcopy\n'), ((2761, 2767), 
'Florence.Mesh', 'Mesh', ([], {}), '()\n', (2765, 2767), False, 'from Florence import Mesh, BoundaryCondition, DisplacementFormulation, FEMSolver, LinearSolver\n'), ((2903, 2924), 'copy.deepcopy', 'deepcopy', (['mesh_plate1'], {}), '(mesh_plate1)\n', (2911, 2924), False, 'from copy import deepcopy\n'), ((2979, 2985), 'Florence.Mesh', 'Mesh', ([], {}), '()\n', (2983, 2985), False, 'from Florence import Mesh, BoundaryCondition, DisplacementFormulation, FEMSolver, LinearSolver\n'), ((3108, 3130), 'copy.deepcopy', 'deepcopy', (['mesh_square1'], {}), '(mesh_square1)\n', (3116, 3130), False, 'from copy import deepcopy\n'), ((3309, 3323), 'copy.deepcopy', 'deepcopy', (['mesh'], {}), '(mesh)\n', (3317, 3323), False, 'from copy import deepcopy\n'), ((4136, 4142), 'Florence.Mesh', 'Mesh', ([], {}), '()\n', (4140, 4142), False, 'from Florence import Mesh, BoundaryCondition, DisplacementFormulation, FEMSolver, LinearSolver\n'), ((4270, 4324), 'numpy.array', 'np.array', (['[[-1.0, -1], [1.0, -1], [1.0, 1], [-1.0, 1]]'], {}), '([[-1.0, -1], [1.0, -1], [1.0, 1], [-1.0, 1]])\n', (4278, 4324), True, 'import numpy as np\n'), ((4760, 4797), 'numpy.linspace', 'np.linspace', (['(np.pi / 4)', '(np.pi / 2)', 'nx'], {}), '(np.pi / 4, np.pi / 2, nx)\n', (4771, 4797), True, 'import numpy as np\n'), ((5010, 5034), 'numpy.array', 'np.array', (['new_end_points'], {}), '(new_end_points)\n', (5018, 5034), True, 'import numpy as np\n'), ((5503, 5522), 'numpy.array', 'np.array', (['interp_uv'], {}), '(interp_uv)\n', (5511, 5522), True, 'import numpy as np\n'), ((5537, 5553), 'numpy.array', 'np.array', (['new_uv'], {}), '(new_uv)\n', (5545, 5553), True, 'import numpy as np\n'), ((5645, 5674), 'numpy.vstack', 'np.vstack', (['(new_uv, uv[3, :])'], {}), '((new_uv, uv[3, :]))\n', (5654, 5674), True, 'import numpy as np\n'), ((5757, 5783), 'numpy.zeros_like', 'np.zeros_like', (['mesh.points'], {}), '(mesh.points)\n', (5770, 5783), True, 'import numpy as np\n'), ((6276, 6290), 'copy.deepcopy', 
'deepcopy', (['mesh'], {}), '(mesh)\n', (6284, 6290), False, 'from copy import deepcopy\n'), ((6448, 6454), 'Florence.Mesh', 'Mesh', ([], {}), '()\n', (6452, 6454), False, 'from Florence import Mesh, BoundaryCondition, DisplacementFormulation, FEMSolver, LinearSolver\n'), ((7524, 7530), 'Florence.Mesh', 'Mesh', ([], {}), '()\n', (7528, 7530), False, 'from Florence import Mesh, BoundaryCondition, DisplacementFormulation, FEMSolver, LinearSolver\n'), ((7658, 7712), 'numpy.array', 'np.array', (['[[-1.0, -1], [1.0, -1], [1.0, 1], [-1.0, 1]]'], {}), '([[-1.0, -1], [1.0, -1], [1.0, 1], [-1.0, 1]])\n', (7666, 7712), True, 'import numpy as np\n'), ((8174, 8215), 'numpy.linspace', 'np.linspace', (['(np.pi / 4)', '(3 * np.pi / 4)', 'nx'], {}), '(np.pi / 4, 3 * np.pi / 4, nx)\n', (8185, 8215), True, 'import numpy as np\n'), ((8426, 8450), 'numpy.array', 'np.array', (['new_end_points'], {}), '(new_end_points)\n', (8434, 8450), True, 'import numpy as np\n'), ((8919, 8938), 'numpy.array', 'np.array', (['interp_uv'], {}), '(interp_uv)\n', (8927, 8938), True, 'import numpy as np\n'), ((8953, 8969), 'numpy.array', 'np.array', (['new_uv'], {}), '(new_uv)\n', (8961, 8969), True, 'import numpy as np\n'), ((9061, 9090), 'numpy.vstack', 'np.vstack', (['(new_uv, uv[3, :])'], {}), '((new_uv, uv[3, :]))\n', (9070, 9090), True, 'import numpy as np\n'), ((9173, 9199), 'numpy.zeros_like', 'np.zeros_like', (['mesh.points'], {}), '(mesh.points)\n', (9186, 9199), True, 'import numpy as np\n'), ((9391, 9405), 'copy.deepcopy', 'deepcopy', (['mesh'], {}), '(mesh)\n', (9399, 9405), False, 'from copy import deepcopy\n'), ((9687, 9693), 'Florence.Mesh', 'Mesh', ([], {}), '()\n', (9691, 9693), False, 'from Florence import Mesh, BoundaryCondition, DisplacementFormulation, FEMSolver, LinearSolver\n'), ((12476, 12482), 'Florence.Mesh', 'Mesh', ([], {}), '()\n', (12480, 12482), False, 'from Florence import Mesh, BoundaryCondition, DisplacementFormulation, FEMSolver, LinearSolver\n'), ((12498, 12535), 
'Florence.LinearElastic', 'LinearElastic', (['ndim'], {'mu': '(1.0)', 'lamb': '(4.0)'}), '(ndim, mu=1.0, lamb=4.0)\n', (12511, 12535), False, 'from Florence import LinearElastic, NeoHookean\n'), ((12680, 12808), 'Florence.LinearSolver', 'LinearSolver', ([], {'linear_solver': '"""iterative"""', 'linear_solver_type': '"""cg2"""', 'dont_switch_solver': '(True)', 'iterative_solver_tolerance': '(1e-09)'}), "(linear_solver='iterative', linear_solver_type='cg2',\n dont_switch_solver=True, iterative_solver_tolerance=1e-09)\n", (12692, 12808), False, 'from Florence import Mesh, BoundaryCondition, DisplacementFormulation, FEMSolver, LinearSolver\n'), ((14000, 14032), 'os.remove', 'os.remove', (['"""sphere_cad_file.igs"""'], {}), "('sphere_cad_file.igs')\n", (14009, 14032), False, 'import os, sys\n'), ((16452, 16458), 'Florence.Mesh', 'Mesh', ([], {}), '()\n', (16456, 16458), False, 'from Florence import Mesh, BoundaryCondition, DisplacementFormulation, FEMSolver, LinearSolver\n'), ((16474, 16511), 'Florence.LinearElastic', 'LinearElastic', (['ndim'], {'mu': '(1.0)', 'lamb': '(4.0)'}), '(ndim, mu=1.0, lamb=4.0)\n', (16487, 16511), False, 'from Florence import LinearElastic, NeoHookean\n'), ((17770, 17802), 'os.remove', 'os.remove', (['"""sphere_cad_file.igs"""'], {}), "('sphere_cad_file.igs')\n", (17779, 17802), False, 'import os, sys\n'), ((19751, 19802), 'numpy.linspace', 'np.linspace', (['inner_radius', 'outer_radius', '(nthick + 1)'], {}), '(inner_radius, outer_radius, nthick + 1)\n', (19762, 19802), True, 'import numpy as np\n'), ((19813, 19819), 'Florence.Mesh', 'Mesh', ([], {}), '()\n', (19817, 19819), False, 'from Florence import Mesh, BoundaryCondition, DisplacementFormulation, FEMSolver, LinearSolver\n'), ((20726, 20777), 'numpy.ascontiguousarray', 'np.ascontiguousarray', (['mesh.elements'], {'dtype': 'np.int64'}), '(mesh.elements, dtype=np.int64)\n', (20746, 20777), True, 'import numpy as np\n'), ((21258, 21264), 'Florence.Mesh', 'Mesh', ([], {}), '()\n', (21262, 
21264), False, 'from Florence import Mesh, BoundaryCondition, DisplacementFormulation, FEMSolver, LinearSolver\n'), ((21331, 21345), 'copy.deepcopy', 'deepcopy', (['mesh'], {}), '(mesh)\n', (21339, 21345), False, 'from copy import deepcopy\n'), ((22720, 22745), 'numpy.array', 'np.array', (['[0.0, 0.0, 1.0]'], {}), '([0.0, 0.0, 1.0])\n', (22728, 22745), True, 'import numpy as np\n'), ((23724, 23745), 'numpy.unique', 'np.unique', (['mesh.edges'], {}), '(mesh.edges)\n', (23733, 23745), True, 'import numpy as np\n'), ((23912, 23966), 'numpy.lexsort', 'np.lexsort', (['(mesh.points[idx, 1], mesh.points[idx, 0])'], {}), '((mesh.points[idx, 1], mesh.points[idx, 0]))\n', (23922, 23966), True, 'import numpy as np\n'), ((23999, 24042), 'numpy.linspace', 'np.linspace', (['(0.0)', 'radius', 'idx_sort.shape[0]'], {}), '(0.0, radius, idx_sort.shape[0])\n', (24010, 24042), True, 'import numpy as np\n'), ((24139, 24193), 'numpy.lexsort', 'np.lexsort', (['(mesh.points[idx, 0], mesh.points[idx, 1])'], {}), '((mesh.points[idx, 0], mesh.points[idx, 1]))\n', (24149, 24193), True, 'import numpy as np\n'), ((24226, 24269), 'numpy.linspace', 'np.linspace', (['(0.0)', 'radius', 'idx_sort.shape[0]'], {}), '(0.0, radius, idx_sort.shape[0])\n', (24237, 24269), True, 'import numpy as np\n'), ((4060, 4123), 'warnings.warn', 'warn', (['"""Number of division in circumferential direction too low"""'], {}), "('Number of division in circumferential direction too low')\n", (4064, 4123), False, 'from warnings import warn\n'), ((4802, 4812), 'numpy.cos', 'np.cos', (['tt'], {}), '(tt)\n', (4808, 4812), True, 'import numpy as np\n'), ((4823, 4833), 'numpy.sin', 'np.sin', (['tt'], {}), '(tt)\n', (4829, 4833), True, 'import numpy as np\n'), ((4849, 4866), 'numpy.vstack', 'np.vstack', (['(x, y)'], {}), '((x, y))\n', (4858, 4866), True, 'import numpy as np\n'), ((5199, 5250), 'numpy.linalg.norm', 'np.linalg.norm', (['(interp_p[i, :] - interp_p[i - 1, :])'], {}), '(interp_p[i, :] - interp_p[i - 1, :])\n', 
(5213, 5250), True, 'import numpy as np\n'), ((5444, 5456), 'numpy.copy', 'np.copy', (['val'], {}), '(val)\n', (5451, 5456), True, 'import numpy as np\n'), ((5602, 5632), 'numpy.vstack', 'np.vstack', (['(new_uv, interp_uv)'], {}), '((new_uv, interp_uv))\n', (5611, 5632), True, 'import numpy as np\n'), ((5891, 5960), 'Florence.FunctionSpace.MeanValueCoordinateMapping', 'MeanValueCoordinateMapping', (['mesh.points[i, :]', 'new_uv', 'new_end_points'], {}), '(mesh.points[i, :], new_uv, new_end_points)\n', (5917, 5960), False, 'from Florence.FunctionSpace import MeanValueCoordinateMapping\n'), ((6096, 6137), 'numpy.linspace', 'np.linspace', (['(np.pi / 4)', '(np.pi / 2)', '(nx + 1)'], {}), '(np.pi / 4, np.pi / 2, nx + 1)\n', (6107, 6137), True, 'import numpy as np\n'), ((6146, 6156), 'numpy.cos', 'np.cos', (['tt'], {}), '(tt)\n', (6152, 6156), True, 'import numpy as np\n'), ((6167, 6177), 'numpy.sin', 'np.sin', (['tt'], {}), '(tt)\n', (6173, 6177), True, 'import numpy as np\n'), ((6215, 6232), 'numpy.vstack', 'np.vstack', (['(x, y)'], {}), '((x, y))\n', (6224, 6232), True, 'import numpy as np\n'), ((7448, 7511), 'warnings.warn', 'warn', (['"""Number of division in circumferential direction too low"""'], {}), "('Number of division in circumferential direction too low')\n", (7452, 7511), False, 'from warnings import warn\n'), ((8218, 8228), 'numpy.cos', 'np.cos', (['tt'], {}), '(tt)\n', (8224, 8228), True, 'import numpy as np\n'), ((8239, 8249), 'numpy.sin', 'np.sin', (['tt'], {}), '(tt)\n', (8245, 8249), True, 'import numpy as np\n'), ((8265, 8282), 'numpy.vstack', 'np.vstack', (['(x, y)'], {}), '((x, y))\n', (8274, 8282), True, 'import numpy as np\n'), ((8615, 8666), 'numpy.linalg.norm', 'np.linalg.norm', (['(interp_p[i, :] - interp_p[i - 1, :])'], {}), '(interp_p[i, :] - interp_p[i - 1, :])\n', (8629, 8666), True, 'import numpy as np\n'), ((8860, 8872), 'numpy.copy', 'np.copy', (['val'], {}), '(val)\n', (8867, 8872), True, 'import numpy as np\n'), ((9018, 9048), 
'numpy.vstack', 'np.vstack', (['(new_uv, interp_uv)'], {}), '((new_uv, interp_uv))\n', (9027, 9048), True, 'import numpy as np\n'), ((9248, 9317), 'Florence.FunctionSpace.MeanValueCoordinateMapping', 'MeanValueCoordinateMapping', (['mesh.points[i, :]', 'new_uv', 'new_end_points'], {}), '(mesh.points[i, :], new_uv, new_end_points)\n', (9274, 9317), False, 'from Florence.FunctionSpace import MeanValueCoordinateMapping\n'), ((13118, 13137), 'Florence.BoundaryCondition', 'BoundaryCondition', ([], {}), '()\n', (13135, 13137), False, 'from Florence import Mesh, BoundaryCondition, DisplacementFormulation, FEMSolver, LinearSolver\n'), ((13493, 13522), 'Florence.DisplacementFormulation', 'DisplacementFormulation', (['mesh'], {}), '(mesh)\n', (13516, 13522), False, 'from Florence import Mesh, BoundaryCondition, DisplacementFormulation, FEMSolver, LinearSolver\n'), ((13544, 13688), 'Florence.FEMSolver', 'FEMSolver', ([], {'number_of_load_increments': '(1)', 'analysis_nature': '"""linear"""', 'force_not_computing_mesh_qualities': '(True)', 'report_log_level': '(0)', 'optimise': '(True)'}), "(number_of_load_increments=1, analysis_nature='linear',\n force_not_computing_mesh_qualities=True, report_log_level=0, optimise=True)\n", (13553, 13688), False, 'from Florence import Mesh, BoundaryCondition, DisplacementFormulation, FEMSolver, LinearSolver\n'), ((14046, 14067), 'numpy.isclose', 'np.isclose', (['radius', '(1)'], {}), '(radius, 1)\n', (14056, 14067), True, 'import numpy as np\n'), ((17119, 17138), 'Florence.BoundaryCondition', 'BoundaryCondition', ([], {}), '()\n', (17136, 17138), False, 'from Florence import Mesh, BoundaryCondition, DisplacementFormulation, FEMSolver, LinearSolver\n'), ((17816, 17837), 'numpy.isclose', 'np.isclose', (['radius', '(1)'], {}), '(radius, 1)\n', (17826, 17837), True, 'import numpy as np\n'), ((22982, 22990), 'numpy.linalg.norm', 'norm', (['e3'], {}), '(e3)\n', (22986, 22990), False, 'from numpy.linalg import norm\n'), ((23003, 23035), 
'numpy.dot', 'np.dot', (['e3[:, None]', 'E3[None, :]'], {}), '(e3[:, None], E3[None, :])\n', (23009, 23035), True, 'import numpy as np\n'), ((23115, 23132), 'numpy.dot', 'np.dot', (['points', 'Q'], {}), '(points, Q)\n', (23121, 23132), True, 'import numpy as np\n'), ((1514, 1529), 'Florence.Tensor.totuple', 'totuple', (['center'], {}), '(center)\n', (1521, 1529), False, 'from Florence.Tensor import totuple, unique2d\n'), ((1534, 1545), 'Florence.Tensor.totuple', 'totuple', (['p1'], {}), '(p1)\n', (1541, 1545), False, 'from Florence.Tensor import totuple, unique2d\n'), ((1550, 1561), 'Florence.Tensor.totuple', 'totuple', (['p2'], {}), '(p2)\n', (1557, 1561), False, 'from Florence.Tensor import totuple, unique2d\n'), ((1711, 1745), 'numpy.arctan', 'np.arctan', (['(44.22727272 / -7.818181)'], {}), '(44.22727272 / -7.818181)\n', (1720, 1745), True, 'import numpy as np\n'), ((1755, 1790), 'numpy.arctan', 'np.arctan', (['(-24.22727272 / 37.818181)'], {}), '(-24.22727272 / 37.818181)\n', (1764, 1790), True, 'import numpy as np\n'), ((1948, 1959), 'Florence.Tensor.totuple', 'totuple', (['p1'], {}), '(p1)\n', (1955, 1959), False, 'from Florence.Tensor import totuple, unique2d\n'), ((1964, 1975), 'Florence.Tensor.totuple', 'totuple', (['p2'], {}), '(p2)\n', (1971, 1975), False, 'from Florence.Tensor import totuple, unique2d\n'), ((4571, 4592), 'numpy.unique', 'np.unique', (['mesh.edges'], {}), '(mesh.edges)\n', (4580, 4592), True, 'import numpy as np\n'), ((7985, 8006), 'numpy.unique', 'np.unique', (['mesh.edges'], {}), '(mesh.edges)\n', (7994, 8006), True, 'import numpy as np\n'), ((11810, 11839), 'Florence.Tensor.prime_number_factorisation', 'prime_number_factorisation', (['n'], {}), '(n)\n', (11836, 11839), False, 'from Florence.Tensor import prime_number_factorisation\n'), ((15831, 15860), 'Florence.Tensor.prime_number_factorisation', 'prime_number_factorisation', (['n'], {}), '(n)\n', (15857, 15860), False, 'from Florence.Tensor import prime_number_factorisation\n'), 
((16926, 16955), 'Florence.DisplacementFormulation', 'DisplacementFormulation', (['mesh'], {}), '(mesh)\n', (16949, 16955), False, 'from Florence import Mesh, BoundaryCondition, DisplacementFormulation, FEMSolver, LinearSolver\n'), ((17603, 17614), 'Florence.FEMSolver', 'FEMSolver', ([], {}), '()\n', (17612, 17614), False, 'from Florence import Mesh, BoundaryCondition, DisplacementFormulation, FEMSolver, LinearSolver\n'), ((19293, 19380), 'numpy.array', 'np.array', (['[[cut_threshold, cut_threshold, cut_threshold], [offset, offset, offset]]'], {}), '([[cut_threshold, cut_threshold, cut_threshold], [offset, offset,\n offset]])\n', (19301, 19380), True, 'import numpy as np\n'), ((19936, 19948), 'copy.deepcopy', 'deepcopy', (['mm'], {}), '(mm)\n', (19944, 19948), False, 'from copy import deepcopy\n'), ((19950, 19962), 'copy.deepcopy', 'deepcopy', (['mm'], {}), '(mm)\n', (19958, 19962), False, 'from copy import deepcopy\n'), ((19978, 20001), 'numpy.isclose', 'np.isclose', (['radii[i]', '(1)'], {}), '(radii[i], 1)\n', (19988, 20001), True, 'import numpy as np\n'), ((20052, 20079), 'numpy.isclose', 'np.isclose', (['radii[i + 1]', '(1)'], {}), '(radii[i + 1], 1)\n', (20062, 20079), True, 'import numpy as np\n'), ((20255, 20272), 'numpy.copy', 'np.copy', (['elements'], {}), '(elements)\n', (20262, 20272), True, 'import numpy as np\n'), ((20299, 20334), 'numpy.vstack', 'np.vstack', (['(mm1.points, mm2.points)'], {}), '((mm1.points, mm2.points))\n', (20308, 20334), True, 'import numpy as np\n'), ((20519, 20555), 'numpy.vstack', 'np.vstack', (['(mesh.elements, elements)'], {}), '((mesh.elements, elements))\n', (20528, 20555), True, 'import numpy as np\n'), ((20582, 20618), 'numpy.vstack', 'np.vstack', (['(mesh.points, mm2.points)'], {}), '((mesh.points, mm2.points))\n', (20591, 20618), True, 'import numpy as np\n'), ((1043, 1065), 'numpy.linalg.norm', 'np.linalg.norm', (['p1line'], {}), '(p1line)\n', (1057, 1065), True, 'import numpy as np\n'), ((1159, 1181), 
'numpy.linalg.norm', 'np.linalg.norm', (['p1line'], {}), '(p1line)\n', (1173, 1181), True, 'import numpy as np\n'), ((11933, 11962), 'Florence.Tensor.prime_number_factorisation', 'prime_number_factorisation', (['n'], {}), '(n)\n', (11959, 11962), False, 'from Florence.Tensor import prime_number_factorisation\n'), ((12145, 12156), 'numpy.sort', 'np.sort', (['ps'], {}), '(ps)\n', (12152, 12156), True, 'import numpy as np\n'), ((15954, 15983), 'Florence.Tensor.prime_number_factorisation', 'prime_number_factorisation', (['n'], {}), '(n)\n', (15980, 15983), False, 'from Florence.Tensor import prime_number_factorisation\n'), ((16166, 16177), 'numpy.sort', 'np.sort', (['ps'], {}), '(ps)\n', (16173, 16177), True, 'import numpy as np\n'), ((19435, 19512), 'numpy.array', 'np.array', (['[[cut_threshold, cut_threshold, -offset], [offset, offset, offset]]'], {}), '([[cut_threshold, cut_threshold, -offset], [offset, offset, offset]])\n', (19443, 19512), True, 'import numpy as np\n'), ((23853, 23887), 'numpy.isclose', 'np.isclose', (['mesh.points[:, 0]', '(0.0)'], {}), '(mesh.points[:, 0], 0.0)\n', (23863, 23887), True, 'import numpy as np\n'), ((24080, 24114), 'numpy.isclose', 'np.isclose', (['mesh.points[:, 1]', '(0.0)'], {}), '(mesh.points[:, 1], 0.0)\n', (24090, 24114), True, 'import numpy as np\n'), ((990, 1021), 'numpy.linalg.norm', 'np.linalg.norm', (['(y_line * p1line)'], {}), '(y_line * p1line)\n', (1004, 1021), True, 'import numpy as np\n'), ((1020, 1042), 'numpy.linalg.norm', 'np.linalg.norm', (['y_line'], {}), '(y_line)\n', (1034, 1042), True, 'import numpy as np\n'), ((1106, 1137), 'numpy.linalg.norm', 'np.linalg.norm', (['(y_line * p1line)'], {}), '(y_line * p1line)\n', (1120, 1137), True, 'import numpy as np\n'), ((1136, 1158), 'numpy.linalg.norm', 'np.linalg.norm', (['y_line'], {}), '(y_line)\n', (1150, 1158), True, 'import numpy as np\n'), ((4422, 4431), 'numpy.sin', 'np.sin', (['t'], {}), '(t)\n', (4428, 4431), True, 'import numpy as np\n'), ((4447, 4456), 
'numpy.cos', 'np.cos', (['t'], {}), '(t)\n', (4453, 4456), True, 'import numpy as np\n'), ((4461, 4470), 'numpy.sin', 'np.sin', (['t'], {}), '(t)\n', (4467, 4470), True, 'import numpy as np\n'), ((4484, 4493), 'numpy.cos', 'np.cos', (['t'], {}), '(t)\n', (4490, 4493), True, 'import numpy as np\n'), ((4496, 4505), 'numpy.sin', 'np.sin', (['t'], {}), '(t)\n', (4502, 4505), True, 'import numpy as np\n'), ((5366, 5417), 'numpy.linalg.norm', 'np.linalg.norm', (['(interp_p[i, :] - interp_p[i - 1, :])'], {}), '(interp_p[i, :] - interp_p[i - 1, :])\n', (5380, 5417), True, 'import numpy as np\n'), ((6555, 6564), 'numpy.cos', 'np.cos', (['t'], {}), '(t)\n', (6561, 6564), True, 'import numpy as np\n'), ((6569, 6578), 'numpy.sin', 'np.sin', (['t'], {}), '(t)\n', (6575, 6578), True, 'import numpy as np\n'), ((7807, 7816), 'numpy.cos', 'np.cos', (['t'], {}), '(t)\n', (7813, 7816), True, 'import numpy as np\n'), ((7821, 7830), 'numpy.sin', 'np.sin', (['t'], {}), '(t)\n', (7827, 7830), True, 'import numpy as np\n'), ((7846, 7855), 'numpy.cos', 'np.cos', (['t'], {}), '(t)\n', (7852, 7855), True, 'import numpy as np\n'), ((7860, 7869), 'numpy.sin', 'np.sin', (['t'], {}), '(t)\n', (7866, 7869), True, 'import numpy as np\n'), ((7883, 7892), 'numpy.cos', 'np.cos', (['t'], {}), '(t)\n', (7889, 7892), True, 'import numpy as np\n'), ((7895, 7904), 'numpy.sin', 'np.sin', (['t'], {}), '(t)\n', (7901, 7904), True, 'import numpy as np\n'), ((7919, 7928), 'numpy.cos', 'np.cos', (['t'], {}), '(t)\n', (7925, 7928), True, 'import numpy as np\n'), ((7931, 7940), 'numpy.sin', 'np.sin', (['t'], {}), '(t)\n', (7937, 7940), True, 'import numpy as np\n'), ((8782, 8833), 'numpy.linalg.norm', 'np.linalg.norm', (['(interp_p[i, :] - interp_p[i - 1, :])'], {}), '(interp_p[i, :] - interp_p[i - 1, :])\n', (8796, 8833), True, 'import numpy as np\n'), ((9758, 9767), 'numpy.cos', 'np.cos', (['t'], {}), '(t)\n', (9764, 9767), True, 'import numpy as np\n'), ((9773, 9782), 'numpy.sin', 'np.sin', (['t'], {}), 
'(t)\n', (9779, 9782), True, 'import numpy as np\n'), ((9816, 9825), 'numpy.cos', 'np.cos', (['t'], {}), '(t)\n', (9822, 9825), True, 'import numpy as np\n'), ((9830, 9839), 'numpy.sin', 'np.sin', (['t'], {}), '(t)\n', (9836, 9839), True, 'import numpy as np\n'), ((19571, 19642), 'numpy.array', 'np.array', (['[[cut_threshold, -offset, -offset], [offset, offset, offset]]'], {}), '([[cut_threshold, -offset, -offset], [offset, offset, offset]])\n', (19579, 19642), True, 'import numpy as np\n'), ((20158, 20209), 'numpy.hstack', 'np.hstack', (['(mm1.elements, mm1.nnode + mm2.elements)'], {}), '((mm1.elements, mm1.nnode + mm2.elements))\n', (20167, 20209), True, 'import numpy as np\n'), ((20372, 20468), 'numpy.hstack', 'np.hstack', (['(mesh.elements[(i - 1) * mm2.nelem:i * mm2.nelem, 4:], mesh.nnode + mm2.\n elements)'], {}), '((mesh.elements[(i - 1) * mm2.nelem:i * mm2.nelem, 4:], mesh.nnode +\n mm2.elements))\n', (20381, 20468), True, 'import numpy as np\n')]
|
import os
import pytest
from capreolus.collection import COLLECTIONS, Collection
from capreolus.index.anserini import AnseriniIndex
from capreolus.utils.common import Anserini
@pytest.fixture(scope="function")
def trec_index(request, tmpdir):
    """
    Build an index based on sample data and create an AnseriniIndex instance based on it.

    The index is built by shelling out to the Anserini IndexCollection tool;
    a non-zero exit status now raises instead of silently handing back an
    index over missing or partial data.

    Raises:
        RuntimeError: if the Anserini indexing subprocess fails.
    """
    indir = os.path.join(COLLECTIONS["dummy"].basepath, "dummy")
    outdir = os.path.join(tmpdir, "index")
    anserini_fat_jar = Anserini.get_fat_jar()
    cmd = f"java -classpath {anserini_fat_jar} -Xms512M -Xmx31G -Dapp.name=IndexCollection io.anserini.index.IndexCollection -collection TrecCollection -generator JsoupGenerator -threads 1 -input {indir} -index {outdir} -storeTransformedDocs"
    # os.system returns the raw exit status; previously this was ignored and a
    # failed build produced a fixture pointing at a broken index.
    status = os.system(cmd)
    if status != 0:
        raise RuntimeError(f"Anserini indexing failed with status {status}: {cmd}")
    collection = Collection(dummy_collection_config())
    anserini_index = AnseriniIndex(collection, outdir, os.path.join(tmpdir, "index_cache"))
    anserini_index.open()
    return anserini_index
@pytest.fixture(scope="module")
def dummy_collection_config():
    """Return a config dict describing the bundled dummy TREC collection.

    All three artefacts (topics, qrels, documents) live under the dummy
    collection's base path and are in TREC format.
    """
    base = COLLECTIONS["dummy"].basepath
    artefacts = (
        ("topics", "topics.dummy.txt"),
        ("qrels", "qrels.dummy.txt"),
        ("documents", "dummy"),
    )
    config = {"name": "dummy"}
    for key, fname in artefacts:
        config[key] = {"type": "trec", "path": os.path.join(base, fname)}
    return config
|
[
"capreolus.utils.common.Anserini.get_fat_jar",
"os.system",
"pytest.fixture",
"os.path.join"
] |
[((181, 213), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""function"""'}), "(scope='function')\n", (195, 213), False, 'import pytest\n'), ((971, 1001), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""module"""'}), "(scope='module')\n", (985, 1001), False, 'import pytest\n'), ((364, 416), 'os.path.join', 'os.path.join', (["COLLECTIONS['dummy'].basepath", '"""dummy"""'], {}), "(COLLECTIONS['dummy'].basepath, 'dummy')\n", (376, 416), False, 'import os\n'), ((430, 459), 'os.path.join', 'os.path.join', (['tmpdir', '"""index"""'], {}), "(tmpdir, 'index')\n", (442, 459), False, 'import os\n'), ((483, 505), 'capreolus.utils.common.Anserini.get_fat_jar', 'Anserini.get_fat_jar', ([], {}), '()\n', (503, 505), False, 'from capreolus.utils.common import Anserini\n'), ((754, 768), 'os.system', 'os.system', (['cmd'], {}), '(cmd)\n', (763, 768), False, 'import os\n'), ((879, 914), 'os.path.join', 'os.path.join', (['tmpdir', '"""index_cache"""'], {}), "(tmpdir, 'index_cache')\n", (891, 914), False, 'import os\n'), ((1166, 1215), 'os.path.join', 'os.path.join', (['collection_path', '"""topics.dummy.txt"""'], {}), "(collection_path, 'topics.dummy.txt')\n", (1178, 1215), False, 'import os\n'), ((1260, 1308), 'os.path.join', 'os.path.join', (['collection_path', '"""qrels.dummy.txt"""'], {}), "(collection_path, 'qrels.dummy.txt')\n", (1272, 1308), False, 'import os\n'), ((1357, 1395), 'os.path.join', 'os.path.join', (['collection_path', '"""dummy"""'], {}), "(collection_path, 'dummy')\n", (1369, 1395), False, 'import os\n')]
|
import tensorflow as tf
from PIL import Image
import numpy as np
import os
from util import check_or_makedirs
im = Image.open("1.jpg")
print(im.mode, im.size)
np_im = np.array(im)
tf_im = tf.constant(np_im)
print(tf_im.dtype)
img = tf.image.grayscale_to_rgb(tf_im[:, :, tf.newaxis])
# scale image to fixed size
fixed_size = tf.constant([640, 640], dtype=tf.float32) # 16的倍数
raw_shape = tf.cast(tf.shape(img)[:2], tf.float32)
scale_ratio = tf.reduce_min(fixed_size / raw_shape)
new_size = tf.cast(raw_shape * scale_ratio, dtype=tf.int32)
img = tf.image.resize(img, size=new_size)
delta = tf.cast(fixed_size, tf.int32) - new_size
dh, dw = delta[0], delta[1]
img = tf.pad(img, paddings=[[0, dh], [0, dw], [0, 0]], mode='CONSTANT', constant_values=255) # fixed_size, 白底黑字
# image = tf.image.random_brightness(img, max_delta=0.5)
# image = tf.image.random_contrast(image, lower=0.5, upper=2.)
# image = tf.image.random_hue(image, max_delta=0.4)
# image = tf.image.random_jpeg_quality(image, min_jpeg_quality=20, max_jpeg_quality=80)
# image = tf.image.random_saturation(image, lower=0.5, upper=5)
# check_or_makedirs(os.path.join("..", "summary"))
# summary_writer = tf.summary.create_file_writer(os.path.join("..", "summary"))
# with summary_writer.as_default():
# print(np_im.dtype)
# tf.summary.image("image", np_im.reshape((1, 897, 708, 1)).astype("float32")/255, step=0)
# summary_writer.flush()
noise = tf.random.normal(img.shape, mean=0.0, stddev=30.0)
img = img + noise
img = tf.where(img < 0, 0, img)
img = tf.where(img > 255, 255, img)
img = tf.cast(img, tf.uint8)
for i in range(100):
print(i, img.dtype)
# ****************************
delta = -1 + i * 2 / 100
im = tf.image.adjust_brightness(img, delta=delta)
print(im.dtype)
np_im = im.numpy().astype(np.uint8)
p_im = Image.fromarray(np_im)
check_or_makedirs(os.path.join("..", "tf_image", "brightness"))
im_path = os.path.join("..", "tf_image", "brightness", "delta_" + str(delta) + ".jpg")
p_im.save(im_path, format="jpeg")
# ****************************
contrast_factor = 0.3 + i * 1.5 / 100
im = tf.image.adjust_contrast(img, contrast_factor=contrast_factor)
print(im.dtype)
np_im = im.numpy().astype(np.uint8)
p_im = Image.fromarray(np_im)
check_or_makedirs(os.path.join("..", "tf_image", "contrast"))
im_path = os.path.join("..", "tf_image", "contrast", "contrast_factor_" + str(contrast_factor) + ".jpg")
p_im.save(im_path, format="jpeg")
# ****************************
delta = -1 + i * 2 / 100
im = tf.image.adjust_hue(img, delta=delta)
print(im.dtype)
np_im = im.numpy().astype(np.uint8)
p_im = Image.fromarray(np_im)
check_or_makedirs(os.path.join("..", "tf_image", "hue"))
im_path = os.path.join("..", "tf_image", "hue", "delta_" + str(delta) + ".jpg")
p_im.save(im_path, format="jpeg")
# ****************************
jpeg_quality = 0 + i
im = tf.image.adjust_jpeg_quality(img, jpeg_quality=jpeg_quality)
print(im.dtype)
np_im = (im.numpy() * 255).astype(np.uint8)
# print(np_im)
p_im = Image.fromarray(np_im)
check_or_makedirs(os.path.join("..", "tf_image", "jpeg_quality"))
im_path = os.path.join("..", "tf_image", "jpeg_quality", "quality_" + str(jpeg_quality) + ".jpg")
p_im.save(im_path, format="jpeg")
# ****************************
saturation_factor = 0 + i * 100 / 100
im = tf.image.adjust_saturation(img, saturation_factor=saturation_factor)
print(im.dtype)
np_im = im.numpy().astype(np.uint8)
p_im = Image.fromarray(np_im)
check_or_makedirs(os.path.join("..", "tf_image", "saturation"))
im_path = os.path.join("..", "tf_image", "saturation", "saturation_factor_" + str(saturation_factor) + ".jpg")
p_im.save(im_path, format="jpeg")
|
[
"tensorflow.image.grayscale_to_rgb",
"os.path.join",
"tensorflow.random.normal",
"tensorflow.image.adjust_jpeg_quality",
"tensorflow.image.adjust_hue",
"tensorflow.pad",
"tensorflow.constant",
"PIL.Image.open",
"tensorflow.cast",
"tensorflow.shape",
"numpy.array",
"tensorflow.image.adjust_contrast",
"tensorflow.where",
"PIL.Image.fromarray",
"tensorflow.image.resize",
"tensorflow.reduce_min",
"tensorflow.image.adjust_brightness",
"tensorflow.image.adjust_saturation"
] |
[((116, 135), 'PIL.Image.open', 'Image.open', (['"""1.jpg"""'], {}), "('1.jpg')\n", (126, 135), False, 'from PIL import Image\n'), ((169, 181), 'numpy.array', 'np.array', (['im'], {}), '(im)\n', (177, 181), True, 'import numpy as np\n'), ((190, 208), 'tensorflow.constant', 'tf.constant', (['np_im'], {}), '(np_im)\n', (201, 208), True, 'import tensorflow as tf\n'), ((235, 285), 'tensorflow.image.grayscale_to_rgb', 'tf.image.grayscale_to_rgb', (['tf_im[:, :, tf.newaxis]'], {}), '(tf_im[:, :, tf.newaxis])\n', (260, 285), True, 'import tensorflow as tf\n'), ((328, 369), 'tensorflow.constant', 'tf.constant', (['[640, 640]'], {'dtype': 'tf.float32'}), '([640, 640], dtype=tf.float32)\n', (339, 369), True, 'import tensorflow as tf\n'), ((444, 481), 'tensorflow.reduce_min', 'tf.reduce_min', (['(fixed_size / raw_shape)'], {}), '(fixed_size / raw_shape)\n', (457, 481), True, 'import tensorflow as tf\n'), ((493, 541), 'tensorflow.cast', 'tf.cast', (['(raw_shape * scale_ratio)'], {'dtype': 'tf.int32'}), '(raw_shape * scale_ratio, dtype=tf.int32)\n', (500, 541), True, 'import tensorflow as tf\n'), ((548, 583), 'tensorflow.image.resize', 'tf.image.resize', (['img'], {'size': 'new_size'}), '(img, size=new_size)\n', (563, 583), True, 'import tensorflow as tf\n'), ((667, 757), 'tensorflow.pad', 'tf.pad', (['img'], {'paddings': '[[0, dh], [0, dw], [0, 0]]', 'mode': '"""CONSTANT"""', 'constant_values': '(255)'}), "(img, paddings=[[0, dh], [0, dw], [0, 0]], mode='CONSTANT',\n constant_values=255)\n", (673, 757), True, 'import tensorflow as tf\n'), ((1421, 1471), 'tensorflow.random.normal', 'tf.random.normal', (['img.shape'], {'mean': '(0.0)', 'stddev': '(30.0)'}), '(img.shape, mean=0.0, stddev=30.0)\n', (1437, 1471), True, 'import tensorflow as tf\n'), ((1497, 1522), 'tensorflow.where', 'tf.where', (['(img < 0)', '(0)', 'img'], {}), '(img < 0, 0, img)\n', (1505, 1522), True, 'import tensorflow as tf\n'), ((1529, 1558), 'tensorflow.where', 'tf.where', (['(img > 255)', '(255)', 'img'], 
{}), '(img > 255, 255, img)\n', (1537, 1558), True, 'import tensorflow as tf\n'), ((1565, 1587), 'tensorflow.cast', 'tf.cast', (['img', 'tf.uint8'], {}), '(img, tf.uint8)\n', (1572, 1587), True, 'import tensorflow as tf\n'), ((592, 621), 'tensorflow.cast', 'tf.cast', (['fixed_size', 'tf.int32'], {}), '(fixed_size, tf.int32)\n', (599, 621), True, 'import tensorflow as tf\n'), ((1708, 1752), 'tensorflow.image.adjust_brightness', 'tf.image.adjust_brightness', (['img'], {'delta': 'delta'}), '(img, delta=delta)\n', (1734, 1752), True, 'import tensorflow as tf\n'), ((1824, 1846), 'PIL.Image.fromarray', 'Image.fromarray', (['np_im'], {}), '(np_im)\n', (1839, 1846), False, 'from PIL import Image\n'), ((2131, 2193), 'tensorflow.image.adjust_contrast', 'tf.image.adjust_contrast', (['img'], {'contrast_factor': 'contrast_factor'}), '(img, contrast_factor=contrast_factor)\n', (2155, 2193), True, 'import tensorflow as tf\n'), ((2265, 2287), 'PIL.Image.fromarray', 'Image.fromarray', (['np_im'], {}), '(np_im)\n', (2280, 2287), False, 'from PIL import Image\n'), ((2575, 2612), 'tensorflow.image.adjust_hue', 'tf.image.adjust_hue', (['img'], {'delta': 'delta'}), '(img, delta=delta)\n', (2594, 2612), True, 'import tensorflow as tf\n'), ((2684, 2706), 'PIL.Image.fromarray', 'Image.fromarray', (['np_im'], {}), '(np_im)\n', (2699, 2706), False, 'from PIL import Image\n'), ((2960, 3020), 'tensorflow.image.adjust_jpeg_quality', 'tf.image.adjust_jpeg_quality', (['img'], {'jpeg_quality': 'jpeg_quality'}), '(img, jpeg_quality=jpeg_quality)\n', (2988, 3020), True, 'import tensorflow as tf\n'), ((3119, 3141), 'PIL.Image.fromarray', 'Image.fromarray', (['np_im'], {}), '(np_im)\n', (3134, 3141), False, 'from PIL import Image\n'), ((3439, 3507), 'tensorflow.image.adjust_saturation', 'tf.image.adjust_saturation', (['img'], {'saturation_factor': 'saturation_factor'}), '(img, saturation_factor=saturation_factor)\n', (3465, 3507), True, 'import tensorflow as tf\n'), ((3579, 3601), 
'PIL.Image.fromarray', 'Image.fromarray', (['np_im'], {}), '(np_im)\n', (3594, 3601), False, 'from PIL import Image\n'), ((399, 412), 'tensorflow.shape', 'tf.shape', (['img'], {}), '(img)\n', (407, 412), True, 'import tensorflow as tf\n'), ((1869, 1913), 'os.path.join', 'os.path.join', (['""".."""', '"""tf_image"""', '"""brightness"""'], {}), "('..', 'tf_image', 'brightness')\n", (1881, 1913), False, 'import os\n'), ((2310, 2352), 'os.path.join', 'os.path.join', (['""".."""', '"""tf_image"""', '"""contrast"""'], {}), "('..', 'tf_image', 'contrast')\n", (2322, 2352), False, 'import os\n'), ((2729, 2766), 'os.path.join', 'os.path.join', (['""".."""', '"""tf_image"""', '"""hue"""'], {}), "('..', 'tf_image', 'hue')\n", (2741, 2766), False, 'import os\n'), ((3164, 3210), 'os.path.join', 'os.path.join', (['""".."""', '"""tf_image"""', '"""jpeg_quality"""'], {}), "('..', 'tf_image', 'jpeg_quality')\n", (3176, 3210), False, 'import os\n'), ((3624, 3668), 'os.path.join', 'os.path.join', (['""".."""', '"""tf_image"""', '"""saturation"""'], {}), "('..', 'tf_image', 'saturation')\n", (3636, 3668), False, 'import os\n')]
|
import glob
import random
import os
import numpy as np
from torch.utils.data import Dataset
from PIL import Image
import torchvision.transforms as transforms
class ImageDataset(Dataset):
def __init__(self, root, transforms_=None, unaligned=False, mode='train', portion=None):
self.transform = transforms.Compose(transforms_)
self.unaligned = unaligned
self._portion = portion
self.files_A_total = sorted(glob.glob(os.path.join(root, '%s/A' % mode) + '/*.jpg'))
self.files_B_total = sorted(glob.glob(os.path.join(root, '%s/B' % mode) + '/*.jpg'))
if self._portion is not None:
num_files_A = len(self.files_A_total)
num_files_B = len(self.files_B_total)
if self._portion > 0:
split_A = int(np.floor(self._portion * num_files_A))
self.files_A = self.files_A_total[:split_A]
split_B = int(np.floor(self._portion * num_files_B))
self.files_B = self.files_B_total[:split_B]
elif self._portion < 0:
split_A = int(np.floor((1 + self._portion) * num_files_A))
self.files_A = self.files_A_total[split_A:]
split_B = int(np.floor((1 + self._portion) * num_files_B))
self.files_B = self.files_B_total[split_B:]
else:
self.files_A = self.files_A_total
self.files_B = self.files_B_total
def __getitem__(self, index):
item_A = self.transform(Image.open(self.files_A[index % len(self.files_A)]))
if self.unaligned:
item_B = self.transform(Image.open(self.files_B[random.randint(0, len(self.files_B) - 1)]).convert('RGB'))
else:
item_B = self.transform(Image.open(self.files_B[index % len(self.files_B)]).convert('RGB'))
return {'A': item_A, 'B': item_B}
def __len__(self):
# return max(len(self.files_A), len(self.files_B))
return len(self.files_A)
class PairedImageDataset(Dataset):
def __init__(self, dataset_dir, soft_data_dir, mode='train', portion=None, transforms_=None):
'''
Construct a dataset with all images from a dir.
dataset: str. dataset name
style: str. 'A2B' or 'B2A'
'''
self.transform = transforms.Compose(transforms_)
self._portion = portion
path_A = os.path.join(dataset_dir, '%s/A' % mode)
path_B = os.path.join(soft_data_dir)
self.files_A_total = sorted(glob.glob(path_A + '/*.jpg'))
self.files_B_total = sorted(glob.glob(path_B + '/*.png'))
assert len(self.files_A_total) == len(self.files_B_total)
if self._portion is not None:
num_files = len(self.files_A_total)
if self._portion > 0:
split = int(np.floor(self._portion * num_files))
self.files_A = self.files_A_total[:split]
self.files_B = self.files_B_total[:split]
elif self._portion < 0:
split = int(np.floor((1 + self._portion) * num_files))
self.files_A = self.files_A_total[split:]
self.files_B = self.files_B_total[split:]
else:
self.files_A = self.files_A_total
self.files_B = self.files_B_total
print('files_A:', len(self.files_A))
print('files_B:', len(self.files_B))
def __getitem__(self, index):
if np.random.rand() < 0.5:
flip = True
else:
flip = False
img_A = Image.open(self.files_A[index % len(self.files_A)])
img_A = img_A.convert("RGB")
if flip:
img_A= np.asarray(img_A) # PIL.Image to np.ndarray
img_A = np.flip(img_A, axis=1) # data augumentation: horrizental flip
img_A = Image.fromarray(np.uint8(img_A)) # np.ndarray to PIL.Image
item_A = self.transform(img_A)
img_B = Image.open(self.files_B[index % len(self.files_B)])
img_B = img_B.convert("RGB")
if flip:
img_B= np.asarray(img_B) # PIL.Image to np.ndarray
img_B = np.flip(img_B, axis=1) # data augumentation: horrizental flip
img_B = Image.fromarray(np.uint8(img_B)) # np.ndarray to PIL.Image
item_B = self.transform(img_B)
return {'A': item_A, 'B': item_B}
def __len__(self):
return len(self.files_A)
|
[
"numpy.uint8",
"numpy.flip",
"numpy.asarray",
"numpy.floor",
"torchvision.transforms.Compose",
"glob.glob",
"numpy.random.rand",
"os.path.join"
] |
[((316, 347), 'torchvision.transforms.Compose', 'transforms.Compose', (['transforms_'], {}), '(transforms_)\n', (334, 347), True, 'import torchvision.transforms as transforms\n'), ((2362, 2393), 'torchvision.transforms.Compose', 'transforms.Compose', (['transforms_'], {}), '(transforms_)\n', (2380, 2393), True, 'import torchvision.transforms as transforms\n'), ((2455, 2495), 'os.path.join', 'os.path.join', (['dataset_dir', "('%s/A' % mode)"], {}), "(dataset_dir, '%s/A' % mode)\n", (2467, 2495), False, 'import os\n'), ((2514, 2541), 'os.path.join', 'os.path.join', (['soft_data_dir'], {}), '(soft_data_dir)\n', (2526, 2541), False, 'import os\n'), ((2579, 2607), 'glob.glob', 'glob.glob', (["(path_A + '/*.jpg')"], {}), "(path_A + '/*.jpg')\n", (2588, 2607), False, 'import glob\n'), ((2646, 2674), 'glob.glob', 'glob.glob', (["(path_B + '/*.png')"], {}), "(path_B + '/*.png')\n", (2655, 2674), False, 'import glob\n'), ((3543, 3559), 'numpy.random.rand', 'np.random.rand', ([], {}), '()\n', (3557, 3559), True, 'import numpy as np\n'), ((3782, 3799), 'numpy.asarray', 'np.asarray', (['img_A'], {}), '(img_A)\n', (3792, 3799), True, 'import numpy as np\n'), ((3847, 3869), 'numpy.flip', 'np.flip', (['img_A'], {'axis': '(1)'}), '(img_A, axis=1)\n', (3854, 3869), True, 'import numpy as np\n'), ((4192, 4209), 'numpy.asarray', 'np.asarray', (['img_B'], {}), '(img_B)\n', (4202, 4209), True, 'import numpy as np\n'), ((4257, 4279), 'numpy.flip', 'np.flip', (['img_B'], {'axis': '(1)'}), '(img_B, axis=1)\n', (4264, 4279), True, 'import numpy as np\n'), ((3946, 3961), 'numpy.uint8', 'np.uint8', (['img_A'], {}), '(img_A)\n', (3954, 3961), True, 'import numpy as np\n'), ((4356, 4371), 'numpy.uint8', 'np.uint8', (['img_B'], {}), '(img_B)\n', (4364, 4371), True, 'import numpy as np\n'), ((466, 499), 'os.path.join', 'os.path.join', (['root', "('%s/A' % mode)"], {}), "(root, '%s/A' % mode)\n", (478, 499), False, 'import os\n'), ((560, 593), 'os.path.join', 'os.path.join', (['root', "('%s/B' % 
mode)"], {}), "(root, '%s/B' % mode)\n", (572, 593), False, 'import os\n'), ((818, 855), 'numpy.floor', 'np.floor', (['(self._portion * num_files_A)'], {}), '(self._portion * num_files_A)\n', (826, 855), True, 'import numpy as np\n'), ((951, 988), 'numpy.floor', 'np.floor', (['(self._portion * num_files_B)'], {}), '(self._portion * num_files_B)\n', (959, 988), True, 'import numpy as np\n'), ((2901, 2936), 'numpy.floor', 'np.floor', (['(self._portion * num_files)'], {}), '(self._portion * num_files)\n', (2909, 2936), True, 'import numpy as np\n'), ((1124, 1167), 'numpy.floor', 'np.floor', (['((1 + self._portion) * num_files_A)'], {}), '((1 + self._portion) * num_files_A)\n', (1132, 1167), True, 'import numpy as np\n'), ((1263, 1306), 'numpy.floor', 'np.floor', (['((1 + self._portion) * num_files_B)'], {}), '((1 + self._portion) * num_files_B)\n', (1271, 1306), True, 'import numpy as np\n'), ((3124, 3165), 'numpy.floor', 'np.floor', (['((1 + self._portion) * num_files)'], {}), '((1 + self._portion) * num_files)\n', (3132, 3165), True, 'import numpy as np\n')]
|
import numpy as np
from sas7bdat import SAS7BDAT
import glob
import pandas as pd
from sklearn import preprocessing
from sas7bdat import SAS7BDAT
import glob
import pandas as pd
import matplotlib as mpl
import matplotlib.pyplot as plt
from sklearn import utils, model_selection, metrics, linear_model, neighbors, ensemble
def convertAllCHSData(year = [], onlySubjectsWBiomarkers = 0):
if onlySubjectsWBiomarkers:
print('Only obtaining data for subjects/households with biomarker data.')
dataDirs = glob.glob('./data/Master*')
for dir in dataDirs:
SASfiles = glob.glob(dir + '/*.sas7bdat')
for SASfile in SASfiles:
convertSASfile(SASfile, year)
def convertSASfile(inputFullPath, year = [], onlySubjectsWBiomarkers = 0):
print('Converting ' + inputFullPath)
df = SAS2DataFrame(inputFullPath, year = year)
outputName = inputFullPath.split('/')[-1].split('.')[0]
outputDir = '/'.join(inputFullPath.split('/')[0:-2])
if year:
outputFullPath = outputDir + '/' + outputName
outputFullPath = outputFullPath + '_' + str(year) + 'only' + '.csv'
else:
outputFullPath = outputDir + '/' + outputName + '.csv'
if onlySubjectsWBiomarkers:
subjectsWithBiomarkers = pd.read_csv('./data/subjectsWithBiomarkers.csv')
tmp = set(df.columns)
identifyingFields = list(tmp.intersection(set(subjectsWithBiomarkers.columns)))
if not identifyingFields:
print('No identifying fields found.')
return
elif identifyingFields.count('idind'):
selFactor = 'idind'
selidinds = list(set(df[selFactor]).intersection(set(subjectsWithBiomarkers[selFactor])))
selIdxs = [a in selidinds for a in df[selFactor]]
df = df[selIdxs]
elif identifyingFields.count('hhid'):
selFactor = 'hhid'
selidinds = list(set(df[selFactor]).intersection(set(subjectsWithBiomarkers[selFactor])))
selIdxs = [a in selidinds for a in df[selFactor]]
df = df[selIdxs]
elif identifyingFields.count('commid'):
selFactor = 'commid'
selidinds = list(set(df[selFactor]).intersection(set(subjectsWithBiomarkers[selFactor])))
selIdxs = [a in selidinds for a in df[selFactor]]
df = df[selIdxs]
print(str(df.shape[0]) + ' valid rows')
df.to_csv(outputFullPath)
return
def SAS2DataFrame(inputFullPath, year = []):
with SAS7BDAT(inputFullPath, skip_header=False) as reader:
df = reader.to_data_frame()
df.columns = [col.lower() for col in df.columns]
if (not not year) & any(df.columns == 'wave'):
df = df[df['wave'] == year]
return df
def getSurveyData():
''' Gets relevant survey data for dHealth project
i.e. survey data for subjects that have biomarker data
'''
surveyPath = './data/Master_ID_201908/surveys_pub_12.sas7bdat'
surveyData = SAS2DataFrame(surveyPath)
surveyData = surveyData[(surveyData['biomaker'] == 1) & (surveyData['wave'] == 2009)]
return surveyData
def getBiomarkerData():
surveyData = getSurveyData()
biomarkerPath = './data/Master_Biomarker_2009/biomarker_09.sas7bdat'
biomarkerData = SAS2DataFrame(biomarkerPath)
ids1 = set(biomarkerData.idind)
ids2 = set(surveyData.idind)
excludeIds = list(ids1.difference(ids2))
for id in excludeIds:
tmp = list(biomarkerData.idind)
idx = tmp.index(id)
biomarkerData = biomarkerData.drop(idx)
return biomarkerData
def createSubjectsWithBiomarkersCSV():
surveyData = getSurveyData()
surveyData = surveyData.iloc[:,[0,1,5,3]]
surveyData.columns = ['idind', 'hhid', 'commid', 'Age']
surveyData.to_csv('./data/subjectsWithBiomarkers.csv')
# createSubjectsWithBiomarkersCSV()
featureMap = pd.read_csv('featureTableMap.csv')
subjects = pd.read_csv('./data/subjectsWithBiomarkers.csv',usecols = ['idind','Age']) # Could add others too'hhid','commid'
def createGenderCSV():
print('Extracting gender data...')
subjects = pd.read_csv('./data/subjectsWithBiomarkers.csv',usecols = ['idind','hhid','commid'])
subjects = subjects.astype({'idind': 'int',
'hhid': 'int',
'commid': 'int'})
def getGender(subjectIdx, idind_1, idind_2, sex_1, sex_2):
gender = np.nan
if subjects.idind[subjectIdx] in idind_1:
idx = idind_1.index(subjects.idind[subjectIdx])
gender = int(sex_1[idx])
elif subjects.idind[subjectIdx] in idind_2:
idx = idind_2.index(subjects.idind[subjectIdx])
gender = int(sex_2[idx])
else:
gender = np.nan
if gender == 1:
gender = int(1)
elif gender == 2:
gender = 0
if subjectIdx % 500 == 0:
print(str(100*subjectIdx/9548) + '% complete')
return gender
relations = pd.read_csv('./data/relationmast_pub_00_2009only.csv')
idind_1 = list(relations.idind_1)
idind_2 = list(relations.idind_2)
sex_1 = list(relations.sex_1)
sex_2 = list(relations.sex_2)
gender = [getGender(i, idind_1, idind_2, sex_1, sex_2) for i in range(len(subjects))]
d = {'idind': subjects.idind, 'Sex': gender}
df = pd.DataFrame(data=d)
df.to_csv('./data/gender.csv')
def createSleep_ScreenTimeCSV():
sleep_screenTime = pd.read_csv('./data/pact_12_2009only.csv',usecols = ['idind', 'u324', 'u339','u340_mn', 'u341_mn','u508', 'u509_mn','u510_mn','u345','u346_mn', 'u347_mn'])
sleep_screenTime.columns = ['idind', 'Hours_of_sleep', 'watchTV','TVhours_week','TVhours_weekend','goesOnline','online_week','online_weekend', 'play_videoGames', 'videoGames_week', 'videoGames_weekend']
sleep_screenTime = sleep_screenTime.replace({'watchTV':{9:1,np.nan:1}, 'goesOnline':{9:0,np.nan:0}, 'play_videoGames':{9:0,np.nan:0}, 'Hours_of_sleep':{-9: np.nan}})
sleep_screenTime = sleep_screenTime.fillna(sleep_screenTime.median())
sleep_screenTime_subjects= list(sleep_screenTime.idind)
def getDailyScreenTime(subjectIdx):
weeklyScreenTime = 0
if subjects.idind[subjectIdx] in sleep_screenTime_subjects:
idx = sleep_screenTime_subjects.index(subjects.idind[subjectIdx])
else:
return np.nan
if sleep_screenTime.watchTV[idx]:
weeklyScreenTime = weeklyScreenTime + sleep_screenTime.TVhours_week[idx] + sleep_screenTime.TVhours_weekend[idx]
else:
pass
if sleep_screenTime.goesOnline[idx]:
weeklyScreenTime = weeklyScreenTime + sleep_screenTime.online_week[idx] + sleep_screenTime.online_weekend[idx]
else:
pass
if sleep_screenTime.play_videoGames[idx]:
weeklyScreenTime = weeklyScreenTime + sleep_screenTime.videoGames_week[idx] + sleep_screenTime.videoGames_weekend[idx]
else:
pass
return np.round(weeklyScreenTime/7)
def getDailySleepTime(subjectIdx):
if subjects.idind[subjectIdx] in sleep_screenTime_subjects:
idx = sleep_screenTime_subjects.index(subjects.idind[subjectIdx])
else:
return np.nan
return sleep_screenTime.Hours_of_sleep[idx]
subjects = pd.read_csv('./data/subjectsWithBiomarkers.csv',usecols = ['idind'])
Daily_screen_time = [getDailyScreenTime(i) for i in range(len(subjects))]
Hours_of_sleep = [getDailySleepTime(i) for i in range(len(subjects))]
d = {'idind': subjects.idind, 'Daily_screen_time': Daily_screen_time, 'Hours_of_sleep': Hours_of_sleep}
df = pd.DataFrame(data=d)
df.to_csv('./data/sleep_screentime.csv')
return df
# Define these variables for default inputs for the functions below:
def preprocessRawChinaHealthStudyData():
createSubjectsWithBiomarkersCSV()
convertAllCHSData(year = 2009, onlySubjectsWBiomarkers = 1)
createGenderCSV()
createSleep_ScreenTimeCSV()
def getAndMergeTables(subjects = subjects, tableNum = 1):
newDF = pd.read_csv('./data/'+featureMap['tablename'][tableNum],usecols = eval(featureMap['varnames'][tableNum]))
newDF.columns = eval(featureMap['newnames'][tableNum])
try:
replaceDict = eval(featureMap['replacements'][tableNum])
print('This should not work for surveys')
newDF.replace(replaceDict, inplace = True)
except:
print('Could not replace values or none exists.')
subjects = pd.merge(subjects,newDF,how='left', on ='idind')
print(list(newDF.columns))
print(subjects.columns)
return subjects
def createDataTable():
subjects = pd.read_csv('./data/subjectsWithBiomarkers.csv',usecols = ['idind','Age'])
print('Adding demographic info')
for i in range(1,4):
print('Adding ' + featureMap['tablename'][i])
subjects = getAndMergeTables(subjects = subjects, tableNum = i)
print('One-hot-encoding medical conditions...')
# One-hot-encode medical conditions:
medicalConditions = subjects['Medical_condition'].fillna('noReport')
medicalConditions = medicalConditions.fillna('noReport')
medicalConditions = pd.DataFrame(medicalConditions)
enc = preprocessing.OneHotEncoder(categories = "auto")
enc.fit(medicalConditions)
data = enc.transform(medicalConditions).toarray()
columnNames = enc.categories_[0]
medicalConditions = pd.DataFrame(data,columns=columnNames)
# Replace old medical condition column to one-hot-encoded vars:
subjects.drop('Medical_condition', axis=1, inplace=True)
subjects=pd.concat([subjects,medicalConditions], axis=1, ignore_index=False)
# Add physical exam:
print('Adding lifestyle features...')
i = 4
print('Adding ' + featureMap['tablename'][i])
subjects = getAndMergeTables(subjects = subjects, tableNum = i)
# Add lifestyle features:
print('Adding lifestyle features...')
for i in range(5,featureMap.shape[0]-1):
print('Adding ' + featureMap['tablename'][i])
subjects = getAndMergeTables(subjects = subjects, tableNum = i)
print('Adding reponse variables...')
# Add the response variables (biomarker levels):
i = featureMap.shape[0]-1
print('Adding ' + featureMap['tablename'][i])
subjects = getAndMergeTables(subjects = subjects, tableNum = i)
# Median impute missing data:
subjects = subjects.fillna(subjects.median())
#Change data types:
subjects = subjects.astype({'idind': 'int',
'Sex': 'int',
'Urban': 'int',
'Activity_level': 'int'})
return subjects
def shuffleAndSplit(featureMatrix, targetMatrix, test_size=.2, n_splits=5):
# Shuffle datasets:
X,Y = utils.shuffle(featureMatrix,targetMatrix, random_state = 0)
# Split X and y into training and test sets (80% Train : 20% Test):
X_Train, X_Test, Y_Train, Y_Test = model_selection.train_test_split(
X, Y, random_state = 0, test_size = test_size)
cv=model_selection.KFold(n_splits = n_splits, shuffle = False)
return X_Train, X_Test, Y_Train, Y_Test, cv
def showDataSplits(Y_Train, Y_Test, cv):
''' Helper function to show how the data was split
'''
fig, ax = plt.subplots(figsize = (12,3))
plt.xlim(0,len(Y_Train)+len(Y_Test))
plt.ylim(0,cv.n_splits+1.5)
ax.set_title('Training and Validation splits \n (after shuffling)')
plt.xlabel('Dataset indicies')
yticklabels= [];
offset = -.4
i = 0
for train_idxs, cval_idxs in cv.split(Y_Train):
# training data:
i += 1
start = (min(train_idxs),i+offset)
width = max(train_idxs)-min(train_idxs)
if i == 1:
ax.add_patch(mpl.patches.Rectangle(start, width = width, height = .8, color = 'c', label = 'CV_train'))
ax.add_patch(mpl.patches.Rectangle(start, width = width, height = .8, color = 'c'))
# cross-validation data:
start = (min(cval_idxs),i+offset)
width = max(cval_idxs)-min(cval_idxs)
if i == 1:
ax.add_patch(mpl.patches.Rectangle(start, width = width, height = .8, color = 'orange', label = 'CV_validation'))
ax.add_patch(mpl.patches.Rectangle(start, width = width, height = .8, color = 'orange'))
yticklabels.append('Cross validation_' + str(i))
start = (0,cv.n_splits+1+offset)
width = len(Y_Train)
ax.add_patch(mpl.patches.Rectangle(start, width = width, height = .8, color = 'g', label = 'Train'))
start = (len(Y_Train),cv.n_splits+1+offset)
width = len(Y_Train)
ax.add_patch(mpl.patches.Rectangle(start, width = width, height = .8, color = 'r', label = 'Test'))
yticklabels.append('Final test')
#Format plot
plt.yticks(np.arange(1,cv.n_splits+2),yticklabels)
handles, labels = ax.get_legend_handles_labels()
ax.legend(handles, labels)
plt.show()
def createHealthForecasterModels():
    """Train biomarker-forecasting regression models and pickle them.

    Builds the aggregated subject data table, selects the fixed
    (demographic/medical) and lifestyle feature columns, fits several
    multi-output regressors on the full dataset, and writes the trained
    models together with the feature/response column names and the data
    table to ``models.p``.

    Returns
    -------
    tuple : (trainedModels, inputFeatures, responseVariables) where
        ``trainedModels`` maps a model name to its fitted estimator.

    NOTE(review): the unused ``*Idxs`` lookup tables and the large blocks
    of commented-out weight-model code were removed; behaviour is
    otherwise unchanged.
    """
    import pickle
    ## Aggregate relevant data for ML:
    data = createDataTable()
    # Factors the subject cannot change (demographics, location, diagnoses, ...).
    fixedFactors = ['Age', 'Sex', 'Urban', 'ENT', 'OBGYN', 'Old_age_midLife_syndrome', 'alcohol_poisoning',
    'dermatological', 'digestive', 'endocrine', 'heart', 'hematological', 'infectious_parasitic', 'injury',
    'muscular_rheumatological', 'neurological', 'noDiagnosis', 'noReport', 'other', 'pyschiatric', 'respiratory',
    'sexualDysfunction', 'tumor', 'unknown', 'urinary', 'High_BP', 'Diabetes', 'Heart_attack', 'Internal_bleeding',
    'Pregnant','Height']
    # Factors a lifestyle intervention can change.
    lifestyleFactors = ['Smoker', 'Cups_water_daily', 'Alcohol_frequency', 'Weight', 'Kcal', 'Carbs', 'Fat', 'Protein', 'Activity_level', 'Daily_screen_time', 'Hours_of_sleep']
    # Blood-panel / physical-exam measurements the models learn to predict.
    responseVariables = ['Insulin','Triglycerides','HDL_C', 'LDL_C','Urea', 'Uric_acid', 'APO_A', 'Lipoprotein_A','High_sensitivity_CRP', 'Creatinine',
    'APO_B', 'Mg', 'Ferritin', 'Hemoglobin', 'White_blood_cell',
    'Red_blood_cell', 'Platelet', 'Glucose_field','HbA1c', 'Total_protein','Albumin', 'Glucose',
    'Total_cholestorol', 'Alanine_AT', 'Transferrin', 'Transferrin_receptor','Systol', 'Diastol']
    inputFeatures = fixedFactors + lifestyleFactors
    X = data[inputFeatures].to_numpy()
    Y = data[responseVariables].to_numpy()
    # Candidate regressors: linear baselines plus a random forest.
    models = dict(ols=linear_model.LinearRegression(),
                  lasso=linear_model.Lasso(alpha=0.75),
                  ridge=linear_model.Ridge(alpha=0.75),
                  elastic=linear_model.ElasticNet(alpha=0.1, l1_ratio=0.75),
                  randomForest=ensemble.RandomForestRegressor(random_state=0,
                                                              max_features='auto',
                                                              min_samples_leaf=50,
                                                              n_estimators=200)
                  )
    # Train each model on the full feature/response matrices.
    print('Training trainedModels')
    trainedModels = {}
    for name, mdl in models.items():
        print('Training ' + str(name) + '...')
        trainedModels.update({name: mdl.fit(X, Y)})
    print('finished')
    # Use a context manager so the pickle file handle is closed (the
    # original passed a bare open() into pickle.dump and leaked it).
    with open("models.p", "wb") as fh:
        pickle.dump([trainedModels, inputFeatures, responseVariables, data], fh)
    return trainedModels, inputFeatures, responseVariables
def _alcoholFrequencyCode(label):
    """Map an alcohol-frequency survey answer to its ordinal code.

    Unrecognised answers fall back to 3 (the middle of the scale), matching
    the original behaviour.
    """
    codes = {
        'daily': 1,
        '3-4 times a week': 2,
        'Once or twice a week': 3,
        'Once or twice a month': 4,
        'No more than once a month': 5,
    }
    return codes.get(label, 3)

def _fillLifestyle(values, inputDict, inputFeatures, prefix):
    """Write the lifestyle answers with the given key prefix into *values*.

    *prefix* is ``'curr'`` (current lifestyle) or ``'int'`` (intervention);
    the survey keys are the prefix plus a fixed suffix (e.g. ``currCarbo``).
    Mutates *values* in place.
    """
    idx = inputFeatures.index
    # Habits:
    values[idx('Alcohol_frequency')] = _alcoholFrequencyCode(inputDict[prefix + 'Alcohol_frequency'])
    values[idx('Cups_water_daily')] = inputDict[prefix + 'Cups_water_daily']
    if inputDict[prefix + 'Smoker']:
        values[idx('Smoker')] = 1
    # Diet/Weight: kcal derived from macros (4 kcal/g carbs & protein, 9 kcal/g fat).
    carbs = inputDict[prefix + 'Carbo']
    fat = inputDict[prefix + 'Fat']
    protein = inputDict[prefix + 'Protn']
    values[idx('Kcal')] = carbs * 4 + protein * 4 + fat * 9
    values[idx('Carbs')] = carbs
    values[idx('Fat')] = fat
    values[idx('Protein')] = protein
    # Activity:
    values[idx('Activity_level')] = inputDict[prefix + 'ActivityLevel']
    values[idx('Daily_screen_time')] = inputDict[prefix + 'DailyScreenTime']
    values[idx('Hours_of_sleep')] = inputDict[prefix + 'Hours_of_sleep']
    # Weight in lbs -> kg (only when the model uses a Weight feature).
    if 'Weight' in inputFeatures:
        values[idx('Weight')] = inputDict[prefix + 'Weight'] / 2.205

def parseInputs(inputDict, inputFeatures):
    """Convert a survey-answer dict into current/intervention feature vectors.

    Parameters
    ----------
    inputDict : dict of survey answers; fixed factors use plain keys
        ('Age', 'Sex', ...) while lifestyle answers are duplicated under
        'curr*' and 'int*' prefixes.
    inputFeatures : ordered list of model feature names; the value of
        ``inputDict['Medical_condition']`` must appear in this list.

    Returns
    -------
    (currentValues, futureValues) : two ``np.zeros(len(inputFeatures))``
        arrays filled with the current lifestyle and the intervention
        lifestyle respectively (fixed factors are identical in both).
    """
    currentValues = np.zeros(len(inputFeatures))
    futureValues = np.zeros(len(inputFeatures))
    # Fixed factors are the same in both vectors.
    for values in (currentValues, futureValues):
        idx = inputFeatures.index
        values[idx('Age')] = inputDict['Age']
        values[idx('Sex')] = 1 if inputDict['Sex'] == 'M' else 0
        values[idx('Urban')] = 1 if inputDict['Location'] == 'Urban' else 0
        # Height in inches -> cm.
        values[idx('Height')] = inputDict['Height'] * 2.54
        # One-hot flag for the reported medical-condition category.
        values[idx(inputDict['Medical_condition'])] = 1
        for flag in ('Pregnant', 'Diabetes', 'High_BP', 'Heart_attack', 'Internal_bleeding'):
            if inputDict[flag]:
                values[idx(flag)] = 1
    # Lifestyle answers differ between the two vectors.
    _fillLifestyle(currentValues, inputDict, inputFeatures, 'curr')
    _fillLifestyle(futureValues, inputDict, inputFeatures, 'int')
    return currentValues, futureValues
def plotSubjectModelPrediction(trainedModels, X, Y, responseVariables, modelName = 'randomForest', subjectIdx = 3):
    """Scatter-plot actual vs. predicted response values for one subject.

    Parameters
    ----------
    trainedModels : dict mapping model name -> fitted estimator with ``predict``.
    X, Y : full feature and response matrices (subjects in rows).
    responseVariables : ordered list of response column names (x tick labels).
    modelName : key into ``trainedModels`` selecting the model to plot.
    subjectIdx : row index of the subject to visualise.
    """
    import matplotlib.pyplot as plt
    f, ax = plt.subplots(figsize=(11, 5))
    y_predict = trainedModels[modelName].predict(X[subjectIdx, :].reshape(1, -1))
    # Generalised: plot every response variable. The original hard-coded
    # range(0, 26), silently truncating the variable list.
    positions = range(len(responseVariables))
    plt.scatter(positions, Y[subjectIdx, :].T, color='b', label='actual')
    plt.scatter(positions, y_predict.T, color='r', label='prediction')
    plt.xticks(positions)
    plt.xticks(rotation='vertical')
    ax.set_xticklabels(responseVariables)
    handles, labels = ax.get_legend_handles_labels()
    ax.legend(handles, labels)
    plt.show()
## Helper functions
def update_progress(numerator, denominator=1, taskName = 'Progress'):
    """Print a 20-character text progress bar, replacing the previous one.

    Intended for Jupyter/IPython: each call clears the cell output and
    redraws the bar. Non-numeric numerators count as zero, and the
    completed fraction is clamped to [0, 1].
    """
    from IPython.display import clear_output
    bar_length = 20
    # Normalise the numerator: ints become floats, anything else becomes 0.
    if isinstance(numerator, int):
        numerator = float(numerator)
    if not isinstance(numerator, float):
        numerator = 0
    # Clamp the completed fraction to the [0, 1] range.
    if numerator / denominator < 0:
        numerator = 0
    if numerator / denominator >= 1:
        numerator = denominator
    fraction = numerator / denominator
    filled = int(round(bar_length * fraction))
    clear_output(wait=True)
    bar = "#" * filled + "-" * (bar_length - filled)
    text = taskName + ": [{0}] {1:.1f}%".format(bar, fraction * 100)
    print(text)
|
[
"pandas.read_csv",
"sklearn.model_selection.train_test_split",
"numpy.arange",
"glob.glob",
"numpy.round",
"pandas.DataFrame",
"matplotlib.patches.Rectangle",
"sklearn.linear_model.ElasticNet",
"pandas.merge",
"matplotlib.pyplot.xticks",
"matplotlib.pyplot.subplots",
"pandas.concat",
"sklearn.linear_model.Lasso",
"sklearn.linear_model.Ridge",
"matplotlib.pyplot.show",
"matplotlib.pyplot.ylim",
"sklearn.preprocessing.OneHotEncoder",
"sklearn.ensemble.RandomForestRegressor",
"sklearn.linear_model.LinearRegression",
"sas7bdat.SAS7BDAT",
"IPython.display.clear_output",
"sklearn.model_selection.KFold",
"sklearn.utils.shuffle",
"matplotlib.pyplot.xlabel"
] |
[((3880, 3914), 'pandas.read_csv', 'pd.read_csv', (['"""featureTableMap.csv"""'], {}), "('featureTableMap.csv')\n", (3891, 3914), True, 'import pandas as pd\n'), ((3926, 4000), 'pandas.read_csv', 'pd.read_csv', (['"""./data/subjectsWithBiomarkers.csv"""'], {'usecols': "['idind', 'Age']"}), "('./data/subjectsWithBiomarkers.csv', usecols=['idind', 'Age'])\n", (3937, 4000), True, 'import pandas as pd\n'), ((515, 542), 'glob.glob', 'glob.glob', (['"""./data/Master*"""'], {}), "('./data/Master*')\n", (524, 542), False, 'import glob\n'), ((4117, 4206), 'pandas.read_csv', 'pd.read_csv', (['"""./data/subjectsWithBiomarkers.csv"""'], {'usecols': "['idind', 'hhid', 'commid']"}), "('./data/subjectsWithBiomarkers.csv', usecols=['idind', 'hhid',\n 'commid'])\n", (4128, 4206), True, 'import pandas as pd\n'), ((5024, 5078), 'pandas.read_csv', 'pd.read_csv', (['"""./data/relationmast_pub_00_2009only.csv"""'], {}), "('./data/relationmast_pub_00_2009only.csv')\n", (5035, 5078), True, 'import pandas as pd\n'), ((5376, 5396), 'pandas.DataFrame', 'pd.DataFrame', ([], {'data': 'd'}), '(data=d)\n', (5388, 5396), True, 'import pandas as pd\n'), ((5489, 5656), 'pandas.read_csv', 'pd.read_csv', (['"""./data/pact_12_2009only.csv"""'], {'usecols': "['idind', 'u324', 'u339', 'u340_mn', 'u341_mn', 'u508', 'u509_mn',\n 'u510_mn', 'u345', 'u346_mn', 'u347_mn']"}), "('./data/pact_12_2009only.csv', usecols=['idind', 'u324', 'u339',\n 'u340_mn', 'u341_mn', 'u508', 'u509_mn', 'u510_mn', 'u345', 'u346_mn',\n 'u347_mn'])\n", (5500, 5656), True, 'import pandas as pd\n'), ((7415, 7482), 'pandas.read_csv', 'pd.read_csv', (['"""./data/subjectsWithBiomarkers.csv"""'], {'usecols': "['idind']"}), "('./data/subjectsWithBiomarkers.csv', usecols=['idind'])\n", (7426, 7482), True, 'import pandas as pd\n'), ((7753, 7773), 'pandas.DataFrame', 'pd.DataFrame', ([], {'data': 'd'}), '(data=d)\n', (7765, 7773), True, 'import pandas as pd\n'), ((8611, 8660), 'pandas.merge', 'pd.merge', (['subjects', 'newDF'], {'how': 
'"""left"""', 'on': '"""idind"""'}), "(subjects, newDF, how='left', on='idind')\n", (8619, 8660), True, 'import pandas as pd\n'), ((8778, 8852), 'pandas.read_csv', 'pd.read_csv', (['"""./data/subjectsWithBiomarkers.csv"""'], {'usecols': "['idind', 'Age']"}), "('./data/subjectsWithBiomarkers.csv', usecols=['idind', 'Age'])\n", (8789, 8852), True, 'import pandas as pd\n'), ((9303, 9334), 'pandas.DataFrame', 'pd.DataFrame', (['medicalConditions'], {}), '(medicalConditions)\n', (9315, 9334), True, 'import pandas as pd\n'), ((9345, 9391), 'sklearn.preprocessing.OneHotEncoder', 'preprocessing.OneHotEncoder', ([], {'categories': '"""auto"""'}), "(categories='auto')\n", (9372, 9391), False, 'from sklearn import preprocessing\n'), ((9540, 9579), 'pandas.DataFrame', 'pd.DataFrame', (['data'], {'columns': 'columnNames'}), '(data, columns=columnNames)\n', (9552, 9579), True, 'import pandas as pd\n'), ((9721, 9789), 'pandas.concat', 'pd.concat', (['[subjects, medicalConditions]'], {'axis': '(1)', 'ignore_index': '(False)'}), '([subjects, medicalConditions], axis=1, ignore_index=False)\n', (9730, 9789), True, 'import pandas as pd\n'), ((10897, 10955), 'sklearn.utils.shuffle', 'utils.shuffle', (['featureMatrix', 'targetMatrix'], {'random_state': '(0)'}), '(featureMatrix, targetMatrix, random_state=0)\n', (10910, 10955), False, 'from sklearn import utils, model_selection, metrics, linear_model, neighbors, ensemble\n'), ((11070, 11145), 'sklearn.model_selection.train_test_split', 'model_selection.train_test_split', (['X', 'Y'], {'random_state': '(0)', 'test_size': 'test_size'}), '(X, Y, random_state=0, test_size=test_size)\n', (11102, 11145), False, 'from sklearn import utils, model_selection, metrics, linear_model, neighbors, ensemble\n'), ((11167, 11222), 'sklearn.model_selection.KFold', 'model_selection.KFold', ([], {'n_splits': 'n_splits', 'shuffle': '(False)'}), '(n_splits=n_splits, shuffle=False)\n', (11188, 11222), False, 'from sklearn import utils, model_selection, metrics, 
linear_model, neighbors, ensemble\n'), ((11394, 11423), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'figsize': '(12, 3)'}), '(figsize=(12, 3))\n', (11406, 11423), True, 'import matplotlib.pyplot as plt\n'), ((11470, 11500), 'matplotlib.pyplot.ylim', 'plt.ylim', (['(0)', '(cv.n_splits + 1.5)'], {}), '(0, cv.n_splits + 1.5)\n', (11478, 11500), True, 'import matplotlib.pyplot as plt\n'), ((11574, 11604), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Dataset indicies"""'], {}), "('Dataset indicies')\n", (11584, 11604), True, 'import matplotlib.pyplot as plt\n'), ((13040, 13050), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (13048, 13050), True, 'import matplotlib.pyplot as plt\n'), ((25631, 25660), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'figsize': '(11, 5)'}), '(figsize=(11, 5))\n', (25643, 25660), True, 'import matplotlib.pyplot as plt\n'), ((25926, 25957), 'matplotlib.pyplot.xticks', 'plt.xticks', ([], {'rotation': '"""vertical"""'}), "(rotation='vertical')\n", (25936, 25957), True, 'import matplotlib.pyplot as plt\n'), ((26088, 26098), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (26096, 26098), True, 'import matplotlib.pyplot as plt\n'), ((26580, 26603), 'IPython.display.clear_output', 'clear_output', ([], {'wait': '(True)'}), '(wait=True)\n', (26592, 26603), False, 'from IPython.display import clear_output\n'), ((587, 617), 'glob.glob', 'glob.glob', (["(dir + '/*.sas7bdat')"], {}), "(dir + '/*.sas7bdat')\n", (596, 617), False, 'import glob\n'), ((1270, 1318), 'pandas.read_csv', 'pd.read_csv', (['"""./data/subjectsWithBiomarkers.csv"""'], {}), "('./data/subjectsWithBiomarkers.csv')\n", (1281, 1318), True, 'import pandas as pd\n'), ((2509, 2551), 'sas7bdat.SAS7BDAT', 'SAS7BDAT', (['inputFullPath'], {'skip_header': '(False)'}), '(inputFullPath, skip_header=False)\n', (2517, 2551), False, 'from sas7bdat import SAS7BDAT\n'), ((7077, 7107), 'numpy.round', 'np.round', (['(weeklyScreenTime / 7)'], {}), '(weeklyScreenTime / 
7)\n', (7085, 7107), True, 'import numpy as np\n'), ((12571, 12650), 'matplotlib.patches.Rectangle', 'mpl.patches.Rectangle', (['start'], {'width': 'width', 'height': '(0.8)', 'color': '"""g"""', 'label': '"""Train"""'}), "(start, width=width, height=0.8, color='g', label='Train')\n", (12592, 12650), True, 'import matplotlib as mpl\n'), ((12750, 12828), 'matplotlib.patches.Rectangle', 'mpl.patches.Rectangle', (['start'], {'width': 'width', 'height': '(0.8)', 'color': '"""r"""', 'label': '"""Test"""'}), "(start, width=width, height=0.8, color='r', label='Test')\n", (12771, 12828), True, 'import matplotlib as mpl\n'), ((12912, 12941), 'numpy.arange', 'np.arange', (['(1)', '(cv.n_splits + 2)'], {}), '(1, cv.n_splits + 2)\n', (12921, 12941), True, 'import numpy as np\n'), ((11994, 12058), 'matplotlib.patches.Rectangle', 'mpl.patches.Rectangle', (['start'], {'width': 'width', 'height': '(0.8)', 'color': '"""c"""'}), "(start, width=width, height=0.8, color='c')\n", (12015, 12058), True, 'import matplotlib as mpl\n'), ((12354, 12423), 'matplotlib.patches.Rectangle', 'mpl.patches.Rectangle', (['start'], {'width': 'width', 'height': '(0.8)', 'color': '"""orange"""'}), "(start, width=width, height=0.8, color='orange')\n", (12375, 12423), True, 'import matplotlib as mpl\n'), ((17009, 17040), 'sklearn.linear_model.LinearRegression', 'linear_model.LinearRegression', ([], {}), '()\n', (17038, 17040), False, 'from sklearn import utils, model_selection, metrics, linear_model, neighbors, ensemble\n'), ((17062, 17092), 'sklearn.linear_model.Lasso', 'linear_model.Lasso', ([], {'alpha': '(0.75)'}), '(alpha=0.75)\n', (17080, 17092), False, 'from sklearn import utils, model_selection, metrics, linear_model, neighbors, ensemble\n'), ((17114, 17144), 'sklearn.linear_model.Ridge', 'linear_model.Ridge', ([], {'alpha': '(0.75)'}), '(alpha=0.75)\n', (17132, 17144), False, 'from sklearn import utils, model_selection, metrics, linear_model, neighbors, ensemble\n'), ((17168, 17217), 
'sklearn.linear_model.ElasticNet', 'linear_model.ElasticNet', ([], {'alpha': '(0.1)', 'l1_ratio': '(0.75)'}), '(alpha=0.1, l1_ratio=0.75)\n', (17191, 17217), False, 'from sklearn import utils, model_selection, metrics, linear_model, neighbors, ensemble\n'), ((17248, 17358), 'sklearn.ensemble.RandomForestRegressor', 'ensemble.RandomForestRegressor', ([], {'random_state': '(0)', 'max_features': '"""auto"""', 'min_samples_leaf': '(50)', 'n_estimators': '(200)'}), "(random_state=0, max_features='auto',\n min_samples_leaf=50, n_estimators=200)\n", (17278, 17358), False, 'from sklearn import utils, model_selection, metrics, linear_model, neighbors, ensemble\n'), ((11882, 11969), 'matplotlib.patches.Rectangle', 'mpl.patches.Rectangle', (['start'], {'width': 'width', 'height': '(0.8)', 'color': '"""c"""', 'label': '"""CV_train"""'}), "(start, width=width, height=0.8, color='c', label=\n 'CV_train')\n", (11903, 11969), True, 'import matplotlib as mpl\n'), ((12231, 12328), 'matplotlib.patches.Rectangle', 'mpl.patches.Rectangle', (['start'], {'width': 'width', 'height': '(0.8)', 'color': '"""orange"""', 'label': '"""CV_validation"""'}), "(start, width=width, height=0.8, color='orange', label\n ='CV_validation')\n", (12252, 12328), True, 'import matplotlib as mpl\n')]
|
# Public python modules
import numpy as np
import pandas as pd
import pickle
import feature
from os import path
# If categories of test data = categories of the training data
class load():
    """Mini-batch loader for a pickled dataframe of (patch, category) rows.

    Used when the test data's categories match the training categories:
    labels are one-hot encoded over ``n_class`` categories.
    """
    def __init__(self, data_path, batch_size):
        """Load the pickled dataframe and precompute batch/class counts."""
        self.pointer = 0
        # Use a context manager so the file handle is closed (the original
        # passed a bare open() into pickle.load and leaked it).
        with open(data_path, "rb") as f:
            self.dataframe = pickle.load(f)
        self.batch_size = batch_size
        self.n_batch = int(len(self.dataframe) / self.batch_size) # The number of batches
        self.n_class = len(set(self.dataframe['category'].values)) # The number of classes
    # Batch
    def batch(self, dataframe):
        """Return (x_data, y_data) for a dataframe slice; y is one-hot encoded."""
        x_data = []
        y_data = []
        # Collect patches and one-hot labels from the given rows.
        for i, row in dataframe.iterrows():
            x_data.append(np.float32(row['patch']))
            y = np.zeros(self.n_class)
            y[row['category']] = 1
            y_data.append(y)
        return x_data, y_data
    # Mini-batch (via batch(self, dataframe) )
    def next_batch(self):
        """Return the next mini-batch, wrapping around after the last one."""
        start_pos = self.pointer * self.batch_size
        batch_df = self.dataframe.iloc[start_pos:start_pos + self.batch_size]
        self.pointer = (self.pointer + 1) % self.n_batch # Move pointer for the next mini-batch
        return self.batch(batch_df)
# If categories of test data ~= categories of the training data
class load2():
    """Mini-batch loader for a pickled dataframe of (patch, track_id) rows.

    Used when the test data's categories only roughly match the training
    categories: labels are returned as raw ``track_id`` values instead of
    one-hot vectors.
    """
    def __init__(self, data_path, batch_size):
        """Load the pickled dataframe and precompute batch/class counts."""
        self.pointer = 0
        # Use a context manager so the file handle is closed (the original
        # passed a bare open() into pickle.load and leaked it).
        with open(data_path, "rb") as f:
            self.dataframe = pickle.load(f)
        self.batch_size = batch_size
        self.n_batch = int(len(self.dataframe) / self.batch_size)
        self.n_class = len(set(self.dataframe['category'].values))
    # Batch
    def batch(self, dataframe):
        """Return (x_data, y_data) for a dataframe slice; y holds raw track ids."""
        x_data = []
        y_data = []
        # Collect patches and their track ids from the given rows.
        for i, row in dataframe.iterrows():
            x_data.append(np.float32(row['patch']))
            y_data.append(row['track_id'])
        return x_data, y_data
    # Mini-batch (via batch(self, dataframe) )
    def next_batch(self):
        """Return the next mini-batch, wrapping around after the last one."""
        start_pos = self.pointer * self.batch_size
        batch_df = self.dataframe.iloc[start_pos:start_pos + self.batch_size]
        self.pointer = (self.pointer + 1) % self.n_batch # Move pointer for the next mini-batch
        return self.batch(batch_df)
if __name__ == "__main__":
import gen_data
|
[
"numpy.float32",
"numpy.zeros"
] |
[((1029, 1051), 'numpy.zeros', 'np.zeros', (['self.n_class'], {}), '(self.n_class)\n', (1037, 1051), True, 'import numpy as np\n'), ((929, 946), 'numpy.float32', 'np.float32', (['patch'], {}), '(patch)\n', (939, 946), True, 'import numpy as np\n'), ((2291, 2308), 'numpy.float32', 'np.float32', (['patch'], {}), '(patch)\n', (2301, 2308), True, 'import numpy as np\n')]
|
from random import choices
from typing import Callable
import humanize
from .covid import Covid
from .graph import Graph
from .image import Image
from .testing import Testing
from .twitter import Twitter
class Alerts(Covid, Graph, Image, Testing, Twitter):
    """Builds and posts COVID-19 alert tweets.

    Each call to :meth:`generate` picks a data source at random (worldwide
    totals, a country, a graph, an image or testing data) and posts a
    formatted message for it.
    """

    def __init__(self):
        super().__init__()

    @property
    def chosen_data(self) -> Callable:
        """
        Chooses at random with weighted distribution whether to get
        data for the whole world or any specific country. We want
        to post countries more.
        """
        chosen: Callable = choices(
            [
                self.world_data,
                self.random_country_data,
                self.random_country_graph,
                self.random_image,
                self.random_country_tests,
                self.random_country_group_graph,
            ],
            weights=[0.2, 0.1, 0.25, 0.05, 0.15, 0.25],
            k=1,
        )
        # choices() returns a one-element list; call the selected getter.
        return chosen[0]()

    def generate(self):
        """
        Generates the alert.

        Data for a given country looks like this:
        {'country': 'Malta', 'cases': 21, 'todayCases': 3, 'deaths': 0, 'todayDeaths': 0, 'recovered': 2, 'critical': 0}

        Data for the world looks like:
        {'cases': 162386, 'deaths': 5984, 'recovered': 75967}
        """
        data = self.chosen_data
        # Dispatch on marker keys first (image/tests/graph), then on the
        # shape of the case data (world vs. country, first cases, etc.).
        if data.get("image"):
            self.__image(data)
        elif data.get("tests"):
            self.__tests(data)
        elif data.get("graph"):
            self.__graph(data)
        elif data.get("graph_group"):
            self.__graph_group(data)
        elif not data.get("country"):
            self.__world(data)
        elif data.get("cases") == 0:
            self.__no_cases(data)
        elif data.get("cases") == data.get("todayCases"):
            self.__first_batch(data)
        elif data.get("deaths") == data.get("todayDeaths") and data.get("deaths") != 0:
            self.__first_deaths(data)
        else:
            self.__country(data)

    def __image(self, data):
        """Post a WHO guidance image."""
        img_path = data["img_path"]
        media_id = self.upload_image(img_path)
        self.post(
            # Plain string (the original used an f-string with no placeholders).
            "Guidance from the World Health Organization (WHO)", media_ids=[media_id],
        )

    def __graph(self, data):
        """Post a cases-over-time graph for a single country."""
        cases = data["cases"]
        country = data["country"]
        img_path = data["img_path"]
        media_id = self.upload_image(img_path)
        self.post(
            f"Evolution of number of cases for {country.replace('*', '')}, with a total confirmed of {humanize.intcomma(cases)}",
            media_ids=[media_id],
        )

    def __graph_group(self, data):
        """Post a comparison graph for a group of countries."""
        countries = data["countries"]
        img_path = data["img_path"]
        media_id = self.upload_image(img_path)
        self.post(
            f"Evolution of cases in {', '.join(countries)}, since 100th confirmed case.",
            media_ids=[media_id],
        )

    def __world(self, data):
        """Post worldwide case/death totals with the fatality rate."""
        cases = data["cases"]
        deaths = data["deaths"]
        rate = round(deaths / cases * 100, 2)
        self.post(
            f"Latest worldwide COVID-19 data: {humanize.intcomma(cases)} cases, {humanize.intcomma(deaths)} deaths.\n\nA {rate}% fatality rate."
        )

    def __country(self, data):
        """Post a country's totals plus today's new cases and deaths."""
        cases = data["cases"]
        deaths = data["deaths"]
        today_cases = data["todayCases"]
        today_deaths = data["todayDeaths"]
        rate = round(deaths / cases * 100, 2)
        self.post(
            f"Latest COVID-19 data for {data['country']}: {humanize.intcomma(cases)} case{'s' if cases > 1 else ''}, of those {humanize.intcomma(today_cases)} today; {humanize.intcomma(deaths)} death{'s' if deaths > 1 else ''}, of those {humanize.intcomma(today_deaths)} today.\n\nA {rate}% fatality rate."
        )

    def __first_batch(self, data):
        """Post an alert for a country's first reported batch of cases."""
        cases = data["cases"]
        deaths = data["deaths"]
        self.post(
            f"First case{'s' if cases > 1 else ''} of COVID-19 confirmed in {data['country']}: {humanize.intcomma(cases)} case{'s' if cases > 1 else ''}, with {humanize.intcomma(deaths)} death{'s' if deaths > 1 else ''} reported."
        )

    def __first_deaths(self, data):
        """Post an alert for a country's first reported deaths."""
        cases = data["cases"]
        deaths = data["deaths"]
        rate = round(deaths / cases * 100, 2)
        self.post(
            # Bug fix: pluralise on the number of *deaths*, not cases
            # (the original wrote "First death" / "person" whenever there
            # was one case, regardless of the death count).
            f"First death{'s' if deaths > 1 else ''} by COVID-19 reported in {data['country']}: {humanize.intcomma(deaths)} {'people' if deaths > 1 else 'person'} have died out of {humanize.intcomma(cases)} confirmed cases.\n\nA {rate}% fatality rate."
        )

    def __no_cases(self, data):
        """Post that a country still has no reported infections or deaths."""
        self.post(
            f"Latest COVID-19 data: {data['country']} still reports no infections or deaths."
        )

    def __tests(self, data):
        """Post a country's testing totals, enriched with case data if found."""
        try: # lets try to enrich this with other statistics from the country in question
            if len(data["country"]) <= 2:
                raise ValueError("Likely a state. Skipping...")
            country_data = self.country(data["country"])
            cases = country_data["cases"]
            today_cases = country_data["todayCases"]
            deaths = country_data["deaths"]
            today_deaths = country_data["todayDeaths"]
        except Exception as e: # if anything blows up here and we can't find the country by FuzzyMatching, no biggie
            print(str(e))
            country_data = None
        message = (
            f"Total COVID-19 tests performed in {data['country']}: {data['tests']}."
        )
        if country_data:
            message = (
                message
                + f" {humanize.intcomma(cases)} case{'s' if cases > 1 else ''}, of those {humanize.intcomma(today_cases)} today; {humanize.intcomma(deaths)} death{'s' if deaths > 1 else ''}, of those {humanize.intcomma(today_deaths)} today."
            )
        self.post(message)
|
[
"random.choices",
"humanize.intcomma"
] |
[((583, 804), 'random.choices', 'choices', (['[self.world_data, self.random_country_data, self.random_country_graph, self\n .random_image, self.random_country_tests, self.random_country_group_graph]'], {'weights': '[0.2, 0.1, 0.25, 0.05, 0.15, 0.25]', 'k': '(1)'}), '([self.world_data, self.random_country_data, self.\n random_country_graph, self.random_image, self.random_country_tests,\n self.random_country_group_graph], weights=[0.2, 0.1, 0.25, 0.05, 0.15, \n 0.25], k=1)\n', (590, 804), False, 'from random import choices\n'), ((2561, 2585), 'humanize.intcomma', 'humanize.intcomma', (['cases'], {}), '(cases)\n', (2578, 2585), False, 'import humanize\n'), ((3150, 3174), 'humanize.intcomma', 'humanize.intcomma', (['cases'], {}), '(cases)\n', (3167, 3174), False, 'import humanize\n'), ((3184, 3209), 'humanize.intcomma', 'humanize.intcomma', (['deaths'], {}), '(deaths)\n', (3201, 3209), False, 'import humanize\n'), ((3561, 3585), 'humanize.intcomma', 'humanize.intcomma', (['cases'], {}), '(cases)\n', (3578, 3585), False, 'import humanize\n'), ((3629, 3659), 'humanize.intcomma', 'humanize.intcomma', (['today_cases'], {}), '(today_cases)\n', (3646, 3659), False, 'import humanize\n'), ((3669, 3694), 'humanize.intcomma', 'humanize.intcomma', (['deaths'], {}), '(deaths)\n', (3686, 3694), False, 'import humanize\n'), ((3740, 3771), 'humanize.intcomma', 'humanize.intcomma', (['today_deaths'], {}), '(today_deaths)\n', (3757, 3771), False, 'import humanize\n'), ((4033, 4057), 'humanize.intcomma', 'humanize.intcomma', (['cases'], {}), '(cases)\n', (4050, 4057), False, 'import humanize\n'), ((4097, 4122), 'humanize.intcomma', 'humanize.intcomma', (['deaths'], {}), '(deaths)\n', (4114, 4122), False, 'import humanize\n'), ((4439, 4464), 'humanize.intcomma', 'humanize.intcomma', (['deaths'], {}), '(deaths)\n', (4456, 4464), False, 'import humanize\n'), ((4522, 4546), 'humanize.intcomma', 'humanize.intcomma', (['cases'], {}), '(cases)\n', (4539, 4546), False, 'import humanize\n'), 
((5626, 5650), 'humanize.intcomma', 'humanize.intcomma', (['cases'], {}), '(cases)\n', (5643, 5650), False, 'import humanize\n'), ((5694, 5724), 'humanize.intcomma', 'humanize.intcomma', (['today_cases'], {}), '(today_cases)\n', (5711, 5724), False, 'import humanize\n'), ((5734, 5759), 'humanize.intcomma', 'humanize.intcomma', (['deaths'], {}), '(deaths)\n', (5751, 5759), False, 'import humanize\n'), ((5805, 5836), 'humanize.intcomma', 'humanize.intcomma', (['today_deaths'], {}), '(today_deaths)\n', (5822, 5836), False, 'import humanize\n')]
|
#!/usr/bin/env python3
#
# __init__.py
"""
Use black with formate.
"""
#
# Copyright © 2021 <NAME> <<EMAIL>>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
# OR OTHER DEALINGS IN THE SOFTWARE.
#
# stdlib
import itertools
from collections.abc import Mapping
from typing import Optional
# 3rd party
import black
from black import TargetVersion
from black.lines import Line
from domdf_python_tools.paths import PathPlus
from domdf_python_tools.typing import PathLike
from domdf_python_tools.words import TAB
from formate.config import wants_filename, wants_global_config
__author__: str = "<NAME>"
__copyright__: str = "2021 <NAME>"
__license__: str = "MIT License"
__version__: str = "0.1.0"
__email__: str = "<EMAIL>"
__all__ = ["black_hook"]
@wants_filename
@wants_global_config
def black_hook(
    source: str,
    formate_filename: PathLike,
    formate_global_config: Optional[Mapping] = None,
    **kwargs,
) -> str:
    r"""
    Reformat *source* with `black <https://pypi.org/project/black/>`_, using
    the given keyword arguments as its configuration.

    :param source: The source to reformat.
    :param formate_filename: The path of the file being reformatted.
    :param formate_global_config: The global configuration dictionary. Optional.
    :param \*\*kwargs: Options passed through to ``black.Mode``.

    :returns: The reformatted source.
    """
    global_config = formate_global_config or {}

    # Pick the Mode class. An explicit ``use_tabs`` option wins; otherwise a
    # global ``indent`` setting of a tab selects the tab-emitting mode.
    if "use_tabs" in kwargs:
        mode_class = TabsMode if kwargs.pop("use_tabs") else black.Mode
    elif global_config.get("indent") == TAB:
        mode_class = TabsMode
    else:
        mode_class = black.Mode

    # Inherit ``line_length`` from the global config when not set explicitly.
    if "line_length" not in kwargs and "line_length" in global_config:
        kwargs["line_length"] = global_config["line_length"]

    kwargs["is_pyi"] = PathPlus(formate_filename).suffix == ".pyi"

    # Normalise target version options into a set of ``TargetVersion``\s.
    requested_versions = kwargs.get("target_versions", ())
    kwargs["target_versions"] = {TargetVersion[val.upper()] for val in requested_versions}
    if "target_version" in kwargs:
        kwargs["target_versions"].add(TargetVersion[kwargs.pop("target_version").upper()])

    return black.format_str(source, mode=mode_class(**kwargs))
# The following adapted from black itself
# https://github.com/psf/black
# MIT Licensed
# Copyright (c) 2018 <NAME>
def line_str(self: Line) -> str:
    """
    Render the line, indenting with tabs when the line's mode has
    ``use_tabs`` set (adapted from black's ``Line.__str__``).
    """
    if not self:
        return '\n'

    indent_unit = '\t' if getattr(self.mode, "use_tabs", False) else " "
    indent = indent_unit * self.depth

    leaf_iter = iter(self.leaves)
    head = next(leaf_iter)
    rendered = f"{head.prefix}{indent}{head.value}"
    for leaf in leaf_iter:
        rendered += str(leaf)
    for comment in itertools.chain.from_iterable(self.comments.values()):
        rendered += str(comment)
    return rendered + '\n'
# Monkeypatch black's Line rendering with the ``use_tabs``-aware version
# above, so TabsMode output is indented with tabs.
Line.__str__ = line_str  # type: ignore
class TabsMode(black.Mode):
    """
    A ``black.Mode`` whose rendered output is indented with tabs
    (checked by ``line_str`` via the ``use_tabs`` attribute).
    """

    use_tabs = True
|
[
"domdf_python_tools.paths.PathPlus"
] |
[((2623, 2649), 'domdf_python_tools.paths.PathPlus', 'PathPlus', (['formate_filename'], {}), '(formate_filename)\n', (2631, 2649), False, 'from domdf_python_tools.paths import PathPlus\n')]
|
import floppyforms as forms
from django.forms.models import modelformset_factory
from django.utils.translation import ugettext_lazy as _
from horizon import tables
from horizon.tables.formset import FormsetDataTable, FormsetRow
from leonardo.module.web.models import WidgetDimension
class Slider(forms.RangeInput):
    """Range input rendered as a slider from 1 to 12 with step 1."""

    min = 1
    max = 12
    step = 1
    template_name = 'floppyforms/slider.html'
class OffsetSlider(Slider):
    """Slider whose range starts at 0 instead of 1."""

    min = 0
class HeightSlider(OffsetSlider):
    """Slider ranging from 0 up to 24."""

    max = 24
class WidgetDimensionForm(forms.ModelForm):
    """ModelForm for ``WidgetDimension`` using slider widgets for the
    width/height/offset fields."""

    width = forms.CharField(widget=Slider(), initial=12)
    height = forms.CharField(widget=HeightSlider(), initial=0)
    offset = forms.CharField(widget=OffsetSlider(), initial=0)

    def __init__(self, *args, **kw):
        super(WidgetDimensionForm, self).__init__(*args, **kw)
        # Pre-select 'xs' as the default size.
        self.fields['size'].initial = 'xs'

    class Meta:
        model = WidgetDimension
        exclude = tuple()
# Formset over WidgetDimension rows: one extra blank form, rows deletable.
WidgetDimensionFormset = modelformset_factory(
    WidgetDimension, form=WidgetDimensionForm, can_delete=True, extra=1)
class CustomFormsetRow(FormsetRow):
    """Formset row that pre-fills the extra blank row from the first
    existing dimension in the column's data."""

    def __init__(self, column, datum, form):
        self.form = form
        super(CustomFormsetRow, self).__init__(column, datum, form)
        # Copy initial values into the extra blank row (no datum) from the
        # first existing row, bumping the id by one.
        if not datum and column.data:
            try:
                previous = column.data[0]
                self.form.fields['widget_type'].initial = previous.widget_type
                self.form.fields['widget_id'].initial = previous.widget_id
                self.form.fields['id'].initial = previous.id + 1
            except Exception:
                # Best-effort prefill: a row without these attributes simply
                # leaves the fields blank.
                pass
class WidgetDimensionTable(FormsetDataTable):
    """Editable data table of widget dimensions backed by
    ``WidgetDimensionFormset``."""

    formset_class = WidgetDimensionFormset

    def get_formset(self):
        """Provide the formset corresponding to this DataTable.

        Use this to validate the formset and to get the submitted data back.
        """
        if self.widget:
            queryset = self.widget.dimensions
        else:
            queryset = WidgetDimension.objects.none()
        if self._formset is None:
            self._formset = self.formset_class(
                self.request.POST or None,
                initial=self._get_formset_data(),
                prefix=self._meta.name,
                queryset=queryset)
        return self._formset

    def __init__(self, *args, **kwargs):
        self._meta.row_class = CustomFormsetRow
        self.widget = kwargs.pop('widget', None)
        super(WidgetDimensionTable, self).__init__(*args, **kwargs)

    widget_id = tables.Column('widget_id', hidden=True)
    widget_type = tables.Column('widget_type', hidden=True)
    size = tables.Column('size', verbose_name=_('Size'))
    # BUG FIX: 'Width' was wrapped in plain parentheses instead of the
    # ugettext_lazy ``_()`` call used by every other column, so the label
    # was never marked for translation.
    width = tables.Column('width', verbose_name=_('Width'))
    height = tables.Column('height', verbose_name=_('Height'))
    offset = tables.Column('offset', verbose_name=_('Offset'))

    name = 'dimensions'

    class Meta:
        name = 'dimensions'
        table_name = 'Dimensions'
|
[
"leonardo.module.web.models.WidgetDimension.objects.none",
"horizon.tables.Column",
"django.utils.translation.ugettext_lazy",
"django.forms.models.modelformset_factory"
] |
[((969, 1063), 'django.forms.models.modelformset_factory', 'modelformset_factory', (['WidgetDimension'], {'form': 'WidgetDimensionForm', 'can_delete': '(True)', 'extra': '(1)'}), '(WidgetDimension, form=WidgetDimensionForm, can_delete=\n True, extra=1)\n', (989, 1063), False, 'from django.forms.models import modelformset_factory\n'), ((2545, 2584), 'horizon.tables.Column', 'tables.Column', (['"""widget_id"""'], {'hidden': '(True)'}), "('widget_id', hidden=True)\n", (2558, 2584), False, 'from horizon import tables\n'), ((2603, 2644), 'horizon.tables.Column', 'tables.Column', (['"""widget_type"""'], {'hidden': '(True)'}), "('widget_type', hidden=True)\n", (2616, 2644), False, 'from horizon import tables\n'), ((2714, 2758), 'horizon.tables.Column', 'tables.Column', (['"""width"""'], {'verbose_name': '"""Width"""'}), "('width', verbose_name='Width')\n", (2727, 2758), False, 'from horizon import tables\n'), ((2011, 2041), 'leonardo.module.web.models.WidgetDimension.objects.none', 'WidgetDimension.objects.none', ([], {}), '()\n', (2039, 2041), False, 'from leonardo.module.web.models import WidgetDimension\n'), ((2691, 2700), 'django.utils.translation.ugettext_lazy', '_', (['"""Size"""'], {}), "('Size')\n", (2692, 2700), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((2811, 2822), 'django.utils.translation.ugettext_lazy', '_', (['"""Height"""'], {}), "('Height')\n", (2812, 2822), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((2874, 2885), 'django.utils.translation.ugettext_lazy', '_', (['"""Offset"""'], {}), "('Offset')\n", (2875, 2885), True, 'from django.utils.translation import ugettext_lazy as _\n')]
|
from swcpm import click, swc_pm
from .run import run_command
from .info import info_command
from .wget import wget_command
from .install import install_command
from .update import update_command
from .remove import remove_command
########################################################################################################################
@swc_pm.command("debug", short_help="Debugs the application.")
def debug():
"""Debugs the application."""
cmd_list = [run_command, info_command, wget_command, install_command, update_command, remove_command]
for cmd in cmd_list:
click.echo("Found command module: " + cmd.__name__)
if __name__ == '__main__':
    # Run the swc_pm CLI entry point when executed directly.
    swc_pm()
|
[
"swcpm.swc_pm.command",
"swcpm.swc_pm",
"swcpm.click.echo"
] |
[((370, 431), 'swcpm.swc_pm.command', 'swc_pm.command', (['"""debug"""'], {'short_help': '"""Debugs the application."""'}), "('debug', short_help='Debugs the application.')\n", (384, 431), False, 'from swcpm import click, swc_pm\n'), ((712, 720), 'swcpm.swc_pm', 'swc_pm', ([], {}), '()\n', (718, 720), False, 'from swcpm import click, swc_pm\n'), ((623, 674), 'swcpm.click.echo', 'click.echo', (["('Found command module: ' + cmd.__name__)"], {}), "('Found command module: ' + cmd.__name__)\n", (633, 674), False, 'from swcpm import click, swc_pm\n')]
|
# Copyright (c) 2015, Palo Alto Networks
#
# Permission to use, copy, modify, and/or distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
# Author: <NAME> <<EMAIL>>
"""Update users on the firewall from logs in Splunk
About this script
-----------------
User-ID is a mechanism in the firewall that maps users to IP addresses.
These User to IP mappings can be updated using many methods including from
Active Directory or by sending syslogs to a firewall from a Radius or other
authentication server.
Many organizations send authentication logs to Splunk, so it is natural for
Splunk to communicate these authentication events to the firewalls so their
User to IP mappings are always up-to-date.
There are two methods to synchronize authentication events from Splunk to the firewall:
Method 1: Forward logs from Splunk to the User-ID firewall.
Method 2: Use this script to update the firewall using its API.
Method 1 is preferred because it is more efficient. However, Method 2 is
useful in cases where the user and the IP are not in the same logs. Splunk
can correlate the user to the IP before passing the mapping to the firewall
via API.
This script supports connection to a firewall or to Panorama.
"""
#########################################################
# Do NOT modify anything below this line unless you are
# certain of the ramifications of the changes
#########################################################
import sys  # for system params and sys.exit()
import os

# Make the bundled 'lib' directory importable before importing local modules.
libpath = os.path.dirname(os.path.abspath(__file__))
sys.path[:0] = [os.path.join(libpath, 'lib')]
import common
import environment

logger = common.logging.getLogger().getChild('panUserUpdate')

try:
    if environment.run_by_splunk():
        import splunk.Intersplunk  # so you can interact with Splunk
        import splunk.entity as entity  # for splunk config info
    # Prepend the bundled pan-python and pandevice libraries to sys.path so
    # the vendored copies are preferred over any system-wide install.
    libpath = os.path.dirname(os.path.abspath(__file__))
    sys.path[:0] = [os.path.join(libpath, 'lib')]
    sys.path[:0] = [os.path.join(libpath, 'lib', 'pan-python', 'lib')]
    sys.path[:0] = [os.path.join(libpath, 'lib', 'pandevice')]
    import pandevice
    from pandevice.panorama import Panorama
    from pandevice.firewall import Firewall
    import pan.xapi
    from common import log
except Exception as e:
    # Handle exception to produce logs to python.log
    common.exit_with_error(e)
def main_splunk():
    """Splunk search-command entry point.

    Reads user/IP pairs from the piped search results and submits the
    corresponding User-ID login/logout events to a firewall, either
    directly or through Panorama.
    """
    # Get arguments
    args, kwargs = splunk.Intersplunk.getKeywordsAndOptions()

    # Enable debugging by passing 'debug=yes' as an argument of
    # the command on the Splunk searchbar.
    debug = common.check_debug(kwargs)

    # Parameters accepted from the Splunk searchbar:
    #   action, device, panorama, serial, vsys, user_field, ip_field,
    #   timeout, debug

    # Verify required args were passed to command
    log(debug, "Determining if required arguments are present")
    if 'device' not in kwargs and 'panorama' not in kwargs:
        common.exit_with_error("Missing required command argument: device or panorama", 3)
    if 'panorama' in kwargs and 'serial' not in kwargs:
        common.exit_with_error("Found 'panorama' arguments, but missing 'serial' argument", 3)

    # Assign defaults to fields that aren't specified
    action = kwargs['action'] if 'action' in kwargs else "login"
    vsys = kwargs['vsys'] if 'vsys' in kwargs else "vsys1"
    ip_field = kwargs['ip_field'] if 'ip_field' in kwargs else "src_ip"
    user_field = kwargs['user_field'] if 'user_field' in kwargs else "user"
    # NOTE(review): timeout arrives as a string from the searchbar; confirm
    # pandevice accepts that or convert to int here.
    timeout = kwargs['timeout'] if 'timeout' in kwargs else None

    # Determine if device hostname or serial was provided as argument
    log(debug, "Determining how firewalls should be contacted based on arguments")
    use_panorama = False
    hostname = None
    serial = None
    if "device" in kwargs:
        hostname = kwargs['device']
    elif "panorama" in kwargs:
        use_panorama = True
        hostname = kwargs['panorama']
        serial = kwargs['serial']
    else:
        common.exit_with_error("Missing required command argument: device or panorama", 3)
    log(debug, "Use Panorama: %s" % use_panorama)
    log(debug, "VSys: %s" % vsys)
    log(debug, "Hostname: %s" % hostname)
    if use_panorama and serial is not None:
        log(debug, "Device Serial: %s" % serial)

    # Results contains the data from the search results and settings
    # contains the sessionKey that we can use to talk to Splunk
    results, unused1, settings = splunk.Intersplunk.getOrganizedResults()
    # Get the sessionKey
    sessionKey = settings['sessionKey']

    log(debug, "Begin get API key")
    # Get the API key from the Splunk store or from the device at hostname
    # if no apikey is stored
    apikey = common.apikey(sessionKey, hostname, debug)

    # Create the connection to the firewall or Panorama
    if use_panorama:
        # For Panorama, create the Panorama object, and the firewall object
        panorama = Panorama(hostname, api_key=apikey)
        firewall = Firewall(serial=serial, vsys=vsys)
        panorama.add(firewall)
        firewall.userid.batch_start()
    else:
        # No Panorama, so just create the firewall object
        firewall = Firewall(hostname, api_key=apikey, vsys=vsys)
        firewall.userid.batch_start()

    # Collect all the ip addresses and users into firewall batch requests
    for result in results:
        ## Find the user (if a user_field was specified)
        try:
            this_user = result[user_field]
        except KeyError:
            result['status'] = "ERROR: Unable to determine user from field: %s" % user_field
            continue

        ## Find the IP
        try:
            this_ip = result[ip_field]
        except KeyError:
            result['status'] = "ERROR: Unable to determine ip from field: %s" % ip_field
            # BUG FIX: 'continue' was missing here, so a result without the
            # ip_field fell through and raised NameError on this_ip below.
            continue

        ## Create a request in the batch user-id update for the firewall.
        ## No API call to the firewall happens until all batch requests are
        ## created.
        if action == "login":
            log(debug, "Login event on firewall %s: %s - %s" % (firewall, this_ip, this_user))
            firewall.userid.login(this_user, this_ip, timeout=timeout)
        else:
            log(debug, "Logout event on firewall %s: %s - %s" % (firewall, this_ip, this_user))
            firewall.userid.logout(this_user, this_ip)
        result['status'] = "Submitted successfully"

    ## Make the API calls to the User-ID API of each firewall
    try:
        firewall.userid.batch_end()
    except pan.xapi.PanXapiError as e:
        common.exit_with_error(str(e))
    except Exception as e:
        common.exit_with_error(str(e))

    # output results
    splunk.Intersplunk.outputResults(results)
def main_cli():
    # Command-line (non-Splunk) invocation is not supported yet.
    raise NotImplementedError
if __name__ == "__main__":
if environment.run_by_splunk():
try:
main_splunk()
except Exception as e:
common.exit_with_error(e)
else:
main_cli()
|
[
"common.exit_with_error",
"os.path.abspath",
"common.log",
"os.path.join",
"environment.run_by_splunk",
"pandevice.firewall.Firewall",
"pandevice.panorama.Panorama",
"common.apikey",
"common.check_debug",
"common.logging.getLogger"
] |
[((2150, 2175), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (2165, 2175), False, 'import os\n'), ((2193, 2221), 'os.path.join', 'os.path.join', (['libpath', '"""lib"""'], {}), "(libpath, 'lib')\n", (2205, 2221), False, 'import os\n'), ((2332, 2359), 'environment.run_by_splunk', 'environment.run_by_splunk', ([], {}), '()\n', (2357, 2359), False, 'import environment\n'), ((3225, 3251), 'common.check_debug', 'common.check_debug', (['kwargs'], {}), '(kwargs)\n', (3243, 3251), False, 'import common\n'), ((3541, 3600), 'common.log', 'log', (['debug', '"""Determining if required arguments are present"""'], {}), "(debug, 'Determining if required arguments are present')\n", (3544, 3600), False, 'from common import log\n'), ((4403, 4481), 'common.log', 'log', (['debug', '"""Determining how firewalls should be contacted based on arguments"""'], {}), "(debug, 'Determining how firewalls should be contacted based on arguments')\n", (4406, 4481), False, 'from common import log\n'), ((4844, 4889), 'common.log', 'log', (['debug', "('Use Panorama: %s' % use_panorama)"], {}), "(debug, 'Use Panorama: %s' % use_panorama)\n", (4847, 4889), False, 'from common import log\n'), ((4894, 4923), 'common.log', 'log', (['debug', "('VSys: %s' % vsys)"], {}), "(debug, 'VSys: %s' % vsys)\n", (4897, 4923), False, 'from common import log\n'), ((4928, 4965), 'common.log', 'log', (['debug', "('Hostname: %s' % hostname)"], {}), "(debug, 'Hostname: %s' % hostname)\n", (4931, 4965), False, 'from common import log\n'), ((5337, 5368), 'common.log', 'log', (['debug', '"""Begin get API key"""'], {}), "(debug, 'Begin get API key')\n", (5340, 5368), False, 'from common import log\n'), ((5480, 5522), 'common.apikey', 'common.apikey', (['sessionKey', 'hostname', 'debug'], {}), '(sessionKey, hostname, debug)\n', (5493, 5522), False, 'import common\n'), ((7563, 7590), 'environment.run_by_splunk', 'environment.run_by_splunk', ([], {}), '()\n', (7588, 7590), False, 'import 
environment\n'), ((2266, 2292), 'common.logging.getLogger', 'common.logging.getLogger', ([], {}), '()\n', (2290, 2292), False, 'import common\n'), ((2526, 2551), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (2541, 2551), False, 'import os\n'), ((2573, 2601), 'os.path.join', 'os.path.join', (['libpath', '"""lib"""'], {}), "(libpath, 'lib')\n", (2585, 2601), False, 'import os\n'), ((2623, 2672), 'os.path.join', 'os.path.join', (['libpath', '"""lib"""', '"""pan-python"""', '"""lib"""'], {}), "(libpath, 'lib', 'pan-python', 'lib')\n", (2635, 2672), False, 'import os\n'), ((2694, 2735), 'os.path.join', 'os.path.join', (['libpath', '"""lib"""', '"""pandevice"""'], {}), "(libpath, 'lib', 'pandevice')\n", (2706, 2735), False, 'import os\n'), ((2975, 3000), 'common.exit_with_error', 'common.exit_with_error', (['e'], {}), '(e)\n', (2997, 3000), False, 'import common\n'), ((3669, 3755), 'common.exit_with_error', 'common.exit_with_error', (['"""Missing required command argument: device or panorama"""', '(3)'], {}), "('Missing required command argument: device or panorama',\n 3)\n", (3691, 3755), False, 'import common\n'), ((3816, 3907), 'common.exit_with_error', 'common.exit_with_error', (['"""Found \'panorama\' arguments, but missing \'serial\' argument"""', '(3)'], {}), '(\n "Found \'panorama\' arguments, but missing \'serial\' argument", 3)\n', (3838, 3907), False, 'import common\n'), ((5018, 5058), 'common.log', 'log', (['debug', "('Device Serial: %s' % serial)"], {}), "(debug, 'Device Serial: %s' % serial)\n", (5021, 5058), False, 'from common import log\n'), ((5696, 5730), 'pandevice.panorama.Panorama', 'Panorama', (['hostname'], {'api_key': 'apikey'}), '(hostname, api_key=apikey)\n', (5704, 5730), False, 'from pandevice.panorama import Panorama\n'), ((5750, 5784), 'pandevice.firewall.Firewall', 'Firewall', ([], {'serial': 'serial', 'vsys': 'vsys'}), '(serial=serial, vsys=vsys)\n', (5758, 5784), False, 'from pandevice.firewall import 
Firewall\n'), ((5941, 5986), 'pandevice.firewall.Firewall', 'Firewall', (['hostname'], {'api_key': 'apikey', 'vsys': 'vsys'}), '(hostname, api_key=apikey, vsys=vsys)\n', (5949, 5986), False, 'from pandevice.firewall import Firewall\n'), ((4757, 4843), 'common.exit_with_error', 'common.exit_with_error', (['"""Missing required command argument: device or panorama"""', '(3)'], {}), "('Missing required command argument: device or panorama',\n 3)\n", (4779, 4843), False, 'import common\n'), ((6784, 6870), 'common.log', 'log', (['debug', "('Login event on firewall %s: %s - %s' % (firewall, this_ip, this_user))"], {}), "(debug, 'Login event on firewall %s: %s - %s' % (firewall, this_ip,\n this_user))\n", (6787, 6870), False, 'from common import log\n'), ((6964, 7051), 'common.log', 'log', (['debug', "('Logout event on firewall %s: %s - %s' % (firewall, this_ip, this_user))"], {}), "(debug, 'Logout event on firewall %s: %s - %s' % (firewall, this_ip,\n this_user))\n", (6967, 7051), False, 'from common import log\n'), ((7674, 7699), 'common.exit_with_error', 'common.exit_with_error', (['e'], {}), '(e)\n', (7696, 7699), False, 'import common\n')]
|
# flake8: noqa
# Application configuration constants.
import os

WTF_CSRF_ENABLED = False  # On production, delete this line!
SECRET_KEY = ''  # NOTE(review): must be set to a real secret in production
SERVER_ADDRESS = os.getenv('SERVER_ADDRESS', '127.0.0.1:80')
# NOTE(review): os.getenv returns the raw *string* when the variable is set,
# so any non-empty value (even "False") is truthy — confirm callers expect
# this before relying on the flag.
FEATURE_FLAG_CHECK_IDENTICAL_CODE_ON = os.getenv(
    'FEATURE_FLAG_CHECK_IDENTICAL_CODE_ON', False,
)
USERS_CSV = 'users.csv'

# Babel config
LANGUAGES = {
    'en': 'English',
    'he': 'Hebrew',
}
|
[
"os.getenv"
] |
[((122, 165), 'os.getenv', 'os.getenv', (['"""SERVER_ADDRESS"""', '"""127.0.0.1:80"""'], {}), "('SERVER_ADDRESS', '127.0.0.1:80')\n", (131, 165), False, 'import os\n'), ((206, 262), 'os.getenv', 'os.getenv', (['"""FEATURE_FLAG_CHECK_IDENTICAL_CODE_ON"""', '(False)'], {}), "('FEATURE_FLAG_CHECK_IDENTICAL_CODE_ON', False)\n", (215, 262), False, 'import os\n')]
|
"""
================================================
Toy Injected Glucose Phosphorylation Compartment
================================================
This is a toy example referenced in the documentation.
"""
from vivarium.core.experiment import Experiment
from vivarium.core.process import Composite
from vivarium.library.pretty import format_dict
from vivarium_cell.processes.glucose_phosphorylation import GlucosePhosphorylation
from vivarium_cell.processes.injector import Injector
class InjectedGlcPhosphorylation(Composite):
    """Composite combining an Injector with a GlucosePhosphorylation process.

    Both processes operate on the shared ``cell`` store: the injector adds
    GLC and ATP at fixed rates, and the phosphorylation process reads the
    same store.
    """

    defaults = {
        'glucose_phosphorylation': {
            'k_cat': 1e-2,
        },
        'injector': {
            'substrate_rate_map': {
                'GLC': 1e-4,
                'ATP': 1e-3,
            },
        },
    }

    def __init__(self, config):
        super(InjectedGlcPhosphorylation, self).__init__(config)

    def generate_processes(self, config):
        """Instantiate both processes from the merged configuration."""
        return {
            'injector': Injector(self.config['injector']),
            'glucose_phosphorylation': GlucosePhosphorylation(
                self.config['glucose_phosphorylation']),
        }

    def generate_topology(self, config):
        """Wire both processes to the shared ``cell`` store."""
        cell_store = ('cell',)
        return {
            'injector': {
                'internal': cell_store,
            },
            'glucose_phosphorylation': {
                'cytoplasm': cell_store,
                'nucleoside_phosphates': cell_store,
                'global': ('global',),
            },
        }
|
[
"vivarium_cell.processes.glucose_phosphorylation.GlucosePhosphorylation",
"vivarium_cell.processes.injector.Injector"
] |
[((938, 971), 'vivarium_cell.processes.injector.Injector', 'Injector', (["self.config['injector']"], {}), "(self.config['injector'])\n", (946, 971), False, 'from vivarium_cell.processes.injector import Injector\n'), ((1006, 1068), 'vivarium_cell.processes.glucose_phosphorylation.GlucosePhosphorylation', 'GlucosePhosphorylation', (["self.config['glucose_phosphorylation']"], {}), "(self.config['glucose_phosphorylation'])\n", (1028, 1068), False, 'from vivarium_cell.processes.glucose_phosphorylation import GlucosePhosphorylation\n')]
|
#https://docs.python.org/ko/3/library/__main__.html
#main.py
#from module import *
import module
if __name__ == "__main__":
print(__name__)
#hello()
module.hello()
|
[
"module.hello"
] |
[((178, 192), 'module.hello', 'module.hello', ([], {}), '()\n', (190, 192), False, 'import module\n')]
|
import sys
import os
def readDepths(filePath):
    """Return the raw lines of the depth-report file at *filePath*."""
    with open(filePath) as depth_file:
        return depth_file.readlines()
# Process individual depth readings
def processDepthReadings(depthReadings):
    """Count and print how many readings are strictly deeper than the one before.

    :param depthReadings: iterable of depth values (numeric strings or ints)
    """
    values = [int(reading) for reading in depthReadings]
    # BUG FIX: the original used -1 as a "no previous reading" sentinel, which
    # miscounts if a legitimate reading of -1 appears; a pairwise comparison
    # needs no sentinel.
    depthIncreases = sum(1 for prev, cur in zip(values, values[1:]) if cur > prev)
    print(depthIncreases)
# Process sums of 3-reading sliding windows
def processDepthReadingsSum(depthReadings):
    """Count and print increases between consecutive 3-reading window sums.

    Comparing sum(values[i:i+3]) with sum(values[i+1:i+4]) reduces to
    comparing values[i] with values[i+3], since the two middle terms cancel.

    :param depthReadings: iterable of depth values (numeric strings or ints)
    """
    values = [int(reading) for reading in depthReadings]
    # Also fixes the original's shadowing of the builtin ``sum`` and its
    # fragile -1 sentinel for "no previous window sum".
    sumIncreases = sum(1 for old, new in zip(values, values[3:]) if new > old)
    print(sumIncreases)
def main() -> int:
    """Parse command-line arguments and run the requested depth analysis.

    argv[1]: path to the depth-readings file.
    argv[2] (optional): 1 to use the 3-reading sliding-window sum.

    :returns: process exit code (0 on success/usage, 1 on a bad file path)
    """
    # Check args: a file path is required.
    if len(sys.argv) <= 1:
        print("Please specify the path for the file of depths and optionally set the second argument to 1 to process a moving sum of the next 3 depth readings")
        return 0
    if not os.path.isfile(sys.argv[1]):
        print("File path provided is not a file or does not exist")
        return 1

    depthReadings = readDepths(sys.argv[1])
    # Process depth readings, individually or as sliding-window sums.
    if len(sys.argv) > 2 and int(sys.argv[2]) == 1:
        # BUG FIX: removed stray leftover debug output print("hi").
        processDepthReadingsSum(depthReadings)
    else:
        processDepthReadings(depthReadings)
    # Return OK to sys.exit
    return 0
if __name__ == '__main__':
    # Propagate main()'s return code as the process exit status.
    sys.exit(main())
|
[
"os.path.isfile"
] |
[((1270, 1297), 'os.path.isfile', 'os.path.isfile', (['sys.argv[1]'], {}), '(sys.argv[1])\n', (1284, 1297), False, 'import os\n')]
|
import math
class Cache(object):
    """A set-associative cache model.

    The bits of a 16-bit address are split into tag / index / offset based
    on total size, line length and associativity.
    """

    def __init__(self, size, length, associativity, cycle_time, writing_policy, parent):
        """
        :param size: total cache size in bytes
        :param length: line (block) length in bytes
        :param associativity: number of lines per set
        :param cycle_time: access time in cycles
        :param writing_policy: writing policy tag, e.g. "wb"
        :param parent: neighbouring cache level (this cache registers itself
            as its ``child``), or None
        """
        # Address-bit split for a 16-bit address space.
        self.index = int(math.log(size / (length * associativity), 2))
        self.offset = int(math.log(length, 2))
        self.tag = 16 - (self.index + self.offset)
        self.num_of_sets = int(size / (length * associativity))
        self.set_size = associativity
        self.writing_policy = writing_policy
        self.cycle_time = cycle_time
        # One dict per set.
        self.entries = [{} for _ in range(self.num_of_sets)]
        self.hits = 0
        self.misses = 0
        # Link into the cache hierarchy.
        self.child = None
        self.parent = parent
        if parent is not None:
            parent.child = self

    def hit_ratio(self):
        """Return the hit ratio as a percentage (0.0 before any access)."""
        accesses = self.hits + self.misses
        if accesses == 0:
            # ROBUSTNESS FIX: the original raised ZeroDivisionError when
            # called before any access was recorded.
            return 0.0
        return (self.hits / accesses) * 100

    def __repr__(self):
        return "Index Bits: %s Offset Bits: %s Tag Bits: %s" % (self.index, self.offset, self.tag)
#testing
#a = Cache(2048,8,4,4,"wb",None)
#b = cache(4,4,4,4,"ahmed",a)
#print(b.parent)
#
# print(a)
# mask = 0
# for i in range(1 , a.index + 1):
# mask |= (1 << (16 - (a.tag + i)))
# print(bin(mask))
|
[
"math.log"
] |
[((221, 265), 'math.log', 'math.log', (['(size / (length * associativity))', '(2)'], {}), '(size / (length * associativity), 2)\n', (229, 265), False, 'import math\n'), ((289, 308), 'math.log', 'math.log', (['length', '(2)'], {}), '(length, 2)\n', (297, 308), False, 'import math\n')]
|
# -*- coding: utf-8 -*-
# Learn more: https://github.com/kennethreitz/setup.py
from setuptools import setup, find_packages
# Read the long description and licence text from the repository files.
with open('README.md') as f:
    long_description = f.read()

# IDIOM FIX: renamed from ``license`` to avoid shadowing the builtin.
with open('LICENSE') as f:
    license_text = f.read()

setup(
    name='livedata-subscribetags',
    version='0.1.0',
    description='Sample script to subscribe to changes of tag\'s value ',
    long_description=long_description,
    author='HMS Industrial Netwoks S.A.',
    author_email='<EMAIL>',
    url='https://developer.ewon.biz/content/apiv2',
    license=license_text,
    packages=find_packages(exclude=('tests', 'docs')),
    scripts=['APIv2/example.py'],
    entry_points={
        'console_scripts': [
            'livedata-subscribetags=APIv2.example:launch',
        ],
    },
    install_requires=[
        'stomp.py',
        'websocket-client',
    ],
    dependency_links=['git+https://github.com/gschizas/websocket-client.git@patch-1#egg=websocket-client-0'],
)
|
[
"setuptools.find_packages"
] |
[((553, 593), 'setuptools.find_packages', 'find_packages', ([], {'exclude': "('tests', 'docs')"}), "(exclude=('tests', 'docs'))\n", (566, 593), False, 'from setuptools import setup, find_packages\n')]
|
import pprint
# Example GeoJSON structures kept for reference (not used by the code below).
template = {
    "type": "FeatureCollection",
    "features": [
        {
            "type": "Feature",
            "geometry": {
                "type": "Point",
                "coordinates": [-111.6782379150, 39.32373809814]  # Lat then Long
            }
        },
        {
            "type": "Feature",
            "geometry": {
                "type": "Point",
                "coordinates": [-74.00714111328, 40.71455001831]
            }
        }]}

feat = {
    "type": "Feature",
    "geometry": {
        "type": "Point",
        "coordinates": [-111.6782379150, 39.32373809814]  # Lat then Long
    }
}

# Parse tab-separated location rows into {"longitude", "latitude"} dicts from
# the last two columns of each row.
# NOTE(review): the key naming here vs the original "technically lat than
# long" remark looks contradictory — confirm the file's column order.
with open("locations_with_long_lat.txt", "r") as f:
    locations = f.readlines()

spots = []
for location in locations:
    try:
        row = location.split("\t")
        # Validate that the last column is numeric; malformed rows (e.g.
        # headers) raise and are silently skipped.
        # CLEANUP: removed a dead per-iteration rebuild of ``feat`` and
        # several blocks of commented-out debug code.
        float(row[-1].strip())
        spots.append({"longitude": row[-1].strip(), "latitude": row[-2].strip()})
    except Exception:
        # Skip rows that do not parse.
        pass

pprint.pprint(spots)
|
[
"pprint.pprint"
] |
[((1453, 1473), 'pprint.pprint', 'pprint.pprint', (['spots'], {}), '(spots)\n', (1466, 1473), False, 'import pprint\n')]
|
#! /usr/bin/env python3
"""
UI class for Serial Port hardware. This will have an instantiation of a
Serial port.
"""
#
# The GUI libraries since we build some GUI components here
#
import PyQt5
import PyQt5.QtCore
import PyQt5.QtWidgets
import SerialPort
class SerialPortUI(PyQt5.QtCore.QObject):
    """
    UI row for configuring and opening a serial port: a name label, port and
    baud-rate combo boxes, and Connect/Disconnect buttons.
    """

    # Emitted after the port has been opened successfully (see connectClicked).
    connectButtonSignal = PyQt5.QtCore.pyqtSignal()

    def __init__(self, parent=None, name="Serial Port", port="/dev/ttyUSB0",
                 baud_rate="115200", bits=8, parity=None, stop_bits=1):
        """Build the SerialPort object and the widgets that configure it."""
        super(SerialPortUI, self).__init__()
        #
        # Serial Port
        #
        self.serial_port = SerialPort.SerialPort(port, baud_rate,
                                                 bits, parity,
                                                 stop_bits)
        #
        # GUI components
        #
        self.SerialPortName = PyQt5.QtWidgets.QLabel(name)
        self.SerialPortComboBox = PyQt5.QtWidgets.QComboBox()
        self.SerialPortComboBox.addItems(self.serial_port.get_list_of_ports())
        baud_rate_list = ["115200", "57600", "38400", "9600"]
        self.BaudRateSelected = baud_rate_list[0]
        self.BaudRateComboBox = PyQt5.QtWidgets.QComboBox()
        self.BaudRateComboBox.addItems(baud_rate_list)
        self.SerialPortLayout = PyQt5.QtWidgets.QHBoxLayout()
        self.SerialConnectButton = PyQt5.QtWidgets.QPushButton("Connect")
        self.SerialDisConnectButton = PyQt5.QtWidgets.QPushButton("Disconnect")
        self.SerialPortLayout.addWidget(self.SerialPortName)
        self.SerialPortLayout.addWidget(PyQt5.QtWidgets.QLabel("Select Port"))
        self.SerialPortLayout.addWidget(self.SerialPortComboBox)
        self.SerialPortLayout.addWidget(
            PyQt5.QtWidgets.QLabel("Select Baud Rate"))
        self.SerialPortLayout.addWidget(self.BaudRateComboBox)
        self.SerialPortLayout.addWidget(self.SerialConnectButton)
        self.SerialPortLayout.addWidget(self.SerialDisConnectButton)
        #
        # Serial port configs based on GUI selection (defaults)
        #
        # self.serial_port.setBaudrate(self.BaudRateSelected)
        # self.serial_port.setPort("/dev/ttyUSB0")
        self.SerialConnectButton.clicked.connect(self.connectClicked)
        self.SerialDisConnectButton.clicked.connect(self.disconnectClicked)
        pass

    def getLayout(self):
        """
        Return our layout for easy GUI integration
        """
        return self.SerialPortLayout

    def connectClicked(self):
        """Open the serial port with the selected port and baud rate.

        Locks the combo boxes and emits ``connectButtonSignal`` on success.
        """
        print("Connect Clicked")
        print("BaudRate {}".format(self.BaudRateComboBox.currentText()))
        print("Port {}".format(self.SerialPortComboBox.currentText()))
        if self.serial_port.is_open:
            self.serial_port.close()
        self.serial_port.setPort(self.SerialPortComboBox.currentText())
        self.serial_port.baudrate = self.BaudRateComboBox.currentText()
        try:
            self.serial_port.open()
        # NOTE(review): bare except swallows everything (even KeyboardInterrupt);
        # consider catching the serial library's specific exception instead.
        except:
            print("FAILED TO OPEN PORT {}".format(
                self.SerialPortComboBox.currentText()))
        if self.serial_port.is_open:
            self.SerialPortComboBox.setEnabled(False)
            self.BaudRateComboBox.setEnabled(False)
            self.connectButtonSignal.emit()
        pass

    def disconnectClicked(self):
        """Close the serial port and re-enable the selection combo boxes."""
        print("Disconnect Clicked")
        self.serial_port.close()
        self.SerialPortComboBox.setEnabled(True)
        self.BaudRateComboBox.setEnabled(True)
        pass
if __name__ == "__main__":
    import sys

    class TestUI(PyQt5.QtWidgets.QDialog):
        """Minimal dialog hosting a SerialPortUI for manual testing."""

        def __init__(self, parent=None):
            super(TestUI, self).__init__(parent)
            self.serial_port_ui = SerialPortUI()
            container = PyQt5.QtWidgets.QHBoxLayout()
            container.addLayout(self.serial_port_ui.getLayout())
            self.setLayout(container)

    app = PyQt5.QtWidgets.QApplication(sys.argv)
    dialog = TestUI()
    dialog.show()
    app.exec_()
|
[
"PyQt5.QtCore.pyqtSignal",
"PyQt5.QtWidgets.QLabel",
"PyQt5.QtWidgets.QComboBox",
"PyQt5.QtWidgets.QHBoxLayout",
"PyQt5.QtWidgets.QPushButton",
"SerialPort.SerialPort",
"PyQt5.QtWidgets.QApplication"
] |
[((329, 354), 'PyQt5.QtCore.pyqtSignal', 'PyQt5.QtCore.pyqtSignal', ([], {}), '()\n', (352, 354), False, 'import PyQt5\n'), ((3889, 3927), 'PyQt5.QtWidgets.QApplication', 'PyQt5.QtWidgets.QApplication', (['sys.argv'], {}), '(sys.argv)\n', (3917, 3927), False, 'import PyQt5\n'), ((619, 682), 'SerialPort.SerialPort', 'SerialPort.SerialPort', (['port', 'baud_rate', 'bits', 'parity', 'stop_bits'], {}), '(port, baud_rate, bits, parity, stop_bits)\n', (640, 682), False, 'import SerialPort\n'), ((857, 885), 'PyQt5.QtWidgets.QLabel', 'PyQt5.QtWidgets.QLabel', (['name'], {}), '(name)\n', (879, 885), False, 'import PyQt5\n'), ((920, 947), 'PyQt5.QtWidgets.QComboBox', 'PyQt5.QtWidgets.QComboBox', ([], {}), '()\n', (945, 947), False, 'import PyQt5\n'), ((1172, 1199), 'PyQt5.QtWidgets.QComboBox', 'PyQt5.QtWidgets.QComboBox', ([], {}), '()\n', (1197, 1199), False, 'import PyQt5\n'), ((1288, 1317), 'PyQt5.QtWidgets.QHBoxLayout', 'PyQt5.QtWidgets.QHBoxLayout', ([], {}), '()\n', (1315, 1317), False, 'import PyQt5\n'), ((1354, 1392), 'PyQt5.QtWidgets.QPushButton', 'PyQt5.QtWidgets.QPushButton', (['"""Connect"""'], {}), "('Connect')\n", (1381, 1392), False, 'import PyQt5\n'), ((1431, 1472), 'PyQt5.QtWidgets.QPushButton', 'PyQt5.QtWidgets.QPushButton', (['"""Disconnect"""'], {}), "('Disconnect')\n", (1458, 1472), False, 'import PyQt5\n'), ((1575, 1612), 'PyQt5.QtWidgets.QLabel', 'PyQt5.QtWidgets.QLabel', (['"""Select Port"""'], {}), "('Select Port')\n", (1597, 1612), False, 'import PyQt5\n'), ((1733, 1775), 'PyQt5.QtWidgets.QLabel', 'PyQt5.QtWidgets.QLabel', (['"""Select Baud Rate"""'], {}), "('Select Baud Rate')\n", (1755, 1775), False, 'import PyQt5\n'), ((3684, 3713), 'PyQt5.QtWidgets.QHBoxLayout', 'PyQt5.QtWidgets.QHBoxLayout', ([], {}), '()\n', (3711, 3713), False, 'import PyQt5\n')]
|
"""Main module."""
import itertools as it
import numpy as np
def read_data(filepath, sep=" "):
    """Read point coordinates from a text file.

    Each line is expected to hold a label followed by at least three
    columns; columns 2-4 are parsed as the x, y, z coordinates.

    Arguments:
        filepath (str) -- Path to the file to be read

    Keyword Arguments:
        sep (str) -- Separator for columns in file (default: " ")

    Returns:
        (list) -- List of points read from input file, in the format [x,y,z]
    """
    with open(filepath, "r") as source:
        lines = source.readlines()
    points = []
    for line in lines:
        columns = line.split(sep)[1:4]
        # Indexing 0..2 explicitly keeps the original behaviour of raising
        # IndexError when a line has fewer than four columns.
        points.append([float(columns[axis]) for axis in range(3)])
    return points
def rototranslation(points):
    """Build a 4x4 rototranslation matrix from three non-collinear points.

    Arguments:
        points (numpy.array) -- Three non-collinear points in a 3x3
                                numpy array [x,y,z]

    Returns:
        [numpy.array] -- Rototranslation matrix (4x4 numpy array)
    """
    origin = points[0, :]
    # x axis: from the first point towards the second.
    x_axis = points[1, :] - points[0, :]
    x_versor = x_axis / np.linalg.norm(x_axis)
    # y axis: normal to the plane spanned by the two edges leaving point 0.
    y_axis = np.cross(points[1, :] - points[0, :], points[2, :] - points[0, :])
    y_versor = y_axis / np.linalg.norm(y_axis)
    # z axis completes the right-handed frame.
    z_versor = np.cross(x_versor, y_versor)
    columns = [
        np.append(x_versor, 0.0),
        np.append(y_versor, 0.0),
        np.append(z_versor, 0.0),
        np.append(origin, 1.0),
    ]
    return np.array(columns).T
def calibrate(points_G, points_R):
    """Perform the actual Robot-to-World calibration.

    Computes every possible combination of three non-collinear points,
    builds the corresponding rototranslator for each, and averages them
    into a mean rototranslator. Everything is expressed in mm.

    Arguments:
        points_G (numpy.array) -- Points in World Coordinates
        points_R (numpy.array) -- Points in Robot Coordinates

    Raises:
        Exception: Number of points in Robot and World Coordinates
                   files does not match.

    Returns:
        [dict] -- Dictionary containing the computed rototranslator
                  and some information about the error (mean and
                  standard deviation).
    """
    # Remove offset from data
    if len(points_G) != len(points_R):
        raise Exception(
            """
            Number of points must match in robot and world files!
            Found {} points in World file and {} points in Robot file""".format(
                len(points_G), len(points_R)
            )
        )
    num_points = len(points_G)
    # Hard-coded probe/SMR mounting offsets in mm -- presumably specific to
    # this measurement rig; TODO confirm against the physical setup.
    offset_G_x = -80 # coherence
    offset_G_y = 0 # No offset on y axis
    offset_G_z = 250 # coherence
    offset_G = [offset_G_x, offset_G_y, offset_G_z]
    offset_R_x = 80 - 80 # from TCP to SMR + coherence
    offset_R_y = 0 # No offset on y axis
    offset_R_z = (
        20 + 25 + 250
    ) # pointer basement along z + SMR along z + coherence
    offset_R = [offset_R_x, offset_R_y, offset_R_z]
    # Remove offset
    points_G = np.array(points_G)
    points_R = np.array(points_R)
    points_G[:, :] = points_G[:, :] - offset_G
    points_R[:, :] = points_R[:, :] - offset_R
    # Generate creation dataset and control dataset:
    # ~30% of the points build the rototranslator ("creation" set), the
    # remaining ~70% ("star" set) are held out to measure the error.
    creation_perc = 0.3
    num_creation_points = round(num_points * creation_perc)
    num_star_points = round(num_points * (1 - creation_perc))
    # At least three points are needed in creation set
    if num_creation_points <= 2:
        num_creation_points = 3
        num_star_points = num_points - num_creation_points
    # Guard against rounding making the two subsets overshoot the total.
    if num_creation_points + num_star_points != num_points:
        num_star_points = num_star_points - 1
    # Pick creation points evenly spread across the acquisition order.
    index_creation = np.round(
        np.linspace(0, num_points - 1, num_creation_points)
    )
    index_creation = [int(i) for i in index_creation]
    index_star = [i for i in range(num_points) if i not in index_creation]
    points_G_creation = points_G[index_creation, :]
    points_R_creation = points_R[index_creation, :]
    points_star_G_real = points_G[index_star, :]
    points_star_R = points_R[index_star, :]
    # Mean Rototranslation Method
    index_perm = list(
        it.permutations(range(num_creation_points), 3)
    ) # permutations without repetitions
    # Flatten each triple of 3D points into one 9-element row per permutation.
    creation_perm_G = np.zeros([len(index_perm), 9])
    creation_perm_R = np.zeros([len(index_perm), 9])
    for i in range(len(index_perm)):
        creation_perm_G[i, :3] = points_G_creation[index_perm[i][0], :3]
        creation_perm_G[i, 3:6] = points_G_creation[index_perm[i][1], :3]
        creation_perm_G[i, 6:] = points_G_creation[index_perm[i][2], :3]
        creation_perm_R[i, :3] = points_R_creation[index_perm[i][0], :3]
        creation_perm_R[i, 3:6] = points_R_creation[index_perm[i][1], :3]
        creation_perm_R[i, 6:] = points_R_creation[index_perm[i][2], :3]
    # Stacks of 4x4 transforms, one slice per permutation:
    # LG_T local->world, LR_T local->robot, RL_T robot->local,
    # RG_T_temp robot->world candidates.
    LG_T = np.zeros([4, 4, len(index_perm)])
    LR_T = np.zeros([4, 4, len(index_perm)])
    RL_T = np.zeros([4, 4, len(index_perm)])
    RG_T_temp = np.zeros([4, 4, len(index_perm)])
    # for each permutation, generate the rototranslator
    for i in range(len(index_perm)):
        points_G_current = np.array(
            [
                creation_perm_G[i, :3],
                creation_perm_G[i, 3:6],
                creation_perm_G[i, 6:],
            ]
        )
        points_R_current = np.array(
            [
                creation_perm_R[i, :3],
                creation_perm_R[i, 3:6],
                creation_perm_R[i, 6:],
            ]
        )
        LG_T[:, :, i] = rototranslation(points_G_current)
        LR_T[:, :, i] = rototranslation(points_R_current)
        RL_T[:, :, i] = np.linalg.inv(LR_T[:, :, i])
        RG_T_temp[:, :, i] = np.matmul(LG_T[:, :, i], RL_T[:, :, i])
    RG_T = np.mean(RG_T_temp, axis=2) # Mean rototranslator
    # Comparison between the three methods
    points_star_R = np.append(
        points_star_R, np.ones([len(points_star_R), 1]), axis=1
    ) # homogeneous
    points_star_G_real = np.append(
        points_star_G_real, np.ones([len(points_star_G_real), 1]), axis=1
    ) # homogeneous
    # estimation starting from T and robot data
    points_star_G_estimated = np.matmul(RG_T, points_star_R.T).T
    # comparison between real and estimated
    error = abs(points_star_G_real - points_star_G_estimated)
    # [:3] drops the homogeneous coordinate from the per-axis statistics.
    error_mean = np.mean(error, axis=0)[:3]
    error_std_dev = np.std(error, axis=0)[:3]
    results = {
        "Rototranslator": RG_T,
        "Error Mean": error_mean,
        "Error Std Dev": error_std_dev,
    }
    return results
|
[
"numpy.std",
"numpy.cross",
"numpy.append",
"numpy.mean",
"numpy.array",
"numpy.linalg.norm",
"numpy.linspace",
"numpy.linalg.inv",
"numpy.matmul"
] |
[((1264, 1282), 'numpy.cross', 'np.cross', (['y_1', 'y_2'], {}), '(y_1, y_2)\n', (1272, 1282), True, 'import numpy as np\n'), ((1345, 1373), 'numpy.cross', 'np.cross', (['x_versor', 'y_versor'], {}), '(x_versor, y_versor)\n', (1353, 1373), True, 'import numpy as np\n'), ((3134, 3152), 'numpy.array', 'np.array', (['points_G'], {}), '(points_G)\n', (3142, 3152), True, 'import numpy as np\n'), ((3168, 3186), 'numpy.array', 'np.array', (['points_R'], {}), '(points_R)\n', (3176, 3186), True, 'import numpy as np\n'), ((5857, 5883), 'numpy.mean', 'np.mean', (['RG_T_temp'], {'axis': '(2)'}), '(RG_T_temp, axis=2)\n', (5864, 5883), True, 'import numpy as np\n'), ((1161, 1178), 'numpy.linalg.norm', 'np.linalg.norm', (['x'], {}), '(x)\n', (1175, 1178), True, 'import numpy as np\n'), ((1311, 1328), 'numpy.linalg.norm', 'np.linalg.norm', (['y'], {}), '(y)\n', (1325, 1328), True, 'import numpy as np\n'), ((3807, 3858), 'numpy.linspace', 'np.linspace', (['(0)', '(num_points - 1)', 'num_creation_points'], {}), '(0, num_points - 1, num_creation_points)\n', (3818, 3858), True, 'import numpy as np\n'), ((5240, 5328), 'numpy.array', 'np.array', (['[creation_perm_G[i, :3], creation_perm_G[i, 3:6], creation_perm_G[i, 6:]]'], {}), '([creation_perm_G[i, :3], creation_perm_G[i, 3:6], creation_perm_G[\n i, 6:]])\n', (5248, 5328), True, 'import numpy as np\n'), ((5437, 5525), 'numpy.array', 'np.array', (['[creation_perm_R[i, :3], creation_perm_R[i, 3:6], creation_perm_R[i, 6:]]'], {}), '([creation_perm_R[i, :3], creation_perm_R[i, 3:6], creation_perm_R[\n i, 6:]])\n', (5445, 5525), True, 'import numpy as np\n'), ((5747, 5775), 'numpy.linalg.inv', 'np.linalg.inv', (['LR_T[:, :, i]'], {}), '(LR_T[:, :, i])\n', (5760, 5775), True, 'import numpy as np\n'), ((5805, 5844), 'numpy.matmul', 'np.matmul', (['LG_T[:, :, i]', 'RL_T[:, :, i]'], {}), '(LG_T[:, :, i], RL_T[:, :, i])\n', (5814, 5844), True, 'import numpy as np\n'), ((6278, 6310), 'numpy.matmul', 'np.matmul', (['RG_T', 'points_star_R.T'], 
{}), '(RG_T, points_star_R.T)\n', (6287, 6310), True, 'import numpy as np\n'), ((6436, 6458), 'numpy.mean', 'np.mean', (['error'], {'axis': '(0)'}), '(error, axis=0)\n', (6443, 6458), True, 'import numpy as np\n'), ((6484, 6505), 'numpy.std', 'np.std', (['error'], {'axis': '(0)'}), '(error, axis=0)\n', (6490, 6505), True, 'import numpy as np\n'), ((1428, 1452), 'numpy.append', 'np.append', (['x_versor', '(0.0)'], {}), '(x_versor, 0.0)\n', (1437, 1452), True, 'import numpy as np\n'), ((1466, 1490), 'numpy.append', 'np.append', (['y_versor', '(0.0)'], {}), '(y_versor, 0.0)\n', (1475, 1490), True, 'import numpy as np\n'), ((1504, 1528), 'numpy.append', 'np.append', (['z_versor', '(0.0)'], {}), '(z_versor, 0.0)\n', (1513, 1528), True, 'import numpy as np\n'), ((1542, 1564), 'numpy.append', 'np.append', (['origin', '(1.0)'], {}), '(origin, 1.0)\n', (1551, 1564), True, 'import numpy as np\n')]
|
# Lines beginning with "#" are comments; Python does not execute them.
"""
Triple-quoted strings like this one can serve as multi-line comments.
"""
# Alias the libraries we need: pandas for tables, matplotlib for plots,
# and numpy for numeric helpers (aliasing keeps the names short).
import pandas as pd
import matplotlib.pyplot as plt
import numpy as np

# Daily bike-share usage data from the UCI ML repository:
# https://archive.ics.uci.edu/ml/datasets/Bike+Sharing+Dataset
bikes = pd.read_csv('day.csv')

# Preview the first rows of the data.
bikes.head()
# Summary statistics for every numeric column.
bikes.describe()

# Use the examples in the jupyter notebook to help you here.
# Mean and standard deviation of the daily rider counts (the 'cnt' column).
bikes['cnt'].mean()
bikes['cnt'].std()

# Plot the daily counts.
bikes['cnt'].plot()

# Further EDA is left as an exercise; a Jupyter Notebook works well here.
|
[
"pandas.read_csv"
] |
[((548, 570), 'pandas.read_csv', 'pd.read_csv', (['"""day.csv"""'], {}), "('day.csv')\n", (559, 570), True, 'import pandas as pd\n')]
|
import os
import numpy as np
import glob
from sklearn.model_selection import StratifiedShuffleSplit
import sys, os
sys.path.insert(0, os.path.join(
os.path.dirname(os.path.realpath(__file__)), "../../"))
from deep_audio_features.bin import config
import wave
import contextlib
def load(folders=None, test_val=(0.2, 0.2), test=True, validation=True):
    """Loads a dataset from some folders.

    Bug fix: the default for ``test_val`` was a mutable list; it is now an
    equivalent immutable tuple so no shared state can leak between calls.

    Arguments
    ----------
        folders {list} : A list of folders containing all samples.
        test_val {tuple} : Percentages for the test and validation splits.
        test {boolean} : If False only train samples and labels are returned.
        validation {boolean} : If False only train and test samples and
                               labels are returned.

    Returns
    --------
        X_train {list} : All filenames for train.
        y_train {list} : Labels for train.
        X_test {list} : Filenames for test.
        y_test {list} : Labels for test.
        if `validation` is `True` also returns the following:
        X_valid {list} : Filenames for validation.
        y_valid {list} : Labels for validation.
    """
    if folders is None:
        raise AssertionError()
    filenames = []
    labels = []
    # Match filenames with labels: the containing folder is the class label.
    for folder in folders:
        for f in glob.iglob(os.path.join(folder, '*.wav')):
            filenames.append(f)
            labels.append(folder)
    # Convert folder-name labels to integer class indices.
    folder2idx, idx2folder = folders_mapping(folders=folders)
    labels = [folder2idx[label] for label in labels]
    if test is False and validation is False:
        # Use this data only to train
        return filenames, labels
    test_p, val_p = test_val
    # First split: carve the test set out of the full dataset.
    sss = StratifiedShuffleSplit(n_splits=1, test_size=test_p, random_state=0)
    train_idx, test_idx = next(sss.split(filenames, labels))
    X_train_ = [filenames[i] for i in train_idx]
    y_train_ = [labels[i] for i in train_idx]
    X_test = [filenames[i] for i in test_idx]
    y_test = [labels[i] for i in test_idx]
    if validation is False:
        return X_train_, y_train_, X_test, y_test
    # Second split: carve the validation set out of the training set.
    sss = StratifiedShuffleSplit(n_splits=1, test_size=val_p, random_state=0)
    train_idx, val_idx = next(sss.split(X_train_, y_train_))
    X_train = [X_train_[i] for i in train_idx]
    y_train = [y_train_[i] for i in train_idx]
    X_val = [X_train_[i] for i in val_idx]
    y_val = [y_train_[i] for i in val_idx]
    return X_train, y_train, X_test, y_test, X_val, y_val
def compute_max_seq_len(reload=False, X=None, folders=None):
    """Return max sequence length for all files."""
    if reload is True:
        if folders is None:
            raise AssertionError()
        # Re-scan the dataset folders and pool every split together.
        X_train, _, X_test, _, X_val, _ = load(folders=folders)
        X = X_train + X_test + X_val
    else:
        if X is None:
            raise AssertionError()
    print(config.HOP_LENGTH, config.WINDOW_LENGTH)
    lengths = []
    for filename in X:
        with contextlib.closing(wave.open(filename, 'r')) as wav:
            duration = wav.getnframes() / float(wav.getframerate())
            # Number of analysis frames given the window/hop configuration.
            length = int((duration -
                          (config.WINDOW_LENGTH - config.HOP_LENGTH))
                         / config.HOP_LENGTH + 1)
            lengths.append(length)
    max_seq = np.max(lengths)
    print(f"Max sequence length in dataset: {max_seq}")
    return max_seq
def folders_mapping(folders):
    """Return a mapping from folder to class and a mapping from class to folder."""
    folder2idx = {folder: idx for idx, folder in enumerate(folders)}
    idx2folder = {idx: folder for idx, folder in enumerate(folders)}
    return folder2idx, idx2folder
def get_categories_population_dictionary(labels, n_classes=9):
    """Return a mapping (category) -> Population."""
    mapping = dict.fromkeys(range(n_classes), 0)
    for label in labels:
        # Labels outside the class range are ignored.
        if label < n_classes:
            mapping[label] += 1
    return mapping
|
[
"wave.open",
"os.path.realpath",
"sklearn.model_selection.StratifiedShuffleSplit",
"numpy.max",
"os.path.join"
] |
[((1898, 1966), 'sklearn.model_selection.StratifiedShuffleSplit', 'StratifiedShuffleSplit', ([], {'n_splits': '(1)', 'test_size': 'test_p', 'random_state': '(0)'}), '(n_splits=1, test_size=test_p, random_state=0)\n', (1920, 1966), False, 'from sklearn.model_selection import StratifiedShuffleSplit\n'), ((2437, 2504), 'sklearn.model_selection.StratifiedShuffleSplit', 'StratifiedShuffleSplit', ([], {'n_splits': '(1)', 'test_size': 'val_p', 'random_state': '(0)'}), '(n_splits=1, test_size=val_p, random_state=0)\n', (2459, 2504), False, 'from sklearn.model_selection import StratifiedShuffleSplit\n'), ((3822, 3837), 'numpy.max', 'np.max', (['lengths'], {}), '(lengths)\n', (3828, 3837), True, 'import numpy as np\n'), ((168, 194), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (184, 194), False, 'import sys, os\n'), ((1326, 1355), 'os.path.join', 'os.path.join', (['folder', '"""*.wav"""'], {}), "(folder, '*.wav')\n", (1338, 1355), False, 'import sys, os\n'), ((3472, 3489), 'wave.open', 'wave.open', (['f', '"""r"""'], {}), "(f, 'r')\n", (3481, 3489), False, 'import wave\n')]
|
#!/usr/bin/env python3
"""
:problem: https://www.hackerrank.com/challenges/frequency-queries/problem
"""
from typing import List, Tuple
from collections import Counter
def process_queries(queries: List[Tuple[int, int]]) -> List[int]:
    """Execute queries and report whether a value with a given count exists."""
    value_counts = Counter()   # value -> how many times it was inserted
    freq_counts = Counter()    # frequency -> how many values have it
    output = []
    for op, arg in queries:
        if op == 1:
            # Insert: move the value from its old frequency bucket up one.
            old = value_counts[arg]
            freq_counts[old] -= 1
            freq_counts[old + 1] += 1
            value_counts[arg] = old + 1
        elif op == 2:
            # Delete one occurrence, if any exist.
            if value_counts[arg]:
                old = value_counts[arg]
                freq_counts[old] -= 1
                freq_counts[old - 1] += 1
                value_counts[arg] = old - 1
        else:
            # Query: does any value currently occur exactly `arg` times?
            output.append(1 if freq_counts[arg] else 0)
    return output
def main():
    """Read queries from stdin, run them, and print one result per line."""
    total = int(input())
    pairs = []
    for _ in range(total):
        op, arg = map(int, input().split())
        pairs.append((op, arg))
    print(*process_queries(pairs), sep='\n')


if __name__ == '__main__':
    main()
|
[
"collections.Counter"
] |
[((330, 339), 'collections.Counter', 'Counter', ([], {}), '()\n', (337, 339), False, 'from collections import Counter\n'), ((353, 362), 'collections.Counter', 'Counter', ([], {}), '()\n', (360, 362), False, 'from collections import Counter\n')]
|
"""
Copyright [2009-2019] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import os
import pathlib
"""
Any setting defined here can be overridden by:
Settings the appropriate environment variable, eg. to override FOOBAR, `export APP_FOOBAR="whatever"`.
This is useful in production for secrets you do not wish to save in code and
also plays nicely with docker(-compose). Settings will attempt to convert environment variables to match the
type of the value here. See also activate.settings.sh.
Or, passing the custom setting as a keyword argument when initialising settings (useful when testing)
"""
# consumer folder, where media, static, templates and other subfolders are located
PROJECT_ROOT = pathlib.Path(__file__).parent.parent
# minimum query sequence length
MIN_LENGTH = 10
# maximum query sequence length
MAX_LENGTH = 7000
# results expiration time
EXPIRATION = 60 * 60 * 24 * 7 # seconds
# maximum time to run nhmmer
MAX_RUN_TIME = 5 * 60 # seconds
ENVIRONMENT = os.getenv('ENVIRONMENT', 'LOCAL')
# add settings from environment-specific files
# (a star-import here deliberately overrides any of the defaults above)
if ENVIRONMENT == "LOCAL":
    from .local import *
elif ENVIRONMENT == "TEST":
    from .test import *
elif ENVIRONMENT == "DOCKER-COMPOSE":
    from .docker_compose import *
elif ENVIRONMENT == "PRODUCTION":
    from .production import *
# hostname to listen on
HOST = '0.0.0.0'
# TCP port for the server to listen on
PORT = 8000
def substitute_environment_variables():
    """
    Substitute environment variables into settings.

    Every UPPER_CASE module-level name can be overridden by an environment
    variable of the same name; the override string is coerced to the type
    of the current value. This function is stolen from the default project,
    generated by aiohttp-devtools 'adev start' command.
    """
    for name in list(globals()):
        # Only ALL_CAPS, non-private names count as overridable settings.
        if name.startswith('_') or name.upper() != name:
            continue
        override = os.getenv(name, None)
        if override is None:
            continue
        # Convert the env var string to the same type as the current value.
        setting_type = type(globals()[name])
        if issubclass(setting_type, bool):
            override = override.upper() in ('1', 'TRUE')
        elif issubclass(setting_type, int):
            override = int(override)
        elif issubclass(setting_type, float):
            override = float(override)
        elif issubclass(setting_type, pathlib.Path):
            override = pathlib.Path(override)
        elif issubclass(setting_type, bytes):
            override = override.encode()
        globals()[name] = override


substitute_environment_variables()
|
[
"pathlib.Path",
"os.getenv"
] |
[((1504, 1537), 'os.getenv', 'os.getenv', (['"""ENVIRONMENT"""', '"""LOCAL"""'], {}), "('ENVIRONMENT', 'LOCAL')\n", (1513, 1537), False, 'import os\n'), ((1221, 1243), 'pathlib.Path', 'pathlib.Path', (['__file__'], {}), '(__file__)\n', (1233, 1243), False, 'import pathlib\n'), ((2191, 2217), 'os.getenv', 'os.getenv', (['attr_name', 'None'], {}), '(attr_name, None)\n', (2200, 2217), False, 'import os\n'), ((2860, 2881), 'pathlib.Path', 'pathlib.Path', (['env_var'], {}), '(env_var)\n', (2872, 2881), False, 'import pathlib\n')]
|
#
# Copyright (c) Contributors to the Open 3D Engine Project.
# For complete copyright and license terms please see the LICENSE at the root of this distribution.
#
# SPDX-License-Identifier: Apache-2.0 OR MIT
#
#
import platform
if platform.system() == 'Windows':
    from tempfile import TemporaryDirectory
    from pathlib import Path
    import os
    import stat

    realTempdirCleanup = TemporaryDirectory.cleanup

    def cleanup(self):
        """
        Make files writable before removing them.

        On Windows with Python < 3.8, TemporaryDirectory() fails to remove
        read-only files. Git marks files in the object store read-only, so
        e.g. cloning into a tempdir would otherwise break cleanup. This
        wrapper clears the read-only bit on every file first, then delegates
        to the original cleanup.
        """
        for dirpath, _subdirs, filenames in os.walk(self.name):
            directory = Path(dirpath)
            for filename in filenames:
                (directory / filename).chmod(stat.S_IWRITE)
        realTempdirCleanup(self)

    TemporaryDirectory.cleanup = cleanup
|
[
"platform.system",
"pathlib.Path",
"os.walk"
] |
[((235, 252), 'platform.system', 'platform.system', ([], {}), '()\n', (250, 252), False, 'import platform\n'), ((850, 868), 'os.walk', 'os.walk', (['self.name'], {}), '(self.name)\n', (857, 868), False, 'import os\n'), ((926, 939), 'pathlib.Path', 'Path', (['dirpath'], {}), '(dirpath)\n', (930, 939), False, 'from pathlib import Path\n')]
|
#!/usr/bin/env python3
# encoding: utf-8
# Copyright 2019 <NAME>
# Licensed under the Apache License, Version 2.0 (the "License")
import os
import argparse
import torch
import torch.distributed as dist
import torch.multiprocessing as mp
from pynn.util import save_object_param
from pynn.net.lm_lstm import SeqLM
from pynn.bin import print_model, train_language_model
# Command-line interface for LSTM language-model training.
parser = argparse.ArgumentParser(description='pynn')
# --- data ---
parser.add_argument('--train-seq', help='path to train seq', required=True)
parser.add_argument('--valid-seq', help='path to validation seq', required=True)
parser.add_argument('--n-classes', type=int, required=True)
# --- model architecture ---
parser.add_argument('--n-layer', type=int, default=2)
parser.add_argument('--d-model', type=int, default=512)
parser.add_argument('--d-emb', type=int, default=0)
parser.add_argument('--d-project', type=int, default=0)
parser.add_argument('--shared-emb', help='sharing embedding', action='store_true')
parser.add_argument('--no-sek', help='without start and end tokens', action='store_true')
# --- regularization ---
parser.add_argument('--label-smooth', type=float, default=0.1)
parser.add_argument('--dropout', type=float, default=0.2)
parser.add_argument('--dropconnect', type=float, default=0.)
parser.add_argument('--emb-drop', type=float, default=0.)
# --- training schedule and batching ---
parser.add_argument('--model-path', help='model saving path', default='model')
parser.add_argument('--n-epoch', type=int, default=50)
parser.add_argument('--n-save', type=int, default=5)
parser.add_argument('--n-warmup', help='warm-up steps', type=int, default=6000)
parser.add_argument('--n-const', help='constant steps', type=int, default=0)
parser.add_argument('--n-print', help='inputs per update', type=int, default=40000)
parser.add_argument('--b-input', help='inputs per load', type=int, default=0)
parser.add_argument('--b-sample', help='maximum samples per batch', type=int, default=64)
parser.add_argument('--b-update', help='characters per update', type=int, default=12000)
parser.add_argument('--b-sync', help='steps per update', type=int, default=0)
parser.add_argument('--lr', help='learning rate', type=float, default=0.001)
parser.add_argument('--grad-norm', help='divide gradient by updated tokens', action='store_true')
parser.add_argument('--fp16', help='fp16 or not', action='store_true')
def create_model(args, device):
    """Build a SeqLM from CLI args and persist its config next to the model."""
    params = dict(
        n_vocab=args.n_classes,
        d_model=args.d_model,
        n_layer=args.n_layer,
        d_emb=args.d_emb,
        d_project=args.d_project,
        shared_emb=args.shared_emb,
        dropout=args.dropout,
        dropconnect=args.dropconnect,
        emb_drop=args.emb_drop,
    )
    model = SeqLM(**params)
    save_object_param(model, params, args.model_path + '/model.cfg')
    return model
def train(device, args):
    """Single-device training: build the model, log it, and run training."""
    model = create_model(args, device)
    print_model(model)
    train_language_model(model, args, device)
def train_distributed(device, gpus, args):
    """Per-process entry point for multi-GPU training via mp.spawn.

    `device` is this process's rank/GPU index, `gpus` the world size.
    """
    # Rendezvous address must be set before init_process_group.
    os.environ['MASTER_ADDR'] = 'localhost'
    os.environ['MASTER_PORT'] = '12355'
    dist.init_process_group("nccl", rank=device, world_size=gpus)
    # Same seed in every process so all ranks build identical weights.
    torch.manual_seed(0)
    model = create_model(args, device)
    # Only rank 0 prints, to avoid duplicated log output.
    if device == 0: print_model(model)
    train_language_model(model, args, device, gpus)

    dist.destroy_process_group()
if __name__ == '__main__':
    args = parser.parse_args()
    print(args)
    n_gpus = torch.cuda.device_count()
    if n_gpus > 1:
        # One training process per visible GPU.
        print('Training with distributed data parallel. Number of devices: %d' % n_gpus)
        mp.spawn(train_distributed, nprocs=n_gpus, args=(n_gpus, args), join=True)
    else:
        device = 0 if torch.cuda.is_available() else torch.device('cpu')
        train(device, args)
|
[
"pynn.bin.train_language_model",
"pynn.net.lm_lstm.SeqLM",
"torch.distributed.init_process_group",
"argparse.ArgumentParser",
"torch.distributed.destroy_process_group",
"torch.multiprocessing.spawn",
"torch.manual_seed",
"pynn.util.save_object_param",
"torch.cuda.device_count",
"pynn.bin.print_model",
"torch.cuda.is_available",
"torch.device"
] |
[((381, 424), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""pynn"""'}), "(description='pynn')\n", (404, 424), False, 'import argparse\n'), ((2660, 2675), 'pynn.net.lm_lstm.SeqLM', 'SeqLM', ([], {}), '(**params)\n', (2665, 2675), False, 'from pynn.net.lm_lstm import SeqLM\n'), ((2680, 2744), 'pynn.util.save_object_param', 'save_object_param', (['model', 'params', "(args.model_path + '/model.cfg')"], {}), "(model, params, args.model_path + '/model.cfg')\n", (2697, 2744), False, 'from pynn.util import save_object_param\n'), ((2829, 2847), 'pynn.bin.print_model', 'print_model', (['model'], {}), '(model)\n', (2840, 2847), False, 'from pynn.bin import print_model, train_language_model\n'), ((2852, 2893), 'pynn.bin.train_language_model', 'train_language_model', (['model', 'args', 'device'], {}), '(model, args, device)\n', (2872, 2893), False, 'from pynn.bin import print_model, train_language_model\n'), ((3026, 3087), 'torch.distributed.init_process_group', 'dist.init_process_group', (['"""nccl"""'], {'rank': 'device', 'world_size': 'gpus'}), "('nccl', rank=device, world_size=gpus)\n", (3049, 3087), True, 'import torch.distributed as dist\n'), ((3092, 3112), 'torch.manual_seed', 'torch.manual_seed', (['(0)'], {}), '(0)\n', (3109, 3112), False, 'import torch\n'), ((3196, 3243), 'pynn.bin.train_language_model', 'train_language_model', (['model', 'args', 'device', 'gpus'], {}), '(model, args, device, gpus)\n', (3216, 3243), False, 'from pynn.bin import print_model, train_language_model\n'), ((3249, 3277), 'torch.distributed.destroy_process_group', 'dist.destroy_process_group', ([], {}), '()\n', (3275, 3277), True, 'import torch.distributed as dist\n'), ((3173, 3191), 'pynn.bin.print_model', 'print_model', (['model'], {}), '(model)\n', (3184, 3191), False, 'from pynn.bin import print_model, train_language_model\n'), ((3365, 3390), 'torch.cuda.device_count', 'torch.cuda.device_count', ([], {}), '()\n', (3388, 3390), False, 'import torch\n'), 
((3411, 3436), 'torch.cuda.device_count', 'torch.cuda.device_count', ([], {}), '()\n', (3434, 3436), False, 'import torch\n'), ((3532, 3602), 'torch.multiprocessing.spawn', 'mp.spawn', (['train_distributed'], {'nprocs': 'gpus', 'args': '(gpus, args)', 'join': '(True)'}), '(train_distributed, nprocs=gpus, args=(gpus, args), join=True)\n', (3540, 3602), True, 'import torch.multiprocessing as mp\n'), ((3635, 3660), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (3658, 3660), False, 'import torch\n'), ((3666, 3685), 'torch.device', 'torch.device', (['"""cpu"""'], {}), "('cpu')\n", (3678, 3685), False, 'import torch\n')]
|
import datetime
import logging
import uuid
import marshmallow as ma
from flask import url_for, g, jsonify
from flask.views import MethodView
from flask_smorest import Blueprint, abort
import http.client as http_client
from drift.core.extensions.jwt import current_user, requires_roles
from drift.core.extensions.urlregistry import Endpoints
from driftbase.config import get_server_heartbeat_config
from driftbase.models.db import (
Machine, Server, Match, ServerDaemonCommand
)
# Module-level logger for server lifecycle events.
log = logging.getLogger(__name__)

# REST blueprint mounted at /servers; Endpoints registers url helpers.
bp = Blueprint("servers", __name__, url_prefix="/servers", description="Battle server processes")
endpoints = Endpoints()
def drift_init_extension(app, api, **kwargs):
    """Register this blueprint and its endpoint helpers with the drift app."""
    api.register_blueprint(bp)
    endpoints.init_app(app)
def utcnow():
    """Return the current UTC time as a *naive* datetime.

    ``datetime.utcnow()`` is deprecated (Python 3.12+); use the tz-aware
    clock and strip the tzinfo so callers still receive the same naive
    datetime this function has always returned.
    """
    return datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
class ServersGetArgsSchema(ma.Schema):
    """Query-string arguments accepted by GET /servers."""
    machine_id = ma.fields.Integer()
    rows = ma.fields.Integer()
class ServersPostRequestSchema(ma.Schema):
    """Registration payload posted by a battle server / daemon process."""
    machine_id = ma.fields.Integer()
    version = ma.fields.String()
    public_ip = ma.fields.IPv4()
    port = ma.fields.Integer()
    command_line = ma.fields.String()
    command_line_custom = ma.fields.String()
    pid = ma.fields.Integer()
    status = ma.fields.String()
    image_name = ma.fields.String()
    instance_name = ma.fields.String()
    branch = ma.fields.String()
    commit_id = ma.fields.String()
    process_info = ma.fields.Dict()
    details = ma.fields.Dict()
    repository = ma.fields.String()
    ref = ma.fields.String()
    build = ma.fields.String()
    build_number = ma.fields.Integer()
    target_platform = ma.fields.String()
    build_info = ma.fields.Dict()
    placement = ma.fields.String()
class ServersPostResponseSchema(ma.Schema):
    """Response to a successful server registration (POST /servers)."""
    server_id = ma.fields.Integer(required=True)
    machine_id = ma.fields.Integer(required=True)
    url = ma.fields.Url(required=True)
    machine_url = ma.fields.Url(required=True)
    heartbeat_url = ma.fields.Url(required=True)
    commands_url = ma.fields.Url(required=True)
    token = ma.fields.String(required=True)
    next_heartbeat_seconds = ma.fields.Number(required=True)
    heartbeat_timeout = ma.fields.Str(required=True)
class ServerPutRequestSchema(ma.Schema):
    """Payload for updating an existing server record; only status is required."""
    status = ma.fields.String(required=True)
    machine_id = ma.fields.Integer()
    version = ma.fields.String()
    public_ip = ma.fields.IPv4()
    port = ma.fields.Integer()
    command_line = ma.fields.String()
    command_line_custom = ma.fields.String()
    pid = ma.fields.Integer()
    image_name = ma.fields.String()
    error = ma.fields.String()
    branch = ma.fields.String()
    commit_id = ma.fields.String()
    process_info = ma.fields.Dict()
    details = ma.fields.Dict()
    repository = ma.fields.String()
    ref = ma.fields.String()
    build = ma.fields.String()
    build_number = ma.fields.Integer()
    target_platform = ma.fields.String()
    build_info = ma.fields.Dict()
class ServerPutResponseSchema(ma.Schema):
    """Response body for a successful server update (``PUT /servers/<id>``)."""
    server_id = ma.fields.Integer(required=True)
    machine_id = ma.fields.Integer(required=True)
    url = ma.fields.Url(required=True)
    machine_url = ma.fields.Url(required=True)
    heartbeat_url = ma.fields.Url(required=True)
class ServerHeartbeatPutResponseSchema(ma.Schema):
    """Response body for a heartbeat (``PUT /servers/<id>/heartbeat``)."""
    last_heartbeat = ma.fields.DateTime(metadata=dict(description="Timestamp of the previous heartbeat"))
    this_heartbeat = ma.fields.DateTime(metadata=dict(description="Timestamp of this heartbeat"))
    next_heartbeat = ma.fields.DateTime(metadata=dict(description="Timestamp when the next heartbeat is expected"))
    next_heartbeat_seconds = ma.fields.Integer(metadata=dict(description="Number of seconds until the next heartbeat is expected"))
    heartbeat_timeout = ma.fields.DateTime(
        metadata=dict(description="Timestamp when the server times out if no heartbeat is received"))
    heartbeat_timeout_seconds = ma.fields.Integer(
        metadata=dict(description="Number of seconds until the server times out if no heartbeat is received"))
@bp.route('', endpoint='list')
class ServersAPI(MethodView):
    # Collection endpoint: list registered battle servers and register new ones.
    @requires_roles("service")
    @bp.arguments(ServersGetArgsSchema, location='query')
    def get(self, args):
        """
        Get a list of the last 100 battle servers that have been registered in
        the system.
        """
        # Cap the result set; default to the 100 most recent registrations.
        num_rows = args.get("rows") or 100
        query = g.db.query(Server)
        if args.get("machine_id"):
            query = query.filter(Server.machine_id == args.get("machine_id"))
        # Newest first (descending server_id).
        query = query.order_by(-Server.server_id)
        query = query.limit(num_rows)
        rows = query.all()
        ret = []
        for row in rows:
            record = row.as_dict()
            record["url"] = url_for("servers.entry", server_id=row.server_id, _external=True)
            ret.append(record)
        return jsonify(ret)
    @requires_roles("service")
    @bp.arguments(ServersPostRequestSchema)
    @bp.response(http_client.CREATED, ServersPostResponseSchema)
    def post(self, args):
        """
        The daemon process (and server, for local development) post here
        to register the server instance with the backend. You need to
        register the server before you can register a battle.
        """
        machine_id = args.get("machine_id")
        # NOTE(review): "realm" is not declared in ServersPostRequestSchema, so
        # args.get("realm") below presumably always logs None — confirm intent.
        log.info("registering a server on machine_id %s, realm %s and public_ip %s",
                 machine_id, args.get("realm"), args.get("public_ip"))
        # If we don't already have a machine we make one just in time now on the realm "Local".
        # This is to support local devs where an external daemon is not running and the server iself
        # does this registration without a prior registration on the machines endpoint
        if not machine_id:
            realm = "local"
            instance_name = args.get("instance_name")
            placement = args.get("placement") or "<unknown placement>"
            if not instance_name:
                abort(http_client.BAD_REQUEST, description="You need to supply an instance_name")
            # Reuse an existing local machine row for this (realm, instance, placement).
            machine = g.db.query(Machine).filter(Machine.realm == realm,
                                                 Machine.instance_name == instance_name,
                                                 Machine.placement == placement).first()
            if machine:
                machine_id = machine.machine_id
                log.info("machine_id %s found for server", machine_id)
            else:
                machine = Machine(realm=realm, instance_name=instance_name,
                                  placement=placement, server_count=0)
                g.db.add(machine)
                # Flush (not commit) so machine.machine_id is populated.
                g.db.flush()
                machine_id = machine.machine_id
                log.info("Created machine_id %s for server instance \"%s\"",
                         machine_id, instance_name)
        else:
            machine = g.db.query(Machine).get(machine_id)
            if not machine:
                abort(http_client.NOT_FOUND, description="Machine %s was not found" % machine_id)
        # Opaque per-registration token: 20 chars of a dash-stripped uuid4.
        token = str(uuid.uuid4()).replace("-", "")[:20]
        def get_or_null(ip):
            # IPv4 field objects are stringified; absent values become None.
            return ip and str(ip) or None
        server = Server(machine_id=machine_id,
                        version=args.get("version"),
                        public_ip=get_or_null(args.get("public_ip")),
                        port=args.get("port"),
                        command_line=args.get("command_line"),
                        command_line_custom=args.get("command_line_custom"),
                        pid=args.get("pid"),
                        status=args.get("status"),
                        image_name=args.get("image_name"),
                        branch=args.get("branch"),
                        commit_id=args.get("commit_id"),
                        process_info=args.get("process_info"),
                        details=args.get("details"),
                        repository=args.get("repository"),
                        ref=args.get("ref"),
                        build=args.get("build"),
                        build_number=args.get("build_number"),
                        target_platform=args.get("target_platform"),
                        build_info=args.get("build_info"),
                        token=token
                        )
        g.db.add(server)
        # Keep the owning machine's bookkeeping in step with the new server.
        machine.server_count += 1
        machine.server_date = utcnow()
        g.db.commit()
        server_id = server.server_id
        resource_url = url_for("servers.entry", server_id=server_id, _external=True)
        machine_url = url_for("machines.entry", machine_id=machine_id, _external=True)
        heartbeat_url = url_for("servers.heartbeat", server_id=server_id, _external=True)
        commands_url = url_for("servers.commands", server_id=server_id, _external=True)
        response_header = {
            "Location": resource_url,
        }
        log.info("Server %s has been registered on machine_id %s", server_id, machine_id)
        heartbeat_period, heartbeat_timeout = get_server_heartbeat_config()
        # (body, headers) tuple for flask-smorest; None lets the decorator pick the status.
        return {"server_id": server_id,
                "url": resource_url,
                "machine_id": machine_id,
                "machine_url": machine_url,
                "heartbeat_url": heartbeat_url,
                "commands_url": commands_url,
                "token": token,
                "next_heartbeat_seconds": heartbeat_period,
                "heartbeat_timeout": utcnow() + datetime.timedelta(seconds=heartbeat_timeout),
                }, None, response_header
@bp.route('/<int:server_id>', endpoint='entry')
class ServerAPI(MethodView):
    """
    Interface to battle servers instances. A battle server instance is
    a single run of a battle server executable. The battle server will
    have a single battle on it. You should never have a battle resource
    without an associated battle server resource.
    """
    @requires_roles("service")
    def get(self, server_id):
        """
        Get information about a single battle server instance.
        Returns information from the machine and the associated
        battle if found.
        """
        server = g.db.query(Server).get(server_id)
        if not server:
            log.warning("Requested a non-existant battle server: %s", server_id)
            abort(http_client.NOT_FOUND, description="Server not found")
        machine_id = server.machine_id
        record = server.as_dict()
        # Navigation URLs for the client.
        record["url"] = url_for("servers.entry", server_id=server_id, _external=True)
        record["heartbeat_url"] = url_for("servers.heartbeat", server_id=server_id, _external=True)
        record["commands_url"] = url_for("servers.commands", server_id=server_id, _external=True)
        record["machine_url"] = None
        if machine_id:
            machine = g.db.query(Machine).get(machine_id)
            if machine:
                record["machine_url"] = url_for("machines.entry", machine_id=machine_id,
                                                _external=True)
        # Summarize every match hosted on this server.
        matches = []
        rows = g.db.query(Match).filter(Match.server_id == server_id).all()
        for row in rows:
            match_id = row.match_id
            match = {"match_id": match_id,
                     "url": url_for("matches.entry", match_id=match_id, _external=True),
                     "num_players": row.num_players,
                     }
            matches.append(match)
        record["matches"] = matches
        # Only commands the daemon has not yet executed.
        commands = []
        rows = g.db.query(ServerDaemonCommand).filter(ServerDaemonCommand.server_id == server_id,
                                                    ServerDaemonCommand.status == "pending").all()
        for row in rows:
            command = {"command_id": row.command_id,
                       "command": row.command,
                       "arguments": row.arguments,
                       "create_date": row.create_date,
                       "url": url_for("servers.command", server_id=server_id,
                                      command_id=row.command_id, _external=True)
                       }
            commands.append(command)
        record["pending_commands"] = commands
        log.debug("Returning info for battle server %s", server_id)
        return jsonify(record)
    @requires_roles("service")
    @bp.arguments(ServerPutRequestSchema)
    @bp.response(http_client.OK, ServerPutResponseSchema)
    def put(self, args, server_id):
        """
        The battle server management (celery) process calls this to update
        the status of running a specific battle server task
        """
        log.info("Updating battle server %s", server_id)
        server = g.db.query(Server).get(server_id)
        if not server:
            abort(http_client.NOT_FOUND)
        if args.get("status"):
            log.info("Changing status of battle server %s from '%s' to '%s'",
                     server_id, server.status, args["status"])
        # public_ip needs stringification, so pop it before the generic copy below.
        public_ip = args.pop("public_ip", None)
        if public_ip:
            server.public_ip = str(public_ip)
        # Copy every remaining supplied field straight onto the row.
        for arg in args:
            setattr(server, arg, args[arg])
        g.db.commit()
        machine_id = server.machine_id
        machine_url = None
        if machine_id:
            machine_url = url_for("machines.entry", machine_id=machine_id, _external=True)
        return {"server_id": server_id,
                "url": url_for("servers.entry", server_id=server_id, _external=True),
                "machine_id": machine_id,
                "machine_url": machine_url,
                "heartbeat_url": url_for("servers.heartbeat", server_id=server_id, _external=True),
                }
@bp.route('/<int:server_id>/heartbeat', endpoint='heartbeat')
class ServerHeartbeatAPI(MethodView):
    """
    Thin heartbeat API
    """
    @requires_roles("service")
    @bp.response(http_client.OK, ServerHeartbeatPutResponseSchema)
    def put(self, server_id):
        """
        Battle server heartbeat

        Records a heartbeat for the server and returns the timing window for
        the next one. Responds 404 if the server does not exist, or if the
        previous heartbeat is older than the configured timeout.
        """
        log.debug("%s is heart beating battle server %s",
                  current_user.get("user_name", "unknown"), server_id)
        server = g.db.query(Server).get(server_id)
        if not server:
            abort(http_client.NOT_FOUND, description="Server not found")
        heartbeat_period, heartbeat_timeout = get_server_heartbeat_config()
        now = utcnow()
        last_heartbeat = server.heartbeat_date
        if last_heartbeat + datetime.timedelta(seconds=heartbeat_timeout) < now:
            msg = "Heartbeat timeout. Last heartbeat was at {} and now we are at {}" \
                .format(last_heartbeat, now)
            log.info(msg)
            # Use 'description=' like every other abort in this module.
            # This previously passed 'message=', which flask-smorest does not
            # feed into the werkzeug HTTPException description.
            abort(http_client.NOT_FOUND, description=msg)
        server.heartbeat_count += 1
        server.heartbeat_date = now
        g.db.commit()
        return {
            "last_heartbeat": last_heartbeat,
            "this_heartbeat": server.heartbeat_date,
            "next_heartbeat": server.heartbeat_date + datetime.timedelta(seconds=heartbeat_period),
            "next_heartbeat_seconds": heartbeat_period,
            "heartbeat_timeout": now + datetime.timedelta(seconds=heartbeat_timeout),
            "heartbeat_timeout_seconds": heartbeat_timeout,
        }
class ServerCommandsPostSchema(ma.Schema):
    """Request body for queueing a daemon command (``POST .../commands``)."""
    command = ma.fields.String(required=True)
    arguments = ma.fields.Dict()
    details = ma.fields.Dict()
@bp.route('/<int:server_id>/commands', endpoint='commands')
class ServerCommandsAPI(MethodView):
    """
    Commands for the battle server daemon
    """
    @requires_roles("service")
    @bp.arguments(ServerCommandsPostSchema)
    def post(self, args, server_id):
        """
        Add a new command for the daemon to execute

        Returns 201 Created with the command id, its resource URL and the
        initial status. Responds 404 if the server does not exist.
        """
        server = g.db.query(Server).get(server_id)
        if not server:
            abort(http_client.NOT_FOUND)
        status = "pending"
        command = ServerDaemonCommand(server_id=server_id,
                                      command=args["command"],
                                      arguments=args.get("arguments"),
                                      details=args.get("details"),
                                      status=status,
                                      )
        g.db.add(command)
        g.db.commit()
        resource_url = url_for("servers.command", server_id=server_id,
                               command_id=command.command_id, _external=True)
        # Point the 201 at the created resource via the 'Location' header,
        # mirroring the convention used by the server registration endpoint
        # (previously the header slot was left as None).
        response_header = {"Location": resource_url}
        return jsonify({"command_id": command.command_id,
                        "url": resource_url,
                        "status": status,
                        }), http_client.CREATED, response_header
    @requires_roles("service")
    def get(self, server_id):
        """Return every daemon command ever issued for this server."""
        rows = g.db.query(ServerDaemonCommand) \
            .filter(ServerDaemonCommand.server_id == server_id) \
            .all()
        ret = []
        for r in rows:
            command = r.as_dict()
            command["url"] = url_for("servers.command",
                                     server_id=server_id,
                                     command_id=r.command_id,
                                     _external=True)
            ret.append(command)
        return jsonify(ret)
class ServerCommandPatchSchema(ma.Schema):
    """Request body for updating a daemon command's status (PATCH/PUT)."""
    status = ma.fields.String(required=True)
    details = ma.fields.Dict()
@bp.route('/<int:server_id>/commands/<int:command_id>', endpoint='command')
class ServerCommandAPI(MethodView):
    """Single daemon command: update its status, or fetch it."""
    @requires_roles("service")
    @bp.arguments(ServerCommandPatchSchema)
    def patch(self, args, server_id, command_id):
        return self._patch(args, server_id, command_id)
    @requires_roles("service")
    @bp.arguments(ServerCommandPatchSchema)
    def put(self, args, server_id, command_id):
        # PUT and PATCH are deliberately equivalent for this resource.
        return self._patch(args, server_id, command_id)
    def _patch(self, args, server_id, command_id):
        """
        Update the status (and optionally details) of a daemon command.

        Responds 404 if either the server or the command does not exist.
        """
        server = g.db.query(Server).get(server_id)
        if not server:
            abort(http_client.NOT_FOUND)
        row = g.db.query(ServerDaemonCommand).get(command_id)
        if not row:
            # Previously an unknown command_id fell through to an
            # AttributeError (500); return a proper 404 like the server
            # lookup above.
            abort(http_client.NOT_FOUND)
        row.status = args["status"]
        row.status_date = utcnow()
        if "details" in args:
            row.details = args["details"]
        g.db.commit()
        ret = row.as_dict()
        ret["url"] = url_for("servers.command", server_id=server_id, command_id=row.command_id,
                          _external=True)
        return jsonify(ret)
    @requires_roles("service")
    def get(self, server_id, command_id):
        """Fetch a single daemon command as a dict with its resource URL."""
        row = g.db.query(ServerDaemonCommand).get(command_id)
        if not row:
            # Same 404-instead-of-500 handling as _patch.
            abort(http_client.NOT_FOUND)
        ret = row.as_dict()
        ret["url"] = url_for("servers.command", server_id=server_id, command_id=row.command_id,
                          _external=True)
        return jsonify(ret)
@endpoints.register
def endpoint_info(*args):
    """Advertise this module's root URL for service endpoint discovery."""
    return {"servers": url_for("servers.list", _external=True)}
|
[
"marshmallow.fields.Dict",
"flask.g.db.commit",
"logging.getLogger",
"flask_smorest.Blueprint",
"datetime.datetime.utcnow",
"flask.jsonify",
"flask.url_for",
"driftbase.models.db.Machine",
"marshmallow.fields.Url",
"marshmallow.fields.Integer",
"datetime.timedelta",
"marshmallow.fields.String",
"drift.core.extensions.jwt.current_user.get",
"marshmallow.fields.Number",
"flask.g.db.add",
"flask.g.db.query",
"flask.g.db.flush",
"uuid.uuid4",
"driftbase.config.get_server_heartbeat_config",
"marshmallow.fields.Str",
"drift.core.extensions.urlregistry.Endpoints",
"drift.core.extensions.jwt.requires_roles",
"marshmallow.fields.IPv4",
"flask_smorest.abort"
] |
[((491, 518), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (508, 518), False, 'import logging\n'), ((525, 622), 'flask_smorest.Blueprint', 'Blueprint', (['"""servers"""', '__name__'], {'url_prefix': '"""/servers"""', 'description': '"""Battle server processes"""'}), "('servers', __name__, url_prefix='/servers', description=\n 'Battle server processes')\n", (534, 622), False, 'from flask_smorest import Blueprint, abort\n'), ((630, 641), 'drift.core.extensions.urlregistry.Endpoints', 'Endpoints', ([], {}), '()\n', (639, 641), False, 'from drift.core.extensions.urlregistry import Endpoints\n'), ((776, 802), 'datetime.datetime.utcnow', 'datetime.datetime.utcnow', ([], {}), '()\n', (800, 802), False, 'import datetime\n'), ((861, 880), 'marshmallow.fields.Integer', 'ma.fields.Integer', ([], {}), '()\n', (878, 880), True, 'import marshmallow as ma\n'), ((892, 911), 'marshmallow.fields.Integer', 'ma.fields.Integer', ([], {}), '()\n', (909, 911), True, 'import marshmallow as ma\n'), ((974, 993), 'marshmallow.fields.Integer', 'ma.fields.Integer', ([], {}), '()\n', (991, 993), True, 'import marshmallow as ma\n'), ((1008, 1026), 'marshmallow.fields.String', 'ma.fields.String', ([], {}), '()\n', (1024, 1026), True, 'import marshmallow as ma\n'), ((1043, 1059), 'marshmallow.fields.IPv4', 'ma.fields.IPv4', ([], {}), '()\n', (1057, 1059), True, 'import marshmallow as ma\n'), ((1071, 1090), 'marshmallow.fields.Integer', 'ma.fields.Integer', ([], {}), '()\n', (1088, 1090), True, 'import marshmallow as ma\n'), ((1110, 1128), 'marshmallow.fields.String', 'ma.fields.String', ([], {}), '()\n', (1126, 1128), True, 'import marshmallow as ma\n'), ((1155, 1173), 'marshmallow.fields.String', 'ma.fields.String', ([], {}), '()\n', (1171, 1173), True, 'import marshmallow as ma\n'), ((1184, 1203), 'marshmallow.fields.Integer', 'ma.fields.Integer', ([], {}), '()\n', (1201, 1203), True, 'import marshmallow as ma\n'), ((1217, 1235), 'marshmallow.fields.String', 
'ma.fields.String', ([], {}), '()\n', (1233, 1235), True, 'import marshmallow as ma\n'), ((1253, 1271), 'marshmallow.fields.String', 'ma.fields.String', ([], {}), '()\n', (1269, 1271), True, 'import marshmallow as ma\n'), ((1292, 1310), 'marshmallow.fields.String', 'ma.fields.String', ([], {}), '()\n', (1308, 1310), True, 'import marshmallow as ma\n'), ((1324, 1342), 'marshmallow.fields.String', 'ma.fields.String', ([], {}), '()\n', (1340, 1342), True, 'import marshmallow as ma\n'), ((1359, 1377), 'marshmallow.fields.String', 'ma.fields.String', ([], {}), '()\n', (1375, 1377), True, 'import marshmallow as ma\n'), ((1397, 1413), 'marshmallow.fields.Dict', 'ma.fields.Dict', ([], {}), '()\n', (1411, 1413), True, 'import marshmallow as ma\n'), ((1428, 1444), 'marshmallow.fields.Dict', 'ma.fields.Dict', ([], {}), '()\n', (1442, 1444), True, 'import marshmallow as ma\n'), ((1462, 1480), 'marshmallow.fields.String', 'ma.fields.String', ([], {}), '()\n', (1478, 1480), True, 'import marshmallow as ma\n'), ((1491, 1509), 'marshmallow.fields.String', 'ma.fields.String', ([], {}), '()\n', (1507, 1509), True, 'import marshmallow as ma\n'), ((1522, 1540), 'marshmallow.fields.String', 'ma.fields.String', ([], {}), '()\n', (1538, 1540), True, 'import marshmallow as ma\n'), ((1560, 1579), 'marshmallow.fields.Integer', 'ma.fields.Integer', ([], {}), '()\n', (1577, 1579), True, 'import marshmallow as ma\n'), ((1602, 1620), 'marshmallow.fields.String', 'ma.fields.String', ([], {}), '()\n', (1618, 1620), True, 'import marshmallow as ma\n'), ((1638, 1654), 'marshmallow.fields.Dict', 'ma.fields.Dict', ([], {}), '()\n', (1652, 1654), True, 'import marshmallow as ma\n'), ((1671, 1689), 'marshmallow.fields.String', 'ma.fields.String', ([], {}), '()\n', (1687, 1689), True, 'import marshmallow as ma\n'), ((1752, 1784), 'marshmallow.fields.Integer', 'ma.fields.Integer', ([], {'required': '(True)'}), '(required=True)\n', (1769, 1784), True, 'import marshmallow as ma\n'), ((1802, 1834), 
'marshmallow.fields.Integer', 'ma.fields.Integer', ([], {'required': '(True)'}), '(required=True)\n', (1819, 1834), True, 'import marshmallow as ma\n'), ((1845, 1873), 'marshmallow.fields.Url', 'ma.fields.Url', ([], {'required': '(True)'}), '(required=True)\n', (1858, 1873), True, 'import marshmallow as ma\n'), ((1892, 1920), 'marshmallow.fields.Url', 'ma.fields.Url', ([], {'required': '(True)'}), '(required=True)\n', (1905, 1920), True, 'import marshmallow as ma\n'), ((1941, 1969), 'marshmallow.fields.Url', 'ma.fields.Url', ([], {'required': '(True)'}), '(required=True)\n', (1954, 1969), True, 'import marshmallow as ma\n'), ((1989, 2017), 'marshmallow.fields.Url', 'ma.fields.Url', ([], {'required': '(True)'}), '(required=True)\n', (2002, 2017), True, 'import marshmallow as ma\n'), ((2030, 2061), 'marshmallow.fields.String', 'ma.fields.String', ([], {'required': '(True)'}), '(required=True)\n', (2046, 2061), True, 'import marshmallow as ma\n'), ((2091, 2122), 'marshmallow.fields.Number', 'ma.fields.Number', ([], {'required': '(True)'}), '(required=True)\n', (2107, 2122), True, 'import marshmallow as ma\n'), ((2147, 2175), 'marshmallow.fields.Str', 'ma.fields.Str', ([], {'required': '(True)'}), '(required=True)\n', (2160, 2175), True, 'import marshmallow as ma\n'), ((2232, 2263), 'marshmallow.fields.String', 'ma.fields.String', ([], {'required': '(True)'}), '(required=True)\n', (2248, 2263), True, 'import marshmallow as ma\n'), ((2282, 2301), 'marshmallow.fields.Integer', 'ma.fields.Integer', ([], {}), '()\n', (2299, 2301), True, 'import marshmallow as ma\n'), ((2316, 2334), 'marshmallow.fields.String', 'ma.fields.String', ([], {}), '()\n', (2332, 2334), True, 'import marshmallow as ma\n'), ((2351, 2367), 'marshmallow.fields.IPv4', 'ma.fields.IPv4', ([], {}), '()\n', (2365, 2367), True, 'import marshmallow as ma\n'), ((2379, 2398), 'marshmallow.fields.Integer', 'ma.fields.Integer', ([], {}), '()\n', (2396, 2398), True, 'import marshmallow as ma\n'), ((2418, 2436), 
'marshmallow.fields.String', 'ma.fields.String', ([], {}), '()\n', (2434, 2436), True, 'import marshmallow as ma\n'), ((2463, 2481), 'marshmallow.fields.String', 'ma.fields.String', ([], {}), '()\n', (2479, 2481), True, 'import marshmallow as ma\n'), ((2492, 2511), 'marshmallow.fields.Integer', 'ma.fields.Integer', ([], {}), '()\n', (2509, 2511), True, 'import marshmallow as ma\n'), ((2529, 2547), 'marshmallow.fields.String', 'ma.fields.String', ([], {}), '()\n', (2545, 2547), True, 'import marshmallow as ma\n'), ((2560, 2578), 'marshmallow.fields.String', 'ma.fields.String', ([], {}), '()\n', (2576, 2578), True, 'import marshmallow as ma\n'), ((2592, 2610), 'marshmallow.fields.String', 'ma.fields.String', ([], {}), '()\n', (2608, 2610), True, 'import marshmallow as ma\n'), ((2627, 2645), 'marshmallow.fields.String', 'ma.fields.String', ([], {}), '()\n', (2643, 2645), True, 'import marshmallow as ma\n'), ((2665, 2681), 'marshmallow.fields.Dict', 'ma.fields.Dict', ([], {}), '()\n', (2679, 2681), True, 'import marshmallow as ma\n'), ((2696, 2712), 'marshmallow.fields.Dict', 'ma.fields.Dict', ([], {}), '()\n', (2710, 2712), True, 'import marshmallow as ma\n'), ((2730, 2748), 'marshmallow.fields.String', 'ma.fields.String', ([], {}), '()\n', (2746, 2748), True, 'import marshmallow as ma\n'), ((2759, 2777), 'marshmallow.fields.String', 'ma.fields.String', ([], {}), '()\n', (2775, 2777), True, 'import marshmallow as ma\n'), ((2790, 2808), 'marshmallow.fields.String', 'ma.fields.String', ([], {}), '()\n', (2806, 2808), True, 'import marshmallow as ma\n'), ((2828, 2847), 'marshmallow.fields.Integer', 'ma.fields.Integer', ([], {}), '()\n', (2845, 2847), True, 'import marshmallow as ma\n'), ((2870, 2888), 'marshmallow.fields.String', 'ma.fields.String', ([], {}), '()\n', (2886, 2888), True, 'import marshmallow as ma\n'), ((2906, 2922), 'marshmallow.fields.Dict', 'ma.fields.Dict', ([], {}), '()\n', (2920, 2922), True, 'import marshmallow as ma\n'), ((2983, 3015), 
'marshmallow.fields.Integer', 'ma.fields.Integer', ([], {'required': '(True)'}), '(required=True)\n', (3000, 3015), True, 'import marshmallow as ma\n'), ((3033, 3065), 'marshmallow.fields.Integer', 'ma.fields.Integer', ([], {'required': '(True)'}), '(required=True)\n', (3050, 3065), True, 'import marshmallow as ma\n'), ((3076, 3104), 'marshmallow.fields.Url', 'ma.fields.Url', ([], {'required': '(True)'}), '(required=True)\n', (3089, 3104), True, 'import marshmallow as ma\n'), ((3123, 3151), 'marshmallow.fields.Url', 'ma.fields.Url', ([], {'required': '(True)'}), '(required=True)\n', (3136, 3151), True, 'import marshmallow as ma\n'), ((3172, 3200), 'marshmallow.fields.Url', 'ma.fields.Url', ([], {'required': '(True)'}), '(required=True)\n', (3185, 3200), True, 'import marshmallow as ma\n'), ((4082, 4107), 'drift.core.extensions.jwt.requires_roles', 'requires_roles', (['"""service"""'], {}), "('service')\n", (4096, 4107), False, 'from drift.core.extensions.jwt import current_user, requires_roles\n'), ((4857, 4882), 'drift.core.extensions.jwt.requires_roles', 'requires_roles', (['"""service"""'], {}), "('service')\n", (4871, 4882), False, 'from drift.core.extensions.jwt import current_user, requires_roles\n'), ((9915, 9940), 'drift.core.extensions.jwt.requires_roles', 'requires_roles', (['"""service"""'], {}), "('service')\n", (9929, 9940), False, 'from drift.core.extensions.jwt import current_user, requires_roles\n'), ((12293, 12318), 'drift.core.extensions.jwt.requires_roles', 'requires_roles', (['"""service"""'], {}), "('service')\n", (12307, 12318), False, 'from drift.core.extensions.jwt import current_user, requires_roles\n'), ((13824, 13849), 'drift.core.extensions.jwt.requires_roles', 'requires_roles', (['"""service"""'], {}), "('service')\n", (13838, 13849), False, 'from drift.core.extensions.jwt import current_user, requires_roles\n'), ((15302, 15333), 'marshmallow.fields.String', 'ma.fields.String', ([], {'required': '(True)'}), '(required=True)\n', (15318, 
15333), True, 'import marshmallow as ma\n'), ((15350, 15366), 'marshmallow.fields.Dict', 'ma.fields.Dict', ([], {}), '()\n', (15364, 15366), True, 'import marshmallow as ma\n'), ((15381, 15397), 'marshmallow.fields.Dict', 'ma.fields.Dict', ([], {}), '()\n', (15395, 15397), True, 'import marshmallow as ma\n'), ((15561, 15586), 'drift.core.extensions.jwt.requires_roles', 'requires_roles', (['"""service"""'], {}), "('service')\n", (15575, 15586), False, 'from drift.core.extensions.jwt import current_user, requires_roles\n'), ((16643, 16668), 'drift.core.extensions.jwt.requires_roles', 'requires_roles', (['"""service"""'], {}), "('service')\n", (16657, 16668), False, 'from drift.core.extensions.jwt import current_user, requires_roles\n'), ((17254, 17285), 'marshmallow.fields.String', 'ma.fields.String', ([], {'required': '(True)'}), '(required=True)\n', (17270, 17285), True, 'import marshmallow as ma\n'), ((17300, 17316), 'marshmallow.fields.Dict', 'ma.fields.Dict', ([], {}), '()\n', (17314, 17316), True, 'import marshmallow as ma\n'), ((17436, 17461), 'drift.core.extensions.jwt.requires_roles', 'requires_roles', (['"""service"""'], {}), "('service')\n", (17450, 17461), False, 'from drift.core.extensions.jwt import current_user, requires_roles\n'), ((17618, 17643), 'drift.core.extensions.jwt.requires_roles', 'requires_roles', (['"""service"""'], {}), "('service')\n", (17632, 17643), False, 'from drift.core.extensions.jwt import current_user, requires_roles\n'), ((18467, 18492), 'drift.core.extensions.jwt.requires_roles', 'requires_roles', (['"""service"""'], {}), "('service')\n", (18481, 18492), False, 'from drift.core.extensions.jwt import current_user, requires_roles\n'), ((4373, 4391), 'flask.g.db.query', 'g.db.query', (['Server'], {}), '(Server)\n', (4383, 4391), False, 'from flask import url_for, g, jsonify\n'), ((4838, 4850), 'flask.jsonify', 'jsonify', (['ret'], {}), '(ret)\n', (4845, 4850), False, 'from flask import url_for, g, jsonify\n'), ((8321, 8337), 
'flask.g.db.add', 'g.db.add', (['server'], {}), '(server)\n', (8329, 8337), False, 'from flask import url_for, g, jsonify\n'), ((8420, 8433), 'flask.g.db.commit', 'g.db.commit', ([], {}), '()\n', (8431, 8433), False, 'from flask import url_for, g, jsonify\n'), ((8496, 8557), 'flask.url_for', 'url_for', (['"""servers.entry"""'], {'server_id': 'server_id', '_external': '(True)'}), "('servers.entry', server_id=server_id, _external=True)\n", (8503, 8557), False, 'from flask import url_for, g, jsonify\n'), ((8580, 8644), 'flask.url_for', 'url_for', (['"""machines.entry"""'], {'machine_id': 'machine_id', '_external': '(True)'}), "('machines.entry', machine_id=machine_id, _external=True)\n", (8587, 8644), False, 'from flask import url_for, g, jsonify\n'), ((8669, 8734), 'flask.url_for', 'url_for', (['"""servers.heartbeat"""'], {'server_id': 'server_id', '_external': '(True)'}), "('servers.heartbeat', server_id=server_id, _external=True)\n", (8676, 8734), False, 'from flask import url_for, g, jsonify\n'), ((8758, 8822), 'flask.url_for', 'url_for', (['"""servers.commands"""'], {'server_id': 'server_id', '_external': '(True)'}), "('servers.commands', server_id=server_id, _external=True)\n", (8765, 8822), False, 'from flask import url_for, g, jsonify\n'), ((9035, 9064), 'driftbase.config.get_server_heartbeat_config', 'get_server_heartbeat_config', ([], {}), '()\n', (9062, 9064), False, 'from driftbase.config import get_server_heartbeat_config\n'), ((10474, 10535), 'flask.url_for', 'url_for', (['"""servers.entry"""'], {'server_id': 'server_id', '_external': '(True)'}), "('servers.entry', server_id=server_id, _external=True)\n", (10481, 10535), False, 'from flask import url_for, g, jsonify\n'), ((10570, 10635), 'flask.url_for', 'url_for', (['"""servers.heartbeat"""'], {'server_id': 'server_id', '_external': '(True)'}), "('servers.heartbeat', server_id=server_id, _external=True)\n", (10577, 10635), False, 'from flask import url_for, g, jsonify\n'), ((10669, 10733), 
'flask.url_for', 'url_for', (['"""servers.commands"""'], {'server_id': 'server_id', '_external': '(True)'}), "('servers.commands', server_id=server_id, _external=True)\n", (10676, 10733), False, 'from flask import url_for, g, jsonify\n'), ((12271, 12286), 'flask.jsonify', 'jsonify', (['record'], {}), '(record)\n', (12278, 12286), False, 'from flask import url_for, g, jsonify\n'), ((13151, 13164), 'flask.g.db.commit', 'g.db.commit', ([], {}), '()\n', (13162, 13164), False, 'from flask import url_for, g, jsonify\n'), ((14326, 14355), 'driftbase.config.get_server_heartbeat_config', 'get_server_heartbeat_config', ([], {}), '()\n', (14353, 14355), False, 'from driftbase.config import get_server_heartbeat_config\n'), ((14800, 14813), 'flask.g.db.commit', 'g.db.commit', ([], {}), '()\n', (14811, 14813), False, 'from flask import url_for, g, jsonify\n'), ((16248, 16265), 'flask.g.db.add', 'g.db.add', (['command'], {}), '(command)\n', (16256, 16265), False, 'from flask import url_for, g, jsonify\n'), ((16274, 16287), 'flask.g.db.commit', 'g.db.commit', ([], {}), '()\n', (16285, 16287), False, 'from flask import url_for, g, jsonify\n'), ((16312, 16411), 'flask.url_for', 'url_for', (['"""servers.command"""'], {'server_id': 'server_id', 'command_id': 'command.command_id', '_external': '(True)'}), "('servers.command', server_id=server_id, command_id=command.\n command_id, _external=True)\n", (16319, 16411), False, 'from flask import url_for, g, jsonify\n'), ((17183, 17195), 'flask.jsonify', 'jsonify', (['ret'], {}), '(ret)\n', (17190, 17195), False, 'from flask import url_for, g, jsonify\n'), ((18249, 18262), 'flask.g.db.commit', 'g.db.commit', ([], {}), '()\n', (18260, 18262), False, 'from flask import url_for, g, jsonify\n'), ((18313, 18407), 'flask.url_for', 'url_for', (['"""servers.command"""'], {'server_id': 'server_id', 'command_id': 'row.command_id', '_external': '(True)'}), "('servers.command', server_id=server_id, command_id=row.command_id,\n _external=True)\n", 
(18320, 18407), False, 'from flask import url_for, g, jsonify\n'), ((18448, 18460), 'flask.jsonify', 'jsonify', (['ret'], {}), '(ret)\n', (18455, 18460), False, 'from flask import url_for, g, jsonify\n'), ((18646, 18740), 'flask.url_for', 'url_for', (['"""servers.command"""'], {'server_id': 'server_id', 'command_id': 'row.command_id', '_external': '(True)'}), "('servers.command', server_id=server_id, command_id=row.command_id,\n _external=True)\n", (18653, 18740), False, 'from flask import url_for, g, jsonify\n'), ((18781, 18793), 'flask.jsonify', 'jsonify', (['ret'], {}), '(ret)\n', (18788, 18793), False, 'from flask import url_for, g, jsonify\n'), ((18864, 18903), 'flask.url_for', 'url_for', (['"""servers.list"""'], {'_external': '(True)'}), "('servers.list', _external=True)\n", (18871, 18903), False, 'from flask import url_for, g, jsonify\n'), ((4726, 4791), 'flask.url_for', 'url_for', (['"""servers.entry"""'], {'server_id': 'row.server_id', '_external': '(True)'}), "('servers.entry', server_id=row.server_id, _external=True)\n", (4733, 4791), False, 'from flask import url_for, g, jsonify\n'), ((10315, 10375), 'flask_smorest.abort', 'abort', (['http_client.NOT_FOUND'], {'description': '"""Server not found"""'}), "(http_client.NOT_FOUND, description='Server not found')\n", (10320, 10375), False, 'from flask_smorest import Blueprint, abort\n'), ((12757, 12785), 'flask_smorest.abort', 'abort', (['http_client.NOT_FOUND'], {}), '(http_client.NOT_FOUND)\n', (12762, 12785), False, 'from flask_smorest import Blueprint, abort\n'), ((13281, 13345), 'flask.url_for', 'url_for', (['"""machines.entry"""'], {'machine_id': 'machine_id', '_external': '(True)'}), "('machines.entry', machine_id=machine_id, _external=True)\n", (13288, 13345), False, 'from flask import url_for, g, jsonify\n'), ((13410, 13471), 'flask.url_for', 'url_for', (['"""servers.entry"""'], {'server_id': 'server_id', '_external': '(True)'}), "('servers.entry', server_id=server_id, _external=True)\n", (13417, 
13471), False, 'from flask import url_for, g, jsonify\n'), ((13592, 13657), 'flask.url_for', 'url_for', (['"""servers.heartbeat"""'], {'server_id': 'server_id', '_external': '(True)'}), "('servers.heartbeat', server_id=server_id, _external=True)\n", (13599, 13657), False, 'from flask import url_for, g, jsonify\n'), ((14079, 14119), 'drift.core.extensions.jwt.current_user.get', 'current_user.get', (['"""user_name"""', '"""unknown"""'], {}), "('user_name', 'unknown')\n", (14095, 14119), False, 'from drift.core.extensions.jwt import current_user, requires_roles\n'), ((14218, 14278), 'flask_smorest.abort', 'abort', (['http_client.NOT_FOUND'], {'description': '"""Server not found"""'}), "(http_client.NOT_FOUND, description='Server not found')\n", (14223, 14278), False, 'from flask_smorest import Blueprint, abort\n'), ((14678, 14719), 'flask_smorest.abort', 'abort', (['http_client.NOT_FOUND'], {'message': 'msg'}), '(http_client.NOT_FOUND, message=msg)\n', (14683, 14719), False, 'from flask_smorest import Blueprint, abort\n'), ((15830, 15858), 'flask_smorest.abort', 'abort', (['http_client.NOT_FOUND'], {}), '(http_client.NOT_FOUND)\n', (15835, 15858), False, 'from flask_smorest import Blueprint, abort\n'), ((16453, 16539), 'flask.jsonify', 'jsonify', (["{'command_id': command.command_id, 'url': resource_url, 'status': status}"], {}), "({'command_id': command.command_id, 'url': resource_url, 'status':\n status})\n", (16460, 16539), False, 'from flask import url_for, g, jsonify\n'), ((16936, 17028), 'flask.url_for', 'url_for', (['"""servers.command"""'], {'server_id': 'server_id', 'command_id': 'r.command_id', '_external': '(True)'}), "('servers.command', server_id=server_id, command_id=r.command_id,\n _external=True)\n", (16943, 17028), False, 'from flask import url_for, g, jsonify\n'), ((18006, 18034), 'flask_smorest.abort', 'abort', (['http_client.NOT_FOUND'], {}), '(http_client.NOT_FOUND)\n', (18011, 18034), False, 'from flask_smorest import Blueprint, abort\n'), 
((5961, 6047), 'flask_smorest.abort', 'abort', (['http_client.BAD_REQUEST'], {'description': '"""You need to supply an instance_name"""'}), "(http_client.BAD_REQUEST, description=\n 'You need to supply an instance_name')\n", (5966, 6047), False, 'from flask_smorest import Blueprint, abort\n'), ((6482, 6572), 'driftbase.models.db.Machine', 'Machine', ([], {'realm': 'realm', 'instance_name': 'instance_name', 'placement': 'placement', 'server_count': '(0)'}), '(realm=realm, instance_name=instance_name, placement=placement,\n server_count=0)\n', (6489, 6572), False, 'from driftbase.models.db import Machine, Server, Match, ServerDaemonCommand\n'), ((6619, 6636), 'flask.g.db.add', 'g.db.add', (['machine'], {}), '(machine)\n', (6627, 6636), False, 'from flask import url_for, g, jsonify\n'), ((6653, 6665), 'flask.g.db.flush', 'g.db.flush', ([], {}), '()\n', (6663, 6665), False, 'from flask import url_for, g, jsonify\n'), ((6959, 7044), 'flask_smorest.abort', 'abort', (['http_client.NOT_FOUND'], {'description': "('Machine %s was not found' % machine_id)"}), "(http_client.NOT_FOUND, description='Machine %s was not found' %\n machine_id)\n", (6964, 7044), False, 'from flask_smorest import Blueprint, abort\n'), ((10164, 10182), 'flask.g.db.query', 'g.db.query', (['Server'], {}), '(Server)\n', (10174, 10182), False, 'from flask import url_for, g, jsonify\n'), ((10917, 10981), 'flask.url_for', 'url_for', (['"""machines.entry"""'], {'machine_id': 'machine_id', '_external': '(True)'}), "('machines.entry', machine_id=machine_id, _external=True)\n", (10924, 10981), False, 'from flask import url_for, g, jsonify\n'), ((11260, 11319), 'flask.url_for', 'url_for', (['"""matches.entry"""'], {'match_id': 'match_id', '_external': '(True)'}), "('matches.entry', match_id=match_id, _external=True)\n", (11267, 11319), False, 'from flask import url_for, g, jsonify\n'), ((11950, 12044), 'flask.url_for', 'url_for', (['"""servers.command"""'], {'server_id': 'server_id', 'command_id': 
'row.command_id', '_external': '(True)'}), "('servers.command', server_id=server_id, command_id=row.command_id,\n _external=True)\n", (11957, 12044), False, 'from flask import url_for, g, jsonify\n'), ((12688, 12706), 'flask.g.db.query', 'g.db.query', (['Server'], {}), '(Server)\n', (12698, 12706), False, 'from flask import url_for, g, jsonify\n'), ((14149, 14167), 'flask.g.db.query', 'g.db.query', (['Server'], {}), '(Server)\n', (14159, 14167), False, 'from flask import url_for, g, jsonify\n'), ((14455, 14500), 'datetime.timedelta', 'datetime.timedelta', ([], {'seconds': 'heartbeat_timeout'}), '(seconds=heartbeat_timeout)\n', (14473, 14500), False, 'import datetime\n'), ((14985, 15029), 'datetime.timedelta', 'datetime.timedelta', ([], {'seconds': 'heartbeat_period'}), '(seconds=heartbeat_period)\n', (15003, 15029), False, 'import datetime\n'), ((15126, 15171), 'datetime.timedelta', 'datetime.timedelta', ([], {'seconds': 'heartbeat_timeout'}), '(seconds=heartbeat_timeout)\n', (15144, 15171), False, 'import datetime\n'), ((15761, 15779), 'flask.g.db.query', 'g.db.query', (['Server'], {}), '(Server)\n', (15771, 15779), False, 'from flask import url_for, g, jsonify\n'), ((17937, 17955), 'flask.g.db.query', 'g.db.query', (['Server'], {}), '(Server)\n', (17947, 17955), False, 'from flask import url_for, g, jsonify\n'), ((18050, 18081), 'flask.g.db.query', 'g.db.query', (['ServerDaemonCommand'], {}), '(ServerDaemonCommand)\n', (18060, 18081), False, 'from flask import url_for, g, jsonify\n'), ((18549, 18580), 'flask.g.db.query', 'g.db.query', (['ServerDaemonCommand'], {}), '(ServerDaemonCommand)\n', (18559, 18580), False, 'from flask import url_for, g, jsonify\n'), ((6879, 6898), 'flask.g.db.query', 'g.db.query', (['Machine'], {}), '(Machine)\n', (6889, 6898), False, 'from flask import url_for, g, jsonify\n'), ((9462, 9507), 'datetime.timedelta', 'datetime.timedelta', ([], {'seconds': 'heartbeat_timeout'}), '(seconds=heartbeat_timeout)\n', (9480, 9507), False, 'import 
datetime\n'), ((10817, 10836), 'flask.g.db.query', 'g.db.query', (['Machine'], {}), '(Machine)\n', (10827, 10836), False, 'from flask import url_for, g, jsonify\n'), ((7062, 7074), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (7072, 7074), False, 'import uuid\n'), ((11067, 11084), 'flask.g.db.query', 'g.db.query', (['Match'], {}), '(Match)\n', (11077, 11084), False, 'from flask import url_for, g, jsonify\n'), ((11505, 11536), 'flask.g.db.query', 'g.db.query', (['ServerDaemonCommand'], {}), '(ServerDaemonCommand)\n', (11515, 11536), False, 'from flask import url_for, g, jsonify\n'), ((16714, 16745), 'flask.g.db.query', 'g.db.query', (['ServerDaemonCommand'], {}), '(ServerDaemonCommand)\n', (16724, 16745), False, 'from flask import url_for, g, jsonify\n'), ((6066, 6085), 'flask.g.db.query', 'g.db.query', (['Machine'], {}), '(Machine)\n', (6076, 6085), False, 'from flask import url_for, g, jsonify\n')]
|
import os
stream = os.popen('echo Returned output')
output = stream.read()
output
|
[
"os.popen"
] |
[((20, 52), 'os.popen', 'os.popen', (['"""echo Returned output"""'], {}), "('echo Returned output')\n", (28, 52), False, 'import os\n')]
|
import time
import logging
from mcsf.commands.base import Command
from mcsf.services.backup import BackupService
from mcsf.services.json_storage import JsonStorage
from mcsf.services.ssh import SshService
from mcsf.services.vultr import VultrService
class UpCommand(Command):
def __init__(self):
self.json_storage = JsonStorage()
def handle(self, args):
alias = args.alias
storage_key = 'SERVER_{}'.format(alias)
if not self.json_storage.has('SSHKEYID'):
logging.error('Please configure MCSF first.')
exit(1)
vultr = VultrService()
if self.json_storage.has(storage_key):
logging.error('This alias is in use.')
exit(2)
logging.info('Creating new server ...')
sub_id = vultr.start_new_server()
self.json_storage.set(storage_key, sub_id)
logging.info('Waiting server to get online ...')
server = {}
while True:
try:
server = vultr.get_server_info(sub_id)
time.sleep(5)
if server['main_ip'] == '0.0.0.0':
continue
except KeyError:
continue
break
logging.info('Connecting to server ...')
ssh = SshService(server['main_ip'])
backup_service = BackupService(alias, ssh)
logging.info('Installing Java Runtime Environment ...')
ssh.exec('apt-get update')
ssh.exec('apt-get install -y default-jre')
logging.info('Installing unzip ...')
ssh.exec('apt-get install -y zip unzip')
if backup_service.has_backup():
logging.info('Restoring backup ...')
backup_service.restore()
else:
logging.info('Downloading Minecraft server ...')
ssh.exec('wget https://launcher.mojang.com/v1/objects/3dc3d84a581f14691199cf6831b71ed1296a9fdf/server.jar')
logging.info('Running the server first time ...')
ssh.exec('java -Xmx1024M -Xms1024M -jar server.jar nogui')
logging.info('Accepting EULA ...')
ssh.exec("sed -i 's/false/true/g' eula.txt")
logging.info('Installation completed.')
logging.info('Starting Minecraft server ...')
ssh.exec('nohup java -Xmx1024M -Xms1024M -jar server.jar nogui &')
logging.info('Connect to server:')
logging.info('{}:{}'.format(server['main_ip'], 25565))
logging.info('Please wait while server is initializing!')
|
[
"logging.error",
"mcsf.services.vultr.VultrService",
"time.sleep",
"logging.info",
"mcsf.services.ssh.SshService",
"mcsf.services.backup.BackupService",
"mcsf.services.json_storage.JsonStorage"
] |
[((331, 344), 'mcsf.services.json_storage.JsonStorage', 'JsonStorage', ([], {}), '()\n', (342, 344), False, 'from mcsf.services.json_storage import JsonStorage\n'), ((595, 609), 'mcsf.services.vultr.VultrService', 'VultrService', ([], {}), '()\n', (607, 609), False, 'from mcsf.services.vultr import VultrService\n'), ((738, 777), 'logging.info', 'logging.info', (['"""Creating new server ..."""'], {}), "('Creating new server ...')\n", (750, 777), False, 'import logging\n'), ((880, 928), 'logging.info', 'logging.info', (['"""Waiting server to get online ..."""'], {}), "('Waiting server to get online ...')\n", (892, 928), False, 'import logging\n'), ((1233, 1273), 'logging.info', 'logging.info', (['"""Connecting to server ..."""'], {}), "('Connecting to server ...')\n", (1245, 1273), False, 'import logging\n'), ((1289, 1318), 'mcsf.services.ssh.SshService', 'SshService', (["server['main_ip']"], {}), "(server['main_ip'])\n", (1299, 1318), False, 'from mcsf.services.ssh import SshService\n'), ((1344, 1369), 'mcsf.services.backup.BackupService', 'BackupService', (['alias', 'ssh'], {}), '(alias, ssh)\n', (1357, 1369), False, 'from mcsf.services.backup import BackupService\n'), ((1379, 1434), 'logging.info', 'logging.info', (['"""Installing Java Runtime Environment ..."""'], {}), "('Installing Java Runtime Environment ...')\n", (1391, 1434), False, 'import logging\n'), ((1529, 1565), 'logging.info', 'logging.info', (['"""Installing unzip ..."""'], {}), "('Installing unzip ...')\n", (1541, 1565), False, 'import logging\n'), ((2235, 2280), 'logging.info', 'logging.info', (['"""Starting Minecraft server ..."""'], {}), "('Starting Minecraft server ...')\n", (2247, 2280), False, 'import logging\n'), ((2365, 2399), 'logging.info', 'logging.info', (['"""Connect to server:"""'], {}), "('Connect to server:')\n", (2377, 2399), False, 'import logging\n'), ((2471, 2528), 'logging.info', 'logging.info', (['"""Please wait while server is initializing!"""'], {}), "('Please wait while 
server is initializing!')\n", (2483, 2528), False, 'import logging\n'), ((512, 557), 'logging.error', 'logging.error', (['"""Please configure MCSF first."""'], {}), "('Please configure MCSF first.')\n", (525, 557), False, 'import logging\n'), ((670, 708), 'logging.error', 'logging.error', (['"""This alias is in use."""'], {}), "('This alias is in use.')\n", (683, 708), False, 'import logging\n'), ((1668, 1704), 'logging.info', 'logging.info', (['"""Restoring backup ..."""'], {}), "('Restoring backup ...')\n", (1680, 1704), False, 'import logging\n'), ((1768, 1816), 'logging.info', 'logging.info', (['"""Downloading Minecraft server ..."""'], {}), "('Downloading Minecraft server ...')\n", (1780, 1816), False, 'import logging\n'), ((1949, 1998), 'logging.info', 'logging.info', (['"""Running the server first time ..."""'], {}), "('Running the server first time ...')\n", (1961, 1998), False, 'import logging\n'), ((2082, 2116), 'logging.info', 'logging.info', (['"""Accepting EULA ..."""'], {}), "('Accepting EULA ...')\n", (2094, 2116), False, 'import logging\n'), ((2186, 2225), 'logging.info', 'logging.info', (['"""Installation completed."""'], {}), "('Installation completed.')\n", (2198, 2225), False, 'import logging\n'), ((1057, 1070), 'time.sleep', 'time.sleep', (['(5)'], {}), '(5)\n', (1067, 1070), False, 'import time\n')]
|
#-*- coding: utf-8 -*-
import numpy as np
class GPSConverter(object):
'''
GPS Converter class which is able to perform convertions between the
CH1903 and WGS84 system.
'''
# Convert CH y/x/h to WGS height
def CHtoWGSheight(self, y, x, h):
# Axiliary values (% Bern)
y_aux = (y - 600000) / 1000000
x_aux = (x - 200000) / 1000000
h = (h + 49.55) - (12.60 * y_aux) - (22.64 * x_aux)
return h
# Convert CH y/x to WGS lat
def CHtoWGSlat(self, y, x):
# Axiliary values (% Bern)
y_aux = (y - 600000) / 1000000
x_aux = (x - 200000) / 1000000
lat = (16.9023892 + (3.238272 * x_aux)) + \
- (0.270978 * pow(y_aux, 2)) + \
- (0.002528 * pow(x_aux, 2)) + \
- (0.0447 * pow(y_aux, 2) * x_aux) + \
- (0.0140 * pow(x_aux, 3))
# Unit 10000" to 1" and convert seconds to degrees (dec)
lat = (lat * 100) / 36
return lat
# Convert CH y/x to WGS long
def CHtoWGSlng(self, y, x):
# Axiliary values (% Bern)
y_aux = (y - 600000) / 1000000
x_aux = (x - 200000) / 1000000
lng = (2.6779094 + (4.728982 * y_aux) + \
+ (0.791484 * y_aux * x_aux) + \
+ (0.1306 * y_aux * pow(x_aux, 2))) + \
- (0.0436 * pow(y_aux, 3))
# Unit 10000" to 1" and convert seconds to degrees (dec)
lng = (lng * 100) / 36
return lng
# Convert decimal angle (° dec) to sexagesimal angle (dd.mmss,ss)
def DecToSexAngle(self, dec):
degree = dec.astype(int)
minute = (np.floor((dec - degree) * 60)).astype(int)
second = (((dec - degree) * 60) - minute) * 60
return degree + ((minute).astype(float) / 100) + (second / 10000)
# Convert sexagesimal angle (dd.mmss,ss) to seconds
def SexAngleToSeconds(self, dms):
degree = 0
minute = 0
second = 0
degree = dms.astype(float)
minute = ((dms - degree) * 100).astype(float)
second = (((dms - degree) * 100) - minute) * 100
return second + (minute * 60) + (degree * 3600)
# Convert sexagesimal angle (dd.mmss) to decimal angle (degrees)
def SexToDecAngle(self, dms):
degree = 0
minute = 0
second = 0
degree = dms.astype(float)
minute = ((dms - degree) * 100).astype(float)
second = (((dms - degree) * 100) - minute) * 100
return degree + (minute / 60) + (second / 3600)
# Convert WGS lat/long (° dec) and height to CH h
def WGStoCHh(self, lat, lng, h):
lat = self.DecToSexAngle(lat)
lng = self.DecToSexAngle(lng)
lat = self.SexAngleToSeconds(lat)
lng = self.SexAngleToSeconds(lng)
# Axiliary values (% Bern)
lat_aux = (lat - 169028.66) / 10000
lng_aux = (lng - 26782.5) / 10000
h = (h - 49.55) + (2.73 * lng_aux) + (6.94 * lat_aux)
return h
# Convert WGS lat/long (° dec) to CH x
def WGStoCHx(self, lat, lng):
lat = self.DecToSexAngle(lat)
lng = self.DecToSexAngle(lng)
lat = self.SexAngleToSeconds(lat)
lng = self.SexAngleToSeconds(lng)
# Axiliary values (% Bern)
lat_aux = (lat - 169028.66) / 10000
lng_aux = (lng - 26782.5) / 10000
x = ((200147.07 + (308807.95 * lat_aux) + \
+ (3745.25 * lng_aux**2)) + \
+ (76.63 * lat_aux**2)) + \
- (194.56 *lng_aux**2 * lat_aux) + \
+ (119.79 * lat_aux**3)
return x
# Convert WGS lat/long (° dec) to CH y
def WGStoCHy(self, lat, lng):
lat = self.DecToSexAngle(lat)
lng = self.DecToSexAngle(lng)
lat = self.SexAngleToSeconds(lat)
lng = self.SexAngleToSeconds(lng)
# Axiliary values (% Bern)
lat_aux = (lat - 169028.66) / 10000
lng_aux = (lng - 26782.5) / 10000
y = (600072.37 + (211455.93 * lng_aux)) + \
- (10938.51 * lng_aux * lat_aux) + \
- (0.36 * lng_aux * lat_aux**2) + \
- (44.54 * lat_aux**3)
return y
def LV03toWGS84(self, east, north, height):
'''
Convert LV03 to WGS84 Return a array of double that contain lat, long,
and height
'''
d = []
d.append(self.CHtoWGSlat(east, north))
d.append(self.CHtoWGSlng(east, north))
d.append(self.CHtoWGSheight(east, north, height))
return d
def WGS84toLV03(self, latitude, longitude, ellHeight):
'''
Convert WGS84 to LV03 Return an array of double that contaign east,
north, and height
'''
d = []
d.append(self.WGStoCHy(latitude, longitude))
d.append(self.WGStoCHx(latitude, longitude))
d.append(self.WGStoCHh(latitude, longitude, ellHeight))
return d
converter = GPSConverter()
|
[
"numpy.floor"
] |
[((1645, 1674), 'numpy.floor', 'np.floor', (['((dec - degree) * 60)'], {}), '((dec - degree) * 60)\n', (1653, 1674), True, 'import numpy as np\n')]
|
# Authors: <NAME> <<EMAIL>>
#
# License: Simplified BSD
import pytest
from mne.viz._mpl_figure import _psd_figure
from mne.viz._figure import _get_browser
def test_browse_figure_constructor():
"""Test error handling in MNEBrowseFigure constructor."""
with pytest.raises(TypeError, match='an instance of Raw, Epochs, or ICA'):
_get_browser(inst='foo')
def test_psd_figure_constructor():
"""Test error handling in MNELineFigure constructor."""
with pytest.raises(TypeError, match='an instance of Raw or Epochs, got'):
_psd_figure('foo', *((None,) * 20))
|
[
"pytest.raises",
"mne.viz._mpl_figure._psd_figure",
"mne.viz._figure._get_browser"
] |
[((267, 335), 'pytest.raises', 'pytest.raises', (['TypeError'], {'match': '"""an instance of Raw, Epochs, or ICA"""'}), "(TypeError, match='an instance of Raw, Epochs, or ICA')\n", (280, 335), False, 'import pytest\n'), ((345, 369), 'mne.viz._figure._get_browser', '_get_browser', ([], {'inst': '"""foo"""'}), "(inst='foo')\n", (357, 369), False, 'from mne.viz._figure import _get_browser\n'), ((476, 543), 'pytest.raises', 'pytest.raises', (['TypeError'], {'match': '"""an instance of Raw or Epochs, got"""'}), "(TypeError, match='an instance of Raw or Epochs, got')\n", (489, 543), False, 'import pytest\n'), ((553, 588), 'mne.viz._mpl_figure._psd_figure', '_psd_figure', (['"""foo"""', '*((None,) * 20)'], {}), "('foo', *((None,) * 20))\n", (564, 588), False, 'from mne.viz._mpl_figure import _psd_figure\n')]
|
import copy
from typing import Tuple
import numpy as np
from odyssey.distribution import Distribution
from iliad.integrators.info import SoftAbsLeapfrogInfo
from iliad.integrators.states import SoftAbsLeapfrogState
from iliad.integrators.terminal import cond
from iliad.integrators.fields import riemannian, softabs
def momentum_step(
val: Tuple[np.ndarray, np.ndarray, int],
step_size: float,
state: SoftAbsLeapfrogState,
) -> Tuple[np.ndarray, np.ndarray, int]:
"""Computes the update to the momentum variable using the equations of motion
determined by the SoftAbs metric.
Args:
val: A tuple containing the current guess for the fixed point of the
momentum, the difference between the momentum at this fixed point
iteration and the last, and the number of fixed point iterations
considered so far.
step_size: The integration step-size.
state: The current state of the SoftAbs metric system.
Returns:
pm: The updated momentum variable.
delta: The difference between the updated momentum variable and the
guess.
num_iters: The number of fixed point iterations attempted so far.
"""
pmcand, _, num_iters = val
f = softabs.force(pmcand,
state.grad_log_posterior,
state.jac_hessian,
state.hessian_eigenvals,
state.softabs_eigenvals,
state.softabs_inv_eigenvals,
state.hessian_eigenvecs,
state.alpha)
pm = state.momentum + step_size * f
delta = pm - pmcand
num_iters += 1
return pm, delta, num_iters
def position_step(
val: Tuple[np.ndarray, np.ndarray, int],
step_size: float,
distr: Distribution,
state: SoftAbsLeapfrogState,
) -> Tuple[np.ndarray, np.ndarray, int]:
"""Computes the update to the position variable using the equations of motion
determined by the SoftAbs metric.
Args:
val: A tuple containing the current guess for the fixed point of the
position, the difference between the position at this fixed point
iteration and the last, and the number of fixed point iterations
considered so far.
step_size: The integration step-size.
distr: The distribution that guides the time evolution of the Euclidean
Hamiltonian trajectory.
state: The current state of the SoftAbs metric system.
Returns:
qn: The updated momentum variable.
delta; The difference between the updated position variable and the
guess.
num_iters: The number of fixed point iterations attempted so far.
"""
qncand, _, num_iters = val
H = distr.hessian(qncand)
l, U, lt, inv_lt, metric, inv_metric = softabs.decomposition(H, state.alpha)
newvel = inv_metric@state.momentum
qn = state.position + step_size * newvel
delta = qn - qncand
num_iters += 1
return qn, delta, num_iters
def euler_a_single_step(
distr: Distribution,
state: SoftAbsLeapfrogState,
info: SoftAbsLeapfrogInfo,
step_size: float,
thresh: float,
max_iters: int,
) -> Tuple[SoftAbsLeapfrogState, SoftAbsLeapfrogInfo]:
"""The Euler-A integrator is a symplectic map that integrates Hamilton's
equations of motion for a general non-separable
Hamiltonian. It updates the position implicitly and then computes an
explicit update to the momentum variable.
Args:
distr: The distribution that guides the time evolution of the Euclidean
Hamiltonian trajectory.
state: An object containing the position and momentum variables of the
state in phase space, and possibly previously computed log-posterior,
metrics, and gradients.
info: An object that keeps track of the number of fixed point iterations
and whether or not integration has been successful.
step_size: Integration step-size.
thresh: Convergence tolerance for fixed point iterations.
max_iters: Maximum number of fixed point iterations.
Returns:
state: An augmented state object with the updated position and momentum
and values for the log-posterior and metric and their gradients.
info: An augmented information object with the updated number of fixed
point iterations and boolean indicator for successful integration.
"""
# Unpack the position and momentum.
qo, po = state.position, state.momentum
num_dims = len(qo)
# Precompute the initial difference vector, which is set to be an array of
# infinite values.
delta = np.inf*np.ones(num_dims)
# Fixed point iteration to solve the implicit update to the position.
val = (qo + step_size*state.velocity, delta, 0)
while cond(val, thresh, max_iters):
val = position_step(val, step_size, distr, state)
qn, delta, num_iters = val
success = np.max(np.abs(delta)) < thresh
# Update the state with the new position and compute the updated momentum.
state.position = qn
state.update(distr)
state.momentum += step_size*state.force
info.num_iters_pos += num_iters
info.success &= success
return state, info
def euler_b_single_step(
distr: Distribution,
state: SoftAbsLeapfrogState,
info: SoftAbsLeapfrogInfo,
step_size: float,
thresh: float,
max_iters: int,
) -> Tuple[SoftAbsLeapfrogState, SoftAbsLeapfrogInfo]:
"""The Euler-B integrator is a symplectic map that integrates Hamilton's
equations of motion for a general non-separable Hamiltonian. It updates the
momentum implicitly and then computes an explicit update to the position
variable.
Args:
distr: The distribution that guides the time evolution of the Euclidean
Hamiltonian trajectory.
state: An object containing the position and momentum variables of the
state in phase space, and possibly previously computed log-posterior,
metrics, and gradients.
info: An object that keeps track of the number of fixed point iterations
and whether or not integration has been successful.
step_size: Integration step-size.
thresh: Convergence tolerance for fixed point iterations.
max_iters: Maximum number of fixed point iterations.
Returns:
state: An augmented state object with the updated position and momentum
and values for the log-posterior and metric and their gradients.
info: An augmented information object with the updated number of fixed
point iterations and boolean indicator for successful integration.
"""
# Unpack the position and momentum.
qo, po = state.position, state.momentum
num_dims = len(qo)
# Precompute the initial difference vector, which is set to be an array of
# infinite values.
delta = np.inf*np.ones(num_dims)
# Fixed point iteration to solve the implicit update to the momentum.
val = (po + step_size*state.force, delta, 0)
while cond(val, thresh, max_iters):
val = momentum_step(val, step_size, state)
pn, delta, num_iters = val
vn = state.inv_metric@pn
success = np.max(np.abs(delta)) < thresh
# Update the state's new position.
state.momentum = pn
state.velocity = vn
state.position += step_size*vn
state.update(distr)
info.num_iters_mom += num_iters
info.success &= success
return state, info
def softabs_euler_a(
state: SoftAbsLeapfrogState,
step_size: float,
num_steps: int,
distr: Distribution,
thresh: float,
max_iters: int,
) -> Tuple[SoftAbsLeapfrogState, SoftAbsLeapfrogInfo]:
state = copy.copy(state)
info = SoftAbsLeapfrogInfo()
for i in range(num_steps):
state, info = euler_a_single_step(
distr,
state,
info,
step_size,
thresh,
max_iters,
)
L = np.linalg.cholesky(state.metric)
state.velocity = state.inv_metric.dot(state.momentum)
state.sqrtm_metric = L
state.logdet_metric = 2.0*np.sum(np.log(np.diag(L)))
return state, info
def softabs_euler_b(
state: SoftAbsLeapfrogState,
step_size: float,
num_steps: int,
distr: Distribution,
thresh: float,
max_iters: int,
) -> Tuple[SoftAbsLeapfrogState, SoftAbsLeapfrogInfo]:
state = copy.copy(state)
info = SoftAbsLeapfrogInfo()
for i in range(num_steps):
state, info = euler_b_single_step(
distr,
state,
info,
step_size,
thresh,
max_iters,
)
L = np.linalg.cholesky(state.metric)
state.velocity = state.inv_metric.dot(state.momentum)
state.sqrtm_metric = L
state.logdet_metric = 2.0*np.sum(np.log(np.diag(L)))
return state, info
|
[
"numpy.abs",
"iliad.integrators.info.SoftAbsLeapfrogInfo",
"iliad.integrators.fields.softabs.decomposition",
"iliad.integrators.fields.softabs.force",
"copy.copy",
"numpy.ones",
"numpy.diag",
"iliad.integrators.terminal.cond",
"numpy.linalg.cholesky"
] |
[((1270, 1462), 'iliad.integrators.fields.softabs.force', 'softabs.force', (['pmcand', 'state.grad_log_posterior', 'state.jac_hessian', 'state.hessian_eigenvals', 'state.softabs_eigenvals', 'state.softabs_inv_eigenvals', 'state.hessian_eigenvecs', 'state.alpha'], {}), '(pmcand, state.grad_log_posterior, state.jac_hessian, state.\n hessian_eigenvals, state.softabs_eigenvals, state.softabs_inv_eigenvals,\n state.hessian_eigenvecs, state.alpha)\n', (1283, 1462), False, 'from iliad.integrators.fields import riemannian, softabs\n'), ((2883, 2920), 'iliad.integrators.fields.softabs.decomposition', 'softabs.decomposition', (['H', 'state.alpha'], {}), '(H, state.alpha)\n', (2904, 2920), False, 'from iliad.integrators.fields import riemannian, softabs\n'), ((4941, 4969), 'iliad.integrators.terminal.cond', 'cond', (['val', 'thresh', 'max_iters'], {}), '(val, thresh, max_iters)\n', (4945, 4969), False, 'from iliad.integrators.terminal import cond\n'), ((7222, 7250), 'iliad.integrators.terminal.cond', 'cond', (['val', 'thresh', 'max_iters'], {}), '(val, thresh, max_iters)\n', (7226, 7250), False, 'from iliad.integrators.terminal import cond\n'), ((7894, 7910), 'copy.copy', 'copy.copy', (['state'], {}), '(state)\n', (7903, 7910), False, 'import copy\n'), ((7922, 7943), 'iliad.integrators.info.SoftAbsLeapfrogInfo', 'SoftAbsLeapfrogInfo', ([], {}), '()\n', (7941, 7943), False, 'from iliad.integrators.info import SoftAbsLeapfrogInfo\n'), ((8159, 8191), 'numpy.linalg.cholesky', 'np.linalg.cholesky', (['state.metric'], {}), '(state.metric)\n', (8177, 8191), True, 'import numpy as np\n'), ((8609, 8625), 'copy.copy', 'copy.copy', (['state'], {}), '(state)\n', (8618, 8625), False, 'import copy\n'), ((8637, 8658), 'iliad.integrators.info.SoftAbsLeapfrogInfo', 'SoftAbsLeapfrogInfo', ([], {}), '()\n', (8656, 8658), False, 'from iliad.integrators.info import SoftAbsLeapfrogInfo\n'), ((8874, 8906), 'numpy.linalg.cholesky', 'np.linalg.cholesky', (['state.metric'], {}), '(state.metric)\n', 
(8892, 8906), True, 'import numpy as np\n'), ((4787, 4804), 'numpy.ones', 'np.ones', (['num_dims'], {}), '(num_dims)\n', (4794, 4804), True, 'import numpy as np\n'), ((7071, 7088), 'numpy.ones', 'np.ones', (['num_dims'], {}), '(num_dims)\n', (7078, 7088), True, 'import numpy as np\n'), ((5082, 5095), 'numpy.abs', 'np.abs', (['delta'], {}), '(delta)\n', (5088, 5095), True, 'import numpy as np\n'), ((7385, 7398), 'numpy.abs', 'np.abs', (['delta'], {}), '(delta)\n', (7391, 7398), True, 'import numpy as np\n'), ((8321, 8331), 'numpy.diag', 'np.diag', (['L'], {}), '(L)\n', (8328, 8331), True, 'import numpy as np\n'), ((9036, 9046), 'numpy.diag', 'np.diag', (['L'], {}), '(L)\n', (9043, 9046), True, 'import numpy as np\n')]
|
import logging
logging = logging.getLogger()
import constants
from mailchimp3 import MailChimp
import web_template
from string import Template
client = MailChimp(mc_api=constants.MAILCHIMPAPI, mc_user=constants.MAILCHIMPUSENAME)
campaign_name="trading_alert"
from_name="<NAME>"
reply_to="<EMAIL>"
audience_id="4e7840abaf"
def getAudiencesId():
try:
return client.lists.all(get_all=True, fields="lists.name,lists.id")
except Exception as e:
logging.error(e)
def campaign_creation_function(campaign_name, audience_id, from_name, reply_to, client=client):
campaign_name = campaign_name
audience_id = audience_id
from_name = from_name
reply_to = reply_to
data = {
"recipients" :
{
"list_id": audience_id
},
"settings":
{
"subject_line": campaign_name,
"from_name": from_name,
"reply_to": reply_to
},
"type": "regular"
}
new_campaign = client.campaigns.create(data=data)
return new_campaign
def customized_template(html_code, campaign_id, client=client):
html_code = html_code
campaign_id = campaign_id
string_template = Template(html_code).safe_substitute()
try:
client.campaigns.content.update(
campaign_id=campaign_id,
data={'message': 'Campaign message', 'html': string_template}
)
except Exception as error:
logging.error(error)
def send_mail(client=client):
campaign = campaign_creation_function(campaign_name, audience_id, from_name, reply_to)
campaign_id = campaign['id']
try:
customized_template(web_template.html_code, campaign_id)
client.campaigns.actions.send(campaign_id=campaign_id)
except Exception as error:
logging.error(error)
|
[
"logging.error",
"mailchimp3.MailChimp",
"logging.getLogger",
"string.Template"
] |
[((25, 44), 'logging.getLogger', 'logging.getLogger', ([], {}), '()\n', (42, 44), False, 'import logging\n'), ((153, 229), 'mailchimp3.MailChimp', 'MailChimp', ([], {'mc_api': 'constants.MAILCHIMPAPI', 'mc_user': 'constants.MAILCHIMPUSENAME'}), '(mc_api=constants.MAILCHIMPAPI, mc_user=constants.MAILCHIMPUSENAME)\n', (162, 229), False, 'from mailchimp3 import MailChimp\n'), ((467, 483), 'logging.error', 'logging.error', (['e'], {}), '(e)\n', (480, 483), False, 'import logging\n'), ((1200, 1219), 'string.Template', 'Template', (['html_code'], {}), '(html_code)\n', (1208, 1219), False, 'from string import Template\n'), ((1469, 1489), 'logging.error', 'logging.error', (['error'], {}), '(error)\n', (1482, 1489), False, 'import logging\n'), ((1828, 1848), 'logging.error', 'logging.error', (['error'], {}), '(error)\n', (1841, 1848), False, 'import logging\n')]
|
from abc import abstractmethod
from typing import List
from typing import Optional
from typing import Tuple
from typing import Union
import tensorflow as tf
from config_state import builder
from config_state import ConfigField
from config_state import ConfigState
from config_state import DeferredConf
from config_state import register
from config_state import stateproperty
@builder
class Model(ConfigState):
input_shape: Tuple[int] = ConfigField(...,
"Input shape of the model",
type=tuple)
output_units: Optional[int] = ConfigField(...,
"Model's output units count",
type=int)
def __init__(self, config):
super().__init__(config)
self._keras_model = None
@abstractmethod
def _build_keras_model(self) -> tf.keras.Model:
"""Build the keras model"""
@property
def keras_model(self) -> tf.keras.Model:
if self._keras_model is None and not isinstance(
self.input_shape, DeferredConf) and not isinstance(
self.output_units, DeferredConf):
self._keras_model = self._build_keras_model()
return self._keras_model
@stateproperty
def weights(self):
return self.keras_model.get_weights()
@weights.setter
def weights(self, weights):
self.keras_model.set_weights(weights)
@register
class MultiLayerPerceptron(Model):
structure: List[int] = ConfigField([128], "hidden structure of the MLP")
dropout_rate: float = ConfigField(
0.0, "Dropout rate applied on the last "
"hidden layer.")
def _build_keras_model(self) -> tf.keras.Model:
layers = [tf.keras.layers.Flatten(input_shape=self.input_shape)]
for units in self.structure:
layers.append(tf.keras.layers.Dense(units, activation='relu'))
if self.dropout_rate > 0.0:
layers.append(tf.keras.layers.Dropout(self.dropout_rate))
if self.output_units is not None:
layers.append(tf.keras.layers.Dense(self.output_units))
return tf.keras.models.Sequential(layers)
@register
class CNN(Model):
structure: List[Union[int, str]] = ConfigField([32, 'max', 64, 'max', 64],
"Convolutional structure. "
"Conv2D layers units "
"are integers, pooling "
"layers type are str among "
"'max' or 'average'.")
def _build_keras_model(self) -> tf.keras.Model:
layers = [tf.keras.layers.InputLayer(input_shape=self.input_shape)]
for layer in self.structure:
if isinstance(layer, int):
layers.append(tf.keras.layers.Conv2D(layer, (3, 3), activation='relu'))
elif layer == 'max':
layers.append(tf.keras.layers.MaxPooling2D((2, 2)))
elif layer == 'average':
layers.append(tf.keras.layers.AveragePooling2D((2, 2)))
else:
raise ValueError(f"Unknown layer spec {layer}.")
layers.append(tf.keras.layers.Flatten())
if self.output_units is not None:
layers.append(tf.keras.layers.Dense(self.output_units))
return tf.keras.models.Sequential(layers)
@register
class Ensembler(Model):
model: Model = ConfigField(type=Model, doc="The model to be ensembled")
ensemble_size: int = ConfigField(2, "Size of the ensemble", force_type=True)
input_shape = ConfigField(model.input_shape)
output_units = ConfigField(model.output_units)
def _build_keras_model(self) -> tf.keras.Model:
models = [
self.model._build_keras_model() for _ in range(self.ensemble_size)
]
input = tf.keras.layers.InputLayer(input_shape=self.input_shape).output
inputs = tf.keras.layers.Lambda(self.lambda_splitter)(input)
outputs = []
for sub_input, model in zip(inputs, models):
sub_output = model(sub_input)
outputs.append(sub_output)
output = tf.keras.layers.Lambda(self.lambda_merger)(outputs)
return tf.keras.Model(inputs=input, outputs=output)
@tf.function
def lambda_splitter(self, input: tf.Tensor, training: bool = False):
outputs = []
slice_size = tf.cast(tf.shape(input)[0] // self.ensemble_size, tf.int64)
if training:
tf.assert_equal(tf.math.mod(tf.shape(input)[0], self.ensemble_size), 0)
for i in range(self.ensemble_size):
if training:
outputs.append(input[i * slice_size:(i + 1) * slice_size, :, :])
else:
outputs.append(input)
return outputs
@tf.function
def lambda_merger(self, inputs, training=False):
if training:
output = tf.concat(inputs, axis=0)
else:
# Average during inference
output = tf.add_n(inputs) / tf.cast(len(inputs), inputs[0].dtype)
return output
|
[
"tensorflow.add_n",
"tensorflow.keras.layers.Conv2D",
"tensorflow.keras.layers.MaxPooling2D",
"tensorflow.keras.layers.Dropout",
"tensorflow.keras.layers.Dense",
"tensorflow.keras.layers.AveragePooling2D",
"tensorflow.concat",
"tensorflow.keras.layers.InputLayer",
"tensorflow.keras.Model",
"tensorflow.shape",
"tensorflow.keras.models.Sequential",
"tensorflow.keras.layers.Lambda",
"config_state.ConfigField",
"tensorflow.keras.layers.Flatten"
] |
[((442, 498), 'config_state.ConfigField', 'ConfigField', (['...', '"""Input shape of the model"""'], {'type': 'tuple'}), "(..., 'Input shape of the model', type=tuple)\n", (453, 498), False, 'from config_state import ConfigField\n'), ((611, 667), 'config_state.ConfigField', 'ConfigField', (['...', '"""Model\'s output units count"""'], {'type': 'int'}), '(..., "Model\'s output units count", type=int)\n', (622, 667), False, 'from config_state import ConfigField\n'), ((1486, 1535), 'config_state.ConfigField', 'ConfigField', (['[128]', '"""hidden structure of the MLP"""'], {}), "([128], 'hidden structure of the MLP')\n", (1497, 1535), False, 'from config_state import ConfigField\n'), ((1560, 1626), 'config_state.ConfigField', 'ConfigField', (['(0.0)', '"""Dropout rate applied on the last hidden layer."""'], {}), "(0.0, 'Dropout rate applied on the last hidden layer.')\n", (1571, 1626), False, 'from config_state import ConfigField\n'), ((2177, 2341), 'config_state.ConfigField', 'ConfigField', (["[32, 'max', 64, 'max', 64]", '"""Convolutional structure. Conv2D layers units are integers, pooling layers type are str among \'max\' or \'average\'."""'], {}), '([32, \'max\', 64, \'max\', 64],\n "Convolutional structure. 
Conv2D layers units are integers, pooling layers type are str among \'max\' or \'average\'."\n )\n', (2188, 2341), False, 'from config_state import ConfigField\n'), ((3358, 3414), 'config_state.ConfigField', 'ConfigField', ([], {'type': 'Model', 'doc': '"""The model to be ensembled"""'}), "(type=Model, doc='The model to be ensembled')\n", (3369, 3414), False, 'from config_state import ConfigField\n'), ((3438, 3493), 'config_state.ConfigField', 'ConfigField', (['(2)', '"""Size of the ensemble"""'], {'force_type': '(True)'}), "(2, 'Size of the ensemble', force_type=True)\n", (3449, 3493), False, 'from config_state import ConfigField\n'), ((3510, 3540), 'config_state.ConfigField', 'ConfigField', (['model.input_shape'], {}), '(model.input_shape)\n', (3521, 3540), False, 'from config_state import ConfigField\n'), ((3558, 3589), 'config_state.ConfigField', 'ConfigField', (['model.output_units'], {}), '(model.output_units)\n', (3569, 3589), False, 'from config_state import ConfigField\n'), ((2075, 2109), 'tensorflow.keras.models.Sequential', 'tf.keras.models.Sequential', (['layers'], {}), '(layers)\n', (2101, 2109), True, 'import tensorflow as tf\n'), ((3270, 3304), 'tensorflow.keras.models.Sequential', 'tf.keras.models.Sequential', (['layers'], {}), '(layers)\n', (3296, 3304), True, 'import tensorflow as tf\n'), ((4094, 4138), 'tensorflow.keras.Model', 'tf.keras.Model', ([], {'inputs': 'input', 'outputs': 'output'}), '(inputs=input, outputs=output)\n', (4108, 4138), True, 'import tensorflow as tf\n'), ((1708, 1761), 'tensorflow.keras.layers.Flatten', 'tf.keras.layers.Flatten', ([], {'input_shape': 'self.input_shape'}), '(input_shape=self.input_shape)\n', (1731, 1761), True, 'import tensorflow as tf\n'), ((2655, 2711), 'tensorflow.keras.layers.InputLayer', 'tf.keras.layers.InputLayer', ([], {'input_shape': 'self.input_shape'}), '(input_shape=self.input_shape)\n', (2681, 2711), True, 'import tensorflow as tf\n'), ((3130, 3155), 'tensorflow.keras.layers.Flatten', 
'tf.keras.layers.Flatten', ([], {}), '()\n', (3153, 3155), True, 'import tensorflow as tf\n'), ((3750, 3806), 'tensorflow.keras.layers.InputLayer', 'tf.keras.layers.InputLayer', ([], {'input_shape': 'self.input_shape'}), '(input_shape=self.input_shape)\n', (3776, 3806), True, 'import tensorflow as tf\n'), ((3828, 3872), 'tensorflow.keras.layers.Lambda', 'tf.keras.layers.Lambda', (['self.lambda_splitter'], {}), '(self.lambda_splitter)\n', (3850, 3872), True, 'import tensorflow as tf\n'), ((4031, 4073), 'tensorflow.keras.layers.Lambda', 'tf.keras.layers.Lambda', (['self.lambda_merger'], {}), '(self.lambda_merger)\n', (4053, 4073), True, 'import tensorflow as tf\n'), ((4707, 4732), 'tensorflow.concat', 'tf.concat', (['inputs'], {'axis': '(0)'}), '(inputs, axis=0)\n', (4716, 4732), True, 'import tensorflow as tf\n'), ((1816, 1863), 'tensorflow.keras.layers.Dense', 'tf.keras.layers.Dense', (['units'], {'activation': '"""relu"""'}), "(units, activation='relu')\n", (1837, 1863), True, 'import tensorflow as tf\n'), ((1918, 1960), 'tensorflow.keras.layers.Dropout', 'tf.keras.layers.Dropout', (['self.dropout_rate'], {}), '(self.dropout_rate)\n', (1941, 1960), True, 'import tensorflow as tf\n'), ((2021, 2061), 'tensorflow.keras.layers.Dense', 'tf.keras.layers.Dense', (['self.output_units'], {}), '(self.output_units)\n', (2042, 2061), True, 'import tensorflow as tf\n'), ((3216, 3256), 'tensorflow.keras.layers.Dense', 'tf.keras.layers.Dense', (['self.output_units'], {}), '(self.output_units)\n', (3237, 3256), True, 'import tensorflow as tf\n'), ((4791, 4807), 'tensorflow.add_n', 'tf.add_n', (['inputs'], {}), '(inputs)\n', (4799, 4807), True, 'import tensorflow as tf\n'), ((2802, 2858), 'tensorflow.keras.layers.Conv2D', 'tf.keras.layers.Conv2D', (['layer', '(3, 3)'], {'activation': '"""relu"""'}), "(layer, (3, 3), activation='relu')\n", (2824, 2858), True, 'import tensorflow as tf\n'), ((4268, 4283), 'tensorflow.shape', 'tf.shape', (['input'], {}), '(input)\n', (4276, 4283), 
True, 'import tensorflow as tf\n'), ((2909, 2945), 'tensorflow.keras.layers.MaxPooling2D', 'tf.keras.layers.MaxPooling2D', (['(2, 2)'], {}), '((2, 2))\n', (2937, 2945), True, 'import tensorflow as tf\n'), ((4371, 4386), 'tensorflow.shape', 'tf.shape', (['input'], {}), '(input)\n', (4379, 4386), True, 'import tensorflow as tf\n'), ((3000, 3040), 'tensorflow.keras.layers.AveragePooling2D', 'tf.keras.layers.AveragePooling2D', (['(2, 2)'], {}), '((2, 2))\n', (3032, 3040), True, 'import tensorflow as tf\n')]
|
# -*- coding: utf-8 -*-
"""
test_scrape_selector
~~~~~~~~~~~~~~~~~~~~
Test the HTML/XML Selector.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import logging
import unittest
from chemdataextractor.scrape.selector import Selector
logging.basicConfig(level=logging.DEBUG)
log = logging.getLogger(__name__)
HTML = '''
<html>
<body>
<div><h1>Heading</h1></div>
<div><a href="page">Link</a></div>
<div><div>Nested</div></div>
</body>
</html>
'''
class TestSelector(unittest.TestCase):
def test_html_xpath(self):
selector = Selector.from_text(HTML)
self.assertEqual(len(selector.xpath('.//div')), 4)
self.assertEqual(selector.xpath('.//a').extract(), ['Link'])
self.assertEqual(selector.xpath('.//a').extract(raw=True), ['<a href="page">Link</a>'])
self.assertEqual(selector.xpath('.//a/text()').extract(), ['Link'])
self.assertEqual(selector.xpath('.//a/@href').extract(), ['page'])
self.assertEqual(selector.xpath('/html/body/div/h1/text()').extract(), ['Heading'])
def test_html_css(self):
selector = Selector.from_text(HTML)
self.assertEqual(len(selector.css('div')), 4)
self.assertEqual(selector.css('a').extract(), ['Link'])
self.assertEqual(selector.css('a').extract(raw=True), ['<a href="page">Link</a>'])
self.assertEqual(selector.css('a::text').extract(), ['Link'])
self.assertEqual(selector.css('a::attr(href)').extract(), ['page'])
self.assertEqual(selector.css('html>body>div>h1::text').extract(), ['Heading'])
if __name__ == '__main__':
unittest.main()
|
[
"unittest.main",
"chemdataextractor.scrape.selector.Selector.from_text",
"logging.getLogger",
"logging.basicConfig"
] |
[((362, 402), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.DEBUG'}), '(level=logging.DEBUG)\n', (381, 402), False, 'import logging\n'), ((410, 437), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (427, 437), False, 'import logging\n'), ((1763, 1778), 'unittest.main', 'unittest.main', ([], {}), '()\n', (1776, 1778), False, 'import unittest\n'), ((702, 726), 'chemdataextractor.scrape.selector.Selector.from_text', 'Selector.from_text', (['HTML'], {}), '(HTML)\n', (720, 726), False, 'from chemdataextractor.scrape.selector import Selector\n'), ((1252, 1276), 'chemdataextractor.scrape.selector.Selector.from_text', 'Selector.from_text', (['HTML'], {}), '(HTML)\n', (1270, 1276), False, 'from chemdataextractor.scrape.selector import Selector\n')]
|
from ibidem.advent_of_code.board import Board
from ibidem.advent_of_code.util import get_input_name
PART1_SLOPE = (3, 1)
PART2_SLOPES = (
(1, 1),
(3, 1),
(5, 1),
(7, 1),
(1, 2),
)
def load():
with open(get_input_name(3, 2020)) as fobj:
return Board.from_string(fobj.read())
def part1():
slope = PART1_SLOPE
return count_slope(slope)
def part2():
result = 1
for slope in PART2_SLOPES:
result *= count_slope(slope)
print(f"Part 2 result: {result}")
def count_slope(slope):
board = load()
x = y = 0
count = 0
while y < board.size_y:
c = board.get(x, y)
if c == "#":
count += 1
board.set(x, y, "X")
else:
board.set(x, y, "O")
x += slope[0]
if x >= board.size_x:
x = x - board.size_x
y += slope[1]
print(f"Counted {count} trees for slope {slope}")
return count
if __name__ == "__main__":
part1()
part2()
|
[
"ibidem.advent_of_code.util.get_input_name"
] |
[((229, 252), 'ibidem.advent_of_code.util.get_input_name', 'get_input_name', (['(3)', '(2020)'], {}), '(3, 2020)\n', (243, 252), False, 'from ibidem.advent_of_code.util import get_input_name\n')]
|
"""
.. _ref_contact_example:
Contact Element Example
~~~~~~~~~~~~~~~~~~~~~~~
This example demonstrates how to create contact elements for general
contact.
Begin by launching MAPDL.
"""
from ansys.mapdl import core as pymapdl
mapdl = pymapdl.launch_mapdl()
###############################################################################
# Enter the pre-processor, create a block and mesh it with tetrahedral
# elements.
#
mapdl.prep7()
vnum0 = mapdl.block(0, 1, 0, 1, 0, 0.5)
mapdl.et(1, 187)
mapdl.esize(0.1)
mapdl.vmesh(vnum0)
mapdl.eplot()
###############################################################################
# Second a volume block above the existing block and mesh it with
# quadratic hexahedral elements. Ensure that these blocks do not
# touch by starting it slightly higher than the existing block.
#
# Note how these two blocks do not touch and the mesh is non-conformal.
mapdl.esize(0.09)
mapdl.et(2, 186)
mapdl.type(2)
vnum1 = mapdl.block(0, 1, 0, 1, 0.50001, 1)
mapdl.vmesh(vnum1)
mapdl.eplot()
###############################################################################
# Select all the elements at the intersection between the two blocks
# and generate contact elements.
mapdl.nsel("s", "loc", "z", 0.5, 0.50001)
mapdl.esln("s")
output = mapdl.gcgen("NEW", splitkey="SPLIT", selopt="SELECT")
print(output)
###############################################################################
# Plot the contact element pairs. Note from the command output above
# that the section IDs are 5 and 6.
#
# Here, we plot the element mesh as a wire-frame to show that the
# contact pairs overlap.
mapdl.esel("S", "SEC", vmin=5, vmax=6)
mapdl.eplot(style="wireframe", line_width=3)
|
[
"ansys.mapdl.core.launch_mapdl"
] |
[((238, 260), 'ansys.mapdl.core.launch_mapdl', 'pymapdl.launch_mapdl', ([], {}), '()\n', (258, 260), True, 'from ansys.mapdl import core as pymapdl\n')]
|
from chalice import Blueprint
from chalicelib import _overrides
from chalicelib.utils.SAML2_helper import prepare_request, init_saml_auth
app = Blueprint(__name__)
_overrides.chalice_app(app)
from chalicelib.utils.helper import environ
from onelogin.saml2.auth import OneLogin_Saml2_Logout_Request
from onelogin.saml2.utils import OneLogin_Saml2_Utils
from chalice import Response
from chalicelib.core import users, tenants
@app.route("/saml2", methods=['GET'], authorizer=None)
def start_sso():
app.current_request.path = ''
req = prepare_request(request=app.current_request)
auth = init_saml_auth(req)
sso_built_url = auth.login()
return Response(
# status_code=301,
status_code=307,
body='',
headers={'Location': sso_built_url, 'Content-Type': 'text/plain'})
@app.route('/saml2/acs', methods=['POST'], content_types=['application/x-www-form-urlencoded'], authorizer=None)
def process_sso_assertion():
req = prepare_request(request=app.current_request)
session = req["cookie"]["session"]
request = req['request']
auth = init_saml_auth(req)
request_id = None
if 'AuthNRequestID' in session:
request_id = session['AuthNRequestID']
auth.process_response(request_id=request_id)
errors = auth.get_errors()
user_data = {}
if len(errors) == 0:
if 'AuthNRequestID' in session:
del session['AuthNRequestID']
user_data = auth.get_attributes()
# session['samlUserdata'] = user_data
# session['samlNameId'] = auth.get_nameid()
# session['samlNameIdFormat'] = auth.get_nameid_format()
# session['samlNameIdNameQualifier'] = auth.get_nameid_nq()
# session['samlNameIdSPNameQualifier'] = auth.get_nameid_spnq()
# session['samlSessionIndex'] = auth.get_session_index()
# session['samlSessionExpiration'] = auth.get_session_expiration()
# print('>>>>')
# print(session)
self_url = OneLogin_Saml2_Utils.get_self_url(req)
if 'RelayState' in request.form and self_url != request.form['RelayState']:
print("====>redirect")
return Response(
status_code=307,
body='',
headers={'Location': auth.redirect_to(request.form['RelayState']), 'Content-Type': 'text/plain'})
elif auth.get_settings().is_debug_active():
error_reason = auth.get_last_error_reason()
return {"errors": [error_reason]}
email = auth.get_nameid()
existing = users.get_by_email_only(auth.get_nameid())
internal_id = next(iter(user_data.get("internalId", [])), None)
if len(existing) == 0 or existing[0].get("origin") != 'saml':
tenant_key = user_data.get("tenantKey", [])
if len(tenant_key) == 0:
print("tenantKey not present in assertion")
return Response(
status_code=307,
body={"errors": ["tenantKey not present in assertion"]},
headers={'Location': auth.redirect_to(request.form['RelayState']), 'Content-Type': 'text/plain'})
else:
t = tenants.get_by_tenant_key(tenant_key[0])
if t is None:
return Response(
status_code=307,
body={"errors": ["Unknown tenantKey"]},
headers={'Location': auth.redirect_to(request.form['RelayState']), 'Content-Type': 'text/plain'})
if len(existing) == 0:
print("== new user ==")
users.create_sso_user(tenant_id=t['tenantId'], email=email, admin=True, origin='saml',
name=" ".join(user_data.get("firstName", []) + user_data.get("lastName", [])),
internal_id=internal_id)
else:
existing = existing[0]
if existing.get("origin") != 'saml':
print("== migrating user to SAML ==")
users.update(tenant_id=t['tenantId'], user_id=existing["id"],
changes={"origin": 'saml', "internal_id": internal_id})
return users.authenticate_sso(email=email, internal_id=internal_id, exp=auth.get_session_expiration())
@app.route('/saml2/slo', methods=['GET'])
def process_slo_request(context):
req = prepare_request(request=app.current_request)
session = req["cookie"]["session"]
request = req['request']
auth = init_saml_auth(req)
name_id = session_index = name_id_format = name_id_nq = name_id_spnq = None
if 'samlNameId' in session:
name_id = session['samlNameId']
if 'samlSessionIndex' in session:
session_index = session['samlSessionIndex']
if 'samlNameIdFormat' in session:
name_id_format = session['samlNameIdFormat']
if 'samlNameIdNameQualifier' in session:
name_id_nq = session['samlNameIdNameQualifier']
if 'samlNameIdSPNameQualifier' in session:
name_id_spnq = session['samlNameIdSPNameQualifier']
users.change_jwt_iat(context["userId"])
return Response(
status_code=307,
body='',
headers={'Location': auth.logout(name_id=name_id, session_index=session_index, nq=name_id_nq,
name_id_format=name_id_format,
spnq=name_id_spnq), 'Content-Type': 'text/plain'})
@app.route('/saml2/sls', methods=['GET'], authorizer=None)
def process_sls_assertion():
req = prepare_request(request=app.current_request)
session = req["cookie"]["session"]
request = req['request']
auth = init_saml_auth(req)
request_id = None
if 'LogoutRequestID' in session:
request_id = session['LogoutRequestID']
def dscb():
session.clear()
url = auth.process_slo(request_id=request_id, delete_session_cb=dscb)
errors = auth.get_errors()
if len(errors) == 0:
if 'SAMLRequest' in req['get_data']:
logout_request = OneLogin_Saml2_Logout_Request(auth.get_settings(), req['get_data']['SAMLRequest'])
user_email = logout_request.get_nameid(auth.get_last_request_xml())
to_logout = users.get_by_email_only(user_email)
if len(to_logout) > 0:
to_logout = to_logout[0]['id']
users.change_jwt_iat(to_logout)
else:
print("Unknown user SLS-Request By IdP")
else:
print("Preprocessed SLS-Request by SP")
if url is not None:
return Response(
status_code=307,
body='',
headers={'Location': url, 'Content-Type': 'text/plain'})
return Response(
status_code=307,
body='',
headers={'Location': environ["SITE_URL"], 'Content-Type': 'text/plain'})
@app.route('/saml2/metadata', methods=['GET'], authorizer=None)
def saml2_metadata():
req = prepare_request(request=app.current_request)
auth = init_saml_auth(req)
settings = auth.get_settings()
metadata = settings.get_sp_metadata()
errors = settings.validate_metadata(metadata)
if len(errors) == 0:
return Response(
status_code=200,
body=metadata,
headers={'Content-Type': 'text/xml'})
else:
return Response(
status_code=500,
body=', '.join(errors))
|
[
"chalicelib.utils.SAML2_helper.init_saml_auth",
"chalicelib._overrides.chalice_app",
"chalicelib.core.users.update",
"chalice.Response",
"onelogin.saml2.utils.OneLogin_Saml2_Utils.get_self_url",
"chalicelib.core.users.get_by_email_only",
"chalice.Blueprint",
"chalicelib.core.tenants.get_by_tenant_key",
"chalicelib.core.users.change_jwt_iat",
"chalicelib.utils.SAML2_helper.prepare_request"
] |
[((146, 165), 'chalice.Blueprint', 'Blueprint', (['__name__'], {}), '(__name__)\n', (155, 165), False, 'from chalice import Blueprint\n'), ((166, 193), 'chalicelib._overrides.chalice_app', '_overrides.chalice_app', (['app'], {}), '(app)\n', (188, 193), False, 'from chalicelib import _overrides\n'), ((547, 591), 'chalicelib.utils.SAML2_helper.prepare_request', 'prepare_request', ([], {'request': 'app.current_request'}), '(request=app.current_request)\n', (562, 591), False, 'from chalicelib.utils.SAML2_helper import prepare_request, init_saml_auth\n'), ((603, 622), 'chalicelib.utils.SAML2_helper.init_saml_auth', 'init_saml_auth', (['req'], {}), '(req)\n', (617, 622), False, 'from chalicelib.utils.SAML2_helper import prepare_request, init_saml_auth\n'), ((667, 772), 'chalice.Response', 'Response', ([], {'status_code': '(307)', 'body': '""""""', 'headers': "{'Location': sso_built_url, 'Content-Type': 'text/plain'}"}), "(status_code=307, body='', headers={'Location': sso_built_url,\n 'Content-Type': 'text/plain'})\n", (675, 772), False, 'from chalice import Response\n'), ((975, 1019), 'chalicelib.utils.SAML2_helper.prepare_request', 'prepare_request', ([], {'request': 'app.current_request'}), '(request=app.current_request)\n', (990, 1019), False, 'from chalicelib.utils.SAML2_helper import prepare_request, init_saml_auth\n'), ((1099, 1118), 'chalicelib.utils.SAML2_helper.init_saml_auth', 'init_saml_auth', (['req'], {}), '(req)\n', (1113, 1118), False, 'from chalicelib.utils.SAML2_helper import prepare_request, init_saml_auth\n'), ((4294, 4338), 'chalicelib.utils.SAML2_helper.prepare_request', 'prepare_request', ([], {'request': 'app.current_request'}), '(request=app.current_request)\n', (4309, 4338), False, 'from chalicelib.utils.SAML2_helper import prepare_request, init_saml_auth\n'), ((4418, 4437), 'chalicelib.utils.SAML2_helper.init_saml_auth', 'init_saml_auth', (['req'], {}), '(req)\n', (4432, 4437), False, 'from chalicelib.utils.SAML2_helper import prepare_request, 
init_saml_auth\n'), ((4984, 5023), 'chalicelib.core.users.change_jwt_iat', 'users.change_jwt_iat', (["context['userId']"], {}), "(context['userId'])\n", (5004, 5023), False, 'from chalicelib.core import users, tenants\n'), ((5453, 5497), 'chalicelib.utils.SAML2_helper.prepare_request', 'prepare_request', ([], {'request': 'app.current_request'}), '(request=app.current_request)\n', (5468, 5497), False, 'from chalicelib.utils.SAML2_helper import prepare_request, init_saml_auth\n'), ((5577, 5596), 'chalicelib.utils.SAML2_helper.init_saml_auth', 'init_saml_auth', (['req'], {}), '(req)\n', (5591, 5596), False, 'from chalicelib.utils.SAML2_helper import prepare_request, init_saml_auth\n'), ((6647, 6758), 'chalice.Response', 'Response', ([], {'status_code': '(307)', 'body': '""""""', 'headers': "{'Location': environ['SITE_URL'], 'Content-Type': 'text/plain'}"}), "(status_code=307, body='', headers={'Location': environ['SITE_URL'],\n 'Content-Type': 'text/plain'})\n", (6655, 6758), False, 'from chalice import Response\n'), ((6878, 6922), 'chalicelib.utils.SAML2_helper.prepare_request', 'prepare_request', ([], {'request': 'app.current_request'}), '(request=app.current_request)\n', (6893, 6922), False, 'from chalicelib.utils.SAML2_helper import prepare_request, init_saml_auth\n'), ((6934, 6953), 'chalicelib.utils.SAML2_helper.init_saml_auth', 'init_saml_auth', (['req'], {}), '(req)\n', (6948, 6953), False, 'from chalicelib.utils.SAML2_helper import prepare_request, init_saml_auth\n'), ((1985, 2023), 'onelogin.saml2.utils.OneLogin_Saml2_Utils.get_self_url', 'OneLogin_Saml2_Utils.get_self_url', (['req'], {}), '(req)\n', (2018, 2023), False, 'from onelogin.saml2.utils import OneLogin_Saml2_Utils\n'), ((7122, 7200), 'chalice.Response', 'Response', ([], {'status_code': '(200)', 'body': 'metadata', 'headers': "{'Content-Type': 'text/xml'}"}), "(status_code=200, body=metadata, headers={'Content-Type': 'text/xml'})\n", (7130, 7200), False, 'from chalice import Response\n'), ((3130, 
3170), 'chalicelib.core.tenants.get_by_tenant_key', 'tenants.get_by_tenant_key', (['tenant_key[0]'], {}), '(tenant_key[0])\n', (3155, 3170), False, 'from chalicelib.core import users, tenants\n'), ((6138, 6173), 'chalicelib.core.users.get_by_email_only', 'users.get_by_email_only', (['user_email'], {}), '(user_email)\n', (6161, 6173), False, 'from chalicelib.core import users, tenants\n'), ((6494, 6589), 'chalice.Response', 'Response', ([], {'status_code': '(307)', 'body': '""""""', 'headers': "{'Location': url, 'Content-Type': 'text/plain'}"}), "(status_code=307, body='', headers={'Location': url, 'Content-Type':\n 'text/plain'})\n", (6502, 6589), False, 'from chalice import Response\n'), ((3951, 4073), 'chalicelib.core.users.update', 'users.update', ([], {'tenant_id': "t['tenantId']", 'user_id': "existing['id']", 'changes': "{'origin': 'saml', 'internal_id': internal_id}"}), "(tenant_id=t['tenantId'], user_id=existing['id'], changes={\n 'origin': 'saml', 'internal_id': internal_id})\n", (3963, 4073), False, 'from chalicelib.core import users, tenants\n'), ((6273, 6304), 'chalicelib.core.users.change_jwt_iat', 'users.change_jwt_iat', (['to_logout'], {}), '(to_logout)\n', (6293, 6304), False, 'from chalicelib.core import users, tenants\n')]
|
import numpy as np
def rank5_accuracy(predictions, labels):
# initialize the rank-1 and rank-5 accuracies
rank_1 = 0
rank_5 = 0
# new_predictions = []
# loop over the predictions and the ground-truth labels
for (prediction_, ground_truth) in zip(predictions, labels):
# sort the probabilities by their index in descending order
# so that the more confident guesses are at the front of the list
prediction_ = np.argsort(prediction_)[::-1]
# check if the ground-truth label is in the top-5 predictions
if ground_truth in prediction_[:5]:
rank_5 += 1
# check if the ground-truth is in #1 prediction
if ground_truth == prediction_[0]:
rank_1 += 1
# compute the final rank-1 and rank-5 accuracy
rank_1 /= float(len(labels))
rank_5 /= float(len(labels))
# return a tuple of the rank-1 and rank-5 accuracies
return rank_1, rank_5
|
[
"numpy.argsort"
] |
[((459, 482), 'numpy.argsort', 'np.argsort', (['prediction_'], {}), '(prediction_)\n', (469, 482), True, 'import numpy as np\n')]
|
from numbers import Number
from phi import math
from phi.math.blas import conjugate_gradient
from phi.math.helper import _dim_shifted
from phi.physics.field import CenteredGrid
from .solver_api import PoissonDomain, PoissonSolver
class GeometricCG(PoissonSolver):
def __init__(self, accuracy=1e-5, gradient_accuracy='same',
max_iterations=2000, max_gradient_iterations='same',
autodiff=False):
"""
Conjugate gradient solver that geometrically calculates laplace pressure in each iteration.
Unlike most other solvers, this algorithm is TPU compatible but usually performs worse than SparseCG.
Obstacles are allowed to vary between examples but the same number of iterations is performed for each example in one batch.
:param accuracy: the maximally allowed error on the divergence channel for each cell
:param gradient_accuracy: accuracy applied during backpropagation, number of 'same' to use forward accuracy
:param max_iterations: integer specifying maximum conjugent gradient loop iterations or None for no limit
:param max_gradient_iterations: maximum loop iterations during backpropagation,
'same' uses the number from max_iterations,
'mirror' sets the maximum to the number of iterations that were actually performed in the forward pass
:param autodiff: If autodiff=True, use the built-in autodiff for backpropagation.
The intermediate results of each loop iteration will be permanently stored if backpropagation is used.
If False, replaces autodiff by a forward pressure solve in reverse accumulation backpropagation.
This requires less memory but is only accurate if the solution is fully converged.
"""
PoissonSolver.__init__(self, 'Single-Phase Conjugate Gradient',
supported_devices=('CPU', 'GPU', 'TPU'),
supports_guess=True, supports_loop_counter=True, supports_continuous_masks=True)
assert isinstance(accuracy, Number), 'invalid accuracy: %s' % accuracy
assert gradient_accuracy == 'same' or isinstance(gradient_accuracy, Number), 'invalid gradient_accuracy: %s' % gradient_accuracy
assert max_gradient_iterations in ['same', 'mirror'] or isinstance(max_gradient_iterations, Number), 'invalid max_gradient_iterations: %s' % max_gradient_iterations
self.accuracy = accuracy
self.gradient_accuracy = accuracy if gradient_accuracy == 'same' else gradient_accuracy
self.max_iterations = max_iterations
if max_gradient_iterations == 'same':
self.max_gradient_iterations = max_iterations
elif max_gradient_iterations == 'mirror':
self.max_gradient_iterations = 'mirror'
else:
self.max_gradient_iterations = max_gradient_iterations
assert not autodiff, 'Cannot specify max_gradient_iterations when autodiff=True'
self.autodiff = autodiff
def solve(self, divergence, domain, guess):
assert isinstance(domain, PoissonDomain)
fluid_mask = domain.accessible_tensor(extend=1)
if self.autodiff:
return solve_pressure_forward(divergence, fluid_mask, self.max_iterations, guess, self.accuracy, domain, back_prop=True)
else:
def pressure_gradient(op, grad):
return solve_pressure_forward(grad, fluid_mask, max_gradient_iterations, None, self.gradient_accuracy, domain)[0]
pressure, iteration = math.with_custom_gradient(
solve_pressure_forward,
[divergence, fluid_mask, self.max_iterations, guess, self.accuracy, domain],
pressure_gradient,
input_index=0, output_index=0, name_base='geom_solve'
)
max_gradient_iterations = iteration if self.max_gradient_iterations == 'mirror' else self.max_gradient_iterations
return pressure, iteration
def solve_pressure_forward(divergence, fluid_mask, max_iterations, guess, accuracy, domain, back_prop=False):
from phi.physics.material import Material
extrapolation = Material.extrapolation_mode(domain.domain.boundaries)
def apply_A(pressure):
pressure = CenteredGrid(pressure, extrapolation=extrapolation)
pressure_padded = pressure.padded([[1, 1]] * pressure.rank)
return _weighted_sliced_laplace_nd(pressure_padded.data, weights=fluid_mask)
return conjugate_gradient(divergence, apply_A, guess, accuracy, max_iterations, back_prop=back_prop)
def _weighted_sliced_laplace_nd(tensor, weights):
if tensor.shape[-1] != 1:
raise ValueError('Laplace operator requires a scalar channel as input')
dims = range(math.spatial_rank(tensor))
components = []
for dimension in dims:
lower_weights, center_weights, upper_weights = _dim_shifted(weights, dimension, (-1, 0, 1), diminish_others=(1, 1))
lower_values, center_values, upper_values = _dim_shifted(tensor, dimension, (-1, 0, 1), diminish_others=(1, 1))
diff = math.mul(upper_values, upper_weights * center_weights) + math.mul(lower_values, lower_weights * center_weights) + math.mul(center_values, - lower_weights - upper_weights)
components.append(diff)
return math.sum(components, 0)
|
[
"phi.math.with_custom_gradient",
"phi.physics.material.Material.extrapolation_mode",
"phi.math.helper._dim_shifted",
"phi.math.spatial_rank",
"phi.math.sum",
"phi.math.mul",
"phi.math.blas.conjugate_gradient",
"phi.physics.field.CenteredGrid"
] |
[((4165, 4218), 'phi.physics.material.Material.extrapolation_mode', 'Material.extrapolation_mode', (['domain.domain.boundaries'], {}), '(domain.domain.boundaries)\n', (4192, 4218), False, 'from phi.physics.material import Material\n'), ((4483, 4580), 'phi.math.blas.conjugate_gradient', 'conjugate_gradient', (['divergence', 'apply_A', 'guess', 'accuracy', 'max_iterations'], {'back_prop': 'back_prop'}), '(divergence, apply_A, guess, accuracy, max_iterations,\n back_prop=back_prop)\n', (4501, 4580), False, 'from phi.math.blas import conjugate_gradient\n'), ((5303, 5326), 'phi.math.sum', 'math.sum', (['components', '(0)'], {}), '(components, 0)\n', (5311, 5326), False, 'from phi import math\n'), ((4266, 4317), 'phi.physics.field.CenteredGrid', 'CenteredGrid', (['pressure'], {'extrapolation': 'extrapolation'}), '(pressure, extrapolation=extrapolation)\n', (4278, 4317), False, 'from phi.physics.field import CenteredGrid\n'), ((4756, 4781), 'phi.math.spatial_rank', 'math.spatial_rank', (['tensor'], {}), '(tensor)\n', (4773, 4781), False, 'from phi import math\n'), ((4885, 4953), 'phi.math.helper._dim_shifted', '_dim_shifted', (['weights', 'dimension', '(-1, 0, 1)'], {'diminish_others': '(1, 1)'}), '(weights, dimension, (-1, 0, 1), diminish_others=(1, 1))\n', (4897, 4953), False, 'from phi.math.helper import _dim_shifted\n'), ((5006, 5073), 'phi.math.helper._dim_shifted', '_dim_shifted', (['tensor', 'dimension', '(-1, 0, 1)'], {'diminish_others': '(1, 1)'}), '(tensor, dimension, (-1, 0, 1), diminish_others=(1, 1))\n', (5018, 5073), False, 'from phi.math.helper import _dim_shifted\n'), ((3542, 3750), 'phi.math.with_custom_gradient', 'math.with_custom_gradient', (['solve_pressure_forward', '[divergence, fluid_mask, self.max_iterations, guess, self.accuracy, domain]', 'pressure_gradient'], {'input_index': '(0)', 'output_index': '(0)', 'name_base': '"""geom_solve"""'}), "(solve_pressure_forward, [divergence, fluid_mask,\n self.max_iterations, guess, self.accuracy, domain], 
pressure_gradient,\n input_index=0, output_index=0, name_base='geom_solve')\n", (3567, 3750), False, 'from phi import math\n'), ((5203, 5258), 'phi.math.mul', 'math.mul', (['center_values', '(-lower_weights - upper_weights)'], {}), '(center_values, -lower_weights - upper_weights)\n', (5211, 5258), False, 'from phi import math\n'), ((5089, 5143), 'phi.math.mul', 'math.mul', (['upper_values', '(upper_weights * center_weights)'], {}), '(upper_values, upper_weights * center_weights)\n', (5097, 5143), False, 'from phi import math\n'), ((5146, 5200), 'phi.math.mul', 'math.mul', (['lower_values', '(lower_weights * center_weights)'], {}), '(lower_values, lower_weights * center_weights)\n', (5154, 5200), False, 'from phi import math\n')]
|
from flask import Flask
import locale
from flask_sqlalchemy import SQLAlchemy
from config import Config
# Application factory objects: one Flask app and one SQLAlchemy session bound to it.
app = Flask(__name__)
app.config.from_object(Config)  # load settings (DB URI, etc.) from the local config module
# use the system default locale for number/date formatting
locale.setlocale(locale.LC_ALL, '')
db = SQLAlchemy(app)
@app.route('/')
def index():
    """Root endpoint: simple liveness banner for the service."""
    banner = 'UnitPay API'
    return banner
from models import UnitpayPayments, AccountData
from unitpay import UnitPay
from flask import request
from sqlalchemy import exc
from datetime import datetime
import decimal
@app.route('/api/v1.0/unitpay/payment/', methods=['GET'])
def unitpay_processor():
    """Handle UnitPay payment-notification callbacks (GET).

    After UnitPay's signature check passes, a 'pay' event is recorded in
    UnitpayPayments and the matching account's balance is credited with the
    payment profit; any other method is acknowledged without DB writes.
    Returns a UnitPay-formatted success/error response string.
    """
    unitpay = UnitPay('SECRET_KEY')  # enter the UnitPay secret key here (placeholder)
    if unitpay.check_handler_request():
        try:
            # profit amount as an exact Decimal to avoid float rounding on money
            sum_count = decimal.Decimal(request.args.get('params[profit]'))
            # look up the account by the UnitPay 'account' request parameter
            # NOTE(review): the filter key is AccountData.name while the log
            # messages below call it an email -- confirm which it actually is
            account = db.session.query(AccountData).filter(AccountData.name == request.args.get('params[account]')).first()
            if request.args.get('method') == 'pay':
                if account:
                    # persist the raw payment row first
                    pay = UnitpayPayments(unitpay_id=request.args.get('params[unitpayId]'),
                                          account=request.args.get('params[account]'),
                                          sum=request.args.get('params[payerSum]'),
                                          payment_type=request.args.get('params[paymentType]'),
                                          payer_currency=request.args.get('params[payerCurrency]'),
                                          signature=request.args.get('params[signature]'),
                                          profit=request.args.get('params[profit]'))
                    db.session.add(pay)
                    db.session.commit()
                    update_count = sum_count
                    # credit the incoming profit to the account balance
                    db.session.query(AccountData).filter(AccountData.name == request.args.get('params[account]')).update({
                        'balance': account.balance + update_count
                    })
                    # mark the stored payment row as completed
                    db.session.query(UnitpayPayments).filter(UnitpayPayments.unitpay_id == request.args.get('params[unitpayId]')).update({
                        'date_complete': datetime.now(),
                        'status': 1
                    })
                    db.session.commit()
                    app.logger.info('The request was successfully processed by the system.')
                    return unitpay.get_success_handler_response("The request was successfully processed by the system.")
                else:
                    app.logger.info('Account with this email does not exist.')
                    return unitpay.get_error_handler_response("Account with this email does not exist.")
            else:
                # non-'pay' methods are acknowledged without touching the DB
                app.logger.info('The request was successfully processed by the system without writing to the database because of.')
                return unitpay.get_success_handler_response("The request was successfully processed by the system without writing to the database because of.")
        except exc.SQLAlchemyError as e:
            # roll back the partially-applied transaction on any DB failure
            print(e)
            app.logger.error(e)
            db.session.rollback()
            return unitpay.get_error_handler_response("The request has been processed by the system.")
    else:
        # signature/handler validation failed
        return unitpay.get_error_handler_response("The request has been processed by the system.")
# Run the Flask development server when executed directly.
# NOTE(review): the built-in server is for development only -- use a WSGI
# server in production.
if __name__ == '__main__':
    app.run(host='127.0.0.1', port=8080)
|
[
"flask.request.args.get",
"flask.Flask",
"flask_sqlalchemy.SQLAlchemy",
"locale.setlocale",
"unitpay.UnitPay",
"datetime.datetime.now"
] |
[((111, 126), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (116, 126), False, 'from flask import Flask\n'), ((158, 193), 'locale.setlocale', 'locale.setlocale', (['locale.LC_ALL', '""""""'], {}), "(locale.LC_ALL, '')\n", (174, 193), False, 'import locale\n'), ((199, 214), 'flask_sqlalchemy.SQLAlchemy', 'SQLAlchemy', (['app'], {}), '(app)\n', (209, 214), False, 'from flask_sqlalchemy import SQLAlchemy\n'), ((546, 567), 'unitpay.UnitPay', 'UnitPay', (['"""SECRET_KEY"""'], {}), "('SECRET_KEY')\n", (553, 567), False, 'from unitpay import UnitPay\n'), ((687, 721), 'flask.request.args.get', 'request.args.get', (['"""params[profit]"""'], {}), "('params[profit]')\n", (703, 721), False, 'from flask import request\n'), ((862, 888), 'flask.request.args.get', 'request.args.get', (['"""method"""'], {}), "('method')\n", (878, 888), False, 'from flask import request\n'), ((802, 837), 'flask.request.args.get', 'request.args.get', (['"""params[account]"""'], {}), "('params[account]')\n", (818, 837), False, 'from flask import request\n'), ((980, 1017), 'flask.request.args.get', 'request.args.get', (['"""params[unitpayId]"""'], {}), "('params[unitpayId]')\n", (996, 1017), False, 'from flask import request\n'), ((1069, 1104), 'flask.request.args.get', 'request.args.get', (['"""params[account]"""'], {}), "('params[account]')\n", (1085, 1104), False, 'from flask import request\n'), ((1152, 1188), 'flask.request.args.get', 'request.args.get', (['"""params[payerSum]"""'], {}), "('params[payerSum]')\n", (1168, 1188), False, 'from flask import request\n'), ((1245, 1284), 'flask.request.args.get', 'request.args.get', (['"""params[paymentType]"""'], {}), "('params[paymentType]')\n", (1261, 1284), False, 'from flask import request\n'), ((1343, 1384), 'flask.request.args.get', 'request.args.get', (['"""params[payerCurrency]"""'], {}), "('params[payerCurrency]')\n", (1359, 1384), False, 'from flask import request\n'), ((1438, 1475), 'flask.request.args.get', 'request.args.get', 
(['"""params[signature]"""'], {}), "('params[signature]')\n", (1454, 1475), False, 'from flask import request\n'), ((1526, 1560), 'flask.request.args.get', 'request.args.get', (['"""params[profit]"""'], {}), "('params[profit]')\n", (1542, 1560), False, 'from flask import request\n'), ((2081, 2095), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (2093, 2095), False, 'from datetime import datetime\n'), ((1766, 1801), 'flask.request.args.get', 'request.args.get', (['"""params[account]"""'], {}), "('params[account]')\n", (1782, 1801), False, 'from flask import request\n'), ((1992, 2029), 'flask.request.args.get', 'request.args.get', (['"""params[unitpayId]"""'], {}), "('params[unitpayId]')\n", (2008, 2029), False, 'from flask import request\n')]
|
import numpy as np
from astropy.io import fits
from astropy.table import Table
from scipy.interpolate import InterpolatedUnivariateSpline
import matplotlib.pyplot as plt
#from scipy.signal import medfilt
# Your input template
template = 'Template_s1d_Gl699_sc1d_v_file_AB.fits'
# template = 'Template_s1d_Gl15A_sc1d_v_file_AB.fits'
# template = 'Template_s1d_HD189733_sc1d_v_file_AB.fits'
c = 2.99792458e5  # speed of light in km/s
# read wavelength and flux. The wavelength is expressed in Ang; dividing by 10
# converts it to nm (the original comment said µm, which would be /10000)
wave_phoenix = fits.getdata('WAVE_PHOENIX-ACES-AGSS-COND-2011.fits') / 10
# bit of code to download goettigen models. Use only if you don't have the
# models locally, and change the False to True
if False:
    import os as os
    for temperature in np.arange(3000, 6100, 100):
        # builtin int replaces np.int, which was removed in NumPy 1.24
        temperature = str(int(np.round(temperature, -2)))
        print(temperature)
        os.system(
            'wget ftp://phoenix.astro.physik.uni-goettingen.de/HiResFITS/PHOENIX-ACES-AGSS-COND-2011/Z-0.0/lte0' + temperature + '-4.50-0.0.PHOENIX-ACES-AGSS-COND-2011-HiRes.fits')
# read template and header
tbl, hdr = fits.getdata(template, ext=1, header=True)
# round temperature in header to nearest 100 and get the right model
if 'OBJTEMP' in hdr:
    temperature = hdr['OBJTEMP']
    # clamp to the 3000-6000 K range covered by the downloaded PHOENIX grid
    if temperature < 3000:
        temperature = 3000
    if temperature > 6000:
        temperature = 6000
    # builtin int replaces np.int, which was removed in NumPy 1.24
    temperature = str(int(np.round(temperature, -2)))
else:
    # if the header does not have a temperature value, assume it is an early-M.
    # This does not really change much
    temperature = '3600'
# tell the user which model you are using
print('Temperature = ', temperature)
model_file = 'lte0' + temperature + '-4.50-0.0.PHOENIX-ACES-AGSS-COND-2011-HiRes.fits'
print('Model file = ', model_file)
flux_phoenix = fits.getdata(model_file)
# get wave and flux vectors for the template
w = np.array(tbl['wavelength'])
f = np.array(tbl['flux'])
# smooth the template with a 7 km/s boxcar. This avoids lines due to spurious noise excursions
f2 = np.array(f)
mask = np.isfinite(f)
# zero out NaNs so the convolution stays finite; the weight vector below
# renormalises the smoothed flux where points were missing
f2[~mask] = 0
mask = mask*1.0
# smooth by a boxcar and divide by a weight vector to avoid discontinuities at the edge or regions with NaNs
f = np.convolve(f2,np.ones(7), mode = 'same')/np.convolve(mask,np.ones(7), mode = 'same')
# find the first and second derivative of the flux
df = np.gradient(f)
ddf = np.gradient(np.gradient(f))
# lines are regions where there is a sign change in the derivative of the flux
# we also have some checks for NaNs
line = np.where((np.sign(df[1:]) != np.sign(df[:-1])) &
                 np.isfinite(ddf[1:])
                 & np.isfinite(df[1:])
                 & np.isfinite(df[:-1]))[0]
# create the output table with one start/end wavelength per detected line
tbl = Table()
tbl['ll_mask_s'] = np.zeros_like(line, dtype=float)
tbl['ll_mask_e'] = np.zeros_like(line, dtype=float)
dv = 0  # mask width in km/s. Set to zero, but could be changed
for i in range(len(line)):
    # we perform a linear interpolation to find the exact wavelength
    # where the derivative goes to zero: fit w as a linear function of df over
    # the two samples bracketing the sign change and take the intercept (df=0)
    wave_cen = (np.polyfit(df[line[i]:line[i] + 2], w[line[i]:line[i] + 2], 1))[1]
    # historically, masks are defined over a box of a given width (hence the 's' "start" and the 'e' "end" here)
    # here the two values are the same, but one could have a non-zero dv value
    corrv = np.sqrt((1 + (-dv / 2) / c) / (1 - (-dv / 2) / c))
    tbl['ll_mask_s'][i] = wave_cen * corrv
    # same but for the upper bound to the line position
    corrv = np.sqrt((1 + (dv / 2) / c) / (1 - (dv / 2) / c))
    tbl['ll_mask_e'][i] = wave_cen * corrv
# wavelength of lines is the mean of start and end.
wavelines = (tbl['ll_mask_s'] + tbl['ll_mask_e']) / 2.0
# the weight is the second derivative of the flux. The sharper the line,
# the more weight we give it
g = np.isfinite(ddf)
weight = InterpolatedUnivariateSpline(w[g], ddf[g])(wavelines)
# weight will be the second derivative
tbl['w_mask'] = weight
# create a spline of the model
model = InterpolatedUnivariateSpline(wave_phoenix, flux_phoenix)
# assume a 0 velocity and search
dv0 = 0.0 #
scale = 1.0
# two refinement passes: each iteration re-centres the velocity grid on the
# previous minimum and shrinks the step by a factor of 10
for ite in range(2):
    dvs = np.arange(400, dtype=float)
    dvs -= np.mean(dvs)
    dvs *= scale
    dvs += dv0
    # loop in velocity space and fill the CCF values for each velocity step
    ccf = np.zeros_like(dvs)
    # this is the line to change if you want to have positive or negative features
    mask = weight>0
    for i in range(len(dvs)):
        # relativistic Doppler factor for this trial velocity
        corrv = np.sqrt((1 + dvs[i] / c) / (1 - dvs[i] / c))
        # lines that will be used in the CCF mask
        ccf[i] = np.sum(model(wavelines[mask] / corrv))
    # just centering the cc around one and removing low-f trends.
    mini = np.argmin(ccf)
    dv0 = dvs[mini]
    scale /= 10.0
    plt.plot(dvs, ccf)
plt.show()
# find the lowest point in the CCF
minpos = np.argmin(ccf)
# fit a 2nd order polynomial to the bottom pixels (-1 to +1 from bottom) and find minimum point
fit = np.polyfit(dvs[minpos - 1:minpos + 2], ccf[minpos - 1:minpos + 2], 2)
# minimum of a 2nd order polynomial: vertex at -b/(2a)
systemic_velocity = -.5 * fit[1] / fit[0]
# return systemic velocity measured
print('systemic velocity : ', systemic_velocity, 'km/s')
# generate a nice plot to show positive/negative features
wavelines =tbl['ll_mask_s']
# find flux at sub-pixel position of lines
g = np.isfinite(f)
flux_lines = InterpolatedUnivariateSpline(w[g], f[g])(wavelines)
plt.plot(w,f, 'g-',label = 'spectrum')
# NOTE(review): w_mask < 0 is labelled 'positive features' (and > 0
# 'negative') -- confirm the intended sign convention for the mask weights
pos_lines = np.array(tbl['w_mask'] < 0)
plt.plot(np.array(wavelines[pos_lines]),flux_lines[pos_lines],'r.', label = 'positive features')
neg_lines = np.array(tbl['w_mask'] > 0)
plt.plot(np.array(wavelines[neg_lines]),flux_lines[neg_lines],'b.', label = 'negative features')
plt.legend()
plt.show()
# updating the table to account for systemic velocity of star
corrv = np.sqrt((1 + systemic_velocity / c) / (1 - systemic_velocity / c)) # relativistic Doppler
tbl['ll_mask_s'] = tbl['ll_mask_s'] / corrv
tbl['ll_mask_e'] = tbl['ll_mask_e'] / corrv
# write the output table (FITS plus ascii masks split by weight sign)
fits.writeto(hdr['OBJECT'] + '.fits', tbl, hdr, overwrite=True)
tbl[tbl['w_mask'] < 0].write(hdr['OBJECT'] + '_pos.mas', format='ascii', overwrite=True)
tbl[tbl['w_mask'] > 0].write(hdr['OBJECT'] + '_neg.mas', format='ascii', overwrite=True)
tbl.write(hdr['OBJECT'] + '_full.mas', format='ascii', overwrite=True)
|
[
"numpy.polyfit",
"numpy.ones",
"numpy.argmin",
"numpy.mean",
"numpy.arange",
"numpy.round",
"numpy.zeros_like",
"scipy.interpolate.InterpolatedUnivariateSpline",
"astropy.io.fits.getdata",
"numpy.isfinite",
"matplotlib.pyplot.show",
"matplotlib.pyplot.legend",
"os.system",
"astropy.table.Table",
"matplotlib.pyplot.plot",
"astropy.io.fits.writeto",
"numpy.array",
"numpy.sign",
"numpy.gradient",
"numpy.sqrt"
] |
[((1107, 1149), 'astropy.io.fits.getdata', 'fits.getdata', (['template'], {'ext': '(1)', 'header': '(True)'}), '(template, ext=1, header=True)\n', (1119, 1149), False, 'from astropy.io import fits\n'), ((1801, 1825), 'astropy.io.fits.getdata', 'fits.getdata', (['model_file'], {}), '(model_file)\n', (1813, 1825), False, 'from astropy.io import fits\n'), ((1876, 1903), 'numpy.array', 'np.array', (["tbl['wavelength']"], {}), "(tbl['wavelength'])\n", (1884, 1903), True, 'import numpy as np\n'), ((1908, 1929), 'numpy.array', 'np.array', (["tbl['flux']"], {}), "(tbl['flux'])\n", (1916, 1929), True, 'import numpy as np\n'), ((2031, 2042), 'numpy.array', 'np.array', (['f'], {}), '(f)\n', (2039, 2042), True, 'import numpy as np\n'), ((2050, 2064), 'numpy.isfinite', 'np.isfinite', (['f'], {}), '(f)\n', (2061, 2064), True, 'import numpy as np\n'), ((2351, 2365), 'numpy.gradient', 'np.gradient', (['f'], {}), '(f)\n', (2362, 2365), True, 'import numpy as np\n'), ((2717, 2724), 'astropy.table.Table', 'Table', ([], {}), '()\n', (2722, 2724), False, 'from astropy.table import Table\n'), ((2744, 2776), 'numpy.zeros_like', 'np.zeros_like', (['line'], {'dtype': 'float'}), '(line, dtype=float)\n', (2757, 2776), True, 'import numpy as np\n'), ((2796, 2828), 'numpy.zeros_like', 'np.zeros_like', (['line'], {'dtype': 'float'}), '(line, dtype=float)\n', (2809, 2828), True, 'import numpy as np\n'), ((3791, 3807), 'numpy.isfinite', 'np.isfinite', (['ddf'], {}), '(ddf)\n', (3802, 3807), True, 'import numpy as np\n'), ((3974, 4030), 'scipy.interpolate.InterpolatedUnivariateSpline', 'InterpolatedUnivariateSpline', (['wave_phoenix', 'flux_phoenix'], {}), '(wave_phoenix, flux_phoenix)\n', (4002, 4030), False, 'from scipy.interpolate import InterpolatedUnivariateSpline\n'), ((4769, 4779), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (4777, 4779), True, 'import matplotlib.pyplot as plt\n'), ((4825, 4839), 'numpy.argmin', 'np.argmin', (['ccf'], {}), '(ccf)\n', (4834, 4839), True, 'import 
numpy as np\n'), ((4944, 5013), 'numpy.polyfit', 'np.polyfit', (['dvs[minpos - 1:minpos + 2]', 'ccf[minpos - 1:minpos + 2]', '(2)'], {}), '(dvs[minpos - 1:minpos + 2], ccf[minpos - 1:minpos + 2], 2)\n', (4954, 5013), True, 'import numpy as np\n'), ((5321, 5335), 'numpy.isfinite', 'np.isfinite', (['f'], {}), '(f)\n', (5332, 5335), True, 'import numpy as np\n'), ((5402, 5440), 'matplotlib.pyplot.plot', 'plt.plot', (['w', 'f', '"""g-"""'], {'label': '"""spectrum"""'}), "(w, f, 'g-', label='spectrum')\n", (5410, 5440), True, 'import matplotlib.pyplot as plt\n'), ((5453, 5480), 'numpy.array', 'np.array', (["(tbl['w_mask'] < 0)"], {}), "(tbl['w_mask'] < 0)\n", (5461, 5480), True, 'import numpy as np\n'), ((5590, 5617), 'numpy.array', 'np.array', (["(tbl['w_mask'] > 0)"], {}), "(tbl['w_mask'] > 0)\n", (5598, 5617), True, 'import numpy as np\n'), ((5715, 5727), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', (5725, 5727), True, 'import matplotlib.pyplot as plt\n'), ((5728, 5738), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (5736, 5738), True, 'import matplotlib.pyplot as plt\n'), ((5810, 5876), 'numpy.sqrt', 'np.sqrt', (['((1 + systemic_velocity / c) / (1 - systemic_velocity / c))'], {}), '((1 + systemic_velocity / c) / (1 - systemic_velocity / c))\n', (5817, 5876), True, 'import numpy as np\n'), ((6015, 6078), 'astropy.io.fits.writeto', 'fits.writeto', (["(hdr['OBJECT'] + '.fits')", 'tbl', 'hdr'], {'overwrite': '(True)'}), "(hdr['OBJECT'] + '.fits', tbl, hdr, overwrite=True)\n", (6027, 6078), False, 'from astropy.io import fits\n'), ((522, 575), 'astropy.io.fits.getdata', 'fits.getdata', (['"""WAVE_PHOENIX-ACES-AGSS-COND-2011.fits"""'], {}), "('WAVE_PHOENIX-ACES-AGSS-COND-2011.fits')\n", (534, 575), False, 'from astropy.io import fits\n'), ((752, 778), 'numpy.arange', 'np.arange', (['(3000)', '(6100)', '(100)'], {}), '(3000, 6100, 100)\n', (761, 778), True, 'import numpy as np\n'), ((2384, 2398), 'numpy.gradient', 'np.gradient', (['f'], {}), 
'(f)\n', (2395, 2398), True, 'import numpy as np\n'), ((3319, 3365), 'numpy.sqrt', 'np.sqrt', (['((1 + -dv / 2 / c) / (1 - -dv / 2 / c))'], {}), '((1 + -dv / 2 / c) / (1 - -dv / 2 / c))\n', (3326, 3365), True, 'import numpy as np\n'), ((3482, 3526), 'numpy.sqrt', 'np.sqrt', (['((1 + dv / 2 / c) / (1 - dv / 2 / c))'], {}), '((1 + dv / 2 / c) / (1 - dv / 2 / c))\n', (3489, 3526), True, 'import numpy as np\n'), ((3817, 3859), 'scipy.interpolate.InterpolatedUnivariateSpline', 'InterpolatedUnivariateSpline', (['w[g]', 'ddf[g]'], {}), '(w[g], ddf[g])\n', (3845, 3859), False, 'from scipy.interpolate import InterpolatedUnivariateSpline\n'), ((4120, 4147), 'numpy.arange', 'np.arange', (['(400)'], {'dtype': 'float'}), '(400, dtype=float)\n', (4129, 4147), True, 'import numpy as np\n'), ((4159, 4171), 'numpy.mean', 'np.mean', (['dvs'], {}), '(dvs)\n', (4166, 4171), True, 'import numpy as np\n'), ((4292, 4310), 'numpy.zeros_like', 'np.zeros_like', (['dvs'], {}), '(dvs)\n', (4305, 4310), True, 'import numpy as np\n'), ((4691, 4705), 'numpy.argmin', 'np.argmin', (['ccf'], {}), '(ccf)\n', (4700, 4705), True, 'import numpy as np\n'), ((4750, 4768), 'matplotlib.pyplot.plot', 'plt.plot', (['dvs', 'ccf'], {}), '(dvs, ccf)\n', (4758, 4768), True, 'import matplotlib.pyplot as plt\n'), ((5349, 5389), 'scipy.interpolate.InterpolatedUnivariateSpline', 'InterpolatedUnivariateSpline', (['w[g]', 'f[g]'], {}), '(w[g], f[g])\n', (5377, 5389), False, 'from scipy.interpolate import InterpolatedUnivariateSpline\n'), ((5490, 5520), 'numpy.array', 'np.array', (['wavelines[pos_lines]'], {}), '(wavelines[pos_lines])\n', (5498, 5520), True, 'import numpy as np\n'), ((5627, 5657), 'numpy.array', 'np.array', (['wavelines[neg_lines]'], {}), '(wavelines[neg_lines])\n', (5635, 5657), True, 'import numpy as np\n'), ((876, 1064), 'os.system', 'os.system', (["(\n 'wget ftp://phoenix.astro.physik.uni-goettingen.de/HiResFITS/PHOENIX-ACES-AGSS-COND-2011/Z-0.0/lte0'\n + temperature + 
'-4.50-0.0.PHOENIX-ACES-AGSS-COND-2011-HiRes.fits')"], {}), "(\n 'wget ftp://phoenix.astro.physik.uni-goettingen.de/HiResFITS/PHOENIX-ACES-AGSS-COND-2011/Z-0.0/lte0'\n + temperature + '-4.50-0.0.PHOENIX-ACES-AGSS-COND-2011-HiRes.fits')\n", (885, 1064), True, 'import os as os\n'), ((2223, 2233), 'numpy.ones', 'np.ones', (['(7)'], {}), '(7)\n', (2230, 2233), True, 'import numpy as np\n'), ((2267, 2277), 'numpy.ones', 'np.ones', (['(7)'], {}), '(7)\n', (2274, 2277), True, 'import numpy as np\n'), ((3046, 3108), 'numpy.polyfit', 'np.polyfit', (['df[line[i]:line[i] + 2]', 'w[line[i]:line[i] + 2]', '(1)'], {}), '(df[line[i]:line[i] + 2], w[line[i]:line[i] + 2], 1)\n', (3056, 3108), True, 'import numpy as np\n'), ((4461, 4505), 'numpy.sqrt', 'np.sqrt', (['((1 + dvs[i] / c) / (1 - dvs[i] / c))'], {}), '((1 + dvs[i] / c) / (1 - dvs[i] / c))\n', (4468, 4505), True, 'import numpy as np\n'), ((1412, 1437), 'numpy.round', 'np.round', (['temperature', '(-2)'], {}), '(temperature, -2)\n', (1420, 1437), True, 'import numpy as np\n'), ((2659, 2679), 'numpy.isfinite', 'np.isfinite', (['df[:-1]'], {}), '(df[:-1])\n', (2670, 2679), True, 'import numpy as np\n'), ((813, 838), 'numpy.round', 'np.round', (['temperature', '(-2)'], {}), '(temperature, -2)\n', (821, 838), True, 'import numpy as np\n'), ((2621, 2640), 'numpy.isfinite', 'np.isfinite', (['df[1:]'], {}), '(df[1:])\n', (2632, 2640), True, 'import numpy as np\n'), ((2582, 2602), 'numpy.isfinite', 'np.isfinite', (['ddf[1:]'], {}), '(ddf[1:])\n', (2593, 2602), True, 'import numpy as np\n'), ((2527, 2542), 'numpy.sign', 'np.sign', (['df[1:]'], {}), '(df[1:])\n', (2534, 2542), True, 'import numpy as np\n'), ((2546, 2562), 'numpy.sign', 'np.sign', (['df[:-1]'], {}), '(df[:-1])\n', (2553, 2562), True, 'import numpy as np\n')]
|
# This program displays a plot of the functions x, x2 and 2x in the range [0, 4]
# <NAME> 2019-03-24
# I formulated this solution using the week 9 lectures as a starting point followed by further reading and research which is detailed further in the references section in the Readme file
# Additional reading included the matplotlib pyplot tutorial as recommended in lectures: https://matplotlib.org/users/pyplot_tutorial.html
print("The Plot should appear on your screen momentarily") # let the user know the plot window is coming
import numpy as np # numpy module is imported and given the short name np
import matplotlib.pyplot as pl # matplotlib.pyplot module is imported and given a shorter name pl
# NOTE(review): np.arange(start=0, stop=4) yields [0, 1, 2, 3] -- the stop
# value is exclusive, so x never reaches 4 despite the stated range [0, 4]
x = np.arange(start = 0, stop = 4) # the x sample points
pl.xlabel("x axis", fontsize=12, fontweight= 'bold') # axis labels, bold 12pt
pl.ylabel("y axis", fontsize=12, fontweight= 'bold')
pl.title("Plot Generated from Solution_10.py", fontsize= 14, fontweight='bold') # figure title
a = x # the identity function y = x
b = x*x # y = x squared
c = 2**x # y = 2 to the power of x (** is the power operator)
# NOTE(review): only y-values are passed below, so matplotlib plots each
# series against its index -- here the indices coincide with x, so it works
pl.plot(a, c='r', lw= 4.0, ls= '--', label= 'x') # red dashed line for y = x
pl.plot(b, c='g', lw= 4.0, ls= '--', label= 'x²') # green dashed line for y = x squared
pl.plot(c, c='y', lw= 4.0, ls= '--', label= '2x') # yellow dashed line for y = 2**x
pl.legend(loc= 'upper left') # legend in the top-left corner, built from the label= values above
pl.grid(True) # show a background grid
pl.show() # render the figure window
|
[
"matplotlib.pyplot.title",
"matplotlib.pyplot.show",
"matplotlib.pyplot.plot",
"matplotlib.pyplot.legend",
"numpy.arange",
"matplotlib.pyplot.ylabel",
"matplotlib.pyplot.xlabel",
"matplotlib.pyplot.grid"
] |
[((728, 754), 'numpy.arange', 'np.arange', ([], {'start': '(0)', 'stop': '(4)'}), '(start=0, stop=4)\n', (737, 754), True, 'import numpy as np\n'), ((924, 975), 'matplotlib.pyplot.xlabel', 'pl.xlabel', (['"""x axis"""'], {'fontsize': '(12)', 'fontweight': '"""bold"""'}), "('x axis', fontsize=12, fontweight='bold')\n", (933, 975), True, 'import matplotlib.pyplot as pl\n'), ((1046, 1097), 'matplotlib.pyplot.ylabel', 'pl.ylabel', (['"""y axis"""'], {'fontsize': '(12)', 'fontweight': '"""bold"""'}), "('y axis', fontsize=12, fontweight='bold')\n", (1055, 1097), True, 'import matplotlib.pyplot as pl\n'), ((1162, 1240), 'matplotlib.pyplot.title', 'pl.title', (['"""Plot Generated from Solution_10.py"""'], {'fontsize': '(14)', 'fontweight': '"""bold"""'}), "('Plot Generated from Solution_10.py', fontsize=14, fontweight='bold')\n", (1170, 1240), True, 'import matplotlib.pyplot as pl\n'), ((1614, 1659), 'matplotlib.pyplot.plot', 'pl.plot', (['a'], {'c': '"""r"""', 'lw': '(4.0)', 'ls': '"""--"""', 'label': '"""x"""'}), "(a, c='r', lw=4.0, ls='--', label='x')\n", (1621, 1659), True, 'import matplotlib.pyplot as pl\n'), ((1939, 1985), 'matplotlib.pyplot.plot', 'pl.plot', (['b'], {'c': '"""g"""', 'lw': '(4.0)', 'ls': '"""--"""', 'label': '"""x²"""'}), "(b, c='g', lw=4.0, ls='--', label='x²')\n", (1946, 1985), True, 'import matplotlib.pyplot as pl\n'), ((2044, 2090), 'matplotlib.pyplot.plot', 'pl.plot', (['c'], {'c': '"""y"""', 'lw': '(4.0)', 'ls': '"""--"""', 'label': '"""2x"""'}), "(c, c='y', lw=4.0, ls='--', label='2x')\n", (2051, 2090), True, 'import matplotlib.pyplot as pl\n'), ((2151, 2178), 'matplotlib.pyplot.legend', 'pl.legend', ([], {'loc': '"""upper left"""'}), "(loc='upper left')\n", (2160, 2178), True, 'import matplotlib.pyplot as pl\n'), ((2377, 2390), 'matplotlib.pyplot.grid', 'pl.grid', (['(True)'], {}), '(True)\n', (2384, 2390), True, 'import matplotlib.pyplot as pl\n'), ((2485, 2494), 'matplotlib.pyplot.show', 'pl.show', ([], {}), '()\n', (2492, 2494), True, 
'import matplotlib.pyplot as pl\n')]
|
import numpy as np
from scipy.linalg import expm
class Env(object):
    """Single-qubit control environment.

    The observation is the 4-vector [Re(c0), Re(c1), Im(c0), Im(c1)] packing
    the two complex amplitudes of the qubit state; the episode starts in |0>
    and each step evolves the state under H = J*a/(n_actions-1)*sz + sx for
    time dt, rewarding proximity to the target state |1>.
    """

    def __init__(self,
                 action_space=None,
                 dt=0.1):
        """Create the environment.

        Args:
            action_space: discrete control levels; defaults to [0, 1, 2].
                A None sentinel is used to avoid the shared-mutable-default
                pitfall of the original `action_space=[0,1,2]`.
            dt: duration of one evolution step.
        """
        super(Env, self).__init__()
        self.action_space = [0, 1, 2] if action_space is None else action_space
        self.n_actions = len(self.action_space)
        self.n_features = 4
        self.state = np.array([1, 0, 0, 0])
        self.nstep = 0
        self.dt = dt

    def reset(self):
        """Reset to |0> (and step counter to 0); return the initial state."""
        self.state = np.array([1, 0, 0, 0])
        self.nstep = 0
        return self.state

    def step(self, action):
        """Apply one control step.

        Args:
            action: index into the control levels (0 .. n_actions-1).

        Returns:
            (state, reward, done, fidelity) where fidelity = |<target|psi>|^2.
        """
        # rebuild the complex column vector from the packed real representation
        psi = np.array([self.state[0:int(len(self.state) / 2)] + self.state[int(len(self.state) / 2):int(len(self.state))] * 1j])
        psi = psi.T
        psi = np.mat(psi)
        J = 4  # control field strength
        sx = np.mat([[0, 1], [1, 0]], dtype=complex)
        sz = np.mat([[1, 0], [0, -1]], dtype=complex)
        U = np.matrix(np.identity(2, dtype=complex))
        # Hamiltonian: scaled sigma_z control plus a constant sigma_x drive
        H = J * float(action) / (self.n_actions - 1) * sz + 1 * sx
        U = expm(-1j * H * self.dt)
        psi = U * psi  # final state
        target = np.mat([[0], [1]], dtype=complex)
        err = 1 - (np.abs(psi.H * target) ** 2).item(0).real
        # NOTE(review): 10e-3 is 0.01, not 1e-3 -- confirm the intended threshold
        rwd = 10 * (err < 0.5) + 100 * (err < 0.1) + 5000 * (err < 10e-3)
        done = (err < 10e-3) or self.nstep >= np.pi / self.dt
        self.nstep += 1
        # repack the complex vector into the real observation layout
        psi = np.array(psi)
        psi_T = psi.T
        self.state = np.array(psi_T.real.tolist()[0] + psi_T.imag.tolist()[0])
        return self.state, rwd, done, 1 - err
|
[
"scipy.linalg.expm",
"numpy.abs",
"numpy.identity",
"numpy.array",
"numpy.mat"
] |
[((314, 336), 'numpy.array', 'np.array', (['[1, 0, 0, 0]'], {}), '([1, 0, 0, 0])\n', (322, 336), True, 'import numpy as np\n'), ((419, 441), 'numpy.array', 'np.array', (['[1, 0, 0, 0]'], {}), '([1, 0, 0, 0])\n', (427, 441), True, 'import numpy as np\n'), ((683, 694), 'numpy.mat', 'np.mat', (['psi'], {}), '(psi)\n', (689, 694), True, 'import numpy as np\n'), ((749, 788), 'numpy.mat', 'np.mat', (['[[0, 1], [1, 0]]'], {'dtype': 'complex'}), '([[0, 1], [1, 0]], dtype=complex)\n', (755, 788), True, 'import numpy as np\n'), ((802, 842), 'numpy.mat', 'np.mat', (['[[1, 0], [0, -1]]'], {'dtype': 'complex'}), '([[1, 0], [0, -1]], dtype=complex)\n', (808, 842), True, 'import numpy as np\n'), ((981, 1006), 'scipy.linalg.expm', 'expm', (['(-1.0j * H * self.dt)'], {}), '(-1.0j * H * self.dt)\n', (985, 1006), False, 'from scipy.linalg import expm\n'), ((1063, 1096), 'numpy.mat', 'np.mat', (['[[0], [1]]'], {'dtype': 'complex'}), '([[0], [1]], dtype=complex)\n', (1069, 1096), True, 'import numpy as np\n'), ((1330, 1343), 'numpy.array', 'np.array', (['psi'], {}), '(psi)\n', (1338, 1343), True, 'import numpy as np\n'), ((866, 895), 'numpy.identity', 'np.identity', (['(2)'], {'dtype': 'complex'}), '(2, dtype=complex)\n', (877, 895), True, 'import numpy as np\n'), ((1118, 1140), 'numpy.abs', 'np.abs', (['(psi.H * target)'], {}), '(psi.H * target)\n', (1124, 1140), True, 'import numpy as np\n')]
|
# CSC 321, Assignment 4
#
# This is the main training file for the vanilla GAN part of the assignment.
#
# Usage:
# ======
# To train with the default hyperparamters (saves results to checkpoints_vanilla/ and samples_vanilla/):
# python vanilla_gan.py
import os
import pdb
import pickle
import argparse
import warnings
warnings.filterwarnings("ignore")
# Numpy & Scipy imports
import numpy as np
import scipy
import scipy.misc
# Torch imports
import torch
import torch.nn as nn
import torch.optim as optim
# Local imports
import utils
from data_loader import get_emoji_loader
from models import DCGenerator, DCDiscriminator
from models import WGANDiscriminator, WGANGenerator
from models import WGANGPDiscriminator, WGANGPGenerator
SEED = 11
# Set the random seed manually for reproducibility.
np.random.seed(SEED)
torch.manual_seed(SEED)
if torch.cuda.is_available():
    # seed the CUDA RNG too so GPU runs are reproducible as well
    torch.cuda.manual_seed(SEED)
def print_models(G, D):
    """Prints model information for the generators and discriminators.

    Writes a banner-separated repr of each network to stdout; purely
    informational, returns None.
    """
    print("                    G                  ")
    print("---------------------------------------")
    print(G)
    print("---------------------------------------")
    print("                    D                  ")
    print("---------------------------------------")
    print(D)
    print("---------------------------------------")
def create_model(opts):
    """Builds the generator/discriminator pair for the requested GAN variant.

    Args:
        opts: parsed options; uses GAN_type ('LSGAN' | 'WGAN' | 'WGANGP'),
            noise_size, conv_dim, disable_bn and device.

    Returns:
        (G, D) tuple of networks, already moved to opts.device.

    Raises:
        ValueError: if opts.GAN_type is not a supported variant. Previously
            an unknown value fell through and crashed later with a confusing
            NameError on the unbound G.
    """
    if opts.GAN_type == 'LSGAN':
        G = DCGenerator(noise_size=opts.noise_size, conv_dim=opts.conv_dim)
        D = DCDiscriminator(conv_dim=opts.conv_dim, batch_norm=not opts.disable_bn)
    elif opts.GAN_type == 'WGAN':
        G = WGANGenerator(noise_size=opts.noise_size, conv_dim=opts.conv_dim)
        D = WGANDiscriminator(conv_dim=opts.conv_dim, batch_norm=not opts.disable_bn)
    elif opts.GAN_type == 'WGANGP':
        G = WGANGPGenerator(noise_size=opts.noise_size, conv_dim=opts.conv_dim)
        D = WGANGPDiscriminator(conv_dim=opts.conv_dim)
    else:
        raise ValueError('Unknown GAN_type: {}'.format(opts.GAN_type))
    # move both networks to the target device in-place before printing
    G.to(opts.device)
    D.to(opts.device)
    print_models(G, D)
    print('Models are at:'+str(opts.device))
    return G, D
def checkpoint(iteration, G, D, opts):
    """Save the current generator and discriminator weights.

    Writes G.pkl and D.pkl into opts.checkpoint_dir (the files are
    overwritten on every call, so only the latest weights are kept).
    """
    torch.save(G.state_dict(), os.path.join(opts.checkpoint_dir, 'G.pkl'))
    torch.save(D.state_dict(), os.path.join(opts.checkpoint_dir, 'D.pkl'))
def create_image_grid(array, ncols=None):
    """Tile a batch of images into a single grid image.

    Args:
        array: images with shape (num_images, channels, cell_h, cell_w).
        ncols: number of grid columns; defaults to int(sqrt(num_images)).

    Returns:
        A (cell_h*nrows, cell_w*ncols, channels) array in HWC order; the
        channel axis is squeezed away for single-channel input. Images
        beyond nrows*ncols are dropped.
    """
    num_images, channels, cell_h, cell_w = array.shape
    if not ncols:
        ncols = int(np.sqrt(num_images))
    # integer floor division replaces np.math.floor -- the np.math alias
    # was removed in NumPy 2.0, and num_images/ncols are positive ints
    nrows = num_images // ncols
    result = np.zeros((cell_h * nrows, cell_w * ncols, channels), dtype=array.dtype)
    for i in range(nrows):
        for j in range(ncols):
            # CHW -> HWC when placing each cell into the grid
            result[i * cell_h:(i + 1) * cell_h, j * cell_w:(j + 1) * cell_w, :] = \
                array[i * ncols + j].transpose(1, 2, 0)
    if channels == 1:
        result = result.squeeze()
    return result
def save_samples(G, fixed_noise, iteration, opts):
    """Generate images from fixed_noise and save them as a grid PNG.

    The file is written to opts.sample_dir as sample-XXXXXX.png so samples
    from the same fixed noise can be compared across iterations.
    """
    generated_images = G(fixed_noise)
    generated_images = utils.to_data(generated_images)
    grid = create_image_grid(generated_images)

    # merged = merge_images(X, fake_Y, opts)
    path = os.path.join(opts.sample_dir, 'sample-{:06d}.png'.format(iteration))
    # NOTE(review): scipy.misc.imsave was removed in SciPy 1.2 -- this call
    # requires an old SciPy (with Pillow) or should be ported to imageio/PIL
    scipy.misc.imsave(path, grid)
    print('Saved {}'.format(path))
def sample_noise(dim):
    """
    Generate a PyTorch Variable of uniform random noise.

    Input:
    - dim: Integer giving the dimension of noise to generate.

    Output:
    - A PyTorch Variable of shape (batch_size, dim, 1, 1) containing uniform
      random noise in the range (-1, 1).

    NOTE(review): batch_size is read as a global here but is never assigned
    in the visible module code (opts.batch_size is commented out in the
    training loop) -- confirm where it is defined before running.
    """
    return utils.to_var(torch.rand(batch_size, dim) * 2 - 1).unsqueeze(2).unsqueeze(3)
def training_loop_LSGAN(train_dataloader, opts):
    """Runs the training loop.
        * Saves checkpoints every opts.checkpoint_every iterations
        * Saves generated samples every opts.sample_every iterations

    NOTE(review): three things to confirm before running on a modern stack:
    batch_size is read as a global but never assigned here (the
    opts.batch_size line is commented out); d_optimizer/g_optimizer stay
    unbound if opts.optimizer is neither 'Adam' nor 'RMSProp'; and
    tensor.data[0] in the log line is pre-0.4 PyTorch (newer versions
    need .item()).
    """
    # Create generators and discriminators
    G, D = create_model(opts)

    # Create optimizers for the generators and discriminators
    if opts.optimizer == 'Adam':
        d_optimizer = optim.Adam(D.parameters(), opts.lr, [opts.beta1, opts.beta2])
        g_optimizer = optim.Adam(G.parameters(), opts.lr, [opts.beta1, opts.beta2])
    elif opts.optimizer == 'RMSProp' or opts.GAN_type == 'WGAN':
        d_optimizer = optim.RMSprop(D.parameters(), opts.lr)
        g_optimizer = optim.RMSprop(G.parameters(), opts.lr)
    print(d_optimizer)
    print(g_optimizer)

    # Generate fixed noise for sampling from the generator
    fixed_noise = sample_noise(opts.noise_size)  # batch_size x noise_size x 1 x 1

    iteration = 1
    total_train_iters = opts.num_epochs * len(train_dataloader)
    device = opts.device
    noise_dim = opts.noise_size
    #batch_size = opts.batch_size
    for epoch in range(opts.num_epochs):
        for batch in train_dataloader:
            real_images, _ = batch
            #print(real_images.device)
            real_images = real_images.to(device)
            #print(real_images.device)
            #real_images, labels = utils.to_var(real_images), utils.to_var(labels).long().squeeze()
            #print(real_images.shape)
            ################################################
            ###         TRAIN THE DISCRIMINATOR         ####
            ################################################
            d_optimizer.zero_grad()
            # FILL THIS IN
            # 1. Compute the discriminator loss on real images
            # least-squares GAN loss: D(real) is pushed toward label 1
            D_real_loss = 0.5 * torch.sum((D(real_images) - 1)**2) / batch_size
            #D_real_loss = 0.5 * torch.sum((D(real_images) - 0.9)**2) / batch_size
            #print(D_real_loss)
            # 2. Sample noise
            noise = 2 * torch.rand(batch_size, noise_dim) - 1
            noise = noise.view(batch_size, noise_dim, 1, 1).to(device)
            #print(noise.shape)
            # 3. Generate fake images from the noise
            fake_images = G(noise)
            # 4. Compute the discriminator loss on the fake images
            # D(fake) is pushed toward label 0
            D_fake_loss = 0.5 * torch.sum(D(fake_images)**2) / batch_size
            # 5. Compute the total discriminator loss
            D_total_loss = D_fake_loss + D_real_loss
            D_total_loss.backward()
            d_optimizer.step()
            ###########################################
            ###          TRAIN THE GENERATOR        ###
            ###########################################
            g_optimizer.zero_grad()
            # FILL THIS IN
            # 1. Sample noise
            noise = 2 * torch.rand(batch_size, noise_dim) - 1
            noise = noise.view(batch_size, noise_dim, 1, 1).to(device)
            # 2. Generate fake images from the noise
            fake_images = G(noise)
            # 3. Compute the generator loss
            # generator tries to make D output 1 on its fakes
            G_loss = torch.sum((D(fake_images) -1)**2)/ batch_size
            #G_loss = torch.sum((D(fake_images) -0.9)**2)/ batch_size
            G_loss.backward()
            g_optimizer.step()
            # Print the log info
            if iteration % opts.log_step == 0:
                print('Iteration [{:4d}/{:4d}] | D_real_loss: {:6.4f} | D_fake_loss: {:6.4f} | G_loss: {:6.4f}'.format(
                    iteration, total_train_iters, D_real_loss.data[0], D_fake_loss.data[0], G_loss.data[0]))
            # Save the generated samples
            if iteration % opts.sample_every == 0:
                save_samples(G, fixed_noise, iteration, opts)
            # Save the model parameters
            if iteration % opts.checkpoint_every == 0:
                checkpoint(iteration, G, D, opts)
            iteration += 1
def training_loop_WGAN(train_dataloader, opts):
    """Runs the WGAN training loop (weight-clipping variant).

    Per batch: one critic (discriminator) update followed by hard clipping of
    the critic weights to [-0.01, 0.01] (Lipschitz constraint), then one
    generator update.
        * Saves checkpoints every opts.checkpoint_every iterations
        * Saves generated samples every opts.sample_every iterations

    NOTE(review): reads the module-level global `batch_size`. Logging uses
    `.data[0]`, which only works on pre-0.4 PyTorch -- modern versions require
    `.item()`; confirm the intended PyTorch version.
    """
    # Create generators and discriminators
    G, D = create_model(opts)
    # Create optimizers for the generators and discriminators
    if opts.optimizer == 'Adam':
        d_optimizer = optim.Adam(D.parameters(), opts.lr, [opts.beta1, opts.beta2])
        g_optimizer = optim.Adam(G.parameters(), opts.lr, [opts.beta1, opts.beta2])
    elif opts.optimizer == 'RMSProp' or opts.GAN_type == 'WGAN':
        d_optimizer = optim.RMSprop(D.parameters(), opts.lr)
        g_optimizer = optim.RMSprop(G.parameters(), opts.lr)
    print(d_optimizer)
    print(g_optimizer)
    # Fixed noise so the periodically saved sample grids are comparable over time
    fixed_noise = sample_noise(opts.noise_size)  # batch_size x noise_size x 1 x 1
    iteration = 1
    total_train_iters = opts.num_epochs * len(train_dataloader)
    device = opts.device
    noise_dim = opts.noise_size
    # Clipping threshold for the critic weights (WGAN Lipschitz constraint)
    clip_value = 0.01
    for epoch in range(opts.num_epochs):
        for batch in train_dataloader:
            real_images, _ = batch
            real_images = real_images.to(device)
            ################################################
            ###         TRAIN THE DISCRIMINATOR         ####
            ################################################
            d_optimizer.zero_grad()
            # 1. Sample noise uniformly from (-1, 1)
            noise = 2 * torch.rand(batch_size, noise_dim) - 1
            noise = noise.view(batch_size, noise_dim, 1, 1).to(device)
            # 2. Generate fake images; detach so this step does not backprop into G
            fake_images = G(noise).detach()
            # 3. Critic loss: E[D(fake)] - E[D(real)] (Wasserstein estimate)
            D_total_loss = torch.mean(D(fake_images)) - torch.mean(D(real_images))
            D_total_loss.backward()
            d_optimizer.step()
            # Clip critic weights in place to enforce the Lipschitz constraint
            for p in D.parameters():
                p.data.clamp_(-clip_value, clip_value)
            ###########################################
            ###        TRAIN THE GENERATOR          ###
            ###########################################
            g_optimizer.zero_grad()
            # 1. Fresh noise for the generator step
            noise = 2 * torch.rand(batch_size, noise_dim) - 1
            noise = noise.view(batch_size, noise_dim, 1, 1).to(device)
            # 2. Generate fake images from the noise
            fake_images = G(noise)
            # 3. Generator maximizes the critic score on fakes
            G_loss = -torch.mean(D(fake_images))
            G_loss.backward()
            g_optimizer.step()
            # Logging / saving happens without building graph state
            with torch.no_grad():
                if iteration % opts.log_step == 0:
                    print('Iteration [{:4d}/{:4d}] | D_total_loss: {:6.4f} | G_loss: {:6.4f}'.format(
                        iteration, total_train_iters, D_total_loss.data[0], G_loss.data[0]))
                # Save the generated samples
                if iteration % opts.sample_every == 0:
                    save_samples(G, fixed_noise, iteration, opts)
                # Save the model parameters
                if iteration % opts.checkpoint_every == 0:
                    checkpoint(iteration, G, D, opts)
            iteration += 1
def training_loop_WGANGP(train_dataloader, opts):
    """Runs the WGAN-GP training loop (gradient-penalty variant).

    Per batch: one critic update with a gradient penalty in place of weight
    clipping, then one generator update.
        * Saves checkpoints every opts.checkpoint_every iterations
        * Saves generated samples every opts.sample_every iterations

    NOTE(review): unlike the other loops, `batch_size` here is local, taken
    from the actual batch. The penalty uses a single scalar `random_eps` for
    the whole batch (torch.rand(1)) and penalizes the norm of the *flattened
    batch* gradient (gradients.norm(2)) rather than per-sample interpolation
    points and per-sample gradient norms -- verify this matches the intended
    WGAN-GP formulation. Logging uses `.data[0]` (pre-0.4 PyTorch only).
    """
    # Create generators and discriminators
    G, D = create_model(opts)
    # Create optimizers for the generators and discriminators
    if opts.optimizer == 'Adam':
        d_optimizer = optim.Adam(D.parameters(), opts.lr, [opts.beta1, opts.beta2])
        g_optimizer = optim.Adam(G.parameters(), opts.lr, [opts.beta1, opts.beta2])
    elif opts.optimizer == 'RMSProp':
        d_optimizer = optim.RMSprop(D.parameters(), opts.lr)
        g_optimizer = optim.RMSprop(G.parameters(), opts.lr)
    print(d_optimizer)
    print(g_optimizer)
    # Fixed noise so the periodically saved sample grids are comparable over time
    fixed_noise = sample_noise(opts.noise_size)  # batch_size x noise_size x 1 x 1
    iteration = 1
    total_train_iters = opts.num_epochs * len(train_dataloader)
    device = opts.device
    noise_dim = opts.noise_size
    # Gradient-penalty coefficient
    lambda_GP = 10
    for epoch in range(opts.num_epochs):
        for batch in train_dataloader:
            real_images, _ = batch
            # Local batch size: handles a smaller final batch correctly
            batch_size = real_images.shape[0]
            real_images = real_images.to(device)
            ################################################
            ###         TRAIN THE DISCRIMINATOR         ####
            ################################################
            d_optimizer.zero_grad()
            # 1. Sample noise uniformly from (-1, 1)
            noise = 2 * torch.rand(batch_size, noise_dim) - 1
            noise = noise.view(batch_size, noise_dim, 1, 1).to(device)
            # 2. Generate fake images and score them with the critic
            fake_images = G(noise)
            D_fake_loss = torch.mean(D(fake_images))
            # 3. Gradient penalty: evaluate the critic at a random point on the
            #    line between fakes and reals (one shared eps for the batch)
            random_eps = torch.rand(1, device=device)
            interpolates = (1 - random_eps) * fake_images + random_eps * real_images
            D_interpolates = D(interpolates)
            # grad_outputs of ones so autograd returns d D(x_hat)/d x_hat
            fake = torch.ones(D_interpolates.size(), device=device)
            # create_graph=True so the penalty itself is differentiable
            gradients = torch.autograd.grad(
                outputs=D_interpolates, inputs=interpolates, grad_outputs=fake, create_graph=True, retain_graph=True, only_inputs=True)[0]
            # 4. Critic loss: E[D(fake)] - E[D(real)] + lambda * (||grad|| - 1)^2
            D_total_loss = D_fake_loss - \
                torch.mean(D(real_images)) \
                + lambda_GP * \
                (gradients.norm(2) - 1)**2
            D_total_loss.backward()
            d_optimizer.step()
            ###########################################
            ###        TRAIN THE GENERATOR          ###
            ###########################################
            g_optimizer.zero_grad()
            # 1. Fresh noise for the generator step
            noise = 2 * torch.rand(batch_size, noise_dim) - 1
            noise = noise.view(batch_size, noise_dim, 1, 1).to(device)
            # 2. Generate fake images from the noise
            fake_images = G(noise)
            # 3. Generator maximizes the critic score on fakes
            G_loss = -torch.mean(D(fake_images))
            G_loss.backward()
            g_optimizer.step()
            # Logging / saving happens without building graph state
            with torch.no_grad():
                if iteration % opts.log_step == 0:
                    print('Iteration [{:4d}/{:4d}] | D_total_loss: {:6.4f} | G_loss: {:6.4f}'.format(
                        iteration, total_train_iters, D_total_loss.data[0], G_loss.data[0]))
                # Save the generated samples
                if iteration % opts.sample_every == 0:
                    save_samples(G, fixed_noise, iteration, opts)
                # Save the model parameters
                if iteration % opts.checkpoint_every == 0:
                    checkpoint(iteration, G, D, opts)
            iteration += 1
def main(opts):
    """Loads the data, creates checkpoint and sample directories, and starts the training loop.
    """
    # Build the training dataloader for the chosen emoji set (eval split unused here).
    train_dataloader, _ = get_emoji_loader(opts.emoji, opts)
    # Make sure output directories exist before the loops write to them.
    utils.create_dir(opts.checkpoint_dir)
    utils.create_dir(opts.sample_dir)
    # Dispatch to the loop matching the requested GAN variant. An unknown
    # GAN_type falls through with no action, same as the original if/elif chain.
    loops = {
        'LSGAN': training_loop_LSGAN,
        'WGAN': training_loop_WGAN,
        'WGANGP': training_loop_WGANGP,
    }
    loop = loops.get(opts.GAN_type)
    if loop is not None:
        loop(train_dataloader, opts)
def create_parser():
    """Creates and returns the command-line argument parser.

    Groups: model hyper-parameters, training hyper-parameters, data source,
    output directories / logging cadence, device selection, GAN variant,
    and optimizer choice.
    """
    parser = argparse.ArgumentParser()

    # Model hyper-parameters
    parser.add_argument('--image_size', type=int, default=32,
                        help='The side length N to convert images to NxN.')
    parser.add_argument('--conv_dim', type=int, default=32)
    parser.add_argument('--noise_size', type=int, default=100)
    parser.add_argument('--disable_bn', action='store_true',
                        help='Disable Batch Normalization(BN)')

    # Training hyper-parameters
    parser.add_argument('--num_epochs', type=int, default=40)
    parser.add_argument('--batch_size', type=int, default=16,
                        help='The number of images in a batch.')
    parser.add_argument('--num_workers', type=int, default=0,
                        help='The number of threads to use for the DataLoader.')
    parser.add_argument('--lr', type=float, default=0.0003,
                        help='The learning rate (default 0.0003)')
    parser.add_argument('--beta1', type=float, default=0.5)
    parser.add_argument('--beta2', type=float, default=0.999)

    # Data sources
    parser.add_argument('--emoji', type=str, default='Apple',
                        choices=['Apple', 'Facebook', 'Windows'],
                        help='Choose the type of emojis to generate.')

    # Directories and checkpoint/sample iterations
    parser.add_argument('--checkpoint_dir', type=str, default='./checkpoints_vanilla')
    parser.add_argument('--sample_dir', type=str, default='./samples_vanilla')
    parser.add_argument('--log_step', type=int, default=10)
    parser.add_argument('--sample_every', type=int, default=200)
    parser.add_argument('--checkpoint_every', type=int, default=400)

    # GPU or CPU
    parser.add_argument('--disable-cuda', action='store_true', help='Disable CUDA')

    # GAN training objective
    parser.add_argument('--GAN_type', type=str, default='WGANGP',
                        choices=['LSGAN', 'WGAN', 'WGANGP'],
                        help='Choose the type of GAN')

    # Optimizer
    parser.add_argument('--optimizer', type=str, default='Adam',
                        choices=['Adam', 'RMSProp'],
                        help='Choose the type of Optimizer')
    return parser
if __name__ == '__main__':
    # Parse CLI args, pick the device, then hand off to main().
    parser = create_parser()
    opts = parser.parse_args()
    # Use CUDA when available unless --disable-cuda was passed.
    opts.device = None
    if not opts.disable_cuda and torch.cuda.is_available():
        opts.device = torch.device('cuda')
    else:
        opts.device = torch.device('cpu')
    # Module-level global: read by sample_noise and the LSGAN/WGAN loops.
    batch_size = opts.batch_size
    print(opts)
    main(opts)
|
[
"models.WGANGenerator",
"numpy.random.seed",
"argparse.ArgumentParser",
"torch.autograd.grad",
"torch.device",
"scipy.misc.imsave",
"torch.no_grad",
"os.path.join",
"models.WGANDiscriminator",
"utils.create_dir",
"utils.to_data",
"torch.manual_seed",
"torch.cuda.manual_seed",
"models.WGANGPGenerator",
"torch.cuda.is_available",
"torch.rand",
"models.DCDiscriminator",
"models.DCGenerator",
"warnings.filterwarnings",
"numpy.zeros",
"data_loader.get_emoji_loader",
"models.WGANGPDiscriminator",
"numpy.sqrt"
] |
[((330, 363), 'warnings.filterwarnings', 'warnings.filterwarnings', (['"""ignore"""'], {}), "('ignore')\n", (353, 363), False, 'import warnings\n'), ((809, 829), 'numpy.random.seed', 'np.random.seed', (['SEED'], {}), '(SEED)\n', (823, 829), True, 'import numpy as np\n'), ((830, 853), 'torch.manual_seed', 'torch.manual_seed', (['SEED'], {}), '(SEED)\n', (847, 853), False, 'import torch\n'), ((857, 882), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (880, 882), False, 'import torch\n'), ((888, 916), 'torch.cuda.manual_seed', 'torch.cuda.manual_seed', (['SEED'], {}), '(SEED)\n', (910, 916), False, 'import torch\n'), ((2341, 2383), 'os.path.join', 'os.path.join', (['opts.checkpoint_dir', '"""G.pkl"""'], {}), "(opts.checkpoint_dir, 'G.pkl')\n", (2353, 2383), False, 'import os\n'), ((2397, 2439), 'os.path.join', 'os.path.join', (['opts.checkpoint_dir', '"""D.pkl"""'], {}), "(opts.checkpoint_dir, 'D.pkl')\n", (2409, 2439), False, 'import os\n'), ((2764, 2835), 'numpy.zeros', 'np.zeros', (['(cell_h * nrows, cell_w * ncols, channels)'], {'dtype': 'array.dtype'}), '((cell_h * nrows, cell_w * ncols, channels), dtype=array.dtype)\n', (2772, 2835), True, 'import numpy as np\n'), ((3191, 3222), 'utils.to_data', 'utils.to_data', (['generated_images'], {}), '(generated_images)\n', (3204, 3222), False, 'import utils\n'), ((3401, 3430), 'scipy.misc.imsave', 'scipy.misc.imsave', (['path', 'grid'], {}), '(path, grid)\n', (3418, 3430), False, 'import scipy\n'), ((16255, 16289), 'data_loader.get_emoji_loader', 'get_emoji_loader', (['opts.emoji', 'opts'], {}), '(opts.emoji, opts)\n', (16271, 16289), False, 'from data_loader import get_emoji_loader\n'), ((16342, 16379), 'utils.create_dir', 'utils.create_dir', (['opts.checkpoint_dir'], {}), '(opts.checkpoint_dir)\n', (16358, 16379), False, 'import utils\n'), ((16384, 16417), 'utils.create_dir', 'utils.create_dir', (['opts.sample_dir'], {}), '(opts.sample_dir)\n', (16400, 16417), False, 'import utils\n'), ((16774, 
16799), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (16797, 16799), False, 'import argparse\n'), ((1495, 1558), 'models.DCGenerator', 'DCGenerator', ([], {'noise_size': 'opts.noise_size', 'conv_dim': 'opts.conv_dim'}), '(noise_size=opts.noise_size, conv_dim=opts.conv_dim)\n', (1506, 1558), False, 'from models import DCGenerator, DCDiscriminator\n'), ((1571, 1642), 'models.DCDiscriminator', 'DCDiscriminator', ([], {'conv_dim': 'opts.conv_dim', 'batch_norm': '(not opts.disable_bn)'}), '(conv_dim=opts.conv_dim, batch_norm=not opts.disable_bn)\n', (1586, 1642), False, 'from models import DCGenerator, DCDiscriminator\n'), ((18866, 18891), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (18889, 18891), False, 'import torch\n'), ((18915, 18935), 'torch.device', 'torch.device', (['"""cuda"""'], {}), "('cuda')\n", (18927, 18935), False, 'import torch\n'), ((18968, 18987), 'torch.device', 'torch.device', (['"""cpu"""'], {}), "('cpu')\n", (18980, 18987), False, 'import torch\n'), ((1689, 1754), 'models.WGANGenerator', 'WGANGenerator', ([], {'noise_size': 'opts.noise_size', 'conv_dim': 'opts.conv_dim'}), '(noise_size=opts.noise_size, conv_dim=opts.conv_dim)\n', (1702, 1754), False, 'from models import WGANDiscriminator, WGANGenerator\n'), ((1767, 1840), 'models.WGANDiscriminator', 'WGANDiscriminator', ([], {'conv_dim': 'opts.conv_dim', 'batch_norm': '(not opts.disable_bn)'}), '(conv_dim=opts.conv_dim, batch_norm=not opts.disable_bn)\n', (1784, 1840), False, 'from models import WGANDiscriminator, WGANGenerator\n'), ((2672, 2691), 'numpy.sqrt', 'np.sqrt', (['num_images'], {}), '(num_images)\n', (2679, 2691), True, 'import numpy as np\n'), ((13787, 13815), 'torch.rand', 'torch.rand', (['(1)'], {'device': 'device'}), '(1, device=device)\n', (13797, 13815), False, 'import torch\n'), ((1889, 1956), 'models.WGANGPGenerator', 'WGANGPGenerator', ([], {'noise_size': 'opts.noise_size', 'conv_dim': 'opts.conv_dim'}), 
'(noise_size=opts.noise_size, conv_dim=opts.conv_dim)\n', (1904, 1956), False, 'from models import WGANGPDiscriminator, WGANGPGenerator\n'), ((1969, 2012), 'models.WGANGPDiscriminator', 'WGANGPDiscriminator', ([], {'conv_dim': 'opts.conv_dim'}), '(conv_dim=opts.conv_dim)\n', (1988, 2012), False, 'from models import WGANGPDiscriminator, WGANGPGenerator\n'), ((11036, 11051), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (11049, 11051), False, 'import torch\n'), ((14244, 14387), 'torch.autograd.grad', 'torch.autograd.grad', ([], {'outputs': 'D_interpolates', 'inputs': 'interpolates', 'grad_outputs': 'fake', 'create_graph': '(True)', 'retain_graph': '(True)', 'only_inputs': '(True)'}), '(outputs=D_interpolates, inputs=interpolates,\n grad_outputs=fake, create_graph=True, retain_graph=True, only_inputs=True)\n', (14263, 14387), False, 'import torch\n'), ((15438, 15453), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (15451, 15453), False, 'import torch\n'), ((6013, 6046), 'torch.rand', 'torch.rand', (['batch_size', 'noise_dim'], {}), '(batch_size, noise_dim)\n', (6023, 6046), False, 'import torch\n'), ((6849, 6882), 'torch.rand', 'torch.rand', (['batch_size', 'noise_dim'], {}), '(batch_size, noise_dim)\n', (6859, 6882), False, 'import torch\n'), ((9720, 9753), 'torch.rand', 'torch.rand', (['batch_size', 'noise_dim'], {}), '(batch_size, noise_dim)\n', (9730, 9753), False, 'import torch\n'), ((10561, 10594), 'torch.rand', 'torch.rand', (['batch_size', 'noise_dim'], {}), '(batch_size, noise_dim)\n', (10571, 10594), False, 'import torch\n'), ((13463, 13496), 'torch.rand', 'torch.rand', (['batch_size', 'noise_dim'], {}), '(batch_size, noise_dim)\n', (13473, 13496), False, 'import torch\n'), ((14963, 14996), 'torch.rand', 'torch.rand', (['batch_size', 'noise_dim'], {}), '(batch_size, noise_dim)\n', (14973, 14996), False, 'import torch\n'), ((3863, 3890), 'torch.rand', 'torch.rand', (['batch_size', 'dim'], {}), '(batch_size, dim)\n', (3873, 3890), False, 'import 
torch\n')]
|
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt

# Total goals per match (home + away) across the World Cup dataset.
df = pd.read_csv('fifa-world-cup/WorldCupMatches.csv')
goles = [home + away for home, away in zip(df['Home Team Goals'], df['Away Team Goals'])]

fig, ax = plt.subplots()
# Box plot of goals per match
ax.boxplot(goles,
           vert=True,          # vertical box alignment
           patch_artist=True,  # fill with color
           labels=['Goles'])   # will be used to label x-ticks
ax.set_ylim(-1, 15)
ax.set_ylabel('Goles')
ax.set_title(r'Goles por partido en Copas del Mundo hasta 2014')
ax.yaxis.grid(True)
# Tweak spacing to prevent clipping of ylabel
fig.tight_layout()
plt.show()
|
[
"pandas.read_csv",
"matplotlib.pyplot.subplots",
"matplotlib.pyplot.show"
] |
[((77, 126), 'pandas.read_csv', 'pd.read_csv', (['"""fifa-world-cup/WorldCupMatches.csv"""'], {}), "('fifa-world-cup/WorldCupMatches.csv')\n", (88, 126), True, 'import pandas as pd\n'), ((206, 220), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {}), '()\n', (218, 220), True, 'import matplotlib.pyplot as plt\n'), ((647, 657), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (655, 657), True, 'import matplotlib.pyplot as plt\n')]
|
# Generated by Django 3.0.4 on 2020-03-26 14:44
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated (Django 3.0.4). Three operations:
    #   1) rename model User -> Users,
    #   2) repoint Business.user at the swappable auth user model,
    #   3) create the Post model (photo, comment, neighbourhood FK).
    # Migration files are applied as-is; do not hand-edit operations.

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('neighbourhoodapp', '0004_delete_signupforms'),
    ]
    operations = [
        migrations.RenameModel(
            old_name='User',
            new_name='Users',
        ),
        migrations.AlterField(
            model_name='business',
            name='user',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
        ),
        migrations.CreateModel(
            name='Post',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('photo', models.ImageField(upload_to='posted')),
                ('comment', models.CharField(max_length=300)),
                ('neighbourhood', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='neighbourhoodapp.Neighbourhood')),
            ],
        ),
    ]
|
[
"django.db.migrations.swappable_dependency",
"django.db.migrations.RenameModel",
"django.db.models.ForeignKey",
"django.db.models.CharField",
"django.db.models.AutoField",
"django.db.models.ImageField"
] |
[((227, 284), 'django.db.migrations.swappable_dependency', 'migrations.swappable_dependency', (['settings.AUTH_USER_MODEL'], {}), '(settings.AUTH_USER_MODEL)\n', (258, 284), False, 'from django.db import migrations, models\n'), ((377, 434), 'django.db.migrations.RenameModel', 'migrations.RenameModel', ([], {'old_name': '"""User"""', 'new_name': '"""Users"""'}), "(old_name='User', new_name='Users')\n", (399, 434), False, 'from django.db import migrations, models\n'), ((580, 676), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'to': 'settings.AUTH_USER_MODEL'}), '(on_delete=django.db.models.deletion.CASCADE, to=settings.\n AUTH_USER_MODEL)\n', (597, 676), False, 'from django.db import migrations, models\n'), ((785, 878), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (801, 878), False, 'from django.db import migrations, models\n'), ((903, 940), 'django.db.models.ImageField', 'models.ImageField', ([], {'upload_to': '"""posted"""'}), "(upload_to='posted')\n", (920, 940), False, 'from django.db import migrations, models\n'), ((971, 1003), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)'}), '(max_length=300)\n', (987, 1003), False, 'from django.db import migrations, models\n'), ((1040, 1144), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""neighbourhoodapp.Neighbourhood"""'}), "(on_delete=django.db.models.deletion.CASCADE, to=\n 'neighbourhoodapp.Neighbourhood')\n", (1057, 1144), False, 'from django.db import migrations, models\n')]
|
'''
This is a customized trainer for T5-like model training.
In this class, the training loop is written out explicitly for more flexibility and control over training.
'''
import math
import os
import sys
import warnings
import tensorflow as tf
from tqdm import tqdm
from sklearn.metrics import accuracy_score, classification_report
import numpy as np
from keras import backend as K
from ttt.utils import add_filehandler_for_logger, get_existing_cks
from tensorboardX import SummaryWriter
# for translation evaluation from: https://github.com/mjpost/sacrebleu
# which is also used in the original T5 paper
import sacrebleu
from .utils import write_args_enhance, save_ck, dictionize_t2t_dataset, set_seed
class T2TTrainer():
    def __init__(self, args, logger):
        """Initialize trainer state from parsed arguments.

        Args:
            args: parsed argument namespace; must provide `eval_on` ("acc" or
                "bleu"), `patience`, `scheduler`, and either `learning_rate`
                or `lr`. Optional entries: `use_tb`, `output_folder`.
            logger: logger used for progress and diagnostic messages.
        """
        self.eval_on = args.eval_on
        assert self.eval_on in ["acc",
                                "bleu"], "now t2t training only supports --eval_on acc, bleu, only works when --do_eval=True"
        self.args = args
        self.logger = logger
        # Early-stopping bookkeeping.
        self.patience = args.patience
        self.wait = 0
        arg_dict = self.args.__dict__
        self.use_tb = arg_dict.get('use_tb', False)
        # Only create a TensorBoard writer when explicitly requested.
        self._tb_writer = SummaryWriter(log_dir=arg_dict.get('output_folder', "runs")) if self.use_tb else None
        self.scheduler = args.scheduler
        # Accept either `learning_rate` or `lr` as the target learning rate.
        self.lr_to_reach = args.learning_rate if "learning_rate" in arg_dict else args.lr
        # "Best" starts at the worst possible value for the chosen metric:
        # +inf for loss-like metrics (lower is better), -inf otherwise.
        if self.args.eval_on == "loss" or self.args.eval_on == "perplexity":
            initial_best = np.Inf
        else:
            initial_best = -np.Inf
        self.args.best = initial_best
        self.best = initial_best
def train(self, model, strategy, tokenizer, inputs=None, train_dataset=None, eval_dataset=None, evaluate_fn=None, verbose=False):
if inputs is None:
assert train_dataset is not None, "you have to pass either inputs or train_dataset"
else:
warnings.warn(
"Passing `inputs` as a keyword argument is deprecated. Use train_dataset and eval_dataset instead.",
FutureWarning,
)
if isinstance(inputs, tuple):
inputs = dictionize_t2t_dataset(*inputs)
if inputs is not None:
x_train, y_train = inputs["x_train"], inputs["y_train"]
num_train_examples = len(inputs["y_train"]["target_input_ids"])
train_dataset = tf.data.Dataset.from_tensor_slices((x_train, y_train))
else:
if hasattr(train_dataset, "num_examples"):
num_train_examples = train_dataset.num_examples
else:
num_train_examples = tf.data.experimental.cardinality(train_dataset).numpy()
self.logger.info(f"set random seed for everything with {self.args.seed}")
set_seed(self.args.seed)
global_batch_size = self.args.per_device_train_batch_size * strategy.num_replicas_in_sync
train_dataset = train_dataset.shuffle(buffer_size=self.args.seed).batch(global_batch_size)
train_dist_dataset = strategy.experimental_distribute_dataset(train_dataset)
# THERE WILL BE exceptions when switching to distributed_dataset when running on tpus if
# val_dist_dataset = strategy.experimental_distribute_dataset(eval_dataset)
train_length = math.ceil(num_train_examples / global_batch_size)
self.steps_per_epoch = train_length
if inputs is not None:
if self.args.do_eval:
assert "x_eval" in inputs and "y_eval" in inputs, "do_eval=True, and no validation data is found"
x_val, y_val = inputs["x_eval"], inputs["y_eval"]
eval_dataset = tf.data.Dataset.from_tensor_slices((x_val, y_val))
eval_dataset = eval_dataset.batch(self.args.eval_batch_size)
eval_steps = math.ceil(
len(inputs["y_eval"]["target_input_ids"]) / (self.args.eval_batch_size))
else:
if self.args.do_eval:
if hasattr(eval_dataset, "num_examples"):
eval_num_examples = eval_dataset.num_examples
else:
eval_num_examples = tf.data.experimental.cardinality(eval_dataset).numpy()
eval_steps = math.ceil(eval_num_examples / (self.args.eval_batch_size))
eval_dataset = eval_dataset.batch(self.args.eval_batch_size)
if verbose:
self.logger.info(model.summary())
# these are used for non-constant lr scheduler
if "num_train_epochs" in self.args.__dict__:
self.args.num_epochs_train = self.args.num_train_epochs
if "log_and_save_steps" in self.args.__dict__:
self.args.log_steps = self.args.log_and_save_steps
self.total_steps = self.steps_per_epoch * self.args.num_epochs_train
if "warmup_steps_or_ratio" in self.args.__dict__:
if self.args.warmup_steps_or_ratio <= 1 and self.args.warmup_steps_or_ratio > 0:
self.args.warmup_steps = int(self.total_steps * self.args.warmup_steps_or_ratio)
else:
self.args.warmup_steps = self.args.warmup_steps_or_ratio
else:
self.args.warmup_steps = int(self.total_steps * self.args.warmup_ratio)
self.warmup_steps = self.args.warmup_steps
write_args_enhance(self.args, logger=self.logger)
with strategy.scope():
optimizer = tf.keras.optimizers.Adam(lr=self.args.lr if self.scheduler.startswith("constant") else 0.0)
loss_fn = tf.keras.losses.SparseCategoricalCrossentropy(
from_logits=True, reduction=tf.keras.losses.Reduction.NONE
)
def compute_loss(labels, predictions):
per_example_loss = loss_fn(labels, predictions)
return tf.nn.compute_average_loss(per_example_loss, global_batch_size=global_batch_size)
def train_step(x_train, y_train):
with tf.GradientTape() as tape:
# here some changes has been made (compared to before commit `a07c58e` ) to fix a bug reported here: https://github.com/wangcongcong123/ttt/issues/2
# The following describes how this bug is fixed
# the compute_loss function in transformers:TFT5ForConditionalGeneration has already taken care of the loss computation (already averaged!!!!) that failed
# when switching to TPU, hence we re-compute it here using the returned logits from the model ready for backprop instead of using the internally calculated loss
outputs = model(inputs=x_train["source_input_ids"], attention_mask=x_train["source_attention_mask"],
decoder_attention_mask=x_train["target_attention_mask"],
labels=y_train["target_input_ids"], training=True, return_dict=True)
logits = outputs.logits
loss = compute_loss(tf.reshape(y_train["target_input_ids"], (-1, y_train["target_input_ids"].shape[-1])),
tf.reshape(logits, (-1, logits.shape[-1])))
gradients = tape.gradient(loss, model.trainable_variables)
optimizer.apply_gradients(zip(gradients, model.trainable_variables))
return loss
@tf.function
def distributed_train_step(x_train, y_train):
per_replica_losses = strategy.experimental_run_v2(train_step, args=(x_train, y_train,))
return strategy.reduce(tf.distribute.ReduceOp.SUM, per_replica_losses, axis=None)
# evaluate
def evaluate(steps, tag="epoch"):
assert tag in ["epoch", "global_step"]
gts = []
preds = []
for x_eval, y_eval in tqdm(eval_dataset, total=eval_steps, desc="evaluating..."):
predictions = model.generate(input_ids=x_eval["source_input_ids"],
attention_mask=x_eval["source_attention_mask"],
max_length=self.args.max_tgt_length)
pred = [tokenizer.decode(ids) for ids in predictions]
gt = [tokenizer.decode(ids) for ids in y_eval["target_input_ids"]]
# labels (not -100 replaced since it is not used to calculate loss here)
preds.extend(pred)
gts.extend(gt)
if self.eval_on == "bleu":
# bleu = 0
bleu = sacrebleu.corpus_bleu(preds, [gts])
eval_score = bleu.score
else:
eval_score = accuracy_score(gts, preds)
self.logger.info(f"val_cls_report: {classification_report(gts, preds, digits=4)}")
if self.use_tb:
self._tb_writer.add_scalar(f"val_{self.eval_on}_{tag}", eval_score, steps)
self.logger.info("\n")
self.logger.info(f"*******eval at {tag} = {steps} on validation dataset*********")
self.logger.info(f"val_{self.eval_on}: {eval_score}")
if self.eval_on == "acc" or self.eval_on == "bleu":
if eval_score >= self.best:
self.wait = 0
self.best = eval_score
self.logger.info(
f"so far the best check point at {tag}={steps} based on eval_on {self.eval_on}")
# self.save_ck(model, steps, tag, best_ck=True)
save_ck(self.args, self.logger, model, tokenizer=tokenizer, steps=steps,
tag=tag, best_ck=False, from_tf=True)
else:
self.wait += 1
else:
raise ValueError("not support yet")
self.logger.info(f"best so far({self.eval_on}): {self.best}")
self.logger.info(f"early stop count: {self.wait}/{self.patience}")
# self.save_ck(model, steps, tag)
save_ck(self.args, self.logger, model, tokenizer=tokenizer, steps=steps,
tag=tag, best_ck=False, from_tf=True)
if self.wait >= self.patience:
self.logger.info("run out of patience, early stop")
if self.use_tb:
self._tb_writer.close()
sys.exit(0)
def update_lr(global_step):
    """Manually step the optimizer's learning rate for the current step.

    Linear warmup up to ``self.warmup_steps``, then linear decay over the
    remaining steps, depending on the configured scheduler; the "constant"
    scheduler leaves the rate untouched. Mutates ``optimizer.learning_rate``
    in place via the Keras backend. (Already tested on TPU; ``global_step``
    is passed in dynamically.)
    """
    warming_up = global_step <= self.warmup_steps
    if warming_up:
        if self.scheduler in ("warmuplinear", "warmupcostant"):
            delta = self.lr_to_reach / self.warmup_steps
            K.set_value(optimizer.learning_rate, K.eval(optimizer.lr) + delta)
    elif self.scheduler in ("warmuplinear", "constantlinear"):
        delta = self.lr_to_reach / (self.total_steps - self.warmup_steps)
        K.set_value(optimizer.learning_rate, K.eval(optimizer.lr) - delta)
global_step = 0
early_exit = False
interval_loss = 0.0
interval_count = 0
for epoch in tqdm(range(self.args.num_epochs_train), desc="epochs"):
self.logger.info(f"start training at epoch = {epoch}")
self.logger.info(f"global train batch size = {global_batch_size}")
self.logger.info(f"using learning rate scheduler: {self.scheduler}")
self.logger.info(
f"num_train_examples: {num_train_examples}, total_steps: {self.total_steps}, steps_per_epoch: {self.steps_per_epoch}")
if self.scheduler != "constant":
self.logger.info(f"warmup_steps:{self.warmup_steps}")
pbar = tqdm(enumerate(train_dist_dataset), total=train_length)
for step, (x_train, y_train) in pbar:
# learning rate scheduler
update_lr(global_step)
loss = distributed_train_step(x_train, y_train)
interval_loss += loss.numpy()
interval_count += 1
global_step += 1
pbar.set_description(f"training - epoch {epoch + 1}/{self.args.num_epochs_train} iter {step}: train loss {loss.numpy():.5f}. lr {optimizer.lr.numpy():e}")
if self.args.log_steps != -1 and global_step % self.args.log_steps == 0:
if self.use_tb:
self._tb_writer.add_scalar("train_loss_global_step", interval_loss / interval_count,
global_step)
self._tb_writer.add_scalar("train_lr_global_step", optimizer.lr.numpy(), global_step)
if self.args.do_eval:
if evaluate_fn is not None and eval_dataset is not None:
eval_dict = evaluate_fn(self.args, self.logger, model, tokenizer, eval_dataset, steps=global_step, tag="global_step", eval_length=eval_steps)
if self._tb_writer:
if "eval_scores" in eval_dict:
for key, value in eval_dict["eval_scores"].items():
self._tb_writer.add_scalar(f"eval_{key}_global_step", value, global_step)
if "is_early_stop" in eval_dict and eval_dict["is_early_stop"]:
self.logger.info(f"run out of patience at global step = {global_step}, early stop")
if self._tb_writer:
self._tb_writer.close()
early_exit = True
break
else:
evaluate(global_step, tag="global_step")
self.logger.info(f"train loss at global_step {global_step}: {interval_loss / interval_count}")
interval_loss = 0.0
interval_count = 0
if early_exit:
break
train_loss = interval_loss / interval_count
interval_loss = 0.0
interval_count = 0
if self.args.log_steps == -1:
if self.args.do_eval:
if evaluate_fn is not None and eval_dataset is not None:
eval_dict = evaluate_fn(self.args, self.logger, model, tokenizer, eval_dataset, steps=epoch + 1, tag="epoch", eval_length=eval_steps)
if self._tb_writer:
if "eval_scores" in eval_dict:
for key, value in eval_dict["eval_scores"].items():
self._tb_writer.add_scalar(f"eval_{key}_epoch", value, epoch + 1)
if "is_early_stop" in eval_dict and eval_dict["is_early_stop"]:
self.logger.info(f"run out of patience at epoch = {epoch + 1}, early stop")
if self._tb_writer:
self._tb_writer.close()
break
else:
evaluate(epoch + 1, tag="epoch")
if self.use_tb:
self._tb_writer.add_scalar("train_loss_epoch", train_loss,
global_step)
self._tb_writer.add_scalar("train_lr_epoch", optimizer.lr.numpy(), global_step)
self.logger.info(f"train loss at end of epoch {epoch + 1}: {train_loss}")
if not self.args.do_eval:
# if do not do evaluate, the checkpoint at the end of epoch needs to be saved
# self.save_ck(model, epoch + 1, tag="epoch")
save_ck(self.args, self.logger, model, tokenizer=tokenizer, steps=epoch + 1,
tag="epoch", best_ck=False, from_tf=True)
if self.use_tb:
self._tb_writer.close()
|
[
"tensorflow.nn.compute_average_loss",
"tensorflow.keras.losses.SparseCategoricalCrossentropy",
"tqdm.tqdm",
"math.ceil",
"sklearn.metrics.accuracy_score",
"tensorflow.reshape",
"tensorflow.data.Dataset.from_tensor_slices",
"sklearn.metrics.classification_report",
"sacrebleu.corpus_bleu",
"tensorflow.data.experimental.cardinality",
"keras.backend.eval",
"warnings.warn",
"tensorflow.GradientTape",
"sys.exit"
] |
[((3310, 3359), 'math.ceil', 'math.ceil', (['(num_train_examples / global_batch_size)'], {}), '(num_train_examples / global_batch_size)\n', (3319, 3359), False, 'import math\n'), ((1936, 2075), 'warnings.warn', 'warnings.warn', (['"""Passing `inputs` as a keyword argument is deprecated. Use train_dataset and eval_dataset instead."""', 'FutureWarning'], {}), "(\n 'Passing `inputs` as a keyword argument is deprecated. Use train_dataset and eval_dataset instead.'\n , FutureWarning)\n", (1949, 2075), False, 'import warnings\n'), ((2409, 2463), 'tensorflow.data.Dataset.from_tensor_slices', 'tf.data.Dataset.from_tensor_slices', (['(x_train, y_train)'], {}), '((x_train, y_train))\n', (2443, 2463), True, 'import tensorflow as tf\n'), ((5551, 5661), 'tensorflow.keras.losses.SparseCategoricalCrossentropy', 'tf.keras.losses.SparseCategoricalCrossentropy', ([], {'from_logits': '(True)', 'reduction': 'tf.keras.losses.Reduction.NONE'}), '(from_logits=True, reduction=\n tf.keras.losses.Reduction.NONE)\n', (5596, 5661), True, 'import tensorflow as tf\n'), ((3680, 3730), 'tensorflow.data.Dataset.from_tensor_slices', 'tf.data.Dataset.from_tensor_slices', (['(x_val, y_val)'], {}), '((x_val, y_val))\n', (3714, 3730), True, 'import tensorflow as tf\n'), ((4259, 4315), 'math.ceil', 'math.ceil', (['(eval_num_examples / self.args.eval_batch_size)'], {}), '(eval_num_examples / self.args.eval_batch_size)\n', (4268, 4315), False, 'import math\n'), ((5826, 5912), 'tensorflow.nn.compute_average_loss', 'tf.nn.compute_average_loss', (['per_example_loss'], {'global_batch_size': 'global_batch_size'}), '(per_example_loss, global_batch_size=\n global_batch_size)\n', (5852, 5912), True, 'import tensorflow as tf\n'), ((7859, 7917), 'tqdm.tqdm', 'tqdm', (['eval_dataset'], {'total': 'eval_steps', 'desc': '"""evaluating..."""'}), "(eval_dataset, total=eval_steps, desc='evaluating...')\n", (7863, 7917), False, 'from tqdm import tqdm\n'), ((5976, 5993), 'tensorflow.GradientTape', 'tf.GradientTape', ([], 
{}), '()\n', (5991, 5993), True, 'import tensorflow as tf\n'), ((8619, 8654), 'sacrebleu.corpus_bleu', 'sacrebleu.corpus_bleu', (['preds', '[gts]'], {}), '(preds, [gts])\n', (8640, 8654), False, 'import sacrebleu\n'), ((8754, 8780), 'sklearn.metrics.accuracy_score', 'accuracy_score', (['gts', 'preds'], {}), '(gts, preds)\n', (8768, 8780), False, 'from sklearn.metrics import accuracy_score, classification_report\n'), ((10542, 10553), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (10550, 10553), False, 'import sys\n'), ((2652, 2699), 'tensorflow.data.experimental.cardinality', 'tf.data.experimental.cardinality', (['train_dataset'], {}), '(train_dataset)\n', (2684, 2699), True, 'import tensorflow as tf\n'), ((6999, 7088), 'tensorflow.reshape', 'tf.reshape', (["y_train['target_input_ids']", "(-1, y_train['target_input_ids'].shape[-1])"], {}), "(y_train['target_input_ids'], (-1, y_train['target_input_ids'].\n shape[-1]))\n", (7009, 7088), True, 'import tensorflow as tf\n'), ((7125, 7167), 'tensorflow.reshape', 'tf.reshape', (['logits', '(-1, logits.shape[-1])'], {}), '(logits, (-1, logits.shape[-1]))\n', (7135, 7167), True, 'import tensorflow as tf\n'), ((4175, 4221), 'tensorflow.data.experimental.cardinality', 'tf.data.experimental.cardinality', (['eval_dataset'], {}), '(eval_dataset)\n', (4207, 4221), True, 'import tensorflow as tf\n'), ((8837, 8880), 'sklearn.metrics.classification_report', 'classification_report', (['gts', 'preds'], {'digits': '(4)'}), '(gts, preds, digits=4)\n', (8858, 8880), False, 'from sklearn.metrics import accuracy_score, classification_report\n'), ((10979, 10999), 'keras.backend.eval', 'K.eval', (['optimizer.lr'], {}), '(optimizer.lr)\n', (10985, 10999), True, 'from keras import backend as K\n'), ((11273, 11293), 'keras.backend.eval', 'K.eval', (['optimizer.lr'], {}), '(optimizer.lr)\n', (11279, 11293), True, 'from keras import backend as K\n')]
|
import tensorflow as tf
import os
import numpy as np
import time
def get_timestamp(name):
    """Return *name* suffixed with a filesystem-safe current timestamp.

    The suffix is ``time.asctime()`` with spaces turned into underscores
    and colons stripped, e.g. ``tb_logs_at_Mon_Jan_01_120000_2024``.
    """
    stamp = time.asctime()
    for old, new in ((' ', '_'), (':', '')):
        stamp = stamp.replace(old, new)
    return f'{name}_at_{stamp}'
def get_callbacks(config, X_train):
    """Build the Keras callbacks used during training.

    Creates (1) a TensorBoard callback logging to a uniquely-named run
    directory (and writes a sample-image summary there), (2) an
    EarlyStopping callback, and (3) a ModelCheckpoint callback that keeps
    only the best model.

    Args:
        config: dict with 'logs', 'params' and 'artifacts' sections.
        X_train: training images; entries 10:30 are logged to TensorBoard
            (assumed reshapeable to (-1, 28, 28, 1) — TODO confirm).

    Returns:
        list: [tensorboard_cb, early_stopping_cb, checkpoint_cb]
    """
    logs = config['logs']
    unique_dir_name = get_timestamp('tb_logs')
    # BUG FIX: the original read `logs[TENSORBOARD_ROOT_LOG_DIR]`, indexing
    # with the very variable being assigned on the same line (NameError at
    # runtime). Use the config key instead.
    # NOTE(review): confirm the exact key name against the config file.
    TENSORBOARD_ROOT_LOG_DIR = os.path.join(
        logs['logs_dir'], logs['tensorboard_root_log_dir'], unique_dir_name)
    os.makedirs(TENSORBOARD_ROOT_LOG_DIR, exist_ok=True)
    tensorboard_cb = tf.keras.callbacks.TensorBoard(log_dir=TENSORBOARD_ROOT_LOG_DIR)
    # BUG FIX: tf.summary.create_file_writer's parameter is `logdir`, not
    # `log_dir`; the original keyword argument raised a TypeError.
    file_writer = tf.summary.create_file_writer(TENSORBOARD_ROOT_LOG_DIR)
    with file_writer.as_default():
        images = np.reshape(X_train[10:30], (-1, 28, 28, 1))
        tf.summary.image('20 handwritten digit samples', images, max_outputs=25, step=0)
    params = config['params']
    early_stopping_cb = tf.keras.callbacks.EarlyStopping(patience=params['patience'],
                                                      restore_best_weights=params['restore_best_weights'])
    artifacts = config['artifacts']
    CKPT_dir = os.path.join(artifacts['artifacts_dir'], artifacts['CHECKPOINT_DIR'])
    os.makedirs(CKPT_dir, exist_ok=True)
    CKPT_path = os.path.join(CKPT_dir, 'model_ckpt.h5')
    checkpoint_cb = tf.keras.callbacks.ModelCheckpoint(CKPT_path, save_best_only=True)
    return [tensorboard_cb, early_stopping_cb, checkpoint_cb]
|
[
"time.asctime",
"tensorflow.summary.image",
"os.makedirs",
"tensorflow.keras.callbacks.ModelCheckpoint",
"numpy.reshape",
"tensorflow.summary.create_file_writer",
"tensorflow.keras.callbacks.TensorBoard",
"os.path.join",
"tensorflow.keras.callbacks.EarlyStopping"
] |
[((366, 445), 'os.path.join', 'os.path.join', (["logs['logs_dir']", 'logs[TENSORBOARD_ROOT_LOG_DIR]', 'unique_dir_name'], {}), "(logs['logs_dir'], logs[TENSORBOARD_ROOT_LOG_DIR], unique_dir_name)\n", (378, 445), False, 'import os\n'), ((451, 503), 'os.makedirs', 'os.makedirs', (['TENSORBOARD_ROOT_LOG_DIR'], {'exist_ok': '(True)'}), '(TENSORBOARD_ROOT_LOG_DIR, exist_ok=True)\n', (462, 503), False, 'import os\n'), ((526, 590), 'tensorflow.keras.callbacks.TensorBoard', 'tf.keras.callbacks.TensorBoard', ([], {'log_dir': 'TENSORBOARD_ROOT_LOG_DIR'}), '(log_dir=TENSORBOARD_ROOT_LOG_DIR)\n', (556, 590), True, 'import tensorflow as tf\n'), ((609, 672), 'tensorflow.summary.create_file_writer', 'tf.summary.create_file_writer', ([], {'log_dir': 'TENSORBOARD_ROOT_LOG_DIR'}), '(log_dir=TENSORBOARD_ROOT_LOG_DIR)\n', (638, 672), True, 'import tensorflow as tf\n'), ((914, 1032), 'tensorflow.keras.callbacks.EarlyStopping', 'tf.keras.callbacks.EarlyStopping', ([], {'patience': "params['patience']", 'restore_best_weights': "params['restore_best_weights']"}), "(patience=params['patience'],\n restore_best_weights=params['restore_best_weights'])\n", (946, 1032), True, 'import tensorflow as tf\n'), ((1138, 1207), 'os.path.join', 'os.path.join', (["artifacts['artifacts_dir']", "artifacts['CHECKPOINT_DIR']"], {}), "(artifacts['artifacts_dir'], artifacts['CHECKPOINT_DIR'])\n", (1150, 1207), False, 'import os\n'), ((1212, 1248), 'os.makedirs', 'os.makedirs', (['CKPT_dir'], {'exist_ok': '(True)'}), '(CKPT_dir, exist_ok=True)\n', (1223, 1248), False, 'import os\n'), ((1265, 1304), 'os.path.join', 'os.path.join', (['CKPT_dir', '"""model_ckpt.h5"""'], {}), "(CKPT_dir, 'model_ckpt.h5')\n", (1277, 1304), False, 'import os\n'), ((1325, 1391), 'tensorflow.keras.callbacks.ModelCheckpoint', 'tf.keras.callbacks.ModelCheckpoint', (['CKPT_path'], {'save_best_only': '(True)'}), '(CKPT_path, save_best_only=True)\n', (1359, 1391), True, 'import tensorflow as tf\n'), ((726, 769), 'numpy.reshape', 
'np.reshape', (['X_train[10:30]', '(-1, 28, 28, 1)'], {}), '(X_train[10:30], (-1, 28, 28, 1))\n', (736, 769), True, 'import numpy as np\n'), ((778, 863), 'tensorflow.summary.image', 'tf.summary.image', (['"""20 handwritten digit samples"""', 'images'], {'max_outputs': '(25)', 'step': '(0)'}), "('20 handwritten digit samples', images, max_outputs=25, step=0\n )\n", (794, 863), True, 'import tensorflow as tf\n'), ((108, 122), 'time.asctime', 'time.asctime', ([], {}), '()\n', (120, 122), False, 'import time\n')]
|
from dash import html
import dash_bootstrap_components as dbc
import pandas as pd
import json
# Reading accidents, casualty and vehicles data from last 5 years
# low_memory=False reads each file in one pass so mixed-type columns get a
# single consistent dtype instead of per-chunk inference warnings.
dfa = pd.read_csv('data/dft-road-casualty-statistics-accident-last-5-years.csv', low_memory=False)
dfc = pd.read_csv('data/dft-road-casualty-statistics-casualty-last-5-years.csv', low_memory=False)
dfv = pd.read_csv('data/dft-road-casualty-statistics-vehicle-last-5-years.csv', low_memory=False)
# Reading Road safety Data guide (field/value code descriptions)
road_guide = pd.read_excel('data/Road-Safety-Open-Dataset-Data-Guide.xlsx')
# Loading UK districts geojson to draw choropleth map.
# BUG FIX: the original used a bare open() whose file handle was never
# closed; a context manager releases it deterministically.
with open("data/uk_districts.geojson", "r") as geojson_file:
    uk_cities = json.load(geojson_file)
# Static landing-page layout for the Dash app: title, intro text, two link
# cards (dataset source and repository), and a footer navbar crediting the
# authors.
layout = html.Div([
    dbc.Container([
        # Page title
        dbc.Row([
            dbc.Col(html.H2("Welcome to the Hawkeye Visualization tool of Group 45", className="text-center")
                    , className="mb-5 mt-5")
        ]),
        # Short description of the dataset being visualized
        dbc.Row([
            dbc.Col(html.H5(children='A dataset about the Road accidents in UK in last 5 years has been analysed in this web tool. The link for the dataset you can find below.'
                            )
                    , className="mb-4")
        ]),
        # Overview of the tool's two tabs (Home / Explore)
        dbc.Row([
            dbc.Col(html.H5(children='The tool consists of two main pages: Home tab '
                                     'which is an Introduction page to the Group 45 visualization tool and '
                                     'Explore tab, which gives the oppurtunity to explore the dataset and '
                                     'find interesting patterns')
                    , className="mb-5")
        ]),
        # Two side-by-side cards linking to the raw data and the source code
        dbc.Row([
            dbc.Col(dbc.Card(children=[html.H3(children='Get the original datasets used in this project',
                                               className="text-center"),
                                       dbc.Button("UK-Road-Safety",
                                                  href="https://data.gov.uk/dataset/cb7ae6f0-4be6-4935-9277-47e5ce24a11f/road-safety-data",
                                                  color="primary",
                                                  target="_blank",
                                                  className="mt-3")
                                       ],
                             body=True, color="dark", outline=True)
                    , width=6, className="mb-4"),
            dbc.Col(dbc.Card(children=[html.H3(children='You can find the code for this project in',
                                               className="text-center"),
                                       dbc.Button("GitHub",
                                                  href="https://github.com/nikhil-96/Hawkeye_Viz",
                                                  color="primary",
                                                  target="_blank",
                                                  className="mt-3"),
                                       ],
                             body=True, color="dark", outline=True)
                    , width=6, className="mb-4")
        ], className="mb-5")
    ]),
    # Footer navbar with author credits.
    # NOTE(review): `no_gutters` was removed in dash-bootstrap-components
    # v1.0 (replaced by gutter utility classes) — confirm the pinned version.
    dbc.Navbar(
        dbc.Container(
            [
                html.A([
                    # Use row and col to control vertical alignment of logo / brand
                    dbc.Row(
                        [
                            dbc.Col(html.H6(children='Created By: '), className="mt-3", width=2),
                            dbc.Col(html.H6(children='<NAME>'), className="mt-3", width=2),
                            dbc.Col(html.H6(children='<NAME>'), className="mt-3", width=2),
                            dbc.Col(html.H6(children='<NAME>'), className="mt-3", width=2),
                            dbc.Col(html.H6(children='<NAME>'), className="mt-3", width=2)
                        ],
                        align="center",
                        no_gutters=True,
                    ),
                    dbc.Row(
                        [
                            dbc.Col(html.H6(children='Developed at Eindhoven University of Technology TU/e'), className="mt-3")
                        ],
                        align="center",
                        no_gutters=True,
                    )
                ])
            ]
        ),
        color="dark",
        dark=True,
        className="mb-4",
    )
])
|
[
"dash.html.H2",
"pandas.read_csv",
"dash_bootstrap_components.Button",
"pandas.read_excel",
"dash.html.H6",
"dash.html.H5",
"dash.html.H3"
] |
[((168, 264), 'pandas.read_csv', 'pd.read_csv', (['"""data/dft-road-casualty-statistics-accident-last-5-years.csv"""'], {'low_memory': '(False)'}), "('data/dft-road-casualty-statistics-accident-last-5-years.csv',\n low_memory=False)\n", (179, 264), True, 'import pandas as pd\n'), ((267, 363), 'pandas.read_csv', 'pd.read_csv', (['"""data/dft-road-casualty-statistics-casualty-last-5-years.csv"""'], {'low_memory': '(False)'}), "('data/dft-road-casualty-statistics-casualty-last-5-years.csv',\n low_memory=False)\n", (278, 363), True, 'import pandas as pd\n'), ((366, 461), 'pandas.read_csv', 'pd.read_csv', (['"""data/dft-road-casualty-statistics-vehicle-last-5-years.csv"""'], {'low_memory': '(False)'}), "('data/dft-road-casualty-statistics-vehicle-last-5-years.csv',\n low_memory=False)\n", (377, 461), True, 'import pandas as pd\n'), ((504, 566), 'pandas.read_excel', 'pd.read_excel', (['"""data/Road-Safety-Open-Dataset-Data-Guide.xlsx"""'], {}), "('data/Road-Safety-Open-Dataset-Data-Guide.xlsx')\n", (517, 566), True, 'import pandas as pd\n'), ((762, 856), 'dash.html.H2', 'html.H2', (['"""Welcome to the Hawkeye Visualization tool of Group 45"""'], {'className': '"""text-center"""'}), "('Welcome to the Hawkeye Visualization tool of Group 45', className=\n 'text-center')\n", (769, 856), False, 'from dash import html\n'), ((947, 1114), 'dash.html.H5', 'html.H5', ([], {'children': '"""A dataset about the Road accidents in UK in last 5 years has been analysed in this web tool. The link for the dataset you can find below."""'}), "(children=\n 'A dataset about the Road accidents in UK in last 5 years has been analysed in this web tool. 
The link for the dataset you can find below.'\n )\n", (954, 1114), False, 'from dash import html\n'), ((1238, 1476), 'dash.html.H5', 'html.H5', ([], {'children': '"""The tool consists of two main pages: Home tab which is an Introduction page to the Group 45 visualization tool and Explore tab, which gives the oppurtunity to explore the dataset and find interesting patterns"""'}), "(children=\n 'The tool consists of two main pages: Home tab which is an Introduction page to the Group 45 visualization tool and Explore tab, which gives the oppurtunity to explore the dataset and find interesting patterns'\n )\n", (1245, 1476), False, 'from dash import html\n'), ((1697, 1792), 'dash.html.H3', 'html.H3', ([], {'children': '"""Get the original datasets used in this project"""', 'className': '"""text-center"""'}), "(children='Get the original datasets used in this project',\n className='text-center')\n", (1704, 1792), False, 'from dash import html\n'), ((1876, 2056), 'dash_bootstrap_components.Button', 'dbc.Button', (['"""UK-Road-Safety"""'], {'href': '"""https://data.gov.uk/dataset/cb7ae6f0-4be6-4935-9277-47e5ce24a11f/road-safety-data"""', 'color': '"""primary"""', 'target': '"""_blank"""', 'className': '"""mt-3"""'}), "('UK-Road-Safety', href=\n 'https://data.gov.uk/dataset/cb7ae6f0-4be6-4935-9277-47e5ce24a11f/road-safety-data'\n , color='primary', target='_blank', className='mt-3')\n", (1886, 2056), True, 'import dash_bootstrap_components as dbc\n'), ((2447, 2538), 'dash.html.H3', 'html.H3', ([], {'children': '"""You can find the code for this project in"""', 'className': '"""text-center"""'}), "(children='You can find the code for this project in', className=\n 'text-center')\n", (2454, 2538), False, 'from dash import html\n'), ((2621, 2747), 'dash_bootstrap_components.Button', 'dbc.Button', (['"""GitHub"""'], {'href': '"""https://github.com/nikhil-96/Hawkeye_Viz"""', 'color': '"""primary"""', 'target': '"""_blank"""', 'className': '"""mt-3"""'}), "('GitHub', 
href='https://github.com/nikhil-96/Hawkeye_Viz', color\n ='primary', target='_blank', className='mt-3')\n", (2631, 2747), True, 'import dash_bootstrap_components as dbc\n'), ((3395, 3427), 'dash.html.H6', 'html.H6', ([], {'children': '"""Created By: """'}), "(children='Created By: ')\n", (3402, 3427), False, 'from dash import html\n'), ((3493, 3519), 'dash.html.H6', 'html.H6', ([], {'children': '"""<NAME>"""'}), "(children='<NAME>')\n", (3500, 3519), False, 'from dash import html\n'), ((3585, 3611), 'dash.html.H6', 'html.H6', ([], {'children': '"""<NAME>"""'}), "(children='<NAME>')\n", (3592, 3611), False, 'from dash import html\n'), ((3677, 3703), 'dash.html.H6', 'html.H6', ([], {'children': '"""<NAME>"""'}), "(children='<NAME>')\n", (3684, 3703), False, 'from dash import html\n'), ((3769, 3795), 'dash.html.H6', 'html.H6', ([], {'children': '"""<NAME>"""'}), "(children='<NAME>')\n", (3776, 3795), False, 'from dash import html\n'), ((4046, 4118), 'dash.html.H6', 'html.H6', ([], {'children': '"""Developed at Eindhoven University of Technology TU/e"""'}), "(children='Developed at Eindhoven University of Technology TU/e')\n", (4053, 4118), False, 'from dash import html\n')]
|