| code | apis | extract_api |
|---|---|---|
import os
import runpy
import subprocess
from setuptools import setup, find_packages
def get_version_from_pyfile(version_file="gitlab_registry_cleanup/_version.py"):
file_globals = runpy.run_path(version_file)
return file_globals["__version__"]
def get_install_requires_from_requirements(requirements_filename="requirements.txt"):
    requirements = []  # fall back to no pinned requirements if the file is unreadable
    try:
        with open(requirements_filename, "r", encoding="utf-8") as requirements_file:
            requirements = requirements_file.readlines()
    except OSError:
        import logging
        logging.warning("Could not read the requirements file.")
    return requirements
def get_long_description_from_readme(readme_filename="README.md"):
rst_filename = "{}.rst".format(os.path.splitext(os.path.basename(readme_filename))[0])
created_tmp_rst = False
if not os.path.isfile(rst_filename):
try:
subprocess.check_call(["pandoc", readme_filename, "-t", "rst", "-o", rst_filename])
created_tmp_rst = True
except (OSError, subprocess.CalledProcessError):
import logging
logging.warning("Could not convert the readme file to rst.")
long_description = None
if os.path.isfile(rst_filename):
with open(rst_filename, "r", encoding="utf-8") as readme_file:
long_description = readme_file.read()
if created_tmp_rst:
os.remove(rst_filename)
return long_description
version = get_version_from_pyfile()
long_description = get_long_description_from_readme()
install_requires = get_install_requires_from_requirements()
setup(
name="gitlab-registry-cleanup",
version=version,
packages=find_packages(),
python_requires="~=3.3",
install_requires=install_requires,
entry_points={"console_scripts": ["gitlab-registry-cleanup = gitlab_registry_cleanup.cli:main"]},
author="<NAME>",
author_email="<EMAIL>",
description="A tool to clean up untagged GitLab registry images.",
long_description=long_description,
license="MIT",
url="https://github.com/sciapp/gitlab-registry-cleanup",
keywords=["Git", "GitLab", "Docker", "Registry", "Cleanup"],
classifiers=[
"Development Status :: 5 - Production/Stable",
"Environment :: Console",
"Intended Audience :: System Administrators",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3 :: Only",
"Topic :: System :: Systems Administration",
],
)
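# --- Illustrative sketch (added; not part of the original setup.py) ---
# runpy.run_path executes a file and returns its resulting globals, which is
# how get_version_from_pyfile reads __version__ without importing the package
# (and therefore without needing its dependencies at build time):
#
#   >>> import runpy
#   >>> file_globals = runpy.run_path("gitlab_registry_cleanup/_version.py")
#   >>> file_globals["__version__"]  # the version string, e.g. "1.0.0"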
|
[
"os.remove",
"subprocess.check_call",
"os.path.basename",
"logging.warning",
"os.path.isfile",
"runpy.run_path",
"setuptools.find_packages"
] |
[((187, 215), 'runpy.run_path', 'runpy.run_path', (['version_file'], {}), '(version_file)\n', (201, 215), False, 'import runpy\n'), ((1194, 1222), 'os.path.isfile', 'os.path.isfile', (['rst_filename'], {}), '(rst_filename)\n', (1208, 1222), False, 'import os\n'), ((827, 855), 'os.path.isfile', 'os.path.isfile', (['rst_filename'], {}), '(rst_filename)\n', (841, 855), False, 'import os\n'), ((1377, 1400), 'os.remove', 'os.remove', (['rst_filename'], {}), '(rst_filename)\n', (1386, 1400), False, 'import os\n'), ((1659, 1674), 'setuptools.find_packages', 'find_packages', ([], {}), '()\n', (1672, 1674), False, 'from setuptools import setup, find_packages\n'), ((547, 603), 'logging.warning', 'logging.warning', (['"""Could not read the requirements file."""'], {}), "('Could not read the requirements file.')\n", (562, 603), False, 'import logging\n'), ((882, 969), 'subprocess.check_call', 'subprocess.check_call', (["['pandoc', readme_filename, '-t', 'rst', '-o', rst_filename]"], {}), "(['pandoc', readme_filename, '-t', 'rst', '-o',\n rst_filename])\n", (903, 969), False, 'import subprocess\n'), ((749, 782), 'os.path.basename', 'os.path.basename', (['readme_filename'], {}), '(readme_filename)\n', (765, 782), False, 'import os\n'), ((1098, 1158), 'logging.warning', 'logging.warning', (['"""Could not convert the readme file to rst."""'], {}), "('Could not convert the readme file to rst.')\n", (1113, 1158), False, 'import logging\n')]
|
import pandas as pd
from datetime import datetime as dt
def validade(df):
    # "validade" = validity: classify each row's CEBAS certification status
    result = []
    hoje = dt.now()  # "hoje" = today
list_of_etapas_access = ['ANÁLISE TÉCNICA',
'ANÁLISE TECNICA - CGCEB',
'ANÁLISE TECNICA - CCEB',
'EM DILIGÊNCIA',
'AGUARDANDO ANÁLISE',
'AGUARDANDO MANIFESTAÇÃO',
'AGUARDANDO MANIFESTAÇÃO - MEC',
'AGUARDANDO MANIFESTAÇÃO - MS',
'APRECIAÇÃO',
'APROVAÇÃO']
list_of_etapas_json = ['ANALISE_MACRO',
'VALIDACAO_DE_DOCUMENTOS',
'APROVACAO_CGCEB',
'RESPONDER_DILIGENCIA',
'DILIGENCIA',
'APRECIAR_DOCUMENTO_DE_ANALISE_CGCEB',
'VALIDACAO_PREVIA',
'VALIDAR_DECISAO_E_ASSINAR_PORTARIA',
'ELABORAR_PARECER_CONCLUSIVO',
'ELABORAR_SOLICITACAO_DE_MANIFESTACAO',
'MEC_ELABORAR_MANIFESTACAO',
'ELABORAR_MINUTA_NOTA_TECNICAPARECER_DE_ENCAMINHAMENTO',
'SAUDE_ELABORAR_MANIFESTACAO']
    for _, row in df.iterrows():
        data_fim = row['DT_FIM_CERTIFICACAO_ATUAL']
        # data_fim may be '' (set upstream when no end date exists), so guard
        # the comparison against non-Timestamp values
        if isinstance(data_fim, pd.Timestamp) and data_fim >= hoje:
result.append("Vigente")
else:
            if row['TIPO_PROCESSO'] == 'Renovação' and (
                    row['FASE_PROCESSO'] in list_of_etapas_json
                    or row['FASE_PROCESSO'] in list_of_etapas_access):
                result.append("Válida")
            else:
                result.append("Sem Cebas")
return result
def ultimo_processo(info, coluna):
    # info is sorted by DT_PROTOCOLO descending, so the first row holds the
    # most recent process for the CNPJ
    return info[coluna].iloc[0]
def listagem_de_entidades(df):
rows = []
print(f"{len(df['CNPJ'])} processos detectados")
print(f"{len(set(df['CNPJ']))} CNPJs únicos detectados")
for cnpj in set(df['CNPJ']):
info_cnpj = df[df['CNPJ'] == cnpj].sort_values(by='DT_PROTOCOLO', ascending=False)
protocolo = ultimo_processo(info_cnpj, 'PROTOCOLO')
data = ultimo_processo(info_cnpj, 'DT_PROTOCOLO')
tipo = ultimo_processo(info_cnpj, 'TIPO_PROCESSO')
etapa = ultimo_processo(info_cnpj, 'FASE_PROCESSO')
nome = ultimo_processo(info_cnpj, 'ENTIDADE')
dates = info_cnpj['DT_FIM_CERTIFICACAO_ATUAL']
try:
data_fim = max(d for d in dates if isinstance(d, pd.Timestamp))
except ValueError:
data_fim = ''
rows.append({
"CNPJ": cnpj,
"ENTIDADE": nome,
"PROTOCOLO_PROCESSO_MAIS_RECENTE": protocolo,
"DT_PROTOCOLO": data,
"TIPO_PROCESSO": tipo,
"FASE_PROCESSO": etapa,
"DT_FIM_CERTIFICACAO_ATUAL": data_fim
})
    print(f'{len(rows)} entidades cadastradas detectadas')
return_df = pd.DataFrame(rows)
return return_df
df = pd.read_excel('input/processos_cebas.xlsx')
sorted_df = df.sort_values(by=['CNPJ', 'DT_FIM_CERTIFICACAO_ATUAL'])
cnpj_df = listagem_de_entidades(sorted_df)
cnpj_df['STATUS_CEBAS'] = validade(cnpj_df)
cnpj_df.to_excel('output/status_cebas.xlsx', index=False)
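# --- Hedged, self-contained sketch (illustrative only; the column names match
# the real input, the values are made up) of how validade() classifies rows:
#
#   example_df = pd.DataFrame({
#       'DT_FIM_CERTIFICACAO_ATUAL': [pd.Timestamp('2099-01-01'),
#                                     pd.Timestamp('2000-01-01')],
#       'TIPO_PROCESSO': ['Concessão', 'Renovação'],
#       'FASE_PROCESSO': ['APRECIAÇÃO', 'ANALISE_MACRO'],
#   })
#   validade(example_df)  # -> ['Vigente', 'Válida']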
|
[
"pandas.read_excel",
"datetime.datetime.now",
"pandas.DataFrame"
] |
[((3218, 3261), 'pandas.read_excel', 'pd.read_excel', (['"""input/processos_cebas.xlsx"""'], {}), "('input/processos_cebas.xlsx')\n", (3231, 3261), True, 'import pandas as pd\n'), ((104, 112), 'datetime.datetime.now', 'dt.now', ([], {}), '()\n', (110, 112), True, 'from datetime import datetime as dt\n'), ((3171, 3189), 'pandas.DataFrame', 'pd.DataFrame', (['rows'], {}), '(rows)\n', (3183, 3189), True, 'import pandas as pd\n')]
|
#!/usr/bin/python
# -*- encoding: utf-8 -*-
import os
import psutil
import pyexcel as pe
from pyexcel_io.exceptions import IntegerAccuracyLossError
from nose import SkipTest
from nose.tools import eq_, raises
IN_TRAVIS = "TRAVIS" in os.environ
def test_issue_10():
test_file_name = "test_issue_10.ods"
from pyexcel_ods3 import save_data
content = {"test": [[1, 2]]}
save_data(test_file_name, content)
save_data(test_file_name, content)
assert os.path.exists(test_file_name)
assert os.path.exists(test_file_name + ".bak") is False
os.unlink(test_file_name)
row_max = 2
col_max = 2
def data_gen():
    # yield row_max // 2 rows, each mixing string cells with integer cells
    for row in range(row_max // 2):
        tmp = []
        for col in range(col_max):
            tmp.append("Row: %d Col: %d" % (row, col))
        for col in range(col_max):
            tmp.append(row + col)
        yield tmp
def test_issue_11():
test_file = "test_file.ods"
from pyexcel_ods3 import save_data
save_data(test_file, {"generator": data_gen()})
os.unlink(test_file)
def test_issue_8():
from pyexcel_ods3 import get_data
test_file = "12_day_as_time.ods"
data = get_data(get_fixtures(test_file), skip_empty_rows=True)
eq_(data["Sheet1"][0][0].days, 12)
def test_issue_83_ods_file_handle():
# this proves that odfpy
# does not leave a file handle open at all
proc = psutil.Process()
test_file = get_fixtures("12_day_as_time.ods")
open_files_l1 = proc.open_files()
# start with a csv file
data = pe.iget_array(file_name=test_file, library="pyexcel-ods3")
open_files_l2 = proc.open_files()
delta = len(open_files_l2) - len(open_files_l1)
    # no extra file handle should be open yet (iget_array is lazy)
assert delta == 0
    # the file handle gets opened when we run through
    # the generator
list(data)
open_files_l3 = proc.open_files()
delta = len(open_files_l3) - len(open_files_l1)
    # still no lingering open file handle
assert delta == 0
# free the fish
pe.free_resources()
open_files_l4 = proc.open_files()
    # this confirms that no file handles remain open
eq_(open_files_l1, open_files_l4)
def test_issue_23():
if not IN_TRAVIS:
raise SkipTest()
url = (
"https://github.com/pyexcel/pyexcel-ods3/"
+ "raw/master/tests/fixtures/multilineods.ods"
)
pe.get_book(url=url)
def test_issue_30():
test_file = "issue_30.ods"
sheet = pe.Sheet()
sheet[0, 0] = 999999999999999
sheet.save_as(test_file)
sheet2 = pe.get_sheet(file_name=test_file)
eq_(sheet[0, 0], sheet2[0, 0])
os.unlink(test_file)
@raises(IntegerAccuracyLossError)
def test_issue_30_precision_loss():
test_file = "issue_30_2.ods"
sheet = pe.Sheet()
sheet[0, 0] = 9999999999999999
sheet.save_as(test_file)
def get_fixtures(filename):
return os.path.join("tests", "fixtures", filename)
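# --- Illustrative note (not part of the test suite): with row_max = col_max
# = 2, data_gen() yields a single row mixing string and integer cells:
#
#   >>> list(data_gen())
#   [['Row: 0 Col: 0', 'Row: 0 Col: 1', 0, 1]]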
|
[
"psutil.Process",
"pyexcel.get_book",
"pyexcel.get_sheet",
"os.unlink",
"os.path.join",
"nose.SkipTest",
"pyexcel_ods3.save_data",
"os.path.exists",
"nose.tools.eq_",
"pyexcel.iget_array",
"nose.tools.raises",
"pyexcel.Sheet",
"pyexcel.free_resources"
] |
[((2606, 2638), 'nose.tools.raises', 'raises', (['IntegerAccuracyLossError'], {}), '(IntegerAccuracyLossError)\n', (2612, 2638), False, 'from nose.tools import eq_, raises\n'), ((388, 422), 'pyexcel_ods3.save_data', 'save_data', (['test_file_name', 'content'], {}), '(test_file_name, content)\n', (397, 422), False, 'from pyexcel_ods3 import save_data\n'), ((427, 461), 'pyexcel_ods3.save_data', 'save_data', (['test_file_name', 'content'], {}), '(test_file_name, content)\n', (436, 461), False, 'from pyexcel_ods3 import save_data\n'), ((473, 503), 'os.path.exists', 'os.path.exists', (['test_file_name'], {}), '(test_file_name)\n', (487, 503), False, 'import os\n'), ((568, 593), 'os.unlink', 'os.unlink', (['test_file_name'], {}), '(test_file_name)\n', (577, 593), False, 'import os\n'), ((1021, 1041), 'os.unlink', 'os.unlink', (['test_file'], {}), '(test_file)\n', (1030, 1041), False, 'import os\n'), ((1211, 1245), 'nose.tools.eq_', 'eq_', (["data['Sheet1'][0][0].days", '(12)'], {}), "(data['Sheet1'][0][0].days, 12)\n", (1214, 1245), False, 'from nose.tools import eq_, raises\n'), ((1372, 1388), 'psutil.Process', 'psutil.Process', ([], {}), '()\n', (1386, 1388), False, 'import psutil\n'), ((1518, 1576), 'pyexcel.iget_array', 'pe.iget_array', ([], {'file_name': 'test_file', 'library': '"""pyexcel-ods3"""'}), "(file_name=test_file, library='pyexcel-ods3')\n", (1531, 1576), True, 'import pyexcel as pe\n'), ((1991, 2010), 'pyexcel.free_resources', 'pe.free_resources', ([], {}), '()\n', (2008, 2010), True, 'import pyexcel as pe\n'), ((2103, 2136), 'nose.tools.eq_', 'eq_', (['open_files_l1', 'open_files_l4'], {}), '(open_files_l1, open_files_l4)\n', (2106, 2136), False, 'from nose.tools import eq_, raises\n'), ((2335, 2355), 'pyexcel.get_book', 'pe.get_book', ([], {'url': 'url'}), '(url=url)\n', (2346, 2355), True, 'import pyexcel as pe\n'), ((2422, 2432), 'pyexcel.Sheet', 'pe.Sheet', ([], {}), '()\n', (2430, 2432), True, 'import pyexcel as pe\n'), ((2509, 2542), 'pyexcel.get_sheet', 'pe.get_sheet', ([], {'file_name': 'test_file'}), '(file_name=test_file)\n', (2521, 2542), True, 'import pyexcel as pe\n'), ((2547, 2577), 'nose.tools.eq_', 'eq_', (['sheet[0, 0]', 'sheet2[0, 0]'], {}), '(sheet[0, 0], sheet2[0, 0])\n', (2550, 2577), False, 'from nose.tools import eq_, raises\n'), ((2582, 2602), 'os.unlink', 'os.unlink', (['test_file'], {}), '(test_file)\n', (2591, 2602), False, 'import os\n'), ((2720, 2730), 'pyexcel.Sheet', 'pe.Sheet', ([], {}), '()\n', (2728, 2730), True, 'import pyexcel as pe\n'), ((2836, 2879), 'os.path.join', 'os.path.join', (['"""tests"""', '"""fixtures"""', 'filename'], {}), "('tests', 'fixtures', filename)\n", (2848, 2879), False, 'import os\n'), ((515, 554), 'os.path.exists', 'os.path.exists', (["(test_file_name + '.bak')"], {}), "(test_file_name + '.bak')\n", (529, 554), False, 'import os\n'), ((2196, 2206), 'nose.SkipTest', 'SkipTest', ([], {}), '()\n', (2204, 2206), False, 'from nose import SkipTest\n')]
|
# -*- coding: utf-8 -*-
import os
from simmate.conftest import copy_test_files
from simmate.calculators.vasp.inputs import Incar
from simmate.calculators.vasp.error_handlers import Unconverged
def test_unconverged_electronic(tmpdir):
copy_test_files(
tmpdir,
test_directory=__file__,
test_folder="unconverged_electronic",
)
    # we reference these files in several spots below, so we grab their paths
    # up front
incar_filename = os.path.join(tmpdir, "INCAR")
vasprun_filename = os.path.join(tmpdir, "vasprun.xml")
# Confirm an error IS found when we have an unconverging xml
error_handler = Unconverged()
assert error_handler.check(tmpdir) == True
# Make first attempt at fixing the error
fix = error_handler.correct(tmpdir)
assert fix == "switched ALGO from VeryFast to Fast"
assert Incar.from_file(incar_filename)["ALGO"] == "Fast"
# Make second attempt at fixing the error
fix = error_handler.correct(tmpdir)
assert fix == "switched ALGO from Fast to Normal"
assert Incar.from_file(incar_filename)["ALGO"] == "Normal"
# Make third attempt at fixing the error
fix = error_handler.correct(tmpdir)
assert fix == "switched ALGO from Normal to All"
assert Incar.from_file(incar_filename)["ALGO"] == "All"
# Make fourth attempt at fixing the error
fix = error_handler.correct(tmpdir)
assert fix == (
"turned on mixing with the following settings: {'ISTART': 1, 'ALGO': "
"'Normal', 'NELMDL': -6, 'BMIX': 0.001, 'AMIX_MAG': 0.8, 'BMIX_MAG': 0.001}"
)
incar = Incar.from_file(incar_filename)
assert incar["ISTART"] == 1
assert incar["ALGO"] == "Normal"
assert incar["NELMDL"] == -6
assert incar["BMIX"] == 0.001
assert incar["AMIX_MAG"] == 0.8
assert incar["BMIX_MAG"] == 0.001
# make sure no error is raised when the xml doesn't exist
os.remove(vasprun_filename)
assert error_handler.check(tmpdir) == False
def test_unconverged_ionic(tmpdir):
copy_test_files(
tmpdir,
test_directory=__file__,
test_folder="unconverged_ionic",
)
    # we reference the file in several spots below, so we grab its path up
    # front
incar_filename = os.path.join(tmpdir, "INCAR")
# Confirm an error IS found when we have an unconverging xml
error_handler = Unconverged()
assert error_handler.check(tmpdir) == True
# Make first attempt at fixing the error
fix = error_handler.correct(tmpdir)
assert fix == "copied the CONTCAR into the POSCAR and switched IBRION to 1"
assert Incar.from_file(incar_filename)["IBRION"] == 1
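# --- Hedged helper sketch (hypothetical; not part of simmate) showing how the
# handler above is meant to be driven: apply correct() until check() passes.
#
#   def fix_until_converged(handler, directory, max_attempts=5):
#       for _ in range(max_attempts):
#           if not handler.check(directory):
#               return True
#           handler.correct(directory)
#       return not handler.check(directory)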
|
[
"os.remove",
"simmate.conftest.copy_test_files",
"simmate.calculators.vasp.inputs.Incar.from_file",
"simmate.calculators.vasp.error_handlers.Unconverged",
"os.path.join"
] |
[((242, 333), 'simmate.conftest.copy_test_files', 'copy_test_files', (['tmpdir'], {'test_directory': '__file__', 'test_folder': '"""unconverged_electronic"""'}), "(tmpdir, test_directory=__file__, test_folder=\n 'unconverged_electronic')\n", (257, 333), False, 'from simmate.conftest import copy_test_files\n'), ((460, 489), 'os.path.join', 'os.path.join', (['tmpdir', '"""INCAR"""'], {}), "(tmpdir, 'INCAR')\n", (472, 489), False, 'import os\n'), ((513, 548), 'os.path.join', 'os.path.join', (['tmpdir', '"""vasprun.xml"""'], {}), "(tmpdir, 'vasprun.xml')\n", (525, 548), False, 'import os\n'), ((635, 648), 'simmate.calculators.vasp.error_handlers.Unconverged', 'Unconverged', ([], {}), '()\n', (646, 648), False, 'from simmate.calculators.vasp.error_handlers import Unconverged\n'), ((1591, 1622), 'simmate.calculators.vasp.inputs.Incar.from_file', 'Incar.from_file', (['incar_filename'], {}), '(incar_filename)\n', (1606, 1622), False, 'from simmate.calculators.vasp.inputs import Incar\n'), ((1900, 1927), 'os.remove', 'os.remove', (['vasprun_filename'], {}), '(vasprun_filename)\n', (1909, 1927), False, 'import os\n'), ((2018, 2104), 'simmate.conftest.copy_test_files', 'copy_test_files', (['tmpdir'], {'test_directory': '__file__', 'test_folder': '"""unconverged_ionic"""'}), "(tmpdir, test_directory=__file__, test_folder=\n 'unconverged_ionic')\n", (2033, 2104), False, 'from simmate.conftest import copy_test_files\n'), ((2231, 2260), 'os.path.join', 'os.path.join', (['tmpdir', '"""INCAR"""'], {}), "(tmpdir, 'INCAR')\n", (2243, 2260), False, 'import os\n'), ((2346, 2359), 'simmate.calculators.vasp.error_handlers.Unconverged', 'Unconverged', ([], {}), '()\n', (2357, 2359), False, 'from simmate.calculators.vasp.error_handlers import Unconverged\n'), ((849, 880), 'simmate.calculators.vasp.inputs.Incar.from_file', 'Incar.from_file', (['incar_filename'], {}), '(incar_filename)\n', (864, 880), False, 'from simmate.calculators.vasp.inputs import Incar\n'), ((1051, 1082), 'simmate.calculators.vasp.inputs.Incar.from_file', 'Incar.from_file', (['incar_filename'], {}), '(incar_filename)\n', (1066, 1082), False, 'from simmate.calculators.vasp.inputs import Incar\n'), ((1253, 1284), 'simmate.calculators.vasp.inputs.Incar.from_file', 'Incar.from_file', (['incar_filename'], {}), '(incar_filename)\n', (1268, 1284), False, 'from simmate.calculators.vasp.inputs import Incar\n'), ((2584, 2615), 'simmate.calculators.vasp.inputs.Incar.from_file', 'Incar.from_file', (['incar_filename'], {}), '(incar_filename)\n', (2599, 2615), False, 'from simmate.calculators.vasp.inputs import Incar\n')]
|
from tests.unit.dataactcore.factories.staging import DetachedAwardFinancialAssistanceFactory
from tests.unit.dataactvalidator.utils import number_of_errors, query_columns
from dataactcore.models.domainModels import Zips
_FILE = 'fabs41_detached_award_financial_assistance_3'
def test_column_headers(database):
expected_subset = {"row_number", "place_of_performance_code", "place_of_performance_zip4a"}
actual = set(query_columns(_FILE, database))
assert expected_subset == actual
def test_success(database):
""" When provided, PrimaryPlaceofPerformanceZIP+4 must be in the state specified by PrimaryPlaceOfPerformanceCode.
In this specific submission row, the ZIP5 (and by extension the full ZIP+4) is not a valid ZIP code in the
state in question."""
zips = Zips(zip5="12345", zip_last4="6789", state_abbreviation="NY")
# ignored because no zip4
det_award_1 = DetachedAwardFinancialAssistanceFactory(place_of_performance_code="NY*****",
place_of_performance_zip4a="")
det_award_2 = DetachedAwardFinancialAssistanceFactory(place_of_performance_code="Ny**123",
place_of_performance_zip4a=None)
det_award_3 = DetachedAwardFinancialAssistanceFactory(place_of_performance_code="Ny**123",
place_of_performance_zip4a="city-wide")
# valid 5 digit zip
det_award_4 = DetachedAwardFinancialAssistanceFactory(place_of_performance_code="Ny**123",
place_of_performance_zip4a="12345")
det_award_5 = DetachedAwardFinancialAssistanceFactory(place_of_performance_code="NY98765",
place_of_performance_zip4a="12345")
# valid 9 digit zip
det_award_6 = DetachedAwardFinancialAssistanceFactory(place_of_performance_code="NY98765",
place_of_performance_zip4a="123456789")
det_award_7 = DetachedAwardFinancialAssistanceFactory(place_of_performance_code="ny98765",
place_of_performance_zip4a="123456789")
det_award_8 = DetachedAwardFinancialAssistanceFactory(place_of_performance_code="ny98765",
place_of_performance_zip4a="12345-6789")
    # invalid 9-digit zip, but it should pass this rule; it is handled by d41_5
det_award_9 = DetachedAwardFinancialAssistanceFactory(place_of_performance_code="ny98765",
place_of_performance_zip4a="12345-6788")
errors = number_of_errors(_FILE, database, models=[det_award_1, det_award_2, det_award_3, det_award_4, det_award_5,
det_award_6, det_award_7, det_award_8, det_award_9, zips])
assert errors == 0
    # wrong-length zips and zips with '-' in the wrong place; formatting is
    # checked in another rule
det_award_1 = DetachedAwardFinancialAssistanceFactory(place_of_performance_code="ny10986",
place_of_performance_zip4a="12345678")
det_award_2 = DetachedAwardFinancialAssistanceFactory(place_of_performance_code="ny10986",
place_of_performance_zip4a="1234567898")
det_award_3 = DetachedAwardFinancialAssistanceFactory(place_of_performance_code="ny10986",
place_of_performance_zip4a="12345678-9")
det_award_4 = DetachedAwardFinancialAssistanceFactory(place_of_performance_code="ny10986",
place_of_performance_zip4a="123-456789")
errors = number_of_errors(_FILE, database, models=[det_award_1, det_award_2, det_award_3, det_award_4, zips])
assert errors == 0
def test_failure(database):
""" Test failure for when provided, PrimaryPlaceofPerformanceZIP+4 must be in the state specified by
PrimaryPlaceOfPerformanceCode. In this specific submission row, the ZIP5 (and by extension the full ZIP+4) is
not a valid ZIP code in the state in question."""
zips = Zips(zip5="12345", zip_last4="6789", state_abbreviation="NY")
# invalid 5 digit zip
det_award_1 = DetachedAwardFinancialAssistanceFactory(place_of_performance_code="ny10986",
place_of_performance_zip4a="12346")
det_award_2 = DetachedAwardFinancialAssistanceFactory(place_of_performance_code="NA*****",
place_of_performance_zip4a='12345')
errors = number_of_errors(_FILE, database, models=[det_award_1, det_award_2, zips])
assert errors == 2
# invalid 9 digit zip - first five fail (see d41_5 for the last four to fail)
det_award_1 = DetachedAwardFinancialAssistanceFactory(place_of_performance_code="ny10986",
place_of_performance_zip4a="123466789")
det_award_2 = DetachedAwardFinancialAssistanceFactory(place_of_performance_code="NY*****",
place_of_performance_zip4a='12346-6789')
errors = number_of_errors(_FILE, database, models=[det_award_1, det_award_2, zips])
assert errors == 2
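# --- Illustrative sketch (hypothetical values) of the pattern every case above
# follows: build factory rows plus a supporting Zips record, then count rule
# violations with number_of_errors:
#
#   zips = Zips(zip5="12345", zip_last4="6789", state_abbreviation="NY")
#   row = DetachedAwardFinancialAssistanceFactory(
#       place_of_performance_code="NY*****",
#       place_of_performance_zip4a="12345")
#   assert number_of_errors(_FILE, database, models=[row, zips]) == 0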
|
[
"tests.unit.dataactvalidator.utils.number_of_errors",
"tests.unit.dataactcore.factories.staging.DetachedAwardFinancialAssistanceFactory",
"dataactcore.models.domainModels.Zips",
"tests.unit.dataactvalidator.utils.query_columns"
] |
[((801, 862), 'dataactcore.models.domainModels.Zips', 'Zips', ([], {'zip5': '"""12345"""', 'zip_last4': '"""6789"""', 'state_abbreviation': '"""NY"""'}), "(zip5='12345', zip_last4='6789', state_abbreviation='NY')\n", (805, 862), False, 'from dataactcore.models.domainModels import Zips\n'), ((911, 1022), 'tests.unit.dataactcore.factories.staging.DetachedAwardFinancialAssistanceFactory', 'DetachedAwardFinancialAssistanceFactory', ([], {'place_of_performance_code': '"""NY*****"""', 'place_of_performance_zip4a': '""""""'}), "(place_of_performance_code='NY*****',\n place_of_performance_zip4a='')\n", (950, 1022), False, 'from tests.unit.dataactcore.factories.staging import DetachedAwardFinancialAssistanceFactory\n'), ((1095, 1208), 'tests.unit.dataactcore.factories.staging.DetachedAwardFinancialAssistanceFactory', 'DetachedAwardFinancialAssistanceFactory', ([], {'place_of_performance_code': '"""Ny**123"""', 'place_of_performance_zip4a': 'None'}), "(place_of_performance_code='Ny**123',\n place_of_performance_zip4a=None)\n", (1134, 1208), False, 'from tests.unit.dataactcore.factories.staging import DetachedAwardFinancialAssistanceFactory\n'), ((1281, 1401), 'tests.unit.dataactcore.factories.staging.DetachedAwardFinancialAssistanceFactory', 'DetachedAwardFinancialAssistanceFactory', ([], {'place_of_performance_code': '"""Ny**123"""', 'place_of_performance_zip4a': '"""city-wide"""'}), "(place_of_performance_code='Ny**123',\n place_of_performance_zip4a='city-wide')\n", (1320, 1401), False, 'from tests.unit.dataactcore.factories.staging import DetachedAwardFinancialAssistanceFactory\n'), ((1498, 1614), 'tests.unit.dataactcore.factories.staging.DetachedAwardFinancialAssistanceFactory', 'DetachedAwardFinancialAssistanceFactory', ([], {'place_of_performance_code': '"""Ny**123"""', 'place_of_performance_zip4a': '"""12345"""'}), "(place_of_performance_code='Ny**123',\n place_of_performance_zip4a='12345')\n", (1537, 1614), False, 'from tests.unit.dataactcore.factories.staging import DetachedAwardFinancialAssistanceFactory\n'), ((1687, 1803), 'tests.unit.dataactcore.factories.staging.DetachedAwardFinancialAssistanceFactory', 'DetachedAwardFinancialAssistanceFactory', ([], {'place_of_performance_code': '"""NY98765"""', 'place_of_performance_zip4a': '"""12345"""'}), "(place_of_performance_code='NY98765',\n place_of_performance_zip4a='12345')\n", (1726, 1803), False, 'from tests.unit.dataactcore.factories.staging import DetachedAwardFinancialAssistanceFactory\n'), ((1900, 2020), 'tests.unit.dataactcore.factories.staging.DetachedAwardFinancialAssistanceFactory', 'DetachedAwardFinancialAssistanceFactory', ([], {'place_of_performance_code': '"""NY98765"""', 'place_of_performance_zip4a': '"""123456789"""'}), "(place_of_performance_code='NY98765',\n place_of_performance_zip4a='123456789')\n", (1939, 2020), False, 'from tests.unit.dataactcore.factories.staging import DetachedAwardFinancialAssistanceFactory\n'), ((2093, 2213), 'tests.unit.dataactcore.factories.staging.DetachedAwardFinancialAssistanceFactory', 'DetachedAwardFinancialAssistanceFactory', ([], {'place_of_performance_code': '"""ny98765"""', 'place_of_performance_zip4a': '"""123456789"""'}), "(place_of_performance_code='ny98765',\n place_of_performance_zip4a='123456789')\n", (2132, 2213), False, 'from tests.unit.dataactcore.factories.staging import DetachedAwardFinancialAssistanceFactory\n'), ((2286, 2407), 'tests.unit.dataactcore.factories.staging.DetachedAwardFinancialAssistanceFactory', 'DetachedAwardFinancialAssistanceFactory', ([], 
{'place_of_performance_code': '"""ny98765"""', 'place_of_performance_zip4a': '"""12345-6789"""'}), "(place_of_performance_code='ny98765',\n place_of_performance_zip4a='12345-6789')\n", (2325, 2407), False, 'from tests.unit.dataactcore.factories.staging import DetachedAwardFinancialAssistanceFactory\n'), ((2572, 2693), 'tests.unit.dataactcore.factories.staging.DetachedAwardFinancialAssistanceFactory', 'DetachedAwardFinancialAssistanceFactory', ([], {'place_of_performance_code': '"""ny98765"""', 'place_of_performance_zip4a': '"""12345-6788"""'}), "(place_of_performance_code='ny98765',\n place_of_performance_zip4a='12345-6788')\n", (2611, 2693), False, 'from tests.unit.dataactcore.factories.staging import DetachedAwardFinancialAssistanceFactory\n'), ((2761, 2934), 'tests.unit.dataactvalidator.utils.number_of_errors', 'number_of_errors', (['_FILE', 'database'], {'models': '[det_award_1, det_award_2, det_award_3, det_award_4, det_award_5,\n det_award_6, det_award_7, det_award_8, det_award_9, zips]'}), '(_FILE, database, models=[det_award_1, det_award_2,\n det_award_3, det_award_4, det_award_5, det_award_6, det_award_7,\n det_award_8, det_award_9, zips])\n', (2777, 2934), False, 'from tests.unit.dataactvalidator.utils import number_of_errors, query_columns\n'), ((3131, 3250), 'tests.unit.dataactcore.factories.staging.DetachedAwardFinancialAssistanceFactory', 'DetachedAwardFinancialAssistanceFactory', ([], {'place_of_performance_code': '"""ny10986"""', 'place_of_performance_zip4a': '"""12345678"""'}), "(place_of_performance_code='ny10986',\n place_of_performance_zip4a='12345678')\n", (3170, 3250), False, 'from tests.unit.dataactcore.factories.staging import DetachedAwardFinancialAssistanceFactory\n'), ((3323, 3444), 'tests.unit.dataactcore.factories.staging.DetachedAwardFinancialAssistanceFactory', 'DetachedAwardFinancialAssistanceFactory', ([], {'place_of_performance_code': '"""ny10986"""', 'place_of_performance_zip4a': '"""1234567898"""'}), "(place_of_performance_code='ny10986',\n place_of_performance_zip4a='1234567898')\n", (3362, 3444), False, 'from tests.unit.dataactcore.factories.staging import DetachedAwardFinancialAssistanceFactory\n'), ((3517, 3638), 'tests.unit.dataactcore.factories.staging.DetachedAwardFinancialAssistanceFactory', 'DetachedAwardFinancialAssistanceFactory', ([], {'place_of_performance_code': '"""ny10986"""', 'place_of_performance_zip4a': '"""12345678-9"""'}), "(place_of_performance_code='ny10986',\n place_of_performance_zip4a='12345678-9')\n", (3556, 3638), False, 'from tests.unit.dataactcore.factories.staging import DetachedAwardFinancialAssistanceFactory\n'), ((3711, 3832), 'tests.unit.dataactcore.factories.staging.DetachedAwardFinancialAssistanceFactory', 'DetachedAwardFinancialAssistanceFactory', ([], {'place_of_performance_code': '"""ny10986"""', 'place_of_performance_zip4a': '"""123-456789"""'}), "(place_of_performance_code='ny10986',\n place_of_performance_zip4a='123-456789')\n", (3750, 3832), False, 'from tests.unit.dataactcore.factories.staging import DetachedAwardFinancialAssistanceFactory\n'), ((3900, 4004), 'tests.unit.dataactvalidator.utils.number_of_errors', 'number_of_errors', (['_FILE', 'database'], {'models': '[det_award_1, det_award_2, det_award_3, det_award_4, zips]'}), '(_FILE, database, models=[det_award_1, det_award_2,\n det_award_3, det_award_4, zips])\n', (3916, 4004), False, 'from tests.unit.dataactvalidator.utils import number_of_errors, query_columns\n'), ((4347, 4408), 'dataactcore.models.domainModels.Zips', 'Zips', ([], {'zip5': 
'"""12345"""', 'zip_last4': '"""6789"""', 'state_abbreviation': '"""NY"""'}), "(zip5='12345', zip_last4='6789', state_abbreviation='NY')\n", (4351, 4408), False, 'from dataactcore.models.domainModels import Zips\n'), ((4453, 4569), 'tests.unit.dataactcore.factories.staging.DetachedAwardFinancialAssistanceFactory', 'DetachedAwardFinancialAssistanceFactory', ([], {'place_of_performance_code': '"""ny10986"""', 'place_of_performance_zip4a': '"""12346"""'}), "(place_of_performance_code='ny10986',\n place_of_performance_zip4a='12346')\n", (4492, 4569), False, 'from tests.unit.dataactcore.factories.staging import DetachedAwardFinancialAssistanceFactory\n'), ((4642, 4758), 'tests.unit.dataactcore.factories.staging.DetachedAwardFinancialAssistanceFactory', 'DetachedAwardFinancialAssistanceFactory', ([], {'place_of_performance_code': '"""NA*****"""', 'place_of_performance_zip4a': '"""12345"""'}), "(place_of_performance_code='NA*****',\n place_of_performance_zip4a='12345')\n", (4681, 4758), False, 'from tests.unit.dataactcore.factories.staging import DetachedAwardFinancialAssistanceFactory\n'), ((4826, 4900), 'tests.unit.dataactvalidator.utils.number_of_errors', 'number_of_errors', (['_FILE', 'database'], {'models': '[det_award_1, det_award_2, zips]'}), '(_FILE, database, models=[det_award_1, det_award_2, zips])\n', (4842, 4900), False, 'from tests.unit.dataactvalidator.utils import number_of_errors, query_columns\n'), ((5025, 5145), 'tests.unit.dataactcore.factories.staging.DetachedAwardFinancialAssistanceFactory', 'DetachedAwardFinancialAssistanceFactory', ([], {'place_of_performance_code': '"""ny10986"""', 'place_of_performance_zip4a': '"""123466789"""'}), "(place_of_performance_code='ny10986',\n place_of_performance_zip4a='123466789')\n", (5064, 5145), False, 'from tests.unit.dataactcore.factories.staging import DetachedAwardFinancialAssistanceFactory\n'), ((5218, 5339), 'tests.unit.dataactcore.factories.staging.DetachedAwardFinancialAssistanceFactory', 'DetachedAwardFinancialAssistanceFactory', ([], {'place_of_performance_code': '"""NY*****"""', 'place_of_performance_zip4a': '"""12346-6789"""'}), "(place_of_performance_code='NY*****',\n place_of_performance_zip4a='12346-6789')\n", (5257, 5339), False, 'from tests.unit.dataactcore.factories.staging import DetachedAwardFinancialAssistanceFactory\n'), ((5407, 5481), 'tests.unit.dataactvalidator.utils.number_of_errors', 'number_of_errors', (['_FILE', 'database'], {'models': '[det_award_1, det_award_2, zips]'}), '(_FILE, database, models=[det_award_1, det_award_2, zips])\n', (5423, 5481), False, 'from tests.unit.dataactvalidator.utils import number_of_errors, query_columns\n'), ((426, 456), 'tests.unit.dataactvalidator.utils.query_columns', 'query_columns', (['_FILE', 'database'], {}), '(_FILE, database)\n', (439, 456), False, 'from tests.unit.dataactvalidator.utils import number_of_errors, query_columns\n')]
|
# processing the SA2 and road shapefiles
# inputs: raw SA2 and road shapefiles
# Outputs: Adelaide SA2 nodal and link dataframes with transport information
# Outputs are pickles:
# sa2_node_with_only_transport_attributes.pickle
# sa2_edge_with_only_transport_attributes.pickle
# Processing files saved:
# sa2_adelaide.shp, sa2_adelaide_edge.shp, OD_full_path.pickle, sa2_roads_in_adelaide.shp, etc.
# utils needed: a shortest-path dictionary; a function turning the road network
# into the link dataframe.
# time: ~15 min
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import geopandas as gpd
from pysal.lib import weights
import networkx as nx
import momepy
import pickle
# system path
import sys
import os
# util path
utility_path = os.path.join(os.getcwd(),'src/d00_utils/')
sys.path.append(utility_path)
import utilities as util
# data path
# sw: define the path based on the root project directory.
raw_data_path = os.path.join(os.getcwd(),'data/01_raw/')
intermediate_data_path = os.path.join(os.getcwd(),'data/02_intermediate/')
# # read files
# mount_path = "/Users/shenhaowang/Dropbox (MIT)/project_econ_opportunity_south_Australia"
#region 1. Extract the SA2s for the Adelaide area.
# raw data
sa2_shape = gpd.read_file(raw_data_path + "sa2/SA2_2016_AUST.shp")
# Keep Adelaide area
# info from: file:///Users/shenhaowang/Downloads/StatePublicHealthPlan_Final.pdf (page 32)
adelaide_sa4_set = ['401','402','403','404']
sa2_adelaide = sa2_shape.loc[sa2_shape.SA4_CODE16.isin(adelaide_sa4_set)]
print("Shape of SA2 in the Adelaide area is: ", sa2_adelaide.shape)
# only use the most relevant variables.
sa2_adelaide = sa2_adelaide[['SA2_MAIN16', 'SA2_NAME16', 'geometry']]
# declare the projection (assigning .crs labels the CRS without reprojecting;
# to_crs() would transform the geometries)
sa2_adelaide.crs = 'epsg:3112'
print(sa2_adelaide.crs)
# create a sa2_adelaide link dataframe
index = pd.MultiIndex.from_product([sa2_adelaide['SA2_MAIN16'], sa2_adelaide['SA2_MAIN16']], names=['O', 'D'])
sa2_adelaide_link_df = pd.DataFrame(index=index).reset_index()
# add the geometry part to sa2_adelaide_link_df
from shapely.geometry import LineString
edge_list = []
for idx in range(sa2_adelaide_link_df.shape[0]):
origin = sa2_adelaide_link_df.loc[idx, 'O']
destination = sa2_adelaide_link_df.loc[idx, 'D']
edge = LineString([sa2_adelaide.loc[sa2_adelaide['SA2_MAIN16'] == origin, 'geometry'].centroid.values[0],
sa2_adelaide.loc[sa2_adelaide['SA2_MAIN16'] == destination, 'geometry'].centroid.values[0]])
edge_list.append(edge)
sa2_adelaide_link_df['geometry'] = edge_list
# create the gpd object
sa2_adelaide_link = gpd.GeoDataFrame(sa2_adelaide_link_df, crs='epsg:3112')
# save the process SA2 Adelaide shapefile
sa2_adelaide.to_file(intermediate_data_path+'shapefiles/sa2_adelaide.shp')
sa2_adelaide_link.to_file(intermediate_data_path+'shapefiles/sa2_adelaide_edge.shp')
#endregion
#region 2. Create the OD shortest path dictionary for SA2 Adelaide shapefile.
sa2_adelaide = gpd.read_file(intermediate_data_path + 'shapefiles/sa2_adelaide.shp')
# create the queen contiguity network
adelaide_queen = weights.contiguity.Queen.from_dataframe(sa2_adelaide)
# create the kernel network (using Euclidean distances)
sa2_adelaide_kernel = weights.distance.Kernel.from_dataframe(sa2_adelaide, k=109)
# turn the defaults to euclidean distances as weights.
for i in sa2_adelaide_kernel.neighbors.keys():
for j_idx in range(len(sa2_adelaide_kernel.neighbors[i])):
j = sa2_adelaide_kernel.neighbors[i][j_idx]
        # note that the kernel .weights mapping is indexed by
        # i (node index) and j_idx (position in the neighbor list - not a node index!)
weight = sa2_adelaide_kernel.weights[i][j_idx]
distance = (1 - weight) * sa2_adelaide_kernel.bandwidth[i]
sa2_adelaide_kernel.weights[i][j_idx] = distance[0]
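# --- Note (illustrative): with pysal's default triangular kernel,
# weight = 1 - distance / bandwidth for each neighbor, so
# distance = (1 - weight) * bandwidth inverts it, recovering the Euclidean
# distance that Dijkstra uses below as the edge cost.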
# assign euclidean weights to Queen net
for o in adelaide_queen.neighbors.keys():
# print(o)
for d_idx in range(len(adelaide_queen.neighbors[o])):
d = adelaide_queen.neighbors[o][d_idx] # return the o and d SA2 original indices.
weight = sa2_adelaide_kernel[o][d] # get the kernel weight associated with the o and d.
adelaide_queen.weights[o][d_idx] = weight
# print(adelaide_queen.weights)
# create the nx object
adelaide_nx = adelaide_queen.to_networkx()
# assign weights to adelaide_nx
for o, d in adelaide_nx.edges:
    adelaide_nx.edges[o, d]['weight'] = adelaide_queen[o][d]
# create the OD dictionary for the full shortest paths.
path = dict(nx.all_pairs_dijkstra(adelaide_nx, weight='weight'))
# create a OD dictionary.
OD_full_path = {}
for o in range(110):  # 110 SA2s in the Adelaide area (cf. k=109 above)
    for d in range(110):
        if d == 103 or o == 103:  # 103 is the island - there is no path to it
            pass
        else:
            OD_full_path[(o, d)] = path[o][1][d]
# note: OD_full_path idx is the same as sa2_adelaide!
with open(intermediate_data_path+'OD_full_path.pickle', 'wb') as f:
pickle.dump(OD_full_path, f)
#endregion
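# --- Illustrative note (indices hypothetical): OD_full_path maps an
# (origin_idx, destination_idx) pair of sa2_adelaide row positions to the list
# of row indices on the shortest path, endpoints included, e.g.
#   OD_full_path[(0, 5)]  # -> [0, 12, 5]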
#region 3. Read road shapefiles and save them
sa2_roads = gpd.read_file(raw_data_path + "roads/Roads_GDA2020.shp")
sa2_roads = sa2_roads.loc[~sa2_roads['class'].isna(), :]
# declare the CRS as epsg:3112 (again, .crs labels the data without reprojecting)
sa2_roads.crs = 'epsg:3112'
# combine freeway and highway as one category (HWY).
sa2_roads.loc[sa2_roads['class'] == 'FREE', 'class'] = 'HWY'
# extract three types of roads for GIS visualization
sa2_roads_LOCL = sa2_roads.loc[sa2_roads['class'] == 'LOCL', :]
sa2_roads_HWY = sa2_roads.loc[sa2_roads['class'] == 'HWY', :]
sa2_roads_UND = sa2_roads.loc[sa2_roads['class'] == 'UND', :]
# np.unique(sa2_roads['class'], return_counts = True)
# save shapefiles
sa2_roads.to_file(intermediate_data_path+"shapefiles/sa2_roads.shp")
sa2_roads_LOCL.to_file(intermediate_data_path+"shapefiles/sa2_roads_LOCL.shp")
sa2_roads_HWY.to_file(intermediate_data_path+"shapefiles/sa2_roads_HWY.shp")
sa2_roads_UND.to_file(intermediate_data_path+"shapefiles/sa2_roads_UND.shp")
#endregion
#region 4. Turn road shapefiles to node attributes of SA2s' nodes.
# attributes: number of road counts and intersection counts.
# inputs: roads and sa2 shapefiles
# outputs: sa2 shapefile with road attributes.
sa2_roads = gpd.read_file(intermediate_data_path+"shapefiles/sa2_roads.shp")
sa2_adelaide = gpd.read_file(intermediate_data_path+'shapefiles/sa2_adelaide.shp')
# augment road class info to sa2_adelaide
sa2_adelaide_road_attributes, roads_in_adelaide = util.compute_road_attributes(sa2_adelaide, sa2_roads)
sa2_adelaide_road_attributes['num_roads'] = np.sum(sa2_adelaide_road_attributes[['class_ART', 'class_BUS', 'class_COLL',
'class_HWY', 'class_LOCL','class_SUBA', 'class_TRK2',
'class_TRK4', 'class_UND']], axis = 1)
# augment intersection attributes to sa2_adelaide
sa2_adelaide_intersection_attributes = util.compute_intersection_attributes(sa2_adelaide_road_attributes, roads_in_adelaide)
# merge sa2_adelaide, sa2_adelaide_road_attributes, and sa2_adelaide_intersection_attributes
sa2_adelaide_with_transport_attributes = sa2_adelaide.merge(sa2_adelaide_road_attributes, on='SA2_MAIN16', how='outer', suffixes=("","_x"))
sa2_adelaide_with_transport_attributes.drop(columns=['SA2_NAME16_x', 'geometry_x'], inplace=True)
sa2_adelaide_with_transport_attributes = sa2_adelaide_with_transport_attributes.merge(sa2_adelaide_intersection_attributes, on='SA2_MAIN16', how='outer', suffixes=("","_x"))
# save sa2_adelaide_with_transport_attributes and roads_in_adelaide
sa2_adelaide_with_transport_attributes.to_pickle(intermediate_data_path+"sa2_node_with_only_transport_attributes.pickle")
roads_in_adelaide.to_file(intermediate_data_path+"shapefiles/sa2_roads_in_adelaide.shp")
# sw: Wow. Pickle can save & read the shapefiles with crs info kept.
# sw: I still saved to shp files because QGIS cannot read pickle, I guess.
# with open("./data/sa2_adelaide_with_transport_attributes.pickle", 'rb') as f:
# x_file = pickle.load(f)
# print(x_file.crs)
#endregion
#region 5. Turn road shapefiles to the attributes of SA2s' edges.
# It takes about five minutes to process.
# roads_in_adelaide = gpd.read_file("./data/shapefiles/sa2_roads_in_adelaide.shp")
# 1. edge file
sa2_adelaide_edge = gpd.read_file(intermediate_data_path+'shapefiles/sa2_adelaide_edge.shp')
# 2. transport attribute file
with open(intermediate_data_path+"sa2_node_with_only_transport_attributes.pickle", 'rb') as f:
sa2_adelaide_with_transport_attributes = pickle.load(f)
# 3. OD path file
with open(intermediate_data_path+'OD_full_path.pickle', 'rb') as f:
OD_full_path = pickle.load(f)
# add the road and intersection attributes to the sa2_adelaide_edge data set.
attribute_name_list = ['class_ART', 'class_BUS', 'class_COLL',
'class_HWY', 'class_LOCL', 'class_SUBA',
'class_TRK2', 'class_TRK4', 'class_UND', 'num_roads', 'num_nodes', 'num_1degree',
'num_2degree', 'num_3degree', 'num_4degree', 'num_greater5degree']
sa2_adelaide_edge[attribute_name_list] = 0.0 # init values
# add road and intersection attributes to the edge df.
for idx in np.arange(sa2_adelaide_edge.shape[0]):
if idx%1000 == 0:
print(idx)
origin = sa2_adelaide_edge.loc[idx, 'O']
destination = sa2_adelaide_edge.loc[idx, 'D']
o_idx = sa2_adelaide_with_transport_attributes.index[sa2_adelaide_with_transport_attributes.SA2_MAIN16 == origin].tolist()[0]
d_idx = sa2_adelaide_with_transport_attributes.index[sa2_adelaide_with_transport_attributes.SA2_MAIN16 == destination].tolist()[0]
# print(o_idx,d_idx)
try:
        # OD_full_path might not contain all shortest paths.
        # note that the OD_full_path idx is consistent with sa2_adelaide.
idx_list_on_shortest_path = OD_full_path[(o_idx, d_idx)]
for node_on_shortest_path in idx_list_on_shortest_path:
sa2_adelaide_edge.loc[idx, attribute_name_list] += sa2_adelaide_with_transport_attributes.loc[
node_on_shortest_path, attribute_name_list]
    except KeyError:
        # some OD pairs (e.g. anything involving the island at index 103)
        # have no shortest-path entry
        pass
# output two pickles:
# node network with transport info: sa2_adelaide_with_transport_attributes
# edge network with transport info: sa2_adelaide_edge
sa2_adelaide_with_transport_attributes.to_pickle(intermediate_data_path+'sa2_node_with_only_transport_attributes.pickle')
sa2_adelaide_edge.to_pickle(intermediate_data_path+'sa2_edge_with_only_transport_attributes.pickle')
#endregion
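# --- Hedged usage sketch: reading the two outputs back (same paths as saved
# above); per the note in region 4, pickling preserves the GeoDataFrame along
# with its crs:
#
#   node_df = pd.read_pickle(
#       intermediate_data_path + 'sa2_node_with_only_transport_attributes.pickle')
#   edge_df = pd.read_pickle(
#       intermediate_data_path + 'sa2_edge_with_only_transport_attributes.pickle')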
|
[
"sys.path.append",
"pysal.lib.weights.distance.Kernel.from_dataframe",
"pickle.dump",
"pandas.DataFrame",
"numpy.sum",
"utilities.compute_intersection_attributes",
"os.getcwd",
"utilities.compute_road_attributes",
"pandas.MultiIndex.from_product",
"shapely.geometry.LineString",
"geopandas.GeoDataFrame",
"pickle.load",
"numpy.arange",
"pysal.lib.weights.contiguity.Queen.from_dataframe",
"networkx.all_pairs_dijkstra",
"geopandas.read_file"
] |
[((841, 870), 'sys.path.append', 'sys.path.append', (['utility_path'], {}), '(utility_path)\n', (856, 870), False, 'import sys\n'), ((1279, 1333), 'geopandas.read_file', 'gpd.read_file', (["(raw_data_path + 'sa2/SA2_2016_AUST.shp')"], {}), "(raw_data_path + 'sa2/SA2_2016_AUST.shp')\n", (1292, 1333), True, 'import geopandas as gpd\n'), ((1862, 1969), 'pandas.MultiIndex.from_product', 'pd.MultiIndex.from_product', (["[sa2_adelaide['SA2_MAIN16'], sa2_adelaide['SA2_MAIN16']]"], {'names': "['O', 'D']"}), "([sa2_adelaide['SA2_MAIN16'], sa2_adelaide[\n 'SA2_MAIN16']], names=['O', 'D'])\n", (1888, 1969), True, 'import pandas as pd\n'), ((2625, 2680), 'geopandas.GeoDataFrame', 'gpd.GeoDataFrame', (['sa2_adelaide_link_df'], {'crs': '"""epsg:3112"""'}), "(sa2_adelaide_link_df, crs='epsg:3112')\n", (2641, 2680), True, 'import geopandas as gpd\n'), ((2989, 3058), 'geopandas.read_file', 'gpd.read_file', (["(intermediate_data_path + 'shapefiles/sa2_adelaide.shp')"], {}), "(intermediate_data_path + 'shapefiles/sa2_adelaide.shp')\n", (3002, 3058), True, 'import geopandas as gpd\n'), ((3111, 3164), 'pysal.lib.weights.contiguity.Queen.from_dataframe', 'weights.contiguity.Queen.from_dataframe', (['sa2_adelaide'], {}), '(sa2_adelaide)\n', (3150, 3164), False, 'from pysal.lib import weights\n'), ((3244, 3303), 'pysal.lib.weights.distance.Kernel.from_dataframe', 'weights.distance.Kernel.from_dataframe', (['sa2_adelaide'], {'k': '(109)'}), '(sa2_adelaide, k=109)\n', (3282, 3303), False, 'from pysal.lib import weights\n'), ((5048, 5104), 'geopandas.read_file', 'gpd.read_file', (["(raw_data_path + 'roads/Roads_GDA2020.shp')"], {}), "(raw_data_path + 'roads/Roads_GDA2020.shp')\n", (5061, 5104), True, 'import geopandas as gpd\n'), ((6187, 6253), 'geopandas.read_file', 'gpd.read_file', (["(intermediate_data_path + 'shapefiles/sa2_roads.shp')"], {}), "(intermediate_data_path + 'shapefiles/sa2_roads.shp')\n", (6200, 6253), True, 'import geopandas as gpd\n'), ((6267, 6336), 'geopandas.read_file', 'gpd.read_file', (["(intermediate_data_path + 'shapefiles/sa2_adelaide.shp')"], {}), "(intermediate_data_path + 'shapefiles/sa2_adelaide.shp')\n", (6280, 6336), True, 'import geopandas as gpd\n'), ((6428, 6481), 'utilities.compute_road_attributes', 'util.compute_road_attributes', (['sa2_adelaide', 'sa2_roads'], {}), '(sa2_adelaide, sa2_roads)\n', (6456, 6481), True, 'import utilities as util\n'), ((6526, 6702), 'numpy.sum', 'np.sum', (["sa2_adelaide_road_attributes[['class_ART', 'class_BUS', 'class_COLL',\n 'class_HWY', 'class_LOCL', 'class_SUBA', 'class_TRK2', 'class_TRK4',\n 'class_UND']]"], {'axis': '(1)'}), "(sa2_adelaide_road_attributes[['class_ART', 'class_BUS', 'class_COLL',\n 'class_HWY', 'class_LOCL', 'class_SUBA', 'class_TRK2', 'class_TRK4',\n 'class_UND']], axis=1)\n", (6532, 6702), True, 'import numpy as np\n'), ((6932, 7021), 'utilities.compute_intersection_attributes', 'util.compute_intersection_attributes', (['sa2_adelaide_road_attributes', 'roads_in_adelaide'], {}), '(sa2_adelaide_road_attributes,\n roads_in_adelaide)\n', (6968, 7021), True, 'import utilities as util\n'), ((8323, 8397), 'geopandas.read_file', 'gpd.read_file', (["(intermediate_data_path + 'shapefiles/sa2_adelaide_edge.shp')"], {}), "(intermediate_data_path + 'shapefiles/sa2_adelaide_edge.shp')\n", (8336, 8397), True, 'import geopandas as gpd\n'), ((9231, 9268), 'numpy.arange', 'np.arange', (['sa2_adelaide_edge.shape[0]'], {}), '(sa2_adelaide_edge.shape[0])\n', (9240, 9268), True, 'import numpy as np\n'), ((811, 822), 'os.getcwd', 'os.getcwd', ([], 
{}), '()\n', (820, 822), False, 'import os\n'), ((997, 1008), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (1006, 1008), False, 'import os\n'), ((1063, 1074), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (1072, 1074), False, 'import os\n'), ((2293, 2493), 'shapely.geometry.LineString', 'LineString', (["[sa2_adelaide.loc[sa2_adelaide['SA2_MAIN16'] == origin, 'geometry'].\n centroid.values[0], sa2_adelaide.loc[sa2_adelaide['SA2_MAIN16'] ==\n destination, 'geometry'].centroid.values[0]]"], {}), "([sa2_adelaide.loc[sa2_adelaide['SA2_MAIN16'] == origin,\n 'geometry'].centroid.values[0], sa2_adelaide.loc[sa2_adelaide[\n 'SA2_MAIN16'] == destination, 'geometry'].centroid.values[0]])\n", (2303, 2493), False, 'from shapely.geometry import LineString\n'), ((4508, 4559), 'networkx.all_pairs_dijkstra', 'nx.all_pairs_dijkstra', (['adelaide_nx'], {'weight': '"""weight"""'}), "(adelaide_nx, weight='weight')\n", (4529, 4559), True, 'import networkx as nx\n'), ((4947, 4975), 'pickle.dump', 'pickle.dump', (['OD_full_path', 'f'], {}), '(OD_full_path, f)\n', (4958, 4975), False, 'import pickle\n'), ((8567, 8581), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (8578, 8581), False, 'import pickle\n'), ((8688, 8702), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (8699, 8702), False, 'import pickle\n'), ((1988, 2013), 'pandas.DataFrame', 'pd.DataFrame', ([], {'index': 'index'}), '(index=index)\n', (2000, 2013), True, 'import pandas as pd\n')]
|
from unittest import mock, TestCase
from typing import List, Dict
import numpy as np
from runeq import Config, stream, errors
def mock_get_json_response(
bodies: List[dict],
calls: List,
status_code=200,
headers: List[Dict[str, str]] = None
):
"""Return a function that can be used to mock .get_json_response()
Args:
bodies: list of JSON bodies to return from response.json()
calls: list. Every time the function is called, args and kwargs will
be appended to this list.
status_code: status code to return for each response
headers: the response header to apply
"""
headers = headers or [{}] * len(bodies)
num = 0
def _func(*args, **kwargs):
nonlocal num
# add inputs to the list of calls that was provided
calls.append((args, kwargs))
resp = mock.MagicMock()
resp.headers = headers[num]
resp.status_code = status_code
resp.ok = (status_code < 400)
resp.json.return_value = bodies[num]
num += 1
return resp
return _func
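# --- Illustrative usage of the factory above (values hypothetical):
#
#   calls = []
#   fake = mock_get_json_response([{'success': True, 'result': []}], calls)
#   resp = fake(page=1)
#   resp.json()   # -> {'success': True, 'result': []}
#   calls         # -> [((), {'page': 1})]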
def mock_get_csv_response(
bodies: List[str],
calls: List,
status_code=200,
headers: List[Dict[str, str]] = None
):
"""Return a function that can be used to mock .get_csv_response()
Args:
bodies: list of response bodies to return from response.text
calls: list. Every time the function is called, args and kwargs will
be appended to this list.
status_code: status code to return for each response
headers: the response header to apply
"""
headers = headers or [{}] * len(bodies)
num = 0
def _func(*args, **kwargs):
nonlocal num
# keep track of the kwargs that were used to call this
calls.append((args, kwargs))
resp = mock.MagicMock()
resp.headers = headers[num]
resp.status_code = status_code
resp.ok = (status_code < 400)
resp.text = bodies[num]
num += 1
return resp
return _func
class TestStreamV1Client(TestCase):
"""
Test stream.V1Client and the associated accessors.
"""
def setUp(self) -> None:
"""
Initialize a client, set up basic mocking.
"""
self.cfg = Config(
client_key_id='abc',
client_access_key='abc123',
)
self.client = stream.V1Client(self.cfg)
self.use_np_orig = stream.v1.USE_NUMPY
stream.v1.USE_NUMPY = False
def tearDown(self) -> None:
"""
Tear down monkey-patching.
"""
stream.v1.USE_NUMPY = self.use_np_orig
@mock.patch('runeq.stream.v1.requests')
def test_get_json_response(self, requests):
"""
Test the signature of JSON requests.
"""
for test_num, case in enumerate((
(self.client.Accel, '/v1/accel.json'),
(self.client.BandPower, '/v1/band_power.json'),
(self.client.Event, '/v1/event.json'),
(self.client.HeartRate, '/v1/heartrate.json'),
(self.client.LFP, '/v1/lfp.json'),
(
self.client.ProbabilitySymptom,
'/v1/probability_symptom.json'
),
(self.client.Rotation, '/v1/rotation.json'),
(self.client.Span, '/v1/span.json'),
(self.client.State, '/v1/state.json'),
)):
resource_creator, endpoint = case
resource = resource_creator(leslie='knope')
resource.get_json_response(ron='swanson', test_num=test_num)
requests.get.assert_has_calls([
mock.call(
self.cfg.stream_url + endpoint,
headers=self.cfg.auth_headers,
params={
'leslie': 'knope',
'ron': 'swanson',
'test_num': test_num,
}
),
])
@mock.patch('runeq.stream.v1.requests')
def test_get_csv_response(self, requests):
"""
Test the signature of CSV requests.
"""
for test_num, case in enumerate((
(self.client.Accel, '/v1/accel.csv'),
(self.client.BandPower, '/v1/band_power.csv'),
(self.client.HeartRate, '/v1/heartrate.csv'),
(self.client.LFP, '/v1/lfp.csv'),
(
self.client.ProbabilitySymptom,
'/v1/probability_symptom.csv'
),
(self.client.Rotation, '/v1/rotation.csv'),
(self.client.State, '/v1/state.csv'),
)):
resource_creator, endpoint = case
resource = resource_creator(leslie='knope')
resource.get_csv_response(ron='swanson', test_num=test_num)
requests.get.assert_has_calls([
mock.call(
self.cfg.stream_url + endpoint,
stream=True,
headers=self.cfg.auth_headers,
params={
'leslie': 'knope',
'ron': 'swanson',
'test_num': test_num,
}
),
])
def test_iter_json_data_with_token(self):
"""
Test the iterator over JSON responses, paginating with the next page
token header.
"""
for test_num, resource_creator in enumerate((
self.client.Accel,
self.client.BandPower,
self.client.Event,
self.client.HeartRate,
self.client.LFP,
self.client.ProbabilitySymptom,
self.client.Rotation,
self.client.Span,
self.client.State,
)):
resource = resource_creator()
mock_responses = [
{'success': True, 'result': [], 'next_page': 1},
{'success': True, 'result': []}
]
calls = []
resource.get_json_response = mock_get_json_response(
mock_responses,
calls,
200,
[
{'X-Rune-Next-Page-Token': '<PASSWORD>=='},
{},
]
)
iterator = resource.iter_json_data(test_num=test_num)
self.assertEqual(len(list(iterator)), 2)
# Check that all parameters were kept the same across calls,
# except for "next_page_token"
self.assertEqual(calls, [
((), {'test_num': test_num}),
(
(),
{
'test_num': test_num,
'next_page_token': '<PASSWORD>=='
}
)
])
def test_iter_json_data(self):
"""
Test the iterator over JSON responses, following pagination with
the page number.
"""
results = [
{'a': 1},
{'b': 2}
]
mock_responses = [
{'success': True, 'result': results[0], 'next_page': 1},
{'success': True, 'result': results[1]}
]
for test_num, resource_creator in enumerate((
self.client.Accel,
self.client.BandPower,
self.client.Event,
self.client.HeartRate,
self.client.LFP,
self.client.ProbabilitySymptom,
self.client.Rotation,
self.client.State,
)):
resource = resource_creator()
#
# Successful Requests
#
calls = []
resource.get_json_response = mock_get_json_response(
mock_responses,
calls
)
# Check the results
num_results = 0
iterator = resource.iter_json_data(test_num=test_num)
for i, actual in enumerate(iterator):
self.assertEqual(results[i], actual)
num_results += 1
self.assertEqual(num_results, 2)
# Check that all parameters were kept the same across calls,
# except for "page" (which must be incremented)
self.assertEqual(calls, [
((), {'test_num': test_num}),
((), {'test_num': test_num, 'page': 1})
])
#
# Request Error
# Iterator should check the response status for each request
#
err_details = {
"message": "i am an intentional error!",
"type": "TestError",
}
resource.get_json_response = mock_get_json_response(
[{'success': False, 'error': err_details}],
[],
status_code=404,
)
with self.assertRaises(errors.APIError) as e:
next(resource.iter_json_data())
err = e.exception
self.assertEqual(err.status_code, 404)
self.assertEqual(err.details, err_details)
def test_iter_csv_data_with_token(self):
"""
Test the iterator over CSV responses, which follows new pagination
"""
mock_responses = [
'good,better\nskiing,hiking\n',
'good,better\ncupcakes,brownies\n',
'',
]
for test_num, resource_creator in enumerate((
self.client.Accel,
self.client.BandPower,
self.client.HeartRate,
self.client.LFP,
self.client.ProbabilitySymptom,
self.client.Rotation,
self.client.State,
)):
resource = resource_creator()
#
# Successful Requests
#
calls = []
resource.get_csv_response = mock_get_csv_response(
mock_responses,
calls,
200,
[
{'X-Rune-Next-Page-Token': 'MTIzNDU2MDAwMA=='},
{'X-Rune-Next-Page-Token': 'MTIzNDU2MDAwMA=='},
{},
],
)
# Check the results
iterator = resource.iter_csv_text(test_num=test_num)
self.assertEqual(len(list(iterator)), 2)
# Check that all parameters were kept the same across calls,
# except for "next_page_token" (which will normally be different
# for each response)
self.assertEqual(calls, [
((), {'test_num': test_num}),
(
(),
{
'test_num': test_num,
'next_page_token': '<PASSWORD>MA=='
}
),
(
(),
{
'test_num': test_num,
'next_page_token': 'MTIzNDU2MDAwMA=='
}
),
])
def test_iter_csv_data(self):
"""
Test the iterator over CSV responses, which follows pagination
"""
mock_responses = [
'good,better\nskiing,hiking\n',
'good,better\ncupcakes,brownies\n',
'',
]
for test_num, resource_creator in enumerate((
self.client.Accel,
self.client.BandPower,
self.client.HeartRate,
self.client.LFP,
self.client.ProbabilitySymptom,
self.client.Rotation,
self.client.State,
)):
resource = resource_creator()
#
# Successful Requests
#
calls = []
resource.get_csv_response = mock_get_csv_response(
mock_responses,
calls,
)
# Check the results
num_results = 0
iterator = resource.iter_csv_text(test_num=test_num)
for i, actual in enumerate(iterator):
self.assertEqual(mock_responses[i], actual)
num_results += 1
# although there are 3 responses, the last (empty) body should not
# be returned by the iterator
self.assertEqual(num_results, 2)
# Check that all parameters were kept the same across calls,
# except for "page" (which must be incremented)
self.assertEqual(calls, [
((), {'test_num': test_num}),
((), {'test_num': test_num, 'page': 1}),
((), {'test_num': test_num, 'page': 2}),
])
#
# Request Error
# Iterator should check the response status for each request
#
err_details = {
"message": "i am an intentional error!",
"type": "TestError",
}
# note: CSV endpoints return JSON on API errors
resource.get_csv_response = mock_get_json_response(
[{'success': False, 'error': err_details}],
[],
status_code=404,
)
with self.assertRaises(errors.APIError) as e:
next(resource.iter_csv_text())
err = e.exception
self.assertEqual(err.status_code, 404)
self.assertEqual(err.details, err_details)
def test_iter_points(self):
"""
Test iterating over data as points. Uses the CSV endpoint.
"""
mock_responses = [
'lower,higher,label\n1,2,ints\n3.5,6.7,floats\n',
'lower,higher,label\n,8.9,missing data\n',
'',
]
expected = [
{'lower': 1, 'higher': 2, 'label': 'ints'},
{'lower': 3.5, 'higher': 6.7, 'label': 'floats'},
{'lower': None, 'higher': 8.9, 'label': 'missing data'},
]
for test_num, resource_creator in enumerate((
self.client.Accel,
self.client.BandPower,
self.client.HeartRate,
self.client.LFP,
self.client.ProbabilitySymptom,
self.client.Rotation,
self.client.State,
)):
resource = resource_creator()
# replace get_csv_response on the resource
resource.get_csv_response = mock_get_csv_response(
mock_responses,
[],
)
for i, point in enumerate(resource.points()):
self.assertDictEqual(expected[i], point)
# replace get_csv_response again, to restart the mock responses
resource.get_csv_response = mock_get_csv_response(
mock_responses,
[],
)
for i, point in enumerate(resource):
self.assertDictEqual(expected[i], point)
# check dtype for "higher", which always has a numeric
# value in the test data
self.assertNotIsInstance(point['higher'], np.float64)
def test_iter_points_numpy(self):
"""
Test iterating over data as points, using Numpy to convert.
"""
        stream.v1.USE_NUMPY = True
        # restore the module-level flag (assumed to default to False) so this
        # setting does not leak into other tests
        self.addCleanup(setattr, stream.v1, 'USE_NUMPY', False)
mock_responses = [
'lower,higher,label\n1,2,ints\n3.5,6.7,floats\n',
'lower,higher,label\n,8.9,missing data\n',
'',
]
expected = [
{'lower': 1, 'higher': 2, 'label': 'ints'},
{'lower': 3.5, 'higher': 6.7, 'label': 'floats'},
{'lower': np.NaN, 'higher': 8.9, 'label': 'missing data'},
]
for test_num, resource_creator in enumerate((
self.client.Accel,
self.client.BandPower,
self.client.HeartRate,
self.client.LFP,
self.client.ProbabilitySymptom,
self.client.Rotation,
self.client.State,
)):
resource = resource_creator()
# replace get_csv_response on the resource
resource.get_csv_response = mock_get_csv_response(
mock_responses,
[],
)
for i, point in enumerate(resource.points()):
self.assertDictEqual(expected[i], point)
# check dtype for "higher", which always has a numeric
# value in the test data
self.assertIsInstance(point['higher'], np.float64)
|
[
"unittest.mock.MagicMock",
"runeq.Config",
"unittest.mock.patch",
"runeq.stream.V1Client",
"unittest.mock.call"
] |
[((2690, 2728), 'unittest.mock.patch', 'mock.patch', (['"""runeq.stream.v1.requests"""'], {}), "('runeq.stream.v1.requests')\n", (2700, 2728), False, 'from unittest import mock, TestCase\n'), ((4077, 4115), 'unittest.mock.patch', 'mock.patch', (['"""runeq.stream.v1.requests"""'], {}), "('runeq.stream.v1.requests')\n", (4087, 4115), False, 'from unittest import mock, TestCase\n'), ((883, 899), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (897, 899), False, 'from unittest import mock, TestCase\n'), ((1870, 1886), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (1884, 1886), False, 'from unittest import mock, TestCase\n'), ((2322, 2377), 'runeq.Config', 'Config', ([], {'client_key_id': '"""abc"""', 'client_access_key': '"""abc123"""'}), "(client_key_id='abc', client_access_key='abc123')\n", (2328, 2377), False, 'from runeq import Config, stream, errors\n'), ((2435, 2460), 'runeq.stream.V1Client', 'stream.V1Client', (['self.cfg'], {}), '(self.cfg)\n', (2450, 2460), False, 'from runeq import Config, stream, errors\n'), ((3741, 3885), 'unittest.mock.call', 'mock.call', (['(self.cfg.stream_url + endpoint)'], {'headers': 'self.cfg.auth_headers', 'params': "{'leslie': 'knope', 'ron': 'swanson', 'test_num': test_num}"}), "(self.cfg.stream_url + endpoint, headers=self.cfg.auth_headers,\n params={'leslie': 'knope', 'ron': 'swanson', 'test_num': test_num})\n", (3750, 3885), False, 'from unittest import mock, TestCase\n'), ((5010, 5172), 'unittest.mock.call', 'mock.call', (['(self.cfg.stream_url + endpoint)'], {'stream': '(True)', 'headers': 'self.cfg.auth_headers', 'params': "{'leslie': 'knope', 'ron': 'swanson', 'test_num': test_num}"}), "(self.cfg.stream_url + endpoint, stream=True, headers=self.cfg.\n auth_headers, params={'leslie': 'knope', 'ron': 'swanson', 'test_num':\n test_num})\n", (5019, 5172), False, 'from unittest import mock, TestCase\n')]
|
import unittest
from project.service.dockerfile_service import DockerfileParser, DockerfileService
class Dockerfile_Service_Test(unittest.TestCase):
def test_given_a_dockerfile_when_parse_then_instructions_are_parsed(self):
parser = DockerfileParser('test/resources/Dockerfile_basic')
instructions = parser.structure()
self.assertEqual(len(instructions), 4)
        self.assertEqual(instructions[0].get("instruction"), 'FROM')
        self.assertEqual(instructions[1].get("instruction"), 'RUN')
        self.assertEqual(instructions[0].get("value"), 'alpine:3.4')
def test_given_an_empty_dockerfile_when_parse_then_instructions_is_empty(self):
parser = DockerfileParser('test/resources/Dockerfile_empty')
instructions = parser.structure()
self.assertEqual(len(instructions), 0)
def test_given_a_dockerfile_with_comments_when_parse_then_comments_are_not_parsed(self):
parser = DockerfileParser('test/resources/Dockerfile_basic_with_comments')
instructions = parser.structure()
self.assertEqual(len(instructions), 5)
        self.assertEqual(instructions[0].get("instruction"), 'FROM')
        self.assertEqual(instructions[0].get("value"), 'alpine:3.4')
        self.assertEqual(instructions[1].get("instruction"), 'RUN')
        self.assertEqual(instructions[2].get("instruction"), 'COMMENT_INSTRUCTION')
def test_given_a_dockerfile_with_multilines_instructions_when_parse_then_multilines_are_parsed_as_a_singleline(self):
parser = DockerfileParser('test/resources/Dockerfile_with_multiline_instructions')
instructions = parser.structure()
result = list(filter(lambda x: x['value'] == 'apt-get update && apt-get install -y bzr cvs git mercurial subversion', instructions))
        self.assertEqual(result[0]['startline'], 2)
        self.assertEqual(result[0]['endline'], 7)
        self.assertEqual(len(result), 1)
def test_given_a_dockerfile_without_user_when_check_then_KO(self):
result = DockerfileService.check_dockerfile(self,'test/resources/Dockerfile_basic_with_comments')
self.assertEqual(result['dockerfile_evaluation']['4_1']['evaluation'],'KO')
def test_given_a_dockerfile_with_user_when_check_then_OK(self):
result = DockerfileService.check_dockerfile(self,'test/resources/Dockerfile_with_user')
self.assertEqual(result['dockerfile_evaluation']['4_1']['evaluation'], 'OK')
def test_given_a_dockerfile_without_user_when_check_and_fix_then_KO(self):
result = DockerfileService.check_and_fix_dockerfile(self,'test/resources/Dockerfile_basic_with_comments')
self.assertEqual(result[0]['dockerfile_evaluation']['4_1']['evaluation'], 'KO')
def test_given_a_dockerfile_with_user_when_check_and_fix_then_OK(self):
result = DockerfileService.check_and_fix_dockerfile(self,'test/resources/Dockerfile_with_user')
user_check_result = list(filter(lambda x: x['4_1']['evaluation'] == 'OK', result))
self.assertEqual(len(user_check_result),1)
def test_given_a_dockerfile_without_user_when_fix_then_instruction_with_user_is_generated(self):
instructions = DockerfileService.parse_dockerfile(self,'test/resources/Dockerfile_basic_with_comments')
check_result = DockerfileService.evaluate_dockerfile(self, instructions)
result = DockerfileService.get_dockerfile_fixes(self, check_result)
user_instruction = list(filter(lambda x: x['instruction'] == 'USER', result))
user_add_instruction = list(filter(lambda x: x['instruction'] == 'RUN' and x['value'].startswith( 'useradd' ), result))
self.assertEqual(len(user_instruction),1)
self.assertEqual(len(user_add_instruction),1)
    def test_given_a_dockerfile_with_user_when_check_and_fix_then_first_result_is_OK(self):
result = DockerfileService.check_and_fix_dockerfile(self,'test/resources/Dockerfile_with_user')
self.assertEqual(result[0]['dockerfile_evaluation']['4_1']['evaluation'],'OK')
def test_given_a_dockerfile_without_healthcheck_when_fix_then_instruction_with_healthcheck_is_generated(self):
instructions = DockerfileService.parse_dockerfile(self,'test/resources/Dockerfile_basic_with_comments')
check_result = DockerfileService.evaluate_dockerfile(self, instructions)
result = DockerfileService.get_dockerfile_fixes(self, check_result)
healthcheck_instruction = list(filter(lambda x: x['instruction'] == 'HEALTHCHECK', result))
self.assertEqual(len(healthcheck_instruction),1)
def test_given_a_dockerfile_with_wrong_add_usage_when_fix_then_instruction_with_copy_is_generated(self):
instructions = DockerfileService.parse_dockerfile(self,'test/resources/Dockerfile_wrong_ADD_usage')
check_result = DockerfileService.evaluate_dockerfile(self, instructions)
result = DockerfileService.get_dockerfile_fixes(self, check_result, instructions)
copy_instruction = list(filter(lambda x: x['instruction'] == 'COPY', result))
self.assertEqual(len(copy_instruction),2)
def test_given_a_dockerfile_with_two_apt_get_when_fix_then_one_instruction_with_apt_get_is_generated(self):
instructions = DockerfileService.parse_dockerfile(self,'test/resources/Dockerfile_with_two_apt-get')
check_result = DockerfileService.evaluate_dockerfile(self, instructions)
result = DockerfileService.get_dockerfile_fixes(self, check_result, instructions)
        apt_get_instructions = list(filter(lambda x: x['instruction'] == 'RUN' and 'apt-get' in x['value'], result))
        self.assertEqual(len(apt_get_instructions), 1)
if __name__ == '__main__':
unittest.main()
|
[
"unittest.main",
"project.service.dockerfile_service.DockerfileService.check_and_fix_dockerfile",
"project.service.dockerfile_service.DockerfileService.check_dockerfile",
"project.service.dockerfile_service.DockerfileService.evaluate_dockerfile",
"project.service.dockerfile_service.DockerfileService.get_dockerfile_fixes",
"project.service.dockerfile_service.DockerfileService.parse_dockerfile",
"project.service.dockerfile_service.DockerfileParser"
] |
[((5737, 5752), 'unittest.main', 'unittest.main', ([], {}), '()\n', (5750, 5752), False, 'import unittest\n'), ((249, 300), 'project.service.dockerfile_service.DockerfileParser', 'DockerfileParser', (['"""test/resources/Dockerfile_basic"""'], {}), "('test/resources/Dockerfile_basic')\n", (265, 300), False, 'from project.service.dockerfile_service import DockerfileParser, DockerfileService\n'), ((734, 785), 'project.service.dockerfile_service.DockerfileParser', 'DockerfileParser', (['"""test/resources/Dockerfile_empty"""'], {}), "('test/resources/Dockerfile_empty')\n", (750, 785), False, 'from project.service.dockerfile_service import DockerfileParser, DockerfileService\n'), ((986, 1051), 'project.service.dockerfile_service.DockerfileParser', 'DockerfileParser', (['"""test/resources/Dockerfile_basic_with_comments"""'], {}), "('test/resources/Dockerfile_basic_with_comments')\n", (1002, 1051), False, 'from project.service.dockerfile_service import DockerfileParser, DockerfileService\n'), ((1619, 1692), 'project.service.dockerfile_service.DockerfileParser', 'DockerfileParser', (['"""test/resources/Dockerfile_with_multiline_instructions"""'], {}), "('test/resources/Dockerfile_with_multiline_instructions')\n", (1635, 1692), False, 'from project.service.dockerfile_service import DockerfileParser, DockerfileService\n'), ((2115, 2208), 'project.service.dockerfile_service.DockerfileService.check_dockerfile', 'DockerfileService.check_dockerfile', (['self', '"""test/resources/Dockerfile_basic_with_comments"""'], {}), "(self,\n 'test/resources/Dockerfile_basic_with_comments')\n", (2149, 2208), False, 'from project.service.dockerfile_service import DockerfileParser, DockerfileService\n'), ((2374, 2453), 'project.service.dockerfile_service.DockerfileService.check_dockerfile', 'DockerfileService.check_dockerfile', (['self', '"""test/resources/Dockerfile_with_user"""'], {}), "(self, 'test/resources/Dockerfile_with_user')\n", (2408, 2453), False, 'from project.service.dockerfile_service import DockerfileParser, DockerfileService\n'), ((2635, 2736), 'project.service.dockerfile_service.DockerfileService.check_and_fix_dockerfile', 'DockerfileService.check_and_fix_dockerfile', (['self', '"""test/resources/Dockerfile_basic_with_comments"""'], {}), "(self,\n 'test/resources/Dockerfile_basic_with_comments')\n", (2677, 2736), False, 'from project.service.dockerfile_service import DockerfileParser, DockerfileService\n'), ((2914, 3005), 'project.service.dockerfile_service.DockerfileService.check_and_fix_dockerfile', 'DockerfileService.check_and_fix_dockerfile', (['self', '"""test/resources/Dockerfile_with_user"""'], {}), "(self,\n 'test/resources/Dockerfile_with_user')\n", (2956, 3005), False, 'from project.service.dockerfile_service import DockerfileParser, DockerfileService\n'), ((3268, 3361), 'project.service.dockerfile_service.DockerfileService.parse_dockerfile', 'DockerfileService.parse_dockerfile', (['self', '"""test/resources/Dockerfile_basic_with_comments"""'], {}), "(self,\n 'test/resources/Dockerfile_basic_with_comments')\n", (3302, 3361), False, 'from project.service.dockerfile_service import DockerfileParser, DockerfileService\n'), ((3380, 3437), 'project.service.dockerfile_service.DockerfileService.evaluate_dockerfile', 'DockerfileService.evaluate_dockerfile', (['self', 'instructions'], {}), '(self, instructions)\n', (3417, 3437), False, 'from project.service.dockerfile_service import DockerfileParser, DockerfileService\n'), ((3455, 3513), 
'project.service.dockerfile_service.DockerfileService.get_dockerfile_fixes', 'DockerfileService.get_dockerfile_fixes', (['self', 'check_result'], {}), '(self, check_result)\n', (3493, 3513), False, 'from project.service.dockerfile_service import DockerfileParser, DockerfileService\n'), ((3926, 4017), 'project.service.dockerfile_service.DockerfileService.check_and_fix_dockerfile', 'DockerfileService.check_and_fix_dockerfile', (['self', '"""test/resources/Dockerfile_with_user"""'], {}), "(self,\n 'test/resources/Dockerfile_with_user')\n", (3968, 4017), False, 'from project.service.dockerfile_service import DockerfileParser, DockerfileService\n'), ((4239, 4332), 'project.service.dockerfile_service.DockerfileService.parse_dockerfile', 'DockerfileService.parse_dockerfile', (['self', '"""test/resources/Dockerfile_basic_with_comments"""'], {}), "(self,\n 'test/resources/Dockerfile_basic_with_comments')\n", (4273, 4332), False, 'from project.service.dockerfile_service import DockerfileParser, DockerfileService\n'), ((4351, 4408), 'project.service.dockerfile_service.DockerfileService.evaluate_dockerfile', 'DockerfileService.evaluate_dockerfile', (['self', 'instructions'], {}), '(self, instructions)\n', (4388, 4408), False, 'from project.service.dockerfile_service import DockerfileParser, DockerfileService\n'), ((4426, 4484), 'project.service.dockerfile_service.DockerfileService.get_dockerfile_fixes', 'DockerfileService.get_dockerfile_fixes', (['self', 'check_result'], {}), '(self, check_result)\n', (4464, 4484), False, 'from project.service.dockerfile_service import DockerfileParser, DockerfileService\n'), ((4775, 4864), 'project.service.dockerfile_service.DockerfileService.parse_dockerfile', 'DockerfileService.parse_dockerfile', (['self', '"""test/resources/Dockerfile_wrong_ADD_usage"""'], {}), "(self,\n 'test/resources/Dockerfile_wrong_ADD_usage')\n", (4809, 4864), False, 'from project.service.dockerfile_service import DockerfileParser, DockerfileService\n'), ((4883, 4940), 'project.service.dockerfile_service.DockerfileService.evaluate_dockerfile', 'DockerfileService.evaluate_dockerfile', (['self', 'instructions'], {}), '(self, instructions)\n', (4920, 4940), False, 'from project.service.dockerfile_service import DockerfileParser, DockerfileService\n'), ((4958, 5030), 'project.service.dockerfile_service.DockerfileService.get_dockerfile_fixes', 'DockerfileService.get_dockerfile_fixes', (['self', 'check_result', 'instructions'], {}), '(self, check_result, instructions)\n', (4996, 5030), False, 'from project.service.dockerfile_service import DockerfileParser, DockerfileService\n'), ((5303, 5393), 'project.service.dockerfile_service.DockerfileService.parse_dockerfile', 'DockerfileService.parse_dockerfile', (['self', '"""test/resources/Dockerfile_with_two_apt-get"""'], {}), "(self,\n 'test/resources/Dockerfile_with_two_apt-get')\n", (5337, 5393), False, 'from project.service.dockerfile_service import DockerfileParser, DockerfileService\n'), ((5412, 5469), 'project.service.dockerfile_service.DockerfileService.evaluate_dockerfile', 'DockerfileService.evaluate_dockerfile', (['self', 'instructions'], {}), '(self, instructions)\n', (5449, 5469), False, 'from project.service.dockerfile_service import DockerfileParser, DockerfileService\n'), ((5487, 5559), 'project.service.dockerfile_service.DockerfileService.get_dockerfile_fixes', 'DockerfileService.get_dockerfile_fixes', (['self', 'check_result', 'instructions'], {}), '(self, check_result, instructions)\n', (5525, 5559), False, 'from 
project.service.dockerfile_service import DockerfileParser, DockerfileService\n')]
|
import pytest
from cuml.dask.datasets.blobs import make_blobs
from cuml.dask.common.part_utils import _extract_partitions
from dask.distributed import Client
import dask.array as da
import cupy as cp
@pytest.mark.mg
@pytest.mark.parametrize("nrows", [1e4])
@pytest.mark.parametrize("ncols", [10])
@pytest.mark.parametrize("n_parts", [2, 23])
@pytest.mark.parametrize("input_type", ["dataframe", "array"])
@pytest.mark.parametrize("colocated", [True, False])
def test_extract_partitions_worker_list(nrows, ncols, n_parts, input_type,
colocated, cluster):
client = Client(cluster)
try:
X, y = make_blobs(nrows=nrows, ncols=ncols, n_parts=n_parts,
output=input_type)
if colocated:
gpu_futures = client.sync(_extract_partitions, (X, y), client)
else:
gpu_futures = client.sync(_extract_partitions, X, client)
parts = list(map(lambda x: x[1], gpu_futures))
assert len(parts) == n_parts
finally:
client.close()
@pytest.mark.mg
@pytest.mark.parametrize("nrows", [24])
@pytest.mark.parametrize("ncols", [2])
@pytest.mark.parametrize("n_parts", [2, 23])
@pytest.mark.parametrize("input_type", ["dataframe", "array"])
@pytest.mark.parametrize("colocated", [True, False])
def test_extract_partitions_shape(nrows, ncols, n_parts, input_type,
colocated, cluster):
client = Client(cluster)
try:
X, y = make_blobs(nrows=nrows, ncols=ncols, n_parts=n_parts,
output=input_type)
if input_type == "dataframe":
X_len_parts = X.map_partitions(len).compute()
y_len_parts = y.map_partitions(len).compute()
elif input_type == "array":
X_len_parts = X.chunks[0]
y_len_parts = y.chunks[0]
if colocated:
gpu_futures = client.sync(_extract_partitions, (X, y), client)
else:
gpu_futures = client.sync(_extract_partitions, X, client)
parts = [part.result() for worker, part in gpu_futures]
if colocated:
for i in range(len(parts)):
assert (parts[i][0].shape[0] == X_len_parts[i]) and (
parts[i][1].shape[0] == y_len_parts[i])
else:
for i in range(len(parts)):
assert (parts[i].shape[0] == X_len_parts[i])
finally:
client.close()
@pytest.mark.mg
@pytest.mark.parametrize("nrows", [24])
@pytest.mark.parametrize("ncols", [2])
@pytest.mark.parametrize("n_parts", [2, 12])
@pytest.mark.parametrize("X_delayed", [True, False])
@pytest.mark.parametrize("y_delayed", [True, False])
@pytest.mark.parametrize("colocated", [True, False])
def test_extract_partitions_futures(nrows, ncols, n_parts, X_delayed,
y_delayed, colocated, cluster):
client = Client(cluster)
try:
X = cp.random.standard_normal((nrows, ncols))
y = cp.random.standard_normal((nrows, ))
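        # chunk the rows into n_parts blocks; -1 keeps all columns in one chunk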
X = da.from_array(X, chunks=(nrows/n_parts, -1))
y = da.from_array(y, chunks=(nrows/n_parts, ))
if not X_delayed:
X = client.persist(X)
if not y_delayed:
y = client.persist(y)
if colocated:
gpu_futures = client.sync(_extract_partitions, (X, y), client)
else:
gpu_futures = client.sync(_extract_partitions, X, client)
parts = list(map(lambda x: x[1], gpu_futures))
assert len(parts) == n_parts
finally:
client.close()
|
[
"dask.distributed.Client",
"cuml.dask.datasets.blobs.make_blobs",
"cupy.random.standard_normal",
"dask.array.from_array",
"pytest.mark.parametrize"
] |
[((219, 262), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""nrows"""', '[10000.0]'], {}), "('nrows', [10000.0])\n", (242, 262), False, 'import pytest\n'), ((260, 298), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""ncols"""', '[10]'], {}), "('ncols', [10])\n", (283, 298), False, 'import pytest\n'), ((300, 343), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""n_parts"""', '[2, 23]'], {}), "('n_parts', [2, 23])\n", (323, 343), False, 'import pytest\n'), ((345, 406), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""input_type"""', "['dataframe', 'array']"], {}), "('input_type', ['dataframe', 'array'])\n", (368, 406), False, 'import pytest\n'), ((408, 459), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""colocated"""', '[True, False]'], {}), "('colocated', [True, False])\n", (431, 459), False, 'import pytest\n'), ((1079, 1117), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""nrows"""', '[24]'], {}), "('nrows', [24])\n", (1102, 1117), False, 'import pytest\n'), ((1119, 1156), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""ncols"""', '[2]'], {}), "('ncols', [2])\n", (1142, 1156), False, 'import pytest\n'), ((1158, 1201), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""n_parts"""', '[2, 23]'], {}), "('n_parts', [2, 23])\n", (1181, 1201), False, 'import pytest\n'), ((1203, 1264), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""input_type"""', "['dataframe', 'array']"], {}), "('input_type', ['dataframe', 'array'])\n", (1226, 1264), False, 'import pytest\n'), ((1266, 1317), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""colocated"""', '[True, False]'], {}), "('colocated', [True, False])\n", (1289, 1317), False, 'import pytest\n'), ((2476, 2514), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""nrows"""', '[24]'], {}), "('nrows', [24])\n", (2499, 2514), False, 'import pytest\n'), ((2516, 2553), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""ncols"""', '[2]'], {}), "('ncols', [2])\n", (2539, 2553), False, 'import pytest\n'), ((2555, 2598), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""n_parts"""', '[2, 12]'], {}), "('n_parts', [2, 12])\n", (2578, 2598), False, 'import pytest\n'), ((2600, 2651), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""X_delayed"""', '[True, False]'], {}), "('X_delayed', [True, False])\n", (2623, 2651), False, 'import pytest\n'), ((2653, 2704), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""y_delayed"""', '[True, False]'], {}), "('y_delayed', [True, False])\n", (2676, 2704), False, 'import pytest\n'), ((2706, 2757), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""colocated"""', '[True, False]'], {}), "('colocated', [True, False])\n", (2729, 2757), False, 'import pytest\n'), ((609, 624), 'dask.distributed.Client', 'Client', (['cluster'], {}), '(cluster)\n', (615, 624), False, 'from dask.distributed import Client\n'), ((1455, 1470), 'dask.distributed.Client', 'Client', (['cluster'], {}), '(cluster)\n', (1461, 1470), False, 'from dask.distributed import Client\n'), ((2910, 2925), 'dask.distributed.Client', 'Client', (['cluster'], {}), '(cluster)\n', (2916, 2925), False, 'from dask.distributed import Client\n'), ((650, 722), 'cuml.dask.datasets.blobs.make_blobs', 'make_blobs', ([], {'nrows': 'nrows', 'ncols': 'ncols', 'n_parts': 'n_parts', 'output': 'input_type'}), '(nrows=nrows, ncols=ncols, n_parts=n_parts, output=input_type)\n', (660, 722), False, 'from cuml.dask.datasets.blobs import 
make_blobs\n'), ((1496, 1568), 'cuml.dask.datasets.blobs.make_blobs', 'make_blobs', ([], {'nrows': 'nrows', 'ncols': 'ncols', 'n_parts': 'n_parts', 'output': 'input_type'}), '(nrows=nrows, ncols=ncols, n_parts=n_parts, output=input_type)\n', (1506, 1568), False, 'from cuml.dask.datasets.blobs import make_blobs\n'), ((2948, 2989), 'cupy.random.standard_normal', 'cp.random.standard_normal', (['(nrows, ncols)'], {}), '((nrows, ncols))\n', (2973, 2989), True, 'import cupy as cp\n'), ((3002, 3037), 'cupy.random.standard_normal', 'cp.random.standard_normal', (['(nrows,)'], {}), '((nrows,))\n', (3027, 3037), True, 'import cupy as cp\n'), ((3052, 3098), 'dask.array.from_array', 'da.from_array', (['X'], {'chunks': '(nrows / n_parts, -1)'}), '(X, chunks=(nrows / n_parts, -1))\n', (3065, 3098), True, 'import dask.array as da\n'), ((3109, 3152), 'dask.array.from_array', 'da.from_array', (['y'], {'chunks': '(nrows / n_parts,)'}), '(y, chunks=(nrows / n_parts,))\n', (3122, 3152), True, 'import dask.array as da\n')]
|
import config_with_yaml as config
class Config:
""" Base class for reading configuration parameters from config file. """
def __init__(self, filename='modules/config.yaml'):
""" Instance constructor
:param filename: configuration file (string)
"""
# Read config parameters from config file
self.params = config.load(filename)
|
[
"config_with_yaml.load"
] |
[((368, 389), 'config_with_yaml.load', 'config.load', (['filename'], {}), '(filename)\n', (379, 389), True, 'import config_with_yaml as config\n')]
|
from django.apps import AppConfig
from django.utils.translation import gettext_lazy as _
class AddressConfig(AppConfig):
label = 'address'
name = 'oscar.apps.address'
verbose_name = _('Address')
|
[
"django.utils.translation.gettext_lazy"
] |
[((196, 208), 'django.utils.translation.gettext_lazy', '_', (['"""Address"""'], {}), "('Address')\n", (197, 208), True, 'from django.utils.translation import gettext_lazy as _\n')]
|
import pytest
from aspect.core.engines.Engine import Engine
@pytest.fixture
def engine():
return Engine()
def test_engine_instantiation(engine):
assert engine.target == "PythonMemoryModel"
def test_engine_get_operation(engine):
with pytest.raises(NotImplementedError):
engine.get_operation("common.query")
|
[
"pytest.raises",
"aspect.core.engines.Engine.Engine"
] |
[((105, 113), 'aspect.core.engines.Engine.Engine', 'Engine', ([], {}), '()\n', (111, 113), False, 'from aspect.core.engines.Engine import Engine\n'), ((251, 285), 'pytest.raises', 'pytest.raises', (['NotImplementedError'], {}), '(NotImplementedError)\n', (264, 285), False, 'import pytest\n')]
|
import scrapy
import json
import urllib.parse
import re
from scrapy import http
from scrapy_splash import SplashRequest
paper_without_doi = 0
total_paper_count = 0
class IeeeSpider(scrapy.Spider):
name = "ieee"
custom_settings = {'ROBOTSTXT_OBEY': True}
def start_requests(self):
url = 'http://ieeexplore.ieee.org/rest/publication'
header = {'Accept': 'application/json, text/plain, */*',
'Content-Type': 'application/json;charset=UTF-8',
'Origin':'http://ieeexplore.ieee.org'}
body = {"contentType": "conferences",
"tabId": "topic",
"publisher": "",
"collection": "",
"pageNumber": 1,
"selectedValue": "4291946551",
}
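        # POST the JSON filter to the REST endpoint; the paginated result is
        # handled in self.parse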
yield http.Request(url, method="POST", body=json.dumps(body),
headers=header, callback=self.parse)
def parse(self, response):
total_page_number = 177
url = 'http://ieeexplore.ieee.org/rest/publication'
page_number = 1
header = {'Accept': 'application/json, text/plain, */*',
'Content-Type': 'application/json;charset=UTF-8'}
body = {"contentType": "conferences",
"tabId": "topic",
"publisher": "",
"collection": "",
"pageNumber": str(page_number),
"selectedValue": "4291946551"}
for i in range(total_page_number):
yield http.Request(url, method="POST", body=json.dumps(body), headers=header,
callback=self.parse_records,
dont_filter=True)
page_number += 1
if page_number % 10 == 0:
                print('page number processed:', page_number)
body["pageNumber"] = str(page_number)
def parse_records(self, response):
host = "http://ieeexplore.ieee.org"
list_of_records = json.loads(response.body_as_unicode())["records"]
# Each record has a list of titleHistory(25 events each page), but only get the first event anyway
for record in list_of_records:
if "titleHistory" in record:
event = record["titleHistory"][0]
yield scrapy.Request(urllib.parse.urljoin(host, event["publicationLink"]),
callback=self.parse_papers)
def parse_papers(self, response):
# Response will be: A page with a list of research paper.
# Request will be the links of individidual paper sites
host = "http://ieeexplore.ieee.org"
papers = response.xpath("//ul[contains(@class, 'results')]/li/div[contains(@class,'txt')]/h3/a/@href").extract()
        # Only extract 5 papers per listing page in order to widen the scope of the covered journals
        paper_count = 5
count = 0
global total_paper_count
for paper in papers:
if count == paper_count:
break
if total_paper_count % 100 == 0:
print(("processed: ", total_paper_count, " pages"))
link = urllib.parse.urljoin(host, paper)
yield SplashRequest(link, self.parse_each_paper,
endpoint='render.html',
)
total_paper_count += 1
count += 1
def parse_each_paper(self, response):
# Response would be individual page of each paper
# Target: scrape the page and save to json / db
id_number = re.search(r'[0-9]+', response.url).group(0)
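        # the numeric document id from the URL doubles as the element id of the
        # metadata container scraped below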
doi = response.xpath('//*[@id="' + str(id_number) + '"]/div[3]/div[2]/div[1]/a/text()').extract_first()
global paper_without_doi
        if doi is None:
doi = response.xpath('//*[@id="' + str(id_number) + '"]/div[3]/div[2]/div[2]/a/text()').extract_first()
            if doi is None:
                paper_without_doi += 1
if paper_without_doi % 10 == 0 and paper_without_doi > 0:
print(("Paper without DOI: ", paper_without_doi))
title = response.xpath('//*[@id="LayoutWrapper"]/div[6]/div[3]/div/section[1]/div[2]/div[1]/div[1]/h1/span/text()').extract_first()
authors = response.xpath('//*[@id="LayoutWrapper"]/div[6]/div[3]/div/div/div/div[2]/div/div/span/span/a/span/text()').extract()
abstract = response.xpath('//*[@id="' + str(id_number) + '"]/div[1]/div/div/div/text()').extract_first()
published_in = response.xpath('//*[@id="' + str(id_number) + '"]/div[2]/a/text()').extract_first()
try:
date_of_conference = response.xpath('//*[@id="' + str(id_number) + '"]/div[3]/div[1]/div[1]/text()').extract()[-1].strip()
except:
date_of_conference = None
try:
electronic_isbn = response.xpath('//*[@id="' + str(id_number) + '"]/div[3]/div[1]/div[3]/div[2]/div[1]/text()').extract()[-1].strip()
except:
electronic_isbn = None
try:
print_on_demand_isbn = response.xpath('//*[@id="' + str(id_number) + '"]/div[3]/div[1]/div[3]/div[2]/div[2]/text()').extract()[-1].strip()
except:
print_on_demand_isbn = None
pdf_link = urllib.parse.urljoin("http://ieeexplore.ieee.org", response.url)
yield {
"title": title,
"doi": doi,
"authors": authors[0] if len(authors) != 0 else None,
"other authors": authors[1:],
"abstract": abstract,
"published in": published_in,
"Date of Conference": date_of_conference,
"Electronic ISBN": electronic_isbn,
"Print on Demand ISBN": print_on_demand_isbn,
"PDF Link": pdf_link
}
|
[
"scrapy_splash.SplashRequest",
"re.search",
"json.dumps"
] |
[((3188, 3254), 'scrapy_splash.SplashRequest', 'SplashRequest', (['link', 'self.parse_each_paper'], {'endpoint': '"""render.html"""'}), "(link, self.parse_each_paper, endpoint='render.html')\n", (3201, 3254), False, 'from scrapy_splash import SplashRequest\n'), ((3556, 3589), 're.search', 're.search', (['"""[0-9]+"""', 'response.url'], {}), "('[0-9]+', response.url)\n", (3565, 3589), False, 'import re\n'), ((843, 859), 'json.dumps', 'json.dumps', (['body'], {}), '(body)\n', (853, 859), False, 'import json\n'), ((1548, 1564), 'json.dumps', 'json.dumps', (['body'], {}), '(body)\n', (1558, 1564), False, 'import json\n')]
|
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Cron jobs which perform various Exemption maintenance tasks."""
import datetime
import logging
import webapp2
from webapp2_extras import routes
from google.appengine.ext import deferred
from upvote.gae.datastore.models import exemption as exemption_models
from upvote.gae.datastore.models import utils as model_utils
from upvote.gae.lib.exemption import api as exemption_api
from upvote.gae.lib.exemption import notify
from upvote.gae.lib.exemption import monitoring
from upvote.gae.utils import env_utils
from upvote.gae.utils import group_utils
from upvote.gae.utils import handler_utils
from upvote.gae.utils import user_utils
from upvote.shared import constants
# Done for the sake of brevity.
EXEMPTION_STATE = constants.EXEMPTION_STATE
class ProcessExemptions(handler_utils.CronJobHandler):
"""Handler for processing exemptions."""
def get(self):
logging.info('Processing Exemptions...')
exm_query = exemption_models.Exemption.query(
exemption_models.Exemption.state == EXEMPTION_STATE.REQUESTED)
exm_count = 0
for exm in exm_query:
deferred.defer(
exemption_api.Process, exm.key,
_queue=constants.TASK_QUEUE.EXEMPTIONS)
exm_count += 1
monitoring.requested_exemptions.Set(exm_count)
logging.info('Deferred %d Exemption(s) for processing', exm_count)
def _NotifyExpirationsInRange(start_dt, end_dt):
"""Sends an email for all APPROVED Exemptions that expire in the given range.
Args:
start_dt: The starting datetime of the expiration window.
end_dt: The ending datetime of the expiration window.
"""
# Query for the Keys of all Exemptions that expire in the given range.
exm_query = exemption_models.Exemption.query(
exemption_models.Exemption.state == EXEMPTION_STATE.APPROVED,
exemption_models.Exemption.deactivation_dt >= start_dt,
exemption_models.Exemption.deactivation_dt < end_dt)
exm_keys = exm_query.fetch(keys_only=True)
for exm_key in exm_keys:
notify.SendExpirationEmail(exm_key)
class NotifyUpcomingExpirations(handler_utils.CronJobHandler):
"""Handler for notifying users of upcoming exemption expirations."""
def get(self):
now = datetime.datetime.utcnow()
# Notify all users whose Exemptions now have less than a week left, in order
# to give reasonable advance warning (e.g. long weekends, holidays, etc).
one_week_start_dt = now + datetime.timedelta(days=7, hours=-1)
one_week_end_dt = now + datetime.timedelta(days=7)
# Notify all users whose Exemptions now have less that 24 hours left. This
# will act as a final reminder, and will also ensure that even users who
# choose a 1-day Exemption will get an email warning (for what it's worth).
one_day_start_dt = now + datetime.timedelta(days=1, hours=-1)
one_day_end_dt = now + datetime.timedelta(days=1)
tuples = [
(one_week_start_dt, one_week_end_dt),
(one_day_start_dt, one_day_end_dt)]
# Defer a task for each batch of notifications.
for start_dt, end_dt in tuples:
deferred.defer(
_NotifyExpirationsInRange, start_dt, end_dt,
_queue=constants.TASK_QUEUE.EXEMPTIONS)
class ExpireExemptions(handler_utils.CronJobHandler):
"""Handler for expiring exemptions."""
def get(self):
logging.info('Expiring Exemptions...')
now = datetime.datetime.utcnow()
exm_query = exemption_models.Exemption.query(
exemption_models.Exemption.state == EXEMPTION_STATE.APPROVED,
exemption_models.Exemption.deactivation_dt <= now)
exm_count = 0
for exm in exm_query:
deferred.defer(
exemption_api.Expire, exm.key,
_queue=constants.TASK_QUEUE.EXEMPTIONS)
exm_count += 1
monitoring.expired_exemptions.Set(exm_count)
logging.info('Deferred %d Exemption(s) for expiration', exm_count)
ROUTES = routes.PathPrefixRoute('/exemptions', [
webapp2.Route('/process', handler=ProcessExemptions),
webapp2.Route(
'/notify-upcoming-expirations',
handler=NotifyUpcomingExpirations),
webapp2.Route('/expire', handler=ExpireExemptions),
])
|
[
"upvote.gae.lib.exemption.monitoring.expired_exemptions.Set",
"upvote.gae.lib.exemption.monitoring.requested_exemptions.Set",
"upvote.gae.lib.exemption.notify.SendExpirationEmail",
"logging.info",
"datetime.datetime.utcnow",
"upvote.gae.datastore.models.exemption.Exemption.query",
"google.appengine.ext.deferred.defer",
"datetime.timedelta",
"webapp2.Route"
] |
[((2290, 2501), 'upvote.gae.datastore.models.exemption.Exemption.query', 'exemption_models.Exemption.query', (['(exemption_models.Exemption.state == EXEMPTION_STATE.APPROVED)', '(exemption_models.Exemption.deactivation_dt >= start_dt)', '(exemption_models.Exemption.deactivation_dt < end_dt)'], {}), '(exemption_models.Exemption.state ==\n EXEMPTION_STATE.APPROVED, exemption_models.Exemption.deactivation_dt >=\n start_dt, exemption_models.Exemption.deactivation_dt < end_dt)\n', (2322, 2501), True, 'from upvote.gae.datastore.models import exemption as exemption_models\n'), ((1471, 1511), 'logging.info', 'logging.info', (['"""Processing Exemptions..."""'], {}), "('Processing Exemptions...')\n", (1483, 1511), False, 'import logging\n'), ((1529, 1628), 'upvote.gae.datastore.models.exemption.Exemption.query', 'exemption_models.Exemption.query', (['(exemption_models.Exemption.state == EXEMPTION_STATE.REQUESTED)'], {}), '(exemption_models.Exemption.state ==\n EXEMPTION_STATE.REQUESTED)\n', (1561, 1628), True, 'from upvote.gae.datastore.models import exemption as exemption_models\n'), ((1819, 1865), 'upvote.gae.lib.exemption.monitoring.requested_exemptions.Set', 'monitoring.requested_exemptions.Set', (['exm_count'], {}), '(exm_count)\n', (1854, 1865), False, 'from upvote.gae.lib.exemption import monitoring\n'), ((1870, 1936), 'logging.info', 'logging.info', (['"""Deferred %d Exemption(s) for processing"""', 'exm_count'], {}), "('Deferred %d Exemption(s) for processing', exm_count)\n", (1882, 1936), False, 'import logging\n'), ((2590, 2625), 'upvote.gae.lib.exemption.notify.SendExpirationEmail', 'notify.SendExpirationEmail', (['exm_key'], {}), '(exm_key)\n', (2616, 2625), False, 'from upvote.gae.lib.exemption import notify\n'), ((2791, 2817), 'datetime.datetime.utcnow', 'datetime.datetime.utcnow', ([], {}), '()\n', (2815, 2817), False, 'import datetime\n'), ((3898, 3936), 'logging.info', 'logging.info', (['"""Expiring Exemptions..."""'], {}), "('Expiring Exemptions...')\n", (3910, 3936), False, 'import logging\n'), ((3948, 3974), 'datetime.datetime.utcnow', 'datetime.datetime.utcnow', ([], {}), '()\n', (3972, 3974), False, 'import datetime\n'), ((3991, 4145), 'upvote.gae.datastore.models.exemption.Exemption.query', 'exemption_models.Exemption.query', (['(exemption_models.Exemption.state == EXEMPTION_STATE.APPROVED)', '(exemption_models.Exemption.deactivation_dt <= now)'], {}), '(exemption_models.Exemption.state ==\n EXEMPTION_STATE.APPROVED, exemption_models.Exemption.deactivation_dt <= now\n )\n', (4023, 4145), True, 'from upvote.gae.datastore.models import exemption as exemption_models\n'), ((4338, 4382), 'upvote.gae.lib.exemption.monitoring.expired_exemptions.Set', 'monitoring.expired_exemptions.Set', (['exm_count'], {}), '(exm_count)\n', (4371, 4382), False, 'from upvote.gae.lib.exemption import monitoring\n'), ((4387, 4453), 'logging.info', 'logging.info', (['"""Deferred %d Exemption(s) for expiration"""', 'exm_count'], {}), "('Deferred %d Exemption(s) for expiration', exm_count)\n", (4399, 4453), False, 'import logging\n'), ((4509, 4561), 'webapp2.Route', 'webapp2.Route', (['"""/process"""'], {'handler': 'ProcessExemptions'}), "('/process', handler=ProcessExemptions)\n", (4522, 4561), False, 'import webapp2\n'), ((4567, 4652), 'webapp2.Route', 'webapp2.Route', (['"""/notify-upcoming-expirations"""'], {'handler': 'NotifyUpcomingExpirations'}), "('/notify-upcoming-expirations', handler=NotifyUpcomingExpirations\n )\n", (4580, 4652), False, 'import webapp2\n'), ((4670, 4720), 'webapp2.Route', 
'webapp2.Route', (['"""/expire"""'], {'handler': 'ExpireExemptions'}), "('/expire', handler=ExpireExemptions)\n", (4683, 4720), False, 'import webapp2\n'), ((1685, 1776), 'google.appengine.ext.deferred.defer', 'deferred.defer', (['exemption_api.Process', 'exm.key'], {'_queue': 'constants.TASK_QUEUE.EXEMPTIONS'}), '(exemption_api.Process, exm.key, _queue=constants.TASK_QUEUE.\n EXEMPTIONS)\n', (1699, 1776), False, 'from google.appengine.ext import deferred\n'), ((3008, 3044), 'datetime.timedelta', 'datetime.timedelta', ([], {'days': '(7)', 'hours': '(-1)'}), '(days=7, hours=-1)\n', (3026, 3044), False, 'import datetime\n'), ((3073, 3099), 'datetime.timedelta', 'datetime.timedelta', ([], {'days': '(7)'}), '(days=7)\n', (3091, 3099), False, 'import datetime\n'), ((3366, 3402), 'datetime.timedelta', 'datetime.timedelta', ([], {'days': '(1)', 'hours': '(-1)'}), '(days=1, hours=-1)\n', (3384, 3402), False, 'import datetime\n'), ((3430, 3456), 'datetime.timedelta', 'datetime.timedelta', ([], {'days': '(1)'}), '(days=1)\n', (3448, 3456), False, 'import datetime\n'), ((3658, 3762), 'google.appengine.ext.deferred.defer', 'deferred.defer', (['_NotifyExpirationsInRange', 'start_dt', 'end_dt'], {'_queue': 'constants.TASK_QUEUE.EXEMPTIONS'}), '(_NotifyExpirationsInRange, start_dt, end_dt, _queue=\n constants.TASK_QUEUE.EXEMPTIONS)\n', (3672, 3762), False, 'from google.appengine.ext import deferred\n'), ((4205, 4295), 'google.appengine.ext.deferred.defer', 'deferred.defer', (['exemption_api.Expire', 'exm.key'], {'_queue': 'constants.TASK_QUEUE.EXEMPTIONS'}), '(exemption_api.Expire, exm.key, _queue=constants.TASK_QUEUE.\n EXEMPTIONS)\n', (4219, 4295), False, 'from google.appengine.ext import deferred\n')]
|
from ledgerblue.comm import getDongle
import struct
import algosdk
from algosdk.future import transaction
import base64
import os
import sys
import inspect
import nacl.signing
from Cryptodome.Hash import SHA256
currentdir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
parentdir = os.path.dirname(currentdir)
sys.path.insert(0, parentdir)
from test import txn_utils
def get_app_create_txn():
approve_app = b'\x02 \x05\x00\x05\x04\x02\x01&\x07\x04vote\tVoteBegin\x07VoteEnd\x05voted\x08RegBegin\x06RegEnd\x07Creator1\x18"\x12@\x00\x951\x19\x12@\x00\x871\x19$\x12@\x00y1\x19%\x12@\x00R1\x19!\x04\x12@\x00<6\x1a\x00(\x12@\x00\x01\x002\x06)d\x0f2\x06*d\x0e\x10@\x00\x01\x00"2\x08+c5\x005\x014\x00A\x00\x02"C6\x1a\x016\x1a\x01d!\x04\x08g"+6\x1a\x01f!\x04C2\x06\'\x04d\x0f2\x06\'\x05d\x0e\x10C"2\x08+c5\x005\x012\x06*d\x0e4\x00\x10A\x00\t4\x014\x01d!\x04\tg!\x04C1\x00\'\x06d\x12C1\x00\'\x06d\x12C\'\x061\x00g1\x1b$\x12@\x00\x01\x00\'\x046\x1a\x00\x17g\'\x056\x1a\x01\x17g)6\x1a\x02\x17g*6\x1a\x03\x17g!\x04C'
clear_pgm = b'\x02 \x01\x01'
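    # pad the clear program to 2048 bytes with a 0x22 filler byte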
    clear_pgm = clear_pgm + (2048 - len(clear_pgm)) * b'\x22'
    # approve_app is a compiled TEAL program taken from https://pyteal.readthedocs.io/en/stable/examples.html#periodic-payment
    # we truncated the program because of the memory limit on the Ledger
approve_app = approve_app + (2048 - len(approve_app))*b'\x22'
local_ints = 2
local_bytes = 5
global_ints = 24
global_bytes = 1
args = [b'/x65/x87',
b'/x68/x87',
b'/x61/x87',
b'/x62/x87',
b'/x63/x87',
b'/x64/x87',
b'/x90/x87',
b'/x91/x87',
b'/x91/x87',
b'/x92/x87',
b'/x93/x87',
b'/x94/x87',
b'/x95/x87',
b'/x96/x87',
(32 -(8*14))*b'A']
global_schema = transaction.StateSchema(global_ints, global_bytes)
local_schema = transaction.StateSchema(local_ints, local_bytes)
    local_sp = transaction.SuggestedParams(fee=2100, first=6002000, last=6003000,
                                           gen="testnet-v1.0",
                                           gh="SGO1GKSzyE7IEPItTxCByw9x8FmnrCDexi9/cOUJOiI=", flat_fee=True)
txn = algosdk.future.transaction.ApplicationCreateTxn(sender="YK54TGVZ37C7P76GKLXTY2LAH2522VD3U2434HRKE7NMXA65VHJVLFVOE4",
sp=local_sp, approval_program=approve_app, on_complete=transaction.OnComplete.NoOpOC.real,clear_program= clear_pgm, global_schema=global_schema,
foreign_apps=[55,22], foreign_assets=[31566704,31566708], accounts=["<KEY>",
"<KEY>",
"<KEY>",
"<KEY>"],
app_args=args,
local_schema=local_schema )
return txn
def hash_bytes(bytes_array):
h = SHA256.new()
h.update(bytes_array)
return base64.b64encode(h.digest()).decode('ascii')
if __name__ == '__main__':
dongle = getDongle(True)
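    # APDU header: CLA=0x80, INS=0x03 (presumably this app's "get public key"
    # command), P1=P2=0 and an empty data field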
apdu = struct.pack('>BBBBB', 0x80, 0x3, 0x0, 0x0, 0x0)
pubKey = dongle.exchange(apdu)
print("---------------")
print("public key: ", type(pubKey))
print("---------------")
txn = get_app_create_txn()
print("---------------")
print("approval app hash: ",hash_bytes(txn.approval_program).lower())
print("---------------")
print("---------------")
print("clear app hash: ",hash_bytes(txn.clear_program).lower())
print("---------------")
for i in range(15):
print("---------------")
print("arg hash: ",hash_bytes(txn.app_args[i]).lower())
print("---------------")
decoded_txn = base64.b64decode(algosdk.encoding.msgpack_encode(txn))
sig = txn_utils.sign_algo_txn(dongle, decoded_txn)
verify_key = nacl.signing.VerifyKey(bytes(pubKey))
verify_key.verify(smessage=b'TX' + decoded_txn, signature=bytes(sig))
|
[
"Cryptodome.Hash.SHA256.new",
"algosdk.future.transaction.ApplicationCreateTxn",
"test.txn_utils.sign_algo_txn",
"os.path.dirname",
"sys.path.insert",
"algosdk.future.transaction.StateSchema",
"struct.pack",
"algosdk.encoding.msgpack_encode",
"algosdk.future.transaction.SuggestedParams",
"inspect.currentframe",
"ledgerblue.comm.getDongle"
] |
[((314, 341), 'os.path.dirname', 'os.path.dirname', (['currentdir'], {}), '(currentdir)\n', (329, 341), False, 'import os\n'), ((342, 371), 'sys.path.insert', 'sys.path.insert', (['(0)', 'parentdir'], {}), '(0, parentdir)\n', (357, 371), False, 'import sys\n'), ((1889, 1939), 'algosdk.future.transaction.StateSchema', 'transaction.StateSchema', (['global_ints', 'global_bytes'], {}), '(global_ints, global_bytes)\n', (1912, 1939), False, 'from algosdk.future import transaction\n'), ((1959, 2007), 'algosdk.future.transaction.StateSchema', 'transaction.StateSchema', (['local_ints', 'local_bytes'], {}), '(local_ints, local_bytes)\n', (1982, 2007), False, 'from algosdk.future import transaction\n'), ((2023, 2184), 'algosdk.future.transaction.SuggestedParams', 'transaction.SuggestedParams', ([], {'fee': '(2100)', 'first': '(6002000)', 'last': '(6003000)', 'gen': '"""testnet-v1.0"""', 'gh': '"""SGO1GKSzyE7IEPItTxCByw9x8FmnrCDexi9/cOUJOiI="""', 'flat_fee': '(True)'}), "(fee=2100, first=6002000, last=6003000, gen=\n 'testnet-v1.0', gh='SGO1GKSzyE7IEPItTxCByw9x8FmnrCDexi9/cOUJOiI=',\n flat_fee=True)\n", (2050, 2184), False, 'from algosdk.future import transaction\n'), ((2258, 2697), 'algosdk.future.transaction.ApplicationCreateTxn', 'algosdk.future.transaction.ApplicationCreateTxn', ([], {'sender': '"""YK54TGVZ37C7P76GKLXTY2LAH2522VD3U2434HRKE7NMXA65VHJVLFVOE4"""', 'sp': 'local_sp', 'approval_program': 'approve_app', 'on_complete': 'transaction.OnComplete.NoOpOC.real', 'clear_program': 'clear_pgm', 'global_schema': 'global_schema', 'foreign_apps': '[55, 22]', 'foreign_assets': '[31566704, 31566708]', 'accounts': "['<KEY>', '<KEY>', '<KEY>', '<KEY>']", 'app_args': 'args', 'local_schema': 'local_schema'}), "(sender=\n 'YK54TGVZ37C7P76GKLXTY2LAH2522VD3U2434HRKE7NMXA65VHJVLFVOE4', sp=\n local_sp, approval_program=approve_app, on_complete=transaction.\n OnComplete.NoOpOC.real, clear_program=clear_pgm, global_schema=\n global_schema, foreign_apps=[55, 22], foreign_assets=[31566704, \n 31566708], accounts=['<KEY>', '<KEY>', '<KEY>', '<KEY>'], app_args=args,\n local_schema=local_schema)\n", (2305, 2697), False, 'import algosdk\n'), ((3380, 3392), 'Cryptodome.Hash.SHA256.new', 'SHA256.new', ([], {}), '()\n', (3390, 3392), False, 'from Cryptodome.Hash import SHA256\n'), ((3517, 3532), 'ledgerblue.comm.getDongle', 'getDongle', (['(True)'], {}), '(True)\n', (3526, 3532), False, 'from ledgerblue.comm import getDongle\n'), ((3545, 3583), 'struct.pack', 'struct.pack', (['""">BBBBB"""', '(128)', '(3)', '(0)', '(0)', '(0)'], {}), "('>BBBBB', 128, 3, 0, 0, 0)\n", (3556, 3583), False, 'import struct\n'), ((4269, 4313), 'test.txn_utils.sign_algo_txn', 'txn_utils.sign_algo_txn', (['dongle', 'decoded_txn'], {}), '(dongle, decoded_txn)\n', (4292, 4313), False, 'from test import txn_utils\n'), ((4220, 4256), 'algosdk.encoding.msgpack_encode', 'algosdk.encoding.msgpack_encode', (['txn'], {}), '(txn)\n', (4251, 4256), False, 'import algosdk\n'), ((276, 298), 'inspect.currentframe', 'inspect.currentframe', ([], {}), '()\n', (296, 298), False, 'import inspect\n')]
|
#!/usr/bin/env python3
import sys
import os
import shutil
sys.path.append('_setup/model')
from model import *
import korali
# Creating Experiment List
eList = []
for i in range(5):
e = korali.Experiment()
data = getReferenceData("_setup/data/", i)
N = len(data)
e["Problem"]["Type"] = "Bayesian/Reference"
e["Problem"]["Likelihood Model"] = "Normal"
e["Problem"]["Reference Data"] = data
e["Problem"]["Computational Model"] = lambda d: normal(N,d)
# Configuring the problem's random distributions
e["Distributions"][0]["Name"] = "Uniform 0"
e["Distributions"][0]["Type"] = "Univariate/Uniform"
e["Distributions"][0]["Minimum"] = 0.0
e["Distributions"][0]["Maximum"] = 20.0
e["Distributions"][1]["Name"] = "Uniform 1"
e["Distributions"][1]["Type"] = "Univariate/Uniform"
e["Distributions"][1]["Minimum"] = 0.0
e["Distributions"][1]["Maximum"] = 10.0
e["Variables"][0]["Name"] = "[Mean]"
e["Variables"][0]["Prior Distribution"] = "Uniform 0"
e["Variables"][1]["Name"] = "[Sigma]"
e["Variables"][1]["Prior Distribution"] = "Uniform 1"
e["Solver"]["Type"] = "Sampler/TMCMC"
e["Solver"]["Population Size"] = 1000
e["Solver"]["Target Coefficient Of Variation"] = 0.6
e["Solver"]["Covariance Scaling"] = 0.02
e["Random Seed"] = 0xC0FFEE
e["File Output"]["Path"] = "_setup/results_phase_1/" + str(i).zfill(3)
e["Console Output"]["Verbosity"] = "Detailed"
eList.append(e)
# Starting Korali's Engine and running experiment
k = korali.Engine()
k["Conduit"]["Type"] = "Concurrent"
k["Conduit"]["Concurrent Jobs"] = 4
k.run(eList)
|
[
"sys.path.append",
"korali.Engine",
"korali.Experiment"
] |
[((58, 89), 'sys.path.append', 'sys.path.append', (['"""_setup/model"""'], {}), "('_setup/model')\n", (73, 89), False, 'import sys\n'), ((1484, 1499), 'korali.Engine', 'korali.Engine', ([], {}), '()\n', (1497, 1499), False, 'import korali\n'), ((188, 207), 'korali.Experiment', 'korali.Experiment', ([], {}), '()\n', (205, 207), False, 'import korali\n')]
|
import os
import pathlib
import time
import logging
import datetime
from dotenv import load_dotenv
import pandas as pd
import requests
load_dotenv()
TOKEN = os.getenv("TELEGRAM_TOKEN")
IVANSKA_ID = os.getenv("IVANSKA_ID")
NARA_ID = os.getenv("NARA_ID")
LOC1 = os.getenv("LOC1")
LOC2 = os.getenv("LOC2")
LOCC = os.getenv("LOCC")
MAXLAT = os.getenv("MAXLAT")
MINLAT = os.getenv("MINLAT")
MAXLONG = os.getenv("MAXLONG")
MINLONG = os.getenv("MINLONG")
SPEC = os.getenv("SPEC")
URL = "https://www.komparing.com/es/gasolina/include/process-xml_maxLat{}_minLat{}_maxLong-{}_minLong-{}_zoomMapa-11_order-gsAs_gsA" \
.format(MAXLAT, MINLAT, MAXLONG, MINLONG)
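# the scraped endpoint encodes a map bounding box (the lat/long extents above)
# and a zoom level; "order-gsAs" appears to sort stations by gasolina_95 ascending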
TELEGRAM_API = "https://api.telegram.org/bot{}/sendMessage?chat_id={}&parse_mode=Markdown&text={}"
logging.basicConfig(format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
level=logging.INFO)
logger = logging.getLogger(__name__)
def fetch_prices():
r = requests.get(URL)
df = pd.read_xml(r.text)
col_filters = ["localidad", "direcc", "rotulo",
"gasolina_95", "gasoleo_A_normal"]
loc_filters = (
(df.localidad == LOC1) |
(df.localidad == LOC2)
)
specific_df = df[df.rotulo == SPEC][col_filters].tail(1)
df = df[col_filters]
df = df[loc_filters]
df = df[(df.gasolina_95 != 0) &
(df.gasoleo_A_normal != 0)]
df = df.sort_values(by="gasolina_95").head(3)
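    # keep the three cheapest stations, then append the tracked brand's latest
    # row so get_msg() can report it separately as the last entry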
df = pd.concat([df, specific_df], ignore_index=True)
    # resolve the history file next to this script so reads and writes hit the
    # same file regardless of the working directory
    prices_path = os.path.join(pathlib.Path(__file__).parent.resolve(), "prices.csv")
    prices_df = pd.read_csv(prices_path)
    prices_df.loc[len(prices_df)] = df.head(1).values[0]
    # index=False keeps repeated runs from accumulating extra index columns
    prices_df.to_csv(prices_path, index=False)
return df.values
def get_msg():
prices = fetch_prices()
ret = "Доброе утро! Hoy los combustibles más baratos están en:\n\n"
for p in prices[:-1]:
if p[0] == LOC1:
p[0] = LOCC
ret += "{}, en {}. ({})\n".format(
p[2], p[1], p[0]
)
ret += "Gasóleo: {} €/L\nGasolina: {} €/L\n\n".format(
p[4], p[3]
)
last_p = prices[-1]
ret += "Y los precios en {} de {} son:\n\n".format(
last_p[2], last_p[0]
)
ret += "Gasóleo: {} €/L\nGasolina: {} €/L\n\n".format(
last_p[4], last_p[3]
)
ret += "Gasóleo (8% DTO.): {} €/L\nGasolina (8% DTO.): {} €/L".format(
round(last_p[4] - last_p[4]*0.08, 3),
round(last_p[3] - last_p[3]*0.08, 3)
)
return ret
if __name__ == "__main__":
H = 7
M = 0
# "Half-ass" solution made by:
# https://stackoverflow.com/questions/2031111/in-python-how-can-i-put-a-thread-to-sleep-until-a-specific-time
for _ in range(0, 365):
t = datetime.datetime.today()
future = datetime.datetime(t.year, t.month, t.day, H, M)
if t.hour >= H:
future += datetime.timedelta(days=1)
logger.info("Time sleep: {} seconds.".format((future-t).total_seconds()))
logger.info("Today: {}.".format(t))
logger.info("Future: {}.".format(future))
time.sleep((future-t).total_seconds())
requests.get(TELEGRAM_API.format(
TOKEN, IVANSKA_ID, get_msg()
))
requests.get(TELEGRAM_API.format(
TOKEN, NARA_ID, get_msg()
))
|
[
"datetime.datetime.today",
"logging.basicConfig",
"pandas.read_xml",
"datetime.datetime",
"dotenv.load_dotenv",
"pathlib.Path",
"datetime.timedelta",
"requests.get",
"pandas.concat",
"os.getenv",
"logging.getLogger"
] |
[((137, 150), 'dotenv.load_dotenv', 'load_dotenv', ([], {}), '()\n', (148, 150), False, 'from dotenv import load_dotenv\n'), ((159, 186), 'os.getenv', 'os.getenv', (['"""TELEGRAM_TOKEN"""'], {}), "('TELEGRAM_TOKEN')\n", (168, 186), False, 'import os\n'), ((200, 223), 'os.getenv', 'os.getenv', (['"""IVANSKA_ID"""'], {}), "('IVANSKA_ID')\n", (209, 223), False, 'import os\n'), ((234, 254), 'os.getenv', 'os.getenv', (['"""NARA_ID"""'], {}), "('NARA_ID')\n", (243, 254), False, 'import os\n'), ((262, 279), 'os.getenv', 'os.getenv', (['"""LOC1"""'], {}), "('LOC1')\n", (271, 279), False, 'import os\n'), ((287, 304), 'os.getenv', 'os.getenv', (['"""LOC2"""'], {}), "('LOC2')\n", (296, 304), False, 'import os\n'), ((312, 329), 'os.getenv', 'os.getenv', (['"""LOCC"""'], {}), "('LOCC')\n", (321, 329), False, 'import os\n'), ((339, 358), 'os.getenv', 'os.getenv', (['"""MAXLAT"""'], {}), "('MAXLAT')\n", (348, 358), False, 'import os\n'), ((368, 387), 'os.getenv', 'os.getenv', (['"""MINLAT"""'], {}), "('MINLAT')\n", (377, 387), False, 'import os\n'), ((398, 418), 'os.getenv', 'os.getenv', (['"""MAXLONG"""'], {}), "('MAXLONG')\n", (407, 418), False, 'import os\n'), ((429, 449), 'os.getenv', 'os.getenv', (['"""MINLONG"""'], {}), "('MINLONG')\n", (438, 449), False, 'import os\n'), ((457, 474), 'os.getenv', 'os.getenv', (['"""SPEC"""'], {}), "('SPEC')\n", (466, 474), False, 'import os\n'), ((757, 864), 'logging.basicConfig', 'logging.basicConfig', ([], {'format': '"""%(asctime)s - %(name)s - %(levelname)s - %(message)s"""', 'level': 'logging.INFO'}), "(format=\n '%(asctime)s - %(name)s - %(levelname)s - %(message)s', level=logging.INFO)\n", (776, 864), False, 'import logging\n'), ((890, 917), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (907, 917), False, 'import logging\n'), ((947, 964), 'requests.get', 'requests.get', (['URL'], {}), '(URL)\n', (959, 964), False, 'import requests\n'), ((974, 993), 'pandas.read_xml', 'pd.read_xml', (['r.text'], {}), '(r.text)\n', (985, 993), True, 'import pandas as pd\n'), ((1437, 1484), 'pandas.concat', 'pd.concat', (['[df, specific_df]'], {'ignore_index': '(True)'}), '([df, specific_df], ignore_index=True)\n', (1446, 1484), True, 'import pandas as pd\n'), ((2730, 2755), 'datetime.datetime.today', 'datetime.datetime.today', ([], {}), '()\n', (2753, 2755), False, 'import datetime\n'), ((2773, 2820), 'datetime.datetime', 'datetime.datetime', (['t.year', 't.month', 't.day', 'H', 'M'], {}), '(t.year, t.month, t.day, H, M)\n', (2790, 2820), False, 'import datetime\n'), ((2867, 2893), 'datetime.timedelta', 'datetime.timedelta', ([], {'days': '(1)'}), '(days=1)\n', (2885, 2893), False, 'import datetime\n'), ((1536, 1558), 'pathlib.Path', 'pathlib.Path', (['__file__'], {}), '(__file__)\n', (1548, 1558), False, 'import pathlib\n')]
|
from unittest import TestCase, main
from unittest.mock import patch
from pyrarcrack import generate_combinations
class TestCombination(TestCase):
def test_should_generate_minimal_combination(self):
self.assertEqual(
list(generate_combinations('a', 1)),
['a']
)
if __name__ == '__main__':
main()
|
[
"unittest.main",
"pyrarcrack.generate_combinations"
] |
[((356, 362), 'unittest.main', 'main', ([], {}), '()\n', (360, 362), False, 'from unittest import TestCase, main\n'), ((257, 286), 'pyrarcrack.generate_combinations', 'generate_combinations', (['"""a"""', '(1)'], {}), "('a', 1)\n", (278, 286), False, 'from pyrarcrack import generate_combinations\n')]
|
"""GOEA and report generation w/bonferroni multiple test corrections from statsmodels.
python test_goea_rpt_bonferroni.py
python test_goea_rpt_bonferroni.py [LOG FILENAME]
"""
__copyright__ = "Copyright (C) 2016-2017, <NAME>, <NAME>. All rights reserved."
__author__ = "<NAME>"
import os
import sys
from goatools.base import get_godag
from goatools.associations import read_associations
from goatools.go_enrichment import GOEnrichmentStudy
REPO = os.path.join(os.path.dirname(os.path.abspath(__file__)), "..")
def test_bonferroni():
"""Do Gene Ontology Enrichment Analysis w/Bonferroni multipletest. Print results 3 ways."""
# ---------------------------------------------------------------------
# Run Gene Ontology Analysis (GOEA)
#
# 1. Initialize
log = sys.stdout
results_nt, goea = run_bonferroni()
# ---------------------------------------------------------------------
# Print results 3 ways: to screen, to tsv (tab-separated file), to xlsx (Excel spreadsheet)
fout_tsv = "goea_bonferroni.tsv"
fout_xls = "goea_bonferroni.xlsx"
# print these in tsv and xlsx
print_fields = ['NS', 'study_count', 'p_uncorrected', 'p_bonferroni',
'level', 'depth', 'GO', 'name']
# 1. Print results to screen using format in prtfmt. For example:
#
# BP 22 3.073e-03 L06 D07 GO:0006468 protein phosphorylation
# BP 9 1.023e-02 L07 D08 GO:0006511 ubiquitin-dependent protein catabolic process
# BP 2 1.023e-02 L05 D09 GO:0019877 diaminopimelate biosynthetic process
# BP 2 1.223e-02 L04 D08 GO:0006301 postreplication repair
# BP 2 1.223e-02 L05 D09 GO:0030418 nicotianamine biosynthetic process
# BP 2 1.492e-02 L04 D06 GO:0006909 phagocytosis
# BP 2 1.492e-02 L03 D03 GO:0051322 anaphase
# ...
prtfmt = " ".join(["{NS} {study_count:3} {p_uncorrected:5.3e}",
"{p_bonferroni:5.3e} L{level:02} D{depth:02} {GO} {name}\n"])
prt_if = lambda nt: nt.p_uncorrected < 0.05
goea.prt_txt(log, results_nt, prtfmt, prt_if=prt_if)
# 2. Write results to tsv file
# Optional user defined formatting for specific fields
fld2fmt = {'p_bonferroni':'{:8.2e}', 'p_uncorrected':'{:8.2e}'}
# Sort by: 1st) BP, MF, CC; 2nd) By GO depth, deepest GO first.
sort_by = lambda nt: [nt.NS, -1*nt.depth]
goea.wr_tsv(fout_tsv, results_nt,
prt_if=prt_if, sort_by=sort_by, fld2fmt=fld2fmt, prt_flds=print_fields)
# 3. Write results to xlsx file, including specific study genes assc. w/significant GOs
# Use these headers instead of the print_fields for the xlsx header
hdrs = ['NS', 'pval', 'bonferroni', 'L', 'D', 'Term', 'Ontology Term Name', 'Cnt', 'Genes']
print_fields = ['NS', 'p_uncorrected', 'p_bonferroni',
'level', 'depth', 'GO', 'name', 'study_count', 'study_items']
goea.wr_xlsx(fout_xls, results_nt,
# optional key-word args (ie, kwargs, kws)
prt_if=prt_if, sort_by=sort_by, hdrs=hdrs, fld2fmt=fld2fmt, prt_flds=print_fields)
def run_bonferroni():
"""Do Gene Ontology Enrichment Analysis w/Bonferroni multipletest. Print results 3 ways."""
# ---------------------------------------------------------------------
# Run Gene Ontology Analysis (GOEA)
#
# 1. Initialize
godag = get_godag(os.path.join(os.getcwd(), "go-basic.obo"), loading_bar=None)
assoc = read_associations(os.path.join(REPO, "data/association"), no_top=True)
popul_ids = [line.rstrip() for line in open(os.path.join(REPO, "data/population"))]
study_ids = [line.rstrip() for line in open(os.path.join(REPO, "data/study"))]
# 2. Run enrichment analysis
goea = GOEnrichmentStudy(popul_ids, assoc, godag, alpha=0.05, methods=['bonferroni'])
results_nt = goea.run_study(study_ids)
return results_nt, goea
if __name__ == '__main__':
test_bonferroni()
# Copyright (C) 2016-2017, <NAME>, <NAME>. All rights reserved.
|
[
"os.getcwd",
"goatools.go_enrichment.GOEnrichmentStudy",
"os.path.abspath",
"os.path.join"
] |
[((3768, 3846), 'goatools.go_enrichment.GOEnrichmentStudy', 'GOEnrichmentStudy', (['popul_ids', 'assoc', 'godag'], {'alpha': '(0.05)', 'methods': "['bonferroni']"}), "(popul_ids, assoc, godag, alpha=0.05, methods=['bonferroni'])\n", (3785, 3846), False, 'from goatools.go_enrichment import GOEnrichmentStudy\n'), ((497, 522), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (512, 522), False, 'import os\n'), ((3500, 3538), 'os.path.join', 'os.path.join', (['REPO', '"""data/association"""'], {}), "(REPO, 'data/association')\n", (3512, 3538), False, 'import os\n'), ((3422, 3433), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (3431, 3433), False, 'import os\n'), ((3601, 3638), 'os.path.join', 'os.path.join', (['REPO', '"""data/population"""'], {}), "(REPO, 'data/population')\n", (3613, 3638), False, 'import os\n'), ((3689, 3721), 'os.path.join', 'os.path.join', (['REPO', '"""data/study"""'], {}), "(REPO, 'data/study')\n", (3701, 3721), False, 'import os\n')]
|
# -*- coding: UTF-8 -*-
################################################################################
#
# Copyright (c) 2018 Baidu.com, Inc. All Rights Reserved
#
################################################################################
"""
**client** 模块提供对第三方组件操作的统一抽象,规范化智能运维机器人与外部系统交互的方式
* ``BaseClient`` 提供第三方组件调用的一些通用功(如http请求重试、结果处理等)的封装
* ``ESClient`` 封装对Elasticsearch(后文简称ES)的操作,ES作为全文搜索数据库,支持多条件组合的高效查询,Guardian通过其提供的RESTful web接口进行操作记录的读写
.. Note:: 本模块只提供客户端基类的定义,并封装了Guardian运行时依赖组件的调用,跟业务相关的外部系统client应在 ``assemble`` 中实现
"""
import httplib
import json
import time
from ark.are import common
from ark.are import config
from ark.are import log
from ark.are import exception
class BaseClient(common.Singleton):
"""
客户端基类,提供基本的http请求功能封装,统一对外http访问方式
.. Note:: 为避免在接口异常时的频繁调用,重试时间间隔会以重试次数的平方方式增加
"""
    def http_request(self, host, port, method, url, header=None, data=None,
                     timeout=30, retry_times=2, response_code=None,
                     response_json=True):
        """
        HTTP request interface
        :param str host: server host
        :param int port: server port
        :param str method: HTTP method
        :param str url: URL path
        :param dict header: HTTP headers
        :param str data: HTTP body data
        :param int timeout: request timeout in seconds
        :param int retry_times: number of retries
        :param list response_code: status codes treated as success
        :param bool response_json: True to return parsed JSON, False to return the raw string
        :return: the response data
        :rtype: str
        :raises EFailedRequest: the request failed
        """
log.d("http request, host:{}, port:{}, method:{}, url:{}, header:"
"{}, data:{}, timeout:{}, retry_times:{}, response code:{}, "
"response_json:{}".format(host, port, method, url, header,
data, timeout, retry_times,
response_code, response_json))
header = header or {}
res_data = None
for i in range(retry_times + 1):
stop_tag = True if i == retry_times else False
sleep_time = (i + 1) * (i + 1)
try:
conn = httplib.HTTPConnection(
host=host, port=port, timeout=timeout)
conn.request(method=method, url=url, body=data, headers=header)
resp = conn.getresponse()
res_data = resp.read()
log.d("http request ret:{}".format(res_data))
except Exception as e:
log.f("http request exe{}".format(res_data))
if stop_tag:
raise exception.EFailedRequest(
"http request failed,error:{}".format(e))
else:
time.sleep(sleep_time)
continue
else:
log.d("http request ok")
if not response_code or not isinstance(response_code, list):
if 200 <= resp.status < 300:
break
elif stop_tag:
log.e("request failed,code:{},msg:{}".format(
resp.status, resp.msg))
raise exception.EFailedRequest(
"request failed,code:{},msg:{}".format(
resp.status, resp.msg))
else:
time.sleep((i + 1) * (i + 1))
continue
else:
if resp.status in response_code:
break
elif stop_tag:
log.e("request failed,error,code:{},data:{}".format(
resp.status, data))
raise exception.EFailedRequest(
"request failed,error,code:{},data:{}".format(
resp.status, data))
else:
time.sleep(sleep_time)
continue
log.d("http response data:{}".format(res_data))
if response_json:
return json.loads(res_data)
else:
return res_data
class ESClient(BaseClient):
"""
Elasticsearch 客户端类,封装对elasticsearch操作,包括对索引(index)的增删改查
用于智能运维机器人处理消息的状态存储,感知、决策和执行的整个流程
"""
ARK_ES_HOST = "ARK_ES_HOST"
ARK_ES_PORT = "ARK_ES_PORT"
def __init__(self, index, type):
"""
初始化方法
:param str index: 索引参数
:param str type: 类型参数
"""
self.__host = config.GuardianConfig.get(self.ARK_ES_HOST)
self.__port = int(config.GuardianConfig.get(self.ARK_ES_PORT))
self.__index = index
self.__type = type
def post_data(self, data):
"""
创建elasticsearch索引(index),并存储数据
:param str data: 请求数据
:return: 请求结果
:rtype: str
:raises: None
"""
url = "/{}/{}".format(self.__index, self.__type)
method = "POST"
ret = self.http_request(self.__host, int(self.__port),
method, url, data=data)
return ret
def put_data(self, uid, data):
"""
写入elasticsearch文档(doc)
:param str uid: 待查询对象的uid
:param str data: 请求数据
:return: 请求结果
:rtype: str
:raises EFailedRequest: 请求失败
"""
url = "/{}/{}/{}".format(self.__index, self.__type, uid)
method = "PUT"
header = {"Content-Type": "application/json"}
ret = self.http_request(self.__host, self.__port,
method, url, data=data, header=header)
return ret
def get_data_with_uid(self, uid):
"""
根据uid查询doc
:param str uid: 待查询对象的uid
:return: 返回请求结果
:rtype: str
:raises EFailedRequest: 请求失败
"""
url = "/{}/{}/{}".format(self.__index, self.__type, uid)
method = "GET"
ret = self.http_request(self.__host, self.__port, method, url)
return ret
def get_data_with_condition(self, condition):
"""
根据复合条件查询elasticsearch
:param dict condition: 查询条件
:return: 返回请求结果
:rtype: str
:raises EFailedRequest: 请求失败
"""
url = "/{}/{}/_query".format(self.__index, self.__type)
method = "GET"
ret = self.http_request(self.__host, self.__port,
method, url, data=json.dumps(condition))
return ret
|
[
"json.loads",
"ark.are.log.d",
"httplib.HTTPConnection",
"ark.are.config.GuardianConfig.get",
"json.dumps",
"time.sleep"
] |
[((4552, 4595), 'ark.are.config.GuardianConfig.get', 'config.GuardianConfig.get', (['self.ARK_ES_HOST'], {}), '(self.ARK_ES_HOST)\n', (4577, 4595), False, 'from ark.are import config\n'), ((4122, 4142), 'json.loads', 'json.loads', (['res_data'], {}), '(res_data)\n', (4132, 4142), False, 'import json\n'), ((4622, 4665), 'ark.are.config.GuardianConfig.get', 'config.GuardianConfig.get', (['self.ARK_ES_PORT'], {}), '(self.ARK_ES_PORT)\n', (4647, 4665), False, 'from ark.are import config\n'), ((2151, 2212), 'httplib.HTTPConnection', 'httplib.HTTPConnection', ([], {'host': 'host', 'port': 'port', 'timeout': 'timeout'}), '(host=host, port=port, timeout=timeout)\n', (2173, 2212), False, 'import httplib\n'), ((2828, 2852), 'ark.are.log.d', 'log.d', (['"""http request ok"""'], {}), "('http request ok')\n", (2833, 2852), False, 'from ark.are import log\n'), ((6451, 6472), 'json.dumps', 'json.dumps', (['condition'], {}), '(condition)\n', (6461, 6472), False, 'import json\n'), ((2742, 2764), 'time.sleep', 'time.sleep', (['sleep_time'], {}), '(sleep_time)\n', (2752, 2764), False, 'import time\n'), ((3400, 3429), 'time.sleep', 'time.sleep', (['((i + 1) * (i + 1))'], {}), '((i + 1) * (i + 1))\n', (3410, 3429), False, 'import time\n'), ((3965, 3987), 'time.sleep', 'time.sleep', (['sleep_time'], {}), '(sleep_time)\n', (3975, 3987), False, 'import time\n')]
|
"""
Django middleware for generating request flame graphs.
Requires the flamegraph.pl perl script:
https://github.com/brendangregg/FlameGraph/blob/master/flamegraph.pl
Installation:
1. Create a directory for flame graphs
2. Copy the flamegraph.pl script to it
3. Add the FLAMES_DIR django setting
4. Add the flames.FlamesMiddleware to MIDDLEWARE_CLASSES
Usage:
To generate a flame graph just append ?flames to the requested url.
Middleware will create an svg in the FLAMES_DIR with the current timestamp.
Uncomment line 88 to automatically open the svg in a new google chrome tab.
"""
import os
import subprocess
import sys
import threading
import time
import traceback
from datetime import datetime
from xml.dom.minidom import Text
from django.conf import settings
FLAMES_DIR = os.path.abspath(settings.FLAMES_DIR)
FLAMEGRAPH_SCRIPT_PATH = os.path.join(FLAMES_DIR, 'flamegraph.pl')
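# Example Django settings entry (hypothetical path); flamegraph.pl must be
# copied into this directory as described in the module docstring:
#   FLAMES_DIR = '/var/tmp/flames'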
def get_module_name(module_path):
for path in sys.path:
path = path or os.getcwd()
if module_path.startswith(path):
rel_path = module_path[len(path) + 1:]
return (
rel_path
.replace(u'/__init__.py', u'')
.replace(u'/', '.')
.replace(u'.py', u'')
.strip()
)
return module_path
def write_samples(file, samples):
for _, frame in samples:
stack = traceback.extract_stack(frame)
frame_strings = (
func_name + u'@' + get_module_name(module_path)
for module_path, _, func_name, _ in stack
)
stack_string = u';'.join(frame_strings)
file.write(u'{} {}\n'.format(stack_string, 1))
def write_flames(logger, title='flames'):
base_path = os.path.join(
FLAMES_DIR,
datetime.now().strftime('%Y-%m-%dT%H:%M:%S'),
)
out_txt_path = base_path + '.txt'
out_svg_path = base_path + '.svg'
title_element = Text()
title_element.data = title
title_xml = title_element.toxml()
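    # Serializing the title through an XML Text node escapes characters that
    # are special in SVG/XML (&, <, >) before it is passed to flamegraph.pl.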
with open(out_txt_path, 'w') as out_txt:
write_samples(out_txt, logger.samples)
with open(out_svg_path, 'w') as out_svg:
subprocess.call(
[
'perl', FLAMEGRAPH_SCRIPT_PATH, out_txt_path,
'--title', title_xml,
],
stdout=out_svg
)
# subprocess.call(['google-chrome', 'file://' + out_svg_path])
class StackLogger(object):
def __init__(self, thread_id, interval=0.001):
super(StackLogger, self).__init__()
self.thread_id = thread_id
self.interval = interval
self.should_stop = False
self.samples = []
self.started = threading.Event()
self.thread = threading.Thread(target=self.run)
def run(self):
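        # Sample the watched thread's current stack roughly every `interval`
        # seconds; the time each capture takes is subtracted from the sleep so
        # the sampling rate stays close to the target.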
start_time = time.time()
self.samples = []
self.started.set()
while not self.should_stop:
frame_start = time.clock()
timestamp = time.time() - start_time
frame = sys._current_frames()[self.thread_id]
self.samples.append((timestamp, frame))
frame_dt = time.clock() - frame_start
time.sleep(max(0, self.interval - frame_dt))
def start(self):
self.started.clear()
self.should_stop = False
self.thread.start()
self.started.wait()
def stop(self):
self.should_stop = True
self.thread.join()
class FlamesMiddleware(object):
def process_request(self, request):
if settings.DEBUG and 'flames' in request.GET:
logger = StackLogger(threading.current_thread().ident)
logger.start()
request._stack_logger = logger
def process_response(self, request, response):
if settings.DEBUG and hasattr(request, '_stack_logger'):
logger = request._stack_logger
logger.stop()
url = request.build_absolute_uri()
write_flames(logger, title=url)
return response
|
[
"threading.Thread",
"os.path.abspath",
"os.getcwd",
"time.clock",
"traceback.extract_stack",
"time.time",
"datetime.datetime.now",
"subprocess.call",
"threading.Event",
"xml.dom.minidom.Text",
"sys._current_frames",
"threading.current_thread",
"os.path.join"
] |
[((785, 821), 'os.path.abspath', 'os.path.abspath', (['settings.FLAMES_DIR'], {}), '(settings.FLAMES_DIR)\n', (800, 821), False, 'import os\n'), ((847, 888), 'os.path.join', 'os.path.join', (['FLAMES_DIR', '"""flamegraph.pl"""'], {}), "(FLAMES_DIR, 'flamegraph.pl')\n", (859, 888), False, 'import os\n'), ((1927, 1933), 'xml.dom.minidom.Text', 'Text', ([], {}), '()\n', (1931, 1933), False, 'from xml.dom.minidom import Text\n'), ((1391, 1421), 'traceback.extract_stack', 'traceback.extract_stack', (['frame'], {}), '(frame)\n', (1414, 1421), False, 'import traceback\n'), ((2150, 2255), 'subprocess.call', 'subprocess.call', (["['perl', FLAMEGRAPH_SCRIPT_PATH, out_txt_path, '--title', title_xml]"], {'stdout': 'out_svg'}), "(['perl', FLAMEGRAPH_SCRIPT_PATH, out_txt_path, '--title',\n title_xml], stdout=out_svg)\n", (2165, 2255), False, 'import subprocess\n'), ((2677, 2694), 'threading.Event', 'threading.Event', ([], {}), '()\n', (2692, 2694), False, 'import threading\n'), ((2717, 2750), 'threading.Thread', 'threading.Thread', ([], {'target': 'self.run'}), '(target=self.run)\n', (2733, 2750), False, 'import threading\n'), ((2792, 2803), 'time.time', 'time.time', ([], {}), '()\n', (2801, 2803), False, 'import time\n'), ((974, 985), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (983, 985), False, 'import os\n'), ((2920, 2932), 'time.clock', 'time.clock', ([], {}), '()\n', (2930, 2932), False, 'import time\n'), ((1777, 1791), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (1789, 1791), False, 'from datetime import datetime\n'), ((2958, 2969), 'time.time', 'time.time', ([], {}), '()\n', (2967, 2969), False, 'import time\n'), ((3003, 3024), 'sys._current_frames', 'sys._current_frames', ([], {}), '()\n', (3022, 3024), False, 'import sys\n'), ((3117, 3129), 'time.clock', 'time.clock', ([], {}), '()\n', (3127, 3129), False, 'import time\n'), ((3583, 3609), 'threading.current_thread', 'threading.current_thread', ([], {}), '()\n', (3607, 3609), False, 'import threading\n')]
|
"""
This module contains test cases for API endpoints.
"""
import unittest
from pay_ir.api.client import PayIrClient
class PayIrAPITestCase(unittest.TestCase):
def __init__(self, *args, **kwargs):
unittest.TestCase.__init__(self, *args, **kwargs)
def setUp(self):
self.client = PayIrClient('test')
def test_send_api_correct_data(self):
response = self.client.init_transaction(1000, 'http://localhost')
trans_id = int(response['trans_id'])
self.assertIsInstance(trans_id, int)
self.assertGreater(trans_id, 0)
self.assertIsInstance(response['payment_url'], str)
|
[
"unittest.TestCase.__init__",
"pay_ir.api.client.PayIrClient"
] |
[((217, 266), 'unittest.TestCase.__init__', 'unittest.TestCase.__init__', (['self', '*args'], {}), '(self, *args, **kwargs)\n', (243, 266), False, 'import unittest\n'), ((312, 331), 'pay_ir.api.client.PayIrClient', 'PayIrClient', (['"""test"""'], {}), "('test')\n", (323, 331), False, 'from pay_ir.api.client import PayIrClient\n')]
|
from __future__ import unicode_literals, absolute_import, print_function
import calendar
from decimal import Decimal
import random
import datetime
import uuid
import mock
from django.conf import settings
from django.core.management import call_command
from dimagi.utils.dates import add_months
from dimagi.utils.data import generator as data_gen
from corehq.apps.accounting.models import (
Currency, BillingAccount, Subscription, Subscriber, SoftwareProductType,
DefaultProductPlan, SubscriptionAdjustment,
SoftwarePlanEdition, BillingContactInfo, SubscriptionType,
)
from corehq.apps.domain.models import Domain
from corehq.apps.users.models import WebUser, CommCareUser
# don't actually use the plan lists below for initializing new plans! the amounts have been changed to make
# it easier for testing:
SUBSCRIBABLE_EDITIONS = [
SoftwarePlanEdition.ADVANCED,
SoftwarePlanEdition.PRO,
SoftwarePlanEdition.STANDARD,
]
def instantiate_accounting_for_tests():
call_command('cchq_prbac_bootstrap', testing=True)
call_command('cchq_software_plan_bootstrap', testing=True)
def init_default_currency():
currency, _ = Currency.objects.get_or_create(
code=settings.DEFAULT_CURRENCY
)
currency.name = "Default Currency"
currency.rate_to_default = Decimal('1.0')
currency.symbol = settings.DEFAULT_CURRENCY_SYMBOL
currency.save()
return currency
def unique_name():
return uuid.uuid4().hex.lower()[:60]
def arbitrary_web_user(save=True, is_dimagi=False):
domain = Domain(name=unique_name()[:25])
domain.save()
username = "%s@%s.<EMAIL>" % (unique_name(), 'dimagi' if is_dimagi else 'gmail')
try:
web_user = WebUser.create(domain.name, username, 'test123')
except Exception:
web_user = WebUser.get_by_username(username)
web_user.is_active = True
if save:
web_user.save()
return web_user
def billing_account(web_user_creator, web_user_contact, currency=None, save=True):
account_name = data_gen.arbitrary_unique_name(prefix="BA")[:40]
currency = currency or Currency.objects.get(code=settings.DEFAULT_CURRENCY)
billing_account = BillingAccount(
name=account_name,
created_by=web_user_creator.username,
currency=currency,
)
if save:
billing_account.save()
billing_contact = arbitrary_contact_info(billing_account, web_user_contact)
billing_contact.save()
return billing_account
def arbitrary_contact_info(account, web_user_creator):
return BillingContactInfo(
account=account,
first_name=data_gen.arbitrary_firstname(),
last_name=data_gen.arbitrary_lastname(),
emails=web_user_creator.username,
phone_number="+15555555",
company_name="Company Name",
first_line="585 Mass Ave",
city="Cambridge",
state_province_region="MA",
postal_code="02139",
country="US",
)
def delete_all_accounts():
BillingContactInfo.objects.all().delete()
BillingAccount.objects.all().delete()
Currency.objects.all().delete()
def subscribable_plan(edition=SoftwarePlanEdition.ADVANCED):
return DefaultProductPlan.objects.get(
edition=edition,
product_type=SoftwareProductType.COMMCARE,
is_trial=False
).plan.get_version()
def generate_domain_subscription_from_date(date_start, billing_account, domain,
min_num_months=None, is_immediately_active=False,
delay_invoicing_until=None, save=True,
service_type=SubscriptionType.NOT_SET,
subscription_length=None,
plan_version=None,):
# make sure the first month is never a full month (for testing)
date_start = date_start.replace(day=max(2, date_start.day))
subscription_length = subscription_length or random.randint(min_num_months or 3, 25)
date_end_year, date_end_month = add_months(date_start.year, date_start.month, subscription_length)
date_end_last_day = calendar.monthrange(date_end_year, date_end_month)[1]
# make sure that the last month is never a full month (for testing)
date_end = datetime.date(date_end_year, date_end_month, min(date_end_last_day - 1, date_start.day + 1))
subscriber, _ = Subscriber.objects.get_or_create(domain=domain, organization=None)
subscription = Subscription(
account=billing_account,
plan_version=plan_version or subscribable_plan(),
subscriber=subscriber,
salesforce_contract_id=data_gen.arbitrary_unique_name("SFC")[:80],
date_start=date_start,
date_end=date_end,
is_active=is_immediately_active,
date_delay_invoicing=delay_invoicing_until,
service_type=service_type,
)
if save:
subscription.save()
return subscription, subscription_length
def delete_all_subscriptions():
SubscriptionAdjustment.objects.all().delete()
Subscription.objects.all().delete()
Subscriber.objects.all().delete()
def get_start_date():
start_date = datetime.date.today()
(_, last_day) = calendar.monthrange(start_date.year, start_date.month)
# make sure that the start_date does not fall on the first or last day of the month:
return start_date.replace(day=min(max(2, start_date.day), last_day-1))
def arbitrary_domain():
domain = Domain(
name=data_gen.arbitrary_unique_name()[:20],
is_active=True,
)
domain.save()
return domain
def arbitrary_domains_by_product_type():
domains = {}
for product_type, _ in SoftwareProductType.CHOICES:
domain = arbitrary_domain()
if product_type == SoftwareProductType.COMMTRACK:
domain.commtrack_enabled = True
domain.save()
if product_type == SoftwareProductType.COMMCONNECT:
domain.commconnect_enabled = True
domain.save()
domains[product_type] = domain
return domains
def arbitrary_commcare_user(domain, is_active=True):
username = unique_name()
try:
commcare_user = CommCareUser.create(domain, username, 'test123')
commcare_user.is_active = is_active
commcare_user.save()
return commcare_user
except Exception:
pass
def arbitrary_commcare_users_for_domain(domain, num_users, is_active=True):
count = 0
for _ in range(0, num_users):
count += 1
commcare_user = None
while commcare_user is None:
commcare_user = arbitrary_commcare_user(domain, is_active=is_active)
return num_users
def arbitrary_sms_billables_for_domain(domain, direction, message_month_date, num_sms):
from corehq.apps.smsbillables.models import SmsBillable, SmsGatewayFee, SmsUsageFee
gateway_fee = SmsGatewayFee.create_new('MACH', direction, Decimal(0.5))
usage_fee = SmsUsageFee.create_new(direction, Decimal(0.25))
_, last_day_message = calendar.monthrange(message_month_date.year, message_month_date.month)
for _ in range(0, num_sms):
sms_billable = SmsBillable(
gateway_fee=gateway_fee,
usage_fee=usage_fee,
log_id=data_gen.arbitrary_unique_name()[:50],
phone_number=data_gen.random_phonenumber(),
domain=domain,
direction=direction,
date_sent=datetime.date(message_month_date.year, message_month_date.month,
random.randint(1, last_day_message)),
)
sms_billable.save()
def create_excess_community_users(domain):
community_plan = DefaultProductPlan.objects.get(
product_type=SoftwareProductType.COMMCARE,
edition=SoftwarePlanEdition.COMMUNITY
).plan.get_version()
num_active_users = random.randint(community_plan.user_limit + 1,
community_plan.user_limit + 4)
arbitrary_commcare_users_for_domain(domain.name, num_active_users)
return num_active_users
class FakeStripeCard(mock.MagicMock):
def __init__(self):
super(FakeStripeCard, self).__init__()
self._metadata = {}
self.last4 = '1234'
@property
def metadata(self):
return self._metadata
@metadata.setter
def metadata(self, value):
"""Stripe returns everything as JSON. This will do for testing"""
self._metadata = {k: str(v) for k, v in value.iteritems()}
def save(self):
pass
class FakeStripeCustomer(mock.MagicMock):
def __init__(self, cards):
super(FakeStripeCustomer, self).__init__()
self.id = uuid.uuid4().hex.lower()[:25]
self.cards = mock.MagicMock()
self.cards.data = cards
|
[
"corehq.apps.accounting.models.BillingContactInfo.objects.all",
"dimagi.utils.data.generator.arbitrary_firstname",
"corehq.apps.accounting.models.Currency.objects.all",
"corehq.apps.accounting.models.SubscriptionAdjustment.objects.all",
"dimagi.utils.data.generator.random_phonenumber",
"random.randint",
"django.core.management.call_command",
"dimagi.utils.data.generator.arbitrary_unique_name",
"corehq.apps.accounting.models.Subscription.objects.all",
"corehq.apps.users.models.WebUser.get_by_username",
"dimagi.utils.data.generator.arbitrary_lastname",
"datetime.date.today",
"corehq.apps.accounting.models.Subscriber.objects.get_or_create",
"corehq.apps.accounting.models.BillingAccount",
"corehq.apps.accounting.models.Currency.objects.get_or_create",
"corehq.apps.accounting.models.Currency.objects.get",
"calendar.monthrange",
"corehq.apps.accounting.models.Subscriber.objects.all",
"uuid.uuid4",
"decimal.Decimal",
"dimagi.utils.dates.add_months",
"corehq.apps.users.models.WebUser.create",
"corehq.apps.accounting.models.BillingAccount.objects.all",
"corehq.apps.users.models.CommCareUser.create",
"corehq.apps.accounting.models.DefaultProductPlan.objects.get",
"mock.MagicMock"
] |
[((993, 1043), 'django.core.management.call_command', 'call_command', (['"""cchq_prbac_bootstrap"""'], {'testing': '(True)'}), "('cchq_prbac_bootstrap', testing=True)\n", (1005, 1043), False, 'from django.core.management import call_command\n'), ((1048, 1106), 'django.core.management.call_command', 'call_command', (['"""cchq_software_plan_bootstrap"""'], {'testing': '(True)'}), "('cchq_software_plan_bootstrap', testing=True)\n", (1060, 1106), False, 'from django.core.management import call_command\n'), ((1156, 1218), 'corehq.apps.accounting.models.Currency.objects.get_or_create', 'Currency.objects.get_or_create', ([], {'code': 'settings.DEFAULT_CURRENCY'}), '(code=settings.DEFAULT_CURRENCY)\n', (1186, 1218), False, 'from corehq.apps.accounting.models import Currency, BillingAccount, Subscription, Subscriber, SoftwareProductType, DefaultProductPlan, SubscriptionAdjustment, SoftwarePlanEdition, BillingContactInfo, SubscriptionType\n'), ((1303, 1317), 'decimal.Decimal', 'Decimal', (['"""1.0"""'], {}), "('1.0')\n", (1310, 1317), False, 'from decimal import Decimal\n'), ((2171, 2265), 'corehq.apps.accounting.models.BillingAccount', 'BillingAccount', ([], {'name': 'account_name', 'created_by': 'web_user_creator.username', 'currency': 'currency'}), '(name=account_name, created_by=web_user_creator.username,\n currency=currency)\n', (2185, 2265), False, 'from corehq.apps.accounting.models import Currency, BillingAccount, Subscription, Subscriber, SoftwareProductType, DefaultProductPlan, SubscriptionAdjustment, SoftwarePlanEdition, BillingContactInfo, SubscriptionType\n'), ((4072, 4138), 'dimagi.utils.dates.add_months', 'add_months', (['date_start.year', 'date_start.month', 'subscription_length'], {}), '(date_start.year, date_start.month, subscription_length)\n', (4082, 4138), False, 'from dimagi.utils.dates import add_months\n'), ((4419, 4485), 'corehq.apps.accounting.models.Subscriber.objects.get_or_create', 'Subscriber.objects.get_or_create', ([], {'domain': 'domain', 'organization': 'None'}), '(domain=domain, organization=None)\n', (4451, 4485), False, 'from corehq.apps.accounting.models import Currency, BillingAccount, Subscription, Subscriber, SoftwareProductType, DefaultProductPlan, SubscriptionAdjustment, SoftwarePlanEdition, BillingContactInfo, SubscriptionType\n'), ((5197, 5218), 'datetime.date.today', 'datetime.date.today', ([], {}), '()\n', (5216, 5218), False, 'import datetime\n'), ((5239, 5293), 'calendar.monthrange', 'calendar.monthrange', (['start_date.year', 'start_date.month'], {}), '(start_date.year, start_date.month)\n', (5258, 5293), False, 'import calendar\n'), ((7056, 7126), 'calendar.monthrange', 'calendar.monthrange', (['message_month_date.year', 'message_month_date.month'], {}), '(message_month_date.year, message_month_date.month)\n', (7075, 7126), False, 'import calendar\n'), ((7882, 7958), 'random.randint', 'random.randint', (['(community_plan.user_limit + 1)', '(community_plan.user_limit + 4)'], {}), '(community_plan.user_limit + 1, community_plan.user_limit + 4)\n', (7896, 7958), False, 'import random\n'), ((1705, 1753), 'corehq.apps.users.models.WebUser.create', 'WebUser.create', (['domain.name', 'username', '"""test123"""'], {}), "(domain.name, username, 'test123')\n", (1719, 1753), False, 'from corehq.apps.users.models import WebUser, CommCareUser\n'), ((2020, 2063), 'dimagi.utils.data.generator.arbitrary_unique_name', 'data_gen.arbitrary_unique_name', ([], {'prefix': '"""BA"""'}), "(prefix='BA')\n", (2050, 2063), True, 'from dimagi.utils.data import generator as data_gen\n'), ((2096, 2148), 'corehq.apps.accounting.models.Currency.objects.get', 'Currency.objects.get', ([], {'code': 'settings.DEFAULT_CURRENCY'}), '(code=settings.DEFAULT_CURRENCY)\n', (2116, 2148), False, 'from corehq.apps.accounting.models import Currency, BillingAccount, Subscription, Subscriber, SoftwareProductType, DefaultProductPlan, SubscriptionAdjustment, SoftwarePlanEdition, BillingContactInfo, SubscriptionType\n'), ((3996, 4035), 'random.randint', 'random.randint', (['(min_num_months or 3)', '(25)'], {}), '(min_num_months or 3, 25)\n', (4010, 4035), False, 'import random\n'), ((4163, 4213), 'calendar.monthrange', 'calendar.monthrange', (['date_end_year', 'date_end_month'], {}), '(date_end_year, date_end_month)\n', (4182, 4213), False, 'import calendar\n'), ((6210, 6258), 'corehq.apps.users.models.CommCareUser.create', 'CommCareUser.create', (['domain', 'username', '"""test123"""'], {}), "(domain, username, 'test123')\n", (6229, 6258), False, 'from corehq.apps.users.models import WebUser, CommCareUser\n'), ((6950, 6962), 'decimal.Decimal', 'Decimal', (['(0.5)'], {}), '(0.5)\n', (6957, 6962), False, 'from decimal import Decimal\n'), ((7014, 7027), 'decimal.Decimal', 'Decimal', (['(0.25)'], {}), '(0.25)\n', (7021, 7027), False, 'from decimal import Decimal\n'), ((8755, 8771), 'mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (8769, 8771), False, 'import mock\n'), ((1795, 1828), 'corehq.apps.users.models.WebUser.get_by_username', 'WebUser.get_by_username', (['username'], {}), '(username)\n', (1818, 1828), False, 'from corehq.apps.users.models import WebUser, CommCareUser\n'), ((2611, 2641), 'dimagi.utils.data.generator.arbitrary_firstname', 'data_gen.arbitrary_firstname', ([], {}), '()\n', (2639, 2641), True, 'from dimagi.utils.data import generator as data_gen\n'), ((2661, 2690), 'dimagi.utils.data.generator.arbitrary_lastname', 'data_gen.arbitrary_lastname', ([], {}), '()\n', (2688, 2690), True, 'from dimagi.utils.data import generator as data_gen\n'), ((2992, 3024), 'corehq.apps.accounting.models.BillingContactInfo.objects.all', 'BillingContactInfo.objects.all', ([], {}), '()\n', (3022, 3024), False, 'from corehq.apps.accounting.models import Currency, BillingAccount, Subscription, Subscriber, SoftwareProductType, DefaultProductPlan, SubscriptionAdjustment, SoftwarePlanEdition, BillingContactInfo, SubscriptionType\n'), ((3038, 3066), 'corehq.apps.accounting.models.BillingAccount.objects.all', 'BillingAccount.objects.all', ([], {}), '()\n', (3064, 3066), False, 'from corehq.apps.accounting.models import Currency, BillingAccount, Subscription, Subscriber, SoftwareProductType, DefaultProductPlan, SubscriptionAdjustment, SoftwarePlanEdition, BillingContactInfo, SubscriptionType\n'), ((3080, 3102), 'corehq.apps.accounting.models.Currency.objects.all', 'Currency.objects.all', ([], {}), '()\n', (3100, 3102), False, 'from corehq.apps.accounting.models import Currency, BillingAccount, Subscription, Subscriber, SoftwareProductType, DefaultProductPlan, SubscriptionAdjustment, SoftwarePlanEdition, BillingContactInfo, SubscriptionType\n'), ((5032, 5068), 'corehq.apps.accounting.models.SubscriptionAdjustment.objects.all', 'SubscriptionAdjustment.objects.all', ([], {}), '()\n', (5066, 5068), False, 'from corehq.apps.accounting.models import Currency, BillingAccount, Subscription, Subscriber, SoftwareProductType, DefaultProductPlan, SubscriptionAdjustment, SoftwarePlanEdition, BillingContactInfo, SubscriptionType\n'), ((5082, 5108), 'corehq.apps.accounting.models.Subscription.objects.all', 'Subscription.objects.all', ([], {}), '()\n', (5106, 5108), False, 'from corehq.apps.accounting.models import Currency, BillingAccount, Subscription, Subscriber, SoftwareProductType, DefaultProductPlan, SubscriptionAdjustment, SoftwarePlanEdition, BillingContactInfo, SubscriptionType\n'), ((5122, 5146), 'corehq.apps.accounting.models.Subscriber.objects.all', 'Subscriber.objects.all', ([], {}), '()\n', (5144, 5146), False, 'from corehq.apps.accounting.models import Currency, BillingAccount, Subscription, Subscriber, SoftwareProductType, DefaultProductPlan, SubscriptionAdjustment, SoftwarePlanEdition, BillingContactInfo, SubscriptionType\n'), ((3186, 3297), 'corehq.apps.accounting.models.DefaultProductPlan.objects.get', 'DefaultProductPlan.objects.get', ([], {'edition': 'edition', 'product_type': 'SoftwareProductType.COMMCARE', 'is_trial': '(False)'}), '(edition=edition, product_type=\n SoftwareProductType.COMMCARE, is_trial=False)\n', (3216, 3297), False, 'from corehq.apps.accounting.models import Currency, BillingAccount, Subscription, Subscriber, SoftwareProductType, DefaultProductPlan, SubscriptionAdjustment, SoftwarePlanEdition, BillingContactInfo, SubscriptionType\n'), ((4672, 4709), 'dimagi.utils.data.generator.arbitrary_unique_name', 'data_gen.arbitrary_unique_name', (['"""SFC"""'], {}), "('SFC')\n", (4702, 4709), True, 'from dimagi.utils.data import generator as data_gen\n'), ((5518, 5550), 'dimagi.utils.data.generator.arbitrary_unique_name', 'data_gen.arbitrary_unique_name', ([], {}), '()\n', (5548, 5550), True, 'from dimagi.utils.data import generator as data_gen\n'), ((7349, 7378), 'dimagi.utils.data.generator.random_phonenumber', 'data_gen.random_phonenumber', ([], {}), '()\n', (7376, 7378), True, 'from dimagi.utils.data import generator as data_gen\n'), ((7705, 7821), 'corehq.apps.accounting.models.DefaultProductPlan.objects.get', 'DefaultProductPlan.objects.get', ([], {'product_type': 'SoftwareProductType.COMMCARE', 'edition': 'SoftwarePlanEdition.COMMUNITY'}), '(product_type=SoftwareProductType.COMMCARE,\n edition=SoftwarePlanEdition.COMMUNITY)\n', (7735, 7821), False, 'from corehq.apps.accounting.models import Currency, BillingAccount, Subscription, Subscriber, SoftwareProductType, DefaultProductPlan, SubscriptionAdjustment, SoftwarePlanEdition, BillingContactInfo, SubscriptionType\n'), ((1445, 1457), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (1455, 1457), False, 'import uuid\n'), ((7285, 7317), 'dimagi.utils.data.generator.arbitrary_unique_name', 'data_gen.arbitrary_unique_name', ([], {}), '()\n', (7315, 7317), True, 'from dimagi.utils.data import generator as data_gen\n'), ((7563, 7598), 'random.randint', 'random.randint', (['(1)', 'last_day_message'], {}), '(1, last_day_message)\n', (7577, 7598), False, 'import random\n'), ((8704, 8716), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (8714, 8716), False, 'import uuid\n')]
|
from kivy.lang.builder import Builder
Builder.unload_file('modules/friday/login.kv')
Builder.unload_file('modules/twitter_interface/permission.kv')
Builder.unload_file('modules/twitter_interface/login.kv')
from kivy.app import App
from kivy.uix.screenmanager import ScreenManager, Screen
from kivy.properties import StringProperty, ObjectProperty
from kivy.clock import Clock
from kivy.uix.screenmanager import FadeTransition
import time
from kivy.config import Config
Config.set('graphics', 'width', '500')
Config.set('graphics', 'height', '640')
Config.set('graphics', 'resizable', False)
Config.set('kivy', 'window_icon', 'data/friday/res/icon.ico')
from kivy.core.window import Window
Window.__init__()
Clock.__init__()
userG = None
class Empty(Screen):
def __init__(self, **kwargs):
        super(Empty, self).__init__(**kwargs)
class Message(Screen):
text = StringProperty("")
def __init__(self, **kwargs):
        super(Message, self).__init__(**kwargs)
self.text = "Hello, " + userG["first_name"]
class Manager(ScreenManager):
empty = ObjectProperty(None)
message = ObjectProperty(None)
def __init__(self, **kwargs):
        super(Manager, self).__init__(**kwargs, transition=FadeTransition())
Clock.schedule_once(self.change, 0.5)
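        # Flow: show Empty for 0.5s, switch to Message for 1.75s, switch back
        # to Empty for 0.5s, then stop the app.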
#print(Window.top)
def change(self, dt):
self.current = "Message"
Clock.schedule_once(self.change_back, 1.75)
def change_back(self, dt):
self.current = "Empty"
Clock.schedule_once(self.end, 0.5)
def end(self, dt):
App.get_running_app().stop()
class GreetApp(App):
def build(self):
self.icon = 'data/friday/res/icon.ico'
return Manager()
def greet(user):
global userG
userG = user
GreetApp().run()
#Window.close()
if __name__ == "__main__":
greet({"first_name": "Roshan"})
|
[
"kivy.config.Config.set",
"kivy.uix.screenmanager.FadeTransition",
"kivy.properties.StringProperty",
"kivy.core.window.Window.__init__",
"kivy.clock.Clock.schedule_once",
"kivy.clock.Clock.__init__",
"kivy.app.App.get_running_app",
"kivy.properties.ObjectProperty",
"kivy.lang.builder.Builder.unload_file"
] |
[((38, 84), 'kivy.lang.builder.Builder.unload_file', 'Builder.unload_file', (['"""modules/friday/login.kv"""'], {}), "('modules/friday/login.kv')\n", (57, 84), False, 'from kivy.lang.builder import Builder\n'), ((85, 147), 'kivy.lang.builder.Builder.unload_file', 'Builder.unload_file', (['"""modules/twitter_interface/permission.kv"""'], {}), "('modules/twitter_interface/permission.kv')\n", (104, 147), False, 'from kivy.lang.builder import Builder\n'), ((148, 205), 'kivy.lang.builder.Builder.unload_file', 'Builder.unload_file', (['"""modules/twitter_interface/login.kv"""'], {}), "('modules/twitter_interface/login.kv')\n", (167, 205), False, 'from kivy.lang.builder import Builder\n'), ((469, 507), 'kivy.config.Config.set', 'Config.set', (['"""graphics"""', '"""width"""', '"""500"""'], {}), "('graphics', 'width', '500')\n", (479, 507), False, 'from kivy.config import Config\n'), ((508, 547), 'kivy.config.Config.set', 'Config.set', (['"""graphics"""', '"""height"""', '"""640"""'], {}), "('graphics', 'height', '640')\n", (518, 547), False, 'from kivy.config import Config\n'), ((548, 590), 'kivy.config.Config.set', 'Config.set', (['"""graphics"""', '"""resizable"""', '(False)'], {}), "('graphics', 'resizable', False)\n", (558, 590), False, 'from kivy.config import Config\n'), ((591, 652), 'kivy.config.Config.set', 'Config.set', (['"""kivy"""', '"""window_icon"""', '"""data/friday/res/icon.ico"""'], {}), "('kivy', 'window_icon', 'data/friday/res/icon.ico')\n", (601, 652), False, 'from kivy.config import Config\n'), ((687, 704), 'kivy.core.window.Window.__init__', 'Window.__init__', ([], {}), '()\n', (702, 704), False, 'from kivy.core.window import Window\n'), ((705, 721), 'kivy.clock.Clock.__init__', 'Clock.__init__', ([], {}), '()\n', (719, 721), False, 'from kivy.clock import Clock\n'), ((862, 880), 'kivy.properties.StringProperty', 'StringProperty', (['""""""'], {}), "('')\n", (876, 880), False, 'from kivy.properties import StringProperty, ObjectProperty\n'), ((1039, 1059), 'kivy.properties.ObjectProperty', 'ObjectProperty', (['None'], {}), '(None)\n', (1053, 1059), False, 'from kivy.properties import StringProperty, ObjectProperty\n'), ((1071, 1091), 'kivy.properties.ObjectProperty', 'ObjectProperty', (['None'], {}), '(None)\n', (1085, 1091), False, 'from kivy.properties import StringProperty, ObjectProperty\n'), ((1202, 1239), 'kivy.clock.Clock.schedule_once', 'Clock.schedule_once', (['self.change', '(0.5)'], {}), '(self.change, 0.5)\n', (1221, 1239), False, 'from kivy.clock import Clock\n'), ((1314, 1357), 'kivy.clock.Clock.schedule_once', 'Clock.schedule_once', (['self.change_back', '(1.75)'], {}), '(self.change_back, 1.75)\n', (1333, 1357), False, 'from kivy.clock import Clock\n'), ((1414, 1448), 'kivy.clock.Clock.schedule_once', 'Clock.schedule_once', (['self.end', '(0.5)'], {}), '(self.end, 0.5)\n', (1433, 1448), False, 'from kivy.clock import Clock\n'), ((1182, 1198), 'kivy.uix.screenmanager.FadeTransition', 'FadeTransition', ([], {}), '()\n', (1196, 1198), False, 'from kivy.uix.screenmanager import FadeTransition\n'), ((1472, 1493), 'kivy.app.App.get_running_app', 'App.get_running_app', ([], {}), '()\n', (1491, 1493), False, 'from kivy.app import App\n')]
|
import pygame
import sprites
import projectiles
import chest
import enemy
import portal
class level():
    def __init__(self, design, enemies, chest1, chest2, chest3, portal, screen):  # design is a 2d list
self.boxGroup = pygame.sprite.Group()
self.boxes = []
self.enemies = enemies
self.chest1 = chest1
self.chest2 = chest2
self.chest3 = chest3
self.exit = portal
self.screen = screen
self.design = design
self.isPuzzleDone = False
def makeLevel(self):
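        # The design grid maps one symbol per tile: '#' box, '¡' torch,
        # '!' exit portal, '*'/'%'/'$' chests 1-3, 'X' slime enemy.
        # Tiles step 48px horizontally and 58px vertically from (16, 16).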
counter = 0
xSpot = 16
ySpot = 16
for x in self.design:
for y in x:
print(y)
                if y == '#':
self.boxes.append(sprites.sprites('Obstacles/box.png', (xSpot, ySpot)))
self.boxGroup.add(self.boxes[counter])
counter+=1
xSpot+=48
xSpot=0
ySpot+=58
xSpot = 16
ySpot = 16
for x in self.design:
for y in x:
print(y)
                if y == '¡':
self.boxes.append(sprites.sprites('Obstacles/torch.gif', (xSpot, ySpot)))
self.boxGroup.add(self.boxes[counter])
counter+=1
xSpot+=48
xSpot=0
ySpot+=58
xSpot = 16
ySpot = 16
for x in self.design:
for y in x:
print(y)
                if y == '!':
self.exit.append(portal.portal('images/portal.png', (xSpot, ySpot), self.boxes))
xSpot+=48
xSpot=0
ySpot+=58
#Reset counters to iterate through again -- this time for spawning enemies.
#(Obstacles list must be passed in for each enemy.)
xSpot = 16
ySpot = 16
for x in self.design:
for y in x:
print(y)
                if y == '*':
self.chest1.append(chest.chest('Obstacles/chest.png', (xSpot, ySpot), self.boxes,1,self.screen))
xSpot+=48
xSpot=0
ySpot+=58
xSpot = 16
ySpot = 16
for x in self.design:
for y in x:
print(y)
                if y == '%':
self.chest2.append(chest.chest('Obstacles/chest.png', (xSpot, ySpot), self.boxes,2,self.screen))
xSpot+=48
xSpot=0
ySpot+=58
xSpot = 16
ySpot = 16
for x in self.design:
for y in x:
print(y)
                if y == '$':
self.chest3.append(chest.chest('Obstacles/chest.png', (xSpot, ySpot), self.boxes,3,self.screen))
xSpot+=48
xSpot=0
ySpot+=58
#Reset counters to iterate through again -- this time for spawning enemies.
#(Obstacles list must be passed in for each enemy.)
xSpot = 16
ySpot = 16
for x in self.design:
for y in x:
                if y == 'X':
self.enemies.append(enemy.enemy('Enemies/slime.png', (xSpot, ySpot), self.boxes))
xSpot+=48
xSpot=0
ySpot+=58
def isComplete(self, enemyGroup): #checks to see if all enemies are cleared out
print(enemyGroup)
if not enemyGroup:
return True
return False
|
[
"portal.portal",
"pygame.sprite.Group",
"chest.chest",
"sprites.sprites",
"enemy.enemy"
] |
[((232, 253), 'pygame.sprite.Group', 'pygame.sprite.Group', ([], {}), '()\n', (251, 253), False, 'import pygame\n'), ((618, 670), 'sprites.sprites', 'sprites.sprites', (['"""Obstacles/box.png"""', '(xSpot, ySpot)'], {}), "('Obstacles/box.png', (xSpot, ySpot))\n", (633, 670), False, 'import sprites\n'), ((888, 942), 'sprites.sprites', 'sprites.sprites', (['"""Obstacles/torch.gif"""', '(xSpot, ySpot)'], {}), "('Obstacles/torch.gif', (xSpot, ySpot))\n", (903, 942), False, 'import sprites\n'), ((1159, 1221), 'portal.portal', 'portal.portal', (['"""images/portal.png"""', '(xSpot, ySpot)', 'self.boxes'], {}), "('images/portal.png', (xSpot, ySpot), self.boxes)\n", (1172, 1221), False, 'import portal\n'), ((1512, 1590), 'chest.chest', 'chest.chest', (['"""Obstacles/chest.png"""', '(xSpot, ySpot)', 'self.boxes', '(1)', 'self.screen'], {}), "('Obstacles/chest.png', (xSpot, ySpot), self.boxes, 1, self.screen)\n", (1523, 1590), False, 'import chest\n'), ((1747, 1825), 'chest.chest', 'chest.chest', (['"""Obstacles/chest.png"""', '(xSpot, ySpot)', 'self.boxes', '(2)', 'self.screen'], {}), "('Obstacles/chest.png', (xSpot, ySpot), self.boxes, 2, self.screen)\n", (1758, 1825), False, 'import chest\n'), ((1983, 2061), 'chest.chest', 'chest.chest', (['"""Obstacles/chest.png"""', '(xSpot, ySpot)', 'self.boxes', '(3)', 'self.screen'], {}), "('Obstacles/chest.png', (xSpot, ySpot), self.boxes, 3, self.screen)\n", (1994, 2061), False, 'import chest\n'), ((2338, 2398), 'enemy.enemy', 'enemy.enemy', (['"""Enemies/slime.png"""', '(xSpot, ySpot)', 'self.boxes'], {}), "('Enemies/slime.png', (xSpot, ySpot), self.boxes)\n", (2349, 2398), False, 'import enemy\n')]
|
from tempfile import NamedTemporaryFile
from os import remove
from mhctools.mixmhcpred import parse_mixmhcpred_results
from nose.tools import eq_
example_output = """Peptide\tScore_bestAllele\tBestAllele\t%Rank_bestAllele\tScore_A0201\t%Rank_A0201
MLDDFSAGA\t0.182093\tA0201\t0.3\t0.182093\t0.3
SPEGEETII\t-0.655341\tA0201\t51.0\t-0.655341\t51.0
ILDRIITNA\t0.203906\tA0201\t0.3\t0.203906\t0.3"""
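# The parser consumes MixMHCpred's tab-separated output shown above: one row
# per peptide, with a best-allele score/%rank plus per-allele columns.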
def test_parse_mixmhcpred_results():
with NamedTemporaryFile(mode="r+") as f:
f.write(example_output)
f.flush()
binding_results = parse_mixmhcpred_results(f.name)
eq_(len(binding_results), 3)
eq_(binding_results[0].peptide, "MLDDFSAGA")
eq_(binding_results[1].peptide, "SPEGEETII")
eq_(binding_results[2].peptide, "ILDRIITNA")
|
[
"tempfile.NamedTemporaryFile",
"mhctools.mixmhcpred.parse_mixmhcpred_results",
"nose.tools.eq_"
] |
[((445, 474), 'tempfile.NamedTemporaryFile', 'NamedTemporaryFile', ([], {'mode': '"""r+"""'}), "(mode='r+')\n", (463, 474), False, 'from tempfile import NamedTemporaryFile\n'), ((557, 589), 'mhctools.mixmhcpred.parse_mixmhcpred_results', 'parse_mixmhcpred_results', (['f.name'], {}), '(f.name)\n', (581, 589), False, 'from mhctools.mixmhcpred import parse_mixmhcpred_results\n'), ((636, 680), 'nose.tools.eq_', 'eq_', (['binding_results[0].peptide', '"""MLDDFSAGA"""'], {}), "(binding_results[0].peptide, 'MLDDFSAGA')\n", (639, 680), False, 'from nose.tools import eq_\n'), ((689, 733), 'nose.tools.eq_', 'eq_', (['binding_results[1].peptide', '"""SPEGEETII"""'], {}), "(binding_results[1].peptide, 'SPEGEETII')\n", (692, 733), False, 'from nose.tools import eq_\n'), ((742, 786), 'nose.tools.eq_', 'eq_', (['binding_results[2].peptide', '"""ILDRIITNA"""'], {}), "(binding_results[2].peptide, 'ILDRIITNA')\n", (745, 786), False, 'from nose.tools import eq_\n')]
|
# ******************************************************************************
# Copyright 2017-2018 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ******************************************************************************
from examples.word_language_model_with_tcn.adding_problem.adding_model import TCNForAdding
from examples.word_language_model_with_tcn.toy_data.adding import Adding
def test_tcn_adding():
"""
Sanity check -
Test function checks to make sure training loss drops to ~0 on small dummy dataset
"""
n_features = 2
hidden_sizes = [64] * 3
kernel_size = 3
dropout = 0.0
seq_len = 10
n_train = 5000
n_val = 100
batch_size = 32
n_epochs = 10
num_iterations = int(n_train * n_epochs * 1.0 / batch_size)
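    # arithmetic: 5000 samples * 10 epochs / batch size 32 ~= 1562 iterations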
lr = 0.002
grad_clip_value = 10
results_dir = "./"
adding_dataset = Adding(seq_len=seq_len, n_train=n_train, n_test=n_val)
model = TCNForAdding(seq_len, n_features, hidden_sizes, kernel_size=kernel_size,
dropout=dropout)
model.build_train_graph(lr, max_gradient_norm=grad_clip_value)
training_loss = model.run(adding_dataset, num_iterations=num_iterations, log_interval=1e6,
result_dir=results_dir)
assert training_loss < 1e-3
|
[
"examples.word_language_model_with_tcn.adding_problem.adding_model.TCNForAdding",
"examples.word_language_model_with_tcn.toy_data.adding.Adding"
] |
[((1383, 1437), 'examples.word_language_model_with_tcn.toy_data.adding.Adding', 'Adding', ([], {'seq_len': 'seq_len', 'n_train': 'n_train', 'n_test': 'n_val'}), '(seq_len=seq_len, n_train=n_train, n_test=n_val)\n', (1389, 1437), False, 'from examples.word_language_model_with_tcn.toy_data.adding import Adding\n'), ((1451, 1544), 'examples.word_language_model_with_tcn.adding_problem.adding_model.TCNForAdding', 'TCNForAdding', (['seq_len', 'n_features', 'hidden_sizes'], {'kernel_size': 'kernel_size', 'dropout': 'dropout'}), '(seq_len, n_features, hidden_sizes, kernel_size=kernel_size,\n dropout=dropout)\n', (1463, 1544), False, 'from examples.word_language_model_with_tcn.adding_problem.adding_model import TCNForAdding\n')]
|
'''Example streaming ffmpeg numpy processing.
Based on examples from https://github.com/kkroening/ffmpeg-python/tree/master/examples
Usage instructions:
    1. Install ffmpeg-python and numpy
2. Run python ffmpeg_stream.py input_file
3. In separate terminal run ffplay -f avi http://localhost:8080 (after enabling port forwarding if running remotely)
TODO: explore methods to reduce latency (both for ffmpeg and ffplay)
Demonstrates using ffmpeg to decode video input, process the frames in
python, and then encode video output using ffmpeg.
This example uses two ffmpeg processes - one to decode the input video
and one to encode an output video - while the raw frame processing is
done in python with numpy.
In addition the audio from the same input file is also streamed and combined with
the video.
At a high level, the signal graph looks like this:
(input video) -> [ffmpeg process 1] -> [python] -> [ffmpeg process 2] -> (output video)
(input audio) -> [ffmpeg process 1_audio] -------------^
Output video is sent to http server.
The simplest processing example simply darkens each frame by
multiplying the frame's numpy array by a constant value; see
``process_frame_simple``.
The audio is read and streamed in 40ms chunks (corresponding to one video frame).
We use named FIFO pipes for the communication to ffmpeg process 2, allowing use of
two separate pipes for audio and video.
The writing to these pipes happens in distinct threads (so that blocking calls in the pipes don't cause trouble).
'''
from __future__ import print_function
import argparse
import ffmpeg
import logging
import numpy as np
import os
import subprocess
import threading
parser = argparse.ArgumentParser(description='Example streaming ffmpeg numpy processing')
parser.add_argument('in_filename', help='Input filename')
parser.add_argument('port', default=8080, help='Port')
logger = logging.getLogger(__name__)
logging.basicConfig(level=logging.INFO)
# get video frame size using ffmpeg probe
def get_video_info(filename):
logger.info('Getting video size for {!r}'.format(filename))
probe = ffmpeg.probe(filename)
print(probe['streams'])
video_info = next(s for s in probe['streams'] if s['codec_type'] == 'video')
width = int(video_info['width'])
height = int(video_info['height'])
return width, height
# this is process for reading video file and outputting in raw frames to pipe
# we specify fps here which automatically converts the fps to the desired vale
def start_ffmpeg_process1(in_filename, fps):
logger.info('Starting ffmpeg process1')
args = (
ffmpeg
.input(in_filename)
.output('pipe:', format='rawvideo', pix_fmt='rgb24', r=fps)
.compile()
)
# all ffmpeg commands are ultimately run as subprocesses with appropriate piping for stdout
# the 'pipe:' in the output above means the output is written to stdout, which we redirect to
# subprocess.PIPE
return subprocess.Popen(args, stdout=subprocess.PIPE)
# this is process for reading audio file and outputting to pipe
# the format is pcm signed 16 bit little endian (essentially wav)
# ac=1 -> mono
# ar=16k -> audio sampling rate for output (automatically converted)
def start_ffmpeg_process1_audio(in_filename):
logger.info('Starting ffmpeg process1_audio')
args = (
ffmpeg
.input(in_filename)
.output('pipe:', format='s16le', acodec='pcm_s16le', ac=1, ar='16k')
.compile()
)
return subprocess.Popen(args, stdout=subprocess.PIPE)
# process for writing output to http url by taking input from two FIFO pipes (video and audio)
def start_ffmpeg_process2(fifo_name_video, fifo_name_audio, width, height, fps, port,
output_to='socket', output_path='None'):
logger.info('Starting ffmpeg process2')
server_url = "http://127.0.0.1:" + str(port) # any port should be fine, 127.0.0.1 is simply localhost
# inputs: parameters largely the same as in the previous two functions
input_video = ffmpeg.input(fifo_name_video, format='rawvideo', pix_fmt='rgb24', s='{}x{}'.format(width, height),
framerate=fps)
input_audio = ffmpeg.input(fifo_name_audio, format='s16le', acodec='pcm_s16le', ac=1, ar='16k')
if output_to == 'socket':
# (mp4 doesn't work because it requires random access not appropriate for streaming)
video_format = 'avi' # format supporting both video and audio.
# combine the two and output to url (listen = 1 probably sets the server)
args = (
ffmpeg
.output(input_audio, input_video, server_url, listen=1, f=video_format, vcodec='libx264',
preset='ultrafast')
# .global_args('-fflags', 'nobuffer') # .run()
# .global_args('-ss', '4')
# .global_args('-preset', 'ultrafast')
.compile()
)
elif output_to == 'file':
video_format = 'mp4'
if output_path == 'None':
raise ValueError('Asked to write in file but path not provided.')
args = (
ffmpeg
.output(input_audio, input_video, output_path, f=video_format, vcodec='libx264', preset='ultrafast')
.compile()
)
else:
raise ValueError("Wrong output format. Should be 'socket' or 'file'.")
return subprocess.Popen(args)
# read frame from process1 stdout pipe and convert to numpy
def read_frame(process1, width, height):
logger.debug('Reading frame')
# Note: RGB24 == 3 bytes per pixel.
frame_size = width * height * 3
in_bytes = process1.stdout.read(frame_size)
if len(in_bytes) == 0:
frame = None
else:
assert len(in_bytes) == frame_size
frame = (
np
.frombuffer(in_bytes, np.uint8)
.reshape([height, width, 3])
)
return frame
# read audio frame from process1_audio stdout pipe
def read_audio_frame(process1_audio, num_bytes):
logger.debug('Reading audio frame')
in_bytes = process1_audio.stdout.read(num_bytes)
return in_bytes
# darken frame
def process_frame_simple(frame):
'''Simple processing example: darken frame.'''
return frame * 0.3
# write video frame to fifo pipe as bytes
def write_video_frame(fifo_video_out, frame):
logger.debug('Writing frame')
fifo_video_out.write(
frame
.astype(np.uint8)
.tobytes()
)
# write audio frame to fifo pipe as bytes
def write_audio_frame(fifo_audio_out, in_audio_frame):
logger.debug('Writing audio frame')
fifo_audio_out.write(in_audio_frame)
def video_thread_handler(fifo_filename_video, process1, width, height):
fifo_video_out = open(fifo_filename_video, "wb")
# this blocks until the read for the fifo opens so we run in separate thread
# read frame one by one, process and write to fifo pipe
while True:
in_frame = read_frame(process1, width, height)
if in_frame is None:
logger.info('End of input stream')
break
logger.debug('Processing frame')
out_frame = process_frame(in_frame)
write_video_frame(fifo_video_out, out_frame)
fifo_video_out.close()
def audio_thread_handler(fifo_filename_audio, process1_audio, audio_bytes_per_video_frame):
fifo_audio_out = open(fifo_filename_audio, "wb")
# this blocks until the read for the fifo opens so we run in separate thread
# read frame one by one, process and write to fifo pipe
while True:
in_audio_frame = read_audio_frame(process1_audio, audio_bytes_per_video_frame)
if len(in_audio_frame) == 0:
break
write_audio_frame(fifo_audio_out, in_audio_frame)
fifo_audio_out.close()
def run(in_filename, process_frame, port):
width, height = get_video_info(in_filename)
fps = 25 # video fps
process1 = start_ffmpeg_process1(in_filename, fps)
process1_audio = start_ffmpeg_process1_audio(in_filename)
# fifo pipes (remove file name if already exists)
fifo_filename_video = '/tmp/fifovideo'
fifo_filename_audio = '/tmp/fifoaudio'
if os.path.exists(fifo_filename_video):
os.remove(fifo_filename_video)
if os.path.exists(fifo_filename_audio):
os.remove(fifo_filename_audio)
os.mkfifo(fifo_filename_video)
os.mkfifo(fifo_filename_audio)
process2 = start_ffmpeg_process2(fifo_filename_video, fifo_filename_audio, width, height, fps, port)
    audio_bytes_per_video_frame = 640 * 2  # s16le mono at 16 kHz: 16000 samples/s / 25 fps = 640 samples per video frame, 2 bytes each
# we run audio and video in separate threads otherwise the fifo opening blocks
# create threads
video_thread = threading.Thread(target=video_thread_handler, args=(fifo_filename_video, process1, width, height))
audio_thread = threading.Thread(target=audio_thread_handler,
args=(fifo_filename_audio, process1_audio, audio_bytes_per_video_frame))
# start threads
video_thread.start()
audio_thread.start()
# wait for threads to finish executing
video_thread.join()
audio_thread.join()
logger.info('Waiting for ffmpeg process1')
process1.wait()
logger.info('Waiting for ffmpeg process2')
process2.wait()
os.remove(fifo_filename_video)
os.remove(fifo_filename_audio)
logger.info('Done')
if __name__ == '__main__':
args = parser.parse_args()
port = args.port
process_frame = process_frame_simple
run(args.in_filename, process_frame, port)
|
[
"threading.Thread",
"subprocess.Popen",
"os.remove",
"argparse.ArgumentParser",
"logging.basicConfig",
"numpy.frombuffer",
"os.path.exists",
"ffmpeg.output",
"ffmpeg.probe",
"os.mkfifo",
"ffmpeg.input",
"logging.getLogger"
] |
[((1704, 1789), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Example streaming ffmpeg numpy processing"""'}), "(description='Example streaming ffmpeg numpy processing'\n )\n", (1727, 1789), False, 'import argparse\n'), ((1908, 1935), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1925, 1935), False, 'import logging\n'), ((1936, 1975), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.INFO'}), '(level=logging.INFO)\n', (1955, 1975), False, 'import logging\n'), ((2126, 2148), 'ffmpeg.probe', 'ffmpeg.probe', (['filename'], {}), '(filename)\n', (2138, 2148), False, 'import ffmpeg\n'), ((2996, 3042), 'subprocess.Popen', 'subprocess.Popen', (['args'], {'stdout': 'subprocess.PIPE'}), '(args, stdout=subprocess.PIPE)\n', (3012, 3042), False, 'import subprocess\n'), ((3536, 3582), 'subprocess.Popen', 'subprocess.Popen', (['args'], {'stdout': 'subprocess.PIPE'}), '(args, stdout=subprocess.PIPE)\n', (3552, 3582), False, 'import subprocess\n'), ((4241, 4327), 'ffmpeg.input', 'ffmpeg.input', (['fifo_name_audio'], {'format': '"""s16le"""', 'acodec': '"""pcm_s16le"""', 'ac': '(1)', 'ar': '"""16k"""'}), "(fifo_name_audio, format='s16le', acodec='pcm_s16le', ac=1, ar=\n '16k')\n", (4253, 4327), False, 'import ffmpeg\n'), ((5454, 5476), 'subprocess.Popen', 'subprocess.Popen', (['args'], {}), '(args)\n', (5470, 5476), False, 'import subprocess\n'), ((8252, 8287), 'os.path.exists', 'os.path.exists', (['fifo_filename_video'], {}), '(fifo_filename_video)\n', (8266, 8287), False, 'import os\n'), ((8335, 8370), 'os.path.exists', 'os.path.exists', (['fifo_filename_audio'], {}), '(fifo_filename_audio)\n', (8349, 8370), False, 'import os\n'), ((8416, 8446), 'os.mkfifo', 'os.mkfifo', (['fifo_filename_video'], {}), '(fifo_filename_video)\n', (8425, 8446), False, 'import os\n'), ((8451, 8481), 'os.mkfifo', 'os.mkfifo', (['fifo_filename_audio'], {}), '(fifo_filename_audio)\n', (8460, 8481), False, 'import os\n'), ((8795, 8897), 'threading.Thread', 'threading.Thread', ([], {'target': 'video_thread_handler', 'args': '(fifo_filename_video, process1, width, height)'}), '(target=video_thread_handler, args=(fifo_filename_video,\n process1, width, height))\n', (8811, 8897), False, 'import threading\n'), ((8913, 9035), 'threading.Thread', 'threading.Thread', ([], {'target': 'audio_thread_handler', 'args': '(fifo_filename_audio, process1_audio, audio_bytes_per_video_frame)'}), '(target=audio_thread_handler, args=(fifo_filename_audio,\n process1_audio, audio_bytes_per_video_frame))\n', (8929, 9035), False, 'import threading\n'), ((9372, 9402), 'os.remove', 'os.remove', (['fifo_filename_video'], {}), '(fifo_filename_video)\n', (9381, 9402), False, 'import os\n'), ((9407, 9437), 'os.remove', 'os.remove', (['fifo_filename_audio'], {}), '(fifo_filename_audio)\n', (9416, 9437), False, 'import os\n'), ((8297, 8327), 'os.remove', 'os.remove', (['fifo_filename_video'], {}), '(fifo_filename_video)\n', (8306, 8327), False, 'import os\n'), ((8380, 8410), 'os.remove', 'os.remove', (['fifo_filename_audio'], {}), '(fifo_filename_audio)\n', (8389, 8410), False, 'import os\n'), ((4631, 4751), 'ffmpeg.output', 'ffmpeg.output', (['input_audio', 'input_video', 'server_url'], {'listen': '(1)', 'f': 'video_format', 'vcodec': '"""libx264"""', 'preset': '"""ultrafast"""'}), "(input_audio, input_video, server_url, listen=1, f=\n video_format, vcodec='libx264', preset='ultrafast')\n", (4644, 4751), False, 'import ffmpeg\n'), ((5870, 5903), 'numpy.frombuffer', 
'np.frombuffer', (['in_bytes', 'np.uint8'], {}), '(in_bytes, np.uint8)\n', (5883, 5903), True, 'import numpy as np\n'), ((2628, 2653), 'ffmpeg.input', 'ffmpeg.input', (['in_filename'], {}), '(in_filename)\n', (2640, 2653), False, 'import ffmpeg\n'), ((3376, 3401), 'ffmpeg.input', 'ffmpeg.input', (['in_filename'], {}), '(in_filename)\n', (3388, 3401), False, 'import ffmpeg\n'), ((5193, 5304), 'ffmpeg.output', 'ffmpeg.output', (['input_audio', 'input_video', 'output_path'], {'f': 'video_format', 'vcodec': '"""libx264"""', 'preset': '"""ultrafast"""'}), "(input_audio, input_video, output_path, f=video_format, vcodec\n ='libx264', preset='ultrafast')\n", (5206, 5304), False, 'import ffmpeg\n')]
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# || ____ _ __
# +------+ / __ )(_) /_______________ _____ ___
# | 0xBC | / __ / / __/ ___/ ___/ __ `/_ / / _ \
# +------+ / /_/ / / /_/ /__/ / / /_/ / / /_/ __/
# || || /_____/_/\__/\___/_/ \__,_/ /___/\___/
#
# Copyright (C) 2011-2013 Bitcraze AB
#
# Crazyflie Nano Quadcopter Client
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""
Shows the Log TOC of available variables in the Crazyflie.
"""
import cfclient
from cfclient.ui.tab import Tab
from PyQt5 import QtWidgets
from PyQt5 import uic
from PyQt5.QtCore import pyqtSignal
from PyQt5.QtCore import pyqtSlot
from PyQt5.QtCore import Qt
__author__ = '<EMAIL>craze AB'
__all__ = ['LogTab']
param_tab_class = uic.loadUiType(cfclient.module_path +
"/ui/tabs/logTab.ui")[0]
class LogTab(Tab, param_tab_class):
connectedSignal = pyqtSignal(str)
disconnectedSignal = pyqtSignal(str)
def __init__(self, tabWidget, helper, *args):
super(LogTab, self).__init__(*args)
self.setupUi(self)
self.tabName = "Log TOC"
self.menuName = "Log TOC"
self.helper = helper
self.tabWidget = tabWidget
self.cf = helper.cf
# Init the tree widget
self.logTree.setHeaderLabels(['Name', 'ID', 'Unpack', 'Storage'])
self.cf.connected.add_callback(self.connectedSignal.emit)
self.connectedSignal.connect(self.connected)
# Clear the log TOC list when the Crazyflie is disconnected
self.cf.disconnected.add_callback(self.disconnectedSignal.emit)
self.disconnectedSignal.connect(self.disconnected)
@pyqtSlot('QString')
def disconnected(self, linkname):
self.logTree.clear()
@pyqtSlot(str)
def connected(self, linkURI):
self.logTree.clear()
toc = self.cf.log.toc
for group in list(toc.toc.keys()):
groupItem = QtWidgets.QTreeWidgetItem()
groupItem.setData(0, Qt.DisplayRole, group)
for param in list(toc.toc[group].keys()):
item = QtWidgets.QTreeWidgetItem()
item.setData(0, Qt.DisplayRole, param)
item.setData(1, Qt.DisplayRole, toc.toc[group][param].ident)
item.setData(2, Qt.DisplayRole, toc.toc[group][param].pytype)
item.setData(3, Qt.DisplayRole, toc.toc[group][param].ctype)
groupItem.addChild(item)
self.logTree.addTopLevelItem(groupItem)
self.logTree.expandItem(groupItem)
|
[
"PyQt5.QtCore.pyqtSignal",
"PyQt5.uic.loadUiType",
"PyQt5.QtWidgets.QTreeWidgetItem",
"PyQt5.QtCore.pyqtSlot"
] |
[((1431, 1490), 'PyQt5.uic.loadUiType', 'uic.loadUiType', (["(cfclient.module_path + '/ui/tabs/logTab.ui')"], {}), "(cfclient.module_path + '/ui/tabs/logTab.ui')\n", (1445, 1490), False, 'from PyQt5 import uic\n'), ((1587, 1602), 'PyQt5.QtCore.pyqtSignal', 'pyqtSignal', (['str'], {}), '(str)\n', (1597, 1602), False, 'from PyQt5.QtCore import pyqtSignal\n'), ((1628, 1643), 'PyQt5.QtCore.pyqtSignal', 'pyqtSignal', (['str'], {}), '(str)\n', (1638, 1643), False, 'from PyQt5.QtCore import pyqtSignal\n'), ((2360, 2379), 'PyQt5.QtCore.pyqtSlot', 'pyqtSlot', (['"""QString"""'], {}), "('QString')\n", (2368, 2379), False, 'from PyQt5.QtCore import pyqtSlot\n'), ((2453, 2466), 'PyQt5.QtCore.pyqtSlot', 'pyqtSlot', (['str'], {}), '(str)\n', (2461, 2466), False, 'from PyQt5.QtCore import pyqtSlot\n'), ((2629, 2656), 'PyQt5.QtWidgets.QTreeWidgetItem', 'QtWidgets.QTreeWidgetItem', ([], {}), '()\n', (2654, 2656), False, 'from PyQt5 import QtWidgets\n'), ((2790, 2817), 'PyQt5.QtWidgets.QTreeWidgetItem', 'QtWidgets.QTreeWidgetItem', ([], {}), '()\n', (2815, 2817), False, 'from PyQt5 import QtWidgets\n')]
|
# Copyright 2020 TestProject (https://testproject.io)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from src.testproject.classes import ProxyDescriptor
from src.testproject.sdk.addons import ActionProxy
class TypeRandomPhoneAction(ActionProxy):
def __init__(self, country_code: str, max_digits: int = 10):
super().__init__()
self.proxydescriptor = ProxyDescriptor(
guid="GrQN1LQqTEmuYTnIujiEwA", classname="io.testproject.examples.sdk.actions.TypeRandomPhoneAction",
)
self.countryCode = country_code
self.maxDigits = max_digits
self.phone = None
|
[
"src.testproject.classes.ProxyDescriptor"
] |
[((870, 992), 'src.testproject.classes.ProxyDescriptor', 'ProxyDescriptor', ([], {'guid': '"""GrQN1LQqTEmuYTnIujiEwA"""', 'classname': '"""io.testproject.examples.sdk.actions.TypeRandomPhoneAction"""'}), "(guid='GrQN1LQqTEmuYTnIujiEwA', classname=\n 'io.testproject.examples.sdk.actions.TypeRandomPhoneAction')\n", (885, 992), False, 'from src.testproject.classes import ProxyDescriptor\n')]
|
#!/usr/bin/env python
import rospy
# import sys
from std_msgs.msg import ColorRGBA
from geometry_msgs.msg import PoseStamped, Twist, Vector3, Point
from ford_msgs.msg import Clusters
from visualization_msgs.msg import Marker, MarkerArray
import numpy as np
import math
from nav_msgs.msg import Odometry
import configparser
import torch
import gym
from crowd_nav.policy.cadrl import CADRL
from crowd_nav.policy.lstm_rl import LstmRL
from crowd_nav.policy.sarl import SARL
from crowd_sim.envs.utils.robot import Robot
PED_RADIUS = 0.3
# angle_1 - angle_2, wrapped so the result stays within [-pi, pi]
def find_angle_diff(angle_1, angle_2):
angle_diff_raw = angle_1 - angle_2
angle_diff = (angle_diff_raw + np.pi) % (2 * np.pi) - np.pi
return angle_diff
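# e.g. find_angle_diff(3.0, -3.0) returns ~-0.28 rather than the raw difference 6.0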
class NN_tb3():
def __init__(self, env, env_config, policy):
#
self.env = env
self.env_config = env_config
# configure robot
self.robot = Robot(env_config, 'robot')
self.robot.set_policy(policy)
self.env.set_robot(self.robot) #pass robot parameters into env
        self.ob = env.reset('test',1)   # initialize some parameters from the .config file (e.g. time_step, success_reward) for other instances
self.policy = policy
self.policy.set_env(env)
# for action
self.angle2Action = 0
self.distance = 0
# for subscribers
self.pose = PoseStamped()
self.vel = Vector3()
self.psi = 0.0
# for publishers
self.global_goal = PoseStamped()
self.goal = PoseStamped()
self.desired_position = PoseStamped()
self.desired_action = np.zeros((2,))
# # publishers
self.pub_twist = rospy.Publisher('/cmd_vel',Twist,queue_size=1)
# self.pub_pose_marker = rospy.Publisher('',Marker,queue_size=1)
# self.pub_agent_markers = rospy.Publisher('~agent_markers',MarkerArray,queue_size=1)
self.pub_path_marker = rospy.Publisher('/action',Marker,queue_size=1)
# self.pub_goal_path_marker = rospy.Publisher('~goal_path_marker',Marker,queue_size=1)
# # sub
self.sub_pose = rospy.Subscriber('/odom',Odometry,self.cbPose)
self.sub_global_goal = rospy.Subscriber('/goal',PoseStamped, self.cbGlobalGoal)
self.sub_subgoal = rospy.Subscriber('/plan_manager/subgoal',PoseStamped, self.cbSubGoal)
# subgoals
self.sub_goal = Vector3()
# self.sub_clusters = rospy.Subscriber('~clusters',Clusters, self.cbClusters)
# control timer
self.control_timer = rospy.Timer(rospy.Duration(0.2),self.cbControl)
self.nn_timer = rospy.Timer(rospy.Duration(0.01),self.cbComputeActionCrowdNav)
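        # timers: control loop at 5 Hz (0.2 s period), action computation at 100 Hz (0.01 s period)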
def update_angle2Action(self):
# action vector
v_a = np.array([self.desired_position.pose.position.x-self.pose.pose.position.x,self.desired_position.pose.position.y-self.pose.pose.position.y])
# pose direction
e_dir = np.array([math.cos(self.psi), math.sin(self.psi)])
        # signed angle between v_a and e_dir: atan2(det, dot), in [-pi, pi]
self.angle2Action = np.math.atan2(np.linalg.det([v_a,e_dir]),np.dot(v_a,e_dir))
def cbGlobalGoal(self,msg):
self.stop_moving_flag = True
self.new_global_goal_received = True
self.global_goal = msg
self.goal.pose.position.x = msg.pose.position.x
self.goal.pose.position.y = msg.pose.position.y
self.goal.header = msg.header
# reset subgoals
print("new goal: "+str([self.goal.pose.position.x,self.goal.pose.position.y]))
def cbSubGoal(self,msg):
# update subGoal
self.sub_goal.x = msg.pose.position.x
self.sub_goal.y = msg.pose.position.y
def goalReached(self):
# check if near to global goal
if self.distance > 0.3:
return False
else:
return True
def cbPose(self, msg):
# update robot vel (vx,vy)
self.cbVel(msg)
# get pose angle
q = msg.pose.pose.orientation
self.psi = np.arctan2(2.0*(q.w*q.z + q.x*q.y), 1-2*(q.y*q.y+q.z*q.z)) # bounded by [-pi, pi]
self.pose = msg.pose
self.visualize_path()
v_p = msg.pose.pose.position
v_g = self.sub_goal
v_pg = np.array([v_g.x-v_p.x,v_g.y-v_p.y])
self.distance = np.linalg.norm(v_pg)
# self.visualize_pose(msg.pose.pose.position,msg.pose.pose.orientation)
def cbVel(self, msg):
self.vel = msg.twist.twist.linear
def cbClusters(self,msg):
other_agents = []
xs = []; ys = []; radii = []; labels = []
num_clusters = len(msg.labels)
for i in range(num_clusters):
index = msg.labels[i]
x = msg.mean_points[i].x; y = msg.mean_points[i].y
v_x = msg.velocities[i].x; v_y = msg.velocities[i].y
radius = self.obst_rad
xs.append(x); ys.append(y); radii.append(radius); labels.append(index)
# self.visualize_other_agent(x,y,radius,msg.labels[i])
# helper fields
heading_angle = np.arctan2(v_y, v_x)
pref_speed = np.linalg.norm(np.array([v_x, v_y]))
goal_x = x + 5.0; goal_y = y + 5.0
if pref_speed < 0.2:
pref_speed = 0; v_x = 0; v_y = 0
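            # NOTE: the `agent` module used below is never imported in this file;
            # cbClusters is effectively dead code (its subscriber is commented out in __init__).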
other_agents.append(agent.Agent(x, y, goal_x, goal_y, radius, pref_speed, heading_angle, index))
self.visualize_other_agents(xs, ys, radii, labels)
self.other_agents_state = other_agents
def stop_moving(self):
twist = Twist()
self.pub_twist.publish(twist)
def update_action(self, action):
# print 'update action'
self.desired_action = action
# self.desired_position.pose.position.x = self.pose.pose.position.x + 1*action[0]*np.cos(action[1])
# self.desired_position.pose.position.y = self.pose.pose.position.y + 1*action[0]*np.sin(action[1])
self.desired_position.pose.position.x = self.pose.pose.position.x + (action[0])
self.desired_position.pose.position.y = self.pose.pose.position.y + (action[1])
# print(action[0])
def cbControl(self, event):
twist = Twist()
if not self.goalReached():
if abs(self.angle2Action) > 0.1 and self.angle2Action > 0:
twist.angular.z = -0.3
print("spinning in place +")
elif abs(self.angle2Action) > 0.1 and self.angle2Action < 0:
twist.angular.z = 0.3
print("spinning in place -")
# else:
vel = np.array([self.desired_action[0],self.desired_action[1]])
twist.linear.x = 0.1*np.linalg.norm(vel)
self.pub_twist.publish(twist)
def cbComputeActionCrowdNav(self, event):
robot_x = self.pose.pose.position.x
robot_y = self.pose.pose.position.y
# goal
goal_x = self.sub_goal.x
goal_y = self.sub_goal.y
# velocity
robot_vx = self.vel.x
robot_vy = self.vel.y
        # orientation
theta = self.psi
robot_radius = 0.3
# set robot info
self.robot.set(robot_x, robot_y, goal_x, goal_y, robot_vx, robot_vy, theta, robot_radius)
# obstacle: position, velocity, radius
# position
# obstacle_x = [0.1,0.2,0.3,0.4,0.5]
# obstacle_y = [0.1,0.2,0.3,0.4,0.5]
# # velocity
# obstacle_vx = [0.1,0.2,0.3,0.4,0.5]
# obstacle_vy = [0.1,0.2,0.3,0.4,0.5]
obstacle_x = [-6.0,-6.0,-6.0,-6.0,-6.0]
obstacle_y = [-6.0,-6.0,-6.0,-6.0,-6.0]
# velocity
obstacle_vx = [0.0,0.0,0.0,0.0,0.0]
obstacle_vy = [0.0,0.0,0.0,0.0,0.0]
obstacle_radius = 0.3
# initial obstacle instances and set value
for i in range(self.env_config.getint('sim','human_num')):
self.env.humans[i].set(obstacle_x[i], obstacle_y[i], goal_x,goal_y, obstacle_vx[i], obstacle_vy[i], theta, obstacle_radius)
self.ob[i]= self.env.humans[i].get_observable_state()
# ************************************ Output ************************************
# get action info
action = self.robot.act(self.ob)
# print('\n---------\nrobot position (X,Y):', position.position)
# print(action)
# print(theta)
self.update_action(action)
self.update_angle2Action()
def update_subgoal(self,subgoal):
self.goal.pose.position.x = subgoal[0]
self.goal.pose.position.y = subgoal[1]
def visualize_path(self):
marker = Marker()
marker.header.stamp = rospy.Time.now()
marker.header.frame_id = 'map'
marker.ns = 'path_arrow'
marker.id = 0
marker.type = marker.ARROW
marker.action = marker.ADD
marker.points.append(self.pose.pose.position)
marker.points.append(self.desired_position.pose.position)
marker.scale = Vector3(x=0.1,y=0.2,z=0.2)
marker.color = ColorRGBA(b=1.0,a=1.0)
marker.lifetime = rospy.Duration(1)
self.pub_path_marker.publish(marker)
# # Display BLUE DOT at NN desired position
# marker = Marker()
# marker.header.stamp = rospy.Time.now()
# marker.header.frame_id = 'map'
# marker.ns = 'path_trail'
# marker.id = self.num_poses
# marker.type = marker.CUBE
# marker.action = marker.ADD
# marker.pose.position = copy.deepcopy(self.pose.pose.position)
# marker.scale = Vector3(x=0.2,y=0.2,z=0.2)
# marker.color = ColorRGBA(g=0.0,r=0,b=1.0,a=0.3)
# marker.lifetime = rospy.Duration(60)
# self.pub_path_marker.publish(marker)
def on_shutdown(self):
rospy.loginfo("[%s] Shutting down.")
self.stop_moving()
rospy.loginfo("Stopped %s's velocity.")
def run():
policy_name = "lstm"
device = 'cpu'
phase = 'test'
select_policy = {"cadrl":CADRL(),"lstm":LstmRL(),"sarl":SARL()}
# the path of training result which contains configs and rl mode
    env_config_file = 'crowd_nav/data/output/env.config'  # path beginning without a slash
policy_config_file = 'crowd_nav/data/output/policy.config'
model_weights = 'crowd_nav/data/output/rl_model_'+policy_name+'.pth'
# print(model_weights)
# select policy
policy = select_policy[policy_name] #{SARL(),CADRL(),LstmRL()}
policy_config = configparser.RawConfigParser()
policy_config.read(policy_config_file)
policy.configure(policy_config)
policy.get_model().load_state_dict(torch.load(model_weights))
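    # assumption: the weights were saved for CPU; if they were saved on GPU,
    # torch.load would need map_location=device here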
policy.set_device(device)
policy.set_phase(phase)
# configure environment / obstacles
env_config = configparser.RawConfigParser()
env_config.read(env_config_file)
env = gym.make('CrowdSim-v0') #env is inherited from CrowdSim class in crowd_sim.py
env.configure(env_config)
rospy.init_node('crowdnav_tb3',anonymous=False)
print('==================================\ncrowdnav node started')
nn_tb3 = NN_tb3(env,env_config,policy)
rospy.on_shutdown(nn_tb3.on_shutdown)
rospy.spin()
if __name__ == '__main__':
run()
|
[
"geometry_msgs.msg.Vector3",
"crowd_nav.policy.sarl.SARL",
"rospy.Subscriber",
"numpy.arctan2",
"numpy.linalg.norm",
"std_msgs.msg.ColorRGBA",
"rospy.Duration",
"geometry_msgs.msg.PoseStamped",
"crowd_sim.envs.utils.robot.Robot",
"rospy.Time.now",
"configparser.RawConfigParser",
"torch.load",
"rospy.init_node",
"math.cos",
"numpy.linalg.det",
"crowd_nav.policy.cadrl.CADRL",
"geometry_msgs.msg.Twist",
"math.sin",
"rospy.loginfo",
"rospy.on_shutdown",
"visualization_msgs.msg.Marker",
"numpy.dot",
"crowd_nav.policy.lstm_rl.LstmRL",
"gym.make",
"numpy.zeros",
"rospy.Publisher",
"numpy.array",
"rospy.spin"
] |
[((10476, 10506), 'configparser.RawConfigParser', 'configparser.RawConfigParser', ([], {}), '()\n', (10504, 10506), False, 'import configparser\n'), ((10768, 10798), 'configparser.RawConfigParser', 'configparser.RawConfigParser', ([], {}), '()\n', (10796, 10798), False, 'import configparser\n'), ((10846, 10869), 'gym.make', 'gym.make', (['"""CrowdSim-v0"""'], {}), "('CrowdSim-v0')\n", (10854, 10869), False, 'import gym\n'), ((10963, 11011), 'rospy.init_node', 'rospy.init_node', (['"""crowdnav_tb3"""'], {'anonymous': '(False)'}), "('crowdnav_tb3', anonymous=False)\n", (10978, 11011), False, 'import rospy\n'), ((11130, 11167), 'rospy.on_shutdown', 'rospy.on_shutdown', (['nn_tb3.on_shutdown'], {}), '(nn_tb3.on_shutdown)\n', (11147, 11167), False, 'import rospy\n'), ((11173, 11185), 'rospy.spin', 'rospy.spin', ([], {}), '()\n', (11183, 11185), False, 'import rospy\n'), ((951, 977), 'crowd_sim.envs.utils.robot.Robot', 'Robot', (['env_config', '"""robot"""'], {}), "(env_config, 'robot')\n", (956, 977), False, 'from crowd_sim.envs.utils.robot import Robot\n'), ((1414, 1427), 'geometry_msgs.msg.PoseStamped', 'PoseStamped', ([], {}), '()\n', (1425, 1427), False, 'from geometry_msgs.msg import PoseStamped, Twist, Vector3, Point\n'), ((1447, 1456), 'geometry_msgs.msg.Vector3', 'Vector3', ([], {}), '()\n', (1454, 1456), False, 'from geometry_msgs.msg import PoseStamped, Twist, Vector3, Point\n'), ((1533, 1546), 'geometry_msgs.msg.PoseStamped', 'PoseStamped', ([], {}), '()\n', (1544, 1546), False, 'from geometry_msgs.msg import PoseStamped, Twist, Vector3, Point\n'), ((1567, 1580), 'geometry_msgs.msg.PoseStamped', 'PoseStamped', ([], {}), '()\n', (1578, 1580), False, 'from geometry_msgs.msg import PoseStamped, Twist, Vector3, Point\n'), ((1613, 1626), 'geometry_msgs.msg.PoseStamped', 'PoseStamped', ([], {}), '()\n', (1624, 1626), False, 'from geometry_msgs.msg import PoseStamped, Twist, Vector3, Point\n'), ((1657, 1671), 'numpy.zeros', 'np.zeros', (['(2,)'], {}), '((2,))\n', (1665, 1671), True, 'import numpy as np\n'), ((1721, 1769), 'rospy.Publisher', 'rospy.Publisher', (['"""/cmd_vel"""', 'Twist'], {'queue_size': '(1)'}), "('/cmd_vel', Twist, queue_size=1)\n", (1736, 1769), False, 'import rospy\n'), ((1967, 2015), 'rospy.Publisher', 'rospy.Publisher', (['"""/action"""', 'Marker'], {'queue_size': '(1)'}), "('/action', Marker, queue_size=1)\n", (1982, 2015), False, 'import rospy\n'), ((2149, 2197), 'rospy.Subscriber', 'rospy.Subscriber', (['"""/odom"""', 'Odometry', 'self.cbPose'], {}), "('/odom', Odometry, self.cbPose)\n", (2165, 2197), False, 'import rospy\n'), ((2227, 2284), 'rospy.Subscriber', 'rospy.Subscriber', (['"""/goal"""', 'PoseStamped', 'self.cbGlobalGoal'], {}), "('/goal', PoseStamped, self.cbGlobalGoal)\n", (2243, 2284), False, 'import rospy\n'), ((2311, 2381), 'rospy.Subscriber', 'rospy.Subscriber', (['"""/plan_manager/subgoal"""', 'PoseStamped', 'self.cbSubGoal'], {}), "('/plan_manager/subgoal', PoseStamped, self.cbSubGoal)\n", (2327, 2381), False, 'import rospy\n'), ((2433, 2442), 'geometry_msgs.msg.Vector3', 'Vector3', ([], {}), '()\n', (2440, 2442), False, 'from geometry_msgs.msg import PoseStamped, Twist, Vector3, Point\n'), ((2794, 2942), 'numpy.array', 'np.array', (['[self.desired_position.pose.position.x - self.pose.pose.position.x, self.\n desired_position.pose.position.y - self.pose.pose.position.y]'], {}), '([self.desired_position.pose.position.x - self.pose.pose.position.x,\n self.desired_position.pose.position.y - self.pose.pose.position.y])\n', (2802, 2942), True, 'import 
numpy as np\n'), ((4046, 4120), 'numpy.arctan2', 'np.arctan2', (['(2.0 * (q.w * q.z + q.x * q.y))', '(1 - 2 * (q.y * q.y + q.z * q.z))'], {}), '(2.0 * (q.w * q.z + q.x * q.y), 1 - 2 * (q.y * q.y + q.z * q.z))\n', (4056, 4120), True, 'import numpy as np\n'), ((4268, 4308), 'numpy.array', 'np.array', (['[v_g.x - v_p.x, v_g.y - v_p.y]'], {}), '([v_g.x - v_p.x, v_g.y - v_p.y])\n', (4276, 4308), True, 'import numpy as np\n'), ((4328, 4348), 'numpy.linalg.norm', 'np.linalg.norm', (['v_pg'], {}), '(v_pg)\n', (4342, 4348), True, 'import numpy as np\n'), ((5576, 5583), 'geometry_msgs.msg.Twist', 'Twist', ([], {}), '()\n', (5581, 5583), False, 'from geometry_msgs.msg import PoseStamped, Twist, Vector3, Point\n'), ((6199, 6206), 'geometry_msgs.msg.Twist', 'Twist', ([], {}), '()\n', (6204, 6206), False, 'from geometry_msgs.msg import PoseStamped, Twist, Vector3, Point\n'), ((8623, 8631), 'visualization_msgs.msg.Marker', 'Marker', ([], {}), '()\n', (8629, 8631), False, 'from visualization_msgs.msg import Marker, MarkerArray\n'), ((8662, 8678), 'rospy.Time.now', 'rospy.Time.now', ([], {}), '()\n', (8676, 8678), False, 'import rospy\n'), ((8986, 9014), 'geometry_msgs.msg.Vector3', 'Vector3', ([], {'x': '(0.1)', 'y': '(0.2)', 'z': '(0.2)'}), '(x=0.1, y=0.2, z=0.2)\n', (8993, 9014), False, 'from geometry_msgs.msg import PoseStamped, Twist, Vector3, Point\n'), ((9036, 9059), 'std_msgs.msg.ColorRGBA', 'ColorRGBA', ([], {'b': '(1.0)', 'a': '(1.0)'}), '(b=1.0, a=1.0)\n', (9045, 9059), False, 'from std_msgs.msg import ColorRGBA\n'), ((9085, 9102), 'rospy.Duration', 'rospy.Duration', (['(1)'], {}), '(1)\n', (9099, 9102), False, 'import rospy\n'), ((9776, 9812), 'rospy.loginfo', 'rospy.loginfo', (['"""[%s] Shutting down."""'], {}), "('[%s] Shutting down.')\n", (9789, 9812), False, 'import rospy\n'), ((9848, 9887), 'rospy.loginfo', 'rospy.loginfo', (['"""Stopped %s\'s velocity."""'], {}), '("Stopped %s\'s velocity.")\n', (9861, 9887), False, 'import rospy\n'), ((9995, 10002), 'crowd_nav.policy.cadrl.CADRL', 'CADRL', ([], {}), '()\n', (10000, 10002), False, 'from crowd_nav.policy.cadrl import CADRL\n'), ((10010, 10018), 'crowd_nav.policy.lstm_rl.LstmRL', 'LstmRL', ([], {}), '()\n', (10016, 10018), False, 'from crowd_nav.policy.lstm_rl import LstmRL\n'), ((10026, 10032), 'crowd_nav.policy.sarl.SARL', 'SARL', ([], {}), '()\n', (10030, 10032), False, 'from crowd_nav.policy.sarl import SARL\n'), ((10625, 10650), 'torch.load', 'torch.load', (['model_weights'], {}), '(model_weights)\n', (10635, 10650), False, 'import torch\n'), ((2597, 2616), 'rospy.Duration', 'rospy.Duration', (['(0.2)'], {}), '(0.2)\n', (2611, 2616), False, 'import rospy\n'), ((2669, 2689), 'rospy.Duration', 'rospy.Duration', (['(0.01)'], {}), '(0.01)\n', (2683, 2689), False, 'import rospy\n'), ((3098, 3125), 'numpy.linalg.det', 'np.linalg.det', (['[v_a, e_dir]'], {}), '([v_a, e_dir])\n', (3111, 3125), True, 'import numpy as np\n'), ((3125, 3143), 'numpy.dot', 'np.dot', (['v_a', 'e_dir'], {}), '(v_a, e_dir)\n', (3131, 3143), True, 'import numpy as np\n'), ((5091, 5111), 'numpy.arctan2', 'np.arctan2', (['v_y', 'v_x'], {}), '(v_y, v_x)\n', (5101, 5111), True, 'import numpy as np\n'), ((6591, 6649), 'numpy.array', 'np.array', (['[self.desired_action[0], self.desired_action[1]]'], {}), '([self.desired_action[0], self.desired_action[1]])\n', (6599, 6649), True, 'import numpy as np\n'), ((2985, 3003), 'math.cos', 'math.cos', (['self.psi'], {}), '(self.psi)\n', (2993, 3003), False, 'import math\n'), ((3005, 3023), 'math.sin', 'math.sin', (['self.psi'], {}), 
'(self.psi)\n', (3013, 3023), False, 'import math\n'), ((5152, 5172), 'numpy.array', 'np.array', (['[v_x, v_y]'], {}), '([v_x, v_y])\n', (5160, 5172), True, 'import numpy as np\n'), ((6682, 6701), 'numpy.linalg.norm', 'np.linalg.norm', (['vel'], {}), '(vel)\n', (6696, 6701), True, 'import numpy as np\n')]
|
# standard imports
import logging
from sklearn.metrics.cluster import homogeneity_score, completeness_score
import numpy
import matplotlib.pyplot as plt
# our imports
import emission.analysis.modelling.tour_model.cluster_pipeline as cp
import emission.analysis.modelling.tour_model.similarity as similarity
"""
Functions to evaluate clustering based on groundtruth. To use these functions,
an array of the same length as the data must be passed in, with different values in
the array indicating different groundtruth clusters.
These functions can be used alongside the cluster pipeline to evaluate clustering.
An example of how to run this with the cluster pipeline is in the main method. To run it,
pass in a list of groundtruth.
Note that the cluster pipeline works with trips, not sections, so to use the above
code the groundtruth must also be given per trip.
"""
#turns color array into an array of integers
def get_colors(data, colors):
if len(data) != len(colors):
raise ValueError('Data and groundtruth must have the same number of elements')
    indices = []
for n in colors:
if n not in indices:
indices.append(n)
for i in range(len(colors)):
colors[i] = indices.index(colors[i])
return colors
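# e.g. with len(data) == 3, get_colors(data, ['a', 'b', 'a']) returns [0, 1, 0]:
# groundtruth labels are remapped in place to integers in first-seen order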
#update the ground truth after binning
def update_colors(bins, colors):
newcolors = []
for bin in bins:
for b in bin:
newcolors.append(colors[b])
    indices = []
for n in newcolors:
if n not in indices:
indices.append(n)
for i in range(len(newcolors)):
newcolors[i] = indices.index(newcolors[i])
return newcolors
#evaluates the cluster labels against the groundtruth colors
def evaluate(colors, labels):
b = homogeneity_score(colors, labels)
c = completeness_score(colors, labels)
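    # both scores are floats in [0, 1], so format with %f (a %d would truncate them to 0 or 1)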
    logging.debug('homogeneity is %f' % b)
    logging.debug('completeness is %f' % c)
#maps the clusters, colored by the groundtruth
#creates a map for each groundtruthed cluster and
#a map showing all the clusters.
def map_clusters_by_groundtruth(data, labels, colors, map_individuals=False):
import pygmaps
from matplotlib import colors as matcol
colormap = plt.cm.get_cmap()
import random
r = random.sample(range(len(set(labels))), len(set(labels)))
rand = []
clusters = len(set(labels))
for i in range(len(labels)):
rand.append(r[labels[i]]/float(clusters))
if map_individuals:
for color in set(colors):
first = True
num_paths = 0
for i in range(len(colors)):
if colors[i] == color:
num_paths += 1
start_lat = data[i].trip_start_location.lat
start_lon = data[i].trip_start_location.lon
end_lat = data[i].trip_end_location.lat
end_lon = data[i].trip_end_location.lon
if first:
mymap = pygmaps.maps(start_lat, start_lon, 10)
first = False
path = [(start_lat, start_lon), (end_lat, end_lon)]
mymap.addpath(path, matcol.rgb2hex(colormap(rand[i])))
mymap.draw('./mycluster' + str(color) + '.html')
mymap = pygmaps.maps(37.5, -122.32, 10)
for i in range(len(data)):
start_lat = data[i].trip_start_location.lat
start_lon = data[i].trip_start_location.lon
end_lat = data[i].trip_end_location.lat
end_lon = data[i].trip_end_location.lon
path = [(start_lat, start_lon), (end_lat, end_lon)]
mymap.addpath(path, matcol.rgb2hex(colormap(float(colors[i])/len(set(colors)))))
mymap.draw('./mymap.html')
def main(colors):
data = cp.read_data() #get the data
colors = get_colors(data, colors) #make colors the right format
data, bins = cp.remove_noise(data, .5, 300) #remove noise from data
###### the next few lines are to evaluate the binning
sim = similarity.similarity(data, .5, 300) #create a similarity object
sim.bins = bins #set the bins, since we calculated them above
sim.evaluate_bins() #evaluate them to create the labels
######
colors = update_colors(bins, colors) #update the colors to reflect deleted bins
labels = sim.labels #get labels
evaluate(numpy.array(colors), numpy.array(labels)) #evaluate the bins
clusters, labels, data = cp.cluster(data, len(bins)) #cluster
evaluate(numpy.array(colors), numpy.array(labels)) #evaluate clustering
map_clusters_by_groundtruth(data, labels, colors, map_individuals=False) #map clusters, make last parameter true to map individual clusters
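# Hypothetical usage (assumes a groundtruth label list aligned with cp.read_data()):
# if __name__ == '__main__':
#     groundtruth = [0, 0, 1, 2, 1]  # one integer label per trip, in data order
#     main(groundtruth)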
|
[
"sklearn.metrics.cluster.completeness_score",
"emission.analysis.modelling.tour_model.similarity.similarity",
"logging.debug",
"emission.analysis.modelling.tour_model.cluster_pipeline.remove_noise",
"pygmaps.maps",
"emission.analysis.modelling.tour_model.cluster_pipeline.read_data",
"numpy.array",
"sklearn.metrics.cluster.homogeneity_score",
"matplotlib.pyplot.cm.get_cmap"
] |
[((1778, 1811), 'sklearn.metrics.cluster.homogeneity_score', 'homogeneity_score', (['colors', 'labels'], {}), '(colors, labels)\n', (1795, 1811), False, 'from sklearn.metrics.cluster import homogeneity_score, completeness_score\n'), ((1820, 1854), 'sklearn.metrics.cluster.completeness_score', 'completeness_score', (['colors', 'labels'], {}), '(colors, labels)\n', (1838, 1854), False, 'from sklearn.metrics.cluster import homogeneity_score, completeness_score\n'), ((1859, 1897), 'logging.debug', 'logging.debug', (["('homogeneity is %d' % b)"], {}), "('homogeneity is %d' % b)\n", (1872, 1897), False, 'import logging\n'), ((1902, 1941), 'logging.debug', 'logging.debug', (["('completeness is %d' % c)"], {}), "('completeness is %d' % c)\n", (1915, 1941), False, 'import logging\n'), ((2231, 2248), 'matplotlib.pyplot.cm.get_cmap', 'plt.cm.get_cmap', ([], {}), '()\n', (2246, 2248), True, 'import matplotlib.pyplot as plt\n'), ((3294, 3325), 'pygmaps.maps', 'pygmaps.maps', (['(37.5)', '(-122.32)', '(10)'], {}), '(37.5, -122.32, 10)\n', (3306, 3325), False, 'import pygmaps\n'), ((3767, 3781), 'emission.analysis.modelling.tour_model.cluster_pipeline.read_data', 'cp.read_data', ([], {}), '()\n', (3779, 3781), True, 'import emission.analysis.modelling.tour_model.cluster_pipeline as cp\n'), ((3881, 3912), 'emission.analysis.modelling.tour_model.cluster_pipeline.remove_noise', 'cp.remove_noise', (['data', '(0.5)', '(300)'], {}), '(data, 0.5, 300)\n', (3896, 3912), True, 'import emission.analysis.modelling.tour_model.cluster_pipeline as cp\n'), ((4004, 4041), 'emission.analysis.modelling.tour_model.similarity.similarity', 'similarity.similarity', (['data', '(0.5)', '(300)'], {}), '(data, 0.5, 300)\n', (4025, 4041), True, 'import emission.analysis.modelling.tour_model.similarity as similarity\n'), ((4339, 4358), 'numpy.array', 'numpy.array', (['colors'], {}), '(colors)\n', (4350, 4358), False, 'import numpy\n'), ((4360, 4379), 'numpy.array', 'numpy.array', (['labels'], {}), '(labels)\n', (4371, 4379), False, 'import numpy\n'), ((4479, 4498), 'numpy.array', 'numpy.array', (['colors'], {}), '(colors)\n', (4490, 4498), False, 'import numpy\n'), ((4500, 4519), 'numpy.array', 'numpy.array', (['labels'], {}), '(labels)\n', (4511, 4519), False, 'import numpy\n'), ((2996, 3034), 'pygmaps.maps', 'pygmaps.maps', (['start_lat', 'start_lon', '(10)'], {}), '(start_lat, start_lon, 10)\n', (3008, 3034), False, 'import pygmaps\n')]
|
from datetime import timedelta
from django.test import TestCase
from django.contrib.auth import get_user_model
from bestflightApp.tests.factories import (
AirlineFactory,
AirplaneFactory,
FlightClassFactory,
ReservationFactory,
AvailableFlightFactory,
AirlineFlightPathFactory,
)
from bestflightUser.tests.factories import UserFactory
User = get_user_model()
class ModelSignalTestCase(TestCase):
def setUp(self):
self.airline = AirlineFactory()
self.flight_class = FlightClassFactory()
self.airplane = AirplaneFactory()
self.user = UserFactory()
def test_airline_flightpath_creation_activates_airline(self):
"""an airline would remain inactive if it does not have a flighpath"""
self.assertEqual(self.airline.is_active, False)
AirlineFlightPathFactory(
airplane=self.airplane, airline=self.airline,
)
self.assertEqual(self.airline.is_active, True)
def test_airline_flightpath_absence_deactivate_airline(self):
"""An airline without fligh path sould be inative"""
flight_path = AirlineFlightPathFactory(
airplane=self.airplane, airline=self.airline
)
self.assertEqual(self.airline.is_active, True)
flight_path.delete()
self.assertEqual(self.airline.is_active, False)
def test_update_flightpath_date_last_flight_to_aval_flight_boarding_time(self): # noqa
flight_path = AirlineFlightPathFactory(
airplane=self.airplane, airline=self.airline,
)
flight = AvailableFlightFactory(airlinePath=flight_path)
self.assertEqual(flight_path.date_last_flight, flight.boarding_time)
def test_update_aval_flight_no_reversation_via_reservation(self):
flight_path = AirlineFlightPathFactory(
airplane=self.airplane, airline=self.airline)
flight = AvailableFlightFactory(airlinePath=flight_path)
no_reversaton = flight.no_reversaton
reversation = ReservationFactory(
flight=flight, flight_class=self.flight_class, user=self.user)
self.assertEqual(flight.no_reversaton, no_reversaton + 1)
no_reversaton = flight.no_reversaton
reversation.delete()
self.assertEqual(flight.no_reversaton, no_reversaton - 1)
class ModelValidation(TestCase):
def setUp(self):
self.airline = AirlineFactory()
self.airplane = AirplaneFactory()
self.flight_path = AirlineFlightPathFactory(
airplane=self.airplane, airline=self.airline)
self.flight = AvailableFlightFactory(airlinePath=self.flight_path)
def test_board_time_should_be_behind_takeoff(self):
self.flight.boarding_time = self.flight.take_off_time + timedelta(minutes=20) # noqa
with self.assertRaises(ValueError):
self.flight.save()
|
[
"bestflightApp.tests.factories.AirplaneFactory",
"django.contrib.auth.get_user_model",
"bestflightUser.tests.factories.UserFactory",
"bestflightApp.tests.factories.ReservationFactory",
"datetime.timedelta",
"bestflightApp.tests.factories.AvailableFlightFactory",
"bestflightApp.tests.factories.AirlineFactory",
"bestflightApp.tests.factories.AirlineFlightPathFactory",
"bestflightApp.tests.factories.FlightClassFactory"
] |
[((368, 384), 'django.contrib.auth.get_user_model', 'get_user_model', ([], {}), '()\n', (382, 384), False, 'from django.contrib.auth import get_user_model\n'), ((469, 485), 'bestflightApp.tests.factories.AirlineFactory', 'AirlineFactory', ([], {}), '()\n', (483, 485), False, 'from bestflightApp.tests.factories import AirlineFactory, AirplaneFactory, FlightClassFactory, ReservationFactory, AvailableFlightFactory, AirlineFlightPathFactory\n'), ((514, 534), 'bestflightApp.tests.factories.FlightClassFactory', 'FlightClassFactory', ([], {}), '()\n', (532, 534), False, 'from bestflightApp.tests.factories import AirlineFactory, AirplaneFactory, FlightClassFactory, ReservationFactory, AvailableFlightFactory, AirlineFlightPathFactory\n'), ((559, 576), 'bestflightApp.tests.factories.AirplaneFactory', 'AirplaneFactory', ([], {}), '()\n', (574, 576), False, 'from bestflightApp.tests.factories import AirlineFactory, AirplaneFactory, FlightClassFactory, ReservationFactory, AvailableFlightFactory, AirlineFlightPathFactory\n'), ((597, 610), 'bestflightUser.tests.factories.UserFactory', 'UserFactory', ([], {}), '()\n', (608, 610), False, 'from bestflightUser.tests.factories import UserFactory\n'), ((822, 892), 'bestflightApp.tests.factories.AirlineFlightPathFactory', 'AirlineFlightPathFactory', ([], {'airplane': 'self.airplane', 'airline': 'self.airline'}), '(airplane=self.airplane, airline=self.airline)\n', (846, 892), False, 'from bestflightApp.tests.factories import AirlineFactory, AirplaneFactory, FlightClassFactory, ReservationFactory, AvailableFlightFactory, AirlineFlightPathFactory\n'), ((1121, 1191), 'bestflightApp.tests.factories.AirlineFlightPathFactory', 'AirlineFlightPathFactory', ([], {'airplane': 'self.airplane', 'airline': 'self.airline'}), '(airplane=self.airplane, airline=self.airline)\n', (1145, 1191), False, 'from bestflightApp.tests.factories import AirlineFactory, AirplaneFactory, FlightClassFactory, ReservationFactory, AvailableFlightFactory, AirlineFlightPathFactory\n'), ((1468, 1538), 'bestflightApp.tests.factories.AirlineFlightPathFactory', 'AirlineFlightPathFactory', ([], {'airplane': 'self.airplane', 'airline': 'self.airline'}), '(airplane=self.airplane, airline=self.airline)\n', (1492, 1538), False, 'from bestflightApp.tests.factories import AirlineFactory, AirplaneFactory, FlightClassFactory, ReservationFactory, AvailableFlightFactory, AirlineFlightPathFactory\n'), ((1579, 1626), 'bestflightApp.tests.factories.AvailableFlightFactory', 'AvailableFlightFactory', ([], {'airlinePath': 'flight_path'}), '(airlinePath=flight_path)\n', (1601, 1626), False, 'from bestflightApp.tests.factories import AirlineFactory, AirplaneFactory, FlightClassFactory, ReservationFactory, AvailableFlightFactory, AirlineFlightPathFactory\n'), ((1797, 1867), 'bestflightApp.tests.factories.AirlineFlightPathFactory', 'AirlineFlightPathFactory', ([], {'airplane': 'self.airplane', 'airline': 'self.airline'}), '(airplane=self.airplane, airline=self.airline)\n', (1821, 1867), False, 'from bestflightApp.tests.factories import AirlineFactory, AirplaneFactory, FlightClassFactory, ReservationFactory, AvailableFlightFactory, AirlineFlightPathFactory\n'), ((1898, 1945), 'bestflightApp.tests.factories.AvailableFlightFactory', 'AvailableFlightFactory', ([], {'airlinePath': 'flight_path'}), '(airlinePath=flight_path)\n', (1920, 1945), False, 'from bestflightApp.tests.factories import AirlineFactory, AirplaneFactory, FlightClassFactory, ReservationFactory, AvailableFlightFactory, AirlineFlightPathFactory\n'), ((2013, 
2099), 'bestflightApp.tests.factories.ReservationFactory', 'ReservationFactory', ([], {'flight': 'flight', 'flight_class': 'self.flight_class', 'user': 'self.user'}), '(flight=flight, flight_class=self.flight_class, user=self\n .user)\n', (2031, 2099), False, 'from bestflightApp.tests.factories import AirlineFactory, AirplaneFactory, FlightClassFactory, ReservationFactory, AvailableFlightFactory, AirlineFlightPathFactory\n'), ((2394, 2410), 'bestflightApp.tests.factories.AirlineFactory', 'AirlineFactory', ([], {}), '()\n', (2408, 2410), False, 'from bestflightApp.tests.factories import AirlineFactory, AirplaneFactory, FlightClassFactory, ReservationFactory, AvailableFlightFactory, AirlineFlightPathFactory\n'), ((2435, 2452), 'bestflightApp.tests.factories.AirplaneFactory', 'AirplaneFactory', ([], {}), '()\n', (2450, 2452), False, 'from bestflightApp.tests.factories import AirlineFactory, AirplaneFactory, FlightClassFactory, ReservationFactory, AvailableFlightFactory, AirlineFlightPathFactory\n'), ((2480, 2550), 'bestflightApp.tests.factories.AirlineFlightPathFactory', 'AirlineFlightPathFactory', ([], {'airplane': 'self.airplane', 'airline': 'self.airline'}), '(airplane=self.airplane, airline=self.airline)\n', (2504, 2550), False, 'from bestflightApp.tests.factories import AirlineFactory, AirplaneFactory, FlightClassFactory, ReservationFactory, AvailableFlightFactory, AirlineFlightPathFactory\n'), ((2587, 2639), 'bestflightApp.tests.factories.AvailableFlightFactory', 'AvailableFlightFactory', ([], {'airlinePath': 'self.flight_path'}), '(airlinePath=self.flight_path)\n', (2609, 2639), False, 'from bestflightApp.tests.factories import AirlineFactory, AirplaneFactory, FlightClassFactory, ReservationFactory, AvailableFlightFactory, AirlineFlightPathFactory\n'), ((2761, 2782), 'datetime.timedelta', 'timedelta', ([], {'minutes': '(20)'}), '(minutes=20)\n', (2770, 2782), False, 'from datetime import timedelta\n')]
|
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
from ax.core.metric import Metric
from ax.core.objective import Objective, ScalarizedObjective
from ax.core.optimization_config import OptimizationConfig
from ax.core.outcome_constraint import OutcomeConstraint
from ax.core.types import ComparisonOp
from ax.utils.common.testutils import TestCase
CONFIG_STR = (
'OptimizationConfig(objective=Objective(metric_name="m1", minimize=False), '
"outcome_constraints=[OutcomeConstraint(m2 >= -0.25%), "
"OutcomeConstraint(m2 <= 0.25%)])"
)
class OptimizationConfigTest(TestCase):
def setUp(self):
self.metrics = {"m1": Metric(name="m1"), "m2": Metric(name="m2")}
self.objective = Objective(metric=self.metrics["m1"], minimize=False)
self.m2_objective = ScalarizedObjective(
metrics=[self.metrics["m1"], self.metrics["m2"]]
)
self.outcome_constraint = OutcomeConstraint(
metric=self.metrics["m2"], op=ComparisonOp.GEQ, bound=-0.25
)
self.additional_outcome_constraint = OutcomeConstraint(
metric=self.metrics["m2"], op=ComparisonOp.LEQ, bound=0.25
)
self.outcome_constraints = [
self.outcome_constraint,
self.additional_outcome_constraint,
]
def testInit(self):
config1 = OptimizationConfig(
objective=self.objective, outcome_constraints=self.outcome_constraints
)
self.assertEqual(str(config1), CONFIG_STR)
with self.assertRaises(ValueError):
config1.objective = self.m2_objective
# updating constraints is fine.
config1.outcome_constraints = [self.outcome_constraint]
self.assertEqual(len(config1.metrics), 2)
# objective without outcome_constraints is also supported
config2 = OptimizationConfig(objective=self.objective)
self.assertEqual(config2.outcome_constraints, [])
        # setting the objective is fine too, if it's compatible with the constraints.
config2.objective = self.m2_objective
# setting incompatible constraints is not fine.
with self.assertRaises(ValueError):
config2.outcome_constraints = self.outcome_constraints
def testEq(self):
config1 = OptimizationConfig(
objective=self.objective, outcome_constraints=self.outcome_constraints
)
config2 = OptimizationConfig(
objective=self.objective, outcome_constraints=self.outcome_constraints
)
self.assertEqual(config1, config2)
new_outcome_constraint = OutcomeConstraint(
metric=self.metrics["m2"], op=ComparisonOp.LEQ, bound=0.5
)
config3 = OptimizationConfig(
objective=self.objective,
outcome_constraints=[self.outcome_constraint, new_outcome_constraint],
)
self.assertNotEqual(config1, config3)
def testConstraintValidation(self):
# Can't constrain on objective metric.
objective_constraint = OutcomeConstraint(
metric=self.objective.metric, op=ComparisonOp.GEQ, bound=0
)
with self.assertRaises(ValueError):
OptimizationConfig(
objective=self.objective, outcome_constraints=[objective_constraint]
)
# Two outcome_constraints on the same metric with the same op
# should raise.
duplicate_constraint = OutcomeConstraint(
metric=self.outcome_constraint.metric,
op=self.outcome_constraint.op,
bound=self.outcome_constraint.bound + 1,
)
with self.assertRaises(ValueError):
OptimizationConfig(
objective=self.objective,
outcome_constraints=[self.outcome_constraint, duplicate_constraint],
)
# Three outcome_constraints on the same metric should raise.
opposing_constraint = OutcomeConstraint(
metric=self.outcome_constraint.metric,
op=not self.outcome_constraint.op,
bound=self.outcome_constraint.bound,
)
with self.assertRaises(ValueError):
OptimizationConfig(
objective=self.objective,
outcome_constraints=self.outcome_constraints + [opposing_constraint],
)
        # Two outcome_constraints on the same metric with different ops and
        # flipped bounds (lower bound above the upper bound) should raise.
add_bound = 1 if self.outcome_constraint.op == ComparisonOp.LEQ else -1
opposing_constraint = OutcomeConstraint(
metric=self.outcome_constraint.metric,
op=not self.outcome_constraint.op,
bound=self.outcome_constraint.bound + add_bound,
)
with self.assertRaises(ValueError):
OptimizationConfig(
objective=self.objective,
outcome_constraints=([self.outcome_constraint, opposing_constraint]),
)
# Two outcome_constraints on the same metric with different ops and
# bounds should not raise.
opposing_constraint = OutcomeConstraint(
metric=self.outcome_constraint.metric,
op=not self.outcome_constraint.op,
bound=self.outcome_constraint.bound + 1,
)
config = OptimizationConfig(
objective=self.objective,
outcome_constraints=([self.outcome_constraint, opposing_constraint]),
)
self.assertEqual(
config.outcome_constraints, [self.outcome_constraint, opposing_constraint]
)
def testClone(self):
config1 = OptimizationConfig(
objective=self.objective, outcome_constraints=self.outcome_constraints
)
self.assertEqual(config1, config1.clone())
|
[
"ax.core.outcome_constraint.OutcomeConstraint",
"ax.core.objective.Objective",
"ax.core.metric.Metric",
"ax.core.optimization_config.OptimizationConfig",
"ax.core.objective.ScalarizedObjective"
] |
[((860, 912), 'ax.core.objective.Objective', 'Objective', ([], {'metric': "self.metrics['m1']", 'minimize': '(False)'}), "(metric=self.metrics['m1'], minimize=False)\n", (869, 912), False, 'from ax.core.objective import Objective, ScalarizedObjective\n'), ((941, 1010), 'ax.core.objective.ScalarizedObjective', 'ScalarizedObjective', ([], {'metrics': "[self.metrics['m1'], self.metrics['m2']]"}), "(metrics=[self.metrics['m1'], self.metrics['m2']])\n", (960, 1010), False, 'from ax.core.objective import Objective, ScalarizedObjective\n'), ((1067, 1145), 'ax.core.outcome_constraint.OutcomeConstraint', 'OutcomeConstraint', ([], {'metric': "self.metrics['m2']", 'op': 'ComparisonOp.GEQ', 'bound': '(-0.25)'}), "(metric=self.metrics['m2'], op=ComparisonOp.GEQ, bound=-0.25)\n", (1084, 1145), False, 'from ax.core.outcome_constraint import OutcomeConstraint\n'), ((1213, 1290), 'ax.core.outcome_constraint.OutcomeConstraint', 'OutcomeConstraint', ([], {'metric': "self.metrics['m2']", 'op': 'ComparisonOp.LEQ', 'bound': '(0.25)'}), "(metric=self.metrics['m2'], op=ComparisonOp.LEQ, bound=0.25)\n", (1230, 1290), False, 'from ax.core.outcome_constraint import OutcomeConstraint\n'), ((1488, 1583), 'ax.core.optimization_config.OptimizationConfig', 'OptimizationConfig', ([], {'objective': 'self.objective', 'outcome_constraints': 'self.outcome_constraints'}), '(objective=self.objective, outcome_constraints=self.\n outcome_constraints)\n', (1506, 1583), False, 'from ax.core.optimization_config import OptimizationConfig\n'), ((1985, 2029), 'ax.core.optimization_config.OptimizationConfig', 'OptimizationConfig', ([], {'objective': 'self.objective'}), '(objective=self.objective)\n', (2003, 2029), False, 'from ax.core.optimization_config import OptimizationConfig\n'), ((2422, 2517), 'ax.core.optimization_config.OptimizationConfig', 'OptimizationConfig', ([], {'objective': 'self.objective', 'outcome_constraints': 'self.outcome_constraints'}), '(objective=self.objective, outcome_constraints=self.\n outcome_constraints)\n', (2440, 2517), False, 'from ax.core.optimization_config import OptimizationConfig\n'), ((2553, 2648), 'ax.core.optimization_config.OptimizationConfig', 'OptimizationConfig', ([], {'objective': 'self.objective', 'outcome_constraints': 'self.outcome_constraints'}), '(objective=self.objective, outcome_constraints=self.\n outcome_constraints)\n', (2571, 2648), False, 'from ax.core.optimization_config import OptimizationConfig\n'), ((2743, 2819), 'ax.core.outcome_constraint.OutcomeConstraint', 'OutcomeConstraint', ([], {'metric': "self.metrics['m2']", 'op': 'ComparisonOp.LEQ', 'bound': '(0.5)'}), "(metric=self.metrics['m2'], op=ComparisonOp.LEQ, bound=0.5)\n", (2760, 2819), False, 'from ax.core.outcome_constraint import OutcomeConstraint\n'), ((2860, 2980), 'ax.core.optimization_config.OptimizationConfig', 'OptimizationConfig', ([], {'objective': 'self.objective', 'outcome_constraints': '[self.outcome_constraint, new_outcome_constraint]'}), '(objective=self.objective, outcome_constraints=[self.\n outcome_constraint, new_outcome_constraint])\n', (2878, 2980), False, 'from ax.core.optimization_config import OptimizationConfig\n'), ((3176, 3253), 'ax.core.outcome_constraint.OutcomeConstraint', 'OutcomeConstraint', ([], {'metric': 'self.objective.metric', 'op': 'ComparisonOp.GEQ', 'bound': '(0)'}), '(metric=self.objective.metric, op=ComparisonOp.GEQ, bound=0)\n', (3193, 3253), False, 'from ax.core.outcome_constraint import OutcomeConstraint\n'), ((3577, 3710), 'ax.core.outcome_constraint.OutcomeConstraint', 
'OutcomeConstraint', ([], {'metric': 'self.outcome_constraint.metric', 'op': 'self.outcome_constraint.op', 'bound': '(self.outcome_constraint.bound + 1)'}), '(metric=self.outcome_constraint.metric, op=self.\n outcome_constraint.op, bound=self.outcome_constraint.bound + 1)\n', (3594, 3710), False, 'from ax.core.outcome_constraint import OutcomeConstraint\n'), ((4070, 4203), 'ax.core.outcome_constraint.OutcomeConstraint', 'OutcomeConstraint', ([], {'metric': 'self.outcome_constraint.metric', 'op': '(not self.outcome_constraint.op)', 'bound': 'self.outcome_constraint.bound'}), '(metric=self.outcome_constraint.metric, op=not self.\n outcome_constraint.op, bound=self.outcome_constraint.bound)\n', (4087, 4203), False, 'from ax.core.outcome_constraint import OutcomeConstraint\n'), ((4706, 4851), 'ax.core.outcome_constraint.OutcomeConstraint', 'OutcomeConstraint', ([], {'metric': 'self.outcome_constraint.metric', 'op': '(not self.outcome_constraint.op)', 'bound': '(self.outcome_constraint.bound + add_bound)'}), '(metric=self.outcome_constraint.metric, op=not self.\n outcome_constraint.op, bound=self.outcome_constraint.bound + add_bound)\n', (4723, 4851), False, 'from ax.core.outcome_constraint import OutcomeConstraint\n'), ((5254, 5391), 'ax.core.outcome_constraint.OutcomeConstraint', 'OutcomeConstraint', ([], {'metric': 'self.outcome_constraint.metric', 'op': '(not self.outcome_constraint.op)', 'bound': '(self.outcome_constraint.bound + 1)'}), '(metric=self.outcome_constraint.metric, op=not self.\n outcome_constraint.op, bound=self.outcome_constraint.bound + 1)\n', (5271, 5391), False, 'from ax.core.outcome_constraint import OutcomeConstraint\n'), ((5451, 5568), 'ax.core.optimization_config.OptimizationConfig', 'OptimizationConfig', ([], {'objective': 'self.objective', 'outcome_constraints': '[self.outcome_constraint, opposing_constraint]'}), '(objective=self.objective, outcome_constraints=[self.\n outcome_constraint, opposing_constraint])\n', (5469, 5568), False, 'from ax.core.optimization_config import OptimizationConfig\n'), ((5768, 5863), 'ax.core.optimization_config.OptimizationConfig', 'OptimizationConfig', ([], {'objective': 'self.objective', 'outcome_constraints': 'self.outcome_constraints'}), '(objective=self.objective, outcome_constraints=self.\n outcome_constraints)\n', (5786, 5863), False, 'from ax.core.optimization_config import OptimizationConfig\n'), ((791, 808), 'ax.core.metric.Metric', 'Metric', ([], {'name': '"""m1"""'}), "(name='m1')\n", (797, 808), False, 'from ax.core.metric import Metric\n'), ((816, 833), 'ax.core.metric.Metric', 'Metric', ([], {'name': '"""m2"""'}), "(name='m2')\n", (822, 833), False, 'from ax.core.metric import Metric\n'), ((3332, 3425), 'ax.core.optimization_config.OptimizationConfig', 'OptimizationConfig', ([], {'objective': 'self.objective', 'outcome_constraints': '[objective_constraint]'}), '(objective=self.objective, outcome_constraints=[\n objective_constraint])\n', (3350, 3425), False, 'from ax.core.optimization_config import OptimizationConfig\n'), ((3809, 3927), 'ax.core.optimization_config.OptimizationConfig', 'OptimizationConfig', ([], {'objective': 'self.objective', 'outcome_constraints': '[self.outcome_constraint, duplicate_constraint]'}), '(objective=self.objective, outcome_constraints=[self.\n outcome_constraint, duplicate_constraint])\n', (3827, 3927), False, 'from ax.core.optimization_config import OptimizationConfig\n'), ((4302, 4421), 'ax.core.optimization_config.OptimizationConfig', 'OptimizationConfig', ([], {'objective': 
'self.objective', 'outcome_constraints': '(self.outcome_constraints + [opposing_constraint])'}), '(objective=self.objective, outcome_constraints=self.\n outcome_constraints + [opposing_constraint])\n', (4320, 4421), False, 'from ax.core.optimization_config import OptimizationConfig\n'), ((4950, 5067), 'ax.core.optimization_config.OptimizationConfig', 'OptimizationConfig', ([], {'objective': 'self.objective', 'outcome_constraints': '[self.outcome_constraint, opposing_constraint]'}), '(objective=self.objective, outcome_constraints=[self.\n outcome_constraint, opposing_constraint])\n', (4968, 5067), False, 'from ax.core.optimization_config import OptimizationConfig\n')]
|
import numpy as np
from pandas import DataFrame
import matplotlib.pyplot as py
class ca(object):
'''
Docstring for function ecopy.ca
====================
    Conducts correspondence analysis (CA). The user supplies
an observation x descriptor matrix.
Use
----
ca(x, siteNames=None, spNames=None, scaling=1)
Returns an object of class ca
Parameters
----------
x: Data for ordination. Should either be a pandas DataFrame or numpy.ndarray.
Observations as rows and descriptors as columns. Only
positive numbers and 0's allowed.
siteNames: A list of site names
spNames: A list of species names
scaling: What type of biplot to produce. See online documentation
Attributes (see online documentation for descriptions)
---------
w_col: column weights of the transformed matrix
w_row: row weights of the transformed matrix
evals: eigenvalues of the QQ matrix
U: column eigenvectors
Uhat: row eigenvectors
    cumDesc_Sp: The proportion of variance for each species explained by each
        correspondence axis
    cumDesc_Site: The proportion of variance for each site explained by each
        correspondence axis
siteScores: Site scores along each CA axis
spScores: Species scores along each CA axis
Methods
--------
summary(): provides a pandas.DataFrame summary table of CA axes
    biplot(xax=1, yax=2, showSp=True, showSite=True, spCol='r', siteCol='k', spSize=12, siteSize=12, xlim=None, ylim=None):
        Produces a biplot of the given CA axes.
showSp: Whether species should be plotted
showSite: Whether site should be plotted
spCol: Color of species text
siteCol: Color of site text
spSize: Size of species text
siteSize: Size of site text
xlim: Provide a xlim list to override default limits
ylim: Provide a ylim list to override default limits
xax: Integer specifying CA Axes to be plotted on the x-axis (Defaults to 1)
yax: Integer specifying CA Axes to be plotted on the y-axis (Defaults to 2)
Example
--------
import ecopy as ep
BCI = ep.load_data('BCI')
bci_ca = ep.ca(BCI)
print(bci_ca.summary())
bci_ca.biplot()
'''
def __init__(self, x, siteNames=None, spNames=None, scaling=1):
# if the data is not a dataframe or array, raise error
if not isinstance(x, (DataFrame, np.ndarray)):
            msg = 'Data must either be pandas.DataFrame or numpy.ndarray'
raise ValueError(msg)
# if x is a DataFrame
if isinstance(x, DataFrame):
# check NAs
if x.isnull().any().any():
msg = 'DataFrame contains null values'
raise ValueError(msg)
# check for non-numeric
if (x.dtypes == 'object').any():
msg = 'DataFrame can only contain numeric values'
raise ValueError(msg)
# convert to a numpy array
y = np.array(x)
# if x is array, simple re-assign
if isinstance(x, np.ndarray):
if np.isnan(x).any():
msg = 'Array contains null values'
raise ValueError(msg)
y = x
        # check for negative values
        if (y < 0).any():
            msg = 'Matrix cannot contain negative values'
            raise ValueError(msg)
        if scaling not in [1, 2]:
            msg = 'scaling parameter must be 1 or 2'
            raise ValueError(msg)
if y.shape[0] < y.shape[1]:
y = y.T
self.Trans = True
else:
self.Trans = False
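        # Build the chi-square standardized residuals Q = (P - r c') / sqrt(r c'),
        # where P is the relative-frequency matrix and r, c are its row and
        # column weights; the eigendecomposition of Q'Q below yields the CA axes.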
pMat = y.astype('float')/y.sum()
self.w_row = pMat.sum(axis=1)
self.w_col = pMat.sum(axis=0)
w_rowA = self.w_row[:,np.newaxis]
w_colA = self.w_col[np.newaxis,:]
Q = (pMat - w_rowA*w_colA)/np.sqrt(w_rowA*w_colA)
self.evals, self.U = np.linalg.eig(Q.T.dot(Q))
idx = self.evals.argsort()[::-1]
self.evals = self.evals[idx]
self.U = self.U[:,idx]
self.Uhat = Q.dot(self.U).dot(np.diag(self.evals**-0.5))
self.evals = self.evals[:-1]
self.U = self.U[:,:-1]
self.Uhat = self.Uhat[:,:-1]
if isinstance(x, DataFrame):
self.siteLabs = x.index
self.spLabs = x.columns
else:
self.siteLabs = ['Site ' + str(x) for x in range(y.shape[0])]
self.spLabs = ['Sp ' + str(x) for x in range(y.shape[1])]
if siteNames is not None:
self.siteLabs = siteNames
if spNames is not None:
self.spLabs = spNames
U2 = self.U.dot(np.diag(self.evals**0.5))
Uhat2 = self.Uhat.dot(np.diag(self.evals**0.5))
if self.Trans:
self.cumDesc_Sp = DataFrame(np.apply_along_axis(lambda x: np.cumsum(x**2) / np.sum(x**2), 1, Uhat2))
self.cumDesc_Site = DataFrame(np.apply_along_axis(lambda x: np.cumsum(x**2) / np.sum(x**2), 1, U2))
else:
self.cumDesc_Sp = DataFrame(np.apply_along_axis(lambda x: np.cumsum(x**2) / np.sum(x**2), 1, U2))
self.cumDesc_Site = DataFrame(np.apply_along_axis(lambda x: np.cumsum(x**2) / np.sum(x**2), 1, Uhat2))
if isinstance(x, DataFrame):
self.cumDesc_Sp.index = x.columns
self.cumDesc_Site.index = x.index
self.cumDesc_Sp.columns = ['CA Axis ' + str(x) for x in range(1, len(self.evals) + 1)]
self.cumDesc_Site.columns = ['CA Axis ' + str(x) for x in range(1, len(self.evals) + 1)]
V = np.diag(self.w_col**-0.5).dot(self.U)
Vhat = np.diag(self.w_row**-0.5).dot(self.Uhat)
F = Vhat.dot(np.diag(self.evals**0.5))
Fhat = V.dot(np.diag(self.evals**0.5))
if self.Trans:
siteCent = Fhat
spCent = F
siteOut = V
spOut = Vhat
if scaling==1:
self.siteScores = DataFrame(siteCent, index=self.siteLabs)
self.spScores = DataFrame(spOut, index=self.spLabs)
elif scaling==2:
self.siteScores = DataFrame(siteOut, columns=self.siteLabs)
self.spScores = DataFrame(spCent, columns=self.spLabs)
else:
siteCent = F
spCent = Fhat
siteOut = Vhat
spOut = V
if scaling==1:
self.siteScores = DataFrame(siteCent, index=self.siteLabs)
self.spScores = DataFrame(spOut, index=self.spLabs)
elif scaling==2:
self.siteScores = DataFrame(siteOut, index=self.siteLabs)
self.spScores = DataFrame(spCent, index=self.spLabs)
def summary(self):
sds = np.sqrt(self.evals)
props = self.evals / np.sum(self.evals)
cumSums = np.cumsum(self.evals) / np.sum(self.evals)
colNames = ['CA Axis ' + str(x) for x in range(1, len(self.evals)+1)]
sumTable = DataFrame(np.vstack((sds, props, cumSums)), index=['Inertia', 'Prop.', 'Cum. Prop.'])
sumTable.columns = colNames
return sumTable
def biplot(self, xax=1, yax=2, showSp=True, showSite=True, spCol='r', siteCol='k', spSize=12, siteSize=12, xlim=None, ylim=None):
f, ax = py.subplots()
if showSite:
ax.plot(self.siteScores.iloc[:,xax-1], self.siteScores.iloc[:,yax-1], 'ko', ms=0)
[ax.text(x, y, s, fontsize=siteSize, color=siteCol, ha='center', va='center') for x,y,s in zip(self.siteScores.iloc[:,xax-1], self.siteScores.iloc[:,yax-1], self.siteLabs)]
if showSp:
ax.plot(self.spScores.iloc[:,xax-1], self.spScores.iloc[:,yax-1], 'k^', ms=0)
[ax.text(x,y,s, fontsize=spSize, color=spCol, ha='center', va='center') for x,y,s in zip(self.spScores.iloc[:,xax-1], self.spScores.iloc[:,yax-1], self.spLabs)]
xmax = max(np.amax(self.siteScores.iloc[:,xax-1]), np.amax(self.spScores.iloc[:,xax-1]))
xmin = min(np.amin(self.siteScores.iloc[:,xax-1]), np.amin(self.spScores.iloc[:,xax-1]))
ymax = max(np.amax(self.siteScores.iloc[:,yax-1]), np.amax(self.spScores.iloc[:,yax-1]))
ymin = min(np.min(self.siteScores.iloc[:,yax-1]), np.min(self.spScores.iloc[:,yax-1]))
ax.set_xlim([xmin*1.15, xmax*1.15])
ax.set_ylim([ymin*1.15, ymax*1.15])
if xlim is not None:
if not isinstance(xlim, list):
msg = "xlim must be a list"
raise ValueError(msg)
ax.set_xlim(xlim)
if ylim is not None:
if not isinstance(ylim, list):
msg = 'ylim must be a list'
raise ValueError(msg)
ax.set_ylim(ylim)
ax.set_xlabel('CA Axis {!s}'.format(xax))
ax.set_ylabel('CA Axis {!s}'.format(yax))
py.show()
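
# Standalone sketch (not part of ecopy): run CA on a tiny synthetic
# site-by-species table. Names and values below are made up; a harmless
# numerical warning may appear for the discarded trivial axis.
if __name__ == '__main__':
    demo = DataFrame([[10, 0, 3], [2, 5, 0], [0, 8, 1], [4, 2, 6]],
                     index=['s1', 's2', 's3', 's4'],
                     columns=['spA', 'spB', 'spC'])
    demo_ca = ca(demo)
    print(demo_ca.summary())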
|
[
"pandas.DataFrame",
"matplotlib.pyplot.show",
"numpy.sum",
"numpy.amin",
"numpy.isnan",
"numpy.amax",
"numpy.cumsum",
"numpy.min",
"numpy.array",
"numpy.diag",
"matplotlib.pyplot.subplots",
"numpy.vstack",
"numpy.sqrt"
] |
[((6821, 6840), 'numpy.sqrt', 'np.sqrt', (['self.evals'], {}), '(self.evals)\n', (6828, 6840), True, 'import numpy as np\n'), ((7352, 7365), 'matplotlib.pyplot.subplots', 'py.subplots', ([], {}), '()\n', (7363, 7365), True, 'import matplotlib.pyplot as py\n'), ((8922, 8931), 'matplotlib.pyplot.show', 'py.show', ([], {}), '()\n', (8929, 8931), True, 'import matplotlib.pyplot as py\n'), ((3109, 3120), 'numpy.array', 'np.array', (['x'], {}), '(x)\n', (3117, 3120), True, 'import numpy as np\n'), ((3981, 4005), 'numpy.sqrt', 'np.sqrt', (['(w_rowA * w_colA)'], {}), '(w_rowA * w_colA)\n', (3988, 4005), True, 'import numpy as np\n'), ((4206, 4233), 'numpy.diag', 'np.diag', (['(self.evals ** -0.5)'], {}), '(self.evals ** -0.5)\n', (4213, 4233), True, 'import numpy as np\n'), ((4767, 4793), 'numpy.diag', 'np.diag', (['(self.evals ** 0.5)'], {}), '(self.evals ** 0.5)\n', (4774, 4793), True, 'import numpy as np\n'), ((4823, 4849), 'numpy.diag', 'np.diag', (['(self.evals ** 0.5)'], {}), '(self.evals ** 0.5)\n', (4830, 4849), True, 'import numpy as np\n'), ((5784, 5810), 'numpy.diag', 'np.diag', (['(self.evals ** 0.5)'], {}), '(self.evals ** 0.5)\n', (5791, 5810), True, 'import numpy as np\n'), ((5831, 5857), 'numpy.diag', 'np.diag', (['(self.evals ** 0.5)'], {}), '(self.evals ** 0.5)\n', (5838, 5857), True, 'import numpy as np\n'), ((6870, 6888), 'numpy.sum', 'np.sum', (['self.evals'], {}), '(self.evals)\n', (6876, 6888), True, 'import numpy as np\n'), ((6907, 6928), 'numpy.cumsum', 'np.cumsum', (['self.evals'], {}), '(self.evals)\n', (6916, 6928), True, 'import numpy as np\n'), ((6931, 6949), 'numpy.sum', 'np.sum', (['self.evals'], {}), '(self.evals)\n', (6937, 6949), True, 'import numpy as np\n'), ((7057, 7089), 'numpy.vstack', 'np.vstack', (['(sds, props, cumSums)'], {}), '((sds, props, cumSums))\n', (7066, 7089), True, 'import numpy as np\n'), ((5669, 5696), 'numpy.diag', 'np.diag', (['(self.w_col ** -0.5)'], {}), '(self.w_col ** -0.5)\n', (5676, 5696), True, 'import numpy as np\n'), ((5722, 5749), 'numpy.diag', 'np.diag', (['(self.w_row ** -0.5)'], {}), '(self.w_row ** -0.5)\n', (5729, 5749), True, 'import numpy as np\n'), ((6041, 6081), 'pandas.DataFrame', 'DataFrame', (['siteCent'], {'index': 'self.siteLabs'}), '(siteCent, index=self.siteLabs)\n', (6050, 6081), False, 'from pandas import DataFrame\n'), ((6114, 6149), 'pandas.DataFrame', 'DataFrame', (['spOut'], {'index': 'self.spLabs'}), '(spOut, index=self.spLabs)\n', (6123, 6149), False, 'from pandas import DataFrame\n'), ((6501, 6541), 'pandas.DataFrame', 'DataFrame', (['siteCent'], {'index': 'self.siteLabs'}), '(siteCent, index=self.siteLabs)\n', (6510, 6541), False, 'from pandas import DataFrame\n'), ((6574, 6609), 'pandas.DataFrame', 'DataFrame', (['spOut'], {'index': 'self.spLabs'}), '(spOut, index=self.spLabs)\n', (6583, 6609), False, 'from pandas import DataFrame\n'), ((7971, 8012), 'numpy.amax', 'np.amax', (['self.siteScores.iloc[:, xax - 1]'], {}), '(self.siteScores.iloc[:, xax - 1])\n', (7978, 8012), True, 'import numpy as np\n'), ((8011, 8050), 'numpy.amax', 'np.amax', (['self.spScores.iloc[:, xax - 1]'], {}), '(self.spScores.iloc[:, xax - 1])\n', (8018, 8050), True, 'import numpy as np\n'), ((8072, 8113), 'numpy.amin', 'np.amin', (['self.siteScores.iloc[:, xax - 1]'], {}), '(self.siteScores.iloc[:, xax - 1])\n', (8079, 8113), True, 'import numpy as np\n'), ((8112, 8151), 'numpy.amin', 'np.amin', (['self.spScores.iloc[:, xax - 1]'], {}), '(self.spScores.iloc[:, xax - 1])\n', (8119, 8151), True, 'import numpy as np\n'), ((8173, 
8214), 'numpy.amax', 'np.amax', (['self.siteScores.iloc[:, yax - 1]'], {}), '(self.siteScores.iloc[:, yax - 1])\n', (8180, 8214), True, 'import numpy as np\n'), ((8213, 8252), 'numpy.amax', 'np.amax', (['self.spScores.iloc[:, yax - 1]'], {}), '(self.spScores.iloc[:, yax - 1])\n', (8220, 8252), True, 'import numpy as np\n'), ((8274, 8314), 'numpy.min', 'np.min', (['self.siteScores.iloc[:, yax - 1]'], {}), '(self.siteScores.iloc[:, yax - 1])\n', (8280, 8314), True, 'import numpy as np\n'), ((8313, 8351), 'numpy.min', 'np.min', (['self.spScores.iloc[:, yax - 1]'], {}), '(self.spScores.iloc[:, yax - 1])\n', (8319, 8351), True, 'import numpy as np\n'), ((3220, 3231), 'numpy.isnan', 'np.isnan', (['x'], {}), '(x)\n', (3228, 3231), True, 'import numpy as np\n'), ((6213, 6254), 'pandas.DataFrame', 'DataFrame', (['siteOut'], {'columns': 'self.siteLabs'}), '(siteOut, columns=self.siteLabs)\n', (6222, 6254), False, 'from pandas import DataFrame\n'), ((6287, 6325), 'pandas.DataFrame', 'DataFrame', (['spCent'], {'columns': 'self.spLabs'}), '(spCent, columns=self.spLabs)\n', (6296, 6325), False, 'from pandas import DataFrame\n'), ((6673, 6712), 'pandas.DataFrame', 'DataFrame', (['siteOut'], {'index': 'self.siteLabs'}), '(siteOut, index=self.siteLabs)\n', (6682, 6712), False, 'from pandas import DataFrame\n'), ((6745, 6781), 'pandas.DataFrame', 'DataFrame', (['spCent'], {'index': 'self.spLabs'}), '(spCent, index=self.spLabs)\n', (6754, 6781), False, 'from pandas import DataFrame\n'), ((4942, 4959), 'numpy.cumsum', 'np.cumsum', (['(x ** 2)'], {}), '(x ** 2)\n', (4951, 4959), True, 'import numpy as np\n'), ((4960, 4974), 'numpy.sum', 'np.sum', (['(x ** 2)'], {}), '(x ** 2)\n', (4966, 4974), True, 'import numpy as np\n'), ((5057, 5074), 'numpy.cumsum', 'np.cumsum', (['(x ** 2)'], {}), '(x ** 2)\n', (5066, 5074), True, 'import numpy as np\n'), ((5075, 5089), 'numpy.sum', 'np.sum', (['(x ** 2)'], {}), '(x ** 2)\n', (5081, 5089), True, 'import numpy as np\n'), ((5181, 5198), 'numpy.cumsum', 'np.cumsum', (['(x ** 2)'], {}), '(x ** 2)\n', (5190, 5198), True, 'import numpy as np\n'), ((5199, 5213), 'numpy.sum', 'np.sum', (['(x ** 2)'], {}), '(x ** 2)\n', (5205, 5213), True, 'import numpy as np\n'), ((5293, 5310), 'numpy.cumsum', 'np.cumsum', (['(x ** 2)'], {}), '(x ** 2)\n', (5302, 5310), True, 'import numpy as np\n'), ((5311, 5325), 'numpy.sum', 'np.sum', (['(x ** 2)'], {}), '(x ** 2)\n', (5317, 5325), True, 'import numpy as np\n')]
|
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
# pylint: disable=too-many-lines
from azure.cli.core.util import sdk_no_wait
def healthcareapis_service_show(client,
resource_group_name,
resource_name):
return client.get(resource_group_name=resource_group_name,
resource_name=resource_name)
# this handler is used for both create and update (it calls create_or_update)
def healthcareapis_service_create(client,
resource_group_name,
resource_name,
kind,
location,
tags=None,
etag=None,
identity_type=None,
access_policies=None,
cosmos_db_configuration=None,
authentication_configuration=None,
cors_configuration=None,
private_endpoint_connections=None,
public_network_access=None,
export_configuration_storage_account_name=None,
no_wait=False):
properties = {
'access_policies': access_policies,
'authentication_configuration': authentication_configuration,
'cors_configuration': cors_configuration,
'cosmos_db_configuration': cosmos_db_configuration,
'private_endpoint_connections': private_endpoint_connections,
'public_network_access': public_network_access
}
if export_configuration_storage_account_name is not None:
properties['export_configuration'] = {
'storage_account_name': export_configuration_storage_account_name
}
service_description = {
'name': resource_name,
'kind': kind,
'location': location,
'etag': etag,
'properties': properties,
'tags': tags
}
if identity_type is not None:
service_description['identity'] = {
'principal_id': None,
'tenant_id': None,
'type': identity_type,
}
else:
service_description['identity'] = {
'principal_id': None,
'tenant_id': None,
'type': "None",
}
return sdk_no_wait(no_wait,
client.create_or_update,
resource_group_name=resource_group_name,
resource_name=resource_name,
service_description=service_description)
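
# Illustrative invocation sketch (values are hypothetical; 'client' is the
# services operations client injected by the CLI framework):
#
#   healthcareapis_service_create(
#       client,
#       resource_group_name='my-rg',
#       resource_name='my-fhir-service',
#       kind='fhir-R4',
#       location='westus2',
#       identity_type='SystemAssigned')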
|
[
"azure.cli.core.util.sdk_no_wait"
] |
[((2695, 2861), 'azure.cli.core.util.sdk_no_wait', 'sdk_no_wait', (['no_wait', 'client.create_or_update'], {'resource_group_name': 'resource_group_name', 'resource_name': 'resource_name', 'service_description': 'service_description'}), '(no_wait, client.create_or_update, resource_group_name=\n resource_group_name, resource_name=resource_name, service_description=\n service_description)\n', (2706, 2861), False, 'from azure.cli.core.util import sdk_no_wait\n')]
|
from mqtt_kube.mqtt import TopicMatcher
class TestTopicMatcher:
    def test_basic(self):
        assert TopicMatcher('topic/one').match('topic/one')
        assert not TopicMatcher('topic/two').match('topic/one')
    def test_plus(self):
        assert TopicMatcher('topic/+/plus').match('topic/one/plus')
        assert not TopicMatcher('topic/+/plus').match('topic/one/extra/plus')
        assert not TopicMatcher('++/plus').match('topic/one/plus')
        assert not TopicMatcher('+one/plus').match('topic/one/plus')
        assert not TopicMatcher('+ne/plus').match('one/plus')
    def test_hash(self):
        assert TopicMatcher('#').match('topic/one/plus')
        assert TopicMatcher('topic/#').match('topic/one/plus')
        assert not TopicMatcher('topic/two/#').match('topic/one/plus')
        assert not TopicMatcher('#/plus').match('one/plus')
    def test_plus_and_hash(self):
        assert TopicMatcher('+/+/plus/#').match('topic/one/plus/many/more')
        assert not TopicMatcher('+/+/minus/#').match('topic/one/plus/many/more')
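
# Hypothetical reference implementation consistent with the expectations
# above; the real mqtt_kube.mqtt.TopicMatcher may be implemented differently.
class TopicMatcherSketch:
    def __init__(self, pattern):
        self.parts = pattern.split('/')
    def match(self, topic):
        levels = topic.split('/')
        for i, part in enumerate(self.parts):
            if part == '#':
                # '#' matches any remainder, but only as the final pattern level
                return i == len(self.parts) - 1
            if i >= len(levels) or (part != '+' and part != levels[i]):
                return False
        # every pattern level matched; the topic must not have extra levels
        return len(levels) == len(self.parts)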
|
[
"mqtt_kube.mqtt.TopicMatcher"
] |
[((107, 132), 'mqtt_kube.mqtt.TopicMatcher', 'TopicMatcher', (['"""topic/one"""'], {}), "('topic/one')\n", (119, 132), False, 'from mqtt_kube.mqtt import TopicMatcher\n'), ((175, 200), 'mqtt_kube.mqtt.TopicMatcher', 'TopicMatcher', (['"""topic/two"""'], {}), "('topic/two')\n", (187, 200), False, 'from mqtt_kube.mqtt import TopicMatcher\n'), ((270, 298), 'mqtt_kube.mqtt.TopicMatcher', 'TopicMatcher', (['"""topic/+/plus"""'], {}), "('topic/+/plus')\n", (282, 298), False, 'from mqtt_kube.mqtt import TopicMatcher\n'), ((346, 374), 'mqtt_kube.mqtt.TopicMatcher', 'TopicMatcher', (['"""topic/+/plus"""'], {}), "('topic/+/plus')\n", (358, 374), False, 'from mqtt_kube.mqtt import TopicMatcher\n'), ((429, 452), 'mqtt_kube.mqtt.TopicMatcher', 'TopicMatcher', (['"""++/plus"""'], {}), "('++/plus')\n", (441, 452), False, 'from mqtt_kube.mqtt import TopicMatcher\n'), ((501, 526), 'mqtt_kube.mqtt.TopicMatcher', 'TopicMatcher', (['"""+one/plus"""'], {}), "('+one/plus')\n", (513, 526), False, 'from mqtt_kube.mqtt import TopicMatcher\n'), ((575, 599), 'mqtt_kube.mqtt.TopicMatcher', 'TopicMatcher', (['"""+ne/plus"""'], {}), "('+ne/plus')\n", (587, 599), False, 'from mqtt_kube.mqtt import TopicMatcher\n'), ((668, 685), 'mqtt_kube.mqtt.TopicMatcher', 'TopicMatcher', (['"""#"""'], {}), "('#')\n", (680, 685), False, 'from mqtt_kube.mqtt import TopicMatcher\n'), ((733, 756), 'mqtt_kube.mqtt.TopicMatcher', 'TopicMatcher', (['"""topic/#"""'], {}), "('topic/#')\n", (745, 756), False, 'from mqtt_kube.mqtt import TopicMatcher\n'), ((804, 831), 'mqtt_kube.mqtt.TopicMatcher', 'TopicMatcher', (['"""topic/two/#"""'], {}), "('topic/two/#')\n", (816, 831), False, 'from mqtt_kube.mqtt import TopicMatcher\n'), ((880, 902), 'mqtt_kube.mqtt.TopicMatcher', 'TopicMatcher', (['"""#/plus"""'], {}), "('#/plus')\n", (892, 902), False, 'from mqtt_kube.mqtt import TopicMatcher\n'), ((980, 1006), 'mqtt_kube.mqtt.TopicMatcher', 'TopicMatcher', (['"""+/+/plus/#"""'], {}), "('+/+/plus/#')\n", (992, 1006), False, 'from mqtt_kube.mqtt import TopicMatcher\n'), ((1064, 1091), 'mqtt_kube.mqtt.TopicMatcher', 'TopicMatcher', (['"""+/+/minus/#"""'], {}), "('+/+/minus/#')\n", (1076, 1091), False, 'from mqtt_kube.mqtt import TopicMatcher\n')]
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""Compute v2 API Library"""
from keystoneauth1 import exceptions as ksa_exceptions
from osc_lib.api import api
from osc_lib import exceptions
from osc_lib.i18n import _
# TODO(dtroyer): Migrate this to osc-lib
class InvalidValue(Exception):
"""An argument value is not valid: wrong type, out of range, etc"""
message = "Supplied value is not valid"
class APIv2(api.BaseAPI):
"""Compute v2 API"""
def __init__(self, **kwargs):
super(APIv2, self).__init__(**kwargs)
# Overrides
def _check_integer(self, value, msg=None):
"""Attempt to convert value to an integer
Raises InvalidValue on failure
:param value:
Convert this to an integer. None is converted to 0 (zero).
:param msg:
An alternate message for the exception, must include exactly
one substitution to receive the attempted value.
"""
if value is None:
return 0
try:
value = int(value)
except (TypeError, ValueError):
if not msg:
msg = _("%s is not an integer") % value
raise InvalidValue(msg)
return value
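
    # For example: self._check_integer(None) -> 0, self._check_integer('22')
    # -> 22, and self._check_integer('ssh') raises InvalidValue.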
# TODO(dtroyer): Override find() until these fixes get into an osc-lib
# minimum release
def find(
self,
path,
value=None,
attr=None,
):
"""Find a single resource by name or ID
:param string path:
The API-specific portion of the URL path
:param string value:
search expression (required, really)
:param string attr:
name of attribute for secondary search
"""
try:
ret = self._request('GET', "/%s/%s" % (path, value)).json()
if isinstance(ret, dict):
# strip off the enclosing dict
key = list(ret.keys())[0]
ret = ret[key]
except (
ksa_exceptions.NotFound,
ksa_exceptions.BadRequest,
):
kwargs = {attr: value}
try:
ret = self.find_one(path, **kwargs)
except ksa_exceptions.NotFound:
msg = _("%s not found") % value
raise exceptions.NotFound(msg)
return ret
# Floating IPs
def floating_ip_add(
self,
server,
address,
fixed_address=None,
):
"""Add a floating IP to a server
:param server:
The :class:`Server` (or its ID) to add an IP to.
:param address:
The FloatingIP or string floating address to add.
:param fixed_address:
The FixedIP the floatingIP should be associated with (optional)
"""
url = '/servers'
server = self.find(
url,
attr='name',
value=server,
)
address = address.ip if hasattr(address, 'ip') else address
if fixed_address:
if hasattr(fixed_address, 'ip'):
fixed_address = fixed_address.ip
body = {
'address': address,
'fixed_address': fixed_address,
}
else:
body = {
'address': address,
}
return self._request(
"POST",
"/%s/%s/action" % (url, server['id']),
json={'addFloatingIp': body},
)
def floating_ip_create(
self,
pool=None,
):
"""Create a new floating ip
https://developer.openstack.org/api-ref/compute/#create-allocate-floating-ip-address
:param pool: Name of floating IP pool
"""
url = "/os-floating-ips"
try:
return self.create(
url,
json={'pool': pool},
)['floating_ip']
except (
ksa_exceptions.NotFound,
ksa_exceptions.BadRequest,
):
msg = _("%s not found") % pool
raise exceptions.NotFound(msg)
def floating_ip_delete(
self,
floating_ip_id=None,
):
"""Delete a floating IP
https://developer.openstack.org/api-ref/compute/#delete-deallocate-floating-ip-address
:param string floating_ip_id:
Floating IP ID
"""
url = "/os-floating-ips"
if floating_ip_id is not None:
return self.delete('/%s/%s' % (url, floating_ip_id))
return None
def floating_ip_find(
self,
floating_ip=None,
):
"""Return a security group given name or ID
https://developer.openstack.org/api-ref/compute/#list-floating-ip-addresses
:param string floating_ip:
Floating IP address
:returns: A dict of the floating IP attributes
"""
url = "/os-floating-ips"
return self.find(
url,
attr='ip',
value=floating_ip,
)
def floating_ip_list(
self,
):
"""Get floating IPs
https://developer.openstack.org/api-ref/compute/#show-floating-ip-address-details
:returns:
list of floating IPs
"""
url = "/os-floating-ips"
return self.list(url)["floating_ips"]
def floating_ip_remove(
self,
server,
address,
):
"""Remove a floating IP from a server
:param server:
The :class:`Server` (or its ID) to add an IP to.
:param address:
The FloatingIP or string floating address to add.
"""
url = '/servers'
server = self.find(
url,
attr='name',
value=server,
)
address = address.ip if hasattr(address, 'ip') else address
body = {
'address': address,
}
return self._request(
"POST",
"/%s/%s/action" % (url, server['id']),
json={'removeFloatingIp': body},
)
# Floating IP Pools
def floating_ip_pool_list(
self,
):
"""Get floating IP pools
https://developer.openstack.org/api-ref/compute/?expanded=#list-floating-ip-pools
:returns:
list of floating IP pools
"""
url = "/os-floating-ip-pools"
return self.list(url)["floating_ip_pools"]
# Hosts
def host_list(
self,
zone=None,
):
"""Lists hypervisor Hosts
https://developer.openstack.org/api-ref/compute/#list-hosts
Valid for Compute 2.0 - 2.42
:param string zone:
Availability zone
:returns: A dict of the floating IP attributes
"""
url = "/os-hosts"
if zone:
url = '/os-hosts?zone=%s' % zone
return self.list(url)["hosts"]
def host_set(
self,
host=None,
status=None,
maintenance_mode=None,
**params
):
"""Modify host properties
https://developer.openstack.org/api-ref/compute/#update-host-status
Valid for Compute 2.0 - 2.42
status
maintenance_mode
"""
url = "/os-hosts"
params = {}
if status:
params['status'] = status
if maintenance_mode:
params['maintenance_mode'] = maintenance_mode
if params == {}:
# Don't bother calling if nothing given
return None
else:
return self._request(
"PUT",
"/%s/%s" % (url, host),
json=params,
).json()
def host_show(
self,
host=None,
):
"""Show host
https://developer.openstack.org/api-ref/compute/#show-host-details
Valid for Compute 2.0 - 2.42
"""
url = "/os-hosts"
r_host = self.find(
url,
attr='host_name',
value=host,
)
data = []
for h in r_host:
data.append(h['resource'])
return data
# Networks
def network_create(
self,
name=None,
subnet=None,
share_subnet=None,
):
"""Create a new network
https://developer.openstack.org/api-ref/compute/#create-network
:param string name:
Network label (required)
        :param string subnet:
            Subnet for IPv4 fixed addresses in CIDR notation (required)
:param integer share_subnet:
Shared subnet between projects, True or False
:returns: A dict of the network attributes
"""
url = "/os-networks"
params = {
'label': name,
'cidr': subnet,
}
if share_subnet is not None:
params['share_address'] = share_subnet
return self.create(
url,
json={'network': params},
)['network']
def network_delete(
self,
network=None,
):
"""Delete a network
https://developer.openstack.org/api-ref/compute/#delete-network
:param string network:
Network name or ID
"""
url = "/os-networks"
network = self.find(
url,
attr='label',
value=network,
)['id']
if network is not None:
return self.delete('/%s/%s' % (url, network))
return None
def network_find(
self,
network=None,
):
"""Return a network given name or ID
https://developer.openstack.org/api-ref/compute/#show-network-details
:param string network:
Network name or ID
:returns: A dict of the network attributes
"""
url = "/os-networks"
return self.find(
url,
attr='label',
value=network,
)
def network_list(
self,
):
"""Get networks
https://developer.openstack.org/api-ref/compute/#list-networks
:returns:
list of networks
"""
url = "/os-networks"
return self.list(url)["networks"]
# Security Groups
def security_group_create(
self,
name=None,
description=None,
):
"""Create a new security group
https://developer.openstack.org/api-ref/compute/#create-security-group
:param string name:
Security group name
        :param string description:
            Security group description
"""
url = "/os-security-groups"
params = {
'name': name,
'description': description,
}
return self.create(
url,
json={'security_group': params},
)['security_group']
def security_group_delete(
self,
security_group=None,
):
"""Delete a security group
https://developer.openstack.org/api-ref/compute/#delete-security-group
:param string security_group:
Security group name or ID
"""
url = "/os-security-groups"
security_group = self.find(
url,
attr='name',
value=security_group,
)['id']
if security_group is not None:
return self.delete('/%s/%s' % (url, security_group))
return None
def security_group_find(
self,
security_group=None,
):
"""Return a security group given name or ID
https://developer.openstack.org/api-ref/compute/#show-security-group-details
:param string security_group:
Security group name or ID
:returns: A dict of the security group attributes
"""
url = "/os-security-groups"
return self.find(
url,
attr='name',
value=security_group,
)
def security_group_list(
self,
limit=None,
marker=None,
search_opts=None,
):
"""Get security groups
https://developer.openstack.org/api-ref/compute/#list-security-groups
:param integer limit:
query return count limit
:param string marker:
query marker
:param search_opts:
(undocumented) Search filter dict
all_tenants: True|False - return all projects
:returns:
list of security groups names
"""
params = {}
if search_opts is not None:
params = dict((k, v) for (k, v) in search_opts.items() if v)
if limit:
params['limit'] = limit
if marker:
params['offset'] = marker
url = "/os-security-groups"
return self.list(url, **params)["security_groups"]
def security_group_set(
self,
security_group=None,
# name=None,
# description=None,
**params
):
"""Update a security group
https://developer.openstack.org/api-ref/compute/#update-security-group
:param string security_group:
Security group name or ID
TODO(dtroyer): Create an update method in osc-lib
"""
        # Short-circuit no-op: params always arrives as a dict built from
        # **kwargs, so test for emptiness rather than None
        if not params:
            return None
url = "/os-security-groups"
security_group = self.find(
url,
attr='name',
value=security_group,
)
if security_group is not None:
for (k, v) in params.items():
# Only set a value if it is already present
if k in security_group:
security_group[k] = v
return self._request(
"PUT",
"/%s/%s" % (url, security_group['id']),
json={'security_group': security_group},
).json()['security_group']
return None
# Security Group Rules
def security_group_rule_create(
self,
security_group_id=None,
ip_protocol=None,
from_port=None,
to_port=None,
remote_ip=None,
remote_group=None,
):
"""Create a new security group rule
https://developer.openstack.org/api-ref/compute/#create-security-group-rule
:param string security_group_id:
Security group ID
:param ip_protocol:
IP protocol, 'tcp', 'udp' or 'icmp'
:param from_port:
Source port
:param to_port:
Destination port
:param remote_ip:
Source IP address in CIDR notation
:param remote_group:
Remote security group
"""
url = "/os-security-group-rules"
if ip_protocol.lower() not in ['icmp', 'tcp', 'udp']:
raise InvalidValue(
"%(s) is not one of 'icmp', 'tcp', or 'udp'" % ip_protocol
)
params = {
'parent_group_id': security_group_id,
'ip_protocol': ip_protocol,
'from_port': self._check_integer(from_port),
'to_port': self._check_integer(to_port),
'cidr': remote_ip,
'group_id': remote_group,
}
return self.create(
url,
json={'security_group_rule': params},
)['security_group_rule']
def security_group_rule_delete(
self,
security_group_rule_id=None,
):
"""Delete a security group rule
https://developer.openstack.org/api-ref/compute/#delete-security-group-rule
:param string security_group_rule_id:
Security group rule ID
"""
url = "/os-security-group-rules"
if security_group_rule_id is not None:
return self.delete('/%s/%s' % (url, security_group_rule_id))
return None
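
# Illustrative usage sketch (session/endpoint wiring is hypothetical and is
# normally handled by the osc-lib ClientManager):
#
#   compute = APIv2(session=keystone_session, endpoint=compute_endpoint)
#   for group in compute.security_group_list():
#       print(group['name'])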
|
[
"osc_lib.i18n._",
"osc_lib.exceptions.NotFound"
] |
[((4588, 4612), 'osc_lib.exceptions.NotFound', 'exceptions.NotFound', (['msg'], {}), '(msg)\n', (4607, 4612), False, 'from osc_lib import exceptions\n'), ((4545, 4562), 'osc_lib.i18n._', '_', (['"""%s not found"""'], {}), "('%s not found')\n", (4546, 4562), False, 'from osc_lib.i18n import _\n'), ((1657, 1682), 'osc_lib.i18n._', '_', (['"""%s is not an integer"""'], {}), "('%s is not an integer')\n", (1658, 1682), False, 'from osc_lib.i18n import _\n'), ((2814, 2838), 'osc_lib.exceptions.NotFound', 'exceptions.NotFound', (['msg'], {}), '(msg)\n', (2833, 2838), False, 'from osc_lib import exceptions\n'), ((2766, 2783), 'osc_lib.i18n._', '_', (['"""%s not found"""'], {}), "('%s not found')\n", (2767, 2783), False, 'from osc_lib.i18n import _\n')]
|
import socket
class ClientSocket:
"""
simple client socket for omnomnom game
"""
def __init__(self):
"""
creates a socket and starts a connection
"""
self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.PORT = 2222
# connect on construction,
# use for duration of a game,
# close connection on destruction later
self.sock.connect(("192.168.43.180", self.PORT))
def __del__(self):
"""
close the connection on destruction
"""
self.sock.shutdown(socket.SHUT_RDWR)
self.sock.close()
def send(self, msg):
"""
sends a general message given by the user
base for code borrowed from https://docs.python.org/3.6/howto/sockets.html
"""
# keep track of the total sent
# so we can make sure the whole message is sent
msg = (msg+'\n').encode('utf-8')
totalsent = 0
while totalsent < len(msg):
sent = self.sock.send(msg[totalsent:])
# it is bad if we still have things to send
# but do not send anything
if sent == 0:
raise RuntimeError("connection broken")
totalsent += sent
#############################################
# functions to send specific input messages #
#############################################
# first, directions
def sendLeft(self):
self.send("left")
def sendRight(self):
self.send("right")
def sendUp(self):
self.send("up")
def sendDown(self):
self.send("down")
# om nom nom nom nom nom nom nom nom nom nom nom nom nom nom nom nom nom
def sendNom(self):
self.send("nom")
|
[
"socket.socket"
] |
[((216, 265), 'socket.socket', 'socket.socket', (['socket.AF_INET', 'socket.SOCK_STREAM'], {}), '(socket.AF_INET, socket.SOCK_STREAM)\n', (229, 265), False, 'import socket\n')]
|
# -*- coding: utf-8 -*-
__author__ = 'alex'
from PyQt4 import QtCore, QtGui
import MyWindow
class MyDialog(QtGui.QDialog):
def __init__(self, parent=None):
QtGui.QDialog.__init__(self, parent)
self.MyWidget = MyWindow.MyWindow()
self.MyWidget.vbox.setMargin(0)
self.button = QtGui.QPushButton(u"Изменить надпись")
mainBox = QtGui.QVBoxLayout()
mainBox.addWidget(self.MyWidget)
mainBox.addWidget(self.button)
self.setLayout(mainBox)
        self.connect(self.button, QtCore.SIGNAL("clicked()"),
                     self.on_clicked)
def on_clicked(self):
self.MyWidget.label.setText(u"Новая запись")
self.button.setDisabled(True)
if __name__ == "__main__":
import sys
app = QtGui.QApplication(sys.argv)
window = MyDialog()
    # Create an instance
window.setWindowTitle(u"Приемущества OOП-cтиля")
window.resize(300, 100)
window.show()
    # Show the window
    sys.exit(app.exec_())  # start the event-processing loop
|
[
"PyQt4.QtGui.QDialog.__init__",
"PyQt4.QtGui.QVBoxLayout",
"PyQt4.QtGui.QApplication",
"PyQt4.QtGui.QPushButton",
"MyWindow.MyWindow",
"PyQt4.QtCore.SIGNAL"
] |
[((778, 806), 'PyQt4.QtGui.QApplication', 'QtGui.QApplication', (['sys.argv'], {}), '(sys.argv)\n', (796, 806), False, 'from PyQt4 import QtCore, QtGui\n'), ((170, 206), 'PyQt4.QtGui.QDialog.__init__', 'QtGui.QDialog.__init__', (['self', 'parent'], {}), '(self, parent)\n', (192, 206), False, 'from PyQt4 import QtCore, QtGui\n'), ((231, 250), 'MyWindow.MyWindow', 'MyWindow.MyWindow', ([], {}), '()\n', (248, 250), False, 'import MyWindow\n'), ((313, 351), 'PyQt4.QtGui.QPushButton', 'QtGui.QPushButton', (['u"""Изменить надпись"""'], {}), "(u'Изменить надпись')\n", (330, 351), False, 'from PyQt4 import QtCore, QtGui\n'), ((370, 389), 'PyQt4.QtGui.QVBoxLayout', 'QtGui.QVBoxLayout', ([], {}), '()\n', (387, 389), False, 'from PyQt4 import QtCore, QtGui\n'), ((537, 563), 'PyQt4.QtCore.SIGNAL', 'QtCore.SIGNAL', (['"""clicked()"""'], {}), "('clicked()')\n", (550, 563), False, 'from PyQt4 import QtCore, QtGui\n')]
|
'''Script to find shapes in a grayscale image with cv2'''
# import the necessary packages
import argparse
import cv2 as cv2
import os
import numpy as np
from PIL import Image
result = [0] * 256
image_path = '../resources/'+os.getenv('IMAGE', 'sample.bin')
xbash = np.fromfile(image_path, dtype='uint8')
#print(xbash.shape)
image_path = 'image.png'
x = 256
y = 256
cv2.imwrite(image_path, xbash[:x*y].reshape(x,y))
# load the image, convert it to grayscale, blur it slightly,
# and threshold it
array = np.array(Image.open(image_path))
#print(array)
for row_index, line in enumerate(array):
#print (line)
for column_index, pixel in enumerate(line):
#print (pixel)
        if pixel == 200:
            array[row_index][column_index] = 0
        if pixel == 0:
            array[row_index][column_index] = 255
invimg = Image.fromarray(array)
invimg.save(image_path)
image = cv2.imread(image_path)
####
row, col = image.shape[:2]
bottom = image[row-2:row, 0:col]
mean = cv2.mean(bottom)[0]
bordersize = 10
border = cv2.copyMakeBorder(
image,
top=bordersize,
bottom=bordersize,
left=bordersize,
right=bordersize,
borderType=cv2.BORDER_CONSTANT,
value=[255, 255, 255]
)
image = border
print(image.shape)
image = cv2.resize(image,(100,100))
print(image.shape)
###
gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
#cv2.imshow("Gray", gray)
#cv2.waitKey(0)
#gray = cv2.GaussianBlur(gray, (5,5), 0)
blurred = cv2.GaussianBlur(gray, (5, 5), 0)
#cv2.imshow("Blurred", blurred)
#cv2.waitKey(0)
thresh = cv2.threshold(blurred, 60, 255, cv2.THRESH_BINARY)[1]
cv2.imshow("Imageb", blurred)
cv2.waitKey(0)
cnts, hierarchy = cv2.findContours(thresh, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
contours_ = sorted(cnts, key=cv2.contourArea, reverse=True)[:10]
contours = [cv2.approxPolyDP(cnt, 3, True) for cnt in cnts]
final = np.zeros(image.shape,np.uint8)
mask = np.zeros(gray.shape,np.uint8)
r =0
for c in cnts:
    # average BGR value for the masked (eroded) contour region
mask = np.zeros(image.shape[:2], dtype="uint8")
cv2.drawContours(mask, [c], -1, 255, -1)
mask = cv2.erode(mask, None, iterations=2)
mean = cv2.mean(image, mask=mask)[:3]
# initialize the minimum distance found thus far
minDist = (np.inf, None)
print ('//////////////')
print(mean[0])
print ('black' if mean[0]<15 else 'other')
print ('--------------/////////////////////-------------')
print ('//////////////')
print ('Grey' if mean[0]<220 and mean[0]>180 else 'other')
print ('--------------/////////////////////-------------')
print ('//////////////')
print ('white' if mean[0]<256 and mean[0]>130 else 'other')
print ('--------------/////////////////////-------------')
    x, y = c[0][0]  # first contour point, in (x, y) order
#print (str(c))
M = cv2.moments(c)
d = M["m00"]
if d <= 0:
d =1
cX = int(M["m10"] / d)
cY = int(M["m01"] / d)
    print(image[cY][cX])  # numpy indexes as [row, col] = [y, x]
cv2.circle(image, (cX, cY), 7, (255, 0, 0), -1)
cv2.putText(image, "center", (cX - 20, cY - 20), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (255, 0, 0), 2)
print('==========================')
#for p in c:
#print(image[p[0][0]][p[0][1]])
    color_index = image[y][x][0]  # [row, col] = [y, x]
result[color_index] += 1
print ('-------------------------')
#print (image[x][y])
#print (image[x][y])
    cv2.drawContours(image, [c], 0, (100, 100, 100), 5)  # gray outline
# show the image
cv2.imshow("Image"+str(r), image)
cv2.waitKey(0)
r+=1
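
# 'result' is a 256-bin tally of sampled contour colors: result[v] counts
# contours whose sampled pixel had blue-channel value v.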
print (result)
|
[
"cv2.GaussianBlur",
"cv2.approxPolyDP",
"cv2.erode",
"cv2.imshow",
"cv2.cvtColor",
"cv2.copyMakeBorder",
"cv2.drawContours",
"cv2.mean",
"cv2.resize",
"cv2.circle",
"cv2.waitKey",
"os.getenv",
"cv2.putText",
"numpy.fromfile",
"cv2.threshold",
"cv2.moments",
"numpy.zeros",
"PIL.Image.open",
"cv2.imread",
"PIL.Image.fromarray",
"cv2.findContours"
] |
[((258, 296), 'numpy.fromfile', 'np.fromfile', (['image_path'], {'dtype': '"""uint8"""'}), "(image_path, dtype='uint8')\n", (269, 296), True, 'import numpy as np\n'), ((827, 849), 'PIL.Image.fromarray', 'Image.fromarray', (['array'], {}), '(array)\n', (842, 849), False, 'from PIL import Image\n'), ((883, 905), 'cv2.imread', 'cv2.imread', (['image_path'], {}), '(image_path)\n', (893, 905), True, 'import cv2 as cv2\n'), ((1025, 1185), 'cv2.copyMakeBorder', 'cv2.copyMakeBorder', (['image'], {'top': 'bordersize', 'bottom': 'bordersize', 'left': 'bordersize', 'right': 'bordersize', 'borderType': 'cv2.BORDER_CONSTANT', 'value': '[255, 255, 255]'}), '(image, top=bordersize, bottom=bordersize, left=\n bordersize, right=bordersize, borderType=cv2.BORDER_CONSTANT, value=[\n 255, 255, 255])\n', (1043, 1185), True, 'import cv2 as cv2\n'), ((1249, 1278), 'cv2.resize', 'cv2.resize', (['image', '(100, 100)'], {}), '(image, (100, 100))\n', (1259, 1278), True, 'import cv2 as cv2\n'), ((1307, 1346), 'cv2.cvtColor', 'cv2.cvtColor', (['image', 'cv2.COLOR_BGR2GRAY'], {}), '(image, cv2.COLOR_BGR2GRAY)\n', (1319, 1346), True, 'import cv2 as cv2\n'), ((1445, 1478), 'cv2.GaussianBlur', 'cv2.GaussianBlur', (['gray', '(5, 5)', '(0)'], {}), '(gray, (5, 5), 0)\n', (1461, 1478), True, 'import cv2 as cv2\n'), ((1596, 1625), 'cv2.imshow', 'cv2.imshow', (['"""Imageb"""', 'blurred'], {}), "('Imageb', blurred)\n", (1606, 1625), True, 'import cv2 as cv2\n'), ((1630, 1644), 'cv2.waitKey', 'cv2.waitKey', (['(0)'], {}), '(0)\n', (1641, 1644), True, 'import cv2 as cv2\n'), ((1664, 1728), 'cv2.findContours', 'cv2.findContours', (['thresh', 'cv2.RETR_TREE', 'cv2.CHAIN_APPROX_SIMPLE'], {}), '(thresh, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)\n', (1680, 1728), True, 'import cv2 as cv2\n'), ((1863, 1894), 'numpy.zeros', 'np.zeros', (['image.shape', 'np.uint8'], {}), '(image.shape, np.uint8)\n', (1871, 1894), True, 'import numpy as np\n'), ((1901, 1931), 'numpy.zeros', 'np.zeros', (['gray.shape', 'np.uint8'], {}), '(gray.shape, np.uint8)\n', (1909, 1931), True, 'import numpy as np\n'), ((217, 249), 'os.getenv', 'os.getenv', (['"""IMAGE"""', '"""sample.bin"""'], {}), "('IMAGE', 'sample.bin')\n", (226, 249), False, 'import os\n'), ((506, 528), 'PIL.Image.open', 'Image.open', (['image_path'], {}), '(image_path)\n', (516, 528), False, 'from PIL import Image\n'), ((979, 995), 'cv2.mean', 'cv2.mean', (['bottom'], {}), '(bottom)\n', (987, 995), True, 'import cv2 as cv2\n'), ((1541, 1591), 'cv2.threshold', 'cv2.threshold', (['blurred', '(60)', '(255)', 'cv2.THRESH_BINARY'], {}), '(blurred, 60, 255, cv2.THRESH_BINARY)\n', (1554, 1591), True, 'import cv2 as cv2\n'), ((1806, 1836), 'cv2.approxPolyDP', 'cv2.approxPolyDP', (['cnt', '(3)', '(True)'], {}), '(cnt, 3, True)\n', (1822, 1836), True, 'import cv2 as cv2\n'), ((2013, 2053), 'numpy.zeros', 'np.zeros', (['image.shape[:2]'], {'dtype': '"""uint8"""'}), "(image.shape[:2], dtype='uint8')\n", (2021, 2053), True, 'import numpy as np\n'), ((2058, 2098), 'cv2.drawContours', 'cv2.drawContours', (['mask', '[c]', '(-1)', '(255)', '(-1)'], {}), '(mask, [c], -1, 255, -1)\n', (2074, 2098), True, 'import cv2 as cv2\n'), ((2110, 2145), 'cv2.erode', 'cv2.erode', (['mask', 'None'], {'iterations': '(2)'}), '(mask, None, iterations=2)\n', (2119, 2145), True, 'import cv2 as cv2\n'), ((2817, 2831), 'cv2.moments', 'cv2.moments', (['c'], {}), '(c)\n', (2828, 2831), True, 'import cv2 as cv2\n'), ((2961, 3008), 'cv2.circle', 'cv2.circle', (['image', '(cX, cY)', '(7)', '(255, 0, 0)', '(-1)'], {}), '(image, (cX, cY), 7, 
(255, 0, 0), -1)\n', (2971, 3008), True, 'import cv2 as cv2\n'), ((3013, 3113), 'cv2.putText', 'cv2.putText', (['image', '"""center"""', '(cX - 20, cY - 20)', 'cv2.FONT_HERSHEY_SIMPLEX', '(0.5)', '(255, 0, 0)', '(2)'], {}), "(image, 'center', (cX - 20, cY - 20), cv2.FONT_HERSHEY_SIMPLEX, \n 0.5, (255, 0, 0), 2)\n", (3024, 3113), True, 'import cv2 as cv2\n'), ((3364, 3403), 'cv2.drawContours', 'cv2.drawContours', (['image', '[c]', '(0)', '(100)', '(5)'], {}), '(image, [c], 0, 100, 5)\n', (3380, 3403), True, 'import cv2 as cv2\n'), ((3473, 3487), 'cv2.waitKey', 'cv2.waitKey', (['(0)'], {}), '(0)\n', (3484, 3487), True, 'import cv2 as cv2\n'), ((2157, 2183), 'cv2.mean', 'cv2.mean', (['image'], {'mask': 'mask'}), '(image, mask=mask)\n', (2165, 2183), True, 'import cv2 as cv2\n')]
|
"""A kernel manager for multiple kernels"""
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
import asyncio
import os
import socket
import typing as t
import uuid
import zmq
from traitlets import Any
from traitlets import Bool
from traitlets import default
from traitlets import Dict
from traitlets import DottedObjectName
from traitlets import Instance
from traitlets import observe
from traitlets import Unicode
from traitlets.config.configurable import LoggingConfigurable
from traitlets.utils.importstring import import_item
from .kernelspec import KernelSpecManager
from .kernelspec import NATIVE_KERNEL_NAME
from .manager import KernelManager
from .utils import ensure_async
from .utils import run_sync
class DuplicateKernelError(Exception):
pass
def kernel_method(f: t.Callable) -> t.Callable:
"""decorator for proxying MKM.method(kernel_id) to individual KMs by ID"""
def wrapped(
self: t.Any, kernel_id: str, *args: t.Any, **kwargs: t.Any
) -> t.Union[t.Callable, t.Awaitable]:
# get the kernel
km = self.get_kernel(kernel_id)
method = getattr(km, f.__name__)
# call the kernel's method
r = method(*args, **kwargs)
# last thing, call anything defined in the actual class method
# such as logging messages
f(self, kernel_id, *args, **kwargs)
# return the method result
return r
return wrapped
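
# For example, signal_kernel below is declared as a stub; the decorator
# forwards the call to self.get_kernel(kernel_id).signal_kernel(signum)
# and then runs the stub body itself (here, just a log message).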
class MultiKernelManager(LoggingConfigurable):
"""A class for managing multiple kernels."""
default_kernel_name = Unicode(
NATIVE_KERNEL_NAME, help="The name of the default kernel to start"
).tag(config=True)
kernel_spec_manager = Instance(KernelSpecManager, allow_none=True)
kernel_manager_class = DottedObjectName(
"jupyter_client.ioloop.IOLoopKernelManager",
help="""The kernel manager class. This is configurable to allow
subclassing of the KernelManager for customized behavior.
""",
).tag(config=True)
@observe("kernel_manager_class")
def _kernel_manager_class_changed(self, change):
self.kernel_manager_factory = self._create_kernel_manager_factory()
kernel_manager_factory = Any(help="this is kernel_manager_class after import")
@default("kernel_manager_factory")
def _kernel_manager_factory_default(self):
return self._create_kernel_manager_factory()
def _create_kernel_manager_factory(self) -> t.Callable:
kernel_manager_ctor = import_item(self.kernel_manager_class)
def create_kernel_manager(*args: t.Any, **kwargs: t.Any) -> KernelManager:
if self.shared_context:
if self.context.closed:
# recreate context if closed
self.context = self._context_default()
kwargs.setdefault("context", self.context)
km = kernel_manager_ctor(*args, **kwargs)
return km
return create_kernel_manager
shared_context = Bool(
True,
help="Share a single zmq.Context to talk to all my kernels",
).tag(config=True)
context = Instance("zmq.Context")
_created_context = Bool(False)
_pending_kernels = Dict()
@property
def _starting_kernels(self):
"""A shim for backwards compatibility."""
return self._pending_kernels
@default("context") # type:ignore[misc]
def _context_default(self) -> zmq.Context:
self._created_context = True
return zmq.Context()
connection_dir = Unicode("")
_kernels = Dict()
def __del__(self):
"""Handle garbage collection. Destroy context if applicable."""
if self._created_context and self.context and not self.context.closed:
if self.log:
self.log.debug("Destroying zmq context for %s", self)
self.context.destroy()
try:
super_del = super().__del__
except AttributeError:
pass
else:
super_del()
def list_kernel_ids(self) -> t.List[str]:
"""Return a list of the kernel ids of the active kernels."""
# Create a copy so we can iterate over kernels in operations
# that delete keys.
return list(self._kernels.keys())
def __len__(self) -> int:
"""Return the number of running kernels."""
return len(self.list_kernel_ids())
def __contains__(self, kernel_id: str) -> bool:
return kernel_id in self._kernels
def pre_start_kernel(
self, kernel_name: t.Optional[str], kwargs: t.Any
) -> t.Tuple[KernelManager, str, str]:
# kwargs should be mutable, passing it as a dict argument.
kernel_id = kwargs.pop("kernel_id", self.new_kernel_id(**kwargs))
if kernel_id in self:
raise DuplicateKernelError("Kernel already exists: %s" % kernel_id)
if kernel_name is None:
kernel_name = self.default_kernel_name
# kernel_manager_factory is the constructor for the KernelManager
# subclass we are using. It can be configured as any Configurable,
# including things like its transport and ip.
constructor_kwargs = {}
if self.kernel_spec_manager:
constructor_kwargs["kernel_spec_manager"] = self.kernel_spec_manager
km = self.kernel_manager_factory(
connection_file=os.path.join(self.connection_dir, "kernel-%s.json" % kernel_id),
parent=self,
log=self.log,
kernel_name=kernel_name,
**constructor_kwargs,
)
return km, kernel_name, kernel_id
async def _add_kernel_when_ready(
self, kernel_id: str, km: KernelManager, kernel_awaitable: t.Awaitable
) -> None:
try:
await kernel_awaitable
self._kernels[kernel_id] = km
self._pending_kernels.pop(kernel_id, None)
except Exception as e:
self.log.exception(e)
async def _remove_kernel_when_ready(
self, kernel_id: str, kernel_awaitable: t.Awaitable
) -> None:
try:
await kernel_awaitable
self.remove_kernel(kernel_id)
self._pending_kernels.pop(kernel_id, None)
except Exception as e:
self.log.exception(e)
def _using_pending_kernels(self):
"""Returns a boolean; a clearer method for determining if
this multikernelmanager is using pending kernels or not
"""
return getattr(self, 'use_pending_kernels', False)
async def _async_start_kernel(
self, kernel_name: t.Optional[str] = None, **kwargs: t.Any
) -> str:
"""Start a new kernel.
The caller can pick a kernel_id by passing one in as a keyword arg,
otherwise one will be generated using new_kernel_id().
The kernel ID for the newly started kernel is returned.
"""
km, kernel_name, kernel_id = self.pre_start_kernel(kernel_name, kwargs)
if not isinstance(km, KernelManager):
self.log.warning(
"Kernel manager class ({km_class}) is not an instance of 'KernelManager'!".format(
km_class=self.kernel_manager_class.__class__
)
)
kwargs['kernel_id'] = kernel_id # Make kernel_id available to manager and provisioner
starter = ensure_async(km.start_kernel(**kwargs))
task = asyncio.create_task(self._add_kernel_when_ready(kernel_id, km, starter))
self._pending_kernels[kernel_id] = task
# Handling a Pending Kernel
if self._using_pending_kernels():
# If using pending kernels, do not block
# on the kernel start.
self._kernels[kernel_id] = km
else:
await task
# raise an exception if one occurred during kernel startup.
if km.ready.exception():
raise km.ready.exception() # type: ignore
return kernel_id
start_kernel = run_sync(_async_start_kernel)
async def _async_shutdown_kernel(
self,
kernel_id: str,
now: t.Optional[bool] = False,
restart: t.Optional[bool] = False,
) -> None:
"""Shutdown a kernel by its kernel uuid.
Parameters
==========
kernel_id : uuid
The id of the kernel to shutdown.
now : bool
Should the kernel be shutdown forcibly using a signal.
restart : bool
Will the kernel be restarted?
"""
self.log.info("Kernel shutdown: %s" % kernel_id)
# If the kernel is still starting, wait for it to be ready.
if kernel_id in self._pending_kernels:
task = self._pending_kernels[kernel_id]
try:
await task
km = self.get_kernel(kernel_id)
await t.cast(asyncio.Future, km.ready)
except asyncio.CancelledError:
pass
except Exception:
self.remove_kernel(kernel_id)
return
km = self.get_kernel(kernel_id)
# If a pending kernel raised an exception, remove it.
if not km.ready.cancelled() and km.ready.exception():
self.remove_kernel(kernel_id)
return
stopper = ensure_async(km.shutdown_kernel(now, restart))
fut = asyncio.ensure_future(self._remove_kernel_when_ready(kernel_id, stopper))
self._pending_kernels[kernel_id] = fut
# Await the kernel if not using pending kernels.
if not self._using_pending_kernels():
await fut
# raise an exception if one occurred during kernel shutdown.
if km.ready.exception():
raise km.ready.exception() # type: ignore
shutdown_kernel = run_sync(_async_shutdown_kernel)
@kernel_method
def request_shutdown(self, kernel_id: str, restart: t.Optional[bool] = False) -> None:
"""Ask a kernel to shut down by its kernel uuid"""
@kernel_method
def finish_shutdown(
self,
kernel_id: str,
waittime: t.Optional[float] = None,
pollinterval: t.Optional[float] = 0.1,
) -> None:
"""Wait for a kernel to finish shutting down, and kill it if it doesn't"""
self.log.info("Kernel shutdown: %s" % kernel_id)
@kernel_method
def cleanup_resources(self, kernel_id: str, restart: bool = False) -> None:
"""Clean up a kernel's resources"""
def remove_kernel(self, kernel_id: str) -> KernelManager:
"""remove a kernel from our mapping.
Mainly so that a kernel can be removed if it is already dead,
without having to call shutdown_kernel.
The kernel object is returned, or `None` if not found.
"""
return self._kernels.pop(kernel_id, None)
async def _async_shutdown_all(self, now: bool = False) -> None:
"""Shutdown all kernels."""
kids = self.list_kernel_ids()
kids += list(self._pending_kernels)
kms = list(self._kernels.values())
futs = [ensure_async(self.shutdown_kernel(kid, now=now)) for kid in set(kids)]
await asyncio.gather(*futs)
# If using pending kernels, the kernels will not have been fully shut down.
if self._using_pending_kernels():
for km in kms:
try:
await km.ready
except asyncio.CancelledError:
self._pending_kernels[km.kernel_id].cancel()
except Exception:
# Will have been logged in _add_kernel_when_ready
pass
shutdown_all = run_sync(_async_shutdown_all)
def interrupt_kernel(self, kernel_id: str) -> None:
"""Interrupt (SIGINT) the kernel by its uuid.
Parameters
==========
kernel_id : uuid
The id of the kernel to interrupt.
"""
kernel = self.get_kernel(kernel_id)
if not kernel.ready.done():
raise RuntimeError("Kernel is in a pending state. Cannot interrupt.")
out = kernel.interrupt_kernel()
self.log.info("Kernel interrupted: %s" % kernel_id)
return out
@kernel_method
def signal_kernel(self, kernel_id: str, signum: int) -> None:
"""Sends a signal to the kernel by its uuid.
Note that since only SIGTERM is supported on Windows, this function
is only useful on Unix systems.
Parameters
==========
kernel_id : uuid
The id of the kernel to signal.
signum : int
Signal number to send kernel.
"""
self.log.info("Signaled Kernel %s with %s" % (kernel_id, signum))
async def _async_restart_kernel(self, kernel_id: str, now: bool = False) -> None:
"""Restart a kernel by its uuid, keeping the same ports.
Parameters
==========
kernel_id : uuid
The id of the kernel to interrupt.
now : bool, optional
If True, the kernel is forcefully restarted *immediately*, without
having a chance to do any cleanup action. Otherwise the kernel is
given 1s to clean up before a forceful restart is issued.
In all cases the kernel is restarted, the only difference is whether
it is given a chance to perform a clean shutdown or not.
"""
kernel = self.get_kernel(kernel_id)
if self._using_pending_kernels():
if not kernel.ready.done():
raise RuntimeError("Kernel is in a pending state. Cannot restart.")
out = await ensure_async(kernel.restart_kernel(now=now))
self.log.info("Kernel restarted: %s" % kernel_id)
return out
restart_kernel = run_sync(_async_restart_kernel)
@kernel_method
def is_alive(self, kernel_id: str) -> bool:
"""Is the kernel alive.
This calls KernelManager.is_alive() which calls Popen.poll on the
actual kernel subprocess.
Parameters
==========
kernel_id : uuid
The id of the kernel.
"""
def _check_kernel_id(self, kernel_id: str) -> None:
"""check that a kernel id is valid"""
if kernel_id not in self:
raise KeyError("Kernel with id not found: %s" % kernel_id)
def get_kernel(self, kernel_id: str) -> KernelManager:
"""Get the single KernelManager object for a kernel by its uuid.
Parameters
==========
kernel_id : uuid
The id of the kernel.
"""
self._check_kernel_id(kernel_id)
return self._kernels[kernel_id]
@kernel_method
def add_restart_callback(
self, kernel_id: str, callback: t.Callable, event: str = "restart"
) -> None:
"""add a callback for the KernelRestarter"""
@kernel_method
def remove_restart_callback(
self, kernel_id: str, callback: t.Callable, event: str = "restart"
) -> None:
"""remove a callback for the KernelRestarter"""
@kernel_method
def get_connection_info(self, kernel_id: str) -> t.Dict[str, t.Any]:
"""Return a dictionary of connection data for a kernel.
Parameters
==========
kernel_id : uuid
The id of the kernel.
Returns
=======
connection_dict : dict
A dict of the information needed to connect to a kernel.
This includes the ip address and the integer port
numbers of the different channels (stdin_port, iopub_port,
shell_port, hb_port).
"""
@kernel_method
def connect_iopub(self, kernel_id: str, identity: t.Optional[bytes] = None) -> socket.socket:
"""Return a zmq Socket connected to the iopub channel.
Parameters
==========
kernel_id : uuid
The id of the kernel
identity : bytes (optional)
The zmq identity of the socket
Returns
=======
stream : zmq Socket or ZMQStream
"""
@kernel_method
def connect_shell(self, kernel_id: str, identity: t.Optional[bytes] = None) -> socket.socket:
"""Return a zmq Socket connected to the shell channel.
Parameters
==========
kernel_id : uuid
The id of the kernel
identity : bytes (optional)
The zmq identity of the socket
Returns
=======
stream : zmq Socket or ZMQStream
"""
@kernel_method
def connect_control(self, kernel_id: str, identity: t.Optional[bytes] = None) -> socket.socket:
"""Return a zmq Socket connected to the control channel.
Parameters
==========
kernel_id : uuid
The id of the kernel
identity : bytes (optional)
The zmq identity of the socket
Returns
=======
stream : zmq Socket or ZMQStream
"""
@kernel_method
def connect_stdin(self, kernel_id: str, identity: t.Optional[bytes] = None) -> socket.socket:
"""Return a zmq Socket connected to the stdin channel.
Parameters
==========
kernel_id : uuid
The id of the kernel
identity : bytes (optional)
The zmq identity of the socket
Returns
=======
stream : zmq Socket or ZMQStream
"""
@kernel_method
def connect_hb(self, kernel_id: str, identity: t.Optional[bytes] = None) -> socket.socket:
"""Return a zmq Socket connected to the hb channel.
Parameters
==========
kernel_id : uuid
The id of the kernel
identity : bytes (optional)
The zmq identity of the socket
Returns
=======
stream : zmq Socket or ZMQStream
"""
def new_kernel_id(self, **kwargs: t.Any) -> str:
"""
Returns the id to associate with the kernel for this request. Subclasses may override
this method to substitute other sources of kernel ids.
:param kwargs:
:return: string-ized version 4 uuid
"""
return str(uuid.uuid4())
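# A minimal sketch (hypothetical subclass, not part of jupyter_client) of
# overriding new_kernel_id to substitute another id source, as the docstring
# above suggests:
#
#     class PrefixedKernelIdManager(MultiKernelManager):
#         def new_kernel_id(self, **kwargs):
#             # prefix ids with the kernel name for easier log grepping
#             return "%s-%s" % (kwargs.get("kernel_name", "kernel"), uuid.uuid4())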
class AsyncMultiKernelManager(MultiKernelManager):
kernel_manager_class = DottedObjectName(
"jupyter_client.ioloop.AsyncIOLoopKernelManager",
config=True,
help="""The kernel manager class. This is configurable to allow
subclassing of the AsyncKernelManager for customized behavior.
""",
)
use_pending_kernels = Bool(
False,
help="""Whether to make kernels available before the process has started. The
kernel has a `.ready` future which can be awaited before connecting""",
).tag(config=True)
start_kernel = MultiKernelManager._async_start_kernel
restart_kernel = MultiKernelManager._async_restart_kernel
shutdown_kernel = MultiKernelManager._async_shutdown_kernel
shutdown_all = MultiKernelManager._async_shutdown_all
|
[
"traitlets.default",
"asyncio.gather",
"traitlets.Bool",
"uuid.uuid4",
"typing.cast",
"traitlets.DottedObjectName",
"traitlets.Unicode",
"traitlets.Dict",
"traitlets.observe",
"traitlets.Any",
"os.path.join",
"traitlets.Instance",
"zmq.Context",
"traitlets.utils.importstring.import_item"
] |
[((1728, 1772), 'traitlets.Instance', 'Instance', (['KernelSpecManager'], {'allow_none': '(True)'}), '(KernelSpecManager, allow_none=True)\n', (1736, 1772), False, 'from traitlets import Instance\n'), ((2053, 2084), 'traitlets.observe', 'observe', (['"""kernel_manager_class"""'], {}), "('kernel_manager_class')\n", (2060, 2084), False, 'from traitlets import observe\n'), ((2244, 2297), 'traitlets.Any', 'Any', ([], {'help': '"""this is kernel_manager_class after import"""'}), "(help='this is kernel_manager_class after import')\n", (2247, 2297), False, 'from traitlets import Any\n'), ((2304, 2337), 'traitlets.default', 'default', (['"""kernel_manager_factory"""'], {}), "('kernel_manager_factory')\n", (2311, 2337), False, 'from traitlets import default\n'), ((3158, 3181), 'traitlets.Instance', 'Instance', (['"""zmq.Context"""'], {}), "('zmq.Context')\n", (3166, 3181), False, 'from traitlets import Instance\n'), ((3206, 3217), 'traitlets.Bool', 'Bool', (['(False)'], {}), '(False)\n', (3210, 3217), False, 'from traitlets import Bool\n'), ((3242, 3248), 'traitlets.Dict', 'Dict', ([], {}), '()\n', (3246, 3248), False, 'from traitlets import Dict\n'), ((3390, 3408), 'traitlets.default', 'default', (['"""context"""'], {}), "('context')\n", (3397, 3408), False, 'from traitlets import default\n'), ((3565, 3576), 'traitlets.Unicode', 'Unicode', (['""""""'], {}), "('')\n", (3572, 3576), False, 'from traitlets import Unicode\n'), ((3593, 3599), 'traitlets.Dict', 'Dict', ([], {}), '()\n', (3597, 3599), False, 'from traitlets import Dict\n'), ((18262, 18505), 'traitlets.DottedObjectName', 'DottedObjectName', (['"""jupyter_client.ioloop.AsyncIOLoopKernelManager"""'], {'config': '(True)', 'help': '"""The kernel manager class. This is configurable to allow\n subclassing of the AsyncKernelManager for customized behavior.\n """'}), '(\'jupyter_client.ioloop.AsyncIOLoopKernelManager\', config=\n True, help=\n """The kernel manager class. This is configurable to allow\n subclassing of the AsyncKernelManager for customized behavior.\n """\n )\n', (18278, 18505), False, 'from traitlets import DottedObjectName\n'), ((2529, 2567), 'traitlets.utils.importstring.import_item', 'import_item', (['self.kernel_manager_class'], {}), '(self.kernel_manager_class)\n', (2540, 2567), False, 'from traitlets.utils.importstring import import_item\n'), ((3529, 3542), 'zmq.Context', 'zmq.Context', ([], {}), '()\n', (3540, 3542), False, 'import zmq\n'), ((1594, 1669), 'traitlets.Unicode', 'Unicode', (['NATIVE_KERNEL_NAME'], {'help': '"""The name of the default kernel to start"""'}), "(NATIVE_KERNEL_NAME, help='The name of the default kernel to start')\n", (1601, 1669), False, 'from traitlets import Unicode\n'), ((1801, 2016), 'traitlets.DottedObjectName', 'DottedObjectName', (['"""jupyter_client.ioloop.IOLoopKernelManager"""'], {'help': '"""The kernel manager class. This is configurable to allow\n subclassing of the KernelManager for customized behavior.\n """'}), '(\'jupyter_client.ioloop.IOLoopKernelManager\', help=\n """The kernel manager class. This is configurable to allow\n subclassing of the KernelManager for customized behavior.\n """\n )\n', (1817, 2016), False, 'from traitlets import DottedObjectName\n'), ((3031, 3102), 'traitlets.Bool', 'Bool', (['(True)'], {'help': '"""Share a single zmq.Context to talk to all my kernels"""'}), "(True, help='Share a single zmq.Context to talk to all my kernels')\n", (3035, 3102), False, 'from traitlets import Bool\n'), ((11184, 11205), 'asyncio.gather', 'asyncio.gather', (['*futs'], {}), '(*futs)\n', (11198, 11205), False, 'import asyncio\n'), ((18167, 18179), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (18177, 18179), False, 'import uuid\n'), ((18549, 18729), 'traitlets.Bool', 'Bool', (['(False)'], {'help': '"""Whether to make kernels available before the process has started. The\n kernel has a `.ready` future which can be awaited before connecting"""'}), '(False, help=\n """Whether to make kernels available before the process has started. The\n kernel has a `.ready` future which can be awaited before connecting"""\n )\n', (18553, 18729), False, 'from traitlets import Bool\n'), ((5407, 5470), 'os.path.join', 'os.path.join', (['self.connection_dir', "('kernel-%s.json' % kernel_id)"], {}), "(self.connection_dir, 'kernel-%s.json' % kernel_id)\n", (5419, 5470), False, 'import os\n'), ((8886, 8918), 'typing.cast', 't.cast', (['asyncio.Future', 'km.ready'], {}), '(asyncio.Future, km.ready)\n', (8892, 8918), True, 'import typing as t\n')]
|
import pyglet
from pyglet.gl import *
# pyglet.options['debug_gl_shaders'] = True
window = pyglet.window.Window(width=540, height=540, resizable=True)
batch = pyglet.graphics.Batch()
print("OpenGL Context: {}".format(window.context.get_info().version))
##########################################################
# TESTS !
##########################################################
label = pyglet.text.Label("This is a test", x=0, y=180, dpi=200, color=(255, 25, 255, 150), batch=batch)
vertex_list = pyglet.graphics.vertex_list(3, ('position3f', (100, 300, 0, 200, 250, 0, 200, 350, 0)),
('colors4f', (1, 0, 0, 1, 0, 1, 0, 1, 0.3, 0.3, 1, 1)))
def create_quad_vertex_list(x, y, z, width, height):
return x, y, z, x + width, y, z, x + width, y + height, z, x, y + height, z
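# The quad vertices above are ordered bottom-left, bottom-right, top-right,
# top-left, so the indices [0, 1, 2, 0, 2, 3] below split it into two triangles.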
batch.add_indexed(4, GL_TRIANGLES, None, [0, 1, 2, 0, 2, 3],
('position3f', create_quad_vertex_list(480, 270, -11, 50, 50)),
('colors4f', (1, 0.5, 0.2, 1, 1, 0.5, 0.2, 1, 1, 0.5, 0.2, 1, 1, 0.5, 0.2, 1)))
batch.add_indexed(4, GL_TRIANGLES, None, [0, 1, 2, 0, 2, 3],
('position3f', (400, 400, 0, 400+50, 400, 0, 400+50, 400+50, 0, 400, 400+50, 0)),
('colors4f', (1, 0.5, 0.2, 1, 1, 0.5, 0.2, 1, 1, 0.5, 0.2, 1, 1, 0.5, 0.2, 1)))
img = pyglet.image.load("pyglet.png")
img.anchor_x = img.width // 2
img.anchor_y = img.height // 2
red = pyglet.image.SolidColorImagePattern((255, 0, 0, 255)).create_image(50, 50)
green = pyglet.image.SolidColorImagePattern((0, 255, 0, 255)).create_image(50, 50)
blue = pyglet.image.SolidColorImagePattern((0, 0, 255, 255)).create_image(50, 50)
white = pyglet.image.SolidColorImagePattern((255, 255, 255, 255)).create_image(50, 50)
sprites = [pyglet.sprite.Sprite(img=img, x=60, y=80, batch=batch),
pyglet.sprite.Sprite(img=img, x=110, y=90, batch=batch),
pyglet.sprite.Sprite(img=img, x=160, y=100, batch=batch),
pyglet.sprite.Sprite(img=img, x=210, y=110, batch=batch)]
for sprite in sprites:
sprite.opacity = 220
sprite2 = pyglet.sprite.Sprite(img=red, x=200, y=400, batch=batch)
sprite3 = pyglet.sprite.Sprite(img=green, x=300, y=300, batch=batch)
sprite4 = pyglet.sprite.Sprite(img=blue, x=400, y=200, batch=batch)
sprite5 = pyglet.sprite.Sprite(img=white, x=500, y=100, batch=batch)
standalone_sprite = pyglet.sprite.Sprite(img=white, x=600, y=0)
##########################################################
# Modify the sprite scale value by scrolling the mouse
##########################################################
@window.event
def on_mouse_scroll(x, y, mouse, direction):
for spr in sprites:
spr.scale += direction / 10
###########################################################
#
###########################################################
@window.event
def on_draw():
window.clear()
# pyglet.graphics.draw(3, GL_TRIANGLES, ('position3f', (100, 100, 0, 200, 100, 0, 150, 200, 0)),
# ('colors3f', (1, 0.5, 0.2, 1, 0.5, 0.2, 1, 0.5, 0.2)))
#
# pyglet.graphics.draw_indexed(4, GL_TRIANGLES, [0, 1, 2, 0, 2, 3],
# ('position2i', (225, 300, 250, 300, 250, 325, 225, 325)),
# ('colors3f', (0.5, 1, 0.2, 0.5, 0.2, 1, 0.2, 0.5, 1, 1, 0.5, 0.2)))
vertex_list.draw(GL_TRIANGLES)
batch.draw()
standalone_sprite.draw()
def update(dt):
for sprite in sprites:
sprite.rotation += 100 * dt % 360
if __name__ == "__main__":
pyglet.gl.glClearColor(0.2, 0.3, 0.3, 1)
pyglet.clock.schedule_interval(update, 1/60)
pyglet.app.run()
|
[
"pyglet.app.run",
"pyglet.text.Label",
"pyglet.gl.glClearColor",
"pyglet.graphics.Batch",
"pyglet.image.SolidColorImagePattern",
"pyglet.sprite.Sprite",
"pyglet.image.load",
"pyglet.window.Window",
"pyglet.graphics.vertex_list",
"pyglet.clock.schedule_interval"
] |
[((94, 153), 'pyglet.window.Window', 'pyglet.window.Window', ([], {'width': '(540)', 'height': '(540)', 'resizable': '(True)'}), '(width=540, height=540, resizable=True)\n', (114, 153), False, 'import pyglet\n'), ((162, 185), 'pyglet.graphics.Batch', 'pyglet.graphics.Batch', ([], {}), '()\n', (183, 185), False, 'import pyglet\n'), ((395, 496), 'pyglet.text.Label', 'pyglet.text.Label', (['"""This is a test"""'], {'x': '(0)', 'y': '(180)', 'dpi': '(200)', 'color': '(255, 25, 255, 150)', 'batch': 'batch'}), "('This is a test', x=0, y=180, dpi=200, color=(255, 25, \n 255, 150), batch=batch)\n", (412, 496), False, 'import pyglet\n'), ((507, 655), 'pyglet.graphics.vertex_list', 'pyglet.graphics.vertex_list', (['(3)', "('position3f', (100, 300, 0, 200, 250, 0, 200, 350, 0))", "('colors4f', (1, 0, 0, 1, 0, 1, 0, 1, 0.3, 0.3, 1, 1))"], {}), "(3, ('position3f', (100, 300, 0, 200, 250, 0, \n 200, 350, 0)), ('colors4f', (1, 0, 0, 1, 0, 1, 0, 1, 0.3, 0.3, 1, 1)))\n", (534, 655), False, 'import pyglet\n'), ((1346, 1377), 'pyglet.image.load', 'pyglet.image.load', (['"""pyglet.png"""'], {}), "('pyglet.png')\n", (1363, 1377), False, 'import pyglet\n'), ((2105, 2161), 'pyglet.sprite.Sprite', 'pyglet.sprite.Sprite', ([], {'img': 'red', 'x': '(200)', 'y': '(400)', 'batch': 'batch'}), '(img=red, x=200, y=400, batch=batch)\n', (2125, 2161), False, 'import pyglet\n'), ((2172, 2230), 'pyglet.sprite.Sprite', 'pyglet.sprite.Sprite', ([], {'img': 'green', 'x': '(300)', 'y': '(300)', 'batch': 'batch'}), '(img=green, x=300, y=300, batch=batch)\n', (2192, 2230), False, 'import pyglet\n'), ((2241, 2298), 'pyglet.sprite.Sprite', 'pyglet.sprite.Sprite', ([], {'img': 'blue', 'x': '(400)', 'y': '(200)', 'batch': 'batch'}), '(img=blue, x=400, y=200, batch=batch)\n', (2261, 2298), False, 'import pyglet\n'), ((2309, 2367), 'pyglet.sprite.Sprite', 'pyglet.sprite.Sprite', ([], {'img': 'white', 'x': '(500)', 'y': '(100)', 'batch': 'batch'}), '(img=white, x=500, y=100, batch=batch)\n', (2329, 2367), False, 'import pyglet\n'), ((2389, 2432), 'pyglet.sprite.Sprite', 'pyglet.sprite.Sprite', ([], {'img': 'white', 'x': '(600)', 'y': '(0)'}), '(img=white, x=600, y=0)\n', (2409, 2432), False, 'import pyglet\n'), ((1784, 1838), 'pyglet.sprite.Sprite', 'pyglet.sprite.Sprite', ([], {'img': 'img', 'x': '(60)', 'y': '(80)', 'batch': 'batch'}), '(img=img, x=60, y=80, batch=batch)\n', (1804, 1838), False, 'import pyglet\n'), ((1851, 1906), 'pyglet.sprite.Sprite', 'pyglet.sprite.Sprite', ([], {'img': 'img', 'x': '(110)', 'y': '(90)', 'batch': 'batch'}), '(img=img, x=110, y=90, batch=batch)\n', (1871, 1906), False, 'import pyglet\n'), ((1919, 1975), 'pyglet.sprite.Sprite', 'pyglet.sprite.Sprite', ([], {'img': 'img', 'x': '(160)', 'y': '(100)', 'batch': 'batch'}), '(img=img, x=160, y=100, batch=batch)\n', (1939, 1975), False, 'import pyglet\n'), ((1988, 2044), 'pyglet.sprite.Sprite', 'pyglet.sprite.Sprite', ([], {'img': 'img', 'x': '(210)', 'y': '(110)', 'batch': 'batch'}), '(img=img, x=210, y=110, batch=batch)\n', (2008, 2044), False, 'import pyglet\n'), ((3591, 3631), 'pyglet.gl.glClearColor', 'pyglet.gl.glClearColor', (['(0.2)', '(0.3)', '(0.3)', '(1)'], {}), '(0.2, 0.3, 0.3, 1)\n', (3613, 3631), False, 'import pyglet\n'), ((3636, 3682), 'pyglet.clock.schedule_interval', 'pyglet.clock.schedule_interval', (['update', '(1 / 60)'], {}), '(update, 1 / 60)\n', (3666, 3682), False, 'import pyglet\n'), ((3685, 3701), 'pyglet.app.run', 'pyglet.app.run', ([], {}), '()\n', (3699, 3701), False, 'import pyglet\n'), ((1445, 1498), 'pyglet.image.SolidColorImagePattern', 'pyglet.image.SolidColorImagePattern', (['(255, 0, 0, 255)'], {}), '((255, 0, 0, 255))\n', (1480, 1498), False, 'import pyglet\n'), ((1528, 1581), 'pyglet.image.SolidColorImagePattern', 'pyglet.image.SolidColorImagePattern', (['(0, 255, 0, 255)'], {}), '((0, 255, 0, 255))\n', (1563, 1581), False, 'import pyglet\n'), ((1610, 1663), 'pyglet.image.SolidColorImagePattern', 'pyglet.image.SolidColorImagePattern', (['(0, 0, 255, 255)'], {}), '((0, 0, 255, 255))\n', (1645, 1663), False, 'import pyglet\n'), ((1693, 1750), 'pyglet.image.SolidColorImagePattern', 'pyglet.image.SolidColorImagePattern', (['(255, 255, 255, 255)'], {}), '((255, 255, 255, 255))\n', (1728, 1750), False, 'import pyglet\n')]
|
from PIL import Image
import PIL.ImageOps
import numpy as np
import tensorflow as tf
import time
## start timing
stime = time.time()
### load the trained model
model = tf.keras.models.load_model('my_model.h5')
# model.summary()
### crop to the bounding box of the content
img = PIL.ImageOps.invert(Image.open('sample.bmp')).convert("1")
Newimg = np.asarray(img.crop(img.getbbox()))
### character class
class ocr:
def __init__(self, Newimg, rowSize,colSize, idxR,idxC):
self.points = set()
self.findContour(Newimg, rowSize,colSize, idxR,idxC)
self.makeNewimage()
self.ID = model.predict(self.resize28(self.image).reshape(1,-1))
def findContour(self, Newimg, rowSize,colSize, idxR,idxC):
        ## collect the first group of connected pixels in the input image into points (a set)
if (idxR, idxC) not in self.points:
self.points.add( (idxR, idxC) )
if idxC+1 < colSize:
if Newimg[idxR, idxC+1]:
self.findContour(Newimg, rowSize,colSize, idxR,idxC+1)
if idxR+1<rowSize and Newimg[idxR+1, idxC+1]:
self.findContour(Newimg, rowSize,colSize, idxR+1,idxC+1)
if idxR-1>=0 and Newimg[idxR-1, idxC+1]:
self.findContour(Newimg, rowSize,colSize, idxR-1,idxC+1)
if idxR+1<rowSize and Newimg[idxR+1, idxC]:
self.findContour(Newimg, rowSize,colSize, idxR+1,idxC)
if idxR-1>=0 and Newimg[idxR-1, idxC]:
self.findContour(Newimg, rowSize,colSize, idxR-1,idxC)
if idxC-1 >= 0:
if Newimg[idxR, idxC-1]:
self.findContour(Newimg, rowSize,colSize, idxR,idxC-1)
if idxR+1<rowSize and Newimg[idxR+1, idxC-1]:
self.findContour(Newimg, rowSize,colSize, idxR+1,idxC-1)
if idxR-1>=0 and Newimg[idxR-1, idxC-1]:
self.findContour(Newimg, rowSize,colSize, idxR-1,idxC-1)
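    # Note: this recursive 8-connected flood fill can exceed Python's default
    # recursion limit (~1000 frames) on large connected regions; raising it with
    # sys.setrecursionlimit may be needed for big glyphs.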
def makeNewimage(self):
        ## build and store a new image from the coordinates in points
cRange = np.array([[e1, e2] for (e1, e2) in self.points])
dr = np.amax(cRange, 0)
ul = np.amin(cRange, 0)
self.image = np.zeros((dr[0]-ul[0]+1,dr[1]-ul[1]+1))
self.image[cRange[:,0]-ul[0], cRange[:,1]-ul[1]] = 1
self.position = ( np.around(np.mean(cRange[:,0])), np.around(np.mean(cRange[:,1])) )
@staticmethod
def resize28(image):
        ## shrink to 28x28
rowSize, colSize = image.shape
temp_im = Image.fromarray(image)
resized = np.zeros((28,28))
if rowSize>colSize:
shortLength = int(colSize*20/rowSize)
temp_im = temp_im.resize( (shortLength,20) )
resized[4:24, int(13-shortLength/2):int(13-shortLength/2)+shortLength] = np.asarray(temp_im)
else:
shortLength = int(rowSize*20/colSize)
temp_im = temp_im.resize( (20,shortLength) )
resized[int(13-shortLength/2):int(13-shortLength/2)+shortLength, 4:24] = np.asarray(temp_im)
return resized
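    # Note: scaling the glyph to 20 px and centering it in a 28x28 frame mirrors
    # the classic MNIST preprocessing, which the loaded model presumably expects.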
@classmethod
def many(cls, Newimg):
        ## extract the list of characters
ocrList = []
rowSize, colSize = Newimg.shape
idxs = np.argwhere(Newimg)
idxs = list(map(tuple, idxs[idxs[:,1].argsort()]))
while True:
eachString = cls(Newimg, rowSize,colSize, idxs[0][0],idxs[0][1])
ocrList.append(eachString)
idxs = [e for e in idxs if e not in eachString.points]
if not idxs:
break
return ocrList
### segment characters at pixel discontinuities
ocrList = ocr.many(Newimg)
## print elapsed time
print('elapsed time:', time.time()-stime, 'sec')
### save each cropped character image
iter = 0
for classed_string in ocrList:
iter += 1
name = 'CLASSED{}_{}.bmp'.format(iter, np.argmax(classed_string.ID) )
print(name, ['{:.2f}'.format(item) for item in classed_string.ID.tolist()[0]])
Image.fromarray(255*classed_string.image).convert('RGB').save(name)
|
[
"tensorflow.keras.models.load_model",
"numpy.amin",
"numpy.argmax",
"numpy.asarray",
"numpy.zeros",
"time.time",
"numpy.amax",
"numpy.argwhere",
"PIL.Image.open",
"numpy.mean",
"numpy.array",
"PIL.Image.fromarray"
] |
[((117, 128), 'time.time', 'time.time', ([], {}), '()\n', (126, 128), False, 'import time\n'), ((152, 193), 'tensorflow.keras.models.load_model', 'tf.keras.models.load_model', (['"""my_model.h5"""'], {}), "('my_model.h5')\n", (178, 193), True, 'import tensorflow as tf\n'), ((1977, 2023), 'numpy.array', 'np.array', (['[[e1, e2] for e1, e2 in self.points]'], {}), '([[e1, e2] for e1, e2 in self.points])\n', (1985, 2023), True, 'import numpy as np\n'), ((2039, 2057), 'numpy.amax', 'np.amax', (['cRange', '(0)'], {}), '(cRange, 0)\n', (2046, 2057), True, 'import numpy as np\n'), ((2071, 2089), 'numpy.amin', 'np.amin', (['cRange', '(0)'], {}), '(cRange, 0)\n', (2078, 2089), True, 'import numpy as np\n'), ((2111, 2159), 'numpy.zeros', 'np.zeros', (['(dr[0] - ul[0] + 1, dr[1] - ul[1] + 1)'], {}), '((dr[0] - ul[0] + 1, dr[1] - ul[1] + 1))\n', (2119, 2159), True, 'import numpy as np\n'), ((2426, 2448), 'PIL.Image.fromarray', 'Image.fromarray', (['image'], {}), '(image)\n', (2441, 2448), False, 'from PIL import Image\n'), ((2467, 2485), 'numpy.zeros', 'np.zeros', (['(28, 28)'], {}), '((28, 28))\n', (2475, 2485), True, 'import numpy as np\n'), ((3112, 3131), 'numpy.argwhere', 'np.argwhere', (['Newimg'], {}), '(Newimg)\n', (3123, 3131), True, 'import numpy as np\n'), ((3546, 3557), 'time.time', 'time.time', ([], {}), '()\n', (3555, 3557), False, 'import time\n'), ((3679, 3707), 'numpy.argmax', 'np.argmax', (['classed_string.ID'], {}), '(classed_string.ID)\n', (3688, 3707), True, 'import numpy as np\n'), ((252, 276), 'PIL.Image.open', 'Image.open', (['"""sample.bmp"""'], {}), "('sample.bmp')\n", (262, 276), False, 'from PIL import Image\n'), ((2705, 2724), 'numpy.asarray', 'np.asarray', (['temp_im'], {}), '(temp_im)\n', (2715, 2724), True, 'import numpy as np\n'), ((2931, 2950), 'numpy.asarray', 'np.asarray', (['temp_im'], {}), '(temp_im)\n', (2941, 2950), True, 'import numpy as np\n'), ((2248, 2269), 'numpy.mean', 'np.mean', (['cRange[:, 0]'], {}), '(cRange[:, 0])\n', (2255, 2269), True, 'import numpy as np\n'), ((2281, 2302), 'numpy.mean', 'np.mean', (['cRange[:, 1]'], {}), '(cRange[:, 1])\n', (2288, 2302), True, 'import numpy as np\n'), ((3797, 3840), 'PIL.Image.fromarray', 'Image.fromarray', (['(255 * classed_string.image)'], {}), '(255 * classed_string.image)\n', (3812, 3840), False, 'from PIL import Image\n')]
|
from setuptools import setup, find_packages
setup(
name="pymaster",
packages=find_packages(exclude=["tests", "docs"]),
version="1.0.0",
description="Quick Recipes for interview problems",
author="<NAME>",
classifiers=[
"Topic:: Utilities",
"Operating System :: POSIX",
"Programming Language :: Python :: 3",
],
)
|
[
"setuptools.find_packages"
] |
[((86, 126), 'setuptools.find_packages', 'find_packages', ([], {'exclude': "['tests', 'docs']"}), "(exclude=['tests', 'docs'])\n", (99, 126), False, 'from setuptools import setup, find_packages\n')]
|
# Copyright (C) 2019 by eHealth Africa : http://www.eHealthAfrica.org
#
# See the NOTICE file distributed with this work for additional information
# regarding copyright ownership.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import uuid
from django.db import models
from django.utils.functional import cached_property
from django.utils.translation import gettext as _
from django_prometheus.models import ExportModelOperationsMixin
from model_utils.models import TimeStampedModel
from aether.sdk.multitenancy.models import MtModelAbstract, MtModelChildAbstract
from aether.sdk.utils import json_prettified
from .utils import validate_contract
'''
Data model schema:
+------------------+ +------------------+ +------------------+
| Project | | Pipeline | | Contract |
+==================+ +==================+ +==================+
| project_id |<-+ | id |<-+ | id |
| name | | | created | | | created |
| active | | | modified | | | modified |
+------------------+ | | name | | | name |
| | schema | | | entity_types |
| | input | | | mapping_rules |
| +~~~~~~~~~~~~~~~~~~+ | | mapping_errors |
| | mappingset | | | output |
| +::::::::::::::::::+ | | is_active |
+-<| project | | | is_read_only |
+------------------+ | +~~~~~~~~~~~~~~~~~~+
| | mapping |
| | kernel_refs |
| +::::::::::::::::::+
+-<| pipeline |
+------------------+
'''
class Project(ExportModelOperationsMixin('ui_project'), TimeStampedModel, MtModelAbstract):
'''
Database link of an Aether Kernel Project.
:ivar UUID project_id: Aether Kernel project ID (primary key).
:ivar text name: Project name (might match the linked Kernel project name).
:ivar bool active: Active. Defaults to ``True``.
'''
# This is needed to submit data to kernel
# (there is a one to one relation)
project_id = models.UUIDField(
primary_key=True,
default=uuid.uuid4,
verbose_name=_('project ID'),
help_text=_('This ID corresponds to an Aether Kernel project ID.'),
)
name = models.TextField(null=True, blank=True, default='', verbose_name=_('name'))
active = models.BooleanField(default=True, verbose_name=_('active'))
is_default = models.BooleanField(
default=False,
editable=False,
verbose_name=_('is the default project?'),
)
def __str__(self):
return self.name
class Meta:
app_label = 'ui'
default_related_name = 'projects'
ordering = ['name']
verbose_name = _('project')
verbose_name_plural = _('projects')
class Pipeline(ExportModelOperationsMixin('ui_pipeline'), TimeStampedModel, MtModelChildAbstract):
'''
Pipeline
:ivar UUID id: ID (primary key).
:ivar datetime created: Creation timestamp.
:ivar datetime modified: Last update timestamp.
:ivar text name: Name.
:ivar JSON schema: AVRO schema of the input.
:ivar JSON input: Data sample.
:ivar UUID mappingset: Linked Aether Mappingset ID.
:ivar Project project: Project.
'''
id = models.UUIDField(primary_key=True, default=uuid.uuid4, verbose_name=_('ID'))
name = models.TextField(verbose_name=_('name'))
# this is the avro schema
schema = models.JSONField(null=True, blank=True, default=dict, verbose_name=_('AVRO schema'))
# this is an example of the data using the avro schema
input = models.JSONField(null=True, blank=True, default=dict, verbose_name=_('input JSON'))
# this is a reference to the linked kernel mappingset
mappingset = models.UUIDField(
null=True,
blank=True,
unique=True,
verbose_name=_('mapping set ID'),
help_text=_('This ID corresponds to an Aether Kernel mapping set ID.'),
)
project = models.ForeignKey(to=Project, on_delete=models.CASCADE, verbose_name=_('project'))
@cached_property
def schema_prettified(self):
return json_prettified(self.schema)
@cached_property
def input_prettified(self):
return json_prettified(self.input)
@property
def is_read_only(self):
return Contract.objects.filter(pipeline=self, is_read_only=True).exists()
def __str__(self):
return self.name
def save(self, *args, **kwargs):
super(Pipeline, self).save(*args, **kwargs)
# revalidate linked contracts against updated fields
contracts = Contract.objects.filter(pipeline=self)
        for contract in contracts:
            contract.save()
# invalidates cached properties
for p in ['input_prettified', 'schema_prettified']: # pragma: no cover
if p in self.__dict__:
del self.__dict__[p]
def get_mt_instance(self):
return self.project
class Meta:
app_label = 'ui'
default_related_name = 'pipelines'
ordering = ('name',)
verbose_name = _('pipeline')
verbose_name_plural = _('pipelines')
indexes = [
models.Index(fields=['project', '-modified']),
models.Index(fields=['-modified']),
]
class Contract(ExportModelOperationsMixin('ui_contract'), TimeStampedModel, MtModelChildAbstract):
'''
Contract
:ivar UUID id: ID (primary key).
:ivar datetime created: Creation timestamp.
:ivar datetime modified: Last update timestamp.
:ivar text name: Name.
:ivar Pipeline pipeline: Pipeline.
:ivar JSON entity_types: List of AVRO schemas of the different entities.
:ivar JSON mapping_rules: List of mapping rules used to transform
the pipeline input into the entity types.
Rule format:
{
"id": uuid,
"source": "jsonpath-input-1",
"destination": "jsonpath-entity-type-1",
}
:ivar JSON mapping_errors: List of errors derived from
the mapping rules, the entity types and the pipeline input
when the Kernel ``validate-mappings`` endpoint is called.
:ivar JSON output: List of entities extracted using
the mapping rules, the entity types and the pipeline input
when the Kernel ``validate-mappings`` endpoint is called.
:ivar UUID mapping: Linked Aether Mapping ID.
:ivar JSON kernel_refs: Linked Aether Kernel artefact IDs.
Object expected format:
{
"entities": {
"entity name": uuid, # the Kernel schema decorator ID
...
},
"schemas": {
"entity name": uuid, # the Kernel schema ID
...
},
}
    :ivar datetime published_on: Timestamp of the last publish to Aether Kernel.
:ivar bool is_active: Is the contract active?
:ivar bool is_read_only: Can the contract be modified manually?
:ivar bool is_identity: Is the contract generated as an identity contract?
'''
id = models.UUIDField(primary_key=True, default=uuid.uuid4, verbose_name=_('ID'))
name = models.TextField(verbose_name=_('name'))
pipeline = models.ForeignKey(to=Pipeline, on_delete=models.CASCADE, verbose_name=_('pipeline'))
# the list of available entity types (avro schemas)
entity_types = models.JSONField(null=True, blank=True, default=list, verbose_name=_('entity types'))
# this represents the list of mapping rules
# {
# "mapping_rules": [
# {"id": ###, "source": "jsonpath-input-1", "destination": "jsonpath-entity-type-1"},
# {"id": ###, "source": "jsonpath-input-2", "destination": "jsonpath-entity-type-2"},
# ...
# {"id": ###, "source": "jsonpath-input-n", "destination": "jsonpath-entity-type-n"},
# ]
# }
mapping_rules = models.JSONField(null=True, blank=True, default=list, verbose_name=_('mapping rules'))
# these represent the list of entities and errors returned by the
# `validate-mapping` endpoint in kernel.
# {
# "entities": [
# {...},
# {...},
# ],
# "mapping_errors": [
# {"path": "jsonpath-input-a", "description": "No match for path"},
# {"path": "jsonpath-entity-type-b", "description": "No match for path"},
# ...
# # Summary of the error with the extracted entity
# {
# "description": "Extracted record did not conform to registered schema",
# "data": {"id": "uuid:####", ...}
# }
# ]
# }
mapping_errors = models.JSONField(null=True, blank=True, editable=False, verbose_name=_('mapping errors'))
output = models.JSONField(null=True, blank=True, editable=False, verbose_name=_('output'))
# this is a reference to the linked kernel mapping
mapping = models.UUIDField(
null=True,
blank=True,
unique=True,
verbose_name=_('mapping ID'),
help_text=_('This ID corresponds to an Aether Kernel mapping ID.'),
)
# this contains the information related to the linked artefacts in kernel
# {
# "entities": {
# "entity name": uuid, # the Kernel schema decorator ID
# ...
# },
# "schemas": {
# "entity name": uuid, # the Kernel schema ID
# ...
# },
# }
kernel_refs = models.JSONField(
null=True,
blank=True,
editable=False,
verbose_name=_('Kernel artefact IDs'),
help_text=_('These IDs correspond to Aether Kernel artefact IDs.'),
)
published_on = models.DateTimeField(null=True, blank=True, editable=False, verbose_name=_('published on'))
is_active = models.BooleanField(default=True, verbose_name=_('is active?'))
# the read only property is fulfilled by kernel fetched mappings
is_read_only = models.BooleanField(default=False, editable=False, verbose_name=_('is read only?'))
is_identity = models.BooleanField(default=False, verbose_name=_('is identity?'))
@cached_property
def entity_types_prettified(self):
return json_prettified(self.entity_types)
@cached_property
def mapping_rules_prettified(self):
return json_prettified(self.mapping_rules)
@cached_property
def mapping_errors_prettified(self):
return json_prettified(self.mapping_errors)
@cached_property
def output_errors_prettified(self):
return json_prettified(self.output)
@cached_property
def kernel_refs_errors_prettified(self):
return json_prettified(self.kernel_refs)
@cached_property
def kernel_rules(self):
'''
        The contract's mapping_rules property corresponds to the kernel
        mapping definition, but uses a different rule format
'''
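        # e.g. [{'id': 1, 'source': '$.a', 'destination': '$.b'}] -> [['$.a', '$.b']]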
return [
[rule['source'], rule['destination']]
for rule in (self.mapping_rules or [])
]
def __str__(self):
return self.name
def save(self, *args, **kwargs):
errors, output = validate_contract(self)
self.mapping_errors = errors
self.output = output
super(Contract, self).save(*args, **kwargs)
# invalidates cached properties
for p in [
'entity_types_prettified',
'mapping_rules_prettified',
'mapping_errors_prettified',
'output_errors_prettified',
'kernel_refs_errors_prettified',
'kernel_rules',
]: # pragma: no cover
if p in self.__dict__:
del self.__dict__[p]
def get_mt_instance(self):
return self.pipeline.project
class Meta:
app_label = 'ui'
default_related_name = 'contracts'
ordering = ('name',)
verbose_name = _('contract')
verbose_name_plural = _('contracts')
indexes = [
models.Index(fields=['pipeline', '-modified']),
models.Index(fields=['-modified']),
]
|
[
"django.db.models.Index",
"django_prometheus.models.ExportModelOperationsMixin",
"aether.sdk.utils.json_prettified",
"django.utils.translation.gettext"
] |
[((2547, 2587), 'django_prometheus.models.ExportModelOperationsMixin', 'ExportModelOperationsMixin', (['"""ui_project"""'], {}), "('ui_project')\n", (2573, 2587), False, 'from django_prometheus.models import ExportModelOperationsMixin\n'), ((3760, 3801), 'django_prometheus.models.ExportModelOperationsMixin', 'ExportModelOperationsMixin', (['"""ui_pipeline"""'], {}), "('ui_pipeline')\n", (3786, 3801), False, 'from django_prometheus.models import ExportModelOperationsMixin\n'), ((6321, 6362), 'django_prometheus.models.ExportModelOperationsMixin', 'ExportModelOperationsMixin', (['"""ui_contract"""'], {}), "('ui_contract')\n", (6347, 6362), False, 'from django_prometheus.models import ExportModelOperationsMixin\n'), ((3686, 3698), 'django.utils.translation.gettext', '_', (['"""project"""'], {}), "('project')\n", (3687, 3698), True, 'from django.utils.translation import gettext as _\n'), ((3729, 3742), 'django.utils.translation.gettext', '_', (['"""projects"""'], {}), "('projects')\n", (3730, 3742), True, 'from django.utils.translation import gettext as _\n'), ((5156, 5184), 'aether.sdk.utils.json_prettified', 'json_prettified', (['self.schema'], {}), '(self.schema)\n', (5171, 5184), False, 'from aether.sdk.utils import json_prettified\n'), ((5254, 5281), 'aether.sdk.utils.json_prettified', 'json_prettified', (['self.input'], {}), '(self.input)\n', (5269, 5281), False, 'from aether.sdk.utils import json_prettified\n'), ((6108, 6121), 'django.utils.translation.gettext', '_', (['"""pipeline"""'], {}), "('pipeline')\n", (6109, 6121), True, 'from django.utils.translation import gettext as _\n'), ((6152, 6166), 'django.utils.translation.gettext', '_', (['"""pipelines"""'], {}), "('pipelines')\n", (6153, 6166), True, 'from django.utils.translation import gettext as _\n'), ((11392, 11426), 'aether.sdk.utils.json_prettified', 'json_prettified', (['self.entity_types'], {}), '(self.entity_types)\n', (11407, 11426), False, 'from aether.sdk.utils import json_prettified\n'), ((11504, 11539), 'aether.sdk.utils.json_prettified', 'json_prettified', (['self.mapping_rules'], {}), '(self.mapping_rules)\n', (11519, 11539), False, 'from aether.sdk.utils import json_prettified\n'), ((11618, 11654), 'aether.sdk.utils.json_prettified', 'json_prettified', (['self.mapping_errors'], {}), '(self.mapping_errors)\n', (11633, 11654), False, 'from aether.sdk.utils import json_prettified\n'), ((11732, 11760), 'aether.sdk.utils.json_prettified', 'json_prettified', (['self.output'], {}), '(self.output)\n', (11747, 11760), False, 'from aether.sdk.utils import json_prettified\n'), ((11843, 11876), 'aether.sdk.utils.json_prettified', 'json_prettified', (['self.kernel_refs'], {}), '(self.kernel_refs)\n', (11858, 11876), False, 'from aether.sdk.utils import json_prettified\n'), ((13054, 13067), 'django.utils.translation.gettext', '_', (['"""contract"""'], {}), "('contract')\n", (13055, 13067), True, 'from django.utils.translation import gettext as _\n'), ((13098, 13112), 'django.utils.translation.gettext', '_', (['"""contracts"""'], {}), "('contracts')\n", (13099, 13112), True, 'from django.utils.translation import gettext as _\n'), ((3101, 3116), 'django.utils.translation.gettext', '_', (['"""project ID"""'], {}), "('project ID')\n", (3102, 3116), True, 'from django.utils.translation import gettext as _\n'), ((3136, 3192), 'django.utils.translation.gettext', '_', (['"""This ID corresponds to an Aether Kernel project ID."""'], {}), "('This ID corresponds to an Aether Kernel project ID.')\n", (3137, 3192), True, 'from django.utils.translation import gettext as _\n'), ((3276, 3285), 'django.utils.translation.gettext', '_', (['"""name"""'], {}), "('name')\n", (3277, 3285), True, 'from django.utils.translation import gettext as _\n'), ((3347, 3358), 'django.utils.translation.gettext', '_', (['"""active"""'], {}), "('active')\n", (3348, 3358), True, 'from django.utils.translation import gettext as _\n'), ((3466, 3494), 'django.utils.translation.gettext', '_', (['"""is the default project?"""'], {}), "('is the default project?')\n", (3467, 3494), True, 'from django.utils.translation import gettext as _\n'), ((4360, 4367), 'django.utils.translation.gettext', '_', (['"""ID"""'], {}), "('ID')\n", (4361, 4367), True, 'from django.utils.translation import gettext as _\n'), ((4410, 4419), 'django.utils.translation.gettext', '_', (['"""name"""'], {}), "('name')\n", (4411, 4419), True, 'from django.utils.translation import gettext as _\n'), ((4532, 4548), 'django.utils.translation.gettext', '_', (['"""AVRO schema"""'], {}), "('AVRO schema')\n", (4533, 4548), True, 'from django.utils.translation import gettext as _\n'), ((4689, 4704), 'django.utils.translation.gettext', '_', (['"""input JSON"""'], {}), "('input JSON')\n", (4690, 4704), True, 'from django.utils.translation import gettext as _\n'), ((4881, 4900), 'django.utils.translation.gettext', '_', (['"""mapping set ID"""'], {}), "('mapping set ID')\n", (4882, 4900), True, 'from django.utils.translation import gettext as _\n'), ((4920, 4980), 'django.utils.translation.gettext', '_', (['"""This ID corresponds to an Aether Kernel mapping set ID."""'], {}), "('This ID corresponds to an Aether Kernel mapping set ID.')\n", (4921, 4980), True, 'from django.utils.translation import gettext as _\n'), ((5072, 5084), 'django.utils.translation.gettext', '_', (['"""project"""'], {}), "('project')\n", (5073, 5084), True, 'from django.utils.translation import gettext as _\n'), ((6199, 6244), 'django.db.models.Index', 'models.Index', ([], {'fields': "['project', '-modified']"}), "(fields=['project', '-modified'])\n", (6211, 6244), False, 'from django.db import models\n'), ((6258, 6292), 'django.db.models.Index', 'models.Index', ([], {'fields': "['-modified']"}), "(fields=['-modified'])\n", (6270, 6292), False, 'from django.db import models\n'), ((8361, 8368), 'django.utils.translation.gettext', '_', (['"""ID"""'], {}), "('ID')\n", (8362, 8368), True, 'from django.utils.translation import gettext as _\n'), ((8411, 8420), 'django.utils.translation.gettext', '_', (['"""name"""'], {}), "('name')\n", (8412, 8420), True, 'from django.utils.translation import gettext as _\n'), ((8508, 8521), 'django.utils.translation.gettext', '_', (['"""pipeline"""'], {}), "('pipeline')\n", (8509, 8521), True, 'from django.utils.translation import gettext as _\n'), ((8666, 8683), 'django.utils.translation.gettext', '_', (['"""entity types"""'], {}), "('entity types')\n", (8667, 8683), True, 'from django.utils.translation import gettext as _\n'), ((9176, 9194), 'django.utils.translation.gettext', '_', (['"""mapping rules"""'], {}), "('mapping rules')\n", (9177, 9194), True, 'from django.utils.translation import gettext as _\n'), ((9921, 9940), 'django.utils.translation.gettext', '_', (['"""mapping errors"""'], {}), "('mapping errors')\n", (9922, 9940), True, 'from django.utils.translation import gettext as _\n'), ((10024, 10035), 'django.utils.translation.gettext', '_', (['"""output"""'], {}), "('output')\n", (10025, 10035), True, 'from django.utils.translation import gettext as _\n'), ((10206, 10221), 'django.utils.translation.gettext', '_', (['"""mapping ID"""'], {}), "('mapping ID')\n", (10207, 10221), True, 'from django.utils.translation import gettext as _\n'), ((10241, 10297), 'django.utils.translation.gettext', '_', (['"""This ID corresponds to an Aether Kernel mapping ID."""'], {}), "('This ID corresponds to an Aether Kernel mapping ID.')\n", (10242, 10297), True, 'from django.utils.translation import gettext as _\n'), ((10757, 10781), 'django.utils.translation.gettext', '_', (['"""Kernel artefact IDs"""'], {}), "('Kernel artefact IDs')\n", (10758, 10781), True, 'from django.utils.translation import gettext as _\n'), ((10801, 10857), 'django.utils.translation.gettext', '_', (['"""These IDs correspond to Aether Kernel artefact IDs."""'], {}), "('These IDs correspond to Aether Kernel artefact IDs.')\n", (10802, 10857), True, 'from django.utils.translation import gettext as _\n'), ((10958, 10975), 'django.utils.translation.gettext', '_', (['"""published on"""'], {}), "('published on')\n", (10959, 10975), True, 'from django.utils.translation import gettext as _\n'), ((11040, 11055), 'django.utils.translation.gettext', '_', (['"""is active?"""'], {}), "('is active?')\n", (11041, 11055), True, 'from django.utils.translation import gettext as _\n'), ((11210, 11228), 'django.utils.translation.gettext', '_', (['"""is read only?"""'], {}), "('is read only?')\n", (11211, 11228), True, 'from django.utils.translation import gettext as _\n'), ((11297, 11314), 'django.utils.translation.gettext', '_', (['"""is identity?"""'], {}), "('is identity?')\n", (11298, 11314), True, 'from django.utils.translation import gettext as _\n'), ((13145, 13191), 'django.db.models.Index', 'models.Index', ([], {'fields': "['pipeline', '-modified']"}), "(fields=['pipeline', '-modified'])\n", (13157, 13191), False, 'from django.db import models\n'), ((13205, 13239), 'django.db.models.Index', 'models.Index', ([], {'fields': "['-modified']"}), "(fields=['-modified'])\n", (13217, 13239), False, 'from django.db import models\n')]
|
'''
Useful functions for combined signal strategy
'''
import numpy as np
def get_split_w_threshold(alpha, normalization='exponential'):
"""
    Get normalized weights and thresholds from the alpha vector
    :param alpha: optimized parameter vector: rule weights followed by the buy
        and sell thresholds in its last two positions
    :param normalization: 'exponential' (softmax) or 'l1' weight normalization
    :return: weights, buy threshold, sell threshold
"""
w = []
if normalization == 'exponential':
w = np.exp(alpha[:len(alpha)-2])/np.sum(np.exp(alpha[:len(alpha)-2]))
elif normalization == 'l1':
w = alpha[:len(alpha)-2]/np.sum(np.abs(alpha[:len(alpha)-2]))
buy_threshold = alpha[len(alpha)-2]
sell_threshold = alpha[len(alpha)-1]
return w, buy_threshold, sell_threshold
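# A minimal usage sketch (made-up numbers, not from the source): three rule
# weights packed ahead of the buy and sell thresholds at the end of alpha:
#
#     w, buy_t, sell_t = get_split_w_threshold(np.array([0.1, 0.5, 0.2, 0.3, -0.3]))
#     # w is the softmax of [0.1, 0.5, 0.2] and sums to 1;
#     # buy_t == 0.3, sell_t == -0.3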
def get_combined_signal(moving_average_rules, moving_averages, w, index):
"""
    Combine buy/sell signals from moving-average crossovers into one weighted signal.
:param moving_average_rules: list with moving average rules
:param moving_averages: dict with moving averages from historical data
:param w: weights vector
    :param index: index into the moving averages
:return: final signal get from combined all signals
"""
signal_list = []
# Get signals from all moving averages rules
for short_period, long_period in moving_average_rules:
moving_average_short = moving_averages['MA_' + str(short_period)][index]
moving_average_long = moving_averages['MA_' + str(long_period)][index]
if moving_average_short < moving_average_long:
signal_list.append(-1)
else:
signal_list.append(+1)
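    # With w normalized to sum to 1 and every signal in {-1, +1}, the weighted
    # sum below lies in [-1, +1] and can be compared against the thresholds.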
final_signal = np.sum(np.array(w)*np.array(signal_list))
return final_signal
|
[
"numpy.array"
] |
[((1541, 1552), 'numpy.array', 'np.array', (['w'], {}), '(w)\n', (1549, 1552), True, 'import numpy as np\n'), ((1553, 1574), 'numpy.array', 'np.array', (['signal_list'], {}), '(signal_list)\n', (1561, 1574), True, 'import numpy as np\n')]
|
#!/usr/bin/env python2.7
from __future__ import print_function, division
import numpy as np
import scipy as sp
import matplotlib.pyplot as plt
import matplotlib.colors as clr
import dtk
import h5py
import time
import sys
def plot_mag_dust(mag_delta, mag, name,obs= False,ybins=None):
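    # 2D log-density histogram of the dust effect (dust - no dust) against the
    # dust-free magnitude; obs=True switches the x-range to observed magnitudes.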
plt.figure()
if obs:
xbins = np.linspace(10,25,100)
else:
xbins = np.linspace(-25,-10,100)
if ybins is None:
ybins = np.linspace(-1,3,100)
h,xbins,ybins = np.histogram2d(mag, mag_delta, bins=(xbins,ybins))
plt.pcolor(xbins,ybins,h.T,cmap='PuBu',norm=clr.LogNorm())
plt.xlabel(name +' no dust');plt.ylabel(name+' (dust - no dust)')
plt.grid()
def plot_clr_dust(mag_delta1, mag_delta2, mag, clr_name, mag_name,obs=False,xbins=None,ybins = None):
plt.figure()
if xbins is None:
if obs:
xbins = np.linspace(10,25,100)
else:
xbins = np.linspace(-25,-10,100)
if ybins is None:
ybins = np.linspace(-1,1,100)
h,xbins,ybins = np.histogram2d(mag, mag_delta1 - mag_delta2, bins = (xbins,ybins))
plt.pcolor(xbins,ybins,h.T,cmap='PuBu',norm=clr.LogNorm())
plt.xlabel(mag_name +' no dust');plt.ylabel(clr_name+' (dust - no dust)')
plt.grid()
def plot_dust_effect(fname):
t1 = time.time()
gal_prop = {}
hfile = h5py.File(fname,'r')
hgp = hfile['galaxyProperties']
m_star = np.log10(hgp['totalMassStellar'].value)
incl = hgp['morphology/inclination'].value
mag_gd = hgp['SDSS_filters/magnitude:SDSS_g:rest:dustAtlas'].value
mag_rd = hgp['SDSS_filters/magnitude:SDSS_r:rest:dustAtlas'].value
mag_id = hgp['SDSS_filters/magnitude:SDSS_i:rest:dustAtlas'].value
mag_gnd = hgp['SDSS_filters/magnitude:SDSS_g:rest'].value
mag_rnd = hgp['SDSS_filters/magnitude:SDSS_r:rest'].value
mag_ind = hgp['SDSS_filters/magnitude:SDSS_i:rest'].value
mag_dgd = mag_gd - mag_gnd
mag_drd = mag_rd - mag_rnd
mag_did = mag_id - mag_ind
plot_mag_dust(mag_dgd, mag_gnd, "Mag g rest", ybins=np.linspace(-.05,.05,100))
plot_clr_dust(mag_dgd, mag_drd, mag_rnd, "g-r rest", "Mag r rest", ybins=np.linspace(-.05,.05,100))
plot_clr_dust(mag_dgd, mag_drd, incl , "g-r rest", "inclination", xbins=np.linspace(0,90,100),ybins=np.linspace(-.06,.06,100))
mag_gd = hgp['SDSS_filters/magnitude:SDSS_g:observed:dustAtlas'].value
mag_rd = hgp['SDSS_filters/magnitude:SDSS_r:observed:dustAtlas'].value
mag_id = hgp['SDSS_filters/magnitude:SDSS_i:observed:dustAtlas'].value
mag_gnd = hgp['SDSS_filters/magnitude:SDSS_g:observed'].value
mag_rnd = hgp['SDSS_filters/magnitude:SDSS_r:observed'].value
mag_ind = hgp['SDSS_filters/magnitude:SDSS_i:observed'].value
mag_dgd = mag_gd - mag_gnd
mag_drd = mag_rd - mag_rnd
mag_did = mag_id - mag_ind
plot_mag_dust(mag_dgd, mag_gnd, "Mag g observed",obs=True)
# plot_mag_dust(mag_drd, mag_rnd, "Mag r observed",obs=True)
# plot_mag_dust(mag_did, mag_ind, "Mag i observed",obs=True)
plot_clr_dust(mag_dgd, mag_drd, mag_rnd, "g-r observed", "Mag r observed",obs=True)
# plot_clr_dust(mag_dgd, mag_drd, mag_rnd, "r-i observed", "Mag r observed",obs=True)
plt.show()
if __name__ == "__main__":
param = dtk.Param(sys.argv[1])
gltcs_fname = param.get_string("gltcs_fname")
steps = param.get_string_list("steps")
plot_dust_effect(gltcs_fname.replace('${step}',str(421)))
|
[
"h5py.File",
"matplotlib.pyplot.show",
"numpy.histogram2d",
"time.time",
"matplotlib.pyplot.figure",
"dtk.Param",
"matplotlib.colors.LogNorm",
"numpy.linspace",
"matplotlib.pyplot.ylabel",
"numpy.log10",
"matplotlib.pyplot.xlabel",
"matplotlib.pyplot.grid"
] |
[((290, 302), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (300, 302), True, 'import matplotlib.pyplot as plt\n'), ((485, 536), 'numpy.histogram2d', 'np.histogram2d', (['mag', 'mag_delta'], {'bins': '(xbins, ybins)'}), '(mag, mag_delta, bins=(xbins, ybins))\n', (499, 536), True, 'import numpy as np\n'), ((603, 632), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (["(name + ' no dust')"], {}), "(name + ' no dust')\n", (613, 632), True, 'import matplotlib.pyplot as plt\n'), ((632, 670), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (["(name + ' (dust - no dust)')"], {}), "(name + ' (dust - no dust)')\n", (642, 670), True, 'import matplotlib.pyplot as plt\n'), ((673, 683), 'matplotlib.pyplot.grid', 'plt.grid', ([], {}), '()\n', (681, 683), True, 'import matplotlib.pyplot as plt\n'), ((791, 803), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (801, 803), True, 'import matplotlib.pyplot as plt\n'), ((1033, 1098), 'numpy.histogram2d', 'np.histogram2d', (['mag', '(mag_delta1 - mag_delta2)'], {'bins': '(xbins, ybins)'}), '(mag, mag_delta1 - mag_delta2, bins=(xbins, ybins))\n', (1047, 1098), True, 'import numpy as np\n'), ((1167, 1200), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (["(mag_name + ' no dust')"], {}), "(mag_name + ' no dust')\n", (1177, 1200), True, 'import matplotlib.pyplot as plt\n'), ((1200, 1242), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (["(clr_name + ' (dust - no dust)')"], {}), "(clr_name + ' (dust - no dust)')\n", (1210, 1242), True, 'import matplotlib.pyplot as plt\n'), ((1245, 1255), 'matplotlib.pyplot.grid', 'plt.grid', ([], {}), '()\n', (1253, 1255), True, 'import matplotlib.pyplot as plt\n'), ((1301, 1312), 'time.time', 'time.time', ([], {}), '()\n', (1310, 1312), False, 'import time\n'), ((1343, 1364), 'h5py.File', 'h5py.File', (['fname', '"""r"""'], {}), "(fname, 'r')\n", (1352, 1364), False, 'import h5py\n'), ((1413, 1452), 'numpy.log10', 'np.log10', (["hgp['totalMassStellar'].value"], {}), "(hgp['totalMassStellar'].value)\n", (1421, 1452), True, 'import numpy as np\n'), ((3202, 3212), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (3210, 3212), True, 'import matplotlib.pyplot as plt\n'), ((3259, 3281), 'dtk.Param', 'dtk.Param', (['sys.argv[1]'], {}), '(sys.argv[1])\n', (3268, 3281), False, 'import dtk\n'), ((331, 355), 'numpy.linspace', 'np.linspace', (['(10)', '(25)', '(100)'], {}), '(10, 25, 100)\n', (342, 355), True, 'import numpy as np\n'), ((380, 406), 'numpy.linspace', 'np.linspace', (['(-25)', '(-10)', '(100)'], {}), '(-25, -10, 100)\n', (391, 406), True, 'import numpy as np\n'), ((443, 466), 'numpy.linspace', 'np.linspace', (['(-1)', '(3)', '(100)'], {}), '(-1, 3, 100)\n', (454, 466), True, 'import numpy as np\n'), ((982, 1005), 'numpy.linspace', 'np.linspace', (['(-1)', '(1)', '(100)'], {}), '(-1, 1, 100)\n', (993, 1005), True, 'import numpy as np\n'), ((584, 597), 'matplotlib.colors.LogNorm', 'clr.LogNorm', ([], {}), '()\n', (595, 597), True, 'import matplotlib.colors as clr\n'), ((862, 886), 'numpy.linspace', 'np.linspace', (['(10)', '(25)', '(100)'], {}), '(10, 25, 100)\n', (873, 886), True, 'import numpy as np\n'), ((919, 945), 'numpy.linspace', 'np.linspace', (['(-25)', '(-10)', '(100)'], {}), '(-25, -10, 100)\n', (930, 945), True, 'import numpy as np\n'), ((1148, 1161), 'matplotlib.colors.LogNorm', 'clr.LogNorm', ([], {}), '()\n', (1159, 1161), True, 'import matplotlib.colors as clr\n'), ((2048, 2077), 'numpy.linspace', 'np.linspace', (['(-0.05)', '(0.05)', '(100)'], {}), '(-0.05, 0.05, 100)\n', (2059, 2077), True, 'import numpy as np\n'), ((2152, 2181), 'numpy.linspace', 'np.linspace', (['(-0.05)', '(0.05)', '(100)'], {}), '(-0.05, 0.05, 100)\n', (2163, 2181), True, 'import numpy as np\n'), ((2255, 2278), 'numpy.linspace', 'np.linspace', (['(0)', '(90)', '(100)'], {}), '(0, 90, 100)\n', (2266, 2278), True, 'import numpy as np\n'), ((2283, 2312), 'numpy.linspace', 'np.linspace', (['(-0.06)', '(0.06)', '(100)'], {}), '(-0.06, 0.06, 100)\n', (2294, 2312), True, 'import numpy as np\n')]
|
import pathlib
import pygraphviz
def task_imports():
"""find imports from a python module"""
return {
'file_dep': ['projects/requests/requests/models.py'],
'targets': ['requests.models.deps'],
'actions': ['python -m import_deps %(dependencies)s > %(targets)s'],
'clean': True,
}
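# Pipeline overview: task_imports writes one imported-module name per line,
# module_to_dot turns that listing into a graphviz graph, and task_draw
# renders the graph to PNG with the dot CLI.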
def module_to_dot(dependencies, targets):
graph = pygraphviz.AGraph(strict=False, directed=True)
graph.node_attr['color'] = 'lightblue2'
graph.node_attr['style'] = 'filled'
for dep in dependencies:
filepath = pathlib.Path(dep)
source = filepath.stem
with open(filepath) as fh:
for line in fh:
sink = line.strip()
if sink:
graph.add_edge(source, sink)
graph.write(targets[0])
def task_dot():
"""generate a graphviz's dot graph from module imports"""
return {
'file_dep': ['requests.models.deps'],
'targets': ['requests.models.dot'],
'actions': [module_to_dot],
'clean': True,
}
def task_draw():
"""generate image from a dot file"""
return {
'file_dep': ['requests.models.dot'],
'targets': ['requests.models.png'],
'actions': ['dot -Tpng %(dependencies)s -o %(targets)s'],
'clean': True,
}
|
[
"pygraphviz.AGraph",
"pathlib.Path"
] |
[((380, 426), 'pygraphviz.AGraph', 'pygraphviz.AGraph', ([], {'strict': '(False)', 'directed': '(True)'}), '(strict=False, directed=True)\n', (397, 426), False, 'import pygraphviz\n'), ((559, 576), 'pathlib.Path', 'pathlib.Path', (['dep'], {}), '(dep)\n', (571, 576), False, 'import pathlib\n')]
|
import torch
import torch.nn as nn
from model_util import conv_block_3d_feature_leaner
class featureLearner(nn.Module):
def __init__(self,channels):
super(featureLearner, self).__init__()
self.in_dim = 1
self.mid1_dim = channels[0]
self.mid2_dim = channels[1]
self.mid3_dim = channels[2]
self.mid4_dim = channels[3]
self.mid5_dim = channels[4]
self.out_dim = channels[5]
#act_fn = nn.LeakyReLU()
act_fn = nn.ReLU()
print("\n------Initiating Network------\n")
self.cnn1 = conv_block_3d_feature_leaner(self.in_dim, self.mid1_dim, act_fn, 1)
self.cnn2 = conv_block_3d_feature_leaner(self.mid1_dim, self.mid2_dim, act_fn, 1)
self.cnn3 = conv_block_3d_feature_leaner(self.mid2_dim, self.mid3_dim, act_fn, 2)
self.cnn4 = conv_block_3d_feature_leaner(self.mid3_dim, self.mid4_dim, act_fn, 2)
self.cnn5 = conv_block_3d_feature_leaner(self.mid4_dim, self.mid5_dim, act_fn, 4)
self.cnn6 = conv_block_3d_feature_leaner(self.mid5_dim, self.out_dim, act_fn, 8, True)
self.reset_params()
@staticmethod
def weight_init(m):
if (isinstance(m, nn.Conv3d)):
nn.init.kaiming_normal(m.weight)
nn.init.constant(m.bias, 0)
def reset_params(self):
for i, m in enumerate(self.modules()):
self.weight_init(m)
def forward(self, x):
x = self.cnn1(x)
x = self.cnn2(x)
x = self.cnn3(x)
x = self.cnn4(x)
x = self.cnn5(x)
out = self.cnn6(x)
#get_gpu_info(2)
return out
def save(self,epoch):
torch.save(self.state_dict(),"featureLearner"+'_'+str(epoch)+'.pt')
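# A minimal usage sketch (made-up channel sizes and input shape, untested here):
#
#     net = featureLearner(channels=[32, 32, 16, 16, 8, 8])
#     out = net(torch.randn(1, 1, 64, 64, 64))  # (batch, in_dim=1, D, H, W)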
class featureLearner_old(nn.Module):
def __init__(self):
super(featureLearner_old, self).__init__()
self.in_dim = 1
self.mid1_dim = 32
self.mid2_dim = 32
self.mid3_dim = 16
self.out_dim = 8
act_fn = nn.ReLU()
print("\n------Initiating Network------\n")
self.cnn1 = conv_block_3d_feature_leaner(self.in_dim, self.mid1_dim, act_fn, 1)
self.cnn2 = conv_block_3d_feature_leaner(self.mid1_dim, self.mid2_dim, act_fn, 2)
self.cnn3 = conv_block_3d_feature_leaner(self.mid2_dim, self.mid3_dim, act_fn, 4)
self.cnn4 = conv_block_3d_feature_leaner(self.mid3_dim, self.out_dim, act_fn, 8, True)
self.reset_params()
@staticmethod
def weight_init(m):
if (isinstance(m, nn.Conv3d)):
            # weights get He (kaiming) initialization; biases are zeroed below
nn.init.kaiming_normal(m.weight)
nn.init.constant(m.bias, 0)
def reset_params(self):
for i, m in enumerate(self.modules()):
self.weight_init(m)
def forward(self, x):
x = self.cnn1(x)
x = self.cnn2(x)
x = self.cnn3(x)
out = self.cnn4(x)
return out
def save(self,epoch):
torch.save(self.state_dict(),"featureLearner"+'_'+str(epoch)+'.pt')
|
[
"torch.nn.init.constant",
"model_util.conv_block_3d_feature_leaner",
"torch.nn.ReLU",
"torch.nn.init.kaiming_normal"
] |
[((492, 501), 'torch.nn.ReLU', 'nn.ReLU', ([], {}), '()\n', (499, 501), True, 'import torch.nn as nn\n'), ((576, 643), 'model_util.conv_block_3d_feature_leaner', 'conv_block_3d_feature_leaner', (['self.in_dim', 'self.mid1_dim', 'act_fn', '(1)'], {}), '(self.in_dim, self.mid1_dim, act_fn, 1)\n', (604, 643), False, 'from model_util import conv_block_3d_feature_leaner\n'), ((664, 733), 'model_util.conv_block_3d_feature_leaner', 'conv_block_3d_feature_leaner', (['self.mid1_dim', 'self.mid2_dim', 'act_fn', '(1)'], {}), '(self.mid1_dim, self.mid2_dim, act_fn, 1)\n', (692, 733), False, 'from model_util import conv_block_3d_feature_leaner\n'), ((754, 823), 'model_util.conv_block_3d_feature_leaner', 'conv_block_3d_feature_leaner', (['self.mid2_dim', 'self.mid3_dim', 'act_fn', '(2)'], {}), '(self.mid2_dim, self.mid3_dim, act_fn, 2)\n', (782, 823), False, 'from model_util import conv_block_3d_feature_leaner\n'), ((844, 913), 'model_util.conv_block_3d_feature_leaner', 'conv_block_3d_feature_leaner', (['self.mid3_dim', 'self.mid4_dim', 'act_fn', '(2)'], {}), '(self.mid3_dim, self.mid4_dim, act_fn, 2)\n', (872, 913), False, 'from model_util import conv_block_3d_feature_leaner\n'), ((934, 1003), 'model_util.conv_block_3d_feature_leaner', 'conv_block_3d_feature_leaner', (['self.mid4_dim', 'self.mid5_dim', 'act_fn', '(4)'], {}), '(self.mid4_dim, self.mid5_dim, act_fn, 4)\n', (962, 1003), False, 'from model_util import conv_block_3d_feature_leaner\n'), ((1024, 1098), 'model_util.conv_block_3d_feature_leaner', 'conv_block_3d_feature_leaner', (['self.mid5_dim', 'self.out_dim', 'act_fn', '(8)', '(True)'], {}), '(self.mid5_dim, self.out_dim, act_fn, 8, True)\n', (1052, 1098), False, 'from model_util import conv_block_3d_feature_leaner\n'), ((1991, 2000), 'torch.nn.ReLU', 'nn.ReLU', ([], {}), '()\n', (1998, 2000), True, 'import torch.nn as nn\n'), ((2075, 2142), 'model_util.conv_block_3d_feature_leaner', 'conv_block_3d_feature_leaner', (['self.in_dim', 'self.mid1_dim', 'act_fn', '(1)'], {}), '(self.in_dim, self.mid1_dim, act_fn, 1)\n', (2103, 2142), False, 'from model_util import conv_block_3d_feature_leaner\n'), ((2163, 2232), 'model_util.conv_block_3d_feature_leaner', 'conv_block_3d_feature_leaner', (['self.mid1_dim', 'self.mid2_dim', 'act_fn', '(2)'], {}), '(self.mid1_dim, self.mid2_dim, act_fn, 2)\n', (2191, 2232), False, 'from model_util import conv_block_3d_feature_leaner\n'), ((2253, 2322), 'model_util.conv_block_3d_feature_leaner', 'conv_block_3d_feature_leaner', (['self.mid2_dim', 'self.mid3_dim', 'act_fn', '(4)'], {}), '(self.mid2_dim, self.mid3_dim, act_fn, 4)\n', (2281, 2322), False, 'from model_util import conv_block_3d_feature_leaner\n'), ((2343, 2417), 'model_util.conv_block_3d_feature_leaner', 'conv_block_3d_feature_leaner', (['self.mid3_dim', 'self.out_dim', 'act_fn', '(8)', '(True)'], {}), '(self.mid3_dim, self.out_dim, act_fn, 8, True)\n', (2371, 2417), False, 'from model_util import conv_block_3d_feature_leaner\n'), ((1221, 1253), 'torch.nn.init.kaiming_normal', 'nn.init.kaiming_normal', (['m.weight'], {}), '(m.weight)\n', (1243, 1253), True, 'import torch.nn as nn\n'), ((1266, 1293), 'torch.nn.init.constant', 'nn.init.constant', (['m.bias', '(0)'], {}), '(m.bias, 0)\n', (1282, 1293), True, 'import torch.nn as nn\n'), ((2596, 2628), 'torch.nn.init.kaiming_normal', 'nn.init.kaiming_normal', (['m.weight'], {}), '(m.weight)\n', (2618, 2628), True, 'import torch.nn as nn\n'), ((2641, 2668), 'torch.nn.init.constant', 'nn.init.constant', (['m.bias', '(0)'], {}), '(m.bias, 0)\n', (2657, 2668), True, 'import torch.nn as nn\n')]
|
# Copyright 2018 The TensorFlow Constrained Optimization Authors. All Rights
# Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
# ==============================================================================
"""TFCO estimator head for use with TensorFlow 1.x and 2.x.
The classes `HeadV1` and `HeadV2` allow you to create custom heads that can be
used with a `tf.Estimator` to solve a constrained minimization problem
in TF 1.x and 2.x respectively. You would need to provide an existing `_Head`
for TF 1.x and a `tf.estimator.Head` for TF 2.x and a function specifying
a constrained minimization problem, and the base head's minimization ops
will be accordingly modified.
Example
=======
Consider a binary classification problem, where we wish to train a
LinearEstimator by minimizing error rate subject to a recall constraint. For
this, we will first create a function that takes "logits", "labels",
"features", and an (optional) "weight_column", and returns the
`RateMinimizationProblem`.
```python
def problem_fn(logits, labels, features, weight_column=None):
context = tfco.rate_context(predictions=logits, labels=labels)
  objective = tfco.error_rate(context)
  problem = tfco.RateMinimizationProblem(
      objective=objective,
constraints=[tfco.recall(context) >= 0.9])
return problem
```
In TF 2.x, we will then create a `tfco.HeadV2` instance from a base
`BinaryClassHead` instance and the `problem_fn` defined above.
```python
base_head = tf.estimator.BinaryClassHead()
custom_head = tfco.HeadV2(base_head, problem_fn)
```
The final step is to create a `LinearEstimator` using the custom head.
```python
estimator = tf.estimator.LinearEstimator(head=custom_head, ...)
```
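The estimator can then be trained as usual; the head wraps the estimator's
optimizer in a `LagrangianOptimizerV2` if it is not already a TFCO constrained
optimizer. A minimal sketch (`input_fn` is a hypothetical input pipeline, not
defined here):
```python
estimator.train(input_fn=input_fn, steps=1000)
```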
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
from tensorflow_constrained_optimization.python.train import constrained_optimizer
from tensorflow_constrained_optimization.python.train import lagrangian_optimizer
class _WrapperHead(tf.estimator.Head):
"""Base class for `HeadV1` and `HeadV2`.
This class is a wrapper around an existing base head, which can be either a
V1 `_Head` instance or a V2 `Head` instance. While this class implements the
`tf.estimator.Head` interface provided in TensorFlow 2.x, it can be used to
wrap both a V1 and V2 head instance, as they have similar signatures. Some of
the functions implemented may be relevant for only a V1 or V2 head.
"""
def __init__(self, base_head, problem_fn, weight_column=None):
"""Initializes a `_WrapperHead` instance that wraps the `base_head`.
Args:
base_head: A V1 `_Head` instance or a V2 `tf.estimator.Head` instance.
problem_fn: A function that takes logits, labels and features as input,
and returns a `ConstrainedMinimizationProblem` instance.
weight_column: Optional string or a `NumericColumn` created by
`tf.feature_column.numeric_column` defining feature column representing
weights. It is used to re-weight examples during training. This may be
      an optional argument to the base_head, but nevertheless needs to be passed
again here as `weight_column` is not usually publicly accessible in the
base_head.
"""
self._base_head = base_head
self._problem_fn = problem_fn
self._weight_column = weight_column
@property
def name(self):
"""The name of this head.
Returns:
A string.
"""
return self._base_head.name
@property
def logits_dimension(self):
"""Size of the last dimension of the logits `Tensor`.
Often is the number of classes, labels, or real values to be predicted.
Typically, logits is of shape `[batch_size, logits_dimension]`.
Returns:
The expected size of the `logits` tensor.
"""
return self._base_head.logits_dimension
def create_loss(self, features, mode, logits, labels):
"""Returns a loss Tensor from provided logits.
The returned loss is the same as that of the base head and is meant
solely for book-keeping purposes. We do not return the custom loss used to
create the train_op for constrained optimization, as this loss makes use of
    auxiliary variables whose values may not be set properly in EVAL mode. This
function is relevant only for a V1 estimator.
Args:
features: Input `dict` of `Tensor` objects.
mode: Estimator's `ModeKeys`.
logits: logits `Tensor` to be used for loss construction.
labels: Labels `Tensor`, or `dict` of same.
Returns:
`LossSpec`.
"""
    return self._base_head.create_loss(features, mode, logits, labels)
@property
def loss_reduction(self):
"""One of `tf.losses.Reduction`.
Returns the same value as the base head, and does not reflect how TFCO
aggregates its losses internally. This function is relevant only for a V2
estimator.
Returns:
The type of loss reduction used in the head.
"""
# TODO: Should we return SUM_OVER_BATCH_SIZE, as this better
# represents TFCO's aggregation strategy, and may be used for rescaling
# purposes during distributed training?
return self._base_head.loss_reduction
def predictions(self, logits, keys=None):
"""Returns a `dict` of predictions from provided logits.
This function is relevant only for a V2 estimator.
Args:
logits: Logits `Tensor` to be used for prediction construction.
keys: A list of `string` for prediction keys. Defaults to `None`, meaning
if not specified, predictions will be created for all the pre-defined
valid keys in the head.
Returns:
A `dict` of predicted `Tensor` keyed by prediction name.
"""
return self._base_head.predictions(logits, keys)
def metrics(self, regularization_losses=None):
"""Returns a `dict` of metric objects.
This function is relevant only for a V2 estimator.
Args:
regularization_losses: A list of additional scalar losses to be added to
the training loss, such as regularization losses.
Returns:
A `dict` of metrics keyed by string name. The value is an instance of
`Metric` class.
"""
return self._base_head.metrics(regularization_losses)
def update_metrics(self,
eval_metrics,
features,
logits,
labels,
mode=None,
regularization_losses=None):
"""Updates metric objects and returns a `dict` of the updated metrics.
This function is relevant only for a V2 estimator.
Args:
eval_metrics: A `dict` of metrics to be updated.
features: Input `dict` mapping string feature names to `Tensor` or
`SparseTensor` objects containing the values for that feature in a
minibatch. Often to be used to fetch example-weight tensor.
logits: logits `Tensor` to be used for metrics update.
labels: Labels `Tensor`, or `dict` mapping string label names to `Tensor`
objects of the label values.
mode: Estimator's `ModeKeys`. In most cases, this arg is not used and can
be removed in the method implementation.
regularization_losses: A list of additional scalar losses to be added to
the training and evaluation loss, such as regularization losses. Note
        that the `mode` arg is not used in the `tf.estimator.Head`. If the
update of the metrics doesn't rely on `mode`, it can be safely ignored
in the method signature.
Returns:
A `dict` of updated metrics keyed by name. The value is an instance of
`Metric` class.
"""
return self._base_head.update_metrics(
eval_metrics, features, logits, labels, mode, regularization_losses)
def loss(self,
labels,
logits,
features=None,
mode=None,
regularization_losses=None):
"""Returns a loss `Tensor` from provided arguments.
The returned loss is the same as that of the base head and is meant
solely for book-keeping purposes. We do not return the custom loss used to
create the train_op for constrained optimization, as this loss makes use of
    auxiliary variables whose values may not be set properly in EVAL mode. This
function is relevant for a V2 estimator.
Args:
labels: Labels `Tensor`, or `dict` mapping string label names to `Tensor`
objects of the label values.
logits: Logits `Tensor` to be used for loss construction.
features: Input `dict` mapping string feature names to `Tensor` or
`SparseTensor` objects containing the values for that feature in a
minibatch. Often to be used to fetch example-weight tensor.
mode: Estimator's `ModeKeys`. To be used in case loss calculation is
different in Train and Eval mode.
regularization_losses: A list of additional scalar losses to be added to
the training loss, such as regularization losses.
Returns:
A scalar `Tensor` representing a dummy loss.
"""
return self._base_head.loss(
labels, logits, features, mode, regularization_losses)
def _create_no_op_estimator_spec(self, features, mode, logits, labels):
"""Returns `EstimatorSpec` for the base head with no `train_op`."""
return self._base_head.create_estimator_spec(
features=features,
mode=mode,
logits=logits,
labels=labels,
train_op_fn=lambda loss: tf.constant(0),
regularization_losses=None)
def _create_problem_and_update_optimizer(
self, logits, labels, features, optimizer):
"""Returns `ConstrainedMinimizationProblem` created using `_problem_fn`."""
problem = self._problem_fn(
logits, labels, features, weight_column=self._weight_column)
# Set the number of constraints in the optimizer. This is needed if
# `num_constraints` hasn't already been specified to the optimizer, and
# will also check that we aren't changing the number of constraints from
# a previously-specified value.
optimizer.num_constraints = problem.num_constraints
return problem
def _append_update_ops(self, train_op_fn, update_ops):
"""Returns `train_op` with control dependency on `update_ops`."""
# We handle the case of update_ops=None separately because calling
# tf.control_dependencies(None) in graph mode clears existing control
# dependencies.
if update_ops is None:
train_op = train_op_fn()
else:
with tf.control_dependencies(update_ops):
train_op = train_op_fn()
return train_op
class HeadV1(_WrapperHead):
"""A wrapper around an existing V1 `_Head` for use with TensorFlow 1.x.
This head modifies the base head's train_op to minimize a
`ConstrainedMinimizationProblem` specified via `problem_fn`, while
preserving the base head's other functionality. If the estimator's optimizer
is an instance of `ConstrainedOptimizerV1`, it uses the same for minimization.
If not, it creates a `ConstrainedOptimizerV1` instance from the estimator's
optimizer.
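  For example (a sketch; `binary_classification_head` comes from
  `tf.contrib.estimator` in TF 1.x and is used purely for illustration):
    base_head = tf.contrib.estimator.binary_classification_head()
    custom_head = tfco.HeadV1(base_head, problem_fn)
    estimator = tf.estimator.LinearEstimator(head=custom_head, ...)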
"""
def __init__(self, base_head, problem_fn, weight_column=None):
"""Initializes a `HeadV1` instance that wraps the `base_head`.
Args:
base_head: A `_Head` instance.
problem_fn: A function that takes logits, labels and features as input,
and returns a `ConstrainedMinimizationProblem` instance.
weight_column: Optional string or a `NumericColumn` created by
`tf.feature_column.numeric_column` defining feature column representing
weights. It is used to re-weight examples during training. This may be
        an optional argument to the base_head, but nevertheless needs to be passed
again here as `weight_column` is not usually publicly accessible in the
base_head.
Raises:
ValueError: If a `tf.estimator.Head` instance is passed as the
`base_head`.
"""
if isinstance(base_head, tf.estimator.Head):
raise ValueError("You cannot pass a `tf.estimator.Head` instance as the "
"`base_head` to `HeadV1`.")
super(HeadV1, self).__init__(base_head, problem_fn, weight_column)
def create_estimator_spec(self,
features,
mode,
logits,
labels=None,
optimizer=None,
train_op_fn=None,
regularization_losses=None):
"""Returns `EstimatorSpec` that a model_fn can return.
Args:
features: Input `dict` of `Tensor` or `SparseTensor` objects.
mode: Estimator's `ModeKeys`.
logits: logits `Tensor` to be used by the head.
labels: Optional labels `Tensor`, or `dict` mapping string label names to
`Tensor` objects of the label values.
optimizer: A `ConstrainedOptimizerV1` or a `tf.compat.v1.train.Optimizer`.
instance. If a `tf.compat.v1.train.Optimizer` is provided, the head
creates a `ConstrainedOptimizerV1` that wraps it. This is an optional
argument in the `_Head` base class, but needs to be passed here.
train_op_fn: This argument is ignored and can be left unspecified.
regularization_losses: This argument is ignored and can be left
unspecified.
Returns:
`EstimatorSpec`.
"""
estimator_spec = self._create_no_op_estimator_spec(
features=features,
mode=mode,
logits=logits,
labels=labels)
# When mode is PREDICT or EVAL, no modification needed to the base head.
if (mode == tf.estimator.ModeKeys.PREDICT) or (
mode == tf.estimator.ModeKeys.EVAL):
return estimator_spec
# When mode is TRAIN, replace train_op in estimator_spec.
if mode == tf.estimator.ModeKeys.TRAIN:
if optimizer is None:
raise ValueError("You must provide an optimizer to the estimator.")
# TODO: Add support for passing train_op_fn.
# If the optimizer is not a `ConstrainedOptimizerV1` instance, then
      # create a `LagrangianOptimizerV1` that wraps the base head's optimizer.
if not isinstance(
optimizer, constrained_optimizer.ConstrainedOptimizerV1):
optimizer = lagrangian_optimizer.LagrangianOptimizerV1(optimizer)
problem = self._create_problem_and_update_optimizer(
logits, labels, features, optimizer)
# Create `train_op` with a control dependency on `UPDATE_OPS`.
update_ops = tf.compat.v1.get_collection(
tf.compat.v1.GraphKeys.UPDATE_OPS)
global_step = tf.compat.v1.train.get_global_step()
train_op = self._append_update_ops(
lambda: optimizer.minimize(problem, global_step=global_step),
update_ops)
return estimator_spec._replace(train_op=train_op)
raise ValueError("mode={} not recognized".format(mode))
class HeadV2(_WrapperHead):
"""A wrapper around an existing V2 `Head` for use with TensorFlow 2.x.
This head modifies the base head's train_op to minimize a
`ConstrainedMinimizationProblem` specified via `problem_fn`, while
preserving the base head's other functionality. If the estimator's optimizer
is an instance of `ConstrainedOptimizerV2`, it uses the same for minimization.
If not, it creates a `ConstrainedOptimizerV2` instance from the estimator's
optimizer.
"""
def __init__(self, base_head, problem_fn, weight_column=None):
"""Initializes a `HeadV2` instance that wraps the `base_head`.
Args:
base_head: A `tf.estimator.Head` instance.
problem_fn: A function that takes logits, labels and features as input,
and returns a `ConstrainedMinimizationProblem` instance.
weight_column: Optional string or a `NumericColumn` created by
`tf.feature_column.numeric_column` defining feature column representing
weights. It is used to re-weight examples during training. This may be
        an optional argument to the base_head, but nevertheless needs to be passed
again here as `weight_column` is not usually publicly accessible in the
base_head.
Raises:
ValueError: If a `tf.estimator.Head` instance is not passed as
the `base_head`.
"""
if not isinstance(base_head, tf.estimator.Head):
raise ValueError("You must pass a `tf.estimator.Head` instance as "
"`base_head` to `HeadV2`.")
super(HeadV2, self).__init__(base_head, problem_fn, weight_column)
def create_estimator_spec(self,
features,
mode,
logits,
labels=None,
optimizer=None,
trainable_variables=None,
train_op_fn=None,
update_ops=None,
regularization_losses=None):
"""Returns `EstimatorSpec` for constrained optimization.
The `EstimatorSpec` is the same as that of the base head with the
`train_op` alone replaced with one for minimizing the constrained
minimization problem specified by self._problem_fn.
Args:
features: Input `dict` mapping string feature names to `Tensor` or
`SparseTensor` objects containing the values for that feature in a
minibatch. Often to be used to fetch example-weight tensor.
mode: Estimator's `ModeKeys`.
logits: Logits `Tensor` to be used by the head.
labels: Optional labels `Tensor`, or `dict` mapping string label names to
`Tensor` objects of the label values.
optimizer: A `ConstrainedOptimizerV2` or a `tf.keras.optimizers.Optimizer`
instance. If a `tf.keras.optimizers.Optimizer` is provided, the head
creates a `ConstrainedOptimizerV2` that wraps it. This is an optional
argument in the `tf.estimator.Head` base class, but needs to be passed
here.
trainable_variables: A list or tuple of `Variable` objects to update to
solve the constrained minimization problem. In Tensorflow 1.x, by
        default this is the list of variables collected in the graph under the
key `GraphKeys.TRAINABLE_VARIABLES`. As Tensorflow 2.x doesn't have
collections and GraphKeys, trainable_variables needs to be passed
explicitly here.
train_op_fn: This argument is ignored and can be left unspecified.
update_ops: Optional list or tuple of update ops to be run at training
time. For example, layers such as BatchNormalization create mean and
variance update ops that need to be run at training time. In Tensorflow
1.x, these are thrown into an UPDATE_OPS collection. As Tensorflow 2.x
doesn't have collections, update_ops needs to be passed explicitly here.
regularization_losses: This argument is ignored and can be left
unspecified.
Returns:
`EstimatorSpec`.
Raises:
ValueError: If mode is not recognized or optimizer is not specified in
TRAIN mode.
"""
estimator_spec = self._create_no_op_estimator_spec(
features=features,
mode=mode,
logits=logits,
labels=labels)
# When mode is PREDICT or EVAL, no modification needed to the base head.
if (mode == tf.estimator.ModeKeys.PREDICT) or (
mode == tf.estimator.ModeKeys.EVAL):
return estimator_spec
# When mode is TRAIN, replace train_op in estimator_spec.
if mode == tf.estimator.ModeKeys.TRAIN:
if optimizer is None:
raise ValueError("You must provide an optimizer to the estimator.")
# TODO: Add support for passing train_op_fn.
# If the optimizer is not a `ConstrainedOptimizerV2` instance, then
      # create a `LagrangianOptimizerV2` that wraps the base head's optimizer.
if not isinstance(
optimizer, constrained_optimizer.ConstrainedOptimizerV2):
iterations = optimizer.iterations
optimizer = lagrangian_optimizer.LagrangianOptimizerV2(optimizer)
# Pass the iterations member (which contains the global step) in the
# base head's optimizer to the newly created one.
optimizer.iterations = iterations
problem = self._create_problem_and_update_optimizer(
logits, labels, features, optimizer)
# Create `train_op` with a control dependency on the `update_ops`.
var_list = trainable_variables + list(
problem.trainable_variables) + optimizer.trainable_variables()
train_op = self._append_update_ops(
lambda: tf.group(optimizer.get_updates(problem, var_list)),
update_ops)
return estimator_spec._replace(train_op=train_op)
raise ValueError("mode={} not recognized".format(mode))
|
[
"tensorflow.control_dependencies",
"tensorflow_constrained_optimization.python.train.lagrangian_optimizer.LagrangianOptimizerV1",
"tensorflow.compat.v1.train.get_global_step",
"tensorflow.constant",
"tensorflow.compat.v1.get_collection",
"tensorflow_constrained_optimization.python.train.lagrangian_optimizer.LagrangianOptimizerV2"
] |
[((15093, 15155), 'tensorflow.compat.v1.get_collection', 'tf.compat.v1.get_collection', (['tf.compat.v1.GraphKeys.UPDATE_OPS'], {}), '(tf.compat.v1.GraphKeys.UPDATE_OPS)\n', (15120, 15155), True, 'import tensorflow as tf\n'), ((15187, 15223), 'tensorflow.compat.v1.train.get_global_step', 'tf.compat.v1.train.get_global_step', ([], {}), '()\n', (15221, 15223), True, 'import tensorflow as tf\n'), ((11088, 11123), 'tensorflow.control_dependencies', 'tf.control_dependencies', (['update_ops'], {}), '(update_ops)\n', (11111, 11123), True, 'import tensorflow as tf\n'), ((14843, 14896), 'tensorflow_constrained_optimization.python.train.lagrangian_optimizer.LagrangianOptimizerV1', 'lagrangian_optimizer.LagrangianOptimizerV1', (['optimizer'], {}), '(optimizer)\n', (14885, 14896), False, 'from tensorflow_constrained_optimization.python.train import lagrangian_optimizer\n'), ((20574, 20627), 'tensorflow_constrained_optimization.python.train.lagrangian_optimizer.LagrangianOptimizerV2', 'lagrangian_optimizer.LagrangianOptimizerV2', (['optimizer'], {}), '(optimizer)\n', (20616, 20627), False, 'from tensorflow_constrained_optimization.python.train import lagrangian_optimizer\n'), ((10052, 10066), 'tensorflow.constant', 'tf.constant', (['(0)'], {}), '(0)\n', (10063, 10066), True, 'import tensorflow as tf\n')]
|
import torch
import argparse
from sdf.utils import *
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('path', type=str)
parser.add_argument('--test', action='store_true', help="test mode")
parser.add_argument('--workspace', type=str, default='workspace')
parser.add_argument('--seed', type=int, default=0)
parser.add_argument('--lr', type=float, default=1e-4, help="initial learning rate")
parser.add_argument('--fp16', action='store_true', help="use amp mixed precision training")
parser.add_argument('--ff', action='store_true', help="use fully-fused MLP")
parser.add_argument('--tcnn', action='store_true', help="use TCNN backend")
opt = parser.parse_args()
print(opt)
seed_everything(opt.seed)
if opt.ff:
assert opt.fp16, "fully-fused mode must be used with fp16 mode"
from sdf.netowrk_ff import SDFNetwork
elif opt.tcnn:
assert opt.fp16, "tcnn mode must be used with fp16 mode"
from sdf.network_tcnn import SDFNetwork
else:
from sdf.netowrk import SDFNetwork
model = SDFNetwork(encoding="hashgrid")
print(model)
if opt.test:
trainer = Trainer('ngp', model, workspace=opt.workspace, fp16=opt.fp16, use_checkpoint='best', eval_interval=1)
trainer.save_mesh(os.path.join(opt.workspace, 'results', 'output.ply'), 1024)
else:
from sdf.provider import SDFDataset
from loss import mape_loss
train_dataset = SDFDataset(opt.path, size=100, num_samples=2**18)
train_loader = torch.utils.data.DataLoader(train_dataset, batch_size=1, shuffle=True)
valid_dataset = SDFDataset(opt.path, size=1, num_samples=2**18) # just a dummy
valid_loader = torch.utils.data.DataLoader(valid_dataset, batch_size=1)
criterion = mape_loss # torch.nn.L1Loss()
optimizer = lambda model: torch.optim.Adam([
{'name': 'encoding', 'params': model.encoder.parameters()},
{'name': 'net', 'params': model.backbone.parameters(), 'weight_decay': 1e-6},
], lr=opt.lr, betas=(0.9, 0.99), eps=1e-15)
scheduler = lambda optimizer: optim.lr_scheduler.StepLR(optimizer, step_size=10, gamma=0.1)
trainer = Trainer('ngp', model, workspace=opt.workspace, optimizer=optimizer, criterion=criterion, ema_decay=0.95, fp16=opt.fp16, lr_scheduler=scheduler, use_checkpoint='latest', eval_interval=1)
trainer.train(train_loader, valid_loader, 20)
# also test
trainer.save_mesh(os.path.join(opt.workspace, 'results', 'output.ply'), 1024)
|
[
"sdf.provider.SDFDataset",
"sdf.netowrk.SDFNetwork",
"argparse.ArgumentParser",
"torch.utils.data.DataLoader"
] |
[((96, 121), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (119, 121), False, 'import argparse\n'), ((1124, 1155), 'sdf.netowrk.SDFNetwork', 'SDFNetwork', ([], {'encoding': '"""hashgrid"""'}), "(encoding='hashgrid')\n", (1134, 1155), False, 'from sdf.netowrk import SDFNetwork\n'), ((1512, 1563), 'sdf.provider.SDFDataset', 'SDFDataset', (['opt.path'], {'size': '(100)', 'num_samples': '(2 ** 18)'}), '(opt.path, size=100, num_samples=2 ** 18)\n', (1522, 1563), False, 'from sdf.provider import SDFDataset\n'), ((1585, 1655), 'torch.utils.data.DataLoader', 'torch.utils.data.DataLoader', (['train_dataset'], {'batch_size': '(1)', 'shuffle': '(True)'}), '(train_dataset, batch_size=1, shuffle=True)\n', (1612, 1655), False, 'import torch\n'), ((1681, 1730), 'sdf.provider.SDFDataset', 'SDFDataset', (['opt.path'], {'size': '(1)', 'num_samples': '(2 ** 18)'}), '(opt.path, size=1, num_samples=2 ** 18)\n', (1691, 1730), False, 'from sdf.provider import SDFDataset\n'), ((1767, 1823), 'torch.utils.data.DataLoader', 'torch.utils.data.DataLoader', (['valid_dataset'], {'batch_size': '(1)'}), '(valid_dataset, batch_size=1)\n', (1794, 1823), False, 'import torch\n')]
|
"""
Scatter Plot with Minimap
-------------------------
This example shows how to create a miniature version of a plot
such that creating a selection in the miniature version
adjusts the axis limits in another, more detailed view.
"""
# category: scatter plots
import altair as alt
from vega_datasets import data
source = data.seattle_weather()
zoom = alt.selection_interval(encodings=["x", "y"])
minimap = (
alt.Chart(source)
.mark_point()
.add_selection(zoom)
.encode(
x="date:T",
y="temp_max:Q",
color=alt.condition(zoom, "weather", alt.value("lightgray")),
)
.properties(
width=200,
height=200,
title="Minimap -- click and drag to zoom in the detail view",
)
)
detail = (
alt.Chart(source)
.mark_point()
.encode(
x=alt.X(
"date:T", scale=alt.Scale(domain={"selection": zoom.name, "encoding": "x"})
),
y=alt.Y(
"temp_max:Q",
scale=alt.Scale(domain={"selection": zoom.name, "encoding": "y"}),
),
color="weather",
)
.properties(width=600, height=400, title="Seattle weather -- detail view")
)
detail | minimap
|
[
"altair.selection_interval",
"vega_datasets.data.seattle_weather",
"altair.Chart",
"altair.value",
"altair.Scale"
] |
[((324, 346), 'vega_datasets.data.seattle_weather', 'data.seattle_weather', ([], {}), '()\n', (344, 346), False, 'from vega_datasets import data\n'), ((355, 399), 'altair.selection_interval', 'alt.selection_interval', ([], {'encodings': "['x', 'y']"}), "(encodings=['x', 'y'])\n", (377, 399), True, 'import altair as alt\n'), ((580, 602), 'altair.value', 'alt.value', (['"""lightgray"""'], {}), "('lightgray')\n", (589, 602), True, 'import altair as alt\n'), ((761, 778), 'altair.Chart', 'alt.Chart', (['source'], {}), '(source)\n', (770, 778), True, 'import altair as alt\n'), ((855, 914), 'altair.Scale', 'alt.Scale', ([], {'domain': "{'selection': zoom.name, 'encoding': 'x'}"}), "(domain={'selection': zoom.name, 'encoding': 'x'})\n", (864, 914), True, 'import altair as alt\n'), ((987, 1046), 'altair.Scale', 'alt.Scale', ([], {'domain': "{'selection': zoom.name, 'encoding': 'y'}"}), "(domain={'selection': zoom.name, 'encoding': 'y'})\n", (996, 1046), True, 'import altair as alt\n'), ((417, 434), 'altair.Chart', 'alt.Chart', (['source'], {}), '(source)\n', (426, 434), True, 'import altair as alt\n')]
|
import re
import sys
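# Strip hard line breaks and collapse runs of spaces in a ".draft" text file,
# then write the result to the same path with the ".draft" suffix removed.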
if ".draft" not in sys.argv[1]:
sys.exit(1)
t = open(sys.argv[1], "rb").read().decode()
t = re.sub(r"(\n|\r)", "", t)
t = re.sub(r" +", " ", t)
open(sys.argv[1].replace(".draft", ""), "wb").write(t.encode())
|
[
"re.sub",
"sys.exit"
] |
[((120, 146), 're.sub', 're.sub', (['"""(\\\\n|\\\\r)"""', '""""""', 't'], {}), "('(\\\\n|\\\\r)', '', t)\n", (126, 146), False, 'import re\n'), ((151, 172), 're.sub', 're.sub', (['""" +"""', '""" """', 't'], {}), "(' +', ' ', t)\n", (157, 172), False, 'import re\n'), ((58, 69), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (66, 69), False, 'import sys\n')]
|
from distutils.core import setup
from setuptools import find_packages
with open('README.md') as f:
long_description = f.read()
setup(
name='gdaxcli',
packages=find_packages('gdaxcli', exclude=['tests']),
version='0.1.1',
description='Commandline client for trading on GDAX',
long_description=long_description,
author='<NAME>',
author_email='<EMAIL>',
url='https://github.com/sonph/gdaxcli',
download_url='https://github.com/sonph/gdaxcli/archive/0.1.1.zip',
license='MIT',
keywords='gdax trading cryptocurrency bitcoin ethereum',
install_requires=[
# TODO: sync this with requirements.txt
"gdax",
"tabulate",
"colorama",
],
classifiers=[
# How mature is this project? Common values are
# 3 - Alpha
# 4 - Beta
# 5 - Production/Stable
'Development Status :: 3 - Alpha',
# Indicate who your project is intended for
# 'Intended Audience :: Developers',
# 'Topic :: Software Development :: Build Tools',
# Pick your license as you wish (should match "license" above)
'License :: OSI Approved :: MIT License',
# Specify the Python versions you support here. In particular, ensure
# that you indicate whether you support Python 2, Python 3 or both.
# 'Programming Language :: Python :: 2',
# 'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
# 'Programming Language :: Python :: 3',
# 'Programming Language :: Python :: 3.2',
# 'Programming Language :: Python :: 3.3',
# 'Programming Language :: Python :: 3.4',
],
)
# For reference: https://packaging.python.org/tutorials/distributing-packages/
# Python package management is a PITA.
# pip install twine
# rm -rf dist
# python setup.py bdist_wheel --universal
# twine upload dist/*
|
[
"setuptools.find_packages"
] |
[((172, 215), 'setuptools.find_packages', 'find_packages', (['"""gdaxcli"""'], {'exclude': "['tests']"}), "('gdaxcli', exclude=['tests'])\n", (185, 215), False, 'from setuptools import find_packages\n')]
|
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
import torch
import torch.nn.functional as F
from torch import nn
class PostProcessor(nn.Module):
def __init__(
self,
score_thresh=0.05,
nms=0.5,
detections_per_img=100,
num_class=2
):
super(PostProcessor, self).__init__()
self.score_thresh = score_thresh
self.nms = nms
self.detections_per_img = detections_per_img
self.num_class = num_class
def forward(self, depth, proposals):
for depth_per_img, proposal_per_img in zip(depth, proposals):
depth_per_img = depth_per_img.unsqueeze(-1)
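            # .new(shape) allocates an *uninitialized* tensor with the same
            # dtype/device as depth_per_img; its contents are arbitrary here.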
disp_per_img = depth_per_img.new(depth_per_img.shape)
position_z_per_img = torch.cat((depth_per_img, disp_per_img), -1)
proposal_per_img.add_field("positions_z", position_z_per_img)
proposal_per_img.add_field("positions_z_depth", position_z_per_img)
return proposals
def make_cost_volum_post_processor(cfg):
use_fpn = cfg.MODEL.ROI_HEADS.USE_FPN
bbox_reg_weights = cfg.MODEL.ROI_HEADS.BBOX_REG_WEIGHTS
score_thresh = cfg.MODEL.ROI_HEADS.SCORE_THRESH
nms_thresh = cfg.MODEL.ROI_HEADS.NMS
detections_per_img = cfg.MODEL.ROI_HEADS.DETECTIONS_PER_IMG
num_class = cfg.MODEL.ROI_BOX_HEAD.NUM_CLASSES
postprocessor = PostProcessor(
score_thresh,
nms_thresh,
detections_per_img,
num_class
)
return postprocessor
|
[
"torch.cat"
] |
[((771, 815), 'torch.cat', 'torch.cat', (['(depth_per_img, disp_per_img)', '(-1)'], {}), '((depth_per_img, disp_per_img), -1)\n', (780, 815), False, 'import torch\n')]
|
from copy import deepcopy
import pytest
@pytest.fixture
def real_oldcase_database(real_panel_database, parsed_case):
# add case with old case id construct
config_data = deepcopy(parsed_case)
config_data["case_id"] = "-".join([config_data["owner"], config_data["display_name"]])
case_obj = real_panel_database.load_case(config_data)
# add suspect and causative!
institute_obj = real_panel_database.institute(case_obj["owner"])
user_obj = real_panel_database.users()[0]
variant_obj = real_panel_database.variant_collection.find_one()
real_panel_database.pin_variant(
institute=institute_obj,
case=case_obj,
user=user_obj,
link="",
variant=variant_obj,
)
real_panel_database.mark_causative(
institute=institute_obj,
case=case_obj,
user=user_obj,
link="",
variant=variant_obj,
)
# add ACMG evaluation
real_panel_database.submit_evaluation(
variant_obj=variant_obj,
user_obj=user_obj,
institute_obj=institute_obj,
case_obj=case_obj,
link="",
criteria=[{"term": "PS1"}, {"term": "PM1"}],
)
# add comment on a variant
real_panel_database.comment(
institute=institute_obj,
case=case_obj,
user=user_obj,
link="",
variant=variant_obj,
comment_level="specific",
)
yield {
"adapter": real_panel_database,
"variant": variant_obj,
"case": real_panel_database.case(case_obj["_id"]),
}
@pytest.fixture
def parsed_gene():
gene_info = {
"hgnc_id": 1,
"hgnc_symbol": "AAA",
"ensembl_id": "ENSG1",
"chrom": "1",
"start": 10,
"end": 100,
"build": "37",
}
return gene_info
|
[
"copy.deepcopy"
] |
[((180, 201), 'copy.deepcopy', 'deepcopy', (['parsed_case'], {}), '(parsed_case)\n', (188, 201), False, 'from copy import deepcopy\n')]
|
import hashlib
import logging
import os
import dj_database_url
from django.contrib.messages import constants as messages
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
APP_DIRNAME = "apps"
SETTINGS_DIR = os.path.dirname(os.path.abspath(__file__))
BASE_DIR = os.path.dirname(SETTINGS_DIR)
DOCKER_DEFAULT_FILES = os.path.join(BASE_DIR, 'default_file')
DOCKER_MEM_LIMIT = "100m"
APPS_DIR = os.path.realpath(os.path.join(BASE_DIR, APP_DIRNAME))
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = "<KEY>"
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = False
# List of Allowed Hosts
ALLOWED_HOSTS = ['127.0.0.1']
# Used by mail_admins log handler,
# set ENABLE_MAIL_ADMINS to True to use it (DEBUG should also be set to False)
EMAIL_HOST = 'localhost'
EMAIL_PORT = 25
SERVER_EMAIL = 'root@localhost'
ADMINS = []
# Write email in console instead of sending it if DEBUG is True
if DEBUG:
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
# Application definition
PREREQ_APPS = [
'taggit',
'hitcount',
'django_http_method',
'django_markdown',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
PROJECT_APPS = [
'filebrowser',
'playexo',
'user_profile',
'loader',
'lti_app',
'qa',
'editor',
'components',
'activity',
'git',
'progress',
]
INSTALLED_APPS = PROJECT_APPS + PREREQ_APPS
INSTALLED_APPS += ('django_jinja','django_extensions')
# Middleware definition
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'lti_app.middleware.LTIAuthMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'whitenoise.middleware.WhiteNoiseMiddleware',
]
# Cookies settings
SESSION_SAVE_EVERY_REQUEST = True
SESSION_COOKIE_AGE = 5 * 365 * 24 * 60 * 60
# Redirect when not authenticated to
LOGIN_URL = "/activity/login/"
# URLs module
ROOT_URLCONF = 'premierlangage.urls'
# Overriding messages.ERROR to 'danger' to correspond with the bootstrap alert class
MESSAGE_TAGS = {
messages.ERROR: 'danger',
}
# Templates engines
TEMPLATES = [
{
'BACKEND': 'django_jinja.backend.Jinja2',
'DIRS': [
os.path.join(BASE_DIR, 'templates')
],
'APP_DIRS': True,
'OPTIONS': {
"autoescape": True,
"app_dirname": "templates",
"auto_reload": DEBUG,
"match_extension": ".html",
"match_regex": r"^((?!(admin|registration)).)*$",
"environment": "premierlangage.jinja2_custom.environment",
"undefined": "premierlangage.jinja2_custom.CustomUndefined",
"debug": True,
"filters": {
"markdown": "django_markdown.templatetags.django_markdown.markdown",
"dict_value": "apps.playexo.templatetags.playexo_tags.dict_value",
},
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
"extensions": [
"jinja2.ext.do",
"jinja2.ext.loopcontrols",
"jinja2.ext.with_",
"jinja2.ext.i18n",
"jinja2.ext.autoescape",
"django_jinja.builtins.extensions.CsrfExtension",
"django_jinja.builtins.extensions.CacheExtension",
"django_jinja.builtins.extensions.TimezoneExtension",
"django_jinja.builtins.extensions.UrlsExtension",
"django_jinja.builtins.extensions.StaticFilesExtension",
"django_jinja.builtins.extensions.DjangoFiltersExtension",
]
},
},
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [
os.path.join(BASE_DIR, 'templates')
],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
# WSGI Module
WSGI_APPLICATION = 'premierlangage.wsgi.application'
# Database
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql',
'NAME': 'django_premierlangage',
'USER': 'django',
'PASSWORD': '<PASSWORD>',
'HOST': '127.0.0.1',
'PORT': '5432',
}
}
DATA_UPLOAD_MAX_MEMORY_SIZE = 134217728
# Update database configuration with $DATABASE_URL.
db_from_env = dj_database_url.config(conn_max_age=500)
DATABASES['default'].update(db_from_env)
# Password validation
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
AUTHENTICATION_BACKENDS = (
'django.contrib.auth.backends.ModelBackend',
'lti_app.backends.LTIAuthBackend',
)
LTI_OAUTH_CREDENTIALS = {
'moodle': 'secret',
}
LOGIN_REDIRECT_URL = '/'
# Logger information
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse',
},
'require_debug_true': {
'()': 'django.utils.log.RequireDebugTrue',
},
},
'formatters': {
'verbose': {
'format': '[%(asctime)-15s] %(levelname)s -- '
'File: %(pathname)s line n°%(lineno)d -- %(message)s',
'datefmt': '%Y/%m/%d %H:%M:%S'
},
'simple': {
'format': '[%(asctime)-15s] %(levelname)s -- %(message)s',
'datefmt': '%Y/%m/%d %H:%M:%S'
},
},
'handlers': {
'console': {
'level': 'DEBUG',
'filters': ['require_debug_true'],
'class': 'logging.StreamHandler',
'formatter': 'simple'
},
'mail_admins': {
'level': 'ERROR',
'class': 'django.utils.log.AdminEmailHandler',
'include_html': True,
'formatter': 'verbose'
}
},
'loggers': {
'': {
'handlers': ['console', 'mail_admins'],
'level': 'INFO',
},
'django.request': {
'handlers': ['console', 'mail_admins'],
'level': 'ERROR',
},
},
}
# Ask's settings
# Reputation contains the points awarded
# Right contains the points needed for doing action, -1 to disable (do not apply for the owner)
QA_SETTINGS = {
'qa_messages': True,
'qa_description_optional': False,
'reputation': {
'CREATE_QUESTION': 10,
'CREATE_ANSWER': 20,
'CREATE_ANSWER_COMMENT': 2,
'CREATE_QUESTION_COMMENT': 2,
'RECEIVE_QUESTION_COMMENT': 1,
'RECEIVE_ANSWER_COMMENT': 1,
'ANSWER_ACCEPTED': 20, # Half for the acceptor
'UPVOTE_QUESTION': 3,
'UPVOTE_ANSWER': 3,
'DOWNVOTE_QUESTION': -3,
'DOWNVOTE_ANSWER': -3,
},
'right': {
'POST_QUESTION': 0,
'POST_ANSWER': 0,
'POST_COMMENT': 0,
'EDIT_QUESTION': 500,
'EDIT_ANSWER': 500,
'EDIT_COMMENT': -1,
'DELETE_QUESTION': 2000,
'DELETE_ANSWER': 2000,
'DELETE_COMMENT': 2000,
},
}
# Hitcount settings
HITCOUNT_KEEP_HIT_ACTIVE = {'days': 1}
# Settings used for the creation of identicon (default avatar)
IDENTICON_SETTINGS = {
'background': 'rgb(224,224,224)',
'foreground': [
'rgb(45,79,255)',
'rgb(254,180,44)',
'rgb(226,121,234)',
'rgb(30,179,253)',
'rgb(232,77,65)',
'rgb(49,203,115)',
],
'row': 15,
'col': 15,
'padding': (20, 20, 20, 20),
'size': (300, 300),
'digest': hashlib.sha1,
'output_format': 'png',
}
# Internationalization
LANGUAGE_CODE = 'fr-FR'
TIME_ZONE = 'Europe/Paris'
USE_I18N = False
USE_L10N = True
USE_TZ = True
# Sandbox url:
SANDBOX = 'http://127.0.0.1:7000'
# Static files (CSS, JavaScript, Images)
STATIC_ROOT = os.path.abspath(os.path.join(BASE_DIR, 'static'))
STATIC_URL = '/static/'
STATICFILES_DIRS = [
os.path.join(BASE_DIR, "common_static"),
]
MEDIA_ROOT = os.path.abspath(os.path.join(BASE_DIR, 'media'))
MEDIA_URL = '/media/'
# Default Filebrowser's path
FILEBROWSER_ROOT = os.path.abspath(os.path.join(BASE_DIR, 'home/'))
# Filebrowser settings
FILEBROWSER_DISALLOWED_CHAR = ['/', ' ', '\t', '\n', ';', '#', '+', '&']
# Path to directory containing parsers
PARSERS_ROOT = os.path.abspath(os.path.join(APPS_DIR, 'loader/parsers/'))
PARSERS_MODULE = 'loader.parsers'
# Default home directory name for pl users
HOME = "Yggdrasil"
# Allow a file '[PL_ROOT]/server/premierlangage/premierlangage/config.py' to override any of the
# settings above.
try:
from premierlangage.config import *
except Exception:
if "VERBOSE" in os.environ:
logger = logging.getLogger(__name__)
logger.exception("No config file found.")
pass
|
[
"os.path.abspath",
"os.path.dirname",
"os.path.join",
"dj_database_url.config",
"logging.getLogger"
] |
[((286, 315), 'os.path.dirname', 'os.path.dirname', (['SETTINGS_DIR'], {}), '(SETTINGS_DIR)\n', (301, 315), False, 'import os\n'), ((339, 377), 'os.path.join', 'os.path.join', (['BASE_DIR', '"""default_file"""'], {}), "(BASE_DIR, 'default_file')\n", (351, 377), False, 'import os\n'), ((5284, 5324), 'dj_database_url.config', 'dj_database_url.config', ([], {'conn_max_age': '(500)'}), '(conn_max_age=500)\n', (5306, 5324), False, 'import dj_database_url\n'), ((248, 273), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (263, 273), False, 'import os\n'), ((432, 467), 'os.path.join', 'os.path.join', (['BASE_DIR', 'APP_DIRNAME'], {}), '(BASE_DIR, APP_DIRNAME)\n', (444, 467), False, 'import os\n'), ((9371, 9403), 'os.path.join', 'os.path.join', (['BASE_DIR', '"""static"""'], {}), "(BASE_DIR, 'static')\n", (9383, 9403), False, 'import os\n'), ((9454, 9493), 'os.path.join', 'os.path.join', (['BASE_DIR', '"""common_static"""'], {}), "(BASE_DIR, 'common_static')\n", (9466, 9493), False, 'import os\n'), ((9527, 9558), 'os.path.join', 'os.path.join', (['BASE_DIR', '"""media"""'], {}), "(BASE_DIR, 'media')\n", (9539, 9558), False, 'import os\n'), ((9647, 9678), 'os.path.join', 'os.path.join', (['BASE_DIR', '"""home/"""'], {}), "(BASE_DIR, 'home/')\n", (9659, 9678), False, 'import os\n'), ((9848, 9889), 'os.path.join', 'os.path.join', (['APPS_DIR', '"""loader/parsers/"""'], {}), "(APPS_DIR, 'loader/parsers/')\n", (9860, 9889), False, 'import os\n'), ((2647, 2682), 'os.path.join', 'os.path.join', (['BASE_DIR', '"""templates"""'], {}), "(BASE_DIR, 'templates')\n", (2659, 2682), False, 'import os\n'), ((4412, 4447), 'os.path.join', 'os.path.join', (['BASE_DIR', '"""templates"""'], {}), "(BASE_DIR, 'templates')\n", (4424, 4447), False, 'import os\n'), ((10216, 10243), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (10233, 10243), False, 'import logging\n')]
|
# This source code is part of the Biotite package and is distributed
# under the 3-Clause BSD License. Please see 'LICENSE.rst' for further
# information.
__name__ = "biotite.structure.io.mol"
__author__ = "<NAME>"
__all__ = ["MOLFile"]
import datetime
from warnings import warn
import numpy as np
from ...atoms import AtomArray
from ....file import TextFile, InvalidFileError
from ...error import BadStructureError
from ..ctab import read_structure_from_ctab, write_structure_to_ctab
# Number of header lines
N_HEADER = 3
DATE_FORMAT = "%d%m%y%H%M"
class MOLFile(TextFile):
"""
This class represents a file in MOL format, that is used to store
structure information for small molecules. :footcite:`Dalby1992`
Since its use is intended for single small molecules, it stores
less atom annotation information than the macromolecular structure
formats:
Only the atom positions, charges, elements and bonds can be read
    from the file, chain and residue information is missing.
This class can also be used to parse the first structure from an SDF
file, as the SDF format extends the MOL format.
References
----------
.. footbibliography::
Examples
--------
>>> from os.path import join
>>> mol_file = MOLFile.read(join(path_to_structures, "molecules", "TYR.sdf"))
>>> atom_array = mol_file.get_structure()
>>> print(atom_array)
0 N 1.320 0.952 1.428
0 C -0.018 0.429 1.734
0 C -0.103 0.094 3.201
0 O 0.886 -0.254 3.799
0 C -0.274 -0.831 0.907
0 C -0.189 -0.496 -0.559
0 C 1.022 -0.589 -1.219
0 C -1.324 -0.102 -1.244
0 C 1.103 -0.282 -2.563
0 C -1.247 0.210 -2.587
0 C -0.032 0.118 -3.252
0 O 0.044 0.420 -4.574
0 O -1.279 0.184 3.842
0 H 1.977 0.225 1.669
0 H 1.365 1.063 0.426
0 H -0.767 1.183 1.489
0 H 0.473 -1.585 1.152
0 H -1.268 -1.219 1.134
0 H 1.905 -0.902 -0.683
0 H -2.269 -0.031 -0.727
0 H 2.049 -0.354 -3.078
0 H -2.132 0.523 -3.121
0 H -0.123 -0.399 -5.059
0 H -1.333 -0.030 4.784
"""
def __init__(self):
super().__init__()
# empty header lines
self.lines = [""] * N_HEADER
def get_header(self):
"""
Get the header from the MOL file.
Returns
-------
mol_name : str
The name of the molecule.
initials : str
The author's initials.
program : str
The program name.
time : datetime
The time of file creation.
dimensions : str
Dimensional codes.
scaling_factors : str
Scaling factors.
energy : str
Energy from modeling program.
registry_number : str
MDL registry number.
comments : str
Additional comments.
"""
mol_name = self.lines[0].strip()
initials = self.lines[1][0:2].strip()
program = self.lines[1][2:10].strip()
time = datetime.datetime.strptime(self.lines[1][10:20],
DATE_FORMAT)
dimensions = self.lines[1][20:22].strip()
scaling_factors = self.lines[1][22:34].strip()
energy = self.lines[1][34:46].strip()
registry_number = self.lines[1][46:52].strip()
comments = self.lines[2].strip()
return mol_name, initials, program, time, dimensions, \
scaling_factors, energy, registry_number, comments
def set_header(self, mol_name, initials="", program="", time=None,
dimensions="", scaling_factors="", energy="",
registry_number="", comments=""):
"""
Set the header for the MOL file.
Parameters
----------
mol_name : str
The name of the molecule.
initials : str, optional
The author's initials. Maximum length is 2.
program : str, optional
The program name. Maximum length is 8.
time : datetime or date, optional
The time of file creation.
dimensions : str, optional
Dimensional codes. Maximum length is 2.
scaling_factors : str, optional
Scaling factors. Maximum length is 12.
energy : str, optional
Energy from modeling program. Maximum length is 12.
registry_number : str, optional
MDL registry number. Maximum length is 6.
comments : str, optional
Additional comments.
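        Examples
        --------
        A minimal sketch (values purely illustrative; `mol_file` is an
        existing `MOLFile` instance):
        >>> mol_file.set_header("ALA", initials="JD", program="biotite")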
"""
if time is None:
time = datetime.datetime.now()
time_str = time.strftime(DATE_FORMAT)
self.lines[0] = str(mol_name)
self.lines[1] = (
f"{initials:>2}"
f"{program:>8}"
f"{time_str:>10}"
f"{dimensions:>2}"
f"{scaling_factors:>12}"
f"{energy:>12}"
f"{registry_number:>6}"
)
self.lines[2] = str(comments)
def get_structure(self):
"""
Get an :class:`AtomArray` from the MOL file.
Returns
-------
array : AtomArray
This :class:`AtomArray` contains the optional ``charge``
annotation and has an associated :class:`BondList`.
All other annotation categories, except ``element`` are
empty.
"""
ctab_lines = _get_ctab_lines(self.lines)
if len(ctab_lines) == 0:
raise InvalidFileError("File does not contain structure data")
return read_structure_from_ctab(ctab_lines)
def set_structure(self, atoms):
"""
Set the :class:`AtomArray` for the file.
Parameters
----------
array : AtomArray
The array to be saved into this file.
Must have an associated :class:`BondList`.
"""
self.lines = self.lines[:N_HEADER] + write_structure_to_ctab(atoms)
def _get_ctab_lines(lines):
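    # The connection table (Ctab) spans everything after the three header
    # lines up to and including the "M  END" terminator line.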
for i, line in enumerate(lines):
        if line.startswith("M  END"):
return lines[N_HEADER:i+1]
return lines[N_HEADER:]
|
[
"datetime.datetime.strptime",
"datetime.datetime.now"
] |
[((3942, 4003), 'datetime.datetime.strptime', 'datetime.datetime.strptime', (['self.lines[1][10:20]', 'DATE_FORMAT'], {}), '(self.lines[1][10:20], DATE_FORMAT)\n', (3968, 4003), False, 'import datetime\n'), ((5549, 5572), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (5570, 5572), False, 'import datetime\n')]
|
# coding: utf-8
# DO NOT EDIT
# Autogenerated from the notebook glm.ipynb.
# Edit the notebook and then sync the output with this file.
#
# flake8: noqa
# DO NOT EDIT
# # Generalized Linear Models
import numpy as np
import statsmodels.api as sm
from scipy import stats
from matplotlib import pyplot as plt
# ## GLM: Binomial response data
#
# ### Load the data
#
# In this example, we use the Star98 dataset, taken with permission from <NAME> (2000), Generalized Linear Models: A Unified Approach. Codebook information can be obtained by typing:
print(sm.datasets.star98.NOTE)
# Load the data and add a constant to the exogenous (independent) variables:
data = sm.datasets.star98.load()
data.exog = sm.add_constant(data.exog, prepend=False)
# The dependent variable is N by 2 (Success: NABOVE, Failure: NBELOW):
print(data.endog[:5, :])
# The independent variables include all the other variables described above, as well as the interaction terms:
print(data.exog[:2, :])
# ### Fit and summary
glm_binom = sm.GLM(data.endog, data.exog, family=sm.families.Binomial())
res = glm_binom.fit()
print(res.summary())
# ### Quantities of interest
print('Total number of trials:', data.endog[0].sum())
print('Parameters: ', res.params)
print('T-values: ', res.tvalues)
# First differences: we hold all explanatory variables constant at their means and manipulate the percentage of low income households to assess its impact on the response variables:
means = data.exog.mean(axis=0)
means25 = means.copy()
means25[0] = stats.scoreatpercentile(data.exog[:, 0], 25)
means75 = means.copy()
means75[0] = lowinc_75per = stats.scoreatpercentile(data.exog[:, 0], 75)
resp_25 = res.predict(means25)
resp_75 = res.predict(means75)
diff = resp_75 - resp_25
# The interquartile first difference for the percentage of low income households in a school district is:
print("%2.4f%%" % (diff * 100))
# ### Plots
#
# We extract information that will be used to plot some interesting figures:
nobs = res.nobs
y = data.endog[:, 0] / data.endog.sum(1)
yhat = res.mu
# Plot yhat vs y:
from statsmodels.graphics.api import abline_plot
fig, ax = plt.subplots()
ax.scatter(yhat, y)
line_fit = sm.OLS(y, sm.add_constant(yhat, prepend=True)).fit()
abline_plot(model_results=line_fit, ax=ax)
ax.set_title('Model Fit Plot')
ax.set_ylabel('Observed values')
ax.set_xlabel('Fitted values')
# Plot yhat vs. Pearson residuals:
fig, ax = plt.subplots()
ax.scatter(yhat, res.resid_pearson)
ax.hlines(0, 0, 1)
ax.set_xlim(0, 1)
ax.set_title('Residual Dependence Plot')
ax.set_ylabel('Pearson Residuals')
ax.set_xlabel('Fitted values')
# Histogram of standardized deviance residuals:
from scipy import stats
fig, ax = plt.subplots()
resid = res.resid_deviance.copy()
resid_std = stats.zscore(resid)
ax.hist(resid_std, bins=25)
ax.set_title('Histogram of standardized deviance residuals')
# QQ plot of the deviance residuals:
from statsmodels import graphics
graphics.gofplots.qqplot(resid, line='r')
# ## GLM: Gamma for proportional count response
#
# ### Load the data
#
# In the example above, we printed the ``NOTE`` attribute to learn about the Star98 dataset. statsmodels datasets ship with other useful information. For example:
print(sm.datasets.scotland.DESCRLONG)
# Load the data and add a constant to the exogenous variables:
data2 = sm.datasets.scotland.load()
data2.exog = sm.add_constant(data2.exog, prepend=False)
print(data2.exog[:5, :])
print(data2.endog[:5])
# ### Fit and summary
glm_gamma = sm.GLM(data2.endog, data2.exog, family=sm.families.Gamma())
glm_results = glm_gamma.fit()
print(glm_results.summary())
# ## GLM: Gaussian distribution with a noncanonical link
#
# ### Artificial data
nobs2 = 100
x = np.arange(nobs2)
np.random.seed(54321)
X = np.column_stack((x, x**2))
X = sm.add_constant(X, prepend=False)
lny = np.exp(-(.03 * x + .0001 * x**2 - 1.0)) + .001 * np.random.rand(nobs2)
# ### Fit and summary
gauss_log = sm.GLM(lny, X, family=sm.families.Gaussian(sm.families.links.log))
gauss_log_results = gauss_log.fit()
print(gauss_log_results.summary())
|
[
"numpy.random.seed",
"scipy.stats.zscore",
"statsmodels.api.families.Binomial",
"scipy.stats.scoreatpercentile",
"statsmodels.api.families.Gamma",
"numpy.column_stack",
"statsmodels.graphics.gofplots.qqplot",
"numpy.arange",
"numpy.exp",
"statsmodels.graphics.api.abline_plot",
"statsmodels.api.datasets.scotland.load",
"numpy.random.rand",
"statsmodels.api.add_constant",
"statsmodels.api.datasets.star98.load",
"statsmodels.api.families.Gaussian",
"matplotlib.pyplot.subplots"
] |
[((474, 499), 'statsmodels.api.datasets.star98.load', 'sm.datasets.star98.load', ([], {}), '()\n', (497, 499), True, 'import statsmodels.api as sm\n'), ((512, 553), 'statsmodels.api.add_constant', 'sm.add_constant', (['data.exog'], {'prepend': '(False)'}), '(data.exog, prepend=False)\n', (527, 553), True, 'import statsmodels.api as sm\n'), ((1092, 1136), 'scipy.stats.scoreatpercentile', 'stats.scoreatpercentile', (['data.exog[:, 0]', '(25)'], {}), '(data.exog[:, 0], 25)\n', (1115, 1136), False, 'from scipy import stats\n'), ((1188, 1232), 'scipy.stats.scoreatpercentile', 'stats.scoreatpercentile', (['data.exog[:, 0]', '(75)'], {}), '(data.exog[:, 0], 75)\n', (1211, 1232), False, 'from scipy import stats\n'), ((1562, 1576), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {}), '()\n', (1574, 1576), True, 'from matplotlib import pyplot as plt\n'), ((1661, 1703), 'statsmodels.graphics.api.abline_plot', 'abline_plot', ([], {'model_results': 'line_fit', 'ax': 'ax'}), '(model_results=line_fit, ax=ax)\n', (1672, 1703), False, 'from statsmodels.graphics.api import abline_plot\n'), ((1847, 1861), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {}), '()\n', (1859, 1861), True, 'from matplotlib import pyplot as plt\n'), ((2094, 2108), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {}), '()\n', (2106, 2108), True, 'from matplotlib import pyplot as plt\n'), ((2156, 2175), 'scipy.stats.zscore', 'stats.zscore', (['resid'], {}), '(resid)\n', (2168, 2175), False, 'from scipy import stats\n'), ((2311, 2352), 'statsmodels.graphics.gofplots.qqplot', 'graphics.gofplots.qqplot', (['resid'], {'line': '"""r"""'}), "(resid, line='r')\n", (2335, 2352), False, 'from statsmodels import graphics\n'), ((2535, 2562), 'statsmodels.api.datasets.scotland.load', 'sm.datasets.scotland.load', ([], {}), '()\n', (2560, 2562), True, 'import statsmodels.api as sm\n'), ((2576, 2618), 'statsmodels.api.add_constant', 'sm.add_constant', (['data2.exog'], {'prepend': '(False)'}), '(data2.exog, prepend=False)\n', (2591, 2618), True, 'import statsmodels.api as sm\n'), ((2874, 2890), 'numpy.arange', 'np.arange', (['nobs2'], {}), '(nobs2)\n', (2883, 2890), True, 'import numpy as np\n'), ((2891, 2912), 'numpy.random.seed', 'np.random.seed', (['(54321)'], {}), '(54321)\n', (2905, 2912), True, 'import numpy as np\n'), ((2917, 2945), 'numpy.column_stack', 'np.column_stack', (['(x, x ** 2)'], {}), '((x, x ** 2))\n', (2932, 2945), True, 'import numpy as np\n'), ((2948, 2981), 'statsmodels.api.add_constant', 'sm.add_constant', (['X'], {'prepend': '(False)'}), '(X, prepend=False)\n', (2963, 2981), True, 'import statsmodels.api as sm\n'), ((2988, 3031), 'numpy.exp', 'np.exp', (['(-(0.03 * x + 0.0001 * x ** 2 - 1.0))'], {}), '(-(0.03 * x + 0.0001 * x ** 2 - 1.0))\n', (2994, 3031), True, 'import numpy as np\n'), ((752, 774), 'statsmodels.api.families.Binomial', 'sm.families.Binomial', ([], {}), '()\n', (772, 774), True, 'import statsmodels.api as sm\n'), ((2740, 2759), 'statsmodels.api.families.Gamma', 'sm.families.Gamma', ([], {}), '()\n', (2757, 2759), True, 'import statsmodels.api as sm\n'), ((3037, 3058), 'numpy.random.rand', 'np.random.rand', (['nobs2'], {}), '(nobs2)\n', (3051, 3058), True, 'import numpy as np\n'), ((3115, 3158), 'statsmodels.api.families.Gaussian', 'sm.families.Gaussian', (['sm.families.links.log'], {}), '(sm.families.links.log)\n', (3135, 3158), True, 'import statsmodels.api as sm\n'), ((1618, 1653), 'statsmodels.api.add_constant', 'sm.add_constant', (['yhat'], {'prepend': '(True)'}), '(yhat, prepend=True)\n', (1633, 1653), True, 'import statsmodels.api as sm\n')]
|
import logging
import os
log = logging.getLogger(__name__)
class JobListService(object):
PESTO_WORKSPACE = '/tmp/.pesto/jobs'
def __init__(self) -> None:
self.PESTO_WORKSPACE = JobListService.PESTO_WORKSPACE
def job_list(self, url_root: str) -> dict:
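        # Every entry under the PESTO workspace directory is treated as a job
        # id; a status-endpoint link is built for each one.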
log.info('job_list : url_root = {}'.format(url_root))
result = {}
if os.path.exists(self.PESTO_WORKSPACE):
log.info('workspace found : {}'.format(self.PESTO_WORKSPACE))
for job_id in os.listdir(self.PESTO_WORKSPACE):
result[job_id] = {'link': '{}/api/v1/jobs/{}/status'.format(url_root, job_id)}
return result
|
[
"os.path.exists",
"os.listdir",
"logging.getLogger"
] |
[((32, 59), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (49, 59), False, 'import logging\n'), ((371, 407), 'os.path.exists', 'os.path.exists', (['self.PESTO_WORKSPACE'], {}), '(self.PESTO_WORKSPACE)\n', (385, 407), False, 'import os\n'), ((509, 541), 'os.listdir', 'os.listdir', (['self.PESTO_WORKSPACE'], {}), '(self.PESTO_WORKSPACE)\n', (519, 541), False, 'import os\n')]
|
from __future__ import absolute_import, division, print_function
import os
from qtpy import QtWidgets
from glue.utils.qt import load_ui
from glue.external.echo.qt import autoconnect_callbacks_to_qt
from glue_vispy_viewers.utils import fix_tab_widget_fontsize
class ScatterLayerStyleWidget(QtWidgets.QWidget):
def __init__(self, layer_artist):
super(ScatterLayerStyleWidget, self).__init__()
self.ui = load_ui('layer_style_widget.ui', self,
directory=os.path.dirname(__file__))
fix_tab_widget_fontsize(self.ui.tab_widget)
self.state = layer_artist.state
self.layer_artist = layer_artist
self.layer = layer_artist.layer
connect_kwargs = {'value_alpha': dict(value_range=(0., 1.)),
'value_size_scaling': dict(value_range=(0.1, 10), log=True)}
self._connections = autoconnect_callbacks_to_qt(self.state, self.ui, connect_kwargs)
# Set initial values
self._update_size_mode()
self._update_color_mode()
self.state.add_callback('color_mode', self._update_color_mode)
self.state.add_callback('size_mode', self._update_size_mode)
def _update_size_mode(self, *args):
if self.state.size_mode == "Fixed":
self.ui.size_row_2.hide()
self.ui.combosel_size_attribute.hide()
self.ui.valuetext_size.show()
else:
self.ui.valuetext_size.hide()
self.ui.combosel_size_attribute.show()
self.ui.size_row_2.show()
def _update_color_mode(self, *args):
if self.state.color_mode == "Fixed":
self.ui.color_row_2.hide()
self.ui.color_row_3.hide()
self.ui.combosel_cmap_attribute.hide()
self.ui.spacer_color_label.show()
self.ui.color_color.show()
else:
self.ui.color_color.hide()
self.ui.combosel_cmap_attribute.show()
self.ui.spacer_color_label.hide()
self.ui.color_row_2.show()
self.ui.color_row_3.show()
|
[
"glue_vispy_viewers.utils.fix_tab_widget_fontsize",
"os.path.dirname",
"glue.external.echo.qt.autoconnect_callbacks_to_qt"
] |
[((541, 584), 'glue_vispy_viewers.utils.fix_tab_widget_fontsize', 'fix_tab_widget_fontsize', (['self.ui.tab_widget'], {}), '(self.ui.tab_widget)\n', (564, 584), False, 'from glue_vispy_viewers.utils import fix_tab_widget_fontsize\n'), ((893, 957), 'glue.external.echo.qt.autoconnect_callbacks_to_qt', 'autoconnect_callbacks_to_qt', (['self.state', 'self.ui', 'connect_kwargs'], {}), '(self.state, self.ui, connect_kwargs)\n', (920, 957), False, 'from glue.external.echo.qt import autoconnect_callbacks_to_qt\n'), ((505, 530), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (520, 530), False, 'import os\n')]
|
"""
Implementation of the XDG Menu Specification Version 1.0.draft-1
http://standards.freedesktop.org/menu-spec/
"""
from __future__ import generators
import locale, os, xml.dom.minidom
from xdg.BaseDirectory import *
from xdg.DesktopEntry import *
from xdg.Exceptions import *
import xdg.Locale
import xdg.Config
ELEMENT_NODE = xml.dom.Node.ELEMENT_NODE
# for python <= 2.3
try:
reversed = reversed
except NameError:
def reversed(x):
return x[::-1]
class Menu:
def __init__(self):
# Public stuff
self.Name = ""
self.Directory = None
self.Entries = []
self.Doc = ""
self.Filename = ""
self.Depth = 0
self.Parent = None
self.NotInXml = False
# Can be one of Deleted/NoDisplay/Hidden/Empty/NotShowIn or True
self.Show = True
self.Visible = 0
# Private stuff, only needed for parsing
self.AppDirs = []
self.DefaultLayout = None
self.Deleted = "notset"
self.Directories = []
self.DirectoryDirs = []
self.Layout = None
self.MenuEntries = []
self.Moves = []
self.OnlyUnallocated = "notset"
self.Rules = []
self.Submenus = []
def __str__(self):
return self.Name
def __add__(self, other):
for dir in other.AppDirs:
self.AppDirs.append(dir)
for dir in other.DirectoryDirs:
self.DirectoryDirs.append(dir)
for directory in other.Directories:
self.Directories.append(directory)
if other.Deleted != "notset":
self.Deleted = other.Deleted
if other.OnlyUnallocated != "notset":
self.OnlyUnallocated = other.OnlyUnallocated
if other.Layout:
self.Layout = other.Layout
if other.DefaultLayout:
self.DefaultLayout = other.DefaultLayout
for rule in other.Rules:
self.Rules.append(rule)
for move in other.Moves:
self.Moves.append(move)
for submenu in other.Submenus:
self.addSubmenu(submenu)
return self
# FIXME: Performance: cache getName()
def __cmp__(self, other):
return locale.strcoll(self.getName(), other.getName())
def __eq__(self, other):
if self.Name == str(other):
return True
else:
return False
""" PUBLIC STUFF """
def getEntries(self, hidden=False):
for entry in self.Entries:
if hidden == True:
yield entry
elif entry.Show == True:
yield entry
    # FIXME: Add searchEntry/searchMenu function
    # search for name/comment/genericname/desktopfileid
# return multiple items
def getMenuEntry(self, desktopfileid, deep = False):
for menuentry in self.MenuEntries:
if menuentry.DesktopFileID == desktopfileid:
return menuentry
if deep == True:
for submenu in self.Submenus:
submenu.getMenuEntry(desktopfileid, deep)
def getMenu(self, path):
array = path.split("/", 1)
for submenu in self.Submenus:
if submenu.Name == array[0]:
if len(array) > 1:
return submenu.getMenu(array[1])
else:
return submenu
def getPath(self, org=False, toplevel=False):
parent = self
names=[]
while 1:
if org:
names.append(parent.Name)
else:
names.append(parent.getName())
if parent.Depth > 0:
parent = parent.Parent
else:
break
names.reverse()
path = ""
if toplevel == False:
names.pop(0)
for name in names:
path = os.path.join(path, name)
return path
def getName(self):
try:
return self.Directory.DesktopEntry.getName()
except AttributeError:
return self.Name
def getGenericName(self):
try:
return self.Directory.DesktopEntry.getGenericName()
except AttributeError:
return ""
def getComment(self):
try:
return self.Directory.DesktopEntry.getComment()
except AttributeError:
return ""
def getIcon(self):
try:
return self.Directory.DesktopEntry.getIcon()
except AttributeError:
return ""
""" PRIVATE STUFF """
def addSubmenu(self, newmenu):
for submenu in self.Submenus:
if submenu == newmenu:
submenu += newmenu
break
else:
self.Submenus.append(newmenu)
newmenu.Parent = self
newmenu.Depth = self.Depth + 1
class Move:
"A move operation"
def __init__(self, node=None):
if node:
self.parseNode(node)
else:
self.Old = ""
self.New = ""
def __cmp__(self, other):
return cmp(self.Old, other.Old)
def parseNode(self, node):
for child in node.childNodes:
if child.nodeType == ELEMENT_NODE:
if child.tagName == "Old":
try:
self.parseOld(child.childNodes[0].nodeValue)
except IndexError:
raise ValidationError('Old cannot be empty', '??')
elif child.tagName == "New":
try:
self.parseNew(child.childNodes[0].nodeValue)
except IndexError:
raise ValidationError('New cannot be empty', '??')
def parseOld(self, value):
self.Old = value
def parseNew(self, value):
self.New = value
class Layout:
"Menu Layout class"
def __init__(self, node=None):
self.order = []
if node:
self.show_empty = node.getAttribute("show_empty") or "false"
self.inline = node.getAttribute("inline") or "false"
self.inline_limit = node.getAttribute("inline_limit") or 4
self.inline_header = node.getAttribute("inline_header") or "true"
self.inline_alias = node.getAttribute("inline_alias") or "false"
self.inline_limit = int(self.inline_limit)
self.parseNode(node)
else:
self.show_empty = "false"
self.inline = "false"
self.inline_limit = 4
self.inline_header = "true"
self.inline_alias = "false"
self.order.append(["Merge", "menus"])
self.order.append(["Merge", "files"])
def parseNode(self, node):
for child in node.childNodes:
if child.nodeType == ELEMENT_NODE:
if child.tagName == "Menuname":
try:
self.parseMenuname(
child.childNodes[0].nodeValue,
child.getAttribute("show_empty") or "false",
child.getAttribute("inline") or "false",
child.getAttribute("inline_limit") or 4,
child.getAttribute("inline_header") or "true",
child.getAttribute("inline_alias") or "false" )
except IndexError:
raise ValidationError('Menuname cannot be empty', "")
elif child.tagName == "Separator":
self.parseSeparator()
elif child.tagName == "Filename":
try:
self.parseFilename(child.childNodes[0].nodeValue)
except IndexError:
raise ValidationError('Filename cannot be empty', "")
elif child.tagName == "Merge":
self.parseMerge(child.getAttribute("type") or "all")
def parseMenuname(self, value, empty="false", inline="false", inline_limit=4, inline_header="true", inline_alias="false"):
self.order.append(["Menuname", value, empty, inline, inline_limit, inline_header, inline_alias])
self.order[-1][4] = int(self.order[-1][4])
def parseSeparator(self):
self.order.append(["Separator"])
def parseFilename(self, value):
self.order.append(["Filename", value])
def parseMerge(self, type="all"):
self.order.append(["Merge", type])
class Rule:
"Inlcude / Exclude Rules Class"
def __init__(self, type, node=None):
# Type is Include or Exclude
self.Type = type
# Rule is a python expression
self.Rule = ""
# Private attributes, only needed for parsing
self.Depth = 0
self.Expr = [ "or" ]
self.New = True
# Begin parsing
if node:
self.parseNode(node)
self.compile()
def __str__(self):
return self.Rule
def compile(self):
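        # Build self.do() at runtime: the boolean expression accumulated in
        # self.Rule is spliced into a filter over menu entries via exec.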
exec("""
def do(menuentries, type, run):
for menuentry in menuentries:
if run == 2 and ( menuentry.MatchedInclude == True \
or menuentry.Allocated == True ):
continue
elif %s:
if type == "Include":
menuentry.Add = True
menuentry.MatchedInclude = True
else:
menuentry.Add = False
return menuentries
""" % self.Rule) in self.__dict__
def parseNode(self, node):
for child in node.childNodes:
if child.nodeType == ELEMENT_NODE:
if child.tagName == 'Filename':
try:
self.parseFilename(child.childNodes[0].nodeValue)
except IndexError:
raise ValidationError('Filename cannot be empty', "???")
elif child.tagName == 'Category':
try:
self.parseCategory(child.childNodes[0].nodeValue)
except IndexError:
raise ValidationError('Category cannot be empty', "???")
elif child.tagName == 'All':
self.parseAll()
elif child.tagName == 'And':
self.parseAnd(child)
elif child.tagName == 'Or':
self.parseOr(child)
elif child.tagName == 'Not':
self.parseNot(child)
def parseNew(self, set=True):
if not self.New:
self.Rule += " " + self.Expr[self.Depth] + " "
if not set:
self.New = True
elif set:
self.New = False
def parseFilename(self, value):
self.parseNew()
self.Rule += "menuentry.DesktopFileID == '%s'" % value.strip().replace("\\", r"\\").replace("'", r"\'")
def parseCategory(self, value):
self.parseNew()
self.Rule += "'%s' in menuentry.Categories" % value.strip()
def parseAll(self):
self.parseNew()
self.Rule += "True"
def parseAnd(self, node):
self.parseNew(False)
self.Rule += "("
self.Depth += 1
self.Expr.append("and")
self.parseNode(node)
self.Depth -= 1
self.Expr.pop()
self.Rule += ")"
def parseOr(self, node):
self.parseNew(False)
self.Rule += "("
self.Depth += 1
self.Expr.append("or")
self.parseNode(node)
self.Depth -= 1
self.Expr.pop()
self.Rule += ")"
def parseNot(self, node):
self.parseNew(False)
self.Rule += "not ("
self.Depth += 1
self.Expr.append("or")
self.parseNode(node)
self.Depth -= 1
self.Expr.pop()
self.Rule += ")"
class MenuEntry:
"Wrapper for 'Menu Style' Desktop Entries"
def __init__(self, filename, dir="", prefix=""):
# Create entry
self.DesktopEntry = DesktopEntry(os.path.join(dir,filename))
self.setAttributes(filename, dir, prefix)
# Can be one of Deleted/Hidden/Empty/NotShowIn/NoExec or True
self.Show = True
# Semi-Private
self.Original = None
self.Parents = []
# Private Stuff
self.Allocated = False
self.Add = False
self.MatchedInclude = False
# Caching
self.Categories = self.DesktopEntry.getCategories()
def save(self):
if self.DesktopEntry.tainted == True:
self.DesktopEntry.write()
def getDir(self):
return self.DesktopEntry.filename.replace(self.Filename, '')
def getType(self):
# Can be one of System/User/Both
if xdg.Config.root_mode == False:
if self.Original:
return "Both"
elif xdg_data_dirs[0] in self.DesktopEntry.filename:
return "User"
else:
return "System"
else:
return "User"
def setAttributes(self, filename, dir="", prefix=""):
self.Filename = filename
self.Prefix = prefix
self.DesktopFileID = os.path.join(prefix,filename).replace("/", "-")
if not os.path.isabs(self.DesktopEntry.filename):
self.__setFilename()
def updateAttributes(self):
if self.getType() == "System":
self.Original = MenuEntry(self.Filename, self.getDir(), self.Prefix)
self.__setFilename()
def __setFilename(self):
if xdg.Config.root_mode == False:
path = xdg_data_dirs[0]
else:
path= xdg_data_dirs[1]
if self.DesktopEntry.getType() == "Application":
dir = os.path.join(path, "applications")
else:
dir = os.path.join(path, "desktop-directories")
self.DesktopEntry.filename = os.path.join(dir, self.Filename)
def __cmp__(self, other):
return locale.strcoll(self.DesktopEntry.getName(), other.DesktopEntry.getName())
def __eq__(self, other):
if self.DesktopFileID == str(other):
return True
else:
return False
def __repr__(self):
return self.DesktopFileID
class Separator:
"Just a dummy class for Separators"
def __init__(self, parent):
self.Parent = parent
self.Show = True
class Header:
"Class for Inline Headers"
def __init__(self, name, generic_name, comment):
self.Name = name
self.GenericName = generic_name
self.Comment = comment
def __str__(self):
return self.Name
tmp = {}
def __getFileName(filename):
dirs = xdg_config_dirs[:]
if xdg.Config.root_mode == True:
dirs.pop(0)
for dir in dirs:
        menuname = os.path.join(dir, "menus", filename)
if os.path.isdir(dir) and os.path.isfile(menuname):
return menuname
def parse(filename=None):
    # convert to absolute path
if filename and not os.path.isabs(filename):
filename = __getFileName(filename)
# use default if no filename given
if not filename:
candidate = os.environ.get('XDG_MENU_PREFIX', '') + "applications.menu"
filename = __getFileName(candidate)
if not filename:
raise ParsingError('File not found', "/etc/xdg/menus/%s" % candidate)
# check if it is a .menu file
if not os.path.splitext(filename)[1] == ".menu":
raise ParsingError('Not a .menu file', filename)
# create xml parser
try:
doc = xml.dom.minidom.parse(filename)
except xml.parsers.expat.ExpatError:
raise ParsingError('Not a valid .menu file', filename)
# parse menufile
tmp["Root"] = ""
tmp["mergeFiles"] = []
tmp["DirectoryDirs"] = []
tmp["cache"] = MenuEntryCache()
__parse(doc, filename, tmp["Root"])
__parsemove(tmp["Root"])
__postparse(tmp["Root"])
tmp["Root"].Doc = doc
tmp["Root"].Filename = filename
# generate the menu
__genmenuNotOnlyAllocated(tmp["Root"])
__genmenuOnlyAllocated(tmp["Root"])
# and finally sort
sort(tmp["Root"])
return tmp["Root"]
def __parse(node, filename, parent=None):
for child in node.childNodes:
if child.nodeType == ELEMENT_NODE:
if child.tagName == 'Menu':
__parseMenu(child, filename, parent)
elif child.tagName == 'AppDir':
try:
__parseAppDir(child.childNodes[0].nodeValue, filename, parent)
except IndexError:
raise ValidationError('AppDir cannot be empty', filename)
elif child.tagName == 'DefaultAppDirs':
__parseDefaultAppDir(filename, parent)
elif child.tagName == 'DirectoryDir':
try:
__parseDirectoryDir(child.childNodes[0].nodeValue, filename, parent)
except IndexError:
raise ValidationError('DirectoryDir cannot be empty', filename)
elif child.tagName == 'DefaultDirectoryDirs':
__parseDefaultDirectoryDir(filename, parent)
elif child.tagName == 'Name' :
try:
parent.Name = child.childNodes[0].nodeValue
except IndexError:
raise ValidationError('Name cannot be empty', filename)
elif child.tagName == 'Directory' :
try:
parent.Directories.append(child.childNodes[0].nodeValue)
except IndexError:
raise ValidationError('Directory cannot be empty', filename)
elif child.tagName == 'OnlyUnallocated':
parent.OnlyUnallocated = True
elif child.tagName == 'NotOnlyUnallocated':
parent.OnlyUnallocated = False
elif child.tagName == 'Deleted':
parent.Deleted = True
elif child.tagName == 'NotDeleted':
parent.Deleted = False
elif child.tagName == 'Include' or child.tagName == 'Exclude':
parent.Rules.append(Rule(child.tagName, child))
elif child.tagName == 'MergeFile':
try:
if child.getAttribute("type") == "parent":
__parseMergeFile("applications.menu", child, filename, parent)
else:
__parseMergeFile(child.childNodes[0].nodeValue, child, filename, parent)
except IndexError:
raise ValidationError('MergeFile cannot be empty', filename)
elif child.tagName == 'MergeDir':
try:
__parseMergeDir(child.childNodes[0].nodeValue, child, filename, parent)
except IndexError:
raise ValidationError('MergeDir cannot be empty', filename)
elif child.tagName == 'DefaultMergeDirs':
__parseDefaultMergeDirs(child, filename, parent)
elif child.tagName == 'Move':
parent.Moves.append(Move(child))
elif child.tagName == 'Layout':
if len(child.childNodes) > 1:
parent.Layout = Layout(child)
elif child.tagName == 'DefaultLayout':
if len(child.childNodes) > 1:
parent.DefaultLayout = Layout(child)
elif child.tagName == 'LegacyDir':
try:
__parseLegacyDir(child.childNodes[0].nodeValue, child.getAttribute("prefix"), filename, parent)
except IndexError:
raise ValidationError('LegacyDir cannot be empty', filename)
elif child.tagName == 'KDELegacyDirs':
__parseKDELegacyDirs(filename, parent)
def __parsemove(menu):
for submenu in menu.Submenus:
__parsemove(submenu)
# parse move operations
for move in menu.Moves:
move_from_menu = menu.getMenu(move.Old)
if move_from_menu:
move_to_menu = menu.getMenu(move.New)
menus = move.New.split("/")
oldparent = None
while len(menus) > 0:
if not oldparent:
oldparent = menu
newmenu = oldparent.getMenu(menus[0])
if not newmenu:
newmenu = Menu()
newmenu.Name = menus[0]
if len(menus) > 1:
newmenu.NotInXml = True
oldparent.addSubmenu(newmenu)
oldparent = newmenu
menus.pop(0)
newmenu += move_from_menu
move_from_menu.Parent.Submenus.remove(move_from_menu)
def __postparse(menu):
# unallocated / deleted
if menu.Deleted == "notset":
menu.Deleted = False
if menu.OnlyUnallocated == "notset":
menu.OnlyUnallocated = False
# Layout Tags
if not menu.Layout or not menu.DefaultLayout:
if menu.DefaultLayout:
menu.Layout = menu.DefaultLayout
elif menu.Layout:
if menu.Depth > 0:
menu.DefaultLayout = menu.Parent.DefaultLayout
else:
menu.DefaultLayout = Layout()
else:
if menu.Depth > 0:
menu.Layout = menu.Parent.DefaultLayout
menu.DefaultLayout = menu.Parent.DefaultLayout
else:
menu.Layout = Layout()
menu.DefaultLayout = Layout()
# add parent's app/directory dirs
if menu.Depth > 0:
menu.AppDirs = menu.Parent.AppDirs + menu.AppDirs
menu.DirectoryDirs = menu.Parent.DirectoryDirs + menu.DirectoryDirs
# remove duplicates
menu.Directories = __removeDuplicates(menu.Directories)
menu.DirectoryDirs = __removeDuplicates(menu.DirectoryDirs)
menu.AppDirs = __removeDuplicates(menu.AppDirs)
# go recursive through all menus
for submenu in menu.Submenus:
__postparse(submenu)
# reverse so handling is easier
menu.Directories.reverse()
menu.DirectoryDirs.reverse()
menu.AppDirs.reverse()
# get the valid .directory file out of the list
for directory in menu.Directories:
for dir in menu.DirectoryDirs:
if os.path.isfile(os.path.join(dir, directory)):
menuentry = MenuEntry(directory, dir)
if not menu.Directory:
menu.Directory = menuentry
elif menuentry.getType() == "System":
if menu.Directory.getType() == "User":
menu.Directory.Original = menuentry
if menu.Directory:
break
# Menu parsing stuff
def __parseMenu(child, filename, parent):
m = Menu()
__parse(child, filename, m)
if parent:
parent.addSubmenu(m)
else:
tmp["Root"] = m
# helper function
def __check(value, filename, type):
path = os.path.dirname(filename)
if not os.path.isabs(value):
value = os.path.join(path, value)
value = os.path.abspath(value)
if type == "dir" and os.path.exists(value) and os.path.isdir(value):
return value
elif type == "file" and os.path.exists(value) and os.path.isfile(value):
return value
else:
return False
# App/Directory Dir Stuff
def __parseAppDir(value, filename, parent):
value = __check(value, filename, "dir")
if value:
parent.AppDirs.append(value)
def __parseDefaultAppDir(filename, parent):
for dir in reversed(xdg_data_dirs):
__parseAppDir(os.path.join(dir, "applications"), filename, parent)
def __parseDirectoryDir(value, filename, parent):
value = __check(value, filename, "dir")
if value:
parent.DirectoryDirs.append(value)
def __parseDefaultDirectoryDir(filename, parent):
for dir in reversed(xdg_data_dirs):
__parseDirectoryDir(os.path.join(dir, "desktop-directories"), filename, parent)
# Merge Stuff
def __parseMergeFile(value, child, filename, parent):
if child.getAttribute("type") == "parent":
for dir in xdg_config_dirs:
rel_file = filename.replace(dir, "").strip("/")
if rel_file != filename:
for p in xdg_config_dirs:
if dir == p:
continue
if os.path.isfile(os.path.join(p,rel_file)):
__mergeFile(os.path.join(p,rel_file),child,parent)
break
else:
value = __check(value, filename, "file")
if value:
__mergeFile(value, child, parent)
def __parseMergeDir(value, child, filename, parent):
value = __check(value, filename, "dir")
if value:
for item in os.listdir(value):
try:
if os.path.splitext(item)[1] == ".menu":
__mergeFile(os.path.join(value, item), child, parent)
except UnicodeDecodeError:
continue
def __parseDefaultMergeDirs(child, filename, parent):
basename = os.path.splitext(os.path.basename(filename))[0]
for dir in reversed(xdg_config_dirs):
__parseMergeDir(os.path.join(dir, "menus", basename + "-merged"), child, filename, parent)
def __mergeFile(filename, child, parent):
# check for infinite loops
if filename in tmp["mergeFiles"]:
if debug:
raise ParsingError('Infinite MergeFile loop detected', filename)
else:
return
tmp["mergeFiles"].append(filename)
# load file
try:
doc = xml.dom.minidom.parse(filename)
except IOError:
if debug:
raise ParsingError('File not found', filename)
else:
return
except xml.parsers.expat.ExpatError:
if debug:
raise ParsingError('Not a valid .menu file', filename)
else:
return
# append file
for child in doc.childNodes:
if child.nodeType == ELEMENT_NODE:
__parse(child,filename,parent)
break
# Legacy Dir Stuff
def __parseLegacyDir(dir, prefix, filename, parent):
m = __mergeLegacyDir(dir,prefix,filename,parent)
if m:
parent += m
def __mergeLegacyDir(dir, prefix, filename, parent):
dir = __check(dir,filename,"dir")
if dir and dir not in tmp["DirectoryDirs"]:
tmp["DirectoryDirs"].append(dir)
m = Menu()
m.AppDirs.append(dir)
m.DirectoryDirs.append(dir)
m.Name = os.path.basename(dir)
m.NotInXml = True
for item in os.listdir(dir):
try:
if item == ".directory":
m.Directories.append(item)
elif os.path.isdir(os.path.join(dir,item)):
m.addSubmenu(__mergeLegacyDir(os.path.join(dir,item), prefix, filename, parent))
except UnicodeDecodeError:
continue
tmp["cache"].addMenuEntries([dir],prefix, True)
menuentries = tmp["cache"].getMenuEntries([dir], False)
for menuentry in menuentries:
categories = menuentry.Categories
if len(categories) == 0:
r = Rule("Include")
r.parseFilename(menuentry.DesktopFileID)
r.compile()
m.Rules.append(r)
if not dir in parent.AppDirs:
categories.append("Legacy")
menuentry.Categories = categories
return m
def __parseKDELegacyDirs(filename, parent):
f=os.popen3("kde-config --path apps")
output = f[1].readlines()
try:
for dir in output[0].split(":"):
__parseLegacyDir(dir,"kde", filename, parent)
except IndexError:
pass
# remove duplicate entries from a list
def __removeDuplicates(list):
set = {}
list.reverse()
list = [set.setdefault(e,e) for e in list if e not in set]
list.reverse()
return list
# Finally generate the menu
def __genmenuNotOnlyAllocated(menu):
for submenu in menu.Submenus:
__genmenuNotOnlyAllocated(submenu)
if menu.OnlyUnallocated == False:
tmp["cache"].addMenuEntries(menu.AppDirs)
menuentries = []
for rule in menu.Rules:
menuentries = rule.do(tmp["cache"].getMenuEntries(menu.AppDirs), rule.Type, 1)
for menuentry in menuentries:
if menuentry.Add == True:
menuentry.Parents.append(menu)
menuentry.Add = False
menuentry.Allocated = True
menu.MenuEntries.append(menuentry)
def __genmenuOnlyAllocated(menu):
for submenu in menu.Submenus:
__genmenuOnlyAllocated(submenu)
if menu.OnlyUnallocated == True:
tmp["cache"].addMenuEntries(menu.AppDirs)
menuentries = []
for rule in menu.Rules:
menuentries = rule.do(tmp["cache"].getMenuEntries(menu.AppDirs), rule.Type, 2)
for menuentry in menuentries:
if menuentry.Add == True:
menuentry.Parents.append(menu)
# menuentry.Add = False
# menuentry.Allocated = True
menu.MenuEntries.append(menuentry)
# And sorting ...
def sort(menu):
menu.Entries = []
menu.Visible = 0
for submenu in menu.Submenus:
sort(submenu)
tmp_s = []
tmp_e = []
for order in menu.Layout.order:
if order[0] == "Filename":
tmp_e.append(order[1])
elif order[0] == "Menuname":
tmp_s.append(order[1])
for order in menu.Layout.order:
if order[0] == "Separator":
separator = Separator(menu)
if len(menu.Entries) > 0 and isinstance(menu.Entries[-1], Separator):
separator.Show = False
menu.Entries.append(separator)
elif order[0] == "Filename":
menuentry = menu.getMenuEntry(order[1])
if menuentry:
menu.Entries.append(menuentry)
elif order[0] == "Menuname":
submenu = menu.getMenu(order[1])
if submenu:
__parse_inline(submenu, menu)
elif order[0] == "Merge":
if order[1] == "files" or order[1] == "all":
menu.MenuEntries.sort()
for menuentry in menu.MenuEntries:
if menuentry not in tmp_e:
menu.Entries.append(menuentry)
elif order[1] == "menus" or order[1] == "all":
menu.Submenus.sort()
for submenu in menu.Submenus:
if submenu.Name not in tmp_s:
__parse_inline(submenu, menu)
# getHidden / NoDisplay / OnlyShowIn / NotOnlyShowIn / Deleted / NoExec
for entry in menu.Entries:
entry.Show = True
menu.Visible += 1
if isinstance(entry, Menu):
if entry.Deleted == True:
entry.Show = "Deleted"
menu.Visible -= 1
elif isinstance(entry.Directory, MenuEntry):
if entry.Directory.DesktopEntry.getNoDisplay() == True:
entry.Show = "NoDisplay"
menu.Visible -= 1
elif entry.Directory.DesktopEntry.getHidden() == True:
entry.Show = "Hidden"
menu.Visible -= 1
elif isinstance(entry, MenuEntry):
if entry.DesktopEntry.getNoDisplay() == True:
entry.Show = "NoDisplay"
menu.Visible -= 1
elif entry.DesktopEntry.getHidden() == True:
entry.Show = "Hidden"
menu.Visible -= 1
elif entry.DesktopEntry.getTryExec() and not __try_exec(entry.DesktopEntry.getTryExec()):
entry.Show = "NoExec"
menu.Visible -= 1
elif xdg.Config.windowmanager:
if ( entry.DesktopEntry.getOnlyShowIn() != [] and xdg.Config.windowmanager not in entry.DesktopEntry.getOnlyShowIn() ) \
or xdg.Config.windowmanager in entry.DesktopEntry.getNotShowIn():
entry.Show = "NotShowIn"
menu.Visible -= 1
elif isinstance(entry,Separator):
menu.Visible -= 1
# remove separators at the beginning and at the end
if len(menu.Entries) > 0:
if isinstance(menu.Entries[0], Separator):
menu.Entries[0].Show = False
if len(menu.Entries) > 1:
if isinstance(menu.Entries[-1], Separator):
menu.Entries[-1].Show = False
# show_empty tag
for entry in menu.Entries:
if isinstance(entry,Menu) and entry.Layout.show_empty == "false" and entry.Visible == 0:
entry.Show = "Empty"
menu.Visible -= 1
if entry.NotInXml == True:
menu.Entries.remove(entry)
def __try_exec(executable):
paths = os.environ['PATH'].split(os.pathsep)
if not os.path.isfile(executable):
for p in paths:
f = os.path.join(p, executable)
if os.path.isfile(f):
if os.access(f, os.X_OK):
return True
else:
if os.access(executable, os.X_OK):
return True
return False
# inline tags
def __parse_inline(submenu, menu):
if submenu.Layout.inline == "true":
if len(submenu.Entries) == 1 and submenu.Layout.inline_alias == "true":
menuentry = submenu.Entries[0]
menuentry.DesktopEntry.set("Name", submenu.getName(), locale = True)
menuentry.DesktopEntry.set("GenericName", submenu.getGenericName(), locale = True)
menuentry.DesktopEntry.set("Comment", submenu.getComment(), locale = True)
menu.Entries.append(menuentry)
elif len(submenu.Entries) <= submenu.Layout.inline_limit or submenu.Layout.inline_limit == 0:
if submenu.Layout.inline_header == "true":
header = Header(submenu.getName(), submenu.getGenericName(), submenu.getComment())
menu.Entries.append(header)
for entry in submenu.Entries:
menu.Entries.append(entry)
else:
menu.Entries.append(submenu)
else:
menu.Entries.append(submenu)
class MenuEntryCache:
"Class to cache Desktop Entries"
def __init__(self):
self.cacheEntries = {}
self.cacheEntries['legacy'] = []
self.cache = {}
def addMenuEntries(self, dirs, prefix="", legacy=False):
for dir in dirs:
if not self.cacheEntries.has_key(dir):
self.cacheEntries[dir] = []
self.__addFiles(dir, "", prefix, legacy)
def __addFiles(self, dir, subdir, prefix, legacy):
for item in os.listdir(os.path.join(dir,subdir)):
if os.path.splitext(item)[1] == ".desktop":
try:
menuentry = MenuEntry(os.path.join(subdir,item), dir, prefix)
except ParsingError:
continue
self.cacheEntries[dir].append(menuentry)
if legacy == True:
self.cacheEntries['legacy'].append(menuentry)
elif os.path.isdir(os.path.join(dir,subdir,item)) and legacy == False:
self.__addFiles(dir, os.path.join(subdir,item), prefix, legacy)
def getMenuEntries(self, dirs, legacy=True):
list = []
ids = []
# handle legacy items
appdirs = dirs[:]
if legacy == True:
appdirs.append("legacy")
# cache the results again
key = "".join(appdirs)
try:
return self.cache[key]
except KeyError:
pass
for dir in appdirs:
for menuentry in self.cacheEntries[dir]:
try:
if menuentry.DesktopFileID not in ids:
ids.append(menuentry.DesktopFileID)
list.append(menuentry)
elif menuentry.getType() == "System":
# FIXME: This is only 99% correct, but still...
i = list.index(menuentry)
e = list[i]
if e.getType() == "User":
e.Original = menuentry
except UnicodeDecodeError:
continue
self.cache[key] = list
return list
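# Hedged usage sketch (editor addition): recursively print the visible
# entries of a parsed menu tree, e.g. _print_menu_tree(parse()).
# Assumes a system applications.menu file is discoverable.
def _print_menu_tree(menu, indent=0):
    for entry in menu.getEntries():
        print(" " * indent + str(entry))
        if isinstance(entry, Menu):
            _print_menu_tree(entry, indent + 2)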
|
[
"os.path.abspath",
"os.path.isabs",
"os.path.basename",
"os.path.isdir",
"os.popen3",
"os.path.dirname",
"os.path.exists",
"os.environ.get",
"os.path.isfile",
"os.path.splitext",
"os.access",
"os.path.join",
"os.listdir"
] |
[((22946, 22971), 'os.path.dirname', 'os.path.dirname', (['filename'], {}), '(filename)\n', (22961, 22971), False, 'import locale, os, xml.dom.minidom\n'), ((23061, 23083), 'os.path.abspath', 'os.path.abspath', (['value'], {}), '(value)\n', (23076, 23083), False, 'import locale, os, xml.dom.minidom\n'), ((27493, 27528), 'os.popen3', 'os.popen3', (['"""kde-config --path apps"""'], {}), "('kde-config --path apps')\n", (27502, 27528), False, 'import locale, os, xml.dom.minidom\n'), ((13891, 13923), 'os.path.join', 'os.path.join', (['dir', 'self.Filename'], {}), '(dir, self.Filename)\n', (13903, 13923), False, 'import locale, os, xml.dom.minidom\n'), ((14800, 14836), 'os.path.join', 'os.path.join', (['dir', '"""menus"""', 'filename'], {}), "(dir, 'menus', filename)\n", (14812, 14836), False, 'import locale, os, xml.dom.minidom\n'), ((22984, 23004), 'os.path.isabs', 'os.path.isabs', (['value'], {}), '(value)\n', (22997, 23004), False, 'import locale, os, xml.dom.minidom\n'), ((23022, 23047), 'os.path.join', 'os.path.join', (['path', 'value'], {}), '(path, value)\n', (23034, 23047), False, 'import locale, os, xml.dom.minidom\n'), ((23110, 23131), 'os.path.exists', 'os.path.exists', (['value'], {}), '(value)\n', (23124, 23131), False, 'import locale, os, xml.dom.minidom\n'), ((23136, 23156), 'os.path.isdir', 'os.path.isdir', (['value'], {}), '(value)\n', (23149, 23156), False, 'import locale, os, xml.dom.minidom\n'), ((24747, 24764), 'os.listdir', 'os.listdir', (['value'], {}), '(value)\n', (24757, 24764), False, 'import locale, os, xml.dom.minidom\n'), ((26474, 26495), 'os.path.basename', 'os.path.basename', (['dir'], {}), '(dir)\n', (26490, 26495), False, 'import locale, os, xml.dom.minidom\n'), ((26543, 26558), 'os.listdir', 'os.listdir', (['dir'], {}), '(dir)\n', (26553, 26558), False, 'import locale, os, xml.dom.minidom\n'), ((32861, 32887), 'os.path.isfile', 'os.path.isfile', (['executable'], {}), '(executable)\n', (32875, 32887), False, 'import locale, os, xml.dom.minidom\n'), ((33086, 33116), 'os.access', 'os.access', (['executable', 'os.X_OK'], {}), '(executable, os.X_OK)\n', (33095, 33116), False, 'import locale, os, xml.dom.minidom\n'), ((3859, 3883), 'os.path.join', 'os.path.join', (['path', 'name'], {}), '(path, name)\n', (3871, 3883), False, 'import locale, os, xml.dom.minidom\n'), ((12037, 12064), 'os.path.join', 'os.path.join', (['dir', 'filename'], {}), '(dir, filename)\n', (12049, 12064), False, 'import locale, os, xml.dom.minidom\n'), ((13249, 13290), 'os.path.isabs', 'os.path.isabs', (['self.DesktopEntry.filename'], {}), '(self.DesktopEntry.filename)\n', (13262, 13290), False, 'import locale, os, xml.dom.minidom\n'), ((13744, 13778), 'os.path.join', 'os.path.join', (['path', '"""applications"""'], {}), "(path, 'applications')\n", (13756, 13778), False, 'import locale, os, xml.dom.minidom\n'), ((13811, 13852), 'os.path.join', 'os.path.join', (['path', '"""desktop-directories"""'], {}), "(path, 'desktop-directories')\n", (13823, 13852), False, 'import locale, os, xml.dom.minidom\n'), ((14850, 14868), 'os.path.isdir', 'os.path.isdir', (['dir'], {}), '(dir)\n', (14863, 14868), False, 'import locale, os, xml.dom.minidom\n'), ((14873, 14897), 'os.path.isfile', 'os.path.isfile', (['menuname'], {}), '(menuname)\n', (14887, 14897), False, 'import locale, os, xml.dom.minidom\n'), ((15008, 15031), 'os.path.isabs', 'os.path.isabs', (['filename'], {}), '(filename)\n', (15021, 15031), False, 'import locale, os, xml.dom.minidom\n'), ((15158, 15195), 'os.environ.get', 'os.environ.get', 
(['"""XDG_MENU_PREFIX"""', '""""""'], {}), "('XDG_MENU_PREFIX', '')\n", (15172, 15195), False, 'import locale, os, xml.dom.minidom\n'), ((23207, 23228), 'os.path.exists', 'os.path.exists', (['value'], {}), '(value)\n', (23221, 23228), False, 'import locale, os, xml.dom.minidom\n'), ((23233, 23254), 'os.path.isfile', 'os.path.isfile', (['value'], {}), '(value)\n', (23247, 23254), False, 'import locale, os, xml.dom.minidom\n'), ((23581, 23614), 'os.path.join', 'os.path.join', (['dir', '"""applications"""'], {}), "(dir, 'applications')\n", (23593, 23614), False, 'import locale, os, xml.dom.minidom\n'), ((23905, 23945), 'os.path.join', 'os.path.join', (['dir', '"""desktop-directories"""'], {}), "(dir, 'desktop-directories')\n", (23917, 23945), False, 'import locale, os, xml.dom.minidom\n'), ((25065, 25091), 'os.path.basename', 'os.path.basename', (['filename'], {}), '(filename)\n', (25081, 25091), False, 'import locale, os, xml.dom.minidom\n'), ((25162, 25210), 'os.path.join', 'os.path.join', (['dir', '"""menus"""', "(basename + '-merged')"], {}), "(dir, 'menus', basename + '-merged')\n", (25174, 25210), False, 'import locale, os, xml.dom.minidom\n'), ((32929, 32956), 'os.path.join', 'os.path.join', (['p', 'executable'], {}), '(p, executable)\n', (32941, 32956), False, 'import locale, os, xml.dom.minidom\n'), ((32972, 32989), 'os.path.isfile', 'os.path.isfile', (['f'], {}), '(f)\n', (32986, 32989), False, 'import locale, os, xml.dom.minidom\n'), ((34671, 34696), 'os.path.join', 'os.path.join', (['dir', 'subdir'], {}), '(dir, subdir)\n', (34683, 34696), False, 'import locale, os, xml.dom.minidom\n'), ((13185, 13215), 'os.path.join', 'os.path.join', (['prefix', 'filename'], {}), '(prefix, filename)\n', (13197, 13215), False, 'import locale, os, xml.dom.minidom\n'), ((15416, 15442), 'os.path.splitext', 'os.path.splitext', (['filename'], {}), '(filename)\n', (15432, 15442), False, 'import locale, os, xml.dom.minidom\n'), ((22301, 22329), 'os.path.join', 'os.path.join', (['dir', 'directory'], {}), '(dir, directory)\n', (22313, 22329), False, 'import locale, os, xml.dom.minidom\n'), ((33010, 33031), 'os.access', 'os.access', (['f', 'os.X_OK'], {}), '(f, os.X_OK)\n', (33019, 33031), False, 'import locale, os, xml.dom.minidom\n'), ((34713, 34735), 'os.path.splitext', 'os.path.splitext', (['item'], {}), '(item)\n', (34729, 34735), False, 'import locale, os, xml.dom.minidom\n'), ((24360, 24385), 'os.path.join', 'os.path.join', (['p', 'rel_file'], {}), '(p, rel_file)\n', (24372, 24385), False, 'import locale, os, xml.dom.minidom\n'), ((24802, 24824), 'os.path.splitext', 'os.path.splitext', (['item'], {}), '(item)\n', (24818, 24824), False, 'import locale, os, xml.dom.minidom\n'), ((24872, 24897), 'os.path.join', 'os.path.join', (['value', 'item'], {}), '(value, item)\n', (24884, 24897), False, 'import locale, os, xml.dom.minidom\n'), ((26700, 26723), 'os.path.join', 'os.path.join', (['dir', 'item'], {}), '(dir, item)\n', (26712, 26723), False, 'import locale, os, xml.dom.minidom\n'), ((34817, 34843), 'os.path.join', 'os.path.join', (['subdir', 'item'], {}), '(subdir, item)\n', (34829, 34843), False, 'import locale, os, xml.dom.minidom\n'), ((35113, 35144), 'os.path.join', 'os.path.join', (['dir', 'subdir', 'item'], {}), '(dir, subdir, item)\n', (35125, 35144), False, 'import locale, os, xml.dom.minidom\n'), ((35202, 35228), 'os.path.join', 'os.path.join', (['subdir', 'item'], {}), '(subdir, item)\n', (35214, 35228), False, 'import locale, os, xml.dom.minidom\n'), ((24423, 24448), 'os.path.join', 
'os.path.join', (['p', 'rel_file'], {}), '(p, rel_file)\n', (24435, 24448), False, 'import locale, os, xml.dom.minidom\n'), ((26775, 26798), 'os.path.join', 'os.path.join', (['dir', 'item'], {}), '(dir, item)\n', (26787, 26798), False, 'import locale, os, xml.dom.minidom\n')]
|
import sapien.core as sapien
import mplib
import numpy as np
from sapien.utils.viewer import Viewer
class PlanningDemo():
def __init__(self):
self.engine = sapien.Engine()
self.renderer = sapien.VulkanRenderer()
self.engine.set_renderer(self.renderer)
scene_config = sapien.SceneConfig()
self.scene = self.engine.create_scene(scene_config)
self.scene.set_timestep(1 / 240.0)
self.scene.add_ground(-0.8)
physical_material = self.scene.create_physical_material(1, 1, 0.0)
self.scene.default_physical_material = physical_material
self.rscene = self.scene.get_renderer_scene()
self.rscene.set_ambient_light([0.5, 0.5, 0.5])
self.rscene.add_directional_light([0, 1, -1], [0.5, 0.5, 0.5], shadow=True)
self.rscene.add_point_light([1, 2, 2], [1, 1, 1], shadow=True)
self.rscene.add_point_light([1, -2, 2], [1, 1, 1], shadow=True)
self.rscene.add_point_light([-1, 0, 1], [1, 1, 1], shadow=True)
self.viewer = Viewer(self.renderer)
self.viewer.set_scene(self.scene)
self.viewer.set_camera_xyz(x=1.2, y=0.25, z=0.4)
self.viewer.set_camera_rpy(r=0, p=-0.4, y=2.7)
# Robot
# Load URDF
loader: sapien.URDFLoader = self.scene.create_urdf_loader()
loader.fix_root_link = True
self.robot: sapien.Articulation = loader.load("./panda/panda.urdf")
self.robot.set_root_pose(sapien.Pose([0, 0, 0], [1, 0, 0, 0]))
# Set initial joint positions
init_qpos = [0, 0.19634954084936207, 0.0, -2.617993877991494, 0.0, 2.941592653589793, 0.7853981633974483, 0, 0]
self.robot.set_qpos(init_qpos)
self.active_joints = self.robot.get_active_joints()
for joint in self.active_joints:
joint.set_drive_property(stiffness=1000, damping=200)
# table top
builder = self.scene.create_actor_builder()
builder.add_box_collision(half_size=[0.4, 0.4, 0.025])
builder.add_box_visual(half_size=[0.4, 0.4, 0.025])
self.table = builder.build_kinematic(name='table')
self.table.set_pose(sapien.Pose([0.56, 0, - 0.025]))
# boxes
builder = self.scene.create_actor_builder()
builder.add_box_collision(half_size=[0.02, 0.02, 0.06])
builder.add_box_visual(half_size=[0.02, 0.02, 0.06], color=[1, 0, 0])
self.red_cube = builder.build(name='red_cube')
self.red_cube.set_pose(sapien.Pose([0.7, 0, 0.06]))
builder = self.scene.create_actor_builder()
builder.add_box_collision(half_size=[0.04, 0.04, 0.005])
builder.add_box_visual(half_size=[0.04, 0.04, 0.005], color=[0, 1, 0])
self.green_cube = builder.build(name='green_cube')
self.green_cube.set_pose(sapien.Pose([0.4, 0.3, 0.005]))
builder = self.scene.create_actor_builder()
builder.add_box_collision(half_size=[0.05, 0.2, 0.1])
builder.add_box_visual(half_size=[0.05, 0.2, 0.1], color=[0, 0, 1])
self.blue_cube = builder.build(name='blue_cube')
self.blue_cube.set_pose(sapien.Pose([0.55, 0, 0.1]))
self.setup_planner()
def setup_planner(self):
link_names = [link.get_name() for link in self.robot.get_links()]
joint_names = [joint.get_name() for joint in self.robot.get_active_joints()]
self.planner = mplib.Planner(
urdf="./panda/panda.urdf",
srdf="./panda/panda.srdf",
user_link_names=link_names,
user_joint_names=joint_names,
move_group="panda_hand",
joint_vel_limits=np.ones(7),
joint_acc_limits=np.ones(7))
def follow_path(self, result):
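        # Replay the planned trajectory: compensate gravity/Coriolis with
        # passive-force torques, then drive each joint to the next waypoint.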
n_step = result['position'].shape[0]
for i in range(n_step):
qf = self.robot.compute_passive_force(
gravity=True,
coriolis_and_centrifugal=True)
self.robot.set_qf(qf)
for j in range(7):
self.active_joints[j].set_drive_target(result['position'][i][j])
self.active_joints[j].set_drive_velocity_target(result['velocity'][i][j])
self.scene.step()
if i % 4 == 0:
self.scene.update_render()
self.viewer.render()
def open_gripper(self):
for joint in self.active_joints[-2:]:
joint.set_drive_target(0.4)
for i in range(100):
qf = self.robot.compute_passive_force(
gravity=True,
coriolis_and_centrifugal=True)
self.robot.set_qf(qf)
self.scene.step()
if i % 4 == 0:
self.scene.update_render()
self.viewer.render()
def close_gripper(self):
for joint in self.active_joints[-2:]:
joint.set_drive_target(0)
for i in range(100):
qf = self.robot.compute_passive_force(
gravity=True,
coriolis_and_centrifugal=True)
self.robot.set_qf(qf)
self.scene.step()
if i % 4 == 0:
self.scene.update_render()
self.viewer.render()
def add_point_cloud(self):
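        # Feed the planner an obstacle point cloud by sampling the surface of
        # a box matching blue_cube's dimensions and position.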
import trimesh
box = trimesh.creation.box([0.1, 0.4, 0.2])
points, _ = trimesh.sample.sample_surface(box, 1000)
points += [0.55, 0, 0.1]
self.planner.update_point_cloud(points)
return
def move_to_pose(self, pose):
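        # Try screw-motion interpolation first; fall back to mplib's
        # sampling-based planner if the screw path fails (e.g. collisions).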
result = self.planner.plan_screw(pose, self.robot.get_qpos(), time_step=1/250,
use_point_cloud=self.use_point_cloud, use_attach=self.use_attach)
if result['status'] != "Success":
result = self.planner.plan(pose, self.robot.get_qpos(), time_step=1/250,
use_point_cloud=self.use_point_cloud, use_attach=self.use_attach)
if result['status'] != "Success":
print(result['status'])
return -1
self.follow_path(result)
return 0
def demo(self, with_screw = True, use_point_cloud = True, use_attach = True):
pickup_pose = [0.7, 0, 0.12, 0, 1, 0, 0]
delivery_pose = [0.4, 0.3, 0.13, 0, 1, 0, 0]
self.use_point_cloud = use_point_cloud
if self.use_point_cloud:
self.add_point_cloud()
self.use_attach = False
pickup_pose[2] += 0.2
self.move_to_pose(pickup_pose)
self.open_gripper()
pickup_pose[2] -= 0.12
self.move_to_pose(pickup_pose)
self.close_gripper()
if use_attach:
self.use_attach = True
self.planner.update_attached_box([0.04, 0.04, 0.12], [0, 0, 0.14, 1, 0, 0, 0])
pickup_pose[2] += 0.12
self.move_to_pose(pickup_pose)
delivery_pose[2] += 0.2
self.move_to_pose(delivery_pose)
delivery_pose[2] -= 0.12
self.move_to_pose(delivery_pose)
self.open_gripper()
delivery_pose[2] += 0.12
self.move_to_pose(delivery_pose)
if __name__ == '__main__':
demo = PlanningDemo()
demo.demo()
|
[
"sapien.core.SceneConfig",
"trimesh.sample.sample_surface",
"numpy.ones",
"sapien.core.Pose",
"trimesh.creation.box",
"sapien.utils.viewer.Viewer",
"sapien.core.Engine",
"sapien.core.VulkanRenderer"
] |
[((169, 184), 'sapien.core.Engine', 'sapien.Engine', ([], {}), '()\n', (182, 184), True, 'import sapien.core as sapien\n'), ((209, 232), 'sapien.core.VulkanRenderer', 'sapien.VulkanRenderer', ([], {}), '()\n', (230, 232), True, 'import sapien.core as sapien\n'), ((305, 325), 'sapien.core.SceneConfig', 'sapien.SceneConfig', ([], {}), '()\n', (323, 325), True, 'import sapien.core as sapien\n'), ((1037, 1058), 'sapien.utils.viewer.Viewer', 'Viewer', (['self.renderer'], {}), '(self.renderer)\n', (1043, 1058), False, 'from sapien.utils.viewer import Viewer\n'), ((5263, 5300), 'trimesh.creation.box', 'trimesh.creation.box', (['[0.1, 0.4, 0.2]'], {}), '([0.1, 0.4, 0.2])\n', (5283, 5300), False, 'import trimesh\n'), ((5321, 5361), 'trimesh.sample.sample_surface', 'trimesh.sample.sample_surface', (['box', '(1000)'], {}), '(box, 1000)\n', (5350, 5361), False, 'import trimesh\n'), ((1463, 1499), 'sapien.core.Pose', 'sapien.Pose', (['[0, 0, 0]', '[1, 0, 0, 0]'], {}), '([0, 0, 0], [1, 0, 0, 0])\n', (1474, 1499), True, 'import sapien.core as sapien\n'), ((2150, 2180), 'sapien.core.Pose', 'sapien.Pose', (['[0.56, 0, -0.025]'], {}), '([0.56, 0, -0.025])\n', (2161, 2180), True, 'import sapien.core as sapien\n'), ((2480, 2507), 'sapien.core.Pose', 'sapien.Pose', (['[0.7, 0, 0.06]'], {}), '([0.7, 0, 0.06])\n', (2491, 2507), True, 'import sapien.core as sapien\n'), ((2798, 2828), 'sapien.core.Pose', 'sapien.Pose', (['[0.4, 0.3, 0.005]'], {}), '([0.4, 0.3, 0.005])\n', (2809, 2828), True, 'import sapien.core as sapien\n'), ((3110, 3137), 'sapien.core.Pose', 'sapien.Pose', (['[0.55, 0, 0.1]'], {}), '([0.55, 0, 0.1])\n', (3121, 3137), True, 'import sapien.core as sapien\n'), ((3626, 3636), 'numpy.ones', 'np.ones', (['(7)'], {}), '(7)\n', (3633, 3636), True, 'import numpy as np\n'), ((3667, 3677), 'numpy.ones', 'np.ones', (['(7)'], {}), '(7)\n', (3674, 3677), True, 'import numpy as np\n')]
|
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
""" Generating random graphs"""
from cyberbattle.simulation.model import Identifiers, NodeID, CredentialID, PortName, FirewallConfiguration, FirewallRule, RulePermission
import numpy as np
import networkx as nx
from cyberbattle.simulation import model as m
import random
from typing import List, Optional, Tuple, DefaultDict
from collections import defaultdict
ENV_IDENTIFIERS = Identifiers(
properties=[
'breach_node'
],
ports=['SMB', 'HTTP', 'RDP'],
local_vulnerabilities=[
'ScanWindowsCredentialManagerForRDP',
'ScanWindowsExplorerRecentFiles',
'ScanWindowsCredentialManagerForSMB'
],
remote_vulnerabilities=[
'Traceroute'
]
)
def generate_random_traffic_network(
n_clients: int = 200,
n_servers={
"SMB": 1,
"HTTP": 1,
"RDP": 1,
},
seed: Optional[int] = 0,
tolerance: np.float32 = np.float32(1e-3),
alpha=np.array([(0.1, 0.3), (0.18, 0.09)], dtype=float),
beta=np.array([(100, 10), (10, 100)], dtype=float),
) -> nx.DiGraph:
"""
Randomly generate a directed multi-edge network graph representing
fictitious SMB, HTTP, and RDP traffic.
Arguments:
n_clients: number of workstation nodes that can initiate sessions with server nodes
        n_servers: dictionary indicating the number of server nodes listening on each protocol
        seed: seed for the pseudo-random number generator
tolerance: absolute tolerance for bounding the edge probabilities in [tolerance, 1-tolerance]
        alpha: Beta distribution parameters alpha such that E(edge prob) = alpha / (alpha + beta)
        beta: Beta distribution parameters beta such that E(edge prob) = alpha / (alpha + beta)
Returns:
        (nx.DiGraph): the randomly generated network from the hierarchical block model
"""
edges_labels = defaultdict(set) # set backed multidict
for protocol in list(n_servers.keys()):
sizes = [n_clients, n_servers[protocol]]
# sample edge probabilities from a beta distribution
np.random.seed(seed)
probs: np.ndarray = np.random.beta(a=alpha, b=beta, size=(2, 2))
# scale by edge type
if protocol == "SMB":
probs = 3 * probs
if protocol == "RDP":
probs = 4 * probs
# don't allow probs too close to zero or one
probs = np.clip(probs, a_min=tolerance, a_max=np.float32(1.0 - tolerance))
# sample edges using block models given edge probabilities
di_graph_for_protocol = nx.stochastic_block_model(
sizes=sizes, p=probs, directed=True, seed=seed)
for edge in di_graph_for_protocol.edges:
edges_labels[edge].add(protocol)
digraph = nx.DiGraph()
for (u, v), port in list(edges_labels.items()):
digraph.add_edge(u, v, protocol=port)
return digraph
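# Hedged usage sketch (editor addition), kept as comments:
#   digraph = generate_random_traffic_network(n_clients=10, seed=42)
#   print(digraph.number_of_nodes(), digraph.number_of_edges())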
def cyberbattle_model_from_traffic_graph(
traffic_graph: nx.DiGraph,
cached_smb_password_probability=0.75,
cached_rdp_password_probability=0.8,
cached_accessed_network_shares_probability=0.6,
cached_password_has_changed_probability=0.1,
traceroute_discovery_probability=0.5,
probability_two_nodes_use_same_password_to_access_given_resource=0.8
) -> nx.DiGraph:
"""Generate a random CyberBattle network model from a specified traffic (directed multi) graph.
The input graph can for instance be generated with `generate_random_traffic_network`.
Each edge of the input graph indicates that a communication took place
between the two nodes with the protocol specified in the edge label.
Returns a CyberBattle network with the same nodes and implanted vulnerabilities
    to be used to instantiate a CyberBattleSim gym.
Arguments:
cached_smb_password_probability, cached_rdp_password_probability:
probability that a password used for authenticated traffic was cached by the OS for SMB and RDP
cached_accessed_network_shares_probability:
probability that a network share accessed by the system was cached by the OS
cached_password_has_changed_probability:
probability that a given password cached on a node has been rotated on the target node
            (typically low, as people tend to change their passwords infrequently)
probability_two_nodes_use_same_password_to_access_given_resource:
as the variable name says
traceroute_discovery_probability:
            probability that a target node of an SMB/RDP connection gets exposed by a traceroute attack
"""
# convert node IDs to string
graph = nx.relabel_nodes(traffic_graph, {i: str(i) for i in traffic_graph.nodes})
password_counter: int = 0
def generate_password() -> CredentialID:
nonlocal password_counter
password_counter = password_counter + 1
return f'unique_pwd{password_counter}'
def traffic_targets(source_node: NodeID, protocol: str) -> List[NodeID]:
neighbors = [t for (s, t) in graph.edges()
if s == source_node and protocol in graph.edges[(s, t)]['protocol']]
return neighbors
# Map (node, port name) -> assigned pwd
assigned_passwords: DefaultDict[Tuple[NodeID, PortName],
List[CredentialID]] = defaultdict(list)
def assign_new_valid_password(node: NodeID, port: PortName) -> CredentialID:
pwd = generate_password()
assigned_passwords[node, port].append(pwd)
return pwd
def reuse_valid_password(node: NodeID, port: PortName) -> CredentialID:
"""Reuse a password already assigned to that node an port, if none is already
assigned create and assign a new valid password"""
if (node, port) not in assigned_passwords:
return assign_new_valid_password(node, port)
        # reuse any of the existing assigned valid passwords for that node/port
return random.choice(assigned_passwords[node, port])
def create_cached_credential(node: NodeID, port: PortName) -> CredentialID:
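        # A cached credential may be stale (the password was rotated since it
        # was cached); otherwise it is valid, and possibly shared across nodes.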
if random.random() < cached_password_has_changed_probability:
# generate a new invalid password
return generate_password()
else:
if random.random() < probability_two_nodes_use_same_password_to_access_given_resource:
return reuse_valid_password(node, port)
else:
return assign_new_valid_password(node, port)
def add_leak_neighbors_vulnerability(
node_id: m.NodeID,
            library: Optional[m.VulnerabilityLibrary] = None) -> m.VulnerabilityLibrary:
        """Create random vulnerabilities
        that reveal immediate traffic neighbors from a given node"""
        if library is None:
            library = {}
rdp_neighbors = traffic_targets(node_id, 'RDP')
if len(rdp_neighbors) > 0:
library['ScanWindowsCredentialManagerForRDP'] = m.VulnerabilityInfo(
description="Look for RDP credentials in the Windows Credential Manager",
type=m.VulnerabilityType.LOCAL,
outcome=m.LeakedCredentials(credentials=[
m.CachedCredential(node=target_node, port='RDP',
credential=create_cached_credential(target_node, 'RDP'))
for target_node in rdp_neighbors
if random.random() < cached_rdp_password_probability
]),
reward_string="Discovered creds in the Windows Credential Manager",
cost=2.0
)
smb_neighbors = traffic_targets(node_id, 'SMB')
if len(smb_neighbors) > 0:
library['ScanWindowsExplorerRecentFiles'] = m.VulnerabilityInfo(
description="Look for network shares in the Windows Explorer Recent files",
type=m.VulnerabilityType.LOCAL,
outcome=m.LeakedNodesId(
[target_node
for target_node in smb_neighbors
if random.random() < cached_accessed_network_shares_probability
]
),
reward_string="Windows Explorer Recent Files revealed network shares",
cost=1.0
)
library['ScanWindowsCredentialManagerForSMB'] = m.VulnerabilityInfo(
description="Look for network credentials in the Windows Credential Manager",
type=m.VulnerabilityType.LOCAL,
outcome=m.LeakedCredentials(credentials=[
m.CachedCredential(node=target_node, port='SMB',
credential=create_cached_credential(target_node, 'SMB'))
for target_node in smb_neighbors
if random.random() < cached_smb_password_probability
]),
reward_string="Discovered SMB creds in the Windows Credential Manager",
cost=2.0
)
if len(smb_neighbors) > 0 and len(rdp_neighbors) > 0:
library['Traceroute'] = m.VulnerabilityInfo(
description="Attempt to discvover network nodes using Traceroute",
type=m.VulnerabilityType.REMOTE,
outcome=m.LeakedNodesId(
[target_node
for target_node in smb_neighbors or rdp_neighbors
if random.random() < traceroute_discovery_probability
]
),
reward_string="Discovered new network nodes via traceroute",
cost=5.0
)
return library
def create_vulnerabilities_from_traffic_data(node_id: m.NodeID):
return add_leak_neighbors_vulnerability(node_id=node_id)
firewall_conf = FirewallConfiguration(
[FirewallRule("RDP", RulePermission.ALLOW), FirewallRule("SMB", RulePermission.ALLOW)],
[FirewallRule("RDP", RulePermission.ALLOW), FirewallRule("SMB", RulePermission.ALLOW)])
# Pick a random node as the agent entry node
entry_node_index = random.randrange(len(graph.nodes))
entry_node_id, entry_node_data = list(graph.nodes(data=True))[entry_node_index]
graph.nodes[entry_node_id].clear()
graph.nodes[entry_node_id].update(
{'data': m.NodeInfo(services=[],
value=0,
properties=["breach_node"],
vulnerabilities=create_vulnerabilities_from_traffic_data(entry_node_id),
agent_installed=True,
firewall=firewall_conf,
reimagable=False)})
def create_node_data(node_id: m.NodeID):
return m.NodeInfo(
services=[m.ListeningService(name=port, allowedCredentials=assigned_passwords[(target_node, port)])
for (target_node, port) in assigned_passwords.keys()
if target_node == node_id
],
value=random.randint(0, 100),
vulnerabilities=create_vulnerabilities_from_traffic_data(node_id),
agent_installed=False,
firewall=firewall_conf
)
for node in list(graph.nodes):
if node != entry_node_id:
graph.nodes[node].clear()
graph.nodes[node].update({'data': create_node_data(node)})
return graph
def new_environment(n_servers_per_protocol: int):
"""Create a new simulation environment based on
a randomly generated network topology.
NOTE: the probabilities and parameter values used
here for the statistical generative model
    were arbitrarily picked. We recommend exploring different values for those parameters.
"""
traffic = generate_random_traffic_network(seed=None,
n_clients=50,
n_servers={
"SMB": n_servers_per_protocol,
"HTTP": n_servers_per_protocol,
"RDP": n_servers_per_protocol,
},
alpha=np.array([(1, 1), (0.2, 0.5)]),
beta=np.array([(1000, 10), (10, 100)]))
network = cyberbattle_model_from_traffic_graph(
traffic,
cached_rdp_password_probability=0.8,
cached_smb_password_probability=0.7,
cached_accessed_network_shares_probability=0.8,
cached_password_has_changed_probability=0.01,
probability_two_nodes_use_same_password_to_access_given_resource=0.9)
return m.Environment(network=network,
vulnerability_library=dict([]),
identifiers=ENV_IDENTIFIERS)
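# Hedged usage sketch (editor addition): build a small environment and
# inspect its nodes (assumes m.Environment exposes a `network` attribute).
if __name__ == '__main__':
    env = new_environment(n_servers_per_protocol=1)
    print(list(env.network.nodes))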
|
[
"cyberbattle.simulation.model.ListeningService",
"numpy.random.seed",
"random.randint",
"numpy.random.beta",
"numpy.float32",
"random.choice",
"cyberbattle.simulation.model.Identifiers",
"cyberbattle.simulation.model.FirewallRule",
"collections.defaultdict",
"networkx.stochastic_block_model",
"random.random",
"numpy.array",
"networkx.DiGraph"
] |
[((455, 709), 'cyberbattle.simulation.model.Identifiers', 'Identifiers', ([], {'properties': "['breach_node']", 'ports': "['SMB', 'HTTP', 'RDP']", 'local_vulnerabilities': "['ScanWindowsCredentialManagerForRDP', 'ScanWindowsExplorerRecentFiles',\n 'ScanWindowsCredentialManagerForSMB']", 'remote_vulnerabilities': "['Traceroute']"}), "(properties=['breach_node'], ports=['SMB', 'HTTP', 'RDP'],\n local_vulnerabilities=['ScanWindowsCredentialManagerForRDP',\n 'ScanWindowsExplorerRecentFiles', 'ScanWindowsCredentialManagerForSMB'],\n remote_vulnerabilities=['Traceroute'])\n", (466, 709), False, 'from cyberbattle.simulation.model import Identifiers, NodeID, CredentialID, PortName, FirewallConfiguration, FirewallRule, RulePermission\n'), ((974, 991), 'numpy.float32', 'np.float32', (['(0.001)'], {}), '(0.001)\n', (984, 991), True, 'import numpy as np\n'), ((1002, 1051), 'numpy.array', 'np.array', (['[(0.1, 0.3), (0.18, 0.09)]'], {'dtype': 'float'}), '([(0.1, 0.3), (0.18, 0.09)], dtype=float)\n', (1010, 1051), True, 'import numpy as np\n'), ((1062, 1107), 'numpy.array', 'np.array', (['[(100, 10), (10, 100)]'], {'dtype': 'float'}), '([(100, 10), (10, 100)], dtype=float)\n', (1070, 1107), True, 'import numpy as np\n'), ((1937, 1953), 'collections.defaultdict', 'defaultdict', (['set'], {}), '(set)\n', (1948, 1953), False, 'from collections import defaultdict\n'), ((2819, 2831), 'networkx.DiGraph', 'nx.DiGraph', ([], {}), '()\n', (2829, 2831), True, 'import networkx as nx\n'), ((5340, 5357), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (5351, 5357), False, 'from collections import defaultdict\n'), ((2141, 2161), 'numpy.random.seed', 'np.random.seed', (['seed'], {}), '(seed)\n', (2155, 2161), True, 'import numpy as np\n'), ((2190, 2234), 'numpy.random.beta', 'np.random.beta', ([], {'a': 'alpha', 'b': 'beta', 'size': '(2, 2)'}), '(a=alpha, b=beta, size=(2, 2))\n', (2204, 2234), True, 'import numpy as np\n'), ((2622, 2695), 'networkx.stochastic_block_model', 'nx.stochastic_block_model', ([], {'sizes': 'sizes', 'p': 'probs', 'directed': '(True)', 'seed': 'seed'}), '(sizes=sizes, p=probs, directed=True, seed=seed)\n', (2647, 2695), True, 'import networkx as nx\n'), ((5969, 6014), 'random.choice', 'random.choice', (['assigned_passwords[node, port]'], {}), '(assigned_passwords[node, port])\n', (5982, 6014), False, 'import random\n'), ((6107, 6122), 'random.random', 'random.random', ([], {}), '()\n', (6120, 6122), False, 'import random\n'), ((9818, 9859), 'cyberbattle.simulation.model.FirewallRule', 'FirewallRule', (['"""RDP"""', 'RulePermission.ALLOW'], {}), "('RDP', RulePermission.ALLOW)\n", (9830, 9859), False, 'from cyberbattle.simulation.model import Identifiers, NodeID, CredentialID, PortName, FirewallConfiguration, FirewallRule, RulePermission\n'), ((9861, 9902), 'cyberbattle.simulation.model.FirewallRule', 'FirewallRule', (['"""SMB"""', 'RulePermission.ALLOW'], {}), "('SMB', RulePermission.ALLOW)\n", (9873, 9902), False, 'from cyberbattle.simulation.model import Identifiers, NodeID, CredentialID, PortName, FirewallConfiguration, FirewallRule, RulePermission\n'), ((9914, 9955), 'cyberbattle.simulation.model.FirewallRule', 'FirewallRule', (['"""RDP"""', 'RulePermission.ALLOW'], {}), "('RDP', RulePermission.ALLOW)\n", (9926, 9955), False, 'from cyberbattle.simulation.model import Identifiers, NodeID, CredentialID, PortName, FirewallConfiguration, FirewallRule, RulePermission\n'), ((9957, 9998), 'cyberbattle.simulation.model.FirewallRule', 'FirewallRule', (['"""SMB"""', 
'RulePermission.ALLOW'], {}), "('SMB', RulePermission.ALLOW)\n", (9969, 9998), False, 'from cyberbattle.simulation.model import Identifiers, NodeID, CredentialID, PortName, FirewallConfiguration, FirewallRule, RulePermission\n'), ((12253, 12283), 'numpy.array', 'np.array', (['[(1, 1), (0.2, 0.5)]'], {}), '([(1, 1), (0.2, 0.5)])\n', (12261, 12283), True, 'import numpy as np\n'), ((12336, 12369), 'numpy.array', 'np.array', (['[(1000, 10), (10, 100)]'], {}), '([(1000, 10), (10, 100)])\n', (12344, 12369), True, 'import numpy as np\n'), ((2493, 2520), 'numpy.float32', 'np.float32', (['(1.0 - tolerance)'], {}), '(1.0 - tolerance)\n', (2503, 2520), True, 'import numpy as np\n'), ((6280, 6295), 'random.random', 'random.random', ([], {}), '()\n', (6293, 6295), False, 'import random\n'), ((11007, 11029), 'random.randint', 'random.randint', (['(0)', '(100)'], {}), '(0, 100)\n', (11021, 11029), False, 'import random\n'), ((10751, 10843), 'cyberbattle.simulation.model.ListeningService', 'm.ListeningService', ([], {'name': 'port', 'allowedCredentials': 'assigned_passwords[target_node, port]'}), '(name=port, allowedCredentials=assigned_passwords[\n target_node, port])\n', (10769, 10843), True, 'from cyberbattle.simulation import model as m\n'), ((8027, 8042), 'random.random', 'random.random', ([], {}), '()\n', (8040, 8042), False, 'import random\n'), ((9397, 9412), 'random.random', 'random.random', ([], {}), '()\n', (9410, 9412), False, 'import random\n'), ((7372, 7387), 'random.random', 'random.random', ([], {}), '()\n', (7385, 7387), False, 'import random\n'), ((8779, 8794), 'random.random', 'random.random', ([], {}), '()\n', (8792, 8794), False, 'import random\n')]
|
import asyncio
import datetime, time
import os, sys
from AWSIoTPythonSDK.MQTTLib import AWSIoTMQTTClient
from uuid import uuid4
from aiocoap import *
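# Compares round-trip latency of an MQTT publish/subscribe echo via AWS IoT
# against a CoAP GET request to the host given in argv[1].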
class Times:
def timeNow(self):
return datetime.datetime.now()
def __init__(self):
self.sendMQTTTime = None
self.sendCoAPTime = None
def sendMQTT(self):
self.sendMQTTTime = self.timeNow()
def sendCoAP(self):
self.sendCoAPTime = self.timeNow()
def recvMQTT(self):
print(f"MQTT packet received in {float((self.timeNow()-self.sendMQTTTime).total_seconds())}s")
def recvCoAP(self):
print(f"CoAP packet received in {float((self.timeNow()-self.sendCoAPTime).total_seconds())}s")
timer = Times()
def customCallback(client, userdata, message):
timer.recvMQTT()
cwd = os.path.dirname(os.path.abspath(__file__))
qos = 0
host = "a3ccusvtjpdwda-ats.iot.eu-west-2.amazonaws.com"
rootCAPath = os.path.join(cwd,"certs","Amazon-root-CA-1.pem")
privateKeyPath = os.path.join(cwd,"certs","private.pem.key")
certificatePath = os.path.join(cwd,"certs","device.pem.crt")
port = 8883
clientId = "test-" + str(uuid4())
topic = "topic_1"
# Init AWSIoTMQTTClient
myAWSIoTMQTTClient = AWSIoTMQTTClient(clientId)
myAWSIoTMQTTClient.configureEndpoint(host, port)
myAWSIoTMQTTClient.configureCredentials(rootCAPath, privateKeyPath, certificatePath)
# AWSIoTMQTTClient connection configuration
myAWSIoTMQTTClient.configureAutoReconnectBackoffTime(1, 32, 20)
myAWSIoTMQTTClient.configureOfflinePublishQueueing(-1) # Infinite offline Publish queueing
myAWSIoTMQTTClient.configureDrainingFrequency(2) # Draining: 2 Hz
myAWSIoTMQTTClient.configureConnectDisconnectTimeout(10) # 10 sec
myAWSIoTMQTTClient.configureMQTTOperationTimeout(5) # 5 sec
# Connect and subscribe to AWS IoT
try:
myAWSIoTMQTTClient.connect(1000)
myAWSIoTMQTTClient.subscribe(topic, qos, customCallback)
except Exception:
print("Could not connect to AWS MQTT broker")
raise SystemExit
async def main():
protocol = await Context.create_client_context()
msg = "test"
request = Message(code=GET, uri=f'coap://{sys.argv[1]}:5683/time')
timer.sendCoAP()
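    # Note: the CoAP timer starts here, before the MQTT publish, and stops only
    # after the await below, so the two timed windows overlap.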
myAWSIoTMQTTClient.publish(topic, msg, qos)
timer.sendMQTT()
try:
response = await protocol.request(request).response
timer.recvCoAP()
except Exception as e:
print('Failed to fetch resource:')
print(e)
else:
print('Result: %s\n%r'%(response.code, response.payload))
if __name__ == "__main__":
asyncio.get_event_loop().run_until_complete(main())
time.sleep(2)
|
[
"AWSIoTPythonSDK.MQTTLib.AWSIoTMQTTClient",
"os.path.abspath",
"uuid.uuid4",
"asyncio.get_event_loop",
"datetime.datetime.now",
"time.sleep",
"os.path.join"
] |
[((976, 1026), 'os.path.join', 'os.path.join', (['cwd', '"""certs"""', '"""Amazon-root-CA-1.pem"""'], {}), "(cwd, 'certs', 'Amazon-root-CA-1.pem')\n", (988, 1026), False, 'import os, sys\n'), ((1042, 1087), 'os.path.join', 'os.path.join', (['cwd', '"""certs"""', '"""private.pem.key"""'], {}), "(cwd, 'certs', 'private.pem.key')\n", (1054, 1087), False, 'import os, sys\n'), ((1104, 1148), 'os.path.join', 'os.path.join', (['cwd', '"""certs"""', '"""device.pem.crt"""'], {}), "(cwd, 'certs', 'device.pem.crt')\n", (1116, 1148), False, 'import os, sys\n'), ((1283, 1309), 'AWSIoTPythonSDK.MQTTLib.AWSIoTMQTTClient', 'AWSIoTMQTTClient', (['clientId'], {}), '(clientId)\n', (1299, 1309), False, 'from AWSIoTPythonSDK.MQTTLib import AWSIoTMQTTClient\n'), ((871, 896), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (886, 896), False, 'import os, sys\n'), ((2675, 2688), 'time.sleep', 'time.sleep', (['(2)'], {}), '(2)\n', (2685, 2688), False, 'import datetime, time\n'), ((203, 226), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (224, 226), False, 'import datetime, time\n'), ((1184, 1191), 'uuid.uuid4', 'uuid4', ([], {}), '()\n', (1189, 1191), False, 'from uuid import uuid4\n'), ((2619, 2643), 'asyncio.get_event_loop', 'asyncio.get_event_loop', ([], {}), '()\n', (2641, 2643), False, 'import asyncio\n')]
|
from PyQt5 import QtCore, QtGui, QtWidgets
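# NOTE: this UI-definition code matches the style of output produced by
# Qt Designer / pyuic5 and is likely auto-generated.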
class Ui_MainApp(object):
def setupUi(self, MainApp):
MainApp.setObjectName("MainApp")
MainApp.resize(1017, 805)
self.centralwidget = QtWidgets.QWidget(MainApp)
self.centralwidget.setObjectName("centralwidget")
self.pushButton_register = QtWidgets.QPushButton(self.centralwidget)
self.pushButton_register.setGeometry(QtCore.QRect(40, 380, 131, 31))
self.pushButton_register.setObjectName("pushButton_register")
self.lineEdit_No = QtWidgets.QLineEdit(self.centralwidget)
self.lineEdit_No.setGeometry(QtCore.QRect(170, 60, 113, 22))
self.lineEdit_No.setObjectName("lineEdit_No")
self.label = QtWidgets.QLabel(self.centralwidget)
self.label.setGeometry(QtCore.QRect(20, 50, 161, 31))
font = QtGui.QFont()
font.setPointSize(10)
self.label.setFont(font)
self.label.setObjectName("label")
self.label_6 = QtWidgets.QLabel(self.centralwidget)
self.label_6.setGeometry(QtCore.QRect(560, 110, 191, 31))
font = QtGui.QFont()
font.setPointSize(10)
self.label_6.setFont(font)
self.label_6.setObjectName("label_6")
self.label_7 = QtWidgets.QLabel(self.centralwidget)
self.label_7.setGeometry(QtCore.QRect(560, 150, 191, 31))
font = QtGui.QFont()
font.setPointSize(10)
self.label_7.setFont(font)
self.label_7.setObjectName("label_7")
self.label_8 = QtWidgets.QLabel(self.centralwidget)
self.label_8.setGeometry(QtCore.QRect(560, 180, 191, 31))
font = QtGui.QFont()
font.setPointSize(10)
self.label_8.setFont(font)
self.label_8.setObjectName("label_8")
self.label_9 = QtWidgets.QLabel(self.centralwidget)
self.label_9.setGeometry(QtCore.QRect(560, 210, 191, 31))
font = QtGui.QFont()
font.setPointSize(10)
self.label_9.setFont(font)
self.label_9.setObjectName("label_9")
self.label_10 = QtWidgets.QLabel(self.centralwidget)
self.label_10.setGeometry(QtCore.QRect(560, 240, 191, 31))
font = QtGui.QFont()
font.setPointSize(10)
self.label_10.setFont(font)
self.label_10.setObjectName("label_10")
self.comboBox_type = QtWidgets.QComboBox(self.centralwidget)
self.comboBox_type.setGeometry(QtCore.QRect(760, 160, 121, 22))
self.comboBox_type.setObjectName("comboBox_type")
self.comboBox_type.addItem("")
self.comboBox_type.addItem("")
self.comboBox_type.addItem("")
self.comboBox_type.addItem("")
self.comboBox_type.addItem("")
self.comboBox_from = QtWidgets.QComboBox(self.centralwidget)
self.comboBox_from.setGeometry(QtCore.QRect(760, 190, 121, 22))
self.comboBox_from.setObjectName("comboBox_from")
self.comboBox_from.addItem("")
self.comboBox_from.addItem("")
self.comboBox_from.addItem("")
self.comboBox_from.addItem("")
self.comboBox_from.addItem("")
self.comboBox_from.addItem("")
self.comboBox_dest = QtWidgets.QComboBox(self.centralwidget)
self.comboBox_dest.setGeometry(QtCore.QRect(760, 220, 121, 22))
self.comboBox_dest.setObjectName("comboBox_dest")
self.comboBox_dest.addItem("")
self.comboBox_dest.addItem("")
self.comboBox_dest.addItem("")
self.comboBox_dest.addItem("")
self.comboBox_dest.addItem("")
self.comboBox_dest.addItem("")
self.comboBox_entdate = QtWidgets.QComboBox(self.centralwidget)
self.comboBox_entdate.setGeometry(QtCore.QRect(760, 250, 121, 22))
self.comboBox_entdate.setObjectName("comboBox_entdate")
self.comboBox_entdate.addItem("")
self.comboBox_entdate.addItem("")
self.comboBox_entdate.addItem("")
self.comboBox_entdate.addItem("")
self.comboBox_entdate.addItem("")
self.comboBox_entdate.addItem("")
self.comboBox_entdate.addItem("")
self.pushButton_delete = QtWidgets.QPushButton(self.centralwidget)
self.pushButton_delete.setGeometry(QtCore.QRect(180, 380, 131, 31))
self.pushButton_delete.setObjectName("pushButton_delete")
self.pushButton_quit = QtWidgets.QPushButton(self.centralwidget)
self.pushButton_quit.setGeometry(QtCore.QRect(320, 380, 131, 31))
self.pushButton_quit.setObjectName("pushButton_quit")
self.frame = QtWidgets.QFrame(self.centralwidget)
self.frame.setGeometry(QtCore.QRect(30, 120, 331, 161))
self.frame.setFrameShape(QtWidgets.QFrame.Panel)
self.frame.setFrameShadow(QtWidgets.QFrame.Plain)
self.frame.setObjectName("frame")
self.lineEdit_tcno = QtWidgets.QLineEdit(self.frame)
self.lineEdit_tcno.setGeometry(QtCore.QRect(180, 120, 113, 22))
self.lineEdit_tcno.setObjectName("lineEdit_tcno")
self.lineEdit_name = QtWidgets.QLineEdit(self.frame)
self.lineEdit_name.setGeometry(QtCore.QRect(180, 60, 113, 22))
self.lineEdit_name.setObjectName("lineEdit_name")
self.label_2 = QtWidgets.QLabel(self.frame)
self.label_2.setGeometry(QtCore.QRect(20, 10, 191, 31))
font = QtGui.QFont()
font.setPointSize(10)
self.label_2.setFont(font)
self.label_2.setObjectName("label_2")
self.lineEdit_surname = QtWidgets.QLineEdit(self.frame)
self.lineEdit_surname.setGeometry(QtCore.QRect(180, 90, 113, 22))
self.lineEdit_surname.setObjectName("lineEdit_surname")
self.label_3 = QtWidgets.QLabel(self.frame)
self.label_3.setGeometry(QtCore.QRect(20, 50, 191, 31))
font = QtGui.QFont()
font.setPointSize(10)
self.label_3.setFont(font)
self.label_3.setObjectName("label_3")
self.label_4 = QtWidgets.QLabel(self.frame)
self.label_4.setGeometry(QtCore.QRect(20, 80, 191, 31))
font = QtGui.QFont()
font.setPointSize(10)
self.label_4.setFont(font)
self.label_4.setObjectName("label_4")
self.label_5 = QtWidgets.QLabel(self.frame)
self.label_5.setGeometry(QtCore.QRect(20, 110, 191, 31))
font = QtGui.QFont()
font.setPointSize(10)
self.label_5.setFont(font)
self.label_5.setObjectName("label_5")
self.tableWidget = QtWidgets.QTableWidget(self.centralwidget)
self.tableWidget.setGeometry(QtCore.QRect(40, 460, 901, 281))
self.tableWidget.setObjectName("tableWidget")
self.tableWidget.setColumnCount(9)
self.tableWidget.setRowCount(6)
item = QtWidgets.QTableWidgetItem()
self.tableWidget.setVerticalHeaderItem(0, item)
item = QtWidgets.QTableWidgetItem()
self.tableWidget.setVerticalHeaderItem(1, item)
item = QtWidgets.QTableWidgetItem()
self.tableWidget.setVerticalHeaderItem(2, item)
item = QtWidgets.QTableWidgetItem()
self.tableWidget.setVerticalHeaderItem(3, item)
item = QtWidgets.QTableWidgetItem()
self.tableWidget.setVerticalHeaderItem(4, item)
item = QtWidgets.QTableWidgetItem()
self.tableWidget.setVerticalHeaderItem(5, item)
item = QtWidgets.QTableWidgetItem()
self.tableWidget.setHorizontalHeaderItem(0, item)
item = QtWidgets.QTableWidgetItem()
self.tableWidget.setHorizontalHeaderItem(1, item)
item = QtWidgets.QTableWidgetItem()
self.tableWidget.setHorizontalHeaderItem(2, item)
item = QtWidgets.QTableWidgetItem()
self.tableWidget.setHorizontalHeaderItem(3, item)
item = QtWidgets.QTableWidgetItem()
self.tableWidget.setHorizontalHeaderItem(4, item)
item = QtWidgets.QTableWidgetItem()
self.tableWidget.setHorizontalHeaderItem(5, item)
item = QtWidgets.QTableWidgetItem()
self.tableWidget.setHorizontalHeaderItem(6, item)
item = QtWidgets.QTableWidgetItem()
self.tableWidget.setHorizontalHeaderItem(7, item)
item = QtWidgets.QTableWidgetItem()
self.tableWidget.setHorizontalHeaderItem(8, item)
self.pushButton_4 = QtWidgets.QPushButton(self.centralwidget)
self.pushButton_4.setGeometry(QtCore.QRect(20, 10, 101, 31))
self.pushButton_4.setObjectName("pushButton_4")
MainApp.setCentralWidget(self.centralwidget)
self.menubar = QtWidgets.QMenuBar(MainApp)
self.menubar.setGeometry(QtCore.QRect(0, 0, 1017, 26))
self.menubar.setObjectName("menubar")
MainApp.setMenuBar(self.menubar)
self.statusbar = QtWidgets.QStatusBar(MainApp)
self.statusbar.setObjectName("statusbar")
MainApp.setStatusBar(self.statusbar)
self.retranslateUi(MainApp)
QtCore.QMetaObject.connectSlotsByName(MainApp)
def retranslateUi(self, MainApp):
_translate = QtCore.QCoreApplication.translate
MainApp.setWindowTitle(_translate("MainApp", "MainWindow"))
self.pushButton_register.setText(_translate("MainApp", "Register"))
self.label.setText(_translate("MainApp", "Track No:"))
self.label_6.setText(_translate("MainApp", "INFORMATION"))
self.label_7.setText(_translate("MainApp", "TYPE:"))
self.label_8.setText(_translate("MainApp", "START POINT:"))
self.label_9.setText(_translate("MainApp", "DESTINATION:"))
self.label_10.setText(_translate("MainApp", "ENTRY DATE:"))
self.comboBox_type.setItemText(0, _translate("MainApp", "-"))
self.comboBox_type.setItemText(1, _translate("MainApp", "Document"))
self.comboBox_type.setItemText(2, _translate("MainApp", "Package"))
self.comboBox_type.setItemText(3, _translate("MainApp", "Letter"))
self.comboBox_type.setItemText(4, _translate("MainApp", "Other"))
self.comboBox_from.setItemText(0, _translate("MainApp", "-"))
self.comboBox_from.setItemText(1, _translate("MainApp", "California"))
self.comboBox_from.setItemText(2, _translate("MainApp", "New York"))
self.comboBox_from.setItemText(3, _translate("MainApp", "Texas"))
self.comboBox_from.setItemText(4, _translate("MainApp", "Ohio"))
self.comboBox_from.setItemText(5, _translate("MainApp", "Florida"))
self.comboBox_dest.setItemText(0, _translate("MainApp", "-"))
self.comboBox_dest.setItemText(1, _translate("MainApp", "California"))
self.comboBox_dest.setItemText(2, _translate("MainApp", "New York"))
self.comboBox_dest.setItemText(3, _translate("MainApp", "Texas"))
self.comboBox_dest.setItemText(4, _translate("MainApp", "Ohio"))
self.comboBox_dest.setItemText(5, _translate("MainApp", "Florida"))
self.comboBox_entdate.setItemText(0, _translate("MainApp", "-"))
self.comboBox_entdate.setItemText(1, _translate("MainApp", "19.11.2020"))
self.comboBox_entdate.setItemText(2, _translate("MainApp", "20.11.2020"))
self.comboBox_entdate.setItemText(3, _translate("MainApp", "21.11.2020"))
self.comboBox_entdate.setItemText(4, _translate("MainApp", "22.11.2020"))
self.comboBox_entdate.setItemText(5, _translate("MainApp", "23.11.2020"))
self.comboBox_entdate.setItemText(6, _translate("MainApp", "24.11.2020"))
self.pushButton_delete.setText(_translate("MainApp", "Delete"))
self.pushButton_quit.setText(_translate("MainApp", "Exit"))
self.label_2.setText(_translate("MainApp", "Sender Information"))
self.label_3.setText(_translate("MainApp", "Name:"))
self.label_4.setText(_translate("MainApp", "Surname:"))
self.label_5.setText(_translate("MainApp", "Card No:"))
item = self.tableWidget.verticalHeaderItem(0)
item.setText(_translate("MainApp", "1"))
item = self.tableWidget.verticalHeaderItem(1)
item.setText(_translate("MainApp", "2"))
item = self.tableWidget.verticalHeaderItem(2)
item.setText(_translate("MainApp", "3"))
item = self.tableWidget.verticalHeaderItem(3)
item.setText(_translate("MainApp", "4"))
item = self.tableWidget.verticalHeaderItem(4)
item.setText(_translate("MainApp", "6"))
item = self.tableWidget.verticalHeaderItem(5)
item.setText(_translate("MainApp", "7"))
item = self.tableWidget.horizontalHeaderItem(0)
item.setText(_translate("MainApp", "ID"))
item = self.tableWidget.horizontalHeaderItem(1)
item.setText(_translate("MainApp", "No"))
item = self.tableWidget.horizontalHeaderItem(2)
item.setText(_translate("MainApp", "Name"))
item = self.tableWidget.horizontalHeaderItem(3)
item.setText(_translate("MainApp", "Surname"))
item = self.tableWidget.horizontalHeaderItem(4)
item.setText(_translate("MainApp", "Card No"))
item = self.tableWidget.horizontalHeaderItem(5)
item.setText(_translate("MainApp", "Type"))
item = self.tableWidget.horizontalHeaderItem(6)
item.setText(_translate("MainApp", "Start Point"))
item = self.tableWidget.horizontalHeaderItem(7)
item.setText(_translate("MainApp", "Destination"))
item = self.tableWidget.horizontalHeaderItem(8)
item.setText(_translate("MainApp", "Entry Date"))
self.pushButton_4.setText(_translate("MainApp", "CREDITS"))
|
[
"PyQt5.QtWidgets.QComboBox",
"PyQt5.QtWidgets.QLabel",
"PyQt5.QtWidgets.QFrame",
"PyQt5.QtWidgets.QWidget",
"PyQt5.QtWidgets.QTableWidget",
"PyQt5.QtCore.QRect",
"PyQt5.QtWidgets.QStatusBar",
"PyQt5.QtWidgets.QPushButton",
"PyQt5.QtWidgets.QLineEdit",
"PyQt5.QtGui.QFont",
"PyQt5.QtCore.QMetaObject.connectSlotsByName",
"PyQt5.QtWidgets.QTableWidgetItem",
"PyQt5.QtWidgets.QMenuBar"
] |
[((212, 238), 'PyQt5.QtWidgets.QWidget', 'QtWidgets.QWidget', (['MainApp'], {}), '(MainApp)\n', (229, 238), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((334, 375), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.centralwidget'], {}), '(self.centralwidget)\n', (355, 375), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((553, 592), 'PyQt5.QtWidgets.QLineEdit', 'QtWidgets.QLineEdit', (['self.centralwidget'], {}), '(self.centralwidget)\n', (572, 592), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((740, 776), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.centralwidget'], {}), '(self.centralwidget)\n', (756, 776), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((856, 869), 'PyQt5.QtGui.QFont', 'QtGui.QFont', ([], {}), '()\n', (867, 869), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1002, 1038), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.centralwidget'], {}), '(self.centralwidget)\n', (1018, 1038), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1122, 1135), 'PyQt5.QtGui.QFont', 'QtGui.QFont', ([], {}), '()\n', (1133, 1135), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1274, 1310), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.centralwidget'], {}), '(self.centralwidget)\n', (1290, 1310), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1394, 1407), 'PyQt5.QtGui.QFont', 'QtGui.QFont', ([], {}), '()\n', (1405, 1407), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1546, 1582), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.centralwidget'], {}), '(self.centralwidget)\n', (1562, 1582), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1666, 1679), 'PyQt5.QtGui.QFont', 'QtGui.QFont', ([], {}), '()\n', (1677, 1679), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1818, 1854), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.centralwidget'], {}), '(self.centralwidget)\n', (1834, 1854), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1938, 1951), 'PyQt5.QtGui.QFont', 'QtGui.QFont', ([], {}), '()\n', (1949, 1951), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2091, 2127), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.centralwidget'], {}), '(self.centralwidget)\n', (2107, 2127), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2212, 2225), 'PyQt5.QtGui.QFont', 'QtGui.QFont', ([], {}), '()\n', (2223, 2225), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2373, 2412), 'PyQt5.QtWidgets.QComboBox', 'QtWidgets.QComboBox', (['self.centralwidget'], {}), '(self.centralwidget)\n', (2392, 2412), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2775, 2814), 'PyQt5.QtWidgets.QComboBox', 'QtWidgets.QComboBox', (['self.centralwidget'], {}), '(self.centralwidget)\n', (2794, 2814), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((3217, 3256), 'PyQt5.QtWidgets.QComboBox', 'QtWidgets.QComboBox', (['self.centralwidget'], {}), '(self.centralwidget)\n', (3236, 3256), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((3662, 3701), 'PyQt5.QtWidgets.QComboBox', 'QtWidgets.QComboBox', (['self.centralwidget'], {}), '(self.centralwidget)\n', (3681, 3701), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((4178, 4219), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.centralwidget'], {}), '(self.centralwidget)\n', (4199, 4219), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((4396, 4437), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', 
(['self.centralwidget'], {}), '(self.centralwidget)\n', (4417, 4437), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((4598, 4634), 'PyQt5.QtWidgets.QFrame', 'QtWidgets.QFrame', (['self.centralwidget'], {}), '(self.centralwidget)\n', (4614, 4634), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((4890, 4921), 'PyQt5.QtWidgets.QLineEdit', 'QtWidgets.QLineEdit', (['self.frame'], {}), '(self.frame)\n', (4909, 4921), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((5084, 5115), 'PyQt5.QtWidgets.QLineEdit', 'QtWidgets.QLineEdit', (['self.frame'], {}), '(self.frame)\n', (5103, 5115), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((5271, 5299), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.frame'], {}), '(self.frame)\n', (5287, 5299), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((5381, 5394), 'PyQt5.QtGui.QFont', 'QtGui.QFont', ([], {}), '()\n', (5392, 5394), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((5542, 5573), 'PyQt5.QtWidgets.QLineEdit', 'QtWidgets.QLineEdit', (['self.frame'], {}), '(self.frame)\n', (5561, 5573), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((5738, 5766), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.frame'], {}), '(self.frame)\n', (5754, 5766), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((5848, 5861), 'PyQt5.QtGui.QFont', 'QtGui.QFont', ([], {}), '()\n', (5859, 5861), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((6000, 6028), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.frame'], {}), '(self.frame)\n', (6016, 6028), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((6110, 6123), 'PyQt5.QtGui.QFont', 'QtGui.QFont', ([], {}), '()\n', (6121, 6123), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((6262, 6290), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.frame'], {}), '(self.frame)\n', (6278, 6290), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((6373, 6386), 'PyQt5.QtGui.QFont', 'QtGui.QFont', ([], {}), '()\n', (6384, 6386), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((6529, 6571), 'PyQt5.QtWidgets.QTableWidget', 'QtWidgets.QTableWidget', (['self.centralwidget'], {}), '(self.centralwidget)\n', (6551, 6571), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((6799, 6827), 'PyQt5.QtWidgets.QTableWidgetItem', 'QtWidgets.QTableWidgetItem', ([], {}), '()\n', (6825, 6827), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((6901, 6929), 'PyQt5.QtWidgets.QTableWidgetItem', 'QtWidgets.QTableWidgetItem', ([], {}), '()\n', (6927, 6929), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((7003, 7031), 'PyQt5.QtWidgets.QTableWidgetItem', 'QtWidgets.QTableWidgetItem', ([], {}), '()\n', (7029, 7031), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((7105, 7133), 'PyQt5.QtWidgets.QTableWidgetItem', 'QtWidgets.QTableWidgetItem', ([], {}), '()\n', (7131, 7133), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((7207, 7235), 'PyQt5.QtWidgets.QTableWidgetItem', 'QtWidgets.QTableWidgetItem', ([], {}), '()\n', (7233, 7235), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((7309, 7337), 'PyQt5.QtWidgets.QTableWidgetItem', 'QtWidgets.QTableWidgetItem', ([], {}), '()\n', (7335, 7337), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((7411, 7439), 'PyQt5.QtWidgets.QTableWidgetItem', 'QtWidgets.QTableWidgetItem', ([], {}), '()\n', (7437, 7439), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((7515, 7543), 'PyQt5.QtWidgets.QTableWidgetItem', 'QtWidgets.QTableWidgetItem', ([], 
{}), '()\n', (7541, 7543), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((7619, 7647), 'PyQt5.QtWidgets.QTableWidgetItem', 'QtWidgets.QTableWidgetItem', ([], {}), '()\n', (7645, 7647), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((7723, 7751), 'PyQt5.QtWidgets.QTableWidgetItem', 'QtWidgets.QTableWidgetItem', ([], {}), '()\n', (7749, 7751), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((7827, 7855), 'PyQt5.QtWidgets.QTableWidgetItem', 'QtWidgets.QTableWidgetItem', ([], {}), '()\n', (7853, 7855), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((7931, 7959), 'PyQt5.QtWidgets.QTableWidgetItem', 'QtWidgets.QTableWidgetItem', ([], {}), '()\n', (7957, 7959), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((8035, 8063), 'PyQt5.QtWidgets.QTableWidgetItem', 'QtWidgets.QTableWidgetItem', ([], {}), '()\n', (8061, 8063), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((8139, 8167), 'PyQt5.QtWidgets.QTableWidgetItem', 'QtWidgets.QTableWidgetItem', ([], {}), '()\n', (8165, 8167), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((8243, 8271), 'PyQt5.QtWidgets.QTableWidgetItem', 'QtWidgets.QTableWidgetItem', ([], {}), '()\n', (8269, 8271), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((8360, 8401), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.centralwidget'], {}), '(self.centralwidget)\n', (8381, 8401), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((8607, 8634), 'PyQt5.QtWidgets.QMenuBar', 'QtWidgets.QMenuBar', (['MainApp'], {}), '(MainApp)\n', (8625, 8634), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((8814, 8843), 'PyQt5.QtWidgets.QStatusBar', 'QtWidgets.QStatusBar', (['MainApp'], {}), '(MainApp)\n', (8834, 8843), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((8989, 9035), 'PyQt5.QtCore.QMetaObject.connectSlotsByName', 'QtCore.QMetaObject.connectSlotsByName', (['MainApp'], {}), '(MainApp)\n', (9026, 9035), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((422, 452), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(40)', '(380)', '(131)', '(31)'], {}), '(40, 380, 131, 31)\n', (434, 452), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((631, 661), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(170)', '(60)', '(113)', '(22)'], {}), '(170, 60, 113, 22)\n', (643, 661), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((809, 838), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(20)', '(50)', '(161)', '(31)'], {}), '(20, 50, 161, 31)\n', (821, 838), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1073, 1104), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(560)', '(110)', '(191)', '(31)'], {}), '(560, 110, 191, 31)\n', (1085, 1104), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1345, 1376), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(560)', '(150)', '(191)', '(31)'], {}), '(560, 150, 191, 31)\n', (1357, 1376), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1617, 1648), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(560)', '(180)', '(191)', '(31)'], {}), '(560, 180, 191, 31)\n', (1629, 1648), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1889, 1920), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(560)', '(210)', '(191)', '(31)'], {}), '(560, 210, 191, 31)\n', (1901, 1920), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2163, 2194), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(560)', '(240)', '(191)', '(31)'], {}), '(560, 240, 191, 31)\n', (2175, 2194), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2453, 2484), 
'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(760)', '(160)', '(121)', '(22)'], {}), '(760, 160, 121, 22)\n', (2465, 2484), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2855, 2886), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(760)', '(190)', '(121)', '(22)'], {}), '(760, 190, 121, 22)\n', (2867, 2886), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((3297, 3328), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(760)', '(220)', '(121)', '(22)'], {}), '(760, 220, 121, 22)\n', (3309, 3328), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((3745, 3776), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(760)', '(250)', '(121)', '(22)'], {}), '(760, 250, 121, 22)\n', (3757, 3776), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((4264, 4295), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(180)', '(380)', '(131)', '(31)'], {}), '(180, 380, 131, 31)\n', (4276, 4295), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((4480, 4511), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(320)', '(380)', '(131)', '(31)'], {}), '(320, 380, 131, 31)\n', (4492, 4511), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((4667, 4698), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(30)', '(120)', '(331)', '(161)'], {}), '(30, 120, 331, 161)\n', (4679, 4698), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((4962, 4993), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(180)', '(120)', '(113)', '(22)'], {}), '(180, 120, 113, 22)\n', (4974, 4993), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((5156, 5186), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(180)', '(60)', '(113)', '(22)'], {}), '(180, 60, 113, 22)\n', (5168, 5186), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((5334, 5363), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(20)', '(10)', '(191)', '(31)'], {}), '(20, 10, 191, 31)\n', (5346, 5363), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((5617, 5647), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(180)', '(90)', '(113)', '(22)'], {}), '(180, 90, 113, 22)\n', (5629, 5647), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((5801, 5830), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(20)', '(50)', '(191)', '(31)'], {}), '(20, 50, 191, 31)\n', (5813, 5830), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((6063, 6092), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(20)', '(80)', '(191)', '(31)'], {}), '(20, 80, 191, 31)\n', (6075, 6092), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((6325, 6355), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(20)', '(110)', '(191)', '(31)'], {}), '(20, 110, 191, 31)\n', (6337, 6355), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((6610, 6641), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(40)', '(460)', '(901)', '(281)'], {}), '(40, 460, 901, 281)\n', (6622, 6641), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((8441, 8470), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(20)', '(10)', '(101)', '(31)'], {}), '(20, 10, 101, 31)\n', (8453, 8470), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((8669, 8697), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(0)', '(0)', '(1017)', '(26)'], {}), '(0, 0, 1017, 26)\n', (8681, 8697), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n')]
|
from tkinter import *
from tkinter.scrolledtext import ScrolledText
from rl_2048.gui.root import WIDTH
from rl_2048.gui.config import Config
from tkinter.constants import END
import threading
TITLE_FONT = ("Helvetica", 32)
DEFAULT_FONT = ("Helvetica", 14)
TRAIN_DIR_POS = (WIDTH / 2, 60)
DELAY_MS_POS = (WIDTH / 2, 100)
NUM_GAMES_POS = (WIDTH / 2, 140)
TRAIN_BUTTON_POS = (50, 300)
PLAY_BUTTON_POS = (300, 300)
TENSOR_BUTTON_POS = (580, 300)
SHOW_GAMES_CHECKBOX_POS = (WIDTH / 2, 180)
class Window(Frame):
"""
Main window for the GUI.
    Creates all components in the __init__ method.
"""
def __init__(self, master):
Frame.__init__(self, master=master)
# attributes
self.master = master
self.config = Config(master)
# create the view
title = Label(self.master, text="2048 GUI", font=TITLE_FONT)
title.place(x=WIDTH / 2, y=30, anchor="center")
self.add_input_box("Trainings directory", TRAIN_DIR_POS, 30, self.config.train_dir_obj)
self.add_input_box("Delay per move (in ms)", DELAY_MS_POS, 5, self.config.delay_in_ms_obj)
self.add_input_box("Number of games", NUM_GAMES_POS, 5, self.config.num_games_obj)
self.add_checkbox("Show games", SHOW_GAMES_CHECKBOX_POS, self.config.show_games_obj)
self.add_button("Start training", TRAIN_BUTTON_POS, self.start_training)
self.add_button("Start playing", PLAY_BUTTON_POS, self.start_playing)
self.add_button("Start tensorboard", TENSOR_BUTTON_POS, self.start_tensorboard)
# Add the log
Label(self.master, text="Log:", font=DEFAULT_FONT).place(x=10, y=370)
self.log_obj = ScrolledText(self.master, width=111, height=12)
self.log_obj.configure(state="disabled")
self.log_obj.place(x=0, y=400)
self.log('Program initialised')
def add_input_box(self, label, pos, input_width, value):
"""
Place an input box, together with a label.
:param label: a string with the text of the label
        :param pos: a tuple with the x and y coordinates of the position
        :param input_width: the width of the input box, in characters
        :param value: a reference to the corresponding attribute in the config object
"""
x, y = pos
label_widget = Label(self.master, text=label + ':', font=DEFAULT_FONT)
label_widget.place(x=x - 5, y=y, anchor="ne")
input_box = Entry(self.master, width=input_width, font=DEFAULT_FONT, textvariable=value)
input_box.place(x=x + 5, y=y, anchor="nw")
def add_button(self, text, position, command):
"""
:param text: Label for the button
        :param position: a tuple with the x and y coordinates of the position
:param command: function reference to whatever is being executed when the button is clicked
"""
button = Button(self.master, text=text, command=command, font=DEFAULT_FONT)
x, y = position
button.place(x=x, y=y, anchor="nw")
def add_checkbox(self, label, position, value):
"""
:param label: Name of the checkbox
        :param position: a tuple with the x and y coordinates of the position
        :param value: a reference to the corresponding attribute in the config object
"""
checkbox = Checkbutton(self.master, text=label, variable=value, font=DEFAULT_FONT)
x, y = position
checkbox.place(x=x, y=y, anchor="nw")
def log(self, text):
"""
Add a line of text to the log-obj
:param text: the text (without the newline) to be added
"""
self.log_obj.configure(state="normal")
self.log_obj.insert(END, text + '\n')
self.log_obj.see('end')
self.log_obj.configure(state="disabled")
# Methods below are button actions
def start_training(self):
"""
Start with the training (gathering experiences)
"""
def train():
self.log('Starting with the training')
from rl_2048.learning.learning import run_training
run_training(self.config.get_train_dir())
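        # Run the training in a background thread so the Tk event loop stays responsive.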
threading.Thread(target=train).start()
def start_playing(self):
"""
Start playing a number of games
"""
def play():
self.log('Playing ' + str(self.config.get_num_games()) + ' game(s).')
from rl_2048.play_game import average_score, make_greedy_strategy
score = average_score(make_greedy_strategy(self.config.get_train_dir()), window=self)
self.log('Average score: ' + str(score))
threading.Thread(target=play).start()
def start_tensorboard(self):
"""
Start the tensorboard server (using the command line)
"""
def launch_tensor_board():
self.log('Starting tensorboard')
import os
os.system('tensorboard --logdir=' + self.config.get_train_dir())
return
threading.Thread(target=launch_tensor_board).start()
|
[
"threading.Thread",
"rl_2048.gui.config.Config",
"tkinter.scrolledtext.ScrolledText"
] |
[((768, 782), 'rl_2048.gui.config.Config', 'Config', (['master'], {}), '(master)\n', (774, 782), False, 'from rl_2048.gui.config import Config\n'), ((1689, 1736), 'tkinter.scrolledtext.ScrolledText', 'ScrolledText', (['self.master'], {'width': '(111)', 'height': '(12)'}), '(self.master, width=111, height=12)\n', (1701, 1736), False, 'from tkinter.scrolledtext import ScrolledText\n'), ((4203, 4233), 'threading.Thread', 'threading.Thread', ([], {'target': 'train'}), '(target=train)\n', (4219, 4233), False, 'import threading\n'), ((4682, 4711), 'threading.Thread', 'threading.Thread', ([], {'target': 'play'}), '(target=play)\n', (4698, 4711), False, 'import threading\n'), ((5052, 5096), 'threading.Thread', 'threading.Thread', ([], {'target': 'launch_tensor_board'}), '(target=launch_tensor_board)\n', (5068, 5096), False, 'import threading\n')]
|
"""Configuration Object."""
import logging
import yaml
import voluptuous as vol
from watchdog.events import FileSystemEventHandler
from watchdog.observers.polling import PollingObserver as Observer
from consts import (
DEFAULT_CONFIG_FILE,
DEFAULT_MQTT_PORT,
DEFAULT_MQTT_SERVER,
DEFAULT_HA_DISCOVERY_PREFIX,
DEFAULT_MQTT_BASE_TOPIC,
DEFAULT_DEVICES_NAMES_FILE,
DEFAULT_LOG_LEVEL,
DEFAULT_LOG_COLOR,
DEFAULT_DALI_DRIVER,
DALI_DRIVERS,
ALL_SUPPORTED_LOG_LEVELS,
LOG_FORMAT,
CONF_CONFIG,
CONF_DALI_DRIVER,
CONF_DALI_LAMPS,
CONF_LOG_COLOR,
CONF_LOG_LEVEL,
CONF_HA_DISCOVERY_PREFIX,
CONF_DEVICES_NAMES_FILE,
CONF_MQTT_BASE_TOPIC,
CONF_MQTT_PORT,
CONF_MQTT_SERVER,
)
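# Voluptuous schema for the YAML configuration: defaults are filled in for
# missing keys, and unknown keys are kept (extra=True).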
CONF_SCHEMA = vol.Schema(
{
vol.Required(CONF_MQTT_SERVER, default=DEFAULT_MQTT_SERVER): str,
vol.Optional(CONF_MQTT_PORT, default=DEFAULT_MQTT_PORT): vol.All(
vol.Coerce(int), vol.Range(min=1, max=65535)
),
vol.Optional(CONF_MQTT_BASE_TOPIC, default=DEFAULT_MQTT_BASE_TOPIC): str,
vol.Required(CONF_DALI_DRIVER, default=DEFAULT_DALI_DRIVER): vol.In(
DALI_DRIVERS
),
vol.Optional(
CONF_HA_DISCOVERY_PREFIX, default=DEFAULT_HA_DISCOVERY_PREFIX
): str,
vol.Optional(CONF_DEVICES_NAMES_FILE, default=DEFAULT_DEVICES_NAMES_FILE): str,
vol.Optional(CONF_LOG_LEVEL, default=DEFAULT_LOG_LEVEL): vol.In(
ALL_SUPPORTED_LOG_LEVELS
),
vol.Optional(CONF_LOG_COLOR, default=DEFAULT_LOG_COLOR): bool,
},
extra=True,
)
logging.basicConfig(format=LOG_FORMAT)
logger = logging.getLogger(__name__)
class Config:
def __init__(self, args, callback=None):
self._watchdog_observer = None
self._path = args.config
self._callback = callback
self._config = {}
# Load from file
try:
self.load_config_file()
except FileNotFoundError:
logger.info("No configuration file, creating a new one")
self._config = CONF_SCHEMA({})
# Overwrite with command line arguments
args_keys = vars(args)
for key in args_keys:
if self._config.get(key) != args_keys[key]:
self._config[key] = args_keys[key]
self.save_config_file()
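        # Watch the configuration file and reload it automatically whenever it changes on disk.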
self._watchdog_observer = Observer()
watchdog_event_handler = FileSystemEventHandler()
watchdog_event_handler.on_modified = lambda event: self.load_config_file()
self._watchdog_observer.schedule(watchdog_event_handler, self._path)
self._watchdog_observer.start()
def load_config_file(self):
"""Load configuration from yaml file."""
with open(self._path, "r") as infile:
logger.debug("Loading configuration from <%s>", self._path)
try:
configuration = yaml.safe_load(infile)
if not configuration:
logger.warning(
"Could not load a configuration from %s, creating a new one",
self._path,
)
configuration = {}
self._config = CONF_SCHEMA(configuration)
                if self._callback:
                    self._callback()
except vol.MultipleInvalid as error:
logger.error("In configuration file %s: %s", self._path, error)
                raise SystemExit(1)
def save_config_file(self):
"""Save configuration back to yaml file."""
        cfg = self._config.pop(CONF_CONFIG, None)  # temporarily displace the config-file path
        try:
            with open(self._path, "w", encoding="utf8") as outfile:
                yaml.dump(
                    self._config, outfile, default_flow_style=False, allow_unicode=True
                )
        except Exception as err:
            logger.error("Could not save configuration: %s", err)
        finally:
            self._config[CONF_CONFIG] = cfg  # restore
def __del__(self):
"""Release watchdog."""
if self._watchdog_observer:
self._watchdog_observer.stop()
self._watchdog_observer.join()
if self._config != {}:
self.save_config_file()
def __repr__(self):
        return str(self._config)
@property
def mqtt_conf(self):
return (
self._config[CONF_MQTT_SERVER],
self._config[CONF_MQTT_PORT],
self._config[CONF_MQTT_BASE_TOPIC],
)
@property
def dali_driver(self):
return self._config[CONF_DALI_DRIVER]
@property
def ha_discovery_prefix(self):
return self._config[CONF_HA_DISCOVERY_PREFIX]
@property
def log_level(self):
return self._config[CONF_LOG_LEVEL]
@property
def log_color(self):
return self._config[CONF_LOG_COLOR]
@property
def devices_names_file(self):
return self._config[CONF_DEVICES_NAMES_FILE]
|
[
"voluptuous.Range",
"watchdog.observers.polling.PollingObserver",
"voluptuous.Optional",
"logging.basicConfig",
"voluptuous.Required",
"yaml.dump",
"yaml.safe_load",
"watchdog.events.FileSystemEventHandler",
"voluptuous.In",
"logging.getLogger",
"voluptuous.Coerce"
] |
[((1614, 1652), 'logging.basicConfig', 'logging.basicConfig', ([], {'format': 'LOG_FORMAT'}), '(format=LOG_FORMAT)\n', (1633, 1652), False, 'import logging\n'), ((1662, 1689), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1679, 1689), False, 'import logging\n'), ((793, 852), 'voluptuous.Required', 'vol.Required', (['CONF_MQTT_SERVER'], {'default': 'DEFAULT_MQTT_SERVER'}), '(CONF_MQTT_SERVER, default=DEFAULT_MQTT_SERVER)\n', (805, 852), True, 'import voluptuous as vol\n'), ((867, 922), 'voluptuous.Optional', 'vol.Optional', (['CONF_MQTT_PORT'], {'default': 'DEFAULT_MQTT_PORT'}), '(CONF_MQTT_PORT, default=DEFAULT_MQTT_PORT)\n', (879, 922), True, 'import voluptuous as vol\n'), ((1009, 1076), 'voluptuous.Optional', 'vol.Optional', (['CONF_MQTT_BASE_TOPIC'], {'default': 'DEFAULT_MQTT_BASE_TOPIC'}), '(CONF_MQTT_BASE_TOPIC, default=DEFAULT_MQTT_BASE_TOPIC)\n', (1021, 1076), True, 'import voluptuous as vol\n'), ((1091, 1150), 'voluptuous.Required', 'vol.Required', (['CONF_DALI_DRIVER'], {'default': 'DEFAULT_DALI_DRIVER'}), '(CONF_DALI_DRIVER, default=DEFAULT_DALI_DRIVER)\n', (1103, 1150), True, 'import voluptuous as vol\n'), ((1204, 1279), 'voluptuous.Optional', 'vol.Optional', (['CONF_HA_DISCOVERY_PREFIX'], {'default': 'DEFAULT_HA_DISCOVERY_PREFIX'}), '(CONF_HA_DISCOVERY_PREFIX, default=DEFAULT_HA_DISCOVERY_PREFIX)\n', (1216, 1279), True, 'import voluptuous as vol\n'), ((1316, 1389), 'voluptuous.Optional', 'vol.Optional', (['CONF_DEVICES_NAMES_FILE'], {'default': 'DEFAULT_DEVICES_NAMES_FILE'}), '(CONF_DEVICES_NAMES_FILE, default=DEFAULT_DEVICES_NAMES_FILE)\n', (1328, 1389), True, 'import voluptuous as vol\n'), ((1404, 1459), 'voluptuous.Optional', 'vol.Optional', (['CONF_LOG_LEVEL'], {'default': 'DEFAULT_LOG_LEVEL'}), '(CONF_LOG_LEVEL, default=DEFAULT_LOG_LEVEL)\n', (1416, 1459), True, 'import voluptuous as vol\n'), ((1525, 1580), 'voluptuous.Optional', 'vol.Optional', (['CONF_LOG_COLOR'], {'default': 'DEFAULT_LOG_COLOR'}), '(CONF_LOG_COLOR, default=DEFAULT_LOG_COLOR)\n', (1537, 1580), True, 'import voluptuous as vol\n'), ((1152, 1172), 'voluptuous.In', 'vol.In', (['DALI_DRIVERS'], {}), '(DALI_DRIVERS)\n', (1158, 1172), True, 'import voluptuous as vol\n'), ((1461, 1493), 'voluptuous.In', 'vol.In', (['ALL_SUPPORTED_LOG_LEVELS'], {}), '(ALL_SUPPORTED_LOG_LEVELS)\n', (1467, 1493), True, 'import voluptuous as vol\n'), ((2389, 2399), 'watchdog.observers.polling.PollingObserver', 'Observer', ([], {}), '()\n', (2397, 2399), True, 'from watchdog.observers.polling import PollingObserver as Observer\n'), ((2433, 2457), 'watchdog.events.FileSystemEventHandler', 'FileSystemEventHandler', ([], {}), '()\n', (2455, 2457), False, 'from watchdog.events import FileSystemEventHandler\n'), ((945, 960), 'voluptuous.Coerce', 'vol.Coerce', (['int'], {}), '(int)\n', (955, 960), True, 'import voluptuous as vol\n'), ((962, 989), 'voluptuous.Range', 'vol.Range', ([], {'min': '(1)', 'max': '(65535)'}), '(min=1, max=65535)\n', (971, 989), True, 'import voluptuous as vol\n'), ((2907, 2929), 'yaml.safe_load', 'yaml.safe_load', (['infile'], {}), '(infile)\n', (2921, 2929), False, 'import yaml\n'), ((3796, 3874), 'yaml.dump', 'yaml.dump', (['self._config', 'outfile'], {'default_flow_style': '(False)', 'allow_unicode': '(True)'}), '(self._config, outfile, default_flow_style=False, allow_unicode=True)\n', (3805, 3874), False, 'import yaml\n')]
|
# -*- coding: utf-8 -*-
from setuptools import setup
setup(
name='pitman',
version='0.0.1',
packages=['pitman'],
author='<NAME>',
author_email='<EMAIL>',
url='https://github.com/tschaefer/pitman',
description='Dig for your favored Podcast.',
license='BSD',
install_requires=['feedparser>=5.1.3', 'requests>=2.4.3', 'clint>=0.4.1'],
entry_points={'console_scripts': ['pitman=pitman.pitman:main']}
)
|
[
"setuptools.setup"
] |
[((55, 418), 'setuptools.setup', 'setup', ([], {'name': '"""pitman"""', 'version': '"""0.0.1"""', 'packages': "['pitman']", 'author': '"""<NAME>"""', 'author_email': '"""<EMAIL>"""', 'url': '"""https://github.com/tschaefer/pitman"""', 'description': '"""Dig for your favored Podcast."""', 'license': '"""BSD"""', 'install_requires': "['feedparser>=5.1.3', 'requests>=2.4.3', 'clint>=0.4.1']", 'entry_points': "{'console_scripts': ['pitman=pitman.pitman:main']}"}), "(name='pitman', version='0.0.1', packages=['pitman'], author='<NAME>',\n author_email='<EMAIL>', url='https://github.com/tschaefer/pitman',\n description='Dig for your favored Podcast.', license='BSD',\n install_requires=['feedparser>=5.1.3', 'requests>=2.4.3',\n 'clint>=0.4.1'], entry_points={'console_scripts': [\n 'pitman=pitman.pitman:main']})\n", (60, 418), False, 'from setuptools import setup\n')]
|
"""Polynomials over arbitrary terms."""
from fractions import Fraction
from collections.abc import Iterable
from functools import cmp_to_key
from kernel.term import Term
from kernel import term_ord
def compare_fst(p1, p2):
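    # Compare only the first components: Terms are compared directly; tuples
    # are compared first by length, then element-wise (recursively).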
if isinstance(p1[0], Term):
return term_ord.fast_compare(p1[0], p2[0])
else:
if len(p1[0]) != len(p2[0]):
return term_ord.compare_atom(len(p1[0]), len(p2[0]))
for i in range(len(p1[0])):
if p1[0][i] != p2[0][i]:
return compare_fst(p1[0][i], p2[0][i])
return 0
def collect_pairs(ps):
"""Reduce a list of pairs by collecting into groups according to
first components, and adding the second component for each group.
It is assumed that the first components are hashable.
e.g. [("x", 1), ("y", 2), ("x", 3)] => [("x", 4), ("y", 2)]
"""
res = {}
for v, c in ps:
if v in res:
res[v] += c
else:
res[v] = c
return tuple(sorted([(k, v) for k, v in res.items() if v != 0], key=cmp_to_key(compare_fst)))
class Monomial:
"""Represents a monomial."""
def __init__(self, coeff, factors):
"""Construct a monomial from coefficient and tuple of factors,
where each factor is associated its power. For example,
(2, ()) -> 2
(2, ((x, 1))) -> 2 * x
(2, ((x, 2), (y, 1))) -> 2 * x^2 * y
"""
assert isinstance(coeff, (int, Fraction))
assert all(isinstance(factor, Iterable) and len(factor) == 2 and \
isinstance(factor[1], int) for factor in factors), \
"Unexpected argument for factors: %s" % str(factors)
self.coeff = coeff
self.factors = collect_pairs(factors)
def __eq__(self, other):
return isinstance(other, Monomial) and self.coeff == other.coeff and \
self.factors == other.factors
def __str__(self):
res = ""
if self.coeff != 1:
res += str(self.coeff)
for var, p in self.factors:
s = str(var)
if len(s) != 1:
s = "(" + s + ")"
if p != 1:
s = s + "^" + str(p)
res += s
if res == "":
res = "1"
return res
def __repr__(self):
return "Monomial(%s)" % str(self)
def __le__(self, other):
cp = term_ord.fast_compare_list(self.factors, other.factors)
if cp < 0:
return True
elif cp > 0:
return False
else:
return term_ord.compare_atom(self.coeff, other.coeff)
def __lt__(self, other):
return self <= other and self != other
def __mul__(self, other):
return Monomial(self.coeff * other.coeff, self.factors + other.factors)
def scale(self, c):
if c == 1:
return self
else:
return Monomial(c * self.coeff, self.factors)
def __neg__(self):
return self.scale(-1)
def is_constant(self):
return len(self.factors) == 0
def get_constant(self):
assert self.is_constant()
return self.coeff
class Polynomial:
"""Represents a polynomial."""
def __init__(self, monomials):
monomials = tuple(monomials)
assert all(isinstance(mono, Monomial) for mono in monomials)
ts = collect_pairs((mono.factors, mono.coeff) for mono in monomials)
self.monomials = tuple(Monomial(coeff, factor) for factor, coeff in ts)
def __eq__(self, other):
return isinstance(other, Polynomial) and self.monomials == other.monomials
def __str__(self):
if len(self.monomials) == 0:
return "0"
else:
return " + ".join(str(mono) for mono in self.monomials)
def __repr__(self):
return "Polynomial(%s)" % str(self)
def __add__(self, other):
return Polynomial(self.monomials + other.monomials)
def scale(self, c):
return Polynomial(mono.scale(c) for mono in self.monomials)
def __neg__(self):
return self.scale(-1)
def __sub__(self, other):
return self + (-other)
def __mul__(self, other):
return Polynomial(m1 * m2 for m1 in self.monomials for m2 in other.monomials)
def __pow__(self, other):
assert isinstance(other, int) and other >= 0
if other == 0:
return Polynomial([Monomial(1, [])])
res = self
for i in range(other-1):
res *= self
return res
def is_nonzero_constant(self):
return len(self.monomials) == 1 and self.monomials[0].is_constant()
def is_zero_constant(self):
return len(self.monomials) == 0
def is_constant(self):
return self.is_nonzero_constant() or self.is_zero_constant()
def get_constant(self):
if self.is_zero_constant():
return 0
elif self.is_nonzero_constant():
return self.monomials[0].get_constant()
else:
raise AssertionError
def singleton(s):
"""Polynomial for 1*s^1."""
return Polynomial([Monomial(1, [(s, 1)])])
def constant(c):
"""Polynomial for c (numerical constant)."""
assert isinstance(c, (int, Fraction))
if isinstance(c, Fraction) and c.denominator == 1:
return Polynomial([Monomial(c.numerator, tuple())])
else:
return Polynomial([Monomial(c, tuple())])
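
# Minimal usage sketch (illustrative only): plain strings stand in for kernel
# Term objects here, which works for a single variable but is not the intended
# input type for these classes.
if __name__ == "__main__":
    x = singleton("x")
    p = (x + constant(2)) ** 2
    print(p)  # the monomials of x^2 + 4x + 4 (ordering may vary)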
|
[
"kernel.term_ord.compare_atom",
"kernel.term_ord.fast_compare_list",
"functools.cmp_to_key",
"kernel.term_ord.fast_compare"
] |
[((274, 309), 'kernel.term_ord.fast_compare', 'term_ord.fast_compare', (['p1[0]', 'p2[0]'], {}), '(p1[0], p2[0])\n', (295, 309), False, 'from kernel import term_ord\n'), ((2377, 2432), 'kernel.term_ord.fast_compare_list', 'term_ord.fast_compare_list', (['self.factors', 'other.factors'], {}), '(self.factors, other.factors)\n', (2403, 2432), False, 'from kernel import term_ord\n'), ((1054, 1077), 'functools.cmp_to_key', 'cmp_to_key', (['compare_fst'], {}), '(compare_fst)\n', (1064, 1077), False, 'from functools import cmp_to_key\n'), ((2555, 2601), 'kernel.term_ord.compare_atom', 'term_ord.compare_atom', (['self.coeff', 'other.coeff'], {}), '(self.coeff, other.coeff)\n', (2576, 2601), False, 'from kernel import term_ord\n')]
|
import os
import pandas as pd
CURRENT_DIR = os.path.dirname(__file__)
INPUT_DIR = os.path.join(CURRENT_DIR, "input")
TMP_DIR = os.path.join(CURRENT_DIR, "tmp")
GRAPHER_DIR = os.path.join(CURRENT_DIR, "grapher")
def main():
# GCP data
gas_gcp = pd.read_excel(
os.path.join(INPUT_DIR, "country_fuel/gas_by_country.xlsx"),
sheet_name="Data", skiprows=1
)
oil_gcp = pd.read_excel(
os.path.join(INPUT_DIR, "country_fuel/oil_by_country.xlsx"),
sheet_name="Data", skiprows=1
)
coal_gcp = pd.read_excel(
os.path.join(INPUT_DIR, "country_fuel/coal_by_country.xlsx"),
sheet_name="Data", skiprows=1
)
flaring_gcp = pd.read_excel(
os.path.join(INPUT_DIR, "country_fuel/flaring_by_country.xlsx"),
sheet_name="Data", skiprows=1
)
cement_gcp = pd.read_excel(
os.path.join(INPUT_DIR, "country_fuel/cement_by_country.xlsx"),
sheet_name="Data", skiprows=1
)
country_gcp = pd.read_csv(
os.path.join(INPUT_DIR, "shared/gcp_country_standardized.csv")
)
gas_gcp = pd.melt(gas_gcp, id_vars=["Year"], var_name=["Country"], value_name="Gas")
oil_gcp = pd.melt(oil_gcp, id_vars=["Year"], var_name=["Country"], value_name="Oil")
coal_gcp = pd.melt(coal_gcp, id_vars=["Year"], var_name=["Country"], value_name="Coal")
flaring_gcp = pd.melt(flaring_gcp, id_vars=["Year"], var_name=["Country"], value_name="Flaring")
cement_gcp = pd.melt(cement_gcp, id_vars=["Year"], var_name=["Country"], value_name="Cement")
emissions_gcp = (
gas_gcp
.merge(oil_gcp, on=["Year", "Country"])
.merge(coal_gcp, on=["Year", "Country"])
.merge(flaring_gcp, on=["Year", "Country"])
.merge(cement_gcp, on=["Year", "Country"])
)
emissions_gcp = (
emissions_gcp
.merge(country_gcp, on="Country")
.drop(columns=["Country"])
.rename(columns={"CountryStandardised": "Country"})
)
# CDIAC data
emissions_cdiac = pd.read_csv(
os.path.join(INPUT_DIR, "country_fuel/co2_fuel_country_cdiac.csv"),
skiprows=[1, 2, 3],
na_values=[".", " "]
)
emissions_cdiac = emissions_cdiac.rename(columns={
"Nation": "Country",
"Emissions from solid fuel consumption": "Coal",
"Emissions from liquid fuel consumption": "Oil",
"Emissions from gas fuel consumption": "Gas",
"Emissions from cement production": "Cement",
"Emissions from gas flaring": "Flaring"
})
emissions_cdiac = emissions_cdiac.drop(columns={
"Total CO2 emissions from fossil-fuels and cement production (thousand metric tons of C)",
"Per capita CO2 emissions (metric tons of carbon)",
"Emissions from bunker fuels (not included in the totals)"
})
emissions_cdiac["Country"] = emissions_cdiac["Country"].str.capitalize()
# Merge with country standardised names
country_cdiac = pd.read_csv(os.path.join(INPUT_DIR, "shared/cdiac_country_standardized.csv"))
emissions_cdiac = (
emissions_cdiac
.merge(country_cdiac, on="Country")
.drop(columns=["Country"])
.rename(columns={"CountryStandardised": "Country"})
.groupby(["Country", "Year"], as_index=False)
.sum()
)
    # Convert from thousand tonnes of carbon to million tonnes of CO2
    # (multiply by 3.664 for C -> CO2, divide by 1000 for thousand -> million tonnes)
emissions_cdiac[["Cement", "Coal", "Flaring", "Gas", "Oil"]] = (
emissions_cdiac[["Cement", "Coal", "Flaring", "Gas", "Oil"]]
.astype(float)
.mul(3.664 / 1000)
)
# Combined CDIAC and GCP
emissions_cdiac.loc[:, "Source"] = "CDIAC"
emissions_cdiac.loc[:, "Priority"] = 0
emissions_gcp.loc[:, "Source"] = "GCP"
emissions_gcp.loc[:, "Priority"] = 1
combined = pd.concat([emissions_cdiac, emissions_gcp])
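    # Where both sources cover the same country-year, keep only the highest-priority row (GCP over CDIAC).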
combined = combined.sort_values(["Country", "Year", "Priority"])
combined = combined.groupby(["Year", "Country"]).tail(1)
# Drop columns
combined = combined.drop(columns=["Priority", "Source"])
# Reorder columns
other_columns = sorted([col for col in combined.columns if col not in ["Country", "Year"]])
combined = combined[["Country", "Year"] + other_columns]
# Merge with global figures
world = pd.read_csv(os.path.join(TMP_DIR, "global_co2_fuel_type.csv"))
combined = pd.concat([combined, world])
# Import population dataset
population = pd.read_csv(os.path.join(INPUT_DIR, "shared/population.csv"))
# Add population
combined = combined.merge(population, on=["Country", "Year"])
# Calculate per capita figures
per_capita_cols = ["Cement", "Coal", "Flaring", "Gas", "Oil"]
for col in per_capita_cols:
combined[f"{col} (per capita)"] = combined[col] / combined["Population"] * 1000000
    # Drop the "Population" and "Total emissions" helper columns
combined = combined.drop(columns=["Population", "Total emissions"])
# Save to CSV file
combined.to_csv(
os.path.join(GRAPHER_DIR, "co2_fuel.csv"), index=False
)
if __name__ == '__main__':
main()
|
[
"pandas.melt",
"os.path.dirname",
"os.path.join",
"pandas.concat"
] |
[((46, 71), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (61, 71), False, 'import os\n'), ((84, 118), 'os.path.join', 'os.path.join', (['CURRENT_DIR', '"""input"""'], {}), "(CURRENT_DIR, 'input')\n", (96, 118), False, 'import os\n'), ((129, 161), 'os.path.join', 'os.path.join', (['CURRENT_DIR', '"""tmp"""'], {}), "(CURRENT_DIR, 'tmp')\n", (141, 161), False, 'import os\n'), ((176, 212), 'os.path.join', 'os.path.join', (['CURRENT_DIR', '"""grapher"""'], {}), "(CURRENT_DIR, 'grapher')\n", (188, 212), False, 'import os\n'), ((1091, 1165), 'pandas.melt', 'pd.melt', (['gas_gcp'], {'id_vars': "['Year']", 'var_name': "['Country']", 'value_name': '"""Gas"""'}), "(gas_gcp, id_vars=['Year'], var_name=['Country'], value_name='Gas')\n", (1098, 1165), True, 'import pandas as pd\n'), ((1180, 1254), 'pandas.melt', 'pd.melt', (['oil_gcp'], {'id_vars': "['Year']", 'var_name': "['Country']", 'value_name': '"""Oil"""'}), "(oil_gcp, id_vars=['Year'], var_name=['Country'], value_name='Oil')\n", (1187, 1254), True, 'import pandas as pd\n'), ((1270, 1346), 'pandas.melt', 'pd.melt', (['coal_gcp'], {'id_vars': "['Year']", 'var_name': "['Country']", 'value_name': '"""Coal"""'}), "(coal_gcp, id_vars=['Year'], var_name=['Country'], value_name='Coal')\n", (1277, 1346), True, 'import pandas as pd\n'), ((1365, 1452), 'pandas.melt', 'pd.melt', (['flaring_gcp'], {'id_vars': "['Year']", 'var_name': "['Country']", 'value_name': '"""Flaring"""'}), "(flaring_gcp, id_vars=['Year'], var_name=['Country'], value_name=\n 'Flaring')\n", (1372, 1452), True, 'import pandas as pd\n'), ((1465, 1550), 'pandas.melt', 'pd.melt', (['cement_gcp'], {'id_vars': "['Year']", 'var_name': "['Country']", 'value_name': '"""Cement"""'}), "(cement_gcp, id_vars=['Year'], var_name=['Country'], value_name='Cement'\n )\n", (1472, 1550), True, 'import pandas as pd\n'), ((3788, 3831), 'pandas.concat', 'pd.concat', (['[emissions_cdiac, emissions_gcp]'], {}), '([emissions_cdiac, emissions_gcp])\n', (3797, 3831), True, 'import pandas as pd\n'), ((4346, 4374), 'pandas.concat', 'pd.concat', (['[combined, world]'], {}), '([combined, world])\n', (4355, 4374), True, 'import pandas as pd\n'), ((279, 338), 'os.path.join', 'os.path.join', (['INPUT_DIR', '"""country_fuel/gas_by_country.xlsx"""'], {}), "(INPUT_DIR, 'country_fuel/gas_by_country.xlsx')\n", (291, 338), False, 'import os\n'), ((421, 480), 'os.path.join', 'os.path.join', (['INPUT_DIR', '"""country_fuel/oil_by_country.xlsx"""'], {}), "(INPUT_DIR, 'country_fuel/oil_by_country.xlsx')\n", (433, 480), False, 'import os\n'), ((564, 624), 'os.path.join', 'os.path.join', (['INPUT_DIR', '"""country_fuel/coal_by_country.xlsx"""'], {}), "(INPUT_DIR, 'country_fuel/coal_by_country.xlsx')\n", (576, 624), False, 'import os\n'), ((711, 774), 'os.path.join', 'os.path.join', (['INPUT_DIR', '"""country_fuel/flaring_by_country.xlsx"""'], {}), "(INPUT_DIR, 'country_fuel/flaring_by_country.xlsx')\n", (723, 774), False, 'import os\n'), ((860, 922), 'os.path.join', 'os.path.join', (['INPUT_DIR', '"""country_fuel/cement_by_country.xlsx"""'], {}), "(INPUT_DIR, 'country_fuel/cement_by_country.xlsx')\n", (872, 922), False, 'import os\n'), ((1007, 1069), 'os.path.join', 'os.path.join', (['INPUT_DIR', '"""shared/gcp_country_standardized.csv"""'], {}), "(INPUT_DIR, 'shared/gcp_country_standardized.csv')\n", (1019, 1069), False, 'import os\n'), ((2040, 2106), 'os.path.join', 'os.path.join', (['INPUT_DIR', '"""country_fuel/co2_fuel_country_cdiac.csv"""'], {}), "(INPUT_DIR, 'country_fuel/co2_fuel_country_cdiac.csv')\n", (2052, 2106), False, 'import os\n'), ((2975, 3039), 'os.path.join', 'os.path.join', (['INPUT_DIR', '"""shared/cdiac_country_standardized.csv"""'], {}), "(INPUT_DIR, 'shared/cdiac_country_standardized.csv')\n", (2987, 3039), False, 'import os\n'), ((4280, 4329), 'os.path.join', 'os.path.join', (['TMP_DIR', '"""global_co2_fuel_type.csv"""'], {}), "(TMP_DIR, 'global_co2_fuel_type.csv')\n", (4292, 4329), False, 'import os\n'), ((4437, 4485), 'os.path.join', 'os.path.join', (['INPUT_DIR', '"""shared/population.csv"""'], {}), "(INPUT_DIR, 'shared/population.csv')\n", (4449, 4485), False, 'import os\n'), ((4957, 4998), 'os.path.join', 'os.path.join', (['GRAPHER_DIR', '"""co2_fuel.csv"""'], {}), "(GRAPHER_DIR, 'co2_fuel.csv')\n", (4969, 4998), False, 'import os\n')]
|
from contextlib import suppress
from steem.blockchain import Blockchain
from steem.post import Post
from steembase.exceptions import PostDoesNotExist
from steembase.exceptions import RPCError
from datetime import datetime, timedelta
def gen(stream):
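    """Yield posts from the stream, skipping posts and RPC calls that raise
    transient errors so the outer loop keeps running."""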
while True:
try:
for post in stream:
yield post
except PostDoesNotExist as e:
print('')
print(e)
print('')
pass
except RPCError as e:
print('')
print(e)
print('')
pass
except StopIteration:
            return  # ending the generator here; re-raising StopIteration would trip PEP 479
except Exception as e:
print('')
            print('all other errors')
print('')
pass
b = Blockchain()
stream = map(Post, b.stream(filter_by=['comment']))
for post in gen(stream):
post_tags = post.json_metadata.get('tags', [])
if 'kr' in post_tags:
        print(post.get("last_update", datetime(1970, 1, 1)) + timedelta(hours=9))
print(post["author"], ' : ', post["body"])
|
[
"datetime.timedelta",
"steem.blockchain.Blockchain"
] |
[((767, 779), 'steem.blockchain.Blockchain', 'Blockchain', ([], {}), '()\n', (777, 779), False, 'from steem.blockchain import Blockchain\n'), ((1004, 1022), 'datetime.timedelta', 'timedelta', ([], {'hours': '(9)'}), '(hours=9)\n', (1013, 1022), False, 'from datetime import timedelta\n')]
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Copyright (C) 2018 <NAME>
Parameter Parser
"""
import sys
_VALID_TRUE_VALUES = set(['true', '1', 1])
_VALID_FALSE_VALUES = set(['false', '0', 0])
_VALID_BOOLEAN_VALUES = _VALID_TRUE_VALUES | _VALID_FALSE_VALUES
_VALID_TYPES = set(['int', 'float', 'bool', 'str'])
class MissingKeyError(Exception):
"""Raise MissingKeyError if the paramters file doesn't have the requested key
"""
pass
class ParameterValueError(Exception):
"""Raise ParameterValueError if the value is not of the expected type.
"""
pass
def _is_int(string):
"""If string s is a int, return true, else return false
"""
try:
int(string)
return True
except ValueError:
return False
def _is_float(string):
"""If string s is a float, return true, else return false
"""
try:
float(string)
return True
except ValueError:
return False
def _is_bool(string):
"""If string s is a boolean, return true, else return false
"""
string_lower = string.lower()
if string_lower in _VALID_BOOLEAN_VALUES:
return True
else:
return False
def _is_list(string):
"""If string s is a list, return true, else return false
"""
    if string and string[0] == '[':
return True
else:
return False
_CHECK_TYPE_METHOD = {
'int': _is_int,
'float': _is_float,
'bool': _is_bool
}
def _str2bool(string):
return string.lower() in _VALID_TRUE_VALUES
def _str2list(string):
string = string.replace('[', '')
string = string.replace(']', '')
string = string.replace(' ', '')
s_list = []
for item in string.split(','):
s_list.append(item)
return s_list
_CONVERT_METHOD = {
'str': str,
'int': int,
'float': float,
'bool': _str2bool
}
def _check_type(value, param_type):
    checker = _CHECK_TYPE_METHOD[param_type]
    if _is_list(value):
        # every item of a list value must parse as param_type
        for item in _str2list(value):
            if not checker(item):
                return False
        return True
    return checker(value)
def _convert(value, param_type):
converter = _CONVERT_METHOD[param_type]
if _is_list(value):
results = []
value_list = _str2list(value)
for item in value_list:
results.append(converter(item))
return results
else:
return converter(value)
def _auto_convert(value):
for param_type in ['int', 'float', 'bool']:
if _check_type(value, param_type):
return _convert(value, param_type)
return _convert(value, 'str')
class ParamParser(object):
"""Parameter Parser
"""
def __init__(self, params_file=None):
if params_file is None:
sys.exit("Must provide a params_file.")
with open(params_file, 'r') as f:
file_line_list = f.readlines()
file_line_list = [file_line.strip() for file_line in file_line_list]
i = 0
self._params_values = {}
self._key_list = []
while i < len(file_line_list):
key = file_line_list[i].replace(':', '')
value = file_line_list[i+1]
if value[-1] == ':':
sys.exit("{} does not have a value.".format(key))
self._params_values[key] = value
self._key_list.append(key)
i += 2
def read(self, key, param_type='auto'):
"""Read parameter value
"""
if key not in self._params_values:
raise MissingKeyError("{}".format(key))
if param_type != 'auto' and param_type not in _VALID_TYPES:
sys.exit("{} is not a valid type.".format(param_type))
value = self._params_values[key]
if param_type != 'auto':
if param_type != 'str' and not _check_type(value, param_type):
raise ParameterValueError(
"{}: {} (or some item in it) is not of type {}".format(key, value, param_type))
return _convert(value, param_type)
if param_type == 'auto':
return _auto_convert(value)
def change_value(self, key, new_value, outfile=None):
"""change the value of a key
"""
if outfile is None:
sys.exit("outfile required")
with open(outfile, 'w') as f:
for item in self._key_list:
f.write(item+':\n')
if item == key:
self._params_values[item] = str(new_value)
f.write(str(new_value)+'\n')
else:
f.write(self._params_values[item]+'\n')
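# Illustrative usage (file name and keys are assumptions): a params file
# alternates "key:" lines with value lines, e.g. "lr:" followed by "0.001".
#   parser = ParamParser('params.txt')
#   parser.read('lr')                    # -> 0.001 (auto-detected as float)
#   parser.read('lr', param_type='str')  # -> '0.001'
#   parser.change_value('lr', 0.01, outfile='params_new.txt')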
|
[
"sys.exit"
] |
[((2815, 2854), 'sys.exit', 'sys.exit', (['"""Must provide a params_file."""'], {}), "('Must provide a params_file.')\n", (2823, 2854), False, 'import sys\n'), ((4309, 4337), 'sys.exit', 'sys.exit', (['"""outfile required"""'], {}), "('outfile required')\n", (4317, 4337), False, 'import sys\n')]
|
""" Network architectures.
"""
# pylint: disable=W0221,W0622,C0103,R0913
##
import torch
import torch.nn as nn
import torch.nn.parallel
import torch.nn.functional as F
from .unet_parts import *
import functools
##
def get_norm_layer(norm_type='instance'):
"""Return a normalization layer
Parameters:
norm_type (str) -- the name of the normalization layer: batch | instance | none
For BatchNorm, we use learnable affine parameters and track running statistics (mean/stddev).
For InstanceNorm, we do not use learnable affine parameters. We do not track running statistics.
"""
if norm_type == 'batch':
norm_layer = functools.partial(nn.BatchNorm2d, affine=True, track_running_stats=True)
elif norm_type == 'instance':
norm_layer = functools.partial(nn.InstanceNorm2d, affine=False, track_running_stats=False)
elif norm_type == 'none':
norm_layer = lambda x: Identity()
else:
raise NotImplementedError('normalization layer [%s] is not found' % norm_type)
return norm_layer
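# Illustrative: obtain a norm-layer factory and instantiate it for 64 channels.
#   norm_layer = get_norm_layer('instance')
#   norm = norm_layer(64)   # nn.InstanceNorm2d(64), no affine params or running stats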
def weights_init(mod):
"""
Custom weights initialization called on netG, netD and netE
    :param mod: module whose Conv / BatchNorm weights are (re)initialized
:return:
"""
classname = mod.__class__.__name__
if classname.find('Conv') != -1:
mod.weight.data.normal_(0.0, 0.02)
elif classname.find('BatchNorm') != -1:
mod.weight.data.normal_(1.0, 0.02)
mod.bias.data.fill_(0)
#####################################################################3
class Unet(nn.Module):
"""Create a Unet-based generator"""
def __init__(self, input_nc, output_nc, num_downs, ngpu, ngf=64, norm_layer=nn.BatchNorm2d, use_dropout=False):
"""Construct a Unet generator
Parameters:
input_nc (int) -- the number of channels in input images
output_nc (int) -- the number of channels in output images
            num_downs (int) -- the number of downsamplings in UNet. For example, if |num_downs| == 7,
                               an image of size 128x128 will become of size 1x1 at the bottleneck
ngf (int) -- the number of filters in the last conv layer
norm_layer -- normalization layer
We construct the U-Net from the innermost layer to the outermost layer.
It is a recursive process.
"""
super(Unet, self).__init__()
self.ngpu = ngpu
# construct unet structure
unet_block = UnetSkipConnectionBlock(ngf * 8, ngf * 8, input_nc=None, submodule=None, norm_layer=norm_layer, innermost=True) # add the innermost layer
for i in range(num_downs - 5): # add intermediate layers with ngf * 8 filters
unet_block = UnetSkipConnectionBlock(ngf * 8, ngf * 8, input_nc=None, submodule=unet_block, norm_layer=norm_layer, use_dropout=use_dropout)
# gradually reduce the number of filters from ngf * 8 to ngf
unet_block = UnetSkipConnectionBlock(ngf * 4, ngf * 8, input_nc=None, submodule=unet_block, norm_layer=norm_layer)
unet_block = UnetSkipConnectionBlock(ngf * 2, ngf * 4, input_nc=None, submodule=unet_block, norm_layer=norm_layer)
unet_block = UnetSkipConnectionBlock(ngf, ngf * 2, input_nc=None, submodule=unet_block, norm_layer=norm_layer)
self.main = UnetSkipConnectionBlock(output_nc, ngf, input_nc=input_nc, submodule=unet_block, outermost=True, norm_layer=norm_layer) # add the outermost layer
def forward(self, input):
if self.ngpu > 1:
output = nn.parallel.data_parallel(self.main, input, range(self.ngpu))
else:
output = self.main(input)
return output
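# Illustrative construction (the concrete values are assumptions, not taken
# from a training config):
#   netG = Unet(input_nc=3, output_nc=3, num_downs=7, ngpu=1, ngf=64,
#               norm_layer=get_norm_layer('batch'))
#   fake = netG(torch.randn(1, 3, 128, 128))  # 7 downsamplings: 128x128 -> 1x1 bottleneck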
###########################################################################################
class UnetSkipConnectionBlock(nn.Module):
"""Defines the Unet submodule with skip connection.
X -------------------identity----------------------
|-- downsampling -- |submodule| -- upsampling --|
"""
def __init__(self, outer_nc, inner_nc, input_nc=None,
submodule=None, outermost=False, innermost=False, norm_layer=nn.BatchNorm2d, use_dropout=False):
"""Construct a Unet submodule with skip connections.
Parameters:
outer_nc (int) -- the number of filters in the outer conv layer
inner_nc (int) -- the number of filters in the inner conv layer
input_nc (int) -- the number of channels in input images/features
submodule (UnetSkipConnectionBlock) -- previously defined submodules
outermost (bool) -- if this module is the outermost module
innermost (bool) -- if this module is the innermost module
norm_layer -- normalization layer
            use_dropout (bool) -- whether to use dropout layers.
"""
super(UnetSkipConnectionBlock, self).__init__()
self.outermost = outermost
if type(norm_layer) == functools.partial:
use_bias = norm_layer.func == nn.InstanceNorm2d
else:
use_bias = norm_layer == nn.InstanceNorm2d
if input_nc is None:
input_nc = outer_nc
downconv = nn.Conv2d(input_nc, inner_nc, kernel_size=4,
stride=2, padding=1, bias=use_bias)
downrelu = nn.LeakyReLU(0.2, True)
downnorm = norm_layer(inner_nc)
uprelu = nn.ReLU(True)
upnorm = norm_layer(outer_nc)
if outermost:
upconv = nn.ConvTranspose2d(inner_nc * 2, outer_nc,
kernel_size=4, stride=2,
padding=1)
down = [downconv]
up = [uprelu, upconv, nn.Tanh()]
model = down + [submodule] + up
elif innermost:
upconv = nn.ConvTranspose2d(inner_nc, outer_nc,
kernel_size=4, stride=2,
padding=1, bias=use_bias)
down = [downrelu, downconv]
up = [uprelu, upconv, upnorm]
model = down + up
else:
upconv = nn.ConvTranspose2d(inner_nc * 2, outer_nc,
kernel_size=4, stride=2,
padding=1, bias=use_bias)
down = [downrelu, downconv, downnorm]
up = [uprelu, upconv, upnorm]
if use_dropout:
model = down + [submodule] + up + [nn.Dropout(0.5)]
else:
model = down + [submodule] + up
self.main = nn.Sequential(*model)
def forward(self, x):
if self.outermost:
return self.main(x)
else: # add skip connections
return torch.cat([x, self.main(x)], 1)
#######################################################################################
class Encoder(nn.Module):
"""
DCGAN ENCODER NETWORK
"""
def __init__(self, isize, nz, input_nc, ndf, ngpu, n_extra_layers=0, add_final_conv=True):
super(Encoder, self).__init__()
self.ngpu = ngpu
assert isize % 16 == 0, "isize has to be a multiple of 16"
main = nn.Sequential()
# input is nc x isize x isize
main.add_module('initial-conv-{0}-{1}'.format(input_nc, ndf),
nn.Conv2d(input_nc, ndf, 4, 2, 1, bias=False))
main.add_module('initial-relu-{0}'.format(ndf),
nn.LeakyReLU(0.2, inplace=True))
csize, cndf = isize / 2, ndf
# Extra layers
for t in range(n_extra_layers):
main.add_module('extra-layers-{0}-{1}-conv'.format(t, cndf),
nn.Conv2d(cndf, cndf, 3, 1, 1, bias=False))
main.add_module('extra-layers-{0}-{1}-batchnorm'.format(t, cndf),
nn.BatchNorm2d(cndf))
main.add_module('extra-layers-{0}-{1}-relu'.format(t, cndf),
nn.LeakyReLU(0.2, inplace=True))
while csize > 4:
in_feat = cndf
out_feat = cndf * 2
main.add_module('pyramid-{0}-{1}-conv'.format(in_feat, out_feat),
nn.Conv2d(in_feat, out_feat, 4, 2, 1, bias=False))
main.add_module('pyramid-{0}-batchnorm'.format(out_feat),
nn.BatchNorm2d(out_feat))
main.add_module('pyramid-{0}-relu'.format(out_feat),
nn.LeakyReLU(0.2, inplace=True))
cndf = cndf * 2
csize = csize / 2
# state size. K x 4 x 4
if add_final_conv:
main.add_module('final-{0}-{1}-conv'.format(cndf, 1),
nn.Conv2d(cndf, nz, 4, 1, 0, bias=False))
self.main = main
def forward(self, input):
if self.ngpu > 1:
output = nn.parallel.data_parallel(self.main, input, range(self.ngpu))
else:
output = self.main(input)
return output
##
##
class NetD(nn.Module):
"""
DISCRIMINATOR NETWORK
"""
def __init__(self, opt):
super(NetD, self).__init__()
model = Encoder(opt.isize, 1, opt.input_nc, opt.ngf, opt.ngpu, opt.extralayers)
layers = list(model.main.children())
self.features = nn.Sequential(*layers[:-1])
self.classifier = nn.Sequential(layers[-1])
self.classifier.add_module('Sigmoid', nn.Sigmoid())
def forward(self, x):
features = self.features(x)
classifier = self.classifier(features)
classifier = classifier.view(-1, 1).squeeze(1)
return classifier, features
##
class NetG(nn.Module):
"""
GENERATOR NETWORK
"""
def __init__(self, opt):
super(NetG, self).__init__()
norm_layer = get_norm_layer(norm_type='batch')
        self.unet = Unet(opt.input_nc, opt.output_nc, 5, opt.ngpu, opt.ngf, norm_layer=norm_layer, use_dropout=False)
def forward(self, x):
gen_imag = self.unet(x)
return gen_imag
class Class(nn.Module):
def __init__(self , opt):
super(Class, self).__init__()
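        # NOTE: the BatchNorm2d(32) / Conv2d(32, ...) stack below only lines up
        # when opt.isize == 32, since the first conv uses opt.isize as its
        # out_channels.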
self.con_layer1 = nn.Sequential(
nn.Conv2d(opt.input_nc, opt.isize, kernel_size=3, stride=1, padding=1),
nn.BatchNorm2d(32),
nn.ReLU(),
nn.Conv2d(32, 32, kernel_size=3, stride=1, padding=1),
nn.BatchNorm2d(32),
nn.ReLU(),
nn.MaxPool2d(2)
)
self.con_layer2 = nn.Sequential(
nn.Conv2d(32, 64, kernel_size=3, stride=1, padding=1),
nn.BatchNorm2d(64),
nn.ReLU(),
nn.Conv2d(64, 64, kernel_size=3, stride=1, padding=1),
nn.BatchNorm2d(64),
nn.ReLU(),
nn.MaxPool2d(2)
)
self.con_layer3 = nn.Sequential(
nn.Conv2d(64, 128, kernel_size=3, stride=1, padding=1),
nn.BatchNorm2d(128),
nn.ReLU(),
nn.Conv2d(128, 128, kernel_size=3, stride=1, padding=1),
nn.BatchNorm2d(128),
nn.ReLU(),
nn.MaxPool2d(2)
)
self.con_layer4 = nn.Sequential(
nn.Conv2d(128, 256, kernel_size=3, stride=1, padding=1),
nn.BatchNorm2d(256),
nn.ReLU(),
nn.Conv2d(256, 256, kernel_size=3, stride=1, padding=1),
nn.BatchNorm2d(256),
nn.ReLU()
)
self.maxpool = nn.MaxPool2d(4)
size = int(opt.isize / 32)
self.fc_layer = nn.Linear(256*size*size,opt.outclassnum)
def forward(self, x):
x = self.con_layer1(x)
x = self.con_layer2(x)
x = self.con_layer3(x)
x = self.con_layer4(x)
x = self.maxpool(x)
x = x.view(x.size(0),-1)
x = self.fc_layer(x)
return x
|
[
"torch.nn.Dropout",
"functools.partial",
"torch.nn.ReLU",
"torch.nn.ConvTranspose2d",
"torch.nn.Sequential",
"torch.nn.Tanh",
"torch.nn.Conv2d",
"torch.nn.BatchNorm2d",
"torch.nn.Linear",
"torch.nn.MaxPool2d",
"torch.nn.LeakyReLU",
"torch.nn.Sigmoid"
] |
[((659, 731), 'functools.partial', 'functools.partial', (['nn.BatchNorm2d'], {'affine': '(True)', 'track_running_stats': '(True)'}), '(nn.BatchNorm2d, affine=True, track_running_stats=True)\n', (676, 731), False, 'import functools\n'), ((5162, 5247), 'torch.nn.Conv2d', 'nn.Conv2d', (['input_nc', 'inner_nc'], {'kernel_size': '(4)', 'stride': '(2)', 'padding': '(1)', 'bias': 'use_bias'}), '(input_nc, inner_nc, kernel_size=4, stride=2, padding=1, bias=use_bias\n )\n', (5171, 5247), True, 'import torch.nn as nn\n'), ((5291, 5314), 'torch.nn.LeakyReLU', 'nn.LeakyReLU', (['(0.2)', '(True)'], {}), '(0.2, True)\n', (5303, 5314), True, 'import torch.nn as nn\n'), ((5372, 5385), 'torch.nn.ReLU', 'nn.ReLU', (['(True)'], {}), '(True)\n', (5379, 5385), True, 'import torch.nn as nn\n'), ((6558, 6579), 'torch.nn.Sequential', 'nn.Sequential', (['*model'], {}), '(*model)\n', (6571, 6579), True, 'import torch.nn as nn\n'), ((7161, 7176), 'torch.nn.Sequential', 'nn.Sequential', ([], {}), '()\n', (7174, 7176), True, 'import torch.nn as nn\n'), ((9261, 9288), 'torch.nn.Sequential', 'nn.Sequential', (['*layers[:-1]'], {}), '(*layers[:-1])\n', (9274, 9288), True, 'import torch.nn as nn\n'), ((9315, 9340), 'torch.nn.Sequential', 'nn.Sequential', (['layers[-1]'], {}), '(layers[-1])\n', (9328, 9340), True, 'import torch.nn as nn\n'), ((11432, 11447), 'torch.nn.MaxPool2d', 'nn.MaxPool2d', (['(4)'], {}), '(4)\n', (11444, 11447), True, 'import torch.nn as nn\n'), ((11507, 11552), 'torch.nn.Linear', 'nn.Linear', (['(256 * size * size)', 'opt.outclassnum'], {}), '(256 * size * size, opt.outclassnum)\n', (11516, 11552), True, 'import torch.nn as nn\n'), ((787, 864), 'functools.partial', 'functools.partial', (['nn.InstanceNorm2d'], {'affine': '(False)', 'track_running_stats': '(False)'}), '(nn.InstanceNorm2d, affine=False, track_running_stats=False)\n', (804, 864), False, 'import functools\n'), ((5468, 5546), 'torch.nn.ConvTranspose2d', 'nn.ConvTranspose2d', (['(inner_nc * 2)', 'outer_nc'], {'kernel_size': '(4)', 'stride': '(2)', 'padding': '(1)'}), '(inner_nc * 2, outer_nc, kernel_size=4, stride=2, padding=1)\n', (5486, 5546), True, 'import torch.nn as nn\n'), ((7309, 7354), 'torch.nn.Conv2d', 'nn.Conv2d', (['input_nc', 'ndf', '(4)', '(2)', '(1)'], {'bias': '(False)'}), '(input_nc, ndf, 4, 2, 1, bias=False)\n', (7318, 7354), True, 'import torch.nn as nn\n'), ((7436, 7467), 'torch.nn.LeakyReLU', 'nn.LeakyReLU', (['(0.2)'], {'inplace': '(True)'}), '(0.2, inplace=True)\n', (7448, 7467), True, 'import torch.nn as nn\n'), ((9387, 9399), 'torch.nn.Sigmoid', 'nn.Sigmoid', ([], {}), '()\n', (9397, 9399), True, 'import torch.nn as nn\n'), ((10171, 10241), 'torch.nn.Conv2d', 'nn.Conv2d', (['opt.input_nc', 'opt.isize'], {'kernel_size': '(3)', 'stride': '(1)', 'padding': '(1)'}), '(opt.input_nc, opt.isize, kernel_size=3, stride=1, padding=1)\n', (10180, 10241), True, 'import torch.nn as nn\n'), ((10255, 10273), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['(32)'], {}), '(32)\n', (10269, 10273), True, 'import torch.nn as nn\n'), ((10287, 10296), 'torch.nn.ReLU', 'nn.ReLU', ([], {}), '()\n', (10294, 10296), True, 'import torch.nn as nn\n'), ((10310, 10363), 'torch.nn.Conv2d', 'nn.Conv2d', (['(32)', '(32)'], {'kernel_size': '(3)', 'stride': '(1)', 'padding': '(1)'}), '(32, 32, kernel_size=3, stride=1, padding=1)\n', (10319, 10363), True, 'import torch.nn as nn\n'), ((10377, 10395), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['(32)'], {}), '(32)\n', (10391, 10395), True, 'import torch.nn as nn\n'), ((10409, 10418), 'torch.nn.ReLU', 'nn.ReLU', ([], {}), '()\n', (10416, 10418), True, 'import torch.nn as nn\n'), ((10432, 10447), 'torch.nn.MaxPool2d', 'nn.MaxPool2d', (['(2)'], {}), '(2)\n', (10444, 10447), True, 'import torch.nn as nn\n'), ((10511, 10564), 'torch.nn.Conv2d', 'nn.Conv2d', (['(32)', '(64)'], {'kernel_size': '(3)', 'stride': '(1)', 'padding': '(1)'}), '(32, 64, kernel_size=3, stride=1, padding=1)\n', (10520, 10564), True, 'import torch.nn as nn\n'), ((10578, 10596), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['(64)'], {}), '(64)\n', (10592, 10596), True, 'import torch.nn as nn\n'), ((10610, 10619), 'torch.nn.ReLU', 'nn.ReLU', ([], {}), '()\n', (10617, 10619), True, 'import torch.nn as nn\n'), ((10633, 10686), 'torch.nn.Conv2d', 'nn.Conv2d', (['(64)', '(64)'], {'kernel_size': '(3)', 'stride': '(1)', 'padding': '(1)'}), '(64, 64, kernel_size=3, stride=1, padding=1)\n', (10642, 10686), True, 'import torch.nn as nn\n'), ((10700, 10718), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['(64)'], {}), '(64)\n', (10714, 10718), True, 'import torch.nn as nn\n'), ((10732, 10741), 'torch.nn.ReLU', 'nn.ReLU', ([], {}), '()\n', (10739, 10741), True, 'import torch.nn as nn\n'), ((10755, 10770), 'torch.nn.MaxPool2d', 'nn.MaxPool2d', (['(2)'], {}), '(2)\n', (10767, 10770), True, 'import torch.nn as nn\n'), ((10834, 10888), 'torch.nn.Conv2d', 'nn.Conv2d', (['(64)', '(128)'], {'kernel_size': '(3)', 'stride': '(1)', 'padding': '(1)'}), '(64, 128, kernel_size=3, stride=1, padding=1)\n', (10843, 10888), True, 'import torch.nn as nn\n'), ((10902, 10921), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['(128)'], {}), '(128)\n', (10916, 10921), True, 'import torch.nn as nn\n'), ((10935, 10944), 'torch.nn.ReLU', 'nn.ReLU', ([], {}), '()\n', (10942, 10944), True, 'import torch.nn as nn\n'), ((10958, 11013), 'torch.nn.Conv2d', 'nn.Conv2d', (['(128)', '(128)'], {'kernel_size': '(3)', 'stride': '(1)', 'padding': '(1)'}), '(128, 128, kernel_size=3, stride=1, padding=1)\n', (10967, 11013), True, 'import torch.nn as nn\n'), ((11027, 11046), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['(128)'], {}), '(128)\n', (11041, 11046), True, 'import torch.nn as nn\n'), ((11060, 11069), 'torch.nn.ReLU', 'nn.ReLU', ([], {}), '()\n', (11067, 11069), True, 'import torch.nn as nn\n'), ((11083, 11098), 'torch.nn.MaxPool2d', 'nn.MaxPool2d', (['(2)'], {}), '(2)\n', (11095, 11098), True, 'import torch.nn as nn\n'), ((11162, 11217), 'torch.nn.Conv2d', 'nn.Conv2d', (['(128)', '(256)'], {'kernel_size': '(3)', 'stride': '(1)', 'padding': '(1)'}), '(128, 256, kernel_size=3, stride=1, padding=1)\n', (11171, 11217), True, 'import torch.nn as nn\n'), ((11231, 11250), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['(256)'], {}), '(256)\n', (11245, 11250), True, 'import torch.nn as nn\n'), ((11264, 11273), 'torch.nn.ReLU', 'nn.ReLU', ([], {}), '()\n', (11271, 11273), True, 'import torch.nn as nn\n'), ((11287, 11342), 'torch.nn.Conv2d', 'nn.Conv2d', (['(256)', '(256)'], {'kernel_size': '(3)', 'stride': '(1)', 'padding': '(1)'}), '(256, 256, kernel_size=3, stride=1, padding=1)\n', (11296, 11342), True, 'import torch.nn as nn\n'), ((11356, 11375), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['(256)'], {}), '(256)\n', (11370, 11375), True, 'import torch.nn as nn\n'), ((11389, 11398), 'torch.nn.ReLU', 'nn.ReLU', ([], {}), '()\n', (11396, 11398), True, 'import torch.nn as nn\n'), ((5691, 5700), 'torch.nn.Tanh', 'nn.Tanh', ([], {}), '()\n', (5698, 5700), True, 'import torch.nn as nn\n'), ((5791, 5884), 'torch.nn.ConvTranspose2d', 'nn.ConvTranspose2d', (['inner_nc', 'outer_nc'], {'kernel_size': '(4)', 'stride': '(2)', 'padding': '(1)', 'bias': 'use_bias'}), '(inner_nc, outer_nc, kernel_size=4, stride=2, padding=1,\n bias=use_bias)\n', (5809, 5884), True, 'import torch.nn as nn\n'), ((6108, 6206), 'torch.nn.ConvTranspose2d', 'nn.ConvTranspose2d', (['(inner_nc * 2)', 'outer_nc'], {'kernel_size': '(4)', 'stride': '(2)', 'padding': '(1)', 'bias': 'use_bias'}), '(inner_nc * 2, outer_nc, kernel_size=4, stride=2, padding\n =1, bias=use_bias)\n', (6126, 6206), True, 'import torch.nn as nn\n'), ((7671, 7713), 'torch.nn.Conv2d', 'nn.Conv2d', (['cndf', 'cndf', '(3)', '(1)', '(1)'], {'bias': '(False)'}), '(cndf, cndf, 3, 1, 1, bias=False)\n', (7680, 7713), True, 'import torch.nn as nn\n'), ((7821, 7841), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['cndf'], {}), '(cndf)\n', (7835, 7841), True, 'import torch.nn as nn\n'), ((7944, 7975), 'torch.nn.LeakyReLU', 'nn.LeakyReLU', (['(0.2)'], {'inplace': '(True)'}), '(0.2, inplace=True)\n', (7956, 7975), True, 'import torch.nn as nn\n'), ((8168, 8217), 'torch.nn.Conv2d', 'nn.Conv2d', (['in_feat', 'out_feat', '(4)', '(2)', '(1)'], {'bias': '(False)'}), '(in_feat, out_feat, 4, 2, 1, bias=False)\n', (8177, 8217), True, 'import torch.nn as nn\n'), ((8317, 8341), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['out_feat'], {}), '(out_feat)\n', (8331, 8341), True, 'import torch.nn as nn\n'), ((8436, 8467), 'torch.nn.LeakyReLU', 'nn.LeakyReLU', (['(0.2)'], {'inplace': '(True)'}), '(0.2, inplace=True)\n', (8448, 8467), True, 'import torch.nn as nn\n'), ((8681, 8721), 'torch.nn.Conv2d', 'nn.Conv2d', (['cndf', 'nz', '(4)', '(1)', '(0)'], {'bias': '(False)'}), '(cndf, nz, 4, 1, 0, bias=False)\n', (8690, 8721), True, 'import torch.nn as nn\n'), ((6454, 6469), 'torch.nn.Dropout', 'nn.Dropout', (['(0.5)'], {}), '(0.5)\n', (6464, 6469), True, 'import torch.nn as nn\n')]
|
from casexml.apps.case.xform import extract_case_blocks
from corehq.apps.case_importer.tracking.models import CaseUploadRecord
from corehq.form_processor.interfaces.dbaccessors import FormAccessors
MAX_RECENT_UPLOADS = 100
def get_case_upload_records(domain, user, limit, skip=0):
query_set = CaseUploadRecord.objects.filter(domain=domain)
if not user.has_permission(domain, 'access_all_locations'):
query_set = query_set.filter(couch_user_id=user._id)
return query_set.order_by('-created')[skip:skip + limit]
def get_case_upload_record_count(domain, user):
query_set = CaseUploadRecord.objects.filter(domain=domain)
if not user.has_permission(domain, 'access_all_locations'):
query_set = query_set.filter(couch_user_id=user._id)
return min(MAX_RECENT_UPLOADS, query_set.count())
def get_case_ids_for_case_upload(case_upload):
for form_record in case_upload.form_records.order_by('pk').all():
form = FormAccessors(case_upload.domain).get_form(form_record.form_id)
for case_block in extract_case_blocks(form):
yield case_block['@case_id']
def get_form_ids_for_case_upload(case_upload):
for form_record in case_upload.form_records.order_by('pk').all():
yield '{}\n'.format(form_record.form_id)
|
[
"casexml.apps.case.xform.extract_case_blocks",
"corehq.form_processor.interfaces.dbaccessors.FormAccessors",
"corehq.apps.case_importer.tracking.models.CaseUploadRecord.objects.filter"
] |
[((301, 347), 'corehq.apps.case_importer.tracking.models.CaseUploadRecord.objects.filter', 'CaseUploadRecord.objects.filter', ([], {'domain': 'domain'}), '(domain=domain)\n', (332, 347), False, 'from corehq.apps.case_importer.tracking.models import CaseUploadRecord\n'), ((600, 646), 'corehq.apps.case_importer.tracking.models.CaseUploadRecord.objects.filter', 'CaseUploadRecord.objects.filter', ([], {'domain': 'domain'}), '(domain=domain)\n', (631, 646), False, 'from corehq.apps.case_importer.tracking.models import CaseUploadRecord\n'), ((1050, 1075), 'casexml.apps.case.xform.extract_case_blocks', 'extract_case_blocks', (['form'], {}), '(form)\n', (1069, 1075), False, 'from casexml.apps.case.xform import extract_case_blocks\n'), ((960, 993), 'corehq.form_processor.interfaces.dbaccessors.FormAccessors', 'FormAccessors', (['case_upload.domain'], {}), '(case_upload.domain)\n', (973, 993), False, 'from corehq.form_processor.interfaces.dbaccessors import FormAccessors\n')]
|
import zmq
import logging as log
import multiprocessing as prc
import time
class DBNode:
def __init__(self, netconf, name="Database"):
        self.name = name
self.listen_addr = netconf.get_address('database')
def _init_sockets(self):
ctx = zmq.Context()
listen_addr = self.listen_addr
self.socket = ctx.socket(zmq.REP)
log.debug("Binding database to addr %s"%listen_addr)
self.socket.bind(listen_addr)
def _recv(self):
return self.socket.recv_json()
def _node_loop(self):
self._init_sockets()
sock = self.socket
while True:
msg = self._recv()
log.debug('Got msg %s'%msg)
try:
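                # connection_handler is not defined on DBNode itself; a subclass
                # (or the instance) is expected to provide it.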
response = self.connection_handler(msg)
except Exception as e:
log.error("Error in db: %s"%str(e))
response = {"error":str(e)}
sock.send_json(response)
time.sleep(0)
def start(self):
p = prc.Process(target=self._node_loop, name=self.name)
p.start()
self.p = p
return p
def terminate(self):
self.p.terminate()
self.p.join()
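# Minimal usage sketch (netconf and the handler are assumptions):
#   node = DBNode(netconf)
#   node.connection_handler = lambda msg: {"echo": msg}
#   node.start()       # serve REP requests in a child process
#   node.terminate()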
|
[
"multiprocessing.Process",
"logging.debug",
"zmq.Context",
"time.sleep"
] |
[((266, 279), 'zmq.Context', 'zmq.Context', ([], {}), '()\n', (277, 279), False, 'import zmq\n'), ((369, 423), 'logging.debug', 'log.debug', (["('Binding database to addr %s' % listen_addr)"], {}), "('Binding database to addr %s' % listen_addr)\n", (378, 423), True, 'import logging as log\n'), ((997, 1048), 'multiprocessing.Process', 'prc.Process', ([], {'target': 'self._node_loop', 'name': 'self.name'}), '(target=self._node_loop, name=self.name)\n', (1008, 1048), True, 'import multiprocessing as prc\n'), ((668, 697), 'logging.debug', 'log.debug', (["('Got msg %s' % msg)"], {}), "('Got msg %s' % msg)\n", (677, 697), True, 'import logging as log\n'), ((949, 962), 'time.sleep', 'time.sleep', (['(0)'], {}), '(0)\n', (959, 962), False, 'import time\n')]
|
#!/usr/bin/env python
import sys
import sqlite3
import Bio
from Bio import SeqIO
import pickle
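# Usage (positional arguments, as read below):
#   <script> <input.fasta> <output.sqlite> <source-label>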
recs = SeqIO.parse(sys.argv[1],'fasta')
source = sys.argv[3]
load = []
for rec in recs:
scanid = rec.id
seq = str(rec.seq)
load.append((scanid, seq, sqlite3.Binary(pickle.dumps(rec,pickle.HIGHEST_PROTOCOL)), source))
db = sqlite3.connect(sys.argv[2])
cursor = db.cursor()
cursor.execute("CREATE TABLE IF NOT EXISTS combined ( scanid TEXT, seqstr TEXT, rec BYTEA, source TEXT );")
#cursor.execute("CREATE TABLE IF NOT EXISTS combined (scanid TEXT, seqstr TEXT, rec BYTEA, source TEXT, UNIQUE( scanid, seqstr, source ) )")
db.commit()  # creates the table; only the commented-out variant above would also enforce uniqueness of (scanid, seqstr, source)
sql = ("DROP INDEX IF EXISTS index_combined_seqs;")
cursor.execute(sql)
db.commit()
sql = ("DROP INDEX IF EXISTS index_combined_scanids;")
cursor.execute(sql)
db.commit()
sql = "INSERT INTO combined VALUES(?, ?, ?, ?)"
cursor.executemany(sql, load )
db.commit()
sql = ("CREATE INDEX IF NOT EXISTS index_combined_seqs ON combined (seqstr);")
cursor.execute(sql)
db.commit()
sql = ("CREATE INDEX IF NOT EXISTS index_combined_scanids ON combined (scanid);")
cursor.execute(sql)
db.commit()
|
[
"sqlite3.connect",
"Bio.SeqIO.parse",
"pickle.dumps"
] |
[((104, 137), 'Bio.SeqIO.parse', 'SeqIO.parse', (['sys.argv[1]', '"""fasta"""'], {}), "(sys.argv[1], 'fasta')\n", (115, 137), False, 'from Bio import SeqIO\n'), ((333, 361), 'sqlite3.connect', 'sqlite3.connect', (['sys.argv[2]'], {}), '(sys.argv[2])\n', (348, 361), False, 'import sqlite3\n'), ((274, 316), 'pickle.dumps', 'pickle.dumps', (['rec', 'pickle.HIGHEST_PROTOCOL'], {}), '(rec, pickle.HIGHEST_PROTOCOL)\n', (286, 316), False, 'import pickle\n')]
|
#!/usr/bin/python3
"""
Tool for listing and extracting data from an UBI (Unsorted Block Image) image.
(C) 2017 by <NAME> <<EMAIL>>
"""
from __future__ import division, print_function
import crcmod.predefined
import argparse
import struct
from binascii import b2a_hex
import lzo
import zlib
import os
import errno
import datetime
import sys
from collections import defaultdict
import pkg_resources
if sys.version_info[0] == 2:
stdin = sys.stdin
stdout = sys.stdout
else:
stdin = sys.stdin.buffer
stdout = sys.stdout.buffer
if sys.version_info[0] == 2:
reload(sys)
sys.setdefaultencoding('utf-8')
dependencies = [
'python-lzo>=1.11',
'crcmod>=1.7'
]
pkg_resources.require(dependencies)
if sys.version_info[0] == 3:
def cmp(a,b):
return (a>b) - (a<b)
# using crcmod to generate the correct crc function.
crc32 = crcmod.predefined.mkPredefinedCrcFun('CrcJamCrc')
class SeekableStdout:
"""
Wrapper for stdout, which allows forward seeking.
"""
def __init__(self):
self.pos = 0
def seek(self, newpos, whence=os.SEEK_SET):
if whence==os.SEEK_SET:
if newpos < self.pos:
print("WARNING: can't seek stdout backwards")
return -1
if newpos > self.pos:
self.seekforward(newpos - self.pos)
self.pos = newpos
elif whence==os.SEEK_CUR:
if newpos < 0:
print("WARNING: can't seek stdout backwards")
return -1
if newpos > 0:
self.seekforward(newpos)
self.pos += newpos
else:
print("WARNING: can't seek stdout from EOF")
return -1
def seekforward(self, size):
"""
Seek forward by writing NUL bytes.
"""
sys.stdout.flush()
chunk = b"\x00" * 0x10000
while size > 0:
if len(chunk) > size:
chunk = chunk[:size]
stdout.write(chunk)
size -= len(chunk)
def write(self, data):
sys.stdout.flush()
stdout.write(data)
self.pos += len(data)
def truncate(self, size):
"""
Ignore this.
"""
pass
########### block level objects ############
class UbiEcHeader:
"""
The Erase count header
"""
hdrsize = 16*4
def __init__(self):
self.magic = b'UBI#'
def parse(self, data):
self.magic, self.version, self.erasecount, self.vid_hdr_ofs, self.data_ofs, \
self.image_seq, hdr_crc = struct.unpack(">4sB3xQLLL32xL", data)
if self.magic != b'UBI#':
raise Exception("magic num mismatch")
if hdr_crc != crc32(data[:-4]):
raise Exception("crc mismatch")
def encode(self):
data = struct.pack(">4sB3xQLLL32x", self.magic, self.version, self.erasecount, self.vid_hdr_ofs, self.data_ofs, \
self.image_seq)
return data + struct.pack(">L", crc32(data))
def __repr__(self):
return "EC: magic=%s, v%d, ec=%d, vidhdr=%x, data=%x, imgseq=%x" % (
self.magic, self.version, self.erasecount, self.vid_hdr_ofs,
self.data_ofs, self.image_seq)
VTBL_VOLID=0x7fffefff
class UbiVidHead:
"""
The volume id header
"""
hdrsize = 16*4
def __init__(self):
self.vol_id = VTBL_VOLID
self.magic = b'UBI!'
def parse(self, data):
self.magic, self.version, self.vol_type, self.copy_flag, self.compat, self.vol_id, \
self.lnum, self.data_size, self.used_ebs, self.data_pad, self.data_crc, \
self.sqnum, hdr_crc = struct.unpack(">4s4BLL4x4L4xQ12xL", data)
if self.magic != b'UBI!':
raise Exception("magic num mismatch")
if hdr_crc != crc32(data[:-4]):
raise Exception("crc mismatch")
def encode(self):
data = struct.pack(">4s4BLL4x4L4xQ12x", self.magic, self.version, self.vol_type, self.copy_flag, self.compat, self.vol_id, \
self.lnum, self.data_size, self.used_ebs, self.data_pad, self.data_crc, \
self.sqnum)
return data + struct.pack(">L", crc32(data))
def __repr__(self):
if hasattr(self, 'magic'):
return "VID: magic=%s, v%d, vt=%d, cp=%d, compat=%d, volid=%x, lnum=[%d], " \
"dsize=%d, usedebs=%d, datapad=%d, datacrc=%x, sqnum=%d" % (
self.magic, self.version, self.vol_type, self.copy_flag, self.compat,
self.vol_id, self.lnum, self.data_size, self.used_ebs, self.data_pad,
self.data_crc, self.sqnum)
else:
return "VID"
class UbiVtblRecord:
"""
A volume table record.
"""
hdrsize = 4*4+128+24+4
def __init__(self):
self.reserved_pebs = 0
def parse(self, data):
self.reserved_pebs, self.alignment, self.data_pad, self.vol_type, self.upd_marker, \
name_len, self.name, self.flags, crc = struct.unpack(">3LBBH128sB23xL", data)
if crc != crc32(data[:-4]):
raise Exception("crc mismatch")
self.name = self.name[:name_len]
def encode(self):
data = struct.pack(">3LBBH128sB23x", self.reserved_pebs, self.alignment, self.data_pad, self.vol_type, self.upd_marker, \
                len(self.name), self.name, self.flags)
return data + struct.pack(">L", crc32(data))
def empty(self):
if hasattr(self, 'name'):
return self.reserved_pebs==0 and self.alignment==0 and self.data_pad==0 \
and self.vol_type==0 and self.upd_marker==0 and self.name==b'' and self.flags==0
else:
return True
def __repr__(self):
return "VREC: rsvpebs=%d, align=%d, datapad=%d, voltype=%d, updmark=%d, flags=%x, name=%s" % (
self.reserved_pebs, self.alignment, self.data_pad, self.vol_type,
self.upd_marker, self.flags, self.name)
class UbiVolume:
"""
provides read access to a specific volume in an UBI image.
"""
def __init__(self, blks, volid, dataofs):
"""
takes an UbiBlocks object, a volumeid, and a baseoffset.
"""
self.blks = blks
self.volid = volid
self.dataofs = dataofs
def read(self, lnum, offs, size):
return self.blks.readvolume(self.volid, lnum, self.dataofs+offs, size)
def write(self, lnum, offs, data):
return self.blks.writevolume(self.volid, lnum, self.dataofs+offs, data)
def hexdump(self, lnum, offs, size):
print("[%03d:0x%05x] %s" % (lnum, offs, b2a_hex(self.read(lnum, offs, size))))
class RawVolume:
"""
provides read access to a raw data volume
"""
def __init__(self, fh):
self.fh = fh
self.leb_size = self.find_block_size()
def read(self, lnum, offs, size):
self.fh.seek(lnum*self.leb_size+offs)
return self.fh.read(size)
def write(self, lnum, offs, data):
self.fh.seek(lnum*self.leb_size+offs)
return self.fh.write(data)
def find_block_size(self):
self.fh.seek(0)
data = self.fh.read(0x200)
values = struct.unpack("<12L", data[:4*12])
if values[0] == 0x06101831 and values[5] == 6:
# is superblock
return values[9]
def hexdump(self, lnum, offs, size):
print("[%03d:0x%05x] %s" % (lnum, offs, b2a_hex(self.read(lnum, offs, size))))
class UbiBlocks:
"""
Block level access to an UBI image.
"""
def __init__(self, fh):
self.fh = fh
self.lebsize = self.find_blocksize()
fh.seek(0, os.SEEK_END)
self.filesize = fh.tell()
self.maxlebs = self.filesize // self.lebsize
self.scanblocks()
        if VTBL_VOLID not in self.vmap:
print("no volume directory, %d physical volumes" % len(self.vmap))
return
self.scanvtbls(self.vmap[VTBL_VOLID][0])
print("%d named volumes found, %d physical volumes, blocksize=0x%x" % (self.nr_named, len(self.vmap), self.lebsize))
def find_blocksize(self):
self.fh.seek(0)
magic = self.fh.read(4)
if magic != b'UBI#':
raise Exception("not an UBI image")
for log_blocksize in range(10,20):
self.fh.seek(1<<log_blocksize)
magic = self.fh.read(4)
if magic == b'UBI#':
return 1<<log_blocksize
raise Exception("Could not determine UBI image blocksize")
def scanblocks(self):
"""
creates map of volid + lnum => physical lnum
"""
self.vmap = defaultdict(lambda : defaultdict(int))
for lnum in range(self.maxlebs):
try:
ec = UbiEcHeader()
hdr = self.readblock(lnum, 0, ec.hdrsize)
ec.parse(hdr)
vid = UbiVidHead()
viddata = self.readblock(lnum, ec.vid_hdr_ofs, vid.hdrsize)
vid.parse(viddata)
self.vmap[vid.vol_id][vid.lnum] = lnum
except:
pass
def readblock(self, lnum, offs, size):
self.fh.seek(lnum * self.lebsize + offs)
return self.fh.read(size)
def writeblock(self, lnum, offs, data):
self.fh.seek(lnum * self.lebsize + offs)
return self.fh.write(data)
def hexdump(self, lnum, offs, size):
print("[%03d:0x%05x] %s" % (lnum, offs, b2a_hex(self.readblock(lnum, offs, size))))
def scanvtbls(self, lnum):
"""
reads the volume table
"""
ec = UbiEcHeader()
hdr = self.readblock(lnum, 0, ec.hdrsize)
ec.parse(hdr)
self.ec = ec
try:
vid = UbiVidHead()
viddata = self.readblock(lnum, ec.vid_hdr_ofs, vid.hdrsize)
vid.parse(viddata)
self.vid = vid
self.vtbl = []
self.nr_named = 0
if vid.vol_id == VTBL_VOLID:
for i in range(128):
vrec = UbiVtblRecord()
vrecdata = self.readblock(lnum, self.ec.data_ofs + i * vrec.hdrsize, vrec.hdrsize)
vrec.parse(vrecdata)
self.vtbl.append(vrec)
if not vrec.empty():
self.nr_named += 1
except:
print(ec)
print("viddata:%s" % b2a_hex(viddata))
import traceback
traceback.print_exc()
self.vid = UbiVidHead()
self.vtbl = [ UbiVtblRecord() ]
def dumpvtbl(self):
print("%s %s" % (self.ec, self.vid))
for v in self.vtbl:
if not v.empty():
print(" %s" % v)
for volid, lmap in self.vmap.items():
print("volume %x : %d lebs" % (volid, len(lmap)))
def nr_named(self):
return self.nr_named
def getvrec(self, volid):
return self.vtbl[volid]
def getvolume(self, volid):
return UbiVolume(self, volid, self.ec.data_ofs)
def readvolume(self, volid, lnum, offs, size):
physlnum = self.vmap[volid].get(lnum, None)
if physlnum is None:
raise Exception("volume does not contain lnum")
return self.readblock(physlnum, offs, size)
def writevolume(self, volid, lnum, offs, data):
physlnum = self.vmap[volid].get(lnum, None)
if physlnum is None:
raise Exception("volume does not contain lnum")
return self.writeblock(physlnum, offs, data)
################ filesytem level objects ##################
UBIFS_INO_KEY = 0
UBIFS_DATA_KEY = 1
UBIFS_DENT_KEY = 2
UBIFS_XENT_KEY = 3
"""
key format: (inum, (type<<29) | value)
key types: UBIFS_*_KEY: INO, DATA, DENT, XENT
inode: <inum> + 0
dirent: <inum> + hash
xent: <inum> + hash
    data: <inum> + blocknum
trunc: <inum> + 0
"""
def unpackkey(key):
if len(key)==16 and key[8:]!=b'\x00'*8:
print("key has more than 8 bytes: %s" % b2a_hex(key))
inum, value = struct.unpack("<LL", key[:8])
return (inum, value>>29, value&0x1FFFFFFF)
def packkey(key):
inum, ityp, value = key
return struct.pack("<LL", inum, (ityp<<29) | value)
def formatkey(key):
if key is None:
return "None"
if type(key) != tuple:
key = unpackkey(key)
return "%05d:%d:%08x" % key
def comparekeys(lhs, rhs):
return cmp(unpackkey(lhs), unpackkey(rhs))
def namehash(name):
a = 0
for b in name:
if type(b)==str: b = ord(b)
a += b<<4
a += b>>4
a &= 0xFFFFFFFF
a *= 11
a &= 0xFFFFFFFF
a &= 0x1FFFFFFF
if a <= 2: a += 3
return a
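# e.g. namehash(b"hello") == 93962 == 0x16F0A; combined with UBIFS_DENT_KEY this
# becomes the 29-bit hash part of a directory-entry key.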
COMPR_NONE = 0
COMPR_LZO = 1
COMPR_ZLIB = 2
def decompress(data, buflen, compr_type):
if compr_type==COMPR_NONE:
return data
elif compr_type==COMPR_LZO:
return lzo.decompress(data, False, buflen)
elif compr_type==COMPR_ZLIB:
return zlib.decompress(data, -zlib.MAX_WBITS)
else:
raise Exception("unknown compression type")
def compress(data, compr_type):
if compr_type==COMPR_NONE:
return data
    elif compr_type==COMPR_LZO:
        # python-lzo's signature is compress(string[, level[, header]]); pass the
        # level explicitly so header=False lands in the header slot (an assumption
        # based on the python-lzo API, mirroring decompress above).
        return lzo.compress(data, 1, False)
    elif compr_type==COMPR_ZLIB:
        # zlib.compress takes a compression level, not a wbits value; emit a raw
        # deflate stream (matching decompress above) via a compressobj.
        co = zlib.compressobj(9, zlib.DEFLATED, -zlib.MAX_WBITS)
        return co.compress(data) + co.flush()
else:
raise Exception("unknown compression type")
# the blocksize is a fixed value, independent of the underlying device.
UBIFS_BLOCKSIZE = 4096
########### objects for the various node types ###########
class UbiFsInode:
"""
Leafnode in the B-tree, contains information for a specific file or directory.
It's b-tree key is formatted like this:
* 32 bit inode number
* the 3 bit node type: 0 for inode
* a 29 bit zero value.
"""
nodetype = 0
hdrsize = 16 + 5*8 + 11*4 + 2*4 + 28
# note: these values are like the posix stat values,
# the UbiFsDirEntry uses a different set of values for the same types.
ITYPE_FIFO = 1 # S_IFIFO
ITYPE_CHARDEV = 2 # S_IFCHR
ITYPE_DIRECTORY = 4 # S_IFDIR
ITYPE_BLOCKDEV = 6 # S_IFBLK
ITYPE_REGULAR = 8 # S_IFREG
ITYPE_SYMLINK = 10 # S_IFLNK
ITYPE_SOCKET = 12 # S_IFSOCK
def __init__(self):
pass
def parse(self, data):
(
self.key, # 16s
self.creat_sqnum, # Q
self.size, # Q
self.atime_sec, # Q
self.ctime_sec, # Q
self.mtime_sec, # Q
self.atime_nsec, # L
self.ctime_nsec, # L
self.mtime_nsec, # L
self.nlink, # L
self.uid, # L
self.gid, # L
self.mode, # L
self.flags, # L
self.data_len, # L
self.xattr_cnt, # L
self.xattr_size, # L
# 4x
self.xattr_names, # L
self.compr_type # H
# 26x
) = struct.unpack("<16s5Q11L4xLH26x", data[:self.hdrsize])
# data contains the symlink string for symbolic links
self.data = data[self.hdrsize:]
if len(self.data) != self.data_len:
raise Exception("inode data size mismatch")
def encode(self):
return struct.pack("<16s5Q11L4xLH26x", \
self.key, self.creat_sqnum, self.size, self.atime_sec, self.ctime_sec, self.mtime_sec, \
self.atime_nsec, self.ctime_nsec, self.mtime_nsec, self.nlink, self.uid, self.gid, \
self.mode, self.flags, self.data_len, self.xattr_cnt, self.xattr_size, \
self.xattr_names, self.compr_type)
def inodedata_repr(self):
types = ["0", "FIFO", "CHAR", "3", "DIRENT", "5", "BLOCK", "7", "FILE", "9", "LINK", "11", "SOCK", "13", "14", "15"]
typ = self.nodetype()
if typ in (self.ITYPE_CHARDEV, self.ITYPE_BLOCKDEV): # CHAR or BLOCK
return types[typ] + ":" + b2a_hex(self.data).decode('ascii')
return types[typ] + ":%s" % self.data
def __repr__(self):
return "INODE: key=%s, sq=%04x, size=%5d, n=%3d, uid:gid=%d:%d, mode=%06o, fl=%x, dl=%3d, " \
"xattr=%d:%d, xanames=%d, comp=%d -- %s" % (formatkey(self.key), self.creat_sqnum,
self.size, self.nlink, self.uid, self.gid, self.mode, self.flags, self.data_len,
self.xattr_cnt, self.xattr_size, self.xattr_names, self.compr_type, self.inodedata_repr())
# todo: self.atime_sec, self.ctime_sec, self.mtime_sec, self.atime_nsec, self.ctime_nsec, self.mtime_nsec,
def atime(self):
return self.atime_sec + self.atime_nsec / 1000000000.0
def mtime(self):
return self.mtime_sec + self.mtime_nsec / 1000000000.0
def ctime(self):
return self.ctime_sec + self.ctime_nsec / 1000000000.0
def devnum(self):
ma, mi = struct.unpack("BB", self.data[:2])
return (ma, mi)
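    # NOTE: this method shadows the class attribute nodetype = 0 declared above.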
def nodetype(self):
return (self.mode >> 12) & 0xF
class UbiFsData:
"""
Leafnode in the B-tree, contains a datablock
It's b-tree key is formatted like this:
* 32 bit inode number
* the 3 bit node type: 1 for data
* a 29 bit file blocknumber
"""
nodetype = 1
hdrsize = 16 + 4 + 4
def __init__(self):
pass
def parse(self, data):
self.key, self.size, self.compr_type = struct.unpack("<16sLH2x", data[:self.hdrsize])
self.data = decompress(data[self.hdrsize:], self.size, self.compr_type)
if len(self.data) != self.size:
raise Exception("data size mismatch")
def encode(self):
return struct.pack("<16sLH2x", self.key, len(self.data), self.compr_type) + compress(self.data, self.compr_type)
def __repr__(self):
return "DATA: key=%s, size=%d, comp=%d" % (formatkey(self.key), self.size, self.compr_type)
class UbiFsDirEntry:
"""
Leafnode in the B-tree, contains a directory entry.
Properties:
* key
* inum
* type
* name
It's b-tree key is formatted like this:
* 32 bit inode number ( of the directory containing this dirent )
* the 3 bit node type: 2 for dirent
* a 29 bit name hash
"""
TYPE_REGULAR = 0
TYPE_DIRECTORY = 1
TYPE_SYMLINK = 2
TYPE_BLOCKDEV = 3
TYPE_CHARDEV = 4
TYPE_FIFO = 5
TYPE_SOCKET = 6
ALL_TYPES = 127
nodetype = 2
hdrsize = 16 + 8+4+4
def __init__(self):
pass
def parse(self, data):
self.key, self.inum, self.type, nlen = struct.unpack("<16sQxBH4x", data[:self.hdrsize])
self.name = data[self.hdrsize:-1]
if len(self.name) != nlen:
raise Exception("name length mismatch")
def encode(self):
return struct.pack("<16sQxBH4x", self.key, self.inum, self.type, nlen)
def __repr__(self):
typenames = [ 'reg', 'dir', 'lnk', 'blk', 'chr', 'fifo', 'sock' ]
# type: UBIFS_ITYPE_REG, UBIFS_ITYPE_DIR, etc
return "DIRENT: key=%s, inum=%05d, type=%d:%s -- %s" % (formatkey(self.key), self.inum, self.type, typenames[self.type], self.name)
class UbiFsExtendedAttribute:
"""
Leafnode in the B-tree, contains extended attributes.
It's b-tree key is formatted like this:
* 32 bit inode number ( of the directory containing this dirent )
* the 3 bit node type: 3 for xent
* a 29 bit hash of the attribute name.
"""
nodetype = 3
hdrsize = 0
def __init__(self):
pass
def parse(self, data):
# TODO
pass
def __repr__(self):
return "EA"
class UbiFsTruncation:
"""
Used only in the journal
"""
nodetype = 4
hdrsize = 4+12+2*8
def __init__(self):
pass
def parse(self, data):
self.inum, self.old_size, self.new_size = struct.unpack("<L12xQQ", data)
def encode(self):
return struct.pack("<L12xQQ", self.inum, self.old_size, self.new_size)
def __repr__(self):
return "TRUNC: inum:%05d, size:%d->%d" % (self.inum, self.old_size, self.new_size)
class UbiFsPadding:
"""
"""
nodetype = 5
hdrsize = 4
def __init__(self):
pass
def parse(self, data):
self.pad_len, = struct.unpack_from("<L", data, 0)
def encode(self):
return struct.pack("<L", self.pad_len)
def __repr__(self):
return "PAD: padlen=%d" % self.pad_len
class UbiFsSuperblock:
"""
This object can be referenced via UbiFs.sb
"""
nodetype = 6
hdrsize = 6*4+8+7*4+3*4+8+4+16+4
def __init__(self):
pass
def parse(self, data):
self.key_hash, self.key_fmt, self.flags, self.min_io_size, self.leb_size, self.leb_cnt, \
self.max_leb_cnt, self.max_bud_bytes, self.log_lebs, self.lpt_lebs, self.orph_lebs, \
self.jhead_cnt, self.fanout, self.lsave_cnt, self.fmt_version, self.default_compr, \
self.rp_uid, self.rp_gid, self.rp_size, self.time_gran, self.uuid, self.ro_compat_version \
= struct.unpack("<2xBB5LQ7LH2xLLQL16sL", data[:self.hdrsize])
if len(data) != self.hdrsize + 3968:
raise Exception("invalid superblock padding size")
def encode(self):
return struct.pack("<2xBB5LQ7LH2xLLQL16sL",
self.key_hash, self.key_fmt, self.flags, self.min_io_size, self.leb_size, self.leb_cnt, \
self.max_leb_cnt, self.max_bud_bytes, self.log_lebs, self.lpt_lebs, self.orph_lebs, \
self.jhead_cnt, self.fanout, self.lsave_cnt, self.fmt_version, self.default_compr, \
self.rp_uid, self.rp_gid, self.rp_size, self.time_gran, self.uuid, self.ro_compat_version)
def __repr__(self):
return "SUPER: kh:%d, fmt:%d, flags=%x, minio=%d, lebsize=0x%x, lebcount=%d, maxleb=%d, " \
"maxbud=%d, loglebs=%d, lptlebs=%d, orphlebs=%d, jheads=%d, fanout=%d, lsave=%d, " \
"fmt=v%d, compr=%d, rp=%d:%d, rpsize=%d, timegran=%d, uuid=%s, rocompat=%d" % (
self.key_hash, self.key_fmt, self.flags, self.min_io_size, self.leb_size,
self.leb_cnt, self.max_leb_cnt, self.max_bud_bytes, self.log_lebs, self.lpt_lebs,
self.orph_lebs, self.jhead_cnt, self.fanout, self.lsave_cnt, self.fmt_version,
self.default_compr, self.rp_uid, self.rp_gid, self.rp_size, self.time_gran,
b2a_hex(self.uuid), self.ro_compat_version)
class UbiFsMaster:
"""
This object can be referenced via UbiFs.mst
"""
nodetype = 7
hdrsize = 2*8+8*4+6*8+12*4
def __init__(self):
pass
def parse(self, data):
self.highest_inum, self.cmt_no, self.flags, self.log_lnum, self.root_lnum, self.root_offs, \
self.root_len, self.gc_lnum, self.ihead_lnum, self.ihead_offs, self.index_size, \
self.total_free, self.total_dirty, self.total_used, self.total_dead, \
self.total_dark, self.lpt_lnum, self.lpt_offs, self.nhead_lnum, self.nhead_offs, \
self.ltab_lnum, self.ltab_offs, self.lsave_lnum, self.lsave_offs, self.lscan_lnum, \
self.empty_lebs, self.idx_lebs, self.leb_cnt = struct.unpack("<QQ8L6Q12L", data[:self.hdrsize])
if len(data) != self.hdrsize + 344:
raise Exception("invalid master padding size")
def encode(self):
return struct.pack("<QQ8L6Q12L", self.highest_inum, self.cmt_no, self.flags, self.log_lnum, self.root_lnum, self.root_offs, \
self.root_len, self.gc_lnum, self.ihead_lnum, self.ihead_offs, self.index_size, \
self.total_free, self.total_dirty, self.total_used, self.total_dead, \
self.total_dark, self.lpt_lnum, self.lpt_offs, self.nhead_lnum, self.nhead_offs, \
self.ltab_lnum, self.ltab_offs, self.lsave_lnum, self.lsave_offs, self.lscan_lnum, \
self.empty_lebs, self.idx_lebs, self.leb_cnt)
def __repr__(self):
return "MST: max_inum=%05d, cmtno=%d, flags=%x, loglnum=[%03d], root=[%03d:0x%05x], rootlen=%d, " \
"gc_lnum=[%03d], ihead=[%03d:0x%05x], ixsize=%d, total(free:%d, dirty:%d, used:%d, " \
"dead:%d, dark:%d), lpt=[%03d:0x%05x], nhead=[%03d:0x%05x], ltab=[%03d:0x%05x], " \
"lsave=[%03d:0x%05x], lscan=[%03d], empty=%d, idx=%d, nleb=%d" % (
self.highest_inum, self.cmt_no, self.flags, self.log_lnum,
self.root_lnum, self.root_offs, self.root_len,
self.gc_lnum, self.ihead_lnum, self.ihead_offs,
self.index_size, self.total_free, self.total_dirty, self.total_used, self.total_dead,
self.total_dark, self.lpt_lnum, self.lpt_offs, self.nhead_lnum, self.nhead_offs,
self.ltab_lnum, self.ltab_offs, self.lsave_lnum, self.lsave_offs, self.lscan_lnum,
self.empty_lebs, self.idx_lebs, self.leb_cnt)
class UbiFsLEBReference:
nodetype = 8
hdrsize = 12+28
def __init__(self):
pass
def parse(self, data):
self.lnum, self.offs, self.jhead = struct.unpack("<3L28x", data)
def encode(self):
return struct.pack("<3L28x", self.lnum, self.offs, self.jhead)
def __repr__(self):
return "REF: ref=[%03d:0x%05x], jhead=%d" % (self.lnum, self.offs, self.jhead)
class UbiFsIndex:
"""
Part if the B-tree structure, referenced via UbiFs.root.
"""
nodetype = 9
hdrsize = 4
class Branch:
hdrsize = 12
def __init__(self):
pass
def parse(self, data):
self.lnum, self.offs, self.len = struct.unpack("<3L", data[:self.hdrsize])
self.key = data[self.hdrsize:]
def encode(self):
return struct.pack("<3L", self.lnum, self.offs, self.len) + self.key
def __repr__(self):
return "BRANCH: ref=[%03d:0x%05x] len=%4d -- key=%s" % (self.lnum, self.offs, self.len, formatkey(self.key))
def __init__(self):
pass
def parse(self, data):
self.child_cnt, self.level = struct.unpack("<HH", data[:self.hdrsize])
self.branches = []
o = self.hdrsize
for _ in range(self.child_cnt):
if o >= len(data):
raise Exception("parse error")
branch = self.Branch()
branch.parse(data[o:o+branch.hdrsize]) ; o += branch.hdrsize
branch.key = data[o:o+8] ; o += 8
self.branches.append(branch)
def encode(self):
data = struct.pack("<HH", self.child_cnt, self.level)
for _ in self.branches:
data += _.encode()
return data
def __repr__(self):
return "INDEX: nchild=%d, level=%d" % (self.child_cnt, self.level)
def find(self, key):
"""
searches index for a branch.key >= key, returns relation to the key
these are all possibilities with 1 branches
key < b0 -> 'lt', 0
key == b0 -> 'eq', 0
b0 < key -> 'gt', 0
these are all possibilities with 2 branches
key < b0 < b1 -> 'lt', 0
key == b0 < b1 -> 'eq', 0
b0 < key < b1 -> 'gt', 0
b0 < key == b1 -> 'eq', 1
b0 < b1 < key -> 'gt', 1
add two more options for every next branch.
"""
for i, b in enumerate(self.branches):
c = comparekeys(key, b.key)
if c<0:
if i==0:
# before first item
return ('lt', i)
else:
# between prev and this item
return ('gt', i-1)
elif c==0:
# found item
return ('eq', i)
# else c>0 -> continue searching
# after last item
return ('gt', i)
class UbiFsCommitStart:
nodetype = 10
hdrsize = 8
def __init__(self):
pass
def parse(self, data):
self.cmt_no, = struct.unpack("<Q", data[:self.hdrsize])
def encode(self):
return struct.pack("<Q", self.cmt_no)
def __repr__(self):
return "COMMIT: cmt=%d" % self.cmt_no
class UbiFsOrphan:
nodetype = 11
hdrsize = 8
def __init__(self):
pass
def parse(self, data):
self.cmt_no, = struct.unpack("<Q", data[:self.hdrsize])
# todo: inos
def encode(self):
return struct.pack("<Q", self.cmt_no)
def __repr__(self):
return "ORPHAN: cmt=%d" % self.cmt_no
class UbiFsCommonHeader:
"""
Header common to all node types.
"""
hdrsize = 16+8
_classmap = [
UbiFsInode, # 0
UbiFsData, # 1
UbiFsDirEntry, # 2
UbiFsExtendedAttribute, # 3
UbiFsTruncation, # 4
UbiFsPadding, # 5
UbiFsSuperblock, # 6
UbiFsMaster, # 7
UbiFsLEBReference, # 8
UbiFsIndex, # 9
UbiFsCommitStart, # 10
UbiFsOrphan, # 11
]
def __init__(self):
self.magic = 0x06101831
self.crc = 0
self.sqnum = 0
def parse(self, data):
self.magic, self.crc, self.sqnum, self.len, self.node_type, self.group_type = struct.unpack("<LLQLBB2x", data)
if self.magic != 0x06101831:
raise Exception("magic num mismatch")
def encode(self):
return struct.pack("<LLQLBB2x", self.magic, self.crc, self.sqnum, self.len, self.node_type, self.group_type)
def getnode(self):
"""
create node object for current node type.
"""
if 0 <= self.node_type < len(self._classmap):
cls = self._classmap[self.node_type]
node = cls()
node.hdr = self
return node
raise Exception("invalid node type")
def __repr__(self):
return "%08x %08x %08x %08x %2d %2d" % (self.magic, self.crc, self.sqnum, self.len, self.node_type, self.group_type)
class UbiFs:
"""
Filesystem level access to an UBI image volume.
the filesystem consists of a b-tree containing inodes, direntry and data nodes.
"""
def __init__(self, vol, masteroffset):
"""
The constructor takes a UbiVolume or RawVolume object
"""
self.vol = vol
self.load(masteroffset)
def find_most_recent_master(self):
o = 0
mst = None
while True:
try:
mst = self.readnode(1, o)
o += 0x1000 # Fixed value ... do i need to configure this somewhere?
except:
return mst
def load(self, masteroffset):
self.sb = self.readnode(0, 0)
if masteroffset:
self.mst = self.readnode(*masteroffset)
print("using mst from 0x%x, seq: %08x/%08x" % (masteroffset, self.mst.hdr.sqnum, self.mst.cmt_no))
else:
self.mst = self.find_most_recent_master()
# todo: check that the 2nd master node matches the first.
#mst2 = self.readnode(2, 0)
self.root = self.readnode(self.mst.root_lnum, self.mst.root_offs)
def dumpfs(self):
print("[%03d:0x%05x-0x%05x] %s" % (self.sb.hdr.lnum, self.sb.hdr.offs, self.sb.hdr.offs+self.sb.hdr.len, self.sb))
print("[%03d:0x%05x-0x%05x] %s" % (self.mst.hdr.lnum, self.mst.hdr.offs, self.mst.hdr.offs+self.mst.hdr.len, self.mst))
def readnode(self, lnum, offs):
"""
read a node from a lnum + offset.
"""
ch = UbiFsCommonHeader()
hdrdata = self.vol.read(lnum, offs, ch.hdrsize)
ch.parse(hdrdata)
ch.lnum = lnum
ch.offs = offs
node = ch.getnode()
nodedata = self.vol.read(lnum, offs + ch.hdrsize, ch.len - ch.hdrsize)
if crc32(hdrdata[8:] + nodedata) != ch.crc:
node.parse(nodedata)
print(ch, node)
print(" %s + %s = %08x -> want = %08x" % ( b2a_hex(hdrdata), b2a_hex(nodedata), crc32(hdrdata[8:] + nodedata), ch.crc))
raise Exception("invalid node crc")
node.parse(nodedata)
return node
def writenode(self, node):
"""
Write a node from a lnum + offset.
TODO
"""
nodedata = node.encode()
node.hdr.len = len(nodedata) + node.hdr.hdrsize
hdrdata = node.hdr.encode()
node.hdr.crc = crc32(hdrdata[8:] + nodedata)
hdrdata = node.hdr.encode()
self.vol.write(node.hdr.lnum, node.hdr.offs, hdrdata+nodedata)
def dumpnode(self, lnum, offs):
node = self.readnode(lnum, offs)
print("[%03d:0x%05x-0x%05x] %s" % (lnum, offs, offs+node.hdr.len, node))
def printrecursive(self, idx):
"""
Recursively dump all b-tree nodes.
"""
print("[%03d:0x%05x-0x%05x] %s" % (idx.hdr.lnum, idx.hdr.offs, idx.hdr.offs+idx.hdr.len, idx))
if not hasattr(idx, 'branches'):
#print(idx)
return
for i, b in enumerate(idx.branches):
print("%s %d %s -> " % (" " * (6-idx.level), i, b), end=" ")
try:
n = self.readnode(b.lnum, b.offs)
self.printrecursive(n)
except Exception as e:
print("ERROR %s" % e)
def printmbitems(self):
print("--log [%03d] .. [%03d]" % (self.mst.log_lnum, self.mst.log_lnum+self.sb.log_lebs-1))
try:
self.dumpnode(self.mst.log_lnum, 0)
self.vol.hexdump(self.mst.log_lnum, 0, 0x100)
except Exception as e:
print(e)
print("--root")
try:
self.dumpnode(self.mst.root_lnum, self.mst.root_offs)
self.vol.hexdump(self.mst.root_lnum, self.mst.root_offs, self.mst.root_len)
except Exception as e:
print(e)
print("--gc [%03d]" % (self.mst.gc_lnum))
try:
self.vol.hexdump(self.mst.gc_lnum, 0, 0x100)
except Exception as e:
print(e)
print("--ihead")
try:
self.vol.hexdump(self.mst.ihead_lnum, self.mst.ihead_offs, self.mst.index_size)
except Exception as e:
print(e)
print("--lpt [%03d] .. [%03d]" % (self.mst.lpt_lnum, self.mst.lpt_lnum+self.sb.lpt_lebs-1))
try:
self.vol.hexdump(self.mst.lpt_lnum, self.mst.lpt_offs, 0x100)
except Exception as e:
print(e)
print("--nhead")
try:
self.vol.hexdump(self.mst.nhead_lnum, self.mst.nhead_offs, 0x100)
except Exception as e:
print(e)
print("--ltab")
try:
self.vol.hexdump(self.mst.ltab_lnum, self.mst.ltab_offs, 0x100)
except Exception as e:
print(e)
print("--lsave")
try:
self.vol.hexdump(self.mst.lsave_lnum, self.mst.lsave_offs, 0x100)
self.dumpnode(self.mst.lsave_lnum, self.mst.lsave_offs)
except Exception as e:
print(e)
print("--lscan")
try:
self.vol.hexdump(self.mst.lscan_lnum, 0, 0x100)
self.dumpnode(self.mst.lscan_lnum, 0)
except Exception as e:
print(e)
class Cursor:
"""
The Cursor represents a position in the b-tree.
"""
def __init__(self, fs, stack):
self.fs = fs
self.stack = stack
def next(self):
""" move cursor to next entry """
if not self.stack:
# starting at 'eof'
page = self.fs.root
ix = 0
else:
page, ix = self.stack.pop()
while self.stack and ix==len(page.branches)-1:
page, ix = self.stack.pop()
if ix==len(page.branches)-1:
return
ix += 1
self.stack.append( (page, ix) )
while page.level:
page = self.fs.readnode(page.branches[ix].lnum, page.branches[ix].offs)
ix = 0
self.stack.append( (page, ix) )
def prev(self):
""" move cursor to next entry """
if not self.stack:
# starting at 'eof'
page = self.fs.root
ix = len(page.branches)-1
else:
page, ix = self.stack.pop()
while self.stack and ix==0:
page, ix = self.stack.pop()
if ix==0:
return
ix -= 1
self.stack.append( (page, ix) )
while page.level:
page = self.fs.readnode(page.branches[ix].lnum, page.branches[ix].offs)
ix = len(page.branches)-1
self.stack.append( (page, ix) )
def eof(self):
return len(self.stack)==0
def __repr__(self):
return "[%s]" % (",".join(str(_[1]) for _ in self.stack))
def getkey(self):
"""
Returns the key tuple for the current item
"""
if self.stack:
page, ix = self.stack[-1]
return unpackkey(page.branches[ix].key)
def getnode(self):
"""
Returns the node object for the current item
"""
if self.stack:
page, ix = self.stack[-1]
return self.fs.readnode(page.branches[ix].lnum, page.branches[ix].offs)
def find(self, rel, key, root=None):
"""
returns a cursor for the relation + key.
('lt', searchkey) searches for the highest ordered node with a key less than `searchkey`
('ge', searchkey) searches for the lowest ordered node with a key greater or equal to `searchkey`
etc...
"""
stack = []
page = self.root if root is None else root
while len(stack)<32:
act, ix = page.find(packkey(key))
stack.append( (page, ix) )
if page.level==0:
break
page = self.readnode(page.branches[ix].lnum, page.branches[ix].offs)
if len(stack)==32:
raise Exception("tree too deep")
cursor = self.Cursor(self, stack)
"""
act rel: | lt le eq ge gt
(lt, 0) key < 0 | None None None pass pass
(eq, ix) key == ix | -- pass pass pass ++
(gt, ix) ix < key < ix+1 | pass pass None ++ ++
"""
if (act+rel) in ('gtlt', 'gtle', 'eqle', 'eqeq', 'eqge', 'ltge', 'ltgt'):
return cursor
if (act+rel) in ('ltlt', 'ltle', 'lteq', 'gteq'):
return None
if (act+rel) == 'eqlt':
cursor.prev()
return cursor
if (act+rel) in ('eqgt', 'gtge', 'gtgt'):
cursor.next()
return cursor
raise Exception("unexpected case")
def setkey(self, key, node):
pass
        # TODO: updating/inserting keys is not implemented yet
def recursefiles(self, inum, path, filter = 1<<UbiFsDirEntry.TYPE_REGULAR, root=None):
"""
Recursively yield all files below the directory with inode `inum`
"""
startkey = (inum, UBIFS_DENT_KEY, 0)
endkey = (inum, UBIFS_DENT_KEY+1, 0)
if root is None:
root = self.root
c = self.find('ge', startkey, root)
while not c.eof() and c.getkey() < endkey:
ent = c.getnode()
if filter & (1<<ent.type):
yield ent.inum, path + [ent.name]
if ent.type==ent.TYPE_DIRECTORY:
# recurse into subdirs
for x in self.recursefiles(ent.inum, path + [ent.name], filter, root):
yield x
c.next()
def exportfile(self, inum, fh, ubiname):
"""
save file data from inode `inum` to the filehandle `fh`.
the `ubiname` argument is not needed, except for printing useful error messages.
"""
startkey = (inum, UBIFS_DATA_KEY, 0)
endkey = (inum, UBIFS_DATA_KEY+1, 0)
c = self.find('ge', startkey)
savedlen = 0
while not c.eof() and c.getkey() < endkey:
dat = c.getnode()
_, _, blocknum = c.getkey()
fh.seek(UBIFS_BLOCKSIZE * blocknum)
fh.write(dat.data)
savedlen += len(dat.data)
c.next()
c = self.find('eq', (inum, UBIFS_INO_KEY, 0))
inode = c.getnode()
if savedlen > inode.size:
print("WARNING: found more (%d bytes) for inode %05d, than specified in the inode(%d bytes) -- %s" % (savedlen, inum, inode.size, ubiname))
elif savedlen < inode.size:
# padding file with zeros
fh.seek(inode.size)
fh.truncate(inode.size)
def findfile(self, path, inum = 1):
"""
find the inode of the given `path`, starting in the directory specified by `inum`
`path` must be a list of path elements. ( so not a '/' separated path string )
"""
itype = UbiFsDirEntry.TYPE_DIRECTORY
for part in path:
if itype!=UbiFsDirEntry.TYPE_DIRECTORY:
# not a directory
return None
c = self.find('eq', (inum, UBIFS_DENT_KEY, namehash(part)))
if not c or c.eof():
# not found
return None
dirent = c.getnode()
inum, itype = dirent.inum, dirent.type
return inum
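# A minimal usage sketch (hypothetical image path; RawVolume and the key
# constants are defined earlier in this module):
#   with open("rootfs.ubifs", "rb") as fh:
#       fs = UbiFs(RawVolume(fh), masteroffset=None)
#       inum = fs.findfile([b"etc", b"passwd"])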
def modestring(mode):
"""
return a "-rw-r--r--" style mode string
"""
# 4 bits type
# 3 bits suid/sgid/sticky
# 3 bits owner perm
# 3 bits group perm
# 3 bits other perm
typechar = "?pc?d?b?-?l?s???"
def rwx(bits, extra, xchar):
rflag = "-r"[(bits>>2)&1]
wflag = "-w"[(bits>>1)&1]
xflag = ("-x" + xchar.upper() + xchar.lower())[(bits&1)+2*extra]
return rflag + wflag + xflag
return typechar[(mode>>12)&15] + rwx((mode>>6)&7, (mode>>11)&1, 's') + rwx((mode>>3)&7, (mode>>10)&1, 's') + rwx(mode&7, (mode>>9)&1, 't')
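# Worked examples for modestring, following the bit layout above:
#   modestring(0o100644) == "-rw-r--r--"   # regular file, mode 644
#   modestring(0o104755) == "-rwsr-xr-x"   # setuid executable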
def timestring(t):
return datetime.datetime.utcfromtimestamp(t).strftime("%Y-%m-%d %H:%M:%S")
def processvolume(vol, volumename, args):
"""
Perform actions specified by `args` on `vol`.
    `vol` can be either a RawVolume (an image file containing only the filesystem,
    with no flash block management layer) or a UbiVolume, which includes the
    block management layer.
"""
fs = UbiFs(vol, args.masteroffset)
if args.verbose:
fs.dumpfs()
root = fs.root
if args.root:
lnum, offset = args.root.split(':', 1)
lnum = int(lnum, 16)
offset = int(offset, 16)
root = fs.readnode(lnum, offset)
if args.hexdump and isinstance(vol, RawVolume):
vol.hexdump(*args.hexdump)
if args.nodedump:
fs.dumpnode(*args.nodedump)
if args.dumptree:
fs.printrecursive(root)
if args.verbose:
fs.printmbitems()
if args.savedir:
savedir = args.savedir.encode(args.encoding)
count = 0
for inum, path in fs.recursefiles(1, [], UbiFsDirEntry.ALL_TYPES, root=root):
c = fs.find('eq', (inum, UBIFS_INO_KEY, 0))
inode = c.getnode()
typ = inode.nodetype()
# fullpath = os.path.join(*[savedir, volumename] + path)
fullpath = os.path.join(*[savedir] + path)
try:
if typ == inode.ITYPE_FIFO:
os.mkfifo(fullpath)
elif typ == inode.ITYPE_SOCKET:
import socket as s
sock = s.socket(s.AF_UNIX)
sock.bind(fullpath)
elif typ == inode.ITYPE_SYMLINK:
os.symlink(inode.data, fullpath)
elif typ == inode.ITYPE_DIRECTORY:
os.makedirs(fullpath)
elif typ == inode.ITYPE_REGULAR:
with open(fullpath, "wb") as fh:
fs.exportfile(inum, fh, os.path.join(*path))
elif typ in (inode.ITYPE_BLOCKDEV, inode.ITYPE_CHARDEV):
try:
devnum = os.makedev(*inode.devnum())
if devnum < 0:
devnum += 0x100000000
os.mknod(fullpath, inode.mode, devnum)
except PermissionError as e:
# silently ignoring permission error
pass
else:
if args.verbose:
print("UNKNOWN inode type: %d" % typ)
continue
except OSError as e:
if e.errno != errno.EEXIST:
raise
if args.preserve and typ != inode.ITYPE_SYMLINK:
# note: we have to do this after closing the file, since the close after exportfile
# will update the last-modified time.
os.utime(fullpath, (inode.atime(), inode.mtime()))
os.chmod(fullpath, inode.mode)
count += 1
print("saved %d files" % count)
if args.listfiles:
for inum, path in fs.recursefiles(1, [], UbiFsDirEntry.ALL_TYPES, root=root):
c = fs.find('eq', (inum, UBIFS_INO_KEY, 0))
inode = c.getnode()
if inode.nodetype() in (inode.ITYPE_CHARDEV, inode.ITYPE_BLOCKDEV): # char or block dev.
sizestr = "%d,%4d" % inode.devnum()
else:
sizestr = str(inode.size)
if inode.nodetype() == inode.ITYPE_SYMLINK:
linkdata = inode.data
if args.encoding:
linkdata = linkdata.decode(args.encoding, 'ignore')
linkstr = " -> %s" % linkdata
else:
linkstr = ""
filename = b"/".join(path)
if args.encoding:
filename = filename.decode(args.encoding, 'ignore')
print("%s %2d %-5d %-5d %10s %s %s%s" % (modestring(inode.mode), inode.nlink, inode.uid, inode.gid, sizestr, timestring(inode.mtime_sec), filename, linkstr))
for srcfile in args.cat:
if len(args.cat)>1:
print("==>", srcfile, "<==")
inum = fs.findfile(srcfile.lstrip('/').split('/'))
if inum:
fs.exportfile(inum, SeekableStdout(), srcfile)
if len(args.cat)>1:
print()
else:
print("Not found")
def processblocks(fh, args):
"""
Perform operations on a UbiBlocks type image: starting with bytes 'UBI#'
"""
blks = UbiBlocks(fh)
if args.verbose:
print("===== block =====")
blks.dumpvtbl()
if args.hexdump:
if args.volume is None:
blks.hexdump(*args.hexdump)
else:
vol = blks.getvolume(args.volume)
vol.hexdump(*args.hexdump)
for volid in range(128):
vrec = blks.getvrec(volid)
if vrec.empty():
continue
vol = blks.getvolume(volid)
try:
print("== volume %s ==" % vrec.name)
processvolume(vol, vrec.name, args)
except Exception as e:
print("E: %s" % e)
if args.debug:
raise
##################################################
# raw hexdumper
def findpattern(data, pattn, blocksize):
o = 0
while o<len(data):
p = data.find(pattn, o)
if p==-1:
break
if (p % blocksize)==0:
yield p
else:
print("%08x: %x - %s" % (p, p % blocksize, data[p:p+4]))
o = p+1
def raw_ec_dump(o, data):
# note: ec blocks should be all the same.
data = data.rstrip(b'\xff')
if len(data)!=64:
print("%08x: %s" % (o, b2a_hex(data.rstrip(b'\xff'))))
return
m, v, ec, vidofs, datofs, iseq, zero, crc = struct.unpack(">4sLQLLL32sL", data)
print("%08x: %s %08x %010x %08x %08x %08x %s %08x" % (o, m, v, ec, vidofs, datofs, iseq, zero, crc))
def raw_vid_dump(o, data):
i = 0
while i < len(data):
print("%08x: %s" % (o+i, data[i:i+0xAC]))
i += 0xAC
def raw_vhdr_dump(o, data):
data = data.rstrip(b'\xff')
data2 = b''
o2 = 0
if len(data)!=64:
data2 = data[64:].lstrip(b'\xff')
o2 = o + data.find(data2)
data = data[:64]
m, v, vt, cf, compat, volid, lnum, zero1, dsize, usedebs, pad, dcrc, zero2, sqnum, zero3, hcrc = struct.unpack(">4s4BLL4s4L4sQ12sL", data)
print("%08x: %s %d %d %d %d %08x %08x %s %08x %08x %08x %08x %s %010x %s %08x" % (o, m, v, vt, cf, compat, volid, lnum, zero1, dsize, usedebs, pad, dcrc, zero2, sqnum, zero3, hcrc))
if len(data2)==0xAC*0x80:
raw_vid_dump(o2, data2)
def raw_node_dump(o, data):
ch = UbiFsCommonHeader()
ch.parse(data[:24])
node = ch.getnode()
try:
node.parse(data[24:])
except Exception as e:
pass
try:
print("%08x: %s - %s" % (o, repr(ch), repr(node)))
except Exception as e:
print("%08x: %s" % (o, b2a_hex(data)))
def rawhexdump(fh, args):
data = fh.read()
ofs = []
for pattn, bs in ((b'UBI#', 64), (b'UBI!', 64), (b'\x31\x18\x10\x06', 8)):
for o in findpattern(data, pattn, bs):
ofs.append( (o, pattn) )
ofs = sorted(ofs, key=lambda o: o[0])
print("found %d magic numbers" % len(ofs))
ofs.append( (len(data), None) )
for (o0, p), (o1, _) in zip(ofs, ofs[1:]):
if p==b'UBI#':
raw_ec_dump(o0, data[o0:o1])
elif p==b'UBI!':
raw_vhdr_dump(o0, data[o0:o1])
elif p==b'\x31\x18\x10\x06':
raw_node_dump(o0, data[o0:o1])
else:
print("%08x: %s" % (o0, b2a_hex(data[o0:o1])))
##################################################
def processfile(fn, args):
with open(fn, "rb") as fh:
if args.rawdump:
rawhexdump(fh, args)
else:
magic = fh.read(4)
if magic == b'UBI#':
processblocks(fh, args)
elif magic == b'\x31\x18\x10\x06':
processvolume(RawVolume(fh), b"raw", args)
else:
print("Unknown file type")
def main():
parser = argparse.ArgumentParser(description='UBIFS dumper.')
parser.add_argument('--savedir', '-s', type=str, help="save files in all volumes to the specified directory", metavar='DIRECTORY')
parser.add_argument('--preserve', '-p', action='store_true', help="preserve permissions and timestamps")
parser.add_argument('--cat', '-c', type=str, action="append", help="extract a single file to stdout", metavar='FILE', default=[])
parser.add_argument('--listfiles', '-l', action='store_true', help="list directory contents")
parser.add_argument('--dumptree', '-d', action='store_true', help="dump the filesystem b-tree contents")
parser.add_argument('--verbose', '-v', action='count', help="print extra info")
parser.add_argument('--debug', action='store_true', help="abort on exceptions")
parser.add_argument('--encoding', '-e', type=str, help="filename encoding, default=utf-8", default='utf-8')
parser.add_argument('--masteroffset', '-m', type=str, help="Which master node to use.")
parser.add_argument('--root', '-R', type=str, help="Which Root node to use (hexlnum:hexoffset).")
parser.add_argument('--rawdump', action='store_true', help="Raw hexdump of entire volume.")
parser.add_argument('--volume', type=str, help="which volume to hexdump")
parser.add_argument('--hexdump', type=str, help="hexdump part of a volume/leb[/ofs[/size]]", metavar="LEB:OFF:N")
parser.add_argument('--nodedump', type=str, help="dump specific node at volume/leb[/ofs]", metavar="LEB:OFF")
parser.add_argument('FILES', type=str, nargs='+', help="list of ubi images to use")
args = parser.parse_args()
if args.masteroffset:
args.masteroffset = [int(_,0) for _ in args.masteroffset.split(':')]
if args.volume:
args.volume = int(args.volume, 0)
if args.hexdump:
args.hexdump = [int(_, 0) for _ in args.hexdump.split(":")]
if len(args.hexdump) == 1:
args.hexdump.append(0)
if len(args.hexdump) == 2:
args.hexdump.append(0x100)
if args.nodedump:
args.nodedump = [int(_, 0) for _ in args.nodedump.split(":")]
if len(args.nodedump) == 1:
args.nodedump.append(0)
for fn in args.FILES:
print("==>", fn, "<==")
try:
processfile(fn, args)
except Exception as e:
print("ERROR", e)
if args.debug:
raise
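# Example invocations (the script name is hypothetical):
#   python ubidump.py --listfiles firmware.ubi
#   python ubidump.py --savedir out --preserve firmware.ubi
#   python ubidump.py --cat /etc/passwd firmware.ubi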
if __name__ == '__main__':
main()
|
[
"argparse.ArgumentParser",
"pkg_resources.require",
"socket.socket",
"collections.defaultdict",
"sys.stdout.flush",
"lzo.decompress",
"lzo.compress",
"os.path.join",
"struct.unpack_from",
"traceback.print_exc",
"struct.pack",
"datetime.datetime.utcfromtimestamp",
"zlib.decompress",
"sys.setdefaultencoding",
"os.chmod",
"os.mknod",
"struct.unpack",
"zlib.compress",
"os.mkfifo",
"os.makedirs",
"binascii.b2a_hex",
"os.symlink"
] |
[((686, 721), 'pkg_resources.require', 'pkg_resources.require', (['dependencies'], {}), '(dependencies)\n', (707, 721), False, 'import pkg_resources\n'), ((591, 622), 'sys.setdefaultencoding', 'sys.setdefaultencoding', (['"""utf-8"""'], {}), "('utf-8')\n", (613, 622), False, 'import sys\n'), ((12070, 12099), 'struct.unpack', 'struct.unpack', (['"""<LL"""', 'key[:8]'], {}), "('<LL', key[:8])\n", (12083, 12099), False, 'import struct\n'), ((12206, 12250), 'struct.pack', 'struct.pack', (['"""<LL"""', 'inum', '(ityp << 29 | value)'], {}), "('<LL', inum, ityp << 29 | value)\n", (12217, 12250), False, 'import struct\n'), ((48250, 48285), 'struct.unpack', 'struct.unpack', (['""">4sLQLLL32sL"""', 'data'], {}), "('>4sLQLLL32sL', data)\n", (48263, 48285), False, 'import struct\n'), ((48842, 48883), 'struct.unpack', 'struct.unpack', (['""">4s4BLL4s4L4sQ12sL"""', 'data'], {}), "('>4s4BLL4s4L4sQ12sL', data)\n", (48855, 48883), False, 'import struct\n'), ((50642, 50694), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""UBIFS dumper."""'}), "(description='UBIFS dumper.')\n", (50665, 50694), False, 'import argparse\n'), ((1820, 1838), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (1836, 1838), False, 'import sys\n'), ((2067, 2085), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (2083, 2085), False, 'import sys\n'), ((2569, 2606), 'struct.unpack', 'struct.unpack', (['""">4sB3xQLLL32xL"""', 'data'], {}), "('>4sB3xQLLL32xL', data)\n", (2582, 2606), False, 'import struct\n'), ((2812, 2936), 'struct.pack', 'struct.pack', (['""">4sB3xQLLL32x"""', 'self.magic', 'self.version', 'self.erasecount', 'self.vid_hdr_ofs', 'self.data_ofs', 'self.image_seq'], {}), "('>4sB3xQLLL32x', self.magic, self.version, self.erasecount,\n self.vid_hdr_ofs, self.data_ofs, self.image_seq)\n", (2823, 2936), False, 'import struct\n'), ((3667, 3708), 'struct.unpack', 'struct.unpack', (['""">4s4BLL4x4L4xQ12xL"""', 'data'], {}), "('>4s4BLL4x4L4xQ12xL', data)\n", (3680, 3708), False, 'import struct\n'), ((3915, 4122), 'struct.pack', 'struct.pack', (['""">4s4BLL4x4L4xQ12x"""', 'self.magic', 'self.version', 'self.vol_type', 'self.copy_flag', 'self.compat', 'self.vol_id', 'self.lnum', 'self.data_size', 'self.used_ebs', 'self.data_pad', 'self.data_crc', 'self.sqnum'], {}), "('>4s4BLL4x4L4xQ12x', self.magic, self.version, self.vol_type,\n self.copy_flag, self.compat, self.vol_id, self.lnum, self.data_size,\n self.used_ebs, self.data_pad, self.data_crc, self.sqnum)\n", (3926, 4122), False, 'import struct\n'), ((5048, 5086), 'struct.unpack', 'struct.unpack', (['""">3LBBH128sB23xL"""', 'data'], {}), "('>3LBBH128sB23xL', data)\n", (5061, 5086), False, 'import struct\n'), ((5245, 5395), 'struct.pack', 'struct.pack', (['""">3LBBH128sB23x"""', 'self.reserved_pebs', 'self.alignment', 'self.data_pad', 'self.vol_type', 'self.upd_marker', 'name_len', 'self.name', 'self.flags'], {}), "('>3LBBH128sB23x', self.reserved_pebs, self.alignment, self.\n data_pad, self.vol_type, self.upd_marker, name_len, self.name, self.flags)\n", (5256, 5395), False, 'import struct\n'), ((7225, 7261), 'struct.unpack', 'struct.unpack', (['"""<12L"""', 'data[:4 * 12]'], {}), "('<12L', data[:4 * 12])\n", (7238, 7261), False, 'import struct\n'), ((15011, 15065), 'struct.unpack', 'struct.unpack', (['"""<16s5Q11L4xLH26x"""', 'data[:self.hdrsize]'], {}), "('<16s5Q11L4xLH26x', data[:self.hdrsize])\n", (15024, 15065), False, 'import struct\n'), ((15307, 15633), 'struct.pack', 'struct.pack', (['"""<16s5Q11L4xLH26x"""', 'self.key', 
'self.creat_sqnum', 'self.size', 'self.atime_sec', 'self.ctime_sec', 'self.mtime_sec', 'self.atime_nsec', 'self.ctime_nsec', 'self.mtime_nsec', 'self.nlink', 'self.uid', 'self.gid', 'self.mode', 'self.flags', 'self.data_len', 'self.xattr_cnt', 'self.xattr_size', 'self.xattr_names', 'self.compr_type'], {}), "('<16s5Q11L4xLH26x', self.key, self.creat_sqnum, self.size, self\n .atime_sec, self.ctime_sec, self.mtime_sec, self.atime_nsec, self.\n ctime_nsec, self.mtime_nsec, self.nlink, self.uid, self.gid, self.mode,\n self.flags, self.data_len, self.xattr_cnt, self.xattr_size, self.\n xattr_names, self.compr_type)\n", (15318, 15633), False, 'import struct\n'), ((16926, 16960), 'struct.unpack', 'struct.unpack', (['"""BB"""', 'self.data[:2]'], {}), "('BB', self.data[:2])\n", (16939, 16960), False, 'import struct\n'), ((17435, 17481), 'struct.unpack', 'struct.unpack', (['"""<16sLH2x"""', 'data[:self.hdrsize]'], {}), "('<16sLH2x', data[:self.hdrsize])\n", (17448, 17481), False, 'import struct\n'), ((18595, 18643), 'struct.unpack', 'struct.unpack', (['"""<16sQxBH4x"""', 'data[:self.hdrsize]'], {}), "('<16sQxBH4x', data[:self.hdrsize])\n", (18608, 18643), False, 'import struct\n'), ((18810, 18873), 'struct.pack', 'struct.pack', (['"""<16sQxBH4x"""', 'self.key', 'self.inum', 'self.type', 'nlen'], {}), "('<16sQxBH4x', self.key, self.inum, self.type, nlen)\n", (18821, 18873), False, 'import struct\n'), ((19870, 19900), 'struct.unpack', 'struct.unpack', (['"""<L12xQQ"""', 'data'], {}), "('<L12xQQ', data)\n", (19883, 19900), False, 'import struct\n'), ((19938, 20001), 'struct.pack', 'struct.pack', (['"""<L12xQQ"""', 'self.inum', 'self.old_size', 'self.new_size'], {}), "('<L12xQQ', self.inum, self.old_size, self.new_size)\n", (19949, 20001), False, 'import struct\n'), ((20276, 20309), 'struct.unpack_from', 'struct.unpack_from', (['"""<L"""', 'data', '(0)'], {}), "('<L', data, 0)\n", (20294, 20309), False, 'import struct\n'), ((20347, 20378), 'struct.pack', 'struct.pack', (['"""<L"""', 'self.pad_len'], {}), "('<L', self.pad_len)\n", (20358, 20378), False, 'import struct\n'), ((21079, 21138), 'struct.unpack', 'struct.unpack', (['"""<2xBB5LQ7LH2xLLQL16sL"""', 'data[:self.hdrsize]'], {}), "('<2xBB5LQ7LH2xLLQL16sL', data[:self.hdrsize])\n", (21092, 21138), False, 'import struct\n'), ((21284, 21689), 'struct.pack', 'struct.pack', (['"""<2xBB5LQ7LH2xLLQL16sL"""', 'self.key_hash', 'self.key_fmt', 'self.flags', 'self.min_io_size', 'self.leb_size', 'self.leb_cnt', 'self.max_leb_cnt', 'self.max_bud_bytes', 'self.log_lebs', 'self.lpt_lebs', 'self.orph_lebs', 'self.jhead_cnt', 'self.fanout', 'self.lsave_cnt', 'self.fmt_version', 'self.default_compr', 'self.rp_uid', 'self.rp_gid', 'self.rp_size', 'self.time_gran', 'self.uuid', 'self.ro_compat_version'], {}), "('<2xBB5LQ7LH2xLLQL16sL', self.key_hash, self.key_fmt, self.\n flags, self.min_io_size, self.leb_size, self.leb_cnt, self.max_leb_cnt,\n self.max_bud_bytes, self.log_lebs, self.lpt_lebs, self.orph_lebs, self.\n jhead_cnt, self.fanout, self.lsave_cnt, self.fmt_version, self.\n default_compr, self.rp_uid, self.rp_gid, self.rp_size, self.time_gran,\n self.uuid, self.ro_compat_version)\n", (21295, 21689), False, 'import struct\n'), ((23280, 23328), 'struct.unpack', 'struct.unpack', (['"""<QQ8L6Q12L"""', 'data[:self.hdrsize]'], {}), "('<QQ8L6Q12L', data[:self.hdrsize])\n", (23293, 23328), False, 'import struct\n'), ((23470, 23972), 'struct.pack', 'struct.pack', (['"""<QQ8L6Q12L"""', 'self.highest_inum', 'self.cmt_no', 'self.flags', 'self.log_lnum', 'self.root_lnum', 
'self.root_offs', 'self.root_len', 'self.gc_lnum', 'self.ihead_lnum', 'self.ihead_offs', 'self.index_size', 'self.total_free', 'self.total_dirty', 'self.total_used', 'self.total_dead', 'self.total_dark', 'self.lpt_lnum', 'self.lpt_offs', 'self.nhead_lnum', 'self.nhead_offs', 'self.ltab_lnum', 'self.ltab_offs', 'self.lsave_lnum', 'self.lsave_offs', 'self.lscan_lnum', 'self.empty_lebs', 'self.idx_lebs', 'self.leb_cnt'], {}), "('<QQ8L6Q12L', self.highest_inum, self.cmt_no, self.flags, self.\n log_lnum, self.root_lnum, self.root_offs, self.root_len, self.gc_lnum,\n self.ihead_lnum, self.ihead_offs, self.index_size, self.total_free,\n self.total_dirty, self.total_used, self.total_dead, self.total_dark,\n self.lpt_lnum, self.lpt_offs, self.nhead_lnum, self.nhead_offs, self.\n ltab_lnum, self.ltab_offs, self.lsave_lnum, self.lsave_offs, self.\n lscan_lnum, self.empty_lebs, self.idx_lebs, self.leb_cnt)\n", (23481, 23972), False, 'import struct\n'), ((25244, 25273), 'struct.unpack', 'struct.unpack', (['"""<3L28x"""', 'data'], {}), "('<3L28x', data)\n", (25257, 25273), False, 'import struct\n'), ((25311, 25366), 'struct.pack', 'struct.pack', (['"""<3L28x"""', 'self.lnum', 'self.offs', 'self.jhead'], {}), "('<3L28x', self.lnum, self.offs, self.jhead)\n", (25322, 25366), False, 'import struct\n'), ((26212, 26253), 'struct.unpack', 'struct.unpack', (['"""<HH"""', 'data[:self.hdrsize]'], {}), "('<HH', data[:self.hdrsize])\n", (26225, 26253), False, 'import struct\n'), ((26661, 26707), 'struct.pack', 'struct.pack', (['"""<HH"""', 'self.child_cnt', 'self.level'], {}), "('<HH', self.child_cnt, self.level)\n", (26672, 26707), False, 'import struct\n'), ((28126, 28166), 'struct.unpack', 'struct.unpack', (['"""<Q"""', 'data[:self.hdrsize]'], {}), "('<Q', data[:self.hdrsize])\n", (28139, 28166), False, 'import struct\n'), ((28204, 28234), 'struct.pack', 'struct.pack', (['"""<Q"""', 'self.cmt_no'], {}), "('<Q', self.cmt_no)\n", (28215, 28234), False, 'import struct\n'), ((28447, 28487), 'struct.unpack', 'struct.unpack', (['"""<Q"""', 'data[:self.hdrsize]'], {}), "('<Q', data[:self.hdrsize])\n", (28460, 28487), False, 'import struct\n'), ((28546, 28576), 'struct.pack', 'struct.pack', (['"""<Q"""', 'self.cmt_no'], {}), "('<Q', self.cmt_no)\n", (28557, 28576), False, 'import struct\n'), ((29523, 29555), 'struct.unpack', 'struct.unpack', (['"""<LLQLBB2x"""', 'data'], {}), "('<LLQLBB2x', data)\n", (29536, 29555), False, 'import struct\n'), ((29680, 29786), 'struct.pack', 'struct.pack', (['"""<LLQLBB2x"""', 'self.magic', 'self.crc', 'self.sqnum', 'self.len', 'self.node_type', 'self.group_type'], {}), "('<LLQLBB2x', self.magic, self.crc, self.sqnum, self.len, self.\n node_type, self.group_type)\n", (29691, 29786), False, 'import struct\n'), ((12907, 12942), 'lzo.decompress', 'lzo.decompress', (['data', '(False)', 'buflen'], {}), '(data, False, buflen)\n', (12921, 12942), False, 'import lzo\n'), ((13224, 13249), 'lzo.compress', 'lzo.compress', (['data', '(False)'], {}), '(data, False)\n', (13236, 13249), False, 'import lzo\n'), ((25769, 25810), 'struct.unpack', 'struct.unpack', (['"""<3L"""', 'data[:self.hdrsize]'], {}), "('<3L', data[:self.hdrsize])\n", (25782, 25810), False, 'import struct\n'), ((42444, 42481), 'datetime.datetime.utcfromtimestamp', 'datetime.datetime.utcfromtimestamp', (['t'], {}), '(t)\n', (42478, 42481), False, 'import datetime\n'), ((43716, 43749), 'os.path.join', 'os.path.join', (['*([savedir] + path)'], {}), '(*([savedir] + path))\n', (43728, 43749), False, 'import os\n'), ((8702, 8718), 
'collections.defaultdict', 'defaultdict', (['int'], {}), '(int)\n', (8713, 8718), False, 'from collections import defaultdict\n'), ((10503, 10524), 'traceback.print_exc', 'traceback.print_exc', ([], {}), '()\n', (10522, 10524), False, 'import traceback\n'), ((12038, 12050), 'binascii.b2a_hex', 'b2a_hex', (['key'], {}), '(key)\n', (12045, 12050), False, 'from binascii import b2a_hex\n'), ((12991, 13029), 'zlib.decompress', 'zlib.decompress', (['data', '(-zlib.MAX_WBITS)'], {}), '(data, -zlib.MAX_WBITS)\n', (13006, 13029), False, 'import zlib\n'), ((13298, 13334), 'zlib.compress', 'zlib.compress', (['data', '(-zlib.MAX_WBITS)'], {}), '(data, -zlib.MAX_WBITS)\n', (13311, 13334), False, 'import zlib\n'), ((22490, 22508), 'binascii.b2a_hex', 'b2a_hex', (['self.uuid'], {}), '(self.uuid)\n', (22497, 22508), False, 'from binascii import b2a_hex\n'), ((25899, 25949), 'struct.pack', 'struct.pack', (['"""<3L"""', 'self.lnum', 'self.offs', 'self.len'], {}), "('<3L', self.lnum, self.offs, self.len)\n", (25910, 25949), False, 'import struct\n'), ((45393, 45423), 'os.chmod', 'os.chmod', (['fullpath', 'inode.mode'], {}), '(fullpath, inode.mode)\n', (45401, 45423), False, 'import os\n'), ((43830, 43849), 'os.mkfifo', 'os.mkfifo', (['fullpath'], {}), '(fullpath)\n', (43839, 43849), False, 'import os\n'), ((10444, 10460), 'binascii.b2a_hex', 'b2a_hex', (['viddata'], {}), '(viddata)\n', (10451, 10460), False, 'from binascii import b2a_hex\n'), ((15989, 16007), 'binascii.b2a_hex', 'b2a_hex', (['self.data'], {}), '(self.data)\n', (15996, 16007), False, 'from binascii import b2a_hex\n'), ((32218, 32234), 'binascii.b2a_hex', 'b2a_hex', (['hdrdata'], {}), '(hdrdata)\n', (32225, 32234), False, 'from binascii import b2a_hex\n'), ((32236, 32253), 'binascii.b2a_hex', 'b2a_hex', (['nodedata'], {}), '(nodedata)\n', (32243, 32253), False, 'from binascii import b2a_hex\n'), ((43965, 43984), 'socket.socket', 's.socket', (['s.AF_UNIX'], {}), '(s.AF_UNIX)\n', (43973, 43984), True, 'import socket as s\n'), ((49449, 49462), 'binascii.b2a_hex', 'b2a_hex', (['data'], {}), '(data)\n', (49456, 49462), False, 'from binascii import b2a_hex\n'), ((44094, 44126), 'os.symlink', 'os.symlink', (['inode.data', 'fullpath'], {}), '(inode.data, fullpath)\n', (44104, 44126), False, 'import os\n'), ((44198, 44219), 'os.makedirs', 'os.makedirs', (['fullpath'], {}), '(fullpath)\n', (44209, 44219), False, 'import os\n'), ((50131, 50151), 'binascii.b2a_hex', 'b2a_hex', (['data[o0:o1]'], {}), '(data[o0:o1])\n', (50138, 50151), False, 'from binascii import b2a_hex\n'), ((44370, 44389), 'os.path.join', 'os.path.join', (['*path'], {}), '(*path)\n', (44382, 44389), False, 'import os\n'), ((44663, 44701), 'os.mknod', 'os.mknod', (['fullpath', 'inode.mode', 'devnum'], {}), '(fullpath, inode.mode, devnum)\n', (44671, 44701), False, 'import os\n')]
|
# Copyright 2013 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_serialization import jsonutils as json
from tempest.lib.common import rest_client
class IdentityClient(rest_client.RestClient):
api_version = "v3"
def show_api_description(self):
"""Retrieves info about the v3 Identity API"""
url = ''
resp, body = self.get(url)
self.expected_success(200, resp.status)
body = json.loads(body)
return rest_client.ResponseBody(resp, body)
def show_token(self, resp_token):
"""Get token details."""
headers = {'X-Subject-Token': resp_token}
resp, body = self.get("auth/tokens", headers=headers)
self.expected_success(200, resp.status)
body = json.loads(body)
return rest_client.ResponseBody(resp, body)
def delete_token(self, resp_token):
"""Deletes token."""
headers = {'X-Subject-Token': resp_token}
resp, body = self.delete("auth/tokens", headers=headers)
self.expected_success(204, resp.status)
return rest_client.ResponseBody(resp, body)
|
[
"tempest.lib.common.rest_client.ResponseBody",
"oslo_serialization.jsonutils.loads"
] |
[((1007, 1023), 'oslo_serialization.jsonutils.loads', 'json.loads', (['body'], {}), '(body)\n', (1017, 1023), True, 'from oslo_serialization import jsonutils as json\n'), ((1039, 1075), 'tempest.lib.common.rest_client.ResponseBody', 'rest_client.ResponseBody', (['resp', 'body'], {}), '(resp, body)\n', (1063, 1075), False, 'from tempest.lib.common import rest_client\n'), ((1323, 1339), 'oslo_serialization.jsonutils.loads', 'json.loads', (['body'], {}), '(body)\n', (1333, 1339), True, 'from oslo_serialization import jsonutils as json\n'), ((1355, 1391), 'tempest.lib.common.rest_client.ResponseBody', 'rest_client.ResponseBody', (['resp', 'body'], {}), '(resp, body)\n', (1379, 1391), False, 'from tempest.lib.common import rest_client\n'), ((1640, 1676), 'tempest.lib.common.rest_client.ResponseBody', 'rest_client.ResponseBody', (['resp', 'body'], {}), '(resp, body)\n', (1664, 1676), False, 'from tempest.lib.common import rest_client\n')]
|
"""The :func:`deephyper.nas.run.horovod.run` function is used to evaluate a deep neural network by enabling data-parallelism with Horovod to the :func:`deephyper.nas.run.alpha.run` function. This function will automatically apply the linear scaling rule to the learning rate and batch size given the current number of ranks (i.e., the initial learning rate and batch size are scaled by the number of ranks).
"""
import os
import traceback
import logging
import numpy as np
import tensorflow as tf
from deephyper.contrib.callbacks import import_callback
import horovod.tensorflow.keras as hvd
import deephyper.nas.trainer._arch as a
from deephyper.nas.trainer import HorovodTrainer
from deephyper.nas.run._util import (
compute_objective,
load_config,
preproc_trainer,
save_history,
setup_data,
get_search_space,
)
logger = logging.getLogger(__name__)
# Default callbacks parameters
default_callbacks_config = {
"EarlyStopping": dict(
monitor="val_loss", min_delta=0, mode="min", verbose=0, patience=0
),
"ModelCheckpoint": dict(
monitor="val_loss",
mode="min",
save_best_only=True,
verbose=1,
filepath="model.h5",
save_weights_only=False,
),
"TensorBoard": dict(
log_dir="",
histogram_freq=0,
batch_size=32,
write_graph=False,
write_grads=False,
write_images=False,
update_freq="epoch",
),
"CSVLogger": dict(filename="training.csv", append=True),
"CSVExtendedLogger": dict(filename="training.csv", append=True),
"TimeStopping": dict(),
"ReduceLROnPlateau": dict(patience=5, verbose=0),
}
# Name of Callbacks reserved for root node
hvd_root_cb = ["ModelCheckpoint", "TensorBoard", "CSVLogger", "CSVExtendedLogger"]
def run_horovod(config: dict) -> float:
hvd.init()
# Threading configuration
if os.environ.get("OMP_NUM_THREADS", None) is not None:
logger.debug(f"OMP_NUM_THREADS is {os.environ.get('OMP_NUM_THREADS')}")
num_intra = int(os.environ.get("OMP_NUM_THREADS"))
tf.config.threading.set_intra_op_parallelism_threads(num_intra)
tf.config.threading.set_inter_op_parallelism_threads(2)
if os.environ.get("CUDA_VISIBLE_DEVICES") is not None:
devices = os.environ.get("CUDA_VISIBLE_DEVICES").split(",")
os.environ["CUDA_VISIBLE_DEVICES"] = devices[hvd.rank()]
config["seed"]
seed = config["seed"]
if seed is not None:
np.random.seed(seed)
tf.random.set_seed(seed)
load_config(config)
# Scale batch size and learning rate according to the number of ranks
initial_lr = config[a.hyperparameters][a.learning_rate]
batch_size = config[a.hyperparameters][a.batch_size] * hvd.size()
learning_rate = config[a.hyperparameters][a.learning_rate] * hvd.size()
logger.info(
f"Scaled: 'batch_size' from {config[a.hyperparameters][a.batch_size]} to {batch_size} "
)
logger.info(
f"Scaled: 'learning_rate' from {config[a.hyperparameters][a.learning_rate]} to {learning_rate} "
)
config[a.hyperparameters][a.batch_size] = batch_size
config[a.hyperparameters][a.learning_rate] = learning_rate
input_shape, output_shape = setup_data(config)
search_space = get_search_space(config, input_shape, output_shape, seed=seed)
    # Create the model from the sampled architecture
model_created = False
try:
model = search_space.sample(config["arch_seq"])
model_created = True
    except Exception:
logger.info("Error: Model creation failed...")
logger.info(traceback.format_exc())
if model_created:
# Setup callbacks only
callbacks = [
# Horovod: broadcast initial variable states from rank 0 to all other processes.
# This is necessary to ensure consistent initialization of all workers when
# training is started with random weights or restored from a checkpoint.
hvd.callbacks.BroadcastGlobalVariablesCallback(0),
# Horovod: average metrics among workers at the end of every epoch.
#
# Note: This callback must be in the list before the ReduceLROnPlateau,
# TensorBoard or other metrics-based callbacks.
hvd.callbacks.MetricAverageCallback(),
# Horovod: using `lr = 1.0 * hvd.size()` from the very beginning leads to worse final
# accuracy. Scale the learning rate `lr = 1.0` ---> `lr = 1.0 * hvd.size()` during
# the first five epochs. See https://arxiv.org/abs/1706.02677 for details.
#! initial_lr argument is not available in horovod==0.19.0
hvd.callbacks.LearningRateWarmupCallback(
warmup_epochs=5, verbose=0, initial_lr=initial_lr
),
]
cb_requires_valid = False # Callbacks requires validation data
callbacks_config = config[a.hyperparameters].get(a.callbacks, {})
if callbacks_config is not None:
for cb_name, cb_conf in callbacks_config.items():
if cb_name in default_callbacks_config:
                    # cb_name in hvd_root_cb implies hvd.rank() == 0
if not (cb_name in hvd_root_cb) or hvd.rank() == 0:
default_callbacks_config[cb_name].update(cb_conf)
# Import and create corresponding callback
Callback = import_callback(cb_name)
callbacks.append(Callback(**default_callbacks_config[cb_name]))
if cb_name in ["EarlyStopping"]:
cb_requires_valid = "val" in cb_conf["monitor"].split("_")
else:
logger.error(f"'{cb_name}' is not an accepted callback!")
trainer = HorovodTrainer(config=config, model=model)
trainer.callbacks.extend(callbacks)
last_only, with_pred = preproc_trainer(config)
last_only = last_only and not cb_requires_valid
history = trainer.train(with_pred=with_pred, last_only=last_only)
# save history
if hvd.rank() == 0:
save_history(config.get("log_dir", None), history, config)
result = compute_objective(config["objective"], history)
else:
# penalising actions if model cannot be created
result = -1
if result < -10:
result = -10
return result
|
[
"tensorflow.random.set_seed",
"numpy.random.seed",
"tensorflow.config.threading.set_intra_op_parallelism_threads",
"horovod.tensorflow.keras.callbacks.MetricAverageCallback",
"deephyper.nas.run._util.get_search_space",
"horovod.tensorflow.keras.callbacks.LearningRateWarmupCallback",
"deephyper.nas.run._util.preproc_trainer",
"traceback.format_exc",
"horovod.tensorflow.keras.callbacks.BroadcastGlobalVariablesCallback",
"tensorflow.config.threading.set_inter_op_parallelism_threads",
"deephyper.nas.trainer.HorovodTrainer",
"horovod.tensorflow.keras.rank",
"horovod.tensorflow.keras.init",
"deephyper.contrib.callbacks.import_callback",
"horovod.tensorflow.keras.size",
"deephyper.nas.run._util.compute_objective",
"os.environ.get",
"deephyper.nas.run._util.setup_data",
"logging.getLogger",
"deephyper.nas.run._util.load_config"
] |
[((851, 878), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (868, 878), False, 'import logging\n'), ((1841, 1851), 'horovod.tensorflow.keras.init', 'hvd.init', ([], {}), '()\n', (1849, 1851), True, 'import horovod.tensorflow.keras as hvd\n'), ((2549, 2568), 'deephyper.nas.run._util.load_config', 'load_config', (['config'], {}), '(config)\n', (2560, 2568), False, 'from deephyper.nas.run._util import compute_objective, load_config, preproc_trainer, save_history, setup_data, get_search_space\n'), ((3250, 3268), 'deephyper.nas.run._util.setup_data', 'setup_data', (['config'], {}), '(config)\n', (3260, 3268), False, 'from deephyper.nas.run._util import compute_objective, load_config, preproc_trainer, save_history, setup_data, get_search_space\n'), ((3289, 3351), 'deephyper.nas.run._util.get_search_space', 'get_search_space', (['config', 'input_shape', 'output_shape'], {'seed': 'seed'}), '(config, input_shape, output_shape, seed=seed)\n', (3305, 3351), False, 'from deephyper.nas.run._util import compute_objective, load_config, preproc_trainer, save_history, setup_data, get_search_space\n'), ((1890, 1929), 'os.environ.get', 'os.environ.get', (['"""OMP_NUM_THREADS"""', 'None'], {}), "('OMP_NUM_THREADS', None)\n", (1904, 1929), False, 'import os\n'), ((2090, 2153), 'tensorflow.config.threading.set_intra_op_parallelism_threads', 'tf.config.threading.set_intra_op_parallelism_threads', (['num_intra'], {}), '(num_intra)\n', (2142, 2153), True, 'import tensorflow as tf\n'), ((2162, 2217), 'tensorflow.config.threading.set_inter_op_parallelism_threads', 'tf.config.threading.set_inter_op_parallelism_threads', (['(2)'], {}), '(2)\n', (2214, 2217), True, 'import tensorflow as tf\n'), ((2226, 2264), 'os.environ.get', 'os.environ.get', (['"""CUDA_VISIBLE_DEVICES"""'], {}), "('CUDA_VISIBLE_DEVICES')\n", (2240, 2264), False, 'import os\n'), ((2490, 2510), 'numpy.random.seed', 'np.random.seed', (['seed'], {}), '(seed)\n', (2504, 2510), True, 'import numpy as np\n'), ((2519, 2543), 'tensorflow.random.set_seed', 'tf.random.set_seed', (['seed'], {}), '(seed)\n', (2537, 2543), True, 'import tensorflow as tf\n'), ((2763, 2773), 'horovod.tensorflow.keras.size', 'hvd.size', ([], {}), '()\n', (2771, 2773), True, 'import horovod.tensorflow.keras as hvd\n'), ((2839, 2849), 'horovod.tensorflow.keras.size', 'hvd.size', ([], {}), '()\n', (2847, 2849), True, 'import horovod.tensorflow.keras as hvd\n'), ((5802, 5844), 'deephyper.nas.trainer.HorovodTrainer', 'HorovodTrainer', ([], {'config': 'config', 'model': 'model'}), '(config=config, model=model)\n', (5816, 5844), False, 'from deephyper.nas.trainer import HorovodTrainer\n'), ((5922, 5945), 'deephyper.nas.run._util.preproc_trainer', 'preproc_trainer', (['config'], {}), '(config)\n', (5937, 5945), False, 'from deephyper.nas.run._util import compute_objective, load_config, preproc_trainer, save_history, setup_data, get_search_space\n'), ((6218, 6265), 'deephyper.nas.run._util.compute_objective', 'compute_objective', (["config['objective']", 'history'], {}), "(config['objective'], history)\n", (6235, 6265), False, 'from deephyper.nas.run._util import compute_objective, load_config, preproc_trainer, save_history, setup_data, get_search_space\n'), ((2047, 2080), 'os.environ.get', 'os.environ.get', (['"""OMP_NUM_THREADS"""'], {}), "('OMP_NUM_THREADS')\n", (2061, 2080), False, 'import os\n'), ((2399, 2409), 'horovod.tensorflow.keras.rank', 'hvd.rank', ([], {}), '()\n', (2407, 2409), True, 'import horovod.tensorflow.keras as hvd\n'), ((3965, 4014), 
'horovod.tensorflow.keras.callbacks.BroadcastGlobalVariablesCallback', 'hvd.callbacks.BroadcastGlobalVariablesCallback', (['(0)'], {}), '(0)\n', (4011, 4014), True, 'import horovod.tensorflow.keras as hvd\n'), ((4266, 4303), 'horovod.tensorflow.keras.callbacks.MetricAverageCallback', 'hvd.callbacks.MetricAverageCallback', ([], {}), '()\n', (4301, 4303), True, 'import horovod.tensorflow.keras as hvd\n'), ((4668, 4763), 'horovod.tensorflow.keras.callbacks.LearningRateWarmupCallback', 'hvd.callbacks.LearningRateWarmupCallback', ([], {'warmup_epochs': '(5)', 'verbose': '(0)', 'initial_lr': 'initial_lr'}), '(warmup_epochs=5, verbose=0,\n initial_lr=initial_lr)\n', (4708, 4763), True, 'import horovod.tensorflow.keras as hvd\n'), ((6112, 6122), 'horovod.tensorflow.keras.rank', 'hvd.rank', ([], {}), '()\n', (6120, 6122), True, 'import horovod.tensorflow.keras as hvd\n'), ((2296, 2334), 'os.environ.get', 'os.environ.get', (['"""CUDA_VISIBLE_DEVICES"""'], {}), "('CUDA_VISIBLE_DEVICES')\n", (2310, 2334), False, 'import os\n'), ((3586, 3608), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (3606, 3608), False, 'import traceback\n'), ((1986, 2019), 'os.environ.get', 'os.environ.get', (['"""OMP_NUM_THREADS"""'], {}), "('OMP_NUM_THREADS')\n", (2000, 2019), False, 'import os\n'), ((5425, 5449), 'deephyper.contrib.callbacks.import_callback', 'import_callback', (['cb_name'], {}), '(cb_name)\n', (5440, 5449), False, 'from deephyper.contrib.callbacks import import_callback\n'), ((5231, 5241), 'horovod.tensorflow.keras.rank', 'hvd.rank', ([], {}), '()\n', (5239, 5241), True, 'import horovod.tensorflow.keras as hvd\n')]
|
import pytest
from unittest.mock import Mock
from app.meals.exceptions import MealNotFound, ActionNotAllowed
from app.meals.services import UpdateMealService, DeleteMealService
@pytest.mark.unit
class TestUpdateMealService:
def test_meal_not_found_raises_exception(self):
meal_repository = Mock()
meal_repository.get_by_id.return_value = None
service = UpdateMealService(meal_repository)
with pytest.raises(MealNotFound):
service.execute(Mock(id=1), 1)
def test_wrong_user_raises_exception(self):
request_user_id = 1
meal_repository = Mock()
meal_repository.get_by_id.return_value = Mock(user_id=5)
service = UpdateMealService(meal_repository)
with pytest.raises(ActionNotAllowed):
service.execute(Mock(id=1), request_user_id)
@pytest.mark.unit
class TestDeleteMealService:
def test_meal_not_found_raises_exception(self):
meal_repository = Mock()
meal_repository.get_by_id.return_value = None
service = DeleteMealService(meal_repository)
with pytest.raises(MealNotFound):
service.execute(1, 1)
def test_wrong_user_raises_exception(self):
request_user_id = 1
meal_repository = Mock()
meal_repository.get_by_id.return_value = Mock(id=1, user_id=5)
service = DeleteMealService(meal_repository)
with pytest.raises(ActionNotAllowed):
service.execute(1, request_user_id)
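# These marked tests can be selected with `pytest -m unit` (assuming the
# "unit" marker is registered in the pytest configuration).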
|
[
"app.meals.services.UpdateMealService",
"pytest.raises",
"unittest.mock.Mock",
"app.meals.services.DeleteMealService"
] |
[((305, 311), 'unittest.mock.Mock', 'Mock', ([], {}), '()\n', (309, 311), False, 'from unittest.mock import Mock\n'), ((385, 419), 'app.meals.services.UpdateMealService', 'UpdateMealService', (['meal_repository'], {}), '(meal_repository)\n', (402, 419), False, 'from app.meals.services import UpdateMealService, DeleteMealService\n'), ((608, 614), 'unittest.mock.Mock', 'Mock', ([], {}), '()\n', (612, 614), False, 'from unittest.mock import Mock\n'), ((664, 679), 'unittest.mock.Mock', 'Mock', ([], {'user_id': '(5)'}), '(user_id=5)\n', (668, 679), False, 'from unittest.mock import Mock\n'), ((699, 733), 'app.meals.services.UpdateMealService', 'UpdateMealService', (['meal_repository'], {}), '(meal_repository)\n', (716, 733), False, 'from app.meals.services import UpdateMealService, DeleteMealService\n'), ((964, 970), 'unittest.mock.Mock', 'Mock', ([], {}), '()\n', (968, 970), False, 'from unittest.mock import Mock\n'), ((1044, 1078), 'app.meals.services.DeleteMealService', 'DeleteMealService', (['meal_repository'], {}), '(meal_repository)\n', (1061, 1078), False, 'from app.meals.services import UpdateMealService, DeleteMealService\n'), ((1258, 1264), 'unittest.mock.Mock', 'Mock', ([], {}), '()\n', (1262, 1264), False, 'from unittest.mock import Mock\n'), ((1314, 1335), 'unittest.mock.Mock', 'Mock', ([], {'id': '(1)', 'user_id': '(5)'}), '(id=1, user_id=5)\n', (1318, 1335), False, 'from unittest.mock import Mock\n'), ((1355, 1389), 'app.meals.services.DeleteMealService', 'DeleteMealService', (['meal_repository'], {}), '(meal_repository)\n', (1372, 1389), False, 'from app.meals.services import UpdateMealService, DeleteMealService\n'), ((433, 460), 'pytest.raises', 'pytest.raises', (['MealNotFound'], {}), '(MealNotFound)\n', (446, 460), False, 'import pytest\n'), ((747, 778), 'pytest.raises', 'pytest.raises', (['ActionNotAllowed'], {}), '(ActionNotAllowed)\n', (760, 778), False, 'import pytest\n'), ((1092, 1119), 'pytest.raises', 'pytest.raises', (['MealNotFound'], {}), '(MealNotFound)\n', (1105, 1119), False, 'import pytest\n'), ((1403, 1434), 'pytest.raises', 'pytest.raises', (['ActionNotAllowed'], {}), '(ActionNotAllowed)\n', (1416, 1434), False, 'import pytest\n'), ((490, 500), 'unittest.mock.Mock', 'Mock', ([], {'id': '(1)'}), '(id=1)\n', (494, 500), False, 'from unittest.mock import Mock\n'), ((808, 818), 'unittest.mock.Mock', 'Mock', ([], {'id': '(1)'}), '(id=1)\n', (812, 818), False, 'from unittest.mock import Mock\n')]
|
import pymysql
db = pymysql.connect(host="localhost",user="root",passwd="<PASSWORD>", db="sidekem")
cur = db.cursor()
cur.execute("SELECT id FROM `villages` WHERE id LIKE '%3327%' ")
desa=cur.fetchall()
# Column in `statistik_bahanbakarmasak_desa` for each `bahan_bakar_masak`
# code (1-9) in `bdt_kemiskinan-33`.
fuel_columns = [
    ("1", "bahan_bakar_masak_listrik"),
    ("2", "bahan_bakar_masak_gaslebih3kg"),
    ("3", "bahan_bakar_masak_gas3kg"),
    ("4", "bahan_bakar_masak_biogas"),
    ("5", "bahan_bakar_masak_minyaktanah"),
    ("6", "bahan_bakar_masak_briket"),
    ("7", "bahan_bakar_masak_arang"),
    ("8", "bahan_bakar_masak_kayubakar"),
    ("9", "bahan_bakar_masak_tdkmemasakdirumah"),
]
for a in desa:
    # Counting Bahan Bakar Masak: count households per cooking-fuel code for
    # this village, using parameterized queries instead of string concatenation.
    counts = []
    for code, _column in fuel_columns:
        cur.execute(
            "SELECT COUNT(*) FROM `bdt_kemiskinan-33` WHERE desa_id=%s AND bahan_bakar_masak LIKE %s",
            (a[0], code),
        )
        counts.append(cur.fetchone()[0])
    # Update the per-village statistics row.
    assignments = ", ".join("`%s`=%%s" % column for _code, column in fuel_columns)
    cur.execute(
        "UPDATE `statistik_bahanbakarmasak_desa` SET " + assignments + " WHERE id=%s",
        counts + [a[0]],
    )
    db.commit()
|
[
"pymysql.connect"
] |
[((20, 106), 'pymysql.connect', 'pymysql.connect', ([], {'host': '"""localhost"""', 'user': '"""root"""', 'passwd': '"""<PASSWORD>"""', 'db': '"""sidekem"""'}), "(host='localhost', user='root', passwd='<PASSWORD>', db=\n 'sidekem')\n", (35, 106), False, 'import pymysql\n')]
|
import unittest
from nose.tools import *
from website.addons.citations.utils import serialize_account
class TestSerializeAccount(unittest.TestCase):
# TODO: Move to website/addons/citations/tests
def test_serialize_account_none(self):
assert_is_none(serialize_account(None))
|
[
"website.addons.citations.utils.serialize_account"
] |
[((270, 293), 'website.addons.citations.utils.serialize_account', 'serialize_account', (['None'], {}), '(None)\n', (287, 293), False, 'from website.addons.citations.utils import serialize_account\n')]
|
#
# Copyright (c) 2016-2022 Deephaven Data Labs and Patent Pending
#
""" This module implements the AxisFormat class that can be applied to format axis tick labels on a plot. """
import jpy
from deephaven.time import TimeZone
from deephaven._wrapper import JObjectWrapper
_JAxisFormat = jpy.get_type("io.deephaven.plot.axisformatters.AxisFormat")
_JDecimalAxisFormat = jpy.get_type("io.deephaven.plot.axisformatters.DecimalAxisFormat")
_JNanosAxisFormat = jpy.get_type("io.deephaven.plot.axisformatters.NanosAxisFormat")
class AxisFormat(JObjectWrapper):
""" The AxisFormat class defines the format for axis tick labels. For time values, this would be how the dates are
formatted. For numerical values, this would be the number of significant digits, etc. """
j_object_type = _JAxisFormat
@property
def j_object(self) -> jpy.JType:
return self.j_axis_format
def __init__(self, j_axis_format):
self.j_axis_format = j_axis_format
def set_pattern(self, pattern: str) -> None:
""" Set the pattern used for formatting values.
For details on the supported patterns see the javadoc for DateTimeFormatter
"https://docs.oracle.com/javase/8/docs/api/java/time/format/DateTimeFormatter.html"
Args:
pattern (str): pattern string indicating how values should be formatted.
"""
self.j_axis_format.setPattern(pattern)
class DecimalAxisFormat(AxisFormat):
""" A formatter for converting decimals into formatted strings.
For details on the supported patterns see the javadoc for DecimalFormat
"https://docs.oracle.com/javase/7/docs/api/java/text/DecimalFormat.html"
"""
def __init__(self):
self.j_axis_format = _JDecimalAxisFormat()
class NanosAxisFormat(AxisFormat):
""" A formatter for converting nanoseconds into formatted strings. """
def __init__(self, tz: TimeZone = None):
""" Creates a new NanosAxisFormat with the specified timezone.
Args:
tz (TimeZone): the timezone to use for formatting, default is None meaning to use the default time zone.
"""
if not tz:
self.j_axis_format = _JNanosAxisFormat()
else:
self.j_axis_format = _JNanosAxisFormat(tz.value)
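# A minimal usage sketch (hedged; the pattern string follows the
# java.time.format.DateTimeFormatter conventions documented above):
#   fmt = NanosAxisFormat()
#   fmt.set_pattern("yyyy-MM-dd'T'HH:mm")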
|
[
"jpy.get_type"
] |
[((292, 351), 'jpy.get_type', 'jpy.get_type', (['"""io.deephaven.plot.axisformatters.AxisFormat"""'], {}), "('io.deephaven.plot.axisformatters.AxisFormat')\n", (304, 351), False, 'import jpy\n'), ((374, 440), 'jpy.get_type', 'jpy.get_type', (['"""io.deephaven.plot.axisformatters.DecimalAxisFormat"""'], {}), "('io.deephaven.plot.axisformatters.DecimalAxisFormat')\n", (386, 440), False, 'import jpy\n'), ((461, 525), 'jpy.get_type', 'jpy.get_type', (['"""io.deephaven.plot.axisformatters.NanosAxisFormat"""'], {}), "('io.deephaven.plot.axisformatters.NanosAxisFormat')\n", (473, 525), False, 'import jpy\n')]
|
from typing import Optional, List
import torch
import torch.nn as nn
from pbrl.policy.base import Mlp, Cnn, Rnn, Discrete, Continuous, Deterministic, init_weights
class Actor(nn.Module):
def __init__(
self,
obs_dim: tuple,
action_dim: int,
hidden_sizes: List[int],
activation,
rnn: Optional[str],
continuous: bool
):
super(Actor, self).__init__()
self.hidden_size = hidden_sizes[-1]
self.rnn = rnn
self.continuous = continuous
if len(obs_dim) == 3:
self.f = Cnn(obs_dim, hidden_sizes, activation)
else:
self.f = Mlp(obs_dim, hidden_sizes, activation)
init_weights(self.f)
if self.rnn:
self.f2 = Rnn(self.hidden_size, activation, self.rnn)
init_weights(self.f2)
if self.continuous:
self.dist = Continuous(self.hidden_size, action_dim)
torch.nn.init.constant_(self.dist.logstd, -0.5)
else:
self.dist = Discrete(self.hidden_size, action_dim)
init_weights(self.dist, 0.01)
def forward(self, observations, states=None, dones: Optional[torch.Tensor] = None):
x = self.f(observations)
if self.rnn:
x, states = self.f2(x, states, dones)
dists = self.dist.forward(x)
return dists, states
class Critic(nn.Module):
def __init__(
self,
obs_dim: tuple,
hidden_sizes: List[int],
activation,
rnn: Optional[str]
):
super(Critic, self).__init__()
self.hidden_size = hidden_sizes[-1]
self.rnn = rnn
if len(obs_dim) == 3:
self.f = Cnn(obs_dim, hidden_sizes, activation)
else:
self.f = Mlp(obs_dim, hidden_sizes, activation)
init_weights(self.f)
if self.rnn:
self.f2 = Rnn(self.hidden_size, activation)
init_weights(self.f2)
self.value = Deterministic(self.hidden_size, 1)
init_weights(self.value, 1.0)
def forward(self, observations, states=None, dones: Optional[torch.Tensor] = None):
x = self.f.forward(observations)
if self.rnn:
x, states = self.f2.forward(x, states, dones)
values = self.value.forward(x).squeeze(-1)
return values, states
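# A minimal instantiation sketch (hypothetical sizes; `activation` is assumed
# to be the torch.nn module class expected by Mlp/Cnn, e.g. torch.nn.Tanh):
#   actor = Actor(obs_dim=(8,), action_dim=2, hidden_sizes=[64, 64],
#                 activation=torch.nn.Tanh, rnn=None, continuous=True)
#   dists, _ = actor(torch.zeros(1, 8))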
|
[
"pbrl.policy.base.init_weights",
"pbrl.policy.base.Rnn",
"pbrl.policy.base.Continuous",
"pbrl.policy.base.Deterministic",
"pbrl.policy.base.Mlp",
"torch.nn.init.constant_",
"pbrl.policy.base.Discrete",
"pbrl.policy.base.Cnn"
] |
[((726, 746), 'pbrl.policy.base.init_weights', 'init_weights', (['self.f'], {}), '(self.f)\n', (738, 746), False, 'from pbrl.policy.base import Mlp, Cnn, Rnn, Discrete, Continuous, Deterministic, init_weights\n'), ((1106, 1135), 'pbrl.policy.base.init_weights', 'init_weights', (['self.dist', '(0.01)'], {}), '(self.dist, 0.01)\n', (1118, 1135), False, 'from pbrl.policy.base import Mlp, Cnn, Rnn, Discrete, Continuous, Deterministic, init_weights\n'), ((1863, 1883), 'pbrl.policy.base.init_weights', 'init_weights', (['self.f'], {}), '(self.f)\n', (1875, 1883), False, 'from pbrl.policy.base import Mlp, Cnn, Rnn, Discrete, Continuous, Deterministic, init_weights\n'), ((2016, 2050), 'pbrl.policy.base.Deterministic', 'Deterministic', (['self.hidden_size', '(1)'], {}), '(self.hidden_size, 1)\n', (2029, 2050), False, 'from pbrl.policy.base import Mlp, Cnn, Rnn, Discrete, Continuous, Deterministic, init_weights\n'), ((2059, 2088), 'pbrl.policy.base.init_weights', 'init_weights', (['self.value', '(1.0)'], {}), '(self.value, 1.0)\n', (2071, 2088), False, 'from pbrl.policy.base import Mlp, Cnn, Rnn, Discrete, Continuous, Deterministic, init_weights\n'), ((605, 643), 'pbrl.policy.base.Cnn', 'Cnn', (['obs_dim', 'hidden_sizes', 'activation'], {}), '(obs_dim, hidden_sizes, activation)\n', (608, 643), False, 'from pbrl.policy.base import Mlp, Cnn, Rnn, Discrete, Continuous, Deterministic, init_weights\n'), ((679, 717), 'pbrl.policy.base.Mlp', 'Mlp', (['obs_dim', 'hidden_sizes', 'activation'], {}), '(obs_dim, hidden_sizes, activation)\n', (682, 717), False, 'from pbrl.policy.base import Mlp, Cnn, Rnn, Discrete, Continuous, Deterministic, init_weights\n'), ((790, 833), 'pbrl.policy.base.Rnn', 'Rnn', (['self.hidden_size', 'activation', 'self.rnn'], {}), '(self.hidden_size, activation, self.rnn)\n', (793, 833), False, 'from pbrl.policy.base import Mlp, Cnn, Rnn, Discrete, Continuous, Deterministic, init_weights\n'), ((846, 867), 'pbrl.policy.base.init_weights', 'init_weights', (['self.f2'], {}), '(self.f2)\n', (858, 867), False, 'from pbrl.policy.base import Mlp, Cnn, Rnn, Discrete, Continuous, Deterministic, init_weights\n'), ((920, 960), 'pbrl.policy.base.Continuous', 'Continuous', (['self.hidden_size', 'action_dim'], {}), '(self.hidden_size, action_dim)\n', (930, 960), False, 'from pbrl.policy.base import Mlp, Cnn, Rnn, Discrete, Continuous, Deterministic, init_weights\n'), ((973, 1020), 'torch.nn.init.constant_', 'torch.nn.init.constant_', (['self.dist.logstd', '(-0.5)'], {}), '(self.dist.logstd, -0.5)\n', (996, 1020), False, 'import torch\n'), ((1059, 1097), 'pbrl.policy.base.Discrete', 'Discrete', (['self.hidden_size', 'action_dim'], {}), '(self.hidden_size, action_dim)\n', (1067, 1097), False, 'from pbrl.policy.base import Mlp, Cnn, Rnn, Discrete, Continuous, Deterministic, init_weights\n'), ((1742, 1780), 'pbrl.policy.base.Cnn', 'Cnn', (['obs_dim', 'hidden_sizes', 'activation'], {}), '(obs_dim, hidden_sizes, activation)\n', (1745, 1780), False, 'from pbrl.policy.base import Mlp, Cnn, Rnn, Discrete, Continuous, Deterministic, init_weights\n'), ((1816, 1854), 'pbrl.policy.base.Mlp', 'Mlp', (['obs_dim', 'hidden_sizes', 'activation'], {}), '(obs_dim, hidden_sizes, activation)\n', (1819, 1854), False, 'from pbrl.policy.base import Mlp, Cnn, Rnn, Discrete, Continuous, Deterministic, init_weights\n'), ((1927, 1960), 'pbrl.policy.base.Rnn', 'Rnn', (['self.hidden_size', 'activation'], {}), '(self.hidden_size, activation)\n', (1930, 1960), False, 'from pbrl.policy.base import Mlp, Cnn, Rnn, Discrete, Continuous, Deterministic, init_weights\n'), ((1973, 1994), 'pbrl.policy.base.init_weights', 'init_weights', (['self.f2'], {}), '(self.f2)\n', (1985, 1994), False, 'from pbrl.policy.base import Mlp, Cnn, Rnn, Discrete, Continuous, Deterministic, init_weights\n')]
|
import numpy as np
def distances_to_point(lat_point, lon_point, lats, lons):
"""Method to calculate distances between a project and an array of lats and lons
:Parameters:
lat_project: float
Project latitude
lon_project: float
Project longitude
lats: np.array
Latitudes from which to calculate distances
lons: np.array
Longitudes from which to calculate distances
:Returns:
out: np.array of distances
"""
lat_point = np.deg2rad(lat_point)
lon_point = np.deg2rad(lon_point)
avg_earth_radius = 6373 # in km
lats = np.deg2rad(lats)
lons = np.deg2rad(lons)
lat = lat_point - lats
lon = lon_point - lons
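# Haversine formula: hav = sin^2(dlat/2) + cos(lat1) * cos(lat2) * sin^2(dlon/2),
# great-circle distance = 2 * R * arcsin(sqrt(hav))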
d = np.sin(lat * 0.5) ** 2 + np.cos(lat_point) * np.cos(lats) * np.sin(lon * 0.5) ** 2
dist = 2 * avg_earth_radius * np.arcsin(np.sqrt(d))
return dist
|
[
"numpy.sin",
"numpy.sqrt",
"numpy.cos",
"numpy.deg2rad"
] |
[((523, 544), 'numpy.deg2rad', 'np.deg2rad', (['lat_point'], {}), '(lat_point)\n', (533, 544), True, 'import numpy as np\n'), ((562, 583), 'numpy.deg2rad', 'np.deg2rad', (['lon_point'], {}), '(lon_point)\n', (572, 583), True, 'import numpy as np\n'), ((634, 650), 'numpy.deg2rad', 'np.deg2rad', (['lats'], {}), '(lats)\n', (644, 650), True, 'import numpy as np\n'), ((663, 679), 'numpy.deg2rad', 'np.deg2rad', (['lons'], {}), '(lons)\n', (673, 679), True, 'import numpy as np\n'), ((745, 762), 'numpy.sin', 'np.sin', (['(lat * 0.5)'], {}), '(lat * 0.5)\n', (751, 762), True, 'import numpy as np\n'), ((873, 883), 'numpy.sqrt', 'np.sqrt', (['d'], {}), '(d)\n', (880, 883), True, 'import numpy as np\n'), ((770, 787), 'numpy.cos', 'np.cos', (['lat_point'], {}), '(lat_point)\n', (776, 787), True, 'import numpy as np\n'), ((790, 802), 'numpy.cos', 'np.cos', (['lats'], {}), '(lats)\n', (796, 802), True, 'import numpy as np\n'), ((805, 822), 'numpy.sin', 'np.sin', (['(lon * 0.5)'], {}), '(lon * 0.5)\n', (811, 822), True, 'import numpy as np\n')]
|
# -*- coding: utf-8 -*-
"""force sync_translation after migrate for unit tests"""
import sys
from django.conf import settings
from django.core.management import call_command
from django.core.management.commands import migrate
from django.utils.six import StringIO
class Command(migrate.Command):
"""migrate"""
help = u"patched migrate: call sync_translation_fields after migrations when unit testing"
def handle(self, *args, **options):
"""command"""
super(Command, self).handle(*args, **options)
# Unit testing force sync_translation_fields
if 'test' in sys.argv and 'modeltranslation' in settings.INSTALLED_APPS:
print("Call sync_translation_fields")
sys_stdout = sys.stdout # keep for restoring
silent_stdout = StringIO()
sys.stdout = silent_stdout # make the command silent
call_command('sync_translation_fields', interactive=False, stdout=silent_stdout)
sys.stdout = sys_stdout # restore stdout
|
[
"django.utils.six.StringIO",
"django.core.management.call_command"
] |
[((802, 812), 'django.utils.six.StringIO', 'StringIO', ([], {}), '()\n', (810, 812), False, 'from django.utils.six import StringIO\n'), ((891, 976), 'django.core.management.call_command', 'call_command', (['"""sync_translation_fields"""'], {'interactive': '(False)', 'stdout': 'silent_stdout'}), "('sync_translation_fields', interactive=False, stdout=silent_stdout\n )\n", (903, 976), False, 'from django.core.management import call_command\n')]
|
import contextlib
from datetime import timedelta
from Engine import GGame
def spawn(delay, activity):
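# Delegate to the engine scheduler: run `activity` once after `delay`.
# The units of `delay` are engine-defined; the timedelta import above suggests
# a datetime.timedelta is accepted, but this is an assumption about GGame's API.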
GGame.AddDelayedActivity(delay, activity)
|
[
"Engine.GGame.AddDelayedActivity"
] |
[((105, 146), 'Engine.GGame.AddDelayedActivity', 'GGame.AddDelayedActivity', (['delay', 'activity'], {}), '(delay, activity)\n', (129, 146), False, 'from Engine import GGame\n')]
|
# -*- coding: utf-8 -*-
#
# ***********************************************************************************
# MIT License
#
# Copyright (c) 2020 <NAME>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is furnished
# to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
# INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
# PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
# CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE
# OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
# ***********************************************************************************
# **********************************************************************************************************************
# Copyright 2012 NVIDIA Corporation. All rights reserved.
# NOTICE TO USER:
# This software is subject to NVIDIA ownership rights under U.S. and international Copyright laws.
# This software and the information contained herein are PROPRIETARY and CONFIDENTIAL to NVIDIA
# and are being provided solely under the terms and conditions of an NVIDIA software license agreement.
# Otherwise, you have no rights to use or access this software in any manner.
#
# If not covered by the applicable NVIDIA software license agreement:
# NVIDIA MAKES NO REPRESENTATION ABOUT THE SUITABILITY OF THIS SOFTWARE FOR ANY PURPOSE.
# IT IS PROVIDED "AS IS" WITHOUT EXPRESS OR IMPLIED WARRANTY OF ANY KIND.
# NVIDIA DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE,
# INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY, NONINFRINGEMENT, AND FITNESS FOR A PARTICULAR PURPOSE.
# IN NO EVENT SHALL NVIDIA BE LIABLE FOR ANY SPECIAL, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL DAMAGES,
# OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT,
# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOURCE CODE.
#
# U.S. Government End Users.
# This software is a "commercial item" as that term is defined at 48 C.F.R. 2.101 (OCT 1995),
# consisting of "commercial computer software" and "commercial computer software documentation"
# as such terms are used in 48 C.F.R. 12.212 (SEPT 1995) and is provided to the U.S. Government only as a commercial
# end item.
# Consistent with 48 C.F.R.12.212 and 48 C.F.R. 227.7202-1 through 227.7202-4 (JUNE 1995),
# all U.S. Government End Users acquire the software with only those rights set forth herein.
#
# Any use of this software in individual and commercial software must include,
# in the user documentation and internal comments to the code,
# the above Disclaimer (as applicable) and U.S. Government End Users Notice.
#
# **********************************************************************************************************************
from .nvapi_h import *
import ctypes
import six
from collections import namedtuple
ColorCoordinates = namedtuple('ColorCoordinates', ['red', 'green', 'blue', 'white'])
RedCoordinate = namedtuple('RedCoordinate', ['x', 'y'])
GreenCoordinate = namedtuple('RedCoordinate', ['x', 'y'])
BlueCoordinate = namedtuple('RedCoordinate', ['x', 'y'])
WhiteCoordinate = namedtuple('RedCoordinate', ['x', 'y'])
class Display(object):
# NvAPI_GPU_GetEDID(NvPhysicalGpuHandle hPhysicalGpu, NvU32 displayOutputId, NV_EDID *pEDID);
# NvAPI_GPU_SetEDID(NvPhysicalGpuHandle hPhysicalGpu, NvU32 displayOutputId, NV_EDID *pEDID);
# NvAPI_GPU_GetScanoutConfiguration(NvU32 displayId, NvSBox* desktopRect, NvSBox* scanoutRect);
# NvAPI_GPU_GetScanoutCompositionParameter(__in NvU32 displayId, __in NV_GPU_SCANOUT_COMPOSITION_PARAMETER parameter, __out NV_GPU_SCANOUT_COMPOSITION_PARAMETER_VALUE *parameterData, __out float *pContainer);
# NvAPI_GPU_GetScanoutConfigurationEx(__in NvU32 displayId, __inout NV_SCANOUT_INFORMATION *pScanoutInformation);
# NvAPI_GPU_SetScanoutIntensity(NvU32 displayId, NV_SCANOUT_INTENSITY_DATA* scanoutIntensityData, int *pbSticky);
# NvAPI_GPU_GetScanoutIntensityState(__in NvU32 displayId, __inout NV_SCANOUT_INTENSITY_STATE_DATA* scanoutIntensityStateData);
# NvAPI_GPU_SetScanoutWarping(NvU32 displayId, NV_SCANOUT_WARPING_DATA* scanoutWarpingData, int* piMaxNumVertices, int* pbSticky);
# NvAPI_GPU_GetScanoutWarpingState(__in NvU32 displayId, __inout NV_SCANOUT_WARPING_STATE_DATA* scanoutWarpingStateData);
# NvAPI_GPU_SetScanoutCompositionParameter(NvU32 displayId, NV_GPU_SCANOUT_COMPOSITION_PARAMETER parameter,NV_GPU_SCANOUT_COMPOSITION_PARAMETER_VALUE parameterValue, float *pContainer);
# NvAPI_Disp_InfoFrameControl(__in NvU32 displayId, __inout NV_INFOFRAME_DATA *pInfoframeData);
# NvAPI_Disp_ColorControl(NvU32 displayId, NV_COLOR_DATA *pColorData);
# NvAPI_DISP_GetTiming( __in NvU32 displayId,__in NV_TIMING_INPUT *timingInput, __out NV_TIMING *pTiming);
# NvAPI_DISP_GetMonitorCapabilities(__in NvU32 displayId, __inout NV_MONITOR_CAPABILITIES *pMonitorCapabilities);
# NvAPI_DISP_GetMonitorColorCapabilities(__in NvU32 displayId, __inout_ecount_part_opt(*pColorCapsCount, *pColorCapsCount) NV_MONITOR_COLOR_CAPS *pMonitorColorCapabilities, __inout NvU32 *pColorCapsCount);
# NvAPI_DISP_EnumCustomDisplay( __in NvU32 displayId, __in NvU32 index, __inout NV_CUSTOM_DISPLAY *pCustDisp);
# NvAPI_DISP_TryCustomDisplay( __in_ecount(count) NvU32 *pDisplayIds, __in NvU32 count, __in_ecount(count) NV_CUSTOM_DISPLAY *pCustDisp);
# NvAPI_DISP_DeleteCustomDisplay( __in_ecount(count) NvU32 *pDisplayIds, __in NvU32 count, __in NV_CUSTOM_DISPLAY *pCustDisp);
# NvAPI_DISP_SaveCustomDisplay( __in_ecount(count) NvU32 *pDisplayIds, __in NvU32 count, __in NvU32 isThisOutputIdOnly, __in NvU32 isThisMonitorIdOnly);
# NvAPI_DISP_RevertCustomDisplayTrial( __in_ecount(count) NvU32* pDisplayIds, __in NvU32 count);
# NvAPI_EnumNvidiaDisplayHandle(NvU32 thisEnum, NvDisplayHandle *pNvDispHandle);
# NvAPI_EnumNvidiaUnAttachedDisplayHandle(NvU32 thisEnum, NvUnAttachedDisplayHandle *pNvUnAttachedDispHandle);
# NvAPI_CreateDisplayFromUnAttachedDisplay(NvUnAttachedDisplayHandle hNvUnAttachedDisp, NvDisplayHandle *pNvDisplay);
# NvAPI_GetAssociatedNvidiaDisplayHandle(const char *szDisplayName, NvDisplayHandle *pNvDispHandle);
# NvAPI_DISP_GetAssociatedUnAttachedNvidiaDisplayHandle(const char *szDisplayName, NvUnAttachedDisplayHandle *pNvUnAttachedDispHandle);
# NvAPI_GetAssociatedNvidiaDisplayName(NvDisplayHandle NvDispHandle, NvAPI_ShortString szDisplayName);
# NvAPI_GetUnAttachedAssociatedDisplayName(NvUnAttachedDisplayHandle hNvUnAttachedDisp, NvAPI_ShortString szDisplayName);
# NvAPI_EnableHWCursor(NvDisplayHandle hNvDisplay);
# NvAPI_DisableHWCursor(NvDisplayHandle hNvDisplay);
# NvAPI_GetVBlankCounter(NvDisplayHandle hNvDisplay, NvU32 *pCounter);
# NvAPI_SetRefreshRateOverride(NvDisplayHandle hNvDisplay, NvU32 outputsMask, float refreshRate, NvU32 bSetDeferred);
# NvAPI_GetAssociatedDisplayOutputId(NvDisplayHandle hNvDisplay, NvU32 *pOutputId);
# NvAPI_GetDisplayPortInfo(__in_opt NvDisplayHandle hNvDisplay, __in NvU32 outputId, __inout NV_DISPLAY_PORT_INFO *pInfo);
# NvAPI_SetDisplayPort(NvDisplayHandle hNvDisplay, NvU32 outputId, NV_DISPLAY_PORT_CONFIG *pCfg);
# NvAPI_GetHDMISupportInfo(__in_opt NvDisplayHandle hNvDisplay, __in NvU32 outputId, __inout NV_HDMI_SUPPORT_INFO *pInfo);
# NvAPI_DISP_GetDisplayConfig(__inout NvU32 *pathInfoCount, __out_ecount_full_opt(*pathInfoCount) NV_DISPLAYCONFIG_PATH_INFO *pathInfo);
# NvAPI_DISP_SetDisplayConfig(__in NvU32 pathInfoCount, __in_ecount(pathInfoCount) NV_DISPLAYCONFIG_PATH_INFO* pathInfo, __in NvU32 flags);
def __init__(self, gpu, display_id):
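# gpu: the owning GPU wrapper; display_id: the NVAPI display identifier
# obtained from NvAPI_GPU_GetAllDisplayIds / GetConnectedDisplayIds.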
self.gpu = gpu
self.display_id = display_id
@property
def is_primary(self):
displayId = NvU32()
nvStatus = NvAPI_DISP_GetGDIPrimaryDisplayId(ctypes.byref(displayId))
if NvAPI_Status.NVAPI_OK != nvStatus:
szDesc = NvAPI_ShortString()
NvAPI_GetErrorMessage(nvStatus, szDesc)
raise RuntimeError("NvAPI_DISP_GetGDIPrimaryDisplayId returned %s (%d)" % (szDesc, nvStatus))
return self.display_id.value == displayId.value
@property
def _hPhysicalGpu(self):
hPhysicalGpu = NvPhysicalGpuHandle()
nvStatus = NvAPI_SYS_GetPhysicalGpuFromDisplayId(self.display_id, ctypes.byref(hPhysicalGpu))
if NvAPI_Status.NVAPI_OK != nvStatus:
szDesc = NvAPI_ShortString()
NvAPI_GetErrorMessage(nvStatus, szDesc)
raise RuntimeError("NvAPI_SYS_GetPhysicalGpuFromDisplayId returned %s (%d)" % (szDesc, nvStatus))
return hPhysicalGpu
@property
def __display_data(self):
displayIdCount = NvU32(16)
flags = NvU32(0)
displayIdArray = (NV_GPU_DISPLAYIDS * 16)()
displayIdArray[0].version = NV_GPU_DISPLAYIDS_VER
hPhysicalGpu = NvPhysicalGpuHandle()
NvAPI_SYS_GetPhysicalGpuFromDisplayId(
self.display_id,
ctypes.byref(hPhysicalGpu)
)
nvStatus = NvAPI_GPU_GetConnectedDisplayIds(
hPhysicalGpu,
displayIdArray,
ctypes.byref(displayIdCount),
flags
)
if NvAPI_Status.NVAPI_OK != nvStatus:
szDesc = NvAPI_ShortString()
NvAPI_GetErrorMessage(nvStatus, szDesc)
raise RuntimeError("NvAPI_GPU_GetConnectedDisplayIds returned %s (%d)" % (szDesc, nvStatus))
for i in range(displayIdCount.value):
if displayIdArray[i].displayId == self.display_id:
return displayIdArray[i]
@property
def __hdr_data(self):
hdrCapabilities = NV_HDR_CAPABILITIES()
hdrCapabilities.version = NV_HDR_CAPABILITIES_VER
nvStatus = NvAPI_Disp_GetHdrCapabilities(
self.display_id,
ctypes.byref(hdrCapabilities)
)
if NvAPI_Status.NVAPI_OK != nvStatus:
szDesc = NvAPI_ShortString()
NvAPI_GetErrorMessage(nvStatus, szDesc)
raise RuntimeError("NvAPI_Disp_GetHdrCapabilities returned %s (%d)" % (szDesc, nvStatus))
return hdrCapabilities
@property
def hdr(self):
if not self.is_hdr_supported:
return False
hdrColorData = NV_HDR_COLOR_DATA()
hdrColorData.version = NV_HDR_COLOR_DATA_VER
hdrColorData.cmd = NV_HDR_CMD_GET
hdrColorData.static_metadata_descriptor_id = NV_STATIC_METADATA_TYPE_1
nvStatus = NvAPI_Disp_HdrColorControl(
self.display_id,
ctypes.byref(hdrColorData)
)
if NvAPI_Status.NVAPI_OK != nvStatus:
szDesc = NvAPI_ShortString()
NvAPI_GetErrorMessage(nvStatus, szDesc)
raise RuntimeError("NvAPI_Disp_HdrColorControl returned %s (%d)" % (szDesc, nvStatus))
return hdrColorData.hdrMode != NV_HDR_MODE_OFF
@hdr.setter
def hdr(self, value):
if self.is_hdr_supported:
hdrColorData = NV_HDR_COLOR_DATA()
hdrColorData.version = NV_HDR_COLOR_DATA_VER
hdrColorData.cmd = NV_HDR_CMD_SET
hdrColorData.static_metadata_descriptor_id = NV_STATIC_METADATA_TYPE_1
if value:
hdrColorData.hdrMode = NV_HDR_MODE_UHDA
else:
hdrColorData.hdrMode = NV_HDR_MODE_OFF
nvStatus = NvAPI_Disp_HdrColorControl(
self.display_id,
ctypes.byref(hdrColorData)
)
if NvAPI_Status.NVAPI_OK != nvStatus:
szDesc = NvAPI_ShortString()
NvAPI_GetErrorMessage(nvStatus, szDesc)
raise RuntimeError("NvAPI_Disp_HdrColorControl returned %s (%d)" % (szDesc, nvStatus))
@property
def connector_type(self):
dd = self.__display_data
return NV_MONITOR_CONN_TYPE.get(dd.connectorType)
@property
def is_dynamic(self):
dd = self.__display_data
return bool(dd.isDynamic)
@property
def is_multi_stream_root_node(self):
dd = self.__display_data
return bool(dd.isMultiStreamRootNode)
@property
def is_active(self):
dd = self.__display_data
return bool(dd.isActive)
@property
def is_cluster(self):
dd = self.__display_data
return bool(dd.isCluster)
@property
def is_visible(self):
dd = self.__display_data
return bool(dd.isOSVisible)
@property
def is_wireless_display(self):
dd = self.__display_data
return bool(dd.isWFD)
@property
def is_connected(self):
dd = self.__display_data
return bool(dd.isConnected)
@property
def is_physically_connected(self):
dd = self.__display_data
return bool(dd.isConnected) and bool(dd.isPhysicallyConnected)
@property
def is_hdr_supported(self):
return self.is_st2084_eotf_supported
@property
def is_st2084_eotf_supported(self):
# HDMI2.0a UHDA HDR with ST2084 EOTF (CEA861.3).
return bool(self.__hdr_data.isST2084EotfSupported)
@property
def is_traditional_gamma_supported(self):
# HDMI2.0a traditional HDR gamma (CEA861.3).
return bool(self.__hdr_data.isTraditionalHdrGammaSupported)
@property
def is_edr_supported(self):
# Extended Dynamic Range on SDR displays.
return bool(self.__hdr_data.isEdrSupported)
@property
def is_traditional_sdr_gamma_supported(self):
# HDMI2.0a traditional SDR gamma (CEA861.3).
return bool(self.__hdr_data.isTraditionalSdrGammaSupported)
@property
def is_dolby_vision_supported(self):
# Dolby Vision Support.
return bool(self.__hdr_data.isDolbyVisionSupported)
@property
def hdr_dynamic_range(self):
hdrColorData = NV_HDR_COLOR_DATA()
hdrColorData.version = NV_HDR_COLOR_DATA_VER
hdrColorData.cmd = NV_HDR_CMD_GET
hdrColorData.static_metadata_descriptor_id = NV_STATIC_METADATA_TYPE_1
nvStatus = NvAPI_Disp_HdrColorControl(
self.display_id,
ctypes.byref(hdrColorData)
)
if NvAPI_Status.NVAPI_OK != nvStatus:
szDesc = NvAPI_ShortString()
NvAPI_GetErrorMessage(nvStatus, szDesc)
raise RuntimeError("NvAPI_Disp_HdrColorControl returned %s (%d)" % (szDesc, nvStatus))
return NV_DYNAMIC_RANGE.get(hdrColorData.hdrDynamicRange)
@hdr_dynamic_range.setter
def hdr_dynamic_range(self, value):
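# Read-modify-write: fetch the current color data with NV_HDR_CMD_GET,
# patch hdrDynamicRange, then write it back with NV_HDR_CMD_SET.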
hdrColorData = NV_HDR_COLOR_DATA()
hdrColorData.version = NV_HDR_COLOR_DATA_VER
hdrColorData.cmd = NV_HDR_CMD_GET
hdrColorData.static_metadata_descriptor_id = NV_STATIC_METADATA_TYPE_1
nvStatus = NvAPI_Disp_HdrColorControl(
self.display_id,
ctypes.byref(hdrColorData)
)
if NvAPI_Status.NVAPI_OK != nvStatus:
szDesc = NvAPI_ShortString()
NvAPI_GetErrorMessage(nvStatus, szDesc)
raise RuntimeError("NvAPI_Disp_HdrColorControl returned %s (%d)" % (szDesc, nvStatus))
hdrColorData.cmd = NV_HDR_CMD_SET
value = NV_DYNAMIC_RANGE.get(value)
if value is None:
return
hdrColorData.hdrDynamicRange = value
nvStatus = NvAPI_Disp_HdrColorControl(
self.display_id,
ctypes.byref(hdrColorData)
)
if NvAPI_Status.NVAPI_OK != nvStatus:
szDesc = NvAPI_ShortString()
NvAPI_GetErrorMessage(nvStatus, szDesc)
raise RuntimeError("NvAPI_Disp_HdrColorControl returned %s (%d)" % (szDesc, nvStatus))
@property
def hdr_color_format(self):
hdrColorData = NV_HDR_COLOR_DATA()
hdrColorData.version = NV_HDR_COLOR_DATA_VER
hdrColorData.cmd = NV_HDR_CMD_GET
hdrColorData.static_metadata_descriptor_id = NV_STATIC_METADATA_TYPE_1
nvStatus = NvAPI_Disp_HdrColorControl(
self.display_id,
ctypes.byref(hdrColorData)
)
if NvAPI_Status.NVAPI_OK != nvStatus:
szDesc = NvAPI_ShortString()
NvAPI_GetErrorMessage(nvStatus, szDesc)
raise RuntimeError("NvAPI_Disp_HdrColorControl returned %s (%d)" % (szDesc, nvStatus))
return NV_COLOR_FORMAT.get(hdrColorData.hdrColorFormat)
@hdr_color_format.setter
def hdr_color_format(self, value):
hdrColorData = NV_HDR_COLOR_DATA()
hdrColorData.version = NV_HDR_COLOR_DATA_VER
hdrColorData.cmd = NV_HDR_CMD_GET
hdrColorData.static_metadata_descriptor_id = NV_STATIC_METADATA_TYPE_1
nvStatus = NvAPI_Disp_HdrColorControl(
self.display_id,
ctypes.byref(hdrColorData)
)
if NvAPI_Status.NVAPI_OK != nvStatus:
szDesc = NvAPI_ShortString()
NvAPI_GetErrorMessage(nvStatus, szDesc)
raise RuntimeError("NvAPI_Disp_HdrColorControl returned %s (%d)" % (szDesc, nvStatus))
hdrColorData.cmd = NV_HDR_CMD_SET
value = NV_COLOR_FORMAT.get(value)
if value is None:
return
hdrColorData.hdrColorFormat = value
nvStatus = NvAPI_Disp_HdrColorControl(
self.display_id,
ctypes.byref(hdrColorData)
)
if NvAPI_Status.NVAPI_OK != nvStatus:
szDesc = NvAPI_ShortString()
NvAPI_GetErrorMessage(nvStatus, szDesc)
raise RuntimeError("NvAPI_Disp_HdrColorControl returned %s (%d)" % (szDesc, nvStatus))
@property
def hdr_primary_color_coordinates(self):
# [0x0000-0xC350] = [0.0 - 1.0]
dd = self.__hdr_data.display_data
red = RedCoordinate(x=dd.displayPrimary_x0, y=dd.displayPrimary_y0)
green = GreenCoordinate(x=dd.displayPrimary_x1, y=dd.displayPrimary_y1)
blue = BlueCoordinate(x=dd.displayPrimary_x2, y=dd.displayPrimary_y2)
white = WhiteCoordinate(x=dd.displayWhitePoint_x, y=dd.displayWhitePoint_y)
return ColorCoordinates(
red=red,
green=green,
blue=blue,
white=white
)
@hdr_primary_color_coordinates.setter
def hdr_primary_color_coordinates(self, value):
if not self.is_hdr_supported:
return
mdd = self._hdr_mastering_display_data
mdd.displayPrimary_x0 = value.red.x
mdd.displayPrimary_y0 = value.red.y
mdd.displayPrimary_x1 = value.green.x
mdd.displayPrimary_y1 = value.green.y
mdd.displayPrimary_x2 = value.blue.x
mdd.displayPrimary_y2 = value.blue.y
mdd.displayWhitePoint_x = value.white.x
mdd.displayWhitePoint_y = value.white.y
hdrColorData = NV_HDR_COLOR_DATA()
hdrColorData.mastering_display_data = mdd
hdrColorData.version = NV_HDR_COLOR_DATA_VER
hdrColorData.cmd = NV_HDR_CMD_SET
hdrColorData.static_metadata_descriptor_id = NV_STATIC_METADATA_TYPE_1
nvStatus = NvAPI_Disp_HdrColorControl(
self.display_id,
ctypes.byref(hdrColorData)
)
if NvAPI_Status.NVAPI_OK != nvStatus:
szDesc = NvAPI_ShortString()
NvAPI_GetErrorMessage(nvStatus, szDesc)
raise RuntimeError("NvAPI_Disp_HdrColorControl returned %s (%d)" % (szDesc, nvStatus))
@property
def hdr_maximum_content_light_level(self):
if not self.is_hdr_supported:
return
return self._hdr_mastering_display_data.max_content_light_level.value
@hdr_maximum_content_light_level.setter
def hdr_maximum_content_light_level(self, value):
if not self.is_hdr_supported:
return
mdd = self._hdr_mastering_display_data
mdd.max_content_light_level = value
hdrColorData = NV_HDR_COLOR_DATA()
hdrColorData.mastering_display_data = mdd
hdrColorData.version = NV_HDR_COLOR_DATA_VER
hdrColorData.cmd = NV_HDR_CMD_SET
hdrColorData.static_metadata_descriptor_id = NV_STATIC_METADATA_TYPE_1
nvStatus = NvAPI_Disp_HdrColorControl(
self.display_id,
ctypes.byref(hdrColorData)
)
if NvAPI_Status.NVAPI_OK != nvStatus:
szDesc = NvAPI_ShortString()
NvAPI_GetErrorMessage(nvStatus, szDesc)
raise RuntimeError("NvAPI_Disp_HdrColorControl returned %s (%d)" % (szDesc, nvStatus))
@property
def _hdr_mastering_display_data(self):
hdrColorData = NV_HDR_COLOR_DATA()
hdrColorData.version = NV_HDR_COLOR_DATA_VER
hdrColorData.cmd = NV_HDR_CMD_GET
hdrColorData.static_metadata_descriptor_id = NV_STATIC_METADATA_TYPE_1
nvStatus = NvAPI_Disp_HdrColorControl(
self.display_id,
ctypes.byref(hdrColorData)
)
if NvAPI_Status.NVAPI_OK != nvStatus:
szDesc = NvAPI_ShortString()
NvAPI_GetErrorMessage(nvStatus, szDesc)
raise RuntimeError("NvAPI_Disp_HdrColorControl returned %s (%d)" % (szDesc, nvStatus))
return hdrColorData.mastering_display_data
@property
def hdr_maximum_luminance(self):
# Maximum display luminance = desired max luminance of HDR
# content ([0x0001-0xFFFF] = [1.0 - 65535.0] cd/m^2)
dd = self.__hdr_data.display_data
return dd.desired_content_max_luminance
@hdr_maximum_luminance.setter
def hdr_maximum_luminance(self, value):
if not self.is_hdr_supported:
return
mdd = self._hdr_mastering_display_data
mdd.max_display_mastering_luminance = value
hdrColorData = NV_HDR_COLOR_DATA()
hdrColorData.mastering_display_data = mdd
hdrColorData.version = NV_HDR_COLOR_DATA_VER
hdrColorData.cmd = NV_HDR_CMD_SET
hdrColorData.static_metadata_descriptor_id = NV_STATIC_METADATA_TYPE_1
nvStatus = NvAPI_Disp_HdrColorControl(
self.display_id,
ctypes.byref(hdrColorData)
)
if NvAPI_Status.NVAPI_OK != nvStatus:
szDesc = NvAPI_ShortString()
NvAPI_GetErrorMessage(nvStatus, szDesc)
raise RuntimeError("NvAPI_Disp_HdrColorControl returned %s (%d)" % (szDesc, nvStatus))
@property
def hdr_minimum_luminance(self):
# Minimum display luminance = desired min luminance of HDR
# content ([0x0001-0xFFFF] = [1.0 - 6.55350] cd/m^2)
dd = self.__hdr_data.display_data
return dd.desired_content_min_luminance
@hdr_minimum_luminance.setter
def hdr_minimum_luminance(self, value):
if not self.is_hdr_supported:
return
mdd = self._hdr_mastering_display_data
mdd.min_display_mastering_luminance = value
hdrColorData = NV_HDR_COLOR_DATA()
hdrColorData.mastering_display_data = mdd
hdrColorData.version = NV_HDR_COLOR_DATA_VER
hdrColorData.cmd = NV_HDR_CMD_SET
hdrColorData.static_metadata_descriptor_id = NV_STATIC_METADATA_TYPE_1
nvStatus = NvAPI_Disp_HdrColorControl(
self.display_id,
ctypes.byref(hdrColorData)
)
if NvAPI_Status.NVAPI_OK != nvStatus:
szDesc = NvAPI_ShortString()
NvAPI_GetErrorMessage(nvStatus, szDesc)
raise RuntimeError("NvAPI_Disp_HdrColorControl returned %s (%d)" % (szDesc, nvStatus))
@property
def hdr_maximum_frame_average_luminance(self):
# Desired maximum Frame-Average Light Level (MaxFALL) of HDR
# content ([0x0001-0xFFFF] = [1.0 - 65535.0] cd/m^2)
dd = self.__hdr_data.display_data
return dd.desired_content_max_frame_average_luminance.value
@hdr_maximum_frame_average_luminance.setter
def hdr_maximum_frame_average_luminance(self, value):
if not self.is_hdr_supported:
return
mdd = self._hdr_mastering_display_data
mdd.max_frame_average_light_level = value
hdrColorData = NV_HDR_COLOR_DATA()
hdrColorData.mastering_display_data = mdd
hdrColorData.version = NV_HDR_COLOR_DATA_VER
hdrColorData.cmd = NV_HDR_CMD_SET
hdrColorData.static_metadata_descriptor_id = NV_STATIC_METADATA_TYPE_1
nvStatus = NvAPI_Disp_HdrColorControl(
self.display_id,
ctypes.byref(hdrColorData)
)
if NvAPI_Status.NVAPI_OK != nvStatus:
szDesc = NvAPI_ShortString()
NvAPI_GetErrorMessage(nvStatus, szDesc)
raise RuntimeError("NvAPI_Disp_HdrColorControl returned %s (%d)" % (szDesc, nvStatus))
@property
def hdr_supports_2160p60hz(self):
# If set, sink is capable of 4Kx2K @ 60 Hz
dvsm = self.__hdr_data.dv_static_metadata
return bool(dvsm.supports_2160p60hz.value)
@property
def hdr_supports_yuv422_12bit(self):
# If set, sink is capable of YUV422-12 bit
dvsm = self.__hdr_data.dv_static_metadata
return bool(dvsm.supports_YUV422_12bit.value)
@property
def hdr_supports_global_dimming(self):
# Indicates if sink supports global dimming
dvsm = self.__hdr_data.dv_static_metadata
return bool(dvsm.supports_global_dimming.value)
@property
def hdr_colorimetry(self):
# If set, sink supports DCI-P3 colorimetry; otherwise Rec709
dvsm = self.__hdr_data.dv_static_metadata
if bool(dvsm.colorimetry.value):
return 'DCI P3'
else:
return 'Rec709'
@property
def hdr_supports_backlight_control(self):
# This is set when the sink is using the low-latency interface and can control its backlight.
dvsm = self.__hdr_data.dv_static_metadata
return bool(dvsm.supports_backlight_control.value)
@property
def hdr_backlight_minimum(self):
# Minimum backlight luminance level (Backlt_Min_Luma).
dvsm = self.__hdr_data.dv_static_metadata
return dvsm.backlt_min_luma.value
@property
def hdr_interface_supported_by_sink(self):
# Indicates the interface (standard or low latency) supported by the sink.
dvsm = self.__hdr_data.dv_static_metadata
return dvsm.interface_supported_by_sink.value
@property
def hdr_supports_10b_12b_444(self):
# Set when the supported interface is low latency; indicates whether
# 10-bit or 12-bit RGB 4:4:4 or YCbCr 4:4:4 (or both) is supported.
dvsm = self.__hdr_data.dv_static_metadata
return dvsm.supports_10b_12b_444.value
@property
def hdr_minimum_sink_luminance(self):
# Represents min luminance level of Sink
dvsm = self.__hdr_data.dv_static_metadata
return dvsm.target_min_luminance.value
@property
def hdr_maximum_sink_luminance(self):
# Represents max luminance level of sink
dvsm = self.__hdr_data.dv_static_metadata
return dvsm.target_max_luminance.value
@property
def hdr_primary_chromaticity_coordinates(self):
dvsm = self.__hdr_data.dv_static_metadata
red = RedCoordinate(x=dvsm.cc_red_x.value, y=dvsm.cc_red_y.value)
green = GreenCoordinate(x=dvsm.cc_green_x.value, y=dvsm.cc_green_y.value)
blue = BlueCoordinate(x=dvsm.cc_blue_x.value, y=dvsm.cc_blue_y.value)
white = WhiteCoordinate(x=dvsm.cc_white_x.value, y=dvsm.cc_white_y.value)
return ColorCoordinates(
red=red,
green=green,
blue=blue,
white=white
)
def _get_bit(byteval, idx):
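# True if bit `idx` of `byteval` is set.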
return byteval & (1 << idx) != 0
class PhysicalGPU(object):
@property
def _hdcp_support_status(self):
pGetHDCPSupportStatus = NV_GPU_GET_HDCP_SUPPORT_STATUS()
nvStatus = NvAPI_GPU_GetHDCPSupportStatus(self._hPhysicalGpu, ctypes.byref(pGetHDCPSupportStatus))
if NvAPI_Status.NVAPI_OK != nvStatus:
szDesc = NvAPI_ShortString()
NvAPI_GetErrorMessage(nvStatus, szDesc)
raise RuntimeError("NvAPI_GPU_GetHDCPSupportStatus returned %s (%d)" % (szDesc, nvStatus))
return pGetHDCPSupportStatus
@property
def hdcp_fuse_state(self):
return NV_GPU_HDCP_FUSE_STATE.get(self._hdcp_support_status.hdcpFuseState)
@property
def hdcp_key_source(self):
return NV_GPU_HDCP_KEY_SOURCE.get(self._hdcp_support_status.hdcpKeySource)
@property
def hdcp_key_source_state(self):
return NV_GPU_HDCP_KEY_SOURCE_STATE.get(self._hdcp_support_status.hdcpKeySourceState)
@property
def shader_sub_pipe_count(self):
hPhysicalGpu = self._hPhysicalGpu
pCount = NvU32()
nvStatus = NvAPI_GPU_GetShaderSubPipeCount(hPhysicalGpu, ctypes.byref(pCount))
if NvAPI_Status.NVAPI_OK != nvStatus:
szDesc = NvAPI_ShortString()
NvAPI_GetErrorMessage(nvStatus, szDesc)
raise RuntimeError("NvAPI_GPU_GetShaderSubPipeCount returned %s (%d)" % (szDesc, nvStatus))
return pCount.value
@property
def core_count(self):
hPhysicalGpu = self._hPhysicalGpu
pCount = NvU32()
nvStatus = NvAPI_GPU_GetGpuCoreCount(hPhysicalGpu, ctypes.byref(pCount))
if NvAPI_Status.NVAPI_OK != nvStatus:
szDesc = NvAPI_ShortString()
NvAPI_GetErrorMessage(nvStatus, szDesc)
raise RuntimeError("NvAPI_GPU_GetGpuCoreCount returned %s (%d)" % (szDesc, nvStatus))
return pCount.value
# NvAPI_GPU_GetAllOutputs(NvPhysicalGpuHandle hPhysicalGpu,NvU32 *pOutputsMask);
# NvAPI_GPU_GetConnectedOutputs(NvPhysicalGpuHandle hPhysicalGpu, NvU32 *pOutputsMask);
# NvAPI_GPU_GetConnectedSLIOutputs(NvPhysicalGpuHandle hPhysicalGpu, NvU32 *pOutputsMask);
# NvAPI_GPU_GetConnectedOutputsWithLidState(NvPhysicalGpuHandle hPhysicalGpu, NvU32 *pOutputsMask);
# NvAPI_GPU_GetConnectedSLIOutputsWithLidState(NvPhysicalGpuHandle hPhysicalGpu, NvU32 *pOutputsMask);
@property
def system_type(self):
pSystemType = NV_SYSTEM_TYPE()
nvStatus = NvAPI_GPU_GetSystemType(self._hPhysicalGpu, ctypes.byref(pSystemType))
if NvAPI_Status.NVAPI_OK != nvStatus:
szDesc = NvAPI_ShortString()
NvAPI_GetErrorMessage(nvStatus, szDesc)
raise RuntimeError("NvAPI_GPU_GetConnectedDisplayIds returned %s (%d)" % (szDesc, nvStatus))
return NV_SYSTEM_TYPE.get(pSystemType)
# NvAPI_GPU_GetActiveOutputs(NvPhysicalGpuHandle hPhysicalGpu, NvU32 *pOutputsMask);
# NvAPI_GPU_GetOutputType(NvPhysicalGpuHandle hPhysicalGpu, NvU32 outputId, NV_GPU_OUTPUT_TYPE *pOutputType);
# NvAPI_GPU_ValidateOutputCombination(NvPhysicalGpuHandle hPhysicalGpu, NvU32 outputsMask);
# NvAPI_GPU_GetFullName(NvPhysicalGpuHandle hPhysicalGpu, NvAPI_ShortString szName);
@property
def _pci_identifiers(self):
pDeviceId = NvU32()
pSubSystemId = NvU32()
pRevisionId = NvU32()
pExtDeviceId = NvU32()
nvStatus = NvAPI_GPU_GetPCIIdentifiers(
self._hPhysicalGpu,
ctypes.byref(pDeviceId),
ctypes.byref(pSubSystemId),
ctypes.byref(pRevisionId),
ctypes.byref(pExtDeviceId)
)
if NvAPI_Status.NVAPI_OK != nvStatus:
szDesc = NvAPI_ShortString()
NvAPI_GetErrorMessage(nvStatus, szDesc)
raise RuntimeError("NvAPI_GPU_GetPCIIdentifiers returned %s (%d)" % (szDesc, nvStatus))
return (
pDeviceId.value,
pSubSystemId.value,
pRevisionId.value,
pExtDeviceId.value
)
@property
def pci_device_id(self):
return self._pci_identifiers[0]
@property
def pci_subsystem_id(self):
return self._pci_identifiers[1]
@property
def pci_revision_id(self):
return self._pci_identifiers[2]
@property
def pci_ext_device_id(self):
return self._pci_identifiers[3]
@property
def gpu_type(self):
pGpuType = NV_GPU_TYPE()
nvStatus = NvAPI_GPU_GetGPUType(self._hPhysicalGpu, ctypes.byref(pGpuType))
if NvAPI_Status.NVAPI_OK != nvStatus:
szDesc = NvAPI_ShortString()
NvAPI_GetErrorMessage(nvStatus, szDesc)
raise RuntimeError("NvAPI_GPU_GetGPUType returned %s (%d)" % (szDesc, nvStatus))
return NV_GPU_TYPE.get(pGpuType)
@property
def bus_type(self):
pBusType = NV_GPU_BUS_TYPE()
nvStatus = NvAPI_GPU_GetBusType(self._hPhysicalGpu, ctypes.byref(pBusType))
if NvAPI_Status.NVAPI_OK != nvStatus:
szDesc = NvAPI_ShortString()
NvAPI_GetErrorMessage(nvStatus, szDesc)
raise RuntimeError("NvAPI_GPU_GetBusType returned %s (%d)" % (szDesc, nvStatus))
return NV_GPU_BUS_TYPE.get(pBusType)
@property
def bus_id(self):
pBusId = NvU32()
nvStatus = NvAPI_GPU_GetBusId(self._hPhysicalGpu, ctypes.byref(pBusId))
if NvAPI_Status.NVAPI_OK != nvStatus:
szDesc = NvAPI_ShortString()
NvAPI_GetErrorMessage(nvStatus, szDesc)
raise RuntimeError("NvAPI_GPU_GetBusId returned %s (%d)" % (szDesc, nvStatus))
return pBusId.value
@property
def bus_slot_id(self):
pBusSlotId = NvU32()
nvStatus = NvAPI_GPU_GetBusSlotId(self._hPhysicalGpu, ctypes.byref(pBusSlotId))
if NvAPI_Status.NVAPI_OK != nvStatus:
szDesc = NvAPI_ShortString()
NvAPI_GetErrorMessage(nvStatus, szDesc)
raise RuntimeError("NvAPI_GPU_GetTachReading returned %s (%d)" % (szDesc, nvStatus))
return pBusSlotId.value
@property
def irq(self):
pIRQ = NvU32()
nvStatus = NvAPI_GPU_GetIRQ(self._hPhysicalGpu, ctypes.byref(pIRQ))
if NvAPI_Status.NVAPI_OK != nvStatus:
szDesc = NvAPI_ShortString()
NvAPI_GetErrorMessage(nvStatus, szDesc)
raise RuntimeError("NvAPI_GPU_GetIRQ returned %s (%d)" % (szDesc, nvStatus))
return pIRQ.value
@property
def vbios_revision(self):
pBiosRevision = NvU32()
nvStatus = NvAPI_GPU_GetVbiosRevision(self._hPhysicalGpu, ctypes.byref(pBiosRevision))
if NvAPI_Status.NVAPI_OK != nvStatus:
szDesc = NvAPI_ShortString()
NvAPI_GetErrorMessage(nvStatus, szDesc)
raise RuntimeError("NvAPI_GPU_GetVbiosRevision returned %s (%d)" % (szDesc, nvStatus))
return pBiosRevision.value
@property
def oem_vbios_revision(self):
# (NvPhysicalGpuHandle hPhysicalGpu,NvU32 *);
pBiosRevision = NvU32()
nvStatus = NvAPI_GPU_GetVbiosOEMRevision(self._hPhysicalGpu, ctypes.byref(pBiosRevision))
if NvAPI_Status.NVAPI_OK != nvStatus:
szDesc = NvAPI_ShortString()
NvAPI_GetErrorMessage(nvStatus, szDesc)
raise RuntimeError("NvAPI_GPU_GetVbiosOEMRevision returned %s (%d)" % (szDesc, nvStatus))
return pBiosRevision.value
@property
def vbios_version(self):
szBiosRevision = NvAPI_ShortString()
nvStatus = NvAPI_GPU_GetVbiosVersionString(self._hPhysicalGpu, szBiosRevision)
if NvAPI_Status.NVAPI_OK != nvStatus:
szDesc = NvAPI_ShortString()
NvAPI_GetErrorMessage(nvStatus, szDesc)
raise RuntimeError("NvAPI_GPU_GetVbiosVersionString returned %s (%d)" % (szDesc, nvStatus))
return szBiosRevision.value
@property
def agp_aperture(self):
# (NvPhysicalGpuHandle hPhysicalGpu,NvU32 *);
pSize = NvU32()
nvStatus = NvAPI_GPU_GetAGPAperture(self._hPhysicalGpu, ctypes.byref(pSize))
if NvAPI_Status.NVAPI_OK != nvStatus:
szDesc = NvAPI_ShortString()
NvAPI_GetErrorMessage(nvStatus, szDesc)
raise RuntimeError("NvAPI_GPU_GetAGPAperture returned %s (%d)" % (szDesc, nvStatus))
return pSize.value
@property
def current_agp_rate(self):
pRate = NvU32()
nvStatus = NvAPI_GPU_GetCurrentAGPRate(self._hPhysicalGpu, ctypes.byref(pRate))
if NvAPI_Status.NVAPI_OK != nvStatus:
szDesc = NvAPI_ShortString()
NvAPI_GetErrorMessage(nvStatus, szDesc)
raise RuntimeError("NvAPI_GPU_GetCurrentAGPRate returned %s (%d)" % (szDesc, nvStatus))
return pRate.value
@property
def current_pcie_downstream_width(self):
pWidth = NvU32()
nvStatus = NvAPI_GPU_GetCurrentPCIEDownstreamWidth(self._hPhysicalGpu, ctypes.byref(pWidth))
if NvAPI_Status.NVAPI_OK != nvStatus:
szDesc = NvAPI_ShortString()
NvAPI_GetErrorMessage(nvStatus, szDesc)
raise RuntimeError("NvAPI_GPU_GetCurrentPCIEDownstreamWidth returned %s (%d)" % (szDesc, nvStatus))
return pWidth.value
@property
def physical_frame_buffer_size(self):
pSize = NvU32()
nvStatus = NvAPI_GPU_GetPhysicalFrameBufferSize(self._hPhysicalGpu, ctypes.byref(pSize))
if NvAPI_Status.NVAPI_OK != nvStatus:
szDesc = NvAPI_ShortString()
NvAPI_GetErrorMessage(nvStatus, szDesc)
raise RuntimeError("NvAPI_GPU_GetPhysicalFrameBufferSize returned %s (%d)" % (szDesc, nvStatus))
return pSize.value
@property
def virtual_frame_buffer_size(self):
pSize = NvU32()
nvStatus = NvAPI_GPU_GetVirtualFrameBufferSize(self._hPhysicalGpu, ctypes.byref(pSize))
if NvAPI_Status.NVAPI_OK != nvStatus:
szDesc = NvAPI_ShortString()
NvAPI_GetErrorMessage(nvStatus, szDesc)
raise RuntimeError("NvAPI_GPU_GetVirtualFrameBufferSize returned %s (%d)" % (szDesc, nvStatus))
return pSize.value
@property
def quadro_status(self):
pStatus = NvU32()
nvStatus = NvAPI_GPU_GetQuadroStatus(self._hPhysicalGpu, ctypes.byref(pStatus))
if NvAPI_Status.NVAPI_OK != nvStatus:
szDesc = NvAPI_ShortString()
NvAPI_GetErrorMessage(nvStatus, szDesc)
raise RuntimeError("NvAPI_GPU_GetQuadroStatus returned %s (%d)" % (szDesc, nvStatus))
return 'Quadro' if pStatus.value else 'GeForce'
@property
def serial_number(self):
pBoardInfo = NV_BOARD_INFO()
nvStatus = NvAPI_GPU_GetBoardInfo(self._hPhysicalGpu, ctypes.byref(pBoardInfo))
if NvAPI_Status.NVAPI_OK != nvStatus:
szDesc = NvAPI_ShortString()
NvAPI_GetErrorMessage(nvStatus, szDesc)
raise RuntimeError("NvAPI_GPU_GetBoardInfo returned %s (%d)" % (szDesc, nvStatus))
res = ''
for i in range(16):
res += chr(pBoardInfo.BoardNum[i].value)
return res
@property
def tach_reading(self):
pValue = NvU32()
nvStatus = NvAPI_GPU_GetTachReading(self._hPhysicalGpu, ctypes.byref(pValue))
if NvAPI_Status.NVAPI_OK != nvStatus:
szDesc = NvAPI_ShortString()
NvAPI_GetErrorMessage(nvStatus, szDesc)
raise RuntimeError("NvAPI_GPU_GetTachReading returned %s (%d)" % (szDesc, nvStatus))
return pValue.value
# NvAPI_I2CRead(NvPhysicalGpuHandle hPhysicalGpu, NV_I2C_INFO *pI2cInfo);
# NvAPI_I2CWrite(NvPhysicalGpuHandle hPhysicalGpu, NV_I2C_INFO *pI2cInfo);
# NvAPI_GPU_WorkstationFeatureSetup(__in NvPhysicalGpuHandle hPhysicalGpu, __in NvU32 featureEnableMask, __in NvU32 featureDisableMask);
# NvAPI_GPU_WorkstationFeatureQuery(__in NvPhysicalGpuHandle hPhysicalGpu, __out_opt NvU32 *pConfiguredFeatureMask, __out_opt NvU32 *pConsistentFeatureMask);
# NvAPI_GPU_GetECCStatusInfo(NvPhysicalGpuHandle hPhysicalGpu,NV_GPU_ECC_STATUS_INFO *pECCStatusInfo);
# NvAPI_GPU_GetECCErrorInfo(NvPhysicalGpuHandle hPhysicalGpu,NV_GPU_ECC_ERROR_INFO *pECCErrorInfo);
# NvAPI_GPU_ResetECCErrorInfo(NvPhysicalGpuHandle hPhysicalGpu, NvU8 bResetCurrent,NvU8 bResetAggregate);
# NvAPI_GPU_GetECCConfigurationInfo(NvPhysicalGpuHandle hPhysicalGpu,NV_GPU_ECC_CONFIGURATION_INFO *pECCConfigurationInfo);
# NvAPI_GPU_SetECCConfiguration(NvPhysicalGpuHandle hPhysicalGpu, NvU8 bEnable,NvU8 bEnableImmediately);
# NvAPI_GPU_QueryWorkstationFeatureSupport(NvPhysicalGpuHandle physicalGpu, NV_GPU_WORKSTATION_FEATURE_TYPE gpuWorkstationFeature);
# NvAPI_GPU_GetPerfDecreaseInfo(__in NvPhysicalGpuHandle hPhysicalGpu, __inout NvU32 *pPerfDecrInfo);
@property
def performance_monitor(self):
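# Returns a dict keyed by pstate id, each value holding 'clocks' and
# 'voltages' lists plus a 'pcie_limit', with a top-level 'utilization'
# list (one entry per NVAPI_MAX_GPU_UTILIZATIONS domain, None if absent).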
pPerfPstatesInfo = NV_GPU_PERF_PSTATES_INFO()
inputFlags = NvU32()
nvStatus = NvAPI_GPU_GetPstatesInfoEx(self._hPhysicalGpu, ctypes.byref(pPerfPstatesInfo), inputFlags)
if NvAPI_Status.NVAPI_OK != nvStatus:
szDesc = NvAPI_ShortString()
NvAPI_GetErrorMessage(nvStatus, szDesc)
raise RuntimeError("NvAPI_GPU_GetPstatesInfoEx returned %s (%d)" % (szDesc, nvStatus))
pPstatesInfo = NV_GPU_PERF_PSTATES20_INFO()
NvAPI_GPU_GetPstates20(self._hPhysicalGpu, ctypes.byref(pPstatesInfo))
res = {}
if _get_bit(pPerfPstatesInfo.flags.value, 0):
for i in range(pPerfPstatesInfo.numPstates.value):
pstate = pPerfPstatesInfo.pstates[i]
state_info = pPstatesInfo.pstates[i]
ps = {'clocks': [], 'voltages': []}
res[pstate.pstateId] = ps
if _get_bit(pstate.flags, 0):
ps['pcie_limit'] = 'GEN2'
else:
ps['pcie_limit'] = 'GEN1'
for j in range(pPerfPstatesInfo.numClocks.value):
clock = pstate.clocks[j]
state_info_clock = state_info.clocks[j]
type_id = NV_GPU_PERF_PSTATE20_CLOCK_TYPE_ID.get(clock.typeId)
data = {
'type': NV_GPU_PUBLIC_CLOCK_ID.get(clock.domainId),
'info_type': type_id,
'freq_delta_khz': state_info_clock.freqDelta_kHz.value.value,
'freq_delta_maximum_khz': state_info_clock.freqDelta_kHz.valueRange.max.value,
'freq_delta_minimum_khz': state_info_clock.freqDelta_kHz.valueRange.min.value,
'can_overclock': _get_bit(clock.flags.value, 0),
'freq_khz': clock.freq.value
}
if type_id == 'Single':
data['freq_khz'] = state_info_clock.data.single.freq_kHz.value
else:
data['minimum_freq_khz'] = state_info_clock.data.range.minFreq_kHz.value
data['maximum_freq_khz'] = state_info_clock.data.range.maxFreq_kHz.value
data['minimum_voltage'] = state_info_clock.data.range.minVoltage_uV.value
data['maximum_voltage'] = state_info_clock.data.range.maxVoltage_uV.value
ps['clocks'] += [data]
for j in range(pPerfPstatesInfo.numVoltages.value):
voltage = pstate.voltages[j]
base_voltage = state_info.baseVoltages[j]
ps['voltages'] += [
{
'type': NV_GPU_PERF_VOLTAGE_INFO_DOMAIN_ID.get(voltage.domainId),
'mvolt': voltage.mvolt.value,
'volt': base_voltage.volt_uV.value,
'volt_delta': base_voltage.voltDelta_uV.value.value,
'volt_delta_maximum': base_voltage.voltDelta_uV.valueRange.max.value,
'volt_delta_minimum': base_voltage.voltDelta_uV.valueRange.min.value
}
]
pDynamicPstatesInfoEx = NV_GPU_DYNAMIC_PSTATES_INFO_EX()
NvAPI_GPU_GetDynamicPstatesInfoEx(self._hPhysicalGpu, ctypes.byref(pDynamicPstatesInfoEx))
res['utilization'] = []
if _get_bit(pDynamicPstatesInfoEx.flags.value, 0):
for i in range(NVAPI_MAX_GPU_UTILIZATIONS):
util = pDynamicPstatesInfoEx.utilization[i]
if util.bIsPresent.value:
res['utilization'] += [util.percentage.value]
else:
res['utilization'] += [None]
# TODO: pPstatesInfo.ov
return res
# NvAPI_GPU_GetCurrentPstate(NvPhysicalGpuHandle hPhysicalGpu, NV_GPU_PERF_PSTATE_ID *pCurrentPstate);
@property
def thermal_sensors(self):
sensorIndex = NvU32(NVAPI_THERMAL_TARGET_ALL)
pThermalSettings = NV_GPU_THERMAL_SETTINGS()
nvStatus = NvAPI_GPU_GetThermalSettings(self._hPhysicalGpu, sensorIndex, ctypes.byref(pThermalSettings))
if NvAPI_Status.NVAPI_OK != nvStatus:
szDesc = NvAPI_ShortString()
NvAPI_GetErrorMessage(nvStatus, szDesc)
raise RuntimeError("NvAPI_GPU_GetThermalSettings returned %s (%d)" % (szDesc, nvStatus))
res = []
for i in range(pThermalSettings.count.value):
sensr = pThermalSettings.sensor[i]
res += [
{
'controller': NV_THERMAL_CONTROLLER.get(sensr.controller),
'default_minimum_temp': sensr.defaultMinTemp.value,
'default_maximum_temp': sensr.defaultMaxTemp.value,
'current_temp': sensr.currentTemp.value,
'target': NV_THERMAL_TARGET.get(sensr.target)
}
]
return res
@property
def clock_frequencies(self):
pClkFreqs = NV_GPU_CLOCK_FREQUENCIES()
nvStatus = NvAPI_GPU_GetAllClockFrequencies(self._hPhysicalGpu, ctypes.byref(pClkFreqs))
if NvAPI_Status.NVAPI_OK != nvStatus:
szDesc = NvAPI_ShortString()
NvAPI_GetErrorMessage(nvStatus, szDesc)
raise RuntimeError("NvAPI_GPU_GetThermalSettings returned %s (%d)" % (szDesc, nvStatus))
res = []
for i in range(NVAPI_MAX_GPU_PUBLIC_CLOCKS):
dmain = pClkFreqs.domain[i]
if dmain.bIsPresent.value:
freq = dmain.frequency.value
else:
freq = None
res += [
{
'frequency': freq,
'type': NV_GPU_CLOCK_FREQUENCIES_CLOCK_TYPE.get(dmain.ClockType.value)
}
]
return res
# NvAPI_GPU_ClientIllumDevicesGetInfo(__in NvPhysicalGpuHandle hPhysicalGpu, __inout NV_GPU_CLIENT_ILLUM_DEVICE_INFO_PARAMS *pIllumDevicesInfo);
# NvAPI_GPU_ClientIllumDevicesGetControl(__in NvPhysicalGpuHandle hPhysicalGpu, __inout NV_GPU_CLIENT_ILLUM_DEVICE_CONTROL_PARAMS *pClientIllumDevicesControl);
# NvAPI_GPU_ClientIllumDevicesSetControl(__in NvPhysicalGpuHandle hPhysicalGpu, __inout NV_GPU_CLIENT_ILLUM_DEVICE_CONTROL_PARAMS *pClientIllumDevicesControl);
# NvAPI_GPU_ClientIllumZonesGetInfo(__in NvPhysicalGpuHandle hPhysicalGpu, __inout NV_GPU_CLIENT_ILLUM_ZONE_INFO_PARAMS *pIllumZonesInfo);
# NvAPI_GPU_ClientIllumZonesGetControl(__in NvPhysicalGpuHandle hPhysicalGpu, __inout NV_GPU_CLIENT_ILLUM_ZONE_CONTROL_PARAMS *pIllumZonesControl);
# NvAPI_GPU_ClientIllumZonesSetControl(__in NvPhysicalGpuHandle hPhysicalGpu, __inout NV_GPU_CLIENT_ILLUM_ZONE_CONTROL_PARAMS *pIllumZonesControl);
def __init__(self, logical_gpu, physical_gpu_index):
self.logical_gpu = logical_gpu
self.physical_gpu_index = physical_gpu_index
@property
def dedicated_memory(self):
# Size(in kb) of the physical framebuffer.
return self._memory_info.dedicatedVideoMemory.value
@property
def available_dedicated_memory(self):
# Size(in kb) of the available physical framebuffer for allocating
# video memory surfaces.
return self._memory_info.availableDedicatedVideoMemory.value
@property
def system_memory(self):
# Size(in kb) of system memory the driver allocates at load time.
return self._memory_info.systemVideoMemory.value
@property
def shared_system_memory(self):
# Size(in kb) of shared system memory that driver is allowed to
# commit for surfaces across all allocations.
return self._memory_info.sharedSystemMemory.value
@property
def current_available_dedicated_memory(self):
# Size(in kb) of the current available physical framebuffer for
# allocating video memory surfaces.
return self._memory_info.curAvailableDedicatedVideoMemory.value
@property
def dedicated_memory_eviction_size(self):
# Size(in kb) of the total size of memory released as a result of
# the evictions.
return self._memory_info.dedicatedVideoMemoryEvictionsSize.value
@property
def dedicated_memory_eviction_count(self):
# Indicates the number of eviction events that caused an allocation
# to be removed from dedicated video memory to free GPU video memory
return self._memory_info.dedicatedVideoMemoryEvictionCount.value
@property
def _hPhysicalGpu(self):
return self.logical_gpu._logical_gpu_info.physicalGpuHandles[self.physical_gpu_index]
@property
def _memory_info(self):
hPhysicalGpu = self._hPhysicalGpu
pMemoryInfo = NV_DISPLAY_DRIVER_MEMORY_INFO()
nvStatus = NvAPI_GPU_GetMemoryInfo(hPhysicalGpu, ctypes.byref(pMemoryInfo))
if NvAPI_Status.NVAPI_OK != nvStatus:
szDesc = NvAPI_ShortString()
NvAPI_GetErrorMessage(nvStatus, szDesc)
raise RuntimeError("NvAPI_GPU_GetMemoryInfo returned %s (%d)" % (szDesc, nvStatus))
class LogicalGPU(object):
@property
def _pLogicalGPU(self):
thisEnum = NvU32(self.gpu_index)
hNvDisplay = NvDisplayHandle()
nvStatus = NvAPI_EnumNvidiaDisplayHandle(thisEnum, ctypes.byref(hNvDisplay))
if NvAPI_Status.NVAPI_OK != nvStatus:
szDesc = NvAPI_ShortString()
NvAPI_GetErrorMessage(nvStatus, szDesc)
raise RuntimeError("NvAPI_EnumNvidiaDisplayHandle returned %s (%d)" % (szDesc, nvStatus))
pLogicalGPU = NvLogicalGpuHandle()
nvStatus = NvAPI_GetLogicalGPUFromDisplay(
hNvDisplay,
ctypes.byref(pLogicalGPU)
)
if NvAPI_Status.NVAPI_OK != nvStatus:
szDesc = NvAPI_ShortString()
NvAPI_GetErrorMessage(nvStatus, szDesc)
raise RuntimeError("NvAPI_GetLogicalGPUFromDisplay returned %s (%d)" % (szDesc, nvStatus))
return pLogicalGPU
def __len__(self):
return self._logical_gpu_info.physicalGpuCount.value
@property
def physical_gpus(self):
for i in range(self._logical_gpu_info.physicalGpuCount.value):
yield PhysicalGPU(self, i)
@property
def _logical_gpu_info(self):
pLogicalGpuData = NV_LOGICAL_GPU_DATA()
nvStatus = NvAPI_GPU_GetLogicalGpuInfo(
self._pLogicalGPU,
ctypes.byref(pLogicalGpuData)
)
if NvAPI_Status.NVAPI_OK != nvStatus:
szDesc = NvAPI_ShortString()
NvAPI_GetErrorMessage(nvStatus, szDesc)
raise RuntimeError("NvAPI_GPU_GetLogicalGpuInfo returned %s (%d)" % (szDesc, nvStatus))
return pLogicalGpuData
@property
def os_adapter_id(self):
return self._logical_gpu_info.pOSAdapterId.value
# NvAPI_Stereo_CreateHandleFromIUnknown(IUnknown *pDevice, StereoHandle *pStereoHandle);
# NvAPI_Stereo_DestroyHandle(StereoHandle stereoHandle);
# NvAPI_Stereo_Activate(StereoHandle stereoHandle);
# NvAPI_Stereo_Deactivate(StereoHandle stereoHandle);
# NvAPI_Stereo_IsActivated(StereoHandle stereoHandle, NvU8 *pIsStereoOn);
# NvAPI_Stereo_GetSeparation(StereoHandle stereoHandle, float *pSeparationPercentage);
# NvAPI_Stereo_SetSeparation(StereoHandle stereoHandle, float newSeparationPercentage);
# NvAPI_Stereo_GetConvergence(StereoHandle stereoHandle, float *pConvergence);
# NvAPI_Stereo_SetConvergence(StereoHandle stereoHandle, float newConvergence);
# NvAPI_Stereo_SetActiveEye(StereoHandle hStereoHandle, NV_STEREO_ACTIVE_EYE StereoEye);
# NvAPI_Stereo_GetEyeSeparation(StereoHandle hStereoHandle, float *pSeparation );
# NvAPI_Stereo_GetSurfaceCreationMode(__in StereoHandle hStereoHandle,__in NVAPI_STEREO_SURFACECREATEMODE* pCreationMode);
# NvAPI_Stereo_Debug_WasLastDrawStereoized(__in StereoHandle hStereoHandle, __out NvU8 *pWasStereoized);
def __init__(self, gpu_index):
self.gpu_index = gpu_index
def __iter__(self):
logical_gpu_info = self._logical_gpu_info
for i in range(logical_gpu_info.physicalGpuCount.value):
hPhysicalGpu = logical_gpu_info.physicalGpuHandles[i]
displayIdCount = NvU32(16)
displayIdArray = (NV_GPU_DISPLAYIDS * 16)()
displayIdArray[0].version = NV_GPU_DISPLAYIDS_VER
nvStatus = NvAPI_GPU_GetAllDisplayIds(
hPhysicalGpu,
displayIdArray,
ctypes.byref(displayIdCount),
)
if NvAPI_Status.NVAPI_OK != nvStatus:
szDesc = NvAPI_ShortString()
NvAPI_GetErrorMessage(nvStatus, szDesc)
raise RuntimeError("NvAPI_GPU_GetConnectedDisplayIds returned %s (%d)" % (szDesc, nvStatus))
for i in range(displayIdCount.value):
yield Display(self, displayIdArray[i].displayId)
class Singleton(type):
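# Metaclass that lazily creates and caches a single shared instance per class.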
def __init__(cls, name, bases, dct):
super(Singleton, cls).__init__(name, bases, dct)
cls._instance = None
def __call__(cls):
if cls._instance is None:
cls._instance = super(Singleton, cls).__call__()
return cls._instance
@six.add_metaclass(Singleton)
class GPUs(object):
# NvAPI_GetInterfaceVersionString(NvAPI_ShortString szDesc);
# NvAPI_SYS_GetChipSetInfo(NV_CHIPSET_INFO *pChipSetInfo);
# NvAPI_SYS_GetLidAndDockInfo(NV_LID_DOCK_PARAMS *pLidAndDock);
# NvAPI_GPU_QueryIlluminationSupport(__inout NV_GPU_QUERY_ILLUMINATION_SUPPORT_PARM *pIlluminationSupportInfo);
# NvAPI_GPU_GetIllumination(NV_GPU_GET_ILLUMINATION_PARM *pIlluminationInfo);
# NvAPI_GPU_SetIllumination(NV_GPU_SET_ILLUMINATION_PARM *pIlluminationInfo);
# NvAPI_Stereo_Enable(void);
# NvAPI_Stereo_Disable(void);
# NvAPI_Stereo_IsEnabled(NvU8 *pIsStereoEnabled);
# NvAPI_Stereo_IsWindowedModeSupported(NvU8* bSupported);
# NvAPI_Stereo_SetDriverMode( NV_STEREO_DRIVER_MODE mode );
def __init__(self):
InitNV()
def __iter__(self):
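# Walk NvAPI_EnumNvidiaDisplayHandle until NVAPI_END_ENUMERATION,
# yielding a LogicalGPU for each display handle that maps to a logical GPU.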
count = 0
hNvDisplay = NvDisplayHandle()
nvStatus = NvAPI_EnumNvidiaDisplayHandle(NvU32(count), ctypes.byref(hNvDisplay))
while nvStatus == NvAPI_Status.NVAPI_OK:
pLogicalGPU = NvLogicalGpuHandle()
if NvAPI_GetLogicalGPUFromDisplay(
hNvDisplay,
ctypes.byref(pLogicalGPU)
) == NvAPI_Status.NVAPI_OK:
yield LogicalGPU(count)
count += 1
nvStatus = NvAPI_EnumNvidiaDisplayHandle(NvU32(count), ctypes.byref(hNvDisplay))
if nvStatus != NvAPI_Status.NVAPI_END_ENUMERATION:
raise RuntimeError("NvAPI_EnumNvidiaDisplayHandle returned error code %d" % (nvStatus,))
|
[
"ctypes.byref",
"collections.namedtuple",
"six.add_metaclass"
] |
[((3596, 3661), 'collections.namedtuple', 'namedtuple', (['"""ColorCoordinates"""', "['red', 'green', 'blue', 'white']"], {}), "('ColorCoordinates', ['red', 'green', 'blue', 'white'])\n", (3606, 3661), False, 'from collections import namedtuple\n'), ((3678, 3717), 'collections.namedtuple', 'namedtuple', (['"""RedCoordinate"""', "['x', 'y']"], {}), "('RedCoordinate', ['x', 'y'])\n", (3688, 3717), False, 'from collections import namedtuple\n'), ((3736, 3775), 'collections.namedtuple', 'namedtuple', (['"""RedCoordinate"""', "['x', 'y']"], {}), "('RedCoordinate', ['x', 'y'])\n", (3746, 3775), False, 'from collections import namedtuple\n'), ((3793, 3832), 'collections.namedtuple', 'namedtuple', (['"""RedCoordinate"""', "['x', 'y']"], {}), "('RedCoordinate', ['x', 'y'])\n", (3803, 3832), False, 'from collections import namedtuple\n'), ((3851, 3890), 'collections.namedtuple', 'namedtuple', (['"""RedCoordinate"""', "['x', 'y']"], {}), "('RedCoordinate', ['x', 'y'])\n", (3861, 3890), False, 'from collections import namedtuple\n'), ((54538, 54566), 'six.add_metaclass', 'six.add_metaclass', (['Singleton'], {}), '(Singleton)\n', (54555, 54566), False, 'import six\n'), ((8584, 8607), 'ctypes.byref', 'ctypes.byref', (['displayId'], {}), '(displayId)\n', (8596, 8607), False, 'import ctypes\n'), ((9075, 9101), 'ctypes.byref', 'ctypes.byref', (['hPhysicalGpu'], {}), '(hPhysicalGpu)\n', (9087, 9101), False, 'import ctypes\n'), ((9732, 9758), 'ctypes.byref', 'ctypes.byref', (['hPhysicalGpu'], {}), '(hPhysicalGpu)\n', (9744, 9758), False, 'import ctypes\n'), ((9890, 9918), 'ctypes.byref', 'ctypes.byref', (['displayIdCount'], {}), '(displayIdCount)\n', (9902, 9918), False, 'import ctypes\n'), ((10584, 10613), 'ctypes.byref', 'ctypes.byref', (['hdrCapabilities'], {}), '(hdrCapabilities)\n', (10596, 10613), False, 'import ctypes\n'), ((11303, 11329), 'ctypes.byref', 'ctypes.byref', (['hdrColorData'], {}), '(hdrColorData)\n', (11315, 11329), False, 'import ctypes\n'), ((14894, 14920), 'ctypes.byref', 'ctypes.byref', (['hdrColorData'], {}), '(hdrColorData)\n', (14906, 14920), False, 'import ctypes\n'), ((15615, 15641), 'ctypes.byref', 'ctypes.byref', (['hdrColorData'], {}), '(hdrColorData)\n', (15627, 15641), False, 'import ctypes\n'), ((16159, 16185), 'ctypes.byref', 'ctypes.byref', (['hdrColorData'], {}), '(hdrColorData)\n', (16171, 16185), False, 'import ctypes\n'), ((16788, 16814), 'ctypes.byref', 'ctypes.byref', (['hdrColorData'], {}), '(hdrColorData)\n', (16800, 16814), False, 'import ctypes\n'), ((17505, 17531), 'ctypes.byref', 'ctypes.byref', (['hdrColorData'], {}), '(hdrColorData)\n', (17517, 17531), False, 'import ctypes\n'), ((18049, 18075), 'ctypes.byref', 'ctypes.byref', (['hdrColorData'], {}), '(hdrColorData)\n', (18061, 18075), False, 'import ctypes\n'), ((19846, 19872), 'ctypes.byref', 'ctypes.byref', (['hdrColorData'], {}), '(hdrColorData)\n', (19858, 19872), False, 'import ctypes\n'), ((20925, 20951), 'ctypes.byref', 'ctypes.byref', (['hdrColorData'], {}), '(hdrColorData)\n', (20937, 20951), False, 'import ctypes\n'), ((21566, 21592), 'ctypes.byref', 'ctypes.byref', (['hdrColorData'], {}), '(hdrColorData)\n', (21578, 21592), False, 'import ctypes\n'), ((22757, 22783), 'ctypes.byref', 'ctypes.byref', (['hdrColorData'], {}), '(hdrColorData)\n', (22769, 22783), False, 'import ctypes\n'), ((23896, 23922), 'ctypes.byref', 'ctypes.byref', (['hdrColorData'], {}), '(hdrColorData)\n', (23908, 23922), False, 'import ctypes\n'), ((25097, 25123), 'ctypes.byref', 'ctypes.byref', (['hdrColorData'], {}), '(hdrColorData)\n', (25109, 25123), False, 'import ctypes\n'), ((28552, 28587), 'ctypes.byref', 'ctypes.byref', (['pGetHDCPSupportStatus'], {}), '(pGetHDCPSupportStatus)\n', (28564, 28587), False, 'import ctypes\n'), ((29460, 29480), 'ctypes.byref', 'ctypes.byref', (['pCount'], {}), '(pCount)\n', (29472, 29480), False, 'import ctypes\n'), ((29921, 29941), 'ctypes.byref', 'ctypes.byref', (['pCount'], {}), '(pCount)\n', (29933, 29941), False, 'import ctypes\n'), ((30838, 30863), 'ctypes.byref', 'ctypes.byref', (['pSystemType'], {}), '(pSystemType)\n', (30850, 30863), False, 'import ctypes\n'), ((31805, 31828), 'ctypes.byref', 'ctypes.byref', (['pDeviceId'], {}), '(pDeviceId)\n', (31817, 31828), False, 'import ctypes\n'), ((31842, 31868), 'ctypes.byref', 'ctypes.byref', (['pSubSystemId'], {}), '(pSubSystemId)\n', (31854, 31868), False, 'import ctypes\n'), ((31882, 31907), 'ctypes.byref', 'ctypes.byref', (['pRevisionId'], {}), '(pRevisionId)\n', (31894, 31907), False, 'import ctypes\n'), ((31921, 31947), 'ctypes.byref', 'ctypes.byref', (['pExtDeviceId'], {}), '(pExtDeviceId)\n', (31933, 31947), False, 'import ctypes\n'), ((32825, 32847), 'ctypes.byref', 'ctypes.byref', (['pGpuType'], {}), '(pGpuType)\n', (32837, 32847), False, 'import ctypes\n'), ((33259, 33281), 'ctypes.byref', 'ctypes.byref', (['pBusType'], {}), '(pBusType)\n', (33271, 33281), False, 'import ctypes\n'), ((33681, 33701), 'ctypes.byref', 'ctypes.byref', (['pBusId'], {}), '(pBusId)\n', (33693, 33701), False, 'import ctypes\n'), ((34097, 34121), 'ctypes.byref', 'ctypes.byref', (['pBusSlotId'], {}), '(pBusSlotId)\n', (34109, 34121), False, 'import ctypes\n'), ((34505, 34523), 'ctypes.byref', 'ctypes.byref', (['pIRQ'], {}), '(pIRQ)\n', (34517, 34523), False, 'import ctypes\n'), ((34923, 34950), 'ctypes.byref', 'ctypes.byref', (['pBiosRevision'], {}), '(pBiosRevision)\n', (34935, 34950), False, 'import ctypes\n'), ((35429, 35456), 'ctypes.byref', 'ctypes.byref', (['pBiosRevision'], {}), '(pBiosRevision)\n', (35441, 35456), False, 'import ctypes\n'), ((36377, 36396), 'ctypes.byref', 'ctypes.byref', (['pSize'], {}), '(pSize)\n', (36389, 36396), False, 'import ctypes\n'), ((36800, 36819), 'ctypes.byref', 'ctypes.byref', (['pRate'], {}), '(pRate)\n', (36812, 36819), False, 'import ctypes\n'), ((37252, 37272), 'ctypes.byref', 'ctypes.byref', (['pWidth'], {}), '(pWidth)\n', (37264, 37272), False, 'import ctypes\n'), ((37711, 37730), 'ctypes.byref', 'ctypes.byref', (['pSize'], {}), '(pSize)\n', (37723, 37730), False, 'import ctypes\n'), ((38163, 38182), 'ctypes.byref', 'ctypes.byref', (['pSize'], {}), '(pSize)\n', (38175, 38182), False, 'import ctypes\n'), ((38594, 38615), 'ctypes.byref', 'ctypes.byref', (['pStatus'], {}), '(pStatus)\n', (38606, 38615), False, 'import ctypes\n'), ((39054, 39078), 'ctypes.byref', 'ctypes.byref', (['pBoardInfo'], {}), '(pBoardInfo)\n', (39066, 39078), False, 'import ctypes\n'), ((39567, 39587), 'ctypes.byref', 'ctypes.byref', (['pValue'], {}), '(pValue)\n', (39579, 39587), False, 'import ctypes\n'), ((41316, 41346), 'ctypes.byref', 'ctypes.byref', (['pPerfPstatesInfo'], {}), '(pPerfPstatesInfo)\n', (41328, 41346), False, 'import ctypes\n'), ((41703, 41729), 'ctypes.byref', 'ctypes.byref', (['pPstatesInfo'], {}), '(pPstatesInfo)\n', (41715, 41729), False, 'import ctypes\n'), ((45444, 45474), 'ctypes.byref', 'ctypes.byref', (['pThermalSettings'], {}), '(pThermalSettings)\n', (45456, 45474), False, 'import ctypes\n'), ((46425, 46448), 'ctypes.byref', 'ctypes.byref', (['pClkFreqs'], {}), '(pClkFreqs)\n', (46437, 46448), False, 'import ctypes\n'), ((50129, 50154), 'ctypes.byref', 'ctypes.byref', (['pMemoryInfo'], {}), '(pMemoryInfo)\n', (50141, 50154), False, 'import ctypes\n'), ((50601, 50625), 'ctypes.byref', 'ctypes.byref', (['hNvDisplay'], {}), '(hNvDisplay)\n', (50613, 50625), False, 'import ctypes\n'), ((51001, 51026), 'ctypes.byref', 'ctypes.byref', (['pLogicalGPU'], {}), '(pLogicalGPU)\n', (51013, 51026), False, 'import ctypes\n'), ((51688, 51717), 'ctypes.byref', 'ctypes.byref', (['pLogicalGpuData'], {}), '(pLogicalGpuData)\n', (51700, 51717), False, 'import ctypes\n'), ((55504, 55528), 'ctypes.byref', 'ctypes.byref', (['hNvDisplay'], {}), '(hNvDisplay)\n', (55516, 55528), False, 'import ctypes\n'), ((12199, 12225), 'ctypes.byref', 'ctypes.byref', (['hdrColorData'], {}), '(hdrColorData)\n', (12211, 12225), False, 'import ctypes\n'), ((44593, 44628), 'ctypes.byref', 'ctypes.byref', (['pDynamicPstatesInfoEx'], {}), '(pDynamicPstatesInfoEx)\n', (44605, 44628), False, 'import ctypes\n'), ((53779, 53807), 'ctypes.byref', 'ctypes.byref', (['displayIdCount'], {}), '(displayIdCount)\n', (53791, 53807), False, 'import ctypes\n'), ((55924, 55948), 'ctypes.byref', 'ctypes.byref', (['hNvDisplay'], {}), '(hNvDisplay)\n', (55936, 55948), False, 'import ctypes\n'), ((55726, 55751), 'ctypes.byref', 'ctypes.byref', (['pLogicalGPU'], {}), '(pLogicalGPU)\n', (55738, 55751), False, 'import ctypes\n')]
|
# Generated by Django 3.0.11 on 2021-03-22 22:07
import django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("users", "0008_remove_old_fields"),
]
operations = [
migrations.AlterField(
model_name="institutionfontys",
name="profile",
field=models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to="users.Profile"),
),
migrations.AlterModelOptions(
name="institutionfontys",
options={"verbose_name": "Member of Fontys", "verbose_name_plural": "Members of Fontys"},
),
migrations.AlterModelOptions(
name="institutiontue",
options={"verbose_name": "Member of TU/e", "verbose_name_plural": "Members of TU/e"},
),
]
|
[
"django.db.models.OneToOneField",
"django.db.migrations.AlterModelOptions"
] |
[((485, 638), 'django.db.migrations.AlterModelOptions', 'migrations.AlterModelOptions', ([], {'name': '"""institutionfontys"""', 'options': "{'verbose_name': 'Member of Fontys', 'verbose_name_plural': 'Members of Fontys'\n }"}), "(name='institutionfontys', options={\n 'verbose_name': 'Member of Fontys', 'verbose_name_plural':\n 'Members of Fontys'})\n", (513, 638), False, 'from django.db import migrations, models\n'), ((674, 815), 'django.db.migrations.AlterModelOptions', 'migrations.AlterModelOptions', ([], {'name': '"""institutiontue"""', 'options': "{'verbose_name': 'Member of TU/e', 'verbose_name_plural': 'Members of TU/e'}"}), "(name='institutiontue', options={'verbose_name':\n 'Member of TU/e', 'verbose_name_plural': 'Members of TU/e'})\n", (702, 815), False, 'from django.db import migrations, models\n'), ((379, 469), 'django.db.models.OneToOneField', 'models.OneToOneField', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""users.Profile"""'}), "(on_delete=django.db.models.deletion.CASCADE, to=\n 'users.Profile')\n", (399, 469), False, 'from django.db import migrations, models\n')]
|
# Copyright 2021 DeepMind Technologies Limited.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Utility functions for feature extraction."""
import functools
from typing import Any, Dict, NamedTuple, Optional
from graph_nets import graphs
import ml_collections
import tensorflow.compat.v2 as tf
from neural_lns import mip_utils
from neural_lns import preprocessor
from neural_lns import solving_utils
BIAS_FEATURE_INDEX = 1
SOLUTION_FEATURE_INDEX = 14
BINARY_FEATURE_INDEX = 15
# Number of variable features without incumbent features.
NUM_ROOT_VARIABLE_FEATURES = 19
# Number of past incumbents to include in features.
NUM_PAST_INCUMBENTS = 3
# Total number of variable features.
NUM_VARIABLE_FEATURES = NUM_ROOT_VARIABLE_FEATURES + 2 * NUM_PAST_INCUMBENTS + 1
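# i.e. 19 + 2 * 3 + 1 = 26 features per variable node; the breakdown into two
# features per past incumbent plus one extra feature is inferred from the
# constants above, not documented in this module.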
_INDICATOR_DIM = 1
_CON_FEATURE_DIM = 5
ORDER_TO_FEATURE_INDEX = {
'coefficient': 6,
'fractionality': 11,
}
# SCIP feature extraction parameters
SCIP_FEATURE_EXTRACTION_PARAMS = ml_collections.ConfigDict({
'seed': 42,
'time_limit_seconds': 60 * 10,
'separating_maxroundsroot': 0, # No cuts
'conflict_enable': False, # No additional cuts
'heuristics_emphasis': 'off', # No heuristics
})
class DatasetTuple(NamedTuple):
state: Dict[str, tf.Tensor]
graphs_tuple: graphs.GraphsTuple
labels: tf.Tensor
integer_labels: tf.Tensor
integer_node_indices: tf.Tensor
def get_dataset_feature_metadata() -> Dict[str, tf.io.VarLenFeature]:
"""Returns the schema of the data for writing Neural LNS datasets."""
features = {
'constraint_features': tf.io.VarLenFeature(dtype=tf.string),
'edge_features': tf.io.VarLenFeature(dtype=tf.string),
'edge_indices': tf.io.VarLenFeature(dtype=tf.string),
'variable_features': tf.io.VarLenFeature(dtype=tf.string),
'variable_lbs': tf.io.VarLenFeature(dtype=tf.float32),
'variable_ubs': tf.io.VarLenFeature(dtype=tf.float32),
'constraint_feature_names': tf.io.VarLenFeature(dtype=tf.string),
'variable_feature_names': tf.io.VarLenFeature(dtype=tf.string),
'edge_features_names': tf.io.VarLenFeature(dtype=tf.string),
'variable_names': tf.io.VarLenFeature(dtype=tf.string),
'binary_variable_indices': tf.io.VarLenFeature(dtype=tf.int64),
'all_integer_variable_indices': tf.io.VarLenFeature(dtype=tf.int64),
'model_maximize': tf.io.VarLenFeature(dtype=tf.int64),
'best_solution_labels': tf.io.VarLenFeature(dtype=tf.float32),
}
return features
def bnb_node_state_to_model_inputs(
state: Dict[str, Any],
node_depth: Optional[int] = None) -> graphs.GraphsTuple:
"""Convert a branch-and-bound node state into model inputs.
Args:
state: State information.
node_depth: Depth of this search state.
Returns:
graph_tuple: The graph structure information.
"""
variable_features = tf.where(
tf.math.is_nan(state['variable_features']),
tf.zeros_like(state['variable_features']),
state['variable_features'])
n_variables = tf.shape(variable_features)[0]
variable_feature_dim = tf.shape(variable_features)[1]
n_constraints = tf.shape(state['constraint_features'])[0]
constraint_feature_dim = tf.shape(state['constraint_features'])[1]
n_nodes = n_variables + n_constraints
tf.Assert(constraint_feature_dim == _CON_FEATURE_DIM,
[constraint_feature_dim])
padded_variables = tf.pad(
variable_features,
[[0, 0], [0, constraint_feature_dim]],
'CONSTANT') # + constraint_feature_dim
# Pad again with 1 to indicate variable corresponds to vertex.
padded_variables = tf.pad(
padded_variables,
[[0, 0], [0, _INDICATOR_DIM]],
'CONSTANT', constant_values=1.0) # + 1
padded_constraints = tf.pad(
state['constraint_features'],
[[0, 0], [variable_feature_dim, _INDICATOR_DIM]],
'CONSTANT') # + variable_feature_dim + 1
nodes = tf.concat([padded_variables, padded_constraints], axis=0)
edge_indices = tf.concat(
[state['edge_indices'][:, :1] + tf.cast(n_variables, dtype=tf.int64),
state['edge_indices'][:, 1:]], axis=1)
edge_features = state['edge_features']
node_features_dim = NUM_VARIABLE_FEATURES + _CON_FEATURE_DIM + 3
graph_tuple = graphs.GraphsTuple(
nodes=tf.cast(tf.reshape(nodes, [-1, node_features_dim]),
dtype=tf.float32),
edges=tf.cast(edge_features, dtype=tf.float32),
globals=tf.cast(node_depth, dtype=tf.float32),
receivers=edge_indices[:, 0], # constraint
senders=edge_indices[:, 1], # variables
n_node=tf.reshape(n_nodes, [1]),
n_edge=tf.reshape(tf.shape(state['edge_features'])[0], [1]))
return graph_tuple
def convert_to_minimization(gt: graphs.GraphsTuple, state: Dict[str, Any]):
"""Changes the sign of the objective coefficients of all variable nodes.
Args:
gt: Input graph.
state: Raw feature dictionary.
Returns:
graphs.GraphsTuple with updated nodes.
"""
nodes = gt.nodes
if tf.cast(state['model_maximize'], bool):
num_vars = tf.shape(state['variable_features'])[0]
feature_idx = ORDER_TO_FEATURE_INDEX['coefficient']
indices = tf.stack([
tf.range(num_vars),
tf.broadcast_to(tf.constant(feature_idx), shape=[num_vars])
])
indices = tf.transpose(indices)
sign_change = tf.tensor_scatter_nd_update(
tf.ones_like(nodes), indices,
tf.broadcast_to(tf.constant(-1.0), shape=[num_vars]))
nodes = nodes * sign_change
return gt.replace(nodes=nodes)
def get_graphs_tuple(state: Dict[str, Any]) -> graphs.GraphsTuple:
"""Converts feature state into GraphsTuple."""
state_with_bounds = state.copy()
state_with_bounds['variable_features'] = tf.concat([
state['variable_features'],
tf.expand_dims(state['variable_lbs'], -1),
tf.expand_dims(state['variable_ubs'], -1)
], -1)
graphs_tuple = bnb_node_state_to_model_inputs(
state_with_bounds, node_depth=1)
graphs_tuple = convert_to_minimization(graphs_tuple, state_with_bounds)
return graphs_tuple
def get_features(
mip: mip_utils.MPModel,
solver_params: ml_collections.ConfigDict = SCIP_FEATURE_EXTRACTION_PARAMS
) -> Optional[Dict[str, Any]]:
"""Extracts and preprocesses the features from the root of B&B tree."""
mip_solver = solving_utils.Solver()
presolver = preprocessor.Preprocessor()
_, mip = presolver.presolve(mip)
status = mip_solver.load_model(mip)
features = None
if status == mip_utils.MPSolverResponseStatus.NOT_SOLVED:
features = mip_solver.extract_lp_features_at_root(solver_params)
if features is not None and mip is not None:
features['model_maximize'] = mip.maximize
return features
def apply_feature_scaling(state, labels):
"""Scale variable bounds, solutions, coefficients and biases by sol norm.
  Our goal here is to scale the continuous variables in such a way that we do
  not change the integer feasible solutions of the MIP.
In order to achieve that, we have to ensure that all constraints are scaled
appropriately:
  a^T x <= b can be rescaled without changing the integer solutions via:
  (s * a_int)^T x_int + a_cont^T (x_cont * s) <= s * b
  where
  - s = 1 / ||x_cont||_2, the reciprocal of the 2-norm of the continuous
    part of the solution (the code below divides by this norm),
  - a_int/cont are constraint coefficients corresponding to integer or
    continuous variables,
  - x_int/cont are solution values corresponding to integer or continuous
    variables.
Args:
state: dictionary with tensors corresponding to a single MIP instance
labels: tensor with feasible solutions, including integer and continuous
variables.
Returns:
state: dictionary with scaled tensors
labels: tensor with scaled continuous solution values
"""
sol = state['variable_features'][:, SOLUTION_FEATURE_INDEX]
is_binary = state['variable_features'][:, BINARY_FEATURE_INDEX]
is_non_integer = ~tf.cast(is_binary, tf.bool)
continuous_sol = tf.boolean_mask(sol, is_non_integer)
norm = tf.norm(continuous_sol)
lbs = state['variable_lbs']
ubs = state['variable_ubs']
state['variable_lbs'] = tf.where(is_non_integer, lbs / norm, lbs)
state['variable_ubs'] = tf.where(is_non_integer, ubs / norm, ubs)
scaled_sol = tf.where(is_non_integer, sol / norm, sol)
variable_features = tf.concat(
[state['variable_features'][:, :SOLUTION_FEATURE_INDEX],
tf.expand_dims(scaled_sol, axis=-1),
state['variable_features'][:, SOLUTION_FEATURE_INDEX + 1:]],
axis=1)
state['variable_features'] = variable_features
senders = state['edge_indices'][:, 1]
is_integer_edge = tf.gather(~is_non_integer, senders)
edges = tf.squeeze(state['edge_features'])
scaled_edges = tf.where(is_integer_edge, edges / norm, edges)
state['edge_features'] = tf.reshape(scaled_edges, [-1, 1])
biases = state['constraint_features'][:, BIAS_FEATURE_INDEX]
scaled_biases = biases / norm
state['constraint_features'] = tf.concat([
state['constraint_features'][:, :BIAS_FEATURE_INDEX],
tf.reshape(scaled_biases, [-1, 1]),
state['constraint_features'][:, BIAS_FEATURE_INDEX + 1:],
], axis=1)
is_non_integer = tf.reshape(is_non_integer, [-1, 1])
scaled_labels = tf.where(is_non_integer, labels / norm, labels)
return state, scaled_labels
def decode_fn(record_bytes):
"""Decode a tf.train.Example.
The list of (feature_name, feature_dtype, feature_ndim) is:
[('variable_features', tf.float32, 2),
('binary_variable_indices', tf.int64, 1),
('model_maximize', tf.bool, 0),
('variable_names', tf.string, 1),
('constraint_features', tf.float32, 2),
('best_solution_labels', tf.float32, 1),
('variable_lbs', tf.float32, 1),
('edge_indices', tf.int64, 2),
('all_integer_variable_indices', tf.int64, 1),
('edge_features_names', tf.string, 0),
('variable_feature_names', tf.string, 0),
('constraint_feature_names', tf.string, 0),
('variable_ubs', tf.float32, 1),
('edge_features', tf.float32, 2)]
Args:
record_bytes: Serialised example.
Returns:
Deserialised example.
"""
example = tf.io.parse_single_example(
# Data
record_bytes,
# Schema
get_dataset_feature_metadata()
)
# Parse all 2-D tensors and cast to the right dtype
parsed_example = {}
parsed_example['variable_features'] = tf.io.parse_tensor(tf.sparse.to_dense(
example['variable_features'])[0], out_type=tf.float32)
parsed_example['constraint_features'] = tf.io.parse_tensor(tf.sparse.to_dense(
example['constraint_features'])[0], out_type=tf.float32)
parsed_example['edge_indices'] = tf.io.parse_tensor(tf.sparse.to_dense(
example['edge_indices'])[0], out_type=tf.int64)
parsed_example['edge_features'] = tf.io.parse_tensor(tf.sparse.to_dense(
example['edge_features'])[0], out_type=tf.float32)
# Convert the remaining features to dense.
for key, value in example.items():
if key not in parsed_example:
parsed_example[key] = tf.sparse.to_dense(value)
return parsed_example
def extract_data(state: Dict[str, Any], scale_features: bool = False):
"""Create a DatasetTuple for each MIP instance."""
num_vars = len(state['best_solution_labels'])
labels = tf.reshape(state['best_solution_labels'], [num_vars, -1])
if scale_features:
state, labels = apply_feature_scaling(state, labels)
if 'features_extraction_time' not in state:
state['features_extraction_time'] = tf.constant(
[], dtype=tf.float32)
graphs_tuple = get_graphs_tuple(state)
node_indices = tf.cast(state['binary_variable_indices'], tf.int32)
# We allow filtering out instances that are invalid.
valid_example = (tf.size(labels) > 0)
if valid_example:
int_labels = tf.gather(labels, node_indices)
int_labels = tf.cast(tf.round(int_labels), tf.int32)
int_labels = tf.cast(tf.expand_dims(int_labels, axis=-1), tf.int32)
else:
int_labels = tf.constant([], shape=[0, 0, 0], dtype=tf.int32)
labels = tf.constant([], shape=[0, 0], dtype=tf.float32)
return DatasetTuple(
state=state,
graphs_tuple=graphs_tuple,
integer_node_indices=node_indices,
labels=labels,
integer_labels=int_labels)
def get_dataset(input_path: str,
scale_features: bool = False,
shuffle_size: int = 1000,
num_epochs: Optional[int] = None) -> tf.data.Dataset:
"""Makes a tf.Dataset with correct preprocessing."""
ds = tf.data.TFRecordDataset([input_path]).repeat(num_epochs)
if shuffle_size > 0:
ds = ds.shuffle(shuffle_size, reshuffle_each_iteration=True)
data_fn = functools.partial(extract_data, scale_features=scale_features)
return ds.map(decode_fn).map(data_fn)
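
# Hedged usage sketch (the file path below is an assumption, not part of the
# original module):
#   ds = get_dataset('/tmp/neural_lns_train.tfrecord', scale_features=True)
#   for example in ds.take(1):
#     print(example.graphs_tuple.n_node, example.labels.shape)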
|
[
"tensorflow.compat.v2.pad",
"tensorflow.compat.v2.reshape",
"tensorflow.compat.v2.constant",
"neural_lns.preprocessor.Preprocessor",
"tensorflow.compat.v2.round",
"tensorflow.compat.v2.data.TFRecordDataset",
"tensorflow.compat.v2.expand_dims",
"tensorflow.compat.v2.shape",
"tensorflow.compat.v2.concat",
"tensorflow.compat.v2.boolean_mask",
"tensorflow.compat.v2.transpose",
"tensorflow.compat.v2.norm",
"tensorflow.compat.v2.io.VarLenFeature",
"tensorflow.compat.v2.sparse.to_dense",
"functools.partial",
"tensorflow.compat.v2.where",
"tensorflow.compat.v2.math.is_nan",
"tensorflow.compat.v2.zeros_like",
"tensorflow.compat.v2.gather",
"tensorflow.compat.v2.cast",
"tensorflow.compat.v2.range",
"neural_lns.solving_utils.Solver",
"tensorflow.compat.v2.size",
"tensorflow.compat.v2.squeeze",
"ml_collections.ConfigDict",
"tensorflow.compat.v2.ones_like",
"tensorflow.compat.v2.Assert"
] |
[((1538, 1703), 'ml_collections.ConfigDict', 'ml_collections.ConfigDict', (["{'seed': 42, 'time_limit_seconds': 60 * 10, 'separating_maxroundsroot': 0,\n 'conflict_enable': False, 'heuristics_emphasis': 'off'}"], {}), "({'seed': 42, 'time_limit_seconds': 60 * 10,\n 'separating_maxroundsroot': 0, 'conflict_enable': False,\n 'heuristics_emphasis': 'off'})\n", (1563, 1703), False, 'import ml_collections\n'), ((3842, 3921), 'tensorflow.compat.v2.Assert', 'tf.Assert', (['(constraint_feature_dim == _CON_FEATURE_DIM)', '[constraint_feature_dim]'], {}), '(constraint_feature_dim == _CON_FEATURE_DIM, [constraint_feature_dim])\n', (3851, 3921), True, 'import tensorflow.compat.v2 as tf\n'), ((3955, 4031), 'tensorflow.compat.v2.pad', 'tf.pad', (['variable_features', '[[0, 0], [0, constraint_feature_dim]]', '"""CONSTANT"""'], {}), "(variable_features, [[0, 0], [0, constraint_feature_dim]], 'CONSTANT')\n", (3961, 4031), True, 'import tensorflow.compat.v2 as tf\n'), ((4166, 4258), 'tensorflow.compat.v2.pad', 'tf.pad', (['padded_variables', '[[0, 0], [0, _INDICATOR_DIM]]', '"""CONSTANT"""'], {'constant_values': '(1.0)'}), "(padded_variables, [[0, 0], [0, _INDICATOR_DIM]], 'CONSTANT',\n constant_values=1.0)\n", (4172, 4258), True, 'import tensorflow.compat.v2 as tf\n'), ((4305, 4407), 'tensorflow.compat.v2.pad', 'tf.pad', (["state['constraint_features']", '[[0, 0], [variable_feature_dim, _INDICATOR_DIM]]', '"""CONSTANT"""'], {}), "(state['constraint_features'], [[0, 0], [variable_feature_dim,\n _INDICATOR_DIM]], 'CONSTANT')\n", (4311, 4407), True, 'import tensorflow.compat.v2 as tf\n'), ((4464, 4521), 'tensorflow.compat.v2.concat', 'tf.concat', (['[padded_variables, padded_constraints]'], {'axis': '(0)'}), '([padded_variables, padded_constraints], axis=0)\n', (4473, 4521), True, 'import tensorflow.compat.v2 as tf\n'), ((5555, 5593), 'tensorflow.compat.v2.cast', 'tf.cast', (["state['model_maximize']", 'bool'], {}), "(state['model_maximize'], bool)\n", (5562, 5593), True, 'import tensorflow.compat.v2 as tf\n'), ((6865, 6887), 'neural_lns.solving_utils.Solver', 'solving_utils.Solver', ([], {}), '()\n', (6885, 6887), False, 'from neural_lns import solving_utils\n'), ((6902, 6929), 'neural_lns.preprocessor.Preprocessor', 'preprocessor.Preprocessor', ([], {}), '()\n', (6927, 6929), False, 'from neural_lns import preprocessor\n'), ((8441, 8477), 'tensorflow.compat.v2.boolean_mask', 'tf.boolean_mask', (['sol', 'is_non_integer'], {}), '(sol, is_non_integer)\n', (8456, 8477), True, 'import tensorflow.compat.v2 as tf\n'), ((8487, 8510), 'tensorflow.compat.v2.norm', 'tf.norm', (['continuous_sol'], {}), '(continuous_sol)\n', (8494, 8510), True, 'import tensorflow.compat.v2 as tf\n'), ((8597, 8638), 'tensorflow.compat.v2.where', 'tf.where', (['is_non_integer', '(lbs / norm)', 'lbs'], {}), '(is_non_integer, lbs / norm, lbs)\n', (8605, 8638), True, 'import tensorflow.compat.v2 as tf\n'), ((8665, 8706), 'tensorflow.compat.v2.where', 'tf.where', (['is_non_integer', '(ubs / norm)', 'ubs'], {}), '(is_non_integer, ubs / norm, ubs)\n', (8673, 8706), True, 'import tensorflow.compat.v2 as tf\n'), ((8723, 8764), 'tensorflow.compat.v2.where', 'tf.where', (['is_non_integer', '(sol / norm)', 'sol'], {}), '(is_non_integer, sol / norm, sol)\n', (8731, 8764), True, 'import tensorflow.compat.v2 as tf\n'), ((9097, 9132), 'tensorflow.compat.v2.gather', 'tf.gather', (['(~is_non_integer)', 'senders'], {}), '(~is_non_integer, senders)\n', (9106, 9132), True, 'import tensorflow.compat.v2 as tf\n'), ((9143, 9177), 'tensorflow.compat.v2.squeeze', 
'tf.squeeze', (["state['edge_features']"], {}), "(state['edge_features'])\n", (9153, 9177), True, 'import tensorflow.compat.v2 as tf\n'), ((9195, 9241), 'tensorflow.compat.v2.where', 'tf.where', (['is_integer_edge', '(edges / norm)', 'edges'], {}), '(is_integer_edge, edges / norm, edges)\n', (9203, 9241), True, 'import tensorflow.compat.v2 as tf\n'), ((9269, 9302), 'tensorflow.compat.v2.reshape', 'tf.reshape', (['scaled_edges', '[-1, 1]'], {}), '(scaled_edges, [-1, 1])\n', (9279, 9302), True, 'import tensorflow.compat.v2 as tf\n'), ((9643, 9678), 'tensorflow.compat.v2.reshape', 'tf.reshape', (['is_non_integer', '[-1, 1]'], {}), '(is_non_integer, [-1, 1])\n', (9653, 9678), True, 'import tensorflow.compat.v2 as tf\n'), ((9697, 9744), 'tensorflow.compat.v2.where', 'tf.where', (['is_non_integer', '(labels / norm)', 'labels'], {}), '(is_non_integer, labels / norm, labels)\n', (9705, 9744), True, 'import tensorflow.compat.v2 as tf\n'), ((11712, 11769), 'tensorflow.compat.v2.reshape', 'tf.reshape', (["state['best_solution_labels']", '[num_vars, -1]'], {}), "(state['best_solution_labels'], [num_vars, -1])\n", (11722, 11769), True, 'import tensorflow.compat.v2 as tf\n'), ((12039, 12090), 'tensorflow.compat.v2.cast', 'tf.cast', (["state['binary_variable_indices']", 'tf.int32'], {}), "(state['binary_variable_indices'], tf.int32)\n", (12046, 12090), True, 'import tensorflow.compat.v2 as tf\n'), ((13106, 13168), 'functools.partial', 'functools.partial', (['extract_data'], {'scale_features': 'scale_features'}), '(extract_data, scale_features=scale_features)\n', (13123, 13168), False, 'import functools\n'), ((2147, 2183), 'tensorflow.compat.v2.io.VarLenFeature', 'tf.io.VarLenFeature', ([], {'dtype': 'tf.string'}), '(dtype=tf.string)\n', (2166, 2183), True, 'import tensorflow.compat.v2 as tf\n'), ((2208, 2244), 'tensorflow.compat.v2.io.VarLenFeature', 'tf.io.VarLenFeature', ([], {'dtype': 'tf.string'}), '(dtype=tf.string)\n', (2227, 2244), True, 'import tensorflow.compat.v2 as tf\n'), ((2268, 2304), 'tensorflow.compat.v2.io.VarLenFeature', 'tf.io.VarLenFeature', ([], {'dtype': 'tf.string'}), '(dtype=tf.string)\n', (2287, 2304), True, 'import tensorflow.compat.v2 as tf\n'), ((2333, 2369), 'tensorflow.compat.v2.io.VarLenFeature', 'tf.io.VarLenFeature', ([], {'dtype': 'tf.string'}), '(dtype=tf.string)\n', (2352, 2369), True, 'import tensorflow.compat.v2 as tf\n'), ((2393, 2430), 'tensorflow.compat.v2.io.VarLenFeature', 'tf.io.VarLenFeature', ([], {'dtype': 'tf.float32'}), '(dtype=tf.float32)\n', (2412, 2430), True, 'import tensorflow.compat.v2 as tf\n'), ((2454, 2491), 'tensorflow.compat.v2.io.VarLenFeature', 'tf.io.VarLenFeature', ([], {'dtype': 'tf.float32'}), '(dtype=tf.float32)\n', (2473, 2491), True, 'import tensorflow.compat.v2 as tf\n'), ((2527, 2563), 'tensorflow.compat.v2.io.VarLenFeature', 'tf.io.VarLenFeature', ([], {'dtype': 'tf.string'}), '(dtype=tf.string)\n', (2546, 2563), True, 'import tensorflow.compat.v2 as tf\n'), ((2597, 2633), 'tensorflow.compat.v2.io.VarLenFeature', 'tf.io.VarLenFeature', ([], {'dtype': 'tf.string'}), '(dtype=tf.string)\n', (2616, 2633), True, 'import tensorflow.compat.v2 as tf\n'), ((2664, 2700), 'tensorflow.compat.v2.io.VarLenFeature', 'tf.io.VarLenFeature', ([], {'dtype': 'tf.string'}), '(dtype=tf.string)\n', (2683, 2700), True, 'import tensorflow.compat.v2 as tf\n'), ((2726, 2762), 'tensorflow.compat.v2.io.VarLenFeature', 'tf.io.VarLenFeature', ([], {'dtype': 'tf.string'}), '(dtype=tf.string)\n', (2745, 2762), True, 'import tensorflow.compat.v2 as tf\n'), ((2797, 
2832), 'tensorflow.compat.v2.io.VarLenFeature', 'tf.io.VarLenFeature', ([], {'dtype': 'tf.int64'}), '(dtype=tf.int64)\n', (2816, 2832), True, 'import tensorflow.compat.v2 as tf\n'), ((2872, 2907), 'tensorflow.compat.v2.io.VarLenFeature', 'tf.io.VarLenFeature', ([], {'dtype': 'tf.int64'}), '(dtype=tf.int64)\n', (2891, 2907), True, 'import tensorflow.compat.v2 as tf\n'), ((2933, 2968), 'tensorflow.compat.v2.io.VarLenFeature', 'tf.io.VarLenFeature', ([], {'dtype': 'tf.int64'}), '(dtype=tf.int64)\n', (2952, 2968), True, 'import tensorflow.compat.v2 as tf\n'), ((3000, 3037), 'tensorflow.compat.v2.io.VarLenFeature', 'tf.io.VarLenFeature', ([], {'dtype': 'tf.float32'}), '(dtype=tf.float32)\n', (3019, 3037), True, 'import tensorflow.compat.v2 as tf\n'), ((3439, 3481), 'tensorflow.compat.v2.math.is_nan', 'tf.math.is_nan', (["state['variable_features']"], {}), "(state['variable_features'])\n", (3453, 3481), True, 'import tensorflow.compat.v2 as tf\n'), ((3489, 3530), 'tensorflow.compat.v2.zeros_like', 'tf.zeros_like', (["state['variable_features']"], {}), "(state['variable_features'])\n", (3502, 3530), True, 'import tensorflow.compat.v2 as tf\n'), ((3583, 3610), 'tensorflow.compat.v2.shape', 'tf.shape', (['variable_features'], {}), '(variable_features)\n', (3591, 3610), True, 'import tensorflow.compat.v2 as tf\n'), ((3639, 3666), 'tensorflow.compat.v2.shape', 'tf.shape', (['variable_features'], {}), '(variable_features)\n', (3647, 3666), True, 'import tensorflow.compat.v2 as tf\n'), ((3688, 3726), 'tensorflow.compat.v2.shape', 'tf.shape', (["state['constraint_features']"], {}), "(state['constraint_features'])\n", (3696, 3726), True, 'import tensorflow.compat.v2 as tf\n'), ((3757, 3795), 'tensorflow.compat.v2.shape', 'tf.shape', (["state['constraint_features']"], {}), "(state['constraint_features'])\n", (3765, 3795), True, 'import tensorflow.compat.v2 as tf\n'), ((5848, 5869), 'tensorflow.compat.v2.transpose', 'tf.transpose', (['indices'], {}), '(indices)\n', (5860, 5869), True, 'import tensorflow.compat.v2 as tf\n'), ((8394, 8421), 'tensorflow.compat.v2.cast', 'tf.cast', (['is_binary', 'tf.bool'], {}), '(is_binary, tf.bool)\n', (8401, 8421), True, 'import tensorflow.compat.v2 as tf\n'), ((11936, 11969), 'tensorflow.compat.v2.constant', 'tf.constant', (['[]'], {'dtype': 'tf.float32'}), '([], dtype=tf.float32)\n', (11947, 11969), True, 'import tensorflow.compat.v2 as tf\n'), ((12166, 12181), 'tensorflow.compat.v2.size', 'tf.size', (['labels'], {}), '(labels)\n', (12173, 12181), True, 'import tensorflow.compat.v2 as tf\n'), ((12225, 12256), 'tensorflow.compat.v2.gather', 'tf.gather', (['labels', 'node_indices'], {}), '(labels, node_indices)\n', (12234, 12256), True, 'import tensorflow.compat.v2 as tf\n'), ((12411, 12459), 'tensorflow.compat.v2.constant', 'tf.constant', (['[]'], {'shape': '[0, 0, 0]', 'dtype': 'tf.int32'}), '([], shape=[0, 0, 0], dtype=tf.int32)\n', (12422, 12459), True, 'import tensorflow.compat.v2 as tf\n'), ((12473, 12520), 'tensorflow.compat.v2.constant', 'tf.constant', (['[]'], {'shape': '[0, 0]', 'dtype': 'tf.float32'}), '([], shape=[0, 0], dtype=tf.float32)\n', (12484, 12520), True, 'import tensorflow.compat.v2 as tf\n'), ((4933, 4973), 'tensorflow.compat.v2.cast', 'tf.cast', (['edge_features'], {'dtype': 'tf.float32'}), '(edge_features, dtype=tf.float32)\n', (4940, 4973), True, 'import tensorflow.compat.v2 as tf\n'), ((4989, 5026), 'tensorflow.compat.v2.cast', 'tf.cast', (['node_depth'], {'dtype': 'tf.float32'}), '(node_depth, dtype=tf.float32)\n', (4996, 5026), True, 'import 
tensorflow.compat.v2 as tf\n'), ((5138, 5162), 'tensorflow.compat.v2.reshape', 'tf.reshape', (['n_nodes', '[1]'], {}), '(n_nodes, [1])\n', (5148, 5162), True, 'import tensorflow.compat.v2 as tf\n'), ((5610, 5646), 'tensorflow.compat.v2.shape', 'tf.shape', (["state['variable_features']"], {}), "(state['variable_features'])\n", (5618, 5646), True, 'import tensorflow.compat.v2 as tf\n'), ((5925, 5944), 'tensorflow.compat.v2.ones_like', 'tf.ones_like', (['nodes'], {}), '(nodes)\n', (5937, 5944), True, 'import tensorflow.compat.v2 as tf\n'), ((6331, 6372), 'tensorflow.compat.v2.expand_dims', 'tf.expand_dims', (["state['variable_lbs']", '(-1)'], {}), "(state['variable_lbs'], -1)\n", (6345, 6372), True, 'import tensorflow.compat.v2 as tf\n'), ((6380, 6421), 'tensorflow.compat.v2.expand_dims', 'tf.expand_dims', (["state['variable_ubs']", '(-1)'], {}), "(state['variable_ubs'], -1)\n", (6394, 6421), True, 'import tensorflow.compat.v2 as tf\n'), ((8868, 8903), 'tensorflow.compat.v2.expand_dims', 'tf.expand_dims', (['scaled_sol'], {'axis': '(-1)'}), '(scaled_sol, axis=-1)\n', (8882, 8903), True, 'import tensorflow.compat.v2 as tf\n'), ((9510, 9544), 'tensorflow.compat.v2.reshape', 'tf.reshape', (['scaled_biases', '[-1, 1]'], {}), '(scaled_biases, [-1, 1])\n', (9520, 9544), True, 'import tensorflow.compat.v2 as tf\n'), ((10846, 10894), 'tensorflow.compat.v2.sparse.to_dense', 'tf.sparse.to_dense', (["example['variable_features']"], {}), "(example['variable_features'])\n", (10864, 10894), True, 'import tensorflow.compat.v2 as tf\n'), ((10988, 11038), 'tensorflow.compat.v2.sparse.to_dense', 'tf.sparse.to_dense', (["example['constraint_features']"], {}), "(example['constraint_features'])\n", (11006, 11038), True, 'import tensorflow.compat.v2 as tf\n'), ((11125, 11168), 'tensorflow.compat.v2.sparse.to_dense', 'tf.sparse.to_dense', (["example['edge_indices']"], {}), "(example['edge_indices'])\n", (11143, 11168), True, 'import tensorflow.compat.v2 as tf\n'), ((11254, 11298), 'tensorflow.compat.v2.sparse.to_dense', 'tf.sparse.to_dense', (["example['edge_features']"], {}), "(example['edge_features'])\n", (11272, 11298), True, 'import tensorflow.compat.v2 as tf\n'), ((11476, 11501), 'tensorflow.compat.v2.sparse.to_dense', 'tf.sparse.to_dense', (['value'], {}), '(value)\n', (11494, 11501), True, 'import tensorflow.compat.v2 as tf\n'), ((12282, 12302), 'tensorflow.compat.v2.round', 'tf.round', (['int_labels'], {}), '(int_labels)\n', (12290, 12302), True, 'import tensorflow.compat.v2 as tf\n'), ((12339, 12374), 'tensorflow.compat.v2.expand_dims', 'tf.expand_dims', (['int_labels'], {'axis': '(-1)'}), '(int_labels, axis=-1)\n', (12353, 12374), True, 'import tensorflow.compat.v2 as tf\n'), ((12947, 12984), 'tensorflow.compat.v2.data.TFRecordDataset', 'tf.data.TFRecordDataset', (['[input_path]'], {}), '([input_path])\n', (12970, 12984), True, 'import tensorflow.compat.v2 as tf\n'), ((4588, 4624), 'tensorflow.compat.v2.cast', 'tf.cast', (['n_variables'], {'dtype': 'tf.int64'}), '(n_variables, dtype=tf.int64)\n', (4595, 4624), True, 'import tensorflow.compat.v2 as tf\n'), ((4838, 4880), 'tensorflow.compat.v2.reshape', 'tf.reshape', (['nodes', '[-1, node_features_dim]'], {}), '(nodes, [-1, node_features_dim])\n', (4848, 4880), True, 'import tensorflow.compat.v2 as tf\n'), ((5739, 5757), 'tensorflow.compat.v2.range', 'tf.range', (['num_vars'], {}), '(num_vars)\n', (5747, 5757), True, 'import tensorflow.compat.v2 as tf\n'), ((5979, 5996), 'tensorflow.compat.v2.constant', 'tf.constant', (['(-1.0)'], {}), '(-1.0)\n', (5990, 
5996), True, 'import tensorflow.compat.v2 as tf\n'), ((5188, 5220), 'tensorflow.compat.v2.shape', 'tf.shape', (["state['edge_features']"], {}), "(state['edge_features'])\n", (5196, 5220), True, 'import tensorflow.compat.v2 as tf\n'), ((5783, 5807), 'tensorflow.compat.v2.constant', 'tf.constant', (['feature_idx'], {}), '(feature_idx)\n', (5794, 5807), True, 'import tensorflow.compat.v2 as tf\n')]
|
#!/usr/bin/env python3
import sys
import os
import json
import networkx as nx
import math
from networkx.algorithms import approximation as approx
import time
from itertools import groupby as g
from operator import itemgetter
from graphFunctions import *
import csv
import configparser
import logging
#from networkx.readwrite import d3_js
from networkx.readwrite import json_graph
#import mpld3
#mpld3.enable_notebook()
#from mpld3 import plugins
#import matplotlib.pyplot as plt
Config = configparser.ConfigParser()
Config.read("../etc/config.cfg")
logging.basicConfig(filename=Config.get("Debug", "Logfile"),level=logging.INFO, format='%(asctime)s %(message)s')
debug = Config.getboolean("Debug", "DebugInfo")
standardDirectory = Config.get("Path", "StandardDirectory")
filename = ""
lastcomm = ""
outputfile = ""
maxEdgeValue = Config.getint("Graph", "maxEdgeValue");
# Values for Edge creation
createEdgePercent = Config.getint("Graph", "createEdgePercent") # up to a value of 30%: NOT similar
createBlueEdgePercent = Config.getint("Graph", "createBlueEdgePercent") # up to a value of 50%: SOMEWHAT similar
createBluedEdges = True
for argument in sys.argv:
if lastcomm.strip() == "-f":
filename = argument.strip();
if lastcomm.strip() == "-d":
debug = True
if lastcomm.strip() == "-m":
maxEdgeValue = int(argument.strip());
if lastcomm.strip() == "-o":
outputfile = argument.strip();
lastcomm = argument
if lastcomm.strip() == "-f":
filename = argument.strip();
if lastcomm.strip() == "-d":
debug = True
if lastcomm.strip() == "-m":
maxEdgeValue = int(argument.strip());
if lastcomm.strip() == "-o":
outputfile = argument.strip();
logging.debug (" ========")
logging.info (" Starting PS-Clustering Algorithm with Complement on "+filename)
logging.info (" ========")
start_time = time.time()
if filename == "":
logging.error ("No file name given.")
exit()
G=nx.Graph()
logging.info (" ... now reading graph at "+filename)
G = nx.read_gml(filename)
if len(G) == 0:
logging.error (" ... Graph is empty! Exiting!")
exit()
build_time = time.time() - start_time
logging.info (" => Build time: "+str(build_time))
start_time = time.time()
G2 = G.copy()
G2 = nx.complement(G2)
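# Cliques in the complement graph correspond to independent (stable) sets in
# the original graph G, so the greedy merging of complement vertices below
# partitions G into stable sets (stored in the 'clique' attribute).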
nh = getNeighbors(G2)
cliques = []
for key, value in nh.items():
u1 = key
#print (u1)
if u1 not in G2.nodes():
#print (" -> not in G2")
continue
u2 = getSmalestNeighbor(G2,u1)
if u2 == None:
#print (" -> None ")
continue
G2.add_node (str(u1)+"|"+str(u2))
for node in G2[u1]:
if nodes_connected(G2, u2, node):
G2.add_edge(str(u1)+"|"+str(u2), node)
if 'clique' in G2.node[u2]:
G2.node[str(u1)+"|"+str(u2)]['clique']=G2.node[u2]['clique']
else:
G2.node[str(u1)+"|"+str(u2)]['clique']=getMaxClique(G2)+1
G2.remove_node(u1)
G2.remove_node(u2)
for node in G2.nodes():
if 'clique' not in G2.node[node]:
G2.node[node]['clique']=getMaxClique(G2)+1
cl = nx.get_node_attributes(G2,'clique')
for key, value in cl.items():
if "|" in str(key):
nodes = key.split("|")
else:
nodes = [key]
for node in nodes:
if node in G.nodes():
G.node[node]['color']=value
elif int(node) in G.nodes():
G.node[int(node)]['color']=value
else:
print (" Error, node does not exist! ")
#d = nx.coloring.greedy_color(G, strategy=nx.coloring.strategy_largest_first)
d = nx.get_node_attributes(G, 'color')
colorCount = d[max(d, key=lambda key: d[key])]
build_time = time.time() - start_time
logging.info (" => Clique time: "+str(build_time))
start_time = time.time()
# Build the valueGraph and the color list
valueGraph = buildValueGraph (G, d )
outputpathfilename = standardDirectory+outputfile
if os.path.isabs (outputfile):
outputpathfilename = outputfile
logging.debug (" ... saving weightes Value Graph at "+outputpathfilename+'coloredWithValue.gml')
nx.write_gml(valueGraph, outputpathfilename+'coloredWithValue.gml')
# Build a colorlist and sort min first
colorList = []
for i in range(0,colorCount):
countC = countColor (G,i);
colorList.append ( [i, countC] )
colorList = sorted(colorList,key=itemgetter(1))
build_time = time.time() - start_time
logging.info (" => Build weighted graph time: "+str(build_time))
start_time = time.time()
#G = addBlueEdges (G, createEdgePercent, createBlueEdgePercent, createBluedEdges, data)
# For all nodes
countNodesEliminated = 0;
listConnections = []
colorCount = d[max(d, key=lambda key: d[key])]
minYear = 3000
maxYear = 0
for i in range(0,colorCount):
yearc = getClusterYearsList (G,i)
	for year in yearc:
if year[0]==0:
continue
if year[0]>maxYear:
maxYear = year[0]
if year[0]<minYear:
minYear = year[0]
for node in G.nodes():
G.node[node]['year']=int(G.node[node]['year'])
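# For each stable set (color class), a node whose blue edges reach at least
# two *different* stable sets is eliminated (color -1) and counted as a
# connection between those sets in valueGraph, with per-year ("y...") and
# cumulative ("s...") counters on the connecting edge.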
for colorSet in colorList:
# Get a list of nodes with that color
tmpNodeList = list(n for n,d in G.nodes_iter(data=True) if d['color']==colorSet[0])
#print (" Color count before: "+str(countColor(G, colorSet[0])))
# Now iterate over these nodes
for i in range(0,len(tmpNodeList)):
# catch that node
node = tmpNodeList[i]
# check if this node is not already an end-point!
if G.node[node]["end"]==0:
colors = [];
# for all neighbours
for nb in G.neighbors(node):
# is edge blue?
if G[node][nb]["color"] == "blue":
# is node not yet excluded from stable sets?
if (G.node[nb]["color"] >= 0):
# Nodes are not in the same stable sets
if (G.node[nb]["color"] != G.node[node]["color"]):
colors.append (G.node[nb]["color"])
# Now remove all double entries
uniqueList = unique(colors)
# check if "node" has at last two blue neighbours in different stable sets
if len(uniqueList) >= 2:
# Node is excluded from stable sets (color -1)
G.node[node]["color"] = -1;
countNodesEliminated += 1
				# now we see that this node connects all the stable sets reached via blue edges:
for i in range(0,len(uniqueList)) :
for j in range (i+1, len(uniqueList)):
newYear = "y"+str(G.node[node]["year"])
#print (" Year: "+newYear + " from "+ str(G.node[node]["year"]))
						if str(uniqueList[i]-1) in valueGraph.nodes() and str(uniqueList[j]-1) in valueGraph.nodes():
if valueGraph.has_edge(str(uniqueList[i]-1),str(uniqueList[j]-1)):
valueGraph[str(uniqueList[i]-1)][str(uniqueList[j]-1)]["value"]+=1
if "y"+str(G.node[node]["year"]) in valueGraph[str(uniqueList[i]-1)][str(uniqueList[j]-1)]:
									# year already exists
valueGraph[str(uniqueList[i]-1)][str(uniqueList[j]-1)]["y"+str(G.node[node]["year"])] += 1
else:
valueGraph.add_edge (str(uniqueList[i]-1) , str(uniqueList[j]-1), {newYear : 1})
else:
valueGraph.add_edge(str(uniqueList[i]-1),str(uniqueList[j]-1), value=1)
valueGraph.add_edge (str(uniqueList[i]-1) , str(uniqueList[j]-1), {newYear : 1})
for yearsum in range(minYear, G.node[node]["year"]-1):
if "s"+str(yearsum) not in valueGraph[str(uniqueList[i]-1)][str(uniqueList[j]-1)]:
newYear2 = "s"+str(yearsum)
valueGraph.add_edge (str(uniqueList[i]-1) , str(uniqueList[j]-1), {newYear2 : 0})
for yearsum in range(G.node[node]["year"], maxYear):
if "s"+str(yearsum) in valueGraph[str(uniqueList[i]-1)][str(uniqueList[j]-1)]:
valueGraph[str(uniqueList[i]-1)][str(uniqueList[j]-1)]["s"+str(yearsum)] += 1
else:
newYear2 = "s"+str(yearsum)
valueGraph.add_edge (str(uniqueList[i]-1) , str(uniqueList[j]-1), {newYear2 : 1})
#else:
# logging.warn (" Error, nodes do not exist. ")
#valueGraph[str(uniqueList[i]-1)][str(uniqueList[j]-1)]["y"+str(G.node[node]["year"])] += 1
# mark all nodes as end-nodes
for nb in G.neighbors(node):
# is edge blue?
if G[node][nb]["color"] == "blue":
# is node not yet excluded from stable sets?
if (G.node[nb]["color"] >= 0):
G.node[nb]["end"] = 1
#print (" Color count after: "+str(countColor(G, colorSet[0])))
#print (str(valueGraph.number_of_nodes()))
if "-1" in valueGraph.nodes():
valueGraph.remove_node("-1")
if -1 in valueGraph.nodes():
valueGraph.remove_node(-1)
count = 15
for node in valueGraph.nodes():
newValue = countColor (G, int(node))
if newValue == 0:
#print ("Removing node "+str(node)+" with "+str(valueGraph.node[node]["text"]))
valueGraph.remove_node(node)
else:
valueGraph.node[node]["value"]=newValue
terms = getClusterName(G,node,count);
journals = getClusterJournals (G,node,count)
yearc = getClusterYearsList (G,node,count)
valueGraph.node[node]["text"]=terms
valueGraph.node[node]["journal"]=journals
yearc.sort(key=lambda tup: tup[0])
#print (str(yearc))
summe = 0
#print ("New Sum")
#print (str(minYear) +" -- "+str(maxYear))
#print (yearc)
for year in range(minYear, maxYear):
addValue = 0
#print ("Looking for year "+str(year))
for years in yearc:
if years[0]==0:
continue
if years[0] == year:
#print (str(years[0]) +" == "+str(year))
addValue = years[1]
#print (" "+str(year))
valueGraph.node[node]["y"+str(year)] = addValue
valueGraph.node[node]["s"+str(year)] = addValue + summe
summe += addValue
#csvwriter.writerow ([''])
#csvwriter.writerow (["Node", int(node), "Nodes:", newValue])
#csvwriter.writerow (getClusterName (G, int(node), maxCount=-1, separator=";").split(";"))
#csvwriter.writerow ([''])
#nodeList = (n for n in G if G.node[n]['color']==int(node))
#for nodeG in nodeList:
# csvwriter.writerow ([ G.node[nodeG]['title'], G.node[nodeG]['ref'], G.node[nodeG]['uri']])
logging.debug (" ... saving PS Value Graph at "+outputpathfilename+'coloredWithValuePS.gml')
nx.write_gml(valueGraph, outputpathfilename+'coloredWithValuePS.gml')
build_time = time.time() - start_time
logging.debug (" => PS time: "+str(build_time))
start_time = time.time()
ps_time = build_time
#csvwriter.writerows (sorted(oldList.items(), key=operator.itemgetter(1), reverse=True))
logging.debug (" ... saving Network RB at "+outputpathfilename+'network_rb.gml')
# Output after recoloring
nx.write_gml(G, outputpathfilename+'network_rb.gml')
numbers = getMeshTermNumber (G);
#print ( sorted(numbers.items(), key=operator.itemgetter(1)))
# Export
data = json_graph.node_link_data(valueGraph)
with open(outputpathfilename+'coloredWithValuePS.json', 'w') as outfile:
json.dump(data, outfile)
#fig, axs = plt.subplots(1, 1, figsize=(10, 10))
#ax = axs
#pos = None
#mpld3.plugins.connect(fig, NetworkXD3ForceLayout(valueGraph,
# pos,
# ax,
# gravity=.5,
# link_distance=20,
# charge=-600,
# friction=1
# )
# )
if debug:
#logging.debug ("Graph created. Number of nodes: " + str(G.number_of_nodes())+ ", number edges: "+ str(G.number_of_edges()))
#logging.debug (" Blue edges: "+str(bluedEdges))
#print (" Black edges: "+ str(countBlack) + ", Blued edges: "+ str(countBlue) +", lost edged: "+str(countLost))
#logging.debug (" Build time: \t " + str((build_time)))
#logging.debug (" Color time: \t " + str((color_time )))
logging.info (" PS time: \t " + str((ps_time )))
logging.info (" ========")
logging.info (" Farben: \t\t "+str(colorCount))
logging.info (" minMPS: \t\t "+str(valueGraph.number_of_nodes()-1))
#lower = colorCount - math.floor (bluedEdges / 2)
#if lower < 2:
# lower = 2
#logging.debug (" l_b: \t \t\t "+str(lower))
#print (" Cliquenzahl: "+ str(clique) )
logging.info (" Eliminated nodes: \t "+str(countNodesEliminated))
#print (" Clique time: " + str((clique_time - color_time)))
#print (G.edges())
#nx.set_node_attributes(G, 'label', {0: "0", 1: "1"})
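# Summary printed to stdout:
# [PS time, number of colors/stable sets, valueGraph node count, eliminated nodes]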
output = []
output.append (ps_time)
output.append (colorCount)
output.append (valueGraph.number_of_nodes())
output.append (countNodesEliminated)
print (output)
|
[
"json.dump",
"os.path.isabs",
"logging.error",
"logging.debug",
"networkx.write_gml",
"networkx.complement",
"networkx.readwrite.json_graph.node_link_data",
"time.time",
"logging.info",
"networkx.Graph",
"networkx.get_node_attributes",
"networkx.read_gml",
"configparser.ConfigParser",
"operator.itemgetter"
] |
[((491, 518), 'configparser.ConfigParser', 'configparser.ConfigParser', ([], {}), '()\n', (516, 518), False, 'import configparser\n'), ((1669, 1695), 'logging.debug', 'logging.debug', (['""" ========"""'], {}), "(' ========')\n", (1682, 1695), False, 'import logging\n'), ((1697, 1782), 'logging.info', 'logging.info', (["(' Starting PS-Clustering Algorithm with Complement on ' + filename)"], {}), "(' Starting PS-Clustering Algorithm with Complement on ' +\n filename)\n", (1709, 1782), False, 'import logging\n'), ((1778, 1803), 'logging.info', 'logging.info', (['""" ========"""'], {}), "(' ========')\n", (1790, 1803), False, 'import logging\n'), ((1818, 1829), 'time.time', 'time.time', ([], {}), '()\n', (1827, 1829), False, 'import time\n'), ((1900, 1910), 'networkx.Graph', 'nx.Graph', ([], {}), '()\n', (1908, 1910), True, 'import networkx as nx\n'), ((1911, 1964), 'logging.info', 'logging.info', (["(' ... now reading graph at ' + filename)"], {}), "(' ... now reading graph at ' + filename)\n", (1923, 1964), False, 'import logging\n'), ((1968, 1989), 'networkx.read_gml', 'nx.read_gml', (['filename'], {}), '(filename)\n', (1979, 1989), True, 'import networkx as nx\n'), ((2166, 2177), 'time.time', 'time.time', ([], {}), '()\n', (2175, 2177), False, 'import time\n'), ((2198, 2215), 'networkx.complement', 'nx.complement', (['G2'], {}), '(G2)\n', (2211, 2215), True, 'import networkx as nx\n'), ((2894, 2930), 'networkx.get_node_attributes', 'nx.get_node_attributes', (['G2', '"""clique"""'], {}), "(G2, 'clique')\n", (2916, 2930), True, 'import networkx as nx\n'), ((3306, 3340), 'networkx.get_node_attributes', 'nx.get_node_attributes', (['G', '"""color"""'], {}), "(G, 'color')\n", (3328, 3340), True, 'import networkx as nx\n'), ((3491, 3502), 'time.time', 'time.time', ([], {}), '()\n', (3500, 3502), False, 'import time\n'), ((3518, 3529), 'time.time', 'time.time', ([], {}), '()\n', (3527, 3529), False, 'import time\n'), ((3655, 3680), 'os.path.isabs', 'os.path.isabs', (['outputfile'], {}), '(outputfile)\n', (3668, 3680), False, 'import os\n'), ((3716, 3820), 'logging.debug', 'logging.debug', (["(' ... saving weightes Value Graph at ' + outputpathfilename +\n 'coloredWithValue.gml')"], {}), "(' ... saving weightes Value Graph at ' + outputpathfilename +\n 'coloredWithValue.gml')\n", (3729, 3820), False, 'import logging\n'), ((3814, 3883), 'networkx.write_gml', 'nx.write_gml', (['valueGraph', "(outputpathfilename + 'coloredWithValue.gml')"], {}), "(valueGraph, outputpathfilename + 'coloredWithValue.gml')\n", (3826, 3883), True, 'import networkx as nx\n'), ((4195, 4206), 'time.time', 'time.time', ([], {}), '()\n', (4204, 4206), False, 'import time\n'), ((9562, 9662), 'logging.debug', 'logging.debug', (["(' ... saving PS Value Graph at ' + outputpathfilename +\n 'coloredWithValuePS.gml')"], {}), "(' ... saving PS Value Graph at ' + outputpathfilename +\n 'coloredWithValuePS.gml')\n", (9575, 9662), False, 'import logging\n'), ((9656, 9727), 'networkx.write_gml', 'nx.write_gml', (['valueGraph', "(outputpathfilename + 'coloredWithValuePS.gml')"], {}), "(valueGraph, outputpathfilename + 'coloredWithValuePS.gml')\n", (9668, 9727), True, 'import networkx as nx\n'), ((9826, 9837), 'time.time', 'time.time', ([], {}), '()\n', (9835, 9837), False, 'import time\n'), ((9952, 10041), 'logging.debug', 'logging.debug', (["(' ... saving Network RB at ' + outputpathfilename + 'network_rb.gml')"], {}), "(' ... 
saving Network RB at ' + outputpathfilename +\n 'network_rb.gml')\n", (9965, 10041), False, 'import logging\n'), ((10060, 10114), 'networkx.write_gml', 'nx.write_gml', (['G', "(outputpathfilename + 'network_rb.gml')"], {}), "(G, outputpathfilename + 'network_rb.gml')\n", (10072, 10114), True, 'import networkx as nx\n'), ((10229, 10266), 'networkx.readwrite.json_graph.node_link_data', 'json_graph.node_link_data', (['valueGraph'], {}), '(valueGraph)\n', (10254, 10266), False, 'from networkx.readwrite import json_graph\n'), ((1851, 1887), 'logging.error', 'logging.error', (['"""No file name given."""'], {}), "('No file name given.')\n", (1864, 1887), False, 'import logging\n'), ((2008, 2054), 'logging.error', 'logging.error', (['""" ... Graph is empty! Exiting!"""'], {}), "(' ... Graph is empty! Exiting!')\n", (2021, 2054), False, 'import logging\n'), ((2077, 2088), 'time.time', 'time.time', ([], {}), '()\n', (2086, 2088), False, 'import time\n'), ((3402, 3413), 'time.time', 'time.time', ([], {}), '()\n', (3411, 3413), False, 'import time\n'), ((4092, 4103), 'time.time', 'time.time', ([], {}), '()\n', (4101, 4103), False, 'import time\n'), ((9740, 9751), 'time.time', 'time.time', ([], {}), '()\n', (9749, 9751), False, 'import time\n'), ((10344, 10368), 'json.dump', 'json.dump', (['data', 'outfile'], {}), '(data, outfile)\n', (10353, 10368), False, 'import json\n'), ((11421, 11446), 'logging.info', 'logging.info', (['""" ========"""'], {}), "(' ========')\n", (11433, 11446), False, 'import logging\n'), ((4063, 4076), 'operator.itemgetter', 'itemgetter', (['(1)'], {}), '(1)\n', (4073, 4076), False, 'from operator import itemgetter\n')]
|
# -*- coding: utf-8 -*-
from fabric.api import env, local, puts
import fabric.contrib.project as project
import os
import sys
PY3 = sys.version_info > (3,)
# Local path configuration (can be absolute or relative to fabfile)
env.deploy_path = 'output'
DEPLOY_PATH = env.deploy_path
# Remote server configuration
env.user = 'www-data'
env.hosts = ['chtoes.li']
path = {
'dev': '/var/www/dev.chtoes.li/public/',
'prod': '/var/www/chtoes.li/public/',
'ci': '/var/www/ci.chtoes.li/public/'
}
TEMPLATE = 'Title:\n\
Date: {{date}}\n\
Slug: {{slug}}\n\
Category: {{ category }}\n\
Source: http://what-if.xkcd.com/{{num}}/\n\
SourceNum: {{num}}\n\
SourceTitle: {{title}}\n\
Formulas: False\n\
Description: \n\
Image: https://chtoes.li/uploads/{{num}}-{{slug}}/front.png\n\
\n\
\n'
def new(num, title=None, category='What If?', overwrite='no'):
import datetime
if title is None:
if PY3:
from urllib.request import urlopen
else:
from urllib2 import urlopen
from bs4 import BeautifulSoup
html = urlopen('http://what-if.xkcd.com/{}/'.format(num))
title = BeautifulSoup(html, 'lxml').title.string
slug = slugify(title)
CATEGORIES = {
'What If?': 'what-if',
        'Новости проекта': 'news',  # "Project news"
        'Прочее': 'other',  # "Miscellaneous"
}
category_slug = CATEGORIES[category]
now = datetime.datetime.now()
post_date = now.strftime('%Y-%m-%d')
params = dict(
date=post_date,
title=title,
slug=slug,
num=num,
category=category
)
out_file = 'content/{}/{:03d}-{}.md'.format(category_slug, int(num), slug)
local("mkdir -p '{}' || true".format(os.path.dirname(out_file)))
if not os.path.exists(out_file) or overwrite.lower() == 'yes':
render(TEMPLATE, out_file, **params)
else:
print("{} already exists. Pass 'overwrite=yes' to destroy it."
.format(out_file))
def slugify(text):
import re
normalized = ''.join([c.lower() if c.isalnum() else '-'
for c in text])
no_repetitions = re.sub(r'--+', '-', normalized)
clean_start = re.sub(r'^-+', '', no_repetitions)
clean_end = re.sub(r'-+$', '', clean_start)
return clean_end
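
# Example (illustrative): slugify("Hello, World!") returns "hello-world".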
def render(template, destination, **kwargs):
from jinja2 import Template
template = Template(TEMPLATE)
text = template.render(**kwargs)
with open(destination, 'w') as output:
puts('Rendering to {}'.format(destination))
if PY3:
output.write(text)
else:
output.write(text.encode('utf-8'))
def clean():
if os.path.isdir(DEPLOY_PATH):
local('rm -rf {deploy_path}'.format(**env))
local('mkdir {deploy_path}'.format(**env))
def build(environment):
local('pelican -s pelicanconf-{}.py'.format(environment))
def rebuild(environment):
clean()
build(environment)
def regenerate(environment):
local('pelican -r -s pelicanconf-{}.py'.format(environment))
def serve():
    # Serve with the same major Python version as the one executing this script.
if PY3:
local('cd {deploy_path} && python3 -m http.server'.format(**env))
else:
local('cd {deploy_path} && python2 -m SimpleHTTPServer'.format(**env))
def reserve(environment):
build(environment)
serve()
def preview(environment):
local('pelican -s pelicanconf-{}.py'.format(environment))
def publish(environment):
local('pelican -s pelicanconf-{}.py'.format(environment))
project.rsync_project(
remote_dir=path[environment],
exclude='.DS_Store',
local_dir=DEPLOY_PATH.rstrip('/') + '/',
delete=True
)
|
[
"jinja2.Template",
"os.path.isdir",
"os.path.dirname",
"os.path.exists",
"bs4.BeautifulSoup",
"datetime.datetime.now",
"re.sub"
] |
[((1373, 1396), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (1394, 1396), False, 'import datetime\n'), ((2103, 2133), 're.sub', 're.sub', (['"""--+"""', '"""-"""', 'normalized'], {}), "('--+', '-', normalized)\n", (2109, 2133), False, 'import re\n'), ((2153, 2186), 're.sub', 're.sub', (['"""^-+"""', '""""""', 'no_repetitions'], {}), "('^-+', '', no_repetitions)\n", (2159, 2186), False, 'import re\n'), ((2204, 2234), 're.sub', 're.sub', (['"""-+$"""', '""""""', 'clean_start'], {}), "('-+$', '', clean_start)\n", (2210, 2234), False, 'import re\n'), ((2351, 2369), 'jinja2.Template', 'Template', (['TEMPLATE'], {}), '(TEMPLATE)\n', (2359, 2369), False, 'from jinja2 import Template\n'), ((2632, 2658), 'os.path.isdir', 'os.path.isdir', (['DEPLOY_PATH'], {}), '(DEPLOY_PATH)\n', (2645, 2658), False, 'import os\n'), ((1692, 1717), 'os.path.dirname', 'os.path.dirname', (['out_file'], {}), '(out_file)\n', (1707, 1717), False, 'import os\n'), ((1731, 1755), 'os.path.exists', 'os.path.exists', (['out_file'], {}), '(out_file)\n', (1745, 1755), False, 'import os\n'), ((1134, 1161), 'bs4.BeautifulSoup', 'BeautifulSoup', (['html', '"""lxml"""'], {}), "(html, 'lxml')\n", (1147, 1161), False, 'from bs4 import BeautifulSoup\n')]
|
# array module example
import sample
import array
# PETSc.Sys.Print, used for all output below, comes from petsc4py.
from petsc4py import PETSc
a = array.array('d', [1, -3, 4, 7, 2, 0])
PETSc.Sys.Print(a)
sample.clip(a, 1, 4, a)
PETSc.Sys.Print(a)
# numpy example
import numpy
b = numpy.random.uniform(-10, 10, size=1000000)
PETSc.Sys.Print(b)
c = numpy.zeros_like(b)
PETSc.Sys.Print(c)
sample.clip(b, -5, 5, c)
PETSc.Sys.Print(c)
PETSc.Sys.Print(min(c))
PETSc.Sys.Print(max(c))
# Timing test
from timeit import timeit
PETSc.Sys.Print('numpy.clip')
PETSc.Sys.Print(timeit('numpy.clip(b,-5,5,c)', 'from __main__ import b,c,numpy', number=1000))
PETSc.Sys.Print('sample.clip')
PETSc.Sys.Print(timeit('sample.clip(b,-5,5,c)', 'from __main__ import b,c,sample', number=1000))
PETSc.Sys.Print('sample.clip_fast')
PETSc.Sys.Print(timeit('sample.clip_fast(b,-5,5,c)', 'from __main__ import b,c,sample', number=1000))
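# The three timings above compare numpy's built-in clip against the compiled
# sample module's clip and clip_fast on the same 1,000,000-element array b.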
# 2D test
d = numpy.random.uniform(-10, 10, size=(1000, 1000))
PETSc.Sys.Print(d)
sample.clip2d(d, -5, 5, d)
PETSc.Sys.Print(d)
|
[
"numpy.random.uniform",
"numpy.zeros_like",
"sample.clip2d",
"array.array",
"timeit.timeit",
"sample.clip"
] |
[((59, 96), 'array.array', 'array.array', (['"""d"""', '[1, -3, 4, 7, 2, 0]'], {}), "('d', [1, -3, 4, 7, 2, 0])\n", (70, 96), False, 'import array\n'), ((118, 141), 'sample.clip', 'sample.clip', (['a', '(1)', '(4)', 'a'], {}), '(a, 1, 4, a)\n', (129, 141), False, 'import sample\n'), ((202, 245), 'numpy.random.uniform', 'numpy.random.uniform', (['(-10)', '(10)'], {'size': '(1000000)'}), '(-10, 10, size=1000000)\n', (222, 245), False, 'import numpy\n'), ((271, 290), 'numpy.zeros_like', 'numpy.zeros_like', (['b'], {}), '(b)\n', (287, 290), False, 'import numpy\n'), ((312, 336), 'sample.clip', 'sample.clip', (['b', '(-5)', '(5)', 'c'], {}), '(b, -5, 5, c)\n', (323, 336), False, 'import sample\n'), ((870, 918), 'numpy.random.uniform', 'numpy.random.uniform', (['(-10)', '(10)'], {'size': '(1000, 1000)'}), '(-10, 10, size=(1000, 1000))\n', (890, 918), False, 'import numpy\n'), ((940, 966), 'sample.clip2d', 'sample.clip2d', (['d', '(-5)', '(5)', 'd'], {}), '(d, -5, 5, d)\n', (953, 966), False, 'import sample\n'), ((501, 578), 'timeit.timeit', 'timeit', (['"""numpy.clip(b,-5,5,c)"""', '"""from __main__ import b,c,numpy"""'], {'number': '(1000)'}), "('numpy.clip(b,-5,5,c)', 'from __main__ import b,c,numpy', number=1000)\n", (507, 578), False, 'from timeit import timeit\n'), ((629, 708), 'timeit.timeit', 'timeit', (['"""sample.clip(b,-5,5,c)"""', '"""from __main__ import b,c,sample"""'], {'number': '(1000)'}), "('sample.clip(b,-5,5,c)', 'from __main__ import b,c,sample', number=1000)\n", (635, 708), False, 'from timeit import timeit\n'), ((766, 854), 'timeit.timeit', 'timeit', (['"""sample.clip_fast(b,-5,5,c)"""', '"""from __main__ import b,c,sample"""'], {'number': '(1000)'}), "('sample.clip_fast(b,-5,5,c)', 'from __main__ import b,c,sample',\n number=1000)\n", (772, 854), False, 'from timeit import timeit\n')]
|
import os
import random
import tweepy
import redis
CONSUMER_KEY = os.environ.get('TWITTER_CONSUMER_KEY')
CONSUMER_SECRET = os.environ.get('TWITTER_CONSUMER_SECRET')
ACCESS_TOKEN = os.environ.get('TWITTER_ACCESS_TOKEN')
ACCESS_TOKEN_SECRET = os.environ.get('TWITTER_ACCESS_TOKEN_SECRET')
r = redis.from_url(os.environ.get("REDIS_URL"))
phrases = ['Here you go',
'A link coming right up',
'Done',
"I know what you're thinking. Zelda was the princess though"]
auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
auth.set_access_token(ACCESS_TOKEN, ACCESS_TOKEN_SECRET)
twitter = tweepy.API(auth)
def tweet_back(tweet):
username = tweet.user.screen_name
img = 'img/{}.png'.format(random.choice(range(10)))
message = '{}, @{}!'.format(random.choice(phrases), username)
print("Replying to {}'s tweet with ID {}".format(username, tweet.id))
twitter.update_with_media(filename=img, status=message, in_reply_to_status_id=tweet.id)
if __name__ == '__main__':
    tweets = twitter.search('"link please"')
    random_tweet = next((tweet for tweet in tweets if not tweet.retweeted), None)
    if random_tweet is not None:  # every result may be a retweet
        tweet_back(random_tweet)
replies = twitter.search('@alinkplease link please', since_id=r.get('last'))
if replies:
r.set('last', replies[0].id)
for tweet in replies:
tweet_back(tweet)
|
[
"os.environ.get",
"tweepy.OAuthHandler",
"random.choice",
"tweepy.API"
] |
[((67, 105), 'os.environ.get', 'os.environ.get', (['"""TWITTER_CONSUMER_KEY"""'], {}), "('TWITTER_CONSUMER_KEY')\n", (81, 105), False, 'import os\n'), ((124, 165), 'os.environ.get', 'os.environ.get', (['"""TWITTER_CONSUMER_SECRET"""'], {}), "('TWITTER_CONSUMER_SECRET')\n", (138, 165), False, 'import os\n'), ((181, 219), 'os.environ.get', 'os.environ.get', (['"""TWITTER_ACCESS_TOKEN"""'], {}), "('TWITTER_ACCESS_TOKEN')\n", (195, 219), False, 'import os\n'), ((242, 287), 'os.environ.get', 'os.environ.get', (['"""TWITTER_ACCESS_TOKEN_SECRET"""'], {}), "('TWITTER_ACCESS_TOKEN_SECRET')\n", (256, 287), False, 'import os\n'), ((501, 551), 'tweepy.OAuthHandler', 'tweepy.OAuthHandler', (['CONSUMER_KEY', 'CONSUMER_SECRET'], {}), '(CONSUMER_KEY, CONSUMER_SECRET)\n', (520, 551), False, 'import tweepy\n'), ((619, 635), 'tweepy.API', 'tweepy.API', (['auth'], {}), '(auth)\n', (629, 635), False, 'import tweepy\n'), ((308, 335), 'os.environ.get', 'os.environ.get', (['"""REDIS_URL"""'], {}), "('REDIS_URL')\n", (322, 335), False, 'import os\n'), ((787, 809), 'random.choice', 'random.choice', (['phrases'], {}), '(phrases)\n', (800, 809), False, 'import random\n')]
|
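Each row above pairs a code string with an apis list and an extract_api column. The entries appear to follow the layout ((call_start, call_end), qualified_name, call_text, (args_start, args_end), name, name, (args, kwargs), flag, import_stmt) -- that layout is inferred from the rows themselves, not from a documented schema, and the spans index the dataset's raw code string, which may not match the code as rendered here (blank lines seem to have been stripped). A minimal checker sketch under those assumptions:

import ast

def verify_extract_api(code, extract_api_str):
    """Check each entry's first span against its recorded call text.

    Assumes the inferred layout: entry[0] is a (start, end) character
    span into ``code`` and entry[2] is the verbatim call text.
    """
    for entry in ast.literal_eval(extract_api_str):
        (start, end), call_text = entry[0], entry[2]
        if code[start:end] != call_text:
            raise ValueError(
                "span {}:{} reads {!r}, expected {!r}".format(
                    start, end, code[start:end], call_text))
    return True

Since the extract_api strings contain only tuples, lists, dicts, strings, numbers, and booleans, ast.literal_eval can parse them safely without executing code.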
# -*- coding: utf-8 -*-
# *****************************************************************************
# NICOS, the Networked Instrument Control System of the MLZ
# Copyright (c) 2009-2021 by the NICOS contributors (see AUTHORS)
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation; either version 2 of the License, or (at your option) any later
# version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# Module authors:
# <NAME> <<EMAIL>>
#
# *****************************************************************************
"""Session class used with the NICOS poller."""

from nicos.core import POLLER, Device, DeviceAlias, Override
from nicos.core.sessions.simple import NoninteractiveSession
from nicos.devices.cacheclient import CacheClient
from nicos.devices.generic.cache import CacheReader
from nicos.protocols.cache import OP_TELL, cache_load


class PollerCacheClient(CacheClient):
    """Special cache client for the poller.

    Callbacks are normally only called for cache updates that are sent to
    us, but not updates we send to the cache.

    For the poller, we want callbacks (that trigger polling of superdevices
    among other things) to be called even if the update comes from this
    process (i.e. another thread calling another device).
    """

    remote_callbacks = False  # no "normal" callbacks on remote updates

    # but use _propagate to call callbacks always
    def _propagate(self, args):
        time, key, op, value = args
        if op == OP_TELL and key in self._callbacks and value:
            self._call_callbacks(key, cache_load(value), time)


class PollerCacheReader(CacheReader):

    parameter_overrides = {
        'unit': Override(mandatory=False),
    }

    def _initParam(self, param, paraminfo=None):
        # This method is called on init when a parameter is not in the cache.
        # In this case we don't want to do anything here since we don't want
        # to overwrite parameters shared by CacheReader and the real device
        # in the cache with the default values -- the poller shouldn't need
        # the parameters anyway.
        pass

    def __getattr__(self, param):
        # called when a parameter of the underlying device is required
        entry = self._cache.get(self, param, Ellipsis)
        if entry is not Ellipsis:
            return entry
        raise AttributeError


class PollerSession(NoninteractiveSession):

    cache_class = PollerCacheClient
    sessiontype = POLLER

    @classmethod
    def _notify_systemd(cls, appname, msg):
        # can only notify systemd from the main poller process
        if appname == 'poller':
            NoninteractiveSession._notify_systemd(appname, msg)

    # pylint: disable=dangerous-default-value
    def getDevice(self, dev, cls=None, source=None,
                  replace_classes=[(DeviceAlias, PollerCacheReader, {})]):
        """Override device creation for the poller.

        With the "alias device" mechanism, aliases can point to any device in
        the currently loaded setups. This leads to a problem with the poller,
        since the poller loads each setup in a different process, and in the
        process that polls the DeviceAlias, the pointee can be missing.

        Therefore, we replace devices that are not found by a CacheReader, in
        the hope that the actual pointee is contained in another setup that is
        polled by another process, and we can get current values for the device
        via the CacheReader.
        """
        return NoninteractiveSession.getDevice(self, dev, Device, source,
                                               replace_classes=replace_classes)

    def _deviceNotFound(self, devname, source=None):
        """Override "device not found" to be able to create PollerCacheReaders
        for devices noted as such in extended['poller_cache_reader'].
        """
        for setupname in self.loaded_setups:
            ext = self._setup_info[setupname]['extended']
            if devname in ext.get('poller_cache_reader', ()):
                return PollerCacheReader(devname)
        NoninteractiveSession._deviceNotFound(self, devname, source)

    # do not send action messages to the cache
    def beginActionScope(self, what):
        pass

    def endActionScope(self):
        pass

    def action(self, what):
        pass
|
[
"nicos.core.sessions.simple.NoninteractiveSession._notify_systemd",
"nicos.protocols.cache.cache_load",
"nicos.core.sessions.simple.NoninteractiveSession._deviceNotFound",
"nicos.core.Override",
"nicos.core.sessions.simple.NoninteractiveSession.getDevice"
] |
[((2206, 2231), 'nicos.core.Override', 'Override', ([], {'mandatory': '(False)'}), '(mandatory=False)\n', (2214, 2231), False, 'from nicos.core import POLLER, Device, DeviceAlias, Override\n'), ((4047, 4143), 'nicos.core.sessions.simple.NoninteractiveSession.getDevice', 'NoninteractiveSession.getDevice', (['self', 'dev', 'Device', 'source'], {'replace_classes': 'replace_classes'}), '(self, dev, Device, source, replace_classes=\n replace_classes)\n', (4078, 4143), False, 'from nicos.core.sessions.simple import NoninteractiveSession\n'), ((4624, 4684), 'nicos.core.sessions.simple.NoninteractiveSession._deviceNotFound', 'NoninteractiveSession._deviceNotFound', (['self', 'devname', 'source'], {}), '(self, devname, source)\n', (4661, 4684), False, 'from nicos.core.sessions.simple import NoninteractiveSession\n'), ((3168, 3219), 'nicos.core.sessions.simple.NoninteractiveSession._notify_systemd', 'NoninteractiveSession._notify_systemd', (['appname', 'msg'], {}), '(appname, msg)\n', (3205, 3219), False, 'from nicos.core.sessions.simple import NoninteractiveSession\n'), ((2097, 2114), 'nicos.protocols.cache.cache_load', 'cache_load', (['value'], {}), '(value)\n', (2107, 2114), False, 'from nicos.protocols.cache import OP_TELL, cache_load\n')]
|
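The # pylint: disable=dangerous-default-value on getDevice silences the warning about the mutable replace_classes=[...] default. That list is only read there, so the pattern is harmless, but for context this is why pylint flags it (a standalone illustration, not NICOS code):

def append_to(item, bucket=[]):  # dangerous-default-value: one shared list
    bucket.append(item)
    return bucket

print(append_to(1))  # [1]
print(append_to(2))  # [1, 2] -- the default still holds the earlier item

# The usual idiom: default to None and build a fresh object per call.
def append_to_safe(item, bucket=None):
    if bucket is None:
        bucket = []
    bucket.append(item)
    return bucket

The default list is created once, when the function is defined, and shared across all calls that omit the argument; NICOS never mutates its default, which is why disabling the warning is safe there.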
# -*- coding: utf-8 -*-
# Generated by Django 1.9.2 on 2016-06-17 15:31
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

    dependencies = [
('wagtailcore', '0023_alter_page_revision_on_delete_behaviour'),
('ossuo', '0034_auto_20160617_1609'),
    ]

    operations = [
migrations.CreateModel(
name='GlobalSettings',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('HeadQrAddressTitle', models.URLField()),
('HeadQrAddress', models.CharField(help_text='Full address', max_length=255)),
('HeadQrAddressLink', models.URLField(help_text='Link to google maps', max_length=255)),
('HeadQrAddressSVG', models.CharField(help_text='Paste SVG code here', max_length=9000)),
('site', models.OneToOneField(editable=False, on_delete=django.db.models.deletion.CASCADE, to='wagtailcore.Site')),
],
options={
'verbose_name': 'Global Settings',
},
),
migrations.AlterField(
model_name='homepagehero',
name='colour',
field=models.CharField(help_text='Hex ref colour of link and background gradient, use #23b0b0 for default blue', max_length=255),
),
]
|
[
"django.db.models.CharField",
"django.db.models.URLField",
"django.db.models.OneToOneField",
"django.db.models.AutoField"
] |
[((1324, 1456), 'django.db.models.CharField', 'models.CharField', ([], {'help_text': '"""Hex ref colour of link and background gradient, use #23b0b0 for default blue"""', 'max_length': '(255)'}), "(help_text=\n 'Hex ref colour of link and background gradient, use #23b0b0 for default blue'\n , max_length=255)\n", (1340, 1456), False, 'from django.db import migrations, models\n'), ((506, 599), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (522, 599), False, 'from django.db import migrations, models\n'), ((637, 654), 'django.db.models.URLField', 'models.URLField', ([], {}), '()\n', (652, 654), False, 'from django.db import migrations, models\n'), ((691, 749), 'django.db.models.CharField', 'models.CharField', ([], {'help_text': '"""Full address"""', 'max_length': '(255)'}), "(help_text='Full address', max_length=255)\n", (707, 749), False, 'from django.db import migrations, models\n'), ((790, 854), 'django.db.models.URLField', 'models.URLField', ([], {'help_text': '"""Link to google maps"""', 'max_length': '(255)'}), "(help_text='Link to google maps', max_length=255)\n", (805, 854), False, 'from django.db import migrations, models\n'), ((894, 960), 'django.db.models.CharField', 'models.CharField', ([], {'help_text': '"""Paste SVG code here"""', 'max_length': '(9000)'}), "(help_text='Paste SVG code here', max_length=9000)\n", (910, 960), False, 'from django.db import migrations, models\n'), ((988, 1097), 'django.db.models.OneToOneField', 'models.OneToOneField', ([], {'editable': '(False)', 'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""wagtailcore.Site"""'}), "(editable=False, on_delete=django.db.models.deletion.\n CASCADE, to='wagtailcore.Site')\n", (1008, 1097), False, 'from django.db import migrations, models\n')]
|
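For orientation, a model definition that would generate roughly the CreateModel operation above (a reconstruction, not the project's actual source; the Site import path depends on the Wagtail version in use):

from django.db import models
from wagtail.wagtailcore.models import Site  # path varies by Wagtail version

class GlobalSettings(models.Model):
    HeadQrAddressTitle = models.URLField()
    HeadQrAddress = models.CharField(help_text='Full address', max_length=255)
    HeadQrAddressLink = models.URLField(help_text='Link to google maps', max_length=255)
    HeadQrAddressSVG = models.CharField(help_text='Paste SVG code here', max_length=9000)
    site = models.OneToOneField(Site, editable=False, on_delete=models.CASCADE)

    class Meta:
        verbose_name = 'Global Settings'

The non-editable OneToOneField to wagtailcore.Site is the usual per-site settings pattern: exactly one GlobalSettings row can exist for each Site.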
import os
from pathlib import PurePath  # explicit import instead of the wildcard

p = PurePath('/etc')
# os.fspath() (PEP 519) returns the plain-string filesystem representation
# of any path-like object -- here the str '/etc'.
print(os.fspath(p))
|
[
"os.fspath"
] |
[((59, 71), 'os.fspath', 'os.fspath', (['p'], {}), '(p)\n', (68, 71), False, 'import os\n')]
|
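The pathlib row is small enough to check its offsets by hand. Note that the spans index the dataset's original code string (with the wildcard import), not the cleaned-up listing above:

code = ("import os\n"
        "from pathlib import *\n"
        "p = PurePath('/etc')\n"
        "print(os.fspath(p))\n")
# (59, 71): the full-call span recorded in the extract_api entry above.
assert code[59:71] == 'os.fspath(p)'
# (68, 71): the span covering the parenthesized argument list.
assert code[68:71] == '(p)'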
# Generated by Django 3.2.7 on 2021-12-13 17:40
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

    dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('awardsapp', '0003_auto_20211212_1417'),
    ]

    operations = [
migrations.CreateModel(
name='Rating',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('design', models.IntegerField(blank=True, default=0, null=True)),
('usability', models.IntegerField(blank=True, default=0, null=True)),
('content', models.IntegerField(blank=True, default=0, null=True)),
('average', models.IntegerField(blank=True, default=0, null=True)),
('project', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='awardsapp.project')),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
]
|
[
"django.db.models.BigAutoField",
"django.db.models.IntegerField",
"django.db.migrations.swappable_dependency",
"django.db.models.ForeignKey"
] |
[((227, 284), 'django.db.migrations.swappable_dependency', 'migrations.swappable_dependency', (['settings.AUTH_USER_MODEL'], {}), '(settings.AUTH_USER_MODEL)\n', (258, 284), False, 'from django.db import migrations, models\n'), ((465, 561), 'django.db.models.BigAutoField', 'models.BigAutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (484, 561), False, 'from django.db import migrations, models\n'), ((587, 640), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'blank': '(True)', 'default': '(0)', 'null': '(True)'}), '(blank=True, default=0, null=True)\n', (606, 640), False, 'from django.db import migrations, models\n'), ((673, 726), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'blank': '(True)', 'default': '(0)', 'null': '(True)'}), '(blank=True, default=0, null=True)\n', (692, 726), False, 'from django.db import migrations, models\n'), ((757, 810), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'blank': '(True)', 'default': '(0)', 'null': '(True)'}), '(blank=True, default=0, null=True)\n', (776, 810), False, 'from django.db import migrations, models\n'), ((841, 894), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'blank': '(True)', 'default': '(0)', 'null': '(True)'}), '(blank=True, default=0, null=True)\n', (860, 894), False, 'from django.db import migrations, models\n'), ((925, 1016), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""awardsapp.project"""'}), "(on_delete=django.db.models.deletion.CASCADE, to=\n 'awardsapp.project')\n", (942, 1016), False, 'from django.db import migrations, models\n'), ((1039, 1135), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'to': 'settings.AUTH_USER_MODEL'}), '(on_delete=django.db.models.deletion.CASCADE, to=settings.\n AUTH_USER_MODEL)\n', (1056, 1135), False, 'from django.db import migrations, models\n')]
|
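The Rating model stores an average alongside its three component scores, presumably a cached mean. A hypothetical helper (the name and rounding behaviour are assumptions, not taken from the project):

def compute_average(design, usability, content):
    # Round to the nearest whole number so it fits the IntegerField.
    return round((design + usability + content) / 3)

print(compute_average(8, 7, 9))  # -> 8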