| code (string, length 22 to 1.05M) | apis (list, length 1 to 3.31k) | extract_api (string, length 75 to 3.25M) |
|---|---|---|
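Each row pairs a code sample with the fully qualified APIs it uses and one positional `extract_api` record per call site. A minimal sketch of unpacking one record follows; the field meanings (call span, qualified name, local call text, args/kwargs reprs, argument text, argument span, alias flag, import statement) are inferred from the rows below, not a documented schema:

import ast

# first record of the first row below, copied verbatim; '\n' escapes kept
raw = ("((216, 224), 'googleSentiment.Google', 'Google', ([], {}), '()\\n', "
       "(222, 224), False, 'from googleSentiment import Google\\n')")
(call_span, qualified_name, local_name, (args, kwargs),
 arg_text, arg_span, aliased, import_stmt) = ast.literal_eval(raw)
print(qualified_name, call_span)  # googleSentiment.Google (216, 224)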
import openpyxl
import sys
from googleSentiment import Google
def main():
fileName = 'Ranking30.xlsx'
# Select which lines of the input sentences you wish to use
input_selection = [1, 18976]
google = Google()
    try:
        file = openpyxl.load_workbook(fileName)
    except Exception:
        file = openpyxl.Workbook()
        # a fresh workbook's default sheet is named 'Sheet'; rename it so the
        # 'Sheet1' lookup below succeeds
        file.active.title = 'Sheet1'
    sheet = file['Sheet1']
for line_counter in range(input_selection[0], input_selection[1]):
try:
sentence = sheet.cell(row=line_counter, column=24).value
sentence = sentence.encode()
            # language code for the input text: "zh" for Chinese, "es" for
            # Spanish, "en" for English
            languageIn = "es"
google.analyze(sentence, languageIn)
            sheet.cell(row=line_counter, column=25).value = google.getScore()
            sheet.cell(row=line_counter, column=26).value = google.getMagnitude()
except Exception as exception:
            print(exception)
continue
# Save the file and notify the user
file.save(fileName)
print("google analyze finish")
if __name__ == "__main__":
main()
|
[
"googleSentiment.Google",
"openpyxl.load_workbook",
"openpyxl.Workbook"
] |
[((216, 224), 'googleSentiment.Google', 'Google', ([], {}), '()\n', (222, 224), False, 'from googleSentiment import Google\n'), ((249, 281), 'openpyxl.load_workbook', 'openpyxl.load_workbook', (['fileName'], {}), '(fileName)\n', (271, 281), False, 'import openpyxl\n'), ((309, 328), 'openpyxl.Workbook', 'openpyxl.Workbook', ([], {}), '()\n', (326, 328), False, 'import openpyxl\n')]
|
# -*- coding: utf-8 -*-
# Generated by Hand
from __future__ import unicode_literals, print_function
from sentry.utils.query import RangeQuerySetWrapperWithProgressBar
from django.db import migrations
# SearchVisitor.numeric_keys + SearchVisitor.date_keys
OPERATOR_KEYS = set(
[
"project_id",
"project.id",
"issue.id",
"device.battery_level",
"device.charging",
"device.online",
"device.simulator",
"error.handled",
"stack.colno",
"stack.in_app",
"stack.lineno",
"stack.stack_level",
"transaction.duration",
"apdex",
"impact",
"p75",
"p95",
"p99",
"error_rate",
"start",
"end",
"first_seen",
"last_seen",
"time",
"timestamp",
"transaction.start_time",
"transaction.end_time",
]
)
# Aggregates are now fields
def convert_field(fieldname, unique, reverse):
if fieldname == "count":
fieldname = u"count()"
elif unique:
fieldname = u"count_unique({})".format(fieldname)
fieldname = u"-{}".format(fieldname) if reverse else fieldname
return fieldname
def prepare_value(value):
value = value.replace("%", "*")
if " " in value and not value.startswith('"'):
value = u'"{}"'.format(value)
return value
def convert(DiscoverSavedQuery, DiscoverSavedQueryProject, saved_query, name_extra=" (migrated from legacy discover)"):
""" Create a v2 query from a v1 query"""
if saved_query.version == 2:
# nothing to do! Already v2 :)
return saved_query
updated_query = {
u"environment": [],
u"fields": saved_query.query.get('fields', []),
u"orderby": u"",
u"query": [], # Will become a string later via join
}
if "range" in saved_query.query:
updated_query["range"] = saved_query.query["range"]
elif "start" in saved_query.query and "end" in saved_query.query:
updated_query["start"] = saved_query.query["start"]
updated_query["end"] = saved_query.query["end"]
else:
updated_query["range"] = "14d"
for aggregate in saved_query.query.get("aggregations", []):
if aggregate[0] == "uniq":
field = convert_field(aggregate[1], True, False)
else:
field = convert_field(aggregate[0], False, False)
if field:
updated_query["fields"].append(field)
# Order by
orderby = saved_query.query.get('orderby', "")
unique = reverse = False
if orderby.startswith('-'):
reverse = True
orderby = orderby[1:]
if orderby.startswith('uniq_'):
unique = True
orderby = orderby[5:].replace('_', '.')
field = convert_field(orderby, unique, reverse)
if field:
updated_query['orderby'] = field
if reverse:
field = field[1:]
if field not in updated_query["fields"]:
updated_query["fields"].append(field)
# Conditions become a query now
for condition in saved_query.query.get("conditions", []):
column, operator, value = condition
if column in ['contexts.key']:
column = "tags[contexts.key]"
if column == "environment" and operator == "=":
updated_query['environment'].append(value.strip('"'))
elif operator == 'IS NOT NULL':
updated_query["query"].append(u"has:{}".format(column))
elif operator == 'IS NULL':
updated_query["query"].append(u"!has:{}".format(column))
elif column in OPERATOR_KEYS:
updated_query["query"].append(u"{}:{}{}".format(
column,
operator if operator != '=' else '',
value
))
elif operator in ['LIKE', '=']:
updated_query["query"].append(u"{}:{}".format(column, prepare_value(value)))
elif operator in ['NOT LIKE', '!=']:
updated_query["query"].append(u"!{}:{}".format(column, prepare_value(value)))
updated_query["query"] = ' '.join(updated_query["query"])
# Create the version 2 query
new_query = DiscoverSavedQuery.objects.create(
organization=saved_query.organization,
name=saved_query.name + name_extra,
query=updated_query,
version=2,
)
# Set project_ids
saved_query_project_ids = DiscoverSavedQueryProject.objects.filter(
discover_saved_query=saved_query
).values_list("project", flat=True)
# This is DiscoverSavedQueryProject.set_projects
DiscoverSavedQueryProject.objects.filter(discover_saved_query=new_query).exclude(
project__in=saved_query_project_ids
).delete()
existing_project_ids = DiscoverSavedQueryProject.objects.filter(
discover_saved_query=new_query
).values_list("project", flat=True)
new_project_ids = list(set(saved_query_project_ids) - set(existing_project_ids))
DiscoverSavedQueryProject.objects.bulk_create(
[
DiscoverSavedQueryProject(project_id=project_id, discover_saved_query=new_query)
for project_id in new_project_ids
]
)
return new_query
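# Hypothetical before/after sketch of what convert() produces for one v1 query
# (values invented for illustration):
#   v1 saved_query.query:
#       {'fields': ['title'], 'range': '14d',
#        'aggregations': [['uniq', 'user.email', 'uniq_user_email']],
#        'conditions': [['environment', '=', 'prod']],
#        'orderby': '-uniq_user_email'}
#   resulting v2 query:
#       {'environment': ['prod'],
#        'fields': ['title', 'count_unique(user.email)'],
#        'orderby': '-count_unique(user.email)',
#        'query': '', 'range': '14d'}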
def migrate_v1_queries(apps, schema_editor):
"""
Creates v2 versions of existing v1 queries
"""
DiscoverSavedQuery = apps.get_model("sentry", "DiscoverSavedQuery")
DiscoverSavedQueryProject = apps.get_model("sentry", "DiscoverSavedQueryProject")
""" Seq Scan on sentry_discoversavedquery
(cost=0.00..102.86 rows=1601 width=284)
(actual time=0.027..1.158 rows=1275 loops=1)
Filter: (version = 1)
Rows Removed by Filter: 69
Planning time: 0.929 ms
Execution time: 1.296 ms
"""
queryset = DiscoverSavedQuery.objects.filter(version=1)
for query in RangeQuerySetWrapperWithProgressBar(queryset):
convert(DiscoverSavedQuery, DiscoverSavedQueryProject, query)
class Migration(migrations.Migration):
# This flag is used to mark that a migration shouldn't be automatically run in
# production. We set this to True for operations that we think are risky and want
# someone from ops to run manually and monitor.
# General advice is that if in doubt, mark your migration as `is_dangerous`.
# Some things you should always mark as dangerous:
# - Adding indexes to large tables. These indexes should be created concurrently,
# unfortunately we can't run migrations outside of a transaction until Django
# 1.10. So until then these should be run manually.
# - Large data migrations. Typically we want these to be run manually by ops so that
# they can be monitored. Since data migrations will now hold a transaction open
# this is even more important.
# - Adding columns to highly active tables, even ones that are NULL.
is_dangerous = False
atomic = False
dependencies = [
("sentry", "0028_user_reports"),
]
operations = [
migrations.RunPython(migrate_v1_queries, reverse_code=migrations.RunPython.noop),
]
|
[
"django.db.migrations.RunPython",
"sentry.utils.query.RangeQuerySetWrapperWithProgressBar"
] |
[((5828, 5873), 'sentry.utils.query.RangeQuerySetWrapperWithProgressBar', 'RangeQuerySetWrapperWithProgressBar', (['queryset'], {}), '(queryset)\n', (5863, 5873), False, 'from sentry.utils.query import RangeQuerySetWrapperWithProgressBar\n'), ((6996, 7081), 'django.db.migrations.RunPython', 'migrations.RunPython', (['migrate_v1_queries'], {'reverse_code': 'migrations.RunPython.noop'}), '(migrate_v1_queries, reverse_code=migrations.RunPython.noop\n )\n', (7016, 7081), False, 'from django.db import migrations\n')]
|
import pandas as pd
import cx_Oracle
def query(code):
conn = cx_Oracle.connect('SOC_READ', 'soc_read', 'ossam-cluster-scan.robi.com.bd:1721/RBPB.robi.com.bd')
print(conn)
qry1 = """Select * from (select distinct Summary AlarmText,(Case when Summary like '%2G%' then '2G' when
Summary like '%3G%' then '3G' else '4G' end) as Technology,CUSTOMATTR15 as SITECODE,FIRSTOCCURRENCE StartTime,
ROUND((Sysdate-FIRSTOCCURRENCE)*24*60,2) DurationMIn,CLEARTIMESTAMP EndTime,CUSTOMATTR26 CRNumber,TTRequestTime, TTSequence, CUSTOMATTR23 as CI from alerts_status
where FirstOccurrence between TO_DATE(TO_CHAR(SYSDATE - 7, 'YYYYMMDD') || '0000', 'YYYYMMDDHH24MI') and TO_DATE(TO_CHAR(SYSDATE, 'YYYYMMDD') || '2359', 'YYYYMMDDHH24MI')
and X733EventType = 100 and agent != 'Total Site Down'--and CUSTOMATTR15 != 'UNKNOWN'
and Severity!= 0 and CustomAttr27 in (0,1) and Manager <> 'TSD Automation')t where t.Technology IN ('2G','3G','4G') and SITECODE like '%"""
qry2 = qry1 + code + "%'"
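    # NOTE: interpolating `code` into the SQL string is injection-prone; a bind
    # variable (e.g. pd.read_sql(qry, conn, params=...)) would be safer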
try:
df = pd.read_sql(qry2, con=conn)
print('try success')
conn.close()
    except Exception:
        # retry once on a fresh connection if the first query fails
        connx = cx_Oracle.connect('SOC_READ', 'soc_read', 'ossam-cluster-scan.robi.com.bd:1721/RBPB.robi.com.bd')
        df = pd.read_sql(qry2, con=connx)
        print('Except trigger')
        connx.close()
print(df)
rows = df.shape[0]
heap = code + ":"
if rows != 0:
for i in range(0, len(df)):
tech = df.iloc[i]['TECHNOLOGY']
tm = df.iloc[i]['STARTTIME']
if '2G' in tech:
heap = heap + '\n' + "2G: Down, " + "Downtime: " + str(tm)
if '3G' in tech:
heap = heap + '\n' + "3G: Down, " + "Downtime: " + str(tm)
if '4G' in tech:
heap = heap + '\n' + "4G: Down, " + "Downtime: " + str(tm)
# print(heap)
else:
        return heap + '\nAll technologies are up'
return heap
|
[
"cx_Oracle.connect",
"pandas.read_sql"
] |
[((66, 167), 'cx_Oracle.connect', 'cx_Oracle.connect', (['"""SOC_READ"""', '"""soc_read"""', '"""ossam-cluster-scan.robi.com.bd:1721/RBPB.robi.com.bd"""'], {}), "('SOC_READ', 'soc_read',\n 'ossam-cluster-scan.robi.com.bd:1721/RBPB.robi.com.bd')\n", (83, 167), False, 'import cx_Oracle\n'), ((1033, 1060), 'pandas.read_sql', 'pd.read_sql', (['qry2'], {'con': 'conn'}), '(qry2, con=conn)\n', (1044, 1060), True, 'import pandas as pd\n'), ((1139, 1240), 'cx_Oracle.connect', 'cx_Oracle.connect', (['"""SOC_READ"""', '"""soc_read"""', '"""ossam-cluster-scan.robi.com.bd:1721/RBPB.robi.com.bd"""'], {}), "('SOC_READ', 'soc_read',\n 'ossam-cluster-scan.robi.com.bd:1721/RBPB.robi.com.bd')\n", (1156, 1240), False, 'import cx_Oracle\n'), ((1250, 1278), 'pandas.read_sql', 'pd.read_sql', (['qry2'], {'con': 'connx'}), '(qry2, con=connx)\n', (1261, 1278), True, 'import pandas as pd\n')]
|
#!/usr/bin/python
"""
To use, please type in:
python concatenate.py
The script will only look at files that are within
folders that are one level below the directory supplied
to the script. For example, if no argument is given to
the script, script will parse all folders within the
current working directory as such:
./output_folder1/output_file1
./output_folder1/output_file2
./output_folder2/output_file1
./output_folder2/output_file2
./output_folder_n/output_file_n
"""
import pandas as pd
import sys
import re
import os
raw_path = os.path.join("data", "raw")
def main():
# combine all files in all subdirectories
combineFiles(raw_path)
# add headers, indices, remove tuple parentheses
df = pd.read_csv(
os.path.join(raw_path, "combinedFile.csv"), delimiter=",", quotechar='"'
)
headerLabels = [
"url",
"iphone",
"samsunggalaxy",
"sonyxperia",
"nokialumina",
"htcphone",
"ios",
"googleandroid",
"iphonecampos",
"samsungcampos",
"sonycampos",
"nokiacampos",
"htccampos",
"iphonecamneg",
"samsungcamneg",
"sonycamneg",
"nokiacamneg",
"htccamneg",
"iphonecamunc",
"samsungcamunc",
"sonycamunc",
"nokiacamunc",
"htccamunc",
"iphonedispos",
"samsungdispos",
"sonydispos",
"nokiadispos",
"htcdispos",
"iphonedisneg",
"samsungdisneg",
"sonydisneg",
"nokiadisneg",
"htcdisneg",
"iphonedisunc",
"samsungdisunc",
"sonydisunc",
"nokiadisunc",
"htcdisunc",
"iphoneperpos",
"samsungperpos",
"sonyperpos",
"nokiaperpos",
"htcperpos",
"iphoneperneg",
"samsungperneg",
"sonyperneg",
"nokiaperneg",
"htcperneg",
"iphoneperunc",
"samsungperunc",
"sonyperunc",
"nokiaperunc",
"htcperunc",
"iosperpos",
"googleperpos",
"iosperneg",
"googleperneg",
"iosperunc",
"googleperunc",
]
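    # the labels appear to follow a <device><aspect><sentiment> pattern,
    # e.g. "iphonecampos" = iPhone camera positive mentions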
df.columns = headerLabels
df.index.name = "id"
# output factor and url files
df.to_csv(
os.path.join(raw_path, "concatenated_websites.csv"),
columns=headerLabels[:1],
quotechar='"',
sep=",",
header=True,
)
df.to_csv(
os.path.join(raw_path, "concatenated_factors.csv"),
columns=headerLabels[1:],
quotechar='"',
sep=",",
header=True,
)
# cleanup
os.remove(os.path.join(raw_path, "combinedFile.csv"))
print("Successfully processed " + str(fileCount) + " files")
sys.exit()
def combineFiles(file):
outfile = open(os.path.join(raw_path, "combinedFile.csv"), "w+")
global fileCount
fileCount = 0
    # lines containing "http" are reducer output rows worth keeping
    httpRe = re.compile(r"http")
for dirname, dirnames, filenames in os.walk(file):
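        # os.walk recurses, but with the one-level layout described in the
        # module docstring only the top-level iteration has subdirectories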
# For each sub folder
for subdirname in dirnames:
subdirpath = os.path.join(dirname, subdirname)
for fileName in os.listdir(subdirpath):
fileCount += 1
print("Processing " + fileName + "...")
                with open(os.path.join(subdirpath, fileName)) as infile:
for line in infile:
# make sure we're reading reducer output files
if len(httpRe.findall(line)) > 0:
outfile.write(line)
    # flush and close the combined file so main() can read it back immediately
    outfile.close()
    return None
if __name__ == "__main__":
main()
|
[
"os.walk",
"os.path.join",
"os.listdir",
"sys.exit",
"re.compile"
] |
[((567, 594), 'os.path.join', 'os.path.join', (['"""data"""', '"""raw"""'], {}), "('data', 'raw')\n", (579, 594), False, 'import os\n'), ((2790, 2800), 'sys.exit', 'sys.exit', ([], {}), '()\n', (2798, 2800), False, 'import sys\n'), ((2948, 2971), 're.compile', 're.compile', (['""".*?[http]"""'], {}), "('.*?[http]')\n", (2958, 2971), False, 'import re\n'), ((3013, 3026), 'os.walk', 'os.walk', (['file'], {}), '(file)\n', (3020, 3026), False, 'import os\n'), ((766, 808), 'os.path.join', 'os.path.join', (['raw_path', '"""combinedFile.csv"""'], {}), "(raw_path, 'combinedFile.csv')\n", (778, 808), False, 'import os\n'), ((2318, 2369), 'os.path.join', 'os.path.join', (['raw_path', '"""concatenated_websites.csv"""'], {}), "(raw_path, 'concatenated_websites.csv')\n", (2330, 2369), False, 'import os\n'), ((2495, 2545), 'os.path.join', 'os.path.join', (['raw_path', '"""concatenated_factors.csv"""'], {}), "(raw_path, 'concatenated_factors.csv')\n", (2507, 2545), False, 'import os\n'), ((2677, 2719), 'os.path.join', 'os.path.join', (['raw_path', '"""combinedFile.csv"""'], {}), "(raw_path, 'combinedFile.csv')\n", (2689, 2719), False, 'import os\n'), ((2846, 2888), 'os.path.join', 'os.path.join', (['raw_path', '"""combinedFile.csv"""'], {}), "(raw_path, 'combinedFile.csv')\n", (2858, 2888), False, 'import os\n'), ((3119, 3152), 'os.path.join', 'os.path.join', (['dirname', 'subdirname'], {}), '(dirname, subdirname)\n', (3131, 3152), False, 'import os\n'), ((3181, 3203), 'os.listdir', 'os.listdir', (['subdirpath'], {}), '(subdirpath)\n', (3191, 3203), False, 'import os\n')]
|
from TexGen.Core import *
import numpy as np
from os import path
def export_weave_vtu(filename, weave, domain, max_dim_nvox, round_vox_up=True, export_orientation=True):
""" Exporting weave to vtu, to be read by pumapy
:param filename: filepath and name
:type filename: string
:param weave: weave object, as defined in TexGen
:type weave: CTextile or child class of CTextile
:param domain: domain size object, as defined in TexGen
:type domain: CDomainPlanes
:param max_dim_nvox: number of voxels to add in the largest domain dimension
:type max_dim_nvox: int
:param round_vox_up: for the shorter dimensions, round number of voxels up (for +/-1 vox)
:type round_vox_up: bool
:param export_orientation: specify whether to export orientation
:type export_orientation: bool
:return: filename of weave exported (input filename + dimensions)
:rtype: string
"""
if not isinstance(domain, CDomainPlanes):
raise Exception("Domain needs to be of CDomainPlanes type.")
if not isinstance(filename, str):
raise Exception("Filename has to be a string.")
if not path.exists(path.split(filename)[0]):
raise Exception("Directory " + path.split(filename)[0] + " not found.")
min_bounds = XYZ()
max_bounds = XYZ()
domain.GetBoxLimits(min_bounds, max_bounds)
weave.AssignDomain(CDomainPlanes(min_bounds, max_bounds))
lengths = np.array([max_bounds.x - min_bounds.x, max_bounds.y - min_bounds.y, max_bounds.z - min_bounds.z])
max_len = np.max(lengths)
mask = np.zeros(3, dtype=bool)
mask[lengths == max_len] = True
voxel_length = max_len / float(max_dim_nvox)
nvox = np.zeros(3, dtype=int)
nvox[mask] = max_dim_nvox
nvox[~mask] = (lengths[~mask] / voxel_length).astype(int) # truncates
rem = np.zeros(3, dtype=float)
rem[~mask] = lengths[~mask] - voxel_length * nvox[~mask]
if round_vox_up:
rem[~mask] = voxel_length - rem[~mask]
max_bounds = XYZ(max_bounds.x + rem[0],
max_bounds.y + rem[1],
max_bounds.z + rem[2])
nvox[~mask] += 1
else:
max_bounds = XYZ(max_bounds.x - rem[0], max_bounds.y - rem[1], max_bounds.z - rem[2])
weave.AssignDomain(CDomainPlanes(min_bounds, max_bounds))
mesh = CRectangularVoxelMesh()
print("Exporting " + filename + ".vtu ... ", end='')
filename += "_" + str(nvox[0]) + "_" + str(nvox[1]) + "_" + str(nvox[2])
mesh.SaveVoxelMesh(weave, filename, int(nvox[0]), int(nvox[1]), int(nvox[2]), False, export_orientation,
MATERIAL_CONTINUUM, 0, VTU_EXPORT)
print("Done")
return filename
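# Hypothetical usage sketch: CTextileWeave2D and the dimensions below are
# invented for illustration; only export_weave_vtu above is from this module.
# weave = CTextileWeave2D(4, 4, 5.0, 1.0)
# domain = CDomainPlanes(XYZ(0, 0, 0), XYZ(20.0, 20.0, 2.0))
# out = export_weave_vtu('./output/weave', weave, domain, max_dim_nvox=100)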
|
[
"numpy.zeros",
"numpy.max",
"os.path.split",
"numpy.array"
] |
[((1491, 1593), 'numpy.array', 'np.array', (['[max_bounds.x - min_bounds.x, max_bounds.y - min_bounds.y, max_bounds.z -\n min_bounds.z]'], {}), '([max_bounds.x - min_bounds.x, max_bounds.y - min_bounds.y, \n max_bounds.z - min_bounds.z])\n', (1499, 1593), True, 'import numpy as np\n'), ((1603, 1618), 'numpy.max', 'np.max', (['lengths'], {}), '(lengths)\n', (1609, 1618), True, 'import numpy as np\n'), ((1631, 1654), 'numpy.zeros', 'np.zeros', (['(3)'], {'dtype': 'bool'}), '(3, dtype=bool)\n', (1639, 1654), True, 'import numpy as np\n'), ((1753, 1775), 'numpy.zeros', 'np.zeros', (['(3)'], {'dtype': 'int'}), '(3, dtype=int)\n', (1761, 1775), True, 'import numpy as np\n'), ((1892, 1916), 'numpy.zeros', 'np.zeros', (['(3)'], {'dtype': 'float'}), '(3, dtype=float)\n', (1900, 1916), True, 'import numpy as np\n'), ((1212, 1232), 'os.path.split', 'path.split', (['filename'], {}), '(filename)\n', (1222, 1232), False, 'from os import path\n'), ((1277, 1297), 'os.path.split', 'path.split', (['filename'], {}), '(filename)\n', (1287, 1297), False, 'from os import path\n')]
|
# Generated by Django 3.1.1 on 2020-09-11 04:47
from django.conf import settings
from django.db import migrations
class Migration(migrations.Migration):
atomic = False
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('auth', '0012_alter_user_first_name_max_length'),
('admin', '0003_logentry_add_action_flag_choices'),
('core', '0005_auto_20200911_0435'),
]
operations = [
migrations.RenameModel(
old_name='Costumer',
new_name='Customer',
),
migrations.RenameField(
model_name='cart',
old_name='user',
new_name='customer',
),
]
|
[
"django.db.migrations.swappable_dependency",
"django.db.migrations.RenameField",
"django.db.migrations.RenameModel"
] |
[((205, 262), 'django.db.migrations.swappable_dependency', 'migrations.swappable_dependency', (['settings.AUTH_USER_MODEL'], {}), '(settings.AUTH_USER_MODEL)\n', (236, 262), False, 'from django.db import migrations\n'), ((462, 526), 'django.db.migrations.RenameModel', 'migrations.RenameModel', ([], {'old_name': '"""Costumer"""', 'new_name': '"""Customer"""'}), "(old_name='Costumer', new_name='Customer')\n", (484, 526), False, 'from django.db import migrations\n'), ((571, 650), 'django.db.migrations.RenameField', 'migrations.RenameField', ([], {'model_name': '"""cart"""', 'old_name': '"""user"""', 'new_name': '"""customer"""'}), "(model_name='cart', old_name='user', new_name='customer')\n", (593, 650), False, 'from django.db import migrations\n')]
|
import requests
import _main
import _pk as p
import _x86 as j
import os
import logging
class _Service:
    # Instrumentation
    def _apk_cache_i(self):
        try:
            logging.debug("Download instrumentation apk {}".format(p._apk()))
            url = 'https://github.com/snakx/x86-uiautomator2-server/raw/main/bin/{}'.format(p._apk())
            logging.debug(url)
            r = requests.get(url, allow_redirects=True)
        except Exception as e:
            logging.error(e)
            return False
        try:
            with open(p._apk(), 'wb') as f:
                f.write(r.content)
            logging.debug('Download instrumentation apk successfully completed')
            return True
        except Exception as e:
            logging.error(e)
            return False
    # Release
    def _apk_cache_r(self):
        try:
            logging.debug("Download release apk {}".format(p._apk2()))
            url = 'https://github.com/snakx/x86-uiautomator2-server/raw/main/bin/{}'.format(p._apk2())
            logging.debug(url)
            r = requests.get(url, allow_redirects=True)
        except Exception as e:
            logging.error(e)
            return False
        try:
            with open(p._apk2(), 'wb') as f:
                f.write(r.content)
            logging.debug('Download release apk successfully completed')
            return True
        except Exception as e:
            logging.error(e)
            return False
    # Jar
    def _jar_cache(self):
        try:
            logging.debug("Download x86 jar {}".format(j._jar()))
            url = 'https://github.com/snakx/x86-uiautomator2-server/raw/main/out/artifacts/x86_uiautomator2_server_jar/{}'.format(j._jar())
            logging.debug(url)
            r = requests.get(url, allow_redirects=True)
        except Exception as e:
            logging.error(e)
            return False
        try:
            with open(j._jar(), 'wb') as f:
                f.write(r.content)
            logging.debug('Download x86 jar successfully completed')
            return True
        except Exception as e:
            logging.error(e)
            return False
    # vbs
    def _vbs_cache(self):
        try:
            logging.debug("Download vbs script uiautomator2.vbs")
            url = 'https://github.com/snakx/x86-uiautomator2-server/raw/main/bin/uiautomator2.vbs'
            logging.debug(url)
            r = requests.get(url, allow_redirects=True)
        except Exception as e:
            logging.error(e)
            return False
        try:
            with open('uiautomator2.vbs', 'wb') as f:
                f.write(r.content)
            logging.debug('Download vbs script successfully completed')
            return True
        except Exception as e:
            logging.error(e)
            return False
    # bat
    def _bat_cache(self):
        try:
            logging.debug("Download shell script uiautomator2.bat")
            url = 'https://github.com/snakx/x86-uiautomator2-server/raw/main/bin/uiautomator2.bat'
            logging.debug(url)
            r = requests.get(url, allow_redirects=True)
        except Exception as e:
            logging.error(e)
            return False
        try:
            with open('uiautomator2.bat', 'wb') as f:
                f.write(r.content)
            logging.debug('Download shell script successfully completed')
            return True
        except Exception as e:
            logging.error(e)
            return False
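# Hypothetical usage sketch (method names from the class above):
# service = _Service()
# ok = service._apk_cache_i() and service._jar_cache()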
|
[
"_pk._apk",
"logging.error",
"logging.debug",
"_pk._apk2",
"requests.get",
"_x86._jar"
] |
[((366, 384), 'logging.debug', 'logging.debug', (['url'], {}), '(url)\n', (379, 384), False, 'import logging\n'), ((401, 440), 'requests.get', 'requests.get', (['url'], {'allow_redirects': '(True)'}), '(url, allow_redirects=True)\n', (413, 440), False, 'import requests\n'), ((613, 681), 'logging.debug', 'logging.debug', (['"""Download instrumentation apk successfully completed"""'], {}), "('Download instrumentation apk successfully completed')\n", (626, 681), False, 'import logging\n'), ((1045, 1063), 'logging.debug', 'logging.debug', (['url'], {}), '(url)\n', (1058, 1063), False, 'import logging\n'), ((1080, 1119), 'requests.get', 'requests.get', (['url'], {'allow_redirects': '(True)'}), '(url, allow_redirects=True)\n', (1092, 1119), False, 'import requests\n'), ((1293, 1353), 'logging.debug', 'logging.debug', (['"""Download release apk successfully completed"""'], {}), "('Download release apk successfully completed')\n", (1306, 1353), False, 'import logging\n'), ((1744, 1762), 'logging.debug', 'logging.debug', (['url'], {}), '(url)\n', (1757, 1762), False, 'import logging\n'), ((1779, 1818), 'requests.get', 'requests.get', (['url'], {'allow_redirects': '(True)'}), '(url, allow_redirects=True)\n', (1791, 1818), False, 'import requests\n'), ((1991, 2047), 'logging.debug', 'logging.debug', (['"""Download x86 jar successfully completed"""'], {}), "('Download x86 jar successfully completed')\n", (2004, 2047), False, 'import logging\n'), ((2400, 2418), 'logging.debug', 'logging.debug', (['url'], {}), '(url)\n', (2413, 2418), False, 'import logging\n'), ((2435, 2474), 'requests.get', 'requests.get', (['url'], {'allow_redirects': '(True)'}), '(url, allow_redirects=True)\n', (2447, 2474), False, 'import requests\n'), ((2657, 2716), 'logging.debug', 'logging.debug', (['"""Download vbs script successfully completed"""'], {}), "('Download vbs script successfully completed')\n", (2670, 2716), False, 'import logging\n'), ((3071, 3089), 'logging.debug', 'logging.debug', (['url'], {}), '(url)\n', (3084, 3089), False, 'import logging\n'), ((3106, 3145), 'requests.get', 'requests.get', (['url'], {'allow_redirects': '(True)'}), '(url, allow_redirects=True)\n', (3118, 3145), False, 'import requests\n'), ((3328, 3389), 'logging.debug', 'logging.debug', (['"""Download shell script successfully completed"""'], {}), "('Download shell script successfully completed')\n", (3341, 3389), False, 'import logging\n'), ((344, 352), '_pk._apk', 'p._apk', ([], {}), '()\n', (350, 352), True, 'import _pk as p\n'), ((484, 512), 'logging.error', 'logging.error', (['e.__context__'], {}), '(e.__context__)\n', (497, 512), False, 'import logging\n'), ((749, 777), 'logging.error', 'logging.error', (['e.__context__'], {}), '(e.__context__)\n', (762, 777), False, 'import logging\n'), ((1022, 1031), '_pk._apk2', 'p._apk2', ([], {}), '()\n', (1029, 1031), True, 'import _pk as p\n'), ((1163, 1191), 'logging.error', 'logging.error', (['e.__context__'], {}), '(e.__context__)\n', (1176, 1191), False, 'import logging\n'), ((1421, 1449), 'logging.error', 'logging.error', (['e.__context__'], {}), '(e.__context__)\n', (1434, 1449), False, 'import logging\n'), ((1722, 1730), '_x86._jar', 'j._jar', ([], {}), '()\n', (1728, 1730), True, 'import _x86 as j\n'), ((1862, 1890), 'logging.error', 'logging.error', (['e.__context__'], {}), '(e.__context__)\n', (1875, 1890), False, 'import logging\n'), ((2115, 2143), 'logging.error', 'logging.error', (['e.__context__'], {}), '(e.__context__)\n', (2128, 2143), False, 'import logging\n'), ((2518, 2546), 'logging.error', 'logging.error', (['e.__context__'], {}), '(e.__context__)\n', (2531, 2546), False, 'import logging\n'), ((2784, 2812), 'logging.error', 'logging.error', (['e.__context__'], {}), '(e.__context__)\n', (2797, 2812), False, 'import logging\n'), ((3189, 3217), 'logging.error', 'logging.error', (['e.__context__'], {}), '(e.__context__)\n', (3202, 3217), False, 'import logging\n'), ((3457, 3485), 'logging.error', 'logging.error', (['e.__context__'], {}), '(e.__context__)\n', (3470, 3485), False, 'import logging\n'), ((241, 249), '_pk._apk', 'p._apk', ([], {}), '()\n', (247, 249), True, 'import _pk as p\n'), ((918, 927), '_pk._apk2', 'p._apk2', ([], {}), '()\n', (925, 927), True, 'import _pk as p\n'), ((1580, 1589), '_pk._apk2', 'p._apk2', ([], {}), '()\n', (1587, 1589), True, 'import _pk as p\n'), ((2277, 2286), '_pk._apk2', 'p._apk2', ([], {}), '()\n', (2284, 2286), True, 'import _pk as p\n'), ((2948, 2957), '_pk._apk2', 'p._apk2', ([], {}), '()\n', (2955, 2957), True, 'import _pk as p\n'), ((568, 576), '_pk._apk', 'p._apk', ([], {}), '()\n', (574, 576), True, 'import _pk as p\n'), ((1247, 1256), '_pk._apk2', 'p._apk2', ([], {}), '()\n', (1254, 1256), True, 'import _pk as p\n'), ((1946, 1954), '_x86._jar', 'j._jar', ([], {}), '()\n', (1952, 1954), True, 'import _x86 as j\n')]
|
from django.db import models
class IMG(models.Model):
    img = models.ImageField(upload_to='img')  # upload_to names the folder images are stored in; it is created automatically on first upload
name = models.CharField(max_length=100)
|
[
"django.db.models.ImageField",
"django.db.models.CharField"
] |
[((66, 100), 'django.db.models.ImageField', 'models.ImageField', ([], {'upload_to': '"""img"""'}), "(upload_to='img')\n", (83, 100), False, 'from django.db import models\n'), ((148, 180), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)'}), '(max_length=100)\n', (164, 180), False, 'from django.db import models\n')]
|
import os
import os.path
import logging
import subprocess
from .base import (
SourceControl,
STATE_NEW, STATE_MODIFIED, STATE_COMMITTED)
try:
import pygit2
SUPPORTS_GIT = True
except ImportError:
SUPPORTS_GIT = False
logger = logging.getLogger(__name__)
class GitBaseSourceControl(SourceControl):
def __init__(self, root):
SourceControl.__init__(self)
self.root = root
def start(self, wiki):
# Make a Git repo if there's none.
if not os.path.isdir(os.path.join(self.root, '.git')):
logger.info("Creating Git repository at: " + self.root)
self._initRepo(self.root)
# Create a `.gitignore` file there's none.
ignore_path = os.path.join(self.root, '.gitignore')
if not os.path.isfile(ignore_path):
logger.info("Creating `.gitignore` file.")
with open(ignore_path, 'w') as f:
f.write('.wiki')
self._add(ignore_path)
self._commit('Created .gitignore.', [ignore_path])
def getSpecialFilenames(self):
specials = ['.git', '.gitignore']
return [os.path.join(self.root, d) for d in specials]
def getState(self, path):
return self._status(path)
def _run(self, cmd, *args, **kwargs):
exe = [self.git]
if 'norepo' not in kwargs or not kwargs['norepo']:
            # args are passed without a shell, so literal quotes would break the path
            exe.append('--git-dir=%s' % self.root)
exe.append(cmd)
exe += args
logger.debug("Running Git: " + str(exe))
return subprocess.check_output(exe)
class GitLibSourceControl(GitBaseSourceControl):
def __init__(self, root):
if not SUPPORTS_GIT:
raise Exception(
"Can't support Git because pygit2 is not available.")
GitBaseSourceControl.__init__(self, root)
    def start(self, wiki):
        GitBaseSourceControl.start(self, wiki)
        self.repo = pygit2.Repository(self.root)
def _initRepo(self, path):
pygit2.init_repository(path, False)
def _add(self, paths):
pass
def _commit(self, message, paths):
pass
def _status(self, path):
flags = self.repo.status_file(self._getRepoPath(path))
if flags == pygit2.GIT_STATUS_CURRENT:
return STATE_COMMITTED
if (flags & pygit2.GIT_STATUS_WT_MODIFIED or
flags & pygit2.GIT_STATUS_INDEX_MODIFIED):
return STATE_MODIFIED
if (flags & pygit2.GIT_STATUS_WT_NEW or
flags & pygit2.GIT_STATUS_INDEX_NEW):
return STATE_NEW
raise Exception("Unsupported status flag combination: %s" % flags)
def _getRepoPath(self, path):
return os.path.relpath(path, self.root).replace('\\', '/')
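# Hypothetical usage sketch (the wiki argument is whatever object the caller
# passes; invented here):
# scm = GitLibSourceControl('/path/to/wiki')
# scm.start(wiki)
# print(scm.getState('/path/to/wiki/page.md'))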
|
[
"pygit2.Repository",
"subprocess.check_output",
"os.path.isfile",
"os.path.relpath",
"os.path.join",
"logging.getLogger",
"pygit2.init_repository"
] |
[((257, 284), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (274, 284), False, 'import logging\n'), ((736, 773), 'os.path.join', 'os.path.join', (['self.root', '""".gitignore"""'], {}), "(self.root, '.gitignore')\n", (748, 773), False, 'import os\n'), ((1543, 1571), 'subprocess.check_output', 'subprocess.check_output', (['exe'], {}), '(exe)\n', (1566, 1571), False, 'import subprocess\n'), ((1936, 1964), 'pygit2.Repository', 'pygit2.Repository', (['self.root'], {}), '(self.root)\n', (1953, 1964), False, 'import pygit2\n'), ((2005, 2040), 'pygit2.init_repository', 'pygit2.init_repository', (['path', '(False)'], {}), '(path, False)\n', (2027, 2040), False, 'import pygit2\n'), ((789, 816), 'os.path.isfile', 'os.path.isfile', (['ignore_path'], {}), '(ignore_path)\n', (803, 816), False, 'import os\n'), ((1144, 1170), 'os.path.join', 'os.path.join', (['self.root', 'd'], {}), '(self.root, d)\n', (1156, 1170), False, 'import os\n'), ((522, 553), 'os.path.join', 'os.path.join', (['self.root', '""".git"""'], {}), "(self.root, '.git')\n", (534, 553), False, 'import os\n'), ((2712, 2744), 'os.path.relpath', 'os.path.relpath', (['path', 'self.root'], {}), '(path, self.root)\n', (2727, 2744), False, 'import os\n')]
|
from django.db import models
from tinymce.models import HTMLField
# Create your models here.
class Agreement(models.Model):
english = HTMLField()
korean = HTMLField()
class LoginError(models.Model):
email = models.EmailField()
|
[
"django.db.models.EmailField",
"tinymce.models.HTMLField"
] |
[((142, 153), 'tinymce.models.HTMLField', 'HTMLField', ([], {}), '()\n', (151, 153), False, 'from tinymce.models import HTMLField\n'), ((167, 178), 'tinymce.models.HTMLField', 'HTMLField', ([], {}), '()\n', (176, 178), False, 'from tinymce.models import HTMLField\n'), ((225, 244), 'django.db.models.EmailField', 'models.EmailField', ([], {}), '()\n', (242, 244), False, 'from django.db import models\n')]
|
import torch
from torch import nn
from torchviz import make_dot
from torchvision.models import AlexNet
from cnn_flower import CNNNet
model = CNNNet()
x = torch.randn(1, 3, 32, 32).requires_grad_(True)
y = model(x)
vis_graph = make_dot(y, params=dict(list(model.named_parameters()) + [('x', x)]))
vis_graph.view()
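# make_dot returns a graphviz.Digraph; view() typically renders it to a PDF
# (Digraph.gv.pdf by default) and opens the system viewer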
# model = AlexNet()
# x = torch.randn(1, 3, 227, 227).requires_grad_(True)
# y = model(x)
# vis_graph = make_dot(y, params=dict(list(model.named_parameters()) + [('x', x)]))
# vis_graph.view()
|
[
"cnn_flower.CNNNet",
"torch.randn"
] |
[((143, 151), 'cnn_flower.CNNNet', 'CNNNet', ([], {}), '()\n', (149, 151), False, 'from cnn_flower import CNNNet\n'), ((156, 181), 'torch.randn', 'torch.randn', (['(1)', '(3)', '(32)', '(32)'], {}), '(1, 3, 32, 32)\n', (167, 181), False, 'import torch\n')]
|
import cv2
import numpy as np
# script that simulates the game, for testing a single frame
# pressing Esc closes it; pressing Space toggles between a black screen and the frame
print("Press Space to toggle between the frame and a black screen")
print("Press a key to select the resolution:")
print("A->1280x720 zoom = 75%")
print("B->1280x720 zoom = 80%")
print("C->1280x720 zoom = 85%")
print("D->1280x720 zoom = 90%")
print("E->1280x720 zoom = 95%")
print("F->1280x720 zoom = 100%")
print("G->1280x720 zoom = 125%")
print("H->1752x712 zoom = 75%")
print("I->1752x712 zoom = 80%")
print("J->1752x712 zoom = 85%")
print("K->1752x712 zoom = 90%")
print("L->1752x712 zoom = 95%")
print("M->1752x712 zoom = 100%")
print("N->1752x712 zoom = 105%")
print("O->1752x712 zoom = 110%")
print("P->1752x712 zoom = 115%")
print("Q->1752x712 zoom = 120%")
print("R->1752x712 zoom = 125%")
print("S->1920x1080 zoom = 90%")
cv2.imshow('Stardew Valley', np.zeros([20,20]))
k = cv2.waitKey()
# map each key (upper or lower case) to the screenshot for that resolution/zoom
screens = {
    'a': '1280x720_zoom75.png',
    'b': '1280x720_zoom80.png',
    'c': '1280x720_zoom85.png',
    'd': '1280x720_zoom90.png',
    'e': '1280x720_zoom95.png',
    'f': '1280x720_zoom100.png',
    'g': '1280x720_zoom125.png',
    'h': '1752x712_zoom75.png',
    'i': '1752x712_zoom80.png',
    'j': '1752x712_zoom85.png',
    'k': '1752x712_zoom90.png',
    'l': '1752x712_zoom95.png',
    'm': '1752x712_zoom100.png',
    'n': '1752x712_zoom105.png',
    'o': '1752x712_zoom110.png',
    'p': '1752x712_zoom115.png',
    'q': '1752x712_zoom120.png',
    'r': '1752x712_zoom125.png',
    's': '1920x1080_zoom90.png',
}
img = cv2.imread(screens[chr(k).lower()])
img2 = np.zeros(img.shape)
dsp = img
cv2.imshow('Stardew Valley', img)
while True:
k = cv2.waitKey(0)
if k == 32:
if dsp is img:
dsp = img2
elif dsp is img2:
dsp = img
cv2.imshow('Stardew Valley', dsp)
elif k == 27:
cv2.destroyAllWindows()
break
|
[
"cv2.waitKey",
"cv2.destroyAllWindows",
"numpy.zeros",
"cv2.imread",
"cv2.imshow"
] |
[((939, 952), 'cv2.waitKey', 'cv2.waitKey', ([], {}), '()\n', (950, 952), False, 'import cv2\n'), ((3298, 3331), 'cv2.imshow', 'cv2.imshow', (['"""Stardew Valley"""', 'img'], {}), "('Stardew Valley', img)\n", (3308, 3331), False, 'import cv2\n'), ((916, 934), 'numpy.zeros', 'np.zeros', (['[20, 20]'], {}), '([20, 20])\n', (924, 934), True, 'import numpy as np\n'), ((996, 1029), 'cv2.imread', 'cv2.imread', (['"""1280x720_zoom75.png"""'], {}), "('1280x720_zoom75.png')\n", (1006, 1029), False, 'import cv2\n'), ((1041, 1060), 'numpy.zeros', 'np.zeros', (['img.shape'], {}), '(img.shape)\n', (1049, 1060), True, 'import numpy as np\n'), ((1118, 1151), 'cv2.imread', 'cv2.imread', (['"""1280x720_zoom80.png"""'], {}), "('1280x720_zoom80.png')\n", (1128, 1151), False, 'import cv2\n'), ((1163, 1182), 'numpy.zeros', 'np.zeros', (['img.shape'], {}), '(img.shape)\n', (1171, 1182), True, 'import numpy as np\n'), ((1240, 1273), 'cv2.imread', 'cv2.imread', (['"""1280x720_zoom85.png"""'], {}), "('1280x720_zoom85.png')\n", (1250, 1273), False, 'import cv2\n'), ((1285, 1304), 'numpy.zeros', 'np.zeros', (['img.shape'], {}), '(img.shape)\n', (1293, 1304), True, 'import numpy as np\n'), ((1363, 1396), 'cv2.imread', 'cv2.imread', (['"""1280x720_zoom90.png"""'], {}), "('1280x720_zoom90.png')\n", (1373, 1396), False, 'import cv2\n'), ((1408, 1427), 'numpy.zeros', 'np.zeros', (['img.shape'], {}), '(img.shape)\n', (1416, 1427), True, 'import numpy as np\n'), ((1486, 1519), 'cv2.imread', 'cv2.imread', (['"""1280x720_zoom95.png"""'], {}), "('1280x720_zoom95.png')\n", (1496, 1519), False, 'import cv2\n'), ((1531, 1550), 'numpy.zeros', 'np.zeros', (['img.shape'], {}), '(img.shape)\n', (1539, 1550), True, 'import numpy as np\n'), ((1609, 1643), 'cv2.imread', 'cv2.imread', (['"""1280x720_zoom100.png"""'], {}), "('1280x720_zoom100.png')\n", (1619, 1643), False, 'import cv2\n'), ((1655, 1674), 'numpy.zeros', 'np.zeros', (['img.shape'], {}), '(img.shape)\n', (1663, 1674), True, 'import numpy as np\n'), ((1733, 1767), 'cv2.imread', 'cv2.imread', (['"""1280x720_zoom125.png"""'], {}), "('1280x720_zoom125.png')\n", (1743, 1767), False, 'import cv2\n'), ((1779, 1798), 'numpy.zeros', 'np.zeros', (['img.shape'], {}), '(img.shape)\n', (1787, 1798), True, 'import numpy as np\n'), ((1857, 1890), 'cv2.imread', 'cv2.imread', (['"""1752x712_zoom75.png"""'], {}), "('1752x712_zoom75.png')\n", (1867, 1890), False, 'import cv2\n'), ((1902, 1921), 'numpy.zeros', 'np.zeros', (['img.shape'], {}), '(img.shape)\n', (1910, 1921), True, 'import numpy as np\n'), ((1980, 2013), 'cv2.imread', 'cv2.imread', (['"""1752x712_zoom80.png"""'], {}), "('1752x712_zoom80.png')\n", (1990, 2013), False, 'import cv2\n'), ((2025, 2044), 'numpy.zeros', 'np.zeros', (['img.shape'], {}), '(img.shape)\n', (2033, 2044), True, 'import numpy as np\n'), ((2103, 2136), 'cv2.imread', 'cv2.imread', (['"""1752x712_zoom85.png"""'], {}), "('1752x712_zoom85.png')\n", (2113, 2136), False, 'import cv2\n'), ((2148, 2167), 'numpy.zeros', 'np.zeros', (['img.shape'], {}), '(img.shape)\n', (2156, 2167), True, 'import numpy as np\n'), ((2226, 2259), 'cv2.imread', 'cv2.imread', (['"""1752x712_zoom90.png"""'], {}), "('1752x712_zoom90.png')\n", (2236, 2259), False, 'import cv2\n'), ((2271, 2290), 'numpy.zeros', 'np.zeros', (['img.shape'], {}), '(img.shape)\n', (2279, 2290), True, 'import numpy as np\n'), ((2349, 2382), 'cv2.imread', 'cv2.imread', (['"""1752x712_zoom95.png"""'], {}), "('1752x712_zoom95.png')\n", (2359, 2382), False, 'import cv2\n'), ((2394, 2413), 'numpy.zeros', 'np.zeros', (['img.shape'], {}), '(img.shape)\n', (2402, 2413), True, 'import numpy as np\n'), ((2472, 2506), 'cv2.imread', 'cv2.imread', (['"""1752x712_zoom100.png"""'], {}), "('1752x712_zoom100.png')\n", (2482, 2506), False, 'import cv2\n'), ((2518, 2537), 'numpy.zeros', 'np.zeros', (['img.shape'], {}), '(img.shape)\n', (2526, 2537), True, 'import numpy as np\n'), ((2596, 2630), 'cv2.imread', 'cv2.imread', (['"""1752x712_zoom105.png"""'], {}), "('1752x712_zoom105.png')\n", (2606, 2630), False, 'import cv2\n'), ((2642, 2661), 'numpy.zeros', 'np.zeros', (['img.shape'], {}), '(img.shape)\n', (2650, 2661), True, 'import numpy as np\n'), ((2720, 2754), 'cv2.imread', 'cv2.imread', (['"""1752x712_zoom110.png"""'], {}), "('1752x712_zoom110.png')\n", (2730, 2754), False, 'import cv2\n'), ((2766, 2785), 'numpy.zeros', 'np.zeros', (['img.shape'], {}), '(img.shape)\n', (2774, 2785), True, 'import numpy as np\n'), ((2844, 2878), 'cv2.imread', 'cv2.imread', (['"""1752x712_zoom115.png"""'], {}), "('1752x712_zoom115.png')\n", (2854, 2878), False, 'import cv2\n'), ((2890, 2909), 'numpy.zeros', 'np.zeros', (['img.shape'], {}), '(img.shape)\n', (2898, 2909), True, 'import numpy as np\n'), ((2968, 3002), 'cv2.imread', 'cv2.imread', (['"""1752x712_zoom120.png"""'], {}), "('1752x712_zoom120.png')\n", (2978, 3002), False, 'import cv2\n'), ((3014, 3033), 'numpy.zeros', 'np.zeros', (['img.shape'], {}), '(img.shape)\n', (3022, 3033), True, 'import numpy as np\n'), ((3092, 3126), 'cv2.imread', 'cv2.imread', (['"""1752x712_zoom125.png"""'], {}), "('1752x712_zoom125.png')\n", (3102, 3126), False, 'import cv2\n'), ((3138, 3157), 'numpy.zeros', 'np.zeros', (['img.shape'], {}), '(img.shape)\n', (3146, 3157), True, 'import numpy as np\n'), ((3216, 3250), 'cv2.imread', 'cv2.imread', (['"""1920x1080_zoom90.png"""'], {}), "('1920x1080_zoom90.png')\n", (3226, 3250), False, 'import cv2\n'), ((3262, 3281), 'numpy.zeros', 'np.zeros', (['img.shape'], {}), '(img.shape)\n', (3270, 3281), True, 'import numpy as np\n'), ((3352, 3366), 'cv2.waitKey', 'cv2.waitKey', (['(0)'], {}), '(0)\n', (3363, 3366), False, 'import cv2\n'), ((3485, 3518), 'cv2.imshow', 'cv2.imshow', (['"""Stardew Valley"""', 'dsp'], {}), "('Stardew Valley', dsp)\n", (3495, 3518), False, 'import cv2\n'), ((3545, 3568), 'cv2.destroyAllWindows', 'cv2.destroyAllWindows', ([], {}), '()\n', (3566, 3568), False, 'import cv2\n')]
|
from datetime import datetime, timedelta
from django_elasticsearch_dsl.registries import registry
from celery.decorators import periodic_task
from celery.task.schedules import crontab
from django.apps import apps
from django.http.request import HttpRequest
from django.core.cache import cache
from rest_framework.request import Request
from researchhub_document.related_models.constants.document_type import (
DISCUSSION,
ELN,
PAPER,
POSTS,
ALL,
)
from paper.utils import get_cache_key
from researchhub.celery import app
from researchhub.settings import (
APP_ENV,
STAGING,
PRODUCTION,
)
@app.task
def preload_trending_documents(
document_type,
hub_id,
ordering,
time_difference
):
from researchhub_document.views import ResearchhubUnifiedDocumentViewSet
from researchhub_document.serializers import (
DynamicUnifiedDocumentSerializer
)
initial_date = datetime.now().replace(
hour=7,
minute=0,
second=0,
microsecond=0
)
end_date = datetime.now()
if time_difference > 365:
cache_pk = f'{document_type}_{hub_id}_{ordering}_all_time'
start_date = datetime(
year=2018,
month=12,
day=31,
hour=7
)
elif time_difference == 365:
cache_pk = f'{document_type}_{hub_id}_{ordering}_year'
start_date = initial_date - timedelta(days=365)
elif time_difference == 30 or time_difference == 31:
cache_pk = f'{document_type}_{hub_id}_{ordering}_month'
start_date = initial_date - timedelta(days=30)
elif time_difference == 7:
cache_pk = f'{document_type}_{hub_id}_{ordering}_week'
start_date = initial_date - timedelta(days=7)
else:
start_date = datetime.now().replace(
hour=7,
minute=0,
second=0,
microsecond=0
)
cache_pk = f'{document_type}_{hub_id}_{ordering}_today'
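    # cache_pk thus encodes scope and window, e.g. 'all_1_-hot_score_today'
    # for document_type 'all', hub 1, hot ordering, today's window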
query_string_ordering = 'top_rated'
if ordering == 'removed':
query_string_ordering = 'removed'
elif ordering == '-score':
query_string_ordering = 'top_rated'
elif ordering == '-discussed':
query_string_ordering = 'most_discussed'
elif ordering == '-created_date':
query_string_ordering = 'newest'
elif ordering == '-hot_score':
query_string_ordering = 'hot'
request_path = '/api/paper/get_hub_papers/'
if STAGING:
http_host = 'staging-backend.researchhub.com'
protocol = 'https'
elif PRODUCTION:
http_host = 'backend.researchhub.com'
protocol = 'https'
else:
http_host = 'localhost:8000'
protocol = 'http'
start_date_timestamp = int(start_date.timestamp())
end_date_timestamp = int(end_date.timestamp())
query_string = 'page=1&start_date__gte={}&end_date__lte={}&ordering={}&hub_id={}&'.format(
start_date_timestamp,
end_date_timestamp,
query_string_ordering,
hub_id
)
http_meta = {
'QUERY_STRING': query_string,
'HTTP_HOST': http_host,
'HTTP_X_FORWARDED_PROTO': protocol,
}
document_view = ResearchhubUnifiedDocumentViewSet()
http_req = HttpRequest()
http_req.META = http_meta
http_req.path = request_path
req = Request(http_req)
document_view.request = req
documents = document_view.get_filtered_queryset(
document_type,
ordering,
hub_id,
start_date,
end_date
)
page = document_view.paginate_queryset(documents)
context = document_view._get_serializer_context()
serializer = DynamicUnifiedDocumentSerializer(
page,
_include_fields=[
'created_by',
'documents',
'document_type',
'hot_score',
'score'
],
many=True,
context=context,
)
serializer_data = serializer.data
paginated_response = document_view.get_paginated_response(
serializer_data
)
cache_key_hub = get_cache_key('hub', cache_pk)
cache.set(
cache_key_hub,
paginated_response.data,
timeout=None
)
return paginated_response.data
# Executes every 5 minutes
@periodic_task(
run_every=crontab(minute='*/5'),
priority=1,
options={'queue': f'{APP_ENV}_core_queue'}
)
def preload_hub_documents(
document_type=ALL.lower(),
hub_ids=None
):
from researchhub_document.views import ResearchhubUnifiedDocumentViewSet
from researchhub_document.serializers import (
DynamicUnifiedDocumentSerializer
)
Hub = apps.get_model('hub.Hub')
hubs = Hub.objects.all()
document_view = ResearchhubUnifiedDocumentViewSet()
if document_type == ALL.lower():
document_types = [PAPER, ELN, DISCUSSION]
elif document_type == POSTS.lower():
document_types = [ELN, DISCUSSION]
else:
document_types = [PAPER]
if hub_ids:
hubs = hubs.filter(id__in=hub_ids)
data = []
for hub in hubs.iterator():
hub_name = hub.slug
cache_pk = f'{document_type}_{hub_name}'
documents = hub.related_documents.get_queryset().filter(
document_type__in=document_types,
is_removed=False
).order_by(
'-hot_score'
)[:15]
cache_key = get_cache_key('documents', cache_pk)
context = document_view._get_serializer_context()
serializer = DynamicUnifiedDocumentSerializer(
documents,
_include_fields=[
'created_by',
'documents',
'document_type',
'hot_score',
'score'
],
many=True,
context=context
)
serializer_data = serializer.data
data.append(serializer_data)
cache.set(
cache_key,
serializer_data,
timeout=None
)
return data
@app.task
def update_elastic_registry(post):
registry.update(post)
|
[
"django.apps.apps.get_model",
"django.core.cache.cache.set",
"rest_framework.request.Request",
"researchhub_document.related_models.constants.document_type.POSTS.lower",
"researchhub_document.views.ResearchhubUnifiedDocumentViewSet",
"researchhub_document.serializers.DynamicUnifiedDocumentSerializer",
"datetime.datetime",
"django.http.request.HttpRequest",
"datetime.timedelta",
"celery.task.schedules.crontab",
"researchhub_document.related_models.constants.document_type.ALL.lower",
"datetime.datetime.now",
"django_elasticsearch_dsl.registries.registry.update",
"paper.utils.get_cache_key"
] |
[((1046, 1060), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (1058, 1060), False, 'from datetime import datetime, timedelta\n'), ((3186, 3221), 'researchhub_document.views.ResearchhubUnifiedDocumentViewSet', 'ResearchhubUnifiedDocumentViewSet', ([], {}), '()\n', (3219, 3221), False, 'from researchhub_document.views import ResearchhubUnifiedDocumentViewSet\n'), ((3237, 3250), 'django.http.request.HttpRequest', 'HttpRequest', ([], {}), '()\n', (3248, 3250), False, 'from django.http.request import HttpRequest\n'), ((3324, 3341), 'rest_framework.request.Request', 'Request', (['http_req'], {}), '(http_req)\n', (3331, 3341), False, 'from rest_framework.request import Request\n'), ((3653, 3812), 'researchhub_document.serializers.DynamicUnifiedDocumentSerializer', 'DynamicUnifiedDocumentSerializer', (['page'], {'_include_fields': "['created_by', 'documents', 'document_type', 'hot_score', 'score']", 'many': '(True)', 'context': 'context'}), "(page, _include_fields=['created_by',\n 'documents', 'document_type', 'hot_score', 'score'], many=True, context\n =context)\n", (3685, 3812), False, 'from researchhub_document.serializers import DynamicUnifiedDocumentSerializer\n'), ((4066, 4096), 'paper.utils.get_cache_key', 'get_cache_key', (['"""hub"""', 'cache_pk'], {}), "('hub', cache_pk)\n", (4079, 4096), False, 'from paper.utils import get_cache_key\n'), ((4101, 4164), 'django.core.cache.cache.set', 'cache.set', (['cache_key_hub', 'paginated_response.data'], {'timeout': 'None'}), '(cache_key_hub, paginated_response.data, timeout=None)\n', (4110, 4164), False, 'from django.core.cache import cache\n'), ((4423, 4434), 'researchhub_document.related_models.constants.document_type.ALL.lower', 'ALL.lower', ([], {}), '()\n', (4432, 4434), False, 'from researchhub_document.related_models.constants.document_type import DISCUSSION, ELN, PAPER, POSTS, ALL\n'), ((4640, 4665), 'django.apps.apps.get_model', 'apps.get_model', (['"""hub.Hub"""'], {}), "('hub.Hub')\n", (4654, 4665), False, 'from django.apps import apps\n'), ((4716, 4751), 'researchhub_document.views.ResearchhubUnifiedDocumentViewSet', 'ResearchhubUnifiedDocumentViewSet', ([], {}), '()\n', (4749, 4751), False, 'from researchhub_document.views import ResearchhubUnifiedDocumentViewSet\n'), ((6048, 6069), 'django_elasticsearch_dsl.registries.registry.update', 'registry.update', (['post'], {}), '(post)\n', (6063, 6069), False, 'from django_elasticsearch_dsl.registries import registry\n'), ((1179, 1224), 'datetime.datetime', 'datetime', ([], {'year': '(2018)', 'month': '(12)', 'day': '(31)', 'hour': '(7)'}), '(year=2018, month=12, day=31, hour=7)\n', (1187, 1224), False, 'from datetime import datetime, timedelta\n'), ((4777, 4788), 'researchhub_document.related_models.constants.document_type.ALL.lower', 'ALL.lower', ([], {}), '()\n', (4786, 4788), False, 'from researchhub_document.related_models.constants.document_type import DISCUSSION, ELN, PAPER, POSTS, ALL\n'), ((5371, 5407), 'paper.utils.get_cache_key', 'get_cache_key', (['"""documents"""', 'cache_pk'], {}), "('documents', cache_pk)\n", (5384, 5407), False, 'from paper.utils import get_cache_key\n'), ((5487, 5651), 'researchhub_document.serializers.DynamicUnifiedDocumentSerializer', 'DynamicUnifiedDocumentSerializer', (['documents'], {'_include_fields': "['created_by', 'documents', 'document_type', 'hot_score', 'score']", 'many': '(True)', 'context': 'context'}), "(documents, _include_fields=['created_by',\n 'documents', 'document_type', 'hot_score', 'score'], many=True, context\n =context)\n", (5519, 5651), False, 'from researchhub_document.serializers import DynamicUnifiedDocumentSerializer\n'), ((5883, 5934), 'django.core.cache.cache.set', 'cache.set', (['cache_key', 'serializer_data'], {'timeout': 'None'}), '(cache_key, serializer_data, timeout=None)\n', (5892, 5934), False, 'from django.core.cache import cache\n'), ((4290, 4311), 'celery.task.schedules.crontab', 'crontab', ([], {'minute': '"""*/5"""'}), "(minute='*/5')\n", (4297, 4311), False, 'from celery.task.schedules import crontab\n'), ((927, 941), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (939, 941), False, 'from datetime import datetime, timedelta\n'), ((4866, 4879), 'researchhub_document.related_models.constants.document_type.POSTS.lower', 'POSTS.lower', ([], {}), '()\n', (4877, 4879), False, 'from researchhub_document.related_models.constants.document_type import DISCUSSION, ELN, PAPER, POSTS, ALL\n'), ((1415, 1434), 'datetime.timedelta', 'timedelta', ([], {'days': '(365)'}), '(days=365)\n', (1424, 1434), False, 'from datetime import datetime, timedelta\n'), ((1592, 1610), 'datetime.timedelta', 'timedelta', ([], {'days': '(30)'}), '(days=30)\n', (1601, 1610), False, 'from datetime import datetime, timedelta\n'), ((1741, 1758), 'datetime.timedelta', 'timedelta', ([], {'days': '(7)'}), '(days=7)\n', (1750, 1758), False, 'from datetime import datetime, timedelta\n'), ((1790, 1804), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (1802, 1804), False, 'from datetime import datetime, timedelta\n')]
|
# Copyright 2017 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Models for storing the question data models."""
from core.platform import models
import utils
from google.appengine.ext import ndb
(base_models,) = models.Registry.import_models([models.NAMES.base_model])
class QuestionSnapshotMetadataModel(base_models.BaseSnapshotMetadataModel):
"""Storage model for the metadata for a question snapshot."""
pass
class QuestionSnapshotContentModel(base_models.BaseSnapshotContentModel):
"""Storage model for the content of a question snapshot."""
pass
class QuestionModel(base_models.VersionedModel):
"""Model for storing Questions.
    IDs of instances of this class have the form
{{collection_id}}.{{random_hash_of_16_chars}}
"""
SNAPSHOT_METADATA_CLASS = QuestionSnapshotMetadataModel
SNAPSHOT_CONTENT_CLASS = QuestionSnapshotContentModel
ALLOW_REVERT = True
# The title of the question.
title = ndb.StringProperty(required=True, indexed=True)
# A dict representing the question data.
question_data = ndb.JsonProperty(indexed=False)
# The schema version for the data.
question_data_schema_version = (
ndb.IntegerProperty(required=True, indexed=True))
# The ID of collection containing the question.
collection_id = ndb.StringProperty(required=True, indexed=True)
# The ISO 639-1 code for the language this question is written in.
language_code = ndb.StringProperty(required=True, indexed=True)
@classmethod
def _get_new_id(cls, collection_id):
"""Generates a unique ID for the question of the form
{{collection_id}}.{{random_hash_of_16_chars}}
Args:
collection_id: str. The ID of collection containing the question.
Returns:
            new_id: str. The ID of the new QuestionModel instance.
Raises:
Exception: The ID generator for QuestionModel is
producing too many collisions.
"""
for _ in range(base_models.MAX_RETRIES):
new_id = '%s.%s' % (
collection_id,
utils.convert_to_hash(
str(utils.get_random_int(base_models.RAND_RANGE)),
base_models.ID_LENGTH))
if not cls.get_by_id(new_id):
return new_id
raise Exception(
'The id generator for QuestionModel is producing too many '
'collisions.')
@classmethod
def create(
cls, title, question_data, question_data_schema_version,
collection_id, language_code):
"""Creates a new QuestionModel entry.
Args:
title: str. The title of the question.
question_data: dict. A dict representing the question data.
question_data_schema_version: int. The schema version for the data.
collection_id: str. The ID of the collection containing the
question.
language_code: str. The ISO 639-1 code for the language this
question is written in.
Returns:
QuestionModel. Instance of the new QuestionModel entry.
Raises:
Exception: A model with the same ID already exists.
"""
instance_id = cls._get_new_id(collection_id)
question_model_instance = cls(
id=instance_id, title=title,
question_data=question_data,
question_data_schema_version=question_data_schema_version,
collection_id=collection_id,
language_code=language_code)
return question_model_instance
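# Hypothetical usage sketch (field values invented; commit() comes from the
# VersionedModel base class):
# question = QuestionModel.create(
#     title='Sample question', question_data={'content': '...'},
#     question_data_schema_version=1, collection_id='col_1',
#     language_code='en')
# question.commit(committer_id, 'Created question', [])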
|
[
"core.platform.models.Registry.import_models",
"utils.get_random_int",
"google.appengine.ext.ndb.IntegerProperty",
"google.appengine.ext.ndb.StringProperty",
"google.appengine.ext.ndb.JsonProperty"
] |
[((759, 815), 'core.platform.models.Registry.import_models', 'models.Registry.import_models', (['[models.NAMES.base_model]'], {}), '([models.NAMES.base_model])\n', (788, 815), False, 'from core.platform import models\n'), ((1503, 1550), 'google.appengine.ext.ndb.StringProperty', 'ndb.StringProperty', ([], {'required': '(True)', 'indexed': '(True)'}), '(required=True, indexed=True)\n', (1521, 1550), False, 'from google.appengine.ext import ndb\n'), ((1616, 1647), 'google.appengine.ext.ndb.JsonProperty', 'ndb.JsonProperty', ([], {'indexed': '(False)'}), '(indexed=False)\n', (1632, 1647), False, 'from google.appengine.ext import ndb\n'), ((1732, 1780), 'google.appengine.ext.ndb.IntegerProperty', 'ndb.IntegerProperty', ([], {'required': '(True)', 'indexed': '(True)'}), '(required=True, indexed=True)\n', (1751, 1780), False, 'from google.appengine.ext import ndb\n'), ((1854, 1901), 'google.appengine.ext.ndb.StringProperty', 'ndb.StringProperty', ([], {'required': '(True)', 'indexed': '(True)'}), '(required=True, indexed=True)\n', (1872, 1901), False, 'from google.appengine.ext import ndb\n'), ((1993, 2040), 'google.appengine.ext.ndb.StringProperty', 'ndb.StringProperty', ([], {'required': '(True)', 'indexed': '(True)'}), '(required=True, indexed=True)\n', (2011, 2040), False, 'from google.appengine.ext import ndb\n'), ((2699, 2743), 'utils.get_random_int', 'utils.get_random_int', (['base_models.RAND_RANGE'], {}), '(base_models.RAND_RANGE)\n', (2719, 2743), False, 'import utils\n')]
|
from tensorflow.keras.models import load_model
import tensorflow as tf
def PSNR(y_true, y_pred):
    max_pixel = 1.0
    return tf.image.psnr(y_true, y_pred, max_val=max_pixel)
def ssim(y_true, y_pred):
    max_val = 1.0
    return tf.image.ssim(y_true, y_pred, max_val=max_val, filter_size=11, filter_sigma=1.5, k1=0.01, k2=0.03)
def mssim(y_true, y_pred):
    max_val = 1.0
    return tf.image.ssim_multiscale(
        y_true, y_pred, max_val=max_val, filter_size=7,
        filter_sigma=1.5, k1=0.01, k2=0.03)
def SSIM_loss():
def SSIMLoss(y_true, y_pred):
        # structural dissimilarity term plus a small L1 (mean absolute error) penalty
        ss = 1 - tf.reduce_mean(tf.image.ssim(y_true, y_pred, max_val=1.0))
        l1 = tf.keras.losses.mean_absolute_error(y_true, y_pred)
        return ss + 0.1 * l1
return SSIMLoss
def get_combined_models(path='./weights/', load_individual_models=False):
didn = load_model(path + 'DIDN_l1.h5',
custom_objects={'PSNR': PSNR, 'ssim':ssim, 'mssim': mssim, 'tf': tf} ,
compile=False)
    # suffix layer names so the three sub-models do not collide inside Sequential
    for layer in didn.layers:
        layer._name = layer._name + str("_1")
didn._name = 'didn'
mimo = load_model(path + 'MIMO_l1.h5',
custom_objects={'PSNR': PSNR, 'ssim':ssim, 'mssim': mssim, 'tf': tf} ,
compile=False)
mimo._name = 'mimo'
for layer in mimo.layers:
layer._name = layer._name + str("_2")
dfcan = load_model(path + 'DFCAN-ssim-l2.h5',
custom_objects={'PSNR': PSNR, 'ssim':ssim, 'mssim': mssim, 'tf': tf, 'SSIM_loss': SSIM_loss} ,
compile=False)
dfcan._name = 'dfcan'
for layer in dfcan.layers:
layer._name = layer._name + str("_3")
model = tf.keras.Sequential([didn, mimo, dfcan])
# model.trainable = False
if load_individual_models:
return didn, mimo, dfcan, model
else:
return model
if __name__ == '__main__':
model = get_combined_models()
print(model.summary())
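# --- Training sketch (hypothetical optimizer and data; assumes the three .h5
# weight files exist under ./weights/). Shows how the combined model could be
# compiled with the SSIM-based loss and metrics defined above.
# model.compile(optimizer='adam', loss=SSIM_loss(), metrics=[PSNR, ssim])
# model.fit(x_train, y_train, batch_size=4, epochs=1)  # x_train/y_train assumed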
|
[
"tensorflow.keras.models.load_model",
"tensorflow.image.psnr",
"tensorflow.image.ssim",
"tensorflow.keras.Sequential",
"tensorflow.image.ssim_multiscale",
"tensorflow.keras.losses.mean_absolute_error"
] |
[((145, 193), 'tensorflow.image.psnr', 'tf.image.psnr', (['y_true', 'y_pred'], {'max_val': 'max_pixel'}), '(y_true, y_pred, max_val=max_pixel)\n', (158, 193), True, 'import tensorflow as tf\n'), ((263, 366), 'tensorflow.image.ssim', 'tf.image.ssim', (['y_true', 'y_pred'], {'max_val': 'max_val', 'filter_size': '(11)', 'filter_sigma': '(1.5)', 'k1': '(0.01)', 'k2': '(0.03)'}), '(y_true, y_pred, max_val=max_val, filter_size=11, filter_sigma\n =1.5, k1=0.01, k2=0.03)\n', (276, 366), True, 'import tensorflow as tf\n'), ((433, 545), 'tensorflow.image.ssim_multiscale', 'tf.image.ssim_multiscale', (['y_true', 'y_pred'], {'max_val': 'max_val', 'filter_size': '(7)', 'filter_sigma': '(1.5)', 'k1': '(0.01)', 'k2': '(0.03)'}), '(y_true, y_pred, max_val=max_val, filter_size=7,\n filter_sigma=1.5, k1=0.01, k2=0.03)\n', (457, 545), True, 'import tensorflow as tf\n'), ((1059, 1180), 'tensorflow.keras.models.load_model', 'load_model', (["(path + 'DIDN_l1.h5')"], {'custom_objects': "{'PSNR': PSNR, 'ssim': ssim, 'mssim': mssim, 'tf': tf}", 'compile': '(False)'}), "(path + 'DIDN_l1.h5', custom_objects={'PSNR': PSNR, 'ssim': ssim,\n 'mssim': mssim, 'tf': tf}, compile=False)\n", (1069, 1180), False, 'from tensorflow.keras.models import load_model\n'), ((1375, 1496), 'tensorflow.keras.models.load_model', 'load_model', (["(path + 'MIMO_l1.h5')"], {'custom_objects': "{'PSNR': PSNR, 'ssim': ssim, 'mssim': mssim, 'tf': tf}", 'compile': '(False)'}), "(path + 'MIMO_l1.h5', custom_objects={'PSNR': PSNR, 'ssim': ssim,\n 'mssim': mssim, 'tf': tf}, compile=False)\n", (1385, 1496), False, 'from tensorflow.keras.models import load_model\n'), ((1682, 1833), 'tensorflow.keras.models.load_model', 'load_model', (["(path + 'DFCAN-ssim-l2.h5')"], {'custom_objects': "{'PSNR': PSNR, 'ssim': ssim, 'mssim': mssim, 'tf': tf, 'SSIM_loss': SSIM_loss}", 'compile': '(False)'}), "(path + 'DFCAN-ssim-l2.h5', custom_objects={'PSNR': PSNR, 'ssim':\n ssim, 'mssim': mssim, 'tf': tf, 'SSIM_loss': SSIM_loss}, compile=False)\n", (1692, 1833), False, 'from tensorflow.keras.models import load_model\n'), ((2031, 2071), 'tensorflow.keras.Sequential', 'tf.keras.Sequential', (['[didn, mimo, dfcan]'], {}), '([didn, mimo, dfcan])\n', (2050, 2071), True, 'import tensorflow as tf\n'), ((839, 890), 'tensorflow.keras.losses.mean_absolute_error', 'tf.keras.losses.mean_absolute_error', (['y_true', 'y_pred'], {}), '(y_true, y_pred)\n', (874, 890), True, 'import tensorflow as tf\n'), ((771, 813), 'tensorflow.image.ssim', 'tf.image.ssim', (['y_true', 'y_pred'], {'max_val': '(1.0)'}), '(y_true, y_pred, max_val=1.0)\n', (784, 813), True, 'import tensorflow as tf\n')]
|
import numpy as np
def conv(f, g):
def h(x):
"""Input x has to be equidistant!
"""
# If the support of f or g extends outside x,
# we have to evaluate the functions also outside x
# to get the values of the convolution for all x.
n = len(x)
d = x[1] - x[0]
x_ext = np.concatenate([x[-n:] - n * d, x, x[:n] + n * d])
m = len(x_ext)
x_ext_tiled = np.tile(x_ext, (m, 1))
distance_matrix = x_ext_tiled - x_ext_tiled.T
res = np.sum(g(-distance_matrix) * np.tile(f(x_ext), (m, 1)), axis=1) * d
return res[n:-n]
return h
from scipy.signal import fftconvolve
def fconv(f, g):
def h(x):
"""Input x has to be equidistant!
"""
        # Trick: fftconvolve's 'full' output sample k lies at 2*x_ext[0] + k*d,
        # so the even samples res[2m] sit at 2*x_ext[m]; interpolating
        # (x_ext * 2, res[::2]) back onto x yields the convolution at x.
n = len(x)
d = x[1] - x[0]
x_ext = np.concatenate([x[-n // 2 :] - n * d, x, x[: n // 2] + n * d])
res = fftconvolve(f(x_ext), g(x_ext), mode="full") * (x_ext[1] - x_ext[0])
return np.interp(x, x_ext * 2, res[::2])
return h
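# --- Cross-check sketch (assumed example, not from the original module):
# convolving a unit Gaussian with itself yields a Gaussian with variance 2,
# so the direct and FFT variants can be compared numerically.
# f = lambda t: np.exp(-t ** 2 / 2) / np.sqrt(2 * np.pi)
# x = np.linspace(-5, 5, 201)
# assert np.allclose(conv(f, f)(x), fconv(f, f)(x), atol=1e-3)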
|
[
"numpy.interp",
"numpy.tile",
"numpy.concatenate"
] |
[((336, 386), 'numpy.concatenate', 'np.concatenate', (['[x[-n:] - n * d, x, x[:n] + n * d]'], {}), '([x[-n:] - n * d, x, x[:n] + n * d])\n', (350, 386), True, 'import numpy as np\n'), ((433, 455), 'numpy.tile', 'np.tile', (['x_ext', '(m, 1)'], {}), '(x_ext, (m, 1))\n', (440, 455), True, 'import numpy as np\n'), ((898, 958), 'numpy.concatenate', 'np.concatenate', (['[x[-n // 2:] - n * d, x, x[:n // 2] + n * d]'], {}), '([x[-n // 2:] - n * d, x, x[:n // 2] + n * d])\n', (912, 958), True, 'import numpy as np\n'), ((1059, 1092), 'numpy.interp', 'np.interp', (['x', '(x_ext * 2)', 'res[::2]'], {}), '(x, x_ext * 2, res[::2])\n', (1068, 1092), True, 'import numpy as np\n')]
|
#!/usr/bin/env python
import numpy as np
import rospy
import time
from sensor_msgs.msg import LaserScan
from geometry_msgs.msg import Twist
from nav_msgs.msg import Odometry
from geometry_msgs.msg import PoseWithCovarianceStamped, PointStamped
from tutoriales_basicos.msg import Histogram
from tf.transformations import euler_from_quaternion, quaternion_from_euler
from std_msgs.msg import Int16
from std_msgs.msg import Float32
BURGER_MAX_LIN_VEL = 0.22*.7
BURGER_MAX_ANG_VEL = 2.84
#import matplotlib.pyplot as plt
def min(c1, c2):
    # NOTE: shadows the built-in min(); returns the smallest wrapped angular
    # difference between sector indices c1 and c2 (sector width s = 4).
    s = 4
    return np.amin([(c1 - c2), (c1 - c2 - s), (c1 - c2 + s)])
class LaserSub:
def __init__(self):
self.sub_l_0 = rospy.Subscriber("/tb3_0/scan", LaserScan, self.scan_callback, queue_size=1)
self.pub_H = rospy.Publisher("/tb3_0/Histogram", Histogram, queue_size=10)
self.r = 0.3
self.s = 0.3
        self.alfa = 4  # sector size: 4 degrees
self.a = 5
self.b = 1
self.H = np.zeros(90)
self.Hp = list()
#def steeringCallback(self,data):
# if self.Hp[int(data.steering/4)] < 1:
# twist = Twist()
# twist.linear.x = BURGER_MAX_LIN_VEL; twist.linear.y = 0.0; twist.linear.z = 0.0
# #print(twist.linear.x)
# twist.angular.x = 0.0; twist.angular.y = 0.0; twist.angular.z = data.steering*Kp
# self.pub.publish(twist)
# else:
# for k in range(90):
# if self.Hp[k] < 1:
# if k == 0:
# gmin = 5*min(k,int(data.steering/4)) + 2*min(k,int(data.yaw/4))
# gpast = gmin
# orientation = k
# else:
# gmin = 5*min(k,int(data.steering/4)) + 2*min(k,int(data.yaw/4))
# if gmin < gpast:
# gpast = gmin
# orientation = k
def scan_callback(self,data):
        # Store the scan data in the polar histogram
        #print(data.ranges)
        self.H = np.zeros(90)  # create a vector of 90 sector bins
        size = np.size(data.ranges)  # number of range readings (360)
        for beta in range(size):  # loop over all 360 beams
            #print(data.ranges[beta])
            if data.ranges[beta] > 2:  # ranges beyond 2 m are treated as free space
                d = 0
                #print(beta, d)
            else:
                d = data.ranges[beta]  # otherwise keep the measured range
                #print(beta, d)
            k = int((beta)/self.alfa)  # k is the sector currently being updated
if beta<120 or (beta>240 and beta<360):
#if beta>(beta - np.arcsin((self.r + self.s)/d)) and beta<(beta + np.arcsin((self.r + self.s)/d)):
                previous = self.H[k]
                self.H[k] = (previous + (15*(self.a - self.b*d*d)))
msg_to_send = Histogram()
msg_to_send.Histogram = self.H
self.pub_H.publish(msg_to_send)
def main():
try:
rospy.init_node('LaseSub')
LaserSub() # constructor creates publishers / subscribers
rospy.spin()
except rospy.ROSInterruptException:
pass
if __name__=="__main__":
main()
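# --- Offline sketch (hypothetical data; no ROS master required) of the
# histogram update rule used in scan_callback: H[k] += 15 * (a - b * d^2)
# for beams in the forward-facing sectors.
# ranges = np.full(360, 1.5)  # fake scan: an obstacle at 1.5 m in every direction
# H = np.zeros(90)
# for beta, d in enumerate(ranges):
#     d = 0 if d > 2 else d
#     if beta < 120 or 240 < beta < 360:
#         H[beta // 4] += 15 * (5 - 1 * d * d)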
|
[
"numpy.size",
"rospy.Subscriber",
"numpy.amin",
"tutoriales_basicos.msg.Histogram",
"numpy.zeros",
"rospy.Publisher",
"rospy.init_node",
"rospy.spin"
] |
[((558, 602), 'numpy.amin', 'np.amin', (['[c1 - c2, c1 - c2 - s, c1 - c2 + s]'], {}), '([c1 - c2, c1 - c2 - s, c1 - c2 + s])\n', (565, 602), True, 'import numpy as np\n'), ((673, 749), 'rospy.Subscriber', 'rospy.Subscriber', (['"""/tb3_0/scan"""', 'LaserScan', 'self.scan_callback'], {'queue_size': '(1)'}), "('/tb3_0/scan', LaserScan, self.scan_callback, queue_size=1)\n", (689, 749), False, 'import rospy\n'), ((771, 832), 'rospy.Publisher', 'rospy.Publisher', (['"""/tb3_0/Histogram"""', 'Histogram'], {'queue_size': '(10)'}), "('/tb3_0/Histogram', Histogram, queue_size=10)\n", (786, 832), False, 'import rospy\n'), ((981, 993), 'numpy.zeros', 'np.zeros', (['(90)'], {}), '(90)\n', (989, 993), True, 'import numpy as np\n'), ((2040, 2052), 'numpy.zeros', 'np.zeros', (['(90)'], {}), '(90)\n', (2048, 2052), True, 'import numpy as np\n'), ((2097, 2117), 'numpy.size', 'np.size', (['data.ranges'], {}), '(data.ranges)\n', (2104, 2117), True, 'import numpy as np\n'), ((2875, 2886), 'tutoriales_basicos.msg.Histogram', 'Histogram', ([], {}), '()\n', (2884, 2886), False, 'from tutoriales_basicos.msg import Histogram\n'), ((3000, 3026), 'rospy.init_node', 'rospy.init_node', (['"""LaseSub"""'], {}), "('LaseSub')\n", (3015, 3026), False, 'import rospy\n'), ((3102, 3114), 'rospy.spin', 'rospy.spin', ([], {}), '()\n', (3112, 3114), False, 'import rospy\n')]
|
"""Add abbreviations
Revision ID: 164efab664ba
Revises: <KEY>
Create Date: 2016-03-14 01:27:28.956379
"""
# revision identifiers, used by Alembic.
revision = '164efab664ba'
down_revision = '<KEY>'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.add_column('cde_job', sa.Column('abbreviations', postgresql.JSONB(), nullable=True))
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_column('cde_job', 'abbreviations')
### end Alembic commands ###
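# --- Query sketch (hypothetical, not part of the migration): once upgraded,
# the JSONB column can be filtered with SQLAlchemy, e.g.:
# from sqlalchemy.sql import table, column, select
# cde_job = table('cde_job', column('abbreviations', postgresql.JSONB()))
# stmt = select([cde_job.c.abbreviations]).where(
#     cde_job.c.abbreviations.isnot(None))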
|
[
"alembic.op.drop_column",
"sqlalchemy.dialects.postgresql.JSONB"
] |
[((583, 625), 'alembic.op.drop_column', 'op.drop_column', (['"""cde_job"""', '"""abbreviations"""'], {}), "('cde_job', 'abbreviations')\n", (597, 625), False, 'from alembic import op\n'), ((427, 445), 'sqlalchemy.dialects.postgresql.JSONB', 'postgresql.JSONB', ([], {}), '()\n', (443, 445), False, 'from sqlalchemy.dialects import postgresql\n')]
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
import logging
from types import FrameType
from typing import cast
from loguru import logger
from datetime import timedelta
class InterceptHandler(logging.Handler):
def emit(self, record: logging.LogRecord) -> None: # pragma: no cover
# Get corresponding Loguru level if it exists
try:
level = logger.level(record.levelname).name
except ValueError:
level = str(record.levelno)
        # Find the caller from which the logged message originated
frame, depth = logging.currentframe(), 2
while frame.f_code.co_filename == logging.__file__: # noqa: WPS609
frame = cast(FrameType, frame.f_back)
depth += 1
logger.opt(depth=depth, exception=record.exc_info).log(
level, record.getMessage(),
)
# logging configuration
LOGGING_LEVEL = logging.INFO
LOGGERS = ("uvicorn.asgi", "uvicorn.access")
logging.getLogger().handlers = [InterceptHandler()]
for logger_name in LOGGERS:
logging_logger = logging.getLogger(logger_name)
logging_logger.handlers = [InterceptHandler(level=LOGGING_LEVEL)]
# sinks can be configured freely
logger.configure(
handlers=[
{"sink": sys.stdout, "level": LOGGING_LEVEL},
{"sink": './log/runtime.log', "level": LOGGING_LEVEL, "rotation": timedelta(hours=1)}]
)
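# --- Usage sketch (assumed placement in an application module): after the
# intercept above, stdlib and loguru records flow to the same sinks.
# logging.getLogger("uvicorn.access").info("stdlib record, intercepted")
# logger.info("native loguru record")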
|
[
"loguru.logger.opt",
"loguru.logger.level",
"typing.cast",
"datetime.timedelta",
"logging.currentframe",
"logging.getLogger"
] |
[((968, 987), 'logging.getLogger', 'logging.getLogger', ([], {}), '()\n', (985, 987), False, 'import logging\n'), ((1069, 1099), 'logging.getLogger', 'logging.getLogger', (['logger_name'], {}), '(logger_name)\n', (1086, 1099), False, 'import logging\n'), ((577, 599), 'logging.currentframe', 'logging.currentframe', ([], {}), '()\n', (597, 599), False, 'import logging\n'), ((699, 728), 'typing.cast', 'cast', (['FrameType', 'frame.f_back'], {}), '(FrameType, frame.f_back)\n', (703, 728), False, 'from typing import cast\n'), ((387, 417), 'loguru.logger.level', 'logger.level', (['record.levelname'], {}), '(record.levelname)\n', (399, 417), False, 'from loguru import logger\n'), ((761, 811), 'loguru.logger.opt', 'logger.opt', ([], {'depth': 'depth', 'exception': 'record.exc_info'}), '(depth=depth, exception=record.exc_info)\n', (771, 811), False, 'from loguru import logger\n'), ((1344, 1362), 'datetime.timedelta', 'timedelta', ([], {'hours': '(1)'}), '(hours=1)\n', (1353, 1362), False, 'from datetime import timedelta\n')]
|
from Cryptodome.Random import get_random_bytes
from Cryptodome.Cipher import AES
from Cryptodome.Hash import SHA256
from Cryptodome.Signature import PKCS1_v1_5
import rsa  # appears to be a project-local helper whose encrypt/decrypt take file paths, not the PyPI 'rsa' package
FILES_DIR = 'files/'
STANDARD_FILE_PATH = f'{FILES_DIR}standard_file.txt'
ENCRYPTED_FILE_PATH = f'{FILES_DIR}encrypted_file.txt'
DECRYPTED_FILE_PATH = f'{FILES_DIR}decrypted_file.txt'
ENCRYPTED_SIGNATURE_PATH = f'{FILES_DIR}signature.txt'
SIGNATURES_DIR = 'signatures/'
def generate_session_key(size=16):
"""
    Generates a session key.
    :param size: key size in bytes
    :return: the session key
"""
return get_random_bytes(size)
def encrypt_file(session_key, in_path=STANDARD_FILE_PATH, out_path=ENCRYPTED_FILE_PATH):
"""
    Encrypts a file with the symmetric AES algorithm and saves the result.
    :param session_key: session key for the symmetric algorithm
    :param in_path: path to the file to encrypt
    :param out_path: path to the encrypted file
    :return: the encrypted file contents
"""
with open(in_path, 'rb') as input_file:
data = input_file.read()
with open(out_path, 'wb') as output_file:
cipher_aes = AES.new(session_key, AES.MODE_EAX)
output_file.write(cipher_aes.nonce)
encrypted_data = cipher_aes.encrypt(data)
output_file.write(encrypted_data)
return encrypted_data
def decrypt_file(session_key, in_path=ENCRYPTED_FILE_PATH, out_path=DECRYPTED_FILE_PATH):
"""
    Decrypts a file with the symmetric AES algorithm and saves the result.
    :param session_key: session key for the symmetric algorithm
    :param in_path: path to the encrypted file
    :param out_path: path to the decrypted file
    :return: the decrypted file contents
"""
with open(in_path, 'rb') as input_file:
nonce, data = [input_file.read(size) for size in (16, -1)]
with open(out_path, 'wb') as output_file:
cipher_aes = AES.new(session_key, AES.MODE_EAX, nonce)
decrypted_data = cipher_aes.decrypt(data)
output_file.write(decrypted_data)
return decrypted_data
def sign_file_and_encrypt(private_key, public_key, in_path=STANDARD_FILE_PATH, out_path=ENCRYPTED_SIGNATURE_PATH):
"""
    Signs a file and saves the encrypted signature to a file.
    :param private_key: the signer's private key
    :param public_key: the verifier's public key
    :param in_path: path to the file being signed
    :param out_path: path to the encrypted signature
    :return: the signature bytes
"""
signature = PKCS1_v1_5.new(private_key)
with open(in_path, 'rb') as input_file:
file_hash = SHA256.new(input_file.read())
signature = signature.sign(file_hash)
rsa.encrypt(signature, public_key, out_path)
return signature
def sign_file(private_key, in_path=STANDARD_FILE_PATH, out_path=ENCRYPTED_SIGNATURE_PATH):
"""
    Signs a file and saves the signature to a file.
    :param private_key: the signer's private key
    :param in_path: path to the file being signed
    :param out_path: path to the signature file
    :return: the signature bytes
"""
signature = PKCS1_v1_5.new(private_key)
with open(in_path, 'rb') as input_file:
file_hash = SHA256.new(input_file.read())
signature = signature.sign(file_hash)
with open(out_path, 'wb') as output_file:
output_file.write(signature)
return signature
def verify_sign_and_decrypt(public_key, private_key, file_path=DECRYPTED_FILE_PATH, signature_path=ENCRYPTED_SIGNATURE_PATH):
"""
    Decrypts and verifies the signature for a file.
    :param public_key: the signer's public key
    :param private_key: the verifier's private key
    :param file_path: path to the decrypted file
    :param signature_path: path to the encrypted signature
    :return: True if the signature is valid, False otherwise
"""
with open(file_path, 'rb') as input_file:
file_hash = SHA256.new(input_file.read())
decrypted_signature = rsa.decrypt(private_key, signature_path)
signature = PKCS1_v1_5.new(public_key)
return signature.verify(file_hash, decrypted_signature)
def verify_sign(public_key, file_path=DECRYPTED_FILE_PATH, signature_path=ENCRYPTED_SIGNATURE_PATH):
"""
    Verifies the signature for a file.
    :param public_key: the signer's public key
    :param file_path: path to the decrypted file
    :param signature_path: path to the signature file
    :return: True if the signature is valid, False otherwise
"""
with open(file_path, 'rb') as input_file:
file_hash = SHA256.new(input_file.read())
signature = PKCS1_v1_5.new(public_key)
with open(signature_path, 'rb') as input_file:
sign = input_file.read()
return signature.verify(file_hash, sign)
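# --- End-to-end sketch (hypothetical key; assumes an RSA key object compatible
# with PKCS1_v1_5, e.g. from Cryptodome.PublicKey.RSA, and that
# files/standard_file.txt exists):
# from Cryptodome.PublicKey import RSA
# key = RSA.generate(2048)
# session_key = generate_session_key()
# encrypt_file(session_key)
# sign_file(key)
# decrypt_file(session_key)
# assert verify_sign(key.publickey())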
|
[
"Cryptodome.Cipher.AES.new",
"Cryptodome.Signature.PKCS1_v1_5.new",
"Cryptodome.Random.get_random_bytes",
"rsa.decrypt",
"rsa.encrypt"
] |
[((597, 619), 'Cryptodome.Random.get_random_bytes', 'get_random_bytes', (['size'], {}), '(size)\n', (613, 619), False, 'from Cryptodome.Random import get_random_bytes\n'), ((2527, 2554), 'Cryptodome.Signature.PKCS1_v1_5.new', 'PKCS1_v1_5.new', (['private_key'], {}), '(private_key)\n', (2541, 2554), False, 'from Cryptodome.Signature import PKCS1_v1_5\n'), ((2695, 2739), 'rsa.encrypt', 'rsa.encrypt', (['signature', 'public_key', 'out_path'], {}), '(signature, public_key, out_path)\n', (2706, 2739), False, 'import rsa\n'), ((3104, 3131), 'Cryptodome.Signature.PKCS1_v1_5.new', 'PKCS1_v1_5.new', (['private_key'], {}), '(private_key)\n', (3118, 3131), False, 'from Cryptodome.Signature import PKCS1_v1_5\n'), ((3953, 3993), 'rsa.decrypt', 'rsa.decrypt', (['private_key', 'signature_path'], {}), '(private_key, signature_path)\n', (3964, 3993), False, 'import rsa\n'), ((4010, 4036), 'Cryptodome.Signature.PKCS1_v1_5.new', 'PKCS1_v1_5.new', (['public_key'], {}), '(public_key)\n', (4024, 4036), False, 'from Cryptodome.Signature import PKCS1_v1_5\n'), ((4575, 4601), 'Cryptodome.Signature.PKCS1_v1_5.new', 'PKCS1_v1_5.new', (['public_key'], {}), '(public_key)\n', (4589, 4601), False, 'from Cryptodome.Signature import PKCS1_v1_5\n'), ((1137, 1171), 'Cryptodome.Cipher.AES.new', 'AES.new', (['session_key', 'AES.MODE_EAX'], {}), '(session_key, AES.MODE_EAX)\n', (1144, 1171), False, 'from Cryptodome.Cipher import AES\n'), ((1919, 1960), 'Cryptodome.Cipher.AES.new', 'AES.new', (['session_key', 'AES.MODE_EAX', 'nonce'], {}), '(session_key, AES.MODE_EAX, nonce)\n', (1926, 1960), False, 'from Cryptodome.Cipher import AES\n')]
|
# coding: utf-8
"""
metal-api
API to manage and control plane resources like machines, switches, operating system images, machine sizes, networks, IP addresses and more # noqa: E501
OpenAPI spec version: v0.15.7
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from metal_python.api_client import ApiClient
class MachineApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
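        # --- Construction sketch (not part of the generated file; host and
        # credential configuration on ApiClient are assumed):
        # client = ApiClient()
        # machines = MachineApi(api_client=client)
        # machine = machines.find_machine('some-machine-id')  # hypothetical id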
def abort_reinstall_machine(self, id, body, **kwargs): # noqa: E501
"""abort reinstall this machine # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.abort_reinstall_machine(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: identifier of the machine (required)
:param V1MachineAbortReinstallRequest body: (required)
:return: V1BootInfo
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.abort_reinstall_machine_with_http_info(id, body, **kwargs) # noqa: E501
else:
(data) = self.abort_reinstall_machine_with_http_info(id, body, **kwargs) # noqa: E501
return data
def abort_reinstall_machine_with_http_info(self, id, body, **kwargs): # noqa: E501
"""abort reinstall this machine # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.abort_reinstall_machine_with_http_info(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: identifier of the machine (required)
:param V1MachineAbortReinstallRequest body: (required)
:return: V1BootInfo
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method abort_reinstall_machine" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `abort_reinstall_machine`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `abort_reinstall_machine`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['HMAC', 'jwt'] # noqa: E501
return self.api_client.call_api(
'/v1/machine/{id}/abort-reinstall', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1BootInfo', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def add_provisioning_event(self, id, body, **kwargs): # noqa: E501
"""adds a machine provisioning event # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_provisioning_event(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: identifier of the machine (required)
:param V1MachineProvisioningEvent body: (required)
:return: V1MachineRecentProvisioningEvents
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.add_provisioning_event_with_http_info(id, body, **kwargs) # noqa: E501
else:
(data) = self.add_provisioning_event_with_http_info(id, body, **kwargs) # noqa: E501
return data
def add_provisioning_event_with_http_info(self, id, body, **kwargs): # noqa: E501
"""adds a machine provisioning event # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_provisioning_event_with_http_info(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: identifier of the machine (required)
:param V1MachineProvisioningEvent body: (required)
:return: V1MachineRecentProvisioningEvents
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method add_provisioning_event" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `add_provisioning_event`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `add_provisioning_event`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['HMAC', 'jwt'] # noqa: E501
return self.api_client.call_api(
'/v1/machine/{id}/event', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1MachineRecentProvisioningEvents', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def allocate_machine(self, body, **kwargs): # noqa: E501
"""allocate a machine # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.allocate_machine(body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param V1MachineAllocateRequest body: (required)
:return: V1MachineResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.allocate_machine_with_http_info(body, **kwargs) # noqa: E501
else:
(data) = self.allocate_machine_with_http_info(body, **kwargs) # noqa: E501
return data
def allocate_machine_with_http_info(self, body, **kwargs): # noqa: E501
"""allocate a machine # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.allocate_machine_with_http_info(body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param V1MachineAllocateRequest body: (required)
:return: V1MachineResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method allocate_machine" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `allocate_machine`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['HMAC', 'jwt'] # noqa: E501
return self.api_client.call_api(
'/v1/machine/allocate', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1MachineResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def chassis_identify_led_off(self, id, body, **kwargs): # noqa: E501
"""sends a power-off to the chassis identify LED # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.chassis_identify_led_off(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: identifier of the machine (required)
:param V1EmptyBody body: (required)
:param str description: reason why the chassis identify LED has been turned off
:return: V1MachineResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.chassis_identify_led_off_with_http_info(id, body, **kwargs) # noqa: E501
else:
(data) = self.chassis_identify_led_off_with_http_info(id, body, **kwargs) # noqa: E501
return data
def chassis_identify_led_off_with_http_info(self, id, body, **kwargs): # noqa: E501
"""sends a power-off to the chassis identify LED # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.chassis_identify_led_off_with_http_info(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: identifier of the machine (required)
:param V1EmptyBody body: (required)
:param str description: reason why the chassis identify LED has been turned off
:return: V1MachineResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'body', 'description'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method chassis_identify_led_off" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `chassis_identify_led_off`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `chassis_identify_led_off`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
if 'description' in params:
query_params.append(('description', params['description'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['HMAC', 'jwt'] # noqa: E501
return self.api_client.call_api(
'/v1/machine/{id}/power/chassis-identify-led-off', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1MachineResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def chassis_identify_led_on(self, id, body, **kwargs): # noqa: E501
"""sends a power-on to the chassis identify LED # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.chassis_identify_led_on(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: identifier of the machine (required)
:param V1EmptyBody body: (required)
        :param str description: reason why the chassis identify LED has been turned on
:return: V1MachineResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.chassis_identify_led_on_with_http_info(id, body, **kwargs) # noqa: E501
else:
(data) = self.chassis_identify_led_on_with_http_info(id, body, **kwargs) # noqa: E501
return data
def chassis_identify_led_on_with_http_info(self, id, body, **kwargs): # noqa: E501
"""sends a power-on to the chassis identify LED # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.chassis_identify_led_on_with_http_info(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: identifier of the machine (required)
:param V1EmptyBody body: (required)
        :param str description: reason why the chassis identify LED has been turned on
:return: V1MachineResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'body', 'description'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method chassis_identify_led_on" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `chassis_identify_led_on`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `chassis_identify_led_on`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
if 'description' in params:
query_params.append(('description', params['description'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['HMAC', 'jwt'] # noqa: E501
return self.api_client.call_api(
'/v1/machine/{id}/power/chassis-identify-led-on', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1MachineResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_machine(self, id, **kwargs): # noqa: E501
"""deletes a machine from the database # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_machine(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: identifier of the machine (required)
:return: V1MachineResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_machine_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.delete_machine_with_http_info(id, **kwargs) # noqa: E501
return data
def delete_machine_with_http_info(self, id, **kwargs): # noqa: E501
"""deletes a machine from the database # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_machine_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: identifier of the machine (required)
:return: V1MachineResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_machine" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `delete_machine`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['HMAC', 'jwt'] # noqa: E501
return self.api_client.call_api(
'/v1/machine/{id}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1MachineResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def finalize_allocation(self, id, body, **kwargs): # noqa: E501
"""finalize the allocation of the machine by reconfiguring the switch, sent on successful image installation # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.finalize_allocation(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: identifier of the machine (required)
:param V1MachineFinalizeAllocationRequest body: (required)
:return: V1MachineResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.finalize_allocation_with_http_info(id, body, **kwargs) # noqa: E501
else:
(data) = self.finalize_allocation_with_http_info(id, body, **kwargs) # noqa: E501
return data
def finalize_allocation_with_http_info(self, id, body, **kwargs): # noqa: E501
"""finalize the allocation of the machine by reconfiguring the switch, sent on successful image installation # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.finalize_allocation_with_http_info(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: identifier of the machine (required)
:param V1MachineFinalizeAllocationRequest body: (required)
:return: V1MachineResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method finalize_allocation" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `finalize_allocation`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `finalize_allocation`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['HMAC', 'jwt'] # noqa: E501
return self.api_client.call_api(
'/v1/machine/{id}/finalize-allocation', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1MachineResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def find_ipmi_machine(self, id, **kwargs): # noqa: E501
"""returns a machine including the ipmi connection data # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.find_ipmi_machine(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: identifier of the machine (required)
:return: V1MachineIPMIResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.find_ipmi_machine_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.find_ipmi_machine_with_http_info(id, **kwargs) # noqa: E501
return data
def find_ipmi_machine_with_http_info(self, id, **kwargs): # noqa: E501
"""returns a machine including the ipmi connection data # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.find_ipmi_machine_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: identifier of the machine (required)
:return: V1MachineIPMIResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method find_ipmi_machine" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `find_ipmi_machine`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['HMAC', 'jwt'] # noqa: E501
return self.api_client.call_api(
'/v1/machine/{id}/ipmi', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1MachineIPMIResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def find_ipmi_machines(self, body, **kwargs): # noqa: E501
"""returns machines including the ipmi connection data # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.find_ipmi_machines(body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param V1MachineFindRequest body: (required)
:return: list[V1MachineIPMIResponse]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.find_ipmi_machines_with_http_info(body, **kwargs) # noqa: E501
else:
(data) = self.find_ipmi_machines_with_http_info(body, **kwargs) # noqa: E501
return data
def find_ipmi_machines_with_http_info(self, body, **kwargs): # noqa: E501
"""returns machines including the ipmi connection data # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.find_ipmi_machines_with_http_info(body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param V1MachineFindRequest body: (required)
:return: list[V1MachineIPMIResponse]
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method find_ipmi_machines" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `find_ipmi_machines`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['HMAC', 'jwt'] # noqa: E501
return self.api_client.call_api(
'/v1/machine/ipmi/find', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[V1MachineIPMIResponse]', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def find_machine(self, id, **kwargs): # noqa: E501
"""get machine by id # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.find_machine(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: identifier of the machine (required)
:return: V1MachineResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.find_machine_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.find_machine_with_http_info(id, **kwargs) # noqa: E501
return data
def find_machine_with_http_info(self, id, **kwargs): # noqa: E501
"""get machine by id # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.find_machine_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: identifier of the machine (required)
:return: V1MachineResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method find_machine" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `find_machine`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['HMAC', 'jwt'] # noqa: E501
return self.api_client.call_api(
'/v1/machine/{id}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1MachineResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def find_machines(self, body, **kwargs): # noqa: E501
"""find machines by multiple criteria # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.find_machines(body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param V1MachineFindRequest body: (required)
:return: list[V1MachineResponse]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.find_machines_with_http_info(body, **kwargs) # noqa: E501
else:
(data) = self.find_machines_with_http_info(body, **kwargs) # noqa: E501
return data
def find_machines_with_http_info(self, body, **kwargs): # noqa: E501
"""find machines by multiple criteria # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.find_machines_with_http_info(body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param V1MachineFindRequest body: (required)
:return: list[V1MachineResponse]
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method find_machines" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `find_machines`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['HMAC', 'jwt'] # noqa: E501
return self.api_client.call_api(
'/v1/machine/find', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[V1MachineResponse]', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def free_machine(self, id, **kwargs): # noqa: E501
"""free a machine # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.free_machine(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: identifier of the machine (required)
:return: V1MachineResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.free_machine_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.free_machine_with_http_info(id, **kwargs) # noqa: E501
return data
def free_machine_with_http_info(self, id, **kwargs): # noqa: E501
"""free a machine # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.free_machine_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: identifier of the machine (required)
:return: V1MachineResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method free_machine" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `free_machine`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['HMAC', 'jwt'] # noqa: E501
return self.api_client.call_api(
'/v1/machine/{id}/free', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1MachineResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_machine_console_password(self, body, **kwargs): # noqa: E501
"""get consolepassword for machine by id # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_machine_console_password(body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param V1MachineConsolePasswordRequest body: (required)
:return: V1MachineConsolePasswordResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_machine_console_password_with_http_info(body, **kwargs) # noqa: E501
else:
(data) = self.get_machine_console_password_with_http_info(body, **kwargs) # noqa: E501
return data
def get_machine_console_password_with_http_info(self, body, **kwargs): # noqa: E501
"""get consolepassword for machine by id # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_machine_console_password_with_http_info(body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param V1MachineConsolePasswordRequest body: (required)
:return: V1MachineConsolePasswordResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_machine_console_password" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `get_machine_console_password`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['HMAC', 'jwt'] # noqa: E501
return self.api_client.call_api(
'/v1/machine/consolepassword', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1MachineConsolePasswordResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_provisioning_event_container(self, id, **kwargs): # noqa: E501
"""get the current machine provisioning event container # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_provisioning_event_container(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: identifier of the machine (required)
:return: V1MachineRecentProvisioningEvents
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_provisioning_event_container_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.get_provisioning_event_container_with_http_info(id, **kwargs) # noqa: E501
return data
def get_provisioning_event_container_with_http_info(self, id, **kwargs): # noqa: E501
"""get the current machine provisioning event container # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_provisioning_event_container_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: identifier of the machine (required)
:return: V1MachineRecentProvisioningEvents
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_provisioning_event_container" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_provisioning_event_container`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['HMAC', 'jwt'] # noqa: E501
return self.api_client.call_api(
'/v1/machine/{id}/event', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1MachineRecentProvisioningEvents', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def ipmi_report(self, body, **kwargs): # noqa: E501
"""reports IPMI ip addresses leased by a management server for machines # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.ipmi_report(body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param V1MachineIpmiReports body: (required)
:return: V1MachineIpmiReportResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.ipmi_report_with_http_info(body, **kwargs) # noqa: E501
else:
(data) = self.ipmi_report_with_http_info(body, **kwargs) # noqa: E501
return data
def ipmi_report_with_http_info(self, body, **kwargs): # noqa: E501
"""reports IPMI ip addresses leased by a management server for machines # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.ipmi_report_with_http_info(body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param V1MachineIpmiReports body: (required)
:return: V1MachineIpmiReportResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method ipmi_report" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `ipmi_report`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['HMAC', 'jwt'] # noqa: E501
return self.api_client.call_api(
'/v1/machine/ipmi', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1MachineIpmiReportResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def list_machines(self, **kwargs): # noqa: E501
"""get all known machines # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_machines(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: list[V1MachineResponse]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.list_machines_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.list_machines_with_http_info(**kwargs) # noqa: E501
return data
def list_machines_with_http_info(self, **kwargs): # noqa: E501
"""get all known machines # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_machines_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: list[V1MachineResponse]
If the method is called asynchronously,
returns the request thread.
"""
all_params = [] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method list_machines" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['HMAC', 'jwt'] # noqa: E501
return self.api_client.call_api(
'/v1/machine', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[V1MachineResponse]', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def machine_bios(self, id, body, **kwargs): # noqa: E501
"""boots machine into BIOS # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.machine_bios(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: identifier of the machine (required)
:param V1EmptyBody body: (required)
:return: V1MachineResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.machine_bios_with_http_info(id, body, **kwargs) # noqa: E501
else:
(data) = self.machine_bios_with_http_info(id, body, **kwargs) # noqa: E501
return data
def machine_bios_with_http_info(self, id, body, **kwargs): # noqa: E501
"""boots machine into BIOS # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.machine_bios_with_http_info(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: identifier of the machine (required)
:param V1EmptyBody body: (required)
:return: V1MachineResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method machine_bios" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `machine_bios`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `machine_bios`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['HMAC', 'jwt'] # noqa: E501
return self.api_client.call_api(
'/v1/machine/{id}/power/bios', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1MachineResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def machine_cycle(self, id, body, **kwargs): # noqa: E501
"""sends a power cycle to the machine # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.machine_cycle(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: identifier of the machine (required)
:param V1EmptyBody body: (required)
:return: V1MachineResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.machine_cycle_with_http_info(id, body, **kwargs) # noqa: E501
else:
(data) = self.machine_cycle_with_http_info(id, body, **kwargs) # noqa: E501
return data
def machine_cycle_with_http_info(self, id, body, **kwargs): # noqa: E501
"""sends a power cycle to the machine # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.machine_cycle_with_http_info(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: identifier of the machine (required)
:param V1EmptyBody body: (required)
:return: V1MachineResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method machine_cycle" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `machine_cycle`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `machine_cycle`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['HMAC', 'jwt'] # noqa: E501
return self.api_client.call_api(
'/v1/machine/{id}/power/cycle', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1MachineResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def machine_disk(self, id, body, **kwargs): # noqa: E501
"""boots machine from disk # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.machine_disk(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: identifier of the machine (required)
:param V1EmptyBody body: (required)
:return: V1MachineResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.machine_disk_with_http_info(id, body, **kwargs) # noqa: E501
else:
(data) = self.machine_disk_with_http_info(id, body, **kwargs) # noqa: E501
return data
def machine_disk_with_http_info(self, id, body, **kwargs): # noqa: E501
"""boots machine from disk # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.machine_disk_with_http_info(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: identifier of the machine (required)
:param V1EmptyBody body: (required)
:return: V1MachineResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method machine_disk" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `machine_disk`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `machine_disk`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['HMAC', 'jwt'] # noqa: E501
return self.api_client.call_api(
'/v1/machine/{id}/power/disk', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1MachineResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def machine_off(self, id, body, **kwargs): # noqa: E501
"""sends a power-off to the machine # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.machine_off(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: identifier of the machine (required)
:param V1EmptyBody body: (required)
:return: V1MachineResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.machine_off_with_http_info(id, body, **kwargs) # noqa: E501
else:
(data) = self.machine_off_with_http_info(id, body, **kwargs) # noqa: E501
return data
def machine_off_with_http_info(self, id, body, **kwargs): # noqa: E501
"""sends a power-off to the machine # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.machine_off_with_http_info(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: identifier of the machine (required)
:param V1EmptyBody body: (required)
:return: V1MachineResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method machine_off" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `machine_off`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `machine_off`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['HMAC', 'jwt'] # noqa: E501
return self.api_client.call_api(
'/v1/machine/{id}/power/off', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1MachineResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def machine_on(self, id, body, **kwargs): # noqa: E501
"""sends a power-on to the machine # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.machine_on(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: identifier of the machine (required)
:param V1EmptyBody body: (required)
:return: V1MachineResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.machine_on_with_http_info(id, body, **kwargs) # noqa: E501
else:
(data) = self.machine_on_with_http_info(id, body, **kwargs) # noqa: E501
return data
def machine_on_with_http_info(self, id, body, **kwargs): # noqa: E501
"""sends a power-on to the machine # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.machine_on_with_http_info(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: identifier of the machine (required)
:param V1EmptyBody body: (required)
:return: V1MachineResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method machine_on" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `machine_on`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `machine_on`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['HMAC', 'jwt'] # noqa: E501
return self.api_client.call_api(
'/v1/machine/{id}/power/on', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1MachineResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def machine_pxe(self, id, body, **kwargs): # noqa: E501
"""boots machine from PXE # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.machine_pxe(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: identifier of the machine (required)
:param V1EmptyBody body: (required)
:return: V1MachineResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.machine_pxe_with_http_info(id, body, **kwargs) # noqa: E501
else:
(data) = self.machine_pxe_with_http_info(id, body, **kwargs) # noqa: E501
return data
def machine_pxe_with_http_info(self, id, body, **kwargs): # noqa: E501
"""boots machine from PXE # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.machine_pxe_with_http_info(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: identifier of the machine (required)
:param V1EmptyBody body: (required)
:return: V1MachineResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method machine_pxe" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `machine_pxe`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `machine_pxe`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['HMAC', 'jwt'] # noqa: E501
return self.api_client.call_api(
'/v1/machine/{id}/power/pxe', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1MachineResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def machine_reset(self, id, body, **kwargs): # noqa: E501
"""sends a reset to the machine # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.machine_reset(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: identifier of the machine (required)
:param V1EmptyBody body: (required)
:return: V1MachineResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.machine_reset_with_http_info(id, body, **kwargs) # noqa: E501
else:
(data) = self.machine_reset_with_http_info(id, body, **kwargs) # noqa: E501
return data
def machine_reset_with_http_info(self, id, body, **kwargs): # noqa: E501
"""sends a reset to the machine # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.machine_reset_with_http_info(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: identifier of the machine (required)
:param V1EmptyBody body: (required)
:return: V1MachineResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method machine_reset" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `machine_reset`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `machine_reset`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['HMAC', 'jwt'] # noqa: E501
return self.api_client.call_api(
'/v1/machine/{id}/power/reset', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1MachineResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def register_machine(self, body, **kwargs): # noqa: E501
"""register a machine # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.register_machine(body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param V1MachineRegisterRequest body: (required)
:return: V1MachineResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.register_machine_with_http_info(body, **kwargs) # noqa: E501
else:
(data) = self.register_machine_with_http_info(body, **kwargs) # noqa: E501
return data
def register_machine_with_http_info(self, body, **kwargs): # noqa: E501
"""register a machine # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.register_machine_with_http_info(body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param V1MachineRegisterRequest body: (required)
:return: V1MachineResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method register_machine" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `register_machine`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['HMAC', 'jwt'] # noqa: E501
return self.api_client.call_api(
'/v1/machine/register', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1MachineResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def reinstall_machine(self, id, body, **kwargs): # noqa: E501
"""reinstall this machine # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.reinstall_machine(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: identifier of the machine (required)
:param V1MachineReinstallRequest body: (required)
:return: V1MachineResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.reinstall_machine_with_http_info(id, body, **kwargs) # noqa: E501
else:
(data) = self.reinstall_machine_with_http_info(id, body, **kwargs) # noqa: E501
return data
def reinstall_machine_with_http_info(self, id, body, **kwargs): # noqa: E501
"""reinstall this machine # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.reinstall_machine_with_http_info(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: identifier of the machine (required)
:param V1MachineReinstallRequest body: (required)
:return: V1MachineResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method reinstall_machine" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `reinstall_machine`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `reinstall_machine`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['HMAC', 'jwt'] # noqa: E501
return self.api_client.call_api(
'/v1/machine/{id}/reinstall', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1MachineResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def set_chassis_identify_led_state(self, id, body, **kwargs): # noqa: E501
"""set the state of a chassis identify LED # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.set_chassis_identify_led_state(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: identifier of the machine (required)
:param V1ChassisIdentifyLEDState body: (required)
:return: V1MachineResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.set_chassis_identify_led_state_with_http_info(id, body, **kwargs) # noqa: E501
else:
(data) = self.set_chassis_identify_led_state_with_http_info(id, body, **kwargs) # noqa: E501
return data
def set_chassis_identify_led_state_with_http_info(self, id, body, **kwargs): # noqa: E501
"""set the state of a chassis identify LED # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.set_chassis_identify_led_state_with_http_info(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: identifier of the machine (required)
:param V1ChassisIdentifyLEDState body: (required)
:return: V1MachineResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method set_chassis_identify_led_state" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `set_chassis_identify_led_state`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `set_chassis_identify_led_state`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['HMAC', 'jwt'] # noqa: E501
return self.api_client.call_api(
'/v1/machine/{id}/chassis-identify-led-state', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1MachineResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def set_machine_state(self, id, body, **kwargs): # noqa: E501
"""set the state of a machine # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.set_machine_state(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: identifier of the machine (required)
:param V1MachineState body: (required)
:return: V1MachineResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.set_machine_state_with_http_info(id, body, **kwargs) # noqa: E501
else:
(data) = self.set_machine_state_with_http_info(id, body, **kwargs) # noqa: E501
return data
def set_machine_state_with_http_info(self, id, body, **kwargs): # noqa: E501
"""set the state of a machine # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.set_machine_state_with_http_info(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: identifier of the machine (required)
:param V1MachineState body: (required)
:return: V1MachineResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method set_machine_state" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `set_machine_state`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `set_machine_state`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['HMAC', 'jwt'] # noqa: E501
return self.api_client.call_api(
'/v1/machine/{id}/state', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1MachineResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def update_firmware(self, id, body, **kwargs): # noqa: E501
"""sends a firmware command to the machine # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_firmware(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: identifier of the machine (required)
:param V1MachineUpdateFirmwareRequest body: (required)
:return: V1MachineResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.update_firmware_with_http_info(id, body, **kwargs) # noqa: E501
else:
(data) = self.update_firmware_with_http_info(id, body, **kwargs) # noqa: E501
return data
def update_firmware_with_http_info(self, id, body, **kwargs): # noqa: E501
"""sends a firmware command to the machine # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_firmware_with_http_info(id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: identifier of the machine (required)
:param V1MachineUpdateFirmwareRequest body: (required)
:return: V1MachineResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method update_firmware" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `update_firmware`") # noqa: E501
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `update_firmware`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['HMAC', 'jwt'] # noqa: E501
return self.api_client.call_api(
'/v1/machine/update-firmware/{id}', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1MachineResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
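
# --- Usage sketch (added for illustration; not part of the generated client) ---
# A minimal, hedged example. The class above looks like the machine API of the
# swagger-generated metal_python client, so the names `MachineApi` and
# `ApiClient` below are assumptions about the surrounding package, and the
# host / credential configuration is a placeholder you must supply yourself:
#
#   from metal_python.api_client import ApiClient
#
#   api = MachineApi(ApiClient())               # assumed constructor wiring
#   machines = api.list_machines()              # synchronous call
#   thread = api.list_machines(async_req=True)  # asynchronous call
#   machines = thread.get()                     # block until the result arrives
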
# Lint as: python3
# coding=utf-8
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Prepare data for further process.
Read data from "/slope", "/ring", "/wing", "/negative" and save them
in "/data/complete_data" in python dict format.
It will generate a new file with the following structure:
├── data
│ └── complete_data
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import csv
import json
import os
import random
LABEL_NAME = "gesture"
DATA_NAME = "accel_ms2_xyz"
folders = ["A","B","N","O","backspace","space","done"]
# folders = ["A","B","C","D","E","F","G","H","I","J","K","L","M","N","O","P","Q","R","S","T","U","V","W","X","Y","Z",
# "apostrophe","backspace","comma","done","exclamation_point", "period","question_mark","quotes","slash","space"]
names = [
    "lauren", "katherine", "annie", "chris", "hallie"
]
def prepare_original_data(folder, name, data, file_to_read): # pylint: disable=redefined-outer-name
"""Read collected data from files."""
if folder != "negative":
with open(file_to_read, "r") as f:
lines = csv.reader(f)
data_new = {}
data_new[LABEL_NAME] = folder
data_new[DATA_NAME] = []
data_new["name"] = name
for idx, line in enumerate(lines): # pylint: disable=unused-variable,redefined-outer-name
if len(line) == 3:
if line[2] == "-":
data.append(data_new)
data_new = {}
data_new[LABEL_NAME] = folder
            data_new[DATA_NAME] = []  # stores one (300, 3) accelerometer recording
data_new["name"] = name
elif line[2] != "-":
data_new[DATA_NAME].append([float(line[i]) if i < 2 else (float(line[i]) -.98) for i in range(3)])
# data_new[DATA_NAME].append([float(i) for i in line[0:3]])
data.append(data_new)
else:
with open(file_to_read, "r") as f:
lines = csv.reader(f)
data_new = {}
data_new[LABEL_NAME] = folder
data_new[DATA_NAME] = []
data_new["name"] = name
for idx, line in enumerate(lines):
if len(line) == 3 and line[2] != "-":
if len(data_new[DATA_NAME]) == 300:
data.append(data_new)
data_new = {}
data_new[LABEL_NAME] = folder
data_new[DATA_NAME] = []
data_new["name"] = name
else:
data_new[DATA_NAME].append([float(i) for i in line[0:3]])
data.append(data_new)
def generate_negative_data(data): # pylint: disable=redefined-outer-name
"""Generate negative data labeled as 'negative6~8'."""
# Big movement -> around straight line
samp = 300
for i in range(100):
if i > 80:
dic = {DATA_NAME: [], LABEL_NAME: "negative", "name": "negative8"}
elif i > 60:
dic = {DATA_NAME: [], LABEL_NAME: "negative", "name": "negative7"}
else:
dic = {DATA_NAME: [], LABEL_NAME: "negative", "name": "negative6"}
start_x = (random.random() - 0.5) * 2000
start_y = (random.random() - 0.5) * 2000
start_z = (random.random() - 0.5) * 2000
x_increase = (random.random() - 0.5) * 10
y_increase = (random.random() - 0.5) * 10
z_increase = (random.random() - 0.5) * 10
for j in range(samp):
dic[DATA_NAME].append([
start_x + j * x_increase + (random.random() - 0.5) * 6,
start_y + j * y_increase + (random.random() - 0.5) * 6,
start_z + j * z_increase + (random.random() - 0.5) * 6
])
data.append(dic)
# Random
for i in range(100):
if i > 80:
dic = {DATA_NAME: [], LABEL_NAME: "negative", "name": "negative8"}
elif i > 60:
dic = {DATA_NAME: [], LABEL_NAME: "negative", "name": "negative7"}
else:
dic = {DATA_NAME: [], LABEL_NAME: "negative", "name": "negative6"}
for j in range(samp):
dic[DATA_NAME].append([(random.random() - 0.5) * 1000,
(random.random() - 0.5) * 1000,
(random.random() - 0.5) * 1000])
data.append(dic)
# Stay still
for i in range(100):
if i > 80:
dic = {DATA_NAME: [], LABEL_NAME: "negative", "name": "negative8"}
elif i > 60:
dic = {DATA_NAME: [], LABEL_NAME: "negative", "name": "negative7"}
else:
dic = {DATA_NAME: [], LABEL_NAME: "negative", "name": "negative6"}
start_x = (random.random() - 0.5) * 2000
start_y = (random.random() - 0.5) * 2000
start_z = (random.random() - 0.5) * 2000
for j in range(samp):
dic[DATA_NAME].append([
start_x + (random.random() - 0.5) * 40,
start_y + (random.random() - 0.5) * 40,
start_z + (random.random() - 0.5) * 40
])
data.append(dic)
# Write data to file
def write_data(data_to_write, path):
with open(path, "w") as f:
for idx, item in enumerate(data_to_write): # pylint: disable=unused-variable,redefined-outer-name
dic = json.dumps(item, ensure_ascii=False)
f.write(dic)
f.write("\n")
if __name__ == "__main__":
data = [] # pylint: disable=redefined-outer-name
for idx1, folder in enumerate(folders):
for idx2, name in enumerate(names):
path = "./data/"+folder+"/output_"+folder+"_"+ name + ".txt"
print(path)
prepare_original_data(folder, name, data, path)
# for idx in range(3): ##############THIS IS HOW MANY NEG FILES WE HAVE##############################
# prepare_original_data("negative", "negative_"+name , data, #% (idx + 1)
# "./data/negative/output_negative_"+name+".txt")# % (idx + 1)) #% (idx + 1) #"C:/Users/kathe/Documents/GitHub/tensorflow/tensorflow/lite/micro/examples/magic_wand/train
# # generate_negative_data(data)
print("data_length: " + str(len(data)))
if not os.path.exists("./data"):
os.makedirs("./data")
write_data(data, "./data/complete_data")
|
[
"csv.reader",
"os.makedirs",
"os.path.exists",
"json.dumps",
"random.random"
] |
[((6397, 6421), 'os.path.exists', 'os.path.exists', (['"""./data"""'], {}), "('./data')\n", (6411, 6421), False, 'import os\n'), ((6427, 6448), 'os.makedirs', 'os.makedirs', (['"""./data"""'], {}), "('./data')\n", (6438, 6448), False, 'import os\n'), ((1768, 1781), 'csv.reader', 'csv.reader', (['f'], {}), '(f)\n', (1778, 1781), False, 'import csv\n'), ((2575, 2588), 'csv.reader', 'csv.reader', (['f'], {}), '(f)\n', (2585, 2588), False, 'import csv\n'), ((5554, 5590), 'json.dumps', 'json.dumps', (['item'], {'ensure_ascii': '(False)'}), '(item, ensure_ascii=False)\n', (5564, 5590), False, 'import json\n'), ((3614, 3629), 'random.random', 'random.random', ([], {}), '()\n', (3627, 3629), False, 'import random\n'), ((3659, 3674), 'random.random', 'random.random', ([], {}), '()\n', (3672, 3674), False, 'import random\n'), ((3704, 3719), 'random.random', 'random.random', ([], {}), '()\n', (3717, 3719), False, 'import random\n'), ((3752, 3767), 'random.random', 'random.random', ([], {}), '()\n', (3765, 3767), False, 'import random\n'), ((3798, 3813), 'random.random', 'random.random', ([], {}), '()\n', (3811, 3813), False, 'import random\n'), ((3844, 3859), 'random.random', 'random.random', ([], {}), '()\n', (3857, 3859), False, 'import random\n'), ((4995, 5010), 'random.random', 'random.random', ([], {}), '()\n', (5008, 5010), False, 'import random\n'), ((5040, 5055), 'random.random', 'random.random', ([], {}), '()\n', (5053, 5055), False, 'import random\n'), ((5085, 5100), 'random.random', 'random.random', ([], {}), '()\n', (5098, 5100), False, 'import random\n'), ((4506, 4521), 'random.random', 'random.random', ([], {}), '()\n', (4519, 4521), False, 'import random\n'), ((4567, 4582), 'random.random', 'random.random', ([], {}), '()\n', (4580, 4582), False, 'import random\n'), ((4628, 4643), 'random.random', 'random.random', ([], {}), '()\n', (4641, 4643), False, 'import random\n'), ((3966, 3981), 'random.random', 'random.random', ([], {}), '()\n', (3979, 3981), False, 'import random\n'), ((4032, 4047), 'random.random', 'random.random', ([], {}), '()\n', (4045, 4047), False, 'import random\n'), ((4098, 4113), 'random.random', 'random.random', ([], {}), '()\n', (4111, 4113), False, 'import random\n'), ((5192, 5207), 'random.random', 'random.random', ([], {}), '()\n', (5205, 5207), False, 'import random\n'), ((5242, 5257), 'random.random', 'random.random', ([], {}), '()\n', (5255, 5257), False, 'import random\n'), ((5292, 5307), 'random.random', 'random.random', ([], {}), '()\n', (5305, 5307), False, 'import random\n')]
|
def interpret(mem):
pc = 0
reg = [0]*10
ins_commited = 0
halted = False
while not halted:
ir = mem[pc]
opcode = ir[0]
op1 = int(ir[1])
op2 = int(ir[2])
ins_commited += 1
pc = ( pc + 1 ) % 1000
if opcode == "1": # Halt
halted = True
elif opcode == "2": # Load imm
reg[op1] = op2
elif opcode == "3": # Add I
reg[op1] += op2
reg[op1] %= 1000
elif opcode == "4": # Mul I
reg[op1] *= op2
reg[op1] %= 1000
elif opcode == "5": # Move R
reg[op1] = reg[op2]
elif opcode == "6": # Add R
reg[op1] += reg[op2]
reg[op1] %= 1000
elif opcode == "7": # Mul r
reg[op1] *= reg[op2]
reg[op1] %= 1000
elif opcode == "8": # Load
reg[op1] = int(mem[reg[op2]])
elif opcode == "9": # Store
v = str(reg[op1]).zfill(3)
mem[reg[op2]] = v
elif opcode == "0": # Jump
if reg[op2] != 0:
pc = reg[op1]
else:
raise ValueError("unexpected instrution")
return ins_commited
num_cases = int(input())
input()
from sys import stdin, stdout
for c in range(num_cases):
mem = ["000"] * 1001
line = stdin.readline().strip()
i = 0
while line != "":
mem[i] = line
i += 1
line = stdin.readline().strip()
stdout.write("{}\n".format(interpret(mem)))
if c < num_cases - 1:
stdout.write("\n")
|
[
"sys.stdout.write",
"sys.stdin.readline"
] |
[((1350, 1368), 'sys.stdout.write', 'stdout.write', (['"""\n"""'], {}), "('\\n')\n", (1362, 1368), False, 'from sys import stdin, stdout\n'), ((1155, 1171), 'sys.stdin.readline', 'stdin.readline', ([], {}), '()\n', (1169, 1171), False, 'from sys import stdin, stdout\n'), ((1248, 1264), 'sys.stdin.readline', 'stdin.readline', ([], {}), '()\n', (1262, 1264), False, 'from sys import stdin, stdout\n')]
|
from django.contrib.auth.models import User
from userroles.models import set_user_role
from userroles import roles
# Read in some file with usernames, emails, etc.
username = "tmp"
email = "<EMAIL>"
password = "<PASSWORD>"
user = User.objects.create_user(username, email, password)
# At this point, user is a User object that has already been saved
# to the database. You can continue to change its attributes
# if you want to change other fields.
# Now set user role
set_user_role(user, roles.question_editor) # roles.question_editor
# roles.assessment_editor
# roles.behavior_editor
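
# A minimal sketch of the bulk version hinted at above (hypothetical file
# "users.csv" with one "username,email,password" triple per line):
#   import csv
#   with open("users.csv") as f:
#       for username, email, password in csv.reader(f):
#           u = User.objects.create_user(username, email, password)
#           set_user_role(u, roles.question_editor)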
|
[
"userroles.models.set_user_role",
"django.contrib.auth.models.User.objects.create_user"
] |
[((232, 283), 'django.contrib.auth.models.User.objects.create_user', 'User.objects.create_user', (['username', 'email', 'password'], {}), '(username, email, password)\n', (256, 283), False, 'from django.contrib.auth.models import User\n'), ((472, 514), 'userroles.models.set_user_role', 'set_user_role', (['user', 'roles.question_editor'], {}), '(user, roles.question_editor)\n', (485, 514), False, 'from userroles.models import set_user_role\n')]
|
from django.http import HttpResponse
from django.shortcuts import render
import pandas_gbq
from google.oauth2 import service_account
import os
# Make sure you have installed pandas-gbq first;
# you can also use the other ways to query BigQuery.
# To get your credentials, please have a look at
# https://cloud.google.com/bigquery/docs/reference/libraries#client-libraries-install-nodejs
app_dir = os.path.dirname(__file__)
service_path = os.path.join(app_dir,'static/eecs-e6895-edu-591e4a34fcda.json')
project_id = "eecs-e6895-edu"
credentials = service_account.Credentials \
.from_service_account_file(service_path)
def dashboard(request):
import json
pandas_gbq.context.credentials = credentials
pandas_gbq.context.project = project_id
dataset = 'usheatmap' # the name of dataset in BigQuery
# table = 'final' # the name of table in BigQuery
table = 'training_data' # the name of table in BigQuery
table_id = '{0}.{1}'.format(dataset,table)
# cols = ['ai', 'data','good','movie','spark'] # column names
data = {}
data['data'] = []
# query the table, return as pandas df.
# SQL = "SELECT * FROM `{}` ORDER BY date ASC LIMIT 8".format(table_id)
SQL = "SELECT * FROM `{}` ORDER BY date ASC".format(table_id)
df = pandas_gbq.read_gbq(SQL)
df.fillna(-1,inplace=True) # fill NaN/Na with -1
# iterate each row of the dataframe
def getGeoid(row):
statefp = row['state'].split("_")[0]
countyfp = row['county'].split("_")[0]
return "{}{}".format(statefp,countyfp)
# add the geoid (FIXME: this should have been included in original processing)
    df['geoid'] = df.apply(lambda row: getGeoid(row), axis=1)
# j = (df.groupby(['date','geoid'], as_index=False)
# .apply(lambda x: x[['vci','tci','vhi','tasmin','tasmax','pr']].to_dict('r'))
# .reset_index()
# .rename(columns={0:'Test'})
# .to_json(orient='records'))
# pprint.pprint(j)
# sys.exit()
tmp = {}
for index, row in df.iterrows():
dt_date = row['date'].to_pydatetime().strftime('%Y-%m-%d')
if (dt_date != "2018-01-08"):
continue;
# statefp = row['state'].split("_")[0]
# countyfp = row['county'].split("_")[0]
# geoid = "{}{}".format(statefp,countyfp)
# geoid = SSCCC, SS = State FIPS, CCC = County FIPS
# BQ has state = SS_Name, county = CCC_Name
tmp = { 'date' : dt_date, \
'geoid': row['geoid'], \
'value' : { 'vci' : row['vci'], \
'tci':row['tci'], \
'vhi':row['vhi'], \
'tasmin':row['tasmin'], \
'tasmax':row['tasmax'], \
'pr':row['pr'] \
} \
}
data['data'].append(tmp)
# return render(request, 'map.html', {'results':data,'geojson':geojson_data})
return render(request, 'map.html', data)
# Notes:
# desired output, to organize by date
# 'date1': [
# {
# 'geoid': geoid1,
# 'values': {
# 'vci': vci,
# 'tci': tci,
# ...
# }
# },
# {
# 'geoid': geoid2,
# 'values': {
# 'vci': vci,
# 'tci': tci,
# ...
# }
# }, ...
# ]
#
# # 'date2': [
# {
# 'geoid': geoid1,
# 'values': {
# 'vci': vci,
# 'tci': tci,
# ...
# }
# },
# {
# 'geoid': geoid2,
# 'values': {
# 'vci': vci,
# 'tci': tci,
# ...
# }
# }, ...
# ]
# hello world page
def hello(request):
context = {}
context['content1'] = 'Hello World!'
return render(request, 'helloworld.html', context)
# deprecated
# def dashboard(request):
# pandas_gbq.context.credentials = credentials
# pandas_gbq.context.project = project_id
# dataset = 'usheatmap' # the name of dataset in BigQuery
# table = 'final' # the name of table in BigQuery
# table_id = '{0}.{1}'.format(dataset,table)
# # cols = ['ai', 'data','good','movie','spark'] # column names
# data = {}
# data['data'] = []
# # query the table, return as pandas df.
# SQL = "SELECT * FROM `{}` ORDER BY date ASC LIMIT 8".format(table_id)
# df = pandas_gbq.read_gbq(SQL)
# # iterate each row of the dataframe
# tmp = {}
# for index, row in df.iterrows():
# dt_date = row['date'].to_pydatetime().strftime('%Y-%m-%d')
# tmp = { 'date' : dt_date, \
# 'count' : { 'vci' : row['vci'], \
# 'tci':row['tci'], \
# 'vhi':row['vhi'], \
# 'tasmin':row['tasmin'], \
# 'tasmax':row['tasmax'], \
# 'pr':row['pr'] \
# } \
# }
# data['data'].append(tmp)
# return render(request, 'dashboard.html', data)
# def connection(request):
# pandas_gbq.context.credentials = credentials
# pandas_gbq.context.project = "Your-Project"
# SQL1 = ''
# df1 = pandas_gbq.read_gbq(SQL1)
# SQL2 = ''
# df2 = pandas_gbq.read_gbq(SQL2)
# data = {}
# '''
# TODO: Finish the SQL to query the data, it should be limited to 8 rows.
# Then process them to format below:
# Format of data:
# {'n': [xxx, xxx, xxx, xxx],
# 'e': [{'source': xxx, 'target': xxx},
# {'source': xxx, 'target': xxx},
# ...
# ]
# }
# '''
# return render(request, 'connection.html', data)
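
# A minimal sketch (not part of the original views) of building the date-keyed
# structure described in the notes above, assuming the same dataframe columns:
#   by_date = {}
#   for (dt, geoid), group in df.groupby(['date', 'geoid']):
#       key = dt.to_pydatetime().strftime('%Y-%m-%d')
#       by_date.setdefault(key, []).append({
#           'geoid': geoid,
#           'values': group[['vci', 'tci', 'vhi', 'tasmin',
#                            'tasmax', 'pr']].iloc[0].to_dict(),
#       })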
|
[
"google.oauth2.service_account.Credentials.from_service_account_file",
"os.path.dirname",
"django.shortcuts.render",
"pandas_gbq.read_gbq",
"os.path.join"
] |
[((395, 420), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (410, 420), False, 'import os\n'), ((436, 500), 'os.path.join', 'os.path.join', (['app_dir', '"""static/eecs-e6895-edu-591e4a34fcda.json"""'], {}), "(app_dir, 'static/eecs-e6895-edu-591e4a34fcda.json')\n", (448, 500), False, 'import os\n'), ((545, 612), 'google.oauth2.service_account.Credentials.from_service_account_file', 'service_account.Credentials.from_service_account_file', (['service_path'], {}), '(service_path)\n', (598, 612), False, 'from google.oauth2 import service_account\n'), ((1298, 1322), 'pandas_gbq.read_gbq', 'pandas_gbq.read_gbq', (['SQL'], {}), '(SQL)\n', (1317, 1322), False, 'import pandas_gbq\n'), ((3029, 3062), 'django.shortcuts.render', 'render', (['request', '"""map.html"""', 'data'], {}), "(request, 'map.html', data)\n", (3035, 3062), False, 'from django.shortcuts import render\n'), ((4292, 4335), 'django.shortcuts.render', 'render', (['request', '"""helloworld.html"""', 'context'], {}), "(request, 'helloworld.html', context)\n", (4298, 4335), False, 'from django.shortcuts import render\n')]
|
"""CFNgin init action."""
from __future__ import annotations
import logging
from typing import TYPE_CHECKING, Any, Optional, Union, cast
from ...compat import cached_property
from ...config.models.cfngin import CfnginStackDefinitionModel
from ...core.providers.aws.s3 import Bucket
from ..exceptions import CfnginBucketAccessDenied
from . import deploy
from .base import BaseAction
if TYPE_CHECKING:
import threading
from ..._logging import RunwayLogger
from ...context import CfnginContext
from ..providers.aws.default import ProviderBuilder
LOGGER = cast("RunwayLogger", logging.getLogger(__name__))
class Action(BaseAction):
"""Initialize environment."""
NAME = "init"
DESCRIPTION = "Initialize environment"
def __init__(
self,
context: CfnginContext,
provider_builder: Optional[ProviderBuilder] = None,
cancel: Optional[threading.Event] = None,
):
"""Instantiate class.
This class creates a copy of the context object prior to initialization
as some of it can perform destructive actions on the context object.
Args:
context: The context for the current run.
provider_builder: An object that will build a provider that will be
interacted with in order to perform the necessary actions.
cancel: Cancel handler.
"""
super().__init__(
context=context.copy(), provider_builder=provider_builder, cancel=cancel
)
@property
def _stack_action(self) -> Any:
"""Run against a step."""
return None
@cached_property
def cfngin_bucket(self) -> Optional[Bucket]:
"""CFNgin bucket.
        Returns:
            ``None`` if the context does not define a bucket name.
        """
if not self.context.bucket_name:
return None
return Bucket(
self.context,
name=self.context.bucket_name,
region=self.context.bucket_region,
)
@cached_property
def default_cfngin_bucket_stack(self) -> CfnginStackDefinitionModel:
"""CFNgin bucket stack."""
return CfnginStackDefinitionModel(
class_path="runway.cfngin.blueprints.cfngin_bucket.CfnginBucket",
in_progress_behavior="wait",
name="cfngin-bucket",
termination_protection=True,
variables={"BucketName": self.context.bucket_name},
)
def run(
self,
*,
concurrency: int = 0,
dump: Union[bool, str] = False, # pylint: disable=unused-argument
force: bool = False, # pylint: disable=unused-argument
outline: bool = False, # pylint: disable=unused-argument
tail: bool = False,
upload_disabled: bool = True, # pylint: disable=unused-argument
**_kwargs: Any,
) -> None:
"""Run the action.
Args:
concurrency: The maximum number of concurrent deployments.
dump: Not used by this action
force: Not used by this action.
outline: Not used by this action.
tail: Tail the stack's events.
upload_disabled: Not used by this action.
Raises:
CfnginBucketAccessDenied: Could not head cfngin_bucket.
"""
if not self.cfngin_bucket:
LOGGER.info("skipped; cfngin_bucket not defined")
return
if self.cfngin_bucket.forbidden:
raise CfnginBucketAccessDenied(bucket_name=self.cfngin_bucket.name)
if self.cfngin_bucket.exists:
LOGGER.info("cfngin_bucket %s already exists", self.cfngin_bucket.name)
return
if self.context.get_stack("cfngin-bucket"):
LOGGER.verbose(
"found stack for creating cfngin_bucket: cfngin-bucket",
)
self.context.stack_names = ["cfngin-bucket"]
else:
LOGGER.notice("using default blueprint to create cfngin_bucket...")
self.context.config.stacks = [self.default_cfngin_bucket_stack]
# clear cached values that were populated by checking the previous condition
self.context._del_cached_property( # pylint: disable=protected-access
"stacks", "stacks_dict"
)
if self.provider_builder:
self.provider_builder.region = self.context.bucket_region
deploy.Action(
context=self.context,
provider_builder=self.provider_builder,
cancel=self.cancel,
).run(
concurrency=concurrency,
tail=tail,
upload_disabled=True,
)
return
def pre_run(
self,
*,
dump: Union[bool, str] = False, # pylint: disable=unused-argument
outline: bool = False, # pylint: disable=unused-argument
**__kwargs: Any,
) -> None:
"""Do nothing."""
def post_run(
self,
*,
dump: Union[bool, str] = False, # pylint: disable=unused-argument
outline: bool = False, # pylint: disable=unused-argument
**__kwargs: Any,
) -> None:
"""Do nothing."""
|
[
"logging.getLogger"
] |
[((594, 621), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (611, 621), False, 'import logging\n')]
|
import numpy as np
from collections import deque
import gym
import os
import gym.spaces as spaces
import cv2
import datalib.trajectories as trajectories
cv2.ocl.setUseOpenCL(False)
class NoopResetEnv(gym.Wrapper):
def __init__(self, env, noop_max=30):
"""Sample initial states by taking random number of no-ops on reset.
No-op is assumed to be action 0.
"""
gym.Wrapper.__init__(self, env)
self.noop_max = noop_max
self.override_num_noops = None
self.noop_action = 0
assert env.unwrapped.get_action_meanings()[0] == 'NOOP'
def reset(self, **kwargs):
""" Do no-op action for a number of steps in [1, noop_max]."""
self.env.reset(**kwargs)
if self.override_num_noops is not None:
noops = self.override_num_noops
else:
noops = self.unwrapped.np_random.randint(1, self.noop_max + 1) #pylint: disable=E1101
assert noops > 0
obs = None
for _ in range(noops):
obs, _, done, _ = self.env.step(self.noop_action)
if done:
obs = self.env.reset(**kwargs)
return obs
def step(self, ac):
return self.env.step(ac)
class FireResetEnv(gym.Wrapper):
def __init__(self, env):
"""Take action on reset for environments that are fixed until firing."""
gym.Wrapper.__init__(self, env)
assert env.unwrapped.get_action_meanings()[1] == 'FIRE'
assert len(env.unwrapped.get_action_meanings()) >= 3
def reset(self, **kwargs):
self.env.reset(**kwargs)
obs, _, done, _ = self.env.step(1)
if done:
self.env.reset(**kwargs)
obs, _, done, _ = self.env.step(2)
if done:
self.env.reset(**kwargs)
return obs
def step(self, ac):
return self.env.step(ac)
class EpisodicLifeEnv(gym.Wrapper):
def __init__(self, env):
"""Make end-of-life == end-of-episode, but only reset on true game over.
Done by DeepMind for the DQN and co. since it helps value estimation.
"""
gym.Wrapper.__init__(self, env)
self.lives = 0
self.was_real_done = True
def step(self, action):
obs, reward, done, info = self.env.step(action)
self.was_real_done = done
# check current lives, make loss of life terminal,
# then update lives to handle bonus lives
lives = self.env.unwrapped.ale.lives()
if lives < self.lives and lives > 0:
            # for Qbert sometimes we stay in lives == 0 condition for a few frames
# so its important to keep lives > 0, so that we only reset once
# the environment advertises done.
done = True
self.lives = lives
return obs, reward, done, info
def reset(self, **kwargs):
"""Reset only when lives are exhausted.
This way all states are still reachable even though lives are episodic,
and the learner need not know about any of this behind-the-scenes.
"""
if self.was_real_done:
obs = self.env.reset(**kwargs)
else:
# no-op step to advance from terminal/lost life state
obs, _, _, _ = self.env.step(0)
self.lives = self.env.unwrapped.ale.lives()
return obs
class MaxAndSkipEnv(gym.Wrapper):
def __init__(self, env, skip=4):
"""Return only every `skip`-th frame"""
gym.Wrapper.__init__(self, env)
# most recent raw observations (for max pooling across time steps)
self._obs_buffer = np.zeros((2,)+env.observation_space.shape, dtype=np.uint8)
self._skip = skip
def step(self, action):
"""Repeat action, sum reward, and max over last observations."""
total_reward = 0.0
done = None
for i in range(self._skip):
obs, reward, done, info = self.env.step(action)
if i == self._skip - 2: self._obs_buffer[0] = obs
if i == self._skip - 1: self._obs_buffer[1] = obs
total_reward += reward
if done:
break
# Note that the observation on the done=True frame
# doesn't matter
max_frame = self._obs_buffer.max(axis=0)
return max_frame, total_reward, done, info
def reset(self, **kwargs):
return self.env.reset(**kwargs)
class ClipRewardEnv(gym.RewardWrapper):
def __init__(self, env):
gym.RewardWrapper.__init__(self, env)
def reward(self, reward):
"""Bin reward to {+1, 0, -1} by its sign."""
return np.sign(reward)
class SavedClipRewardEnv(gym.RewardWrapper):
def __init__(self, env):
gym.RewardWrapper.__init__(self, env)
self._flat_reward = 0
def reward(self, reward):
"""Bin reward to {+1, 0, -1} by its sign."""
self._flat_reward = reward
return np.sign(reward)
class SavedPositiveClippedRewardEnv(gym.RewardWrapper):
def __init__(self, env):
gym.RewardWrapper.__init__(self, env)
self._flat_reward = 0
def reward(self, reward):
"""Bin reward to {+1, 0} by its sign."""
self._flat_reward = reward
return max(np.sign(reward), 0)
class WarpFrame(gym.ObservationWrapper):
def __init__(self, env, is_monte, is_pong):
"""Warp frames to 84x84 as done in the Nature paper and later work."""
gym.ObservationWrapper.__init__(self, env)
self.width = 84
self.height = 84
self.is_monte = is_monte
self.is_pong = is_pong
self.observation_space = spaces.Box(low=0, high=255,
shape=(self.height, self.width, 1), dtype=np.uint8)
def observation(self, frame):
if self.is_monte:
frame[0:23, ...] = 0
if self.is_pong:
frame[0:23, :] = [144, 72, 17]
frame = cv2.cvtColor(frame, cv2.COLOR_RGB2GRAY)
frame = cv2.resize(frame, (self.width, self.height), interpolation=cv2.INTER_AREA)
return frame[:, :, None]
class PenalizeDying(gym.Wrapper):
"""
{'ale.lives': 6}
"""
def __init__(self, env, penalty):
gym.Wrapper.__init__(self, env)
self.lives = 6
self.penalty = penalty
def reset(self):
ob = self.env.reset()
self.lives = 6
return ob
def step(self, action):
ob, reward, done, info = self.env.step(action)
new_lives = info['ale.lives']
if new_lives < self.lives:
self.lives = new_lives
reward -= self.penalty
# done = True
return ob, reward, done, info
class StepPenalty(gym.Wrapper):
def __init__(self, env, step_penalty):
gym.Wrapper.__init__(self, env)
self.env = env
self.step_penalty = step_penalty
def reset(self, **kwargs):
        return self.env.reset(**kwargs)
def step(self, action):
ob, reward, done, info = self.env.step(action)
reward = reward - self.step_penalty
return ob, reward, done, info
class LimitLength(gym.Wrapper):
def __init__(self, env, k, timeout_penalty):
gym.Wrapper.__init__(self, env)
self.k = k
self.timeout_penalty = timeout_penalty
def reset(self):
# This assumes that reset() will really reset the env.
# If the underlying env tries to be smart about reset
# (e.g. end-of-life), the assumption doesn't hold.
ob = self.env.reset()
self.cnt = 0
return ob
def step(self, action):
ob, r, done, info = self.env.step(action)
self.cnt += 1
if self.cnt == self.k:
done = True
r -= self.timeout_penalty
return ob, r, done, info
class FrameStack(gym.Wrapper):
def __init__(self, env, k):
"""Stack k last frames.
Returns lazy array, which is much more memory efficient.
See Also
--------
baselines.common.atari_wrappers.LazyFrames
"""
gym.Wrapper.__init__(self, env)
self.k = k
self.frames = deque([], maxlen=k)
shp = env.observation_space.shape
self.observation_space = spaces.Box(low=0, high=255, shape=(shp[0], shp[1], shp[2] * k), dtype=np.uint8)
def reset(self):
ob = self.env.reset()
for _ in range(self.k):
self.frames.append(ob)
return self._get_ob()
def step(self, action):
ob, reward, done, info = self.env.step(action)
self.frames.append(ob)
return self._get_ob(), reward, done, info
def _get_ob(self):
assert len(self.frames) == self.k
return LazyFrames(list(self.frames))
class ScaledFloatFrame(gym.ObservationWrapper):
def __init__(self, env):
gym.ObservationWrapper.__init__(self, env)
def observation(self, observation):
# careful! This undoes the memory optimization, use
# with smaller replay buffers only.
return np.array(observation).astype(np.float32) / 255.0
class LazyFrames(object):
def __init__(self, frames):
"""This object ensures that common frames between the observations are only stored once.
It exists purely to optimize memory usage which can be huge for DQN's 1M frames replay
buffers.
This object should only be converted to numpy array before being passed to the model.
You'd not believe how complex the previous solution was."""
self._frames = frames
self._out = None
def _force(self):
if self._out is None:
self._out = np.concatenate(self._frames, axis=2)
self._frames = None
return self._out
def __array__(self, dtype=None):
out = self._force()
if dtype is not None:
out = out.astype(dtype)
return out
def __len__(self):
return len(self._force())
def __getitem__(self, i):
return self._force()[i]
class RandomStartingWrapper(gym.Wrapper):
def __init__(self, env, config):
super(RandomStartingWrapper, self).__init__(env)
self.config = config
self.df = trajectories.load_trajectories_by_score(
trajectory_dir=config['traj_dir'],
max_score_cutoff=config['max_score_cutoff'],
min_score_cutoff=config['min_score_cutoff'],
project_level_gamma=config['gamma'],
clip_rewards=config['clip_rewards'],
frameskip=config['frameskip'],
process_lost_lifes=config['process_lost_lifes'],
)
self.random_state = None
def seed(self, seed=None):
self.env.seed(seed)
if seed is None:
raise ValueError("Seed cannot be None in case of random starting env wrapper")
self.random_state = np.random.RandomState(seed)
def reset(self, **kwargs):
super(RandomStartingWrapper, self).reset(**kwargs)
wrapped_env = self.env.env
if self.random_state is None:
raise ValueError("Uninitialized random state")
idx = self.random_state.randint(1, len(self.df))
# We have to kick out the first frame, because we don't have observation before it
while self.df.iloc[idx].frame == 0:
idx = self.random_state.randint(1, len(self.df))
# print("Will restore state no = {}".format(idx))
traj = self.df.iloc[idx].trajectory
state_idx = self.df.iloc[idx].frame
state_fname = os.path.join(self.config['stat_dir'], "{}/{:07d}.npy".format(traj, state_idx))
state = np.load(state_fname)
img_fname = os.path.join(self.config['img_dir'], "{}/{:07d}.png".format(traj, state_idx - 1))
img = cv2.imread(img_fname, cv2.IMREAD_COLOR)
img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
wrapped_env.restore_full_state(state)
# wrapped_env._get_obs() returns observation before state change, so we have to fix it ourselves
# https://github.com/openai/gym/issues/715
return img
class DoomMyWayHomeActionWrapper(gym.ActionWrapper):
"""
Doom my way home env (see doom.env.doom_my_way_home). has action space:
actions = [0] * 43
actions[13] = 0 # MOVE_FORWARD
actions[14] = 1 # TURN_RIGHT
actions[15] = 0 # TURN_LEFT
    We need to change that to match the scheme I have implemented while gathering data
    (and to match the stochastic policy reinforcement learning formulation).
We want to map e.g.:
7 -> [1, 1, 1]
5 -> [1, 0, 1]
(but ofc the relevant array starts from place 13)
"""
def __init__(self, env):
super(DoomMyWayHomeActionWrapper, self).__init__(env)
self.action_space = gym.spaces.Discrete(8)
def action(self, action):
a = action
move_fwd = a % 2
a = a // 2
turn_right = a % 2
a = a // 2
turn_left = a % 2
a = a // 2
assert a == 0
out = [0] * 43
out[0] = move_fwd
out[1] = turn_right
out[2] = turn_left
return out
def make_state_restoring_atari(env_id, config):
env = gym.make(env_id)
assert 'NoFrameskip' in env.spec.id
env = RandomStartingWrapper(env, config)
env = MaxAndSkipEnv(env, skip=4)
return env
def make_atari(env_id):
env = gym.make(env_id)
assert 'NoFrameskip' in env.spec.id
env = NoopResetEnv(env, noop_max=30)
env = MaxAndSkipEnv(env, skip=4)
return env
def wrap_deepmind(env, episode_life=True, clip_rewards=True, frame_stack=False, scale=False, is_monte=False, is_pong=False, save_original_reward=False, only_positive_rewards=False):
"""Configure environment for DeepMind-style Atari.
"""
if episode_life:
env = EpisodicLifeEnv(env)
if 'FIRE' in env.unwrapped.get_action_meanings():
env = FireResetEnv(env)
env = WarpFrame(env, is_monte, is_pong)
if scale:
env = ScaledFloatFrame(env)
if only_positive_rewards:
env = SavedPositiveClippedRewardEnv(env)
elif clip_rewards:
if save_original_reward:
env = SavedClipRewardEnv(env)
else:
env = ClipRewardEnv(env)
if frame_stack:
env = FrameStack(env, 4)
return env
def wrap_doom_deepmind_like(env, clip_rewards=True, frame_stack=False, scale=False, save_original_reward=False):
env = WarpFrame(env, is_monte=False, is_pong=False)
env = DoomMyWayHomeActionWrapper(env)
if scale:
env = ScaledFloatFrame(env)
if clip_rewards:
if save_original_reward:
env = SavedClipRewardEnv(env)
else:
env = ClipRewardEnv(env)
if frame_stack:
env = FrameStack(env, 4)
return env
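
# A minimal usage sketch (not part of the original module); the env id is only
# an example:
#   env = wrap_deepmind(make_atari("PongNoFrameskip-v4"),
#                       frame_stack=True, is_pong=True)
#   ob = env.reset()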
|
[
"gym.Wrapper.__init__",
"datalib.trajectories.load_trajectories_by_score",
"numpy.load",
"gym.make",
"gym.RewardWrapper.__init__",
"numpy.concatenate",
"cv2.cvtColor",
"collections.deque",
"numpy.zeros",
"cv2.ocl.setUseOpenCL",
"numpy.random.RandomState",
"gym.ObservationWrapper.__init__",
"gym.spaces.Discrete",
"cv2.imread",
"numpy.array",
"gym.spaces.Box",
"numpy.sign",
"cv2.resize"
] |
[((155, 182), 'cv2.ocl.setUseOpenCL', 'cv2.ocl.setUseOpenCL', (['(False)'], {}), '(False)\n', (175, 182), False, 'import cv2\n'), ((13187, 13203), 'gym.make', 'gym.make', (['env_id'], {}), '(env_id)\n', (13195, 13203), False, 'import gym\n'), ((13377, 13393), 'gym.make', 'gym.make', (['env_id'], {}), '(env_id)\n', (13385, 13393), False, 'import gym\n'), ((397, 428), 'gym.Wrapper.__init__', 'gym.Wrapper.__init__', (['self', 'env'], {}), '(self, env)\n', (417, 428), False, 'import gym\n'), ((1368, 1399), 'gym.Wrapper.__init__', 'gym.Wrapper.__init__', (['self', 'env'], {}), '(self, env)\n', (1388, 1399), False, 'import gym\n'), ((2106, 2137), 'gym.Wrapper.__init__', 'gym.Wrapper.__init__', (['self', 'env'], {}), '(self, env)\n', (2126, 2137), False, 'import gym\n'), ((3456, 3487), 'gym.Wrapper.__init__', 'gym.Wrapper.__init__', (['self', 'env'], {}), '(self, env)\n', (3476, 3487), False, 'import gym\n'), ((3590, 3650), 'numpy.zeros', 'np.zeros', (['((2,) + env.observation_space.shape)'], {'dtype': 'np.uint8'}), '((2,) + env.observation_space.shape, dtype=np.uint8)\n', (3598, 3650), True, 'import numpy as np\n'), ((4517, 4554), 'gym.RewardWrapper.__init__', 'gym.RewardWrapper.__init__', (['self', 'env'], {}), '(self, env)\n', (4543, 4554), False, 'import gym\n'), ((4654, 4669), 'numpy.sign', 'np.sign', (['reward'], {}), '(reward)\n', (4661, 4669), True, 'import numpy as np\n'), ((4753, 4790), 'gym.RewardWrapper.__init__', 'gym.RewardWrapper.__init__', (['self', 'env'], {}), '(self, env)\n', (4779, 4790), False, 'import gym\n'), ((4955, 4970), 'numpy.sign', 'np.sign', (['reward'], {}), '(reward)\n', (4962, 4970), True, 'import numpy as np\n'), ((5065, 5102), 'gym.RewardWrapper.__init__', 'gym.RewardWrapper.__init__', (['self', 'env'], {}), '(self, env)\n', (5091, 5102), False, 'import gym\n'), ((5464, 5506), 'gym.ObservationWrapper.__init__', 'gym.ObservationWrapper.__init__', (['self', 'env'], {}), '(self, env)\n', (5495, 5506), False, 'import gym\n'), ((5653, 5732), 'gym.spaces.Box', 'spaces.Box', ([], {'low': '(0)', 'high': '(255)', 'shape': '(self.height, self.width, 1)', 'dtype': 'np.uint8'}), '(low=0, high=255, shape=(self.height, self.width, 1), dtype=np.uint8)\n', (5663, 5732), True, 'import gym.spaces as spaces\n'), ((5923, 5962), 'cv2.cvtColor', 'cv2.cvtColor', (['frame', 'cv2.COLOR_RGB2GRAY'], {}), '(frame, cv2.COLOR_RGB2GRAY)\n', (5935, 5962), False, 'import cv2\n'), ((5979, 6053), 'cv2.resize', 'cv2.resize', (['frame', '(self.width, self.height)'], {'interpolation': 'cv2.INTER_AREA'}), '(frame, (self.width, self.height), interpolation=cv2.INTER_AREA)\n', (5989, 6053), False, 'import cv2\n'), ((6206, 6237), 'gym.Wrapper.__init__', 'gym.Wrapper.__init__', (['self', 'env'], {}), '(self, env)\n', (6226, 6237), False, 'import gym\n'), ((6764, 6795), 'gym.Wrapper.__init__', 'gym.Wrapper.__init__', (['self', 'env'], {}), '(self, env)\n', (6784, 6795), False, 'import gym\n'), ((7183, 7214), 'gym.Wrapper.__init__', 'gym.Wrapper.__init__', (['self', 'env'], {}), '(self, env)\n', (7203, 7214), False, 'import gym\n'), ((8052, 8083), 'gym.Wrapper.__init__', 'gym.Wrapper.__init__', (['self', 'env'], {}), '(self, env)\n', (8072, 8083), False, 'import gym\n'), ((8125, 8144), 'collections.deque', 'deque', (['[]'], {'maxlen': 'k'}), '([], maxlen=k)\n', (8130, 8144), False, 'from collections import deque\n'), ((8220, 8299), 'gym.spaces.Box', 'spaces.Box', ([], {'low': '(0)', 'high': '(255)', 'shape': '(shp[0], shp[1], shp[2] * k)', 'dtype': 'np.uint8'}), '(low=0, high=255, shape=(shp[0], shp[1], shp[2] * k), dtype=np.uint8)\n', (8230, 8299), True, 'import gym.spaces as spaces\n'), ((8811, 8853), 'gym.ObservationWrapper.__init__', 'gym.ObservationWrapper.__init__', (['self', 'env'], {}), '(self, env)\n', (8842, 8853), False, 'import gym\n'), ((10180, 10516), 'datalib.trajectories.load_trajectories_by_score', 'trajectories.load_trajectories_by_score', ([], {'trajectory_dir': "config['traj_dir']", 'max_score_cutoff': "config['max_score_cutoff']", 'min_score_cutoff': "config['min_score_cutoff']", 'project_level_gamma': "config['gamma']", 'clip_rewards': "config['clip_rewards']", 'frameskip': "config['frameskip']", 'process_lost_lifes': "config['process_lost_lifes']"}), "(trajectory_dir=config['traj_dir'],\n max_score_cutoff=config['max_score_cutoff'], min_score_cutoff=config[\n 'min_score_cutoff'], project_level_gamma=config['gamma'], clip_rewards=\n config['clip_rewards'], frameskip=config['frameskip'],\n process_lost_lifes=config['process_lost_lifes'])\n", (10219, 10516), True, 'import datalib.trajectories as trajectories\n'), ((10834, 10861), 'numpy.random.RandomState', 'np.random.RandomState', (['seed'], {}), '(seed)\n', (10855, 10861), True, 'import numpy as np\n'), ((11607, 11627), 'numpy.load', 'np.load', (['state_fname'], {}), '(state_fname)\n', (11614, 11627), True, 'import numpy as np\n'), ((11745, 11784), 'cv2.imread', 'cv2.imread', (['img_fname', 'cv2.IMREAD_COLOR'], {}), '(img_fname, cv2.IMREAD_COLOR)\n', (11755, 11784), False, 'import cv2\n'), ((11799, 11835), 'cv2.cvtColor', 'cv2.cvtColor', (['img', 'cv2.COLOR_BGR2RGB'], {}), '(img, cv2.COLOR_BGR2RGB)\n', (11811, 11835), False, 'import cv2\n'), ((12772, 12794), 'gym.spaces.Discrete', 'gym.spaces.Discrete', (['(8)'], {}), '(8)\n', (12791, 12794), False, 'import gym\n'), ((5267, 5282), 'numpy.sign', 'np.sign', (['reward'], {}), '(reward)\n', (5274, 5282), True, 'import numpy as np\n'), ((9627, 9663), 'numpy.concatenate', 'np.concatenate', (['self._frames'], {'axis': '(2)'}), '(self._frames, axis=2)\n', (9641, 9663), True, 'import numpy as np\n'), ((9014, 9035), 'numpy.array', 'np.array', (['observation'], {}), '(observation)\n', (9022, 9035), True, 'import numpy as np\n')]
|
import os
import sys
import time
import json
import h5py
import argparse
import numpy as np
from tqdm import tqdm
from spectra_utils import load_radionuclide_nndc, generate_spectrum, plot_spectrum
def load_nndc_tables(nndc_dir, radionuclides):
nndc_tables = {}
for rn in radionuclides:
keV, intensity = load_radionuclide_nndc(nndc_dir, rn)
nndc_tables[rn] = {"keV": keV, "intensity": intensity}
return nndc_tables
def generate_templates(config, nndc_tables, outdir, savefigs):
templates = {}
for rn_name, rn_values in tqdm(nndc_tables.items()):
#print(f"building template for {rn_name}")
keV, intensity, _, _ = generate_spectrum(rn_values, config)
templates[rn_name] = {"keV": keV, "intensity": intensity}
if savefigs:
plot_spectrum(keV, intensity, rn_name, outdir)
return templates
def save_templates(dettype, templates, outfile):
with h5py.File(outfile, 'a') as h5f:
try:
h5f.create_group(dettype)
except: # does not create detector group if it already exists
pass
for k, v in templates.items():
try:
h5f[dettype].create_group(k)
except: # does not create radionuclide group if it already exists
pass
for k2, v2 in v.items():
try:
h5f[dettype][k].create_dataset(k2, data=v2)
except: # overwrites existing data if data already exists
data = h5f[dettype][k][k2]
data[...]= v2
def main():
start = time.time()
parser = argparse.ArgumentParser()
parser.add_argument("-rl", "--rnlistfile", help="file containing list of radionuclides to use", default="ANSI_N42.34.json")
parser.add_argument("-cf", "--configfile", help="configuration file for generating data", default="config_data.json")
parser.add_argument("-out", "--outfile", help="output file for data", default="data/templates.h5")
parser.add_argument("-det", "--dettype", help="detector type", default="HPGe,NaI,CZT")
#parser.add_argument("-det", "--dettype", help="detector type", default="HPGe")
parser.add_argument("-nndc", "--nndctables", help="location of NNDC tables data", default="nuclides-nndc")
parser.add_argument("-sf", "--savefigs", help="saves plots of templates", action="store_true")
#parser.add_argument("-n", "--normalize", help="normalize templates by RMS", action="store_true")
arg = parser.parse_args()
outdir = os.path.dirname(arg.outfile)
outfile = arg.outfile
# load configuration parameters
with open(arg.configfile, 'r') as cfile:
config = json.load(cfile)
# make output dir if it does not exist
os.makedirs(outdir, exist_ok=True)
# load NNDC tables for radionuclides
nndc_tables = load_nndc_tables(arg.nndctables, config["RADIONUCLIDES"])
for dettype in arg.dettype.split(','):
dettype = dettype.upper()
print(f'Generating templates for detector {dettype}')
if arg.savefigs:
os.makedirs(os.path.join(outdir, dettype), exist_ok=True)
templates = generate_templates(config["DETECTORS"][dettype], nndc_tables, os.path.join(outdir, dettype), arg.savefigs)
save_templates(dettype, templates, outfile)
print(f'Script completed in {time.time()-start:.2f} secs')
return 0
if __name__ == '__main__':
sys.exit(main())
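
# Illustrative read-back sketch (not part of the original script); the detector
# and radionuclide keys are hypothetical examples:
#   with h5py.File("data/templates.h5", "r") as h5f:
#       keV = h5f["HPGE"]["Am-241"]["keV"][:]
#       intensity = h5f["HPGE"]["Am-241"]["intensity"][:]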
|
[
"h5py.File",
"json.load",
"os.makedirs",
"argparse.ArgumentParser",
"os.path.dirname",
"spectra_utils.load_radionuclide_nndc",
"time.time",
"spectra_utils.generate_spectrum",
"os.path.join",
"spectra_utils.plot_spectrum"
] |
[((1631, 1642), 'time.time', 'time.time', ([], {}), '()\n', (1640, 1642), False, 'import time\n'), ((1657, 1682), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (1680, 1682), False, 'import argparse\n'), ((2568, 2596), 'os.path.dirname', 'os.path.dirname', (['arg.outfile'], {}), '(arg.outfile)\n', (2583, 2596), False, 'import os\n'), ((2787, 2821), 'os.makedirs', 'os.makedirs', (['outdir'], {'exist_ok': '(True)'}), '(outdir, exist_ok=True)\n', (2798, 2821), False, 'import os\n'), ((324, 360), 'spectra_utils.load_radionuclide_nndc', 'load_radionuclide_nndc', (['nndc_dir', 'rn'], {}), '(nndc_dir, rn)\n', (346, 360), False, 'from spectra_utils import load_radionuclide_nndc, generate_spectrum, plot_spectrum\n'), ((684, 720), 'spectra_utils.generate_spectrum', 'generate_spectrum', (['rn_values', 'config'], {}), '(rn_values, config)\n', (701, 720), False, 'from spectra_utils import load_radionuclide_nndc, generate_spectrum, plot_spectrum\n'), ((956, 979), 'h5py.File', 'h5py.File', (['outfile', '"""a"""'], {}), "(outfile, 'a')\n", (965, 979), False, 'import h5py\n'), ((2722, 2738), 'json.load', 'json.load', (['cfile'], {}), '(cfile)\n', (2731, 2738), False, 'import json\n'), ((820, 866), 'spectra_utils.plot_spectrum', 'plot_spectrum', (['keV', 'intensity', 'rn_name', 'outdir'], {}), '(keV, intensity, rn_name, outdir)\n', (833, 866), False, 'from spectra_utils import load_radionuclide_nndc, generate_spectrum, plot_spectrum\n'), ((3257, 3286), 'os.path.join', 'os.path.join', (['outdir', 'dettype'], {}), '(outdir, dettype)\n', (3269, 3286), False, 'import os\n'), ((3129, 3158), 'os.path.join', 'os.path.join', (['outdir', 'dettype'], {}), '(outdir, dettype)\n', (3141, 3158), False, 'import os\n'), ((3388, 3399), 'time.time', 'time.time', ([], {}), '()\n', (3397, 3399), False, 'import time\n')]
|
from django.template.defaultfilters import title # noqa
from django.template.loader import render_to_string # noqa
from horizon.utils import filters
def stack_info(stack, stack_image):
stack.stack_status_desc = title(
filters.replace_underscores(stack.stack_status))
if stack.stack_status_reason:
stack.stack_status_reason = title(
filters.replace_underscores(stack.stack_status_reason)
)
context = {}
context['stack'] = stack
context['stack_image'] = stack_image
return render_to_string('project/stacks/_stack_info.html',
context)
def resource_info(resource):
resource.resource_status_desc = title(
filters.replace_underscores(resource.resource_status)
)
if resource.resource_status_reason:
resource.resource_status_reason = title(
filters.replace_underscores(resource.resource_status_reason)
)
context = {}
context['resource'] = resource
return render_to_string('project/stacks/_resource_info.html',
context)
|
[
"horizon.utils.filters.replace_underscores",
"django.template.loader.render_to_string"
] |
[((536, 596), 'django.template.loader.render_to_string', 'render_to_string', (['"""project/stacks/_stack_info.html"""', 'context'], {}), "('project/stacks/_stack_info.html', context)\n", (552, 596), False, 'from django.template.loader import render_to_string\n'), ((1002, 1065), 'django.template.loader.render_to_string', 'render_to_string', (['"""project/stacks/_resource_info.html"""', 'context'], {}), "('project/stacks/_resource_info.html', context)\n", (1018, 1065), False, 'from django.template.loader import render_to_string\n'), ((235, 282), 'horizon.utils.filters.replace_underscores', 'filters.replace_underscores', (['stack.stack_status'], {}), '(stack.stack_status)\n', (262, 282), False, 'from horizon.utils import filters\n'), ((707, 760), 'horizon.utils.filters.replace_underscores', 'filters.replace_underscores', (['resource.resource_status'], {}), '(resource.resource_status)\n', (734, 760), False, 'from horizon.utils import filters\n'), ((373, 427), 'horizon.utils.filters.replace_underscores', 'filters.replace_underscores', (['stack.stack_status_reason'], {}), '(stack.stack_status_reason)\n', (400, 427), False, 'from horizon.utils import filters\n'), ((868, 928), 'horizon.utils.filters.replace_underscores', 'filters.replace_underscores', (['resource.resource_status_reason'], {}), '(resource.resource_status_reason)\n', (895, 928), False, 'from horizon.utils import filters\n')]
|
import os
import io
from .utils import *
from .exceptions import *
class JsonClient:
"""
JsonClient object constructor.
    @param token:
The API Token to be used for authentication with the SendSecure service
@param user_id:
The user id of the current user
@param enterprise_account:
The SendSecure enterprise account
@param endpoint:
The URL to the SendSecure service ("https://portal.xmedius.com" will be used by default if empty)
@param locale:
The locale in which the server errors will be returned ("en" will be used by default if empty)
"""
def __init__(self, options):
self.locale = options.get('locale', 'en')
self.enterprise_account = options.get('enterprise_account')
self.endpoint = options.get('endpoint', 'https://portal.xmedius.com')
self.sendsecure_endpoint = None
self.token = str(options.get('token'))
self.user_id = options.get('user_id')
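
    # Illustrative construction (values are placeholders):
    #   client = JsonClient({'token': '...', 'user_id': '123',
    #                        'enterprise_account': 'acme',
    #                        'endpoint': 'https://portal.xmedius.com'})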
"""
Pre-creates a SafeBox on the SendSecure system and initializes the Safebox object accordingly.
@param user_email:
The email address of a SendSecure user of the current enterprise account
@return: The json containing the guid, public encryption key and upload url of the initialize SafeBox
"""
def new_safebox(self, user_email):
params = {'user_email': user_email}
url = urljoin([self._get_sendsecure_endpoint(), 'api/v2/safeboxes/new.json'], params)
return self._do_get(url, 'application/json')
"""
Pre-creates a document on the SendSecure system and initializes the Safebox object accordingly.
@param safebox_guid:
The guid of the existing safebox
@param file_params:
The full json expected by the server
@return: The json containing the temporary document GUID and the upload URL
"""
def new_file(self, safebox_guid, file_params):
url = urljoin([self._get_sendsecure_endpoint(), 'api/v2/safeboxes/', safebox_guid, 'uploads.json'])
return self._do_post(url, 'application/json', file_params, 'application/json')
"""
Uploads the specified file as an Attachment of the specified SafeBox.
@param upload_url:
        The url returned by the initializeSafeBox. Can be used multiple times
@param source:
The path of the file to upload or the stream
@param content_type:
The MIME content type of the uploaded file
@param filename:
The file name
@param filesize:
The filesize
@return: The json containing the guid of the uploaded file
"""
def upload_file(self, upload_url, source, content_type='application/octet-stream', filename=None, filesize=None):
status_code = None
status_line = None
response_body = None
if type(source) == str:
(status_code, status_line, response_body) = http_upload_filepath(str(upload_url), source, content_type, filename)
elif self._is_file(source):
upload_filename = filename or source.name.split('/')[-1]
upload_filesize = filesize or (os.path.getsize(source.name) - source.tell())
(status_code, status_line, response_body) = http_upload_raw_stream(str(upload_url), source, content_type, upload_filename, upload_filesize)
else:
(status_code, status_line, response_body) = http_upload_raw_stream(str(upload_url), source, content_type, filename, filesize)
if status_code >= 400:
raise SendSecureException(status_code, status_line, response_body)
return response_body
"""
Finalizes the creation (commit) of the SafeBox on the SendSecure system. This actually "Sends" the SafeBox with
all content and contact info previously specified.
@param safebox_json:
The full json expected by the server
@return: The json containing the guid, preview url and encryption key of the created SafeBox
"""
def commit_safebox(self, safebox_json):
url = urljoin([self._get_sendsecure_endpoint(), 'api/v2/safeboxes.json'])
return self._do_post(url, 'application/json', safebox_json, 'application/json')
"""
Retrieves all available security profiles of the enterprise account for a specific user.
@param user_email:
The email address of a SendSecure user of the current enterprise account
@return: The json containing a list of Security Profiles
"""
def get_security_profiles(self, user_email):
params = {'user_email': user_email}
url = urljoin([self._get_sendsecure_endpoint(), 'api/v2/enterprises', self.enterprise_account, 'security_profiles.json'], params)
return self._do_get(url, 'application/json')
"""
Get the Enterprise Settings of the current enterprise account.
@return: The json containing the enterprise settings
"""
def get_enterprise_settings(self):
url = urljoin([self._get_sendsecure_endpoint(), 'api/v2/enterprises', self.enterprise_account, 'settings.json'])
return self._do_get(url, 'application/json')
"""
Get the User Settings of the current user account
@return: The json containing the user settings
"""
def get_user_settings(self):
url = urljoin([self._get_sendsecure_endpoint(), 'api/v2/enterprises', self.enterprise_account, 'users', self.user_id, 'settings.json'])
return self._do_get(url, 'application/json')
"""
Retrieves all favorites for the current user account.
@return: The json containing a list of Favorite
"""
def get_favorites(self):
url = urljoin([self._get_sendsecure_endpoint(), 'api/v2/enterprises', self.enterprise_account, 'users', self.user_id, 'favorites.json'])
return self._do_get(url, 'application/json')
"""
Create a new favorite for the current user account.
@param favorite_json:
The full json expected by the server
    @return: The json containing all the information of the created Favorite
"""
def create_favorite(self, favorite_json):
url = urljoin([self._get_sendsecure_endpoint(), 'api/v2/enterprises', self.enterprise_account, 'users', self.user_id, 'favorites.json'])
return self._do_post(url, 'application/json', favorite_json, 'application/json')
"""
Update an existing favorite for the current user account.
    @param favorite_id:
        The id of the favorite to be updated
    @param favorite_json:
        The full json expected by the server
    @return: The json containing all the information of the updated Favorite
"""
def update_favorite(self, favorite_id, favorite_json):
url = urljoin([self._get_sendsecure_endpoint(), 'api/v2/enterprises', self.enterprise_account, 'users', self.user_id, 'favorites', str(favorite_id) + '.json'])
return self._do_patch(url, 'application/json', favorite_json, 'application/json')
"""
Delete an existing favorite for the current user account.
@param favorite_id:
The id of the Favorite to be deleted
@return: Nothing
"""
def delete_favorite(self, favorite_id):
url = urljoin([self._get_sendsecure_endpoint(), 'api/v2/enterprises', self.enterprise_account, 'users', self.user_id, 'favorites', str(favorite_id) + '.json'])
return self._do_delete(url, 'application/json')
"""
Create a new participant for a specific open safebox of the current user account.
@param safebox_guid:
The guid of the safebox to be updated
@param participant_json:
The full json expected by the server
    @return: The json containing all the information of the created Participant
"""
def create_participant(self, safebox_guid, participant_json):
url = urljoin([self._get_sendsecure_endpoint(), 'api/v2/safeboxes', safebox_guid, 'participants.json'])
return self._do_post(url, 'application/json', participant_json, 'application/json')
"""
Update an existing participant for a specific open safebox of the current user account.
@param safebox_guid:
The guid of the safebox to be updated
@param participant_id:
The id of the participant to be updated
    @param participant_json:
The full json expected by the server
    @return: The json containing all the information of the updated Participant
"""
def update_participant(self, safebox_guid, participant_id, participant_json):
url = urljoin([self._get_sendsecure_endpoint(), 'api/v2/safeboxes', safebox_guid, 'participants', participant_id + '.json'])
return self._do_patch(url, 'application/json', participant_json, 'application/json')
"""
Search the recipients for a safebox
@param term:
        A search term
@return: The json containing the search result
"""
def search_recipient(self, term):
url = urljoin([self._get_sendsecure_endpoint(), 'api/v2/recipients/autocomplete?term=' + term])
return self._do_get(url, 'application/json')
"""
Reply to a specific safebox associated to the current user's account.
@param safebox_guid:
The guid of the safebox to be updated
@param reply_params:
The full json expected by the server
@return: The json containing the request result
"""
def reply(self, safebox_guid, reply_params):
url = urljoin([self._get_sendsecure_endpoint(), 'api/v2/safeboxes', safebox_guid, '/messages.json'])
return self._do_post(url, 'application/json', reply_params, 'application/json')
"""
Manually add time to expiration date for a specific open safebox of the current user account.
@param safebox_guid:
The guid of the safebox to be updated
@param add_time_json:
The full json expected by the server
@return: The json containing the new expiration date
"""
def add_time(self, safebox_guid, add_time_json):
url = urljoin([self._get_sendsecure_endpoint(), 'api/v2/safeboxes', safebox_guid, 'add_time.json'])
return self._do_patch(url, 'application/json', add_time_json, 'application/json')
"""
Manually close an existing safebox for the current user account.
@param safebox_guid:
The guid of the safebox to be closed
@return: The json containing the request result
"""
def close_safebox(self, safebox_guid):
url = urljoin([self._get_sendsecure_endpoint(), 'api/v2/safeboxes', safebox_guid, 'close.json'])
return self._do_patch(url, 'application/json', '', 'application/json')
"""
Manually delete the content of a closed safebox for the current user account.
@param safebox_guid:
The guid of the safebox
@return: The json containing the request result
"""
def delete_safebox_content(self, safebox_guid):
url = urljoin([self._get_sendsecure_endpoint(), 'api/v2/safeboxes', safebox_guid, 'delete_content.json'])
return self._do_patch(url, 'application/json', '', 'application/json')
"""
Manually mark as read an existing safebox for the current user account.
@param safebox_guid:
The guid of the safebox
@return: The json containing the request result
"""
def mark_as_read(self, safebox_guid):
url = urljoin([self._get_sendsecure_endpoint(), 'api/v2/safeboxes', safebox_guid, 'mark_as_read.json'])
return self._do_patch(url, 'application/json', '', 'application/json')
"""
Manually mark as unread an existing safebox for the current user account.
@param safebox_guid:
The guid of the safebox
@return: The json containing the request result
"""
def mark_as_unread(self, safebox_guid):
url = urljoin([self._get_sendsecure_endpoint(), 'api/v2/safeboxes', safebox_guid, 'mark_as_unread.json'])
return self._do_patch(url, 'application/json', '', 'application/json')
"""
Manually mark as read an existing message.
@param safebox_guid:
The guid of the safebox
@param message_id:
The id of the message to be marked as read
@return: The json containing the request result
"""
def mark_as_read_message(self, safebox_guid, message_id):
url = urljoin([self._get_sendsecure_endpoint(), 'api/v2/safeboxes', safebox_guid, 'messages', str(message_id), 'read'])
return self._do_patch(url, 'application/json', '', 'application/json')
"""
Manually mark as unread an existing message.
@param safebox_guid:
The guid of the safebox
@param message_id:
The id of the message to be marked as unread
@return: The json containing the request result
"""
def mark_as_unread_message(self, safebox_guid, message_id):
url = urljoin([self._get_sendsecure_endpoint(), 'api/v2/safeboxes', safebox_guid, 'messages', str(message_id), 'unread'])
return self._do_patch(url, 'application/json', '', 'application/json')
"""
Retrieve a specific file url of an existing safebox for the current user account.
@param safebox_guid:
The guid of the safebox
@param document_guid:
The guid of the file
@param user_email:
The current user email
@return: The json containing the file url on the fileserver
"""
def get_file_url(self, safebox_guid, document_guid, user_email):
params = {'user_email': user_email}
url = urljoin([self._get_sendsecure_endpoint(), 'api/v2/safeboxes', safebox_guid, 'documents', document_guid, 'url.json'], params)
return self._do_get(url, 'application/json')
"""
Retrieve the url of the audit record of an existing safebox for the current user account.
@param safebox_guid:
The guid of the safebox
@return: The json containing the url
"""
def get_audit_record_url(self, safebox_guid):
url = urljoin([self._get_sendsecure_endpoint(), 'api/v2/safeboxes', safebox_guid, 'audit_record_pdf.json'])
return self._do_get(url, 'application/json')
"""
Retrieve the audit record of an existing safebox for the current user account.
@param url:
The url of the safebox audit record
@return: The pdf stream
"""
def get_audit_record_pdf(self, url):
return self._get(url, 'application/pdf')
"""
Retrieve a filtered list of safeboxes for the current user account.
@param url:
The complete search url
@param search_params:
The optional filtering parameters
@return: The json containing the count, previous page url, the next page url and a list of Safebox
"""
def get_safeboxes(self, url, search_params):
if url is None:
url = urljoin([self._get_sendsecure_endpoint(), 'api/v2/safeboxes.json'], search_params)
return self._do_get(url, 'application/json')
"""
Retrieve all info of an existing safebox for the current user account.
@param safebox_guid:
        The guid of the safebox
@param sections:
The string containing the list of sections to be retrieved
    @return: The json containing all the information on the specified sections.
             If no sections are specified, it will return all safebox info.
"""
def get_safebox_info(self, safebox_guid, sections):
params = ''
if sections:
params = {'sections': sections}
url = urljoin([self._get_sendsecure_endpoint(), 'api/v2/safeboxes', safebox_guid + '.json'], params)
return self._do_get(url, 'application/json')
"""
Retrieve all participants info of an existing safebox for the current user account.
@param safebox_guid:
The guid of the safebox
@return: The json containing the list of participants
"""
def get_safebox_participants(self, safebox_guid):
url = urljoin([self._get_sendsecure_endpoint(), 'api/v2/safeboxes', safebox_guid, 'participants.json'])
return self._do_get(url, 'application/json')
"""
Retrieve all messages info of an existing safebox for the current user account.
@param safebox_guid:
The guid of the safebox
@return: The json containing the list of messages
"""
def get_safebox_messages(self, safebox_guid):
url = urljoin([self._get_sendsecure_endpoint(), 'api/v2/safeboxes', safebox_guid, 'messages.json'])
return self._do_get(url, 'application/json')
"""
Retrieve all security options info of an existing safebox for the current user account.
@param safebox_guid:
The guid of the safebox
@return: The json containing the Security Options
"""
def get_safebox_security_options(self, safebox_guid):
url = urljoin([self._get_sendsecure_endpoint(), 'api/v2/safeboxes', safebox_guid, 'security_options.json'])
return self._do_get(url, 'application/json')
"""
Retrieve all download activity info of an existing safebox for the current user account.
@param safebox_guid:
The guid of the safebox
@return: The json containing the Download Activity
"""
def get_safebox_download_activity(self, safebox_guid):
url = urljoin([self._get_sendsecure_endpoint(), 'api/v2/safeboxes', safebox_guid, 'download_activity.json'])
return self._do_get(url, 'application/json')
"""
Retrieve all event_history info of an existing safebox for the current user account.
@param safebox_guid:
The guid of the safebox
@return: The json containing a list of EventHistory
"""
def get_safebox_event_history(self, safebox_guid):
url = urljoin([self._get_sendsecure_endpoint(), 'api/v2/safeboxes', safebox_guid, 'event_history.json'])
return self._do_get(url, 'application/json')
"""
Archive a specific safebox
@param safebox_guid:
The guid of the safebox
@param user_email:
The current user email
@return: The json containing the request result
"""
def archive_safebox(self, safebox_guid, user_email):
url = urljoin([self._get_sendsecure_endpoint(), 'api/v2/safeboxes', safebox_guid, '/tag/archive'])
return self._do_post(url, 'application/json', user_email, 'application/json')
"""
Remove the tag "archive" from the safebox
@param safebox_guid:
The guid of the safebox
@param user_email:
The current user email
@return: The json containing the request result
"""
def unarchive_safebox(self, safebox_guid, user_email):
url = urljoin([self._get_sendsecure_endpoint(), 'api/v2/safeboxes', safebox_guid, '/untag/archive'])
return self._do_post(url, 'application/json', user_email, 'application/json')
"""
Call to unfollow the SafeBox. By default, all new Safeboxes are "followed"
    @param safebox_guid:
        The guid of the safebox
@return: An object containing the request result
"""
def unfollow(self, safebox_guid):
url = urljoin([self._get_sendsecure_endpoint(), 'api/v2/safeboxes', safebox_guid, '/unfollow'])
return self._do_patch(url, 'application/json', '', 'application/json')
"""
Call to follow the SafeBox (opposite of the unfollow call).
    @param safebox_guid:
        The guid of the safebox
@return: An object containing the request result
"""
def follow(self, safebox_guid):
url = urljoin([self._get_sendsecure_endpoint(), 'api/v2/safeboxes', safebox_guid, '/follow'])
return self._do_patch(url, 'application/json', '', 'application/json')
"""
Call to get the list of all the localized messages of a consent group.
@param consent_group_id:
The id of the consent group
@return: The json containing the list of all the localized messages
"""
def get_consent_group_messages(self, consent_group_id):
url = urljoin([self._get_sendsecure_endpoint(), 'api/v2/enterprises', self.enterprise_account, '/consent_message_groups', str(consent_group_id)])
return self._do_get(url, 'application/json')
def _get_sendsecure_endpoint(self):
if not self.sendsecure_endpoint:
url = urljoin([self.endpoint, 'services', self.enterprise_account, 'sendsecure/server/url'])
new_endpoint = self._get(url, 'text/plain')
self.sendsecure_endpoint = new_endpoint
return self.sendsecure_endpoint
def _do_get(self, url, accept):
params = {'locale': self.locale}
new_url = urljoin([url], params)
return self._get(new_url, accept)
def _get(self, url, accept):
(status_code, status_line, response_body) = http_get(url, accept, self.token)
if status_code >= 400:
raise SendSecureException(status_code, status_line, response_body)
return response_body
def _do_post(self, url, content_type, body, accept):
params = {'locale': self.locale}
(status_code, status_line, response_body) = http_post(urljoin([url], params), content_type, body, accept, self.token)
if status_code >= 400:
raise SendSecureException(status_code, status_line, response_body)
return response_body
def _do_patch(self, url, content_type, body, accept):
params = {'locale': self.locale}
(status_code, status_line, response_body) = http_patch(urljoin([url], params), content_type, body, accept, self.token)
if status_code >= 400:
raise SendSecureException(status_code, status_line, response_body)
return response_body
def _do_delete(self, url, accept):
params = {'locale': self.locale}
(status_code, status_line, response_body) = http_delete(urljoin([url], params), accept, self.token)
if status_code >= 400:
raise SendSecureException(status_code, status_line, response_body)
return response_body
def _is_file(self, obj):
return isinstance(obj, (io.TextIOBase, io.BufferedIOBase, io.RawIOBase, io.IOBase))
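# --- Hedged usage sketch (not part of the original client) ---
# The enclosing class name and its constructor arguments live earlier in this
# file; "JsonClient" and the argument names below are assumptions, so the
# sketch is left commented out.
# client = JsonClient(api_token, user_id, enterprise_account, endpoint)
# client.mark_as_read('safebox-guid')
# unread = client.get_safeboxes(None, {'status': 'unread'})
# audit_url = client.get_audit_record_url('safebox-guid')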
|
[
"os.path.getsize"
] |
[((3213, 3241), 'os.path.getsize', 'os.path.getsize', (['source.name'], {}), '(source.name)\n', (3228, 3241), False, 'import os\n')]
|
#!/usr/bin/env python3
# coding=utf-8
"""
@author: guoyanfeng
@software: PyCharm
@time: 18-7-1 10:08 AM
"""
import atexit
from typing import Dict
import requests as sync_requests
from requests.exceptions import ConnectTimeout, ConnectionError, HTTPError, RequestException, Timeout
from eclients.err_msg import http_msg
from eclients.utils import verify_message
from .decorators import Singleton
from .exceptions import ClientConnectionError, ClientError, ClientResponseError
__all__ = ("HttpClient", "Response")
class Response(object):
"""
    Response object that re-wraps the underlying HTTP response
"""
__slots__ = ["status_code", "reason", "headers", "cookies", "resp_body", "content"]
def __init__(self, status_code: int, reason: str, headers: Dict, cookies: Dict, *, resp_body: Dict,
content: bytes):
"""
Args:
"""
self.status_code = status_code
self.reason = reason
self.headers = headers
self.cookies = cookies
self.resp_body = resp_body
self.content = content
def json(self, ):
"""
        Kept for compatibility with requests' Response.json()
Args:
Returns:
"""
return self.resp_body
class HttpClient(Singleton):
"""
    Synchronous HTTP client wrapper based on requests
"""
def __init__(self, app=None, *, timeout: int = 5 * 60, verify_ssl: bool = True, message: Dict = None,
use_zh: bool = True):
"""
        Synchronous HTTP client wrapper based on requests
        Args:
            app: application instance
            timeout: request timeout
            verify_ssl: verify ssl
            message: prompt messages
            use_zh: whether prompt messages are in Chinese; defaults to Chinese
"""
self.session = None
self.timeout = timeout
self.verify_ssl = verify_ssl
self.message = message or {}
self.use_zh = use_zh
self.msg_zh = None
if app is not None:
self.init_app(app, timeout=self.timeout, verify_ssl=self.verify_ssl, message=self.message,
use_zh=self.use_zh)
def init_app(self, app, *, timeout: int = None, verify_ssl: bool = None, message: Dict = None,
use_zh: bool = None):
"""
        Bind the client to an application and read its configuration
        Args:
            app: application instance
            timeout: request timeout
            verify_ssl: verify ssl
            message: prompt messages
            use_zh: whether prompt messages are in Chinese; defaults to Chinese
Returns:
"""
self.timeout = timeout or app.config.get("ECLIENTS_HTTP_TIMEOUT", None) or self.timeout
self.verify_ssl = verify_ssl or app.config.get("ECLIENTS_HTTP_VERIFYSSL", None) or self.verify_ssl
message = message or app.config.get("ECLIENTS_HTTP_MESSAGE", None) or self.message
use_zh = use_zh or app.config.get("ECLIENTS_HTTP_MSGZH", None) or self.use_zh
self.message = verify_message(http_msg, message)
self.msg_zh = "msg_zh" if use_zh else "msg_en"
        # initialize the session
self.session = sync_requests.Session()
@atexit.register
def close_connection():
"""
            Release every connection held by the session pool
Args:
Returns:
"""
if self.session:
self.session.close()
def init_session(self, *, timeout: int = None, verify_ssl: bool = None, message: Dict = None,
use_zh: bool = None):
"""
        (Re)initialize the underlying requests session
        Args:
            timeout: request timeout
            verify_ssl: verify ssl
            message: prompt messages
            use_zh: whether prompt messages are in Chinese; defaults to Chinese
Returns:
"""
self.timeout = timeout or self.timeout
self.verify_ssl = verify_ssl or self.verify_ssl
use_zh = use_zh or self.use_zh
self.message = verify_message(http_msg, message or self.message)
self.msg_zh = "msg_zh" if use_zh else "msg_en"
        # initialize the session
self.session = sync_requests.Session()
@atexit.register
def close_connection():
"""
            Release every connection held by the session pool
Args:
Returns:
"""
if self.session:
self.session.close()
def _request(self, method: str, url: str, *, params: Dict = None, data: Dict = None, json: Dict = None,
headers: Dict = None, verify_ssl: bool = None, timeout: int = None, **kwargs) -> Response:
"""
Args:
method, url, *, params=None, data=None, json=None, headers=None, **kwargs
Returns:
"""
def _get():
"""
Args:
Returns:
"""
return self.session.get(url, params=params, verify=verify_ssl, headers=headers,
timeout=timeout, **kwargs)
def _post():
"""
Args:
Returns:
"""
res = self.session.post(url, params=params, data=data, json=json, headers=headers,
verify=verify_ssl, timeout=timeout, **kwargs)
return res
def _put():
"""
Args:
Returns:
"""
return self.session.put(url, params=params, data=data, json=json, headers=headers, verify=verify_ssl,
timeout=timeout, **kwargs)
def _patch():
"""
Args:
Returns:
"""
return self.session.patch(url, params=params, data=data, json=json, headers=headers, verify=verify_ssl,
timeout=timeout, **kwargs)
def _delete():
"""
Args:
Returns:
"""
return self.session.delete(url, params=params, data=data, json=json, headers=headers, verify=verify_ssl,
timeout=timeout, **kwargs)
get_resp = {"GET": _get, "POST": _post, "PUT": _put, "DELETE": _delete, "PATCH": _patch}
try:
resp = get_resp[method.upper()]()
resp.raise_for_status()
except KeyError as e:
raise ClientError(url=url, message="error method {0}".format(str(e)))
except (ConnectionError, ConnectTimeout) as e:
raise ClientConnectionError(url=url, message=str(e))
except (Timeout, HTTPError) as e:
resp = e.response
try:
resp_data = resp.json()
except (ValueError, TypeError):
resp_data = resp.text
raise ClientResponseError(url=url, status_code=resp.status_code, message=resp.reason, headers=resp.headers,
body=resp_data)
except RequestException as e:
raise ClientError(url=url, message="ClientError: {}".format(vars(e)))
with resp:
try:
resp_json = resp.json()
except (ValueError, TypeError):
context_type = resp.headers.get("Content-Type", "")
if "text" in context_type:
resp_text = resp.text
return Response(resp.status_code, resp.reason, resp.headers, resp.cookies, resp_body=resp_text,
content=b"")
else:
resp_bytes = resp.content
return Response(resp.status_code, resp.reason, resp.headers, resp.cookies, resp_body="",
content=resp_bytes)
else:
return Response(resp.status_code, resp.reason, resp.headers, resp.cookies, resp_body=resp_json,
content=b"")
def request(self, method: str, url: str, *, params: Dict = None, data: Dict = None, json: Dict = None,
headers: Dict = None, verify_ssl: bool = None, timeout: int = None, **kwargs) -> Response:
"""
Args:
Returns:
"""
verify_ssl = self.verify_ssl if verify_ssl is None else verify_ssl
timeout = self.timeout if timeout is None else timeout
return self._request(method, url, params=params, data=data, json=json, headers=headers,
verify_ssl=verify_ssl, timeout=timeout, **kwargs)
def get(self, url: str, *, params: Dict = None, headers: Dict = None, verify_ssl: bool = None,
timeout: int = None, **kwargs) -> Response:
"""
Args:
Returns:
"""
verify_ssl = self.verify_ssl if verify_ssl is None else verify_ssl
timeout = self.timeout if timeout is None else timeout
return self._request("GET", url, params=params, verify_ssl=verify_ssl, headers=headers,
timeout=timeout, **kwargs)
def post(self, url: str, *, params: Dict = None, data: Dict = None, json: Dict = None, headers: Dict = None,
verify_ssl: bool = None, timeout: int = None, **kwargs) -> Response:
"""
Args:
Returns:
"""
verify_ssl = self.verify_ssl if verify_ssl is None else verify_ssl
timeout = self.timeout if timeout is None else timeout
return self._request("POST", url, params=params, data=data, json=json, headers=headers, verify_ssl=verify_ssl,
timeout=timeout, **kwargs)
def put(self, url: str, *, params: Dict = None, data: Dict = None, json: Dict = None, headers: Dict = None,
verify_ssl: bool = None, timeout: int = None, **kwargs) -> Response:
"""
Args:
Returns:
"""
verify_ssl = self.verify_ssl if verify_ssl is None else verify_ssl
timeout = self.timeout if timeout is None else timeout
return self._request("PUT", url, params=params, data=data, json=json, headers=headers, verify_ssl=verify_ssl,
timeout=timeout, **kwargs)
def patch(self, url: str, *, params: Dict = None, data: Dict = None, json: Dict = None, headers: Dict = None,
verify_ssl: bool = None, timeout: int = None, **kwargs) -> Response:
"""
Args:
Returns:
"""
verify_ssl = self.verify_ssl if verify_ssl is None else verify_ssl
timeout = self.timeout if timeout is None else timeout
return self._request("PATCH", url, params=params, data=data, json=json, headers=headers, verify_ssl=verify_ssl,
timeout=timeout, **kwargs)
def delete(self, url: str, *, params: Dict = None, headers: Dict = None, verify_ssl: bool = None,
timeout: int = None, **kwargs) -> Response:
"""
Args:
Returns:
"""
verify_ssl = self.verify_ssl if verify_ssl is None else verify_ssl
timeout = self.timeout if timeout is None else timeout
return self._request("DELETE", url, params=params, verify_ssl=verify_ssl, headers=headers, timeout=timeout,
**kwargs)
def close(self, ):
"""
close
Args:
Returns:
"""
self.session.close()
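# --- Hedged usage sketch (illustrative only) ---
# Standalone use of HttpClient without a Flask-style app object; the URL is a
# placeholder, not a real endpoint.
# client = HttpClient()
# client.init_session(timeout=30)
# resp = client.get('https://example.com/api/ping')
# print(resp.status_code, resp.json())
# client.close()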
|
[
"requests.Session",
"eclients.utils.verify_message"
] |
[((2736, 2769), 'eclients.utils.verify_message', 'verify_message', (['http_msg', 'message'], {}), '(http_msg, message)\n', (2750, 2769), False, 'from eclients.utils import verify_message\n'), ((2869, 2892), 'requests.Session', 'sync_requests.Session', ([], {}), '()\n', (2890, 2892), True, 'import requests as sync_requests\n'), ((3637, 3686), 'eclients.utils.verify_message', 'verify_message', (['http_msg', '(message or self.message)'], {}), '(http_msg, message or self.message)\n', (3651, 3686), False, 'from eclients.utils import verify_message\n'), ((3786, 3809), 'requests.Session', 'sync_requests.Session', ([], {}), '()\n', (3807, 3809), True, 'import requests as sync_requests\n')]
|
from apistar import validators
from apistar_mongoengine.types import Type
class PostType(Type):
message = validators.String()
|
[
"apistar.validators.String"
] |
[((112, 131), 'apistar.validators.String', 'validators.String', ([], {}), '()\n', (129, 131), False, 'from apistar import validators\n')]
|
#!/usr/bin/python
#-*- coding: utf-8 -*-
from SpeakerNet import *
from utils import *
from DatasetLoader import loadWAV
import sys, time, os, argparse, socket
import yaml
import numpy
import pdb
import torch
import glob
import zipfile
import datetime
import os
import random
import subprocess
import torch.distributed as dist
import torch.multiprocessing as mp
import numpy as np
import torch.nn.functional as F
from werkzeug.utils import secure_filename
from flask import Flask, request, jsonify
# ## ===== ===== ===== ===== ===== ===== ===== =====
# ## Parse arguments
# ## ===== ===== ===== ===== ===== ===== ===== =====
parser = argparse.ArgumentParser(description = 'Prepare Data');
## Data loader
parser.add_argument('--max_frames', type=int, default=200, help='Input length to the network for training');
parser.add_argument('--eval_frames', type=int, default=400, help='Input length to the network for testing; 0 uses the whole files');
## Training details
parser.add_argument('--trainfunc', type=str, default='softmaxproto', help='Loss function');
## Optimizer
parser.add_argument('--optimizer', type=str, default='adam', help='sgd or adam');
## Loss functions
parser.add_argument('--hard_prob', type=float, default=0.5, help='Hard negative mining probability, otherwise random, only for some loss functions');
parser.add_argument('--hard_rank', type=int, default=10, help='Hard negative mining rank in the batch, only for some loss functions');
parser.add_argument('--margin', type=float, default=0.1, help='Loss margin, only for some loss functions');
parser.add_argument('--scale', type=float, default=30, help='Loss scale, only for some loss functions');
parser.add_argument('--nPerSpeaker', type=int, default=2, help='Number of utterances per speaker per batch, only for metric learning based losses');
parser.add_argument('--nClasses', type=int, default=400, help='Number of speakers in the softmax layer, only for softmax-based losses');
## Load
parser.add_argument('--model_path', type=str, default='model000000500.model', help='Path for model and logs');
## Model definition
parser.add_argument('--n_mels', type=int, default=64, help='Number of mel filterbanks');
parser.add_argument('--log_input', type=bool, default=True, help='Log input features')
parser.add_argument('--model', type=str, default='ResNetSE34V2', help='Name of model definition');
parser.add_argument('--encoder_type', type=str, default='ASP', help='Type of encoder');
parser.add_argument('--nOut', type=int, default=512, help='Embedding size in the last FC layer');
## Server's params
parser.add_argument('--gpu', dest='gpu', action='store_true', help='Use GPU');
parser.add_argument('--threshold', type=float, default=-1.0831763744354248, help='Threshold');
parser.add_argument('--feats_path', type=str, default='feats.npy', help='Path for feats file');
args = parser.parse_args();
## Load models
if args.gpu == True:
s = SpeakerNet(**vars(args));
s = WrappedModel(s).cuda(0)
else:
s = SpeakerNetCPU(**vars(args));
s = WrappedModel(s).cpu()
## Load model weights
try:
loadParameters(args.model_path, s, args.gpu);
except:
raise Exception('Model path is wrong!')
print('Model %s loaded from previous state!'%args.model_path);
feats = np.load(args.feats_path, allow_pickle=True)[()]
def main_worker(file_path):
data = create_data(file_path, args.eval_frames)
feature_vector = s(data).detach().cpu()
normalized_vector = F.normalize(feature_vector, p=2, dim=1)
max_score = args.threshold
speaker = ''
for key, value in feats.items():
dist = F.pairwise_distance(normalized_vector.unsqueeze(-1), value.unsqueeze(-1).transpose(0,2)).detach().cpu().numpy();
score = -1 * np.mean(dist);
if score >= max_score:
max_score = score
speaker = key.split('/')[-2]
return speaker
app = Flask(__name__)
@app.route('/predict', methods=['POST'])
def predict():
audio_file = request.files['file']
file_name_1 = str(random.randint(0, 100000)) + '.' + secure_filename(audio_file.filename).split('.')[-1]
audio_file.save(file_name_1)
file_name_2 = str(random.randint(0, 100000)) + '.wav'
out = subprocess.call('ffmpeg -y -i %s -ac 1 -vn -acodec pcm_s16le -ar 16000 %s >/dev/null 2>/dev/null' %(file_name_1, file_name_2), shell=True)
if out != 0:
return 'Invalid format!'
speaker = main_worker(file_name_2)
os.remove(file_name_1)
os.remove(file_name_2)
result = {'speaker': speaker}
return jsonify(result)
if __name__ == '__main__':
app.run(host='0.0.0.0', port='8080', debug=False)
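# --- Hedged client sketch (illustrative only) ---
# Example request against the /predict route above; host and port match the
# app.run() call, and 'sample.wav' is a placeholder file name.
# import requests
# with open('sample.wav', 'rb') as f:
#     r = requests.post('http://localhost:8080/predict', files={'file': f})
# print(r.json())  # e.g. {'speaker': 'spk001'}, or an empty string below threshold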
|
[
"numpy.load",
"os.remove",
"argparse.ArgumentParser",
"random.randint",
"flask.Flask",
"werkzeug.utils.secure_filename",
"flask.jsonify",
"numpy.mean",
"subprocess.call",
"torch.nn.functional.normalize"
] |
[((638, 689), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Prepare Data"""'}), "(description='Prepare Data')\n", (661, 689), False, 'import sys, time, os, argparse, socket\n'), ((4073, 4088), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (4078, 4088), False, 'from flask import Flask, request, jsonify\n'), ((3455, 3498), 'numpy.load', 'np.load', (['args.feats_path'], {'allow_pickle': '(True)'}), '(args.feats_path, allow_pickle=True)\n', (3462, 3498), True, 'import numpy as np\n'), ((3653, 3692), 'torch.nn.functional.normalize', 'F.normalize', (['feature_vector'], {'p': '(2)', 'dim': '(1)'}), '(feature_vector, p=2, dim=1)\n', (3664, 3692), True, 'import torch.nn.functional as F\n'), ((4396, 4545), 'subprocess.call', 'subprocess.call', (["('ffmpeg -y -i %s -ac 1 -vn -acodec pcm_s16le -ar 16000 %s >/dev/null 2>/dev/null'\n % (file_name_1, file_name_2))"], {'shell': '(True)'}), "(\n 'ffmpeg -y -i %s -ac 1 -vn -acodec pcm_s16le -ar 16000 %s >/dev/null 2>/dev/null'\n % (file_name_1, file_name_2), shell=True)\n", (4411, 4545), False, 'import subprocess\n'), ((4629, 4651), 'os.remove', 'os.remove', (['file_name_1'], {}), '(file_name_1)\n', (4638, 4651), False, 'import os\n'), ((4656, 4678), 'os.remove', 'os.remove', (['file_name_2'], {}), '(file_name_2)\n', (4665, 4678), False, 'import os\n'), ((4725, 4740), 'flask.jsonify', 'jsonify', (['result'], {}), '(result)\n', (4732, 4740), False, 'from flask import Flask, request, jsonify\n'), ((3928, 3941), 'numpy.mean', 'np.mean', (['dist'], {}), '(dist)\n', (3935, 3941), True, 'import numpy as np\n'), ((4350, 4375), 'random.randint', 'random.randint', (['(0)', '(100000)'], {}), '(0, 100000)\n', (4364, 4375), False, 'import random\n'), ((4207, 4232), 'random.randint', 'random.randint', (['(0)', '(100000)'], {}), '(0, 100000)\n', (4221, 4232), False, 'import random\n'), ((4242, 4278), 'werkzeug.utils.secure_filename', 'secure_filename', (['audio_file.filename'], {}), '(audio_file.filename)\n', (4257, 4278), False, 'from werkzeug.utils import secure_filename\n')]
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_calc
----------------------------------
Acceptance tests for MVP.
"""
import sys, os
import pytest
import logging
from time import sleep
from assertpy import assert_that
from pytest_bdd import scenario, given, when, then
from concurrent import futures
#SUT
from ska.scripting.domain import Telescope, SubArray
#SUT infrastructure
from tango import DeviceProxy, DevState # type: ignore
## local imports
from resources.test_support.helpers import resource
from resources.test_support.sync_decorators import sync_assign_resources, sync_restart, sync_abort, sync_scan_oet
from resources.test_support.persistance_helping import update_resource_config_file
from resources.test_support.controls import set_telescope_to_standby,set_telescope_to_running,telescope_is_in_standby,take_subarray,restart_subarray, tmc_is_on
DEV_TEST_TOGGLE = os.environ.get('DISABLE_DEV_TESTS')
if DEV_TEST_TOGGLE == "False":
DISABLE_TESTS_UNDER_DEVELOPMENT = False
else:
DISABLE_TESTS_UNDER_DEVELOPMENT = True
LOGGER = logging.getLogger(__name__)
devices_to_log = [
'ska_mid/tm_subarray_node/1',
'mid_csp/elt/subarray_01',
'mid_csp_cbf/sub_elt/subarray_01',
'mid_sdp/elt/subarray_1',
'mid_d0001/elt/master',
'mid_d0002/elt/master',
'mid_d0003/elt/master',
'mid_d0004/elt/master']
non_default_states_to_check = {
'mid_d0001/elt/master' : 'pointingState',
'mid_d0002/elt/master' : 'pointingState',
'mid_d0003/elt/master' : 'pointingState',
'mid_d0004/elt/master' : 'pointingState'}
@pytest.fixture
def result():
return {}
@pytest.mark.select
@pytest.mark.skamid
@pytest.mark.quarantine
# @pytest.mark.skipif(DISABLE_TESTS_UNDER_DEVELOPMENT, reason="disabaled by local env")
# @pytest.mark.skip(reason="bug as reported by SKB-20")
@scenario("XTP-1106.feature", "BDD test case for Restart functionality")
def test_subarray_restart():
"""RESTART Subarray"""
def assign():
LOGGER.info("Before starting the telescope checking if the TMC is in ON state")
assert(tmc_is_on())
LOGGER.info("Before starting the telescope checking if the telescope is in StandBy.")
assert(telescope_is_in_standby())
LOGGER.info("Telescope is in StandBy.")
LOGGER.info("Invoking Startup Telescope command on the telescope.")
set_telescope_to_running()
LOGGER.info("Telescope is started successfully.")
pilot, sdp_block = take_subarray(1).to_be_composed_out_of(2)
LOGGER.info("Resources are assigned successfully on Subarray.")
return sdp_block
def configure_ready(sdp_block):
LOGGER.info("Invoking configure command on the Subarray.")
take_subarray(1).and_configure_scan_by_file(sdp_block)
LOGGER.info("Configure command is invoked on Subarray.")
LOGGER.info("Subarray is moved to READY, Configure command is successful on Subarray.")
def scanning(fixture):
fixture['scans'] = '{"id":1}'
@sync_scan_oet
def scan():
LOGGER.info("Invoking scan command on Subarray.")
def send_scan(duration):
SubArray(1).scan()
LOGGER.info("Scan is invoked on Subarray 1")
executor = futures.ThreadPoolExecutor(max_workers=1)
LOGGER.info("Getting into executor block")
return executor.submit(send_scan,fixture['scans'])
fixture['future'] = scan()
LOGGER.info("obsState = Scanning of TMC-Subarray")
return fixture
@given("operator has a running telescope with a subarray in state <subarray_obsstate> and Subarray has transitioned into obsState ABORTED")
def set_up_telescope(subarray_obsstate : str):
if subarray_obsstate == 'IDLE':
assign()
LOGGER.info("Abort command can be invoked on Subarray with Subarray obsState as 'IDLE'")
elif subarray_obsstate == 'READY':
sdp_block = assign()
LOGGER.info("Resources are assigned successfully and configuring the subarray now")
configure_ready(sdp_block)
LOGGER.info("Abort command can be invoked on Subarray with Subarray obsState as 'READY'")
elif subarray_obsstate == 'SCANNING':
sdp_block = assign()
LOGGER.info("Resources are assigned successfully and configuring the subarray now")
configure_ready(sdp_block)
LOGGER.info("Subarray is configured and executing a scan on subarray")
scanning(sdp_block)
LOGGER.info("Abort command can be invoked on Subarray with Subarray obsState as 'SCANNING'")
else:
msg = 'obsState {} is not settable with command methods'
raise ValueError(msg.format(subarray_obsstate))
def abort_subarray():
@sync_abort(200)
def abort():
LOGGER.info("Invoking ABORT command.")
SubArray(1).abort()
LOGGER.info("Abort command is invoked on subarray")
abort()
LOGGER.info("Abort is completed on Subarray")
abort_subarray()
@when("I invoke Restart command")
def restart():
@sync_restart(200)
def command_restart():
LOGGER.info("Invoking Restart command on the Subarray.")
SubArray(1).restart()
LOGGER.info("Restart command is invoked on subarray")
command_restart()
LOGGER.info("Subarray is restarted successfully.")
@then("subarray changes its obsState to EMPTY")
def check_empty_state():
assert_that(resource('mid_sdp/elt/subarray_1').get('obsState')).is_equal_to('EMPTY')
assert_that(resource('mid_csp/elt/subarray_01').get('obsState')).is_equal_to('EMPTY')
assert_that(resource('ska_mid/tm_subarray_node/1').get('obsState')).is_equal_to('EMPTY')
def teardown_function(function):
""" teardown any state that was previously setup with a setup_function
call.
"""
if (resource('ska_mid/tm_subarray_node/1').get('State') == "ON"):
if (resource('ska_mid/tm_subarray_node/1').get('obsState') == "IDLE"):
LOGGER.info("tearing down composed subarray (IDLE)")
take_subarray(1).and_release_all_resources()
if (resource('ska_mid/tm_subarray_node/1').get('obsState') == "CONFIGURING"):
LOGGER.warn("Subarray is still in CONFIFURING! Please restart MVP manually to complete tear down")
restart_subarray(1)
raise Exception("Unable to tear down test setup")
if (resource('ska_mid/tm_subarray_node/1').get('obsState') == "READY"):
LOGGER.info("tearing down configured subarray (READY)")
take_subarray(1).and_end_sb_when_ready().and_release_all_resources()
if (resource('ska_mid/tm_subarray_node/1').get('obsState') == "ABORTING"):
LOGGER.warn("Subarray is still in ABORTING! Please restart MVP manually to complete tear down")
restart_subarray(1)
raise Exception("Unable to tear down test setup")
if (resource('ska_mid/tm_subarray_node/1').get('obsState') == "SCANNING"):
LOGGER.warn("Subarray is still in SCANNING! Please restart MVP manually to complete tear down")
restart_subarray(1)
raise Exception("Unable to tear down test setup")
if (resource('ska_mid/tm_subarray_node/1').get('obsState') == "RESTARTING"):
LOGGER.warn("Subarray is still in RESTARTING! Please restart MVP manually to complete tear down")
restart_subarray(1)
raise Exception("Unable to tear down test setup")
if (resource('ska_mid/tm_subarray_node/1').get('obsState') == "EMPTY"):
LOGGER.info("Subarray is in EMPTY state.")
LOGGER.info("Put Telescope back to standby")
set_telescope_to_standby()
LOGGER.info("Telescope is in standby")
|
[
"resources.test_support.controls.set_telescope_to_running",
"resources.test_support.sync_decorators.sync_restart",
"resources.test_support.controls.take_subarray",
"pytest_bdd.then",
"ska.scripting.domain.SubArray",
"pytest_bdd.given",
"resources.test_support.controls.telescope_is_in_standby",
"os.environ.get",
"resources.test_support.helpers.resource",
"resources.test_support.controls.set_telescope_to_standby",
"pytest_bdd.when",
"resources.test_support.controls.tmc_is_on",
"resources.test_support.controls.restart_subarray",
"concurrent.futures.ThreadPoolExecutor",
"pytest_bdd.scenario",
"logging.getLogger",
"resources.test_support.sync_decorators.sync_abort"
] |
[((887, 922), 'os.environ.get', 'os.environ.get', (['"""DISABLE_DEV_TESTS"""'], {}), "('DISABLE_DEV_TESTS')\n", (901, 922), False, 'import sys, os\n'), ((1057, 1084), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1074, 1084), False, 'import logging\n'), ((1822, 1893), 'pytest_bdd.scenario', 'scenario', (['"""XTP-1106.feature"""', '"""BDD test case for Restart functionality"""'], {}), "('XTP-1106.feature', 'BDD test case for Restart functionality')\n", (1830, 1893), False, 'from pytest_bdd import scenario, given, when, then\n'), ((3413, 3561), 'pytest_bdd.given', 'given', (['"""operator has a running telescope with a subarray in state <subarray_obsstate> and Subarray has transitioned into obsState ABORTED"""'], {}), "(\n 'operator has a running telescope with a subarray in state <subarray_obsstate> and Subarray has transitioned into obsState ABORTED'\n )\n", (3418, 3561), False, 'from pytest_bdd import scenario, given, when, then\n'), ((4900, 4932), 'pytest_bdd.when', 'when', (['"""I invoke Restart command"""'], {}), "('I invoke Restart command')\n", (4904, 4932), False, 'from pytest_bdd import scenario, given, when, then\n'), ((5238, 5284), 'pytest_bdd.then', 'then', (['"""subarray changes its obsState to EMPTY"""'], {}), "('subarray changes its obsState to EMPTY')\n", (5242, 5284), False, 'from pytest_bdd import scenario, given, when, then\n'), ((2060, 2071), 'resources.test_support.controls.tmc_is_on', 'tmc_is_on', ([], {}), '()\n', (2069, 2071), False, 'from resources.test_support.controls import set_telescope_to_standby, set_telescope_to_running, telescope_is_in_standby, take_subarray, restart_subarray, tmc_is_on\n'), ((2174, 2199), 'resources.test_support.controls.telescope_is_in_standby', 'telescope_is_in_standby', ([], {}), '()\n', (2197, 2199), False, 'from resources.test_support.controls import set_telescope_to_standby, set_telescope_to_running, telescope_is_in_standby, take_subarray, restart_subarray, tmc_is_on\n'), ((2321, 2347), 'resources.test_support.controls.set_telescope_to_running', 'set_telescope_to_running', ([], {}), '()\n', (2345, 2347), False, 'from resources.test_support.controls import set_telescope_to_standby, set_telescope_to_running, telescope_is_in_standby, take_subarray, restart_subarray, tmc_is_on\n'), ((4957, 4974), 'resources.test_support.sync_decorators.sync_restart', 'sync_restart', (['(200)'], {}), '(200)\n', (4969, 4974), False, 'from resources.test_support.sync_decorators import sync_assign_resources, sync_restart, sync_abort, sync_scan_oet\n'), ((7500, 7526), 'resources.test_support.controls.set_telescope_to_standby', 'set_telescope_to_standby', ([], {}), '()\n', (7524, 7526), False, 'from resources.test_support.controls import set_telescope_to_standby, set_telescope_to_running, telescope_is_in_standby, take_subarray, restart_subarray, tmc_is_on\n'), ((3153, 3194), 'concurrent.futures.ThreadPoolExecutor', 'futures.ThreadPoolExecutor', ([], {'max_workers': '(1)'}), '(max_workers=1)\n', (3179, 3194), False, 'from concurrent import futures\n'), ((4623, 4638), 'resources.test_support.sync_decorators.sync_abort', 'sync_abort', (['(200)'], {}), '(200)\n', (4633, 4638), False, 'from resources.test_support.sync_decorators import sync_assign_resources, sync_restart, sync_abort, sync_scan_oet\n'), ((6186, 6205), 'resources.test_support.controls.restart_subarray', 'restart_subarray', (['(1)'], {}), '(1)\n', (6202, 6205), False, 'from resources.test_support.controls import set_telescope_to_standby, set_telescope_to_running, telescope_is_in_standby, take_subarray, restart_subarray, tmc_is_on\n'), ((6686, 6705), 'resources.test_support.controls.restart_subarray', 'restart_subarray', (['(1)'], {}), '(1)\n', (6702, 6705), False, 'from resources.test_support.controls import set_telescope_to_standby, set_telescope_to_running, telescope_is_in_standby, take_subarray, restart_subarray, tmc_is_on\n'), ((6964, 6983), 'resources.test_support.controls.restart_subarray', 'restart_subarray', (['(1)'], {}), '(1)\n', (6980, 6983), False, 'from resources.test_support.controls import set_telescope_to_standby, set_telescope_to_running, telescope_is_in_standby, take_subarray, restart_subarray, tmc_is_on\n'), ((7241, 7260), 'resources.test_support.controls.restart_subarray', 'restart_subarray', (['(1)'], {}), '(1)\n', (7257, 7260), False, 'from resources.test_support.controls import set_telescope_to_standby, set_telescope_to_running, telescope_is_in_standby, take_subarray, restart_subarray, tmc_is_on\n'), ((2425, 2441), 'resources.test_support.controls.take_subarray', 'take_subarray', (['(1)'], {}), '(1)\n', (2438, 2441), False, 'from resources.test_support.controls import set_telescope_to_standby, set_telescope_to_running, telescope_is_in_standby, take_subarray, restart_subarray, tmc_is_on\n'), ((2657, 2673), 'resources.test_support.controls.take_subarray', 'take_subarray', (['(1)'], {}), '(1)\n', (2670, 2673), False, 'from resources.test_support.controls import set_telescope_to_standby, set_telescope_to_running, telescope_is_in_standby, take_subarray, restart_subarray, tmc_is_on\n'), ((5075, 5086), 'ska.scripting.domain.SubArray', 'SubArray', (['(1)'], {}), '(1)\n', (5083, 5086), False, 'from ska.scripting.domain import Telescope, SubArray\n'), ((5717, 5755), 'resources.test_support.helpers.resource', 'resource', (['"""ska_mid/tm_subarray_node/1"""'], {}), "('ska_mid/tm_subarray_node/1')\n", (5725, 5755), False, 'from resources.test_support.helpers import resource\n'), ((5989, 6027), 'resources.test_support.helpers.resource', 'resource', (['"""ska_mid/tm_subarray_node/1"""'], {}), "('ska_mid/tm_subarray_node/1')\n", (5997, 6027), False, 'from resources.test_support.helpers import resource\n'), ((6278, 6316), 'resources.test_support.helpers.resource', 'resource', (['"""ska_mid/tm_subarray_node/1"""'], {}), "('ska_mid/tm_subarray_node/1')\n", (6286, 6316), False, 'from resources.test_support.helpers import resource\n'), ((6503, 6541), 'resources.test_support.helpers.resource', 'resource', (['"""ska_mid/tm_subarray_node/1"""'], {}), "('ska_mid/tm_subarray_node/1')\n", (6511, 6541), False, 'from resources.test_support.helpers import resource\n'), ((6773, 6811), 'resources.test_support.helpers.resource', 'resource', (['"""ska_mid/tm_subarray_node/1"""'], {}), "('ska_mid/tm_subarray_node/1')\n", (6781, 6811), False, 'from resources.test_support.helpers import resource\n'), ((7054, 7092), 'resources.test_support.helpers.resource', 'resource', (['"""ska_mid/tm_subarray_node/1"""'], {}), "('ska_mid/tm_subarray_node/1')\n", (7062, 7092), False, 'from resources.test_support.helpers import resource\n'), ((7328, 7366), 'resources.test_support.helpers.resource', 'resource', (['"""ska_mid/tm_subarray_node/1"""'], {}), "('ska_mid/tm_subarray_node/1')\n", (7336, 7366), False, 'from resources.test_support.helpers import resource\n'), ((3062, 3073), 'ska.scripting.domain.SubArray', 'SubArray', (['(1)'], {}), '(1)\n', (3070, 3073), False, 'from ska.scripting.domain import Telescope, SubArray\n'), ((4723, 4734), 'ska.scripting.domain.SubArray', 'SubArray', (['(1)'], {}), '(1)\n', (4731, 4734), False, 'from ska.scripting.domain import Telescope, SubArray\n'), ((5791, 5829), 'resources.test_support.helpers.resource', 'resource', (['"""ska_mid/tm_subarray_node/1"""'], {}), "('ska_mid/tm_subarray_node/1')\n", (5799, 5829), False, 'from resources.test_support.helpers import resource\n'), ((5935, 5951), 'resources.test_support.controls.take_subarray', 'take_subarray', (['(1)'], {}), '(1)\n', (5948, 5951), False, 'from resources.test_support.controls import set_telescope_to_standby, set_telescope_to_running, telescope_is_in_standby, take_subarray, restart_subarray, tmc_is_on\n'), ((5326, 5360), 'resources.test_support.helpers.resource', 'resource', (['"""mid_sdp/elt/subarray_1"""'], {}), "('mid_sdp/elt/subarray_1')\n", (5334, 5360), False, 'from resources.test_support.helpers import resource\n'), ((5415, 5450), 'resources.test_support.helpers.resource', 'resource', (['"""mid_csp/elt/subarray_01"""'], {}), "('mid_csp/elt/subarray_01')\n", (5423, 5450), False, 'from resources.test_support.helpers import resource\n'), ((5505, 5543), 'resources.test_support.helpers.resource', 'resource', (['"""ska_mid/tm_subarray_node/1"""'], {}), "('ska_mid/tm_subarray_node/1')\n", (5513, 5543), False, 'from resources.test_support.helpers import resource\n'), ((6426, 6442), 'resources.test_support.controls.take_subarray', 'take_subarray', (['(1)'], {}), '(1)\n', (6439, 6442), False, 'from resources.test_support.controls import set_telescope_to_standby, set_telescope_to_running, telescope_is_in_standby, take_subarray, restart_subarray, tmc_is_on\n')]
|
#!/usr/bin/env python
import cairo
import rsvg
import gtk
class View:
def __init__(self):
self.string = """<svg width="800" height="600"></svg>"""
self.svg = rsvg.Handle(data=self.string)
self.win = gtk.Window()
self.da = gtk.DrawingArea()
self.win.add(self.da)
self.da.set_size_request(800, 600)
self.da.connect("expose-event", self.expose_cairo)
self.win.connect("destroy", self.destroy)
self.win.show_all()
self.win.present()
def expose_cairo(self, win, event):
self.svg = rsvg.Handle(data=self.string)
cr = self.da.window.cairo_create()
self.svg.render_cairo(cr)
def destroy(self, widget, data=None):
gtk.main_quit()
def renderSVG(self, text):
x, y, w, h = self.win.allocation
self.da.window.invalidate_rect((0,0,w,h), False)
self.string = text
|
[
"gtk.Window",
"gtk.main_quit",
"gtk.DrawingArea",
"rsvg.Handle"
] |
[((330, 359), 'rsvg.Handle', 'rsvg.Handle', ([], {'data': 'self.string'}), '(data=self.string)\n', (341, 359), False, 'import rsvg\n'), ((374, 386), 'gtk.Window', 'gtk.Window', ([], {}), '()\n', (384, 386), False, 'import gtk\n'), ((400, 417), 'gtk.DrawingArea', 'gtk.DrawingArea', ([], {}), '()\n', (415, 417), False, 'import gtk\n'), ((677, 706), 'rsvg.Handle', 'rsvg.Handle', ([], {'data': 'self.string'}), '(data=self.string)\n', (688, 706), False, 'import rsvg\n'), ((817, 832), 'gtk.main_quit', 'gtk.main_quit', ([], {}), '()\n', (830, 832), False, 'import gtk\n')]
|
from gpiozero import LightSensor
from time import sleep
pulseCount = 0
def lightPulse():
global pulseCount
pulseCount = pulseCount + 1
print("Pulse ", pulseCount)
ldr = LightSensor(19,queue_len=1)
ldr.when_light = lightPulse
ldr.threshold = 0.1
while True:
#print(ldr.value)
sleep(1)
|
[
"gpiozero.LightSensor",
"time.sleep"
] |
[((184, 212), 'gpiozero.LightSensor', 'LightSensor', (['(19)'], {'queue_len': '(1)'}), '(19, queue_len=1)\n', (195, 212), False, 'from gpiozero import LightSensor\n'), ((299, 307), 'time.sleep', 'sleep', (['(1)'], {}), '(1)\n', (304, 307), False, 'from time import sleep\n')]
|
from django.contrib.auth.decorators import login_required, permission_required
from django.shortcuts import render
from ..models import Contract
@login_required
@permission_required("buybacks.basic_access")
def my_stats(request):
contracts = Contract.objects.filter(
character__user=request.user,
)
context = {
"contracts": contracts,
"mine": True,
}
return render(request, "buybacks/stats.html", context)
@login_required
@permission_required("buybacks.basic_access")
def program_stats(request, program_pk):
contracts = Contract.objects.filter(
program__pk=program_pk,
)
context = {
"contracts": contracts,
"mine": False,
}
return render(request, "buybacks/stats.html", context)
|
[
"django.shortcuts.render",
"django.contrib.auth.decorators.permission_required"
] |
[((165, 209), 'django.contrib.auth.decorators.permission_required', 'permission_required', (['"""buybacks.basic_access"""'], {}), "('buybacks.basic_access')\n", (184, 209), False, 'from django.contrib.auth.decorators import login_required, permission_required\n'), ((474, 518), 'django.contrib.auth.decorators.permission_required', 'permission_required', (['"""buybacks.basic_access"""'], {}), "('buybacks.basic_access')\n", (493, 518), False, 'from django.contrib.auth.decorators import login_required, permission_required\n'), ((407, 454), 'django.shortcuts.render', 'render', (['request', '"""buybacks/stats.html"""', 'context'], {}), "(request, 'buybacks/stats.html', context)\n", (413, 454), False, 'from django.shortcuts import render\n'), ((728, 775), 'django.shortcuts.render', 'render', (['request', '"""buybacks/stats.html"""', 'context'], {}), "(request, 'buybacks/stats.html', context)\n", (734, 775), False, 'from django.shortcuts import render\n')]
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2020 CERN.
# Copyright (C) 2020 Northwestern University.
#
# Invenio-Records-Resources is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see LICENSE file for more
# details.
"""Service tests."""
import json
import pytest
from invenio_search import current_search, current_search_client
from mock_module.api import Record
@pytest.fixture()
def input_data():
"""Input data (as coming from the view layer)."""
return {
'metadata': {
'title': 'Test'
},
}
def test_simple_flow(app, client, input_data, headers):
"""Test a simple REST API flow."""
idx = 'records-record-v1.0.0'
h = headers
# Create a record
res = client.post('/mocks', headers=h, data=json.dumps(input_data))
assert res.status_code == 201
id_ = res.json['id']
assert res.json['metadata'] == input_data['metadata']
# Read the record
res = client.get(f'/mocks/{id_}', headers=h)
assert res.status_code == 200
assert res.json['metadata'] == input_data['metadata']
# TODO: Should this be part of the service? we don't know the index easily
Record.index.refresh()
# Search it
res = client.get('/mocks', query_string={'q': f'id:{id_}'}, headers=h)
assert res.status_code == 200
assert res.json['hits']['total'] == 1
assert res.json['hits']['hits'][0]['metadata'] == input_data['metadata']
data = res.json['hits']['hits'][0]
data['metadata']['title'] = 'New title'
# Update it
res = client.put(f'/mocks/{id_}', headers=h, data=json.dumps(data))
assert res.status_code == 200
assert res.json['metadata']['title'] == 'New title'
# Delete it
res = client.delete(f'/mocks/{id_}')
assert res.status_code == 204
assert res.get_data(as_text=True) == ''
Record.index.refresh()
# Try to get it again
res = client.get(f'/mocks/{id_}', headers=h)
assert res.status_code == 410
# Try to get search it again
res = client.get('/mocks', query_string={'q': f'id:{id_}'}, headers=h)
assert res.status_code == 200
assert res.json['hits']['total'] == 0
def test_search_empty_query_string(client, input_data, headers):
idx = 'records-record-v1.0.0'
# Create a record
res = client.post('/mocks', headers=headers, data=json.dumps(input_data))
assert res.status_code == 201
# TODO: Should this be part of the service? we don't know the index easily
Record.index.refresh()
# Search it
res = client.get('/mocks', headers=headers)
assert res.status_code == 200
assert res.json['hits']['total'] == 1
assert res.json['hits']['hits'][0]['metadata'] == input_data['metadata']
# Search it
res = client.get('/mocks', query_string={'q': ''}, headers=headers)
assert res.status_code == 200
assert res.json['hits']['total'] == 1
assert res.json['hits']['hits'][0]['metadata'] == input_data['metadata']
|
[
"mock_module.api.Record.index.refresh",
"pytest.fixture",
"json.dumps"
] |
[((416, 432), 'pytest.fixture', 'pytest.fixture', ([], {}), '()\n', (430, 432), False, 'import pytest\n'), ((1192, 1214), 'mock_module.api.Record.index.refresh', 'Record.index.refresh', ([], {}), '()\n', (1212, 1214), False, 'from mock_module.api import Record\n'), ((1863, 1885), 'mock_module.api.Record.index.refresh', 'Record.index.refresh', ([], {}), '()\n', (1883, 1885), False, 'from mock_module.api import Record\n'), ((2501, 2523), 'mock_module.api.Record.index.refresh', 'Record.index.refresh', ([], {}), '()\n', (2521, 2523), False, 'from mock_module.api import Record\n'), ((803, 825), 'json.dumps', 'json.dumps', (['input_data'], {}), '(input_data)\n', (813, 825), False, 'import json\n'), ((1614, 1630), 'json.dumps', 'json.dumps', (['data'], {}), '(data)\n', (1624, 1630), False, 'import json\n'), ((2359, 2381), 'json.dumps', 'json.dumps', (['input_data'], {}), '(input_data)\n', (2369, 2381), False, 'import json\n')]
|
from configs.meta_params import get_db_list
from utils.parser.bib_parser import bib_gen
def export_exec(paper_id):
for item in get_db_list():
if item['id'] == paper_id:
print(bib_gen(item))
break
|
[
"utils.parser.bib_parser.bib_gen",
"configs.meta_params.get_db_list"
] |
[((133, 146), 'configs.meta_params.get_db_list', 'get_db_list', ([], {}), '()\n', (144, 146), False, 'from configs.meta_params import get_db_list\n'), ((201, 214), 'utils.parser.bib_parser.bib_gen', 'bib_gen', (['item'], {}), '(item)\n', (208, 214), False, 'from utils.parser.bib_parser import bib_gen\n')]
|
from functools import singledispatch
import json
import fsspec
from toolz import groupby
from loguru import logger
from typing import Any, List, Union
from pydantic.datetime_parse import datetime_re
from pydantic.validators import parse_datetime
import numpy as np
from ..types import Interval
from ..indexes import Index, InterpolatingIndex, IntervalIndex, MultiIndex
from ..utils import jsonable, singledispatchmethod, hashable_doc, unhashable_doc
from .base import BaseDataQuery, DatasourceInterface
class JsonBaseQuery(BaseDataQuery):
def __init__(self, index, data, field: str, label: Any) -> None:
self.index = index
self.data = data
self.field = field
self.label = label
@property
def labels(self):
return {self.field: self.label}
def filter(self, record: dict):
raise NotImplementedError
def apply_selection(self, records):
return list(filter(self.filter, records))
def execute(self, limit: int = None, skip: int = None, sort=None):
logger.debug("Applying pandas dataframe selection")
if not self.data:
return []
if sort is None:
data = self.data
else:
if isinstance(sort, str):
sort = [sort]
data = [hashable_doc(d) for d in self.data]
data = sorted(data, key=lambda d: tuple(d[s] for s in sort))
data = [unhashable_doc(d) for d in data]
docs = self.apply_selection(data)
if limit is not None:
start = skip * self.index.DOCS_PER_LABEL if skip is not None else 0
limit = start + limit * self.index.DOCS_PER_LABEL
docs = docs[start:limit]
docs = self.index.reduce(docs, self.labels)
docs = from_json(docs)
logger.debug(f"Done. Found {len(docs)} documents.")
return docs
def min(self, fields: Union[str, List[str]]):
if isinstance(fields, str):
fields = [fields]
docs = self.apply_selection(self.data)
results = {}
for field in fields:
values = [d[field] for d in docs]
results[field] = min(values)
results = from_json(results)
if len(fields) == 1:
return results[fields[0]]
return results
def max(self, fields: Union[str, List[str]]):
if isinstance(fields, str):
fields = [fields]
docs = self.apply_selection(self.data)
results = {}
for field in fields:
values = [d[field] for d in docs]
results[field] = max(values)
results = from_json(results)
if len(fields) == 1:
return results[fields[0]]
return results
def unique(self, fields: Union[str, List[str]]):
if isinstance(fields, str):
fields = [fields]
docs = self.apply_selection(self.data)
results = {}
for field in fields:
values = [doc[field] for doc in docs]
values = set([hashable_doc(v) for v in values])
values = [unhashable_doc(v) for v in values]
results[field] = values
results = from_json(results)
if len(fields) == 1:
return results[fields[0]]
return results
def count(self):
docs = self.apply_selection(self.data)
return len(docs)
class JsonSimpleQuery(JsonBaseQuery):
def filter(self, record: dict):
if self.label is None:
return True
if self.field not in record:
raise KeyError(self.field)
label = self.label
if isinstance(label, slice):
if label.step is None:
ge = record[self.field] >= label.start
lt = record[self.field] < label.stop
return ge and lt
else:
label = list(range(label.start, label.stop, label.step))
if isinstance(label, list):
return record[self.field] in label
else:
return record[self.field] == label
class JsonIntervalQuery(JsonBaseQuery):
def filter(self, record: dict):
if self.label is None:
return record
if self.field not in record:
raise KeyError(self.field)
interval = self.label
if isinstance(interval, tuple):
left, right = interval
elif isinstance(interval, dict):
left, right = interval["left"], interval["right"]
elif isinstance(interval, slice):
left, right = interval.start, interval.stop
elif hasattr(interval, "left") and hasattr(interval, "right"):
left, right = interval.left, interval.right
else:
left = right = interval
left, right = to_json(left), to_json(right)
return (record[self.field]["left"] < right) and (
record[self.field]["right"] > left
)
class JsonInterpolationQuery(JsonBaseQuery):
def apply_selection(self, records, limit=1):
if self.label is None:
return records
if not all(self.field in record for record in records):
raise KeyError(self.field)
field_values = np.array([record[self.field] for record in records])
before_mask = field_values <= self.label
before_values = field_values[before_mask]
after_mask = field_values > self.label
after_values = field_values[after_mask]
        # sort by distance to the requested label so the nearest documents come first
        before_idxs = np.argsort(np.abs(before_values - self.label))[:limit]
before_records = [records[i] for i in np.flatnonzero(before_mask)]
before_values = [before_records[i] for i in before_idxs]
        after_idxs = np.argsort(np.abs(after_values - self.label))[:limit]
after_records = [records[i] for i in np.flatnonzero(after_mask)]
after_values = [after_records[i] for i in after_idxs]
return before_values + after_values
class JsonMultiQuery(JsonBaseQuery):
def __init__(self, index, data, queries: List[JsonBaseQuery]) -> None:
self.index = index
self.data = data
self.queries = queries
@property
def labels(self):
return {query.field: query.label for query in self.queries}
def apply_selection(self, records):
if len(self.queries) == 1:
return self.queries[0].apply_selection(records)
for query in self.queries:
if isinstance(query, JsonInterpolationQuery):
selections = []
others = [q.field for q in self.queries if q is not query]
if not others:
records = query.apply_selection(records)
continue
                # toolz.groupby returns a dict keyed by the remaining fields,
                # so iterate over its items; apply_selection returns a plain list
                for _, docs in groupby(others, records).items():
                    selection = query.apply_selection(docs)
selections.extend(selection)
if selections:
records = selections
else:
records = []
else:
records = query.apply_selection(records)
return records
@DatasourceInterface.register_interface(list)
class JsonInterface(DatasourceInterface):
@classmethod
def from_url(cls, url: str, jsonpath="", **kwargs):
if url.endswith(".json"):
with fsspec.open(url, **kwargs) as f:
data = json.load(f)
for p in jsonpath.split("."):
data = data[p] if p else data
if not isinstance(data, list):
raise ValueError("JSON file must contain a list of documents")
return cls(data)
raise NotImplementedError
@singledispatchmethod
def compile_query(self, index, label):
raise NotImplementedError(
f"{self.__class__.__name__} does not support {type(index)} indexes."
)
@compile_query.register(Index)
@compile_query.register(str)
def simple_query(self, index, label):
if isinstance(index, str):
index, name = Index(), index
index.name = name
label = to_json(label)
return JsonSimpleQuery(index, self.source, index.name, label)
@compile_query.register(IntervalIndex)
def interval_query(self, index, label):
label = to_json(label)
return JsonIntervalQuery(index, self.source, index.name, label)
@compile_query.register(InterpolatingIndex)
def interpolating_query(self, index, label):
label = to_json(label)
return JsonInterpolationQuery(index, self.source, index.name, label)
@compile_query.register(list)
@compile_query.register(tuple)
@compile_query.register(MultiIndex)
def multi_query(self, index, labels):
if not isinstance(index, MultiIndex):
index = MultiIndex(*index)
queries = [self.compile_query(idx, labels[idx.name]) for idx in index.indexes]
return JsonMultiQuery(index, self.source, queries)
def _find(self, doc):
for i, d in enumerate(self.source):
if doc.same_index(doc.__class__(**d)):
return i
else:
raise KeyError(doc.index_labels)
def insert(self, doc):
doc = to_json(doc.dict())
self.source.append(doc)
def update(self, doc):
for i, d in enumerate(self.source):
if doc.same_index(doc.__class__(**d)):
self.source[i] = to_json(doc.dict())
break
else:
from rframe.schema import UpdateError
raise UpdateError(f"No document with index {doc.index} found.")
def delete(self, doc):
del self.source[self._find(doc)]
def to_json(obj):
return jsonable(obj)
@singledispatch
def from_json(obj):
return obj
@from_json.register(str)
def from_json_str(obj):
match = datetime_re.match(obj) # type: ignore
if match is None:
return obj
return parse_datetime(obj)
@from_json.register(list)
def from_json_list(obj):
return [from_json(v) for v in obj]
@from_json.register(tuple)
def from_json_tuple(obj):
return tuple(from_json(v) for v in obj)
@from_json.register(dict)
def from_json_dict(obj):
if len(obj) == 2 and "left" in obj and "right" in obj:
left, right = from_json((obj["left"], obj["right"]))
return Interval[left, right]
return {k: from_json(v) for k, v in obj.items()}
|
[
"pydantic.datetime_parse.datetime_re.match",
"loguru.logger.debug",
"json.load",
"numpy.abs",
"numpy.flatnonzero",
"toolz.groupby",
"numpy.array",
"rframe.schema.UpdateError",
"pydantic.validators.parse_datetime",
"fsspec.open"
] |
[((9844, 9866), 'pydantic.datetime_parse.datetime_re.match', 'datetime_re.match', (['obj'], {}), '(obj)\n', (9861, 9866), False, 'from pydantic.datetime_parse import datetime_re\n'), ((9935, 9954), 'pydantic.validators.parse_datetime', 'parse_datetime', (['obj'], {}), '(obj)\n', (9949, 9954), False, 'from pydantic.validators import parse_datetime\n'), ((1039, 1090), 'loguru.logger.debug', 'logger.debug', (['"""Applying pandas dataframe selection"""'], {}), "('Applying pandas dataframe selection')\n", (1051, 1090), False, 'from loguru import logger\n'), ((5214, 5266), 'numpy.array', 'np.array', (['[record[self.field] for record in records]'], {}), '([record[self.field] for record in records])\n', (5222, 5266), True, 'import numpy as np\n'), ((9556, 9613), 'rframe.schema.UpdateError', 'UpdateError', (['f"""No document with index {doc.index} found."""'], {}), "(f'No document with index {doc.index} found.')\n", (9567, 9613), False, 'from rframe.schema import UpdateError\n'), ((5586, 5613), 'numpy.flatnonzero', 'np.flatnonzero', (['before_mask'], {}), '(before_mask)\n', (5600, 5613), True, 'import numpy as np\n'), ((5801, 5827), 'numpy.flatnonzero', 'np.flatnonzero', (['after_mask'], {}), '(after_mask)\n', (5815, 5827), True, 'import numpy as np\n'), ((6727, 6751), 'toolz.groupby', 'groupby', (['others', 'records'], {}), '(others, records)\n', (6734, 6751), False, 'from toolz import groupby\n'), ((7317, 7343), 'fsspec.open', 'fsspec.open', (['url'], {}), '(url, **kwargs)\n', (7328, 7343), False, 'import fsspec\n'), ((7373, 7385), 'json.load', 'json.load', (['f'], {}), '(f)\n', (7382, 7385), False, 'import json\n'), ((5496, 5517), 'numpy.abs', 'np.abs', (['before_values'], {}), '(before_values)\n', (5502, 5517), True, 'import numpy as np\n'), ((5713, 5733), 'numpy.abs', 'np.abs', (['after_values'], {}), '(after_values)\n', (5719, 5733), True, 'import numpy as np\n')]
|
#Follow GameText array and auto assign names and enums
#@author
#@category StarFox
#@keybinding
#@menupath
#@toolbar
import jarray
from array import array
import re
import java.lang  # for the IllegalArgumentException check in createEnum
listing = currentProgram.getListing()
AF = currentProgram.getAddressFactory()
DT = currentProgram.getDataTypeManager()
mem = currentProgram.getMemory()
reEnumName = re.compile(r'[^a-zA-Z0-9_]+')
def addrToInt(addr):
return int(str(addr), 16)
def intToAddr(addr):
return AF.getAddress("0x%08X" % addr)
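# Build a Ghidra enum from a {name: value} mapping, auto-sizing the storage
# width and suffixing duplicate member names with their value.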
def createEnum(name, values, size=None):
if name is None: name = "autoEnum"
if size is None:
size = 1
if len(values) > 0xFFFF: size = 4
elif len(values) > 0xFF: size = 2
enum = ghidra.program.model.data.EnumDataType(name, size)
for name, val in values.items():
name = reEnumName.sub('', name)
while True:
try:
enum.add(name, val)
break
except java.lang.IllegalArgumentException:
name = "%s_%X" % (name, val)
DT.addDataType(enum, ghidra.program.model.data.DataTypeConflictHandler.REPLACE_EMPTY_STRUCTS_OR_RENAME_AND_ADD_HANDLER)
return enum
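# Read a big-endian 32-bit pointer from program memory.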
def readAddr(addr):
arr = jarray.zeros(4, "b")
mem.getBytes(addr, arr)
v = (((arr[0] & 0xFF) << 24) |
((arr[1] & 0xFF) << 16) |
((arr[2] & 0xFF) << 8) |
(arr[3] & 0xFF))
return intToAddr(v)
def readString(addr):
if type(addr) is int:
addr = intToAddr(addr)
data = []
while True:
try:
b = mem.getByte(addr)
except ghidra.program.model.mem.MemoryAccessException:
printf("Error: can't read string from address 0x%X\n", addrToInt(addr))
b = 0
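        # A NUL byte terminates the string; 0x7F is the only other byte
        # dropped, so high (Shift-JIS) bytes are kept verbatim.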
if b == 0: break
elif b < 0x7F:
data.append(b & 0xFF)
addr = addr.add(1)
a = array('B', data)
return a.tostring()#.decode('shift-jis')
data = listing.getDataAt(currentAddress)
struc = data.getComponent(0).dataType
sLen = struc.getLength()
numTexts = data.length / sLen
texts = {}
for i in range(numTexts):
entry = data.getComponent(i)
id = entry.getComponent(0).value.value
numPhrases = entry.getComponent(1).value.value
#language = entry.getComponent(5).value.value
phrases = entry.getComponent(6)
strs = []
for j in range(numPhrases):
res = readString(readAddr(phrases.value.add(j*4)))
strs.append(res)
text = '_'.join(filter(lambda s: s != "" and not s.isspace(), strs))
label = text.replace(' ', '_')
# add a comment
entry.setComment(entry.EOL_COMMENT,
"[%04X] %s" % (id, '\n'.join(strs)))
# add a label
try:
        createLabel(phrases.value, "GameText%04X_%s" % (id, label), False)  # makePrimary=False
except:
pass # probably invalid characters
texts["%04X_%s" % (id, label)] = id
printf("%04X: %s\n", id, text)
createEnum("GameTextId", texts)
createEnum("GameTextId32", texts, 4)
|
[
"jarray.zeros",
"array.array",
"re.compile"
] |
[((336, 364), 're.compile', 're.compile', (['"""[^a-zA-Z0-9_]+"""'], {}), "('[^a-zA-Z0-9_]+')\n", (346, 364), False, 'import re\n'), ((1079, 1099), 'jarray.zeros', 'jarray.zeros', (['(4)', '"""b"""'], {}), "(4, 'b')\n", (1091, 1099), False, 'import jarray\n'), ((1609, 1625), 'array.array', 'array', (['"""B"""', 'data'], {}), "('B', data)\n", (1614, 1625), False, 'from array import array\n')]
|
from rest_framework.generics import ListCreateAPIView, UpdateAPIView
from rest_framework.permissions import IsAuthenticated
from ..models import Profile, User
from ..serializers import ProfileSerializer
from rest_framework.response import Response
from django.shortcuts import get_object_or_404
class ListCreate(ListCreateAPIView):
serializer_class = ProfileSerializer
permission_classes = [IsAuthenticated]
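    # GET returns the authenticated user's own profile rather than a list.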
def get(self, request, *args, **kwargs):
user = request.user
profile = Profile.objects.get(user=user)
serializer = self.serializer_class(profile, read_only=True)
return Response(serializer.data)
def perform_create(self, serializer):
user = self.request.user
serializer.save(user=user)
class Update(UpdateAPIView):
queryset = Profile.objects.all()
serializer_class = ProfileSerializer
permission_classes = [IsAuthenticated]
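    # Resolve the profile via the user_pk URL kwarg instead of the default pk.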
def get_object(self):
queryset = self.queryset
user_id = self.kwargs['user_pk']
user = get_object_or_404(User, pk=user_id)
obj = get_object_or_404(Profile, user=user)
self.check_object_permissions(self.request, obj)
return obj
|
[
"django.shortcuts.get_object_or_404",
"rest_framework.response.Response"
] |
[((624, 649), 'rest_framework.response.Response', 'Response', (['serializer.data'], {}), '(serializer.data)\n', (632, 649), False, 'from rest_framework.response import Response\n'), ((1029, 1064), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['User'], {'pk': 'user_id'}), '(User, pk=user_id)\n', (1046, 1064), False, 'from django.shortcuts import get_object_or_404\n'), ((1079, 1116), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Profile'], {'user': 'user'}), '(Profile, user=user)\n', (1096, 1116), False, 'from django.shortcuts import get_object_or_404\n')]
|
import os
from io import TextIOWrapper
from typing import List
import protogen.util as util
from protogen.compiler import Compiler
from protogen.compiler import tab as tab
from protogen.library.python.std import ACCEPTED_TYPES, PYTHON_TYPES
from protogen.util import PGFile, PyClass
class PythonCompiler(Compiler):
def __init__(self, inFiles: List[str], outDir: str, verbose: bool = False):
super().__init__(inFiles, outDir, verbose)
def compile(self):
import shutil
shutil.copyfile(os.path.join(os.path.dirname(__file__),
'message.py'), self.outDir+'/message.py')
for item in self.files:
print('Compiling {} into {}/{}_proto.py'
''.format(item.filename, self.outDir, item.header))
file = open(self.outDir + '/' + item.header + '_proto.py', 'w')
self.generateCode(out=file, file=item)
file.write(os.linesep)
file.close()
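    # Recursively emit one message class (plus any nested classes) with a
    # dict-accepting constructor, accessors, and a closing marker comment.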
def printClass(self, out: TextIOWrapper, file: PGFile, pyClass: PyClass,
indent: int, root: bool):
if root:
out.write(f"\nclass {pyClass.name}(Serializable, Printable):\n")
else:
out.write(
f"\n{tab*indent}class {pyClass.name}(Serializable, Printable):\n")
out.write(
f"\n{tab*(indent+1)}def __init__(self, data: dict = None):\n")
self.printAttributes(out, file, pyClass, indent+1)
out.write(f"\n{tab*(indent+2)}if data is not None:\n")
for item in file.declarations:
if util.inferParentClass(item) == pyClass.fqname:
short = util.inferShortName(item)
v_type, required = file.declarations[item]
if v_type in ACCEPTED_TYPES:
out.write(
f"{tab*(indent+3)}self.data['{short}'][0] = data['{short}']\n")
# local, nested class (needs 'self')
elif v_type in pyClass.gatherSubclasses('name'):
out.write(
f"{tab*(indent+3)}self.data['{short}'][0] = self.{v_type}(data['{short}'])\n")
# local, non-nested class (doesn't need 'self')
else:
out.write(
f"{tab*(indent+3)}self.data['{short}'][0] = {v_type}(data['{short}'])\n")
for item in pyClass.subclasses:
self.printClass(out, file, item, indent+1, False)
self.printMethods(out, file, pyClass, indent+1)
out.write(f"{tab*indent}# End Class {pyClass.name}\n")
def printAttributes(self, out: TextIOWrapper, file: PGFile, pyClass: PyClass, indent: int):
out.write(f'{tab*(indent+1)}self.data = {{\n')
for item in file.declarations:
if util.inferParentClass(item) == pyClass.fqname:
v_type, required = file.declarations[item]
short = util.inferShortName(item)
# primitive data type
if v_type == 'list':
out.write(
f'{tab*(indent+2)}\'{short}\': [[], {required}, False],\n')
elif v_type == 'map':
out.write(
f'{tab*(indent+2)}\'{short}\': [{{}}, {required}, False],\n')
elif v_type in ACCEPTED_TYPES:
out.write(
f'{tab*(indent+2)}\'{short}\': [None, {required}, False],\n')
# local, nested class (needs 'self')
elif v_type in pyClass.gatherSubclasses('name'):
out.write(
f'{tab*(indent+2)}\'{short}\': [self.{v_type}(), {required}, True],\n')
# local, non-nested class (doesn't need 'self')
else:
out.write(
f'{tab*(indent+2)}\'{short}\': [{v_type}(), {required}, True],\n')
out.write(f'{tab*(indent+1)}}}\n')
def printMethods(self, out: TextIOWrapper, file: PGFile, pyClass: PyClass, indent: int):
for item in file.declarations:
if util.inferParentClass(item) == pyClass.fqname:
v_type, req = file.declarations[item]
short = util.inferShortName(item)
# Get methods
if v_type in ACCEPTED_TYPES:
out.write(
f'\n{tab*indent}def get_{short}(self) -> {PYTHON_TYPES[v_type]}:\n')
else:
out.write(
f'\n{tab*indent}def get_{short}(self) -> {v_type}:\n')
out.write(
f'{tab*(indent+1)}return self.data[\'{short}\'][0]\n')
# Set methods
if v_type in PYTHON_TYPES:
out.write(
f'\n{tab*indent}def set_{short}(self, {short}: {PYTHON_TYPES[v_type]}) -> \'{pyClass.name}\':\n'
f'{tab*(indent+1)}self._assertType("{short}", {short}, {PYTHON_TYPES[v_type]}, "{v_type}")\n')
else:
out.write(
f'\n{tab*indent}def set_{short}(self, {short}: {v_type}) -> \'{pyClass.name}\':\n')
out.write(
f'{tab*(indent+1)}self.data[\'{short}\'][0] = {short}\n'
f'{tab*(indent+1)}return self\n')
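    # Emit a <header>Factory whose deserialize() inspects the payload's type
    # key and instantiates the matching root-level class.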
def printFactory(self, out: TextIOWrapper, file: PGFile):
outString = (
"\n\nclass {}Factory(object):\n"
" @staticmethod\n"
" def deserialize(data: bytes):\n"
" data = Serializable.deserialize(data)\n"
" if len(data) > 1:\n"
" raise AttributeError('This is likely not a Protogen packet.')\n"
"\n"
" packetType = None\n"
" for item in data:\n"
" packetType = item[item.rfind('.')+1:]\n"
)
out.write(outString.format(file.header))
for item in file.classes:
if item.parent is None: # root-level class
out.write(f'{tab*3}if packetType == \'{item.name}\':\n'
f'{tab*4}return {item.name}(data[item])\n')
out.write(
" else:\n"
" raise AttributeError('Respective class not found.')\n")
def generateCode(self, out: TextIOWrapper, file: PGFile):
# out.write("from protogen.library.python.message import Serializable\n"),
# out.write("from protogen.library.python.message import Printable\n\n")
out.write("from .message import Printable, Serializable")
for item in file.classes:
if item.parent is None:
self.printClass(out, file, item, 0, True)
self.printFactory(out, file)
|
[
"protogen.util.inferParentClass",
"os.path.dirname",
"protogen.util.inferShortName"
] |
[((534, 559), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (549, 559), False, 'import os\n'), ((1590, 1617), 'protogen.util.inferParentClass', 'util.inferParentClass', (['item'], {}), '(item)\n', (1611, 1617), True, 'import protogen.util as util\n'), ((1661, 1686), 'protogen.util.inferShortName', 'util.inferShortName', (['item'], {}), '(item)\n', (1680, 1686), True, 'import protogen.util as util\n'), ((2806, 2833), 'protogen.util.inferParentClass', 'util.inferParentClass', (['item'], {}), '(item)\n', (2827, 2833), True, 'import protogen.util as util\n'), ((2936, 2961), 'protogen.util.inferShortName', 'util.inferShortName', (['item'], {}), '(item)\n', (2955, 2961), True, 'import protogen.util as util\n'), ((4116, 4143), 'protogen.util.inferParentClass', 'util.inferParentClass', (['item'], {}), '(item)\n', (4137, 4143), True, 'import protogen.util as util\n'), ((4241, 4266), 'protogen.util.inferShortName', 'util.inferShortName', (['item'], {}), '(item)\n', (4260, 4266), True, 'import protogen.util as util\n')]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.10 on 2016-10-13 20:05
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('financialaid', '0016_load_country_income_thresholds'),
]
operations = [
migrations.AlterField(
model_name='countryincomethreshold',
name='country_code',
field=models.CharField(max_length=2, unique=True),
),
]
|
[
"django.db.models.CharField"
] |
[((440, 483), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(2)', 'unique': '(True)'}), '(max_length=2, unique=True)\n', (456, 483), False, 'from django.db import migrations, models\n')]
|
import pytest
import knot_injector
class TestInjector(object):
@pytest.fixture()
def provider(self):
def test_provider(container=None):
return {'name': 'test'}
return test_provider
@pytest.fixture()
def container(self):
return knot_injector.Container()
def test_adding_service(self, container, provider):
container.service()(provider)
assert container.provide('test_provider')() == {'name': 'test'}
def test_adding_factory(self, container, provider):
container.factory()(provider)
assert container.provide('test_provider')() == {'name': 'test'}
def test_adding_non_unique_provider(self, container, provider):
container.service()(provider)
with pytest.raises(AttributeError) as ex:
container.service()(provider)
        assert str(ex.value) == 'Provider name must be unique'
def test_non_callable_provider(self, container):
import random
container.factory(name='test_prov')(random)
assert container('test_prov') == random
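    # Dependencies are injected by parameter name; remaining kwargs pass through.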
def test_injection(self, container, provider):
import random
container.service(name='test_provider')(provider)
container.factory(name='rnd')(random)
@container.inject
def test_func(test_provider, rnd, msg, **kwargs):
return test_provider, rnd, msg, kwargs
res = test_func(msg="don't obey", answer=42)
assert res == (
{'name': 'test'},
random,
"don't obey",
{'answer': 42},
)
|
[
"pytest.raises",
"pytest.fixture",
"knot_injector.Container"
] |
[((71, 87), 'pytest.fixture', 'pytest.fixture', ([], {}), '()\n', (85, 87), False, 'import pytest\n'), ((226, 242), 'pytest.fixture', 'pytest.fixture', ([], {}), '()\n', (240, 242), False, 'import pytest\n'), ((283, 308), 'knot_injector.Container', 'knot_injector.Container', ([], {}), '()\n', (306, 308), False, 'import knot_injector\n'), ((763, 792), 'pytest.raises', 'pytest.raises', (['AttributeError'], {}), '(AttributeError)\n', (776, 792), False, 'import pytest\n')]
|
from bisect import bisect_left, bisect
def binsearchright(a,k):
c = bisect(a, k)
return c
def binsearchleft(a,k):
b = bisect_left(a,k)
return b
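# k occurs (right insertion point - left insertion point) times in the
# sorted array.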
a = [1,1,2,2,2,4,4,4,4,4,4,8,8]
k = 8
res = binsearchleft(a, k)
res1 = binsearchright(a,k)
print("{} is present {} times in the array".format(k,abs(res-res1)))
|
[
"bisect.bisect",
"bisect.bisect_left"
] |
[((72, 84), 'bisect.bisect', 'bisect', (['a', 'k'], {}), '(a, k)\n', (78, 84), False, 'from bisect import bisect_left, bisect\n'), ((131, 148), 'bisect.bisect_left', 'bisect_left', (['a', 'k'], {}), '(a, k)\n', (142, 148), False, 'from bisect import bisect_left, bisect\n')]
|
from django.conf.urls import url
from django.urls import path, include
from apps.users.api.views import CustomConfirmEmailView
from . import views as acc_views
urlpatterns = [
path("", include('rest_auth.urls')),
# The django-rest-passwordreset urls to request a token and confirm pw-reset
path('reset-password/', include('django_rest_passwordreset.urls', namespace='password_reset')),
# overrides register with custom view
# must be in the front of rest_auth.registration.urls
# RES: https://github.com/Tivix/django-rest-auth/issues/292
# RES: https://gist.github.com/iMerica/a6a7efd80d49d6de82c7928140676957
url(r'^register/account-confirm-email/(?P<key>[-:\w]+)/$', CustomConfirmEmailView.as_view(),
name='account_confirm_email'),
path("register/", include('rest_auth.registration.urls')),
    # RES PASSWORD RESET : https://stackoverflow.com/questions/53945056/django-rest-auth-password-reset
# url(r'^', include('django.contrib.auth.urls')),
]
|
[
"apps.users.api.views.CustomConfirmEmailView.as_view",
"django.urls.include"
] |
[((230, 255), 'django.urls.include', 'include', (['"""rest_auth.urls"""'], {}), "('rest_auth.urls')\n", (237, 255), False, 'from django.urls import path, include\n'), ((368, 437), 'django.urls.include', 'include', (['"""django_rest_passwordreset.urls"""'], {'namespace': '"""password_reset"""'}), "('django_rest_passwordreset.urls', namespace='password_reset')\n", (375, 437), False, 'from django.urls import path, include\n'), ((744, 776), 'apps.users.api.views.CustomConfirmEmailView.as_view', 'CustomConfirmEmailView.as_view', ([], {}), '()\n', (774, 776), False, 'from apps.users.api.views import CustomConfirmEmailView\n'), ((839, 877), 'django.urls.include', 'include', (['"""rest_auth.registration.urls"""'], {}), "('rest_auth.registration.urls')\n", (846, 877), False, 'from django.urls import path, include\n')]
|
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
dataset = pd.read_csv('Market_Basket_Optimisation.csv', header=None)
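# Each of the 7501 rows is one basket; collect its 20 item columns as strings.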
transactions = []
for i in range(0,7501):
transactions.append([str(dataset.values[i,j]) for j in range(0,20)])
from apyori import apriori
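# Keep rules with support >= 0.3% of baskets, confidence >= 20% and lift >= 3.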
rules = apriori(transactions,min_support = 0.003,min_confidence = 0.2,min_lift = 3,min_length = 2)
results = list(rules)
|
[
"pandas.read_csv",
"apyori.apriori"
] |
[((82, 140), 'pandas.read_csv', 'pd.read_csv', (['"""Market_Basket_Optimisation.csv"""'], {'header': 'None'}), "('Market_Basket_Optimisation.csv', header=None)\n", (93, 140), True, 'import pandas as pd\n'), ((294, 384), 'apyori.apriori', 'apriori', (['transactions'], {'min_support': '(0.003)', 'min_confidence': '(0.2)', 'min_lift': '(3)', 'min_length': '(2)'}), '(transactions, min_support=0.003, min_confidence=0.2, min_lift=3,\n min_length=2)\n', (301, 384), False, 'from apyori import apriori\n')]
|
import mysql.connector
from mysql.connector import errorcode
import time
import calendar
import datetime
class MysqlConnection:
def __init__(self):
        self.mysql_connection()  # establishes self.cnx and self.cursor
def mysql_connection(self):
self.cnx = mysql.connector.connect(user='root', password='<PASSWORD>',
host='127.0.0.1',
database='secure_banking_system')
self.cursor = self.cnx.cursor(buffered=True)
def check_username(self, username):
self.cursor.execute(
"SELECT * FROM users WHERE username = %s",
(username,)
)
results = self.cursor.fetchall()
row_count = self.cursor.rowcount
if row_count == 0:
return 1
else:
return 0
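    # NOTE: the queries below splice values in with the % operator; passing a
    # parameter tuple to cursor.execute() instead would prevent SQL injection.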
def insert_into_table(self, username, password_hash, salt, confidentiality_level, integrity_level,
number_of_attempts, is_block):
self.cursor.execute(
'INSERT INTO users(username, password_hash, salt, confidentiality_level, integrity_level, number_of_attempts, is_block) VALUES(\'%s\',\'%s\',\'%s\',1,1,0,0);' % (
username, password_hash, salt))
self.cnx.commit()
def fetch_hash_and_salt(self, username):
self.cursor.execute(
"select password_hash, salt from users where username = %s",
(username,)
)
results = self.cursor.fetchall()
return results
def fetch_block_information(self, username):
self.cursor.execute(
"select number_of_attempts, is_block from users where username = %s",
(username,)
)
results = self.cursor.fetchall()
return results
def increase_number_of_attempts_and_is_block(self, username):
self.cursor.execute(
"select number_of_attempts, is_block from users where username = %s",
(username,)
)
results = self.cursor.fetchall()
for i in results:
result = i
number_of_attempts, is_block = result
if number_of_attempts == 2:
self.cursor.execute(
'update users set number_of_attempts = number_of_attempts +1, is_block = is_block +1 where username= \'%s\';' % (
username,))
self.cnx.commit()
else:
self.cursor.execute(
'update users set number_of_attempts = number_of_attempts +1 where username= \'%s\';' % (username,))
self.cnx.commit()
def reset_number_of_attempts_and_is_block(self, username):
self.cursor.execute('update users set number_of_attempts = 0 where username= \'%s\';' % (username,))
self.cnx.commit()
self.cursor.execute('update users set is_block = 0 where username= \'%s\';' % (username,))
self.cnx.commit()
def close_connection(self):
self.cursor.close()
self.cnx.close()
def create_new_account(self, username, account_type, amount, conf_label, integrity_label):
self.cursor.execute("select ID from users where username = %s", (username,))
ids = self.cursor.fetchone()
user_id = ids[0]
self.cursor.execute("select account_no from accounts where owner_id = %s", (user_id,))
acids = self.cursor.fetchone()
if acids == None:
self.cursor.execute(
'INSERT INTO accounts(owner_id, account_type_id, amount, confidentiality_level, integrity_level) VALUES(\'%s\',\'%s\',\'%s\',\'%s\',\'%s\');' % (
user_id, account_type, amount, conf_label, integrity_label,))
self.cnx.commit()
self.cursor.execute(
"select account_no from accounts where owner_id = %s and account_type_id = %s and amount = %s and confidentiality_level = %s and integrity_level = %s",
(user_id, account_type, amount, conf_label, integrity_label,))
nos = self.cursor.fetchone()
account_no = nos[0]
response = f"Account Created Successfully. Your Account Number is: {account_no}"
return response
else:
acc_id = acids[0]
response = f"You have already created account {acc_id}."
return response
def add_join_request(self, username, account_no):
self.cursor.execute("select ID from users where username = %s", (username,))
ids = self.cursor.fetchone()
user_id = ids[0]
self.cursor.execute('select accept_status from account_user where account_no = %s and user_id = %s',
(account_no, user_id))
prev = self.cursor.fetchall()
response = ''
if len(prev) != 0:
if prev[0] == 1:
response = f"You Have Already Joint This Account."
else:
response = f"You Have Already Requested to Join This Account."
else:
self.cursor.execute("select amount from accounts where account_no = %s",(account_no,))
acc = self.cursor.fetchone()
if acc != None:
self.cursor.execute(
'Insert into account_user(account_no, user_id) VALUES (\'%s\',\'%s\');' % (account_no, user_id,))
self.cnx.commit()
response = f"Join Request Sent to Account Owner."
else:
response = f"Account Not Found"
return response
def accept_join_request(self, owner, username, conf_label, integrity_label):
self.cursor.execute("select ID from users where username = %s", (owner,))
oids = self.cursor.fetchone()
owner_id = oids[0]
self.cursor.execute("select ID from users where username = %s", (username,))
uids = self.cursor.fetchone()
if uids != None:
user_id = uids[0]
self.cursor.execute("select account_no from accounts where owner_id = %s", (owner_id,))
nos = self.cursor.fetchone()
if nos != None:
account_no = nos[0]
self.cursor.execute("select * from account_user where account_no = %s and user_id = %s", (account_no ,user_id,))
exist_rq = self.cursor.fetchone()
if exist_rq != None:
self.cursor.execute(
'update account_user set accept_status = 1, confidentiality_level = %s, integrity_level = %s where account_no = %s and user_id = %s',
(conf_label, integrity_label, account_no, user_id))
self.cnx.commit()
response = f"User \033[1m{username}\033[0m Joint to Account \033[1m{account_no}\033[0m. "
else:
response = f"Account Not Found. This user didn't send any Join rq."
else:
response = f"Account Not Found. This user didn't send any Join rq."
else:
response = f"Account Not Found"
return response
def show_list_of_account(self, username):
self.cursor.execute("select ID from users where username = %s", (username,))
uids = self.cursor.fetchone()
user_id = uids[0]
self.cursor.execute("select account_no from accounts where owner_id = %s", (user_id,))
nos = self.cursor.fetchone()
if nos!= None:
account_no = nos[0]
self.cursor.execute("select account_no from account_user where user_id = %s and accept_status = 1", (user_id,))
joints = self.cursor.fetchall()
return account_no, joints
else:
return "No Accounts Found.",''
def account_info(self, username, account_no):
self.cursor.execute("select amount from accounts where account_no = %s",(account_no,))
accexists = self.cursor.fetchone()
if accexists != None:
self.cursor.execute("select ID from users where username = %s", (username,))
uids = self.cursor.fetchone()
user_id = uids[0]
query1 = """select users.username,accounts.DateCreated,accounts.amount,account_type.title
from accounts inner join users on accounts.owner_id = users.ID
inner join account_type on account_type.ID = accounts.account_type_id
where accounts.account_no = %s"""
self.cursor.execute(query1, (account_no,))
account_info = self.cursor.fetchone()
# account_info=''
# if acci!= None:
# account_info = acci
query2 = """select users.username
from account_user inner join users on account_user.user_id = users.ID
where account_user.account_no = %s and account_user.accept_status = 1"""
self.cursor.execute(query2, (account_no,))
owners = self.cursor.fetchall()
# owners =''
# if len(os)>0:
# owners = os
query3 = """select *
from transactions
where from_account = %s order by transaction_date DESC limit 5"""
self.cursor.execute(query3, (account_no,))
last5_deposits = self.cursor.fetchall()
# last5_deposits = ''
# if len(last5)>0:
# last5_deposits = last5
query4 = """select *
from transactions
where to_account = %s order by transaction_date DESC limit 5"""
self.cursor.execute(query4, (account_no,))
last5_withdraw = self.cursor.fetchall()
# last5_withdraw=''
# if len(last5w)>0:
# last5_withdraw = last5w
return account_info, owners, last5_deposits, last5_withdraw
else:
print("AAGGGGGGAAGGGGGGAAGGGGGGAAGGGGGGAAGGGGGGAAGGGGGG -> 3")
return 'Account Not Found', '', '', ''
def deposit_to_account(self, owner, to_account, amount):
response = ''
self.cursor.execute("select ID from users where username = %s", (owner,))
uids = self.cursor.fetchone()
user_id = uids[0]
self.cursor.execute("select account_no from accounts where owner_id = %s", (user_id,))
nos = self.cursor.fetchone()
if nos != None:
account_no = nos[0]
self.cursor.execute("select amount from accounts where account_no = %s", (account_no,))
ams = self.cursor.fetchone()
cur_amount = ams[0]
if float(cur_amount) < float(amount):
response = 'Your account balance is not enough.'
else:
self.cursor.execute("select amount from accounts where account_no = %s", (to_account,))
tms = self.cursor.fetchone()
to_cur_amount = ''
if tms != None:
to_cur_amount = tms[0]
self.cursor.execute('update accounts set amount = %s where account_no = %s ',
(float(cur_amount) - float(amount), account_no))
self.cnx.commit()
self.cursor.execute('update accounts set amount = %s where account_no = %s ',
(float(to_cur_amount) + float(amount), to_account))
self.cnx.commit()
self.cursor.execute(
'Insert into transactions(to_account, from_account, amount) VALUES (\'%s\',\'%s\',\'%s\');' % (
to_account, account_no, amount,))
self.cnx.commit()
response = f"Successful Transaction. Current Balance = {float(cur_amount) - float(amount)}"
else:
response = f"Destination Account Number Not Found."
else:
response = f"You Don't Have Any Account. First of All, Create an Account."
return response
def withdraw(self, username, from_account, to_account, amount):
response = ''
self.cursor.execute("select ID from users where username = %s", (username,))
uids = self.cursor.fetchone()
user_id = uids[0]
self.cursor.execute("select account_no from account_user where account_no = %s and user_id = %s",
(from_account, user_id,))
j = self.cursor.fetchone()
if j != None:
is_joint = j[0]
self.cursor.execute("select amount from accounts where account_no = %s", (from_account,))
ams = self.cursor.fetchone()
if ams != None:
cur_amount = ams[0]
if float(cur_amount) < float(amount):
response = 'Source account balance is not enough.'
else:
self.cursor.execute("select amount from accounts where account_no = %s", (to_account,))
tms = self.cursor.fetchone()
to_cur_amount = ''
if tms != None:
to_cur_amount = tms[0]
self.cursor.execute('update accounts set amount = %s where account_no = %s ',
(float(cur_amount) - float(amount), from_account))
self.cnx.commit()
self.cursor.execute('update accounts set amount = %s where account_no = %s ',
(float(to_cur_amount) + float(amount), to_account))
self.cnx.commit()
self.cursor.execute(
'Insert into transactions(to_account, from_account, amount) VALUES (\'%s\',\'%s\',\'%s\');' % (
to_account, from_account, amount,))
self.cnx.commit()
response = f"Successful Transaction. Current Balance = {float(cur_amount) - float(amount)}"
else:
response = f"Destination Account Number Not Found."
else:
response = f"Source Account Number Not Found."
else:
response = f"You Are Not Joint To This Account"
return response
def get_security_labels(self, username, account_no):
response = 'NOPE'
user_integrity_label = -1
user_confidentiality_label = -1
acc_integrity_label = -1
acc_confidentiality_label = -1
self.cursor.execute("select ID from users where username = %s", (username,))
uids = self.cursor.fetchone()
user_id = uids[0]
# print("userID:",user_id)
self.cursor.execute("select owner_id from accounts where account_no = %s", (account_no,))
tms = self.cursor.fetchone()
owner_id = ''
if tms != None:
owner_id = tms[0]
# print("ownerid:",owner_id)
self.cursor.execute("select confidentiality_level, integrity_level from accounts where account_no = %s",
(account_no,))
acc_levels = self.cursor.fetchone()
# print("acc levels:",acc_levels)
acc_integrity_label = acc_levels[1]
acc_confidentiality_label = acc_levels[0]
else:
response = f"Account Not Found."
user_integrity_label = -1
user_confidentiality_label = -1
acc_integrity_label = -1
acc_confidentiality_label = -1
return response, user_integrity_label, user_confidentiality_label, acc_integrity_label, acc_confidentiality_label
if owner_id == user_id:
response = f"OK"
user_integrity_label = acc_integrity_label
user_confidentiality_label = acc_confidentiality_label
return response, user_integrity_label, user_confidentiality_label, acc_integrity_label, acc_confidentiality_label
else:
self.cursor.execute(
'select confidentiality_level, integrity_level from account_user where account_no = %s and user_id = %s and accept_status = 1',
(account_no, user_id))
levels = self.cursor.fetchone()
if levels != None:
user_integrity_label = levels[1]
user_confidentiality_label = levels[0]
# print("user on acc:",levels)
response = f"OK"
return response, user_integrity_label, user_confidentiality_label, acc_integrity_label, acc_confidentiality_label
else:
response = f"Not joint to this account."
user_integrity_label = -1
user_confidentiality_label = -1
acc_integrity_label = -1
acc_confidentiality_label = -1
return response, user_integrity_label, user_confidentiality_label, acc_integrity_label, acc_confidentiality_label
def record_log(self, who, action, status, amount=None, from_account=None, to_account=None):
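        # Roundabout way of getting the current local time as an ISO-8601 string.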
readable_time = datetime.datetime.fromtimestamp(calendar.timegm(time.gmtime())).isoformat()
if amount is None and from_account is None:
self.cursor.execute(
'INSERT INTO log(username, time_of_action, action, status, amount, from_account, to_account ) VALUES(\'%s\',\'%s\',\'%s\',\'%s\',NULL ,NULL ,NULL );' % (
who, readable_time, action, status))
self.cnx.commit()
elif from_account is None and amount is not None:
self.cursor.execute(
'INSERT INTO log(username, time_of_action, action, status, amount, from_account, to_account ) VALUES(\'%s\',\'%s\',\'%s\',\'%s\',\'%s\' ,NULL,NULL );' % (
who, readable_time, action, status, amount))
self.cnx.commit()
        elif from_account is not None and amount is None and to_account is None:
self.cursor.execute(
'INSERT INTO log(username, time_of_action, action, status, amount, from_account, to_account ) VALUES(\'%s\',\'%s\',\'%s\',\'%s\',NULL,\'%s\' ,NULL );' % (
who, readable_time, action, status, from_account))
self.cnx.commit()
elif from_account is not None and amount is None and to_account is not None:
self.cursor.execute(
'INSERT INTO log(username, time_of_action, action, status, amount, from_account, to_account ) VALUES(\'%s\',\'%s\',\'%s\',\'%s\',\'%s\',NULL ,\'%s\' );' % (
who, readable_time, action, status, from_account,to_account))
self.cnx.commit()
else:
self.cursor.execute(
'INSERT INTO log(username, time_of_action, action, status, amount, from_account, to_account ) VALUES(\'%s\',\'%s\',\'%s\',\'%s\',\'%s\',\'%s\',\'%s\');' % (
who, readable_time, action, status, amount, from_account, to_account))
self.cnx.commit()
|
[
"time.gmtime"
] |
[((17168, 17181), 'time.gmtime', 'time.gmtime', ([], {}), '()\n', (17179, 17181), False, 'import time\n')]
|
import uuid
from typing import Literal, Optional
from chaosiqagent.types import Job
def create_job(target_type: Optional[Literal["experiment", "verification"]] = None):
target_id = uuid.uuid4()
if not target_type:
target_type = "experiment"
return Job(
id=uuid.uuid4(),
agent_id=uuid.uuid4(),
org_id=uuid.uuid4(),
team_id=uuid.uuid4(),
target_id=target_id,
target_type=target_type,
target_url=f"https://console.example.com/assets/{target_type}s/{target_id}",
access_token="<PASSWORD>",
payload={},
)
|
[
"uuid.uuid4"
] |
[((168, 180), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (178, 180), False, 'import uuid\n'), ((267, 279), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (277, 279), False, 'import uuid\n'), ((298, 310), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (308, 310), False, 'import uuid\n'), ((327, 339), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (337, 339), False, 'import uuid\n'), ((357, 369), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (367, 369), False, 'import uuid\n')]
|
from __future__ import absolute_import, division
import cv2
import numpy as np
import time
from ..utils.viz import show_frame
class Tracker(object):
def __init__(self, name):
self.name = name
def init(self, image, init_rect):
raise NotImplementedError()
def update(self, image):
raise NotImplementedError()
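    # Run the tracker over a whole image sequence, timing each frame.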
def track(self, img_files, init_rect, visualize=False):
frame_num = len(img_files)
bndboxes = np.zeros((frame_num, 4))
bndboxes[0, :] = init_rect
speed_fps = np.zeros(frame_num)
for f, img_file in enumerate(img_files):
image = cv2.imread(img_file)
if image.ndim == 2:
image = cv2.cvtColor(image, cv2.COLOR_GRAY2RGB)
elif image.ndim == 3:
image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
start_time = time.time()
if f == 0:
self.init(image, init_rect)
else:
bndboxes[f, :] = self.update(image)
elapsed_time = time.time() - start_time
speed_fps[f] = 1. / elapsed_time
if visualize:
show_frame(image, bndboxes[f, :], fig_n=1)
return bndboxes, speed_fps
from .siamfc import TrackerSiamFC
from .goturn import TrackerGOTURN
from .csk import TrackerCSK
from .kcf import TrackerKCF
from .dcf import TrackerDCF
from .dcfnet import TrackerDCFNet
from .mosse import TrackerMOSSE
from .dsst import TrackerDSST
|
[
"cv2.cvtColor",
"cv2.imread",
"numpy.zeros",
"time.time"
] |
[((465, 489), 'numpy.zeros', 'np.zeros', (['(frame_num, 4)'], {}), '((frame_num, 4))\n', (473, 489), True, 'import numpy as np\n'), ((545, 564), 'numpy.zeros', 'np.zeros', (['frame_num'], {}), '(frame_num)\n', (553, 564), True, 'import numpy as np\n'), ((635, 655), 'cv2.imread', 'cv2.imread', (['img_file'], {}), '(img_file)\n', (645, 655), False, 'import cv2\n'), ((875, 886), 'time.time', 'time.time', ([], {}), '()\n', (884, 886), False, 'import time\n'), ((712, 751), 'cv2.cvtColor', 'cv2.cvtColor', (['image', 'cv2.COLOR_GRAY2RGB'], {}), '(image, cv2.COLOR_GRAY2RGB)\n', (724, 751), False, 'import cv2\n'), ((1051, 1062), 'time.time', 'time.time', ([], {}), '()\n', (1060, 1062), False, 'import time\n'), ((810, 848), 'cv2.cvtColor', 'cv2.cvtColor', (['image', 'cv2.COLOR_BGR2RGB'], {}), '(image, cv2.COLOR_BGR2RGB)\n', (822, 848), False, 'import cv2\n')]
|
from unittest import TestCase
from pyfibre.model.objects.segments import (
FibreSegment, CellSegment
)
from pyfibre.tests.probe_classes.objects import (
ProbeFibreNetwork, generate_probe_segment)
from pyfibre.pyfibre_runner import PyFibreRunner
LOAD_NETWORK_PATH = "networkx.read_gpickle"
SAVE_NETWORK_PATH = "networkx.write_gpickle"
LOAD_JSON_PATH = "json.load"
SAVE_JSON_PATH = "json.dump"
LOAD_REGION_PATH = "numpy.load"
SAVE_REGION_PATH = "numpy.save"
def mock_load(*args, klass=None, **kwargs):
print('mock_load called')
return klass()
class TestPyFibreRunner(TestCase):
def setUp(self):
self.runner = PyFibreRunner()
self.fibre_networks = [ProbeFibreNetwork()]
self.fibre_segments = [
generate_probe_segment(FibreSegment)]
self.cell_segments = [
generate_probe_segment(CellSegment)]
def test_defaults(self):
self.assertEqual((5, 35), self.runner.p_denoise)
|
[
"pyfibre.tests.probe_classes.objects.ProbeFibreNetwork",
"pyfibre.pyfibre_runner.PyFibreRunner",
"pyfibre.tests.probe_classes.objects.generate_probe_segment"
] |
[((647, 662), 'pyfibre.pyfibre_runner.PyFibreRunner', 'PyFibreRunner', ([], {}), '()\n', (660, 662), False, 'from pyfibre.pyfibre_runner import PyFibreRunner\n'), ((694, 713), 'pyfibre.tests.probe_classes.objects.ProbeFibreNetwork', 'ProbeFibreNetwork', ([], {}), '()\n', (711, 713), False, 'from pyfibre.tests.probe_classes.objects import ProbeFibreNetwork, generate_probe_segment\n'), ((760, 796), 'pyfibre.tests.probe_classes.objects.generate_probe_segment', 'generate_probe_segment', (['FibreSegment'], {}), '(FibreSegment)\n', (782, 796), False, 'from pyfibre.tests.probe_classes.objects import ProbeFibreNetwork, generate_probe_segment\n'), ((841, 876), 'pyfibre.tests.probe_classes.objects.generate_probe_segment', 'generate_probe_segment', (['CellSegment'], {}), '(CellSegment)\n', (863, 876), False, 'from pyfibre.tests.probe_classes.objects import ProbeFibreNetwork, generate_probe_segment\n')]
|
"""
Code that goes along with the Airflow located at:
http://airflow.readthedocs.org/en/latest/tutorial.html
"""
from datetime import datetime, timedelta
from airflow import DAG
from airflow.contrib.kubernetes.pod import Port
from airflow.contrib.operators.kubernetes_pod_operator import KubernetesPodOperator
from airflow.operators.bash_operator import BashOperator
from airflow.operators.docker_operator import DockerOperator
port = Port('http', 80)
env_vars = {
"KUBERNETES_SERVICE_HOST": "10.0.0.1",
"KUBERNETES_SERVICE_PORT": "443",
"KUBERNETES_SERVICE_PORT_HTTPS": "443"
}
default_args = {
"owner": "airflow",
"depends_on_past": False,
"start_date": datetime(2020, 6, 20),
"email": ["<EMAIL>"],
"email_on_failure": False,
"email_on_retry": False,
"retries": 1,
"retry_delay": timedelta(minutes=1),
# 'queue': 'bash_queue',
# 'pool': 'backfill',
# 'priority_weight': 10,
# 'end_date': datetime(2016, 1, 1),
}
dag = DAG(
"sampledatasource_etl",
default_args=default_args,
schedule_interval=timedelta(minutes=5),
concurrency=1,
max_active_runs=1
)
# t1, t2 and t3 are examples of tasks created by instantiating operators
t1 = BashOperator(task_id="print_date", bash_command="date", dag=dag)
# t2 = BashOperator(task_id="sleep", bash_command="sleep 5", retries=3, dag=dag)
# templated_command = """
# {% for i in range(5) %}
# echo "{{ ds }}"
# echo "{{ macros.ds_add(ds, 7)}}"
# echo "{{ params.my_param }}"
# {% endfor %}
# """
#
# t3 = BashOperator(
# task_id="templated",
# bash_command=templated_command,
# params={"my_param": "Parameter I passed in"},
# dag=dag,
# )
# t4 = DockerOperator(
# task_id="docker_run",
# image="mavx/sampledatasource_etl:latest",
# api_version="auto",
# auto_remove=True,
# command=["python", "etl.py"],
# network_mode="host",
# tty=True,
# docker_conn_id="dockerhub",
# dag=dag,
# )
#
# t5 = DockerOperator(
# task_id="docker_run_echo",
# image="python:3.7-slim",
# api_version="auto",
# auto_remove=True,
# command=["echo", "1"],
# network_mode="host",
# tty=True,
# docker_conn_id="dockerhub",
# dag=dag,
# )
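# Run the ETL image as a pod on the local docker-desktop cluster.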
t6 = KubernetesPodOperator(
task_id="k8s_run",
in_cluster=False,
namespace="default",
image="mavx/sampledatasource_etl:latest",
cmds=["python", "etl.py"],
name="k8s_task_pod",
cluster_context="docker-desktop",
is_delete_operator_pod=True,
ports=[port],
dag=dag,
)
# t5 = BashOperator(
# task_id="docker_run_echo",
# bash_command="docker ps",
# # params={"my_param": "Parameter I passed in"},
# dag=dag,
# )
# t2.set_upstream(t1)
# t3.set_upstream(t1)
# t4.set_upstream(t3)
# t5.set_upstream(t3)
t1 >> t6
if __name__ == '__main__':
t6.execute("ds")
|
[
"airflow.contrib.kubernetes.pod.Port",
"airflow.contrib.operators.kubernetes_pod_operator.KubernetesPodOperator",
"datetime.datetime",
"datetime.timedelta",
"airflow.operators.bash_operator.BashOperator"
] |
[((437, 453), 'airflow.contrib.kubernetes.pod.Port', 'Port', (['"""http"""', '(80)'], {}), "('http', 80)\n", (441, 453), False, 'from airflow.contrib.kubernetes.pod import Port\n'), ((1215, 1279), 'airflow.operators.bash_operator.BashOperator', 'BashOperator', ([], {'task_id': '"""print_date"""', 'bash_command': '"""date"""', 'dag': 'dag'}), "(task_id='print_date', bash_command='date', dag=dag)\n", (1227, 1279), False, 'from airflow.operators.bash_operator import BashOperator\n'), ((2263, 2531), 'airflow.contrib.operators.kubernetes_pod_operator.KubernetesPodOperator', 'KubernetesPodOperator', ([], {'task_id': '"""k8s_run"""', 'in_cluster': '(False)', 'namespace': '"""default"""', 'image': '"""mavx/sampledatasource_etl:latest"""', 'cmds': "['python', 'etl.py']", 'name': '"""k8s_task_pod"""', 'cluster_context': '"""docker-desktop"""', 'is_delete_operator_pod': '(True)', 'ports': '[port]', 'dag': 'dag'}), "(task_id='k8s_run', in_cluster=False, namespace=\n 'default', image='mavx/sampledatasource_etl:latest', cmds=['python',\n 'etl.py'], name='k8s_task_pod', cluster_context='docker-desktop',\n is_delete_operator_pod=True, ports=[port], dag=dag)\n", (2284, 2531), False, 'from airflow.contrib.operators.kubernetes_pod_operator import KubernetesPodOperator\n'), ((684, 705), 'datetime.datetime', 'datetime', (['(2020)', '(6)', '(20)'], {}), '(2020, 6, 20)\n', (692, 705), False, 'from datetime import datetime, timedelta\n'), ((830, 850), 'datetime.timedelta', 'timedelta', ([], {'minutes': '(1)'}), '(minutes=1)\n', (839, 850), False, 'from datetime import datetime, timedelta\n'), ((1071, 1091), 'datetime.timedelta', 'timedelta', ([], {'minutes': '(5)'}), '(minutes=5)\n', (1080, 1091), False, 'from datetime import datetime, timedelta\n')]
|
#!/usr/bin/env python
#
# This file is part of snmpclitools software.
#
# Copyright (c) 2005-2019, <NAME> <<EMAIL>>
# License: http://snmplabs.com/snmpclitools/license.html
#
# Notificaton Originator
#
import os
import socket
import sys
import traceback
from pysnmp import error
from pysnmp.entity import engine
from pysnmp.entity.rfc3413 import ntforg
from pysnmp.proto.api import v1, v2c
from pysnmp.proto.proxy import rfc2576
from snmpclitools.cli import base
from snmpclitools.cli import main
from snmpclitools.cli import mibview
from snmpclitools.cli import msgmod
from snmpclitools.cli import pdu
from snmpclitools.cli import secmod
from snmpclitools.cli import target
def getUsage():
return """\
Usage: %s [OPTIONS] <MANAGER> <PARAMETERS>
%s%s%s%s
TRAP options:
-C<TRAPOPT>: set various application specific behaviours:
i: send INFORM-PDU, expect a response
%s
SNMPv1 TRAP management parameters:
enterprise-oid agent generic-trap specific-trap uptime <management-params>
where:
generic-trap: coldStart|warmStart|linkDown|linkUp|authenticationFailure
|egpNeighborLoss|enterpriseSpecific
SNMPv2/SNMPv3 management parameters:
uptime trap-oid <management-params>
%s\
""" % (os.path.basename(sys.argv[0]),
main.getUsage(),
msgmod.getUsage(),
secmod.getUsage(),
mibview.getUsage(),
target.getUsage(),
pdu.getWriteUsage())
# Construct c/l interpreter for this app
class Scanner(msgmod.MPScannerMixIn,
secmod.SMScannerMixIn,
mibview.MibViewScannerMixIn,
target.TargetScannerMixIn,
pdu.ReadPduScannerMixIn,
main.MainScannerMixIn,
base.ScannerTemplate):
def t_appopts(self, s):
""" -C """
self.rv.append(base.ConfigToken('appopts'))
def t_genericTrap(self, s):
""" coldStart|warmStart|linkDown|linkUp|authenticationFailure|egpNeighborLoss|enterpriseSpecific """
self.rv.append(base.ConfigToken('genericTrap', s))
class Parser(msgmod.MPParserMixIn,
secmod.SMParserMixIn,
mibview.MibViewParserMixIn,
target.TargetParserMixIn,
pdu.WritePduParserMixIn,
main.MainParserMixIn,
base.ParserTemplate):
def p_trapParams(self, args):
"""
TrapV1Params ::= EnterpriseOid whitespace AgentName whitespace GenericTrap whitespace SpecificTrap whitespace Uptime whitespace VarBinds
EnterpriseOid ::= string
AgentName ::= string
GenericTrap ::= genericTrap
SpecificTrap ::= string
Uptime ::= string
TrapV2cParams ::= Uptime whitespace TrapOid whitespace VarBinds
TrapOid ::= string
"""
def p_paramsSpec(self, args):
"""
Params ::= TrapV1Params
Params ::= TrapV2cParams
"""
def p_appOptions(self, args):
"""
Option ::= ApplicationOption
ApplicationOption ::= appopts whitespace string
ApplicationOption ::= appopts string
"""
class __Generator(base.GeneratorTemplate):
def n_ApplicationOption(self, cbCtx, node):
snmpEngine, ctx = cbCtx
if len(node) > 2:
opt = node[2].attr
else:
opt = node[1].attr
for c in opt:
if c == 'i':
ctx['informMode'] = 1
else:
raise error.PySnmpError('bad -C option - "%s"' % c)
def n_EnterpriseOid(self, cbCtx, node):
snmpEngine, ctx = cbCtx
ctx['EnterpriseOid'] = node[0].attr
def n_AgentName(self, cbCtx, node):
snmpEngine, ctx = cbCtx
try:
ctx['AgentName'] = socket.gethostbyname(node[0].attr)
except socket.error:
raise error.PySnmpError(
'Bad agent name %s: %s' % (node[0].attr, sys.exc_info()[1])
)
def n_GenericTrap(self, cbCtx, node):
snmpEngine, ctx = cbCtx
ctx['GenericTrap'] = node[0].attr
def n_SpecificTrap(self, cbCtx, node):
snmpEngine, ctx = cbCtx
ctx['SpecificTrap'] = node[0].attr
def n_Uptime(self, cbCtx, node):
snmpEngine, ctx = cbCtx
ctx['Uptime'] = int(node[0].attr)
def n_TrapOid(self, cbCtx, node):
snmpEngine, ctx = cbCtx
ctx['TrapOid'] = node[0].attr
def n_TrapV1Params_exit(self, cbCtx, node):
snmpEngine, ctx = cbCtx
# Initialize v1 PDU with passed params, then proxy it into v2c PDU
v1Pdu = v1.TrapPDU()
v1.apiTrapPDU.setDefaults(v1Pdu)
if 'EnterpriseOid' in ctx:
v1.apiTrapPDU.setEnterprise(v1Pdu, ctx['EnterpriseOid'])
if 'AgentName' in ctx:
v1.apiTrapPDU.setAgentAddr(v1Pdu, ctx['AgentName'])
if 'GenericTrap' in ctx:
v1.apiTrapPDU.setGenericTrap(v1Pdu, ctx['GenericTrap'])
if 'SpecificTrap' in ctx:
v1.apiTrapPDU.setSpecificTrap(v1Pdu, ctx['SpecificTrap'])
if 'Uptime' in ctx:
v1.apiTrapPDU.setTimeStamp(v1Pdu, ctx['Uptime'])
ctx['pdu'] = rfc2576.v1ToV2(v1Pdu)
def n_TrapV2cParams_exit(self, cbCtx, node):
snmpEngine, ctx = cbCtx
if 'informMode' in ctx:
pdu = v2c.InformRequestPDU()
v2c.apiPDU.setDefaults(pdu)
else:
pdu = v2c.TrapPDU()
v2c.apiTrapPDU.setDefaults(pdu)
v2c.apiPDU.setVarBinds(
pdu,
[(v2c.ObjectIdentifier('1.3.6.1.2.1.1.3.0'), v2c.TimeTicks(ctx['Uptime'])),
(v2c.ObjectIdentifier('1.3.6.1.6.3.1.1.4.1.0'), v2c.ObjectIdentifier(ctx['TrapOid']))]
)
ctx['pdu'] = pdu
def generator(cbCtx, ast):
snmpEngine, ctx = cbCtx
return __Generator().preorder((snmpEngine, ctx), ast)
# Run SNMP engine
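# cbFun receives the response to an INFORM (or an error indication) and
# pretty-prints the returned varbinds.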
def cbFun(snmpEngine, notificationHandle, errorIndication, pdu, cbCtx):
if errorIndication:
sys.stderr.write('%s\n' % errorIndication)
return
errorStatus = v2c.apiPDU.getErrorStatus(pdu)
varBinds = v2c.apiPDU.getVarBinds(pdu)
if errorStatus:
errorIndex = v2c.apiPDU.getErrorIndex(pdu)
sys.stderr.write(
'%s at %s\n' %
(errorStatus.prettyPrint(),
errorIndex and varBinds[int(errorIndex) - 1] or '?')
)
return
for oid, val in varBinds:
sys.stdout.write(
'%s\n' % cbCtx['mibViewProxy'].getPrettyOidVal(
cbCtx['mibViewController'], oid, val
)
)
snmpEngine = engine.SnmpEngine()
try:
# Parse c/l into AST
ast = Parser().parse(
Scanner().tokenize(' '.join(sys.argv[1:]))
)
ctx = {}
# Apply configuration to SNMP entity
main.generator((snmpEngine, ctx), ast)
msgmod.generator((snmpEngine, ctx), ast)
secmod.generator((snmpEngine, ctx), ast)
mibview.generator((snmpEngine, ctx), ast)
target.generatorTrap((snmpEngine, ctx), ast)
pdu.writePduGenerator((snmpEngine, ctx), ast)
generator((snmpEngine, ctx), ast)
v2c.apiPDU.setVarBinds(
ctx['pdu'], v2c.apiPDU.getVarBinds(ctx['pdu']) + ctx['varBinds']
)
ntforg.NotificationOriginator().sendPdu(
snmpEngine,
ctx['addrName'],
ctx.get('contextEngineId'),
ctx.get('contextName', ''),
ctx['pdu'],
cbFun, ctx
)
snmpEngine.transportDispatcher.runDispatcher()
except KeyboardInterrupt:
sys.stderr.write('Shutting down...\n')
except error.PySnmpError:
sys.stderr.write('Error: %s\n%s' % (sys.exc_info()[1], getUsage()))
sys.exit(1)
except Exception:
sys.stderr.write('Process terminated: %s\n' % sys.exc_info()[1])
for line in traceback.format_exception(*sys.exc_info()):
sys.stderr.write(line.replace('\n', ';'))
sys.exit(1)
|
[
"pysnmp.proto.api.v2c.apiPDU.setDefaults",
"snmpclitools.cli.msgmod.getUsage",
"pysnmp.proto.api.v2c.apiPDU.getVarBinds",
"pysnmp.proto.api.v1.apiTrapPDU.setTimeStamp",
"sys.exc_info",
"snmpclitools.cli.mibview.getUsage",
"snmpclitools.cli.main.generator",
"pysnmp.proto.proxy.rfc2576.v1ToV2",
"pysnmp.proto.api.v2c.apiTrapPDU.setDefaults",
"pysnmp.proto.api.v1.apiTrapPDU.setEnterprise",
"pysnmp.proto.api.v2c.TimeTicks",
"pysnmp.entity.engine.SnmpEngine",
"snmpclitools.cli.mibview.generator",
"snmpclitools.cli.msgmod.generator",
"socket.gethostbyname",
"pysnmp.proto.api.v1.apiTrapPDU.setAgentAddr",
"pysnmp.proto.api.v1.apiTrapPDU.setSpecificTrap",
"snmpclitools.cli.pdu.getWriteUsage",
"pysnmp.proto.api.v2c.InformRequestPDU",
"snmpclitools.cli.target.getUsage",
"snmpclitools.cli.secmod.generator",
"snmpclitools.cli.secmod.getUsage",
"os.path.basename",
"pysnmp.proto.api.v1.TrapPDU",
"pysnmp.proto.api.v2c.apiPDU.getErrorIndex",
"pysnmp.proto.api.v2c.TrapPDU",
"pysnmp.error.PySnmpError",
"snmpclitools.cli.main.getUsage",
"pysnmp.proto.api.v2c.ObjectIdentifier",
"sys.exit",
"snmpclitools.cli.pdu.writePduGenerator",
"snmpclitools.cli.target.generatorTrap",
"pysnmp.proto.api.v1.apiTrapPDU.setGenericTrap",
"pysnmp.proto.api.v1.apiTrapPDU.setDefaults",
"pysnmp.entity.rfc3413.ntforg.NotificationOriginator",
"pysnmp.proto.api.v2c.apiPDU.getErrorStatus",
"sys.stderr.write",
"snmpclitools.cli.base.ConfigToken"
] |
[((6597, 6616), 'pysnmp.entity.engine.SnmpEngine', 'engine.SnmpEngine', ([], {}), '()\n', (6614, 6616), False, 'from pysnmp.entity import engine\n'), ((6058, 6088), 'pysnmp.proto.api.v2c.apiPDU.getErrorStatus', 'v2c.apiPDU.getErrorStatus', (['pdu'], {}), '(pdu)\n', (6083, 6088), False, 'from pysnmp.proto.api import v1, v2c\n'), ((6104, 6131), 'pysnmp.proto.api.v2c.apiPDU.getVarBinds', 'v2c.apiPDU.getVarBinds', (['pdu'], {}), '(pdu)\n', (6126, 6131), False, 'from pysnmp.proto.api import v1, v2c\n'), ((6791, 6829), 'snmpclitools.cli.main.generator', 'main.generator', (['(snmpEngine, ctx)', 'ast'], {}), '((snmpEngine, ctx), ast)\n', (6805, 6829), False, 'from snmpclitools.cli import main\n'), ((6834, 6874), 'snmpclitools.cli.msgmod.generator', 'msgmod.generator', (['(snmpEngine, ctx)', 'ast'], {}), '((snmpEngine, ctx), ast)\n', (6850, 6874), False, 'from snmpclitools.cli import msgmod\n'), ((6879, 6919), 'snmpclitools.cli.secmod.generator', 'secmod.generator', (['(snmpEngine, ctx)', 'ast'], {}), '((snmpEngine, ctx), ast)\n', (6895, 6919), False, 'from snmpclitools.cli import secmod\n'), ((6924, 6965), 'snmpclitools.cli.mibview.generator', 'mibview.generator', (['(snmpEngine, ctx)', 'ast'], {}), '((snmpEngine, ctx), ast)\n', (6941, 6965), False, 'from snmpclitools.cli import mibview\n'), ((6970, 7014), 'snmpclitools.cli.target.generatorTrap', 'target.generatorTrap', (['(snmpEngine, ctx)', 'ast'], {}), '((snmpEngine, ctx), ast)\n', (6990, 7014), False, 'from snmpclitools.cli import target\n'), ((7019, 7064), 'snmpclitools.cli.pdu.writePduGenerator', 'pdu.writePduGenerator', (['(snmpEngine, ctx)', 'ast'], {}), '((snmpEngine, ctx), ast)\n', (7040, 7064), False, 'from snmpclitools.cli import pdu\n'), ((4583, 4595), 'pysnmp.proto.api.v1.TrapPDU', 'v1.TrapPDU', ([], {}), '()\n', (4593, 4595), False, 'from pysnmp.proto.api import v1, v2c\n'), ((4604, 4636), 'pysnmp.proto.api.v1.apiTrapPDU.setDefaults', 'v1.apiTrapPDU.setDefaults', (['v1Pdu'], {}), '(v1Pdu)\n', (4629, 4636), False, 'from pysnmp.proto.api import v1, v2c\n'), ((5157, 5178), 'pysnmp.proto.proxy.rfc2576.v1ToV2', 'rfc2576.v1ToV2', (['v1Pdu'], {}), '(v1Pdu)\n', (5171, 5178), False, 'from pysnmp.proto.proxy import rfc2576\n'), ((5981, 6023), 'sys.stderr.write', 'sys.stderr.write', (["('%s\\n' % errorIndication)"], {}), "('%s\\n' % errorIndication)\n", (5997, 6023), False, 'import sys\n'), ((6174, 6203), 'pysnmp.proto.api.v2c.apiPDU.getErrorIndex', 'v2c.apiPDU.getErrorIndex', (['pdu'], {}), '(pdu)\n', (6198, 6203), False, 'from pysnmp.proto.api import v1, v2c\n'), ((7502, 7540), 'sys.stderr.write', 'sys.stderr.write', (['"""Shutting down...\n"""'], {}), "('Shutting down...\\n')\n", (7518, 7540), False, 'import sys\n'), ((7644, 7655), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (7652, 7655), False, 'import sys\n'), ((7861, 7872), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (7869, 7872), False, 'import sys\n'), ((1272, 1301), 'os.path.basename', 'os.path.basename', (['sys.argv[0]'], {}), '(sys.argv[0])\n', (1288, 1301), False, 'import os\n'), ((1310, 1325), 'snmpclitools.cli.main.getUsage', 'main.getUsage', ([], {}), '()\n', (1323, 1325), False, 'from snmpclitools.cli import main\n'), ((1334, 1351), 'snmpclitools.cli.msgmod.getUsage', 'msgmod.getUsage', ([], {}), '()\n', (1349, 1351), False, 'from snmpclitools.cli import msgmod\n'), ((1360, 1377), 'snmpclitools.cli.secmod.getUsage', 'secmod.getUsage', ([], {}), '()\n', (1375, 1377), False, 'from snmpclitools.cli import secmod\n'), ((1386, 1404), 'snmpclitools.cli.mibview.getUsage', 
'mibview.getUsage', ([], {}), '()\n', (1402, 1404), False, 'from snmpclitools.cli import mibview\n'), ((1413, 1430), 'snmpclitools.cli.target.getUsage', 'target.getUsage', ([], {}), '()\n', (1428, 1430), False, 'from snmpclitools.cli import target\n'), ((1439, 1458), 'snmpclitools.cli.pdu.getWriteUsage', 'pdu.getWriteUsage', ([], {}), '()\n', (1456, 1458), False, 'from snmpclitools.cli import pdu\n'), ((1846, 1873), 'snmpclitools.cli.base.ConfigToken', 'base.ConfigToken', (['"""appopts"""'], {}), "('appopts')\n", (1862, 1873), False, 'from snmpclitools.cli import base\n'), ((2040, 2074), 'snmpclitools.cli.base.ConfigToken', 'base.ConfigToken', (['"""genericTrap"""', 's'], {}), "('genericTrap', s)\n", (2056, 2074), False, 'from snmpclitools.cli import base\n'), ((3761, 3795), 'socket.gethostbyname', 'socket.gethostbyname', (['node[0].attr'], {}), '(node[0].attr)\n', (3781, 3795), False, 'import socket\n'), ((4685, 4741), 'pysnmp.proto.api.v1.apiTrapPDU.setEnterprise', 'v1.apiTrapPDU.setEnterprise', (['v1Pdu', "ctx['EnterpriseOid']"], {}), "(v1Pdu, ctx['EnterpriseOid'])\n", (4712, 4741), False, 'from pysnmp.proto.api import v1, v2c\n'), ((4786, 4837), 'pysnmp.proto.api.v1.apiTrapPDU.setAgentAddr', 'v1.apiTrapPDU.setAgentAddr', (['v1Pdu', "ctx['AgentName']"], {}), "(v1Pdu, ctx['AgentName'])\n", (4812, 4837), False, 'from pysnmp.proto.api import v1, v2c\n'), ((4884, 4939), 'pysnmp.proto.api.v1.apiTrapPDU.setGenericTrap', 'v1.apiTrapPDU.setGenericTrap', (['v1Pdu', "ctx['GenericTrap']"], {}), "(v1Pdu, ctx['GenericTrap'])\n", (4912, 4939), False, 'from pysnmp.proto.api import v1, v2c\n'), ((4987, 5044), 'pysnmp.proto.api.v1.apiTrapPDU.setSpecificTrap', 'v1.apiTrapPDU.setSpecificTrap', (['v1Pdu', "ctx['SpecificTrap']"], {}), "(v1Pdu, ctx['SpecificTrap'])\n", (5016, 5044), False, 'from pysnmp.proto.api import v1, v2c\n'), ((5086, 5134), 'pysnmp.proto.api.v1.apiTrapPDU.setTimeStamp', 'v1.apiTrapPDU.setTimeStamp', (['v1Pdu', "ctx['Uptime']"], {}), "(v1Pdu, ctx['Uptime'])\n", (5112, 5134), False, 'from pysnmp.proto.api import v1, v2c\n'), ((5312, 5334), 'pysnmp.proto.api.v2c.InformRequestPDU', 'v2c.InformRequestPDU', ([], {}), '()\n', (5332, 5334), False, 'from pysnmp.proto.api import v1, v2c\n'), ((5347, 5374), 'pysnmp.proto.api.v2c.apiPDU.setDefaults', 'v2c.apiPDU.setDefaults', (['pdu'], {}), '(pdu)\n', (5369, 5374), False, 'from pysnmp.proto.api import v1, v2c\n'), ((5408, 5421), 'pysnmp.proto.api.v2c.TrapPDU', 'v2c.TrapPDU', ([], {}), '()\n', (5419, 5421), False, 'from pysnmp.proto.api import v1, v2c\n'), ((5434, 5465), 'pysnmp.proto.api.v2c.apiTrapPDU.setDefaults', 'v2c.apiTrapPDU.setDefaults', (['pdu'], {}), '(pdu)\n', (5460, 5465), False, 'from pysnmp.proto.api import v1, v2c\n'), ((7152, 7186), 'pysnmp.proto.api.v2c.apiPDU.getVarBinds', 'v2c.apiPDU.getVarBinds', (["ctx['pdu']"], {}), "(ctx['pdu'])\n", (7174, 7186), False, 'from pysnmp.proto.api import v1, v2c\n'), ((7216, 7247), 'pysnmp.entity.rfc3413.ntforg.NotificationOriginator', 'ntforg.NotificationOriginator', ([], {}), '()\n', (7245, 7247), False, 'from pysnmp.entity.rfc3413 import ntforg\n'), ((3476, 3521), 'pysnmp.error.PySnmpError', 'error.PySnmpError', (['(\'bad -C option - "%s"\' % c)'], {}), '(\'bad -C option - "%s"\' % c)\n', (3493, 3521), False, 'from pysnmp import error\n'), ((7789, 7803), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (7801, 7803), False, 'import sys\n'), ((5530, 5571), 'pysnmp.proto.api.v2c.ObjectIdentifier', 'v2c.ObjectIdentifier', (['"""1.3.6.1.2.1.1.3.0"""'], {}), "('1.3.6.1.2.1.1.3.0')\n", (5550, 5571), 
False, 'from pysnmp.proto.api import v1, v2c\n'), ((5573, 5601), 'pysnmp.proto.api.v2c.TimeTicks', 'v2c.TimeTicks', (["ctx['Uptime']"], {}), "(ctx['Uptime'])\n", (5586, 5601), False, 'from pysnmp.proto.api import v1, v2c\n'), ((5618, 5663), 'pysnmp.proto.api.v2c.ObjectIdentifier', 'v2c.ObjectIdentifier', (['"""1.3.6.1.6.3.1.1.4.1.0"""'], {}), "('1.3.6.1.6.3.1.1.4.1.0')\n", (5638, 5663), False, 'from pysnmp.proto.api import v1, v2c\n'), ((5665, 5701), 'pysnmp.proto.api.v2c.ObjectIdentifier', 'v2c.ObjectIdentifier', (["ctx['TrapOid']"], {}), "(ctx['TrapOid'])\n", (5685, 5701), False, 'from pysnmp.proto.api import v1, v2c\n'), ((7725, 7739), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (7737, 7739), False, 'import sys\n'), ((7608, 7622), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (7620, 7622), False, 'import sys\n'), ((3920, 3934), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (3932, 3934), False, 'import sys\n')]
|
# Copyright 2019 DeepMind Technologies Limited. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for haiku._src.batch_norm."""
import os
from absl.testing import absltest
from haiku._src import batch_norm
from haiku._src import test_utils
from haiku._src import transform
import jax
import jax.numpy as jnp
import numpy as np
class BatchNormTest(absltest.TestCase):
@test_utils.transform_and_run
def test_basic(self):
data = jnp.arange(2 * 3 * 4, dtype=jnp.float32).reshape([2, 3, 4])
norm = batch_norm.BatchNorm(True, True, 0.9)
result = norm(data, is_training=True)
result_0_replicated = jnp.broadcast_to(result[:, :, :1], result.shape)
    # Each channel differs only by a constant offset, so every channel normalizes to the same values.
np.testing.assert_allclose(result, result_0_replicated)
# Running through again in test mode produces same output.
np.testing.assert_allclose(norm(data, is_training=False), result, rtol=2e-2)
@test_utils.transform_and_run
def test_simple_training(self):
layer = batch_norm.BatchNorm(
create_scale=False, create_offset=False, decay_rate=0.9)
inputs = np.ones([2, 3, 3, 5])
scale = np.full((5,), 0.5)
offset = np.full((5,), 2.0)
result = layer(inputs, True, scale=scale, offset=offset)
np.testing.assert_equal(result, np.full(inputs.shape, 2.0))
@test_utils.transform_and_run
def test_simple_training_nchw(self):
layer = batch_norm.BatchNorm(
create_scale=False,
create_offset=False,
decay_rate=0.9,
data_format="NCHW")
inputs = np.ones([2, 5, 3, 3])
scale = np.full((5, 1, 1), 0.5)
offset = np.full((5, 1, 1), 2.0)
result = layer(inputs, True, scale=scale, offset=offset)
np.testing.assert_equal(result, np.full(inputs.shape, 2.0))
@test_utils.transform_and_run
def test_simple_training_normalized_axes(self):
layer = batch_norm.BatchNorm(
create_scale=False,
create_offset=False,
decay_rate=0.9,
axis=[0, 2, 3]) # Not the second axis.
# This differs only in the second axis.
inputs = np.stack([2.0 * np.ones([5, 3, 3]), np.ones([5, 3, 3])], 1)
result = layer(inputs, True)
# Despite not all values being identical, treating slices from the first
# axis separately leads to a fully normalized = equal array.
np.testing.assert_equal(result, np.zeros(inputs.shape))
def test_simple_training_cross_replica_axis(self):
ldc = jax.local_device_count()
def f(x, is_training=True):
return batch_norm.BatchNorm(
create_scale=False,
create_offset=False,
decay_rate=0.9,
cross_replica_axis="i",
)(x, is_training=is_training)
f = transform.transform_with_state(f)
inputs = np.arange(ldc * 4).reshape(ldc, 4)
key = np.broadcast_to(jax.random.PRNGKey(42), (ldc, 2))
params, state = jax.pmap(f.init, axis_name="i")(key, inputs)
result, _ = jax.pmap(f.apply, axis_name="i")(params, state, key, inputs)
mean = np.mean(inputs, axis=0)
std = np.std(inputs, axis=0) + 1e-10
expected = (inputs - mean) / std
np.testing.assert_array_almost_equal(result, expected)
def test_simple_training_cross_replica_axis_index_groups(self):
ldc = jax.local_device_count()
if ldc < 2:
self.skipTest("Cross-replica test requires at least 2 devices.")
num_groups = ldc // 2
num_group_devices = ldc // num_groups
# for 8 devices this produces [[0, 1], [2, 3], [4, 5], [6, 7]] groups.
groups = np.arange(ldc).reshape(num_groups, num_group_devices).tolist()
def f(x, is_training=True):
return batch_norm.BatchNorm(
create_scale=False,
create_offset=False,
decay_rate=0.9,
cross_replica_axis="i",
cross_replica_axis_index_groups=groups,
)(x, is_training=is_training)
f = transform.transform_with_state(f)
inputs = np.arange(ldc * 4).reshape(ldc, 4).astype(np.float32)
key = np.broadcast_to(jax.random.PRNGKey(42), (ldc, 2))
params, state = jax.pmap(f.init, axis_name="i")(key, inputs)
result, _ = jax.pmap(f.apply, axis_name="i")(params, state, key, inputs)
expected = np.empty_like(inputs)
for g in range(num_groups):
group_inputs = inputs[num_group_devices*g:num_group_devices*(g + 1)]
group_mean = np.mean(group_inputs, axis=0)
group_std = np.std(group_inputs, axis=0) + 1e-10
group_inputs = (group_inputs - group_mean) / group_std
expected[num_group_devices*g:num_group_devices*(g + 1)] = group_inputs
np.testing.assert_array_almost_equal(result, expected)
@test_utils.transform_and_run
def test_no_scale_and_offset(self):
layer = batch_norm.BatchNorm(
create_scale=False, create_offset=False, decay_rate=0.9)
inputs = jnp.ones([2, 5, 3, 3, 3])
result = layer(inputs, True)
np.testing.assert_equal(result, np.zeros_like(inputs))
@test_utils.transform_and_run
def test_no_scale_and_init_provided(self):
with self.assertRaisesRegex(
ValueError, "Cannot set `scale_init` if `create_scale=False`"):
batch_norm.BatchNorm(
create_scale=False,
create_offset=True,
decay_rate=0.9,
scale_init=jnp.ones)
@test_utils.transform_and_run
def test_no_offset_beta_init_provided(self):
with self.assertRaisesRegex(
ValueError, "Cannot set `offset_init` if `create_offset=False`"):
batch_norm.BatchNorm(
create_scale=True,
create_offset=False,
decay_rate=0.9,
offset_init=jnp.zeros)
def test_eps_cast_to_var_dtype(self):
# See https://github.com/google/jax/issues/4718 for more info. In the
# context of this test we need to assert NumPy bf16 params/state and a
# Python float for eps preserve bf16 output.
def f(x, is_training):
return batch_norm.BatchNorm(True, True, 0.9, eps=0.1)(x, is_training)
f = transform.transform_with_state(f)
x = np.ones([], jnp.bfloat16)
key = jax.random.PRNGKey(42)
params, state = jax.device_get(f.init(key, x, True))
y, _ = f.apply(params, state, None, x, False)
self.assertEqual(y.dtype, jnp.bfloat16)
if __name__ == "__main__":
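  # Request 8 fake XLA host devices so the pmap-based cross-replica tests can run on CPU.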
_xla_flags = os.environ.get("XLA_FLAGS", "")
os.environ["XLA_FLAGS"] = (_xla_flags +
" --xla_force_host_platform_device_count=8")
absltest.main()
os.environ["XLA_FLAGS"] = _xla_flags
|
[
"absl.testing.absltest.main",
"numpy.ones",
"jax.random.PRNGKey",
"numpy.mean",
"numpy.arange",
"numpy.testing.assert_array_almost_equal",
"numpy.full",
"numpy.zeros_like",
"numpy.std",
"jax.local_device_count",
"numpy.empty_like",
"haiku._src.batch_norm.BatchNorm",
"numpy.testing.assert_allclose",
"haiku._src.transform.transform_with_state",
"jax.numpy.broadcast_to",
"jax.pmap",
"jax.numpy.ones",
"jax.numpy.arange",
"numpy.zeros",
"os.environ.get"
] |
[((6809, 6840), 'os.environ.get', 'os.environ.get', (['"""XLA_FLAGS"""', '""""""'], {}), "('XLA_FLAGS', '')\n", (6823, 6840), False, 'import os\n'), ((6960, 6975), 'absl.testing.absltest.main', 'absltest.main', ([], {}), '()\n', (6973, 6975), False, 'from absl.testing import absltest\n'), ((1117, 1154), 'haiku._src.batch_norm.BatchNorm', 'batch_norm.BatchNorm', (['(True)', '(True)', '(0.9)'], {}), '(True, True, 0.9)\n', (1137, 1154), False, 'from haiku._src import batch_norm\n'), ((1223, 1271), 'jax.numpy.broadcast_to', 'jnp.broadcast_to', (['result[:, :, :1]', 'result.shape'], {}), '(result[:, :, :1], result.shape)\n', (1239, 1271), True, 'import jax.numpy as jnp\n'), ((1330, 1385), 'numpy.testing.assert_allclose', 'np.testing.assert_allclose', (['result', 'result_0_replicated'], {}), '(result, result_0_replicated)\n', (1356, 1385), True, 'import numpy as np\n'), ((1609, 1686), 'haiku._src.batch_norm.BatchNorm', 'batch_norm.BatchNorm', ([], {'create_scale': '(False)', 'create_offset': '(False)', 'decay_rate': '(0.9)'}), '(create_scale=False, create_offset=False, decay_rate=0.9)\n', (1629, 1686), False, 'from haiku._src import batch_norm\n'), ((1710, 1731), 'numpy.ones', 'np.ones', (['[2, 3, 3, 5]'], {}), '([2, 3, 3, 5])\n', (1717, 1731), True, 'import numpy as np\n'), ((1744, 1762), 'numpy.full', 'np.full', (['(5,)', '(0.5)'], {}), '((5,), 0.5)\n', (1751, 1762), True, 'import numpy as np\n'), ((1776, 1794), 'numpy.full', 'np.full', (['(5,)', '(2.0)'], {}), '((5,), 2.0)\n', (1783, 1794), True, 'import numpy as np\n'), ((2005, 2107), 'haiku._src.batch_norm.BatchNorm', 'batch_norm.BatchNorm', ([], {'create_scale': '(False)', 'create_offset': '(False)', 'decay_rate': '(0.9)', 'data_format': '"""NCHW"""'}), "(create_scale=False, create_offset=False, decay_rate=\n 0.9, data_format='NCHW')\n", (2025, 2107), False, 'from haiku._src import batch_norm\n'), ((2150, 2171), 'numpy.ones', 'np.ones', (['[2, 5, 3, 3]'], {}), '([2, 5, 3, 3])\n', (2157, 2171), True, 'import numpy as np\n'), ((2184, 2207), 'numpy.full', 'np.full', (['(5, 1, 1)', '(0.5)'], {}), '((5, 1, 1), 0.5)\n', (2191, 2207), True, 'import numpy as np\n'), ((2221, 2244), 'numpy.full', 'np.full', (['(5, 1, 1)', '(2.0)'], {}), '((5, 1, 1), 2.0)\n', (2228, 2244), True, 'import numpy as np\n'), ((2466, 2564), 'haiku._src.batch_norm.BatchNorm', 'batch_norm.BatchNorm', ([], {'create_scale': '(False)', 'create_offset': '(False)', 'decay_rate': '(0.9)', 'axis': '[0, 2, 3]'}), '(create_scale=False, create_offset=False, decay_rate=\n 0.9, axis=[0, 2, 3])\n', (2486, 2564), False, 'from haiku._src import batch_norm\n'), ((3036, 3060), 'jax.local_device_count', 'jax.local_device_count', ([], {}), '()\n', (3058, 3060), False, 'import jax\n'), ((3295, 3328), 'haiku._src.transform.transform_with_state', 'transform.transform_with_state', (['f'], {}), '(f)\n', (3325, 3328), False, 'from haiku._src import transform\n'), ((3592, 3615), 'numpy.mean', 'np.mean', (['inputs'], {'axis': '(0)'}), '(inputs, axis=0)\n', (3599, 3615), True, 'import numpy as np\n'), ((3699, 3753), 'numpy.testing.assert_array_almost_equal', 'np.testing.assert_array_almost_equal', (['result', 'expected'], {}), '(result, expected)\n', (3735, 3753), True, 'import numpy as np\n'), ((3831, 3855), 'jax.local_device_count', 'jax.local_device_count', ([], {}), '()\n', (3853, 3855), False, 'import jax\n'), ((4446, 4479), 'haiku._src.transform.transform_with_state', 'transform.transform_with_state', (['f'], {}), '(f)\n', (4476, 4479), False, 'from haiku._src import transform\n'), ((4766, 4787), 
'numpy.empty_like', 'np.empty_like', (['inputs'], {}), '(inputs)\n', (4779, 4787), True, 'import numpy as np\n'), ((5142, 5196), 'numpy.testing.assert_array_almost_equal', 'np.testing.assert_array_almost_equal', (['result', 'expected'], {}), '(result, expected)\n', (5178, 5196), True, 'import numpy as np\n'), ((5280, 5357), 'haiku._src.batch_norm.BatchNorm', 'batch_norm.BatchNorm', ([], {'create_scale': '(False)', 'create_offset': '(False)', 'decay_rate': '(0.9)'}), '(create_scale=False, create_offset=False, decay_rate=0.9)\n', (5300, 5357), False, 'from haiku._src import batch_norm\n'), ((5381, 5406), 'jax.numpy.ones', 'jnp.ones', (['[2, 5, 3, 3, 3]'], {}), '([2, 5, 3, 3, 3])\n', (5389, 5406), True, 'import jax.numpy as jnp\n'), ((6513, 6546), 'haiku._src.transform.transform_with_state', 'transform.transform_with_state', (['f'], {}), '(f)\n', (6543, 6546), False, 'from haiku._src import transform\n'), ((6556, 6581), 'numpy.ones', 'np.ones', (['[]', 'jnp.bfloat16'], {}), '([], jnp.bfloat16)\n', (6563, 6581), True, 'import numpy as np\n'), ((6592, 6614), 'jax.random.PRNGKey', 'jax.random.PRNGKey', (['(42)'], {}), '(42)\n', (6610, 6614), False, 'import jax\n'), ((1893, 1919), 'numpy.full', 'np.full', (['inputs.shape', '(2.0)'], {}), '(inputs.shape, 2.0)\n', (1900, 1919), True, 'import numpy as np\n'), ((2343, 2369), 'numpy.full', 'np.full', (['inputs.shape', '(2.0)'], {}), '(inputs.shape, 2.0)\n', (2350, 2369), True, 'import numpy as np\n'), ((2948, 2970), 'numpy.zeros', 'np.zeros', (['inputs.shape'], {}), '(inputs.shape)\n', (2956, 2970), True, 'import numpy as np\n'), ((3404, 3426), 'jax.random.PRNGKey', 'jax.random.PRNGKey', (['(42)'], {}), '(42)\n', (3422, 3426), False, 'import jax\n'), ((3458, 3489), 'jax.pmap', 'jax.pmap', (['f.init'], {'axis_name': '"""i"""'}), "(f.init, axis_name='i')\n", (3466, 3489), False, 'import jax\n'), ((3519, 3551), 'jax.pmap', 'jax.pmap', (['f.apply'], {'axis_name': '"""i"""'}), "(f.apply, axis_name='i')\n", (3527, 3551), False, 'import jax\n'), ((3626, 3648), 'numpy.std', 'np.std', (['inputs'], {'axis': '(0)'}), '(inputs, axis=0)\n', (3632, 3648), True, 'import numpy as np\n'), ((4574, 4596), 'jax.random.PRNGKey', 'jax.random.PRNGKey', (['(42)'], {}), '(42)\n', (4592, 4596), False, 'import jax\n'), ((4628, 4659), 'jax.pmap', 'jax.pmap', (['f.init'], {'axis_name': '"""i"""'}), "(f.init, axis_name='i')\n", (4636, 4659), False, 'import jax\n'), ((4689, 4721), 'jax.pmap', 'jax.pmap', (['f.apply'], {'axis_name': '"""i"""'}), "(f.apply, axis_name='i')\n", (4697, 4721), False, 'import jax\n'), ((4914, 4943), 'numpy.mean', 'np.mean', (['group_inputs'], {'axis': '(0)'}), '(group_inputs, axis=0)\n', (4921, 4943), True, 'import numpy as np\n'), ((5476, 5497), 'numpy.zeros_like', 'np.zeros_like', (['inputs'], {}), '(inputs)\n', (5489, 5497), True, 'import numpy as np\n'), ((5688, 5789), 'haiku._src.batch_norm.BatchNorm', 'batch_norm.BatchNorm', ([], {'create_scale': '(False)', 'create_offset': '(True)', 'decay_rate': '(0.9)', 'scale_init': 'jnp.ones'}), '(create_scale=False, create_offset=True, decay_rate=0.9,\n scale_init=jnp.ones)\n', (5708, 5789), False, 'from haiku._src import batch_norm\n'), ((6020, 6123), 'haiku._src.batch_norm.BatchNorm', 'batch_norm.BatchNorm', ([], {'create_scale': '(True)', 'create_offset': '(False)', 'decay_rate': '(0.9)', 'offset_init': 'jnp.zeros'}), '(create_scale=True, create_offset=False, decay_rate=0.9,\n offset_init=jnp.zeros)\n', (6040, 6123), False, 'from haiku._src import batch_norm\n'), ((1045, 1085), 'jax.numpy.arange', 
'jnp.arange', (['(2 * 3 * 4)'], {'dtype': 'jnp.float32'}), '(2 * 3 * 4, dtype=jnp.float32)\n', (1055, 1085), True, 'import jax.numpy as jnp\n'), ((2711, 2729), 'numpy.ones', 'np.ones', (['[5, 3, 3]'], {}), '([5, 3, 3])\n', (2718, 2729), True, 'import numpy as np\n'), ((3107, 3213), 'haiku._src.batch_norm.BatchNorm', 'batch_norm.BatchNorm', ([], {'create_scale': '(False)', 'create_offset': '(False)', 'decay_rate': '(0.9)', 'cross_replica_axis': '"""i"""'}), "(create_scale=False, create_offset=False, decay_rate=\n 0.9, cross_replica_axis='i')\n", (3127, 3213), False, 'from haiku._src import batch_norm\n'), ((3343, 3361), 'numpy.arange', 'np.arange', (['(ldc * 4)'], {}), '(ldc * 4)\n', (3352, 3361), True, 'import numpy as np\n'), ((4208, 4354), 'haiku._src.batch_norm.BatchNorm', 'batch_norm.BatchNorm', ([], {'create_scale': '(False)', 'create_offset': '(False)', 'decay_rate': '(0.9)', 'cross_replica_axis': '"""i"""', 'cross_replica_axis_index_groups': 'groups'}), "(create_scale=False, create_offset=False, decay_rate=\n 0.9, cross_replica_axis='i', cross_replica_axis_index_groups=groups)\n", (4228, 4354), False, 'from haiku._src import batch_norm\n'), ((4962, 4990), 'numpy.std', 'np.std', (['group_inputs'], {'axis': '(0)'}), '(group_inputs, axis=0)\n', (4968, 4990), True, 'import numpy as np\n'), ((6441, 6487), 'haiku._src.batch_norm.BatchNorm', 'batch_norm.BatchNorm', (['(True)', '(True)', '(0.9)'], {'eps': '(0.1)'}), '(True, True, 0.9, eps=0.1)\n', (6461, 6487), False, 'from haiku._src import batch_norm\n'), ((2691, 2709), 'numpy.ones', 'np.ones', (['[5, 3, 3]'], {}), '([5, 3, 3])\n', (2698, 2709), True, 'import numpy as np\n'), ((4099, 4113), 'numpy.arange', 'np.arange', (['ldc'], {}), '(ldc)\n', (4108, 4113), True, 'import numpy as np\n'), ((4494, 4512), 'numpy.arange', 'np.arange', (['(ldc * 4)'], {}), '(ldc * 4)\n', (4503, 4512), True, 'import numpy as np\n')]
|
import cv2
import numpy as np
def canny(image):
    gray = cv2.cvtColor(image, cv2.COLOR_RGB2GRAY)  # use the function argument, not the global frame
blur = cv2.GaussianBlur(gray, (5, 5), 0)
canny = cv2.Canny(blur, 50, 150)
return canny
def display_lines(image, lines):
line_image = np.zeros_like(image)
if lines is not None:
for line in lines:
#print(line)
x1, y1, x2, y2 = line.reshape(4)
cv2.line(line_image, (x1, y1), (x2, y2), (255, 0, 0), 10)
return line_image
def region_of_interest(image):
height = image.shape[0]
triangle = np.array([[(200, height), (1100, height), (550, 250)]])
mask = np.zeros_like(image)
cv2.fillPoly(mask, triangle, 255)
masked_image = cv2.bitwise_and(image, mask)
return masked_image
image = cv2.imread('test_image.jpg')
lane_image = np.copy(image)
canny_image = canny(lane_image)  # avoid shadowing the canny() function with its result
cropped_image = region_of_interest(canny_image)
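# Probabilistic Hough transform: rho = 2 px, theta = 1 degree, 100-vote threshold,
# keeping segments longer than 40 px and bridging gaps of up to 5 px.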
lines = cv2.HoughLinesP(cropped_image, 2, np.pi/180, 100, np.array([]), minLineLength=40, maxLineGap=5)
line_image = display_lines(lane_image, lines)
combo_image = cv2.addWeighted(lane_image, 0.8, line_image, 1, 1)
cv2.imshow('result', combo_image)
cv2.waitKey(0)
|
[
"cv2.line",
"cv2.GaussianBlur",
"cv2.Canny",
"numpy.zeros_like",
"numpy.copy",
"cv2.bitwise_and",
"cv2.waitKey",
"cv2.cvtColor",
"cv2.addWeighted",
"cv2.fillPoly",
"cv2.imread",
"numpy.array",
"cv2.imshow"
] |
[((776, 804), 'cv2.imread', 'cv2.imread', (['"""test_image.jpg"""'], {}), "('test_image.jpg')\n", (786, 804), False, 'import cv2\n'), ((818, 832), 'numpy.copy', 'np.copy', (['image'], {}), '(image)\n', (825, 832), True, 'import numpy as np\n'), ((1065, 1115), 'cv2.addWeighted', 'cv2.addWeighted', (['lane_image', '(0.8)', 'line_image', '(1)', '(1)'], {}), '(lane_image, 0.8, line_image, 1, 1)\n', (1080, 1115), False, 'import cv2\n'), ((1116, 1149), 'cv2.imshow', 'cv2.imshow', (['"""result"""', 'combo_image'], {}), "('result', combo_image)\n", (1126, 1149), False, 'import cv2\n'), ((1150, 1164), 'cv2.waitKey', 'cv2.waitKey', (['(0)'], {}), '(0)\n', (1161, 1164), False, 'import cv2\n'), ((61, 105), 'cv2.cvtColor', 'cv2.cvtColor', (['lane_image', 'cv2.COLOR_RGB2GRAY'], {}), '(lane_image, cv2.COLOR_RGB2GRAY)\n', (73, 105), False, 'import cv2\n'), ((117, 150), 'cv2.GaussianBlur', 'cv2.GaussianBlur', (['gray', '(5, 5)', '(0)'], {}), '(gray, (5, 5), 0)\n', (133, 150), False, 'import cv2\n'), ((163, 187), 'cv2.Canny', 'cv2.Canny', (['blur', '(50)', '(150)'], {}), '(blur, 50, 150)\n', (172, 187), False, 'import cv2\n'), ((257, 277), 'numpy.zeros_like', 'np.zeros_like', (['image'], {}), '(image)\n', (270, 277), True, 'import numpy as np\n'), ((569, 624), 'numpy.array', 'np.array', (['[[(200, height), (1100, height), (550, 250)]]'], {}), '([[(200, height), (1100, height), (550, 250)]])\n', (577, 624), True, 'import numpy as np\n'), ((636, 656), 'numpy.zeros_like', 'np.zeros_like', (['image'], {}), '(image)\n', (649, 656), True, 'import numpy as np\n'), ((661, 694), 'cv2.fillPoly', 'cv2.fillPoly', (['mask', 'triangle', '(255)'], {}), '(mask, triangle, 255)\n', (673, 694), False, 'import cv2\n'), ((714, 742), 'cv2.bitwise_and', 'cv2.bitwise_and', (['image', 'mask'], {}), '(image, mask)\n', (729, 742), False, 'import cv2\n'), ((959, 971), 'numpy.array', 'np.array', (['[]'], {}), '([])\n', (967, 971), True, 'import numpy as np\n'), ((413, 470), 'cv2.line', 'cv2.line', (['line_image', '(x1, y1)', '(x2, y2)', '(255, 0, 0)', '(10)'], {}), '(line_image, (x1, y1), (x2, y2), (255, 0, 0), 10)\n', (421, 470), False, 'import cv2\n')]
|
import unittest
from src.pipeline.question_category import QuestionCategory, GetCategoryFromQuestionStructure
from src.unpack_data import QuestionAnswerRecipe, Recipe, Q_A
class TestQuestionCategory(unittest.TestCase):
def test_determine_description(self):
qc = QuestionCategory("counting_times")
self.assertIn("Counting times? A: Number", qc.description)
def test_determine_description_str(self):
qc = QuestionCategory("event_ordering")
self.assertIn("X, Y which comes first?", qc.description)
def test_no_description(self):
qc = QuestionCategory("whatever")
self.assertEqual("N/A", qc.description)
class TestGetCategoryFromQuestionStructure(unittest.TestCase):
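    # Each test feeds one synthetic question through the structure/regex-based classifier
    # and checks the predicted category.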
def test_regex_classifier_class_counting_times(self):
engine = GetCategoryFromQuestionStructure()
question = QuestionAnswerRecipe(
qa=Q_A("# question 20-9 = How many times is the bowl used?", "answer = a"), recipe=None)
a_class = engine.predict_category(question)
self.assertIsNotNone(a_class)
self.assertEqual("counting_times", a_class.category)
def test_regex_classifier_class_counting_actions(self):
engine = GetCategoryFromQuestionStructure()
question = QuestionAnswerRecipe(
qa=Q_A("# question 20-9 = How many actions does it take to process the tomato?", "answer = a"), recipe=None)
a_class = engine.predict_category(question)
self.assertIsNotNone(a_class)
self.assertEqual("counting_actions", a_class.category)
def test_regex_classifier_class_counting_uses(self):
engine = GetCategoryFromQuestionStructure()
question = QuestionAnswerRecipe(
qa=Q_A("# question 20-9 = How many spoons are used?", "answer = a"), recipe=None)
a_class = engine.predict_category(question)
self.assertIsNotNone(a_class)
self.assertEqual("counting_uses", a_class.category)
def test_regex_classifier_class_ellipsis(self):
engine = GetCategoryFromQuestionStructure()
question = QuestionAnswerRecipe(qa=Q_A("# question 20-9 = What should be served?", "answer = a"), recipe=None)
a_class = engine.predict_category(question)
self.assertIsNotNone(a_class)
self.assertEqual("ellipsis", a_class.category)
def test_regex_classifier_class_location_crl(self):
engine = GetCategoryFromQuestionStructure()
question = QuestionAnswerRecipe(
qa=Q_A("# question 20-9 = Where should you add the chopped vegetables?", "answer = a"), recipe=None)
a_class = engine.predict_category(question)
self.assertIsNotNone(a_class)
self.assertEqual("location_crl", a_class.category)
def test_regex_classifier_class_how_1(self):
engine = GetCategoryFromQuestionStructure()
question = QuestionAnswerRecipe(
qa=Q_A("# question 20-9 = How do you brush the salad dressing?", "answer = a"), recipe=None)
a_class = engine.predict_category(question)
self.assertIsNotNone(a_class)
self.assertEqual("method", a_class.category)
def test_regex_classifier_class_how_2(self):
engine = GetCategoryFromQuestionStructure()
question = QuestionAnswerRecipe(qa=Q_A("# question 20-9 = How did you get the cooked vegetable?", "answer = a"),
recipe=None)
a_class = engine.predict_category(question)
self.assertIsNotNone(a_class)
self.assertEqual("lifespan_how", a_class.category)
def test_regex_classifier_class_lifespan_what(self):
engine = GetCategoryFromQuestionStructure()
question = QuestionAnswerRecipe(qa=Q_A("# question 20-9 = What's in the lentil salad?", "answer = a"),
recipe=None)
a_class = engine.predict_category(question)
self.assertIsNotNone(a_class)
self.assertEqual("lifespan_what", a_class.category)
def test_regex_classifier_class_event_ordering(self):
engine = GetCategoryFromQuestionStructure()
question = QuestionAnswerRecipe(qa=Q_A(
"# question 20-9 = Cutting the stem into bite - size pieces into bite - size pieces and sauting minced "
"meat in a separate pan, which comes first?",
"answer = a"), recipe=None)
a_class = engine.predict_category(question)
self.assertIsNotNone(a_class)
self.assertEqual("event_ordering", a_class.category)
def test_regex_classifier_class_result(self):
engine = GetCategoryFromQuestionStructure()
question = QuestionAnswerRecipe(
qa=Q_A("# question 20-9 = To what extent do you cut carrots and zucchini?", "answer = a"), recipe=None)
a_class = engine.predict_category(question)
self.assertIsNotNone(a_class)
self.assertEqual("result", a_class.category)
def test_regex_classifier_class_how_3(self):
engine = GetCategoryFromQuestionStructure()
question = QuestionAnswerRecipe(qa=Q_A("# question 20-9 = How do you prick the dough slightly?", "answer = a"),
recipe=None)
a_class = engine.predict_category(question)
self.assertIsNotNone(a_class)
self.assertEqual("method", a_class.category)
def test_regex_classifier_class_time(self):
engine = GetCategoryFromQuestionStructure()
question = QuestionAnswerRecipe(
qa=Q_A("# question 20-9 = For how long do you boil the potatoes until cooked?", "answer = a"), recipe=None)
a_class = engine.predict_category(question)
self.assertIsNotNone(a_class)
self.assertEqual("time", a_class.category)
def test_regex_classifier_class_location_srl(self):
engine = GetCategoryFromQuestionStructure()
question = QuestionAnswerRecipe(
qa=Q_A("# question 20-9 = Where do you season the trout with salt and pepper?", "answer = a"), recipe=None)
a_class = engine.predict_category(question)
self.assertIsNotNone(a_class)
self.assertEqual("location_srl", a_class.category)
def test_regex_classifier_class_extent(self):
engine = GetCategoryFromQuestionStructure()
question = QuestionAnswerRecipe(
qa=Q_A("# question 20-9 = By how much do you cover the beans with water in a pot?", "answer = a"),
recipe=None)
a_class = engine.predict_category(question)
self.assertIsNotNone(a_class)
self.assertEqual("extent", a_class.category)
def test_regex_classifier_class_how_4(self):
engine = GetCategoryFromQuestionStructure()
question = QuestionAnswerRecipe(
qa=Q_A("# question 20-9 = How do you coat hot syrup mixture the popcorn nut mixture?", "answer = a"),
recipe=None)
a_class = engine.predict_category(question)
self.assertIsNotNone(a_class)
self.assertEqual("method", a_class.category)
def test_regex_classifier_class_purpose1(self):
engine = GetCategoryFromQuestionStructure()
question = QuestionAnswerRecipe(qa=Q_A("# question 20-9 = Why do you use gas?", "answer = a"), recipe=None)
a_class = engine.predict_category(question)
self.assertIsNotNone(a_class)
self.assertEqual("purpose", a_class.category)
def test_regex_classifier_class_copatient1(self):
engine = GetCategoryFromQuestionStructure()
question = QuestionAnswerRecipe(
qa=Q_A("# question 20-9 = What do you mix the oil in a small bowl with?", "answer = a"), recipe=None)
a_class = engine.predict_category(question)
self.assertIsNotNone(a_class)
self.assertEqual("copatient", a_class.category)
def test_regex_classifier_class_copatient2(self):
engine = GetCategoryFromQuestionStructure()
question = QuestionAnswerRecipe(
qa=Q_A("# question 20-9 = What do you put the raspberries into a liqudizer with?", "answer = a"),
recipe=None)
a_class = engine.predict_category(question)
self.assertIsNotNone(a_class)
self.assertEqual("copatient", a_class.category)
def test_regex_classifier_class_how_5(self):
engine = GetCategoryFromQuestionStructure()
question = QuestionAnswerRecipe(qa=Q_A("# question 20-9 = How do you use the same pot of water??"),
recipe=None)
a_class = engine.predict_category(question)
self.assertIsNotNone(a_class)
self.assertEqual("method", a_class.category)
def test_regex_classifier_class_purpose2(self):
engine = GetCategoryFromQuestionStructure()
question = QuestionAnswerRecipe(qa=Q_A("# question 20-9 = Why do you pinch the pizza dough?", "answer = a"),
recipe=None)
a_class = engine.predict_category(question)
self.assertIsNotNone(a_class)
self.assertEqual("purpose", a_class.category)
def test_regex_classifier_class_source(self):
engine = GetCategoryFromQuestionStructure()
question = QuestionAnswerRecipe(
qa=Q_A("# question 20-9 = From where do you remove the spinach and shallots mix?", "answer = a"),
recipe=None)
a_class = engine.predict_category(question)
self.assertIsNotNone(a_class)
self.assertEqual("source", a_class.category)
def test_regex_classifier_class_location_change(self):
engine = GetCategoryFromQuestionStructure()
question = QuestionAnswerRecipe(
qa=Q_A("# question 20-9 = Where was the stuffed mushroom before it was garnished?", "answer = a"),
recipe=None)
a_class = engine.predict_category(question)
self.assertIsNotNone(a_class)
self.assertEqual("location_change", a_class.category)
def test_regex_classifier_class_result_na(self):
engine = GetCategoryFromQuestionStructure()
question = QuestionAnswerRecipe(
qa=Q_A("# question 20-9 = To what extent do you cut the shortening in?", "answer = a"), recipe=None)
a_class = engine.predict_category(question)
self.assertIsNotNone(a_class)
self.assertEqual("result", a_class.category)
def test_regex_classifier_class_how_preheat_1(self):
engine = GetCategoryFromQuestionStructure()
question = QuestionAnswerRecipe(Q_A("# question 20-9 = How do you preheat your oven?"), None)
a_class = engine.predict_category(question)
self.assertIsNotNone(a_class)
self.assertEqual("method_preheat", a_class.category)
def test_regex_classifier_class_how_preheat__alt_spelling(self):
engine = GetCategoryFromQuestionStructure()
question = QuestionAnswerRecipe(Q_A("# question 20-9 = How do you pre - heat the oven?", "answer = a"), None)
a_class = engine.predict_category(question)
self.assertIsNotNone(a_class)
self.assertEqual("method_preheat", a_class.category)
def test_regex_classifier_not_recognized(self):
engine = GetCategoryFromQuestionStructure()
question = QuestionAnswerRecipe(Q_A("# question XYZ-ABC = Is this question recognizable?", "answer = No"), None)
a_class = engine.predict_category(question)
self.assertIsNotNone(a_class)
self.assertEqual("not_recognized", a_class.category)
|
[
"src.pipeline.question_category.QuestionCategory",
"src.unpack_data.Q_A",
"src.pipeline.question_category.GetCategoryFromQuestionStructure"
] |
[((278, 312), 'src.pipeline.question_category.QuestionCategory', 'QuestionCategory', (['"""counting_times"""'], {}), "('counting_times')\n", (294, 312), False, 'from src.pipeline.question_category import QuestionCategory, GetCategoryFromQuestionStructure\n'), ((440, 474), 'src.pipeline.question_category.QuestionCategory', 'QuestionCategory', (['"""event_ordering"""'], {}), "('event_ordering')\n", (456, 474), False, 'from src.pipeline.question_category import QuestionCategory, GetCategoryFromQuestionStructure\n'), ((589, 617), 'src.pipeline.question_category.QuestionCategory', 'QuestionCategory', (['"""whatever"""'], {}), "('whatever')\n", (605, 617), False, 'from src.pipeline.question_category import QuestionCategory, GetCategoryFromQuestionStructure\n'), ((807, 841), 'src.pipeline.question_category.GetCategoryFromQuestionStructure', 'GetCategoryFromQuestionStructure', ([], {}), '()\n', (839, 841), False, 'from src.pipeline.question_category import QuestionCategory, GetCategoryFromQuestionStructure\n'), ((1213, 1247), 'src.pipeline.question_category.GetCategoryFromQuestionStructure', 'GetCategoryFromQuestionStructure', ([], {}), '()\n', (1245, 1247), False, 'from src.pipeline.question_category import QuestionCategory, GetCategoryFromQuestionStructure\n'), ((1638, 1672), 'src.pipeline.question_category.GetCategoryFromQuestionStructure', 'GetCategoryFromQuestionStructure', ([], {}), '()\n', (1670, 1672), False, 'from src.pipeline.question_category import QuestionCategory, GetCategoryFromQuestionStructure\n'), ((2028, 2062), 'src.pipeline.question_category.GetCategoryFromQuestionStructure', 'GetCategoryFromQuestionStructure', ([], {}), '()\n', (2060, 2062), False, 'from src.pipeline.question_category import QuestionCategory, GetCategoryFromQuestionStructure\n'), ((2401, 2435), 'src.pipeline.question_category.GetCategoryFromQuestionStructure', 'GetCategoryFromQuestionStructure', ([], {}), '()\n', (2433, 2435), False, 'from src.pipeline.question_category import QuestionCategory, GetCategoryFromQuestionStructure\n'), ((2806, 2840), 'src.pipeline.question_category.GetCategoryFromQuestionStructure', 'GetCategoryFromQuestionStructure', ([], {}), '()\n', (2838, 2840), False, 'from src.pipeline.question_category import QuestionCategory, GetCategoryFromQuestionStructure\n'), ((3197, 3231), 'src.pipeline.question_category.GetCategoryFromQuestionStructure', 'GetCategoryFromQuestionStructure', ([], {}), '()\n', (3229, 3231), False, 'from src.pipeline.question_category import QuestionCategory, GetCategoryFromQuestionStructure\n'), ((3630, 3664), 'src.pipeline.question_category.GetCategoryFromQuestionStructure', 'GetCategoryFromQuestionStructure', ([], {}), '()\n', (3662, 3664), False, 'from src.pipeline.question_category import QuestionCategory, GetCategoryFromQuestionStructure\n'), ((4055, 4089), 'src.pipeline.question_category.GetCategoryFromQuestionStructure', 'GetCategoryFromQuestionStructure', ([], {}), '()\n', (4087, 4089), False, 'from src.pipeline.question_category import QuestionCategory, GetCategoryFromQuestionStructure\n'), ((4572, 4606), 'src.pipeline.question_category.GetCategoryFromQuestionStructure', 'GetCategoryFromQuestionStructure', ([], {}), '()\n', (4604, 4606), False, 'from src.pipeline.question_category import QuestionCategory, GetCategoryFromQuestionStructure\n'), ((4974, 5008), 'src.pipeline.question_category.GetCategoryFromQuestionStructure', 'GetCategoryFromQuestionStructure', ([], {}), '()\n', (5006, 5008), False, 'from src.pipeline.question_category import QuestionCategory, 
GetCategoryFromQuestionStructure\n'), ((5391, 5425), 'src.pipeline.question_category.GetCategoryFromQuestionStructure', 'GetCategoryFromQuestionStructure', ([], {}), '()\n', (5423, 5425), False, 'from src.pipeline.question_category import QuestionCategory, GetCategoryFromQuestionStructure\n'), ((5802, 5836), 'src.pipeline.question_category.GetCategoryFromQuestionStructure', 'GetCategoryFromQuestionStructure', ([], {}), '()\n', (5834, 5836), False, 'from src.pipeline.question_category import QuestionCategory, GetCategoryFromQuestionStructure\n'), ((6215, 6249), 'src.pipeline.question_category.GetCategoryFromQuestionStructure', 'GetCategoryFromQuestionStructure', ([], {}), '()\n', (6247, 6249), False, 'from src.pipeline.question_category import QuestionCategory, GetCategoryFromQuestionStructure\n'), ((6637, 6671), 'src.pipeline.question_category.GetCategoryFromQuestionStructure', 'GetCategoryFromQuestionStructure', ([], {}), '()\n', (6669, 6671), False, 'from src.pipeline.question_category import QuestionCategory, GetCategoryFromQuestionStructure\n'), ((7065, 7099), 'src.pipeline.question_category.GetCategoryFromQuestionStructure', 'GetCategoryFromQuestionStructure', ([], {}), '()\n', (7097, 7099), False, 'from src.pipeline.question_category import QuestionCategory, GetCategoryFromQuestionStructure\n'), ((7432, 7466), 'src.pipeline.question_category.GetCategoryFromQuestionStructure', 'GetCategoryFromQuestionStructure', ([], {}), '()\n', (7464, 7466), False, 'from src.pipeline.question_category import QuestionCategory, GetCategoryFromQuestionStructure\n'), ((7840, 7874), 'src.pipeline.question_category.GetCategoryFromQuestionStructure', 'GetCategoryFromQuestionStructure', ([], {}), '()\n', (7872, 7874), False, 'from src.pipeline.question_category import QuestionCategory, GetCategoryFromQuestionStructure\n'), ((8264, 8298), 'src.pipeline.question_category.GetCategoryFromQuestionStructure', 'GetCategoryFromQuestionStructure', ([], {}), '()\n', (8296, 8298), False, 'from src.pipeline.question_category import QuestionCategory, GetCategoryFromQuestionStructure\n'), ((8673, 8707), 'src.pipeline.question_category.GetCategoryFromQuestionStructure', 'GetCategoryFromQuestionStructure', ([], {}), '()\n', (8705, 8707), False, 'from src.pipeline.question_category import QuestionCategory, GetCategoryFromQuestionStructure\n'), ((9090, 9124), 'src.pipeline.question_category.GetCategoryFromQuestionStructure', 'GetCategoryFromQuestionStructure', ([], {}), '()\n', (9122, 9124), False, 'from src.pipeline.question_category import QuestionCategory, GetCategoryFromQuestionStructure\n'), ((9521, 9555), 'src.pipeline.question_category.GetCategoryFromQuestionStructure', 'GetCategoryFromQuestionStructure', ([], {}), '()\n', (9553, 9555), False, 'from src.pipeline.question_category import QuestionCategory, GetCategoryFromQuestionStructure\n'), ((9956, 9990), 'src.pipeline.question_category.GetCategoryFromQuestionStructure', 'GetCategoryFromQuestionStructure', ([], {}), '()\n', (9988, 9990), False, 'from src.pipeline.question_category import QuestionCategory, GetCategoryFromQuestionStructure\n'), ((10363, 10397), 'src.pipeline.question_category.GetCategoryFromQuestionStructure', 'GetCategoryFromQuestionStructure', ([], {}), '()\n', (10395, 10397), False, 'from src.pipeline.question_category import QuestionCategory, GetCategoryFromQuestionStructure\n'), ((10738, 10772), 'src.pipeline.question_category.GetCategoryFromQuestionStructure', 'GetCategoryFromQuestionStructure', ([], {}), '()\n', (10770, 10772), False, 'from 
src.pipeline.question_category import QuestionCategory, GetCategoryFromQuestionStructure\n'), ((11112, 11146), 'src.pipeline.question_category.GetCategoryFromQuestionStructure', 'GetCategoryFromQuestionStructure', ([], {}), '()\n', (11144, 11146), False, 'from src.pipeline.question_category import QuestionCategory, GetCategoryFromQuestionStructure\n'), ((10438, 10492), 'src.unpack_data.Q_A', 'Q_A', (['"""# question 20-9 = How do you preheat your oven?"""'], {}), "('# question 20-9 = How do you preheat your oven?')\n", (10441, 10492), False, 'from src.unpack_data import QuestionAnswerRecipe, Recipe, Q_A\n'), ((10813, 10883), 'src.unpack_data.Q_A', 'Q_A', (['"""# question 20-9 = How do you pre - heat the oven?"""', '"""answer = a"""'], {}), "('# question 20-9 = How do you pre - heat the oven?', 'answer = a')\n", (10816, 10883), False, 'from src.unpack_data import QuestionAnswerRecipe, Recipe, Q_A\n'), ((11187, 11260), 'src.unpack_data.Q_A', 'Q_A', (['"""# question XYZ-ABC = Is this question recognizable?"""', '"""answer = No"""'], {}), "('# question XYZ-ABC = Is this question recognizable?', 'answer = No')\n", (11190, 11260), False, 'from src.unpack_data import QuestionAnswerRecipe, Recipe, Q_A\n'), ((898, 969), 'src.unpack_data.Q_A', 'Q_A', (['"""# question 20-9 = How many times is the bowl used?"""', '"""answer = a"""'], {}), "('# question 20-9 = How many times is the bowl used?', 'answer = a')\n", (901, 969), False, 'from src.unpack_data import QuestionAnswerRecipe, Recipe, Q_A\n'), ((1304, 1399), 'src.unpack_data.Q_A', 'Q_A', (['"""# question 20-9 = How many actions does it take to process the tomato?"""', '"""answer = a"""'], {}), "('# question 20-9 = How many actions does it take to process the tomato?',\n 'answer = a')\n", (1307, 1399), False, 'from src.unpack_data import QuestionAnswerRecipe, Recipe, Q_A\n'), ((1729, 1793), 'src.unpack_data.Q_A', 'Q_A', (['"""# question 20-9 = How many spoons are used?"""', '"""answer = a"""'], {}), "('# question 20-9 = How many spoons are used?', 'answer = a')\n", (1732, 1793), False, 'from src.unpack_data import QuestionAnswerRecipe, Recipe, Q_A\n'), ((2106, 2167), 'src.unpack_data.Q_A', 'Q_A', (['"""# question 20-9 = What should be served?"""', '"""answer = a"""'], {}), "('# question 20-9 = What should be served?', 'answer = a')\n", (2109, 2167), False, 'from src.unpack_data import QuestionAnswerRecipe, Recipe, Q_A\n'), ((2492, 2579), 'src.unpack_data.Q_A', 'Q_A', (['"""# question 20-9 = Where should you add the chopped vegetables?"""', '"""answer = a"""'], {}), "('# question 20-9 = Where should you add the chopped vegetables?',\n 'answer = a')\n", (2495, 2579), False, 'from src.unpack_data import QuestionAnswerRecipe, Recipe, Q_A\n'), ((2897, 2972), 'src.unpack_data.Q_A', 'Q_A', (['"""# question 20-9 = How do you brush the salad dressing?"""', '"""answer = a"""'], {}), "('# question 20-9 = How do you brush the salad dressing?', 'answer = a')\n", (2900, 2972), False, 'from src.unpack_data import QuestionAnswerRecipe, Recipe, Q_A\n'), ((3275, 3351), 'src.unpack_data.Q_A', 'Q_A', (['"""# question 20-9 = How did you get the cooked vegetable?"""', '"""answer = a"""'], {}), "('# question 20-9 = How did you get the cooked vegetable?', 'answer = a')\n", (3278, 3351), False, 'from src.unpack_data import QuestionAnswerRecipe, Recipe, Q_A\n'), ((3708, 3774), 'src.unpack_data.Q_A', 'Q_A', (['"""# question 20-9 = What\'s in the lentil salad?"""', '"""answer = a"""'], {}), '("# question 20-9 = What\'s in the lentil salad?", \'answer = a\')\n', (3711, 3774), 
False, 'from src.unpack_data import QuestionAnswerRecipe, Recipe, Q_A\n'), ((4133, 4303), 'src.unpack_data.Q_A', 'Q_A', (['"""# question 20-9 = Cutting the stem into bite - size pieces into bite - size pieces and sauting minced meat in a separate pan, which comes first?"""', '"""answer = a"""'], {}), "('# question 20-9 = Cutting the stem into bite - size pieces into bite - size pieces and sauting minced meat in a separate pan, which comes first?'\n , 'answer = a')\n", (4136, 4303), False, 'from src.unpack_data import QuestionAnswerRecipe, Recipe, Q_A\n'), ((4663, 4753), 'src.unpack_data.Q_A', 'Q_A', (['"""# question 20-9 = To what extent do you cut carrots and zucchini?"""', '"""answer = a"""'], {}), "('# question 20-9 = To what extent do you cut carrots and zucchini?',\n 'answer = a')\n", (4666, 4753), False, 'from src.unpack_data import QuestionAnswerRecipe, Recipe, Q_A\n'), ((5052, 5127), 'src.unpack_data.Q_A', 'Q_A', (['"""# question 20-9 = How do you prick the dough slightly?"""', '"""answer = a"""'], {}), "('# question 20-9 = How do you prick the dough slightly?', 'answer = a')\n", (5055, 5127), False, 'from src.unpack_data import QuestionAnswerRecipe, Recipe, Q_A\n'), ((5482, 5576), 'src.unpack_data.Q_A', 'Q_A', (['"""# question 20-9 = For how long do you boil the potatoes until cooked?"""', '"""answer = a"""'], {}), "('# question 20-9 = For how long do you boil the potatoes until cooked?',\n 'answer = a')\n", (5485, 5576), False, 'from src.unpack_data import QuestionAnswerRecipe, Recipe, Q_A\n'), ((5893, 5987), 'src.unpack_data.Q_A', 'Q_A', (['"""# question 20-9 = Where do you season the trout with salt and pepper?"""', '"""answer = a"""'], {}), "('# question 20-9 = Where do you season the trout with salt and pepper?',\n 'answer = a')\n", (5896, 5987), False, 'from src.unpack_data import QuestionAnswerRecipe, Recipe, Q_A\n'), ((6306, 6405), 'src.unpack_data.Q_A', 'Q_A', (['"""# question 20-9 = By how much do you cover the beans with water in a pot?"""', '"""answer = a"""'], {}), "('# question 20-9 = By how much do you cover the beans with water in a pot?'\n , 'answer = a')\n", (6309, 6405), False, 'from src.unpack_data import QuestionAnswerRecipe, Recipe, Q_A\n'), ((6728, 6830), 'src.unpack_data.Q_A', 'Q_A', (['"""# question 20-9 = How do you coat hot syrup mixture the popcorn nut mixture?"""', '"""answer = a"""'], {}), "('# question 20-9 = How do you coat hot syrup mixture the popcorn nut mixture?'\n , 'answer = a')\n", (6731, 6830), False, 'from src.unpack_data import QuestionAnswerRecipe, Recipe, Q_A\n'), ((7143, 7201), 'src.unpack_data.Q_A', 'Q_A', (['"""# question 20-9 = Why do you use gas?"""', '"""answer = a"""'], {}), "('# question 20-9 = Why do you use gas?', 'answer = a')\n", (7146, 7201), False, 'from src.unpack_data import QuestionAnswerRecipe, Recipe, Q_A\n'), ((7523, 7611), 'src.unpack_data.Q_A', 'Q_A', (['"""# question 20-9 = What do you mix the oil in a small bowl with?"""', '"""answer = a"""'], {}), "('# question 20-9 = What do you mix the oil in a small bowl with?',\n 'answer = a')\n", (7526, 7611), False, 'from src.unpack_data import QuestionAnswerRecipe, Recipe, Q_A\n'), ((7931, 8028), 'src.unpack_data.Q_A', 'Q_A', (['"""# question 20-9 = What do you put the raspberries into a liqudizer with?"""', '"""answer = a"""'], {}), "('# question 20-9 = What do you put the raspberries into a liqudizer with?',\n 'answer = a')\n", (7934, 8028), False, 'from src.unpack_data import QuestionAnswerRecipe, Recipe, Q_A\n'), ((8342, 8405), 'src.unpack_data.Q_A', 'Q_A', (['"""# 
question 20-9 = How do you use the same pot of water??"""'], {}), "('# question 20-9 = How do you use the same pot of water??')\n", (8345, 8405), False, 'from src.unpack_data import QuestionAnswerRecipe, Recipe, Q_A\n'), ((8751, 8823), 'src.unpack_data.Q_A', 'Q_A', (['"""# question 20-9 = Why do you pinch the pizza dough?"""', '"""answer = a"""'], {}), "('# question 20-9 = Why do you pinch the pizza dough?', 'answer = a')\n", (8754, 8823), False, 'from src.unpack_data import QuestionAnswerRecipe, Recipe, Q_A\n'), ((9181, 9278), 'src.unpack_data.Q_A', 'Q_A', (['"""# question 20-9 = From where do you remove the spinach and shallots mix?"""', '"""answer = a"""'], {}), "('# question 20-9 = From where do you remove the spinach and shallots mix?',\n 'answer = a')\n", (9184, 9278), False, 'from src.unpack_data import QuestionAnswerRecipe, Recipe, Q_A\n'), ((9612, 9711), 'src.unpack_data.Q_A', 'Q_A', (['"""# question 20-9 = Where was the stuffed mushroom before it was garnished?"""', '"""answer = a"""'], {}), "('# question 20-9 = Where was the stuffed mushroom before it was garnished?'\n , 'answer = a')\n", (9615, 9711), False, 'from src.unpack_data import QuestionAnswerRecipe, Recipe, Q_A\n'), ((10047, 10134), 'src.unpack_data.Q_A', 'Q_A', (['"""# question 20-9 = To what extent do you cut the shortening in?"""', '"""answer = a"""'], {}), "('# question 20-9 = To what extent do you cut the shortening in?',\n 'answer = a')\n", (10050, 10134), False, 'from src.unpack_data import QuestionAnswerRecipe, Recipe, Q_A\n')]
|
import math
ang = float(input('Enter the angle: '))
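# math.sin/cos/tan expect radians, so convert the angle first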
angr = math.radians(ang)
sen = math.sin(angr)
cos = math.cos(angr)
tan = math.tan(angr)
print('Given the angle {}\nThe sine is {:.2f}\nThe cosine is {:.2f}\nThe tangent is {:.2f}'.format(ang, sen, cos, tan))
|
[
"math.radians",
"math.tan",
"math.cos",
"math.sin"
] |
[((58, 75), 'math.radians', 'math.radians', (['ang'], {}), '(ang)\n', (70, 75), False, 'import math\n'), ((82, 96), 'math.sin', 'math.sin', (['angr'], {}), '(angr)\n', (90, 96), False, 'import math\n'), ((103, 117), 'math.cos', 'math.cos', (['angr'], {}), '(angr)\n', (111, 117), False, 'import math\n'), ((124, 138), 'math.tan', 'math.tan', (['angr'], {}), '(angr)\n', (132, 138), False, 'import math\n')]
|
from django.contrib import admin
from .models import User
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.forms import UserCreationForm, UserChangeForm
class CustomUserAdmin(UserAdmin):
add_form = UserCreationForm
form = UserChangeForm
model = User
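    # Expose the extra profile fields on both the add-user and change-user admin forms.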
add_fieldsets = UserAdmin.add_fieldsets + (
(None, {'fields': ('phone', 'direction', 'date_of_birth')}),
)
fieldsets = UserAdmin.fieldsets + (
(None, {'fields': ('phone', 'direction', 'date_of_birth')}),
)
admin.site.register(User, CustomUserAdmin)
|
[
"django.contrib.admin.site.register"
] |
[((523, 560), 'django.contrib.admin.site.register', 'admin.site.register', (['User', 'CustomUser'], {}), '(User, CustomUser)\n', (542, 560), False, 'from django.contrib import admin\n')]
|
####################################################################################
# Default addresses for use in testing.
#
# These addresses are unique, and providing them names makes tests easier to read
# and reason about.
####################################################################################
import smartpy as sp
# An address which acts as an Administrator.
ADMINISTRATOR_ADDRESS = sp.address("tz1VmiY38m3y95HqQLjMwqnMS7sdMfGomzKi")
# An address that represents a baker that will register / unregister in the registry.
BAKER_ADDRESS = sp.address("tz1abmz7jiCV2GH2u81LRrGgAFFgvQgiDiaf")
# An address which will be rotated to.
ROTATED_ADDRESS = sp.address("tz1W5VkdB5s7ENMESVBtwyt9kyvLqPcUczRT")
# An address which is never used. This is a `null` value for addresses.
NULL_ADDRESS = sp.address("tz1bTpviNnyx2PXsNmGpCQTMQsGoYordkUoA")
|
[
"smartpy.address"
] |
[((405, 455), 'smartpy.address', 'sp.address', (['"""tz1VmiY38m3y95HqQLjMwqnMS7sdMfGomzKi"""'], {}), "('tz1VmiY38m3y95HqQLjMwqnMS7sdMfGomzKi')\n", (415, 455), True, 'import smartpy as sp\n'), ((559, 609), 'smartpy.address', 'sp.address', (['"""tz1abmz7jiCV2GH2u81LRrGgAFFgvQgiDiaf"""'], {}), "('tz1abmz7jiCV2GH2u81LRrGgAFFgvQgiDiaf')\n", (569, 609), True, 'import smartpy as sp\n'), ((661, 711), 'smartpy.address', 'sp.address', (['"""tz1W5VkdB5s7ENMESVBtwyt9kyvLqPcUczRT"""'], {}), "('tz1W5VkdB5s7ENMESVBtwyt9kyvLqPcUczRT')\n", (671, 711), True, 'import smartpy as sp\n'), ((800, 850), 'smartpy.address', 'sp.address', (['"""tz1bTpviNnyx2PXsNmGpCQTMQsGoYordkUoA"""'], {}), "('tz1bTpviNnyx2PXsNmGpCQTMQsGoYordkUoA')\n", (810, 850), True, 'import smartpy as sp\n')]
|
# importing required modules
from zipfile import ZipFile
import os
def get_all_file_paths(directory):
# initializing empty file paths list
file_paths = []
# crawling through directory and subdirectories
for root, directories, files in os.walk(directory):
for filename in files:
# join the two strings in order to form the full filepath.
filepath = os.path.join(root, filename)
file_paths.append(filepath)
# returning all file paths
return file_paths
def main():
# path to folder which needs to be zipped
directory = './'
# calling function to get all file paths in the directory
file_paths = get_all_file_paths(directory)
# writing files to a zipfile
    with ZipFile('All files.zip', 'w') as zip_file:
        # writing each file one by one
        for file in file_paths:
            zip_file.write(file)
print('All files zipped successfully!')
if __name__ == "__main__":
main()
|
[
"os.walk",
"zipfile.ZipFile",
"os.path.join"
] |
[((242, 260), 'os.walk', 'os.walk', (['directory'], {}), '(directory)\n', (249, 260), False, 'import os\n'), ((688, 717), 'zipfile.ZipFile', 'ZipFile', (['"""All files.zip"""', '"""w"""'], {}), "('All files.zip', 'w')\n", (695, 717), False, 'from zipfile import ZipFile\n'), ((363, 391), 'os.path.join', 'os.path.join', (['root', 'filename'], {}), '(root, filename)\n', (375, 391), False, 'import os\n')]
|
import pytest
from pydantic import ValidationError
from tests.odm.models import DocumentWithValidationOnSave
async def test_validate_on_insert():
doc = DocumentWithValidationOnSave(num_1=1, num_2=2)
doc.num_1 = "wrong_value"
with pytest.raises(ValidationError):
await doc.insert()
async def test_validate_on_replace():
doc = DocumentWithValidationOnSave(num_1=1, num_2=2)
await doc.insert()
doc.num_1 = "wrong_value"
with pytest.raises(ValidationError):
await doc.replace()
async def test_validate_on_save_changes():
doc = DocumentWithValidationOnSave(num_1=1, num_2=2)
await doc.insert()
doc.num_1 = "wrong_value"
with pytest.raises(ValidationError):
await doc.save_changes()
async def test_validate_on_save_action():
doc = DocumentWithValidationOnSave(num_1=1, num_2=2)
await doc.insert()
assert doc.num_2 == 3
async def test_validate_on_save_skip_action():
doc = DocumentWithValidationOnSave(num_1=1, num_2=2)
await doc.insert(skip_actions=["num_2_plus_1"])
assert doc.num_2 == 2
|
[
"pytest.raises",
"tests.odm.models.DocumentWithValidationOnSave"
] |
[((159, 205), 'tests.odm.models.DocumentWithValidationOnSave', 'DocumentWithValidationOnSave', ([], {'num_1': '(1)', 'num_2': '(2)'}), '(num_1=1, num_2=2)\n', (187, 205), False, 'from tests.odm.models import DocumentWithValidationOnSave\n'), ((354, 400), 'tests.odm.models.DocumentWithValidationOnSave', 'DocumentWithValidationOnSave', ([], {'num_1': '(1)', 'num_2': '(2)'}), '(num_1=1, num_2=2)\n', (382, 400), False, 'from tests.odm.models import DocumentWithValidationOnSave\n'), ((578, 624), 'tests.odm.models.DocumentWithValidationOnSave', 'DocumentWithValidationOnSave', ([], {'num_1': '(1)', 'num_2': '(2)'}), '(num_1=1, num_2=2)\n', (606, 624), False, 'from tests.odm.models import DocumentWithValidationOnSave\n'), ((806, 852), 'tests.odm.models.DocumentWithValidationOnSave', 'DocumentWithValidationOnSave', ([], {'num_1': '(1)', 'num_2': '(2)'}), '(num_1=1, num_2=2)\n', (834, 852), False, 'from tests.odm.models import DocumentWithValidationOnSave\n'), ((961, 1007), 'tests.odm.models.DocumentWithValidationOnSave', 'DocumentWithValidationOnSave', ([], {'num_1': '(1)', 'num_2': '(2)'}), '(num_1=1, num_2=2)\n', (989, 1007), False, 'from tests.odm.models import DocumentWithValidationOnSave\n'), ((245, 275), 'pytest.raises', 'pytest.raises', (['ValidationError'], {}), '(ValidationError)\n', (258, 275), False, 'import pytest\n'), ((463, 493), 'pytest.raises', 'pytest.raises', (['ValidationError'], {}), '(ValidationError)\n', (476, 493), False, 'import pytest\n'), ((687, 717), 'pytest.raises', 'pytest.raises', (['ValidationError'], {}), '(ValidationError)\n', (700, 717), False, 'import pytest\n')]
|
import discord
from discord.ext import commands
from discord.utils import get
class c216(commands.Cog, name="c216"):
def __init__(self, bot: commands.Bot):
self.bot = bot
@commands.command(name='Automaton_Doll_&_Winter_Lights', aliases=['c216'])
async def example_embed(self, ctx):
embed = discord.Embed(title='Automaton Doll & Winter Lights',
color=0xff8b53)
embed.set_thumbnail(url='https://www.duelingbook.com/images/custom-pics/2300000/2356400.jpg')
embed.add_field(name='Status (Archetype)', value='Casual:3/Tournament:3', inline=True)
embed.add_field(name='Type (Attribute)', value='Machine/Tuner/Effect (LIGHT)', inline=False)
embed.add_field(name='Level (ATK/DEF)', value='3 (0/1800)', inline=False)
embed.add_field(name='Monster Effect', value='During damage calculation, if your monster is battling an opponent\'s monster with higher ATK (Quick Effect): You can discard this card; your battling monster gains ATK/DEF equal to the ATK of the opponent\'s monster it is battling, until the end of the turn.', inline=False)
embed.set_footer(text='Set Code: ANCF')
await ctx.send(embed=embed)
def setup(bot: commands.Bot):
bot.add_cog(c216(bot))
|
[
"discord.ext.commands.command",
"discord.Embed"
] |
[((190, 263), 'discord.ext.commands.command', 'commands.command', ([], {'name': '"""Automaton_Doll_&_Winter_Lights"""', 'aliases': "['c216']"}), "(name='Automaton_Doll_&_Winter_Lights', aliases=['c216'])\n", (206, 263), False, 'from discord.ext import commands\n'), ((320, 389), 'discord.Embed', 'discord.Embed', ([], {'title': '"""Automaton Doll & Winter Lights"""', 'color': '(16747347)'}), "(title='Automaton Doll & Winter Lights', color=16747347)\n", (333, 389), False, 'import discord\n')]
|
import wicked as w
import pytest
def test_orbital_space():
w.reset_space()
w.add_space("c", "fermion", "occupied", ["m", "n"])
w.add_space("a", "fermion", "general", ["u", "v", "w", "x", "y", "z"])
w.add_space("v", "fermion", "unoccupied", ["e", "f"])
assert w.num_spaces() == 3
def test_orbital_space_exceptions():
w.reset_space()
w.add_space("c", "fermion", "occupied", ["m", "n"])
with pytest.raises(Exception):
w.add_space("c", "fermion", "occupied", ["m", "n"])
with pytest.raises(Exception):
w.add_space("v", "fermion", "occupied", ["m", "n"])
if __name__ == "__main__":
test_orbital_space()
test_orbital_space_exceptions()
|
[
"wicked.num_spaces",
"wicked.reset_space",
"wicked.add_space",
"pytest.raises"
] |
[((65, 80), 'wicked.reset_space', 'w.reset_space', ([], {}), '()\n', (78, 80), True, 'import wicked as w\n'), ((85, 136), 'wicked.add_space', 'w.add_space', (['"""c"""', '"""fermion"""', '"""occupied"""', "['m', 'n']"], {}), "('c', 'fermion', 'occupied', ['m', 'n'])\n", (96, 136), True, 'import wicked as w\n'), ((141, 211), 'wicked.add_space', 'w.add_space', (['"""a"""', '"""fermion"""', '"""general"""', "['u', 'v', 'w', 'x', 'y', 'z']"], {}), "('a', 'fermion', 'general', ['u', 'v', 'w', 'x', 'y', 'z'])\n", (152, 211), True, 'import wicked as w\n'), ((216, 269), 'wicked.add_space', 'w.add_space', (['"""v"""', '"""fermion"""', '"""unoccupied"""', "['e', 'f']"], {}), "('v', 'fermion', 'unoccupied', ['e', 'f'])\n", (227, 269), True, 'import wicked as w\n'), ((344, 359), 'wicked.reset_space', 'w.reset_space', ([], {}), '()\n', (357, 359), True, 'import wicked as w\n'), ((364, 415), 'wicked.add_space', 'w.add_space', (['"""c"""', '"""fermion"""', '"""occupied"""', "['m', 'n']"], {}), "('c', 'fermion', 'occupied', ['m', 'n'])\n", (375, 415), True, 'import wicked as w\n'), ((281, 295), 'wicked.num_spaces', 'w.num_spaces', ([], {}), '()\n', (293, 295), True, 'import wicked as w\n'), ((425, 449), 'pytest.raises', 'pytest.raises', (['Exception'], {}), '(Exception)\n', (438, 449), False, 'import pytest\n'), ((459, 510), 'wicked.add_space', 'w.add_space', (['"""c"""', '"""fermion"""', '"""occupied"""', "['m', 'n']"], {}), "('c', 'fermion', 'occupied', ['m', 'n'])\n", (470, 510), True, 'import wicked as w\n'), ((520, 544), 'pytest.raises', 'pytest.raises', (['Exception'], {}), '(Exception)\n', (533, 544), False, 'import pytest\n'), ((554, 605), 'wicked.add_space', 'w.add_space', (['"""v"""', '"""fermion"""', '"""occupied"""', "['m', 'n']"], {}), "('v', 'fermion', 'occupied', ['m', 'n'])\n", (565, 605), True, 'import wicked as w\n')]
|
import math
import rospy
from bitbots_animation_server.animation import Keyframe, Animation
from bitbots_splines.smooth_spline import SmoothSpline
class SplineAnimator:
def __init__(self, animation, current_joint_states):
self.anim = animation
self.start_time = rospy.get_time()
self.animation_duration = 0
self.current_point_time = 0
self.spline_dict = {}
        # add the current joint positions as the spline start points
if current_joint_states is not None:
i = 0
for joint in current_joint_states.name:
if joint not in self.spline_dict:
self.spline_dict[joint] = SmoothSpline()
self.spline_dict[joint].add_point(0, math.degrees(current_joint_states.position[i]))
                i += 1
else:
rospy.logwarn("No current joint positions. Will play animation starting from first keyframe.")
for joint in self.anim.keyframes[0].goals:
if joint not in self.spline_dict:
self.spline_dict[joint] = SmoothSpline()
self.spline_dict[joint].add_point(0, self.anim.keyframes[0].goals[joint])
#load keyframe positions into the splines
for keyframe in self.anim.keyframes:
self.animation_duration += keyframe.duration + keyframe.pause
self.current_point_time += keyframe.duration
for joint in keyframe.goals:
if joint not in self.spline_dict:
self.spline_dict[joint] = SmoothSpline()
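                # add the goal twice (at keyframe end and again after the pause) so the joint holds its position during the pause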
self.spline_dict[joint].add_point(self.current_point_time, keyframe.goals[joint])
self.spline_dict[joint].add_point(self.current_point_time + keyframe.pause, keyframe.goals[joint])
self.current_point_time += keyframe.pause
# compute the splines
for joint in self.spline_dict:
self.spline_dict[joint].compute_spline()
def get_positions_deg(self, time):
if time < 0 or time > self.animation_duration:
return None
ret_dict = {}
for joint in self.spline_dict:
ret_dict[joint] = self.spline_dict[joint].pos(time)
return ret_dict
def get_positions_rad(self, time):
if time < 0 or time > self.animation_duration:
return None
ret_dict = {}
for joint in self.spline_dict:
ret_dict[joint] = math.radians(self.spline_dict[joint].pos(time))
return ret_dict
def get_start_time(self):
return self.start_time
def get_duration(self):
return self.animation_duration
|
[
"rospy.logwarn",
"rospy.get_time",
"math.degrees",
"bitbots_splines.smooth_spline.SmoothSpline"
] |
[((284, 300), 'rospy.get_time', 'rospy.get_time', ([], {}), '()\n', (298, 300), False, 'import rospy\n'), ((836, 940), 'rospy.logwarn', 'rospy.logwarn', (['"""No current joint positions. Will play animation starting from first keyframe."""'], {}), "(\n 'No current joint positions. Will play animation starting from first keyframe.'\n )\n", (849, 940), False, 'import rospy\n'), ((673, 687), 'bitbots_splines.smooth_spline.SmoothSpline', 'SmoothSpline', ([], {}), '()\n', (685, 687), False, 'from bitbots_splines.smooth_spline import SmoothSpline\n'), ((741, 787), 'math.degrees', 'math.degrees', (['current_joint_states.position[i]'], {}), '(current_joint_states.position[i])\n', (753, 787), False, 'import math\n'), ((1082, 1096), 'bitbots_splines.smooth_spline.SmoothSpline', 'SmoothSpline', ([], {}), '()\n', (1094, 1096), False, 'from bitbots_splines.smooth_spline import SmoothSpline\n'), ((1563, 1577), 'bitbots_splines.smooth_spline.SmoothSpline', 'SmoothSpline', ([], {}), '()\n', (1575, 1577), False, 'from bitbots_splines.smooth_spline import SmoothSpline\n')]
|
'''
@since: 5 Feb 2016
@author: <NAME> <<EMAIL>>, <NAME> <<EMAIL>>
@license: Lenovo License
@copyright: Copyright 2016, Lenovo
@organization: Lenovo
@summary: This module provides scriptable interfaces and scriptable python shell.
'''
import code
import logging
import sys
from pylxca import __version__
from pylxca.pylxca_cmd import lxca_ishell
from pylxca.pylxca_cmd.lxca_cmd import fanmuxes
# shell is a global variable
SHELL_OBJ = None
LOGGER = logging.getLogger(__name__)
def pyshell(shell=lxca_ishell.InteractiveShell()):
'''
@summary: this method provides scriptable python shell
'''
global SHELL_OBJ
SHELL_OBJ = shell
SHELL_OBJ.set_ostream_to_null()
def set_interactive():
'''
@summary: This method set the shell in interactive mode
'''
ns = {"connect": connect,
"disconnect": disconnect,
"chassis": chassis,
"cmms": cmms,
"fans": fans,
"fanmuxes": fanmuxes,
"switches": switches,
"powersupplies": powersupplies,
"nodes": nodes,
"scalablesystem": scalablesystem,
"discover": discover,
"manage": manage,
"unmanage": unmanage,
#"jobs": jobs,
"users": users,
"lxcalog": lxcalog,
"ffdc": ffdc,
"updatecomp": updatecomp,
"updatepolicy": updatepolicy,
"updaterepo": updaterepo,
"configpatterns": configpatterns,
"configprofiles": configprofiles,
"configtargets": configtargets,
"tasks": tasks,
"manifests": manifests,
"osimages": osimages,
"resourcegroups": resourcegroups,
"storedcredentials": storedcredentials,
"managementserver": managementserver,
"license": license,
"help": help}
ns.update()
global __version__
code.interact('Interactive Python Shell for Lenovo XClarity Administrator v' + __version__ +
'\nType "dir()" or "help(lxca command object)" for more information.', local=ns)
def connect(*args, **kwargs):
'''
@summary:
Use this function to connect to Lenovo XClarity Administrator
run this function as
con_variable = connect( key1 = 'val1', key2 = 'val2', ...)
Where KeyList is as follows
keylist = ['url', 'user', 'pw', 'noverify']
@param
The parameters for this command are as follows
con Connection Object to Lenovo XClarity Administrator
url url to Lenovo XClarity Administrator Example. https://a.b.c.d
user User Id to Authenticate Lenovo XClarity Administrator
pw Password to Authenticate Lenovo XClarity Administrator
noverify flag to indicate to not verify server certificate
@example
con1 = connect( url = "https://10.243.12.142",user = "USERID", pw = "Password", noverify = "True")
'''
global SHELL_OBJ
command_name = sys._getframe().f_code.co_name
keylist = ['url', 'user', 'pw', 'noverify']
if len(args) == 0 and len(kwargs) == 0:
return
for i in range(len(args)):
kwargs[keylist[i]] = args[i]
con = SHELL_OBJ.handle_input_args(command_name, args=args, kwargs=kwargs)
return con
def disconnect(*args, **kwargs):
'''
@summary:
Use this function to disconnect from Lenovo XClarity Administrator
run this function as
disconnect()
        it also resets the internal current connection to the original connection; this is used in the api version
        to retain the original connection if we are disconnecting a connection other than the current one
i.e
con1 = connect(...)
con2 = connect(...)
con3 = connect(...)
con4 = connect(...) current internal connection is con4
disconnect(con2) will keep current connection to con4
disconnect(con4) or disconnect() will set current connection to None
@param
The parameters for this command are as follows
con Connection Object to Lenovo XClarity Administrator
@example
disconnect()
'''
global SHELL_OBJ
command_name = sys._getframe().f_code.co_name
param_dict = {}
con = None
long_short_key_map = {}
keylist = ['con']
optional_keylist = ['con']
mutually_exclusive_keys = []
mandatory_options_list = {}
con = _validate_param(keylist, long_short_key_map, mandatory_options_list, optional_keylist,
mutually_exclusive_keys,
param_dict, *args, **kwargs)
out_obj = SHELL_OBJ.handle_input_dict(command_name, con, param_dict, False)
return out_obj
def cmms(*args, **kwargs):
'''
@summary:
Use this function to get CMMs information
run this function as
data_dictionary = cmms( key1 = 'val1', key2 = 'val2', ...)
Where KeyList is as follows
keylist = ['con', 'uuid', 'chassis']
@param
The parameters for this command are as follows
con Connection Object to Lenovo XClarity Administrator
uuid cmm uuid
chassis chassis uuid
@example
cmm_list = cmms( con = con1, uuid = 'fc3058cadf8b11d48c9b9b1b1b1b1b57')
'''
global SHELL_OBJ
command_name = sys._getframe().f_code.co_name
param_dict = {}
con = None
long_short_key_map = {'uuid': 'u', 'chassis': 'c'}
keylist = ['con', 'uuid', 'chassis']
optional_keylist = ['con', 'uuid', 'chassis']
mutually_exclusive_keys = ['uuid', 'chassis']
mandatory_options_list = {}
con = _validate_param(keylist, long_short_key_map, mandatory_options_list, optional_keylist,
mutually_exclusive_keys,
param_dict, *args, **kwargs)
out_obj = SHELL_OBJ.handle_input_dict(command_name, con, param_dict)
return out_obj
def chassis(*args, **kwargs):
'''
@summary:
Use this function to get Chassis information
run this function as
data_dictionary = chassis( key1 = 'val1', key2 = 'val2', ...)
Where KeyList is as follows
keylist = ['con', 'uuid', 'status']
@param
The parameters for this command are as follows
con Connection Object to Lenovo XClarity Administrator
uuid chassis uuid
status chassis manage status (managed/unmanaged)
@example
chassis_list = chassis( con = con1, uuid = 'fc3058cadf8b11d48c9b9b1b1b1b1b57')
'''
global SHELL_OBJ
command_name = sys._getframe().f_code.co_name
param_dict = {}
con = None
long_short_key_map = {'uuid': 'u', 'status': 's'}
keylist = ['con', 'uuid', 'status']
optional_keylist = ['con', 'uuid', 'status']
mutually_exclusive_keys = []
mandatory_options_list = {}
con = _validate_param(keylist, long_short_key_map, mandatory_options_list, optional_keylist,
mutually_exclusive_keys,
param_dict, *args, **kwargs)
out_obj = SHELL_OBJ.handle_input_dict(command_name, con, param_dict)
return out_obj
def fans(*args, **kwargs):
'''
@summary:
Use this function to get fans information
run this function as
data_dictionary = fans( key1 = 'val1', key2 = 'val2', ...)
Where KeyList is as follows
keylist = ['con', 'uuid', 'chassis']
@param
The parameters for this command are as follows
con Connection Object to Lenovo XClarity Administrator
uuid uuid of fan
chassis chassis uuid
@example
fans_list = fans( con = con1, uuid = 'fc3058cadf8b11d48c9b9b1b1b1b1b57')
'''
global SHELL_OBJ
command_name = sys._getframe().f_code.co_name
param_dict = {}
con = None
long_short_key_map = {'uuid': 'u', 'chassis': 'c'}
keylist = ['con', 'uuid', 'chassis']
optional_keylist = ['con', 'uuid', 'chassis']
mutually_exclusive_keys = ['uuid', 'chassis']
mandatory_options_list = {}
con = _validate_param(keylist, long_short_key_map, mandatory_options_list, optional_keylist,
mutually_exclusive_keys,
param_dict, *args, **kwargs)
out_obj = SHELL_OBJ.handle_input_dict(command_name, con, param_dict)
return out_obj
def fanmuxes(*args, **kwargs):
'''
@summary:
Use this function to get fanmuxes information
run this function as
data_dictionary = fanmuxes( key1 = 'val1', key2 = 'val2', ...)
Where KeyList is as follows
keylist = ['con','uuid','chassis']
@param
The parameters for this command are as follows
con Connection Object to Lenovo XClarity Administrator
uuid uuid of fanmux
chassis chassis uuid
@example
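        A hypothetical invocation, following the pattern of the other device queries (the chassis uuid is illustrative):
        fanmux_list = fanmuxes( con = con1, chassis = 'fc3058cadf8b11d48c9b9b1b1b1b1b57')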
'''
global SHELL_OBJ
command_name = sys._getframe().f_code.co_name
param_dict = {}
con = None
long_short_key_map = {'uuid': 'u', 'chassis': 'c'}
keylist = ['con', 'uuid', 'chassis']
optional_keylist = ['con', 'uuid', 'chassis']
mutually_exclusive_keys = ['uuid', 'chassis']
mandatory_options_list = {}
con = _validate_param(keylist, long_short_key_map, mandatory_options_list, optional_keylist,
mutually_exclusive_keys,
param_dict, *args, **kwargs)
out_obj = SHELL_OBJ.handle_input_dict(command_name, con, param_dict)
return out_obj
def nodes(*args, **kwargs):
'''
@summary:
Use this function to get nodes information
run this function as
data_dictionary = nodes( key1 = 'val1', key2 = 'val2', ...)
Where KeyList is as follows
keylist = ['con','uuid','chassis','status', 'modify', 'metrics']
@param
The parameters for this command are as follows
con Connection Object to Lenovo XClarity Administrator
uuid uuid of node
chassis chassis uuid
status nodes manage status (managed/unmanaged)
modify JSON object of modifyable node properties
metrics flag to fetch metrics of all nodes or metrics of a node belonging to the provided uuid
@example
nodes(uuid="FAA6E3D494E511E6A0739B91ED670CE8",modify='{"location":{"location": "new location", "rack": "rack 5","lowestRackUnit": 3}}')
'''
global SHELL_OBJ
command_name = sys._getframe().f_code.co_name
param_dict = {}
con = None
long_short_key_map = {'uuid': 'u', 'chassis': 'c', 'status': 's', 'modify': 'm', 'metrics': 'x'}
keylist = ['con', 'uuid', 'chassis', 'status', 'modify', 'metrics']
optional_keylist = ['con', 'uuid', 'chassis', 'status', 'modify', 'metrics']
mutually_exclusive_keys = ['uuid', 'chassis']
mandatory_options_list = {}
con = _validate_param(keylist, long_short_key_map, mandatory_options_list, optional_keylist,
mutually_exclusive_keys,
param_dict, *args, **kwargs)
out_obj = SHELL_OBJ.handle_input_dict(command_name, con, param_dict)
return out_obj
def switches(*args, **kwargs):
'''
@summary:
Use this function to get switches information
run this function as
data_dictionary = switches( key1 = 'val1', key2 = 'val2', ...)
Where KeyList is as follows
keylist = ['con','uuid','chassis','ports','action']
@param
The parameters for this command are as follows
con Connection Object to Lenovo XClarity Administrator
uuid uuid of switch
chassis chassis uuid
        ports           an empty string lists all ports for the given uuid; otherwise a comma separated list of ports
action enable/disable ports
@example
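        Hypothetical invocations (the uuid value is illustrative); an empty ports string lists all ports:
        switch_list = switches( con = con1)
        port_list = switches( con = con1, uuid = 'fc3058cadf8b11d48c9b9b1b1b1b1b57', ports = '')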
'''
global SHELL_OBJ
command_name = sys._getframe().f_code.co_name
param_dict = {}
con = None
    # other parameters don't have short options
long_short_key_map = {'uuid': 'u', 'chassis': 'c'}
keylist = ['con', 'uuid', 'chassis', 'ports', 'action']
optional_keylist = ['con', 'uuid', 'chassis', 'ports', 'action']
mutually_exclusive_keys = ['uuid', 'chassis']
mandatory_options_list = {}
con = _validate_param(keylist, long_short_key_map, mandatory_options_list, optional_keylist,
mutually_exclusive_keys,
param_dict, *args, **kwargs)
out_obj = SHELL_OBJ.handle_input_dict(command_name, con, param_dict)
return out_obj
def powersupplies(*args, **kwargs):
'''
@summary:
Use this function to get powersupplies information
run this function as
data_dictionary = powersupplies( key1 = 'val1', key2 = 'val2', ...)
Where KeyList is as follows
keylist = ['con','uuid','chassis']
@param
The parameters for this command are as follows
con Connection Object to Lenovo XClarity Administrator
uuid uuid of power supply
chassis chassis uuid
@example
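        A hypothetical invocation (the chassis uuid is illustrative):
        psu_list = powersupplies( con = con1, chassis = 'fc3058cadf8b11d48c9b9b1b1b1b1b57')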
'''
global SHELL_OBJ
command_name = sys._getframe().f_code.co_name
param_dict = {}
con = None
long_short_key_map = {'uuid': 'u', 'chassis': 'c'}
keylist = ['con', 'uuid', 'chassis']
optional_keylist = ['con', 'uuid', 'chassis']
mutually_exclusive_keys = ['uuid', 'chassis']
mandatory_options_list = {}
con = _validate_param(keylist, long_short_key_map, mandatory_options_list, optional_keylist,
mutually_exclusive_keys,
param_dict, *args, **kwargs)
out_obj = SHELL_OBJ.handle_input_dict(command_name, con, param_dict)
return out_obj
def scalablesystem(*args, **kwargs):
'''
@summary:
Use this function to get scalablesystem information
run this function as
data_dictionary = scalablesystem( key1 = 'val1', key2 = 'val2', ...)
Where KeyList is as follows
keylist = ['con','id','type']
@param
The parameters for this command are as follows
con Connection Object to Lenovo XClarity Administrator
id scalable complex id
type type (flex/rackserver)
@example
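        A hypothetical invocation (the type value is taken from the parameter description above):
        scalable_list = scalablesystem( con = con1, type = 'flex')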
'''
global SHELL_OBJ
command_name = sys._getframe().f_code.co_name
param_dict = {}
con = None
long_short_key_map = {'id': 'i', 'type': 't'}
keylist = ['con', 'id', 'type']
optional_keylist = ['con', 'id', 'type']
mutually_exclusive_keys = []
mandatory_options_list = {}
con = _validate_param(keylist, long_short_key_map, mandatory_options_list, optional_keylist,
mutually_exclusive_keys,
param_dict, *args, **kwargs)
out_obj = SHELL_OBJ.handle_input_dict(command_name, con, param_dict)
return out_obj
def discover(*args, **kwargs):
'''
@summary:
Use this function to discover endpoint from Lenovo XClarity Administrator
run this function as
data_dictionary = discover( key1 = 'val1', key2 = 'val2', ...)
Where KeyList is as follows
keylist = ['con','ip','job']
@param
The parameters for this command are as follows
con Connection Object to Lenovo XClarity Administrator
ip One or more IP addresses for each endpoint to be discovered.
job Job ID of discover request
@example
        For getting discovery job status
job_data = discover(con=con1,job=jobid)
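        Hypothetical discovery by IP address (the address is illustrative):
        jobid = discover(con=con1, ip='10.243.6.68')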
'''
global SHELL_OBJ
command_name = sys._getframe().f_code.co_name
param_dict = {}
con = None
long_short_key_map = {'ip': 'i', 'job': 'j'}
keylist = ['con', 'ip', 'job']
optional_keylist = ['con', 'ip', 'job']
mutually_exclusive_keys = ['ip', 'job']
mandatory_options_list = {}
con = _validate_param(keylist, long_short_key_map, mandatory_options_list, optional_keylist,
mutually_exclusive_keys,
param_dict, *args, **kwargs)
out_obj = SHELL_OBJ.handle_input_dict(command_name, con, param_dict)
return out_obj
def manage(*args, **kwargs):
'''
@summary:
Use this function to manage endpoint from Lenovo XClarity Administrator
run this function as
data_dictionary = manage( key1 = 'val1', key2 = 'val2', ...)
Where KeyList is as follows
keylist = ['con','subcmd','ip','user','pw','rpw','job','force', 'storedcredential_id']
@param
The parameters for this command are as follows
con Connection Object to Lenovo XClarity Administrator
subcmd
ip One or more IP addresses for each endpoint to be managed.
user user ID to access the endpoint
pw The current password to access the endpoint.
rpw The recovery password to be used for the endpoint.
force force manage
job Job ID of existing manage request
        storedcredential_id  Stored Credential Id to be used for the device manage operation;
                            if this is provided, user is not required
        Note : mp, type and epuuid parameters are deprecated and only kept for backward compatibility.
@example
jobid = manage(con=con1, subcmd='device', ip="10.243.6.68",user="USERID",pw="<PASSWORD>",rpw="<PASSWORD>")
jobid = manage(con=con1, subcmd='device', ip="10.243.6.68",storedcredintail_id="12")
        or with positional arguments it can be represented as
        jobid = manage(con1, 'device', "10.243.6.68", "USERID", "PASSW0RD", "PASS<PASSWORD>", force=True)
jobid = manage(con1, subcmd='device', i="10.243.4.16", u='USERID', p='Purley44LEN', r='Purley55LEN', f='True')
Using storedcredential id for Rackswitch
jobid = manage(con=con1, subcmd='device', i='10.240.157.111', s='402', f='True')
Using storedcredential id for Rackswitch Server
jobid = manage(con=con1, subcmd='device',i="10.243.4.16", r='Purley55LEN', s='404', f='True')
        For getting management job status
jobid = manage(con=con1, subcmd='job_status', job="12")
'''
global SHELL_OBJ
command_name = sys._getframe().f_code.co_name
param_dict = {}
con = None
long_short_key_map = {'ip': 'i', 'user': 'u', 'pw': 'p',
'rpw': 'r', 'job': 'j', 'force': 'f', 'storedcredential_id': 's'}
keylist = ['con', 'subcmd', 'ip', 'user', 'pw',
'rpw', 'job', 'force', 'storedcredential_id']
optional_keylist = ['con', 'ip', 'user', 'pw',
'rpw', 'job', 'force', 'storedcredential_id']
mutually_exclusive_keys = ['ip', 'job']
mandatory_options_list = {'job': []}
con = _validate_param(keylist, long_short_key_map, mandatory_options_list, optional_keylist,
mutually_exclusive_keys,
param_dict, *args, **kwargs)
out_obj = SHELL_OBJ.handle_input_dict(command_name, con, param_dict)
return out_obj
def unmanage(*args, **kwargs):
'''
@summary:
Use this function to unmanage endpoint from Lenovo XClarity Administrator
run this function as
data_dictionary = unmanage( key1 = 'val1', key2 = 'val2', ...)
Where KeyList is as follows
keylist = ['con','subcmd','ep','force','job']
@param
The parameters for this command are as follows
        subcmd  device / job_status
ep one or more endpoints to be unmanaged.
This is comma separated list of multiple endpoints, each endpoint should
contain endpoint information separated by semicolon.
                            endpoint's IP Address (multiple addresses should be separated by #), UUID of the endpoint and
Type of endpoint to be unmanaged ,This can be one of the following values:
Chassis
ThinkServer
Storage
Rackswitch
Rack-Tower
Edge
force Indicates whether to force the unmanagement of an endpoint (True/False)
job Job ID of unmanage request
@example
endpoint = '10.240.195.39;D31C76F0302503B50010D21DE03A0523;Rack-Tower'
unmanage(con_lxca, subcmd='device', ep=endpoint)
'''
global SHELL_OBJ
command_name = sys._getframe().f_code.co_name
param_dict = {}
con = None
long_short_key_map = {'ep': 'e', 'job': 'j', 'force': 'f'}
keylist = ['con', 'subcmd', 'ep', 'force', 'job']
optional_keylist = ['con', 'ep', 'force', 'job']
mutually_exclusive_keys = ['ep', 'job']
mandatory_options_list = {}
con = _validate_param(keylist, long_short_key_map, mandatory_options_list, optional_keylist,
mutually_exclusive_keys,
param_dict, *args, **kwargs)
out_obj = SHELL_OBJ.handle_input_dict(command_name, con, param_dict)
return out_obj
def configpatterns(*args, **kwargs):
'''
@summary:
Use this function to Retrieve information and deploy all server and category patterns
that have been defined in the Lenovo XClarity Administrator
run this function as
data_dictionary = configpatterns( key1 = 'val1', key2 = 'val2', ...)
Where KeyList is as follows
        keylist = ['con', 'subcmd', 'id', 'includeSettings', 'endpoint', 'restart', 'type', 'pattern_update_dict', 'name', 'status']
@param
The parameters for this command are as follows
subcmd list, apply, import , status
id The unique ID that was assigned when the server pattern was created
name name of pattern , this is used for apply
endpoint List of one or more UUIDs for the target servers,If a target is an empty bay,
specify the location ID; otherwise, specify the server UUID
restart When to activate the configurations. This can be one of the following values:
defer - Activate IMM settings but do not restart the server.
immediate - Activate all settings and restart the server immediately.
pending - Manually activate the server profile and restart the server.
type Type of the server, It can be one of the following
flex - Flex System Placeholder chassis empty bays
node
rack
tower
pattern_update_dict dictionary of category_pattern to import.
        status         set to True to check the config status for the given uuid in endpoint
@example
list all configpatterns
rep = configpatterns(con, subcmd = 'list')
list particular configpattern
rep = configpatterns(con, subcmd = 'list', id="53")
list particular configpattern with includeSettings
rep = configpatterns(con, subcmd = 'list', id="53", includeSettings="True")
import SystemInfo category pattern
py_dict = {
"template_type": "SystemInfo",
"template": {
"contact": "contact",
"description": "Pattern created by testAPI",
"location": "location",
"name": "Learned-System_Info-19",
"systemName": {
"autogen": "Disable",
"hyphenChecked": False
},
"type": "SystemInfo",
"uri": "/config/template/61",
"userDefined": True
}
}
import json
my_json = json.dumps(py_dict)
rep = configpatterns(con_lxca, 'import', pattern_update_dict = my_json )
'''
global SHELL_OBJ
command_name = sys._getframe().f_code.co_name
param_dict = {}
con = None
# some of them don't have short options
long_short_key_map = {'id': 'i', 'endpoint': 'e', 'restart': 'r',
'type': 't', 'name': 'n', 'pattern_update_dict': 'p'}
keylist = ['con', 'subcmd', 'id', 'includeSettings', 'endpoint',
'restart', 'type', 'pattern_update_dict', 'name']
optional_keylist = ['con', 'id', 'includeSettings', 'endpoint',
'restart', 'type', 'pattern_update_dict', 'name']
mutually_exclusive_keys = ['id', 'pattern_update_dict']
mandatory_options_list = {'id': [], 'pattern_update_dict': []}
# 'includeSettings': ['id']}
con = _validate_param(keylist, long_short_key_map, mandatory_options_list, optional_keylist,
mutually_exclusive_keys,
param_dict, *args, **kwargs)
out_obj = SHELL_OBJ.handle_input_dict(command_name, con, param_dict)
return out_obj
def configprofiles(*args, **kwargs):
'''
@summary:
Use this function to Retrieve information server configuration profiles
that have been defined in the Lenovo XClarity Administrator
run this function as
data_dictionary = configprofiles( key1 = 'val1', key2 = 'val2', ...)
Where KeyList is as follows
keylist = ['con', 'subcmd', 'id', 'name', 'endpoint', 'restart', 'powerdown', 'resetimm', 'resetswitch', 'force']
@param
The parameters for this command are as follows
subcmd list, rename, activate, unassign, delete
id The unique ID that was assigned when the server profile was created
name profile name
endpoint endpoint UUID of the server or location id for flex system
restart restart server to activate profile ( immediate / defer )
options for unassign
powerdown powerdown server
resetimm reset IMM
resetswitch Identifies whether to reset the switch internal port settings to default values
force force unassign operation
@example
rep = configprofiles(con_lxca, 'list')
rep = configprofiles(con_lxca, 'list','3')
'''
global SHELL_OBJ
command_name = sys._getframe().f_code.co_name
param_dict = {}
con = None
    # some keys don't have short options
long_short_key_map = {'id': 'i', 'name': 'n', 'endpoint': 'e', 'restart': 'r',
'powerdown': 'p', 'force': 'f'}
keylist = ['con', 'subcmd', 'id', 'name', 'endpoint',
'restart', 'powerdown', 'resetimm', 'resetswitch', 'force']
optional_keylist = ['con', 'id', 'name', 'endpoint',
'restart', 'powerdown', 'resetimm', 'resetswitch', 'force']
mutually_exclusive_keys = []
mandatory_options_list = {'id': [], 'endpoint': ['restart']}
con = _validate_param(keylist, long_short_key_map, mandatory_options_list, optional_keylist,
mutually_exclusive_keys,
param_dict, *args, **kwargs)
out_obj = SHELL_OBJ.handle_input_dict(command_name, con, param_dict)
    # remove dummy field added for view
    if 'dummy' in out_obj:
        out_obj.pop('dummy')
    return out_obj
def configtargets(*args, **kwargs):
'''
@summary:
Use this function to get config pattern targets from Lenovo XClarity Administrator
run this function as
data_dictionary = configtargets( key1 = 'val1', key2 = 'val2', ...)
Where KeyList is as follows
keylist = ['con','id']
@param
The parameters for this command are as follows
id config target id
@example
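        A hypothetical invocation (the id value is illustrative):
        targets = configtargets( con = con1, id = '53')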
'''
global SHELL_OBJ
command_name = sys._getframe().f_code.co_name
param_dict = {}
con = None
long_short_key_map = {'id': 'i'}
keylist = ['con', 'id']
optional_keylist = ['con']
mutually_exclusive_keys = []
mandatory_options_list = {}
con = _validate_param(keylist, long_short_key_map, mandatory_options_list, optional_keylist,
mutually_exclusive_keys,
param_dict, *args, **kwargs)
out_obj = SHELL_OBJ.handle_input_dict(command_name, con, param_dict)
return out_obj
def updatepolicy(*args, **kwargs):
'''
@summary:
        Use this function to read Firmware update Policy from Lenovo XClarity Administrator
run this function as
data_dictionary = updatepolicy( key1 = 'val1', key2 = 'val2', ...)
Where KeyList is as follows
        keylist = ['con', 'subcmd', 'info', 'job', 'uuid', 'policy', 'type']
@param
The parameters for this command are as follows
        subcmd  list, query, assign, status
info Specifies the type of information to return. This can be one of the following values:
FIRMWARE- Returns information about firmware that is applicable to each managed endpoint
RESULTS- Returns persisted compare result for servers to which a compliance policy is assigned
NAMELIST - Returns the available compliance policies
        job     Job ID of assign compliance policy operation
uuid UUID of the device to which you want to assign the compliance policy
policy Policyname, Name of the compliance-policy to be assigned to device
        type    Device type. This can be one of the following values.
CMM - Chassis Management Module
IOSwitch - Flex switch
RACKSWITCH - RackSwitch switch
STORAGE - Lenovo Storage system
SERVER - Compute node or rack server
@example
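        Hypothetical invocations (the policy name and device uuid are illustrative placeholders):
        policy_list = updatepolicy( con = con1, info = 'NAMELIST')
        rep = updatepolicy( con = con1, subcmd = 'assign', policy = 'x86_policy', uuid = 'fc3058cadf8b11d48c9b9b1b1b1b1b57', type = 'SERVER')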
'''
global SHELL_OBJ
command_name = sys._getframe().f_code.co_name
param_dict = {}
con = None
long_short_key_map = {'info': 'i', 'job': 'j',
'uuid': 'u', 'policy': 'p', 'type': 't'}
keylist = ['con', 'subcmd', 'info', 'job', 'uuid', 'policy', 'type']
optional_keylist = ['con', 'info', 'job', 'uuid', 'policy', 'type']
mutually_exclusive_keys = []
mandatory_options_list = {}
con = _validate_param(keylist, long_short_key_map, mandatory_options_list, optional_keylist,
mutually_exclusive_keys,
param_dict, *args, **kwargs)
out_obj = SHELL_OBJ.handle_input_dict(command_name, con, param_dict)
return out_obj
def updaterepo(*args, **kwargs):
'''
@summary:
Use this function to get repository info from Lenovo XClarity Administrator
run this function as
data_dictionary = updaterepo( key1 = 'val1', key2 = 'val2', ...)
Where KeyList is as follows
keylist = ['con', 'subcmd', 'key', 'mt', 'scope', 'fixids', 'type']
@param
The parameters for this command are as follows
subcmd The action to take. This can be one of the following values.
query - Get info using key parameter
read - Reloads the repository files. The clears the update information in cache and reads the update file again from the repository.
refresh - Retrieves information about the latest available firmware updates from the Lenovo Support website,
and stores the information to the firmware-updates repository.
acquire - Downloads the specified firmware updates from Lenovo Support website, and stores the updates to the firmware-updates repository.
delete - Deletes the specified firmware updates from the firmware-updates repository.
                    export - not supported
key Returns the specified type of update. This can be one of the following values.
supportedMts - Returns a list of supported machine types
size - Returns the repository size
lastRefreshed - Returns the timestamp of the last repository refresh
importDir - Returns the import directory for the repository.
publicKeys - Returns the supported signed keys
updates - Returns information about all firmware updates
updatesByMt - Returns information about firmware updates for the specified machine type
updatesByMtByComp - Returns the update component names for the specified machine type
mt comma separated machine types
scope scope of operation
fixids comma separated fixids
        type        filetype for PUT operation
@example
rep = updaterepo(con, "query", k="size")
rep = updaterepo(con, subcmd = "read")
rep = updaterepo(con_lxca, subcmd = "read")
rep = updaterepo(con_lxca, subcmd = "acquire", mt="7903")
'''
global SHELL_OBJ
command_name = sys._getframe().f_code.co_name
param_dict = {}
con = None
long_short_key_map = {'key': 'k', 'mt': 'm',
'scope': 's', 'fixids': 'f', 'type': 't'}
keylist = ['con', 'subcmd', 'key', 'mt', 'scope', 'fixids', 'type']
optional_keylist = ['con', 'key', 'mt', 'scope', 'fixids', 'type']
mutually_exclusive_keys = []
mandatory_options_list = {}
con = _validate_param(keylist, long_short_key_map, mandatory_options_list, optional_keylist,
mutually_exclusive_keys,
param_dict, *args, **kwargs)
out_obj = SHELL_OBJ.handle_input_dict(command_name, con, param_dict)
return out_obj
def updatecomp(*args, **kwargs):
'''
@summary:
Use this function to update firmware of endpoint from Lenovo XClarity Administrator
run this function as
data_dictionary = updatecomp( key1 = 'val1', key2 = 'val2', ...)
Where KeyList is as follows
USAGE:
keylist = ['con','subcmd', 'query','mode','action','cmm','switch','server','storage','dev_list']
@param
The parameters for this command are as follows
subcmd info|apply
query The data to return. This can be one of the following values.
components - Returns a list of devices and components that can be updated.
status - Returns the status and progress of firmware updates. This is the default value
mode Indicates when to activate the update. This can be one of the following values.
                    immediate - Uses Immediate Activation mode when applying firmware updates to the selected endpoints.
                    delayed - Uses Delayed Activation mode when applying firmware updates to the selected endpoints.
                    prioritized - Firmware updates on the baseboard management controller are activated immediately
action The action to take. This can be one of the following values.
apply - Applies the associated firmware to the submitted components.
power - Perform power action on selected endpoint.
cancelApply - Cancels the firmware update request to the selected components.
cmm cmms information
switch switch information
server servers information
storage storages information
dev_list - update all updateable components
For action = apply / applyBundle / cancelApply, Device information should contain following data separated by comma
UUID - UUID of the device
Fixid - Firmware-update ID of the target package to be applied to the component.
If not provided assigned policy would be used.
Component - Component name
For action = power, Device information should contain following data separated by comma
UUID - UUID of the device
powerState - One of the power state values. Possible values per device type are
Server: powerOn, powerOff, powerCycleSoft, powerCycleSoftGraceful, powerOffHardGraceful
Switch: powerOn, powerOff, powerCycleSoft
CMM: reset
Storage:powerOff,powerCycleSoft
@example
Applying firmware update to server
endpoint = "38B1DC62084411E88C7A0A94EF4EC2EF,lnvgy_fw_lxpm_pdl116o-1.40_anyos_noarch,LXPM Diagnostic Software"
rep = updatecomp(con, 'apply', action='apply', mode='immediate', server=endpoint)
Applying firmware update to ThinkSystem SR635 and SR655
endpoint = "<KEY>"
        rep = updatecomp(con, 'apply', action='applyBundle', server=endpoint)
Applying firmware update using dev_list json format
dev_list = {
"DeviceList": [{
"ServerList": [{
"UUID": "38B1DC62084411E88C7A0A94EF4EC2EF",
"Components": [{
"Fixid": "lnvgy_fw_lxpm_pdl116o-1.40_anyos_noarch",
"Component": "LXPM Diagnostic Software"
}]
}]
},
{
"CMMList": []
},
{
"SwitchList": []
},
{
"StorageList": []
}]
}
dev_json = json.dumps(dev_list)
rep = updatecomp(con, 'apply', action='apply', mode='immediate', dev_list=dev_json)
'''
global SHELL_OBJ
command_name = sys._getframe().f_code.co_name
param_dict = {}
con = None
long_short_key_map = {'query': 'q', 'mode': 'm', 'action': 'a', 'cmm': 'c', 'switch': 'w', 'server': 's',
'storage': 't', 'dev_list': 'l'}
keylist = ['con', 'subcmd', 'query', 'mode', 'action', 'cmm',
'switch', 'server', 'storage', 'dev_list']
optional_keylist = ['con', 'query', 'mode', 'action',
'cmm', 'switch', 'server', 'storage', 'dev_list']
mutually_exclusive_keys = []
mandatory_options_list = {}
con = _validate_param(keylist, long_short_key_map, mandatory_options_list, optional_keylist,
mutually_exclusive_keys,
param_dict, *args, **kwargs)
out_obj = SHELL_OBJ.handle_input_dict(command_name, con, param_dict)
return out_obj
def users(*args, **kwargs):
'''
@summary:
Use this function to get users data from Lenovo XClarity Administrator
run this function as
data_dictionary = users( key1 = 'val1', key2 = 'val2', ...)
Where KeyList is as follows
keylist = ['con','id']
@param
The parameters for this command are as follows
id unique ID of the user to be retrieved
@example
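        Hypothetical invocations (the id value is illustrative):
        user_list = users( con = con1)
        one_user = users( con = con1, id = '1')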
'''
global SHELL_OBJ
command_name = sys._getframe().f_code.co_name
param_dict = {}
con = None
long_short_key_map = {'id': 'i'}
keylist = ['con', 'id']
optional_keylist = ['con', 'id']
mutually_exclusive_keys = []
mandatory_options_list = {}
con = _validate_param(keylist, long_short_key_map, mandatory_options_list, optional_keylist,
mutually_exclusive_keys,
param_dict, *args, **kwargs)
out_obj = SHELL_OBJ.handle_input_dict(command_name, con, param_dict)
return out_obj
def ffdc(*args, **kwargs):
'''
@summary:
Use this function to Collect and export specific endpoint data
from Lenovo XClarity Administrator
run this function as
data_dictionary = ffdc( key1 = 'val1', key2 = 'val2', ...)
Where KeyList is as follows
keylist = ['con','uuid']
@param
The parameters for this command are as follows
uuid UUID of the target endpoint this is manadatory parameter
@example
        ffdc(con = lxca_con, uuid='UUID of endpoint')
'''
global SHELL_OBJ
command_name = sys._getframe().f_code.co_name
param_dict = {}
con = None
long_short_key_map = {'uuid': 'u'}
keylist = ['con', 'uuid']
optional_keylist = ['con', 'uuid']
mutually_exclusive_keys = []
mandatory_options_list = {}
con = _validate_param(keylist, long_short_key_map, mandatory_options_list, optional_keylist,
mutually_exclusive_keys,
param_dict, *args, **kwargs)
out_obj = SHELL_OBJ.handle_input_dict(command_name, con, param_dict)
return out_obj
def log(*args, **kwargs):
'''
@summary:
Use this function to get Lenovo XClarity Administrator LOG information
run this function as
data_dictionary = log( key1 = 'val1', key2 = 'val2', ...)
Where KeyList is as follows
keylist = ['con','lvl']
@param
The parameters for this command are as follows
lvl log level to be set
Possible Log Levels, Please use following values to set desired log level.
DEBUG: Detailed information, typically of interest only when diagnosing problems.
INFO: Confirmation that things are working as expected.
WARNING: An indication that something unexpected happened, or indicative of some problem in the near future.
ERROR: Due to a more serious problem, the software has not been able to perform some function.
CRITICAL: A serious error, indicating that the program itself may be unable to continue running.
@example
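        A hypothetical invocation using one of the levels listed above:
        log( con = con1, lvl = 'DEBUG')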
'''
global SHELL_OBJ
command_name = sys._getframe().f_code.co_name
param_dict = {}
con = None
long_short_key_map = {'lvl': 'l'}
keylist = ['con', 'lvl']
optional_keylist = ['con', 'lvl']
mutually_exclusive_keys = []
mandatory_options_list = {}
con = _validate_param(keylist, long_short_key_map, mandatory_options_list, optional_keylist,
mutually_exclusive_keys,
param_dict, *args, **kwargs)
out_obj = SHELL_OBJ.handle_input_dict(command_name, con, param_dict)
return out_obj
def lxcalog(*args, **kwargs):
'''
@summary:
Use this function to get Lenovo XClarity Administrator LOG information
run this function as
data_dictionary = lxcalog( key1 = 'val1', key2 = 'val2', ...)
Where KeyList is as follows
keylist = ['con','filter']
@param
The parameters for this command are as follows
filter filter for the event
@example
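        A hypothetical invocation (the filter value is illustrative):
        event_list = lxcalog( con = con1, filter = 'ERROR')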
'''
global SHELL_OBJ
command_name = sys._getframe().f_code.co_name
param_dict = {}
con = None
long_short_key_map = {'filter': 'f'}
keylist = ['con', 'filter']
optional_keylist = ['con', 'filter']
mutually_exclusive_keys = []
mandatory_options_list = {}
con = _validate_param(keylist, long_short_key_map, mandatory_options_list, optional_keylist,
mutually_exclusive_keys,
param_dict, *args, **kwargs)
out_obj = SHELL_OBJ.handle_input_dict(command_name, con, param_dict)
return out_obj
def jobs(*args, **kwargs):
'''
@summary:
Use this function to get jobs information from Lenovo XClarity Administrator
run this function as
data_dictionary = jobs( key1 = 'val1', key2 = 'val2', ...)
Where KeyList is as follows
keylist = ['con','id','uuid','state','cancel','delete']
@param
The parameters for this command are as follows
id= job id
uuid= uuid of endpoint for which jobs should be retrieved
state= job state to retrieve jobs in specified state.
The state can be one of the following
Pending
Running
Complete
Cancelled
Running_With_Errors
Cancelled_With_Errors
Stopped_With_Error
Interrupted
cancel= cancel job of specified id
delete= delete job of specified id
@example
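        Hypothetical invocations using the states listed above (the job id is illustrative):
        job_list = jobs( con = con1, state = 'Complete')
        jobs( con = con1, cancel = '12')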
'''
global SHELL_OBJ
command_name = sys._getframe().f_code.co_name
param_dict = {}
con = None
long_short_key_map = {'id': 'i', 'uuid': 'u',
'state': 's', 'cancel': 'c', 'delete': 'd'}
keylist = ['con', 'id', 'uuid', 'state', 'cancel', 'delete']
optional_keylist = ['con', 'id', 'uuid', 'state', 'cancel', 'delete']
mutually_exclusive_keys = ['id', 'cancel', 'delete']
mandatory_options_list = {}
con = _validate_param(keylist, long_short_key_map, mandatory_options_list, optional_keylist,
mutually_exclusive_keys,
param_dict, *args, **kwargs)
out_obj = SHELL_OBJ.handle_input_dict(command_name, con, param_dict)
return out_obj
def manifests(*args, **kwargs):
'''
@summary:
        Use this function to send solution manifest to and retrieve manifests from Lenovo XClarity Administrator
run this function as
data_dictionary = manifests( conn_handle, input_args_dictionary{key,value} )
Where KeyList is as follows
        keylist = ['id','file']
@param
The parameters for this command are as follows
id= solution id
file= path to manifest file
@example
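        A hypothetical invocation (the solution id and file path are illustrative placeholders):
        rep = manifests( con = con1, id = '123', file = '/path/to/manifest.json')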
'''
global SHELL_OBJ
command_name = sys._getframe().f_code.co_name
param_dict = {}
con = None
long_short_key_map = {'id': 'i', 'file': 'f'}
keylist = ['con', 'id', 'file']
optional_keylist = ['con', 'file']
mutually_exclusive_keys = []
mandatory_options_list = {}
con = _validate_param(keylist, long_short_key_map, mandatory_options_list, optional_keylist,
mutually_exclusive_keys,
param_dict, *args, **kwargs)
out_obj = SHELL_OBJ.handle_input_dict(command_name, con, param_dict)
# return out_obj
return True
def tasks(*args, **kwargs):
'''
@summary:
Use this function to get tasks information
run this function as
        result = tasks( con, data_dictionary)
        Where data_dictionary contains input arguments as follows
keylist = ['jobUID','children','action', 'updateList']
@param
The parameters for this command are as follows
con Connection Object to Lenovo XClarity Administrator
jobUID uuid of job
children result will include children if True
action cancel/update/create
        updateList  required for update action, string containing a list of updates
@example
update_list = [{"jobUID":"9","percentage":50}]
str_u = str(update_list)
rep = tasks(con_lxca, a = 'update', u = str_u)
'''
global SHELL_OBJ
    command_name = sys._getframe().f_code.co_name
    param_dict = {}
    con = None
long_short_key_map = {'jobUID': 'j', 'children': 'c',
'action': 'a', 'updateList': 'u', 'template': 't'}
keylist = ['con', 'jobUID', 'children', 'action', 'updateList','template']
optional_keylist = ['con', 'jobUID', 'children', 'action', 'updateList','template']
mutually_exclusive_keys = []
mandatory_options_list = {}
con = _validate_param(keylist, long_short_key_map, mandatory_options_list, optional_keylist,
mutually_exclusive_keys,
param_dict, *args, **kwargs)
out_obj = SHELL_OBJ.handle_input_dict(command_name, con, param_dict)
return out_obj
def resourcegroups(*args, **kwargs):
'''
@summary:
Use this function to Create, modify, delete or read resource group from Lenovo XClarity Administrator
run this function as
data_dictionary = resourcegroups( con_handle,uuid, name, desc, type, solutionVPD, members, criteria )
Where KeyList is as follows
keylist = ['uuid', 'name','description','type','solutionVPD','members','criteria']
@param
The parameters for this command are as follows
uuid= UUID of already created Group
name= Name of Resource Group
desc= Description of Resource Group
type= Type of Resource Group. <{"static", "dynamic", "solution"}>,
solutionVPD": { "id": <UUID string>,
"machineType": <machine type string>,
"model": <model string>,
"serialNumber": <serial number string>,
"manufacturer": <string>
},
"members": [ "uri","uri",....],
"criteria":[]
@example
list all resourcegroups
rep = resourcegroups(con_lxca, 'list')
list criteria properties for dynamic groups
rep = resourcegroups(con_lxca, 'criteriaproperties')
For creating dynamic group
criteria = {
u'parent': u'root',
u'value': None,
u'criteria': [{
u'operator': u'contains',
u'value': u'test',
u'property': u'hostname',
u'id': u'1001',
u'parent': u'lxca_customUI_resourceViews_allGroupsPage_editGroupDynamicPage_2'
}
],
u'operator': u'AND',
u'property': None,
u'id': u'root'
}
criteria_json = json.dumps(criteria)
print criteria_json
rep = resourcegroups(con_lxca, 'create', n="TEST_DYNAMIC", d="TRIAL_GROUP", t='dynamic', c=criteria_json)
Updating dynamic group
rep = resourcegroups(con_lxca, 'update', u="5C5AB42D94C6A719BEF2A375", n="R1_GROUP", d="TRIAL_GROUP modified", t='dynamic', c=criteria_json)
Delete resouregroup
rep = resourcegroups(con_lxca, 'delete', u="5C5BC6EA90F54D074FC7BC0D")
Create solution group supported for api only for uhm
solutionVPD = {
'id': '59A54997C18DCF0594A8CCD1',
'machineType': 'TESTMTM',
'model': 'TESTMODEL',
'serialNumber': 'TESTSERIAL',
'manufacturer': 'LENOVO'}
members = []
criteria = []
rep = resourcegroups(con_lxca, 'create', n="TEST_solution", d="Test_GROUP", t='solution', s=solutionVPD, m=members, c=criteria)
'''
global SHELL_OBJ
con = None
param_dict = {}
command_name = sys._getframe().f_code.co_name
long_short_key_map = {'uuid': 'u', 'name': 'n', 'description': 'd', 'type': 't', 'solutionVPD': 's',
'members': 'm', 'criteria': 'c'}
keylist = ['con', 'subcmd', 'uuid', 'name', 'description',
'type', 'solutionVPD', 'members', 'criteria']
optional_keylist = ['con', 'uuid', 'name', 'description',
'type', 'solutionVPD', 'members', 'criteria']
mutually_exclusive_keys = []
mandatory_options_list = {'uuid': [], 'name': ['type']}
con = _validate_param(keylist, long_short_key_map, mandatory_options_list, optional_keylist, mutually_exclusive_keys,
param_dict, *args, **kwargs)
    LOGGER.debug("resourcegroups %s" % str(param_dict))
if 'type' in param_dict:
if 'solution' in param_dict['type']:
out_obj = SHELL_OBJ.handle_input_dict(command_name, con, param_dict, False)
else:
out_obj = SHELL_OBJ.handle_input_dict(command_name, con, param_dict)
else:
out_obj = SHELL_OBJ.handle_input_dict(command_name, con, param_dict)
return out_obj
def _validate_param(keylist, long_short_key_map, mandatory_options_list, optional_keylist,
mutually_exclusive_keys, param_dict, *args, **kwargs):
'''
    this function creates param_dict and con from args and kwargs; param_dict will have only long options as keys,
    it converts short option keys to their long option equivalents and finally validates the parameters
    :param args: positional arguments, consumed in keylist order
:param keylist: keylist of name of fields
:param mandatory_options_list:
:param optional_keylist:
:param mutually_exclusive_keys:
:param param_dict: append to param_dict
:return: connection object
'''
arglist = list(args)
arglist = arglist[::-1]
con = None
for key in keylist:
short_key = long_short_key_map.get(key)
if (key in list(kwargs.keys())):
param_dict[key] = kwargs[key]
elif key in param_dict:
continue
elif short_key and (short_key in list(kwargs.keys())):
param_dict[key] = kwargs[short_key]
elif len(arglist) >= 1:
value = arglist.pop()
            if value is not None:
param_dict[key] = value
elif key not in optional_keylist:
LOGGER.error(" Invalid Input args %s is not in optional list %s" % (
key, str(mandatory_options_list)))
raise ValueError("Invalid Input Arguments")
if key == 'con':
if key in param_dict:
con = param_dict.pop(key)
# if not con:
# raise AttributeError("Invalid command invocation: Connection Object missing.")
me_key_found = False
for me_key in list(param_dict.keys()):
# Checking mandatory option_list presence
if me_key in list(mandatory_options_list.keys()):
if not set(mandatory_options_list[me_key]).issubset(set(param_dict.keys())):
LOGGER.error(" Invalid command invocation %s of mandatory list %s is not in arguments parovided" % (
me_key, str(mandatory_options_list)))
raise AttributeError("Invalid command invocation")
        # Checking mutually exclusive key presence
if me_key in mutually_exclusive_keys:
if me_key_found:
LOGGER.error(" Invalid command invocation %s of mutual exclusive list %s " % (
me_key, str(mutually_exclusive_keys)))
raise AttributeError("Invalid command invocation")
me_key_found = True
if not set(keylist + list(long_short_key_map.values())).issuperset(set(kwargs.keys())):
LOGGER.error(" Invalid Input args: %s unsupported argument passed"
                     % list(set(kwargs.keys()).difference(set(keylist + list(long_short_key_map.values())))))
raise ValueError("Invalid Input Arguments")
return con
def osimages(*args, **kwargs):
'''
@summary:
Use this function to retrieve information about, delete, and import OS images,
OS-image profiles, device driver, and boot-options files.
data_dictionary = osimages(input_args, key=values )
Where KeyList is as follows
keylist = [con, subcmd, o]
@param
subcmd
list Retrieve information about all osimages
globalsettings Retrieve or modify global operating-system deployment
                            settings. Global settings serve as default settings
when operating systems are deployed.
hostsettings Retrieve information about the network and storage
settings for all servers, and create or modify the
network and storage settings for one or more servers
hostplatforms Retrieve information about the host platforms and
deploy operating-system images to the host platforms
as a job
import Import OS images and scripts from remote server to
LXCA
remotefileservers Retrieve information about all remote file-server
profiles or to create or modify a remote file-server
profile
delete Delete osimages from LXCA
@example
list all osimages info
osimages(con, subcmd='list')
delete osimages with ids
osimages(con, subcmd='delete', id='i1,i2')
rep = osimages(con_lxca, 'delete', i='20190131054310_trail.py')
List all globalsettings
osimages(con, subcmd = 'globalsettings')
Set Linux default passw0rd using globalsettings
change_linux_password = {
"activeDirectory": {
"allDomains": [],
"defaultDomain": "labs.lenovo.com"
},
"credentials": [{
"name": "root",
"password": "<PASSWORD>",
"passwordChanged": True,
"type": "LINUX"
}, {
"type": "WINDOWS",
"name": "Administrator",
"password": <PASSWORD>,
"passwordChanged": False
}
],
"ipAssignment": "dhcpv4",
"isVLANMode": False,
"licenseKeys": {
"win2012r1": {
"dataCenterLicenseKey": "",
"standardLicenseKey": "",
},
"win2012r2": {
"dataCenterLicenseKey": "",
"standardLicenseKey": ""
},
"win2016r1": {
"dataCenterLicenseKey": "",
"standardLicenseKey": ""
},
"win2019r1": {
"dataCenterLicenseKey": "",
"standardLicenseKey": ""
}
}
}
json_string = json.dumps(change_linux_password)
print json_string
rep = osimages(con_lxca, subcmd = 'globalsettings', osimages_dict = json_string)
Remote file server list
rep = osimages(con_lxca, subcmd = 'remotefileservers')
Create remote file server entry for ftp server
rep = osimages(con_lxca, subcmd = 'remotefileservers',
osimages_dict ='{"username":"guest", "password":"<PASSWORD>",
"protocol":"FTP", "port": 21, "address":"10.243.2.207", "displayName": "new_ftp_207" }')
Update remote file server
rep = osimages(con_lxca, subcmd = 'remotefileservers',
osimages_dict ='{"putid": "1", "protocol":"FTP", "port": 21,
"address":"10.243.2.207", "displayName": "new_ftp_207" }')
Delete remote file server
rep = osimages(con_lxca, subcmd = 'remotefileservers', osimages_dict ='{"deleteid": "1"}')
Import local files of imagetype (UNATTEND, CUSTOM_CONFIG, SCRIPT, OS)
rep = osimages(con_lxca, subcmd='import', imagetype='UNATTEND')
print rep
file_dict = { "jobId":rep["jobId"], "imageName":"SLES", "os":"sles", "description":"SLES_config_file", "file": "/home/naval/sles_unattended.xml" }
rep = osimages(con_lxca, subcmd='import', imagetype='UNATTEND', osimages_dict = json.dumps(file_dict))
import BUNDLE and BUNDLESIG done with single jobid and imagename should be same as basename of files
rep = osimages(con_lxca, subcmd='import', imagetype='BUNDLE')
file_dict = {"jobId":rep["jobId"], "imageName":"bundle_win2016_20180926153236.zip", "file": "/home/naval/osimage_test/bundle_win2016_20180926153236.zip"}
rep1 = osimages(con_lxca, subcmd='import', imagetype='BUNDLE', osimages_dict = json.dumps(file_dict))
file_dict = { "jobId":rep["jobId"], "imageName":"bundle_win2016_20180926153236.zip.asc", "file": "/home/naval/osimage_test/bundle_win2016_20180926153236.zip.asc"}
rep2 = osimages(con_lxca, subcmd='import', imagetype='BUNDLESIG', osimages_dict = json.dumps(file_dict))
get all hostSettings
rep = osimages(con_lxca, 'hostsettings')
create hostsettings entry
host_settings_dict = {u'hosts': [{u'storageSettings': {u'targetDevice': u'localdisk'}, u'uuid': u'A1445C6FDBAA11E6A87F86E06E3AFFFF', u'networkSettings': {u'dns2': u'', u'dns1': u'10.240.0.10', u'hostname': u'nodeundefined', u'vlanId': 0, u'selectedMAC': u'AUTO', u'gateway': u'10.243.0.1', u'subnetMask': u'255.255.240.0', u'mtu': 1500, u'prefixLength': 64, u'ipAddress': u'10.243.9.79'}}, {u'storageSettings': {u'targetDevice': u'localdisk'}, u'uuid': u'A122FB03FF4011E68D9BA32E3A66DDDD', u'networkSettings': {u'dns2': u'', u'dns1': u'10.240.0.10', u'hostname': u'proton1', u'vlanId': 0, u'selectedMAC': u'AUTO', u'gateway': u'10.243.0.1', u'subnetMask': u'255.255.240.0', u'mtu': 1500, u'prefixLength': 64, u'ipAddress': u'10.243.9.87'}}]}
host_settings_json = json.dumps(host_settings_dict)
rep = osimages(con_lxca, 'hostsettings', action='create', osimages_dict = host_settings_json)
update hostSettings entry
host_settings_dict = {u'hosts': [{u'storageSettings': {u'targetDevice': u'localdisk'}, u'uuid': u'A1445C6FDBAA11E6A87F86E06E3AFFFF', u'networkSettings': {u'dns2': u'', u'dns1': u'10.240.0.10', u'hostname': u'nodeundefined', u'vlanId': 0, u'selectedMAC': u'AUTO', u'gateway': u'10.243.0.1', u'subnetMask': u'255.255.240.0', u'mtu': 1500, u'prefixLength': 64, u'ipAddress': u'10.243.25.25'}}, {u'storageSettings': {u'targetDevice': u'localdisk'}, u'uuid': u'A122FB03FF4011E68D9BA32E3A66DDDD', u'networkSettings': {u'dns2': u'', u'dns1': u'10.240.0.10', u'hostname': u'proton1', u'vlanId': 0, u'selectedMAC': u'AUTO', u'gateway': u'10.243.0.1', u'subnetMask': u'255.255.240.0', u'mtu': 1500, u'prefixLength': 64, u'ipAddress': u'10.243.26.26'}}]}
host_settings_json = json.dumps(host_settings_dict)
rep = osimages(con_lxca, 'hostsettings', action='update', osimages_dict = host_settings_json)
'''
global SHELL_OBJ
# #con = None
# param_dict = {}
# command_name = sys._getframe().f_code.co_name
#
# # con = kwargs.get('con')
# # if not con:
# # raise ValueError("Invalid Input Arguments")
#
# logger.info(" osimages got kwargs %s " % str(kwargs))
# if args:
# kwargs['osimages_info'] = args[0]
# #param_dict = (args, kwargs)
# logger.info(" osimages got param_dict %s " % str(kwargs))
# # handle_input_dict only takes param_dict as input argument
# ch = shell_obj.handle_input_dict(command_name, con, kwargs)
# return ch
con = None
    param_dict = kwargs
kwargs = {} # this is required to avoid invalid argument error in _validate_param
command_name = sys._getframe().f_code.co_name
long_short_key_map = {}
keylist = ['con', 'subcmd']
optional_keylist = ['con']
mutually_exclusive_keys = []
mandatory_options_list = {}
con = _validate_param(keylist, long_short_key_map, mandatory_options_list, optional_keylist,
mutually_exclusive_keys,
param_dict, *args, **kwargs)
out_obj = SHELL_OBJ.handle_input_dict(command_name, con, param_dict)
return out_obj
def managementserver(*args, **kwargs):
'''
@summary:
Use this function to get repository info from Lenovo XClarity Administrator
run this function as
data_dictionary = managementserver( key1 = 'val1', key2 = 'val2', ...)
Where KeyList is as follows
keylist = ['con', 'subcmd', 'key', 'fixids', 'type', 'action', 'files','jobid']
@param
The parameters for this command are as follows
subcmd
query Retrieve information about all updates in the
management-server updates repository
query_fixids Retrieve information or the readme or change history
file for a specific update in the management-server
updates repository
apply install a management-server update.
acquire Downloads the specified firmware updates from Lenovo
XClarity Support website, and stores the updates to
the updates repository
refresh Retrieves information about the latest available
firmware updates from the Lenovo XClarity Support
website, and stores the information to the updates
repository
delete Deletes the specified fixids - removeMetadata not supported
import Import files to management server
key Returns the specified type of update. This can be one of the following values.
all. Returns all information. This is the default value.
currentVersion. Returns the current version of Lenovo XClarity Administrator.
history. Returns the history of management-server updates.
importDir. Returns the directory for the management-server updates repository.
size. Returns the repository size (in bytes).
updates. Returns information about all updates packages.
updatedDate. Returns the date when the last update was performed.
fixids comma separated fixids
type Type for Get with fixids
changeHistory. Returns the change-history file for the specified management-server update.
readme. Returns the readme file for the specified management-server update
jobid jobid for import
files files to be imported with fullpath and comma separated
@example
TO import files
rep = managementserver(con_lxca, subcmd='import', files='/home/naval/updates/updates/lnvgy_sw_lxca_thinksystemrepo1-1.3.2_anyos_noarch.txt')
rep = managementserver(con_lxca, subcmd='import', j=rep['jobid'], files='/home/naval/updates/updates/lnvgy_sw_lxca_thinksystemrepo1-1.3.2_anyos_noarch.txt')
'''
global SHELL_OBJ
command_name = sys._getframe().f_code.co_name
param_dict = {}
con = None
    # some parameters don't have short options
long_short_key_map = {'key': 'k', 'fixids': 'f',
'type': 't', 'jobid': 'j'}
keylist = ['con', 'subcmd', 'key', 'fixids', 'type', 'files', 'jobid']
optional_keylist = ['con', 'key', 'fixids',
'type', 'files', 'jobid']
mutually_exclusive_keys = []
mandatory_options_list = {}
con = _validate_param(keylist, long_short_key_map, mandatory_options_list, optional_keylist,
mutually_exclusive_keys,
param_dict, *args, **kwargs)
out_obj = SHELL_OBJ.handle_input_dict(command_name, con, param_dict)
    # remove dummy field added for view
if 'dummy' in out_obj:
out_obj.pop('dummy')
return out_obj
def rules(*args, **kwargs):
'''
@summary:
        Use this function to get and set compliance rules on Lenovo XClarity Administrator
        run this function as
        data_dictionary = rules( key1 = 'val1', key2 = 'val2', ...)
Where KeyList is as follows
keylist = ['con', 'id', 'rule']
@param
The parameters for this command are as follows
@example
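        A hypothetical invocation listing all compliance rules:
        rule_list = rules( con = con1)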
'''
global SHELL_OBJ
command_name = sys._getframe().f_code.co_name
param_dict = {}
con = None
    # some parameters don't have short options
long_short_key_map = {'id': 'i', 'rule': 'r'}
keylist = ['con', 'id', 'rule']
optional_keylist = ['con', 'id', 'rule']
mutually_exclusive_keys = ['id', 'rule']
mandatory_options_list = {}
con = _validate_param(keylist, long_short_key_map, mandatory_options_list, optional_keylist,
mutually_exclusive_keys,
param_dict, *args, **kwargs)
out_obj = SHELL_OBJ.handle_input_dict(command_name, con, param_dict, False)
return out_obj
def compositeResults(*args, **kwargs):
'''
@summary:
        Use this function to get composite results of compliance rules from Lenovo XClarity Administrator
        run this function as
        data_dictionary = compositeResults( key1 = 'val1', key2 = 'val2', ...)
        Where KeyList is as follows
        keylist = ['con', 'id', 'query_solutionGroups', 'solutionGroups', 'targetResources', 'all_rules']
@param
The parameters for this command are as follows
@example
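    Illustrative usage (inferred from the key list above; con1 is assumed to
    be an existing connection object):
    rep = compositeResults(con1)
    rep = compositeResults(con1, id='1')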
'''
global SHELL_OBJ
command_name = sys._getframe().f_code.co_name
param_dict = {}
con = None
    # some parameters don't have short options
long_short_key_map = {'id': 'i', 'query_solutionGroups': 'q',
'solutionGroups': 's', 'targetResources': 't', 'all_rules': 'a'}
keylist = ['con', 'id', 'query_solutionGroups',
'solutionGroups', 'targetResources', 'all_rules']
optional_keylist = ['con', 'id', 'query_solutionGroups',
'solutionGroups', 'targetResources', 'all_rules']
mutually_exclusive_keys = ['id', 'query_solutionGroups',
'solutionGroups', 'targetResources', 'all_rules']
mandatory_options_list = {}
con = _validate_param(keylist, long_short_key_map, mandatory_options_list, optional_keylist,
mutually_exclusive_keys,
param_dict, *args, **kwargs)
out_obj = SHELL_OBJ.handle_input_dict(command_name, con, param_dict, False)
return out_obj
def storedcredentials(*args, **kwargs):
'''
@summary:
    Use this function to get, set, and delete stored credentials on Lenovo XClarity Administrator
run this function as
data_dictionary = storedcredentials( key1 = 'val1', key2 = 'val2', ...)
Where KeyList is as follows
keylist = ['con', 'id', 'user_name', 'description', 'password', 'delete_id']
@param
The parameters for this command are as follows
id Stored credential id of stored credential
user_name user name
password password
description description of user credential
delete_id id to be deleted
@example
rep = storedcredentials(con1)
rep = storedcredentials(con1, id='955')
rep = storedcredentials(con1,u='admin1', p='admin1', d='description of stored credentials')
rep = storedcredentials(con1, delete_id='954')
rep = storedcredentials(con1,i='955', u='admin1', p='admin1', d='description of stored credentials for admin')
'''
global SHELL_OBJ
command_name = sys._getframe().f_code.co_name
param_dict = {}
con = None
    # some parameters don't have short options
long_short_key_map = {'id': 'i', 'user_name': 'u',
'description': 'd', 'password': 'p'}
keylist = ['con', 'id', 'user_name',
'description', 'password', 'delete_id']
optional_keylist = ['con', 'id', 'user_name',
'description', 'password', 'delete_id']
mutually_exclusive_keys = ['id', 'delete_id']
mandatory_options_list = {}
con = _validate_param(keylist, long_short_key_map, mandatory_options_list, optional_keylist,
mutually_exclusive_keys,
param_dict, *args, **kwargs)
out_obj = SHELL_OBJ.handle_input_dict(command_name, con, param_dict)
return out_obj
def license(*args, **kwargs):
'''
@summary:
Use this function to retrieve information about warnings regarding non-compliance of installed Lenovo XClarity Administrator license
run this function as
license(con)
@param
The parameter for this command is as follows
con Connection Object to Lenovo XClarity Administrator
@example
license(con)
'''
global SHELL_OBJ
command_name = sys._getframe().f_code.co_name
param_dict = {}
con = None
long_short_key_map = {}
keylist = ['con']
optional_keylist = ['con']
mutually_exclusive_keys = []
mandatory_options_list = {}
con = _validate_param(keylist, long_short_key_map, mandatory_options_list, optional_keylist,
mutually_exclusive_keys,
param_dict, *args, **kwargs)
out_obj = SHELL_OBJ.handle_input_dict(command_name, con, param_dict, False)
return out_obj
|
[
"pylxca.pylxca_cmd.lxca_ishell.InteractiveShell",
"code.interact",
"sys._getframe",
"logging.getLogger"
] |
[((452, 479), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (469, 479), False, 'import logging\n'), ((500, 530), 'pylxca.pylxca_cmd.lxca_ishell.InteractiveShell', 'lxca_ishell.InteractiveShell', ([], {}), '()\n', (528, 530), False, 'from pylxca.pylxca_cmd import lxca_ishell\n'), ((1878, 2071), 'code.interact', 'code.interact', (['(\'Interactive Python Shell for Lenovo XClarity Administrator v\' +\n __version__ +\n """\nType "dir()" or "help(lxca command object)" for more information.""")'], {'local': 'ns'}), '(\n \'Interactive Python Shell for Lenovo XClarity Administrator v\' +\n __version__ +\n """\nType "dir()" or "help(lxca command object)" for more information.""",\n local=ns)\n', (1891, 2071), False, 'import code\n'), ((2959, 2974), 'sys._getframe', 'sys._getframe', ([], {}), '()\n', (2972, 2974), False, 'import sys\n'), ((4106, 4121), 'sys._getframe', 'sys._getframe', ([], {}), '()\n', (4119, 4121), False, 'import sys\n'), ((5200, 5215), 'sys._getframe', 'sys._getframe', ([], {}), '()\n', (5213, 5215), False, 'import sys\n'), ((6419, 6434), 'sys._getframe', 'sys._getframe', ([], {}), '()\n', (6432, 6434), False, 'import sys\n'), ((7582, 7597), 'sys._getframe', 'sys._getframe', ([], {}), '()\n', (7595, 7597), False, 'import sys\n'), ((8702, 8717), 'sys._getframe', 'sys._getframe', ([], {}), '()\n', (8715, 8717), False, 'import sys\n'), ((10209, 10224), 'sys._getframe', 'sys._getframe', ([], {}), '()\n', (10222, 10224), False, 'import sys\n'), ((11572, 11587), 'sys._getframe', 'sys._getframe', ([], {}), '()\n', (11585, 11587), False, 'import sys\n'), ((12793, 12808), 'sys._getframe', 'sys._getframe', ([], {}), '()\n', (12806, 12808), False, 'import sys\n'), ((13927, 13942), 'sys._getframe', 'sys._getframe', ([], {}), '()\n', (13940, 13942), False, 'import sys\n'), ((15165, 15180), 'sys._getframe', 'sys._getframe', ([], {}), '()\n', (15178, 15180), False, 'import sys\n'), ((17782, 17797), 'sys._getframe', 'sys._getframe', ([], {}), '()\n', (17795, 17797), False, 'import sys\n'), ((20002, 20017), 'sys._getframe', 'sys._getframe', ([], {}), '()\n', (20015, 20017), False, 'import sys\n'), ((23400, 23415), 'sys._getframe', 'sys._getframe', ([], {}), '()\n', (23413, 23415), False, 'import sys\n'), ((25686, 25701), 'sys._getframe', 'sys._getframe', ([], {}), '()\n', (25699, 25701), False, 'import sys\n'), ((27183, 27198), 'sys._getframe', 'sys._getframe', ([], {}), '()\n', (27196, 27198), False, 'import sys\n'), ((29143, 29158), 'sys._getframe', 'sys._getframe', ([], {}), '()\n', (29156, 29158), False, 'import sys\n'), ((32163, 32178), 'sys._getframe', 'sys._getframe', ([], {}), '()\n', (32176, 32178), False, 'import sys\n'), ((36375, 36390), 'sys._getframe', 'sys._getframe', ([], {}), '()\n', (36388, 36390), False, 'import sys\n'), ((37683, 37698), 'sys._getframe', 'sys._getframe', ([], {}), '()\n', (37696, 37698), False, 'import sys\n'), ((38775, 38790), 'sys._getframe', 'sys._getframe', ([], {}), '()\n', (38788, 38790), False, 'import sys\n'), ((40330, 40345), 'sys._getframe', 'sys._getframe', ([], {}), '()\n', (40343, 40345), False, 'import sys\n'), ((41312, 41327), 'sys._getframe', 'sys._getframe', ([], {}), '()\n', (41325, 41327), False, 'import sys\n'), ((42899, 42914), 'sys._getframe', 'sys._getframe', ([], {}), '()\n', (42912, 42914), False, 'import sys\n'), ((44144, 44159), 'sys._getframe', 'sys._getframe', ([], {}), '()\n', (44157, 44159), False, 'import sys\n'), ((45562, 45577), 'sys._getframe', 'sys._getframe', ([], {}), '()\n', (45575, 45577), 
False, 'import sys\n'), ((48941, 48956), 'sys._getframe', 'sys._getframe', ([], {}), '()\n', (48954, 48956), False, 'import sys\n'), ((60328, 60343), 'sys._getframe', 'sys._getframe', ([], {}), '()\n', (60341, 60343), False, 'import sys\n'), ((63701, 63716), 'sys._getframe', 'sys._getframe', ([], {}), '()\n', (63714, 63716), False, 'import sys\n'), ((64982, 64997), 'sys._getframe', 'sys._getframe', ([], {}), '()\n', (64995, 64997), False, 'import sys\n'), ((66114, 66129), 'sys._getframe', 'sys._getframe', ([], {}), '()\n', (66127, 66129), False, 'import sys\n'), ((68133, 68148), 'sys._getframe', 'sys._getframe', ([], {}), '()\n', (68146, 68148), False, 'import sys\n'), ((69387, 69402), 'sys._getframe', 'sys._getframe', ([], {}), '()\n', (69400, 69402), False, 'import sys\n')]
|
import discord
from discord.ext import commands
import asyncio
# configuration files
import configparser
import sys, traceback
# grammar check
import language_check
grammar_en = language_check.LanguageTool('en-US')
# https://www.geeksforgeeks.org/get-synonymsantonyms-nltk-wordnet-python/
import nltk
from nltk.corpus import wordnet
import re
from PIL import ImageFont, ImageDraw, Image
# load configuration to get around hard coded tokens
config = configparser.ConfigParser()
with open('config.ini') as config_file:
config.read_file(config_file)
# startup stuff for debugging
print('using discordpy version', discord.__version__)
client = discord.Client()
@client.event
async def on_ready():
# print some stuff when the bot goes online
print('Logged in ' + str(client.user.name) + ' - ' +
str(client.user.id) + '\n' + 'Version ' + str(discord.__version__))
def handle_grammar(message):
matches = grammar_en.check(message.content)
sentences = []
for match in matches:
bad_word = message.content[match.offset:match.offset + match.errorlength]
if(match.ruleId == 'UPPERCASE_SENTENCE_START'):
sentences.append('Wow. I can\'t believe you didn\'t start your sentence with a capital letter.')
elif(match.ruleId == 'MORFOLOGIK_RULE_EN_US'): # spelling errors
if(bad_word[0] == '@'):
continue # don't over react when the @mention someone!
sentences.append('Ugh. The word \'' + bad_word + '\' is not spelled correctly.')
elif(match.ruleId == 'IT_VBZ'): # this are a problem <- fixes that sort of issue
sentences.append('Subject verb disagreement! Did you mean to use \'' + match.replacements[0] + '\' instead? (hint: you did)')
elif(match.ruleId in ['IT_IS', 'EN_CONTRACTION_SPELLING']): # its when should be it's and that when should be that's
sentences.append('Missing an apostrophe here: \'' + bad_word + '\'. Fix it.')
elif(match.ruleId == 'PROGRESSIVE_VERBS'): # ???
            sentences.append('This usage is not usually used in the progressive form. ' +
'I know you don\'t know what that means so here it is: http://lmgtfy.com/?q=progressive+form')
elif(match.ruleId == 'PERS_PRONOUN_AGREEMENT_SENT_START'): # I are should be I am
sentences.append('really? That verb does not agree with \'' + bad_word + '\'. You clearly meant \'' + match.replacements[0] + '\'')
else:
sentences.append([match])
punctuation = ['.', '!', '?']
# looks like the following:
# ['This is a sentence' '.' 'Okay' '?' '']
    split = re.split(r'(\.|\!|\?)', message.content)
if(split[-1] == ''):
split = split[:-1] # chop off last empty if ending in punctuation
img = ''
if(split[-1] not in punctuation):
smaller_text = split[-1][-15:]
font = ImageFont.truetype('DejaVuSans.ttf', 75)
im = Image.open('NoteBack.png')
d = ImageDraw.Draw(im)
d.text((200,560), smaller_text, fill=(0, 0, 0), font=font)
pos = d.textsize(smaller_text, font)
arrow = Image.open('arrow.png')
im.paste(arrow, (200+pos[0], 560+pos[1]), arrow)
im.save('tmp.jpg')
img = 'tmp.jpg'
sentences.append('!?!?!?!?!? NO PUNCTUATION? Look at where you missed it!')
return sentences, img
def handle_thesaurus(message):
my_string_1 = message.content.split("Thesaurus ", 1)[1]
end = my_string_1.find(" ")
my_string_2 = my_string_1
if end != -1:
my_string_2 = my_string_1[0:end]
if my_string_2 == "":
my_string_2 = "nothing"
# get thesaurus definitions:
synonyms = []
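    # Collect every lemma from every WordNet synset, skipping the word itself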
for syn in wordnet.synsets(my_string_2):
for l in syn.lemmas():
if l.name() != my_string_2:
synonyms.append(l.name())
if len(synonyms) == 0:
ret_string = my_string_2 + " isn't a word i know, which probably means you're wrong. Goof."
        return ret_string
synonyms_2 = ', '.join(synonyms)
my_string_3 = "Here, dummy:\n\n\'" + my_string_2 + "\' synonyms: "
    my_string_3 = my_string_3 + synonyms_2
return my_string_3
@client.event
async def on_message(message):
if(message.author.bot):
return
if "Thesaurus " in message.content:
my_string = handle_thesaurus(message)
await message.channel.send(my_string)
else:
sentences, img_loc = handle_grammar(message)
for sentence in sentences:
await message.channel.send(sentence)
if(img_loc != ''):
await message.channel.send(img_loc, file=discord.File(img_loc))
# now actually connect the bot
client.run(config.get(section='Configuration', option='connection_token'),
bot=True, reconnect=True)
|
[
"re.split",
"discord.File",
"language_check.LanguageTool",
"nltk.corpus.wordnet.synsets",
"PIL.Image.open",
"PIL.ImageFont.truetype",
"PIL.ImageDraw.Draw",
"configparser.ConfigParser",
"discord.Client"
] |
[((179, 215), 'language_check.LanguageTool', 'language_check.LanguageTool', (['"""en-US"""'], {}), "('en-US')\n", (206, 215), False, 'import language_check\n'), ((454, 481), 'configparser.ConfigParser', 'configparser.ConfigParser', ([], {}), '()\n', (479, 481), False, 'import configparser\n'), ((651, 667), 'discord.Client', 'discord.Client', ([], {}), '()\n', (665, 667), False, 'import discord\n'), ((2646, 2688), 're.split', 're.split', (['"""(\\\\.|\\\\!|\\\\?)"""', 'message.content'], {}), "('(\\\\.|\\\\!|\\\\?)', message.content)\n", (2654, 2688), False, 'import re\n'), ((3719, 3747), 'nltk.corpus.wordnet.synsets', 'wordnet.synsets', (['my_string_2'], {}), '(my_string_2)\n', (3734, 3747), False, 'from nltk.corpus import wordnet\n'), ((2893, 2933), 'PIL.ImageFont.truetype', 'ImageFont.truetype', (['"""DejaVuSans.ttf"""', '(75)'], {}), "('DejaVuSans.ttf', 75)\n", (2911, 2933), False, 'from PIL import ImageFont, ImageDraw, Image\n'), ((2947, 2973), 'PIL.Image.open', 'Image.open', (['"""NoteBack.png"""'], {}), "('NoteBack.png')\n", (2957, 2973), False, 'from PIL import ImageFont, ImageDraw, Image\n'), ((2986, 3004), 'PIL.ImageDraw.Draw', 'ImageDraw.Draw', (['im'], {}), '(im)\n', (3000, 3004), False, 'from PIL import ImageFont, ImageDraw, Image\n'), ((3133, 3156), 'PIL.Image.open', 'Image.open', (['"""arrow.png"""'], {}), "('arrow.png')\n", (3143, 3156), False, 'from PIL import ImageFont, ImageDraw, Image\n'), ((4673, 4694), 'discord.File', 'discord.File', (['img_loc'], {}), '(img_loc)\n', (4685, 4694), False, 'import discord\n')]
|
from django.contrib import admin
from .models import Order, OrderedItem
@admin.register(Order)
class OrderAdmin(admin.ModelAdmin):
pass
@admin.register(OrderedItem)
class OrderedItemAdmin(admin.ModelAdmin):
pass
|
[
"django.contrib.admin.register"
] |
[((79, 100), 'django.contrib.admin.register', 'admin.register', (['Order'], {}), '(Order)\n', (93, 100), False, 'from django.contrib import admin\n'), ((153, 180), 'django.contrib.admin.register', 'admin.register', (['OrderedItem'], {}), '(OrderedItem)\n', (167, 180), False, 'from django.contrib import admin\n')]
|
from setuptools import setup, find_packages
setup(
name='myapp',
version='0.1',
author='<NAME>',
author_email='<EMAIL>',
url='https://github.com/clozinski/grok.install',
description="""\
Simple Part of the Install package.
""",
packages=find_packages('.'),
package_dir={'': '.'},
include_package_data=True,
zip_safe=False,
license='ZPL',
install_requires=['setuptools',
'grok',
'grokui.admin',
'grokcore.startup',
'grokcore.message',
],
entry_points="""
[console_scripts]
interactive_debug_prompt = grokcore.startup.startup:interactive_debug_prompt
[paste.app_factory]
main = grokcore.startup:application_factory""",
)
|
[
"setuptools.find_packages"
] |
[((267, 285), 'setuptools.find_packages', 'find_packages', (['"""."""'], {}), "('.')\n", (280, 285), False, 'from setuptools import setup, find_packages\n')]
|
# Generated by Django 2.2.3 on 2019-07-04 07:19
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('DjangoGraphs', '0003_auto_20190704_0628'),
]
operations = [
migrations.AddField(
model_name='type',
name='unit',
field=models.CharField(blank='', default='', max_length=4),
),
]
|
[
"django.db.models.CharField"
] |
[((335, 387), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '""""""', 'default': '""""""', 'max_length': '(4)'}), "(blank='', default='', max_length=4)\n", (351, 387), False, 'from django.db import migrations, models\n')]
|
import unittest
from mock import patch
from src.backup.datastore.Backup import Backup
from src.retention.should_perform_retention_predicate import \
ShouldPerformRetentionPredicate
from src.commons.error_reporting import ErrorReporting
class TestShouldPerformRetentionPredicate(unittest.TestCase):
def setUp(self):
patch(
'src.commons.config.environment.Environment.version_id',
return_value='dummy_version'
).start()
patch('googleapiclient.discovery.build').start()
patch('oauth2client.client.GoogleCredentials.get_application_default') \
.start()
def tearDown(self):
patch.stopall()
def test_should_return_true_for_valid_backup_list(self):
# given
backups = self.__create_valid_backups()
# when
result = ShouldPerformRetentionPredicate.test(backups)
# then
self.assertEqual(True, result)
def test_should_return_false_for_empty_list(self):
# given
empty_list = []
# when
result = ShouldPerformRetentionPredicate.test(empty_list)
# then
self.assertEqual(False, result)
@patch.object(ErrorReporting, '_create_http')
@patch.object(ErrorReporting, 'report')
def test_should_return_false_and_trigger_error_reporting_if_there_are_multiple_backups_referencing_same_table_in_bq(
self, report, _):
# given
backups = \
self.__create_backups_with_part_of_referencing_same_table_in_bq()
# when
result = ShouldPerformRetentionPredicate.test(backups)
# then
self.assertEqual(False, result)
report.assert_called_once()
@staticmethod
def __create_valid_backups():
backup_1 = Backup(table_id='table_id_1', dataset_id='dataset_id_1')
backup_2 = Backup(table_id='table_id_2', dataset_id='dataset_id_1')
backup_3 = Backup(table_id='table_id_3', dataset_id='dataset_id_1')
backup_4 = Backup(table_id='table_id_4', dataset_id='dataset_id_1')
return [backup_1, backup_2, backup_3, backup_4]
@staticmethod
def __create_backups_with_part_of_referencing_same_table_in_bq():
backup_1 = Backup(table_id='table_id_1', dataset_id='dataset_id_1')
backup_2 = Backup(table_id='table_id_2', dataset_id='dataset_id_1')
backup_3 = Backup(table_id='table_id_3', dataset_id='dataset_id_1')
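        # backup_4 deliberately references the same table as backup_3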
backup_4 = Backup(table_id='table_id_3', dataset_id='dataset_id_1')
return [backup_4, backup_2, backup_3, backup_1]
|
[
"mock.patch.object",
"src.retention.should_perform_retention_predicate.ShouldPerformRetentionPredicate.test",
"mock.patch",
"src.backup.datastore.Backup.Backup",
"mock.patch.stopall"
] |
[((1178, 1222), 'mock.patch.object', 'patch.object', (['ErrorReporting', '"""_create_http"""'], {}), "(ErrorReporting, '_create_http')\n", (1190, 1222), False, 'from mock import patch\n'), ((1228, 1266), 'mock.patch.object', 'patch.object', (['ErrorReporting', '"""report"""'], {}), "(ErrorReporting, 'report')\n", (1240, 1266), False, 'from mock import patch\n'), ((662, 677), 'mock.patch.stopall', 'patch.stopall', ([], {}), '()\n', (675, 677), False, 'from mock import patch\n'), ((837, 882), 'src.retention.should_perform_retention_predicate.ShouldPerformRetentionPredicate.test', 'ShouldPerformRetentionPredicate.test', (['backups'], {}), '(backups)\n', (873, 882), False, 'from src.retention.should_perform_retention_predicate import ShouldPerformRetentionPredicate\n'), ((1067, 1115), 'src.retention.should_perform_retention_predicate.ShouldPerformRetentionPredicate.test', 'ShouldPerformRetentionPredicate.test', (['empty_list'], {}), '(empty_list)\n', (1103, 1115), False, 'from src.retention.should_perform_retention_predicate import ShouldPerformRetentionPredicate\n'), ((1565, 1610), 'src.retention.should_perform_retention_predicate.ShouldPerformRetentionPredicate.test', 'ShouldPerformRetentionPredicate.test', (['backups'], {}), '(backups)\n', (1601, 1610), False, 'from src.retention.should_perform_retention_predicate import ShouldPerformRetentionPredicate\n'), ((1775, 1831), 'src.backup.datastore.Backup.Backup', 'Backup', ([], {'table_id': '"""table_id_1"""', 'dataset_id': '"""dataset_id_1"""'}), "(table_id='table_id_1', dataset_id='dataset_id_1')\n", (1781, 1831), False, 'from src.backup.datastore.Backup import Backup\n'), ((1851, 1907), 'src.backup.datastore.Backup.Backup', 'Backup', ([], {'table_id': '"""table_id_2"""', 'dataset_id': '"""dataset_id_1"""'}), "(table_id='table_id_2', dataset_id='dataset_id_1')\n", (1857, 1907), False, 'from src.backup.datastore.Backup import Backup\n'), ((1927, 1983), 'src.backup.datastore.Backup.Backup', 'Backup', ([], {'table_id': '"""table_id_3"""', 'dataset_id': '"""dataset_id_1"""'}), "(table_id='table_id_3', dataset_id='dataset_id_1')\n", (1933, 1983), False, 'from src.backup.datastore.Backup import Backup\n'), ((2003, 2059), 'src.backup.datastore.Backup.Backup', 'Backup', ([], {'table_id': '"""table_id_4"""', 'dataset_id': '"""dataset_id_1"""'}), "(table_id='table_id_4', dataset_id='dataset_id_1')\n", (2009, 2059), False, 'from src.backup.datastore.Backup import Backup\n'), ((2225, 2281), 'src.backup.datastore.Backup.Backup', 'Backup', ([], {'table_id': '"""table_id_1"""', 'dataset_id': '"""dataset_id_1"""'}), "(table_id='table_id_1', dataset_id='dataset_id_1')\n", (2231, 2281), False, 'from src.backup.datastore.Backup import Backup\n'), ((2301, 2357), 'src.backup.datastore.Backup.Backup', 'Backup', ([], {'table_id': '"""table_id_2"""', 'dataset_id': '"""dataset_id_1"""'}), "(table_id='table_id_2', dataset_id='dataset_id_1')\n", (2307, 2357), False, 'from src.backup.datastore.Backup import Backup\n'), ((2377, 2433), 'src.backup.datastore.Backup.Backup', 'Backup', ([], {'table_id': '"""table_id_3"""', 'dataset_id': '"""dataset_id_1"""'}), "(table_id='table_id_3', dataset_id='dataset_id_1')\n", (2383, 2433), False, 'from src.backup.datastore.Backup import Backup\n'), ((2453, 2509), 'src.backup.datastore.Backup.Backup', 'Backup', ([], {'table_id': '"""table_id_3"""', 'dataset_id': '"""dataset_id_1"""'}), "(table_id='table_id_3', dataset_id='dataset_id_1')\n", (2459, 2509), False, 'from src.backup.datastore.Backup import Backup\n'), ((335, 432), 
'mock.patch', 'patch', (['"""src.commons.config.environment.Environment.version_id"""'], {'return_value': '"""dummy_version"""'}), "('src.commons.config.environment.Environment.version_id', return_value\n ='dummy_version')\n", (340, 432), False, 'from mock import patch\n'), ((478, 518), 'mock.patch', 'patch', (['"""googleapiclient.discovery.build"""'], {}), "('googleapiclient.discovery.build')\n", (483, 518), False, 'from mock import patch\n'), ((535, 605), 'mock.patch', 'patch', (['"""oauth2client.client.GoogleCredentials.get_application_default"""'], {}), "('oauth2client.client.GoogleCredentials.get_application_default')\n", (540, 605), False, 'from mock import patch\n')]
|
from typing import Dict, List, Set
from random import choice
class RandomizedSet:
def __init__(self):
self.vals: List[int] = []
self.indices: Dict[int, int] = {}
def insert(self, val: int) -> bool:
if val in self.indices.keys():
return False
self.indices[val] = len(self.vals)
self.vals.append(val)
return True
def remove(self, val: int) -> bool:
if val not in self.indices.keys():
return False
ind = self.indices[val]
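        # Move the last value into the removed slot and pop the tail,
        # keeping remove() O(1) instead of O(n).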
self.indices[self.vals[-1]] = ind
self.vals[ind], self.vals[-1] = self.vals[-1], self.vals[ind]
self.vals.pop()
self.indices.pop(val)
return True
def getRandom(self) -> int:
return choice(self.vals)
# Your RandomizedSet object will be instantiated and called as such:
# obj = RandomizedSet()
# param_1 = obj.insert(val)
# param_2 = obj.remove(val)
# param_3 = obj.getRandom()
|
[
"random.choice"
] |
[((770, 787), 'random.choice', 'choice', (['self.vals'], {}), '(self.vals)\n', (776, 787), False, 'from random import choice\n')]
|
# -*- coding: UTF-8 -*-
"""
This script demonstrates how to solve a classification problem with a neural network
"""
import os
from mlp import ANN
import numpy as np
import matplotlib.pyplot as plt
from sklearn.datasets import make_blobs, make_circles, make_moons
from sklearn.linear_model import LogisticRegression
from sklearn.preprocessing import StandardScaler, OneHotEncoder
def generateData(n):
"""
"""
np.random.seed(12046)
blobs = make_blobs(n_samples=n, centers = [[-2, -2], [2, 2]])
circles = make_circles(n_samples=n, factor=.4, noise=.05)
moons = make_moons(n_samples=n, noise=.05)
blocks = np.random.rand(n, 2) - 0.5
y = (blocks[:, 0] * blocks[:, 1] < 0) + 0
blocks = (blocks, y)
    # Neural networks are sensitive to linear transformations of the data, so standardize it first
scaler = StandardScaler()
blobs = (scaler.fit_transform(blobs[0]), blobs[1])
circles = (scaler.fit_transform(circles[0]), circles[1])
moons = (scaler.fit_transform(moons[0]), moons[1])
blocks = (scaler.fit_transform(blocks[0]), blocks[1])
return blobs, circles, moons, blocks
def drawData(ax, data):
"""
    Visualize the data
"""
X, y = data
label1 = X[y>0]
ax.scatter(label1[:, 0], label1[:, 1], marker="o")
label0 = X[y==0]
ax.scatter(label0[:, 0], label0[:, 1], marker="^", color="k")
return ax
def drawModel(ax, model):
"""
    Visualize the model's separating hyperplane
"""
x1 = np.linspace(ax.get_xlim()[0], ax.get_xlim()[1], 100)
x2 = np.linspace(ax.get_ylim()[0], ax.get_ylim()[1], 100)
X1, X2 = np.meshgrid(x1, x2)
Y = model.predict_proba(np.c_[X1.ravel(), X2.ravel()])[:, 1]
Y = Y.reshape(X1.shape)
ax.contourf(X1, X2, Y, levels=[0, 0.5], colors=["gray"], alpha=0.4)
return ax
def trainLogit(data):
"""
"""
X, y = data
model = LogisticRegression()
model.fit(X, y)
return model
def trainANN(data, logPath):
"""
"""
X, y = data
enc = OneHotEncoder()
y = enc.fit_transform(y.reshape(-1, 1)).toarray()
model = ANN([4, 4, 2], logPath)
model.fit(X, y)
return model
def visualize(data):
"""
"""
    # Create the figure frames
fig = plt.figure(figsize=(10, 10), dpi=80)
fig1 = plt.figure(figsize=(10, 10), dpi=80)
    # Draw the four datasets as subplots in each figure frame
for i in range(len(data)):
ax = fig.add_subplot(2, 2, i+1)
ax1 = fig1.add_subplot(2, 2, i+1)
drawData(ax, data[i])
        # Log paths on Windows differ from those on Linux
if os.name == "nt":
drawModel(ax, trainANN(data[i], "logs\\data_%s" % (i+1)))
else:
drawModel(ax, trainANN(data[i], "logs/data_%s" % (i+1)))
drawData(ax1, data[i])
drawModel(ax1, trainLogit(data[i]))
ax.get_xaxis().set_visible(False)
ax.get_yaxis().set_visible(False)
ax1.get_xaxis().set_visible(False)
ax1.get_yaxis().set_visible(False)
plt.show()
if __name__ == "__main__":
data = generateData(200)
visualize(data)
|
[
"sklearn.datasets.make_circles",
"numpy.meshgrid",
"sklearn.preprocessing.StandardScaler",
"numpy.random.seed",
"matplotlib.pyplot.show",
"mlp.ANN",
"sklearn.preprocessing.OneHotEncoder",
"sklearn.datasets.make_blobs",
"sklearn.datasets.make_moons",
"sklearn.linear_model.LogisticRegression",
"matplotlib.pyplot.figure",
"numpy.random.rand"
] |
[((366, 387), 'numpy.random.seed', 'np.random.seed', (['(12046)'], {}), '(12046)\n', (380, 387), True, 'import numpy as np\n'), ((400, 451), 'sklearn.datasets.make_blobs', 'make_blobs', ([], {'n_samples': 'n', 'centers': '[[-2, -2], [2, 2]]'}), '(n_samples=n, centers=[[-2, -2], [2, 2]])\n', (410, 451), False, 'from sklearn.datasets import make_blobs, make_circles, make_moons\n'), ((468, 517), 'sklearn.datasets.make_circles', 'make_circles', ([], {'n_samples': 'n', 'factor': '(0.4)', 'noise': '(0.05)'}), '(n_samples=n, factor=0.4, noise=0.05)\n', (480, 517), False, 'from sklearn.datasets import make_blobs, make_circles, make_moons\n'), ((528, 563), 'sklearn.datasets.make_moons', 'make_moons', ([], {'n_samples': 'n', 'noise': '(0.05)'}), '(n_samples=n, noise=0.05)\n', (538, 563), False, 'from sklearn.datasets import make_blobs, make_circles, make_moons\n'), ((723, 739), 'sklearn.preprocessing.StandardScaler', 'StandardScaler', ([], {}), '()\n', (737, 739), False, 'from sklearn.preprocessing import StandardScaler, OneHotEncoder\n'), ((1453, 1472), 'numpy.meshgrid', 'np.meshgrid', (['x1', 'x2'], {}), '(x1, x2)\n', (1464, 1472), True, 'import numpy as np\n'), ((1720, 1740), 'sklearn.linear_model.LogisticRegression', 'LogisticRegression', ([], {}), '()\n', (1738, 1740), False, 'from sklearn.linear_model import LogisticRegression\n'), ((1851, 1866), 'sklearn.preprocessing.OneHotEncoder', 'OneHotEncoder', ([], {}), '()\n', (1864, 1866), False, 'from sklearn.preprocessing import StandardScaler, OneHotEncoder\n'), ((1933, 1956), 'mlp.ANN', 'ANN', (['[4, 4, 2]', 'logPath'], {}), '([4, 4, 2], logPath)\n', (1936, 1956), False, 'from mlp import ANN\n'), ((2057, 2093), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(10, 10)', 'dpi': '(80)'}), '(figsize=(10, 10), dpi=80)\n', (2067, 2093), True, 'import matplotlib.pyplot as plt\n'), ((2105, 2141), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(10, 10)', 'dpi': '(80)'}), '(figsize=(10, 10), dpi=80)\n', (2115, 2141), True, 'import matplotlib.pyplot as plt\n'), ((2765, 2775), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2773, 2775), True, 'import matplotlib.pyplot as plt\n'), ((576, 596), 'numpy.random.rand', 'np.random.rand', (['n', '(2)'], {}), '(n, 2)\n', (590, 596), True, 'import numpy as np\n')]
|
#!/usr/bin/env python
# (C) Copyright 2018, Intel Corporation
# SPDX-License-Identifier: MIT
import mraa
import time
# TODO Ensure you are running this sample on a Linux system with:
# * python-mraa bindings installed
# * Permissions to write to the aio system; this often requires root
# The following should read an ADC/AIO value from your mraa-supported board.
# Pin 2 is used as the default with this sample, but this may not be a valid AIO pin on your board.
# NOTE: Not all boards *have* an AIO/ADC; please see your board's documentation for details.
aio = mraa.Aio(2)
while True:
    print(aio.read())
    time.sleep(1)  # poll once per second instead of busy-looping
|
[
"mraa.Aio"
] |
[((548, 559), 'mraa.Aio', 'mraa.Aio', (['(2)'], {}), '(2)\n', (556, 559), False, 'import mraa\n')]
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import unittest
import retworkx
class TestIsomorphic(unittest.TestCase):
def test_empty_isomorphic(self):
dag_a = retworkx.PyDAG()
dag_b = retworkx.PyDAG()
for id_order in [False, True]:
with self.subTest(id_order=id_order):
self.assertTrue(retworkx.is_isomorphic(dag_a, dag_b, id_order=id_order))
def test_empty_isomorphic_compare_nodes(self):
dag_a = retworkx.PyDAG()
dag_b = retworkx.PyDAG()
for id_order in [False, True]:
with self.subTest(id_order=id_order):
self.assertTrue(
retworkx.is_isomorphic(dag_a, dag_b, lambda x, y: x == y, id_order=id_order)
)
def test_isomorphic_identical(self):
dag_a = retworkx.PyDAG()
dag_b = retworkx.PyDAG()
node_a = dag_a.add_node("a_1")
dag_a.add_child(node_a, "a_2", "a_1")
dag_a.add_child(node_a, "a_3", "a_2")
node_b = dag_b.add_node("a_1")
dag_b.add_child(node_b, "a_2", "a_1")
dag_b.add_child(node_b, "a_3", "a_2")
for id_order in [False, True]:
with self.subTest(id_order=id_order):
self.assertTrue(retworkx.is_isomorphic(dag_a, dag_b, id_order=id_order))
def test_isomorphic_mismatch_node_data(self):
dag_a = retworkx.PyDAG()
dag_b = retworkx.PyDAG()
node_a = dag_a.add_node("a_1")
dag_a.add_child(node_a, "a_2", "a_1")
dag_a.add_child(node_a, "a_3", "a_2")
node_b = dag_b.add_node("b_1")
dag_b.add_child(node_b, "b_2", "b_1")
dag_b.add_child(node_b, "b_3", "b_2")
for id_order in [False, True]:
with self.subTest(id_order=id_order):
self.assertTrue(retworkx.is_isomorphic(dag_a, dag_b, id_order=id_order))
def test_isomorphic_compare_nodes_mismatch_node_data(self):
dag_a = retworkx.PyDAG()
dag_b = retworkx.PyDAG()
node_a = dag_a.add_node("a_1")
dag_a.add_child(node_a, "a_2", "a_1")
dag_a.add_child(node_a, "a_3", "a_2")
node_b = dag_b.add_node("b_1")
dag_b.add_child(node_b, "b_2", "b_1")
dag_b.add_child(node_b, "b_3", "b_2")
for id_order in [False, True]:
with self.subTest(id_order=id_order):
self.assertFalse(
retworkx.is_isomorphic(dag_a, dag_b, lambda x, y: x == y, id_order=id_order)
)
def test_is_isomorphic_nodes_compare_raises(self):
dag_a = retworkx.PyDAG()
dag_b = retworkx.PyDAG()
node_a = dag_a.add_node("a_1")
dag_a.add_child(node_a, "a_2", "a_1")
dag_a.add_child(node_a, "a_3", "a_2")
node_b = dag_b.add_node("b_1")
dag_b.add_child(node_b, "b_2", "b_1")
dag_b.add_child(node_b, "b_3", "b_2")
def compare_nodes(a, b):
raise TypeError("Failure")
        self.assertRaises(TypeError, retworkx.is_isomorphic, dag_a, dag_b, compare_nodes)
def test_isomorphic_compare_nodes_identical(self):
dag_a = retworkx.PyDAG()
dag_b = retworkx.PyDAG()
node_a = dag_a.add_node("a_1")
dag_a.add_child(node_a, "a_2", "a_1")
dag_a.add_child(node_a, "a_3", "a_2")
node_b = dag_b.add_node("a_1")
dag_b.add_child(node_b, "a_2", "a_1")
dag_b.add_child(node_b, "a_3", "a_2")
for id_order in [False, True]:
with self.subTest(id_order=id_order):
self.assertTrue(
retworkx.is_isomorphic(dag_a, dag_b, lambda x, y: x == y, id_order=id_order)
)
def test_isomorphic_compare_edges_identical(self):
dag_a = retworkx.PyDAG()
dag_b = retworkx.PyDAG()
node_a = dag_a.add_node("a_1")
dag_a.add_child(node_a, "a_2", "a_1")
dag_a.add_child(node_a, "a_3", "a_2")
node_b = dag_b.add_node("a_1")
dag_b.add_child(node_b, "a_2", "a_1")
dag_b.add_child(node_b, "a_3", "a_2")
for id_order in [False, True]:
with self.subTest(id_order=id_order):
self.assertTrue(
retworkx.is_isomorphic(
dag_a,
dag_b,
edge_matcher=lambda x, y: x == y,
id_order=id_order,
)
)
def test_isomorphic_compare_nodes_with_removals(self):
dag_a = retworkx.PyDAG()
dag_b = retworkx.PyDAG()
qr_0_in = dag_a.add_node("qr[0]")
qr_1_in = dag_a.add_node("qr[1]")
cr_0_in = dag_a.add_node("cr[0]")
qr_0_out = dag_a.add_node("qr[0]")
qr_1_out = dag_a.add_node("qr[1]")
        cr_0_out = dag_a.add_node("cr[0]")
cu1 = dag_a.add_child(qr_0_in, "cu1", "qr[0]")
dag_a.add_edge(qr_1_in, cu1, "qr[1]")
measure_0 = dag_a.add_child(cr_0_in, "measure", "cr[0]")
dag_a.add_edge(cu1, measure_0, "qr[0]")
measure_1 = dag_a.add_child(cu1, "measure", "qr[1]")
dag_a.add_edge(measure_0, measure_1, "cr[0]")
dag_a.add_edge(measure_1, qr_1_out, "qr[1]")
dag_a.add_edge(measure_1, cr_0_out, "cr[0]")
dag_a.add_edge(measure_0, qr_0_out, "qr[0]")
dag_a.remove_node(cu1)
dag_a.add_edge(qr_0_in, measure_0, "qr[0]")
dag_a.add_edge(qr_1_in, measure_1, "qr[1]")
qr_0_in = dag_b.add_node("qr[0]")
qr_1_in = dag_b.add_node("qr[1]")
cr_0_in = dag_b.add_node("cr[0]")
qr_0_out = dag_b.add_node("qr[0]")
qr_1_out = dag_b.add_node("qr[1]")
        cr_0_out = dag_b.add_node("cr[0]")
measure_0 = dag_b.add_child(cr_0_in, "measure", "cr[0]")
dag_b.add_edge(qr_0_in, measure_0, "qr[0]")
measure_1 = dag_b.add_child(qr_1_in, "measure", "qr[1]")
dag_b.add_edge(measure_1, qr_1_out, "qr[1]")
dag_b.add_edge(measure_1, cr_0_out, "cr[0]")
dag_b.add_edge(measure_0, measure_1, "cr[0]")
dag_b.add_edge(measure_0, qr_0_out, "qr[0]")
for id_order in [False, True]:
with self.subTest(id_order=id_order):
self.assertTrue(
retworkx.is_isomorphic(dag_a, dag_b, lambda x, y: x == y, id_order=id_order)
)
def test_isomorphic_compare_nodes_with_removals_deepcopy(self):
dag_a = retworkx.PyDAG()
dag_b = retworkx.PyDAG()
qr_0_in = dag_a.add_node("qr[0]")
qr_1_in = dag_a.add_node("qr[1]")
cr_0_in = dag_a.add_node("cr[0]")
qr_0_out = dag_a.add_node("qr[0]")
qr_1_out = dag_a.add_node("qr[1]")
        cr_0_out = dag_a.add_node("cr[0]")
cu1 = dag_a.add_child(qr_0_in, "cu1", "qr[0]")
dag_a.add_edge(qr_1_in, cu1, "qr[1]")
measure_0 = dag_a.add_child(cr_0_in, "measure", "cr[0]")
dag_a.add_edge(cu1, measure_0, "qr[0]")
measure_1 = dag_a.add_child(cu1, "measure", "qr[1]")
dag_a.add_edge(measure_0, measure_1, "cr[0]")
dag_a.add_edge(measure_1, qr_1_out, "qr[1]")
dag_a.add_edge(measure_1, cr_0_out, "cr[0]")
dag_a.add_edge(measure_0, qr_0_out, "qr[0]")
dag_a.remove_node(cu1)
dag_a.add_edge(qr_0_in, measure_0, "qr[0]")
dag_a.add_edge(qr_1_in, measure_1, "qr[1]")
qr_0_in = dag_b.add_node("qr[0]")
qr_1_in = dag_b.add_node("qr[1]")
cr_0_in = dag_b.add_node("cr[0]")
qr_0_out = dag_b.add_node("qr[0]")
qr_1_out = dag_b.add_node("qr[1]")
        cr_0_out = dag_b.add_node("cr[0]")
measure_0 = dag_b.add_child(cr_0_in, "measure", "cr[0]")
dag_b.add_edge(qr_0_in, measure_0, "qr[0]")
measure_1 = dag_b.add_child(qr_1_in, "measure", "qr[1]")
dag_b.add_edge(measure_1, qr_1_out, "qr[1]")
dag_b.add_edge(measure_1, cr_0_out, "cr[0]")
dag_b.add_edge(measure_0, measure_1, "cr[0]")
dag_b.add_edge(measure_0, qr_0_out, "qr[0]")
for id_order in [False, True]:
with self.subTest(id_order=id_order):
self.assertTrue(
retworkx.is_isomorphic(
copy.deepcopy(dag_a),
copy.deepcopy(dag_b),
lambda x, y: x == y,
id_order=id_order,
)
)
def test_digraph_isomorphic_parallel_edges_with_edge_matcher(self):
graph = retworkx.PyDiGraph()
graph.extend_from_weighted_edge_list([(0, 1, "a"), (0, 1, "b"), (1, 2, "c")])
self.assertTrue(retworkx.is_isomorphic(graph, graph, edge_matcher=lambda x, y: x == y))
def test_digraph_isomorphic_self_loop(self):
graph = retworkx.PyDiGraph()
graph.add_nodes_from([0])
graph.add_edges_from([(0, 0, "a")])
self.assertTrue(retworkx.is_isomorphic(graph, graph))
def test_digraph_non_isomorphic_edge_mismatch_self_loop(self):
graph = retworkx.PyDiGraph()
graph.add_nodes_from([0])
graph.add_edges_from([(0, 0, "a")])
second_graph = retworkx.PyDiGraph()
second_graph.add_nodes_from([0])
second_graph.add_edges_from([(0, 0, "b")])
self.assertFalse(
retworkx.is_isomorphic(graph, second_graph, edge_matcher=lambda x, y: x == y)
)
def test_digraph_non_isomorphic_rule_out_incoming(self):
graph = retworkx.PyDiGraph()
graph.add_nodes_from([0, 1, 2, 3])
graph.add_edges_from_no_data([(0, 1), (0, 2), (2, 1)])
second_graph = retworkx.PyDiGraph()
second_graph.add_nodes_from([0, 1, 2, 3])
second_graph.add_edges_from_no_data([(0, 1), (0, 2), (3, 1)])
self.assertFalse(retworkx.is_isomorphic(graph, second_graph, id_order=True))
def test_digraph_non_isomorphic_rule_ins_outgoing(self):
graph = retworkx.PyDiGraph()
graph.add_nodes_from([0, 1, 2, 3])
graph.add_edges_from_no_data([(1, 0), (2, 0), (1, 2)])
second_graph = retworkx.PyDiGraph()
second_graph.add_nodes_from([0, 1, 2, 3])
second_graph.add_edges_from_no_data([(1, 0), (2, 0), (1, 3)])
self.assertFalse(retworkx.is_isomorphic(graph, second_graph, id_order=True))
def test_digraph_non_isomorphic_rule_ins_incoming(self):
graph = retworkx.PyDiGraph()
graph.add_nodes_from([0, 1, 2, 3])
graph.add_edges_from_no_data([(1, 0), (2, 0), (2, 1)])
second_graph = retworkx.PyDiGraph()
second_graph.add_nodes_from([0, 1, 2, 3])
second_graph.add_edges_from_no_data([(1, 0), (2, 0), (3, 1)])
self.assertFalse(retworkx.is_isomorphic(graph, second_graph, id_order=True))
def test_isomorphic_parallel_edges(self):
first = retworkx.PyDiGraph()
first.extend_from_edge_list([(0, 1), (0, 1), (1, 2), (2, 3)])
second = retworkx.PyDiGraph()
second.extend_from_edge_list([(0, 1), (1, 2), (1, 2), (2, 3)])
self.assertFalse(retworkx.is_isomorphic(first, second))
def test_digraph_isomorphic_insufficient_call_limit(self):
graph = retworkx.generators.directed_path_graph(5)
self.assertFalse(retworkx.is_isomorphic(graph, graph, call_limit=2))
def test_digraph_vf2_mapping_identical(self):
graph = retworkx.generators.directed_grid_graph(2, 2)
second_graph = retworkx.generators.directed_grid_graph(2, 2)
mapping = retworkx.digraph_vf2_mapping(graph, second_graph)
self.assertEqual(next(mapping), {0: 0, 1: 1, 2: 2, 3: 3})
def test_digraph_vf2_mapping_identical_removals(self):
graph = retworkx.generators.directed_path_graph(2)
second_graph = retworkx.generators.directed_path_graph(4)
second_graph.remove_nodes_from([1, 2])
second_graph.add_edge(0, 3, None)
mapping = retworkx.digraph_vf2_mapping(graph, second_graph)
self.assertEqual({0: 0, 1: 3}, next(mapping))
def test_digraph_vf2_mapping_identical_removals_first(self):
second_graph = retworkx.generators.directed_path_graph(2)
graph = retworkx.generators.directed_path_graph(4)
graph.remove_nodes_from([1, 2])
graph.add_edge(0, 3, None)
mapping = retworkx.digraph_vf2_mapping(graph, second_graph)
self.assertEqual({0: 0, 3: 1}, next(mapping))
def test_digraph_vf2_mapping_identical_vf2pp(self):
graph = retworkx.generators.directed_grid_graph(2, 2)
second_graph = retworkx.generators.directed_grid_graph(2, 2)
mapping = retworkx.digraph_vf2_mapping(graph, second_graph, id_order=False)
self.assertEqual(next(mapping), {0: 0, 1: 1, 2: 2, 3: 3})
def test_digraph_vf2_mapping_identical_removals_vf2pp(self):
graph = retworkx.generators.directed_path_graph(2)
second_graph = retworkx.generators.directed_path_graph(4)
second_graph.remove_nodes_from([1, 2])
second_graph.add_edge(0, 3, None)
mapping = retworkx.digraph_vf2_mapping(graph, second_graph, id_order=False)
self.assertEqual({0: 0, 1: 3}, next(mapping))
def test_digraph_vf2_mapping_identical_removals_first_vf2pp(self):
second_graph = retworkx.generators.directed_path_graph(2)
graph = retworkx.generators.directed_path_graph(4)
graph.remove_nodes_from([1, 2])
graph.add_edge(0, 3, None)
mapping = retworkx.digraph_vf2_mapping(graph, second_graph, id_order=False)
self.assertEqual({0: 0, 3: 1}, next(mapping))
def test_digraph_vf2_number_of_valid_mappings(self):
graph = retworkx.generators.directed_mesh_graph(3)
mapping = retworkx.digraph_vf2_mapping(graph, graph, id_order=True)
total = 0
for _ in mapping:
total += 1
self.assertEqual(total, 6)
def test_empty_digraph_vf2_mapping(self):
g_a = retworkx.PyDiGraph()
g_b = retworkx.PyDiGraph()
for id_order in [False, True]:
with self.subTest(id_order=id_order):
mapping = retworkx.digraph_vf2_mapping(g_a, g_b, id_order=id_order, subgraph=False)
self.assertEqual({}, next(mapping))
|
[
"retworkx.generators.directed_mesh_graph",
"copy.deepcopy",
"retworkx.is_isomorphic",
"retworkx.digraph_vf2_mapping",
"retworkx.PyDiGraph",
"retworkx.generators.directed_path_graph",
"retworkx.PyDAG",
"retworkx.generators.directed_grid_graph"
] |
[((686, 702), 'retworkx.PyDAG', 'retworkx.PyDAG', ([], {}), '()\n', (700, 702), False, 'import retworkx\n'), ((719, 735), 'retworkx.PyDAG', 'retworkx.PyDAG', ([], {}), '()\n', (733, 735), False, 'import retworkx\n'), ((983, 999), 'retworkx.PyDAG', 'retworkx.PyDAG', ([], {}), '()\n', (997, 999), False, 'import retworkx\n'), ((1016, 1032), 'retworkx.PyDAG', 'retworkx.PyDAG', ([], {}), '()\n', (1030, 1032), False, 'import retworkx\n'), ((1329, 1345), 'retworkx.PyDAG', 'retworkx.PyDAG', ([], {}), '()\n', (1343, 1345), False, 'import retworkx\n'), ((1362, 1378), 'retworkx.PyDAG', 'retworkx.PyDAG', ([], {}), '()\n', (1376, 1378), False, 'import retworkx\n'), ((1888, 1904), 'retworkx.PyDAG', 'retworkx.PyDAG', ([], {}), '()\n', (1902, 1904), False, 'import retworkx\n'), ((1921, 1937), 'retworkx.PyDAG', 'retworkx.PyDAG', ([], {}), '()\n', (1935, 1937), False, 'import retworkx\n'), ((2461, 2477), 'retworkx.PyDAG', 'retworkx.PyDAG', ([], {}), '()\n', (2475, 2477), False, 'import retworkx\n'), ((2494, 2510), 'retworkx.PyDAG', 'retworkx.PyDAG', ([], {}), '()\n', (2508, 2510), False, 'import retworkx\n'), ((3085, 3101), 'retworkx.PyDAG', 'retworkx.PyDAG', ([], {}), '()\n', (3099, 3101), False, 'import retworkx\n'), ((3118, 3134), 'retworkx.PyDAG', 'retworkx.PyDAG', ([], {}), '()\n', (3132, 3134), False, 'import retworkx\n'), ((3637, 3653), 'retworkx.PyDAG', 'retworkx.PyDAG', ([], {}), '()\n', (3651, 3653), False, 'import retworkx\n'), ((3670, 3686), 'retworkx.PyDAG', 'retworkx.PyDAG', ([], {}), '()\n', (3684, 3686), False, 'import retworkx\n'), ((4260, 4276), 'retworkx.PyDAG', 'retworkx.PyDAG', ([], {}), '()\n', (4274, 4276), False, 'import retworkx\n'), ((4293, 4309), 'retworkx.PyDAG', 'retworkx.PyDAG', ([], {}), '()\n', (4307, 4309), False, 'import retworkx\n'), ((5019, 5035), 'retworkx.PyDAG', 'retworkx.PyDAG', ([], {}), '()\n', (5033, 5035), False, 'import retworkx\n'), ((5052, 5068), 'retworkx.PyDAG', 'retworkx.PyDAG', ([], {}), '()\n', (5066, 5068), False, 'import retworkx\n'), ((6922, 6938), 'retworkx.PyDAG', 'retworkx.PyDAG', ([], {}), '()\n', (6936, 6938), False, 'import retworkx\n'), ((6955, 6971), 'retworkx.PyDAG', 'retworkx.PyDAG', ([], {}), '()\n', (6969, 6971), False, 'import retworkx\n'), ((8978, 8998), 'retworkx.PyDiGraph', 'retworkx.PyDiGraph', ([], {}), '()\n', (8996, 8998), False, 'import retworkx\n'), ((9247, 9267), 'retworkx.PyDiGraph', 'retworkx.PyDiGraph', ([], {}), '()\n', (9265, 9267), False, 'import retworkx\n'), ((9492, 9512), 'retworkx.PyDiGraph', 'retworkx.PyDiGraph', ([], {}), '()\n', (9510, 9512), False, 'import retworkx\n'), ((9614, 9634), 'retworkx.PyDiGraph', 'retworkx.PyDiGraph', ([], {}), '()\n', (9632, 9634), False, 'import retworkx\n'), ((9931, 9951), 'retworkx.PyDiGraph', 'retworkx.PyDiGraph', ([], {}), '()\n', (9949, 9951), False, 'import retworkx\n'), ((10081, 10101), 'retworkx.PyDiGraph', 'retworkx.PyDiGraph', ([], {}), '()\n', (10099, 10101), False, 'import retworkx\n'), ((10385, 10405), 'retworkx.PyDiGraph', 'retworkx.PyDiGraph', ([], {}), '()\n', (10403, 10405), False, 'import retworkx\n'), ((10535, 10555), 'retworkx.PyDiGraph', 'retworkx.PyDiGraph', ([], {}), '()\n', (10553, 10555), False, 'import retworkx\n'), ((10839, 10859), 'retworkx.PyDiGraph', 'retworkx.PyDiGraph', ([], {}), '()\n', (10857, 10859), False, 'import retworkx\n'), ((10989, 11009), 'retworkx.PyDiGraph', 'retworkx.PyDiGraph', ([], {}), '()\n', (11007, 11009), False, 'import retworkx\n'), ((11278, 11298), 'retworkx.PyDiGraph', 'retworkx.PyDiGraph', ([], {}), '()\n', (11296, 11298), False, 
'import retworkx\n'), ((11386, 11406), 'retworkx.PyDiGraph', 'retworkx.PyDiGraph', ([], {}), '()\n', (11404, 11406), False, 'import retworkx\n'), ((11622, 11664), 'retworkx.generators.directed_path_graph', 'retworkx.generators.directed_path_graph', (['(5)'], {}), '(5)\n', (11661, 11664), False, 'import retworkx\n'), ((11809, 11854), 'retworkx.generators.directed_grid_graph', 'retworkx.generators.directed_grid_graph', (['(2)', '(2)'], {}), '(2, 2)\n', (11848, 11854), False, 'import retworkx\n'), ((11878, 11923), 'retworkx.generators.directed_grid_graph', 'retworkx.generators.directed_grid_graph', (['(2)', '(2)'], {}), '(2, 2)\n', (11917, 11923), False, 'import retworkx\n'), ((11942, 11991), 'retworkx.digraph_vf2_mapping', 'retworkx.digraph_vf2_mapping', (['graph', 'second_graph'], {}), '(graph, second_graph)\n', (11970, 11991), False, 'import retworkx\n'), ((12134, 12176), 'retworkx.generators.directed_path_graph', 'retworkx.generators.directed_path_graph', (['(2)'], {}), '(2)\n', (12173, 12176), False, 'import retworkx\n'), ((12200, 12242), 'retworkx.generators.directed_path_graph', 'retworkx.generators.directed_path_graph', (['(4)'], {}), '(4)\n', (12239, 12242), False, 'import retworkx\n'), ((12350, 12399), 'retworkx.digraph_vf2_mapping', 'retworkx.digraph_vf2_mapping', (['graph', 'second_graph'], {}), '(graph, second_graph)\n', (12378, 12399), False, 'import retworkx\n'), ((12543, 12585), 'retworkx.generators.directed_path_graph', 'retworkx.generators.directed_path_graph', (['(2)'], {}), '(2)\n', (12582, 12585), False, 'import retworkx\n'), ((12602, 12644), 'retworkx.generators.directed_path_graph', 'retworkx.generators.directed_path_graph', (['(4)'], {}), '(4)\n', (12641, 12644), False, 'import retworkx\n'), ((12738, 12787), 'retworkx.digraph_vf2_mapping', 'retworkx.digraph_vf2_mapping', (['graph', 'second_graph'], {}), '(graph, second_graph)\n', (12766, 12787), False, 'import retworkx\n'), ((12915, 12960), 'retworkx.generators.directed_grid_graph', 'retworkx.generators.directed_grid_graph', (['(2)', '(2)'], {}), '(2, 2)\n', (12954, 12960), False, 'import retworkx\n'), ((12984, 13029), 'retworkx.generators.directed_grid_graph', 'retworkx.generators.directed_grid_graph', (['(2)', '(2)'], {}), '(2, 2)\n', (13023, 13029), False, 'import retworkx\n'), ((13048, 13113), 'retworkx.digraph_vf2_mapping', 'retworkx.digraph_vf2_mapping', (['graph', 'second_graph'], {'id_order': '(False)'}), '(graph, second_graph, id_order=False)\n', (13076, 13113), False, 'import retworkx\n'), ((13262, 13304), 'retworkx.generators.directed_path_graph', 'retworkx.generators.directed_path_graph', (['(2)'], {}), '(2)\n', (13301, 13304), False, 'import retworkx\n'), ((13328, 13370), 'retworkx.generators.directed_path_graph', 'retworkx.generators.directed_path_graph', (['(4)'], {}), '(4)\n', (13367, 13370), False, 'import retworkx\n'), ((13478, 13543), 'retworkx.digraph_vf2_mapping', 'retworkx.digraph_vf2_mapping', (['graph', 'second_graph'], {'id_order': '(False)'}), '(graph, second_graph, id_order=False)\n', (13506, 13543), False, 'import retworkx\n'), ((13693, 13735), 'retworkx.generators.directed_path_graph', 'retworkx.generators.directed_path_graph', (['(2)'], {}), '(2)\n', (13732, 13735), False, 'import retworkx\n'), ((13752, 13794), 'retworkx.generators.directed_path_graph', 'retworkx.generators.directed_path_graph', (['(4)'], {}), '(4)\n', (13791, 13794), False, 'import retworkx\n'), ((13888, 13953), 'retworkx.digraph_vf2_mapping', 'retworkx.digraph_vf2_mapping', (['graph', 'second_graph'], {'id_order': 
'(False)'}), '(graph, second_graph, id_order=False)\n', (13916, 13953), False, 'import retworkx\n'), ((14082, 14124), 'retworkx.generators.directed_mesh_graph', 'retworkx.generators.directed_mesh_graph', (['(3)'], {}), '(3)\n', (14121, 14124), False, 'import retworkx\n'), ((14143, 14200), 'retworkx.digraph_vf2_mapping', 'retworkx.digraph_vf2_mapping', (['graph', 'graph'], {'id_order': '(True)'}), '(graph, graph, id_order=True)\n', (14171, 14200), False, 'import retworkx\n'), ((14364, 14384), 'retworkx.PyDiGraph', 'retworkx.PyDiGraph', ([], {}), '()\n', (14382, 14384), False, 'import retworkx\n'), ((14399, 14419), 'retworkx.PyDiGraph', 'retworkx.PyDiGraph', ([], {}), '()\n', (14417, 14419), False, 'import retworkx\n'), ((9109, 9179), 'retworkx.is_isomorphic', 'retworkx.is_isomorphic', (['graph', 'graph'], {'edge_matcher': '(lambda x, y: x == y)'}), '(graph, graph, edge_matcher=lambda x, y: x == y)\n', (9131, 9179), False, 'import retworkx\n'), ((9370, 9406), 'retworkx.is_isomorphic', 'retworkx.is_isomorphic', (['graph', 'graph'], {}), '(graph, graph)\n', (9392, 9406), False, 'import retworkx\n'), ((9765, 9842), 'retworkx.is_isomorphic', 'retworkx.is_isomorphic', (['graph', 'second_graph'], {'edge_matcher': '(lambda x, y: x == y)'}), '(graph, second_graph, edge_matcher=lambda x, y: x == y)\n', (9787, 9842), False, 'import retworkx\n'), ((10247, 10305), 'retworkx.is_isomorphic', 'retworkx.is_isomorphic', (['graph', 'second_graph'], {'id_order': '(True)'}), '(graph, second_graph, id_order=True)\n', (10269, 10305), False, 'import retworkx\n'), ((10701, 10759), 'retworkx.is_isomorphic', 'retworkx.is_isomorphic', (['graph', 'second_graph'], {'id_order': '(True)'}), '(graph, second_graph, id_order=True)\n', (10723, 10759), False, 'import retworkx\n'), ((11155, 11213), 'retworkx.is_isomorphic', 'retworkx.is_isomorphic', (['graph', 'second_graph'], {'id_order': '(True)'}), '(graph, second_graph, id_order=True)\n', (11177, 11213), False, 'import retworkx\n'), ((11503, 11540), 'retworkx.is_isomorphic', 'retworkx.is_isomorphic', (['first', 'second'], {}), '(first, second)\n', (11525, 11540), False, 'import retworkx\n'), ((11690, 11740), 'retworkx.is_isomorphic', 'retworkx.is_isomorphic', (['graph', 'graph'], {'call_limit': '(2)'}), '(graph, graph, call_limit=2)\n', (11712, 11740), False, 'import retworkx\n'), ((14535, 14608), 'retworkx.digraph_vf2_mapping', 'retworkx.digraph_vf2_mapping', (['g_a', 'g_b'], {'id_order': 'id_order', 'subgraph': '(False)'}), '(g_a, g_b, id_order=id_order, subgraph=False)\n', (14563, 14608), False, 'import retworkx\n'), ((858, 913), 'retworkx.is_isomorphic', 'retworkx.is_isomorphic', (['dag_a', 'dag_b'], {'id_order': 'id_order'}), '(dag_a, dag_b, id_order=id_order)\n', (880, 913), False, 'import retworkx\n'), ((1176, 1252), 'retworkx.is_isomorphic', 'retworkx.is_isomorphic', (['dag_a', 'dag_b', '(lambda x, y: x == y)'], {'id_order': 'id_order'}), '(dag_a, dag_b, lambda x, y: x == y, id_order=id_order)\n', (1198, 1252), False, 'import retworkx\n'), ((1764, 1819), 'retworkx.is_isomorphic', 'retworkx.is_isomorphic', (['dag_a', 'dag_b'], {'id_order': 'id_order'}), '(dag_a, dag_b, id_order=id_order)\n', (1786, 1819), False, 'import retworkx\n'), ((2323, 2378), 'retworkx.is_isomorphic', 'retworkx.is_isomorphic', (['dag_a', 'dag_b'], {'id_order': 'id_order'}), '(dag_a, dag_b, id_order=id_order)\n', (2345, 2378), False, 'import retworkx\n'), ((2918, 2994), 'retworkx.is_isomorphic', 'retworkx.is_isomorphic', (['dag_a', 'dag_b', '(lambda x, y: x == y)'], {'id_order': 'id_order'}), 
'(dag_a, dag_b, lambda x, y: x == y, id_order=id_order)\n', (2940, 2994), False, 'import retworkx\n'), ((4093, 4169), 'retworkx.is_isomorphic', 'retworkx.is_isomorphic', (['dag_a', 'dag_b', '(lambda x, y: x == y)'], {'id_order': 'id_order'}), '(dag_a, dag_b, lambda x, y: x == y, id_order=id_order)\n', (4115, 4169), False, 'import retworkx\n'), ((4716, 4809), 'retworkx.is_isomorphic', 'retworkx.is_isomorphic', (['dag_a', 'dag_b'], {'edge_matcher': '(lambda x, y: x == y)', 'id_order': 'id_order'}), '(dag_a, dag_b, edge_matcher=lambda x, y: x == y,\n id_order=id_order)\n', (4738, 4809), False, 'import retworkx\n'), ((6742, 6818), 'retworkx.is_isomorphic', 'retworkx.is_isomorphic', (['dag_a', 'dag_b', '(lambda x, y: x == y)'], {'id_order': 'id_order'}), '(dag_a, dag_b, lambda x, y: x == y, id_order=id_order)\n', (6764, 6818), False, 'import retworkx\n'), ((8693, 8713), 'copy.deepcopy', 'copy.deepcopy', (['dag_a'], {}), '(dag_a)\n', (8706, 8713), False, 'import copy\n'), ((8739, 8759), 'copy.deepcopy', 'copy.deepcopy', (['dag_b'], {}), '(dag_b)\n', (8752, 8759), False, 'import copy\n')]
|
import arcade
GAME_TITLE = 'Drawing Example'
WINDOW_HEIGHT = 600
WINDOW_WIDTH = 600
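# Redraw callback: arcade.schedule() invokes on_draw with the elapsed time since the last call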
def on_draw(delta_time):
arcade.start_render()
arcade.draw_text(GAME_TITLE, 200, 300, arcade.color.BLACK, 12)
def main():
arcade.open_window(WINDOW_WIDTH, WINDOW_HEIGHT, GAME_TITLE)
arcade.set_background_color(arcade.color.WHEAT)
arcade.schedule(on_draw, 1 / 2)
arcade.finish_render()
arcade.run()
if __name__ == '__main__':
main()
|
[
"arcade.run",
"arcade.open_window",
"arcade.schedule",
"arcade.start_render",
"arcade.set_background_color",
"arcade.draw_text",
"arcade.finish_render"
] |
[((116, 137), 'arcade.start_render', 'arcade.start_render', ([], {}), '()\n', (135, 137), False, 'import arcade\n'), ((142, 204), 'arcade.draw_text', 'arcade.draw_text', (['GAME_TITLE', '(200)', '(300)', 'arcade.color.BLACK', '(12)'], {}), '(GAME_TITLE, 200, 300, arcade.color.BLACK, 12)\n', (158, 204), False, 'import arcade\n'), ((223, 282), 'arcade.open_window', 'arcade.open_window', (['WINDOW_WIDTH', 'WINDOW_HEIGHT', 'GAME_TITLE'], {}), '(WINDOW_WIDTH, WINDOW_HEIGHT, GAME_TITLE)\n', (241, 282), False, 'import arcade\n'), ((287, 334), 'arcade.set_background_color', 'arcade.set_background_color', (['arcade.color.WHEAT'], {}), '(arcade.color.WHEAT)\n', (314, 334), False, 'import arcade\n'), ((339, 370), 'arcade.schedule', 'arcade.schedule', (['on_draw', '(1 / 2)'], {}), '(on_draw, 1 / 2)\n', (354, 370), False, 'import arcade\n'), ((375, 397), 'arcade.finish_render', 'arcade.finish_render', ([], {}), '()\n', (395, 397), False, 'import arcade\n'), ((402, 414), 'arcade.run', 'arcade.run', ([], {}), '()\n', (412, 414), False, 'import arcade\n')]
|
from functools import reduce
from math import gcd
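# lcm(a, b) = a * b / gcd(a, b); dividing b by the gcd first avoids a large intermediate product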
def lcm(a, b):
return a * (b // gcd(a, b))
result = reduce(lcm, range(1, 21), 20)
print(result)
assert result == 232792560
|
[
"math.gcd"
] |
[((88, 97), 'math.gcd', 'gcd', (['a', 'b'], {}), '(a, b)\n', (91, 97), False, 'from math import gcd\n')]
|
import numpy as np
import cv2
import tensorflow as tf
import argparse
import time
from PIL import Image
import tf1st
if __name__ == "__main__":
    # Parse command-line arguments
    parser = argparse.ArgumentParser(description='Neural Style Transfer with OpenCV and Tensorflow')
    parser.add_argument('--input-image', default="./images/federer.jpg", type=str, help='image to style')
    parser.add_argument('--style-image', default="./images/vangogh.jpg", type=str, help='styling image')
    parser.add_argument('--content-weight', default=1000, type=float, help='weight of the content image')
    parser.add_argument('--style-weight', default=0.01, type=float, help='weight of the styling image')
    parser.add_argument('--iterations', default=1000, type=int, help='number of iterations')
    parser.add_argument('--result-image', default="./images/result.jpg", type=str, help='resulting image')
    args = parser.parse_args()

    # Enable eager execution for tensorflow
    tf.enable_eager_execution()
    print("Eager execution: {}".format(tf.executing_eagerly()))

    model = tf1st.get_model()
    for layer in model.layers:
        layer.trainable = False

    # Get the style and content feature representations (from our specified intermediate layers)
    style_features, content_features = tf1st.get_feature_representations(model, args.input_image, args.style_image)
    gram_style_features = [tf1st.gram_matrix(style_feature) for style_feature in style_features]

    # Set initial image
    init_image = tf1st.load_and_process_img(args.input_image)
    init_image = tf.Variable(init_image, dtype=tf.float32)

    # Create our optimizer
    opt = tf.train.AdamOptimizer(learning_rate=5, beta1=0.99, epsilon=1e-1)

    # Store our best result
    best_loss, best_img = float('inf'), None

    # Create a nice config
    loss_weights = (args.style_weight, args.content_weight)
    cfg = {
        'model': model,
        'loss_weights': loss_weights,
        'init_image': init_image,
        'gram_style_features': gram_style_features,
        'content_features': content_features
    }

    # For displaying
    num_rows = 2
    num_cols = 5
    display_interval = args.iterations / (num_rows * num_cols)
    start_time = time.time()
    global_start = time.time()

    norm_means = np.array([103.939, 116.779, 123.68])
    min_vals = -norm_means
    max_vals = 255 - norm_means

    imgs = []
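
    # Optimization loop: one Adam step per iteration, clipping pixels back to the
    # valid (mean-normalized) range and tracking the best image seen so far.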
    for i in range(args.iterations):
        grads, all_loss = tf1st.compute_grads(cfg)
        loss, style_score, content_score = all_loss
        opt.apply_gradients([(grads, init_image)])
        clipped = tf.clip_by_value(init_image, min_vals, max_vals)
        init_image.assign(clipped)
        end_time = time.time()

        if loss < best_loss:
            # Update best loss and best image from total loss.
            best_loss = loss
            best_img = tf1st.deprocess_img(init_image.numpy())

        start_time = time.time()
        # Use the .numpy() method to get the concrete numpy array
        plot_img = init_image.numpy()
        plot_img = tf1st.deprocess_img(plot_img)
        imgs.append(plot_img)
        final_img = cv2.cvtColor(np.array(Image.fromarray(plot_img)), cv2.COLOR_BGR2RGB)
        cv2.imshow('Actual Styled Image', final_img)
        cv2.imwrite(args.result_image, final_img)
        cv2.waitKey(1)
        print('Iteration: {}'.format(i))
        print('Total loss: {:.4e}, '
              'style loss: {:.4e}, '
              'content loss: {:.4e}, '
              'time: {:.4f}s'.format(loss, style_score, content_score, time.time() - start_time))
        print('Total time: {:.4f}s'.format(time.time() - global_start))
        time.sleep(5)
    print('Done')
|
[
"argparse.ArgumentParser",
"tensorflow.clip_by_value",
"tf1st.gram_matrix",
"tensorflow.executing_eagerly",
"tensorflow.Variable",
"tf1st.get_model",
"cv2.imshow",
"cv2.imwrite",
"cv2.waitKey",
"time.sleep",
"tensorflow.enable_eager_execution",
"tf1st.load_and_process_img",
"tf1st.deprocess_img",
"time.time",
"numpy.array",
"tf1st.get_feature_representations",
"PIL.Image.fromarray",
"tensorflow.train.AdamOptimizer",
"tf1st.compute_grads"
] |
[((176, 268), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Neural Style Transfer with OpenCV and Tensorflow"""'}), "(description=\n 'Neural Style Transfer with OpenCV and Tensorflow')\n", (199, 268), False, 'import argparse\n'), ((947, 974), 'tensorflow.enable_eager_execution', 'tf.enable_eager_execution', ([], {}), '()\n', (972, 974), True, 'import tensorflow as tf\n'), ((1048, 1065), 'tf1st.get_model', 'tf1st.get_model', ([], {}), '()\n', (1063, 1065), False, 'import tf1st\n'), ((1260, 1336), 'tf1st.get_feature_representations', 'tf1st.get_feature_representations', (['model', 'args.input_image', 'args.style_image'], {}), '(model, args.input_image, args.style_image)\n', (1293, 1336), False, 'import tf1st\n'), ((1472, 1516), 'tf1st.load_and_process_img', 'tf1st.load_and_process_img', (['args.input_image'], {}), '(args.input_image)\n', (1498, 1516), False, 'import tf1st\n'), ((1532, 1573), 'tensorflow.Variable', 'tf.Variable', (['init_image'], {'dtype': 'tf.float32'}), '(init_image, dtype=tf.float32)\n', (1543, 1573), True, 'import tensorflow as tf\n'), ((1607, 1671), 'tensorflow.train.AdamOptimizer', 'tf.train.AdamOptimizer', ([], {'learning_rate': '(5)', 'beta1': '(0.99)', 'epsilon': '(0.1)'}), '(learning_rate=5, beta1=0.99, epsilon=0.1)\n', (1629, 1671), True, 'import tensorflow as tf\n'), ((2149, 2160), 'time.time', 'time.time', ([], {}), '()\n', (2158, 2160), False, 'import time\n'), ((2178, 2189), 'time.time', 'time.time', ([], {}), '()\n', (2187, 2189), False, 'import time\n'), ((2208, 2244), 'numpy.array', 'np.array', (['[103.939, 116.779, 123.68]'], {}), '([103.939, 116.779, 123.68])\n', (2216, 2244), True, 'import numpy as np\n'), ((3503, 3516), 'time.sleep', 'time.sleep', (['(5)'], {}), '(5)\n', (3513, 3516), False, 'import time\n'), ((1362, 1394), 'tf1st.gram_matrix', 'tf1st.gram_matrix', (['style_feature'], {}), '(style_feature)\n', (1379, 1394), False, 'import tf1st\n'), ((2375, 2399), 'tf1st.compute_grads', 'tf1st.compute_grads', (['cfg'], {}), '(cfg)\n', (2394, 2399), False, 'import tf1st\n'), ((2509, 2557), 'tensorflow.clip_by_value', 'tf.clip_by_value', (['init_image', 'min_vals', 'max_vals'], {}), '(init_image, min_vals, max_vals)\n', (2525, 2557), True, 'import tensorflow as tf\n'), ((2604, 2615), 'time.time', 'time.time', ([], {}), '()\n', (2613, 2615), False, 'import time\n'), ((2807, 2818), 'time.time', 'time.time', ([], {}), '()\n', (2816, 2818), False, 'import time\n'), ((2932, 2961), 'tf1st.deprocess_img', 'tf1st.deprocess_img', (['plot_img'], {}), '(plot_img)\n', (2951, 2961), False, 'import tf1st\n'), ((3077, 3121), 'cv2.imshow', 'cv2.imshow', (['"""Actual Styled Image"""', 'final_img'], {}), "('Actual Styled Image', final_img)\n", (3087, 3121), False, 'import cv2\n'), ((3126, 3167), 'cv2.imwrite', 'cv2.imwrite', (['args.result_image', 'final_img'], {}), '(args.result_image, final_img)\n', (3137, 3167), False, 'import cv2\n'), ((3172, 3186), 'cv2.waitKey', 'cv2.waitKey', (['(1)'], {}), '(1)\n', (3183, 3186), False, 'import cv2\n'), ((1012, 1034), 'tensorflow.executing_eagerly', 'tf.executing_eagerly', ([], {}), '()\n', (1032, 1034), True, 'import tensorflow as tf\n'), ((3026, 3051), 'PIL.Image.fromarray', 'Image.fromarray', (['plot_img'], {}), '(plot_img)\n', (3041, 3051), False, 'from PIL import Image\n'), ((3401, 3412), 'time.time', 'time.time', ([], {}), '()\n', (3410, 3412), False, 'import time\n'), ((3467, 3478), 'time.time', 'time.time', ([], {}), '()\n', (3476, 3478), False, 'import time\n')]
|
'''MobileNetV1 in PyTorch.
See the paper "MobileNets: Efficient Convolutional Neural Networks for Mobile Vision Applications"
for more details.
'''
import torch
import torch.nn as nn
import torch.nn.functional as F
from utils import Swish, _make_divisible
class MbBlock(nn.Module):
    '''Depthwise conv + Pointwise conv'''
    def __init__(self, in_planes, out_planes, stride=1, activation='relu'):
        super(MbBlock, self).__init__()
        self.conv1 = nn.Conv2d(in_planes, in_planes, kernel_size=3, stride=stride, padding=1, groups=in_planes, bias=False)
        self.bn1 = nn.BatchNorm2d(in_planes)
        self.conv2 = nn.Conv2d(in_planes, out_planes, kernel_size=1, stride=1, padding=0, bias=False)
        self.bn2 = nn.BatchNorm2d(out_planes)
        self.nn_act = Swish() if activation == 'swish' else nn.ReLU(inplace=True)

    def forward(self, x):
        out = self.nn_act(self.bn1(self.conv1(x)))
        out = self.nn_act(self.bn2(self.conv2(out)))
        return out


class ConvBlock(nn.Module):
    def __init__(self, in_planes, out_planes, stride=1, activation='swish'):
        super(ConvBlock, self).__init__()
        self.conv = nn.Conv2d(in_planes, out_planes, kernel_size=3, stride=stride, padding=1, bias=False)
        self.bn = nn.BatchNorm2d(out_planes)
        self.nn_act = Swish() if activation == 'swish' else nn.ReLU(inplace=True)

    def forward(self, x):
        out = self.nn_act(self.bn(self.conv(x)))
        return out
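

# Output width per layer: entry 0 configures the stem ConvBlock, the remaining
# entries configure the depthwise-separable MbBlocks built in _make_layers.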
defaultcfg = [32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32]
class MobileNetV1(nn.Module):
    def __init__(self, dataset='cifar10', cfg=None, width_mul=None, activation='relu'):
        super(MobileNetV1, self).__init__()
        if dataset == 'cifar10':
            num_classes = 10
        elif dataset == 'cifar100':
            num_classes = 100
        else:
            raise NotImplementedError
        self.num_classes = num_classes
        self.activation = activation
        if cfg is None:
            cfg = defaultcfg
        self.cfg = cfg
        self.conv_block = ConvBlock(3, cfg[0], activation=self.activation)
        self.layers = self._make_layers(in_planes=cfg[0])
        self.linear = nn.Linear(cfg[-1], num_classes)
    def _make_layers(self, in_planes):
        layers = []
        for i, x in enumerate(self.cfg[1:]):
            out_planes = x
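            # Downsample (stride 2) at blocks 2, 4, 6 and 12; keep the spatial size elsewhere.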
            if (i + 1) in [2, 4, 6, 12]:
                stride = 2
            else:
                stride = 1
            layers.append(MbBlock(in_planes, out_planes, stride, activation=self.activation))
            in_planes = out_planes
        return nn.Sequential(*layers)

    def forward(self, x):
        out = self.conv_block(x)
        out = self.layers(out)
        out = F.avg_pool2d(out, 2)
        out = out.view(out.size(0), -1)
        out = self.linear(out)
        return out
|
[
"torch.nn.ReLU",
"torch.nn.Sequential",
"torch.nn.functional.avg_pool2d",
"torch.nn.Conv2d",
"torch.nn.BatchNorm2d",
"utils.Swish",
"torch.nn.Linear"
] |
[((464, 570), 'torch.nn.Conv2d', 'nn.Conv2d', (['in_planes', 'in_planes'], {'kernel_size': '(3)', 'stride': 'stride', 'padding': '(1)', 'groups': 'in_planes', 'bias': '(False)'}), '(in_planes, in_planes, kernel_size=3, stride=stride, padding=1,\n groups=in_planes, bias=False)\n', (473, 570), True, 'import torch.nn as nn\n'), ((586, 611), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['in_planes'], {}), '(in_planes)\n', (600, 611), True, 'import torch.nn as nn\n'), ((633, 718), 'torch.nn.Conv2d', 'nn.Conv2d', (['in_planes', 'out_planes'], {'kernel_size': '(1)', 'stride': '(1)', 'padding': '(0)', 'bias': '(False)'}), '(in_planes, out_planes, kernel_size=1, stride=1, padding=0, bias=False\n )\n', (642, 718), True, 'import torch.nn as nn\n'), ((733, 759), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['out_planes'], {}), '(out_planes)\n', (747, 759), True, 'import torch.nn as nn\n'), ((1163, 1252), 'torch.nn.Conv2d', 'nn.Conv2d', (['in_planes', 'out_planes'], {'kernel_size': '(3)', 'stride': 'stride', 'padding': '(1)', 'bias': '(False)'}), '(in_planes, out_planes, kernel_size=3, stride=stride, padding=1,\n bias=False)\n', (1172, 1252), True, 'import torch.nn as nn\n'), ((1267, 1293), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['out_planes'], {}), '(out_planes)\n', (1281, 1293), True, 'import torch.nn as nn\n'), ((2200, 2231), 'torch.nn.Linear', 'nn.Linear', (['cfg[-1]', 'num_classes'], {}), '(cfg[-1], num_classes)\n', (2209, 2231), True, 'import torch.nn as nn\n'), ((2620, 2642), 'torch.nn.Sequential', 'nn.Sequential', (['*layers'], {}), '(*layers)\n', (2633, 2642), True, 'import torch.nn as nn\n'), ((2748, 2768), 'torch.nn.functional.avg_pool2d', 'F.avg_pool2d', (['out', '(2)'], {}), '(out, 2)\n', (2760, 2768), True, 'import torch.nn.functional as F\n'), ((783, 790), 'utils.Swish', 'Swish', ([], {}), '()\n', (788, 790), False, 'from utils import Swish, _make_divisible\n'), ((821, 842), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (828, 842), True, 'import torch.nn as nn\n'), ((1317, 1324), 'utils.Swish', 'Swish', ([], {}), '()\n', (1322, 1324), False, 'from utils import Swish, _make_divisible\n'), ((1355, 1376), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (1362, 1376), True, 'import torch.nn as nn\n')]
|
import logging
from typing import Optional
from django.conf import settings
from django.db.models.query import QuerySet
from rest_framework.exceptions import ValidationError
import subscriptions.interfaces as interface
from .models import Subscription
from .serializers import SubscriptionsSerializer
from utils.base_services import BaseService
logger = logging.getLogger(__name__)
class SubscriptionsService(BaseService):
    instance = Subscription
    serializer_class = SubscriptionsSerializer

    # REST API logic
    def list_subs(self) -> dict:
        logger.info('SubscriptionsService list_subs method called')
        queryset = self.get_queryset()
        page = self.paginate_queryset(queryset)
        users = [interface.UserInterface().get_user(pk=obj.author, request=self.request) for obj in page]  # noqa
        if page is not None:
            serializer = self.get_serializer(users, many=True)
            return self.get_paginated_data(serializer.data)
        serializer = SubscriptionsSerializer(
            [interface.UserInterface().get_user(pk=obj.author, request=self.request) for obj in queryset],  # noqa
            many=True,
        )
        return serializer.data

    def subscribe(self, pk: int = None) -> dict:
        logger.info('SubscriptionsService subscribe method called')
        author = interface.UserInterface().get_user(pk=pk, request=self.request)
        serializer = self.get_serializer(data=author)
        serializer.is_valid(raise_exception=True)
        serializer.save()
        author['is_subscribed'] = True
        serializer.instance = author
        return serializer.data

    def unsubscribe(self, pk: int = None) -> bool:
        logger.info('SubscriptionsService unsubscribe method called')
        self._validate_unsubscribe_request(self.request.user.id, pk)
        self.instance.objects.get(follower=self.request.user.id, author=pk).delete()
        return True

    # APP API logic
    def check_is_subscribed(self, user: int, author: int) -> bool:
        logger.info('SubscriptionsService check_is_subscribed method called')
        context = {'follower': user, 'author': author}
        return self.check_is_in(context)

    # Interface logic
    def get_author_recipes(self, author: int) -> QuerySet:
        logger.info('SubscriptionsService get_recipes method called')
        return interface.RecipesInrerface().get_author_recipes(author=author)

    def get_count_author_recipes(self, author: int) -> int:
        logger.info('SubscriptionsService get_count_recipes method called')
        return interface.RecipesInrerface().get_count_author_recipes(author=author)

    # Service logic
    def get_queryset(self):
        queryset = super().get_queryset()
        return queryset.filter(follower=self.request.user.id)

    # local functions
    def _validate_unsubscribe_request(self, follower: int, author: int) -> Optional[Exception]:
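        # Reject self-unsubscription and unsubscribing without an existing subscription.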
        if follower == author:
            raise ValidationError(
                {'errors': settings.ERROR_MESSAGE.get('self_unsubscription')}
            )
        if not self.instance.objects.filter(follower=follower, author=author).exists():
            raise ValidationError(
                {'errors': settings.ERROR_MESSAGE.get('not_subscribe')}
            )
class SubscriptionsAdminService:
    def get_user(self, pk: int) -> QuerySet:
        logger.info('SubscriptionsAdminService get_user method called')
        return interface.UsersAdminInterface().get_user(pk=pk)

    def get_users(self) -> QuerySet:
        logger.info('SubscriptionsAdminService get_users method called')
        return interface.UsersAdminInterface().get_users()
|
[
"subscriptions.interfaces.RecipesInrerface",
"django.conf.settings.ERROR_MESSAGE.get",
"logging.getLogger",
"subscriptions.interfaces.UsersAdminInterface",
"subscriptions.interfaces.UserInterface"
] |
[((357, 384), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (374, 384), False, 'import logging\n'), ((1339, 1364), 'subscriptions.interfaces.UserInterface', 'interface.UserInterface', ([], {}), '()\n', (1362, 1364), True, 'import subscriptions.interfaces as interface\n'), ((2362, 2390), 'subscriptions.interfaces.RecipesInrerface', 'interface.RecipesInrerface', ([], {}), '()\n', (2388, 2390), True, 'import subscriptions.interfaces as interface\n'), ((2576, 2604), 'subscriptions.interfaces.RecipesInrerface', 'interface.RecipesInrerface', ([], {}), '()\n', (2602, 2604), True, 'import subscriptions.interfaces as interface\n'), ((3450, 3481), 'subscriptions.interfaces.UsersAdminInterface', 'interface.UsersAdminInterface', ([], {}), '()\n', (3479, 3481), True, 'import subscriptions.interfaces as interface\n'), ((3623, 3654), 'subscriptions.interfaces.UsersAdminInterface', 'interface.UsersAdminInterface', ([], {}), '()\n', (3652, 3654), True, 'import subscriptions.interfaces as interface\n'), ((730, 755), 'subscriptions.interfaces.UserInterface', 'interface.UserInterface', ([], {}), '()\n', (753, 755), True, 'import subscriptions.interfaces as interface\n'), ((3010, 3059), 'django.conf.settings.ERROR_MESSAGE.get', 'settings.ERROR_MESSAGE.get', (['"""self_unsubscription"""'], {}), "('self_unsubscription')\n", (3036, 3059), False, 'from django.conf import settings\n'), ((3225, 3268), 'django.conf.settings.ERROR_MESSAGE.get', 'settings.ERROR_MESSAGE.get', (['"""not_subscribe"""'], {}), "('not_subscribe')\n", (3251, 3268), False, 'from django.conf import settings\n'), ((1039, 1064), 'subscriptions.interfaces.UserInterface', 'interface.UserInterface', ([], {}), '()\n', (1062, 1064), True, 'import subscriptions.interfaces as interface\n')]
|
import os
import numpy as np
import pandas as pd
from datetime import datetime, timedelta
import re
date_now = datetime.now()
#
# data = pd.read_csv('/home/haishuowang/spider_data/2019-07-17/兰格钢铁网', sep='|', header=None)
# data.columns = ['Title', 'w_time', 'n_time', 'Link', 'Info']
# data = data[~data['Title'].duplicated(keep='first')]
#
# # x =
# data['deal_title'] = data.apply(lambda x: x['Title'].replace(f'{int(date_now.month)}月', '')
# .replace(f'{int(date_now.day)}日', '')
# , axis=1)
#
mid_word = ['稳', '→', '震荡', '平', ]
buy_word = ['涨', '上调', '↑', '上行', '强势', '走高']
sell_word = ['跌', '降', '下调', '探低', '↓', '下行', '弱势', '走低']
# # 方大特钢
#
#
file_name_list = ['全球金属网', '兰格钢铁网', '大宗内参', '海鑫钢网', '瑞达期货', '生意社', '西本新干线']
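

# Label a piece of text by keyword lookup: return `label` when any keyword from
# key_word occurs in x, NaN otherwise (0 = flat, 1 = bullish, -1 = bearish below).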
def contain(x, key_word, label=1):
    for key in key_word:
        if key in x:
            return label
    else:
        return np.nan


def load_spot_data(read_path):
    file_data = pd.read_csv(read_path, sep='|', header=None)
    file_data.columns = ['Title', 'w_time', 'n_time', 'Link', 'Info']
    file_data.index = pd.to_datetime(file_data['n_time'])
    return file_data


def filer_target_word(raw_df):
    target_df = raw_df[raw_df['Title'].str.contains('钢')]
    return target_df
def get_file_pos(file_name):
    root_path = '/home/haishuowang/temp'
    date_list = sorted(os.listdir(root_path))
    # file_name = '兰格钢铁网'
    data_list = []
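    # Walk each dated folder; when this source published that day, score every
    # headline and summary against the three keyword lists.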
    for target_date in date_list:
        read_path = f'{root_path}/{target_date}/{file_name}'
        if os.path.exists(f'{root_path}/{target_date}/{file_name}'):
            file_data = pd.read_csv(read_path, sep='|', header=None)
            file_data.columns = ['Title', 'w_time', 'n_time', 'Link', 'Info']
            file_data.index = pd.to_datetime(file_data['n_time']) + timedelta(minutes=10)
            file_data = file_data.sort_index()
            mid = file_data['Title'].apply(lambda x: contain(x, mid_word, label=0))
            mid.name = 'mid'
            buy = file_data['Title'].apply(lambda x: contain(x, buy_word, label=1))
            buy.name = 'buy'
            sell = file_data['Title'].apply(lambda x: contain(x, sell_word, label=-1))
            sell.name = 'sell'
            mid_info = file_data['Info'].apply(lambda x: contain(x, mid_word, label=0))
            mid_info.name = 'mid_info'
            buy_info = file_data['Info'].apply(lambda x: contain(x, buy_word, label=1))
            buy_info.name = 'buy_info'
            sell_info = file_data['Info'].apply(lambda x: contain(x, sell_word, label=-1))
            sell_info.name = 'sell_info'
            # no_info = mid_info.isna() & buy_info.isna() & sell_info.isna()
            part_info = pd.concat([file_data['Title'], mid, buy, sell, mid_info, buy_info, sell_info], axis=1)
            data_list.append(part_info)
        else:
            print(target_date)
            pass
    all_info = pd.concat(data_list, axis=0)
    all_info.to_csv(f'/home/haishuowang/PycharmProjects/{file_name}.csv')
    return all_info
def get_spider_file_pos(file_name='生意社'):
    root_path = '/home/haishuowang/spider_data'
    date_list = sorted([x for x in os.listdir(root_path) if len(x) == 10 and '-' in x and x > '2019-07-18'])
    data_list = []
    for target_date in date_list:
        read_path = f'/home/haishuowang/spider_data/{target_date}/{file_name}'
        if os.path.exists(f'{root_path}/{target_date}/{file_name}'):
            file_data = load_spot_data(read_path)
            file_data = filer_target_word(file_data)
            file_data.index = pd.to_datetime(file_data['n_time']) + timedelta(minutes=10)
            file_data = file_data.sort_index()
            mid = file_data['Title'].apply(lambda x: contain(x, mid_word, label=0))
            mid.name = 'mid'
            buy = file_data['Title'].apply(lambda x: contain(x, buy_word, label=1))
            buy.name = 'buy'
            sell = file_data['Title'].apply(lambda x: contain(x, sell_word, label=-1))
            sell.name = 'sell'
            mid_info = file_data['Info'].apply(lambda x: contain(x, mid_word, label=0))
            mid_info.name = 'mid_info'
            buy_info = file_data['Info'].apply(lambda x: contain(x, buy_word, label=1))
            buy_info.name = 'buy_info'
            sell_info = file_data['Info'].apply(lambda x: contain(x, sell_word, label=-1))
            sell_info.name = 'sell_info'
            part_info = pd.concat([file_data['Title'], mid, buy, sell, mid_info, buy_info, sell_info], axis=1)
            data_list.append(part_info)
        else:
            print(target_date)
            pass
    all_info = pd.concat(data_list, axis=0)
    all_info.to_csv(f'/home/haishuowang/PycharmProjects/{file_name}_spider.csv')
    return all_info
# data_0717 = load_spot_data('/home/haishuowang/spider_data/2019-07-17/生意社')
# data_0719 = load_spot_data('/home/haishuowang/spider_data/2019-07-19/生意社')
# # data_0720 = load_spot_data('/home/haishuowang/spider_data/2019-07-20/生意社')
# data_0722 = load_spot_data('/home/haishuowang/spider_data/2019-07-22/生意社')
# data_0723 = load_spot_data('/home/haishuowang/spider_data/2019-07-23/生意社')
#
# data_0717 = filer_target_word(data_0717)
# data_0719 = filer_target_word(data_0719)
# data_0722 = filer_target_word(data_0722)
# data_0723 = filer_target_word(data_0723)
# all_info = get_spider_file_pos(file_name='生意社')
# for file_name in file_name_list:
# all_info = get_file_pos(file_name)
# if __name__ == '__main__':
def deal_jd_data(fut_name='鸡蛋', file_name='金谷高科'):
    root_path = f'/home/haishuowang/PycharmProjects/dat_whs/{fut_name}/temp'
    target_date_list = sorted([x for x in os.listdir(root_path) if x >= '2019-06-25'])
    print(target_date_list)
    result_list = []
    for target_date in target_date_list:
        if os.path.exists(f'{root_path}/{target_date}/{file_name}'):
            print('_______________')
            print(target_date)
            info_data = load_spot_data(f'{root_path}/{target_date}/{file_name}')
            print(info_data)
            # print(info_data[info_data['Title'].str.contains('辽宁')])
            # result_list.append(info_data[info_data['Title'].str.contains('辽宁')])
        else:
            pass
    return pd.concat(result_list, axis=0)


def deal_cf_data(fut_name='棉花', file_name='金谷高科'):
    root_path = f'/home/haishuowang/PycharmProjects/dat_whs/{fut_name}/temp'
    info_data = load_spot_data(f'{root_path}/{file_name}')
    return info_data.sort_index().drop_duplicates()
# '/home/haishuowang/PycharmProjects/dat_whs/甲醛/temp/生意社'
# info_data = deal_cf_data(fut_name='甲醛', file_name='生意社')
fut_name = '甲醛'
file_name = '生意社'
info_data = deal_cf_data(fut_name, file_name)
def title_filter(info_sr):
    print(info_sr)
    title = info_sr.iloc[0]
    print(info_sr.name)
    month_t, day_t = pd.to_datetime(info_sr.name).strftime('%m/%d').split('/')
    print(title, month_t, day_t)
    date_str = f'{str(int(month_t))}月{str(int(day_t))}日'
    if date_str in title:
        return True
    else:
        return False
info_data = info_data[info_data[['Title']].apply(title_filter, axis=1)]
mid = info_data['Title'].apply(lambda x: contain(x, mid_word, label=0))
mid.name = 'mid'
buy = info_data['Title'].apply(lambda x: contain(x, buy_word, label=1))
buy.name = 'buy'
sell = info_data['Title'].apply(lambda x: contain(x, sell_word, label=-1))
sell.name = 'sell'
mid_info = info_data['Info'].apply(lambda x: contain(x, mid_word, label=0))
mid_info.name = 'mid_info'
buy_info = info_data['Info'].apply(lambda x: contain(x, buy_word, label=1))
buy_info.name = 'buy_info'
sell_info = info_data['Info'].apply(lambda x: contain(x, sell_word, label=-1))
sell_info.name = 'sell_info'
part_info = pd.concat([info_data['Title'], mid, buy, sell, mid_info, buy_info, sell_info], axis=1)
part_info['pos_1'] = part_info[['mid', 'buy', 'sell']].sum(1)
part_info['pos_2'] = part_info[['mid', 'buy', 'sell', 'mid_info', 'buy_info', 'sell_info']].sum(1)
part_info.to_csv(f'~/PycharmProjects/dat_whs/{fut_name}_{file_name}.csv', sep='|')
# result_list = []
# for n_time, part_a in a.iterrows():
# city_info_list = part_a['Info'].split(':')[1].split('。')[:-1]
# for city_info in city_info_list:
# # print(city_info)
# city_name = city_info.split('市')[0]
# price_info = city_info.split('市')[1].split(',')[0]
# price_num = float(re.findall('(?<=为).*?(?=元)', price_info)[0])
# print(n_time, city_name, price_num)
# result_list.append([n_time, city_name, price_num])
#
# result_info = pd.DataFrame(result_list, columns=['n_time', 'city_name', 'price_num']).set_index(['n_time', 'city_name'])
# print(result_info.xs('大连', level=1))
|
[
"os.listdir",
"pandas.read_csv",
"os.path.exists",
"pandas.to_datetime",
"datetime.timedelta",
"datetime.datetime.now",
"pandas.concat"
] |
[((112, 126), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (124, 126), False, 'from datetime import datetime, timedelta\n'), ((7761, 7851), 'pandas.concat', 'pd.concat', (["[info_data['Title'], mid, buy, sell, mid_info, buy_info, sell_info]"], {'axis': '(1)'}), "([info_data['Title'], mid, buy, sell, mid_info, buy_info,\n sell_info], axis=1)\n", (7770, 7851), True, 'import pandas as pd\n'), ((981, 1025), 'pandas.read_csv', 'pd.read_csv', (['read_path'], {'sep': '"""|"""', 'header': 'None'}), "(read_path, sep='|', header=None)\n", (992, 1025), True, 'import pandas as pd\n'), ((1118, 1153), 'pandas.to_datetime', 'pd.to_datetime', (["file_data['n_time']"], {}), "(file_data['n_time'])\n", (1132, 1153), True, 'import pandas as pd\n'), ((2938, 2966), 'pandas.concat', 'pd.concat', (['data_list'], {'axis': '(0)'}), '(data_list, axis=0)\n', (2947, 2966), True, 'import pandas as pd\n'), ((4666, 4694), 'pandas.concat', 'pd.concat', (['data_list'], {'axis': '(0)'}), '(data_list, axis=0)\n', (4675, 4694), True, 'import pandas as pd\n'), ((6267, 6297), 'pandas.concat', 'pd.concat', (['result_list'], {'axis': '(0)'}), '(result_list, axis=0)\n', (6276, 6297), True, 'import pandas as pd\n'), ((1382, 1403), 'os.listdir', 'os.listdir', (['root_path'], {}), '(root_path)\n', (1392, 1403), False, 'import os\n'), ((1557, 1613), 'os.path.exists', 'os.path.exists', (['f"""{root_path}/{target_date}/{file_name}"""'], {}), "(f'{root_path}/{target_date}/{file_name}')\n", (1571, 1613), False, 'import os\n'), ((3405, 3461), 'os.path.exists', 'os.path.exists', (['f"""{root_path}/{target_date}/{file_name}"""'], {}), "(f'{root_path}/{target_date}/{file_name}')\n", (3419, 3461), False, 'import os\n'), ((5836, 5892), 'os.path.exists', 'os.path.exists', (['f"""{root_path}/{target_date}/{file_name}"""'], {}), "(f'{root_path}/{target_date}/{file_name}')\n", (5850, 5892), False, 'import os\n'), ((1639, 1683), 'pandas.read_csv', 'pd.read_csv', (['read_path'], {'sep': '"""|"""', 'header': 'None'}), "(read_path, sep='|', header=None)\n", (1650, 1683), True, 'import pandas as pd\n'), ((2733, 2823), 'pandas.concat', 'pd.concat', (["[file_data['Title'], mid, buy, sell, mid_info, buy_info, sell_info]"], {'axis': '(1)'}), "([file_data['Title'], mid, buy, sell, mid_info, buy_info,\n sell_info], axis=1)\n", (2742, 2823), True, 'import pandas as pd\n'), ((4461, 4551), 'pandas.concat', 'pd.concat', (["[file_data['Title'], mid, buy, sell, mid_info, buy_info, sell_info]"], {'axis': '(1)'}), "([file_data['Title'], mid, buy, sell, mid_info, buy_info,\n sell_info], axis=1)\n", (4470, 4551), True, 'import pandas as pd\n'), ((1792, 1827), 'pandas.to_datetime', 'pd.to_datetime', (["file_data['n_time']"], {}), "(file_data['n_time'])\n", (1806, 1827), True, 'import pandas as pd\n'), ((1830, 1851), 'datetime.timedelta', 'timedelta', ([], {'minutes': '(10)'}), '(minutes=10)\n', (1839, 1851), False, 'from datetime import datetime, timedelta\n'), ((3188, 3209), 'os.listdir', 'os.listdir', (['root_path'], {}), '(root_path)\n', (3198, 3209), False, 'import os\n'), ((3597, 3632), 'pandas.to_datetime', 'pd.to_datetime', (["file_data['n_time']"], {}), "(file_data['n_time'])\n", (3611, 3632), True, 'import pandas as pd\n'), ((3635, 3656), 'datetime.timedelta', 'timedelta', ([], {'minutes': '(10)'}), '(minutes=10)\n', (3644, 3656), False, 'from datetime import datetime, timedelta\n'), ((5690, 5711), 'os.listdir', 'os.listdir', (['root_path'], {}), '(root_path)\n', (5700, 5711), False, 'import os\n'), ((6861, 6889), 'pandas.to_datetime', 'pd.to_datetime', (['info_sr.name'], {}), '(info_sr.name)\n', (6875, 6889), True, 'import pandas as pd\n')]
|
"""This module provides base class of [Node](mkapi.core.node.Node) and
[Module](mkapi.core.module.Module)."""
from dataclasses import dataclass, field
from typing import Any, Iterator, List, Union
from mkapi.core.base import Base, Type
from mkapi.core.docstring import Docstring, get_docstring
from mkapi.core.object import (get_origin, get_qualname,
                               get_sourcefile_and_lineno,
                               split_prefix_and_name)
from mkapi.core.signature import Signature, get_signature
"a.b.c".rpartition(".")
@dataclass
class Object(Base):
"""Object class represents an object.
Args:
name: Object name.
prefix: Object prefix.
qualname: Qualified name.
kind: Object kind such as 'class', 'function', *etc.*
signature: Signature if object is module or callable.
Attributes:
id: ID attribute of HTML.
type: Type for missing Returns and Yields sections.
"""
prefix: str = ""
qualname: str = ""
kind: str = ""
signature: Signature = field(default_factory=Signature)
module: str = field(init=False)
markdown: str = field(init=False)
id: str = field(init=False)
type: Type = field(default_factory=Type, init=False)
def __post_init__(self):
from mkapi.core import linker
self.id = self.name
if self.prefix:
self.id = ".".join([self.prefix, self.name])
if not self.qualname:
self.module = self.id
else:
self.module = self.id[: -len(self.qualname) - 1]
if not self.markdown:
name = linker.link(self.name, self.id)
if self.prefix:
prefix = linker.link(self.prefix, self.prefix)
self.markdown = ".".join([prefix, name])
else:
self.markdown = name
def __repr__(self):
class_name = self.__class__.__name__
id = self.id
return f"{class_name}({id!r})"
def __iter__(self) -> Iterator[Base]:
yield from self.type
yield self
@dataclass
class Tree:
"""Tree class. This class is the base class of [Node](mkapi.core.node.Node)
and [Module](mkapi.core.module.Module).
Args:
obj: Object.
Attributes:
sourcefile: Source file path.
lineno: Line number.
object: Object instance.
docstring: Docstring instance.
parent: Parent instance.
members: Member instances.
"""
obj: Any = field()
sourcefile: str = field(init=False)
lineno: int = field(init=False)
object: Object = field(init=False)
docstring: Docstring = field(init=False)
parent: Any = field(default=None, init=False)
members: List[Any] = field(init=False)
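
    # Eagerly resolve the wrapped object, collect its metadata, and build the
    # member subtrees, wiring each child's parent pointer back to self.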
    def __post_init__(self):
        obj = get_origin(self.obj)
        self.sourcefile, self.lineno = get_sourcefile_and_lineno(obj)
        prefix, name = split_prefix_and_name(obj)
        qualname = get_qualname(obj)
        kind = self.get_kind()
        signature = get_signature(obj)
        self.object = Object(
            prefix=prefix, name=name, qualname=qualname, kind=kind, signature=signature,
        )
        self.docstring = get_docstring(obj)
        self.obj = obj
        self.members = self.get_members()
        for member in self.members:
            member.parent = self

    def __repr__(self):
        class_name = self.__class__.__name__
        id = self.object.id
        sections = len(self.docstring.sections)
        numbers = len(self.members)
        return f"{class_name}({id!r}, num_sections={sections}, num_members={numbers})"

    def __getitem__(self, index: Union[int, str, List[str]]):
        """Returns a member {class} instance.

        If `index` is str, a member Tree instance whose name is equal to `index`
        is returned.

        Raises:
            IndexError: If no member found.
        """
        if isinstance(index, list):
            node = self
            for name in index:
                node = node[name]
            return node
        if isinstance(index, int):
            return self.members[index]
        if isinstance(index, str) and "." in index:
            names = index.split(".")
            return self[names]
        for member in self.members:
            if member.object.name == index:
                return member
        raise IndexError

    def __len__(self):
        return len(self.members)

    def __contains__(self, name):
        for member in self.members:
            if member.object.name == name:
                return True
        return False

    def get_kind(self) -> str:
        """Returns kind of self."""
        raise NotImplementedError

    def get_members(self) -> List["Tree"]:
        """Returns a list of members."""
        raise NotImplementedError

    def get_markdown(self) -> str:
        """Returns a Markdown source for docstring of self."""
        raise NotImplementedError

    def walk(self) -> Iterator["Tree"]:
        """Yields all members."""
        yield self
        for member in self.members:
            yield from member.walk()
|
[
"mkapi.core.signature.get_signature",
"mkapi.core.object.get_origin",
"mkapi.core.linker.link",
"mkapi.core.docstring.get_docstring",
"dataclasses.field",
"mkapi.core.object.get_qualname",
"mkapi.core.object.split_prefix_and_name",
"mkapi.core.object.get_sourcefile_and_lineno"
] |
[((1059, 1091), 'dataclasses.field', 'field', ([], {'default_factory': 'Signature'}), '(default_factory=Signature)\n', (1064, 1091), False, 'from dataclasses import dataclass, field\n'), ((1110, 1127), 'dataclasses.field', 'field', ([], {'init': '(False)'}), '(init=False)\n', (1115, 1127), False, 'from dataclasses import dataclass, field\n'), ((1148, 1165), 'dataclasses.field', 'field', ([], {'init': '(False)'}), '(init=False)\n', (1153, 1165), False, 'from dataclasses import dataclass, field\n'), ((1180, 1197), 'dataclasses.field', 'field', ([], {'init': '(False)'}), '(init=False)\n', (1185, 1197), False, 'from dataclasses import dataclass, field\n'), ((1215, 1254), 'dataclasses.field', 'field', ([], {'default_factory': 'Type', 'init': '(False)'}), '(default_factory=Type, init=False)\n', (1220, 1254), False, 'from dataclasses import dataclass, field\n'), ((2506, 2513), 'dataclasses.field', 'field', ([], {}), '()\n', (2511, 2513), False, 'from dataclasses import dataclass, field\n'), ((2536, 2553), 'dataclasses.field', 'field', ([], {'init': '(False)'}), '(init=False)\n', (2541, 2553), False, 'from dataclasses import dataclass, field\n'), ((2572, 2589), 'dataclasses.field', 'field', ([], {'init': '(False)'}), '(init=False)\n', (2577, 2589), False, 'from dataclasses import dataclass, field\n'), ((2611, 2628), 'dataclasses.field', 'field', ([], {'init': '(False)'}), '(init=False)\n', (2616, 2628), False, 'from dataclasses import dataclass, field\n'), ((2656, 2673), 'dataclasses.field', 'field', ([], {'init': '(False)'}), '(init=False)\n', (2661, 2673), False, 'from dataclasses import dataclass, field\n'), ((2692, 2723), 'dataclasses.field', 'field', ([], {'default': 'None', 'init': '(False)'}), '(default=None, init=False)\n', (2697, 2723), False, 'from dataclasses import dataclass, field\n'), ((2749, 2766), 'dataclasses.field', 'field', ([], {'init': '(False)'}), '(init=False)\n', (2754, 2766), False, 'from dataclasses import dataclass, field\n'), ((2811, 2831), 'mkapi.core.object.get_origin', 'get_origin', (['self.obj'], {}), '(self.obj)\n', (2821, 2831), False, 'from mkapi.core.object import get_origin, get_qualname, get_sourcefile_and_lineno, split_prefix_and_name\n'), ((2871, 2901), 'mkapi.core.object.get_sourcefile_and_lineno', 'get_sourcefile_and_lineno', (['obj'], {}), '(obj)\n', (2896, 2901), False, 'from mkapi.core.object import get_origin, get_qualname, get_sourcefile_and_lineno, split_prefix_and_name\n'), ((2925, 2951), 'mkapi.core.object.split_prefix_and_name', 'split_prefix_and_name', (['obj'], {}), '(obj)\n', (2946, 2951), False, 'from mkapi.core.object import get_origin, get_qualname, get_sourcefile_and_lineno, split_prefix_and_name\n'), ((2971, 2988), 'mkapi.core.object.get_qualname', 'get_qualname', (['obj'], {}), '(obj)\n', (2983, 2988), False, 'from mkapi.core.object import get_origin, get_qualname, get_sourcefile_and_lineno, split_prefix_and_name\n'), ((3040, 3058), 'mkapi.core.signature.get_signature', 'get_signature', (['obj'], {}), '(obj)\n', (3053, 3058), False, 'from mkapi.core.signature import Signature, get_signature\n'), ((3213, 3231), 'mkapi.core.docstring.get_docstring', 'get_docstring', (['obj'], {}), '(obj)\n', (3226, 3231), False, 'from mkapi.core.docstring import Docstring, get_docstring\n'), ((1621, 1652), 'mkapi.core.linker.link', 'linker.link', (['self.name', 'self.id'], {}), '(self.name, self.id)\n', (1632, 1652), False, 'from mkapi.core import linker\n'), ((1706, 1743), 'mkapi.core.linker.link', 'linker.link', (['self.prefix', 'self.prefix'], {}), '(self.prefix, self.prefix)\n', (1717, 1743), False, 'from mkapi.core import linker\n')]
|
from functools import partial
import importlib
import pytest
from pathlib import Path
import zarr
import dask.array as dsa
import dask
import dask.core
import xarray
import numpy
from rechunker import api
_DIMENSION_KEY = "_ARRAY_DIMENSIONS"
def requires_import(module, *args):
    try:
        importlib.import_module(module)
    except ImportError:
        skip = True
    else:
        skip = False
    mark = pytest.mark.skipif(skip, reason=f"requires {module}")
    return pytest.param(*args, marks=mark)
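

# Each requires_* helper builds a pytest.param that is skipped when the
# corresponding optional execution backend is not installed.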
requires_beam = partial(requires_import, "apache_beam")
requires_prefect = partial(requires_import, "prefect")
requires_pywren = partial(requires_import, "pywren_ibm_cloud")
@pytest.fixture(params=[(8000, 200), {"y": 8000, "x": 200}])
def target_chunks(request):
    return request.param
def test_invalid_executor():
    with pytest.raises(ValueError, match="unrecognized executor"):
        api._get_executor("unknown")
@pytest.mark.parametrize("shape", [(100, 50)])
@pytest.mark.parametrize("source_chunks", [(10, 50)])
@pytest.mark.parametrize("target_chunks", [(20, 10)])
@pytest.mark.parametrize("max_mem", ["10MB"])
@pytest.mark.parametrize("executor", ["dask"])
def test_rechunk_dataset(
    tmp_path, shape, source_chunks, target_chunks, max_mem, executor
):
    target_store = str(tmp_path / "target.zarr")
    temp_store = str(tmp_path / "temp.zarr")

    a = numpy.arange(numpy.prod(shape)).reshape(shape).astype("f4")
    a[-1] = numpy.nan
    ds = xarray.Dataset(
        dict(
            a=xarray.DataArray(
                a, dims=["x", "y"], attrs={"a1": 1, "a2": [1, 2, 3], "a3": "x"}
            ),
            b=xarray.DataArray(numpy.ones(shape[0]), dims=["x"]),
            c=xarray.DataArray(numpy.ones(shape[1]), dims=["y"]),
        ),
        coords=dict(
            cx=xarray.DataArray(numpy.ones(shape[0]), dims=["x"]),
            cy=xarray.DataArray(numpy.ones(shape[1]), dims=["y"]),
        ),
        attrs={"a1": 1, "a2": [1, 2, 3], "a3": "x"},
    )
    ds = ds.chunk(chunks=dict(zip(["x", "y"], source_chunks)))
    options = dict(
        a=dict(
            compressor=zarr.Blosc(cname="zstd"),
            dtype="int32",
            scale_factor=0.1,
            _FillValue=-9999,
        )
    )
    rechunked = api.rechunk(
        ds,
        target_chunks=dict(a=target_chunks, b=target_chunks[:1]),
        max_mem=max_mem,
        target_store=target_store,
        target_options=options,
        temp_store=temp_store,
        executor=executor,
    )
    assert isinstance(rechunked, api.Rechunked)
    rechunked.execute()

    # Validate encoded variables
    dst = xarray.open_zarr(target_store, decode_cf=False)
    assert dst.a.dtype == options["a"]["dtype"]
    assert all(dst.a.values[-1] == options["a"]["_FillValue"])
    assert dst.a.encoding["compressor"] is not None

    # Validate decoded variables
    dst = xarray.open_zarr(target_store, decode_cf=True)
    assert dst.a.data.chunksize == target_chunks
    assert dst.b.data.chunksize == target_chunks[:1]
    assert dst.c.data.chunksize == source_chunks[1:]
    xarray.testing.assert_equal(ds.compute(), dst.compute())
    assert ds.attrs == dst.attrs
@pytest.mark.parametrize("shape", [(8000, 8000)])
@pytest.mark.parametrize("source_chunks", [(200, 8000)])
@pytest.mark.parametrize("dtype", ["f4"])
@pytest.mark.parametrize("max_mem", [25600000, "25.6MB"])
@pytest.mark.parametrize(
    "executor",
    [
        "dask",
        "python",
        requires_beam("beam"),
        requires_prefect("prefect"),
        requires_pywren("pywren"),
    ],
)
@pytest.mark.parametrize(
    "dims,target_chunks",
    [
        (None, (8000, 200)),
        # would be nice to support this syntax eventually
        pytest.param(None, (-1, 200), marks=pytest.mark.xfail),
        (["y", "x"], (8000, 200)),
        (["y", "x"], {"y": 8000, "x": 200}),
        # can't infer missing dimension chunk specification
        pytest.param(["y", "x"], {"x": 200}, marks=pytest.mark.xfail),
        # can't use dict syntax without array dims
        pytest.param(None, {"y": 8000, "x": 200}, marks=pytest.mark.xfail),
    ],
)
def test_rechunk_array(
    tmp_path, shape, source_chunks, dtype, dims, target_chunks, max_mem, executor
):
    ### Create source array ###
    store_source = str(tmp_path / "source.zarr")
    source_array = zarr.ones(
        shape, chunks=source_chunks, dtype=dtype, store=store_source
    )
    # add some attributes
    source_array.attrs["foo"] = "bar"
    if dims:
        source_array.attrs[_DIMENSION_KEY] = dims

    ### Create targets ###
    target_store = str(tmp_path / "target.zarr")
    temp_store = str(tmp_path / "temp.zarr")
    rechunked = api.rechunk(
        source_array,
        target_chunks,
        max_mem,
        target_store,
        temp_store=temp_store,
        executor=executor,
    )
    assert isinstance(rechunked, api.Rechunked)

    target_array = zarr.open(target_store)
    if isinstance(target_chunks, dict):
        target_chunks_list = [target_chunks[d] for d in dims]
    else:
        target_chunks_list = target_chunks
    assert target_array.chunks == tuple(target_chunks_list)
    assert dict(source_array.attrs) == dict(target_array.attrs)

    result = rechunked.execute()
    assert isinstance(result, zarr.Array)
    a_tar = dsa.from_zarr(target_array)
    assert dsa.equal(a_tar, 1).all().compute()
@pytest.mark.parametrize("shape", [(8000, 8000)])
@pytest.mark.parametrize("source_chunks", [(200, 8000), (800, 8000)])
@pytest.mark.parametrize("dtype", ["f4"])
@pytest.mark.parametrize("max_mem", [25600000])
@pytest.mark.parametrize(
"target_chunks", [(200, 8000), (800, 8000), (8000, 200), (400, 8000),],
)
def test_rechunk_dask_array(
    tmp_path, shape, source_chunks, dtype, target_chunks, max_mem
):
    ### Create source array ###
    source_array = dsa.ones(shape, chunks=source_chunks, dtype=dtype)

    ### Create targets ###
    target_store = str(tmp_path / "target.zarr")
    temp_store = str(tmp_path / "temp.zarr")
    rechunked = api.rechunk(
        source_array, target_chunks, max_mem, target_store, temp_store=temp_store
    )
    assert isinstance(rechunked, api.Rechunked)

    target_array = zarr.open(target_store)
    assert target_array.chunks == tuple(target_chunks)

    result = rechunked.execute()
    assert isinstance(result, zarr.Array)
    a_tar = dsa.from_zarr(target_array)
    assert dsa.equal(a_tar, 1).all().compute()
@pytest.mark.parametrize(
"executor",
[
"dask",
"python",
requires_beam("beam"),
requires_prefect("prefect"),
requires_pywren("pywren"),
],
)
def test_rechunk_group(tmp_path, executor):
    store_source = str(tmp_path / "source.zarr")
    group = zarr.group(store_source)
    group.attrs["foo"] = "bar"
    # 800 byte chunks
    a = group.ones("a", shape=(5, 10, 20), chunks=(1, 10, 20), dtype="f4")
    a.attrs["foo"] = "bar"
    b = group.ones("b", shape=(20,), chunks=(10,), dtype="f4")
    b.attrs["foo"] = "bar"

    target_store = str(tmp_path / "target.zarr")
    temp_store = str(tmp_path / "temp.zarr")

    max_mem = 1600  # should force a two-step plan for a
    target_chunks = {"a": (5, 10, 4), "b": (20,)}
    rechunked = api.rechunk(
        group,
        target_chunks,
        max_mem,
        target_store,
        temp_store=temp_store,
        executor=executor,
    )
    assert isinstance(rechunked, api.Rechunked)

    target_group = zarr.open(target_store)
    assert "a" in target_group
    assert "b" in target_group
    assert dict(group.attrs) == dict(target_group.attrs)

    rechunked.execute()
    for aname in target_chunks:
        assert target_group[aname].chunks == target_chunks[aname]
        a_tar = dsa.from_zarr(target_group[aname])
        assert dsa.equal(a_tar, 1).all().compute()
def sample_xarray_dataset():
    return xarray.Dataset(
        dict(
            a=xarray.DataArray(
                dsa.ones(shape=(10, 20, 40), chunks=(5, 10, 4), dtype="f4"),
                dims=("x", "y", "z"),
                attrs={"foo": "bar"},
            ),
            b=xarray.DataArray(
                dsa.ones(shape=(8000,), chunks=(200,), dtype="f4"),
                dims="w",
                attrs={"foo": "bar"},
            ),
        ),
        attrs={"foo": "bar"},
    )
def sample_zarr_group(tmp_path):
    path = str(tmp_path / "source.zarr")
    group = zarr.group(path)
    group.attrs["foo"] = "bar"
    # 800 byte chunks
    a = group.ones("a", shape=(10, 20, 40), chunks=(5, 10, 4), dtype="f4")
    a.attrs["foo"] = "bar"
    b = group.ones("b", shape=(8000,), chunks=(200,), dtype="f4")
    b.attrs["foo"] = "bar"
    return group
def sample_zarr_array(tmp_path):
    shape = (8000, 8000)
    source_chunks = (200, 8000)
    dtype = "f4"
    dims = None

    path = str(tmp_path / "source.zarr")
    array = zarr.ones(shape, chunks=source_chunks, dtype=dtype, store=path)
    # add some attributes
    array.attrs["foo"] = "bar"
    if dims:
        array.attrs[_DIMENSION_KEY] = dims
    return array
@pytest.fixture(params=["Array", "Group", "Dataset"])
def rechunk_args(tmp_path, request):
    target_store = str(tmp_path / "target.zarr")
    temp_store = str(tmp_path / "temp.zarr")
    max_mem = 1600  # should force a two-step plan for a and b
    target_chunks = {"a": (10, 5, 4), "b": (100,)}
    args = dict(
        target_chunks=target_chunks,
        max_mem=max_mem,
        target_store=target_store,
        temp_store=temp_store,
    )
    if request.param == "Dataset":
        ds = sample_xarray_dataset()
        args.update({"source": ds})
    elif request.param == "Group":
        group = sample_zarr_group(tmp_path)
        args.update({"source": group})
    else:
        array = sample_zarr_array(tmp_path)
        max_mem = 25600000
        target_chunks = (8000, 200)
        args.update(
            {"source": array, "target_chunks": target_chunks, "max_mem": max_mem,}
        )
    return args
@pytest.fixture()
def rechunked(rechunk_args):
    return api.rechunk(**rechunk_args)
def test_repr(rechunked):
    assert isinstance(rechunked, api.Rechunked)
    repr_str = repr(rechunked)
    assert repr_str.startswith("<Rechunked>")
    assert all(thing in repr_str for thing in ["Source", "Intermediate", "Target"])
def test_repr_html(rechunked):
    rechunked._repr_html_()  # no exceptions
def _is_collection(source):
    assert isinstance(
        source,
        (dask.array.Array, zarr.core.Array, zarr.hierarchy.Group, xarray.Dataset),
    )
    return isinstance(source, (zarr.hierarchy.Group, xarray.Dataset))
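

# Groups and datasets hold multiple variables, so _wrap_options expands a single
# options dict into a per-variable mapping; bare arrays keep the dict as-is.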
def _wrap_options(source, options):
    if _is_collection(source):
        options = {v: options for v in source}
    return options
def test_rechunk_option_overwrite(rechunk_args):
    api.rechunk(**rechunk_args).execute()
    # TODO: make this match more reliable based on outcome of
    # https://github.com/zarr-developers/zarr-python/issues/605
    with pytest.raises(ValueError, match=r"path .* contains an array"):
        api.rechunk(**rechunk_args).execute()
    options = _wrap_options(rechunk_args["source"], dict(overwrite=True))
    api.rechunk(**rechunk_args, target_options=options).execute()
def test_rechunk_passthrough(rechunk_args):
    # Verify that no errors are raised when the target chunks == source chunks
    if _is_collection(rechunk_args["source"]):
        rechunk_args["target_chunks"] = {v: None for v in rechunk_args["source"]}
    else:
        rechunk_args["target_chunks"] = None
    api.rechunk(**rechunk_args).execute()
def test_rechunk_no_temp_dir_provided_error(rechunk_args):
    # Verify that the correct error is raised when no temp_store is given
    # and the chunks to write differ from the chunks to read
    args = {k: v for k, v in rechunk_args.items() if k != "temp_store"}
    with pytest.raises(ValueError, match="A temporary store location must be provided"):
        api.rechunk(**args).execute()
def test_rechunk_option_compression(rechunk_args):
    def rechunk(compressor):
        options = _wrap_options(
            rechunk_args["source"], dict(overwrite=True, compressor=compressor)
        )
        rechunked = api.rechunk(**rechunk_args, target_options=options)
        rechunked.execute()
        return sum(
            file.stat().st_size
            for file in Path(rechunked._target.store.path).rglob("*")
        )

    size_uncompressed = rechunk(None)
    size_compressed = rechunk(
        zarr.Blosc(cname="zstd", clevel=9, shuffle=zarr.Blosc.SHUFFLE)
    )
    assert size_compressed < size_uncompressed
def test_rechunk_invalid_option(rechunk_args):
    if isinstance(rechunk_args["source"], xarray.Dataset):
        # Options are essentially unbounded for Xarray (for CF encoding params),
        # so check only options with special error cases
        options = _wrap_options(rechunk_args["source"], {"chunks": 10})
        with pytest.raises(
            ValueError,
            match="Chunks must be provided in ``target_chunks`` rather than options",
        ):
            api.rechunk(**rechunk_args, target_options=options)
    else:
        for o in ["shape", "chunks", "dtype", "store", "name", "unknown"]:
            options = _wrap_options(rechunk_args["source"], {o: True})
            with pytest.raises(ValueError, match=f"Zarr options must not include {o}"):
                api.rechunk(**rechunk_args, temp_options=options)
            with pytest.raises(ValueError, match=f"Zarr options must not include {o}"):
                api.rechunk(**rechunk_args, target_options=options)
def test_rechunk_bad_target_chunks(rechunk_args):
    if not _is_collection(rechunk_args["source"]):
        return
    rechunk_args = dict(rechunk_args)
    rechunk_args["target_chunks"] = (10, 10)
    with pytest.raises(
        ValueError, match="You must specify ``target-chunks`` as a dict"
    ):
        api.rechunk(**rechunk_args)
def test_rechunk_invalid_source(tmp_path):
    with pytest.raises(
        ValueError,
        match="Source must be a Zarr Array, Zarr Group, Dask Array or Xarray Dataset",
    ):
        api.rechunk(
            [[1, 2], [3, 4]], target_chunks=(10, 10), max_mem=100, target_store=tmp_path
        )
@pytest.mark.parametrize(
    "source,target_chunks",
    [
        (sample_xarray_dataset(), {"a": (10, 5, 4), "b": (100,)}),
        (dsa.ones((20, 10), chunks=(5, 5)), (10, 10)),
    ],
)
@pytest.mark.parametrize(
    "executor",
    [
        "python",
        requires_beam("beam"),
        requires_prefect("prefect"),
        requires_pywren("pywren"),
    ],
)
def test_unsupported_executor(tmp_path, source, target_chunks, executor):
    with pytest.raises(
        NotImplementedError, match="Executor type .* not supported for source",
    ):
        api.rechunk(
            source,
            target_chunks=target_chunks,
            max_mem=1600,
            target_store=str(tmp_path / "target.zarr"),
            temp_store=str(tmp_path / "temp.zarr"),
            executor=executor,
        )
def test_rechunk_no_target_chunks(rechunk_args):
    rechunk_args = dict(rechunk_args)
    if _is_collection(rechunk_args["source"]):
        rechunk_args["target_chunks"] = {v: None for v in rechunk_args["source"]}
    else:
        rechunk_args["target_chunks"] = None
    api.rechunk(**rechunk_args)
def test_no_intermediate():
    a = zarr.ones((4, 4), chunks=(2, 2))
    b = zarr.ones((4, 4), chunks=(4, 1))
    rechunked = api.Rechunked(None, None, source=a, intermediate=None, target=b)
    assert "Intermediate" not in repr(rechunked)
    rechunked._repr_html_()
def test_no_intermediate_fused(tmp_path):
    shape = (8000, 8000)
    source_chunks = (200, 8000)
    dtype = "f4"
    max_mem = 25600000
    target_chunks = (400, 8000)

    store_source = str(tmp_path / "source.zarr")
    source_array = zarr.ones(
        shape, chunks=source_chunks, dtype=dtype, store=store_source
    )

    target_store = str(tmp_path / "target.zarr")

    rechunked = api.rechunk(source_array, target_chunks, max_mem, target_store)

    num_tasks = len([v for v in rechunked.plan.dask.values() if dask.core.istask(v)])
    assert num_tasks < 20  # less than if no fuse
def test_pywren_function_executor(tmp_path):
    pytest.importorskip("pywren_ibm_cloud")
    from rechunker.executors.pywren import (
        pywren_local_function_executor,
        PywrenExecutor,
    )

    # Create a Pywren function executor that we manage ourselves
    # and pass in to rechunker's PywrenExecutor
    with pywren_local_function_executor() as function_executor:
        executor = PywrenExecutor(function_executor)

        shape = (8000, 8000)
        source_chunks = (200, 8000)
        dtype = "f4"
        max_mem = 25600000
        target_chunks = (400, 8000)

        ### Create source array ###
        store_source = str(tmp_path / "source.zarr")
        source_array = zarr.ones(
            shape, chunks=source_chunks, dtype=dtype, store=store_source
        )

        ### Create targets ###
        target_store = str(tmp_path / "target.zarr")
        temp_store = str(tmp_path / "temp.zarr")
        rechunked = api.rechunk(
            source_array,
            target_chunks,
            max_mem,
            target_store,
            temp_store=temp_store,
            executor=executor,
        )
        assert isinstance(rechunked, api.Rechunked)

        target_array = zarr.open(target_store)
        assert target_array.chunks == tuple(target_chunks)

        result = rechunked.execute()
        assert isinstance(result, zarr.Array)
        a_tar = dsa.from_zarr(target_array)
        assert dsa.equal(a_tar, 1).all().compute()
|
[
"dask.array.equal",
"rechunker.api.rechunk",
"numpy.ones",
"pathlib.Path",
"pytest.mark.skipif",
"rechunker.executors.pywren.pywren_local_function_executor",
"dask.core.istask",
"pytest.mark.parametrize",
"zarr.Blosc",
"numpy.prod",
"rechunker.api._get_executor",
"zarr.open",
"dask.array.ones",
"zarr.ones",
"pytest.raises",
"rechunker.api.Rechunked",
"functools.partial",
"importlib.import_module",
"pytest.fixture",
"zarr.group",
"dask.array.from_zarr",
"pytest.importorskip",
"pytest.param",
"xarray.DataArray",
"xarray.open_zarr",
"rechunker.executors.pywren.PywrenExecutor"
] |
[((534, 573), 'functools.partial', 'partial', (['requires_import', '"""apache_beam"""'], {}), "(requires_import, 'apache_beam')\n", (541, 573), False, 'from functools import partial\n'), ((593, 628), 'functools.partial', 'partial', (['requires_import', '"""prefect"""'], {}), "(requires_import, 'prefect')\n", (600, 628), False, 'from functools import partial\n'), ((647, 691), 'functools.partial', 'partial', (['requires_import', '"""pywren_ibm_cloud"""'], {}), "(requires_import, 'pywren_ibm_cloud')\n", (654, 691), False, 'from functools import partial\n'), ((695, 754), 'pytest.fixture', 'pytest.fixture', ([], {'params': "[(8000, 200), {'y': 8000, 'x': 200}]"}), "(params=[(8000, 200), {'y': 8000, 'x': 200}])\n", (709, 754), False, 'import pytest\n'), ((946, 991), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""shape"""', '[(100, 50)]'], {}), "('shape', [(100, 50)])\n", (969, 991), False, 'import pytest\n'), ((993, 1045), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""source_chunks"""', '[(10, 50)]'], {}), "('source_chunks', [(10, 50)])\n", (1016, 1045), False, 'import pytest\n'), ((1047, 1099), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""target_chunks"""', '[(20, 10)]'], {}), "('target_chunks', [(20, 10)])\n", (1070, 1099), False, 'import pytest\n'), ((1101, 1145), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""max_mem"""', "['10MB']"], {}), "('max_mem', ['10MB'])\n", (1124, 1145), False, 'import pytest\n'), ((1147, 1192), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""executor"""', "['dask']"], {}), "('executor', ['dask'])\n", (1170, 1192), False, 'import pytest\n'), ((3194, 3242), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""shape"""', '[(8000, 8000)]'], {}), "('shape', [(8000, 8000)])\n", (3217, 3242), False, 'import pytest\n'), ((3244, 3299), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""source_chunks"""', '[(200, 8000)]'], {}), "('source_chunks', [(200, 8000)])\n", (3267, 3299), False, 'import pytest\n'), ((3301, 3341), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""dtype"""', "['f4']"], {}), "('dtype', ['f4'])\n", (3324, 3341), False, 'import pytest\n'), ((3343, 3399), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""max_mem"""', "[25600000, '25.6MB']"], {}), "('max_mem', [25600000, '25.6MB'])\n", (3366, 3399), False, 'import pytest\n'), ((5411, 5459), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""shape"""', '[(8000, 8000)]'], {}), "('shape', [(8000, 8000)])\n", (5434, 5459), False, 'import pytest\n'), ((5461, 5529), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""source_chunks"""', '[(200, 8000), (800, 8000)]'], {}), "('source_chunks', [(200, 8000), (800, 8000)])\n", (5484, 5529), False, 'import pytest\n'), ((5531, 5571), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""dtype"""', "['f4']"], {}), "('dtype', ['f4'])\n", (5554, 5571), False, 'import pytest\n'), ((5573, 5619), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""max_mem"""', '[25600000]'], {}), "('max_mem', [25600000])\n", (5596, 5619), False, 'import pytest\n'), ((5621, 5720), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""target_chunks"""', '[(200, 8000), (800, 8000), (8000, 200), (400, 8000)]'], {}), "('target_chunks', [(200, 8000), (800, 8000), (8000, \n 200), (400, 8000)])\n", (5644, 5720), False, 'import pytest\n'), ((9101, 9153), 'pytest.fixture', 'pytest.fixture', ([], {'params': "['Array', 'Group', 'Dataset']"}), "(params=['Array', 'Group', 'Dataset'])\n", (9115, 9153), False, 'import pytest\n'), ((10028, 10044), 'pytest.fixture', 'pytest.fixture', ([], {}), '()\n', (10042, 10044), False, 'import pytest\n'), ((419, 472), 'pytest.mark.skipif', 'pytest.mark.skipif', (['skip'], {'reason': 'f"""requires {module}"""'}), "(skip, reason=f'requires {module}')\n", (437, 472), False, 'import pytest\n'), ((484, 515), 'pytest.param', 'pytest.param', (['*args'], {'marks': 'mark'}), '(*args, marks=mark)\n', (496, 515), False, 'import pytest\n'), ((2640, 2687), 'xarray.open_zarr', 'xarray.open_zarr', (['target_store'], {'decode_cf': '(False)'}), '(target_store, decode_cf=False)\n', (2656, 2687), False, 'import xarray\n'), ((2895, 2941), 'xarray.open_zarr', 'xarray.open_zarr', (['target_store'], {'decode_cf': '(True)'}), '(target_store, decode_cf=True)\n', (2911, 2941), False, 'import xarray\n'), ((4360, 4431), 'zarr.ones', 'zarr.ones', (['shape'], {'chunks': 'source_chunks', 'dtype': 'dtype', 'store': 'store_source'}), '(shape, chunks=source_chunks, dtype=dtype, store=store_source)\n', (4369, 4431), False, 'import zarr\n'), ((4712, 4822), 'rechunker.api.rechunk', 'api.rechunk', (['source_array', 'target_chunks', 'max_mem', 'target_store'], {'temp_store': 'temp_store', 'executor': 'executor'}), '(source_array, target_chunks, max_mem, target_store, temp_store=\n temp_store, executor=executor)\n', (4723, 4822), False, 'from rechunker import api\n'), ((4941, 4964), 'zarr.open', 'zarr.open', (['target_store'], {}), '(target_store)\n', (4950, 4964), False, 'import zarr\n'), ((5333, 5360), 'dask.array.from_zarr', 'dsa.from_zarr', (['target_array'], {}), '(target_array)\n', (5346, 5360), True, 'import dask.array as dsa\n'), ((5874, 5924), 'dask.array.ones', 'dsa.ones', (['shape'], {'chunks': 'source_chunks', 'dtype': 'dtype'}), '(shape, chunks=source_chunks, dtype=dtype)\n', (5882, 5924), True, 'import dask.array as dsa\n'), ((6064, 6155), 'rechunker.api.rechunk', 'api.rechunk', (['source_array', 'target_chunks', 'max_mem', 'target_store'], {'temp_store': 'temp_store'}), '(source_array, target_chunks, max_mem, target_store, temp_store=\n temp_store)\n', (6075, 6155), False, 'from rechunker import api\n'), ((6233, 6256), 'zarr.open', 'zarr.open', (['target_store'], {}), '(target_store)\n', (6242, 6256), False, 'import zarr\n'), ((6401, 6428), 'dask.array.from_zarr', 'dsa.from_zarr', (['target_array'], {}), '(target_array)\n', (6414, 6428), True, 'import dask.array as dsa\n'), ((6777, 6801), 'zarr.group', 'zarr.group', (['store_source'], {}), '(store_source)\n', (6787, 6801), False, 'import zarr\n'), ((7267, 7370), 'rechunker.api.rechunk', 'api.rechunk', (['group', 'target_chunks', 'max_mem', 'target_store'], {'temp_store': 'temp_store', 'executor': 'executor'}), '(group, target_chunks, max_mem, target_store, temp_store=\n temp_store, executor=executor)\n', (7278, 7370), False, 'from rechunker import api\n'), ((7489, 7512), 'zarr.open', 'zarr.open', (['target_store'], {}), '(target_store)\n', (7498, 7512), False, 'import zarr\n'), ((8443, 8459), 'zarr.group', 'zarr.group', (['path'], {}), '(path)\n', (8453, 8459), False, 'import zarr\n'), ((8904, 8967), 'zarr.ones', 'zarr.ones', (['shape'], {'chunks': 'source_chunks', 'dtype': 'dtype', 'store': 'path'}), '(shape, chunks=source_chunks, dtype=dtype, store=path)\n', (8913, 8967), False, 'import zarr\n'), ((10085, 10112), 'rechunker.api.rechunk', 'api.rechunk', ([], {}), '(**rechunk_args)\n', (10096, 10112), False, 'from rechunker import api\n'), ((15377, 15404), 'rechunker.api.rechunk', 'api.rechunk', ([], {}), '(**rechunk_args)\n', (15388, 15404), False, 'from rechunker import api\n'), ((15443, 15475), 'zarr.ones', 'zarr.ones', (['(4, 4)'], {'chunks': '(2, 2)'}), '((4, 4), chunks=(2, 2))\n', (15452, 15475), False, 'import zarr\n'), ((15484, 15516), 'zarr.ones', 'zarr.ones', (['(4, 4)'], {'chunks': '(4, 1)'}), '((4, 4), chunks=(4, 1))\n', (15493, 15516), False, 'import zarr\n'), ((15533, 15597), 'rechunker.api.Rechunked', 'api.Rechunked', (['None', 'None'], {'source': 'a', 'intermediate': 'None', 'target': 'b'}), '(None, None, source=a, intermediate=None, target=b)\n', (15546, 15597), False, 'from rechunker import api\n'), ((15917, 15988), 'zarr.ones', 'zarr.ones', (['shape'], {'chunks': 'source_chunks', 'dtype': 'dtype', 'store': 'store_source'}), '(shape, chunks=source_chunks, dtype=dtype, store=store_source)\n', (15926, 15988), False, 'import zarr\n'), ((16070, 16133), 'rechunker.api.rechunk', 'api.rechunk', (['source_array', 'target_chunks', 'max_mem', 'target_store'], {}), '(source_array, target_chunks, max_mem, target_store)\n', (16081, 16133), False, 'from rechunker import api\n'), ((16322, 16361), 'pytest.importorskip', 'pytest.importorskip', (['"""pywren_ibm_cloud"""'], {}), "('pywren_ibm_cloud')\n", (16341, 16361), False, 'import pytest\n'), ((301, 332), 'importlib.import_module', 'importlib.import_module', (['module'], {}), '(module)\n', (324, 332), False, 'import importlib\n'), ((848, 904), 'pytest.raises', 'pytest.raises', (['ValueError'], {'match': '"""unrecognized executor"""'}), "(ValueError, match='unrecognized executor')\n", (861, 904), False, 'import pytest\n'), ((914, 942), 'rechunker.api._get_executor', 'api._get_executor', (['"""unknown"""'], {}), "('unknown')\n", (931, 942), False, 'from rechunker import api\n'), ((3747, 3801), 'pytest.param', 'pytest.param', (['None', '(-1, 200)'], {'marks': 'pytest.mark.xfail'}), '(None, (-1, 200), marks=pytest.mark.xfail)\n', (3759, 3801), False, 'import pytest\n'), ((3951, 4012), 'pytest.param', 'pytest.param', (["['y', 'x']", "{'x': 200}"], {'marks': 'pytest.mark.xfail'}), "(['y', 'x'], {'x': 200}, marks=pytest.mark.xfail)\n", (3963, 4012), False, 'import pytest\n'), ((4073, 4139), 'pytest.param', 'pytest.param', (['None', "{'y': 8000, 'x': 200}"], {'marks': 'pytest.mark.xfail'}), "(None, {'y': 8000, 'x': 200}, marks=pytest.mark.xfail)\n", (4085, 4139), False, 'import pytest\n'), ((7771, 7805), 'dask.array.from_zarr', 'dsa.from_zarr', (['target_group[aname]'], {}), '(target_group[aname])\n', (7784, 7805), True, 'import dask.array as dsa\n'), ((11020, 11080), 'pytest.raises', 'pytest.raises', (['ValueError'], {'match': '"""path .* contains an array"""'}), "(ValueError, match='path .* contains an array')\n", (11033, 11080), False, 'import pytest\n'), ((11897, 11975), 'pytest.raises', 'pytest.raises', (['ValueError'], {'match': '"""A temporary store location must be provided"""'}), "(ValueError, match='A temporary store location must be provided')\n", (11910, 11975), False, 'import pytest\n'), ((12240, 12291), 'rechunker.api.rechunk', 'api.rechunk', ([], {'target_options': 'options'}), '(**rechunk_args, target_options=options)\n', (12251, 12291), False, 'from rechunker import api\n'), ((12530, 12592), 'zarr.Blosc', 'zarr.Blosc', ([], {'cname': '"""zstd"""', 'clevel': '(9)', 'shuffle': 'zarr.Blosc.SHUFFLE'}), "(cname='zstd', clevel=9, shuffle=zarr.Blosc.SHUFFLE)\n", (12540, 12592), False, 'import zarr\n'), ((13853, 13932), 'pytest.raises', 'pytest.raises', (['ValueError'], {'match': '"""You must specify ``target-chunks`` as a dict"""'}), "(ValueError, match='You must specify ``target-chunks`` as a dict')\n", (13866, 13932), False, 'import pytest\n'), ((13956, 13983), 'rechunker.api.rechunk', 'api.rechunk', ([], {}), '(**rechunk_args)\n', (13967, 13983), False, 'from rechunker import api\n'), ((14038, 14147), 'pytest.raises', 'pytest.raises', (['ValueError'], {'match': '"""Source must be a Zarr Array, Zarr Group, Dask Array or Xarray Dataset"""'}), "(ValueError, match=\n 'Source must be a Zarr Array, Zarr Group, Dask Array or Xarray Dataset')\n", (14051, 14147), False, 'import pytest\n'), ((14425, 14458), 'dask.array.ones', 'dsa.ones', (['(20, 10)'], {'chunks':
'(5, 5)'}), '((20, 10), chunks=(5, 5))\n', (14433, 14458), True, 'import dask.array as dsa\n'), ((1529, 1614), 'xarray.DataArray', 'xarray.DataArray', (['a'], {'dims': "['x', 'y']", 'attrs': "{'a1': 1, 'a2': [1, 2, 3], 'a3': 'x'}"}), "(a, dims=['x', 'y'], attrs={'a1': 1, 'a2': [1, 2, 3], 'a3':\n 'x'})\n", (1545, 1614), False, 'import xarray\n'), ((11091, 11118), 'rechunker.api.rechunk', 'api.rechunk', ([], {}), '(**rechunk_args)\n', (11102, 11118), False, 'from rechunker import api\n'), ((11985, 12004), 'rechunker.api.rechunk', 'api.rechunk', ([], {}), '(**args)\n', (11996, 12004), False, 'from rechunker import api\n'), ((13350, 13419), 'pytest.raises', 'pytest.raises', (['ValueError'], {'match': 'f"""Zarr options must not include {o}"""'}), "(ValueError, match=f'Zarr options must not include {o}')\n", (13363, 13419), False, 'import pytest\n'), ((13437, 13486), 'rechunker.api.rechunk', 'api.rechunk', ([], {'temp_options': 'options'}), '(**rechunk_args, temp_options=options)\n', (13448, 13486), False, 'from rechunker import api\n'), ((13504, 13573), 'pytest.raises', 'pytest.raises', (['ValueError'], {'match': 'f"""Zarr options must not include {o}"""'}), "(ValueError, match=f'Zarr options must not include {o}')\n", (13517, 13573), False, 'import pytest\n'), ((13591, 13642), 'rechunker.api.rechunk', 'api.rechunk', ([], {'target_options': 'options'}), '(**rechunk_args, target_options=options)\n', (13602, 13642), False, 'from rechunker import api\n'), ((16199, 16218), 'dask.core.istask', 'dask.core.istask', (['v'], {}), '(v)\n', (16215, 16218), False, 'import dask\n'), ((1673, 1693), 'numpy.ones', 'numpy.ones', (['shape[0]'], {}), '(shape[0])\n', (1683, 1693), False, 'import numpy\n'), ((1739, 1759), 'numpy.ones', 'numpy.ones', (['shape[1]'], {}), '(shape[1])\n', (1749, 1759), False, 'import numpy\n'), ((2132, 2156), 'zarr.Blosc', 'zarr.Blosc', ([], {'cname': '"""zstd"""'}), "(cname='zstd')\n", (2142, 2156), False, 'import zarr\n'), ((5372, 5391), 'dask.array.equal', 'dsa.equal', (['a_tar', '(1)'], {}), '(a_tar, 1)\n', (5381, 5391), True, 'import dask.array as dsa\n'), ((6440, 6459), 'dask.array.equal', 'dsa.equal', (['a_tar', '(1)'], {}), '(a_tar, 1)\n', (6449, 6459), True, 'import dask.array as dsa\n'), ((7977, 8036), 'dask.array.ones', 'dsa.ones', ([], {'shape': '(10, 20, 40)', 'chunks': '(5, 10, 4)', 'dtype': '"""f4"""'}), "(shape=(10, 20, 40), chunks=(5, 10, 4), dtype='f4')\n", (7985, 8036), True, 'import dask.array as dsa\n'), ((8177, 8227), 'dask.array.ones', 'dsa.ones', ([], {'shape': '(8000,)', 'chunks': '(200,)', 'dtype': '"""f4"""'}), "(shape=(8000,), chunks=(200,), dtype='f4')\n", (8185, 8227), True, 'import dask.array as dsa\n'), ((1407, 1424), 'numpy.prod', 'numpy.prod', (['shape'], {}), '(shape)\n', (1417, 1424), False, 'import numpy\n'), ((1838, 1858), 'numpy.ones', 'numpy.ones', (['shape[0]'], {}), '(shape[0])\n', (1848, 1858), False, 'import numpy\n'), ((1905, 1925), 'numpy.ones', 'numpy.ones', (['shape[1]'], {}), '(shape[1])\n', (1915, 1925), False, 'import numpy\n'), ((7821, 7840), 'dask.array.equal', 'dsa.equal', (['a_tar', '(1)'], {}), '(a_tar, 1)\n', (7830, 7840), True, 'import dask.array as dsa\n'), ((17714, 17733), 'dask.array.equal', 'dsa.equal', (['a_tar', '(1)'], {}), '(a_tar, 1)\n', (17723, 17733), True, 'import dask.array as dsa\n'), ((12396, 12430), 'pathlib.Path', 'Path', (['rechunked._target.store.path'], {}), '(rechunked._target.store.path)\n', (12400, 12430), False, 'from pathlib import Path\n')]
|
# -*- coding: utf-8 -*-
"""
@project : WechatTogether
@Time : 2020/9/9 14:21
@Auth : AJay13
@File :interface_article_list.py
@IDE :PyCharm
@Motto:ABC(Always Be Coding)
"""
# Category management interfaces: list, delete, update, and add categories
__all__ = ['InterFaceWechtTagList', 'InterfaceArticleFlag']
from flask import views
from sqlalchemy import and_
import config
from exts import db
from apis.common import response_code
from apis.common.api_version import api_version
from apis.common.auth import login_required
from apis.v1.tags.verify_tag import TagListForm, ArticleFlagForm
from apps.admin.models import WechatArticle, WechatArticleList
from models import WechatTag
class InterFaceWechtTagList(views.MethodView):
'''
    Interface for WeChat official-account categories.
'''
@api_version
    # @login_required  # performs authentication automatically
def get(self, version):
        form = TagListForm().validate_for_api()  # validate the form
page = int(form.page.data)
limit = int(form.limit.data)
start = (page - 1) * limit
end = start + limit
tag_data = []
tag_obj = WechatTag.query
tags = tag_obj.slice(start, end)
total = tag_obj.count()
for i in tags:
tag = {}
tag['id'] = i.id
tag['tag_name'] = i.tag_name
tag['tag_en'] = i.tag_en
tag['tag_summary'] = i.tag_summary
tag['create_time'] = i.create_time
tag_data.append(tag)
return response_code.LayuiSuccess(message='查询成功!', data=tag_data, count=total)
class InterfaceArticleFlag(views.MethodView):
'''
    If flag == 1 the article is featured; otherwise it is a regular article.
'''
@api_version
    @login_required  # performs authentication automatically
def post(self, version):
        form = ArticleFlagForm().validate_for_api()  # validate the form
        id = form.id.data
        flag = form.flag.data
wechat_article = WechatArticle.query.get(id)
if wechat_article:
if wechat_article.flag != flag:
wechat_article.flag = flag
db.session.commit()
return response_code.LayuiSuccess(message='文章:“{}”修改成功!'.format(wechat_article.title))
return response_code.ParameterException(message='已经被被人修改,刷新看看!!')
return response_code.ParameterException(message='修改失败!')
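# Registration sketch (route paths and endpoint names below are hypothetical;
# wire the views into whatever blueprint/app the project actually uses):
#   bp.add_url_rule('/tags', view_func=InterFaceWechtTagList.as_view('tag_list'))
#   bp.add_url_rule('/article/flag', view_func=InterfaceArticleFlag.as_view('article_flag'))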
|
[
"exts.db.session.commit",
"apis.common.response_code.LayuiSuccess",
"apis.common.response_code.ParameterException",
"apis.v1.tags.verify_tag.TagListForm",
"apis.v1.tags.verify_tag.ArticleFlagForm",
"apps.admin.models.WechatArticle.query.get"
] |
[((1408, 1479), 'apis.common.response_code.LayuiSuccess', 'response_code.LayuiSuccess', ([], {'message': '"""查询成功!"""', 'data': 'tag_data', 'count': 'total'}), "(message='查询成功!', data=tag_data, count=total)\n", (1434, 1479), False, 'from apis.common import response_code\n'), ((1793, 1820), 'apps.admin.models.WechatArticle.query.get', 'WechatArticle.query.get', (['id'], {}), '(id)\n', (1816, 1820), False, 'from apps.admin.models import WechatArticle, WechatArticleList\n'), ((2168, 2217), 'apis.common.response_code.ParameterException', 'response_code.ParameterException', ([], {'message': '"""修改失败!"""'}), "(message='修改失败!')\n", (2200, 2217), False, 'from apis.common import response_code\n'), ((2094, 2152), 'apis.common.response_code.ParameterException', 'response_code.ParameterException', ([], {'message': '"""已经被被人修改,刷新看看!!"""'}), "(message='已经被被人修改,刷新看看!!')\n", (2126, 2152), False, 'from apis.common import response_code\n'), ((807, 820), 'apis.v1.tags.verify_tag.TagListForm', 'TagListForm', ([], {}), '()\n', (818, 820), False, 'from apis.v1.tags.verify_tag import TagListForm, ArticleFlagForm\n'), ((1668, 1685), 'apis.v1.tags.verify_tag.ArticleFlagForm', 'ArticleFlagForm', ([], {}), '()\n', (1683, 1685), False, 'from apis.v1.tags.verify_tag import TagListForm, ArticleFlagForm\n'), ((1951, 1970), 'exts.db.session.commit', 'db.session.commit', ([], {}), '()\n', (1968, 1970), False, 'from exts import db\n')]
|
#! /usr/bin/python3
from gpiozero import LEDBoard
from time import sleep
redLEDs = LEDBoard(15, 18, 17, 27)
greLEDs = LEDBoard(14, 2, 3, 4)
bluLEDs = LEDBoard(23, 24, 22, 25)
while True:
redLEDs.on()
sleep(0.5)
greLEDs.on()
sleep(0.5)
bluLEDs.on()
sleep(0.5)
redLEDs.off()
sleep(0.5)
greLEDs.off()
sleep(0.5)
bluLEDs.off()
sleep(0.5)
    # light the reds in turn
redLEDs.value = (1, 0, 0, 0)
sleep(0.25)
redLEDs.value = (1, 1, 0, 0)
sleep(0.25)
redLEDs.value = (1, 1, 1, 0)
sleep(0.25)
redLEDs.value = (1, 1, 1, 1)
sleep(0.25)
redLEDs.value = (0, 0, 0, 0)
sleep(0.25)
    # light the greens in turn
greLEDs.value = (1, 0, 0, 0)
sleep(0.25)
greLEDs.value = (1, 1, 0, 0)
sleep(0.25)
greLEDs.value = (1, 1, 1, 0)
sleep(0.25)
greLEDs.value = (1, 1, 1, 1)
sleep(0.25)
greLEDs.value = (0, 0, 0, 0)
sleep(0.25)
    # light the blues in turn
bluLEDs.value = (1, 0, 0, 0)
sleep(0.25)
bluLEDs.value = (1, 1, 0, 0)
sleep(0.25)
bluLEDs.value = (1, 1, 1, 0)
sleep(0.25)
bluLEDs.value = (1, 1, 1, 1)
sleep(0.25)
bluLEDs.value = (0, 0, 0, 0)
sleep(0.25)
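# The all-on/all-off phase above could also use gpiozero's built-in helpers; a
# sketch with LEDBoard.blink() (this flashes every LED on the board together,
# so it does not replace the one-by-one chase patterns):
#   redLEDs.blink(on_time=0.5, off_time=0.5, n=1, background=False)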
|
[
"gpiozero.LEDBoard",
"time.sleep"
] |
[((84, 108), 'gpiozero.LEDBoard', 'LEDBoard', (['(15)', '(18)', '(17)', '(27)'], {}), '(15, 18, 17, 27)\n', (92, 108), False, 'from gpiozero import LEDBoard\n'), ((119, 140), 'gpiozero.LEDBoard', 'LEDBoard', (['(14)', '(2)', '(3)', '(4)'], {}), '(14, 2, 3, 4)\n', (127, 140), False, 'from gpiozero import LEDBoard\n'), ((151, 175), 'gpiozero.LEDBoard', 'LEDBoard', (['(23)', '(24)', '(22)', '(25)'], {}), '(23, 24, 22, 25)\n', (159, 175), False, 'from gpiozero import LEDBoard\n'), ((210, 220), 'time.sleep', 'sleep', (['(0.5)'], {}), '(0.5)\n', (215, 220), False, 'from time import sleep\n'), ((242, 252), 'time.sleep', 'sleep', (['(0.5)'], {}), '(0.5)\n', (247, 252), False, 'from time import sleep\n'), ((274, 284), 'time.sleep', 'sleep', (['(0.5)'], {}), '(0.5)\n', (279, 284), False, 'from time import sleep\n'), ((307, 317), 'time.sleep', 'sleep', (['(0.5)'], {}), '(0.5)\n', (312, 317), False, 'from time import sleep\n'), ((340, 350), 'time.sleep', 'sleep', (['(0.5)'], {}), '(0.5)\n', (345, 350), False, 'from time import sleep\n'), ((373, 383), 'time.sleep', 'sleep', (['(0.5)'], {}), '(0.5)\n', (378, 383), False, 'from time import sleep\n'), ((449, 460), 'time.sleep', 'sleep', (['(0.25)'], {}), '(0.25)\n', (454, 460), False, 'from time import sleep\n'), ((498, 509), 'time.sleep', 'sleep', (['(0.25)'], {}), '(0.25)\n', (503, 509), False, 'from time import sleep\n'), ((547, 558), 'time.sleep', 'sleep', (['(0.25)'], {}), '(0.25)\n', (552, 558), False, 'from time import sleep\n'), ((596, 607), 'time.sleep', 'sleep', (['(0.25)'], {}), '(0.25)\n', (601, 607), False, 'from time import sleep\n'), ((645, 656), 'time.sleep', 'sleep', (['(0.25)'], {}), '(0.25)\n', (650, 656), False, 'from time import sleep\n'), ((724, 735), 'time.sleep', 'sleep', (['(0.25)'], {}), '(0.25)\n', (729, 735), False, 'from time import sleep\n'), ((773, 784), 'time.sleep', 'sleep', (['(0.25)'], {}), '(0.25)\n', (778, 784), False, 'from time import sleep\n'), ((822, 833), 'time.sleep', 'sleep', (['(0.25)'], {}), '(0.25)\n', (827, 833), False, 'from time import sleep\n'), ((871, 882), 'time.sleep', 'sleep', (['(0.25)'], {}), '(0.25)\n', (876, 882), False, 'from time import sleep\n'), ((920, 931), 'time.sleep', 'sleep', (['(0.25)'], {}), '(0.25)\n', (925, 931), False, 'from time import sleep\n'), ((998, 1009), 'time.sleep', 'sleep', (['(0.25)'], {}), '(0.25)\n', (1003, 1009), False, 'from time import sleep\n'), ((1047, 1058), 'time.sleep', 'sleep', (['(0.25)'], {}), '(0.25)\n', (1052, 1058), False, 'from time import sleep\n'), ((1096, 1107), 'time.sleep', 'sleep', (['(0.25)'], {}), '(0.25)\n', (1101, 1107), False, 'from time import sleep\n'), ((1145, 1156), 'time.sleep', 'sleep', (['(0.25)'], {}), '(0.25)\n', (1150, 1156), False, 'from time import sleep\n'), ((1194, 1205), 'time.sleep', 'sleep', (['(0.25)'], {}), '(0.25)\n', (1199, 1205), False, 'from time import sleep\n')]
|
from app import apfell, db_objects, auth
from sanic.response import json
from app.database_models.model import Operator
from sanic import response
from sanic.exceptions import abort
from app import crypto
from sanic_auth import User
# ------------ OPERATORS ------------------------
@apfell.route("/api/v1.0/operators/", methods=['GET'])
async def get_all_operators(request):
ops = await db_objects.execute(Operator.select())
return json([p.to_json() for p in ops])
@apfell.route("/api/v1.0/operators/", methods=['POST'])
async def create_operator(request):
data = request.json
    if 'username' not in data:
return json({'status': 'error',
'error': '"username" field is required'})
    if not isinstance(data['username'], str) or not len(data['username']):
        return json({'status': 'error',
                     'error': '"username" must be string with at least one character'})
    if 'password' not in data:
        return json({'status': 'error',
                     'error': '"password" field is required'})
    password = await crypto.hash_SHA512(data['password'])
# we need to create a new user
try:
user = await db_objects.create(Operator, username=data['username'], password=password)
# login_user = User(id=user.id, name=user.username)
# auth.login_user(request, login_user)
return response.json({'status': 'success'})
    except Exception:
return json({'status': 'error',
'error': 'failed to add user'})
@apfell.route("/api/v1.0/operators/<id:int>", methods=['GET'])
async def get_one_operator(request, id):
try:
op = await db_objects.get(Operator, id=id)
return json(str(op))
    except Exception:
print("Failed in /api/v1.0/operators/id for a GET request")
return abort(404)
@apfell.route("/api/v1.0/operators/<id:int>", methods=["PUT"])
async def update_operator(request, id):
try:
op = await db_objects.get(Operator, id=id)
data = request.json
if 'username' in data:
op.username = data['username']
if 'password' in data:
op.password = await crypto.hash_SHA512(data['password'])
await db_objects.update(op)
return json({'status': 'success'})
    except Exception:
abort(404)
@apfell.route("/api/v1.0/operators/<id:int>", methods=["DELETE"])
async def remove_operator(request, id):
try:
op = await db_objects.get(Operator, id=id)
await db_objects.delete(op)
return json({'status': 'success'})
    except Exception:
abort(404)
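# Example requests against the routes above (host/port and payload values are
# hypothetical):
#   curl -X POST http://localhost/api/v1.0/operators/ \
#        -H 'Content-Type: application/json' \
#        -d '{"username": "neo", "password": "s3cret"}'
#   curl -X DELETE http://localhost/api/v1.0/operators/1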
|
[
"sanic.exceptions.abort",
"app.db_objects.get",
"app.database_models.model.Operator.select",
"app.db_objects.delete",
"app.crypto.hash_SHA512",
"app.db_objects.create",
"app.db_objects.update",
"sanic.response.json",
"app.apfell.route"
] |
[((286, 339), 'app.apfell.route', 'apfell.route', (['"""/api/v1.0/operators/"""'], {'methods': "['GET']"}), "('/api/v1.0/operators/', methods=['GET'])\n", (298, 339), False, 'from app import apfell, db_objects, auth\n'), ((479, 533), 'app.apfell.route', 'apfell.route', (['"""/api/v1.0/operators/"""'], {'methods': "['POST']"}), "('/api/v1.0/operators/', methods=['POST'])\n", (491, 533), False, 'from app import apfell, db_objects, auth\n'), ((1395, 1456), 'app.apfell.route', 'apfell.route', (['"""/api/v1.0/operators/<id:int>"""'], {'methods': "['GET']"}), "('/api/v1.0/operators/<id:int>', methods=['GET'])\n", (1407, 1456), False, 'from app import apfell, db_objects, auth\n'), ((1696, 1757), 'app.apfell.route', 'apfell.route', (['"""/api/v1.0/operators/<id:int>"""'], {'methods': "['PUT']"}), "('/api/v1.0/operators/<id:int>', methods=['PUT'])\n", (1708, 1757), False, 'from app import apfell, db_objects, auth\n'), ((2173, 2237), 'app.apfell.route', 'apfell.route', (['"""/api/v1.0/operators/<id:int>"""'], {'methods': "['DELETE']"}), "('/api/v1.0/operators/<id:int>', methods=['DELETE'])\n", (2185, 2237), False, 'from app import apfell, db_objects, auth\n'), ((640, 706), 'sanic.response.json', 'json', (['{\'status\': \'error\', \'error\': \'"username" field is required\'}'], {}), '({\'status\': \'error\', \'error\': \'"username" field is required\'})\n', (644, 706), False, 'from sanic.response import json\n'), ((818, 913), 'sanic.response.json', 'json', (['{\'status\': \'error\', \'error\':\n \'"username" must be string with at least one character\'}'], {}), '({\'status\': \'error\', \'error\':\n \'"username" must be string with at least one character\'})\n', (822, 913), False, 'from sanic.response import json\n'), ((952, 988), 'app.crypto.hash_SHA512', 'crypto.hash_SHA512', (["data['password']"], {}), "(data['password'])\n", (970, 988), False, 'from app import crypto\n'), ((1250, 1286), 'sanic.response.json', 'response.json', (["{'status': 'success'}"], {}), "({'status': 'success'})\n", (1263, 1286), False, 'from sanic import response\n'), ((2111, 2138), 'sanic.response.json', 'json', (["{'status': 'success'}"], {}), "({'status': 'success'})\n", (2115, 2138), False, 'from sanic.response import json\n'), ((2389, 2416), 'sanic.response.json', 'json', (["{'status': 'success'}"], {}), "({'status': 'success'})\n", (2393, 2416), False, 'from sanic.response import json\n'), ((413, 430), 'app.database_models.model.Operator.select', 'Operator.select', ([], {}), '()\n', (428, 430), False, 'from app.database_models.model import Operator\n'), ((1054, 1127), 'app.db_objects.create', 'db_objects.create', (['Operator'], {'username': "data['username']", 'password': 'password'}), "(Operator, username=data['username'], password=password)\n", (1071, 1127), False, 'from app import apfell, db_objects, auth\n'), ((1314, 1370), 'sanic.response.json', 'json', (["{'status': 'error', 'error': 'failed to add user'}"], {}), "({'status': 'error', 'error': 'failed to add user'})\n", (1318, 1370), False, 'from sanic.response import json\n'), ((1526, 1557), 'app.db_objects.get', 'db_objects.get', (['Operator'], {'id': 'id'}), '(Operator, id=id)\n', (1540, 1557), False, 'from app import apfell, db_objects, auth\n'), ((1682, 1692), 'sanic.exceptions.abort', 'abort', (['(404)'], {}), '(404)\n', (1687, 1692), False, 'from sanic.exceptions import abort\n'), ((1826, 1857), 'app.db_objects.get', 'db_objects.get', (['Operator'], {'id': 'id'}), '(Operator, id=id)\n', (1840, 1857), False, 'from app import apfell, db_objects, auth\n'), 
((2074, 2095), 'app.db_objects.update', 'db_objects.update', (['op'], {}), '(op)\n', (2091, 2095), False, 'from app import apfell, db_objects, auth\n'), ((2159, 2169), 'sanic.exceptions.abort', 'abort', (['(404)'], {}), '(404)\n', (2164, 2169), False, 'from sanic.exceptions import abort\n'), ((2306, 2337), 'app.db_objects.get', 'db_objects.get', (['Operator'], {'id': 'id'}), '(Operator, id=id)\n', (2320, 2337), False, 'from app import apfell, db_objects, auth\n'), ((2352, 2373), 'app.db_objects.delete', 'db_objects.delete', (['op'], {}), '(op)\n', (2369, 2373), False, 'from app import apfell, db_objects, auth\n'), ((2437, 2447), 'sanic.exceptions.abort', 'abort', (['(404)'], {}), '(404)\n', (2442, 2447), False, 'from sanic.exceptions import abort\n'), ((2023, 2059), 'app.crypto.hash_SHA512', 'crypto.hash_SHA512', (["data['password']"], {}), "(data['password'])\n", (2041, 2059), False, 'from app import crypto\n')]
|
#!/usr/bin/env python3
"""The main entry point to the PDP trainer/tester/predictor."""
# Copyright (c) Microsoft. All rights reserved.
# Licensed under the MIT license. See LICENSE.md file
# in the project root for full license information.
import numpy as np
import torch
import torch.optim as optim
import logging
import argparse, os, yaml, csv
from pdp.generator import *
from pdp.trainer import SatFactorGraphTrainer
##########################################################################################################################
def write_to_csv(result_list, file_path):
with open(file_path, mode = 'w', newline = '') as f:
writer = csv.writer(f, delimiter = ',', quotechar = '"', quoting = csv.QUOTE_MINIMAL)
for row in result_list:
writer.writerow([row[0], row[1][1, 0]])
def write_to_csv_time(result_list, file_path):
with open(file_path, mode = 'w', newline = '') as f:
writer = csv.writer(f, delimiter = ',', quotechar = '"', quoting = csv.QUOTE_MINIMAL)
for row in result_list:
writer.writerow([row[0], row[2]])
def run(random_seed, config_file, is_training, load_model, cpu, reset_step, use_generator, batch_replication):
"""Runs the train/test/predict procedures."""
if not use_generator:
np.random.seed(random_seed)
torch.manual_seed(random_seed)
# Set the configurations (from either JSON or YAML file)
with open(config_file, 'r') as f:
        config = yaml.load(f, Loader = yaml.SafeLoader)
config['train_path'] = [os.path.abspath(p) for p in config['train_path']]
    config['validation_path'] = [os.path.abspath(p) for p in config['validation_path']]
config['model_path'] = os.path.abspath(config['model_path'])
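    # A minimal configuration sketch (YAML; the keys mirror the accesses in this
    # file, the values are hypothetical):
    #   model_name: pdp-sat
    #   model_type: factor-graph
    #   version: v1
    #   model_path: ./models
    #   train_path: [./data/train]
    #   validation_path: [./data/validation]
    #   test_path: [./data/test]
    #   learning_rate: 0.001
    #   weight_decay: 0.0
    #   metric_index: 0
    #   train_epoch_size: 1000
    #   verbose: true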
# Set the logger
    log_format = '[%(levelname)s] %(asctime)s - %(name)s: %(message)s'
    logging.basicConfig(level = logging.DEBUG, format = log_format)
logger = logging.getLogger(config['model_name'] + ' (' + config['version'] + ')')
    # Check if the input path is a list or not
if not isinstance(config['train_path'], list):
config['train_path'] = [os.path.join(config['train_path'], f) for f in os.listdir(config['train_path']) if
os.path.isfile(os.path.join(config['train_path'], f)) and f.endswith('.json')]
if not isinstance(config['validation_path'], list):
config['validation_path'] = [os.path.join(config['validation_path'], f) for f in os.listdir(config['validation_path']) if
os.path.isfile(os.path.join(config['validation_path'], f)) and f.endswith('.json')]
if config['verbose']:
if use_generator:
logger.info("Generating training examples via %s generator." % config['generator'])
else:
logger.info("Training file(s): %s" % config['train_path'])
logger.info("Validation file(s): %s" % config['validation_path'])
best_model_path_base = os.path.join(os.path.relpath(config['model_path']), config['model_name'], config['version'], "best")
last_model_path_base = os.path.join(os.path.relpath(config['model_path']), config['model_name'], config['version'], "last")
if not os.path.exists(best_model_path_base):
os.makedirs(best_model_path_base)
if not os.path.exists(last_model_path_base):
os.makedirs(last_model_path_base)
trainer = SatFactorGraphTrainer(config = config, use_cuda = not cpu, logger = logger)
# Training
if is_training:
if config['verbose']:
logger.info("Starting the training phase...")
generator = None
if use_generator:
if config['generator'] == 'modular':
generator = ModularCNFGenerator(config['min_k'], config['min_n'], config['max_n'], config['min_q'], config['max_q'], config['min_c'],
config['max_c'], config['min_alpha'], config['max_alpha'])
elif config['generator'] == 'v-modular':
generator = VariableModularCNFGenerator(config['min_k'], config['max_k'], config['min_n'], config['max_n'], config['min_q'],
config['max_q'], config['min_c'], config['max_c'], config['min_alpha'], config['max_alpha'])
else:
generator = UniformCNFGenerator(config['min_n'], config['max_n'], config['min_k'], config['max_k'], config['min_alpha'],
config['max_alpha'])
model_list, errors, losses = trainer.train(train_list = config['train_path'], validation_list = config['validation_path'],
optimizer = optim.Adam(trainer.get_parameter_list(), lr = config['learning_rate'], weight_decay = config['weight_decay']),
last_export_path_base = last_model_path_base, best_export_path_base = best_model_path_base, metric_index = config['metric_index'],
load_model = load_model, reset_step = reset_step, generator = generator, train_epoch_size = config['train_epoch_size'])
if config['verbose']:
logger.info("Starting the test phase...")
for test_files in config['test_path']:
if config['verbose']:
logger.info("Testing " + test_files)
if load_model == "last":
import_path_base = last_model_path_base
elif load_model == "best":
import_path_base = best_model_path_base
else:
import_path_base = None
result = trainer.test(test_list = test_files, import_path_base = import_path_base, batch_replication = batch_replication)
if config['verbose']:
for row in result:
filename, errors, _ = row
print('Dataset: ' + filename)
print("Accuracy: \t%s" % (1 - errors[0]))
print("Recall: \t%s" % (1 - errors[1]))
if os.path.isdir(test_files):
write_to_csv(result,
os.path.join(test_files, config['model_type'] + '_' + config['model_name'] + '_' + config['version'] + '-results.csv'))
write_to_csv_time(result, os.path.join(test_files,
config['model_type'] + '_' + config['model_name'] + '_' + config['version'] + '-results-time.csv'))
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('config', help = 'The configuration JSON file')
parser.add_argument('-t', '--test', help = 'The test mode', action = 'store_true')
parser.add_argument('-l', '--load_model', help = 'Load the previous model')
parser.add_argument('-c', '--cpu_mode', help = 'Run on CPU', action = 'store_true')
parser.add_argument('-r', '--reset', help = 'Reset the global step', action = 'store_true')
    parser.add_argument('-g', '--use_generator', help = 'Generate training examples on the fly', action = 'store_true')
parser.add_argument('-b', '--batch_replication', help = 'Batch replication factor', type = int, default = 1)
args = parser.parse_args()
run(0, args.config, not args.test, args.load_model, args.cpu_mode, args.reset, args.use_generator, args.batch_replication)
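# Example invocations (file and model names are hypothetical):
#   python main.py config.yaml                # train with the settings in config.yaml
#   python main.py config.yaml -t -l best     # test only, loading the best saved model
#   python main.py config.yaml -g             # train on generated formulas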
|
[
"yaml.load",
"os.path.abspath",
"numpy.random.seed",
"csv.writer",
"argparse.ArgumentParser",
"logging.basicConfig",
"os.makedirs",
"torch.manual_seed",
"os.path.isdir",
"os.path.exists",
"os.path.relpath",
"pdp.trainer.SatFactorGraphTrainer",
"os.path.join",
"os.listdir",
"logging.getLogger"
] |
[((1884, 1939), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.DEBUG', 'format': 'format'}), '(level=logging.DEBUG, format=format)\n', (1903, 1939), False, 'import logging\n'), ((1958, 2030), 'logging.getLogger', 'logging.getLogger', (["(config['model_name'] + ' (' + config['version'] + ')')"], {}), "(config['model_name'] + ' (' + config['version'] + ')')\n", (1975, 2030), False, 'import logging\n'), ((3456, 3525), 'pdp.trainer.SatFactorGraphTrainer', 'SatFactorGraphTrainer', ([], {'config': 'config', 'use_cuda': '(not cpu)', 'logger': 'logger'}), '(config=config, use_cuda=not cpu, logger=logger)\n', (3477, 3525), False, 'from pdp.trainer import SatFactorGraphTrainer\n'), ((6471, 6496), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (6494, 6496), False, 'import argparse, os, yaml, csv\n'), ((687, 757), 'csv.writer', 'csv.writer', (['f'], {'delimiter': '""","""', 'quotechar': '"""\\""""', 'quoting': 'csv.QUOTE_MINIMAL'}), '(f, delimiter=\',\', quotechar=\'"\', quoting=csv.QUOTE_MINIMAL)\n', (697, 757), False, 'import argparse, os, yaml, csv\n'), ((980, 1050), 'csv.writer', 'csv.writer', (['f'], {'delimiter': '""","""', 'quotechar': '"""\\""""', 'quoting': 'csv.QUOTE_MINIMAL'}), '(f, delimiter=\',\', quotechar=\'"\', quoting=csv.QUOTE_MINIMAL)\n', (990, 1050), False, 'import argparse, os, yaml, csv\n'), ((1344, 1371), 'numpy.random.seed', 'np.random.seed', (['random_seed'], {}), '(random_seed)\n', (1358, 1371), True, 'import numpy as np\n'), ((1381, 1411), 'torch.manual_seed', 'torch.manual_seed', (['random_seed'], {}), '(random_seed)\n', (1398, 1411), False, 'import torch\n'), ((1533, 1545), 'yaml.load', 'yaml.load', (['f'], {}), '(f)\n', (1542, 1545), False, 'import argparse, os, yaml, csv\n'), ((1749, 1786), 'os.path.abspath', 'os.path.abspath', (["config['model_path']"], {}), "(config['model_path'])\n", (1764, 1786), False, 'import argparse, os, yaml, csv\n'), ((3030, 3067), 'os.path.relpath', 'os.path.relpath', (["config['model_path']"], {}), "(config['model_path'])\n", (3045, 3067), False, 'import argparse, os, yaml, csv\n'), ((3161, 3198), 'os.path.relpath', 'os.path.relpath', (["config['model_path']"], {}), "(config['model_path'])\n", (3176, 3198), False, 'import argparse, os, yaml, csv\n'), ((3263, 3299), 'os.path.exists', 'os.path.exists', (['best_model_path_base'], {}), '(best_model_path_base)\n', (3277, 3299), False, 'import argparse, os, yaml, csv\n'), ((3310, 3343), 'os.makedirs', 'os.makedirs', (['best_model_path_base'], {}), '(best_model_path_base)\n', (3321, 3343), False, 'import argparse, os, yaml, csv\n'), ((3358, 3394), 'os.path.exists', 'os.path.exists', (['last_model_path_base'], {}), '(last_model_path_base)\n', (3372, 3394), False, 'import argparse, os, yaml, csv\n'), ((3405, 3438), 'os.makedirs', 'os.makedirs', (['last_model_path_base'], {}), '(last_model_path_base)\n', (3416, 3438), False, 'import argparse, os, yaml, csv\n'), ((6002, 6027), 'os.path.isdir', 'os.path.isdir', (['test_files'], {}), '(test_files)\n', (6015, 6027), False, 'import argparse, os, yaml, csv\n'), ((1579, 1597), 'os.path.abspath', 'os.path.abspath', (['p'], {}), '(p)\n', (1594, 1597), False, 'import argparse, os, yaml, csv\n'), ((1667, 1685), 'os.path.abspath', 'os.path.abspath', (['p'], {}), '(p)\n', (1682, 1685), False, 'import argparse, os, yaml, csv\n'), ((2165, 2202), 'os.path.join', 'os.path.join', (["config['train_path']", 'f'], {}), "(config['train_path'], f)\n", (2177, 2202), False, 'import argparse, os, yaml, csv\n'), ((2457, 2499), 
'os.path.join', 'os.path.join', (["config['validation_path']", 'f'], {}), "(config['validation_path'], f)\n", (2469, 2499), False, 'import argparse, os, yaml, csv\n'), ((2212, 2244), 'os.listdir', 'os.listdir', (["config['train_path']"], {}), "(config['train_path'])\n", (2222, 2244), False, 'import argparse, os, yaml, csv\n'), ((2509, 2546), 'os.listdir', 'os.listdir', (["config['validation_path']"], {}), "(config['validation_path'])\n", (2519, 2546), False, 'import argparse, os, yaml, csv\n'), ((6089, 6211), 'os.path.join', 'os.path.join', (['test_files', "(config['model_type'] + '_' + config['model_name'] + '_' + config['version'\n ] + '-results.csv')"], {}), "(test_files, config['model_type'] + '_' + config['model_name'] +\n '_' + config['version'] + '-results.csv')\n", (6101, 6211), False, 'import argparse, os, yaml, csv\n'), ((6248, 6375), 'os.path.join', 'os.path.join', (['test_files', "(config['model_type'] + '_' + config['model_name'] + '_' + config['version'\n ] + '-results-time.csv')"], {}), "(test_files, config['model_type'] + '_' + config['model_name'] +\n '_' + config['version'] + '-results-time.csv')\n", (6260, 6375), False, 'import argparse, os, yaml, csv\n'), ((2296, 2333), 'os.path.join', 'os.path.join', (["config['train_path']", 'f'], {}), "(config['train_path'], f)\n", (2308, 2333), False, 'import argparse, os, yaml, csv\n'), ((2603, 2645), 'os.path.join', 'os.path.join', (["config['validation_path']", 'f'], {}), "(config['validation_path'], f)\n", (2615, 2645), False, 'import argparse, os, yaml, csv\n')]
|
# Copyright 2000-2007 <NAME> <<EMAIL>>
# <NAME>
#
# All Rights Reserved
#
#
# Permission to use, copy, modify, and distribute this software and
# its documentation for any purpose is hereby granted without fee,
# provided that the above copyright notice appear in all copies and
# that both that copyright notice and this permission notice appear in
# supporting documentation.
#
# THE AUTHOR <NAME> DISCLAIMS ALL WARRANTIES WITH REGARD TO
# THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
# AND FITNESS, IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL,
# INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER
# RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF
# CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
# CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
"""Wedge pyrepl behaviour into cmd.Cmd-derived classes.
replize, when given a subclass of cmd.Cmd, returns a class that
behaves almost identically to the supplied class, except that it uses
pyrepl instead of raw_input.
It was designed to let you do this:
>>> import pdb
>>> from pyrepl import replize
>>> pdb.Pdb = replize(pdb.Pdb)
which is in fact done by the `pythoni' script that comes with
pyrepl."""
from __future__ import print_function
from pyrepl import completing_reader as cr, reader, completer
from pyrepl.completing_reader import CompletingReader as CR
import cmd
class CmdReader(CR):
def collect_keymap(self):
return super(CmdReader, self).collect_keymap() + (
("\\M-\\n", "invalid-key"),
("\\n", "accept"))
CR_init = CR.__init__
def __init__(self, completions):
self.CR_init(self)
self.completions = completions
def get_completions(self, stem):
if len(stem) != self.pos:
return []
return sorted(set(s for s in self.completions
if s.startswith(stem)))
def replize(klass, history_across_invocations=1):
"""Return a subclass of the cmd.Cmd-derived klass that uses
pyrepl instead of readline.
Raises a ValueError if klass does not derive from cmd.Cmd.
The optional history_across_invocations parameter (default 1)
controls whether instances of the returned class share
histories."""
completions = [s[3:]
for s in completer.get_class_members(klass)
if s.startswith("do_")]
if not issubclass(klass, cmd.Cmd):
        raise ValueError("%s does not derive from cmd.Cmd" % klass)
# if klass.cmdloop.im_class is not cmd.Cmd:
# print "this may not work"
class CmdRepl(klass):
k_init = klass.__init__
if history_across_invocations:
_CmdRepl__history = []
def __init__(self, *args, **kw):
self.k_init(*args, **kw)
self.__reader = CmdReader(completions)
self.__reader.history = CmdRepl._CmdRepl__history
self.__reader.historyi = len(CmdRepl._CmdRepl__history)
else:
def __init__(self, *args, **kw):
self.k_init(*args, **kw)
self.__reader = CmdReader(completions)
def cmdloop(self, intro=None):
self.preloop()
if intro is not None:
self.intro = intro
if self.intro:
print(self.intro)
stop = None
while not stop:
if self.cmdqueue:
line = self.cmdqueue[0]
del self.cmdqueue[0]
else:
try:
self.__reader.ps1 = self.prompt
line = self.__reader.readline()
except EOFError:
line = "EOF"
line = self.precmd(line)
stop = self.onecmd(line)
stop = self.postcmd(stop, line)
self.postloop()
CmdRepl.__name__ = "replize(%s.%s)"%(klass.__module__, klass.__name__)
return CmdRepl
|
[
"pyrepl.completer.get_class_members"
] |
[((2397, 2431), 'pyrepl.completer.get_class_members', 'completer.get_class_members', (['klass'], {}), '(klass)\n', (2424, 2431), False, 'from pyrepl import completing_reader as cr, reader, completer\n')]
|
# -*- coding: utf-8 -*-
"""
Created on Mon Aug 29 10:58:31 2016
@author: <EMAIL>
"""
import numpy as np
import matplotlib.pyplot as plt
import seaborn as sns
from matplotlib.patches import Ellipse
def plot_cov_ellipse(cov, pos, nstd=2, ax=None, **kwargs):
"""
Plots an `nstd` sigma error ellipse based on the specified covariance
matrix (`cov`). Additional keyword arguments are passed on to the
ellipse patch artist.
Parameters
----------
cov : The 2x2 covariance matrix to base the ellipse on
pos : The location of the center of the ellipse. Expects a 2-element
sequence of [x0, y0].
nstd : The radius of the ellipse in numbers of standard deviations.
Defaults to 2 standard deviations.
ax : The axis that the ellipse will be plotted on. Defaults to the
current axis.
Additional keyword arguments are pass on to the ellipse patch.
Returns
-------
A matplotlib ellipse artist
"""
def eigsorted(cov):
vals, vecs = np.linalg.eigh(cov)
order = vals.argsort()[::-1]
return vals[order], vecs[:,order]
if ax is None:
ax = plt.gca()
vals, vecs = eigsorted(cov)
theta = np.degrees(np.arctan2(*vecs[:,0][::-1]))
# Width and height are "full" widths, not radius
width, height = 2 * nstd * np.sqrt(vals)
ellip = Ellipse(xy=pos, width=width, height=height, angle=theta, **kwargs)
ax.add_artist(ellip)
return ellip
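if __name__ == "__main__":
    # Usage sketch: scatter synthetic 2-D data and overlay its 2-sigma covariance
    # ellipse (the sample mean/covariance below are arbitrary demo values).
    np.random.seed(0)
    points = np.random.multivariate_normal([1.0, 2.0], [[3.0, 1.0], [1.0, 2.0]], 500)
    plt.scatter(points[:, 0], points[:, 1], s=4)
    plot_cov_ellipse(np.cov(points, rowvar=False), points.mean(axis=0),
                     nstd=2, alpha=0.3, color="C1")
    plt.show()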
|
[
"numpy.arctan2",
"numpy.linalg.eigh",
"matplotlib.pyplot.gca",
"matplotlib.patches.Ellipse",
"numpy.sqrt"
] |
[((1407, 1473), 'matplotlib.patches.Ellipse', 'Ellipse', ([], {'xy': 'pos', 'width': 'width', 'height': 'height', 'angle': 'theta'}), '(xy=pos, width=width, height=height, angle=theta, **kwargs)\n', (1414, 1473), False, 'from matplotlib.patches import Ellipse\n'), ((1068, 1087), 'numpy.linalg.eigh', 'np.linalg.eigh', (['cov'], {}), '(cov)\n', (1082, 1087), True, 'import numpy as np\n'), ((1200, 1209), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (1207, 1209), True, 'import matplotlib.pyplot as plt\n'), ((1266, 1295), 'numpy.arctan2', 'np.arctan2', (['*vecs[:, 0][::-1]'], {}), '(*vecs[:, 0][::-1])\n', (1276, 1295), True, 'import numpy as np\n'), ((1381, 1394), 'numpy.sqrt', 'np.sqrt', (['vals'], {}), '(vals)\n', (1388, 1394), True, 'import numpy as np\n')]
|
import time
import pytest
from freiner.limits import RateLimitItemPerMinute, RateLimitItemPerSecond
from freiner.storage.memory import MemoryStorage
from freiner.strategies.fixed_window import FixedWindowRateLimiter
from freiner.strategies.fixed_window_elastic import FixedWindowElasticExpiryRateLimiter
from freiner.strategies.moving_window import MovingWindowRateLimiter
from ..util import freeze_time
@pytest.fixture
def storage() -> MemoryStorage:
return MemoryStorage()
def test_fixed_window_simple(storage: MemoryStorage):
limiter = FixedWindowRateLimiter(storage)
with freeze_time():
limit = RateLimitItemPerSecond(2, 1)
assert limiter.test(limit) is True
assert limiter.hit(limit) is True
assert limiter.test(limit) is True
assert limiter.hit(limit) is True
assert limiter.test(limit) is False
assert limiter.hit(limit) is False
def test_fixed_window(storage: MemoryStorage):
limiter = FixedWindowRateLimiter(storage)
with freeze_time() as frozen_datetime:
limit = RateLimitItemPerSecond(10, 2)
start = time.time()
assert all([limiter.hit(limit) for _ in range(0, 10)]) is True
assert limiter.hit(limit) is False
frozen_datetime.tick(1)
assert limiter.hit(limit) is False
window_stats = limiter.get_window_stats(limit)
assert window_stats.reset_time == start + 2
assert window_stats.remaining_count == 0
frozen_datetime.tick(1)
assert limiter.get_window_stats(limit).remaining_count == 10
assert limiter.hit(limit) is True
def test_fixed_window_with_elastic_expiry(storage: MemoryStorage):
limiter = FixedWindowElasticExpiryRateLimiter(storage)
with freeze_time() as frozen_datetime:
limit = RateLimitItemPerSecond(10, 2)
start = time.time()
assert all([limiter.hit(limit) for _ in range(0, 10)]) is True
assert limiter.hit(limit) is False
frozen_datetime.tick(1)
assert limiter.hit(limit) is False
window_stats = limiter.get_window_stats(limit)
# three extensions to the expiry
assert window_stats.reset_time == start + 3
assert window_stats.remaining_count == 0
frozen_datetime.tick(1)
assert limiter.hit(limit) is False
frozen_datetime.tick(3)
start = time.time()
assert limiter.hit(limit) is True
window_stats = limiter.get_window_stats(limit)
assert window_stats.reset_time == start + 2
assert window_stats.remaining_count == 9
def test_moving_window_simple(storage: MemoryStorage):
limiter = MovingWindowRateLimiter(storage)
with freeze_time():
limit = RateLimitItemPerSecond(2, 1)
assert limiter.test(limit) is True
assert limiter.hit(limit) is True
assert limiter.test(limit) is True
assert limiter.hit(limit) is True
assert limiter.test(limit) is False
assert limiter.hit(limit) is False
def test_moving_window(storage: MemoryStorage):
limiter = MovingWindowRateLimiter(storage)
with freeze_time() as frozen_datetime:
limit = RateLimitItemPerMinute(10)
for i in range(0, 5):
assert limiter.hit(limit) is True
assert limiter.hit(limit) is True
assert limiter.get_window_stats(limit).remaining_count == 10 - ((i + 1) * 2)
frozen_datetime.tick(10)
assert limiter.get_window_stats(limit).remaining_count == 0
assert limiter.hit(limit) is False
frozen_datetime.tick(20)
window_stats = limiter.get_window_stats(limit)
assert window_stats.reset_time == time.time() + 30
assert window_stats.remaining_count == 4
frozen_datetime.tick(30)
assert limiter.get_window_stats(limit).remaining_count == 10
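# Typical (non-test) usage, following the same API the tests above exercise:
#   storage = MemoryStorage()
#   limiter = MovingWindowRateLimiter(storage)
#   limit = RateLimitItemPerMinute(10)
#   allowed = limiter.hit(limit)  # True while under 10 hits per minute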
|
[
"freiner.strategies.moving_window.MovingWindowRateLimiter",
"freiner.limits.RateLimitItemPerMinute",
"freiner.strategies.fixed_window.FixedWindowRateLimiter",
"time.time",
"freiner.strategies.fixed_window_elastic.FixedWindowElasticExpiryRateLimiter",
"freiner.storage.memory.MemoryStorage",
"freiner.limits.RateLimitItemPerSecond"
] |
[((468, 483), 'freiner.storage.memory.MemoryStorage', 'MemoryStorage', ([], {}), '()\n', (481, 483), False, 'from freiner.storage.memory import MemoryStorage\n'), ((554, 585), 'freiner.strategies.fixed_window.FixedWindowRateLimiter', 'FixedWindowRateLimiter', (['storage'], {}), '(storage)\n', (576, 585), False, 'from freiner.strategies.fixed_window import FixedWindowRateLimiter\n'), ((976, 1007), 'freiner.strategies.fixed_window.FixedWindowRateLimiter', 'FixedWindowRateLimiter', (['storage'], {}), '(storage)\n', (998, 1007), False, 'from freiner.strategies.fixed_window import FixedWindowRateLimiter\n'), ((1699, 1743), 'freiner.strategies.fixed_window_elastic.FixedWindowElasticExpiryRateLimiter', 'FixedWindowElasticExpiryRateLimiter', (['storage'], {}), '(storage)\n', (1734, 1743), False, 'from freiner.strategies.fixed_window_elastic import FixedWindowElasticExpiryRateLimiter\n'), ((2655, 2687), 'freiner.strategies.moving_window.MovingWindowRateLimiter', 'MovingWindowRateLimiter', (['storage'], {}), '(storage)\n', (2678, 2687), False, 'from freiner.strategies.moving_window import MovingWindowRateLimiter\n'), ((3079, 3111), 'freiner.strategies.moving_window.MovingWindowRateLimiter', 'MovingWindowRateLimiter', (['storage'], {}), '(storage)\n', (3102, 3111), False, 'from freiner.strategies.moving_window import MovingWindowRateLimiter\n'), ((626, 654), 'freiner.limits.RateLimitItemPerSecond', 'RateLimitItemPerSecond', (['(2)', '(1)'], {}), '(2, 1)\n', (648, 654), False, 'from freiner.limits import RateLimitItemPerMinute, RateLimitItemPerSecond\n'), ((1067, 1096), 'freiner.limits.RateLimitItemPerSecond', 'RateLimitItemPerSecond', (['(10)', '(2)'], {}), '(10, 2)\n', (1089, 1096), False, 'from freiner.limits import RateLimitItemPerMinute, RateLimitItemPerSecond\n'), ((1113, 1124), 'time.time', 'time.time', ([], {}), '()\n', (1122, 1124), False, 'import time\n'), ((1803, 1832), 'freiner.limits.RateLimitItemPerSecond', 'RateLimitItemPerSecond', (['(10)', '(2)'], {}), '(10, 2)\n', (1825, 1832), False, 'from freiner.limits import RateLimitItemPerMinute, RateLimitItemPerSecond\n'), ((1849, 1860), 'time.time', 'time.time', ([], {}), '()\n', (1858, 1860), False, 'import time\n'), ((2374, 2385), 'time.time', 'time.time', ([], {}), '()\n', (2383, 2385), False, 'import time\n'), ((2728, 2756), 'freiner.limits.RateLimitItemPerSecond', 'RateLimitItemPerSecond', (['(2)', '(1)'], {}), '(2, 1)\n', (2750, 2756), False, 'from freiner.limits import RateLimitItemPerMinute, RateLimitItemPerSecond\n'), ((3171, 3197), 'freiner.limits.RateLimitItemPerMinute', 'RateLimitItemPerMinute', (['(10)'], {}), '(10)\n', (3193, 3197), False, 'from freiner.limits import RateLimitItemPerMinute, RateLimitItemPerSecond\n'), ((3690, 3701), 'time.time', 'time.time', ([], {}), '()\n', (3699, 3701), False, 'import time\n')]
|
import re
def str_contains_numbers(text):
    """
    Check if a string contains at least one number.
    :param text: the string to check.
    :return: True if the string contains at least one number, False otherwise.
    """
    return bool(re.search(r'\d', text))
def str_is_num_version(text):
    """
    Check if a string contains a version number.
    :param text: the string to check.
    :return: True if the string contains a version number, False otherwise.
    """
    return bool(re.search(r' \d+((\.\d+)+)?', text))
def word_is_num_version(text):
    """
    Check if a word contains a version number.
    :param text: the word to check.
    :return: True if the word contains a version number, False otherwise.
    """
    return bool(re.search(r'\d+((\.\d+)+)?', text))
def str_contains_num_version_range(text):
    """
    Check if a string contains a version-number range.
    :param text: the string to check.
    :return: True if the string contains a version-number range, False otherwise.
    """
    return bool(re.search(r'\d+((\.\d+)+)? < \d+((\.\d+)+)?', text))
def str_contains_num_version_range_with_x(text):
    """
    Check if a string contains a version-number range with x wildcards.
    :param text: the string to check.
    :return: True if the string contains a version-number range with x wildcards, False otherwise.
    """
    return bool(re.search(r'\d+((\.\d+)+)?(\.x)? < \d+((\.\d+)+)?(\.x)?', text))
def get_vulnerability_extension(vulnerability_file):
"""
Get the extension of the vulnerability passed as parameter.
:param vulnerability_file: the vulnerability we want to get its extension.
:return: the extension of the vulnerability passed as parameter.
"""
regex = re.search(r'\.(?P<extension>\w+)', vulnerability_file)
extension = '.' + regex.group('extension')
return extension
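# Usage sketch (inputs are illustrative):
#   str_contains_numbers("openssl 1.0.2")             -> True
#   str_is_num_version("openssl 1.0.2")               -> True
#   str_contains_num_version_range("1.0 < 2.0")       -> True
#   get_vulnerability_extension("CVE-2014-0160.json") -> ".json"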
|
[
"re.search"
] |
[((1715, 1770), 're.search', 're.search', (['"""\\\\.(?P<extension>\\\\w+)"""', 'vulnerability_file'], {}), "('\\\\.(?P<extension>\\\\w+)', vulnerability_file)\n", (1724, 1770), False, 'import re\n'), ((238, 259), 're.search', 're.search', (['"""\\\\d"""', 'str'], {}), "('\\\\d', str)\n", (247, 259), False, 'import re\n'), ((487, 523), 're.search', 're.search', (['""" \\\\d+((\\\\.\\\\d+)+)?"""', 'str'], {}), "(' \\\\d+((\\\\.\\\\d+)+)?', str)\n", (496, 523), False, 'import re\n'), ((744, 779), 're.search', 're.search', (['"""\\\\d+((\\\\.\\\\d+)+)?"""', 'str'], {}), "('\\\\d+((\\\\.\\\\d+)+)?', str)\n", (753, 779), False, 'import re\n'), ((1031, 1086), 're.search', 're.search', (['"""\\\\d+((\\\\.\\\\d+)+)? < \\\\d+((\\\\.\\\\d+)+)?"""', 'str'], {}), "('\\\\d+((\\\\.\\\\d+)+)? < \\\\d+((\\\\.\\\\d+)+)?', str)\n", (1040, 1086), False, 'import re\n'), ((1356, 1425), 're.search', 're.search', (['"""\\\\d+((\\\\.\\\\d+)+)?(\\\\.x)? < \\\\d+((\\\\.\\\\d+)+)?(\\\\.x)?"""', 'str'], {}), "('\\\\d+((\\\\.\\\\d+)+)?(\\\\.x)? < \\\\d+((\\\\.\\\\d+)+)?(\\\\.x)?', str)\n", (1365, 1425), False, 'import re\n')]
|
# coding: utf-8
"""
Cloudbreak API
    Cloudbreak is a powerful left surf that breaks over a coral reef, a mile southwest of the island of Tavarua, Fiji. Cloudbreak is a cloud-agnostic Hadoop as a Service API. It abstracts the provisioning and eases the management and monitoring of on-demand clusters. SequenceIQ's Cloudbreak is a RESTful application development platform with the goal of helping developers to build solutions for deploying Hadoop YARN clusters in different environments. Once it is deployed in your favourite servlet container it exposes a REST API allowing you to spin up Hadoop clusters of arbitrary sizes and cloud providers. Provisioning Hadoop has never been easier. Cloudbreak is built on the foundation of cloud providers' APIs (Amazon AWS, Microsoft Azure, Google Cloud Platform, Openstack), Apache Ambari, Docker lightweight containers, Swarm and Consul. For further product documentation follow the link: <a href=\"http://hortonworks.com/apache/cloudbreak/\">http://hortonworks.com/apache/cloudbreak/</a>
OpenAPI spec version: 2.9.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import sys
import os
import re
# python 2 and python 3 compatibility library
from six import iteritems
from ..configuration import Configuration
from ..api_client import ApiClient
class V1utilApi(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
config = Configuration()
if api_client:
self.api_client = api_client
else:
if not config.api_client:
config.api_client = ApiClient()
self.api_client = config.api_client
def check_client_version(self, version, **kwargs):
"""
checks the client version
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.check_client_version(version, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str version: (required)
:return: VersionCheckResult
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.check_client_version_with_http_info(version, **kwargs)
else:
(data) = self.check_client_version_with_http_info(version, **kwargs)
return data
def check_client_version_with_http_info(self, version, **kwargs):
"""
checks the client version
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.check_client_version_with_http_info(version, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str version: (required)
:return: VersionCheckResult
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['version']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method check_client_version" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'version' is set
if ('version' not in params) or (params['version'] is None):
raise ValueError("Missing the required parameter `version` when calling `check_client_version`")
collection_formats = {}
path_params = {}
if 'version' in params:
path_params['version'] = params['version']
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['tokenAuth']
return self.api_client.call_api('/v1/util/client/{version}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='VersionCheckResult',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
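    # Synchronous usage sketch (the docstrings above show the asynchronous
    # callback form; client/endpoint configuration is assumed to be done
    # elsewhere):
    #   api = V1utilApi()
    #   result = api.check_client_version('2.9.0')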
def create_rds_database_util(self, **kwargs):
"""
create a database for the service in the RDS if the connection could be created
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.create_rds_database_util(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param RDSBuildRequest body:
:param list[str] target:
:return: RdsBuildResult
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.create_rds_database_util_with_http_info(**kwargs)
else:
(data) = self.create_rds_database_util_with_http_info(**kwargs)
return data
def create_rds_database_util_with_http_info(self, **kwargs):
"""
        creates a database for the service in the RDS if a connection can be established
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.create_rds_database_util_with_http_info(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param RDSBuildRequest body:
:param list[str] target:
:return: RdsBuildResult
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body', 'target']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_rds_database_util" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'target' in params:
query_params.append(('target', params['target']))
collection_formats['target'] = 'multi'
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['tokenAuth']
return self.api_client.call_api('/v1/util/rds-database', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='RdsBuildResult',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_cloud_storage_matrix(self, **kwargs):
"""
returns supported cloud storage for stack version
        Define the stack version at least at patch level, e.g. 2.6.0
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_cloud_storage_matrix(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str stack_version:
:return: list[CloudStorageSupportedResponse]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_cloud_storage_matrix_with_http_info(**kwargs)
else:
(data) = self.get_cloud_storage_matrix_with_http_info(**kwargs)
return data
def get_cloud_storage_matrix_with_http_info(self, **kwargs):
"""
returns supported cloud storage for stack version
        Define the stack version at least at patch level, e.g. 2.6.0
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_cloud_storage_matrix_with_http_info(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str stack_version:
:return: list[CloudStorageSupportedResponse]
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['stack_version']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_cloud_storage_matrix" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'stack_version' in params:
query_params.append(('stackVersion', params['stack_version']))
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['tokenAuth']
return self.api_client.call_api('/v1/util/cloudstoragematrix', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[CloudStorageSupportedResponse]',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_custom_parameters(self, **kwargs):
"""
returns custom parameters
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_custom_parameters(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param ParametersQueryRequest body:
:return: ParametersQueryResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_custom_parameters_with_http_info(**kwargs)
else:
(data) = self.get_custom_parameters_with_http_info(**kwargs)
return data
def get_custom_parameters_with_http_info(self, **kwargs):
"""
returns custom parameters
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_custom_parameters_with_http_info(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param ParametersQueryRequest body:
:return: ParametersQueryResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_custom_parameters" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['tokenAuth']
return self.api_client.call_api('/v1/util/custom-parameters', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ParametersQueryResponse',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_file_system_parameters(self, **kwargs):
"""
returns filesystem parameters
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_file_system_parameters(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param StructuredParametersQueryRequest body:
:return: StructuredParameterQueriesResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_file_system_parameters_with_http_info(**kwargs)
else:
(data) = self.get_file_system_parameters_with_http_info(**kwargs)
return data
def get_file_system_parameters_with_http_info(self, **kwargs):
"""
returns filesystem parameters
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_file_system_parameters_with_http_info(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param StructuredParametersQueryRequest body:
:return: StructuredParameterQueriesResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_file_system_parameters" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['tokenAuth']
return self.api_client.call_api('/v1/util/filesystem-parameters', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='StructuredParameterQueriesResponse',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_knox_services(self, blueprint_name, **kwargs):
"""
        returns supported Knox services
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_knox_services(blueprint_name, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str blueprint_name: (required)
:return: list[ExposedServiceResponse]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_knox_services_with_http_info(blueprint_name, **kwargs)
else:
(data) = self.get_knox_services_with_http_info(blueprint_name, **kwargs)
return data
def get_knox_services_with_http_info(self, blueprint_name, **kwargs):
"""
        returns supported Knox services
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_knox_services_with_http_info(blueprint_name, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str blueprint_name: (required)
:return: list[ExposedServiceResponse]
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['blueprint_name']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_knox_services" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'blueprint_name' is set
if ('blueprint_name' not in params) or (params['blueprint_name'] is None):
raise ValueError("Missing the required parameter `blueprint_name` when calling `get_knox_services`")
collection_formats = {}
path_params = {}
if 'blueprint_name' in params:
path_params['blueprintName'] = params['blueprint_name']
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['tokenAuth']
return self.api_client.call_api('/v1/util/knoxservices/{blueprintName}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[ExposedServiceResponse]',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_stack_matrix_util(self, **kwargs):
"""
        returns default Ambari details for distinct HDP and HDF
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_stack_matrix_util(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:return: StackMatrix
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_stack_matrix_util_with_http_info(**kwargs)
else:
(data) = self.get_stack_matrix_util_with_http_info(**kwargs)
return data
def get_stack_matrix_util_with_http_info(self, **kwargs):
"""
        returns default Ambari details for distinct HDP and HDF
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_stack_matrix_util_with_http_info(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:return: StackMatrix
If the method is called asynchronously,
returns the request thread.
"""
all_params = []
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_stack_matrix_util" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['tokenAuth']
return self.api_client.call_api('/v1/util/stackmatrix', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='StackMatrix',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def test_ambari_database_util(self, **kwargs):
"""
        tests database connection parameters
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.test_ambari_database_util(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param AmbariDatabaseDetails body:
:return: AmbariDatabaseTestResult
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.test_ambari_database_util_with_http_info(**kwargs)
else:
(data) = self.test_ambari_database_util_with_http_info(**kwargs)
return data
def test_ambari_database_util_with_http_info(self, **kwargs):
"""
        tests database connection parameters
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.test_ambari_database_util_with_http_info(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param AmbariDatabaseDetails body:
:return: AmbariDatabaseTestResult
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method test_ambari_database_util" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['tokenAuth']
return self.api_client.call_api('/v1/util/ambari-database', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='AmbariDatabaseTestResult',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
|
[
"six.iteritems"
] |
[((3993, 4020), 'six.iteritems', 'iteritems', (["params['kwargs']"], {}), "(params['kwargs'])\n", (4002, 4020), False, 'from six import iteritems\n'), ((8457, 8484), 'six.iteritems', 'iteritems', (["params['kwargs']"], {}), "(params['kwargs'])\n", (8466, 8484), False, 'from six import iteritems\n'), ((12827, 12854), 'six.iteritems', 'iteritems', (["params['kwargs']"], {}), "(params['kwargs'])\n", (12836, 12854), False, 'from six import iteritems\n'), ((16935, 16962), 'six.iteritems', 'iteritems', (["params['kwargs']"], {}), "(params['kwargs'])\n", (16944, 16962), False, 'from six import iteritems\n'), ((21065, 21092), 'six.iteritems', 'iteritems', (["params['kwargs']"], {}), "(params['kwargs'])\n", (21074, 21092), False, 'from six import iteritems\n'), ((25243, 25270), 'six.iteritems', 'iteritems', (["params['kwargs']"], {}), "(params['kwargs'])\n", (25252, 25270), False, 'from six import iteritems\n'), ((29543, 29570), 'six.iteritems', 'iteritems', (["params['kwargs']"], {}), "(params['kwargs'])\n", (29552, 29570), False, 'from six import iteritems\n'), ((33553, 33580), 'six.iteritems', 'iteritems', (["params['kwargs']"], {}), "(params['kwargs'])\n", (33562, 33580), False, 'from six import iteritems\n')]
|
#!/usr/bin/env python
from __future__ import print_function
from parcon import *
from collections import namedtuple
hexchars = '0123456789abcdefABCDEF'
Reg = namedtuple("Reg", ["num"])
Imm = namedtuple("Imm", ["value"])
MemRef = namedtuple("MemRef", ["reg", "offset"])
def keywords(vs):
return First(*[Keyword(SignificantLiteral(v)) for v in vs])
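# Token grammar: numbers are hex (0x-prefixed) or decimal; an offset carries a
# mandatory sign, an immediate an optional one, and both are folded into a
# Python int via int(x, 0).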
hexnum = SignificantLiteral('0x') + +CharIn(hexchars)
decnum = +Digit()
offset = (CharIn("+-") + Exact(hexnum | decnum))[flatten]["".join][lambda x: int(x, 0)]
imm = (-CharIn("+-") + Exact(hexnum | decnum))[flatten]["".join][lambda x: int(x, 0)][Imm]
reg = Literal('r') + integer[int][Reg]
memref = (Literal('[') + reg + Optional(offset, 0) + Literal(']'))[lambda x: MemRef(*x)]
unary_alu_ops = ['neg', 'neg32', 'le16', 'le32', 'le64', 'be16', 'be32', 'be64']
binary_alu_ops = ['add', 'sub', 'mul', 'div', 'or', 'and', 'lsh', 'rsh',
'mod', 'xor', 'mov', 'arsh']
binary_alu_ops.extend([x + '32' for x in binary_alu_ops])
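# ALU instructions are either unary (op reg) or binary (op reg, reg-or-imm).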
alu_instruction = \
(keywords(unary_alu_ops) + reg) | \
(keywords(binary_alu_ops) + reg + "," + (reg | imm))
mem_sizes = ['w', 'h', 'b', 'dw']
mem_store_reg_ops = ['stx' + s for s in mem_sizes]
mem_store_imm_ops = ['st' + s for s in mem_sizes]
mem_load_ops = ['ldx' + s for s in mem_sizes]
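# Memory instructions: stores write a register or immediate into a [reg+off]
# reference, loads read a [reg+off] reference into a register, and lddw loads
# a 64-bit immediate into a register.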
mem_instruction = \
(keywords(mem_store_reg_ops) + memref + "," + reg) | \
(keywords(mem_store_imm_ops) + memref + "," + imm) | \
(keywords(mem_load_ops) + reg + "," + memref) | \
(keywords(["lddw"]) + reg + "," + imm)
jmp_cmp_ops = ['jeq', 'jgt', 'jge', 'jlt', 'jle', 'jset', 'jne', 'jsgt', 'jsge', 'jslt', 'jsle']
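# Jumps: conditional compares take (reg, reg-or-imm, offset), `ja` a bare
# offset, `call` an immediate, and `exit` stands alone.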
jmp_instruction = \
(keywords(jmp_cmp_ops) + reg + "," + (reg | imm) + "," + offset) | \
(keywords(['ja']) + offset) | \
(keywords(['call']) + imm) | \
(keywords(['exit'])[lambda x: (x, )])
instruction = alu_instruction | mem_instruction | jmp_instruction
start = ZeroOrMore(instruction + Optional(Literal(';'))) + End()
def parse(source):
return start.parse_string(source)
if __name__ == "__main__":
import argparse
parser = argparse.ArgumentParser(description="Assembly parser", formatter_class=argparse.RawDescriptionHelpFormatter)
    parser.add_argument('file', type=argparse.FileType('r'), nargs='?', default='-')
args = parser.parse_args()
result = parse(args.file.read())
for inst in result:
print(repr(inst))
|
[
"collections.namedtuple",
"argparse.ArgumentParser",
"argparse.FileType"
] |
[((160, 186), 'collections.namedtuple', 'namedtuple', (['"""Reg"""', "['num']"], {}), "('Reg', ['num'])\n", (170, 186), False, 'from collections import namedtuple\n'), ((193, 221), 'collections.namedtuple', 'namedtuple', (['"""Imm"""', "['value']"], {}), "('Imm', ['value'])\n", (203, 221), False, 'from collections import namedtuple\n'), ((231, 270), 'collections.namedtuple', 'namedtuple', (['"""MemRef"""', "['reg', 'offset']"], {}), "('MemRef', ['reg', 'offset'])\n", (241, 270), False, 'from collections import namedtuple\n'), ((2087, 2200), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Assembly parser"""', 'formatter_class': 'argparse.RawDescriptionHelpFormatter'}), "(description='Assembly parser', formatter_class=\n argparse.RawDescriptionHelpFormatter)\n", (2110, 2200), False, 'import argparse\n'), ((2233, 2255), 'argparse.FileType', 'argparse.FileType', (['"""r"""'], {}), "('r')\n", (2250, 2255), False, 'import argparse\n')]
|
# Copyright 2018 DeepMind Technologies Limited. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for the BC agent."""
from absl.testing import absltest
from absl.testing import parameterized
from acme import specs
from acme import types
from acme.agents.jax import bc
from acme.jax import networks as networks_lib
from acme.jax import utils
from acme.testing import fakes
import chex
import haiku as hk
import jax
import jax.numpy as jnp
from jax.scipy import special
import numpy as np
import optax
def make_networks(
spec: specs.EnvironmentSpec,
discrete_actions: bool = False) -> networks_lib.FeedForwardNetwork:
"""Creates networks used by the agent."""
if discrete_actions:
final_layer_size = spec.actions.num_values
else:
final_layer_size = np.prod(spec.actions.shape, dtype=int)
def _actor_fn(obs, is_training=False, key=None):
    # is_training and key allow defining train/test-dependent modules
# like dropout.
del is_training
del key
if discrete_actions:
network = hk.nets.MLP([64, 64, final_layer_size])
else:
network = hk.Sequential([
networks_lib.LayerNormMLP([64, 64], activate_final=True),
networks_lib.NormalTanhDistribution(final_layer_size),
])
return network(obs)
policy = hk.without_apply_rng(hk.transform(_actor_fn))
# Create dummy observations and actions to create network parameters.
dummy_obs = utils.zeros_like(spec.observations)
dummy_obs = utils.add_batch_dim(dummy_obs)
network = networks_lib.FeedForwardNetwork(
lambda key: policy.init(key, dummy_obs), policy.apply)
return network
class BCTest(parameterized.TestCase):
@parameterized.parameters(
('logp',),
('mse',),
('peerbc',)
)
def test_continuous_actions(self, loss_name):
with chex.fake_pmap_and_jit():
num_sgd_steps_per_step = 1
num_steps = 5
# Create a fake environment to test with.
environment = fakes.ContinuousEnvironment(
episode_length=10, bounded=True, action_dim=6)
spec = specs.make_environment_spec(environment)
dataset_demonstration = fakes.transition_dataset(environment)
dataset_demonstration = dataset_demonstration.map(
lambda sample: types.Transition(*sample.data))
dataset_demonstration = dataset_demonstration.batch(8).as_numpy_iterator()
# Construct the agent.
network = make_networks(spec)
if loss_name == 'logp':
loss_fn = bc.logp(
logp_fn=lambda dist_params, actions: dist_params.log_prob(actions))
elif loss_name == 'mse':
loss_fn = bc.mse(
sample_fn=lambda dist_params, key: dist_params.sample(seed=key))
elif loss_name == 'peerbc':
base_loss_fn = bc.logp(
logp_fn=lambda dist_params, actions: dist_params.log_prob(actions))
loss_fn = bc.peerbc(base_loss_fn, zeta=0.1)
else:
raise ValueError
learner = bc.BCLearner(
network=network,
random_key=jax.random.PRNGKey(0),
loss_fn=loss_fn,
optimizer=optax.adam(0.01),
demonstrations=dataset_demonstration,
num_sgd_steps_per_step=num_sgd_steps_per_step)
# Train the agent
for _ in range(num_steps):
learner.step()
@parameterized.parameters(
('logp',),
('rcal',))
def test_discrete_actions(self, loss_name):
with chex.fake_pmap_and_jit():
num_sgd_steps_per_step = 1
num_steps = 5
# Create a fake environment to test with.
environment = fakes.DiscreteEnvironment(
num_actions=10, num_observations=100, obs_shape=(10,),
obs_dtype=np.float32)
spec = specs.make_environment_spec(environment)
dataset_demonstration = fakes.transition_dataset(environment)
dataset_demonstration = dataset_demonstration.map(
lambda sample: types.Transition(*sample.data))
dataset_demonstration = dataset_demonstration.batch(8).as_numpy_iterator()
# Construct the agent.
network = make_networks(spec, discrete_actions=True)
def logp_fn(logits, actions):
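        # Numerically stable log-prob: shift logits by their max, then
        # subtract logsumexp to normalize over actions.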
max_logits = jnp.max(logits, axis=-1, keepdims=True)
logits = logits - max_logits
logits_actions = jnp.sum(
jax.nn.one_hot(actions, spec.actions.num_values) * logits, axis=-1)
log_prob = logits_actions - special.logsumexp(logits, axis=-1)
return log_prob
if loss_name == 'logp':
loss_fn = bc.logp(logp_fn=logp_fn)
elif loss_name == 'rcal':
base_loss_fn = bc.logp(logp_fn=logp_fn)
loss_fn = bc.rcal(base_loss_fn, discount=0.99, alpha=0.1)
else:
raise ValueError
learner = bc.BCLearner(
network=network,
random_key=jax.random.PRNGKey(0),
loss_fn=loss_fn,
optimizer=optax.adam(0.01),
demonstrations=dataset_demonstration,
num_sgd_steps_per_step=num_sgd_steps_per_step)
# Train the agent
for _ in range(num_steps):
learner.step()
if __name__ == '__main__':
absltest.main()
|
[
"absl.testing.absltest.main",
"chex.fake_pmap_and_jit",
"optax.adam",
"acme.types.Transition",
"acme.jax.networks.NormalTanhDistribution",
"jax.random.PRNGKey",
"jax.nn.one_hot",
"acme.testing.fakes.transition_dataset",
"numpy.prod",
"acme.testing.fakes.DiscreteEnvironment",
"acme.agents.jax.bc.peerbc",
"haiku.nets.MLP",
"acme.jax.utils.zeros_like",
"acme.jax.utils.add_batch_dim",
"haiku.transform",
"jax.scipy.special.logsumexp",
"acme.agents.jax.bc.logp",
"acme.specs.make_environment_spec",
"acme.testing.fakes.ContinuousEnvironment",
"acme.agents.jax.bc.rcal",
"jax.numpy.max",
"absl.testing.parameterized.parameters",
"acme.jax.networks.LayerNormMLP"
] |
[((1951, 1986), 'acme.jax.utils.zeros_like', 'utils.zeros_like', (['spec.observations'], {}), '(spec.observations)\n', (1967, 1986), False, 'from acme.jax import utils\n'), ((2001, 2031), 'acme.jax.utils.add_batch_dim', 'utils.add_batch_dim', (['dummy_obs'], {}), '(dummy_obs)\n', (2020, 2031), False, 'from acme.jax import utils\n'), ((2199, 2257), 'absl.testing.parameterized.parameters', 'parameterized.parameters', (["('logp',)", "('mse',)", "('peerbc',)"], {}), "(('logp',), ('mse',), ('peerbc',))\n", (2223, 2257), False, 'from absl.testing import parameterized\n'), ((3823, 3869), 'absl.testing.parameterized.parameters', 'parameterized.parameters', (["('logp',)", "('rcal',)"], {}), "(('logp',), ('rcal',))\n", (3847, 3869), False, 'from absl.testing import parameterized\n'), ((5606, 5621), 'absl.testing.absltest.main', 'absltest.main', ([], {}), '()\n', (5619, 5621), False, 'from absl.testing import absltest\n'), ((1301, 1339), 'numpy.prod', 'np.prod', (['spec.actions.shape'], {'dtype': 'int'}), '(spec.actions.shape, dtype=int)\n', (1308, 1339), True, 'import numpy as np\n'), ((1839, 1862), 'haiku.transform', 'hk.transform', (['_actor_fn'], {}), '(_actor_fn)\n', (1851, 1862), True, 'import haiku as hk\n'), ((1558, 1597), 'haiku.nets.MLP', 'hk.nets.MLP', (['[64, 64, final_layer_size]'], {}), '([64, 64, final_layer_size])\n', (1569, 1597), True, 'import haiku as hk\n'), ((2341, 2365), 'chex.fake_pmap_and_jit', 'chex.fake_pmap_and_jit', ([], {}), '()\n', (2363, 2365), False, 'import chex\n'), ((2489, 2563), 'acme.testing.fakes.ContinuousEnvironment', 'fakes.ContinuousEnvironment', ([], {'episode_length': '(10)', 'bounded': '(True)', 'action_dim': '(6)'}), '(episode_length=10, bounded=True, action_dim=6)\n', (2516, 2563), False, 'from acme.testing import fakes\n'), ((2589, 2629), 'acme.specs.make_environment_spec', 'specs.make_environment_spec', (['environment'], {}), '(environment)\n', (2616, 2629), False, 'from acme import specs\n'), ((2660, 2697), 'acme.testing.fakes.transition_dataset', 'fakes.transition_dataset', (['environment'], {}), '(environment)\n', (2684, 2697), False, 'from acme.testing import fakes\n'), ((3938, 3962), 'chex.fake_pmap_and_jit', 'chex.fake_pmap_and_jit', ([], {}), '()\n', (3960, 3962), False, 'import chex\n'), ((4087, 4194), 'acme.testing.fakes.DiscreteEnvironment', 'fakes.DiscreteEnvironment', ([], {'num_actions': '(10)', 'num_observations': '(100)', 'obs_shape': '(10,)', 'obs_dtype': 'np.float32'}), '(num_actions=10, num_observations=100, obs_shape=(\n 10,), obs_dtype=np.float32)\n', (4112, 4194), False, 'from acme.testing import fakes\n'), ((4225, 4265), 'acme.specs.make_environment_spec', 'specs.make_environment_spec', (['environment'], {}), '(environment)\n', (4252, 4265), False, 'from acme import specs\n'), ((4296, 4333), 'acme.testing.fakes.transition_dataset', 'fakes.transition_dataset', (['environment'], {}), '(environment)\n', (4320, 4333), False, 'from acme.testing import fakes\n'), ((4676, 4715), 'jax.numpy.max', 'jnp.max', (['logits'], {'axis': '(-1)', 'keepdims': '(True)'}), '(logits, axis=-1, keepdims=True)\n', (4683, 4715), True, 'import jax.numpy as jnp\n'), ((5012, 5036), 'acme.agents.jax.bc.logp', 'bc.logp', ([], {'logp_fn': 'logp_fn'}), '(logp_fn=logp_fn)\n', (5019, 5036), False, 'from acme.agents.jax import bc\n'), ((1650, 1706), 'acme.jax.networks.LayerNormMLP', 'networks_lib.LayerNormMLP', (['[64, 64]'], {'activate_final': '(True)'}), '([64, 64], activate_final=True)\n', (1675, 1706), True, 'from acme.jax import networks as networks_lib\n'), ((1718, 1771), 'acme.jax.networks.NormalTanhDistribution', 'networks_lib.NormalTanhDistribution', (['final_layer_size'], {}), '(final_layer_size)\n', (1753, 1771), True, 'from acme.jax import networks as networks_lib\n'), ((2780, 2810), 'acme.types.Transition', 'types.Transition', (['*sample.data'], {}), '(*sample.data)\n', (2796, 2810), False, 'from acme import types\n'), ((3545, 3566), 'jax.random.PRNGKey', 'jax.random.PRNGKey', (['(0)'], {}), '(0)\n', (3563, 3566), False, 'import jax\n'), ((3615, 3631), 'optax.adam', 'optax.adam', (['(0.01)'], {}), '(0.01)\n', (3625, 3631), False, 'import optax\n'), ((4416, 4446), 'acme.types.Transition', 'types.Transition', (['*sample.data'], {}), '(*sample.data)\n', (4432, 4446), False, 'from acme import types\n'), ((4904, 4938), 'jax.scipy.special.logsumexp', 'special.logsumexp', (['logits'], {'axis': '(-1)'}), '(logits, axis=-1)\n', (4921, 4938), False, 'from jax.scipy import special\n'), ((5093, 5117), 'acme.agents.jax.bc.logp', 'bc.logp', ([], {'logp_fn': 'logp_fn'}), '(logp_fn=logp_fn)\n', (5100, 5117), False, 'from acme.agents.jax import bc\n'), ((5136, 5183), 'acme.agents.jax.bc.rcal', 'bc.rcal', (['base_loss_fn'], {'discount': '(0.99)', 'alpha': '(0.1)'}), '(base_loss_fn, discount=0.99, alpha=0.1)\n', (5143, 5183), False, 'from acme.agents.jax import bc\n'), ((5301, 5322), 'jax.random.PRNGKey', 'jax.random.PRNGKey', (['(0)'], {}), '(0)\n', (5319, 5322), False, 'import jax\n'), ((5371, 5387), 'optax.adam', 'optax.adam', (['(0.01)'], {}), '(0.01)\n', (5381, 5387), False, 'import optax\n'), ((3395, 3428), 'acme.agents.jax.bc.peerbc', 'bc.peerbc', (['base_loss_fn'], {'zeta': '(0.1)'}), '(base_loss_fn, zeta=0.1)\n', (3404, 3428), False, 'from acme.agents.jax import bc\n'), ((4799, 4847), 'jax.nn.one_hot', 'jax.nn.one_hot', (['actions', 'spec.actions.num_values'], {}), '(actions, spec.actions.num_values)\n', (4813, 4847), False, 'import jax\n')]
|
import os
from .Gasteiger import getGasteiger_parameters, getGasteigerCharge
from .getForcefield import *
from .handleAtoms import Atomtypes, AtomsInfo, AtomLink
from .handleBonds import *
from .PCFF import (
PCFF_getAngletypes,
PCFF_getDihstypes,
PCFF_getImpstypes,
PCFF_getAtommass,
PCFF_getPairCoeffs,
PCFF_readPairCoeffs,
PCFF_getBondCoeffs,
PCFF_getAngleCoeffs,
getBBCoeffs,
getBACoeffs,
PCFF_getDihsCoeffs,
getMBTCoeffs,
getEBTCoeffs,
getATCoeffs,
getAATCoeffs,
getBB13Coeffs,
PCFF_getImpsCoeffs,
getAACoeffs,
)
from .qeq import Qeq_charge_equilibration
##############################################################################################
def checkAtomtype(inpfile):
atomtypes = Atomtypes(inpfile)
Forcefieldfile = getDreidingParamFile()
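    # Fallback Dreiding atom types; input types missing from the force-field
    # file are remapped to the closest of these.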
typedefault = ("H_", "C_3", "N_3", "O_3", "F_", "S_3", "Cl", "I_", "Br_")
atypes = []
flag = 0
fin = open(Forcefieldfile, "r")
dataline = fin.readline()
while dataline != "" and dataline != "\n" and flag == 0:
words = dataline[0 : len(dataline) - 1]
if str(words).upper() == "ATOMTYPES":
flag = 1
dataline = fin.readline()
words = dataline[0 : len(dataline) - 1].split()
while str(words[0]).upper() != "END":
atype = str(words[0])
atypes.append(atype)
dataline = fin.readline()
words = dataline[0 : len(dataline) - 1].split()
dataline = fin.readline()
fin.close()
# print(atypes)
anychange = "NO"
changed = []
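    # Three reassignment passes: match the first two characters of the type
    # name, then the first character, then fall back to C_3.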
for i in range(len(atomtypes)):
atomtypeID = atomtypes[i][0]
atomtype = atomtypes[i][1]
if atomtype not in atypes:
anychange = "YES"
for j in range(len(typedefault)):
deftype = typedefault[j]
if atomtype[0:2] == deftype[0:2]:
atomtypes[i][1] = deftype
changed.append([atomtype, deftype])
for i in range(len(atomtypes)):
atomtypeID = atomtypes[i][0]
atomtype = atomtypes[i][1]
if atomtype not in atypes:
anychange = "YES"
for j in range(len(typedefault)):
deftype = typedefault[j]
if atomtype[0] == deftype[0]:
atomtypes[i][1] = deftype
changed.append([atomtype, deftype])
for i in range(len(atomtypes)):
atomtypeID = atomtypes[i][0]
atomtype = atomtypes[i][1]
if atomtype not in atypes:
anychange = "YES"
deftype = "C_3"
atomtypes[i][1] = deftype
changed.append([atomtype, deftype])
if anychange == "YES":
fout = open("atom_type_reassigned.dat", "w")
for i in range(len(atomtypes)):
atomtypeID = atomtypes[i][0]
atomtype = atomtypes[i][1]
fout.write(str(atomtypeID) + " " + atomtype + "\n")
# print >> fout, atomtypeID, atomtype
fout.close()
if anychange == "YES":
wout = open("Datafile_warnings1.txt", "w")
wout.write(
"##==============Warning: Force field parameters============================"
+ "\n"
)
# print >> wout, "##==============Warning: Force field parameters============================"
wout.write("## Atom type is re-assigned as following:" + "\n")
# print >> wout, "## Atom type is re-assigned as following:"
wout.write("##" + str(changed) + "\n")
# print >> wout, "##", changed
wout.write(
"##==============Warning: Force field parameters============================"
+ "\n"
)
# print >> wout, "##==============Warning: Force field parameters============================"
wout.close()
return atomtypes, changed
##############################################################################################
def printCoeffs(fout, ptitle, ptypes, ptypecoeffs):
fout.write("\n")
# print >>fout
fout.write(ptitle + "\n")
# print >>fout,ptitle
fout.write("\n")
# print >>fout
for i in range(len(ptypes)):
outline = ""
for j in range(len(ptypecoeffs[i + 1])):
outline = outline + str(ptypecoeffs[i + 1][j]) + " \t"
fout.write(outline + "\n")
# print >>fout,outline
##############################################################################################
# Write out force field parameters.
def outputDreidingCoeffs(fout, atomtypes, bondtypes, angletypes, dihstypes, impstypes):
warning1 = getPairCoeffs(atomtypes) # coeffs are in file "paircoeffs.txt"
paircoeffs = readPairCoeffs()
fout.write("\n")
# print >>fout
fout.write("Pair Coeffs" + "\n")
# print >>fout,"Pair Coeffs"
fout.write("\n")
# print >>fout
for i in range(len(paircoeffs)):
fout.write(
"%3i %12.6f %12.6f %s %s"
% (
paircoeffs[i][0],
paircoeffs[i][1],
paircoeffs[i][2],
paircoeffs[i][3],
paircoeffs[i][4],
)
+ "\n"
)
# print >>fout,'%3i %12.6f %12.6f %s %s' % (paircoeffs[i][0],paircoeffs[i][1],paircoeffs[i][2],paircoeffs[i][3],paircoeffs[i][4])
fout.write("\n")
# print >>fout
fout.write("Bond Coeffs" + "\n")
# print >>fout,"Bond Coeffs"
fout.write("\n")
# print >>fout
bondcoeffs, warning2 = getBondCoeffs(bondtypes)
for i in range(len(bondtypes)):
fout.write(
"%3i %12.6f %12.6f %s%s%s%s"
% (
bondcoeffs[i][0],
bondcoeffs[i][1],
bondcoeffs[i][2],
str(" # "),
bondcoeffs[i][3],
str(" "),
bondcoeffs[i][4],
)
+ "\n"
)
# print >>fout,'%3i %12.6f %12.6f %s%s%s%s' % (bondcoeffs[i][0],bondcoeffs[i][1],bondcoeffs[i][2],str(" # "),bondcoeffs[i][3],str(" "),bondcoeffs[i][4])
fout.write("\n")
# print >>fout
fout.write("Angle Coeffs" + "\n")
# print >>fout,"Angle Coeffs"
fout.write("\n")
# print >>fout
anglecoeffs, warning3 = getAngleCoeffs(angletypes)
for i in range(len(angletypes)):
fout.write(
"%3i %12.6f %12.6f %s%s%s"
% (
anglecoeffs[i][0],
anglecoeffs[i][1],
anglecoeffs[i][2],
str(" # X "),
anglecoeffs[i][3],
str(" X "),
)
+ "\n"
)
# print >>fout,'%3i %12.6f %12.6f %s%s%s' % (anglecoeffs[i][0],anglecoeffs[i][1],anglecoeffs[i][2],str(" # X "),anglecoeffs[i][3],str(" X "))
fout.write("\n")
# print >>fout
fout.write("Dihedral Coeffs" + "\n")
# print >>fout,"Dihedral Coeffs"
fout.write("\n")
# print >>fout
dihscoeffs, warning4 = getDihsCoeffs(dihstypes)
for i in range(len(dihstypes)):
fout.write(
"%3i %12.6f %3i %3i %s%s%s%s%s%s%s%s"
% (
dihscoeffs[i][0],
dihscoeffs[i][1],
dihscoeffs[i][3],
dihscoeffs[i][2],
str(" # "),
dihscoeffs[i][4],
str(" "),
dihscoeffs[i][5],
str(" "),
dihscoeffs[i][6],
str(" "),
dihscoeffs[i][7],
)
+ "\n"
)
# print >>fout,'%3i %12.6f %3i %3i %s%s%s%s%s%s%s%s' % (dihscoeffs[i][0],dihscoeffs[i][1],dihscoeffs[i][3],dihscoeffs[i][2],str(" # "),dihscoeffs[i][4],str(" "),dihscoeffs[i][5],str(" "),dihscoeffs[i][6],str(" "),dihscoeffs[i][7])
fout.write("\n")
# print >>fout
fout.write("Improper Coeffs" + "\n")
# print >>fout,"Improper Coeffs"
fout.write("\n")
# print >>fout
impscoeffs, warning5 = getImpsCoeffs(impstypes)
for i in range(len(impscoeffs)):
fout.write(
"%3i %12.6f %12.6f %s%s%s"
% (
impscoeffs[i][0],
impscoeffs[i][1],
impscoeffs[i][2],
str(" # "),
impscoeffs[i][3],
str(" X X X "),
)
+ "\n"
)
# print >>fout,'%3i %12.6f %12.6f %s%s%s' % (impscoeffs[i][0],impscoeffs[i][1],impscoeffs[i][2],str(" # "),impscoeffs[i][3],str(" X X X "))
if (
warning1 != ""
or warning2 != ""
or warning3 != ""
or warning4 != ""
or warning5 != ""
):
wout = open("Datafile_warnings2.txt", "w")
wout.write("##" + warning1 + "\n")
# print >>wout,"##",warning1
wout.write("##" + warning2 + "\n")
# print >>wout,"##",warning2
wout.write("##" + warning3 + "\n")
# print >>wout,"##",warning3
wout.write("##" + warning4 + "\n")
# print >>wout,"##",warning4
wout.write("##" + warning5 + "\n")
# print >>wout,"##",warning5
wout.write(
"##==============Warning: Force field parameters============================"
+ "\n"
)
# print >>wout,"##==============Warning: Force field parameters============================"
wout.close()
##############################################################################################
# Write out force field parameters.
def outputPCFFCoeffs(fout, atomtypes, bondtypes, angletypes, dihstypes, impstypes):
PCFF_getPairCoeffs(atomtypes) # coeffs are in file "paircoeffs.txt"
paircoeffs = PCFF_readPairCoeffs()
printCoeffs(fout, "Pair Coeffs", atomtypes, paircoeffs)
bondcoeffs = PCFF_getBondCoeffs(bondtypes)
printCoeffs(fout, "Bond Coeffs", bondtypes, bondcoeffs)
anglecoeffs = PCFF_getAngleCoeffs(angletypes)
printCoeffs(fout, "Angle Coeffs", angletypes, anglecoeffs)
BBcoeffs = getBBCoeffs(angletypes, bondtypes, bondcoeffs)
printCoeffs(fout, "BondBond Coeffs", angletypes, BBcoeffs)
BAcoeffs = getBACoeffs(angletypes, bondtypes, bondcoeffs)
printCoeffs(fout, "BondAngle Coeffs", angletypes, BAcoeffs)
dihscoeffs = PCFF_getDihsCoeffs(dihstypes)
printCoeffs(fout, "Dihedral Coeffs", dihstypes, dihscoeffs)
MBTcoeffs = getMBTCoeffs(dihstypes, bondtypes, bondcoeffs)
printCoeffs(fout, "MiddleBondTorsion Coeffs", dihstypes, MBTcoeffs)
EBTcoeffs = getEBTCoeffs(dihstypes, bondtypes, bondcoeffs)
printCoeffs(fout, "EndBondTorsion Coeffs", dihstypes, EBTcoeffs)
ATcoeffs = getATCoeffs(dihstypes, angletypes, anglecoeffs)
printCoeffs(fout, "AngleTorsion Coeffs", dihstypes, ATcoeffs)
AATcoeffs = getAATCoeffs(dihstypes, angletypes, anglecoeffs)
printCoeffs(fout, "AngleAngleTorsion Coeffs", dihstypes, AATcoeffs)
BB13coeffs = getBB13Coeffs(dihstypes, bondtypes, bondcoeffs)
printCoeffs(fout, "BondBond13 Coeffs", dihstypes, BB13coeffs)
impscoeffs = PCFF_getImpsCoeffs(impstypes)
printCoeffs(fout, "Improper Coeffs", impstypes, impscoeffs)
AAcoeffs = getAACoeffs(impstypes, angletypes, anglecoeffs)
printCoeffs(fout, "AngleAngle Coeffs", impstypes, AAcoeffs)
########################################################################
def getFileName():
fin = open("structure.name", "r")
dataline = fin.readline()
words = dataline[0 : len(dataline) - 1].split()
structureName = words[0]
return structureName
########################################################################
def getForcefield():
fin = open("forcefield.name", "r")
dataline = fin.readline()
words = dataline[0 : len(dataline) - 1].split()
forcefieldName = words[0]
return forcefieldName
########################################################################
def readPairCoeffs():
paircoeffs = []
fin = open("LJpaircoeffs.txt", "r")
dataline = fin.readline()
while dataline != "":
words = dataline[0 : len(dataline) - 1].split()
atomtype = eval(words[1])
D0 = eval(words[2])
R0 = eval(words[3])
C1 = words[4]
C2 = words[5]
paircoeffs.append([atomtype, D0, R0, C1, C2])
dataline = fin.readline()
return paircoeffs
########################################################################
# Assumes cubic cell
def createReaxDatafile(
forcefield, structureName, xlo, xhi, ylo, yhi, zlo, zhi, chargeMethod
):
inpfile = "atom_type.dat"
if str(forcefield).upper() == "DREIDING":
atomtypes, anychange = checkAtomtype(inpfile)
else:
atomtypes = Atomtypes(inpfile)
anychange = []
print("Atomtypes total=", len(atomtypes))
inpfile = "atoms.dat"
baseatoms = AtomsInfo(inpfile)
print("Atoms total=", len(baseatoms))
natomtype = len(atomtypes)
totalatoms = len(baseatoms)
atommass = getAtommass(atomtypes)
####################################################################
# Output reaxFF data file for lammps
datafile = structureName + "_reaxFF.data"
fout = open(datafile, "w")
fout.write("LAMMPS data file using " + forcefield + " for " + structureName + "\n")
# print >>fout,"LAMMPS data file using "+forcefield+" for "+structureName
fout.write("\n")
# print >>fout
fout.write(str(totalatoms) + " atoms" + "\n")
# print >>fout,str(totalatoms)+" atoms"
fout.write(str(natomtype) + " atom types" + "\n")
# print >>fout,str(natomtype)+" atom types"
fout.write("\n")
# print >>fout
fout.write(xlo + " " + xhi + " xlo xhi" + "\n")
# print >>fout,xlo+" "+xhi+" xlo xhi"
fout.write(ylo + " " + yhi + " ylo yhi" + "\n")
# print >>fout,ylo+" "+yhi+" ylo yhi"
fout.write(zlo + " " + zhi + " zlo zhi" + "\n")
# print >>fout,zlo+" "+zhi+" zlo zhi"
fout.write("\n")
# print >>fout
fout.write("Masses" + "\n")
# print >>fout,"Masses"
fout.write("\n")
# print >>fout
for i in range(len(atommass)):
atomtype = atommass[i][2]
# atomtype=atomtype[0]
fout.write(
"%3i %12.6f %s%s"
% (atommass[i][0], atommass[i][1], str(" # "), atomtype)
+ "\n"
)
# print >>fout,'%3i %12.6f %s%s' % (atommass[i][0],atommass[i][1],str(" # "), atomtype)
####################################################################
# Output atom data
fout.write("\n")
# print >>fout
fout.write("Atoms # full" + "\n")
# print >>fout, "Atoms"
fout.write("\n")
# print >>fout
for i in range(len(baseatoms)):
dataline = str(baseatoms[i])
        w = dataline[1 : len(dataline) - 1].split(",")
fout.write(
(
"%6d %3d %3d %10.5f %15.8f %15.8f %15.8f"
% (
eval(w[0]),
eval(w[1]),
eval(w[2]),
eval(w[3]),
eval(w[4]),
eval(w[5]),
eval(w[6]),
)
)
+ "\n"
)
# print >>fout, ('%6d %3d %3d %10.5f %15.8f %15.8f %15.8f' %
# (eval(w[0]),eval(w[1]),eval(w[2]),eval(w[3]),eval(w[4]),eval(w[5]),eval(w[6])))
fout.write("\n")
# print >>fout
fout.close()
print(datafile + " created!")
return datafile
####################################################################
def createDatafile(
forcefield, structureName, xlo, xhi, ylo, yhi, zlo, zhi, xy, xz, yz, chargeMethod
):
inpfile = ".tmp/types/newatom_type.dat"
if str(forcefield).upper() == "DREIDING":
atomtypes, anychange = checkAtomtype(inpfile)
else:
atomtypes = Atomtypes(inpfile)
anychange = []
inpfile = ".tmp/types/newatoms.dat"
baseatoms = AtomsInfo(inpfile)
# Update bondtype if default types used
inpfile = ".tmp/types/newbond_type.dat"
bondtypes = getBondtypes(inpfile)
for i in range(len(bondtypes)):
atom1type = bondtypes[i][1]
atom2type = bondtypes[i][2]
for j in range(len(anychange)):
replaced = anychange[j][0]
defatype = anychange[j][1]
if atom1type.upper() == replaced.upper():
bondtypes[i][1] = defatype
if atom2type.upper() == replaced.upper():
bondtypes[i][2] = defatype
inpfile = ".tmp/types/newbonds.dat"
basebonds = getBonds(inpfile, 0, 1)
print("Equilibrating charge... \n")
if chargeMethod == "Gasteiger":
# Replace charge for Gasteiger charge
forcefield = getForcefield()
Gparas = getGasteiger_parameters(forcefield)
Q = getGasteigerCharge(Gparas, atomtypes, baseatoms, basebonds)
elif chargeMethod == "QEq":
Q = Qeq_charge_equilibration(baseatoms)
atomlinks = AtomLink(baseatoms, basebonds)
# print("Links generated")
baseangles = createAngles(atomlinks)
# print("Angles generated")
if str(forcefield).upper() == "PCFF":
angletypes, baseangles = PCFF_getAngletypes(baseangles, baseatoms, atomtypes)
if str(forcefield).upper() == "DREIDING":
angletypes, baseangles = getAngletypes(baseangles, baseatoms, atomtypes)
# print("Angles updated")
basedihs = createDihedrals(atomlinks, basebonds)
# print("Dihs generated")
if str(forcefield).upper() == "PCFF":
dihstypes, basedihs = PCFF_getDihstypes(basedihs, baseatoms, atomtypes)
if str(forcefield).upper() == "DREIDING":
dihstypes, basedihs = getDihstypes(basedihs, baseatoms, atomtypes)
# print("Dihs updated")
baseimps = createImpropers(atomlinks)
# print("Imps generated")
if str(forcefield).upper() == "PCFF":
impstypes, baseimps = PCFF_getImpstypes(baseimps, baseatoms, atomtypes)
if str(forcefield).upper() == "DREIDING":
impstypes, baseimps = getImpstypes(baseimps, baseatoms, atomtypes)
# print("Imps updated")
####################################################################
# Total quantities
natomtype = len(atomtypes)
nbondtype = len(bondtypes)
nangletype = len(angletypes)
ndihstype = len(dihstypes)
nimpstype = len(impstypes)
totalatoms = len(baseatoms)
totalbonds = len(basebonds)
totalangles = len(baseangles)
totaldihs = len(basedihs)
totalimps = len(baseimps)
####################################################################
atommass = getAtommass(atomtypes)
if str(forcefield).upper() == "PCFF":
atommass = PCFF_getAtommass(atomtypes)
# Output Lammps data file
####################################################################
# Output head of data file for lammps
datafile = structureName + ".data"
# datafile = "LAMMPSDataFile.data"
fout = open(datafile, "w")
fout.write("LAMMPS data file using " + forcefield + " for " + structureName + "\n")
# print >>fout,"LAMMPS data file using "+forcefield+" for "+structureName
fout.write("\n")
# print >>fout
fout.write(str(totalatoms) + " atoms" + "\n")
# print >>fout,str(totalatoms)+" atoms"
fout.write(str(totalbonds) + " bonds" + "\n")
# print >>fout,str(totalbonds)+" bonds"
fout.write(str(totalangles) + " angles" + "\n")
# print >>fout,str(totalangles)+" angles"
fout.write(str(totaldihs) + " dihedrals" + "\n")
# print >>fout,str(totaldihs)+" dihedrals"
fout.write(str(totalimps) + " impropers" + "\n")
# print >>fout,str(totalimps)+" impropers"
fout.write("\n")
# print >>fout
fout.write(str(natomtype) + " atom types" + "\n")
# print >>fout,str(natomtype)+" atom types"
fout.write(str(nbondtype) + " bond types" + "\n")
# print >>fout,str(nbondtype)+" bond types"
fout.write(str(nangletype) + " angle types" + "\n")
# print >>fout,str(nangletype)+" angle types"
fout.write(str(ndihstype) + " dihedral types" + "\n")
# print >>fout,str(ndihstype)+" dihedral types"
fout.write(str(nimpstype) + " improper types" + "\n")
# print >>fout,str(nimpstype)+" improper types"
fout.write("\n")
# print >>fout
fout.write(xlo + " " + xhi + " xlo xhi" + "\n")
# print >>fout,xlo+" "+xhi+" xlo xhi"
fout.write(ylo + " " + yhi + " ylo yhi" + "\n")
# print >>fout,ylo+" "+yhi+" ylo yhi"
fout.write(zlo + " " + zhi + " zlo zhi" + "\n")
# print >>fout,zlo+" "+zhi+" zlo zhi"
if xy == "0.0" and xz == "0.0" and yz == "0.0":
fout.write("\n")
# print >>fout
else:
fout.write(xy + " " + xz + " " + yz + " xy xz yz" + "\n")
# print >>fout,xy+" "+xz+" "+yz+" xy xz yz"
fout.write("\n")
# print >>fout
fout.write("Masses" + "\n")
# print >>fout,"Masses"
fout.write("\n")
# print >>fout
for i in range(len(atommass)):
fout.write(
"%3i %12.6f %s%s"
% (atommass[i][0], atommass[i][1], str(" # "), atommass[i][2])
+ "\n"
)
# print >>fout,'%3i %12.6f %s%s' % (atommass[i][0],atommass[i][1],str(" # "), atommass[i][2])
####################################################################
# Output data
fout.write("\n")
# print >>fout
fout.write("Atoms # full" + "\n")
# print >>fout, "Atoms"
fout.write("\n")
# print >>fout
for i in range(len(baseatoms)):
dataline = str(baseatoms[i])
w = dataline[1 : len(dataline) - 1].split(",")
fout.write(
(
"%6d %3d %3d %10.5f %15.8f %15.8f %15.8f %3d %3d %3d"
% (
eval(w[0]),
eval(w[1]),
eval(w[2]),
Q[i + 1],
eval(w[4]),
eval(w[5]),
eval(w[6]),
eval(w[7]),
eval(w[8]),
eval(w[9]),
)
)
+ "\n"
)
# print >>fout, ('%6d %3d %3d %10.5f %15.8f %15.8f %15.8f %3d %3d %3d' %
# (eval(w[0]),eval(w[1]),eval(w[2]),Q[i+1],eval(w[4]),eval(w[5]),eval(w[6]), eval(w[7]), eval(w[8]), eval(w[9])))
fout.write("\n")
# print >>fout
fout.write("Bonds" + "\n")
# print >>fout, "Bonds"
fout.write("\n")
# print >>fout
for i in range(len(basebonds)):
dataline = str(basebonds[i])
words = dataline[1 : len(dataline) - 1].split(",")
outline = ""
        for j in range(len(words)):
            outline = outline + str(words[j]) + " "
fout.write(outline + "\n")
# print >>fout, outline
fout.write("\n")
# print >>fout
fout.write("Angles" + "\n")
# print >>fout, "Angles"
fout.write("\n")
# print >>fout
for i in range(len(baseangles)):
dataline = str(baseangles[i])
words = dataline[1 : len(dataline) - 1].split(",")
outline = ""
        for j in range(len(words)):
            outline = outline + str(words[j]) + " "
fout.write(outline + "\n")
# print >>fout, outline
fout.write("\n")
# print >>fout
fout.write("Dihedrals" + "\n")
# print >>fout, "Dihedrals"
fout.write("\n")
# print >>fout
for i in range(len(basedihs)):
dataline = str(basedihs[i])
words = dataline[1 : len(dataline) - 1].split(",")
outline = ""
        for j in range(len(words)):
            outline = outline + str(words[j]) + " "
fout.write(outline + "\n")
# print >>fout, outline
fout.write("\n")
# print >>fout
fout.write("Impropers" + "\n")
# print >>fout, "Impropers"
fout.write("\n")
# print >>fout
for i in range(len(baseimps)):
dataline = str(baseimps[i])
words = dataline[1 : len(dataline) - 1].split(",")
outline = ""
        for j in range(len(words)):
            outline = outline + str(words[j]) + " "
fout.write(outline + "\n")
# print >>fout, outline
# Coeffs
if str(forcefield).upper() == "DREIDING":
outputDreidingCoeffs(
fout, atomtypes, bondtypes, angletypes, dihstypes, impstypes
)
if str(forcefield).upper() == "PCFF":
outputPCFFCoeffs(fout, atomtypes, bondtypes, angletypes, dihstypes, impstypes)
fout.close()
print(datafile + " created!")
if os.path.exists("Datafile_warnings.txt"):
cmd2 = "rm Datafile_warnings.txt"
os.system(cmd2)
if os.path.exists("Datafile_warnings1.txt"):
cmd1 = "cat Datafile_warnings1.txt >>Datafile_warnings.txt"
cmd2 = "rm Datafile_warnings1.txt"
os.system(cmd1)
os.system(cmd2)
if os.path.exists("Datafile_warnings2.txt"):
cmd1 = "cat Datafile_warnings2.txt >>Datafile_warnings.txt"
cmd2 = "rm Datafile_warnings2.txt"
os.system(cmd1)
os.system(cmd2)
return datafile
########################################################################
|
[
"os.path.exists",
"os.system"
] |
[((24493, 24532), 'os.path.exists', 'os.path.exists', (['"""Datafile_warnings.txt"""'], {}), "('Datafile_warnings.txt')\n", (24507, 24532), False, 'import os\n'), ((24607, 24647), 'os.path.exists', 'os.path.exists', (['"""Datafile_warnings1.txt"""'], {}), "('Datafile_warnings1.txt')\n", (24621, 24647), False, 'import os\n'), ((24815, 24855), 'os.path.exists', 'os.path.exists', (['"""Datafile_warnings2.txt"""'], {}), "('Datafile_warnings2.txt')\n", (24829, 24855), False, 'import os\n'), ((24584, 24599), 'os.system', 'os.system', (['cmd2'], {}), '(cmd2)\n', (24593, 24599), False, 'import os\n'), ((24768, 24783), 'os.system', 'os.system', (['cmd1'], {}), '(cmd1)\n', (24777, 24783), False, 'import os\n'), ((24792, 24807), 'os.system', 'os.system', (['cmd2'], {}), '(cmd2)\n', (24801, 24807), False, 'import os\n'), ((24976, 24991), 'os.system', 'os.system', (['cmd1'], {}), '(cmd1)\n', (24985, 24991), False, 'import os\n'), ((25000, 25015), 'os.system', 'os.system', (['cmd2'], {}), '(cmd2)\n', (25009, 25015), False, 'import os\n')]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('services', '0005_auto_20150901_1253'),
]
operations = [
migrations.AlterField(
model_name='story',
name='content_type',
field=models.CharField(blank=True, max_length=1, null=True, choices=[(b'T', b'text'), (b'U', b'url'), (b'I', b'image')]),
),
migrations.AlterField(
model_name='story',
name='date',
field=models.DateField(auto_now_add=True),
),
]
|
[
"django.db.models.CharField",
"django.db.models.DateField"
] |
[((358, 476), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'max_length': '(1)', 'null': '(True)', 'choices': "[(b'T', b'text'), (b'U', b'url'), (b'I', b'image')]"}), "(blank=True, max_length=1, null=True, choices=[(b'T',\n b'text'), (b'U', b'url'), (b'I', b'image')])\n", (374, 476), False, 'from django.db import models, migrations\n'), ((591, 626), 'django.db.models.DateField', 'models.DateField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (607, 626), False, 'from django.db import models, migrations\n')]
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2020, <NAME> & QuatroPe
# License: BSD-3-Clause
# Full Text: https://github.com/quatrope/djmpl/blob/master/LICENSE
# =============================================================================
# DOCS
# =============================================================================
"""Tests for django_matplotlib.core
"""
# =============================================================================
# IMPORTS
# =============================================================================
from django.utils.safestring import SafeString
import django_matplotlib as djmpl
from django_matplotlib import core, settings
import jinja2
import matplotlib.pyplot as plt
from pyquery import PyQuery as pq
import pytest
# =============================================================================
# CONSTANTS
# =============================================================================
ALL_ENGINE_NAMES = list(settings.TEMPLATES_FORMATERS) + list(
settings.TEMPLATE_ALIAS
)
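# Disable matplotlib's "more than N figures open" warning; the parametrized
# tests below create one figure per case.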
plt.rcParams.update({"figure.max_open_warning": 0})
# =============================================================================
# TESTS
# =============================================================================
@pytest.mark.parametrize(
"engine, safe_type",
[("django", SafeString), ("jinja2", jinja2.Markup), ("str", str)],
)
def test_png(engine, safe_type):
plot = djmpl.subplots(plot_format="png", template_engine=engine)
html = plot.to_html()
assert isinstance(html, safe_type)
div = pq(html)
assert len(div) == 1
assert div[0].tag == "div"
assert div.has_class("djmpl")
assert div.has_class("djmpl-png")
children = div[0].getchildren()
assert len(children) == 1
img = children[0]
assert img.tag == "img"
assert img.attrib["src"].split(",", 1)[0] == "data:image/png;base64"
@pytest.mark.parametrize(
"engine, safe_type",
[("django", SafeString), ("jinja2", jinja2.Markup), ("str", str)],
)
def test_svg(engine, safe_type):
plot = djmpl.subplots(plot_format="svg", template_engine=engine)
html = plot.to_html()
assert isinstance(html, safe_type)
div = pq(html)
assert len(div) == 1
assert div[0].tag == "div"
assert div.has_class("djmpl")
assert div.has_class("djmpl-svg")
children = div[0].getchildren()
assert len(children) == 3
img = children[-1]
assert img.tag == "svg"
@pytest.mark.parametrize(
"engine, safe_type",
[("django", SafeString), ("jinja2", jinja2.Markup), ("str", str)],
)
def test_mpld3(engine, safe_type):
plot = djmpl.subplots(plot_format="mpld3", template_engine=engine)
html = plot.to_html()
assert isinstance(html, safe_type)
div = pq(html)
assert len(div) == 1
assert div[0].tag == "div"
assert div.has_class("djmpl")
assert div.has_class("djmpl-mpld3")
children = div[0].getchildren()
assert len(children) == 3
img = children[-1]
assert img.tag == "script"
@pytest.mark.parametrize("fmt", settings.AVAILABLE_FORMATS)
@pytest.mark.parametrize("engine", ALL_ENGINE_NAMES)
def test_valid_engine_and_format(fmt, engine):
plot = djmpl.subplots(plot_format=fmt, template_engine=engine)
assert plot.plot_format == fmt
assert plot.template_engine == core.template_by_alias(engine)
@pytest.mark.parametrize("engine", ALL_ENGINE_NAMES)
def test_invalid_and_format(engine):
with pytest.raises(ValueError):
djmpl.subplots(plot_format="%NOT-EXISTS%", template_engine=engine)
@pytest.mark.parametrize("fmt", settings.AVAILABLE_FORMATS)
def test_invalid_engine(fmt):
with pytest.raises(core.EngineNotSupported):
djmpl.subplots(plot_format=fmt, template_engine="%NOT-EXISTS%")
|
[
"pyquery.PyQuery",
"django_matplotlib.core.template_by_alias",
"django_matplotlib.subplots",
"pytest.raises",
"matplotlib.pyplot.rcParams.update",
"pytest.mark.parametrize"
] |
[((1055, 1106), 'matplotlib.pyplot.rcParams.update', 'plt.rcParams.update', (["{'figure.max_open_warning': 0}"], {}), "({'figure.max_open_warning': 0})\n", (1074, 1106), True, 'import matplotlib.pyplot as plt\n'), ((1279, 1395), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""engine, safe_type"""', "[('django', SafeString), ('jinja2', jinja2.Markup), ('str', str)]"], {}), "('engine, safe_type', [('django', SafeString), (\n 'jinja2', jinja2.Markup), ('str', str)])\n", (1302, 1395), False, 'import pytest\n'), ((1912, 2028), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""engine, safe_type"""', "[('django', SafeString), ('jinja2', jinja2.Markup), ('str', str)]"], {}), "('engine, safe_type', [('django', SafeString), (\n 'jinja2', jinja2.Markup), ('str', str)])\n", (1935, 2028), False, 'import pytest\n'), ((2473, 2589), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""engine, safe_type"""', "[('django', SafeString), ('jinja2', jinja2.Markup), ('str', str)]"], {}), "('engine, safe_type', [('django', SafeString), (\n 'jinja2', jinja2.Markup), ('str', str)])\n", (2496, 2589), False, 'import pytest\n'), ((3043, 3101), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""fmt"""', 'settings.AVAILABLE_FORMATS'], {}), "('fmt', settings.AVAILABLE_FORMATS)\n", (3066, 3101), False, 'import pytest\n'), ((3103, 3154), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""engine"""', 'ALL_ENGINE_NAMES'], {}), "('engine', ALL_ENGINE_NAMES)\n", (3126, 3154), False, 'import pytest\n'), ((3373, 3424), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""engine"""', 'ALL_ENGINE_NAMES'], {}), "('engine', ALL_ENGINE_NAMES)\n", (3396, 3424), False, 'import pytest\n'), ((3576, 3634), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""fmt"""', 'settings.AVAILABLE_FORMATS'], {}), "('fmt', settings.AVAILABLE_FORMATS)\n", (3599, 3634), False, 'import pytest\n'), ((1446, 1503), 'django_matplotlib.subplots', 'djmpl.subplots', ([], {'plot_format': '"""png"""', 'template_engine': 'engine'}), "(plot_format='png', template_engine=engine)\n", (1460, 1503), True, 'import django_matplotlib as djmpl\n'), ((1580, 1588), 'pyquery.PyQuery', 'pq', (['html'], {}), '(html)\n', (1582, 1588), True, 'from pyquery import PyQuery as pq\n'), ((2079, 2136), 'django_matplotlib.subplots', 'djmpl.subplots', ([], {'plot_format': '"""svg"""', 'template_engine': 'engine'}), "(plot_format='svg', template_engine=engine)\n", (2093, 2136), True, 'import django_matplotlib as djmpl\n'), ((2213, 2221), 'pyquery.PyQuery', 'pq', (['html'], {}), '(html)\n', (2215, 2221), True, 'from pyquery import PyQuery as pq\n'), ((2642, 2701), 'django_matplotlib.subplots', 'djmpl.subplots', ([], {'plot_format': '"""mpld3"""', 'template_engine': 'engine'}), "(plot_format='mpld3', template_engine=engine)\n", (2656, 2701), True, 'import django_matplotlib as djmpl\n'), ((2778, 2786), 'pyquery.PyQuery', 'pq', (['html'], {}), '(html)\n', (2780, 2786), True, 'from pyquery import PyQuery as pq\n'), ((3213, 3268), 'django_matplotlib.subplots', 'djmpl.subplots', ([], {'plot_format': 'fmt', 'template_engine': 'engine'}), '(plot_format=fmt, template_engine=engine)\n', (3227, 3268), True, 'import django_matplotlib as djmpl\n'), ((3339, 3369), 'django_matplotlib.core.template_by_alias', 'core.template_by_alias', (['engine'], {}), '(engine)\n', (3361, 3369), False, 'from django_matplotlib import core, settings\n'), ((3471, 3496), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (3484, 3496), False, 'import pytest\n'), ((3506, 3572), 'django_matplotlib.subplots', 'djmpl.subplots', ([], {'plot_format': '"""%NOT-EXISTS%"""', 'template_engine': 'engine'}), "(plot_format='%NOT-EXISTS%', template_engine=engine)\n", (3520, 3572), True, 'import django_matplotlib as djmpl\n'), ((3674, 3712), 'pytest.raises', 'pytest.raises', (['core.EngineNotSupported'], {}), '(core.EngineNotSupported)\n', (3687, 3712), False, 'import pytest\n'), ((3722, 3785), 'django_matplotlib.subplots', 'djmpl.subplots', ([], {'plot_format': 'fmt', 'template_engine': '"""%NOT-EXISTS%"""'}), "(plot_format=fmt, template_engine='%NOT-EXISTS%')\n", (3736, 3785), True, 'import django_matplotlib as djmpl\n')]
|
# Created by <NAME> on 10/7/2019, 12:53 AM
import sys
import os
project_path = os.path.dirname(os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))))
sys.path.append(project_path)
from detectors.relative_entropy_detector import RelativeEntropyDetector
from common.dataset import CSVDataset
from utils.analysis import draw_array
def test_detector():
# read in the data
file_path = project_path + "/../data/NAB_data/data/realAWSCloudwatch/ec2_cpu_utilization_5f5533.csv"
data = CSVDataset(file_path, header=1, values=1, test_size=0).get_data()[0]["values"]
# finding min max of the value
min_value = min(data)
max_value = max(data)
# initialize the detector
detector = RelativeEntropyDetector()
# set the window_size to be 52 and n_bins to be 5 for testing a normal case
detector.initialize(input_min=min_value, input_max=max_value, window_size=52, n_bins=5)
# handle all the record
result = detector.handle_record_sequence(data)
draw_array(result)
if __name__ == "__main__":
test_detector()
|
[
"sys.path.append",
"utils.analysis.draw_array",
"os.path.abspath",
"common.dataset.CSVDataset",
"detectors.relative_entropy_detector.RelativeEntropyDetector"
] |
[((173, 202), 'sys.path.append', 'sys.path.append', (['project_path'], {}), '(project_path)\n', (188, 202), False, 'import sys\n'), ((728, 753), 'detectors.relative_entropy_detector.RelativeEntropyDetector', 'RelativeEntropyDetector', ([], {}), '()\n', (751, 753), False, 'from detectors.relative_entropy_detector import RelativeEntropyDetector\n'), ((1010, 1028), 'utils.analysis.draw_array', 'draw_array', (['result'], {}), '(result)\n', (1020, 1028), False, 'from utils.analysis import draw_array\n'), ((143, 168), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (158, 168), False, 'import os\n'), ((515, 569), 'common.dataset.CSVDataset', 'CSVDataset', (['file_path'], {'header': '(1)', 'values': '(1)', 'test_size': '(0)'}), '(file_path, header=1, values=1, test_size=0)\n', (525, 569), False, 'from common.dataset import CSVDataset\n')]
|
###############################################################################
#
# Purpose: Use VisIt CLI to iterate over Curves in a material database and
# compute and plot some common difference curves and output the results
# to either a curve or image file format.
#
# Programmer: <NAME>
# Date: Wed May 27 13:15:07 PDT 2009
#
#
# Modifications:
# <NAME>, Mon Jun 15 17:52:15 PDT 2009
# Removed subclassing used to override behavior of Optparse in presence of
# unrecognized options. By using Argv(), VisIt-specific options never wind
# up getting passed to this script.
###############################################################################
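#
# Example invocation (hypothetical database name and option values), run
# through VisIt's CLI so that Argv() below receives only this script's own
# arguments:
#   visit -cli -s matexprs.py --image-format png --show-legend matdb.visit
#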
import sys, re, os, glob
from optparse import *
#
# Convert '#FFCC13" strings to color tuple
#
def ColorTupleFromHexString(s):
if s[0] != '#':
return (0, 0, 0, 255)
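    # Parse each two-digit hex pair (RR, GG, BB) as a base-16 integer;
    # alpha is fixed at fully opaque (255).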
return (int("0x%s"%s[1:3],16), \
int("0x%s"%s[3:5],16), \
int("0x%s"%s[5:7],16), \
255)
#
# Command-line options
#
def BuildCommandLineOptions():
parser = OptionParser()
parser.add_option("--image-width",
help="Set width of images [%default].",
type="int", dest="image_width", default="500", metavar="INT")
parser.add_option("--image-height",
help="Set height of images [%default].",
type="int", dest="image_height", default="500", metavar="INT")
parser.add_option("--data-min",
type="float", dest="data_min", metavar="FLOAT",
help="Mininum data value to be applied to all plots. If no "
"value is specified, the minimum will be allowed to vary "
"as needed from plot to plot.")
parser.add_option("--data-max",
type="float", dest="data_max", metavar="FLOAT",
help="Mininum data value to be applied to all plots. If no "
"value is specified, the minimum will be allowed to vary "
"as needed from plot to plot.")
parser.add_option("--log-data",
help="Display data (y) axis in log scaling.",
action="store_true", dest="log_data", default=False)
parser.add_option("--x-min",
type="float", dest="x_min", metavar="FLOAT",
help="Mininum positional (x) value to be applied to all plots. If no "
"value is specified, the minimum will be allowed to vary "
"as needed from plot to plot.")
parser.add_option("--x-max",
type="float", dest="x_max", metavar="FLOAT",
help="Maximum positional (x) value to be applied to all plots. If no "
"value is specified, the minimum will be allowed to vary "
"as needed from plot to plot.")
parser.add_option("--log-x",
help="Display positional (x) axis in log scaling.",
action="store_true", dest="log_x", default=False)
parser.add_option("--image-format",
help="Set output format for images (e.g. 'tiff', 'png', 'jpeg'). "
"If none specified, no images will be saved.",
dest="image_format", metavar="STRING")
parser.add_option("--curve-format",
help="Set output format for curves (e.g. 'ultra', 'curve'). "
"If none specified, no curve files will be saved.",
dest="curve_format", metavar="STRING")
parser.add_option("--color0",
help="Set color to be used for first curve plot.",
dest="color0", metavar="#RRGGBB")
parser.add_option("--color1",
help="Set color to be used for second curve plot.",
dest="color1", metavar="#RRGGBB")
parser.add_option("--line-width",
help="Set line width for curves.",
type="int", default=0, dest="line_width", metavar="INT")
parser.add_option("--point-density",
help="Plot symbols representing individual points in curves every Nth point. "
"A value of zero turns the display of points off [%default].",
type="int", default=0, dest="point_density", metavar="N")
parser.add_option("--point-size",
help="Size of symbols representing individual points in curve plots.",
type="int", default=5, dest="point_size", metavar="INT")
parser.add_option("--show-legend",
help="Display curve plot legends.",
action="store_true", dest="show_legend", default=False)
parser.add_option("--show-labels",
help="Display curve plot labels.",
action="store_true", dest="show_labels", default=False)
parser.set_usage("matexprs.py [options] dbname")
return parser
#
# Iterate through curves, finding all unique 'dirs' containing curves.
#
def GetVarMap(metadata):
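    # Map each curve 'dir' name to a dict of the variable names beneath it
    # (the inner dict is used as a set).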
dirMap = {}
for i in range(metadata.GetNumCurves()):
dirinfo = re.search("(.*)/([^/]*)", metadata.GetCurves(i).name)
if dirinfo != None:
dirname = dirinfo.group(1)
varname = dirinfo.group(2)
varMap = {}
if dirname in dirMap:
varMap = dirMap[dirname]
varMap[varname] = 1
dirMap[dirname] = varMap
return dirMap
#
# Begin main program
#
parser = BuildCommandLineOptions()
#
# This bit of logic allows users to get usage/help from
# the command 'python matexprs.py --help'. Without it
# using VisIt's cli the '--help' will get interpreted
# in internallauncher and never make it into this script.
#
if "-h" in sys.argv or \
"--help" in sys.argv or \
"-help" in sys.argv or \
"help" in sys.argv:
parser.print_help()
sys.exit(1)
#
# Argv() is a function defined by VisIt's cli that
# returns ONLY the options after the argument (filename)
# to the '-s' command-line option. In theory, that
# should be only the arguments that this script itself
# should interpret.
#
(clOpts, clArgs) = parser.parse_args(list(Argv()))
#
# Set the name of the database. It is the only 'positional'
# argument on the command line.
#
dbname = ""
if len(clArgs) > 0:
dbname = clArgs[0]
if not glob.glob(dbname):
if dbname == "":
sys.stderr.write("No database specified.\n")
else:
sys.stderr.write("Invalid database, \"%s\", specified.\n"%dbname)
parser.print_usage()
sys.exit(1)
#
# Open the database, get metadata, get info on curve 'dirs'
#
OpenDatabase(dbname)
metadata = GetMetaData(dbname)
dirMap = GetVarMap(metadata)
#
# Build up base save window attributes
#
swa = SaveWindowAttributes()
swa.family = 0
swa.width = clOpts.image_width
swa.height = clOpts.image_height
#
# Build up base curve attributes
#
ca = CurveAttributes()
ca.lineWidth = clOpts.line_width
if clOpts.color0 != None:
ca.color = ColorTupleFromHexString(clOpts.color0)
ca.cycleColors = 0
ca.showLabels = clOpts.show_labels
#if clOpts.point_density > 0:
# ca.showPoints = 1
#ca.pointSize = clOpts.point_size
ca.showLegend = clOpts.show_legend
#ca.symbolDensity = clOpts.point_density
SetDefaultPlotOptions(ca)
#
# Iterate through all curve 'dirs', finding instances where
# all essential variables exist. Create expressions and plot 'em
#
for k in list(dirMap.keys()):
if not ("Ec" in dirMap[k] and \
"cEc" in dirMap[k] and \
"cEc_fit" in dirMap[k]):
print("Ignoring %s because not all required vars are present."%k)
#del dirMap[k]
continue
DefineCurveExpression("%s/c0"%k, "<%s/Ec>-<%s/cEc_fit>"%(k,k))
DefineCurveExpression("%s/c1"%k, "<%s/cEc>-<%s/cEc_fit>"%(k,k))
AddPlot("Curve","%s/c0"%k)
AddPlot("Curve","%s/c1"%k)
DrawPlots()
v = GetViewCurve()
if clOpts.x_min != None:
v.domainCoords = (clOpts.x_min, v.domainCoords[1])
if clOpts.x_max != None:
v.domainCoords = (v.domainCoords[0], clOpts.x_max)
if clOpts.log_x:
v.domainScale = v.LOG
if clOpts.data_min != None:
v.rangeCoords = (clOpts.data_min, v.rangeCoords[1])
if clOpts.data_max != None:
v.rangeCoords = (v.rangeCoords[0], clOpts.data_max)
if clOpts.log_data:
v.rangeScale = v.LOG
SetViewCurve(v)
if clOpts.color1 != None:
ca2 = CurveAttributes()
ca2.color = ColorTupleFromHexString(clOpts.color1)
ca2.cycleColors = 0
SetActivePlots((1,))
SetPlotOptions(ca2)
DrawPlots()
if clOpts.curve_format != None:
swa.format = getattr(swa,clOpts.curve_format.upper())
swa.fileName = k # .curve is added automatically
SetSaveWindowAttributes(swa)
SaveWindow()
if clOpts.image_format != None:
swa.format = getattr(swa,clOpts.image_format.upper())
#swa.fileName = "%s.%s"%(k,clOpts.image_format.lower())
swa.fileName = k
SetSaveWindowAttributes(swa)
SaveWindow()
DeleteAllPlots()
|
[
"sys.stderr.write",
"sys.exit",
"glob.glob"
] |
[((5446, 5457), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (5454, 5457), False, 'import sys, re, os, glob\n'), ((5908, 5925), 'glob.glob', 'glob.glob', (['dbname'], {}), '(dbname)\n', (5917, 5925), False, 'import sys, re, os, glob\n'), ((6114, 6125), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (6122, 6125), False, 'import sys, re, os, glob\n'), ((5956, 6000), 'sys.stderr.write', 'sys.stderr.write', (['"""No database specified.\n"""'], {}), "('No database specified.\\n')\n", (5972, 6000), False, 'import sys, re, os, glob\n'), ((6019, 6084), 'sys.stderr.write', 'sys.stderr.write', (['(\'Invalid database, "%s", specified.\\n\' % dbname)'], {}), '(\'Invalid database, "%s", specified.\\n\' % dbname)\n', (6035, 6084), False, 'import sys, re, os, glob\n')]
|
import flask
import flask_restx
import flask.testing
import layabauth.flask
from layabauth.testing import *
@pytest.fixture
def app() -> flask.Flask:
application = flask.Flask(__name__)
application.testing = True
api = flask_restx.Api(application)
@api.route("/requires_scopes")
class RequiresScopes(flask_restx.Resource):
@layabauth.flask.requires_authentication("https://test_identity_provider")
def get(self):
layabauth.flask.requires_scopes(
lambda token, token_body: token_body["scopes"], "scope1", "scope2"
)
return flask.g.token_body
return application
@pytest.fixture
def jwks_uri():
return "https://test_identity_provider"
@pytest.fixture
def token_body():
return {"upn": "<EMAIL>", "scopes": ["scope2", "scope3"]}
def test_auth_mock_with_1_scope_ok_1_missing(
client: flask.testing.FlaskClient, auth_mock
):
response = client.open(
method="GET",
path="/requires_scopes",
headers={"Authorization": "Bearer my_token"},
)
assert response.status_code == 403
assert response.json == {"message": "The scope1 must be provided in the token."}
|
[
"flask.Flask",
"flask_restx.Api"
] |
[((171, 192), 'flask.Flask', 'flask.Flask', (['__name__'], {}), '(__name__)\n', (182, 192), False, 'import flask\n'), ((234, 262), 'flask_restx.Api', 'flask_restx.Api', (['application'], {}), '(application)\n', (249, 262), False, 'import flask_restx\n')]
|