#!/usr/bin/env python
# -*- coding: utf-8 -*-
#################
# Import modules
#################
from __future__ import print_function, absolute_import, division
# get command line parameters
import sys
# walk directories
import glob
# access to OS functionality
import os
# (de)serialize config file
import json
# call processes
import subprocess
# get the user name
import getpass
# xml parsing
import xml.etree.ElementTree as ET
# copy stuff
import copy
# import pyqt for everything graphical
from PyQt5 import QtCore, QtGui, QtWidgets
#################
# Helper classes
#################
# annotation helper
from cityscapesscripts.helpers.annotation import Point, Annotation, CsPoly
from cityscapesscripts.helpers.labels import name2label, assureSingleInstanceName
# Helper class that contains the current configuration of the Gui
# This config is loaded when started and saved when leaving
class configuration:
    """Holds the current configuration of the GUI.

    The configuration is loaded from a JSON file when the tool starts
    and saved back when the tool exits.
    """

    def __init__(self):
        # The filename of the image we are currently working on
        self.currentFile = ""
        # The filename of the labels we are currently working on
        self.currentLabelFile = ""
        # The filename of the corrections we are currently working on
        self.currentCorrectionFile = ""
        # The path where the Cityscapes dataset is located
        self.csPath = ""
        # The path of the images of the currently loaded city
        self.city = ""
        # The name of the currently loaded city
        self.cityName = ""
        # The type of the current annotations
        self.gtType = ""
        # The split, where the currently loaded city belongs to
        self.split = ""
        # The path of the labels. In this folder we expect a folder for each city
        # Within these city folders we expect the label with a filename matching
        # the images, except for the extension
        self.labelPath = ""
        # The path to store correction markings
        self.correctionPath = ""
        # The transparency of the labels over the image
        self.transp = 0.5
        # The zoom toggle
        self.zoom = False
        # The zoom factor
        self.zoomFactor = 1.0
        # The size of the zoom window. Currently there is no setter or getter for that
        self.zoomSize = 400  # px
        # The highlight toggle
        self.highlight = False
        # The highlight label
        self.highlightLabelSelection = ""
        # Screenshot file
        self.screenshotFilename = "%i"
        # Correction mode
        self.correctionMode = False
        # Warn before saving that you are overwriting files
        self.showSaveWarning = True

    # Load from given filename
    def load(self, filename):
        """Load the configuration from a JSON file, if it exists.

        Only keys that correspond to already-known attributes are taken
        over; afterwards the values are sanity-checked.
        """
        if os.path.isfile(filename):
            with open(filename, 'r') as f:
                jsonText = f.read()
            jsonDict = json.loads(jsonText)
            # Ignore unknown keys so stale config files cannot inject attributes
            for key in jsonDict:
                if key in self.__dict__:
                    self.__dict__[key] = jsonDict[key]
            self.fixConsistency()

    # Make sure the config is consistent.
    # Automatically called after loading
    def fixConsistency(self):
        """Normalise all paths and reset entries that no longer exist on disk."""
        if self.currentFile:
            self.currentFile = os.path.normpath(self.currentFile)
        if self.currentLabelFile:
            self.currentLabelFile = os.path.normpath(self.currentLabelFile)
        if self.currentCorrectionFile:
            self.currentCorrectionFile = os.path.normpath(
                self.currentCorrectionFile)
        if self.csPath:
            self.csPath = os.path.normpath(self.csPath)
            if not os.path.isdir(self.csPath):
                self.csPath = ""
        if self.city:
            self.city = os.path.normpath(self.city)
            if not os.path.isdir(self.city):
                self.city = ""
        if self.labelPath:
            self.labelPath = os.path.normpath(self.labelPath)
        if self.correctionPath:
            self.correctionPath = os.path.normpath(self.correctionPath)
        if self.city:
            # Bug fix: this line used '==' (a no-op comparison) instead of '=';
            # the city name must be derived from the city folder.
            self.cityName = os.path.basename(self.city)
        if not os.path.isfile(self.currentFile) or os.path.dirname(self.currentFile) != self.city:
            self.currentFile = ""
        if not os.path.isfile(self.currentLabelFile) or \
                not os.path.isdir(os.path.join(self.labelPath, self.cityName)) or \
                os.path.dirname(self.currentLabelFile) != os.path.join(self.labelPath, self.cityName):
            self.currentLabelFile = ""
        if not os.path.isfile(self.currentCorrectionFile) or \
                not os.path.isdir(os.path.join(self.correctionPath, self.cityName)) or \
                os.path.dirname(self.currentCorrectionFile) != os.path.join(self.correctionPath, self.cityName):
            self.currentCorrectionFile = ""

    # Save to given filename (as JSON, not pickle)
    def save(self, filename):
        """Serialise all attributes to the given filename as pretty-printed JSON."""
        with open(filename, 'w') as f:
            f.write(json.dumps(self.__dict__,
                               default=lambda o: o.__dict__, sort_keys=True, indent=4))
def enum(**enums):
    """Build a lightweight enumeration class.

    Each keyword argument becomes a class attribute of the returned type,
    e.g. enum(A=1, B=2).A == 1.
    """
    members = dict(enums)
    return type('Enum', (), members)
class CorrectionBox:
    """One correction marking: a bounding box, a free-text annotation and a
    state (to correct / to review / resolved / question)."""

    types = enum(TO_CORRECT=1, TO_REVIEW=2, RESOLVED=3, QUESTION=4)

    def __init__(self, rect=None, annotation=""):
        # New boxes always start in the TO_CORRECT state
        self.type = CorrectionBox.types.TO_CORRECT
        # The bounding box (QRectF or None)
        self.bbox = rect
        # Free-text description of the issue
        self.annotation = annotation
        # Whether this box is currently selected in the GUI
        self.selected = False

    def get_colour(self):
        """Return the display colour associated with the current state."""
        kinds = CorrectionBox.types
        if self.type == kinds.TO_CORRECT:
            return QtGui.QColor(255, 0, 0)
        if self.type == kinds.TO_REVIEW:
            return QtGui.QColor(255, 255, 0)
        if self.type == kinds.RESOLVED:
            return QtGui.QColor(0, 255, 0)
        if self.type == kinds.QUESTION:
            return QtGui.QColor(0, 0, 255)

    def select(self):
        """Mark this correction as selected."""
        self.selected = True

    def unselect(self):
        """Mark this correction as not selected."""
        self.selected = False

    def readFromXMLNode(self, correctionNode):
        """Populate this correction from a 'correction' XML node.

        The node must have the tag 'correction' and contain 'type',
        'annotation' and a 'bbox' child holding x/y/width/height.
        """
        if correctionNode.tag != 'correction':
            return
        self.type = int(correctionNode.find('type').text)
        self.annotation = correctionNode.find('annotation').text
        bboxNode = correctionNode.find('bbox')
        coords = [float(bboxNode.find(tag).text)
                  for tag in ('x', 'y', 'width', 'height')]
        self.bbox = QtCore.QRectF(*coords)

    def appendToXMLNode(self, node):
        """Append this correction as a 'correction' child element of the
        given node (usually the XML root)."""
        def subNode(parent, tag, text=None):
            # Helper: create a child element with a newline tail for readable output
            el = ET.SubElement(parent, tag)
            el.tail = "\n"
            if text is not None:
                el.text = text
            return el

        correctionNode = subNode(node, 'correction')
        correctionNode.text = "\n"
        # State node
        subNode(correctionNode, 'type', str(int(self.type)))
        # Annotation node
        subNode(correctionNode, 'annotation', str(self.annotation))
        # Bounding box node with integer-rounded coordinates
        bboxNode = subNode(correctionNode, 'bbox')
        bboxNode.text = "\n"
        subNode(bboxNode, 'x', str(int(round(self.bbox.x()))))
        subNode(bboxNode, 'y', str(int(round(self.bbox.y()))))
        subNode(bboxNode, 'width', str(int(round(self.bbox.width()))))
        subNode(bboxNode, 'height', str(int(round(self.bbox.height()))))
#################
# Main GUI class
#################
# The main class which is a QtGui -> Main Window
class CityscapesLabelTool(QtWidgets.QMainWindow):
#############################
## Construction / Destruction
#############################
# Constructor
    def __init__(self):
        """Construct the main window.

        Loads the persisted configuration, initialises all member
        variables that hold the GUI/editing state, builds the UI and
        finally loads the city remembered from the last session.
        """
        # Construct base class
        super(CityscapesLabelTool, self).__init__()
        # The filename of where the config is saved and loaded
        configDir = os.path.dirname(__file__)
        self.configFile = os.path.join(configDir, "cityscapesLabelTool.conf")
        # This is the configuration.
        self.config = configuration()
        self.config.load(self.configFile)
        # Other member variables
        # The width that we actually use to show the image
        self.w = 0
        # The height that we actually use to show the image
        self.h = 0
        # The horizontal offset where we start drawing within the widget
        self.xoff = 0
        # The vertical offset where we start drawing within the widget
        self.yoff = 0
        # A gap that we leave around the image as little border
        self.bordergap = 20
        # The scale that was used, ie
        # self.w = self.scale * self.image.width()
        # self.h = self.scale * self.image.height()
        self.scale = 1.0
        # Filenames of all images in current city
        self.images = []
        # Image extension
        self.imageExt = "_leftImg8bit.png"
        # Ground truth extension (format placeholder is filled with the gt type)
        self.gtExt = "{}_polygons.json"
        # Current image as QImage
        self.image = QtGui.QImage()
        # Index of the current image within the city folder
        self.idx = 0
        # All annotated objects in current image
        self.annotation = None
        # The XML ElementTree representing the corrections for the current image
        self.correctionXML = None
        # A list of changes that we did on the current annotation
        # Each change is simply a descriptive string
        self.changes = []
        # The current object the mouse points to. Its index in self.annotation.objects
        self.mouseObj = -1
        # The currently selected objects. Their index in self.annotation.objects
        self.selObjs = []
        # The objects that are highlighted. List of object instances
        self.highlightObjs = []
        # A label that is selected for highlighting
        self.highlightObjLabel = None
        # Texture for highlighting
        self.highlightTexture = None
        # The position of the mouse
        self.mousePos = None
        # TODO: NEEDS BETTER EXPLANATION/ORGANISATION
        # (presumably the mouse position in unscaled widget coordinates — verify)
        self.mousePosOrig = None
        # The position of the mouse scaled to label coordinates
        self.mousePosScaled = None
        # If the mouse is outside of the image
        self.mouseOutsideImage = True
        # The position of the mouse upon enabling the zoom window
        self.mousePosOnZoom = None
        # The button state of the mouse
        self.mouseButtons = 0
        # A list of objects with changed layer
        self.changedLayer = []
        # A list of objects with changed polygon
        self.changedPolygon = []
        # A polygon that is drawn by the user
        self.drawPoly = QtGui.QPolygonF()
        # Treat the polygon as being closed
        self.drawPolyClosed = False
        # A point of this poly that is dragged
        self.draggedPt = -1
        # A list of toolbar actions that need an image
        self.actImage = []
        # A list of toolbar actions that need an image that is not the first
        self.actImageNotFirst = []
        # A list of toolbar actions that need an image that is not the last
        self.actImageNotLast = []
        # A list of toolbar actions that need changes
        self.actChanges = []
        # A list of toolbar actions that need a drawn polygon or selected objects
        self.actPolyOrSelObj = []
        # A list of toolbar actions that need a closed drawn polygon
        self.actClosedPoly = []
        # A list of toolbar actions that need selected objects
        self.actSelObj = []
        # A list of toolbar actions that need a single active selected object
        self.singleActSelObj = []
        # Toggle status of auto-doing screenshots
        self.screenshotToggleState = False
        # Toggle status of the play icon
        self.playState = False
        # Temporary zero transparency
        self.transpTempZero = False
        # Correction-mode state: the toolbar action (set in initUI), the list
        # of CorrectionBox instances, the selection index and in-progress box
        self.correctAction = []
        self.corrections = []
        self.selected_correction = -1
        self.in_progress_bbox = None
        self.in_progress_correction = None
        # NOTE(review): assigning a list here shadows QWidget.mousePressEvent;
        # confirm this is intended and does not disable the mouse event handler
        self.mousePressEvent = []
        # Default label
        self.defaultLabel = 'static'
        if not self.defaultLabel in name2label:
            print('The {0} label is missing in the internal label definitions.'.format(
                self.defaultLabel))
            return
        # Last selected label
        self.lastLabel = self.defaultLabel
        # Setup the GUI
        self.initUI()
        # Initially clear stuff
        self.deselectAllObjects()
        self.clearPolygon()
        self.clearChanges()
        # If we already know a city from the saved config -> load it
        self.loadCity()
        self.imageChanged()
# Destructor
    def __del__(self):
        """Persist the configuration when the tool is destroyed."""
        self.config.save(self.configFile)
# Construct everything GUI related. Called by constructor
def initUI(self):
# Create a toolbar
self.toolbar = self.addToolBar('Tools')
# Add the tool buttons
iconDir = os.path.join(os.path.dirname(__file__), 'icons')
# Loading a new city
loadAction = QtWidgets.QAction(QtGui.QIcon(
os.path.join(iconDir, 'open.png')), '&Tools', self)
loadAction.setShortcuts(['o'])
self.setTip(loadAction, 'Open city')
loadAction.triggered.connect(self.selectCity)
self.toolbar.addAction(loadAction)
# Open previous image
backAction = QtWidgets.QAction(QtGui.QIcon(
os.path.join(iconDir, 'back.png')), '&Tools', self)
backAction.setShortcut('left')
backAction.setStatusTip('Previous image')
backAction.triggered.connect(self.prevImage)
self.toolbar.addAction(backAction)
self.actImageNotFirst.append(backAction)
# Open next image
nextAction = QtWidgets.QAction(QtGui.QIcon(
os.path.join(iconDir, 'next.png')), '&Tools', self)
nextAction.setShortcut('right')
self.setTip(nextAction, 'Next image')
nextAction.triggered.connect(self.nextImage)
self.toolbar.addAction(nextAction)
self.actImageNotLast.append(nextAction)
# Play
playAction = QtWidgets.QAction(QtGui.QIcon(
os.path.join(iconDir, 'play.png')), '&Tools', self)
playAction.setShortcut(' ')
playAction.setCheckable(True)
playAction.setChecked(False)
self.setTip(playAction, 'Play all images')
playAction.triggered.connect(self.playImages)
self.toolbar.addAction(playAction)
self.actImageNotLast.append(playAction)
self.playAction = playAction
# Select image
selImageAction = QtWidgets.QAction(QtGui.QIcon(
os.path.join(iconDir, 'shuffle.png')), '&Tools', self)
selImageAction.setShortcut('i')
self.setTip(selImageAction, 'Select image')
selImageAction.triggered.connect(self.selectImage)
self.toolbar.addAction(selImageAction)
self.actImage.append(selImageAction)
# Save the current image
saveAction = QtWidgets.QAction(QtGui.QIcon(
os.path.join(iconDir, 'save.png')), '&Tools', self)
saveAction.setShortcut('s')
self.setTip(saveAction, 'Save changes')
saveAction.triggered.connect(self.save)
self.toolbar.addAction(saveAction)
self.actChanges.append(saveAction)
# Clear the currently edited polygon
clearPolAction = QtWidgets.QAction(QtGui.QIcon(
os.path.join(iconDir, 'clearpolygon.png')), '&Tools', self)
clearPolAction.setShortcuts(['q', 'Esc'])
self.setTip(clearPolAction, 'Clear polygon')
clearPolAction.triggered.connect(self.clearPolygonAction)
self.toolbar.addAction(clearPolAction)
self.actPolyOrSelObj.append(clearPolAction)
# Create new object from drawn polygon
newObjAction = QtWidgets.QAction(QtGui.QIcon(
os.path.join(iconDir, 'newobject.png')), '&Tools', self)
newObjAction.setShortcuts(['n'])
self.setTip(newObjAction, 'New object')
newObjAction.triggered.connect(self.newObject)
self.toolbar.addAction(newObjAction)
self.actClosedPoly.append(newObjAction)
# Delete the currently selected object
deleteObjectAction = QtWidgets.QAction(QtGui.QIcon(
os.path.join(iconDir, 'deleteobject.png')), '&Tools', self)
deleteObjectAction.setShortcuts(['d', 'delete'])
self.setTip(deleteObjectAction, 'Delete object')
deleteObjectAction.triggered.connect(self.deleteObject)
self.toolbar.addAction(deleteObjectAction)
self.actSelObj.append(deleteObjectAction)
# Undo changes in current image, ie. reload labels from file
undoAction = QtWidgets.QAction(QtGui.QIcon(
os.path.join(iconDir, 'undo.png')), '&Tools', self)
undoAction.setShortcut('u')
self.setTip(undoAction, 'Undo all unsaved changes')
undoAction.triggered.connect(self.undo)
self.toolbar.addAction(undoAction)
self.actChanges.append(undoAction)
# Modify the label of a selected object
labelAction = QtWidgets.QAction(QtGui.QIcon(
os.path.join(iconDir, 'modify.png')), '&Tools', self)
labelAction.setShortcuts(['m', 'l'])
self.setTip(labelAction, 'Modify label')
labelAction.triggered.connect(self.modifyLabel)
self.toolbar.addAction(labelAction)
self.actSelObj.append(labelAction)
# Move selected object a layer up
layerUpAction = QtWidgets.QAction(QtGui.QIcon(
os.path.join(iconDir, 'layerup.png')), '&Tools', self)
layerUpAction.setShortcuts(['Up'])
self.setTip(layerUpAction, 'Move object a layer up')
layerUpAction.triggered.connect(self.layerUp)
self.toolbar.addAction(layerUpAction)
self.singleActSelObj.append(layerUpAction)
# Move selected object a layer down
layerDownAction = QtWidgets.QAction(QtGui.QIcon(
os.path.join(iconDir, 'layerdown.png')), '&Tools', self)
layerDownAction.setShortcuts(['Down'])
self.setTip(layerDownAction, 'Move object a layer down')
layerDownAction.triggered.connect(self.layerDown)
self.toolbar.addAction(layerDownAction)
self.singleActSelObj.append(layerDownAction)
# Enable/disable zoom. Toggle button
zoomAction = QtWidgets.QAction(QtGui.QIcon(
os.path.join(iconDir, 'zoom.png')), '&Tools', self)
zoomAction.setShortcuts(['z'])
zoomAction.setCheckable(True)
zoomAction.setChecked(self.config.zoom)
self.setTip(zoomAction, 'Enable/disable permanent zoom')
zoomAction.toggled.connect(self.zoomToggle)
self.toolbar.addAction(zoomAction)
self.actImage.append(zoomAction)
# Highlight objects of a certain class
highlightAction = QtWidgets.QAction(QtGui.QIcon(
os.path.join(iconDir, 'highlight.png')), '&Tools', self)
highlightAction.setShortcuts(['g'])
highlightAction.setCheckable(True)
highlightAction.setChecked(self.config.highlight)
self.setTip(highlightAction,
'Enable/disable highlight of certain object class')
highlightAction.toggled.connect(self.highlightClassToggle)
self.toolbar.addAction(highlightAction)
self.actImage.append(highlightAction)
# Decrease transparency
minusAction = QtWidgets.QAction(QtGui.QIcon(
os.path.join(iconDir, 'minus.png')), '&Tools', self)
minusAction.setShortcut('-')
self.setTip(minusAction, 'Decrease transparency')
minusAction.triggered.connect(self.minus)
self.toolbar.addAction(minusAction)
# Increase transparency
plusAction = QtWidgets.QAction(QtGui.QIcon(
os.path.join(iconDir, 'plus.png')), '&Tools', self)
plusAction.setShortcut('+')
self.setTip(plusAction, 'Increase transparency')
plusAction.triggered.connect(self.plus)
self.toolbar.addAction(plusAction)
# Take a screenshot
screenshotAction = QtWidgets.QAction(QtGui.QIcon(
os.path.join(iconDir, 'screenshot.png')), '&Tools', self)
screenshotAction.setShortcut('t')
self.setTip(screenshotAction, 'Take a screenshot')
screenshotAction.triggered.connect(self.screenshot)
self.toolbar.addAction(screenshotAction)
self.actImage.append(screenshotAction)
# Take a screenshot in each loaded frame
screenshotToggleAction = QtWidgets.QAction(QtGui.QIcon(
os.path.join(iconDir, 'screenshotToggle.png')), '&Tools', self)
screenshotToggleAction.setShortcut('Ctrl+t')
screenshotToggleAction.setCheckable(True)
screenshotToggleAction.setChecked(False)
self.setTip(screenshotToggleAction,
'Take a screenshot in each loaded frame')
screenshotToggleAction.toggled.connect(self.screenshotToggle)
self.toolbar.addAction(screenshotToggleAction)
self.actImage.append(screenshotToggleAction)
# Display path to current image in message bar
displayFilepathAction = QtWidgets.QAction(QtGui.QIcon(
os.path.join(iconDir, 'filepath.png')), '&Tools', self)
displayFilepathAction.setShortcut('f')
self.setTip(displayFilepathAction, 'Show path to current image')
displayFilepathAction.triggered.connect(self.displayFilepath)
self.toolbar.addAction(displayFilepathAction)
# Open correction mode
self.correctAction = QtWidgets.QAction(QtGui.QIcon(
os.path.join(iconDir, 'checked6.png')), '&Tools', self)
self.correctAction.setShortcut('c')
self.correctAction.setCheckable(True)
self.correctAction.setChecked(self.config.correctionMode)
if self.config.correctionMode:
self.correctAction.setIcon(QtGui.QIcon(
os.path.join(iconDir, 'checked6_red.png')))
self.setTip(self.correctAction, 'Toggle correction mode')
self.correctAction.triggered.connect(self.toggleCorrectionMode)
self.toolbar.addAction(self.correctAction)
# Display help message
helpAction = QtWidgets.QAction(QtGui.QIcon(
os.path.join(iconDir, 'help19.png')), '&Tools', self)
helpAction.setShortcut('h')
self.setTip(helpAction, 'Help')
helpAction.triggered.connect(self.displayHelpMessage)
self.toolbar.addAction(helpAction)
# Close the application
exitAction = QtWidgets.QAction(QtGui.QIcon(
os.path.join(iconDir, 'exit.png')), '&Tools', self)
# exitAction.setShortcuts(['Esc'])
self.setTip(exitAction, 'Exit')
exitAction.triggered.connect(self.close)
self.toolbar.addAction(exitAction)
# The default text for the status bar
self.defaultStatusbar = 'Ready'
# Create a statusbar. Init with default
self.statusBar().showMessage(self.defaultStatusbar)
# Enable mouse move events
self.setMouseTracking(True)
self.toolbar.setMouseTracking(True)
# Open in full screen
screenShape = QtWidgets.QDesktopWidget().screenGeometry()
self.resize(screenShape.width(), screenShape.height())
# Set a title
self.applicationTitle = 'Cityscapes Label Tool v1.0'
self.setWindowTitle(self.applicationTitle)
# And show the application
self.show()
#############################
# Toolbar call-backs
#############################
# The user pressed "select city"
# The purpose of this method is to set these configuration attributes:
# - self.config.city : path to the folder containing the images to annotate
# - self.config.cityName : name of this folder, i.e. the city
# - self.config.labelPath : path to the folder to store the polygons
# - self.config.correctionPath : path to store the correction boxes in
# - self.config.gtType : type of ground truth, e.g. gtFine or gtCoarse
# - self.config.split : type of split, e.g. train, val, test
# The current implementation uses the environment variable 'CITYSCAPES_DATASET'
# to determine the dataset root folder and search available data within.
# Annotation types are required to start with 'gt', e.g. gtFine or gtCoarse.
# To add your own annotations you could create a folder gtCustom with similar structure.
#
# However, this implementation could be easily changed to a completely different folder structure.
# Just make sure to specify all three paths and a descriptive name as 'cityName'.
# The gtType and split can be left empty.
def selectCity(self):
# Reset the status bar to this message when leaving
restoreMessage = self.statusBar().currentMessage()
csPath = self.config.csPath
if not csPath or not os.path.isdir(csPath):
if 'CITYSCAPES_DATASET' in os.environ:
csPath = os.environ['CITYSCAPES_DATASET']
else:
csPath = os.path.join(os.path.dirname(
os.path.realpath(__file__)), '..', '..')
availableCities = []
annotations = sorted(glob.glob(os.path.join(csPath, 'gt*')))
annotations = [os.path.basename(a) for a in annotations]
splits = ["train_extra", "train", "val", "test"]
for gt in annotations:
for split in splits:
cities = glob.glob(os.path.join(csPath, gt, split, '*'))
cities.sort()
availableCities.extend(
[(split, gt, os.path.basename(c)) for c in cities if os.path.isdir(c)])
# List of possible labels
items = [split + ", " + gt + ", " +
city for (split, gt, city) in availableCities]
# default
previousItem = self.config.split + ", " + \
self.config.gtType + ", " + self.config.cityName
default = 0
if previousItem in items:
default = items.index(previousItem)
# Specify title
dlgTitle = "Select city"
message = dlgTitle
question = dlgTitle
message = "Select city for editing"
question = "Which city would you like to edit?"
self.statusBar().showMessage(message)
if items:
# Create and wait for dialog
(item, ok) = QtWidgets.QInputDialog.getItem(
self, dlgTitle, question, items, default, False)
# Restore message
self.statusBar().showMessage(restoreMessage)
if ok and item:
(split, gt, city) = [str(i) for i in item.split(', ')]
self.config.city = os.path.normpath(
os.path.join(csPath, "leftImg8bit", split, city))
self.config.cityName = city
self.config.labelPath = os.path.normpath(
os.path.join(csPath, gt, split, city))
self.config.correctionPath = os.path.normpath(
os.path.join(csPath, gt+'_corrections', split, city))
self.config.gtType = gt
self.config.split = split
self.deselectAllObjects()
self.clearPolygon()
self.loadCity()
self.imageChanged()
else:
warning = ""
warning += "The data was not found. Please:\n\n"
warning += " - make sure the scripts folder is in the Cityscapes root folder\n"
warning += "or\n"
warning += " - set CITYSCAPES_DATASET to the Cityscapes root folder\n"
warning += " e.g. 'export CITYSCAPES_DATASET=<root_path>'\n"
reply = QtWidgets.QMessageBox.information(
self, "ERROR!", warning, QtWidgets.QMessageBox.Ok)
if reply == QtWidgets.QMessageBox.Ok:
sys.exit()
return
# Switch to previous image in file list
# Load the image
# Load its labels
# Update the mouse selection
# View
def prevImage(self):
if not self.images:
return
if self.idx > 0:
if self.checkAndSave():
self.idx -= 1
self.imageChanged()
return
# Switch to next image in file list
# Load the image
# Load its labels
# Update the mouse selection
# View
def nextImage(self):
if not self.images:
return
if self.idx < len(self.images)-1:
if self.checkAndSave():
self.idx += 1
self.imageChanged()
elif self.playState:
self.playState = False
self.playAction.setChecked(False)
if self.playState:
QtCore.QTimer.singleShot(0, self.nextImage)
return
# Play images, i.e. auto-switch to next image
def playImages(self, status):
self.playState = status
if self.playState:
QtCore.QTimer.singleShot(0, self.nextImage)
# switch correction mode on and off
def toggleCorrectionMode(self):
if not self.config.correctionMode:
self.config.correctionMode = True
iconDir = os.path.join(os.path.dirname(sys.argv[0]), 'icons')
self.correctAction.setIcon(QtGui.QIcon(
os.path.join(iconDir, 'checked6_red.png')))
else:
self.config.correctionMode = False
iconDir = os.path.join(os.path.dirname(sys.argv[0]), 'icons')
self.correctAction.setIcon(QtGui.QIcon(
os.path.join(iconDir, 'checked6.png')))
self.update()
return
# Switch to a selected image of the file list
# Ask the user for an image
# Load the image
# Load its labels
# Update the mouse selection
# View
def selectImage(self):
if not self.images:
return
dlgTitle = "Select image to load"
self.statusBar().showMessage(dlgTitle)
items = ["{}: {}".format(num, os.path.basename(i))
for (num, i) in enumerate(self.images)]
(item, ok) = QtWidgets.QInputDialog.getItem(
self, dlgTitle, "Image", items, self.idx, False)
if (ok and item):
idx = items.index(item)
if idx != self.idx and self.checkAndSave():
self.idx = idx
self.imageChanged()
else:
# Restore the message
self.statusBar().showMessage(self.defaultStatusbar)
# Save labels
def save(self):
# Status
saved = False
# Message to show at the status bar when done
message = ""
# Only save if there are changes, labels, an image filename and an image
if self.changes and (self.annotation or self.corrections) and self.config.currentFile and self.image:
if self.annotation:
# set image dimensions
self.annotation.imgWidth = self.image.width()
self.annotation.imgHeight = self.image.height()
# Determine the filename
# If we have a loaded label file, then this is also the filename
filename = self.config.currentLabelFile
# If not, then generate one
if not filename:
filename = self.getLabelFilename(True)
if filename:
proceed = True
# warn user that he is overwriting an old file
if os.path.isfile(filename) and self.config.showSaveWarning:
msgBox = QtWidgets.QMessageBox(self)
msgBox.setWindowTitle("Overwriting")
msgBox.setText(
"Saving overwrites the original file and it cannot be reversed. Do you want to continue?")
msgBox.addButton(QtWidgets.QMessageBox.Cancel)
okAndNeverAgainButton = msgBox.addButton(
'OK and never ask again', QtWidgets.QMessageBox.AcceptRole)
okButton = msgBox.addButton(QtWidgets.QMessageBox.Ok)
msgBox.setDefaultButton(QtWidgets.QMessageBox.Ok)
msgBox.setIcon(QtWidgets.QMessageBox.Warning)
msgBox.exec_()
# User clicked on "OK"
if msgBox.clickedButton() == okButton:
pass
# User clicked on "OK and never ask again"
elif msgBox.clickedButton() == okAndNeverAgainButton:
self.config.showSaveWarning = False
else:
# Do nothing
message += "Nothing saved, no harm has been done. "
proceed = False
# Save JSON file
if proceed:
try:
self.annotation.toJsonFile(filename)
saved = True
message += "Saved labels to {0} ".format(filename)
except IOError as e:
message += "Error writing labels to {0}. Message: {1} ".format(
filename, e.strerror)
else:
message += "Error writing labels. Cannot generate a valid filename. "
if self.corrections or self.config.currentCorrectionFile:
# Determine the filename
# If we have a loaded label file, then this is also the filename
filename = self.config.currentCorrectionFile
# If not, then generate one
if not filename:
filename = self.getCorrectionFilename(True)
if filename:
# Prepare the root
root = ET.Element('correction')
root.text = "\n"
root.tail = "\n"
# Add the filename of the image that is annotated
filenameNode = ET.SubElement(root, 'filename')
filenameNode.text = os.path.basename(
self.config.currentFile)
filenameNode.tail = "\n"
# Add the folder where this image is located in
# For compatibility with the LabelMe Tool, we need to use the folder
# StereoDataset/cityName
folderNode = ET.SubElement(root, 'folder')
folderNode.text = "StereoDataset/" + self.config.cityName
folderNode.tail = "\n"
# The name of the tool. Here, we do not follow the output of the LabelMe tool,
# since this is crap anyway
sourceNode = ET.SubElement(root, 'source')
sourceNode.text = "\n"
sourceNode.tail = "\n"
sourceImageNode = ET.SubElement(sourceNode, 'sourceImage')
sourceImageNode.text = "Label Cities"
sourceImageNode.tail = "\n"
sourceAnnotationNode = ET.SubElement(
sourceNode, 'sourceAnnotation')
sourceAnnotationNode.text = "mcLabelTool"
sourceAnnotationNode.tail = "\n"
# The image size
imagesizeNode = ET.SubElement(root, 'imagesize')
imagesizeNode.text = "\n"
imagesizeNode.tail = "\n"
nrowsNode = ET.SubElement(imagesizeNode, 'nrows')
nrowsNode.text = str(self.image.height())
nrowsNode.tail = "\n"
ncolsNode = ET.SubElement(imagesizeNode, 'ncols')
ncolsNode.text = str(self.image.height())
ncolsNode.tail = "\n"
# Add all objects
for correction in self.corrections:
correction.appendToXMLNode(root)
# Create the actual XML tree
self.correctionXML = ET.ElementTree(root)
# Save XML file
try:
self.correctionXML.write(filename)
saved = True
message += "Saved corrections to {0} ".format(filename)
except IOError as e:
message += "Error writing corrections to {0}. Message: {1} ".format(
filename, e.strerror)
else:
message += "Error writing corrections. Cannot generate a valid filename. "
# Clear changes
if saved:
self.clearChanges()
else:
message += "Nothing to save "
saved = True
# Show the status message
self.statusBar().showMessage(message)
return saved
# Undo changes, ie. reload labels
def undo(self):
# check if we really want to do this in case there are multiple changes
if len(self.changes) > 1:
# Backup of status message
restoreMessage = self.statusBar().currentMessage()
# Create the dialog
dlgTitle = "Undo changes?"
self.statusBar().showMessage(dlgTitle)
text = "Do you want to undo the following changes?\n"
for c in self.changes:
text += "- " + c + '\n'
buttons = QtWidgets.QMessageBox.Ok | QtWidgets.QMessageBox.Cancel
ret = QtWidgets.QMessageBox.question(
self, dlgTitle, text, buttons, QtWidgets.QMessageBox.Ok)
proceed = False
# If the user selected yes -> undo
if ret == QtWidgets.QMessageBox.Ok:
proceed = True
self.statusBar().showMessage(restoreMessage)
# If we do not proceed -> return
if not proceed:
return
# Clear labels to force a reload
self.annotation = None
# Reload
self.imageChanged()
# Clear the drawn polygon and update
    def clearPolygonAction(self):
        """Deselect all objects, drop the drawn polygon and redraw."""
        self.deselectAllObjects()
        self.clearPolygon()
        self.update()
# Create a new object from the current polygon
def newObject(self):
# Default label
label = self.lastLabel
# Ask the user for a label
(label, ok) = self.getLabelFromUser(label)
if ok and label:
# Append and create the new object
self.appendObject(label, self.drawPoly)
# Clear the drawn polygon
self.deselectAllObjects()
self.clearPolygon()
# Default message
self.statusBar().showMessage(self.defaultStatusbar)
# Set as default label for next time
self.lastLabel = label
# Redraw
self.update()
# Delete the currently selected object
def deleteObject(self):
# Cannot do anything without a selected object
if not self.selObjs:
return
# Cannot do anything without labels
if not self.annotation:
return
for selObj in self.selObjs:
# The selected object that is deleted
obj = self.annotation.objects[selObj]
# Delete
obj.delete()
# Save changes
self.addChange(
"Deleted object {0} with label {1}".format(obj.id, obj.label))
# Clear polygon
self.deselectAllObjects()
self.clearPolygon()
# Redraw
self.update()
# Modify the label of a selected object
def modifyLabel(self):
# Cannot do anything without labels
if not self.annotation:
return
# Cannot do anything without a selected object
if not self.selObjs:
return
# The last selected object
obj = self.annotation.objects[self.selObjs[-1]]
# default label
defaultLabel = obj.label
defaultId = -1
# If there is only one object the dialog text can be improved
if len(self.selObjs) == 1:
defaultId = obj.id
(label, ok) = self.getLabelFromUser(defaultLabel, defaultId)
if ok and label:
for selObj in self.selObjs:
# The selected object that is modified
obj = self.annotation.objects[selObj]
# Save changes
if obj.label != label:
self.addChange("Set label {0} for object {1} with previous label {2}".format(
label, obj.id, obj.label))
obj.label = label
obj.updateDate()
# Update
self.update()
# Move object a layer up
def layerUp(self):
# Change layer
self.modifyLayer(+1)
# Update
self.update()
# Move object a layer down
def layerDown(self):
# Change layer
self.modifyLayer(-1)
# Update
self.update()
# Toggle zoom
def zoomToggle(self, status):
self.config.zoom = status
if status:
self.mousePosOnZoom = self.mousePos
self.update()
# Toggle highlight
def highlightClassToggle(self, status):
if status:
defaultLabel = ""
if self.config.highlightLabelSelection and self.config.highlightLabelSelection in name2label:
defaultLabel = self.config.highlightLabelSelection
(label, ok) = self.getLabelFromUser(defaultLabel)
if ok and label:
self.config.highlightLabelSelection = label
else:
status = False
self.config.highlight = status
self.update()
# Increase label transparency
def minus(self):
self.config.transp = max(self.config.transp-0.1, 0.0)
self.update()
def displayFilepath(self):
self.statusBar().showMessage(
"Current image: {0}".format(self.config.currentFile))
self.update()
# Decrease label transparency
def plus(self):
self.config.transp = min(self.config.transp+0.1, 1.0)
self.update()
# Take a screenshot
def screenshot(self):
# Get a filename for saving
dlgTitle = "Get screenshot filename"
filter = "Images (*.png *.xpm *.jpg)"
answer, _ = QtWidgets.QFileDialog.getSaveFileName(
self, dlgTitle, self.config.screenshotFilename, filter, options=QtWidgets.QFileDialog.DontUseNativeDialog)
if answer:
self.config.screenshotFilename = str(answer)
else:
return
# Actually make the screenshot
self.doScreenshot()
# Toggle auto-making of screenshots
def screenshotToggle(self, status):
self.screenshotToggleState = status
if status:
self.screenshot()
def displayHelpMessage(self):
message = self.applicationTitle + "\n\n"
message += "INSTRUCTIONS\n"
message += " - press open (left button) to select a city from drop-down menu\n"
message += " - browse images and edit labels using\n"
message += " the toolbar buttons (check tooltips) and the controls below\n"
message += " - note that the editing happens in-place;\n"
message += " if you want to annotate your own images or edit a custom\n"
message += " set of labels, check (and modify) the code of the method 'loadCity'\n"
message += " - note that this tool modifys the JSON polygon files, but\n"
message += " does not create or update the pngs; for the latter use\n"
message += " the preparation tools that come with this tool box.\n"
message += "\n"
message += "CONTROLS\n"
message += " - highlight objects [move mouse]\n"
message += " - draw new polygon\n"
message += " - start drawing a polygon [left click]\n"
message += " - add point to open polygon [left click]\n"
message += " - delete last added point [Backspace]\n"
message += " - close polygon [left click on first point]\n"
message += " - select closed polygon, existing object [Ctrl + left click]\n"
message += " - move point [left click and hold on point, move mouse]\n"
message += " - add point [click on edge]\n"
message += " - delete point from polygon [Shift + left click on point]\n"
message += " - deselect polygon [Q]\n"
message += " - select multiple polygons [Ctrl + left click]\n"
message += " - intersect/merge two polygons: draw new polygon, then\n"
message += " - intersect [Shift + left click on existing polygon]\n"
message += " - merge [Alt + left click on existing polygon]\n"
message += " - open zoom window [Z or hold down right mouse button]\n"
message += " - zoom in/out [mousewheel]\n"
message += " - enlarge/shrink zoom window [shift+mousewheel]\n"
message += " - start correction mode [C]\n"
message += " - draw a correction box [left click and hold, move, release]\n"
message += " - set box type [1,2,3,4]\n"
message += " - previous/next box [E,R]\n"
message += " - delete box [D]\n"
message += " - modify text, use ascii only [M]\n"
QtWidgets.QMessageBox.about(self, "HELP!", message)
self.update()
# Close the application
def closeEvent(self, event):
if self.checkAndSave():
event.accept()
else:
event.ignore()
#############################
# Custom events
#############################
    def imageChanged(self):
        """React to a change of the current image: reset state, reload, redraw.

        The call order matters: corrections and polygon state are cleared
        before the image/labels/corrections for the new file are loaded.
        """
        # Clear corrections
        self.corrections = []
        self.selected_correction = -1
        # Clear the polygon
        self.deselectAllObjects()
        self.clearPolygon()
        # Load the first image
        self.loadImage()
        # Load its labels if available
        self.loadLabels()
        # Load its corrections if available
        self.loadCorrections()
        # Update the object the mouse points to
        self.updateMouseObject()
        # Update the GUI
        self.update()
        # Save a screenshot automatically if the toggle is on
        if self.screenshotToggleState:
            self.doScreenshot()
#############################
# File I/O
#############################
# Load the currently selected city if possible
def loadCity(self):
# Search for all *.pngs to get the image list
self.images = []
if os.path.isdir(self.config.city):
self.images = glob.glob(os.path.join(
self.config.city, '*' + self.imageExt))
self.images.sort()
if self.config.currentFile in self.images:
self.idx = self.images.index(self.config.currentFile)
else:
self.idx = 0
# Load the currently selected image
# Does only load if not previously loaded
# Does not refresh the GUI
def loadImage(self):
success = False
message = self.defaultStatusbar
if self.images:
filename = self.images[self.idx]
filename = os.path.normpath(filename)
if not self.image.isNull() and filename == self.config.currentFile:
success = True
else:
self.image = QtGui.QImage(filename)
if self.image.isNull():
message = "Failed to read image: {0}".format(filename)
else:
message = "Read image: {0}".format(filename)
self.config.currentFile = filename
success = True
# Update toolbar actions that need an image
for act in self.actImage:
act.setEnabled(success)
for act in self.actImageNotFirst:
act.setEnabled(success and self.idx > 0)
for act in self.actImageNotLast:
act.setEnabled(success and self.idx < len(self.images)-1)
self.statusBar().showMessage(message)
# Load the labels from file
# Only loads if they exist
# Otherwise the filename is stored and that's it
    def loadLabels(self):
        """Load the annotation for the current image if a label file exists.

        Missing files only clear the annotation; the filename is still
        remembered so labels can be saved there later.
        """
        filename = self.getLabelFilename()
        if not filename or not os.path.isfile(filename):
            self.clearAnnotation()
            return
        # If we have everything and the filename did not change, then we are good
        if self.annotation and filename == self.currentLabelFile:
            return
        # Clear the current labels first
        self.clearAnnotation()
        try:
            self.annotation = Annotation()
            self.annotation.fromJsonFile(filename)
        except IOError as e:
            # Report read/parse failures in the status bar instead of crashing
            message = "Error parsing labels in {0}. Message: {1}".format(
                filename, e.strerror)
            self.statusBar().showMessage(message)
        # Remember the filename loaded
        self.currentLabelFile = filename
        # Remember the status bar message to restore it later
        restoreMessage = self.statusBar().currentMessage()
        # Restore the message
        self.statusBar().showMessage(restoreMessage)
# Load the labels from file
# Only loads if they exist
# Otherwise the filename is stored and that's it
    def loadCorrections(self):  # TODO
        """Load correction boxes for the current image from their XML file.

        A missing file is not an error: corrections are optional. Parse
        failures are shown in the status bar and leave an empty list.
        """
        filename = self.getCorrectionFilename()
        if not filename:
            self.clearCorrections()
            return
        # If we have everything and the filename did not change, then we are good
        if self.correctionXML and self.corrections and filename == self.config.currentCorrectionFile:
            return
        # Clear the current corrections first
        self.clearCorrections()
        # We do not always expect to have corrections, therefore prevent a failure due to missing file
        if not os.path.isfile(filename):
            return
        try:
            # Try to parse the XML file
            self.correctionXML = ET.parse(filename)
        except IOError as e:
            # This is the error if the file cannot be read
            message = "Error parsing corrections in {0}. Message: {1}".format(
                filename, e.strerror)
            self.statusBar().showMessage(message)
            self.correctionXML = []
            return
        except ET.ParseError as e:
            # This is the error if the content is no valid XML
            message = "Error parsing corrections in {0}. Message: {1}".format(
                filename, e)
            self.statusBar().showMessage(message)
            self.correctionXML = []
            return
        # Remember the filename loaded
        self.config.currentCorrectionFile = filename
        # Remember the status bar message to restore it later
        restoreMessage = self.statusBar().currentMessage()
        # Iterate through all 'correction' nodes in the XML
        root = self.correctionXML.getroot()
        for i, objNode in enumerate(root.findall('correction')):
            # Instantiate a new CorrectionBox (defined elsewhere in this file)
            # and read the XML node
            obj = CorrectionBox()
            obj.readFromXMLNode(objNode)
            # The first box becomes the selected one
            if i == 0:
                self.selected_correction = 0
                obj.select()
            # Append the object to our list of corrections
            self.corrections.append(obj)
        # Restore the message
        self.statusBar().showMessage(restoreMessage)
def modify_correction_type(self, correction_type):
if self.selected_correction >= 0:
self.corrections[self.selected_correction].type = correction_type
self.addChange("Modified correction type.")
self.update()
return
def delete_selected_annotation(self):
if self.selected_correction >= 0 and self.config.correctionMode:
del self.corrections[self.selected_correction]
if self.selected_correction == len(self.corrections):
self.selected_correction = self.selected_correction - 1
if self.selected_correction >= 0:
self.corrections[self.selected_correction].select()
self.addChange("Deleted correction.")
self.update()
return
def modify_correction_description(self):
if self.selected_correction >= 0 and self.config.correctionMode:
description = QtWidgets.QInputDialog.getText(self, "Modify Error Description", "Please describe the labeling error briefly.",
text=self.corrections[self.selected_correction].annotation)
if description[1]:
self.corrections[self.selected_correction].annotation = description[0]
self.addChange("Changed correction description.")
self.update()
return
def select_next_correction(self):
if self.selected_correction >= 0:
self.corrections[self.selected_correction].unselect()
if self.selected_correction == (len(self.corrections) - 1):
self.selected_correction = 0
else:
self.selected_correction = self.selected_correction + 1
self.corrections[self.selected_correction].select()
self.update()
return
def select_previous_correction(self):
if self.selected_correction >= 0:
self.corrections[self.selected_correction].unselect()
if self.selected_correction == 0:
self.selected_correction = (len(self.corrections) - 1)
else:
self.selected_correction = self.selected_correction - 1
self.corrections[self.selected_correction].select()
self.update()
return
#############################
# Drawing
#############################
# This method is called when redrawing everything
# Can be manually triggered by self.update()
# Note that there must not be any other self.update within this method
# or any methods that are called within
    def paintEvent(self, event):
        """Redraw the whole widget: image, label overlay, polygons, zoom.

        Triggered via self.update(); must not call self.update() itself
        (directly or indirectly) or it would repaint forever. The draw
        order below determines what ends up on top.
        """
        # Create a QPainter that can perform draw actions within a widget or image
        qp = QtGui.QPainter()
        # Begin drawing in the application widget
        qp.begin(self)
        # Update scale
        self.updateScale(qp)
        # Determine the object ID to highlight
        self.getHighlightedObject(qp)
        # Draw the image first
        self.drawImage(qp)
        # Draw the labels on top
        overlay = self.drawLabels(qp)
        # Draw the user drawn polygon
        self.drawDrawPoly(qp)
        self.drawDrawRect(qp)
        # Draw the label name next to the mouse
        self.drawLabelAtMouse(qp)
        # Draw the zoom
        # self.drawZoom(qp, overlay)
        self.drawZoom(qp, None)
        # Thats all drawing
        qp.end()
        # Forward the paint event
        QtWidgets.QMainWindow.paintEvent(self, event)
# Update the scaling
def updateScale(self, qp):
if not self.image.width() or not self.image.height():
return
# Horizontal offset
self.xoff = self.bordergap
# Vertical offset
self.yoff = self.toolbar.height()+self.bordergap
# We want to make sure to keep the image aspect ratio and to make it fit within the widget
# Without keeping the aspect ratio, each side of the image is scaled (multiplied) with
sx = float(qp.device().width() - 2*self.xoff) / self.image.width()
sy = float(qp.device().height() - 2*self.yoff) / self.image.height()
# To keep the aspect ratio while making sure it fits, we use the minimum of both scales
# Remember the scale for later
self.scale = min(sx, sy)
# These are then the actual dimensions used
self.w = self.scale * self.image.width()
self.h = self.scale * self.image.height()
# Determine the highlighted object for drawing
    def getHighlightedObject(self, qp):
        """Determine the objects (and optionally the label) to highlight.

        Fills self.highlightObjs and self.highlightObjLabel; selection wins
        over the object under the mouse.
        """
        # These variables we want to fill
        self.highlightObjs = []
        self.highlightObjLabel = None
        # Without labels we cannot do so
        if not self.annotation:
            return
        # If available set the selected objects
        highlightObjIds = self.selObjs
        # If not available but the polygon is empty or closed, its the mouse object
        if not highlightObjIds and (self.drawPoly.isEmpty() or self.drawPolyClosed) and self.mouseObj >= 0 and not self.mouseOutsideImage:
            highlightObjIds = [self.mouseObj]
        # Get the actual object that is highlighted
        if highlightObjIds:
            self.highlightObjs = [self.annotation.objects[i]
                                  for i in highlightObjIds]
        # Set the highlight object label if appropriate
        if self.config.highlight:
            self.highlightObjLabel = self.config.highlightLabelSelection
        elif len(highlightObjIds) == 1 and self.config.correctionMode:
            self.highlightObjLabel = self.annotation.objects[highlightObjIds[-1]].label
# Draw the image in the given QPainter qp
def drawImage(self, qp):
# Return if no image available
if self.image.isNull():
return
# Save the painters current setting to a stack
qp.save()
# Draw the image
qp.drawImage(QtCore.QRect(self.xoff, self.yoff,
self.w, self.h), self.image)
# Restore the saved setting from the stack
qp.restore()
def getPolygon(self, obj):
poly = QtGui.QPolygonF()
for pt in obj.polygon:
point = QtCore.QPointF(pt.x, pt.y)
poly.append(point)
return poly
# Draw the labels in the given QPainter qp
# optionally provide a list of labels to ignore
def drawLabels(self, qp, ignore=[]):
if self.image.isNull() or self.w <= 0 or self.h <= 0:
return
if not self.annotation:
return
if self.transpTempZero:
return
# The overlay is created in the viewing coordinates
# This way, the drawing is more dense and the polygon edges are nicer
# We create an image that is the overlay
# Within this image we draw using another QPainter
# Finally we use the real QPainter to overlay the overlay-image on what is drawn so far
# The image that is used to draw the overlays
overlay = QtGui.QImage(
self.w, self.h, QtGui.QImage.Format_ARGB32_Premultiplied)
# Fill the image with the default color
defaultLabel = name2label[self.defaultLabel]
col = QtGui.QColor(*defaultLabel.color)
overlay.fill(col)
# Create a new QPainter that draws in the overlay image
qp2 = QtGui.QPainter()
qp2.begin(overlay)
# The color of the outlines
qp2.setPen(QtGui.QColor('white'))
# Draw all objects
for obj in self.annotation.objects:
# Some are flagged to not be drawn. Skip them
if not obj.draw:
continue
# The label of the object
name = assureSingleInstanceName(obj.label)
# If we do not know a color for this label, warn the user
if not name in name2label:
print(
"The annotations contain unkown labels. This should not happen. Please inform the datasets authors. Thank you!")
print("Details: label '{}', file '{}'".format(
name, self.currentLabelFile))
continue
# If we ignore this label, skip
if name in ignore:
continue
poly = self.getPolygon(obj)
# Scale the polygon properly
polyToDraw = poly * \
QtGui.QTransform.fromScale(self.scale, self.scale)
# Default drawing
# Color from color table, solid brush
col = QtGui.QColor(*name2label[name].color)
brush = QtGui.QBrush(col, QtCore.Qt.SolidPattern)
qp2.setBrush(brush)
# Overwrite drawing if this is the highlighted object
if (obj in self.highlightObjs or name == self.highlightObjLabel):
# First clear everything below of the polygon
qp2.setCompositionMode(QtGui.QPainter.CompositionMode_Clear)
qp2.drawPolygon(polyToDraw)
qp2.setCompositionMode(
QtGui.QPainter.CompositionMode_SourceOver)
# Set the drawing to a special pattern
brush = QtGui.QBrush(col, QtCore.Qt.DiagCrossPattern)
qp2.setBrush(brush)
qp2.drawPolygon(polyToDraw)
# Draw outline of selected object dotted
for obj in self.highlightObjs:
brush = QtGui.QBrush(QtCore.Qt.NoBrush)
qp2.setBrush(brush)
qp2.setPen(QtCore.Qt.DashLine)
polyToDraw = self.getPolygon(
obj) * QtGui.QTransform.fromScale(self.scale, self.scale)
qp2.drawPolygon(polyToDraw)
# End the drawing of the overlay
qp2.end()
# Save QPainter settings to stack
qp.save()
# Define transparency
qp.setOpacity(self.config.transp)
# Draw the overlay image
qp.drawImage(self.xoff, self.yoff, overlay)
# Restore settings
qp.restore()
return overlay
    def drawDrawRect(self, qp):
        """Draw all correction boxes plus the box currently being dragged."""
        qp.save()
        qp.setBrush(QtGui.QBrush(QtCore.Qt.NoBrush))
        qp.setFont(QtGui.QFont('QFont::AnyStyle', 14))
        thickPen = QtGui.QPen()
        qp.setPen(thickPen)
        for c in self.corrections:
            # Work on a copy so the stored (image-space) bbox stays untouched
            rect = copy.deepcopy(c.bbox)
            width = rect.width()
            height = rect.height()
            # Map the box from image coordinates to widget coordinates
            rect.setX(c.bbox.x() * self.scale + self.xoff)
            rect.setY(c.bbox.y() * self.scale + self.yoff)
            rect.setWidth(width * self.scale)
            rect.setHeight(height * self.scale)
            if c.selected:
                # Selected box: thicker outline, description printed below the image
                thickPen.setColor(QtGui.QColor(0, 0, 0))
                if c.type == CorrectionBox.types.QUESTION:
                    descr = "QUESTION"
                elif c.type == CorrectionBox.types.RESOLVED:
                    descr = "FIXED"
                else:
                    descr = "ERROR"
                qp.setPen(thickPen)
                qp.drawText(QtCore.QPoint(self.xoff, self.yoff + self.h + 20),
                            "(%s: %s)" % (descr, c.annotation))
                pen_width = 6
            else:
                pen_width = 3
            colour = c.get_colour()
            thickPen.setColor(colour)
            thickPen.setWidth(pen_width)
            qp.setPen(thickPen)
            qp.drawRect(rect)
        # The box that is currently being dragged open is drawn in red
        if self.in_progress_bbox is not None:
            rect = copy.deepcopy(self.in_progress_bbox)
            width = rect.width()
            height = rect.height()
            rect.setX(self.in_progress_bbox.x() * self.scale + self.xoff)
            rect.setY(self.in_progress_bbox.y() * self.scale + self.yoff)
            rect.setWidth(width * self.scale)
            rect.setHeight(height * self.scale)
            thickPen.setColor(QtGui.QColor(255, 0, 0))
            thickPen.setWidth(3)
            qp.setPen(thickPen)
            qp.drawRect(rect)
        qp.restore()
# Draw the polygon that is drawn and edited by the user
# Usually the polygon must be rescaled properly. However when drawing
# The polygon within the zoom, this is not needed. Therefore the option transform.
    def drawDrawPoly(self, qp, transform=None):
        """Draw the polygon the user is drawing/editing.

        transform -- optional QTransform; when given (used by drawZoom) it
        maps the polygon instead of the standard scale + offset.
        """
        # Nothing to do?
        if self.drawPoly.isEmpty():
            return
        if not self.image:
            return
        # Save QPainter settings to stack
        qp.save()
        # The polygon - make a copy
        poly = QtGui.QPolygonF(self.drawPoly)
        # Append the current mouse position while the polygon is still open
        if not self.drawPolyClosed and (self.mousePosScaled is not None):
            poly.append(self.mousePosScaled)
        # Transform to widget coordinates
        if not transform:
            poly = poly * QtGui.QTransform.fromScale(self.scale, self.scale)
            poly.translate(self.xoff, self.yoff)
        else:
            poly = poly * transform
        # Do not fill the polygon
        qp.setBrush(QtGui.QBrush(QtCore.Qt.NoBrush))
        # Draw the polygon edges
        polyColor = QtGui.QColor(255, 0, 0)
        qp.setPen(polyColor)
        if not self.drawPolyClosed:
            qp.drawPolyline(poly)
        else:
            qp.drawPolygon(poly)
        # Get the ID of the closest point to the mouse
        if self.mousePosScaled is not None:
            closestPt = self.getClosestPoint(
                self.drawPoly, self.mousePosScaled)
        else:
            closestPt = (-1, -1)
        # If a polygon edge is selected, draw in bold
        if closestPt[0] != closestPt[1]:
            thickPen = QtGui.QPen(polyColor)
            thickPen.setWidth(3)
            qp.setPen(thickPen)
            qp.drawLine(poly[closestPt[0]], poly[closestPt[1]])
        # Draw the polygon points
        qp.setPen(polyColor)
        startDrawingPts = 0
        # A bit different if not closed: the first point closes the polygon
        if not self.drawPolyClosed:
            # Draw
            self.drawPoint(qp, poly.first(), True, closestPt ==
                           (0, 0) and self.drawPoly.size() > 1)
            # Do not draw again
            startDrawingPts = 1
        # The next in red
        for pt in range(startDrawingPts, poly.size()):
            self.drawPoint(
                qp, poly[pt], False, self.drawPolyClosed and closestPt == (pt, pt))
        # Restore QPainter settings from stack
        qp.restore()
# Draw the label name next to the mouse
    def drawLabelAtMouse(self, qp):
        """Write the label of the highlighted object next to the mouse cursor."""
        # Nothing to do without a highlighted object
        if not self.highlightObjs:
            return
        # Also we do not want to draw the label, if we have a drawn polygon
        if not self.drawPoly.isEmpty():
            return
        # Nothing to do without a mouse position
        if not self.mousePos:
            return
        # Save QPainter settings to stack
        qp.save()
        # That is the mouse position
        mouse = self.mousePos
        # Will show zoom
        showZoom = self.config.zoom and not self.image.isNull() and self.w and self.h
        # The text that is written next to the mouse
        mouseText = self.highlightObjs[-1].label
        # Where to write the text
        # Depends on the zoom (additional offset to mouse to make space for zoom?)
        # The location in the image (if we are at the top we want to write below of the mouse)
        off = 36
        if showZoom:
            off += self.config.zoomSize/2
        if mouse.y()-off > self.toolbar.height():
            top = mouse.y()-off
            btm = mouse.y()
            vAlign = QtCore.Qt.AlignTop
        else:
            # Leave extra room for the height of the cursor
            if not showZoom:
                off += 20
            top = mouse.y()
            btm = mouse.y()+off
            vAlign = QtCore.Qt.AlignBottom
        # Here we can draw
        rect = QtCore.QRect()
        rect.setTopLeft(QtCore.QPoint(mouse.x()-100, top))
        rect.setBottomRight(QtCore.QPoint(mouse.x()+100, btm))
        # The color
        qp.setPen(QtGui.QColor('white'))
        # The font to use
        font = QtGui.QFont("Helvetica", 20, QtGui.QFont.Bold)
        qp.setFont(font)
        # Non-transparent
        qp.setOpacity(1)
        # Draw the text, horizontally centered
        qp.drawText(rect, QtCore.Qt.AlignHCenter | vAlign, mouseText)
        # Restore settings
        qp.restore()
# Draw the zoom
    def drawZoom(self, qp, overlay):
        """Draw a magnified view of the image region around the mouse cursor."""
        # Zoom disabled?
        if not self.config.zoom:
            return
        # No image
        if self.image.isNull() or not self.w or not self.h:
            return
        # No mouse
        if not self.mousePos:
            return
        # Abbreviation for the zoom window size
        zoomSize = self.config.zoomSize
        # Abbreviation for the mouse position
        mouse = self.mousePos
        # The pixel that is the zoom center
        pix = self.mousePosScaled
        # The size of the part of the image that is drawn in the zoom window
        selSize = zoomSize / (self.config.zoomFactor * self.config.zoomFactor)
        # The selection window for the image
        sel = QtCore.QRectF(pix.x() - selSize/2, pix.y() -
                            selSize/2, selSize, selSize)
        # The selection window for the widget
        view = QtCore.QRectF(mouse.x()-zoomSize/2,
                             mouse.y()-zoomSize/2, zoomSize, zoomSize)
        # Show the zoom image
        qp.drawImage(view, self.image, sel)
        # If we are currently drawing the polygon, we need to draw again in the zoom
        if not self.drawPoly.isEmpty():
            transform = QtGui.QTransform()
            quadFrom = QtGui.QPolygonF()
            quadFrom.append(sel.topLeft())
            quadFrom.append(sel.topRight())
            quadFrom.append(sel.bottomRight())
            quadFrom.append(sel.bottomLeft())
            quadTo = QtGui.QPolygonF()
            quadTo.append(view.topLeft())
            quadTo.append(view.topRight())
            quadTo.append(view.bottomRight())
            quadTo.append(view.bottomLeft())
            # Map the image-space quad onto the widget-space quad
            if QtGui.QTransform.quadToQuad(quadFrom, quadTo, transform):
                qp.setClipRect(view)
                # transform.translate(self.xoff,self.yoff)
                self.drawDrawPoly(qp, transform)
            else:
                print("not possible")
#############################
# Mouse/keyboard events
#############################
# Mouse moved
# Need to save the mouse position
# Need to drag a polygon point
# Need to update the mouse selected object
    def mouseMoveEvent(self, event):
        """Track the mouse: drag polygon points or grow the correction box."""
        if self.image.isNull() or self.w == 0 or self.h == 0:
            return
        self.updateMousePos(event.localPos())
        if not self.config.correctionMode:
            # If we are dragging a point, update
            if self.draggedPt >= 0:
                # Update the dragged point
                self.drawPoly.replace(self.draggedPt, self.mousePosScaled)
                # If the polygon is the polygon of the selected object,
                # update the object polygon and
                # keep track of the changes we do
                if self.selObjs:
                    obj = self.annotation.objects[self.selObjs[-1]]
                    obj.polygon[self.draggedPt] = Point(
                        self.mousePosScaled.x(), self.mousePosScaled.y())
                    # Check if we changed the object's polygon the first time
                    if not obj.id in self.changedPolygon:
                        self.changedPolygon.append(obj.id)
                        self.addChange(
                            "Changed polygon of object {0} with label {1}".format(obj.id, obj.label))
        else:
            if self.in_progress_bbox is not None:
                # NOTE(review): self.mousePressEvent holds the press *position*
                # here — mousePressEvent() assigns a point to an attribute of
                # the same name, shadowing the method; consider renaming both.
                p0 = (self.mousePosScaled.x(), self.mousePosScaled.y())
                p1 = (self.mousePressEvent.x(), self.mousePressEvent.y())
                xy = min(p0[0], p1[0]), min(p0[1], p1[1])
                w, h = abs(p0[0] - p1[0]), abs(p0[1] - p1[1])
                self.in_progress_bbox = QtCore.QRectF(xy[0], xy[1], w, h)
                # p.set_x(xy[0])
                # p.set_y(xy[1])
                # p.set_width(w)
                # p.set_height(h)
        # Update the object selected by the mouse
        self.updateMouseObject()
        # Redraw
        self.update()
# Mouse left the widget
def leaveEvent(self, event):
self.mousePos = None
self.mousePosScaled = None
self.mouseOutsideImage = True
# Mouse button pressed
# Start dragging of polygon point
# Enable temporary toggling of zoom
    def mousePressEvent(self, event):
        """Handle mouse press: start point drags, start a correction box, or toggle zoom."""
        self.mouseButtons = event.buttons()
        shiftPressed = QtWidgets.QApplication.keyboardModifiers() == QtCore.Qt.ShiftModifier
        self.updateMousePos(event.localPos())
        # NOTE(review): this stores the press position in an attribute named
        # like the method itself, shadowing the bound method on this instance.
        # mouseMoveEvent reads this attribute — renaming requires touching both.
        self.mousePressEvent = self.mousePosScaled
        # Handle left click
        if event.button() == QtCore.Qt.LeftButton:
            # If the drawn polygon is closed and the mouse clicks a point,
            # Then this one is dragged around
            if not self.config.correctionMode:
                if self.drawPolyClosed and (self.mousePosScaled is not None):
                    closestPt = self.getClosestPoint(
                        self.drawPoly, self.mousePosScaled)
                    # Shift + click on a point deletes it
                    if shiftPressed:
                        if closestPt[0] == closestPt[1]:
                            del self.drawPoly[closestPt[0]]
                            # If the polygon is the polygon of the selected object,
                            # update the object
                            # and keep track of the changes we do
                            if self.selObjs:
                                obj = self.annotation.objects[self.selObjs[-1]]
                                del obj.polygon[closestPt[0]]
                                # Check if we changed the object's polygon the first time
                                if not obj.id in self.changedPolygon:
                                    self.changedPolygon.append(obj.id)
                                    self.addChange(
                                        "Changed polygon of object {0} with label {1}".format(obj.id, obj.label))
                            self.update()
                    else:
                        # If we got a point (or nothing), we make it dragged
                        if closestPt[0] == closestPt[1]:
                            self.draggedPt = closestPt[0]
                        # If we got an edge, we insert a point and make it dragged
                        else:
                            self.drawPoly.insert(
                                closestPt[1], self.mousePosScaled)
                            self.draggedPt = closestPt[1]
                            # If the polygon is the polygon of the selected object,
                            # update the object
                            # and keep track of the changes we do
                            if self.selObjs:
                                obj = self.annotation.objects[self.selObjs[-1]]
                                obj.polygon.insert(closestPt[1], Point(
                                    self.mousePosScaled.x(), self.mousePosScaled.y()))
                                # Check if we changed the object's polygon the first time
                                if not obj.id in self.changedPolygon:
                                    self.changedPolygon.append(obj.id)
                                    self.addChange(
                                        "Changed polygon of object {0} with label {1}".format(obj.id, obj.label))
            else:
                # Correction mode: begin dragging open a new correction box
                assert self.in_progress_bbox == None
                self.in_progress_bbox = QtCore.QRectF(
                    self.mousePosScaled.x(), self.mousePosScaled.y(), 0, 0)
        # Handle right click
        elif event.button() == QtCore.Qt.RightButton:
            self.toggleZoom(event.localPos())
        # Redraw
        self.update()
# Mouse button released
# End dragging of polygon
# Select an object
# Add a point to the polygon
# Disable temporary toggling of zoom
    def mouseReleaseEvent(self, event):
        """Handle mouse release: select/intersect/merge, add points, finish boxes."""
        self.mouseButtons = event.buttons()
        ctrlPressed = event.modifiers() & QtCore.Qt.ControlModifier
        shiftPressed = event.modifiers() & QtCore.Qt.ShiftModifier
        altPressed = event.modifiers() & QtCore.Qt.AltModifier
        # Handle left click
        if event.button() == QtCore.Qt.LeftButton:
            if not self.config.correctionMode:
                # Check if Ctrl is pressed
                if ctrlPressed:
                    # If also Shift is pressed and we have a closed polygon, then we intersect
                    # the polygon with the mouse object
                    if shiftPressed and self.drawPolyClosed:
                        self.intersectPolygon()
                    # If also Alt is pressed and we have a closed polygon, then we merge
                    # the polygon with the mouse object
                    if altPressed and self.drawPolyClosed:
                        self.mergePolygon()
                    # Make the current mouse object the selected
                    # and process the selection
                    else:
                        self.selectObject()
                # Add the point to the drawn polygon if not already closed
                elif not self.drawPolyClosed:
                    # If the mouse would close the poly make sure to do so
                    if self.ptClosesPoly():
                        self.closePolygon()
                    elif self.mousePosScaled is not None:
                        if not self.drawPolyClosed and self.drawPoly.isEmpty():
                            self.mousePosOnZoom = self.mousePos
                        self.addPtToPoly(self.mousePosScaled)
                # Otherwise end a possible dragging
                elif self.drawPolyClosed:
                    self.draggedPt = -1
            else:
                # Correction mode: finish the box that was dragged open
                if self.in_progress_bbox is not None:
                    # Ignore tiny boxes; otherwise ask for a description
                    if self.in_progress_bbox.width() > 20:
                        description = QtWidgets.QInputDialog.getText(
                            self, "Error Description", "Please describe the labeling error briefly.")
                        if description[1] and description[0]:
                            self.corrections.append(CorrectionBox(
                                self.in_progress_bbox, annotation=description[0]))
                            # last_annotation = self.in_progress_annotation #TODO: self?
                            self.corrections[self.selected_correction].unselect(
                            )
                            self.selected_correction = len(self.corrections)-1
                            self.corrections[self.selected_correction].select()
                            self.addChange("Added correction.")
                    self.in_progress_annotation = None
                    self.in_progress_bbox = None
        # Handle right click
        elif event.button() == QtCore.Qt.RightButton:
            self.toggleZoom(event.localPos())
        # Redraw
        self.update()
# Mouse wheel scrolled
def wheelEvent(self, event):
deltaDegree = event.angleDelta().y() / 8 # Rotation in degree
deltaSteps = deltaDegree / 15 # Usually one step on the mouse is 15 degrees
if self.config.zoom:
# If shift is pressed, change zoom window size
if event.modifiers() and QtCore.Qt.Key_Shift:
self.config.zoomSize += deltaSteps * 10
self.config.zoomSize = max(self.config.zoomSize, 10)
self.config.zoomSize = min(self.config.zoomSize, 1000)
# Change zoom factor
else:
self.config.zoomFactor += deltaSteps * 0.05
self.config.zoomFactor = max(self.config.zoomFactor, 0.1)
self.config.zoomFactor = min(self.config.zoomFactor, 10)
self.update()
# Key pressed
    def keyPressEvent(self, e):
        """Keyboard dispatch for cursor, polygon editing, and correction mode."""
        # Ctrl key changes mouse cursor
        if e.key() == QtCore.Qt.Key_Control:
            QtWidgets.QApplication.setOverrideCursor(
                QtGui.QCursor(QtCore.Qt.PointingHandCursor))
        # Backspace deletes last point from polygon
        elif e.key() == QtCore.Qt.Key_Backspace:
            if not self.drawPolyClosed:
                del self.drawPoly[-1]
                self.update()
        # set alpha to temporary zero
        elif e.key() == QtCore.Qt.Key_0:
            self.transpTempZero = True
            self.update()
        # E/R cycle through the correction boxes
        elif e.key() == QtCore.Qt.Key_E:
            self.select_next_correction()
        elif e.key() == QtCore.Qt.Key_R:
            self.select_previous_correction()
        # 1..4 set the type of the selected correction box
        elif e.key() == QtCore.Qt.Key_1:
            self.modify_correction_type(CorrectionBox.types.TO_CORRECT)
        elif e.key() == QtCore.Qt.Key_2:
            self.modify_correction_type(CorrectionBox.types.TO_REVIEW)
        elif e.key() == QtCore.Qt.Key_3:
            self.modify_correction_type(CorrectionBox.types.RESOLVED)
        elif e.key() == QtCore.Qt.Key_4:
            self.modify_correction_type(CorrectionBox.types.QUESTION)
        # D deletes, M edits the selected correction box (correction mode only)
        elif e.key() == QtCore.Qt.Key_D and self.config.correctionMode:
            self.delete_selected_annotation()
        elif e.key() == QtCore.Qt.Key_M and self.config.correctionMode:
            self.modify_correction_description()
# Key released
def keyReleaseEvent(self, e):
# Ctrl key changes mouse cursor
if e.key() == QtCore.Qt.Key_Control:
QtWidgets.QApplication.restoreOverrideCursor()
# check for zero to release temporary zero
# somehow, for the numpad key in some machines, a check on Insert is needed aswell
elif e.key() == QtCore.Qt.Key_0 or e.key() == QtCore.Qt.Key_Insert:
self.transpTempZero = False
self.update()
#############################
# Little helper methods
#############################
# Helper method that sets tooltip and statustip
# Provide an QAction and the tip text
# This text is appended with a hotkeys and then assigned
def setTip(self, action, tip):
tip += " (Hotkeys: '" + \
"', '".join([str(s.toString()) for s in action.shortcuts()]) + "')"
action.setStatusTip(tip)
action.setToolTip(tip)
    # Set the mouse positions
    # There are the original positions refering to the screen
    # Scaled refering to the image
    # And a zoom version, where the mouse movement is artificially slowed down
    def updateMousePos(self, mousePosOrig):
        """Update self.mousePos/mousePosScaled/mousePosOrig from a screen position.

        In zoom mode the cursor movement is damped around the position where
        the zoom was enabled. The image-space position is clamped to the image
        rectangle; if the rounded position still falls outside, all mouse
        positions are invalidated (set to None) and the method returns early.
        """
        # Movement sensitivity: full speed normally, strongly damped while
        # zoomed in and actively drawing a polygon
        if self.config.zoomFactor <= 1 or (self.drawPolyClosed or self.drawPoly.isEmpty()):
            sens = 1.0
        else:
            sens = 1.0/pow(self.config.zoomFactor, 3)
        if self.config.zoom and self.mousePosOnZoom is not None:
            # Interpolate between the zoom anchor and the real cursor position
            mousePos = QtCore.QPointF(round((1-sens)*self.mousePosOnZoom.x() + (
                sens)*mousePosOrig.x()), round((1-sens)*self.mousePosOnZoom.y() + sens*mousePosOrig.y()))
        else:
            mousePos = mousePosOrig
        # Convert from screen coordinates to image coordinates
        mousePosScaled = QtCore.QPointF(float(mousePos.x(
        ) - self.xoff) / self.scale, float(mousePos.y() - self.yoff) / self.scale)
        mouseOutsideImage = not self.image.rect().contains(mousePosScaled.toPoint())
        # Clamp the scaled position to the image rectangle
        mousePosScaled.setX(max(mousePosScaled.x(), 0.))
        mousePosScaled.setY(max(mousePosScaled.y(), 0.))
        mousePosScaled.setX(min(mousePosScaled.x(), self.image.rect().right()))
        mousePosScaled.setY(
            min(mousePosScaled.y(), self.image.rect().bottom()))
        # If the rounded position is still outside the image, invalidate all
        # mouse positions and refresh
        if not self.image.rect().contains(mousePosScaled.toPoint()):
            self.mousePos = None
            self.mousePosScaled = None
            self.mousePosOrig = None
            self.updateMouseObject()
            self.update()
            return
        self.mousePos = mousePos
        self.mousePosScaled = mousePosScaled
        self.mousePosOrig = mousePosOrig
        self.mouseOutsideImage = mouseOutsideImage
    # Toggle the zoom and update all mouse positions
    def toggleZoom(self, mousePosOrig):
        """Switch zoom mode on/off and keep all mouse positions consistent.

        The order of operations matters: when enabling the zoom, the anchor
        point is remembered before recomputing positions; when disabling,
        positions are recomputed first so a dragged polygon point can be
        mapped back to non-zoom coordinates.
        """
        self.config.zoom = not self.config.zoom
        if self.config.zoom:
            # Remember the anchor around which zoomed movement is damped
            self.mousePosOnZoom = self.mousePos
            # Update the mouse position afterwards
            self.updateMousePos(mousePosOrig)
        else:
            # Update the mouse position first
            self.updateMousePos(mousePosOrig)
            # Update the dragged point to the non-zoom point
            if not self.config.correctionMode and self.draggedPt >= 0:
                self.drawPoly.replace(self.draggedPt, self.mousePosScaled)
    # Get the point/edge index within the given polygon that is close to the given point
    # Returns (-1,-1) if none is close enough
    # Returns (i,i) if the point with index i is closed
    # Returns (i,i+1) if the edge from points i to i+1 is closest
    def getClosestPoint(self, poly, pt):
        """Return the index pair of the polygon vertex or edge nearest to pt.

        (-1, -1): nothing within the snapping threshold.
        (i, i):   vertex i is closest.
        (i, j):   the edge from vertex i to j (j = i+1, wrapping to 0) is
                  closest; edges are only checked for closed polygons.
        """
        closest = (-1, -1)
        # Snapping threshold in image pixels
        distTh = 4.0
        dist = 1e9 # should be enough
        # First pass: find the closest vertex
        for i in range(poly.size()):
            curDist = self.ptDist(poly[i], pt)
            if curDist < dist:
                closest = (i, i)
                dist = curDist
        # Close enough?
        if dist <= distTh:
            return closest
        # Otherwise see if the polygon is closed, but a line is close enough
        if self.drawPolyClosed and poly.size() >= 2:
            for i in range(poly.size()):
                pt1 = poly[i]
                # Wrap the last edge back to the first vertex
                j = i+1
                if j == poly.size():
                    j = 0
                pt2 = poly[j]
                edge = QtCore.QLineF(pt1, pt2)
                # Drop a perpendicular from pt onto the edge's line
                normal = edge.normalVector()
                normalThroughMouse = QtCore.QLineF(
                    pt.x(), pt.y(), pt.x()+normal.dx(), pt.y()+normal.dy())
                intersectionPt = QtCore.QPointF()
                intersectionType = edge.intersect(
                    normalThroughMouse, intersectionPt)
                # Only accept feet that fall within the edge segment itself
                if intersectionType == QtCore.QLineF.BoundedIntersection:
                    curDist = self.ptDist(intersectionPt, pt)
                    if curDist < dist:
                        closest = (i, j)
                        dist = curDist
            # Close enough?
            if dist <= distTh:
                return closest
        # If we didnt return yet, we didnt find anything
        return (-1, -1)
# Get distance between two points
def ptDist(self, pt1, pt2):
# A line between both
line = QtCore.QLineF(pt1, pt2)
# Length
lineLength = line.length()
return lineLength
# Determine if the given point closes the drawn polygon (snapping)
def ptClosesPoly(self):
if self.drawPoly.isEmpty():
return False
if self.mousePosScaled is None:
return False
closestPt = self.getClosestPoint(self.drawPoly, self.mousePosScaled)
return closestPt == (0, 0)
# Draw a point using the given QPainter qp
# If its the first point in a polygon its drawn in green
# if not in red
# Also the radius might be increased
def drawPoint(self, qp, pt, isFirst, increaseRadius):
# The first in green
if isFirst:
qp.setBrush(QtGui.QBrush(QtGui.QColor(
0, 255, 0), QtCore.Qt.SolidPattern))
# Other in red
else:
qp.setBrush(QtGui.QBrush(QtGui.QColor(
255, 0, 0), QtCore.Qt.SolidPattern))
# Standard radius
r = 3.0
# Increase maybe
if increaseRadius:
r *= 2.5
# Draw
qp.drawEllipse(pt, r, r)
# Determine if the given candidate for a label path makes sense
def isLabelPathValid(self, labelPath):
return os.path.isdir(labelPath)
# Ask the user to select a label
# If you like, you can give an object ID for a better dialog texting
# Note that giving an object ID assumes that its current label is the default label
# If you dont, the message "Select new label" is used
# Return is (label, ok). 'ok' is false if the user pressed Cancel
def getLabelFromUser(self, defaultLabel="", objID=-1):
# Reset the status bar to this message when leaving
restoreMessage = self.statusBar().currentMessage()
# Update defaultLabel
if not defaultLabel:
defaultLabel = self.defaultLabel
# List of possible labels
items = list(name2label.keys())
items.sort()
default = items.index(defaultLabel)
if default < 0:
self.statusBar().showMessage(
'The selected label is missing in the internal color map.')
return
# Specify title
dlgTitle = "Select label"
message = dlgTitle
question = dlgTitle
if objID >= 0:
message = "Select new label for object {0} with current label {1}".format(
objID, defaultLabel)
question = "Label for object {0}".format(objID)
self.statusBar().showMessage(message)
# Create and wait for dialog
(item, ok) = QtWidgets.QInputDialog.getItem(
self, dlgTitle, question, items, default, False)
# Process the answer a bit
item = str(item)
# Restore message
self.statusBar().showMessage(restoreMessage)
# Return
return (item, ok)
# Add a point to the drawn polygon
def addPtToPoly(self, pt):
self.drawPoly.append(pt)
# Enable actions that need a polygon
for act in self.actPolyOrSelObj:
act.setEnabled(True)
# Clear the drawn polygon
def clearPolygon(self):
# We do not clear, since the drawPoly might be a reference on an object one
self.drawPoly = QtGui.QPolygonF()
self.drawPolyClosed = False
# Disable actions that need a polygon
for act in self.actPolyOrSelObj:
act.setEnabled(bool(self.selObjs))
for act in self.actClosedPoly:
act.setEnabled(False)
# We just closed the polygon and need to deal with this situation
def closePolygon(self):
self.drawPolyClosed = True
for act in self.actClosedPoly:
act.setEnabled(True)
message = "What should I do with the polygon? Press n to create a new object, "
message += "press Ctrl + Shift + Left Click to intersect with another object, "
message += "press Ctrl + Alt + Left Click to merge with another object."
self.statusBar().showMessage(message)
# Intersect the drawn polygon with the mouse object
# and create a new object with same label and so on
def intersectPolygon(self):
# Cannot do anything without labels
if not self.annotation:
return
# Cannot do anything without a single selected object
if self.mouseObj < 0:
return
# The selected object that is modified
obj = self.annotation.objects[self.mouseObj]
# The intersection of the polygons
intersection = self.drawPoly.intersected(self.getPolygon(obj))
if not intersection.isEmpty():
# Ask the user for a label
self.drawPoly = intersection
(label, ok) = self.getLabelFromUser(obj.label)
if ok and label:
# Append and create the new object
self.appendObject(label, intersection)
# Clear the drawn polygon
self.clearPolygon()
# Default message
self.statusBar().showMessage(self.defaultStatusbar)
# Deselect
self.deselectAllObjects()
# Redraw
self.update()
# Merge the drawn polygon with the mouse object
# and create a new object with same label and so on
def mergePolygon(self):
# Cannot do anything without labels
if not self.annotation:
return
# Cannot do anything without a single selected object
if self.mouseObj < 0:
return
# The selected object that is modified
obj = self.annotation.objects[self.mouseObj]
# The union of the polygons
union = self.drawPoly.united(self.getPolygon(obj))
if not union.isEmpty():
# Ask the user for a label
self.drawPoly = union
(label, ok) = self.getLabelFromUser(obj.label)
if ok and label:
# Append and create the new object
self.appendObject(label, union)
# Clear the drawn polygon
self.clearPolygon()
# Default message
self.statusBar().showMessage(self.defaultStatusbar)
# Deselect
self.deselectAllObjects()
# Redraw
self.update()
# Edit an object's polygon or clear the polygon if multiple objects are selected
def initPolygonFromObject(self):
# Cannot do anything without labels
if not self.annotation:
return
# Cannot do anything without any selected object
if not self.selObjs:
return
# If there are multiple objects selected, we clear the polygon
if len(self.selObjs) > 1:
self.clearPolygon()
self.update()
return
# The selected object that is used for init
obj = self.annotation.objects[self.selObjs[-1]]
# Make a reference to the polygon
self.drawPoly = self.getPolygon(obj)
# Make sure its closed
self.drawPolyClosed = True
# Update toolbar icons
# Enable actions that need a polygon
for act in self.actPolyOrSelObj:
act.setEnabled(True)
# Enable actions that need a closed polygon
for act in self.actClosedPoly:
act.setEnabled(True)
# Redraw
self.update()
# Create new object
def appendObject(self, label, polygon):
# Create empty annotation object
# if first object
if not self.annotation:
self.annotation = Annotation()
# Search the highest ID
newID = 0
for obj in self.annotation.objects:
if obj.id >= newID:
newID = obj.id + 1
# New object
# Insert the object in the labels list
obj = CsPoly()
obj.label = label
obj.polygon = [Point(p.x(), p.y()) for p in polygon]
obj.id = newID
obj.deleted = 0
obj.verified = 0
obj.user = getpass.getuser()
obj.updateDate()
self.annotation.objects.append(obj)
# Append to changes
self.addChange(
"Created object {0} with label {1}".format(newID, label))
# Clear the drawn polygon
self.deselectAllObjects()
self.clearPolygon()
# select the new object
self.mouseObj = 0
self.selectObject()
# Helper for leaving an image
# Returns true if the image can be left, false if not
# Checks for possible changes and asks the user if they should be saved
# If the user says yes, then they are saved and true is returned
def checkAndSave(self):
# Without changes it's ok to leave the image
if not self.changes:
return True
# Backup of status message
restoreMessage = self.statusBar().currentMessage()
# Create the dialog
dlgTitle = "Save changes?"
self.statusBar().showMessage(dlgTitle)
text = "Do you want to save the following changes?\n"
for c in self.changes:
text += "- " + c + '\n'
buttons = QtWidgets.QMessageBox.Save | QtWidgets.QMessageBox.Discard | QtWidgets.QMessageBox.Cancel
ret = QtWidgets.QMessageBox.question(
self, dlgTitle, text, buttons, QtWidgets.QMessageBox.Save)
proceed = False
# If the user selected yes -> save
if ret == QtWidgets.QMessageBox.Save:
proceed = self.save()
# If the user selected to discard the changes, clear them
elif ret == QtWidgets.QMessageBox.Discard:
self.clearChanges()
proceed = True
# Otherwise prevent leaving the image
else:
proceed = False
self.statusBar().showMessage(restoreMessage)
return proceed
    # Actually save a screenshot
    def doScreenshot(self):
        """Render the current image with labels drawn on top and save it.

        Re-uses the label drawing code by painting into an offscreen QImage,
        optionally blurring license plates first. The target filename comes
        from config.screenshotFilename; '%c' is replaced by the city name
        (as a directory, created on demand) and '%i' by the current image's
        basename without extension.
        """
        # For creating the screenshot we re-use the label drawing function
        # However, we draw in an image using a QPainter
        # Create such an image
        img = QtGui.QImage(self.image)
        # Create a QPainter that can perform draw actions within a widget or image
        qp = QtGui.QPainter()
        # Begin drawing in the image
        qp.begin(img)
        # Remember some settings
        xoff = self.xoff
        yoff = self.yoff
        scale = self.scale
        w = self.w
        h = self.h
        # Update scale: draw 1:1 into the offscreen image
        self.xoff = 0
        self.yoff = 0
        self.scale = 1
        self.w = self.image.width()
        self.h = self.image.height()
        # Detactivate the highlighted object
        self.highlightObjs = []
        # Blur the license plates
        # make this variabel a member and use as option if desired
        blurLicensePlates = True
        if blurLicensePlates:
            self.blurLicensePlates(qp)
        # Draw the labels on top
        ignore = []
        if blurLicensePlates:
            # NOTE(review): blurLicensePlates() searches for 'license plate'
            # while 'numberplate' is ignored here -- confirm both spellings
            # are intended
            ignore.append('numberplate')
        self.drawLabels(qp, ignore)
        # Finish drawing
        qp.end()
        # Reset scale and stuff
        self.xoff = xoff
        self.yoff = yoff
        self.scale = scale
        self.w = w
        self.h = h
        # Generate the real filename for saving
        # NOTE(review): 'file' shadows a builtin name; also, if the filename
        # is empty the final showMessage(message) below would raise a
        # NameError since 'message' is only assigned in the branches
        file = self.config.screenshotFilename
        # Replace occurance of %c with the city name (as directory)
        # Generate the directory if necessary
        cityIdx = file.find('%c')
        if cityIdx >= 0:
            if self.config.cityName:
                dir = os.path.join(file[:cityIdx], self.config.cityName)
                if not os.path.exists(dir):
                    os.makedirs(dir)
                # Only the first '%c' is replaced; any remaining one is an error
                file = file.replace('%c', self.config.cityName + '/', 1)
                if file.find('%c') > 0:
                    message = "Found multiple '%c' in screenshot filename. Not allowed"
                    file = None
            else:
                message = "Do not have a city name. Cannot replace '%c' in screenshot filename."
                file = None
        # Replace occurances of %i with the image filename (without extension)
        if file:
            file = file.replace('%i', os.path.splitext(
                os.path.basename(self.config.currentFile))[0])
        # Add extension .png if no extension given
        if file:
            if not os.path.splitext(file)[1]:
                file += '.png'
        # Save
        if file:
            success = img.save(file)
            if success:
                message = "Saved screenshot to " + file
            else:
                message = "Failed to save screenshot"
        self.statusBar().showMessage(message)
        # Update to reset everything to the correct state
        self.update()
# Blur the license plates
# Argument is a qPainter
# Thus, only use this method for screenshots.
def blurLicensePlates(self, qp):
# license plate name
searchedNames = ['license plate']
# the image
img = self.image
# Draw all objects
for obj in self.annotation.objects:
# Some are flagged to not be drawn. Skip them
if not obj.draw:
continue
# The label of the object
name = obj.label
# If we do not know a color for this label, skip
if name not in name2label:
continue
# If we do not blur this label, skip
if not name in searchedNames:
continue
# Scale the polygon properly
polyToDraw = self.getPolygon(
obj) * QtGui.QTransform.fromScale(self.scale, self.scale)
bb = polyToDraw.boundingRect()
# Get the mean color within the polygon
meanR = 0
meanG = 0
meanB = 0
num = 0
for y in range(max(int(bb.top()), 0), min(int(bb.bottom()+1.5), img.height())):
for x in range(max(int(bb.left()), 0), min(int(bb.right()+1.5), img.width())):
col = img.pixel(x, y)
meanR += QtGui.QColor(col).red()
meanG += QtGui.QColor(col).green()
meanB += QtGui.QColor(col).blue()
num += 1
meanR /= float(num)
meanG /= float(num)
meanB /= float(num)
col = QtGui.QColor(meanR, meanG, meanB)
qp.setPen(col)
brush = QtGui.QBrush(col, QtCore.Qt.SolidPattern)
qp.setBrush(brush)
# Default drawing
qp.drawPolygon(polyToDraw)
# Update the object that is selected by the current mouse curser
def updateMouseObject(self):
self.mouseObj = -1
if self.mousePosScaled is None:
return
if not self.annotation or not self.annotation.objects:
return
for idx in reversed(range(len(self.annotation.objects))):
obj = self.annotation.objects[idx]
if obj.draw and self.getPolygon(obj).containsPoint(self.mousePosScaled, QtCore.Qt.OddEvenFill):
self.mouseObj = idx
break
# Print info about the currently selected object at the status bar
def infoOnSelectedObject(self):
if not self.selObjs:
return
objID = self.selObjs[-1]
if self.annotation and objID >= 0:
obj = self.annotation.objects[objID]
self.statusBar().showMessage(
"Label of object {0}: {1}".format(obj.id, obj.label))
# else:
# self.statusBar().showMessage(self.defaultStatusbar)
# Make the object selected by the mouse the real selected object
def selectObject(self):
# If there is no mouse selection, we are good
if self.mouseObj < 0:
self.deselectObject()
return
# Append the object to selection if it's not in there
if not self.mouseObj in self.selObjs:
self.selObjs.append(self.mouseObj)
# Otherwise remove the object
else:
self.deselectObject()
# update polygon
self.initPolygonFromObject()
# If we have selected objects make the toolbar actions active
if self.selObjs:
for act in self.actSelObj + self.actPolyOrSelObj:
act.setEnabled(True)
# If we have a single selected object make their toolbar actions active
for act in self.singleActSelObj:
act.setEnabled(len(self.selObjs) == 1)
self.infoOnSelectedObject()
    # Deselect object
    def deselectObject(self):
        """Remove one object from the selection and refresh dependent actions.

        If the mouse hovers a selected object, that one is removed; otherwise
        the most recently selected object is dropped.
        """
        # If there is no object to deselect, we are good
        if not self.selObjs:
            return
        # If the mouse does not select and object, remove the last one
        if self.mouseObj < 0:
            del self.selObjs[-1]
        # Otherwise try to find the mouse obj in the list
        if self.mouseObj in self.selObjs:
            self.selObjs.remove(self.mouseObj)
        # No object left?
        if not self.selObjs:
            for act in self.actSelObj:
                act.setEnabled(False)
            # Polygon actions stay enabled while a polygon is still drawn
            for act in self.actPolyOrSelObj:
                act.setEnabled(bool(self.drawPoly))
        # If we have a single selected object make their toolbar actions active
        for act in self.singleActSelObj:
            act.setEnabled(len(self.selObjs) == 1)
        self.infoOnSelectedObject()
# Deselect all objects
def deselectAllObjects(self):
# If there is no object to deselect, we are good
self.selObjs = []
self.mouseObj = -1
for act in self.actSelObj:
act.setEnabled(False)
# If we have a single selected object make their toolbar actions active
for act in self.singleActSelObj:
act.setEnabled(len(self.selObjs) == 1)
self.infoOnSelectedObject()
    # Modify the layer of the selected object
    # Move the layer up (negative offset) or down (postive offset)
    def modifyLayer(self, offset):
        """Move the single selected object by offset within the drawing order.

        Negative offsets move the object up, positive ones down; the target
        index is clamped to the valid range. The move is recorded as a change
        the first time each object is re-layered.
        """
        # Cannot do anything without labels
        if not self.annotation:
            return
        # Cannot do anything without a single selected object
        if len(self.selObjs) != 1:
            return
        # The selected object that is modified
        obj = self.annotation.objects[self.selObjs[-1]]
        # The index in the label list we are right now
        oldidx = self.selObjs[-1]
        # The index we want to move to
        newidx = oldidx + offset
        # Make sure not not exceed zero and the list
        newidx = max(newidx, 0)
        newidx = min(newidx, len(self.annotation.objects)-1)
        # If new and old idx are equal, there is nothing to do
        if oldidx == newidx:
            return
        # Move the entry in the labels list
        self.annotation.objects.insert(
            newidx, self.annotation.objects.pop(oldidx))
        # Update the selected object to the new index
        self.selObjs[-1] = newidx
        self.statusBar().showMessage(
            "Moved object {0} with label {1} to layer {2}".format(obj.id, obj.label, newidx))
        # Check if we moved the object the first time
        if not obj.id in self.changedLayer:
            self.changedLayer.append(obj.id)
            self.addChange(
                "Changed layer for object {0} with label {1}".format(obj.id, obj.label))
# Add a new change
def addChange(self, text):
if not text:
return
self.changes.append(text)
for act in self.actChanges:
act.setEnabled(True)
# Clear list of changes
def clearChanges(self):
self.changes = []
self.changedLayer = []
self.changedPolygon = []
for act in self.actChanges:
act.setEnabled(False)
    # Clear the current labels
    def clearAnnotation(self):
        """Drop the loaded annotation and reset all dependent editing state."""
        self.annotation = None
        self.clearChanges()
        self.deselectAllObjects()
        self.clearPolygon()
        self.config.currentLabelFile = ""
    def clearCorrections(self):
        """Drop the loaded correction markings together with their XML tree."""
        self.correctionXML = None
        self.corrections = []
        # self.clearChanges() #TODO perhaps?
        # self.clearPolygon()
        self.config.currentCorrectionFile = ""
# Get the filename where to load/save labels
# Returns empty string if not possible
# Set the createDirs to true, if you want to create needed directories
def getLabelFilename(self, createDirs=False):
# We need the name of the current city
if not self.config.cityName:
return ""
# And we need to have a directory where labels should be searched
if not self.config.labelPath:
return ""
# Without the name of the current images, there is also nothing we can do
if not self.config.currentFile:
return ""
# Check if the label directory is valid. This folder is selected by the user
# and thus expected to exist
if not self.isLabelPathValid(self.config.labelPath):
return ""
# Dirs are not automatically created in this version of the tool
if not os.path.isdir(self.config.labelPath):
return ""
labelDir = self.config.labelPath
# extension of ground truth files
if self.config.gtType:
ext = self.gtExt.format('_'+self.config.gtType)
else:
ext = self.gtExt.format('')
# Generate the filename of the label file
filename = os.path.basename(self.config.currentFile)
filename = filename.replace(self.imageExt, ext)
filename = os.path.join(labelDir, filename)
filename = os.path.normpath(filename)
return filename
# Get the filename where to load/save labels
# Returns empty string if not possible
# Set the createDirs to true, if you want to create needed directories
def getCorrectionFilename(self, createDirs=False):
# And we need to have a directory where corrections are stored
if not self.config.correctionPath:
return ""
# Without the name of the current images, there is also nothing we can do
if not self.config.currentFile:
return ""
# Folder where to store the labels
correctionDir = self.config.correctionPath
# If the folder does not exist, create it if allowed
if not os.path.isdir(correctionDir):
if createDirs:
os.makedirs(correctionDir)
if not os.path.isdir(correctionDir):
return ""
else:
return ""
# Generate the filename of the label file
filename = os.path.basename(self.config.currentFile)
filename = filename.replace(self.imageExt, '.xml')
filename = os.path.join(correctionDir, filename)
filename = os.path.normpath(filename)
return filename
    # Disable the popup menu on right click
    def createPopupMenu(self):
        """Override QMainWindow.createPopupMenu to suppress the right-click menu."""
        pass
def main():
    """Create the Qt application, instantiate the label tool and run the event loop."""
    app = QtWidgets.QApplication(sys.argv)
    # Keep a reference so the tool window is not garbage collected
    labelTool = CityscapesLabelTool()
    sys.exit(app.exec_())
# Entry point when executed as a script
if __name__ == '__main__':
    main()
|
[
"PyQt5.QtWidgets.QMessageBox.about",
"getpass.getuser",
"PyQt5.QtGui.QColor",
"cityscapesscripts.helpers.labels.name2label.keys",
"json.dumps",
"PyQt5.QtCore.QRectF",
"os.path.isfile",
"PyQt5.QtCore.QLineF",
"PyQt5.QtGui.QBrush",
"xml.etree.ElementTree.SubElement",
"PyQt5.QtWidgets.QApplication",
"os.path.join",
"cityscapesscripts.helpers.annotation.Annotation",
"PyQt5.QtGui.QPainter",
"json.loads",
"PyQt5.QtGui.QTransform.fromScale",
"PyQt5.QtWidgets.QApplication.keyboardModifiers",
"os.path.dirname",
"xml.etree.ElementTree.Element",
"PyQt5.QtWidgets.QApplication.restoreOverrideCursor",
"PyQt5.QtWidgets.QInputDialog.getText",
"os.path.exists",
"os.path.normpath",
"PyQt5.QtWidgets.QMessageBox.question",
"PyQt5.QtWidgets.QMessageBox.information",
"xml.etree.ElementTree.ElementTree",
"xml.etree.ElementTree.parse",
"copy.deepcopy",
"PyQt5.QtWidgets.QDesktopWidget",
"PyQt5.QtCore.QRect",
"os.path.basename",
"PyQt5.QtWidgets.QMainWindow.paintEvent",
"os.path.realpath",
"PyQt5.QtGui.QCursor",
"PyQt5.QtGui.QImage",
"PyQt5.QtGui.QPen",
"PyQt5.QtCore.QPoint",
"cityscapesscripts.helpers.annotation.CsPoly",
"PyQt5.QtGui.QTransform",
"PyQt5.QtWidgets.QInputDialog.getItem",
"sys.exit",
"os.makedirs",
"cityscapesscripts.helpers.labels.assureSingleInstanceName",
"PyQt5.QtWidgets.QMessageBox",
"os.path.isdir",
"PyQt5.QtCore.QTimer.singleShot",
"PyQt5.QtGui.QPolygonF",
"PyQt5.QtGui.QFont",
"PyQt5.QtWidgets.QFileDialog.getSaveFileName",
"PyQt5.QtGui.QTransform.quadToQuad",
"os.path.splitext",
"PyQt5.QtCore.QPointF"
] |
[((110551, 110583), 'PyQt5.QtWidgets.QApplication', 'QtWidgets.QApplication', (['sys.argv'], {}), '(sys.argv)\n', (110573, 110583), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2725, 2749), 'os.path.isfile', 'os.path.isfile', (['filename'], {}), '(filename)\n', (2739, 2749), False, 'import os\n'), ((6745, 6779), 'PyQt5.QtCore.QRectF', 'QtCore.QRectF', (['x', 'y', 'width', 'height'], {}), '(x, y, width, height)\n', (6758, 6779), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((7042, 7075), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['node', '"""correction"""'], {}), "(node, 'correction')\n", (7055, 7075), True, 'import xml.etree.ElementTree as ET\n'), ((7186, 7223), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['correctionNode', '"""type"""'], {}), "(correctionNode, 'type')\n", (7199, 7223), True, 'import xml.etree.ElementTree as ET\n'), ((7346, 7389), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['correctionNode', '"""annotation"""'], {}), "(correctionNode, 'annotation')\n", (7359, 7389), True, 'import xml.etree.ElementTree as ET\n'), ((7519, 7556), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['correctionNode', '"""bbox"""'], {}), "(correctionNode, 'bbox')\n", (7532, 7556), True, 'import xml.etree.ElementTree as ET\n'), ((7632, 7660), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['bboxNode', '"""x"""'], {}), "(bboxNode, 'x')\n", (7645, 7660), True, 'import xml.etree.ElementTree as ET\n'), ((7703, 7731), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['bboxNode', '"""y"""'], {}), "(bboxNode, 'y')\n", (7716, 7731), True, 'import xml.etree.ElementTree as ET\n'), ((7878, 7910), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['bboxNode', '"""width"""'], {}), "(bboxNode, 'width')\n", (7891, 7910), True, 'import xml.etree.ElementTree as ET\n'), ((7953, 7986), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['bboxNode', '"""height"""'], {}), "(bboxNode, 'height')\n", (7966, 7986), 
True, 'import xml.etree.ElementTree as ET\n'), ((8594, 8619), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (8609, 8619), False, 'import os\n'), ((8646, 8697), 'os.path.join', 'os.path.join', (['configDir', '"""cityscapesLabelTool.conf"""'], {}), "(configDir, 'cityscapesLabelTool.conf')\n", (8658, 8697), False, 'import os\n'), ((9728, 9742), 'PyQt5.QtGui.QImage', 'QtGui.QImage', ([], {}), '()\n', (9740, 9742), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((11386, 11403), 'PyQt5.QtGui.QPolygonF', 'QtGui.QPolygonF', ([], {}), '()\n', (11401, 11403), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((31051, 31130), 'PyQt5.QtWidgets.QInputDialog.getItem', 'QtWidgets.QInputDialog.getItem', (['self', 'dlgTitle', '"""Image"""', 'items', 'self.idx', '(False)'], {}), "(self, dlgTitle, 'Image', items, self.idx, False)\n", (31081, 31130), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((43515, 43669), 'PyQt5.QtWidgets.QFileDialog.getSaveFileName', 'QtWidgets.QFileDialog.getSaveFileName', (['self', 'dlgTitle', 'self.config.screenshotFilename', 'filter'], {'options': 'QtWidgets.QFileDialog.DontUseNativeDialog'}), '(self, dlgTitle, self.config.\n screenshotFilename, filter, options=QtWidgets.QFileDialog.\n DontUseNativeDialog)\n', (43552, 43669), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((46541, 46592), 'PyQt5.QtWidgets.QMessageBox.about', 'QtWidgets.QMessageBox.about', (['self', '"""HELP!"""', 'message'], {}), "(self, 'HELP!', message)\n", (46568, 46592), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((47759, 47790), 'os.path.isdir', 'os.path.isdir', (['self.config.city'], {}), '(self.config.city)\n', (47772, 47790), False, 'import os\n'), ((55452, 55468), 'PyQt5.QtGui.QPainter', 'QtGui.QPainter', ([], {}), '()\n', (55466, 55468), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((56170, 56215), 'PyQt5.QtWidgets.QMainWindow.paintEvent', 'QtWidgets.QMainWindow.paintEvent', (['self', 'event'], 
{}), '(self, event)\n', (56202, 56215), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((58845, 58862), 'PyQt5.QtGui.QPolygonF', 'QtGui.QPolygonF', ([], {}), '()\n', (58860, 58862), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((59732, 59802), 'PyQt5.QtGui.QImage', 'QtGui.QImage', (['self.w', 'self.h', 'QtGui.QImage.Format_ARGB32_Premultiplied'], {}), '(self.w, self.h, QtGui.QImage.Format_ARGB32_Premultiplied)\n', (59744, 59802), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((59931, 59964), 'PyQt5.QtGui.QColor', 'QtGui.QColor', (['*defaultLabel.color'], {}), '(*defaultLabel.color)\n', (59943, 59964), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((60069, 60085), 'PyQt5.QtGui.QPainter', 'QtGui.QPainter', ([], {}), '()\n', (60083, 60085), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((62920, 62932), 'PyQt5.QtGui.QPen', 'QtGui.QPen', ([], {}), '()\n', (62930, 62932), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((65189, 65219), 'PyQt5.QtGui.QPolygonF', 'QtGui.QPolygonF', (['self.drawPoly'], {}), '(self.drawPoly)\n', (65204, 65219), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((65749, 65772), 'PyQt5.QtGui.QColor', 'QtGui.QColor', (['(255)', '(0)', '(0)'], {}), '(255, 0, 0)\n', (65761, 65772), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((68537, 68551), 'PyQt5.QtCore.QRect', 'QtCore.QRect', ([], {}), '()\n', (68549, 68551), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((68777, 68823), 'PyQt5.QtGui.QFont', 'QtGui.QFont', (['"""Helvetica"""', '(20)', 'QtGui.QFont.Bold'], {}), "('Helvetica', 20, QtGui.QFont.Bold)\n", (68788, 68823), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((87462, 87485), 'PyQt5.QtCore.QLineF', 'QtCore.QLineF', (['pt1', 'pt2'], {}), '(pt1, pt2)\n', (87475, 87485), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((88715, 88739), 'os.path.isdir', 'os.path.isdir', (['labelPath'], {}), '(labelPath)\n', (88728, 88739), False, 'import os\n'), 
((90077, 90156), 'PyQt5.QtWidgets.QInputDialog.getItem', 'QtWidgets.QInputDialog.getItem', (['self', 'dlgTitle', 'question', 'items', 'default', '(False)'], {}), '(self, dlgTitle, question, items, default, False)\n', (90107, 90156), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((90745, 90762), 'PyQt5.QtGui.QPolygonF', 'QtGui.QPolygonF', ([], {}), '()\n', (90760, 90762), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((95319, 95327), 'cityscapesscripts.helpers.annotation.CsPoly', 'CsPoly', ([], {}), '()\n', (95325, 95327), False, 'from cityscapesscripts.helpers.annotation import Point, Annotation, CsPoly\n'), ((95508, 95525), 'getpass.getuser', 'getpass.getuser', ([], {}), '()\n', (95523, 95525), False, 'import getpass\n'), ((96731, 96825), 'PyQt5.QtWidgets.QMessageBox.question', 'QtWidgets.QMessageBox.question', (['self', 'dlgTitle', 'text', 'buttons', 'QtWidgets.QMessageBox.Save'], {}), '(self, dlgTitle, text, buttons, QtWidgets.\n QMessageBox.Save)\n', (96761, 96825), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((97560, 97584), 'PyQt5.QtGui.QImage', 'QtGui.QImage', (['self.image'], {}), '(self.image)\n', (97572, 97584), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((97681, 97697), 'PyQt5.QtGui.QPainter', 'QtGui.QPainter', ([], {}), '()\n', (97695, 97697), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((109018, 109059), 'os.path.basename', 'os.path.basename', (['self.config.currentFile'], {}), '(self.config.currentFile)\n', (109034, 109059), False, 'import os\n'), ((109135, 109167), 'os.path.join', 'os.path.join', (['labelDir', 'filename'], {}), '(labelDir, filename)\n', (109147, 109167), False, 'import os\n'), ((109187, 109213), 'os.path.normpath', 'os.path.normpath', (['filename'], {}), '(filename)\n', (109203, 109213), False, 'import os\n'), ((110210, 110251), 'os.path.basename', 'os.path.basename', (['self.config.currentFile'], {}), '(self.config.currentFile)\n', (110226, 110251), False, 'import os\n'), 
((110330, 110367), 'os.path.join', 'os.path.join', (['correctionDir', 'filename'], {}), '(correctionDir, filename)\n', (110342, 110367), False, 'import os\n'), ((110387, 110413), 'os.path.normpath', 'os.path.normpath', (['filename'], {}), '(filename)\n', (110403, 110413), False, 'import os\n'), ((3223, 3257), 'os.path.normpath', 'os.path.normpath', (['self.currentFile'], {}), '(self.currentFile)\n', (3239, 3257), False, 'import os\n'), ((3328, 3367), 'os.path.normpath', 'os.path.normpath', (['self.currentLabelFile'], {}), '(self.currentLabelFile)\n', (3344, 3367), False, 'import os\n'), ((3448, 3492), 'os.path.normpath', 'os.path.normpath', (['self.currentCorrectionFile'], {}), '(self.currentCorrectionFile)\n', (3464, 3492), False, 'import os\n'), ((3560, 3589), 'os.path.normpath', 'os.path.normpath', (['self.csPath'], {}), '(self.csPath)\n', (3576, 3589), False, 'import os\n'), ((3716, 3743), 'os.path.normpath', 'os.path.normpath', (['self.city'], {}), '(self.city)\n', (3732, 3743), False, 'import os\n'), ((3876, 3908), 'os.path.normpath', 'os.path.normpath', (['self.labelPath'], {}), '(self.labelPath)\n', (3892, 3908), False, 'import os\n'), ((3976, 4013), 'os.path.normpath', 'os.path.normpath', (['self.correctionPath'], {}), '(self.correctionPath)\n', (3992, 4013), False, 'import os\n'), ((5512, 5535), 'PyQt5.QtGui.QColor', 'QtGui.QColor', (['(255)', '(0)', '(0)'], {}), '(255, 0, 0)\n', (5524, 5535), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((13789, 13814), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (13804, 13814), False, 'import os\n'), ((26189, 26208), 'os.path.basename', 'os.path.basename', (['a'], {}), '(a)\n', (26205, 26208), False, 'import os\n'), ((27308, 27387), 'PyQt5.QtWidgets.QInputDialog.getItem', 'QtWidgets.QInputDialog.getItem', (['self', 'dlgTitle', 'question', 'items', 'default', '(False)'], {}), '(self, dlgTitle, question, items, default, False)\n', (27338, 27387), False, 'from PyQt5 import QtCore, 
QtGui, QtWidgets\n'), ((28652, 28741), 'PyQt5.QtWidgets.QMessageBox.information', 'QtWidgets.QMessageBox.information', (['self', '"""ERROR!"""', 'warning', 'QtWidgets.QMessageBox.Ok'], {}), "(self, 'ERROR!', warning, QtWidgets.\n QMessageBox.Ok)\n", (28685, 28741), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((29687, 29730), 'PyQt5.QtCore.QTimer.singleShot', 'QtCore.QTimer.singleShot', (['(0)', 'self.nextImage'], {}), '(0, self.nextImage)\n', (29711, 29730), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((29902, 29945), 'PyQt5.QtCore.QTimer.singleShot', 'QtCore.QTimer.singleShot', (['(0)', 'self.nextImage'], {}), '(0, self.nextImage)\n', (29926, 29945), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((38640, 38732), 'PyQt5.QtWidgets.QMessageBox.question', 'QtWidgets.QMessageBox.question', (['self', 'dlgTitle', 'text', 'buttons', 'QtWidgets.QMessageBox.Ok'], {}), '(self, dlgTitle, text, buttons, QtWidgets.\n QMessageBox.Ok)\n', (38670, 38732), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((48400, 48426), 'os.path.normpath', 'os.path.normpath', (['filename'], {}), '(filename)\n', (48416, 48426), False, 'import os\n'), ((49858, 49870), 'cityscapesscripts.helpers.annotation.Annotation', 'Annotation', ([], {}), '()\n', (49868, 49870), False, 'from cityscapesscripts.helpers.annotation import Point, Annotation, CsPoly\n'), ((51139, 51163), 'os.path.isfile', 'os.path.isfile', (['filename'], {}), '(filename)\n', (51153, 51163), False, 'import os\n'), ((51271, 51289), 'xml.etree.ElementTree.parse', 'ET.parse', (['filename'], {}), '(filename)\n', (51279, 51289), True, 'import xml.etree.ElementTree as ET\n'), ((53632, 53812), 'PyQt5.QtWidgets.QInputDialog.getText', 'QtWidgets.QInputDialog.getText', (['self', '"""Modify Error Description"""', '"""Please describe the labeling error briefly."""'], {'text': 'self.corrections[self.selected_correction].annotation'}), "(self, 'Modify Error Description',\n 'Please describe the labeling error 
briefly.', text=self.corrections[\n self.selected_correction].annotation)\n", (53662, 53812), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((58628, 58678), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['self.xoff', 'self.yoff', 'self.w', 'self.h'], {}), '(self.xoff, self.yoff, self.w, self.h)\n', (58640, 58678), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((58914, 58940), 'PyQt5.QtCore.QPointF', 'QtCore.QPointF', (['pt.x', 'pt.y'], {}), '(pt.x, pt.y)\n', (58928, 58940), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((60169, 60190), 'PyQt5.QtGui.QColor', 'QtGui.QColor', (['"""white"""'], {}), "('white')\n", (60181, 60190), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((60433, 60468), 'cityscapesscripts.helpers.labels.assureSingleInstanceName', 'assureSingleInstanceName', (['obj.label'], {}), '(obj.label)\n', (60457, 60468), False, 'from cityscapesscripts.helpers.labels import name2label, assureSingleInstanceName\n'), ((61256, 61293), 'PyQt5.QtGui.QColor', 'QtGui.QColor', (['*name2label[name].color'], {}), '(*name2label[name].color)\n', (61268, 61293), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((61314, 61355), 'PyQt5.QtGui.QBrush', 'QtGui.QBrush', (['col', 'QtCore.Qt.SolidPattern'], {}), '(col, QtCore.Qt.SolidPattern)\n', (61326, 61355), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((62129, 62160), 'PyQt5.QtGui.QBrush', 'QtGui.QBrush', (['QtCore.Qt.NoBrush'], {}), '(QtCore.Qt.NoBrush)\n', (62141, 62160), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((62813, 62844), 'PyQt5.QtGui.QBrush', 'QtGui.QBrush', (['QtCore.Qt.NoBrush'], {}), '(QtCore.Qt.NoBrush)\n', (62825, 62844), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((62865, 62899), 'PyQt5.QtGui.QFont', 'QtGui.QFont', (['"""QFont::AnyStyle"""', '(14)'], {}), "('QFont::AnyStyle', 14)\n", (62876, 62899), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((63016, 63037), 'copy.deepcopy', 'copy.deepcopy', (['c.bbox'], {}), 
'(c.bbox)\n', (63029, 63037), False, 'import copy\n'), ((64159, 64195), 'copy.deepcopy', 'copy.deepcopy', (['self.in_progress_bbox'], {}), '(self.in_progress_bbox)\n', (64172, 64195), False, 'import copy\n'), ((65662, 65693), 'PyQt5.QtGui.QBrush', 'QtGui.QBrush', (['QtCore.Qt.NoBrush'], {}), '(QtCore.Qt.NoBrush)\n', (65674, 65693), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((66283, 66304), 'PyQt5.QtGui.QPen', 'QtGui.QPen', (['polyColor'], {}), '(polyColor)\n', (66293, 66304), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((68713, 68734), 'PyQt5.QtGui.QColor', 'QtGui.QColor', (['"""white"""'], {}), "('white')\n", (68725, 68734), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((70318, 70336), 'PyQt5.QtGui.QTransform', 'QtGui.QTransform', ([], {}), '()\n', (70334, 70336), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((70360, 70377), 'PyQt5.QtGui.QPolygonF', 'QtGui.QPolygonF', ([], {}), '()\n', (70375, 70377), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((70579, 70596), 'PyQt5.QtGui.QPolygonF', 'QtGui.QPolygonF', ([], {}), '()\n', (70594, 70596), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((70788, 70844), 'PyQt5.QtGui.QTransform.quadToQuad', 'QtGui.QTransform.quadToQuad', (['quadFrom', 'quadTo', 'transform'], {}), '(quadFrom, quadTo, transform)\n', (70815, 70844), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((73455, 73497), 'PyQt5.QtWidgets.QApplication.keyboardModifiers', 'QtWidgets.QApplication.keyboardModifiers', ([], {}), '()\n', (73495, 73497), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((82480, 82526), 'PyQt5.QtWidgets.QApplication.restoreOverrideCursor', 'QtWidgets.QApplication.restoreOverrideCursor', ([], {}), '()\n', (82524, 82526), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((89406, 89423), 'cityscapesscripts.helpers.labels.name2label.keys', 'name2label.keys', ([], {}), '()\n', (89421, 89423), False, 'from cityscapesscripts.helpers.labels import 
name2label, assureSingleInstanceName\n'), ((95061, 95073), 'cityscapesscripts.helpers.annotation.Annotation', 'Annotation', ([], {}), '()\n', (95071, 95073), False, 'from cityscapesscripts.helpers.annotation import Point, Annotation, CsPoly\n'), ((101876, 101909), 'PyQt5.QtGui.QColor', 'QtGui.QColor', (['meanR', 'meanG', 'meanB'], {}), '(meanR, meanG, meanB)\n', (101888, 101909), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((101957, 101998), 'PyQt5.QtGui.QBrush', 'QtGui.QBrush', (['col', 'QtCore.Qt.SolidPattern'], {}), '(col, QtCore.Qt.SolidPattern)\n', (101969, 101998), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((108659, 108695), 'os.path.isdir', 'os.path.isdir', (['self.config.labelPath'], {}), '(self.config.labelPath)\n', (108672, 108695), False, 'import os\n'), ((109913, 109941), 'os.path.isdir', 'os.path.isdir', (['correctionDir'], {}), '(correctionDir)\n', (109926, 109941), False, 'import os\n'), ((2857, 2877), 'json.loads', 'json.loads', (['jsonText'], {}), '(jsonText)\n', (2867, 2877), False, 'import json\n'), ((3609, 3635), 'os.path.isdir', 'os.path.isdir', (['self.csPath'], {}), '(self.csPath)\n', (3622, 3635), False, 'import os\n'), ((3763, 3787), 'os.path.isdir', 'os.path.isdir', (['self.city'], {}), '(self.city)\n', (3776, 3787), False, 'import os\n'), ((4066, 4093), 'os.path.basename', 'os.path.basename', (['self.city'], {}), '(self.city)\n', (4082, 4093), False, 'import os\n'), ((4110, 4142), 'os.path.isfile', 'os.path.isfile', (['self.currentFile'], {}), '(self.currentFile)\n', (4124, 4142), False, 'import os\n'), ((4146, 4179), 'os.path.dirname', 'os.path.dirname', (['self.currentFile'], {}), '(self.currentFile)\n', (4161, 4179), False, 'import os\n'), ((4244, 4281), 'os.path.isfile', 'os.path.isfile', (['self.currentLabelFile'], {}), '(self.currentLabelFile)\n', (4258, 4281), False, 'import os\n'), ((4377, 4415), 'os.path.dirname', 'os.path.dirname', (['self.currentLabelFile'], {}), '(self.currentLabelFile)\n', (4392, 
4415), False, 'import os\n'), ((4419, 4462), 'os.path.join', 'os.path.join', (['self.labelPath', 'self.cityName'], {}), '(self.labelPath, self.cityName)\n', (4431, 4462), False, 'import os\n'), ((4519, 4561), 'os.path.isfile', 'os.path.isfile', (['self.currentCorrectionFile'], {}), '(self.currentCorrectionFile)\n', (4533, 4561), False, 'import os\n'), ((4662, 4705), 'os.path.dirname', 'os.path.dirname', (['self.currentCorrectionFile'], {}), '(self.currentCorrectionFile)\n', (4677, 4705), False, 'import os\n'), ((4709, 4757), 'os.path.join', 'os.path.join', (['self.correctionPath', 'self.cityName'], {}), '(self.correctionPath, self.cityName)\n', (4721, 4757), False, 'import os\n'), ((4938, 5023), 'json.dumps', 'json.dumps', (['self.__dict__'], {'default': '(lambda o: o.__dict__)', 'sort_keys': '(True)', 'indent': '(4)'}), '(self.__dict__, default=lambda o: o.__dict__, sort_keys=True,\n indent=4)\n', (4948, 5023), False, 'import json\n'), ((5612, 5637), 'PyQt5.QtGui.QColor', 'QtGui.QColor', (['(255)', '(255)', '(0)'], {}), '(255, 255, 0)\n', (5624, 5637), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((13919, 13952), 'os.path.join', 'os.path.join', (['iconDir', '"""open.png"""'], {}), "(iconDir, 'open.png')\n", (13931, 13952), False, 'import os\n'), ((14247, 14280), 'os.path.join', 'os.path.join', (['iconDir', '"""back.png"""'], {}), "(iconDir, 'back.png')\n", (14259, 14280), False, 'import os\n'), ((14624, 14657), 'os.path.join', 'os.path.join', (['iconDir', '"""next.png"""'], {}), "(iconDir, 'next.png')\n", (14636, 14657), False, 'import os\n'), ((14986, 15019), 'os.path.join', 'os.path.join', (['iconDir', '"""play.png"""'], {}), "(iconDir, 'play.png')\n", (14998, 15019), False, 'import os\n'), ((15474, 15510), 'os.path.join', 'os.path.join', (['iconDir', '"""shuffle.png"""'], {}), "(iconDir, 'shuffle.png')\n", (15486, 15510), False, 'import os\n'), ((15870, 15903), 'os.path.join', 'os.path.join', (['iconDir', '"""save.png"""'], {}), "(iconDir, 
'save.png')\n", (15882, 15903), False, 'import os\n'), ((16254, 16295), 'os.path.join', 'os.path.join', (['iconDir', '"""clearpolygon.png"""'], {}), "(iconDir, 'clearpolygon.png')\n", (16266, 16295), False, 'import os\n'), ((16696, 16734), 'os.path.join', 'os.path.join', (['iconDir', '"""newobject.png"""'], {}), "(iconDir, 'newobject.png')\n", (16708, 16734), False, 'import os\n'), ((17110, 17151), 'os.path.join', 'os.path.join', (['iconDir', '"""deleteobject.png"""'], {}), "(iconDir, 'deleteobject.png')\n", (17122, 17151), False, 'import os\n'), ((17583, 17616), 'os.path.join', 'os.path.join', (['iconDir', '"""undo.png"""'], {}), "(iconDir, 'undo.png')\n", (17595, 17616), False, 'import os\n'), ((17979, 18014), 'os.path.join', 'os.path.join', (['iconDir', '"""modify.png"""'], {}), "(iconDir, 'modify.png')\n", (17991, 18014), False, 'import os\n'), ((18380, 18416), 'os.path.join', 'os.path.join', (['iconDir', '"""layerup.png"""'], {}), "(iconDir, 'layerup.png')\n", (18392, 18416), False, 'import os\n'), ((18804, 18842), 'os.path.join', 'os.path.join', (['iconDir', '"""layerdown.png"""'], {}), "(iconDir, 'layerdown.png')\n", (18816, 18842), False, 'import os\n'), ((19242, 19275), 'os.path.join', 'os.path.join', (['iconDir', '"""zoom.png"""'], {}), "(iconDir, 'zoom.png')\n", (19254, 19275), False, 'import os\n'), ((19737, 19775), 'os.path.join', 'os.path.join', (['iconDir', '"""highlight.png"""'], {}), "(iconDir, 'highlight.png')\n", (19749, 19775), False, 'import os\n'), ((20307, 20341), 'os.path.join', 'os.path.join', (['iconDir', '"""minus.png"""'], {}), "(iconDir, 'minus.png')\n", (20319, 20341), False, 'import os\n'), ((20646, 20679), 'os.path.join', 'os.path.join', (['iconDir', '"""plus.png"""'], {}), "(iconDir, 'plus.png')\n", (20658, 20679), False, 'import os\n'), ((20981, 21020), 'os.path.join', 'os.path.join', (['iconDir', '"""screenshot.png"""'], {}), "(iconDir, 'screenshot.png')\n", (20993, 21020), False, 'import os\n'), ((21422, 21467), 'os.path.join', 
'os.path.join', (['iconDir', '"""screenshotToggle.png"""'], {}), "(iconDir, 'screenshotToggle.png')\n", (21434, 21467), False, 'import os\n'), ((22053, 22090), 'os.path.join', 'os.path.join', (['iconDir', '"""filepath.png"""'], {}), "(iconDir, 'filepath.png')\n", (22065, 22090), False, 'import os\n'), ((22457, 22494), 'os.path.join', 'os.path.join', (['iconDir', '"""checked6.png"""'], {}), "(iconDir, 'checked6.png')\n", (22469, 22494), False, 'import os\n'), ((23105, 23140), 'os.path.join', 'os.path.join', (['iconDir', '"""help19.png"""'], {}), "(iconDir, 'help19.png')\n", (23117, 23140), False, 'import os\n'), ((23437, 23470), 'os.path.join', 'os.path.join', (['iconDir', '"""exit.png"""'], {}), "(iconDir, 'exit.png')\n", (23449, 23470), False, 'import os\n'), ((24027, 24053), 'PyQt5.QtWidgets.QDesktopWidget', 'QtWidgets.QDesktopWidget', ([], {}), '()\n', (24051, 24053), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((25801, 25822), 'os.path.isdir', 'os.path.isdir', (['csPath'], {}), '(csPath)\n', (25814, 25822), False, 'import os\n'), ((26136, 26163), 'os.path.join', 'os.path.join', (['csPath', '"""gt*"""'], {}), "(csPath, 'gt*')\n", (26148, 26163), False, 'import os\n'), ((28820, 28830), 'sys.exit', 'sys.exit', ([], {}), '()\n', (28828, 28830), False, 'import sys\n'), ((30148, 30176), 'os.path.dirname', 'os.path.dirname', (['sys.argv[0]'], {}), '(sys.argv[0])\n', (30163, 30176), False, 'import os\n'), ((30395, 30423), 'os.path.dirname', 'os.path.dirname', (['sys.argv[0]'], {}), '(sys.argv[0])\n', (30410, 30423), False, 'import os\n'), ((30952, 30971), 'os.path.basename', 'os.path.basename', (['i'], {}), '(i)\n', (30968, 30971), False, 'import os\n'), ((47828, 47879), 'os.path.join', 'os.path.join', (['self.config.city', "('*' + self.imageExt)"], {}), "(self.config.city, '*' + self.imageExt)\n", (47840, 47879), False, 'import os\n'), ((48585, 48607), 'PyQt5.QtGui.QImage', 'QtGui.QImage', (['filename'], {}), '(filename)\n', (48597, 48607), False, 'from 
PyQt5 import QtCore, QtGui, QtWidgets\n'), ((49493, 49517), 'os.path.isfile', 'os.path.isfile', (['filename'], {}), '(filename)\n', (49507, 49517), False, 'import os\n'), ((61106, 61156), 'PyQt5.QtGui.QTransform.fromScale', 'QtGui.QTransform.fromScale', (['self.scale', 'self.scale'], {}), '(self.scale, self.scale)\n', (61132, 61156), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((61897, 61942), 'PyQt5.QtGui.QBrush', 'QtGui.QBrush', (['col', 'QtCore.Qt.DiagCrossPattern'], {}), '(col, QtCore.Qt.DiagCrossPattern)\n', (61909, 61942), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((62301, 62351), 'PyQt5.QtGui.QTransform.fromScale', 'QtGui.QTransform.fromScale', (['self.scale', 'self.scale'], {}), '(self.scale, self.scale)\n', (62327, 62351), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((64538, 64561), 'PyQt5.QtGui.QColor', 'QtGui.QColor', (['(255)', '(0)', '(0)'], {}), '(255, 0, 0)\n', (64550, 64561), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((65457, 65507), 'PyQt5.QtGui.QTransform.fromScale', 'QtGui.QTransform.fromScale', (['self.scale', 'self.scale'], {}), '(self.scale, self.scale)\n', (65483, 65507), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((72804, 72837), 'PyQt5.QtCore.QRectF', 'QtCore.QRectF', (['xy[0]', 'xy[1]', 'w', 'h'], {}), '(xy[0], xy[1], w, h)\n', (72817, 72837), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((81075, 81118), 'PyQt5.QtGui.QCursor', 'QtGui.QCursor', (['QtCore.Qt.PointingHandCursor'], {}), '(QtCore.Qt.PointingHandCursor)\n', (81088, 81118), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((86576, 86599), 'PyQt5.QtCore.QLineF', 'QtCore.QLineF', (['pt1', 'pt2'], {}), '(pt1, pt2)\n', (86589, 86599), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((86806, 86822), 'PyQt5.QtCore.QPointF', 'QtCore.QPointF', ([], {}), '()\n', (86820, 86822), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((99028, 99078), 'os.path.join', 'os.path.join', 
(['file[:cityIdx]', 'self.config.cityName'], {}), '(file[:cityIdx], self.config.cityName)\n', (99040, 99078), False, 'import os\n'), ((101109, 101159), 'PyQt5.QtGui.QTransform.fromScale', 'QtGui.QTransform.fromScale', (['self.scale', 'self.scale'], {}), '(self.scale, self.scale)\n', (101135, 101159), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((109986, 110012), 'os.makedirs', 'os.makedirs', (['correctionDir'], {}), '(correctionDir)\n', (109997, 110012), False, 'import os\n'), ((4316, 4359), 'os.path.join', 'os.path.join', (['self.labelPath', 'self.cityName'], {}), '(self.labelPath, self.cityName)\n', (4328, 4359), False, 'import os\n'), ((4596, 4644), 'os.path.join', 'os.path.join', (['self.correctionPath', 'self.cityName'], {}), '(self.correctionPath, self.cityName)\n', (4608, 4644), False, 'import os\n'), ((5713, 5736), 'PyQt5.QtGui.QColor', 'QtGui.QColor', (['(0)', '(255)', '(0)'], {}), '(0, 255, 0)\n', (5725, 5736), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((22776, 22817), 'os.path.join', 'os.path.join', (['iconDir', '"""checked6_red.png"""'], {}), "(iconDir, 'checked6_red.png')\n", (22788, 22817), False, 'import os\n'), ((26387, 26423), 'os.path.join', 'os.path.join', (['csPath', 'gt', 'split', '"""*"""'], {}), "(csPath, gt, split, '*')\n", (26399, 26423), False, 'import os\n'), ((27666, 27714), 'os.path.join', 'os.path.join', (['csPath', '"""leftImg8bit"""', 'split', 'city'], {}), "(csPath, 'leftImg8bit', split, city)\n", (27678, 27714), False, 'import os\n'), ((27839, 27876), 'os.path.join', 'os.path.join', (['csPath', 'gt', 'split', 'city'], {}), '(csPath, gt, split, city)\n', (27851, 27876), False, 'import os\n'), ((27961, 28015), 'os.path.join', 'os.path.join', (['csPath', "(gt + '_corrections')", 'split', 'city'], {}), "(csPath, gt + '_corrections', split, city)\n", (27973, 28015), False, 'import os\n'), ((30255, 30296), 'os.path.join', 'os.path.join', (['iconDir', '"""checked6_red.png"""'], {}), "(iconDir, 
'checked6_red.png')\n", (30267, 30296), False, 'import os\n'), ((30502, 30539), 'os.path.join', 'os.path.join', (['iconDir', '"""checked6.png"""'], {}), "(iconDir, 'checked6.png')\n", (30514, 30539), False, 'import os\n'), ((34904, 34928), 'xml.etree.ElementTree.Element', 'ET.Element', (['"""correction"""'], {}), "('correction')\n", (34914, 34928), True, 'import xml.etree.ElementTree as ET\n'), ((35108, 35139), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['root', '"""filename"""'], {}), "(root, 'filename')\n", (35121, 35139), True, 'import xml.etree.ElementTree as ET\n'), ((35180, 35221), 'os.path.basename', 'os.path.basename', (['self.config.currentFile'], {}), '(self.config.currentFile)\n', (35196, 35221), False, 'import os\n'), ((35527, 35556), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['root', '"""folder"""'], {}), "(root, 'folder')\n", (35540, 35556), True, 'import xml.etree.ElementTree as ET\n'), ((35858, 35887), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['root', '"""source"""'], {}), "(root, 'source')\n", (35871, 35887), True, 'import xml.etree.ElementTree as ET\n'), ((36012, 36052), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['sourceNode', '"""sourceImage"""'], {}), "(sourceNode, 'sourceImage')\n", (36025, 36052), True, 'import xml.etree.ElementTree as ET\n'), ((36202, 36247), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['sourceNode', '"""sourceAnnotation"""'], {}), "(sourceNode, 'sourceAnnotation')\n", (36215, 36247), True, 'import xml.etree.ElementTree as ET\n'), ((36461, 36493), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['root', '"""imagesize"""'], {}), "(root, 'imagesize')\n", (36474, 36493), True, 'import xml.etree.ElementTree as ET\n'), ((36618, 36655), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['imagesizeNode', '"""nrows"""'], {}), "(imagesizeNode, 'nrows')\n", (36631, 36655), True, 'import xml.etree.ElementTree as ET\n'), ((36792, 36829), 
'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['imagesizeNode', '"""ncols"""'], {}), "(imagesizeNode, 'ncols')\n", (36805, 36829), True, 'import xml.etree.ElementTree as ET\n'), ((37176, 37196), 'xml.etree.ElementTree.ElementTree', 'ET.ElementTree', (['root'], {}), '(root)\n', (37190, 37196), True, 'import xml.etree.ElementTree as ET\n'), ((63382, 63403), 'PyQt5.QtGui.QColor', 'QtGui.QColor', (['(0)', '(0)', '(0)'], {}), '(0, 0, 0)\n', (63394, 63403), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((63722, 63771), 'PyQt5.QtCore.QPoint', 'QtCore.QPoint', (['self.xoff', '(self.yoff + self.h + 20)'], {}), '(self.xoff, self.yoff + self.h + 20)\n', (63735, 63771), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((88216, 88239), 'PyQt5.QtGui.QColor', 'QtGui.QColor', (['(0)', '(255)', '(0)'], {}), '(0, 255, 0)\n', (88228, 88239), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((88357, 88380), 'PyQt5.QtGui.QColor', 'QtGui.QColor', (['(255)', '(0)', '(0)'], {}), '(255, 0, 0)\n', (88369, 88380), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((99102, 99121), 'os.path.exists', 'os.path.exists', (['dir'], {}), '(dir)\n', (99116, 99121), False, 'import os\n'), ((99143, 99159), 'os.makedirs', 'os.makedirs', (['dir'], {}), '(dir)\n', (99154, 99159), False, 'import os\n'), ((99839, 99861), 'os.path.splitext', 'os.path.splitext', (['file'], {}), '(file)\n', (99855, 99861), False, 'import os\n'), ((110036, 110064), 'os.path.isdir', 'os.path.isdir', (['correctionDir'], {}), '(correctionDir)\n', (110049, 110064), False, 'import os\n'), ((5812, 5835), 'PyQt5.QtGui.QColor', 'QtGui.QColor', (['(0)', '(0)', '(255)'], {}), '(0, 0, 255)\n', (5824, 5835), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((26026, 26052), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (26042, 26052), False, 'import os\n'), ((32453, 32477), 'os.path.isfile', 'os.path.isfile', (['filename'], {}), '(filename)\n', (32467, 32477), False, 
'import os\n'), ((32544, 32571), 'PyQt5.QtWidgets.QMessageBox', 'QtWidgets.QMessageBox', (['self'], {}), '(self)\n', (32565, 32571), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((78981, 79089), 'PyQt5.QtWidgets.QInputDialog.getText', 'QtWidgets.QInputDialog.getText', (['self', '"""Error Description"""', '"""Please describe the labeling error briefly."""'], {}), "(self, 'Error Description',\n 'Please describe the labeling error briefly.')\n", (79011, 79089), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((99705, 99746), 'os.path.basename', 'os.path.basename', (['self.config.currentFile'], {}), '(self.config.currentFile)\n', (99721, 99746), False, 'import os\n'), ((26528, 26547), 'os.path.basename', 'os.path.basename', (['c'], {}), '(c)\n', (26544, 26547), False, 'import os\n'), ((26568, 26584), 'os.path.isdir', 'os.path.isdir', (['c'], {}), '(c)\n', (26581, 26584), False, 'import os\n'), ((101600, 101617), 'PyQt5.QtGui.QColor', 'QtGui.QColor', (['col'], {}), '(col)\n', (101612, 101617), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((101653, 101670), 'PyQt5.QtGui.QColor', 'QtGui.QColor', (['col'], {}), '(col)\n', (101665, 101670), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((101708, 101725), 'PyQt5.QtGui.QColor', 'QtGui.QColor', (['col'], {}), '(col)\n', (101720, 101725), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n')]
|
import csv
import json
import datetime
def processCSV(csv_file):
reader = list(csv.reader(csv_file))
result_list = []
header = reader[0]
for row in reader[1:]:
row_object = {}
for num,col in enumerate(row):
row_object[header[num]] = col
result_list.append(row_object)
data = {"data" : result_list}
today = datetime.datetime.today().strftime("%m-%d-%Y")
with open(f"results-{today}.json","w") as file:
result = json.dumps(data, indent=3, sort_keys=False)
file.write(result)
if __name__=="__main__":
file = input("Enter path of CSV file: ")
try:
with open(file,"r") as f:
processCSV(f)
except FileNotFoundError:
print("*" * 20)
print(f"FileNotFoundError: The file on path {file} was not found, change your current directory or check the file name")
print("*" * 20)
|
[
"csv.reader",
"datetime.datetime.today",
"json.dumps"
] |
[((84, 104), 'csv.reader', 'csv.reader', (['csv_file'], {}), '(csv_file)\n', (94, 104), False, 'import csv\n'), ((486, 529), 'json.dumps', 'json.dumps', (['data'], {'indent': '(3)', 'sort_keys': '(False)'}), '(data, indent=3, sort_keys=False)\n', (496, 529), False, 'import json\n'), ((369, 394), 'datetime.datetime.today', 'datetime.datetime.today', ([], {}), '()\n', (392, 394), False, 'import datetime\n')]
|
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for the ParallelInterleaveDataset serialization."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.data.experimental.kernel_tests.serialization import dataset_serialization_test_base
from tensorflow.python.data.experimental.ops import interleave_ops
from tensorflow.python.data.ops import dataset_ops
from tensorflow.python.framework import sparse_tensor
from tensorflow.python.ops import sparse_ops
from tensorflow.python.platform import test
class ParallelInterleaveDatasetSerializationTest(
dataset_serialization_test_base.DatasetSerializationTestBase):
def setUp(self):
self.input_values = np.array([4, 5, 6], dtype=np.int64)
self.num_repeats = 2
self.num_outputs = np.sum(self.input_values) * 2
def _build_ds(self, cycle_length, block_length, sloppy=False):
return (dataset_ops.Dataset.from_tensor_slices(
self.input_values).repeat(self.num_repeats).apply(
interleave_ops.parallel_interleave(
lambda x: dataset_ops.Dataset.range(10 * x, 11 * x),
cycle_length, block_length, sloppy)))
def testSerializationCore(self):
# cycle_length > 1, block_length > 1
cycle_length = 2
block_length = 3
self.run_core_tests(lambda: self._build_ds(cycle_length, block_length),
self.num_outputs)
# cycle_length = 1
cycle_length = 1
block_length = 3
self.run_core_tests(lambda: self._build_ds(cycle_length, block_length),
self.num_outputs)
# block_length = 1
cycle_length = 2
block_length = 1
self.run_core_tests(lambda: self._build_ds(cycle_length, block_length),
self.num_outputs)
def testSerializationWithSloppy(self):
break_points = self.gen_break_points(self.num_outputs, 10)
expected_outputs = np.repeat(
np.concatenate([np.arange(10 * x, 11 * x) for x in self.input_values]),
self.num_repeats).tolist()
def run_test(cycle_length, block_length):
actual = self.gen_outputs(
lambda: self._build_ds(cycle_length, block_length, True),
break_points, self.num_outputs)
self.assertSequenceEqual(sorted(actual), expected_outputs)
# cycle_length > 1, block_length > 1
run_test(2, 3)
# cycle_length = 1
run_test(1, 3)
# block_length = 1
run_test(2, 1)
def testSparseCore(self):
def _map_fn(i):
return sparse_tensor.SparseTensorValue(
indices=[[0, 0], [1, 1]], values=(i * [1, -1]), dense_shape=[2, 2])
def _interleave_fn(x):
return dataset_ops.Dataset.from_tensor_slices(
sparse_ops.sparse_to_dense(x.indices, x.dense_shape, x.values))
def _build_dataset():
return dataset_ops.Dataset.range(10).map(_map_fn).apply(
interleave_ops.parallel_interleave(_interleave_fn, 1))
self.run_core_tests(_build_dataset, 20)
if __name__ == '__main__':
test.main()
|
[
"tensorflow.python.platform.test.main",
"tensorflow.python.ops.sparse_ops.sparse_to_dense",
"numpy.sum",
"tensorflow.python.data.experimental.ops.interleave_ops.parallel_interleave",
"tensorflow.python.framework.sparse_tensor.SparseTensorValue",
"tensorflow.python.data.ops.dataset_ops.Dataset.range",
"numpy.array",
"tensorflow.python.data.ops.dataset_ops.Dataset.from_tensor_slices",
"numpy.arange"
] |
[((3690, 3701), 'tensorflow.python.platform.test.main', 'test.main', ([], {}), '()\n', (3699, 3701), False, 'from tensorflow.python.platform import test\n'), ((1411, 1446), 'numpy.array', 'np.array', (['[4, 5, 6]'], {'dtype': 'np.int64'}), '([4, 5, 6], dtype=np.int64)\n', (1419, 1446), True, 'import numpy as np\n'), ((1495, 1520), 'numpy.sum', 'np.sum', (['self.input_values'], {}), '(self.input_values)\n', (1501, 1520), True, 'import numpy as np\n'), ((3193, 3295), 'tensorflow.python.framework.sparse_tensor.SparseTensorValue', 'sparse_tensor.SparseTensorValue', ([], {'indices': '[[0, 0], [1, 1]]', 'values': '(i * [1, -1])', 'dense_shape': '[2, 2]'}), '(indices=[[0, 0], [1, 1]], values=i * [1, -1\n ], dense_shape=[2, 2])\n', (3224, 3295), False, 'from tensorflow.python.framework import sparse_tensor\n'), ((3395, 3457), 'tensorflow.python.ops.sparse_ops.sparse_to_dense', 'sparse_ops.sparse_to_dense', (['x.indices', 'x.dense_shape', 'x.values'], {}), '(x.indices, x.dense_shape, x.values)\n', (3421, 3457), False, 'from tensorflow.python.ops import sparse_ops\n'), ((3559, 3612), 'tensorflow.python.data.experimental.ops.interleave_ops.parallel_interleave', 'interleave_ops.parallel_interleave', (['_interleave_fn', '(1)'], {}), '(_interleave_fn, 1)\n', (3593, 3612), False, 'from tensorflow.python.data.experimental.ops import interleave_ops\n'), ((1776, 1817), 'tensorflow.python.data.ops.dataset_ops.Dataset.range', 'dataset_ops.Dataset.range', (['(10 * x)', '(11 * x)'], {}), '(10 * x, 11 * x)\n', (1801, 1817), False, 'from tensorflow.python.data.ops import dataset_ops\n'), ((1603, 1660), 'tensorflow.python.data.ops.dataset_ops.Dataset.from_tensor_slices', 'dataset_ops.Dataset.from_tensor_slices', (['self.input_values'], {}), '(self.input_values)\n', (1641, 1660), False, 'from tensorflow.python.data.ops import dataset_ops\n'), ((2639, 2664), 'numpy.arange', 'np.arange', (['(10 * x)', '(11 * x)'], {}), '(10 * x, 11 * x)\n', (2648, 2664), True, 'import numpy as np\n'), ((3499, 
3528), 'tensorflow.python.data.ops.dataset_ops.Dataset.range', 'dataset_ops.Dataset.range', (['(10)'], {}), '(10)\n', (3524, 3528), False, 'from tensorflow.python.data.ops import dataset_ops\n')]
|
# -*- coding: utf-8 -*-
import collections
import sympy
from abc import ABC, abstractmethod
from qibo import get_device, config
from qibo.config import raise_error
from collections.abc import Iterable
from typing import List, Sequence, Tuple
class Gate:
    """The base class for gate implementation.
    All base gates should inherit this class.
    """
    # Class-level handle to the concrete gates module so abstract methods
    # (e.g. ``decompose``) can construct gate instances without a top-level
    # circular import.
    from qibo.abstractions import gates as module
    def __init__(self):
        """
        Attributes:
            name (str): Name of the gate.
            is_controlled_by (bool): ``True`` if the gate was created using the
                :meth:`qibo.abstractions.abstract_gates.Gate.controlled_by` method,
                otherwise ``False``.
            init_args (list): Arguments used to initialize the gate.
            init_kwargs (dict): Arguments used to initialize the gate.
            target_qubits (tuple): Tuple with ids of target qubits.
            control_qubits (tuple): Tuple with ids of control qubits sorted in
                increasing order.
            nqubits (int): Number of qubits that this gate acts on.
            nstates (int): Size of state vectors that this gate acts on.
            density_matrix (bool): Controls if the gate acts on state vectors or
                density matrices.
        """
        self.name = None
        self.is_controlled_by = False
        # args for creating gate
        self.init_args = []
        self.init_kwargs = {}
        self._target_qubits = tuple()
        self._control_qubits = set()
        self._nqubits = None
        self._nstates = None
        # Once any gate is instantiated, switching backend/precision is
        # disallowed globally.
        config.ALLOW_SWITCHERS = False
        self.is_prepared = False
        self.well_defined = True
        # Keeps track of whether parametrized gates are well-defined
        # (parameter value is known during circuit creation) or if they are
        # measurement dependent so the parameter value is determined during
        # execution
        # Using density matrices or state vectors
        self._density_matrix = False
        self._active_call = "_state_vector_call"
    @property
    def target_qubits(self) -> Tuple[int]:
        """Tuple with ids of target qubits."""
        return self._target_qubits
    @property
    def control_qubits(self) -> Tuple[int]:
        """Tuple with ids of control qubits sorted in increasing order."""
        return tuple(sorted(self._control_qubits))
    @property
    def qubits(self) -> Tuple[int]:
        """Tuple with ids of all qubits (control and target) that the gate acts."""
        return self.control_qubits + self.target_qubits
    def _set_target_qubits(self, qubits: Sequence[int]):
        """Helper method for setting target qubits.

        Raises ``ValueError`` if any qubit id appears more than once.
        """
        self._target_qubits = tuple(qubits)
        if len(self._target_qubits) != len(set(qubits)):
            repeated = self._find_repeated(qubits)
            raise_error(ValueError, "Target qubit {} was given twice for gate {}."
                                    "".format(repeated, self.name))
    def _set_control_qubits(self, qubits: Sequence[int]):
        """Helper method for setting control qubits.

        Raises ``ValueError`` if any qubit id appears more than once.
        """
        self._control_qubits = set(qubits)
        if len(self._control_qubits) != len(qubits):
            repeated = self._find_repeated(qubits)
            raise_error(ValueError, "Control qubit {} was given twice for gate {}."
                                    "".format(repeated, self.name))
    @target_qubits.setter
    def target_qubits(self, qubits: Sequence[int]):
        """Sets target qubits tuple."""
        self._set_target_qubits(qubits)
        self._check_control_target_overlap()
    @control_qubits.setter
    def control_qubits(self, qubits: Sequence[int]):
        """Sets control qubits set."""
        self._set_control_qubits(qubits)
        self._check_control_target_overlap()
    def _set_targets_and_controls(self, target_qubits: Sequence[int],
                                  control_qubits: Sequence[int]):
        """Sets target and control qubits simultaneously.
        This is used for the reduced qubit updates in the distributed circuits
        because using the individual setters may raise errors due to temporary
        overlap of control and target qubits.
        """
        self._set_target_qubits(target_qubits)
        self._set_control_qubits(control_qubits)
        self._check_control_target_overlap()
    @staticmethod
    def _find_repeated(qubits: Sequence[int]) -> int:
        """Finds the first qubit id that is repeated in a sequence of qubit ids.

        Returns ``None`` when no id is repeated.
        """
        temp_set = set()
        for qubit in qubits:
            if qubit in temp_set:
                return qubit
            temp_set.add(qubit)
    def _check_control_target_overlap(self):
        """Checks that there are no qubits that are both target and controls."""
        common = set(self._target_qubits) & self._control_qubits
        if common:
            raise_error(ValueError, "{} qubits are both targets and controls for "
                                    "gate {}.".format(common, self.name))
    @property
    def nqubits(self) -> int:
        """Number of qubits that this gate acts on."""
        if self._nqubits is None:
            raise_error(ValueError, "Accessing number of qubits for gate {} but "
                                    "this is not yet set.".format(self))
        return self._nqubits
    @property
    def nstates(self) -> int:
        """Size of the state vectors that this gate acts on."""
        if self._nstates is None:
            raise_error(ValueError, "Accessing number of qubits for gate {} but "
                                    "this is not yet set.".format(self))
        return self._nstates
    @nqubits.setter
    def nqubits(self, n: int):
        """Sets the total number of qubits that this gate acts on.
        This setter is used by `circuit.add` if the gate is added in a circuit
        or during `__call__` if the gate is called directly on a state.
        The user is not supposed to set `nqubits` by hand.
        """
        if self._nqubits is not None and n != self.nqubits:
            raise_error(ValueError, "Cannot set gate number of qubits to {} "
                                    "because it is already set to {}."
                                    "".format(n, self.nqubits))
        self._nqubits = n
        self._nstates = 2**n
    @property
    def density_matrix(self) -> bool:
        """Controls if the gate acts on state vectors or density matrices."""
        return self._density_matrix
    @density_matrix.setter
    def density_matrix(self, x: bool):
        """Density matrix flag switcher."""
        if self.is_prepared:
            raise_error(RuntimeError,
                        "Density matrix mode cannot be switched after "
                        "preparing the gate for execution.")
        self._density_matrix = x
        # Select which call implementation ``__call__`` dispatches to.
        if x:
            self._active_call = "_density_matrix_call"
        else:
            self._active_call = "_state_vector_call"
    def commutes(self, gate: "Gate") -> bool:
        """Checks if two gates commute.
        Args:
            gate: Gate to check if it commutes with the current gate.
        Returns:
            ``True`` if the gates commute, otherwise ``False``.
        """
        # Special gates are conservatively treated as never commuting.
        if isinstance(gate, SpecialGate):
            return False
        t1 = set(self.target_qubits)
        t2 = set(gate.target_qubits)
        # a: same gate type on the same targets; b: fully disjoint supports.
        a = self.__class__ == gate.__class__ and t1 == t2
        b = not (t1 & set(gate.qubits) or t2 & set(self.qubits))
        return a or b
    def _on_qubits(self, *q) -> "Gate":
        """Helper method for :meth:`qibo.abstractions.circuit.AbstractCircuit.on_qubits`.
        Creates the same gate targeting different qubits.
        Args:
            q (int): Qubit index (or indeces) that the new gate should act on.
                Note that q is interpreted as a map from the original qubit ids
                to the new ones. It is required for `len(q)` to be greater than
                the max qubit id of the original gate.
        Returns:
            A :class:`qibo.abstractions.gates.Gate` object of the original gate
            type targeting the given qubits.
        Example:
            .. testcode::
                from qibo import models, gates
                c = models.Circuit(4)
                # Add some CNOT gates
                c.add(gates.CNOT(2, 3)._on_qubits(0, 1, 2, 3)) # equivalent to gates.CNOT(2, 3)
                c.add(gates.CNOT(2, 3)._on_qubits(1, 2, 3, 0)) # equivalent to gates.CNOT(3, 0)
                c.add(gates.CNOT(2, 3)._on_qubits(2, 0, 1, 3)) # equivalent to gates.CNOT(1, 3)
                c.add(gates.CNOT(2, 3)._on_qubits(0, 3, 2, 1)) # equivalent to gates.CNOT(2, 1)
                print(c.draw())
            .. testoutput::
                q0: ───X─────
                q1: ───|─o─X─
                q2: ─o─|─|─o─
                q3: ─X─o─X───
        """
        if self.is_controlled_by:
            # Remap targets and controls separately, then rebuild the
            # controlled structure on the new gate.
            targets = (q[i] for i in self.target_qubits)
            controls = (q[i] for i in self.control_qubits)
            gate = self.__class__(*targets, **self.init_kwargs)
            gate = gate.controlled_by(*controls)
        else:
            qubits = (q[i] for i in self.qubits)
            gate = self.__class__(*qubits, **self.init_kwargs)
        return gate
    def _dagger(self) -> "Gate":
        """Helper method for :meth:`qibo.abstractions.gates.Gate.dagger`."""
        # By default the ``_dagger`` method creates an equivalent gate, assuming
        # that the gate is Hermitian (true for common gates like H or Paulis).
        # If the gate is not Hermitian the ``_dagger`` method should be modified.
        return self.__class__(*self.init_args, **self.init_kwargs)
    def dagger(self) -> "Gate":
        """Returns the dagger (conjugate transpose) of the gate.
        Returns:
            A :class:`qibo.abstractions.gates.Gate` object representing the dagger of
            the original gate.
        """
        # Controls are preserved: (controlled U)^dagger = controlled (U^dagger).
        new_gate = self._dagger()
        new_gate.is_controlled_by = self.is_controlled_by
        new_gate.control_qubits = self.control_qubits
        return new_gate
    def check_controls(func): # pylint: disable=E0213
        """Decorator that forbids calling ``controlled_by`` on a gate that is
        already controlled or whose number of qubits is already fixed."""
        def wrapper(self, *args):
            if self.control_qubits:
                raise_error(RuntimeError, "Cannot use `controlled_by` method "
                                          "on gate {} because it is already "
                                          "controlled by {}."
                                          "".format(self, self.control_qubits))
            if self._nqubits is not None:
                raise_error(RuntimeError, "Cannot use controlled_by on a gate "
                                          "for which the number of qubits is "
                                          "set.")
            return func(self, *args) # pylint: disable=E1102
        return wrapper
    @check_controls
    def controlled_by(self, *qubits: int) -> "Gate":
        """Controls the gate on (arbitrarily many) qubits.
        Args:
            *qubits (int): Ids of the qubits that the gate will be controlled on.
        Returns:
            A :class:`qibo.abstractions.gates.Gate` object in with the corresponding
            gate being controlled in the given qubits.
        """
        if qubits:
            self.is_controlled_by = True
            self.control_qubits = qubits
        return self
    def decompose(self, *free) -> List["Gate"]:
        """Decomposes multi-control gates to gates supported by OpenQASM.
        Decompositions are based on `arXiv:9503016 <https://arxiv.org/abs/quant-ph/9503016>`_.
        Args:
            free: Ids of free qubits to use for the gate decomposition.
        Returns:
            List with gates that have the same effect as applying the original gate.
        """
        # TODO: Implement this method for all gates not supported by OpenQASM.
        # Currently this is implemented only for multi-controlled X gates.
        # If it is used on a different gate it will just return a deep copy
        # of the same gate.
        return [self.__class__(*self.init_args, **self.init_kwargs)]
class SpecialGate(Gate):
    """Base class for gates with special, non-standard behavior.

    Concrete special gates are :class:`qibo.abstractions.gates.CallbackGate`
    and :class:`qibo.abstractions.gates.Flatten`.
    """
    def commutes(self, gate):
        """Special gates are treated as never commuting with any gate."""
        return False
    def _on_qubits(self, *q):
        """Retargeting is not supported for special gates; always raises."""
        message = "Cannot use special gates on subroutines."
        raise_error(NotImplementedError, message)
class Channel(Gate):
    """Abstract class for channels."""
    def __init__(self):
        super().__init__()
        # Tuple of component gates that define the channel; filled by subclasses.
        self.gates = tuple()
        # create inversion gates to restore the original state vector
        # because of the in-place updates used in custom operators
        self._inverse_gates = None
    @property
    def inverse_gates(self):
        """Lazily computed gates that undo this channel's component gates.

        On first access they are built via ``calculate_inverse_gates`` and
        synchronized with this channel's ``nqubits``/``density_matrix`` flags.
        """
        if self._inverse_gates is None:
            self._inverse_gates = self.calculate_inverse_gates()
            for gate in self._inverse_gates:
                # Entries may be None (no inverse needed for that component).
                if gate is not None:
                    if self._nqubits is not None:
                        gate.nqubits = self._nqubits
                    gate.density_matrix = self.density_matrix
        return self._inverse_gates
    @abstractmethod
    def calculate_inverse_gates(self): # pragma: no cover
        """Constructs the inverse gates; implemented by concrete channels."""
        raise_error(NotImplementedError)
    @Gate.nqubits.setter
    def nqubits(self, n: int):
        # Propagate the qubit count to all component and inverse gates.
        Gate.nqubits.fset(self, n) # pylint: disable=no-member
        for gate in self.gates:
            gate.nqubits = n
        if self._inverse_gates is not None:
            for gate in self._inverse_gates:
                if gate is not None:
                    gate.nqubits = n
    @Gate.density_matrix.setter
    def density_matrix(self, x):
        # Propagate the density matrix flag to all component and inverse gates.
        Gate.density_matrix.fset(self, x) # pylint: disable=no-member
        for gate in self.gates:
            gate.density_matrix = x
        if self._inverse_gates is not None:
            for gate in self._inverse_gates:
                if gate is not None:
                    gate.density_matrix = x
    def controlled_by(self, *q):
        """Channels cannot be controlled; always raises ``ValueError``."""
        raise_error(ValueError, "Noise channel cannot be controlled on qubits.")
    def _on_qubits(self, *q): # pragma: no cover
        # future TODO
        raise_error(NotImplementedError, "`_on_qubits` method is not available "
                                         "for the `Channel` gate.")
class ParametrizedGate(Gate):
    """Base class for parametrized gates.
    Implements the basic functionality of parameter setters and getters.
    """
    def __init__(self, trainable=True):
        super(ParametrizedGate, self).__init__()
        # Either a single name (str) for one parameter or a sequence of names.
        self.parameter_names = "theta"
        self.nparams = 1
        self.trainable = trainable
        self._parameters = []
        # Maps parameter index -> sympy expression for measurement-dependent
        # (not yet numeric) parameters.
        self.symbolic_parameters = {}
    @property
    def parameters(self):
        """Returns a tuple containing the current value of gate's parameters."""
        # Single-parameter gates return the bare value, not a 1-tuple.
        if isinstance(self.parameter_names, str):
            return self._parameters[0]
        return tuple(self._parameters)
    @parameters.setter
    def parameters(self, x):
        """Updates the values of gate's parameters."""
        if isinstance(self.parameter_names, str):
            nparams = 1
            # Normalize x to a length-1 list so the loop below works uniformly.
            if not isinstance(x, collections.abc.Iterable):
                x = [x]
            else:
                # Captures the ``Unitary`` gate case where the given parameter
                # can be an array
                try:
                    if len(x) != 1:
                        x = [x]
                except TypeError: # tf.Variable case
                    s = tuple(x.shape)
                    if not s or s[0] != 1:
                        x = [x]
        else:
            nparams = len(self.parameter_names)
        if not self._parameters:
            self._parameters = nparams * [None]
        if len(x) != nparams:
            raise_error(ValueError, "Parametrized gate has {} parameters "
                                    "but {} update values were given."
                                    "".format(nparams, len(x)))
        for i, v in enumerate(x):
            # Symbolic values make the gate not well-defined until the symbols
            # are substituted at execution time.
            if isinstance(v, sympy.Expr):
                self.well_defined = False
                self.symbolic_parameters[i] = v
            self._parameters[i] = v
        # This part uses ``BackendGate`` attributes (see below), assuming
        # that the gate was initialized using a calculation backend.
        # I could not find a cleaner way to write this so that the
        # ``circuit.set_parameters`` method works properly.
        # pylint: disable=E1101
        if isinstance(self, BaseBackendGate):
            self._reset_unitary()
            for devgate in self.device_gates:
                devgate.parameters = x
    def substitute_symbols(self):
        """Replaces symbolic parameters with their evaluated numeric values."""
        params = list(self._parameters)
        for i, param in self.symbolic_parameters.items():
            for symbol in param.free_symbols:
                param = symbol.evaluate(param)
            params[i] = float(param)
        # Reassign through the setter so backend caches are refreshed.
        self.parameters = params
class BaseBackendGate(Gate, ABC):
    """Abstract class for gate objects that can be used in calculations.
    """
    # Overridden by concrete backend gate classes; ``Gate`` sets this to the
    # abstract gates module via a class-level import.
    module = None
    def __init__(self):
        """
        Attributes:
            unitary: Unitary matrix representation of the gate in the computational
                basis.
            is_prepared: ``True`` if the gate is prepared for action to states.
                A gate is prepared when its matrix and/or other tensors required
                in the computation are calculated.
                See :meth:`qibo.abstractions.abstract_gates.BackendGate.prepare` for more
                details.
                Note that gate preparation is triggered automatically when a gate
                is added to a circuit or when it acts on a state.
            device: Hardware device to use in order to simulate this gate.
            density_matrix: ``True`` if the gate will act on density matrices,
                ``False`` if the gate will act on state vectors.
        """
        Gate.__init__(self)
        self._matrix = None
        self._cache = None
        # Cast gate matrices to the proper device
        self.device = get_device()
        # Reference to copies of this gate that are casted in devices when
        # a distributed circuit is used
        self.device_gates = set()
        self.original_gate = None
    @property
    def matrix(self):
        """Unitary matrix representing the gate in the computational basis."""
        if len(self.qubits) > 2:
            raise_error(NotImplementedError, "Cannot calculate unitary matrix for "
                                             "gates that target more than two qubits.")
        # Lazily construct and cache the matrix on first access.
        if self._matrix is None:
            self._matrix = self._construct_unitary()
        if self.is_controlled_by and tuple(self._matrix.shape) == (2, 2):
            self._matrix = self._control_unitary(self._matrix)
        return self._matrix
    def __matmul__(self, other: "Gate") -> "Gate":
        """Gate multiplication."""
        if self.qubits != other.qubits:
            raise_error(NotImplementedError, "Cannot multiply gates that target "
                                             "different qubits.")
        if self.__class__.__name__ == other.__class__.__name__:
            # These gates square to the identity, so G @ G == I.
            square_identity = {"H", "X", "Y", "Z", "CNOT", "CZ", "SWAP"}
            if self.__class__.__name__ in square_identity:
                from qibo.gates import I
                return I(*self.qubits)
        # Generic case: fall back to an explicit unitary product.
        return self.module.Unitary(self.matrix @ other.matrix, *self.qubits)
    def __rmatmul__(self, other): # pragma: no cover
        # always falls back to left ``__matmul__``
        return self.__matmul__(other)
    @staticmethod
    @abstractmethod
    def _control_unitary(unitary): # pragma: no cover
        """Updates the unitary matrix of the gate if it is controlled."""
        raise_error(NotImplementedError)
    @abstractmethod
    def _construct_unitary(self): # pragma: no cover
        """Constructs the gate's unitary matrix."""
        return raise_error(NotImplementedError)
    def _reset_unitary(self):
        """Resets the gate matrices back to ``None``.
        Useful when the gate matrix need to be recalculated.
        """
        self._matrix = None
    @property
    @abstractmethod
    def cache(self): # pragma: no cover
        """Backend-specific cached data used when applying the gate.

        NOTE(review): exact contents depend on the concrete backend — confirm
        in the backend implementations.
        """
        raise_error(NotImplementedError)
    @abstractmethod
    def _set_nqubits(self, state): # pragma: no cover
        """Sets ``gate.nqubits`` and prepares gates for application to states.
        This method is used only when gates are called directly on states
        without being a part of circuit. If a gate is added in a circuit it
        is automatically prepared and this method is not required.
        """
        raise_error(NotImplementedError)
    @abstractmethod
    def _state_vector_call(self, state): # pragma: no cover
        """Applies the gate on a state vector."""
        raise_error(NotImplementedError)
    @abstractmethod
    def _density_matrix_call(self, state): # pragma: no cover
        """Applies the gate on a density matrix."""
        raise_error(NotImplementedError)
    @abstractmethod
    def _density_matrix_half_call(self, state): # pragma: no cover
        """Half application of gate to density matrix.
        For an arbitrary unitary gate U the
        :meth:`qibo.abstractions.abstract_gates.BaseBackendGate._density_matrix_call`
        calculates
        .. math::
            U\\rho U^\\dagger
        while this method calculates only
        .. math::
            U\\rho
        This is useful for :class:`qibo.abstractions.hamiltonians.SymbolicHamiltonian`
        multiplication to density matrices.
        """
        raise_error(NotImplementedError)
    def __call__(self, state):
        """Applies the gate on a state.
        Falls back to a state vector or density matrix call according to the
        current value of the ``gate.density_matrix`` flag.
        It automatically prepares the gate if it is not already prepared.
        """
        if not self.is_prepared:
            self._set_nqubits(state)
        if not self.well_defined:
            self.substitute_symbols() # pylint: disable=E1101
            # method available only for parametrized gates
        # ``_active_call`` is either "_state_vector_call" or
        # "_density_matrix_call", selected by the density_matrix setter.
        return getattr(self, self._active_call)(state)
|
[
"qibo.gates.I",
"qibo.get_device",
"qibo.config.raise_error"
] |
[((12534, 12610), 'qibo.config.raise_error', 'raise_error', (['NotImplementedError', '"""Cannot use special gates on subroutines."""'], {}), "(NotImplementedError, 'Cannot use special gates on subroutines.')\n", (12545, 12610), False, 'from qibo.config import raise_error\n'), ((13464, 13496), 'qibo.config.raise_error', 'raise_error', (['NotImplementedError'], {}), '(NotImplementedError)\n', (13475, 13496), False, 'from qibo.config import raise_error\n'), ((14272, 14344), 'qibo.config.raise_error', 'raise_error', (['ValueError', '"""Noise channel cannot be controlled on qubits."""'], {}), "(ValueError, 'Noise channel cannot be controlled on qubits.')\n", (14283, 14344), False, 'from qibo.config import raise_error\n'), ((14425, 14525), 'qibo.config.raise_error', 'raise_error', (['NotImplementedError', '"""`_on_qubits` method is not available for the `Channel` gate."""'], {}), "(NotImplementedError,\n '`_on_qubits` method is not available for the `Channel` gate.')\n", (14436, 14525), False, 'from qibo.config import raise_error\n'), ((18394, 18406), 'qibo.get_device', 'get_device', ([], {}), '()\n', (18404, 18406), False, 'from qibo import get_device, config\n'), ((20108, 20140), 'qibo.config.raise_error', 'raise_error', (['NotImplementedError'], {}), '(NotImplementedError)\n', (20119, 20140), False, 'from qibo.config import raise_error\n'), ((20282, 20314), 'qibo.config.raise_error', 'raise_error', (['NotImplementedError'], {}), '(NotImplementedError)\n', (20293, 20314), False, 'from qibo.config import raise_error\n'), ((20585, 20617), 'qibo.config.raise_error', 'raise_error', (['NotImplementedError'], {}), '(NotImplementedError)\n', (20596, 20617), False, 'from qibo.config import raise_error\n'), ((21010, 21042), 'qibo.config.raise_error', 'raise_error', (['NotImplementedError'], {}), '(NotImplementedError)\n', (21021, 21042), False, 'from qibo.config import raise_error\n'), ((21182, 21214), 'qibo.config.raise_error', 'raise_error', (['NotImplementedError'], {}), 
'(NotImplementedError)\n', (21193, 21214), False, 'from qibo.config import raise_error\n'), ((21358, 21390), 'qibo.config.raise_error', 'raise_error', (['NotImplementedError'], {}), '(NotImplementedError)\n', (21369, 21390), False, 'from qibo.config import raise_error\n'), ((21966, 21998), 'qibo.config.raise_error', 'raise_error', (['NotImplementedError'], {}), '(NotImplementedError)\n', (21977, 21998), False, 'from qibo.config import raise_error\n'), ((6688, 6804), 'qibo.config.raise_error', 'raise_error', (['RuntimeError', '"""Density matrix mode cannot be switched after preparing the gate for execution."""'], {}), "(RuntimeError,\n 'Density matrix mode cannot be switched after preparing the gate for execution.'\n )\n", (6699, 6804), False, 'from qibo.config import raise_error\n'), ((18751, 18871), 'qibo.config.raise_error', 'raise_error', (['NotImplementedError', '"""Cannot calculate unitary matrix for gates that target more than two qubits."""'], {}), "(NotImplementedError,\n 'Cannot calculate unitary matrix for gates that target more than two qubits.'\n )\n", (18762, 18871), False, 'from qibo.config import raise_error\n'), ((19301, 19392), 'qibo.config.raise_error', 'raise_error', (['NotImplementedError', '"""Cannot multiply gates that target different qubits."""'], {}), "(NotImplementedError,\n 'Cannot multiply gates that target different qubits.')\n", (19312, 19392), False, 'from qibo.config import raise_error\n'), ((10694, 10805), 'qibo.config.raise_error', 'raise_error', (['RuntimeError', '"""Cannot use controlled_by on a gate for which the number of qubits is set."""'], {}), "(RuntimeError,\n 'Cannot use controlled_by on a gate for which the number of qubits is set.'\n )\n", (10705, 10805), False, 'from qibo.config import raise_error\n'), ((19697, 19712), 'qibo.gates.I', 'I', (['*self.qubits'], {}), '(*self.qubits)\n', (19698, 19712), False, 'from qibo.gates import I\n')]
|
import hashlib
import os
def generate_hash(param_set):
    """Build a deterministic cache filename for an image parameter set.

    The hash input is ``param_set.path`` followed by the string form of
    every truthy salt attribute (mode, width, height, upscale, quality,
    direction, degree); the result is the md5 hex digest joined with the
    image format.

    Args:
        param_set: Object exposing ``path``, ``img_format`` and the salt
            attributes listed above.

    Returns:
        str: ``"<md5 hexdigest>.<img_format>"``.
    """
    salt_keys = ('mode',
                 'width',
                 'height',
                 'upscale',
                 'quality',
                 'direction',
                 'degree')
    parts = [param_set.path]
    for key in salt_keys:
        # getattr is the idiomatic replacement for __getattribute__(key).
        value = getattr(param_set, key)
        # Falsy values (None, 0, '') deliberately contribute nothing.
        if value:
            parts.append(str(value))
    hashed = hashlib.md5(''.join(parts).encode('utf8')).hexdigest()
    return '.'.join([hashed, param_set.img_format])
def get_name_with_ext(path):
    """Return the final path component, extension included."""
    _, name = os.path.split(path)
    return name
def split_name_and_ext(path):
    """Split ``path`` into a ``(root, extension)`` pair."""
    root, ext = os.path.splitext(path)
    return root, ext
def get_name_without_ext(path):
    """Return ``path`` with its extension stripped."""
    root, _ = os.path.splitext(path)
    return root
def get_ext(path):
    """Return the extension of ``path``, leading dot included ('' if none)."""
    return os.path.splitext(path)[-1]
|
[
"os.path.splitext",
"os.path.basename"
] |
[((622, 644), 'os.path.basename', 'os.path.basename', (['path'], {}), '(path)\n', (638, 644), False, 'import os\n'), ((688, 710), 'os.path.splitext', 'os.path.splitext', (['path'], {}), '(path)\n', (704, 710), False, 'import os\n'), ((756, 778), 'os.path.splitext', 'os.path.splitext', (['path'], {}), '(path)\n', (772, 778), False, 'import os\n'), ((814, 836), 'os.path.splitext', 'os.path.splitext', (['path'], {}), '(path)\n', (830, 836), False, 'import os\n')]
|
import tensorflow_quantum as tfq
import tensorflow as tf
import cirq
import sympy
import matplotlib.pyplot as plt
import numpy as np
def make_data(qubits):
    """Encode the four XOR input pairs as circuits with +/-1 hinge labels.

    Each input bit is state-prepared on one qubit: ``cirq.X`` for a 1 bit
    and an explicit ``cirq.I`` for a 0 bit.  The label is 1 when the bits
    differ (XOR true) and -1 otherwise.

    Args:
        qubits: Sequence of at least two cirq qubits.

    Returns:
        ``(train, train_label, test, test_label)`` where the circuit sets
        are ``tfq`` tensors and the labels are numpy arrays.  XOR has only
        four input states, so the test set equals the training set.
    """
    def _encode(bit_pair):
        # One state-preparation circuit per (a, b) input pair.
        circuit = cirq.Circuit()
        for qubit, bit in zip(qubits, bit_pair):
            gate = cirq.X if bit else cirq.I
            circuit.append([gate(qubit)])
        return circuit

    pairs = ((0, 0), (1, 0), (0, 1), (1, 1))
    train = [_encode(pair) for pair in pairs]
    train_label = [1 if a != b else -1 for (a, b) in pairs]
    return (tfq.convert_to_tensor(train), np.array(train_label),
            tfq.convert_to_tensor(train), np.array(train_label))
def one_qubit_unitary(bit, symbols):
    """Single-qubit rotation on ``bit`` parametrized by three symbols (rx, ry, rz)."""
    axis_rotations = (cirq.rx, cirq.ry, cirq.rz)
    operations = [rotation(symbol).on(bit)
                  for rotation, symbol in zip(axis_rotations, symbols)]
    return cirq.Circuit(*operations)
def two_qubit_pool(source_qubit, sink_qubit, symbols):
    """Pooling block: rotate both qubits, CNOT-entangle, then undo the sink rotation."""
    sink_basis_selector = one_qubit_unitary(sink_qubit, symbols[0:3])
    source_basis_selector = one_qubit_unitary(source_qubit, symbols[3:6])
    pool_circuit = cirq.Circuit()
    for block in (sink_basis_selector,
                  source_basis_selector,
                  cirq.CNOT(control=source_qubit, target=sink_qubit),
                  sink_basis_selector**-1):
        pool_circuit.append(block)
    return pool_circuit
def make_circuit(qubits):
    """Two-qubit ansatz: CNOT entangler, per-qubit Euler rotations, then pooling."""
    per_qubit_symbols = (
        (sympy.symbols('X1_rot'), sympy.symbols('Y1_rot'), sympy.symbols('Z1_rot')),
        (sympy.symbols('X2_rot'), sympy.symbols('Y2_rot'), sympy.symbols('Z2_rot')),
    )
    pool = sympy.symbols('pooling0:6')
    circuit = cirq.Circuit()
    circuit.append(cirq.CNOT(qubits[0], qubits[1]))
    for qubit, (x_sym, y_sym, z_sym) in zip(qubits, per_qubit_symbols):
        circuit.append(cirq.rx(x_sym).on(qubit))
        circuit.append(cirq.ry(y_sym).on(qubit))
        circuit.append(cirq.rz(z_sym).on(qubit))
    circuit += two_qubit_pool(qubits[0], qubits[1], pool)
    return circuit
def hinge_accuracy(y_true, y_pred):
    """Fraction of predictions whose sign matches the true hinge (+/-1) labels."""
    true_sign = tf.squeeze(y_true) > 0.0
    pred_sign = tf.squeeze(y_pred) > 0.0
    matches = tf.cast(true_sign == pred_sign, tf.float32)
    return tf.reduce_mean(matches)
# Two qubits on a line; the second one also carries the readout observable.
qubits = [cirq.GridQubit(0,i) for i in range(2)]
train, train_label, test, test_label = make_data(qubits)
# Model output is the expectation of Z on the second qubit.
readout_operators = [cirq.Z(qubits[1])]
# Circuits enter the Keras model as serialized strings (tfq convention).
inputs = tf.keras.Input(shape=(), dtype=tf.dtypes.string)
trial_circuit = make_circuit(qubits)
print(trial_circuit)
# Parametrized quantum circuit layer: 1000 measurement shots,
# parameter-shift gradients.
layer1 = tfq.layers.PQC(make_circuit(qubits), readout_operators, repetitions=1000, \
        differentiator=tfq.differentiators.ParameterShift())(inputs)
model = tf.keras.models.Model(inputs=inputs, outputs=layer1)
def np_hinge(true, pred):
    """Hinge accuracy on numpy arrays: fraction of matching label signs."""
    return np.mean((true > 0) == (pred > 0))
tf_loss = []
tf_acc = []
N = 100
# Random initial values for the 12 circuit symbols, drawn from [0, 2*pi).
params = np.random.uniform(0, 2 * np.pi, 12)
#params = np.zeros((12,))
model.set_weights(np.array([params]))
opt = tf.keras.optimizers.Adam(lr=0.01)
# Reference run: train with tf.keras autodiff and record loss/accuracy
# per epoch for later comparison against the custom optimizer below.
for i in range(N):
    with tf.GradientTape() as tape:
        guess = model(train)
        error = tf.keras.losses.MAE(train_label, tf.squeeze(guess))
    grad = tape.gradient(error, model.trainable_variables)
    opt.apply_gradients(zip(grad, model.trainable_variables))
    acc = np_hinge(train_label, guess.numpy().flatten())
    tf_loss.append(error)
    tf_acc.append(acc)
    if i % 10 == 0:
        print("Epoch {}/{}, Loss {}, Acc {}".format(i, N, error, acc))
import optimizers
from quantum_diffs import ParameterShift
def f(x):
    """MAE training loss as a function of the flat parameter vector ``x``."""
    model.set_weights(np.array([x]))
    predictions = model(train)
    loss = tf.keras.losses.MAE(train_label, tf.squeeze(predictions))
    return loss.numpy()
def f1(x):
    """Raw model predictions for the flat parameter vector ``x``."""
    model.set_weights(np.array([x]))
    predictions = model(train)
    return predictions.numpy()
# Comparison run: same model, but gradients computed by the hand-rolled
# parameter-shift rule and applied with the custom Adam optimizer.
opt = optimizers.Adam(lr=0.01)
custom_loss = []  # renamed from the original misspelled ``cutsom``
accs = []
for i in range(N):
    guess = f(params)
    custom_loss.append(guess)
    gradients = ParameterShift(f, params)
    params = opt.apply_grad(gradients, params)
    acc = np_hinge(train_label, f1(params).flatten())
    accs.append(acc)
    if i % 10 == 0:
        print("Epoch {}/{}, Loss {}, Acc {}".format(i, N, guess, acc))
# Loss curves: tf.keras reference vs. custom optimizer.
plt.plot(tf_loss, label='TFQ')
plt.plot(custom_loss, label='Custom')
plt.legend()
plt.title("Training Loss")
plt.xlabel("Epochs")
plt.ylabel("MAE Loss")
plt.show()
# Accuracy curves for the same two runs.
plt.plot(tf_acc, label='TFQ')
plt.plot(accs, label='Custom')
plt.legend()
plt.title("Training Acc")
plt.xlabel("Epochs")
plt.ylabel("Accuracy")
plt.show()
|
[
"matplotlib.pyplot.title",
"cirq.rx",
"cirq.ry",
"numpy.mean",
"cirq.CNOT",
"cirq.rz",
"cirq.I",
"tensorflow.keras.Input",
"tensorflow.cast",
"tensorflow.keras.optimizers.Adam",
"tensorflow.squeeze",
"cirq.Z",
"matplotlib.pyplot.show",
"tensorflow_quantum.differentiators.ParameterShift",
"matplotlib.pyplot.legend",
"tensorflow.reduce_mean",
"cirq.GridQubit",
"tensorflow.keras.models.Model",
"cirq.X",
"matplotlib.pyplot.ylabel",
"numpy.random.uniform",
"sympy.symbols",
"matplotlib.pyplot.plot",
"optimizers.Adam",
"numpy.array",
"quantum_diffs.ParameterShift",
"cirq.Circuit",
"tensorflow_quantum.convert_to_tensor",
"matplotlib.pyplot.xlabel",
"tensorflow.GradientTape"
] |
[((2587, 2635), 'tensorflow.keras.Input', 'tf.keras.Input', ([], {'shape': '()', 'dtype': 'tf.dtypes.string'}), '(shape=(), dtype=tf.dtypes.string)\n', (2601, 2635), True, 'import tensorflow as tf\n'), ((2852, 2904), 'tensorflow.keras.models.Model', 'tf.keras.models.Model', ([], {'inputs': 'inputs', 'outputs': 'layer1'}), '(inputs=inputs, outputs=layer1)\n', (2873, 2904), True, 'import tensorflow as tf\n'), ((3056, 3091), 'numpy.random.uniform', 'np.random.uniform', (['(0)', '(2 * np.pi)', '(12)'], {}), '(0, 2 * np.pi, 12)\n', (3073, 3091), True, 'import numpy as np\n'), ((3164, 3197), 'tensorflow.keras.optimizers.Adam', 'tf.keras.optimizers.Adam', ([], {'lr': '(0.01)'}), '(lr=0.01)\n', (3188, 3197), True, 'import tensorflow as tf\n'), ((3972, 3996), 'optimizers.Adam', 'optimizers.Adam', ([], {'lr': '(0.01)'}), '(lr=0.01)\n', (3987, 3996), False, 'import optimizers\n'), ((4352, 4382), 'matplotlib.pyplot.plot', 'plt.plot', (['tf_loss'], {'label': '"""TFQ"""'}), "(tf_loss, label='TFQ')\n", (4360, 4382), True, 'import matplotlib.pyplot as plt\n'), ((4383, 4415), 'matplotlib.pyplot.plot', 'plt.plot', (['cutsom'], {'label': '"""Custom"""'}), "(cutsom, label='Custom')\n", (4391, 4415), True, 'import matplotlib.pyplot as plt\n'), ((4416, 4428), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', (4426, 4428), True, 'import matplotlib.pyplot as plt\n'), ((4429, 4455), 'matplotlib.pyplot.title', 'plt.title', (['"""Training Loss"""'], {}), "('Training Loss')\n", (4438, 4455), True, 'import matplotlib.pyplot as plt\n'), ((4456, 4476), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Epochs"""'], {}), "('Epochs')\n", (4466, 4476), True, 'import matplotlib.pyplot as plt\n'), ((4477, 4499), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""MAE Loss"""'], {}), "('MAE Loss')\n", (4487, 4499), True, 'import matplotlib.pyplot as plt\n'), ((4500, 4510), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (4508, 4510), True, 'import matplotlib.pyplot as plt\n'), ((4513, 
4542), 'matplotlib.pyplot.plot', 'plt.plot', (['tf_acc'], {'label': '"""TFQ"""'}), "(tf_acc, label='TFQ')\n", (4521, 4542), True, 'import matplotlib.pyplot as plt\n'), ((4543, 4573), 'matplotlib.pyplot.plot', 'plt.plot', (['accs'], {'label': '"""Custom"""'}), "(accs, label='Custom')\n", (4551, 4573), True, 'import matplotlib.pyplot as plt\n'), ((4574, 4586), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', (4584, 4586), True, 'import matplotlib.pyplot as plt\n'), ((4587, 4612), 'matplotlib.pyplot.title', 'plt.title', (['"""Training Acc"""'], {}), "('Training Acc')\n", (4596, 4612), True, 'import matplotlib.pyplot as plt\n'), ((4613, 4633), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Epochs"""'], {}), "('Epochs')\n", (4623, 4633), True, 'import matplotlib.pyplot as plt\n'), ((4634, 4656), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Accuracy"""'], {}), "('Accuracy')\n", (4644, 4656), True, 'import matplotlib.pyplot as plt\n'), ((4657, 4667), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (4665, 4667), True, 'import matplotlib.pyplot as plt\n'), ((213, 227), 'cirq.Circuit', 'cirq.Circuit', ([], {}), '()\n', (225, 227), False, 'import cirq\n'), ((373, 387), 'cirq.Circuit', 'cirq.Circuit', ([], {}), '()\n', (385, 387), False, 'import cirq\n'), ((532, 546), 'cirq.Circuit', 'cirq.Circuit', ([], {}), '()\n', (544, 546), False, 'import cirq\n'), ((691, 705), 'cirq.Circuit', 'cirq.Circuit', ([], {}), '()\n', (703, 705), False, 'import cirq\n'), ((1192, 1206), 'cirq.Circuit', 'cirq.Circuit', ([], {}), '()\n', (1204, 1206), False, 'import cirq\n'), ((1628, 1651), 'sympy.symbols', 'sympy.symbols', (['"""X1_rot"""'], {}), "('X1_rot')\n", (1641, 1651), False, 'import sympy\n'), ((1661, 1684), 'sympy.symbols', 'sympy.symbols', (['"""Y1_rot"""'], {}), "('Y1_rot')\n", (1674, 1684), False, 'import sympy\n'), ((1694, 1717), 'sympy.symbols', 'sympy.symbols', (['"""Z1_rot"""'], {}), "('Z1_rot')\n", (1707, 1717), False, 'import sympy\n'), ((1727, 1750), 
'sympy.symbols', 'sympy.symbols', (['"""X2_rot"""'], {}), "('X2_rot')\n", (1740, 1750), False, 'import sympy\n'), ((1760, 1783), 'sympy.symbols', 'sympy.symbols', (['"""Y2_rot"""'], {}), "('Y2_rot')\n", (1773, 1783), False, 'import sympy\n'), ((1793, 1816), 'sympy.symbols', 'sympy.symbols', (['"""Z2_rot"""'], {}), "('Z2_rot')\n", (1806, 1816), False, 'import sympy\n'), ((1828, 1855), 'sympy.symbols', 'sympy.symbols', (['"""pooling0:6"""'], {}), "('pooling0:6')\n", (1841, 1855), False, 'import sympy\n'), ((1864, 1878), 'cirq.Circuit', 'cirq.Circuit', ([], {}), '()\n', (1876, 1878), False, 'import cirq\n'), ((2356, 2393), 'tensorflow.cast', 'tf.cast', (['(y_true == y_pred)', 'tf.float32'], {}), '(y_true == y_pred, tf.float32)\n', (2363, 2393), True, 'import tensorflow as tf\n'), ((2406, 2428), 'tensorflow.reduce_mean', 'tf.reduce_mean', (['result'], {}), '(result)\n', (2420, 2428), True, 'import tensorflow as tf\n'), ((2440, 2460), 'cirq.GridQubit', 'cirq.GridQubit', (['(0)', 'i'], {}), '(0, i)\n', (2454, 2460), False, 'import cirq\n'), ((2559, 2576), 'cirq.Z', 'cirq.Z', (['qubits[1]'], {}), '(qubits[1])\n', (2565, 2576), False, 'import cirq\n'), ((2997, 3012), 'numpy.mean', 'np.mean', (['result'], {}), '(result)\n', (3004, 3012), True, 'import numpy as np\n'), ((3137, 3155), 'numpy.array', 'np.array', (['[params]'], {}), '([params])\n', (3145, 3155), True, 'import numpy as np\n'), ((4101, 4126), 'quantum_diffs.ParameterShift', 'ParameterShift', (['f', 'params'], {}), '(f, params)\n', (4115, 4126), False, 'from quantum_diffs import ParameterShift\n'), ((838, 866), 'tensorflow_quantum.convert_to_tensor', 'tfq.convert_to_tensor', (['train'], {}), '(train)\n', (859, 866), True, 'import tensorflow_quantum as tfq\n'), ((868, 889), 'numpy.array', 'np.array', (['train_label'], {}), '(train_label)\n', (876, 889), True, 'import numpy as np\n'), ((891, 919), 'tensorflow_quantum.convert_to_tensor', 'tfq.convert_to_tensor', (['train'], {}), '(train)\n', (912, 919), True, 'import 
tensorflow_quantum as tfq\n'), ((921, 942), 'numpy.array', 'np.array', (['train_label'], {}), '(train_label)\n', (929, 942), True, 'import numpy as np\n'), ((1467, 1517), 'cirq.CNOT', 'cirq.CNOT', ([], {'control': 'source_qubit', 'target': 'sink_qubit'}), '(control=source_qubit, target=sink_qubit)\n', (1476, 1517), False, 'import cirq\n'), ((1892, 1923), 'cirq.CNOT', 'cirq.CNOT', (['qubits[0]', 'qubits[1]'], {}), '(qubits[0], qubits[1])\n', (1901, 1923), False, 'import cirq\n'), ((2280, 2298), 'tensorflow.squeeze', 'tf.squeeze', (['y_true'], {}), '(y_true)\n', (2290, 2298), True, 'import tensorflow as tf\n'), ((2318, 2336), 'tensorflow.squeeze', 'tf.squeeze', (['y_pred'], {}), '(y_pred)\n', (2328, 2336), True, 'import tensorflow as tf\n'), ((3227, 3244), 'tensorflow.GradientTape', 'tf.GradientTape', ([], {}), '()\n', (3242, 3244), True, 'import tensorflow as tf\n'), ((3763, 3776), 'numpy.array', 'np.array', (['[x]'], {}), '([x])\n', (3771, 3776), True, 'import numpy as np\n'), ((3904, 3917), 'numpy.array', 'np.array', (['[x]'], {}), '([x])\n', (3912, 3917), True, 'import numpy as np\n'), ((244, 261), 'cirq.I', 'cirq.I', (['qubits[0]'], {}), '(qubits[0])\n', (250, 261), False, 'import cirq\n'), ((280, 297), 'cirq.I', 'cirq.I', (['qubits[1]'], {}), '(qubits[1])\n', (286, 297), False, 'import cirq\n'), ((404, 421), 'cirq.X', 'cirq.X', (['qubits[0]'], {}), '(qubits[0])\n', (410, 421), False, 'import cirq\n'), ((440, 457), 'cirq.I', 'cirq.I', (['qubits[1]'], {}), '(qubits[1])\n', (446, 457), False, 'import cirq\n'), ((563, 580), 'cirq.I', 'cirq.I', (['qubits[0]'], {}), '(qubits[0])\n', (569, 580), False, 'import cirq\n'), ((599, 616), 'cirq.X', 'cirq.X', (['qubits[1]'], {}), '(qubits[1])\n', (605, 616), False, 'import cirq\n'), ((722, 739), 'cirq.X', 'cirq.X', (['qubits[0]'], {}), '(qubits[0])\n', (728, 739), False, 'import cirq\n'), ((758, 775), 'cirq.X', 'cirq.X', (['qubits[1]'], {}), '(qubits[1])\n', (764, 775), False, 'import cirq\n'), ((2798, 2834), 
'tensorflow_quantum.differentiators.ParameterShift', 'tfq.differentiators.ParameterShift', ([], {}), '()\n', (2832, 2834), True, 'import tensorflow_quantum as tfq\n'), ((3332, 3349), 'tensorflow.squeeze', 'tf.squeeze', (['guess'], {}), '(guess)\n', (3342, 3349), True, 'import tensorflow as tf\n'), ((1014, 1033), 'cirq.rx', 'cirq.rx', (['symbols[0]'], {}), '(symbols[0])\n', (1021, 1033), False, 'import cirq\n'), ((1051, 1070), 'cirq.ry', 'cirq.ry', (['symbols[1]'], {}), '(symbols[1])\n', (1058, 1070), False, 'import cirq\n'), ((1088, 1107), 'cirq.rz', 'cirq.rz', (['symbols[2]'], {}), '(symbols[2])\n', (1095, 1107), False, 'import cirq\n'), ((1938, 1949), 'cirq.rx', 'cirq.rx', (['x1'], {}), '(x1)\n', (1945, 1949), False, 'import cirq\n'), ((1978, 1989), 'cirq.ry', 'cirq.ry', (['y1'], {}), '(y1)\n', (1985, 1989), False, 'import cirq\n'), ((2018, 2029), 'cirq.rz', 'cirq.rz', (['z1'], {}), '(z1)\n', (2025, 2029), False, 'import cirq\n'), ((2058, 2069), 'cirq.rx', 'cirq.rx', (['x2'], {}), '(x2)\n', (2065, 2069), False, 'import cirq\n'), ((2098, 2109), 'cirq.ry', 'cirq.ry', (['y2'], {}), '(y2)\n', (2105, 2109), False, 'import cirq\n'), ((2138, 2149), 'cirq.rz', 'cirq.rz', (['z2'], {}), '(z2)\n', (2145, 2149), False, 'import cirq\n'), ((3845, 3860), 'tensorflow.squeeze', 'tf.squeeze', (['ret'], {}), '(ret)\n', (3855, 3860), True, 'import tensorflow as tf\n')]
|
from django.conf.urls import patterns, url
from kitsune.wiki import api
# API urls
urlpatterns = patterns(
'',
url(r'^$', api.DocumentList.as_view(), name='document-list'),
url(r'^(?P<slug>[^/]+)$', api.DocumentDetail.as_view(), name='document-detail'),
)
|
[
"kitsune.wiki.api.DocumentDetail.as_view",
"kitsune.wiki.api.DocumentList.as_view"
] |
[((133, 159), 'kitsune.wiki.api.DocumentList.as_view', 'api.DocumentList.as_view', ([], {}), '()\n', (157, 159), False, 'from kitsune.wiki import api\n'), ((214, 242), 'kitsune.wiki.api.DocumentDetail.as_view', 'api.DocumentDetail.as_view', ([], {}), '()\n', (240, 242), False, 'from kitsune.wiki import api\n')]
|
from rest_framework import viewsets
from ice_creams.models import Flavor
from ice_creams.models import IceCream
from ice_creams.models import IceCreamServing
from ice_creams.models import Topping
from .serializers import FlavorSerializer
from .serializers import IceCreamSerializer
from .serializers import IceCreamServingSerializer
from .serializers import ToppingSerializer
class ToppingViewSet(viewsets.ModelViewSet):
"""
Viewset for toppings.
"""
queryset = Topping.objects.all()
serializer_class = ToppingSerializer
class FlavorViewSet(viewsets.ModelViewSet):
"""
Viewset for toppings.
"""
queryset = Flavor.objects.all()
serializer_class = FlavorSerializer
class IceCreamServingViewSet(viewsets.ModelViewSet):
"""
Viewset for toppings.
"""
queryset = IceCreamServing.objects.all()
serializer_class = IceCreamServingSerializer
class IceCreamViewSet(viewsets.ModelViewSet):
"""
Viewset for toppings.
"""
queryset = IceCream.objects.all()
serializer_class = IceCreamSerializer
|
[
"ice_creams.models.Topping.objects.all",
"ice_creams.models.IceCreamServing.objects.all",
"ice_creams.models.Flavor.objects.all",
"ice_creams.models.IceCream.objects.all"
] |
[((482, 503), 'ice_creams.models.Topping.objects.all', 'Topping.objects.all', ([], {}), '()\n', (501, 503), False, 'from ice_creams.models import Topping\n'), ((648, 668), 'ice_creams.models.Flavor.objects.all', 'Flavor.objects.all', ([], {}), '()\n', (666, 668), False, 'from ice_creams.models import Flavor\n'), ((821, 850), 'ice_creams.models.IceCreamServing.objects.all', 'IceCreamServing.objects.all', ([], {}), '()\n', (848, 850), False, 'from ice_creams.models import IceCreamServing\n'), ((1005, 1027), 'ice_creams.models.IceCream.objects.all', 'IceCream.objects.all', ([], {}), '()\n', (1025, 1027), False, 'from ice_creams.models import IceCream\n')]
|
# Copyright Hybrid Logic Ltd. See LICENSE file for details.
"""
AWS provisioner.
"""
from textwrap import dedent
from time import time
from effect.retry import retry
from effect import Effect, Constant
from ._libcloud import LibcloudProvisioner
from ._install import (
provision,
task_install_ssh_key,
)
from ._ssh import run_remotely
from ._effect import sequence
_usernames = {
'centos-7': 'centos',
'ubuntu-14.04': 'ubuntu',
'ubuntu-15.04': 'ubuntu',
}
def get_default_username(distribution):
"""
Return the username available by default on a system.
:param str distribution: Name of the operating system distribution
:return str: The username made available by AWS for this distribution.
"""
return _usernames[distribution]
def provision_aws(node, package_source, distribution, variants):
"""
Provision flocker on this node.
:param LibcloudNode node: Node to provision.
:param PackageSource package_source: See func:`task_install_flocker`
:param bytes distribution: See func:`task_install_flocker`
:param set variants: The set of variant configurations to use when
provisioning
"""
username = get_default_username(distribution)
commands = []
# cloud-init may not have allowed sudo without tty yet, so try SSH key
# installation for a few more seconds:
start = []
def for_ten_seconds(*args, **kwargs):
if not start:
start.append(time())
return Effect(Constant((time() - start[0]) < 30))
commands.append(run_remotely(
username=username,
address=node.address,
commands=retry(task_install_ssh_key(), for_ten_seconds),
))
commands.append(run_remotely(
username='root',
address=node.address,
commands=provision(
package_source=package_source,
distribution=node.distribution,
variants=variants,
),
))
return sequence(commands)
IMAGE_NAMES = {
'centos-7': 'CentOS 7 x86_64 (2014_09_29) EBS HVM'
'-b7ee8a69-ee97-4a49-9e68-afaee216db2e-ami-d2a117ba.2',
'ubuntu-14.04': 'ubuntu/images/hvm-ssd/ubuntu-trusty-14.04-amd64-server-20150325', # noqa
'ubuntu-15.04': 'ubuntu/images/hvm-ssd/ubuntu-vivid-15.04-amd64-server-20150422', # noqa
}
def aws_provisioner(access_key, secret_access_token, keyname,
region, zone, security_groups):
"""
Create a LibCloudProvisioner for provisioning nodes on AWS EC2.
:param bytes access_key: The access_key to connect to AWS with.
:param bytes secret_access_token: The corresponding secret token.
:param bytes region: The AWS region in which to launch the instance.
:param bytes zone: The AWS zone in which to launch the instance.
:param bytes keyname: The name of an existing ssh public key configured in
AWS. The provision step assumes the corresponding private key is
available from an agent.
:param list security_groups: List of security groups to put created nodes
in.
"""
# Import these here, so that this can be imported without
# installing libcloud.
from libcloud.compute.providers import get_driver, Provider
driver = get_driver(Provider.EC2)(
key=access_key,
secret=secret_access_token,
region=region)
location = [loc for loc in driver.list_locations()
if loc.availability_zone.name == zone][0]
def create_arguments(disk_size):
return {
"location": location,
"ex_securitygroup": security_groups,
"ex_blockdevicemappings": [
{"DeviceName": "/dev/sda1",
"Ebs": {"VolumeSize": disk_size,
"DeleteOnTermination": True,
"VolumeType": "gp2"}}
],
# On some operating systems, a tty is requried for sudo.
# Since AWS systems have a non-root user as the login,
# disable this, so we can use sudo with conch.
"ex_userdata": dedent("""\
#!/bin/sh
sed -i '/Defaults *requiretty/d' /etc/sudoers
""")
}
provisioner = LibcloudProvisioner(
driver=driver,
keyname=keyname,
image_names=IMAGE_NAMES,
create_node_arguments=create_arguments,
provision=provision_aws,
default_size="m3.large",
get_default_user=get_default_username,
use_private_addresses=True,
)
return provisioner
|
[
"textwrap.dedent",
"time.time",
"libcloud.compute.providers.get_driver"
] |
[((3244, 3268), 'libcloud.compute.providers.get_driver', 'get_driver', (['Provider.EC2'], {}), '(Provider.EC2)\n', (3254, 3268), False, 'from libcloud.compute.providers import get_driver, Provider\n'), ((4077, 4205), 'textwrap.dedent', 'dedent', (['""" #!/bin/sh\n sed -i \'/Defaults *requiretty/d\' /etc/sudoers\n """'], {}), '(\n """ #!/bin/sh\n sed -i \'/Defaults *requiretty/d\' /etc/sudoers\n """\n )\n', (4083, 4205), False, 'from textwrap import dedent\n'), ((1473, 1479), 'time.time', 'time', ([], {}), '()\n', (1477, 1479), False, 'from time import time\n'), ((1513, 1519), 'time.time', 'time', ([], {}), '()\n', (1517, 1519), False, 'from time import time\n')]
|
from unittest import TestCase
import numpy as np
from hamcrest import assert_that, is_
from core.batch_generator import BatchGenerator
class DummyBatchGenerator(BatchGenerator):
def __init__(self, batch_items, batch_size):
super().__init__(batch_items, batch_size, 'en')
def shuffle_entries(self):
pass
def extract_features(self, first, last):
return np.random.rand(i, 26)[first:last]
def extract_labels(self, first, last):
return [f'some label' for i in range(first, last)]
class TestBatchGenerator(TestCase):
def test_batch_generator_attributes(self):
batch_items = list(range(33))
batch_size = 16
generator = DummyBatchGenerator(batch_items, batch_size)
assert_that(len(generator), is_(3), f'len() should reflect the number of batches')
assert_that(len(generator[0][0]['the_input']), is_(batch_size), f'first batch should be full')
assert_that(len(generator[1][0]['the_input']), is_(batch_size), f'second batch should be full')
assert_that(len(generator[2][0]['the_input']), is_(1), f'last batch should be residual')
def test_batch_generator_finite(self):
batch_items = [1, 2, 3, 4, 5, 6, 7]
batch_size = 3
generator = DummyBatchGenerator(batch_items, batch_size)
assert_that(len(generator), is_(3))
for i, (batch_inputs, batch_outputs) in enumerate(generator):
assert_that(batch_inputs['the_input'].ndim, is_(3))
if i % len(generator) == len(generator) - 1:
assert_that(batch_inputs['the_input'].shape[0], is_(1), f'last batch should be residual')
else:
assert_that(batch_inputs['the_input'].shape[0], is_(batch_size), 'batch should be full')
assert_that(batch_inputs['the_input'].shape[2], is_(26))
if i >= len(generator):
break # we need to break out because generator is infinite
assert_that(generator.cur_index, is_(1), f'finite generator should be exhausted')
def test_bath_generator_infinite(self):
batch_items = [1, 2, 3, 4, 5, 6, 7]
batch_size = 3
generator = DummyBatchGenerator(batch_items, batch_size)
assert_that(len(generator), is_(3), 'length should still reflect the number of batches')
first_batch = generator[0]
second_batch = generator[1]
third_batch = generator[2]
for i, (batch_inputs, batch_outputs) in enumerate(generator):
if i % batch_size == 0:
assert_that(batch_inputs['the_input'].shape, is_(first_batch[0]['the_input'].shape))
elif i % batch_size == 1:
assert_that(batch_inputs['the_input'].shape, is_(second_batch[0]['the_input'].shape))
else:
assert_that(batch_inputs['the_input'].shape, is_(third_batch[0]['the_input'].shape))
if i > 10:
break # we need to break out because generator is infinite
assert_that(i, is_(11))
assert_that(generator.cur_index, is_(3), )
|
[
"numpy.random.rand",
"hamcrest.is_"
] |
[((394, 415), 'numpy.random.rand', 'np.random.rand', (['i', '(26)'], {}), '(i, 26)\n', (408, 415), True, 'import numpy as np\n'), ((780, 786), 'hamcrest.is_', 'is_', (['(3)'], {}), '(3)\n', (783, 786), False, 'from hamcrest import assert_that, is_\n'), ((890, 905), 'hamcrest.is_', 'is_', (['batch_size'], {}), '(batch_size)\n', (893, 905), False, 'from hamcrest import assert_that, is_\n'), ((993, 1008), 'hamcrest.is_', 'is_', (['batch_size'], {}), '(batch_size)\n', (996, 1008), False, 'from hamcrest import assert_that, is_\n'), ((1097, 1103), 'hamcrest.is_', 'is_', (['(1)'], {}), '(1)\n', (1100, 1103), False, 'from hamcrest import assert_that, is_\n'), ((1351, 1357), 'hamcrest.is_', 'is_', (['(3)'], {}), '(3)\n', (1354, 1357), False, 'from hamcrest import assert_that, is_\n'), ((2002, 2008), 'hamcrest.is_', 'is_', (['(1)'], {}), '(1)\n', (2005, 2008), False, 'from hamcrest import assert_that, is_\n'), ((2264, 2270), 'hamcrest.is_', 'is_', (['(3)'], {}), '(3)\n', (2267, 2270), False, 'from hamcrest import assert_that, is_\n'), ((3020, 3027), 'hamcrest.is_', 'is_', (['(11)'], {}), '(11)\n', (3023, 3027), False, 'from hamcrest import assert_that, is_\n'), ((3070, 3076), 'hamcrest.is_', 'is_', (['(3)'], {}), '(3)\n', (3073, 3076), False, 'from hamcrest import assert_that, is_\n'), ((1485, 1491), 'hamcrest.is_', 'is_', (['(3)'], {}), '(3)\n', (1488, 1491), False, 'from hamcrest import assert_that, is_\n'), ((1839, 1846), 'hamcrest.is_', 'is_', (['(26)'], {}), '(26)\n', (1842, 1846), False, 'from hamcrest import assert_that, is_\n'), ((1614, 1620), 'hamcrest.is_', 'is_', (['(1)'], {}), '(1)\n', (1617, 1620), False, 'from hamcrest import assert_that, is_\n'), ((1738, 1753), 'hamcrest.is_', 'is_', (['batch_size'], {}), '(batch_size)\n', (1741, 1753), False, 'from hamcrest import assert_that, is_\n'), ((2598, 2636), 'hamcrest.is_', 'is_', (["first_batch[0]['the_input'].shape"], {}), "(first_batch[0]['the_input'].shape)\n", (2601, 2636), False, 'from hamcrest import 
assert_that, is_\n'), ((2737, 2776), 'hamcrest.is_', 'is_', (["second_batch[0]['the_input'].shape"], {}), "(second_batch[0]['the_input'].shape)\n", (2740, 2776), False, 'from hamcrest import assert_that, is_\n'), ((2857, 2895), 'hamcrest.is_', 'is_', (["third_batch[0]['the_input'].shape"], {}), "(third_batch[0]['the_input'].shape)\n", (2860, 2895), False, 'from hamcrest import assert_that, is_\n')]
|
from torch.utils.data import Dataset
from datasets.utils import FullDatasetBase
from torchvision.datasets import ImageFolder
from torchvision import transforms
class ImageNet(FullDatasetBase):
mean = (0.485, 0.456, 0.406)
std = (0.229, 0.224, 0.225)
img_shape = (3, 224, 224)
num_classes = 1000
name = "imagenet"
def gen_train_transforms(self):
base_transforms, _ = self.gen_base_transforms()
train_transforms = transforms.Compose([
transforms.RandomResizedCrop(size=224),
transforms.RandomHorizontalFlip(),
base_transforms
])
return train_transforms, _
def gen_test_transforms(self):
base_transforms, _ = self.gen_base_transforms()
test_transforms = transforms.Compose([
transforms.Resize(256),
transforms.CenterCrop(224),
base_transforms
])
return test_transforms, _
def gen_train_datasets(self, transform=None, target_transform=None) -> Dataset:
return ImageFolder(root="/data/ImageNet/train",
transform=transform, target_transform=target_transform)
def gen_val_datasets(self, transform=None, target_transform=None) -> Dataset:
return ImageFolder(root="/data/ImageNet/val",
transform=transform, target_transform=target_transform)
def gen_test_datasets(self, transform=None, target_transform=None) -> Dataset:
return ImageFolder(root="/data/ImageNet/val",
transform=transform, target_transform=target_transform)
@staticmethod
def is_dataset_name(name: str):
import re
return re.match("(imagenet|ImageNet|Imagenet)$", name)
|
[
"torchvision.transforms.RandomHorizontalFlip",
"re.match",
"torchvision.datasets.ImageFolder",
"torchvision.transforms.CenterCrop",
"torchvision.transforms.RandomResizedCrop",
"torchvision.transforms.Resize"
] |
[((1038, 1138), 'torchvision.datasets.ImageFolder', 'ImageFolder', ([], {'root': '"""/data/ImageNet/train"""', 'transform': 'transform', 'target_transform': 'target_transform'}), "(root='/data/ImageNet/train', transform=transform,\n target_transform=target_transform)\n", (1049, 1138), False, 'from torchvision.datasets import ImageFolder\n'), ((1260, 1358), 'torchvision.datasets.ImageFolder', 'ImageFolder', ([], {'root': '"""/data/ImageNet/val"""', 'transform': 'transform', 'target_transform': 'target_transform'}), "(root='/data/ImageNet/val', transform=transform,\n target_transform=target_transform)\n", (1271, 1358), False, 'from torchvision.datasets import ImageFolder\n'), ((1481, 1579), 'torchvision.datasets.ImageFolder', 'ImageFolder', ([], {'root': '"""/data/ImageNet/val"""', 'transform': 'transform', 'target_transform': 'target_transform'}), "(root='/data/ImageNet/val', transform=transform,\n target_transform=target_transform)\n", (1492, 1579), False, 'from torchvision.datasets import ImageFolder\n'), ((1691, 1738), 're.match', 're.match', (['"""(imagenet|ImageNet|Imagenet)$"""', 'name'], {}), "('(imagenet|ImageNet|Imagenet)$', name)\n", (1699, 1738), False, 'import re\n'), ((489, 527), 'torchvision.transforms.RandomResizedCrop', 'transforms.RandomResizedCrop', ([], {'size': '(224)'}), '(size=224)\n', (517, 527), False, 'from torchvision import transforms\n'), ((541, 574), 'torchvision.transforms.RandomHorizontalFlip', 'transforms.RandomHorizontalFlip', ([], {}), '()\n', (572, 574), False, 'from torchvision import transforms\n'), ((801, 823), 'torchvision.transforms.Resize', 'transforms.Resize', (['(256)'], {}), '(256)\n', (818, 823), False, 'from torchvision import transforms\n'), ((837, 863), 'torchvision.transforms.CenterCrop', 'transforms.CenterCrop', (['(224)'], {}), '(224)\n', (858, 863), False, 'from torchvision import transforms\n')]
|
# -*- coding: utf-8 -*-
"""
Seismic wavelets.
:copyright: 2015 Agile Geoscience
:license: Apache 2.0
"""
from collections import namedtuple
import numpy as np
from scipy.signal import hilbert
from scipy.signal import chirp
def sinc(duration, dt, f, return_t=False, taper='blackman'):
"""
sinc function centered on t=0, with a dominant frequency of f Hz.
If you pass a 1D array of frequencies, you get a wavelet bank in return.
Args:
duration (float): The length in seconds of the wavelet.
dt (float): The sample interval in seconds (often one of 0.001, 0.002,
or 0.004).
f (ndarray): Dominant frequency of the wavelet in Hz. If a sequence is
passed, you will get a 2D array in return, one row per frequency.
return_t (bool): If True, then the function returns a tuple of
wavelet, time-basis, where time is the range from -duration/2 to
duration/2 in steps of dt.
taper (str or function): The window or tapering function to apply.
To use one of NumPy's functions, pass 'bartlett', 'blackman' (the
default), 'hamming', or 'hanning'; to apply no tapering, pass
'none'. To apply your own function, pass a function taking only
the length of the window and returning the window function.
Returns:
ndarray. sinc wavelet(s) with centre frequency f sampled on t.
"""
f = np.asanyarray(f).reshape(-1, 1)
t = np.arange(-duration/2., duration/2., dt)
t[t == 0] = 1e-12 # Avoid division by zero.
f[f == 0] = 1e-12 # Avoid division by zero.
w = np.squeeze(np.sin(2*np.pi*f*t) / (2*np.pi*f*t))
if taper:
funcs = {
'bartlett': np.bartlett,
'blackman': np.blackman,
'hamming': np.hamming,
'hanning': np.hanning,
'none': lambda x: x,
}
func = funcs.get(taper, taper)
w *= func(t.size)
if return_t:
RickerWavelet = namedtuple('RickerWavelet', ['amplitude', 'time'])
return RickerWavelet(w, t)
else:
return w
def ricker(duration, dt, f, return_t=False):
"""
Also known as the mexican hat wavelet, models the function:
A = (1-2 \pi^2 f^2 t^2) e^{-\pi^2 f^2 t^2}
If you pass a 1D array of frequencies, you get a wavelet bank in return.
Args:
duration (float): The length in seconds of the wavelet.
dt (float): The sample interval in seconds (often one of 0.001, 0.002,
or 0.004).
f (ndarray): Centre frequency of the wavelet in Hz. If a sequence is
passed, you will get a 2D array in return, one row per frequency.
return_t (bool): If True, then the function returns a tuple of
wavelet, time-basis, where time is the range from -duration/2 to
duration/2 in steps of dt.
Returns:
ndarray. Ricker wavelet(s) with centre frequency f sampled on t.
"""
f = np.asanyarray(f).reshape(-1, 1)
t = np.arange(-duration/2, duration/2, dt)
pft2 = (np.pi * f * t)**2
w = np.squeeze((1 - (2 * pft2)) * np.exp(-pft2))
if return_t:
RickerWavelet = namedtuple('RickerWavelet', ['amplitude', 'time'])
return RickerWavelet(w, t)
else:
return w
def sweep(duration, dt, f,
autocorrelate=True,
return_t=False,
taper='blackman',
**kwargs):
"""
Generates a linear frequency modulated wavelet (sweep). Wraps
scipy.signal.chirp, adding dimensions as necessary.
Args:
duration (float): The length in seconds of the wavelet.
dt (float): is the sample interval in seconds (usually 0.001, 0.002,
or 0.004)
f (ndarray): Any sequence like (f1, f2). A list of lists will create a
wavelet bank.
autocorrelate (bool): Whether to autocorrelate the sweep(s) to create
a wavelet. Default is `True`.
return_t (bool): If True, then the function returns a tuple of
wavelet, time-basis, where time is the range from -duration/2 to
duration/2 in steps of dt.
taper (str or function): The window or tapering function to apply.
To use one of NumPy's functions, pass 'bartlett', 'blackman' (the
default), 'hamming', or 'hanning'; to apply no tapering, pass
'none'. To apply your own function, pass a function taking only
the length of the window and returning the window function.
**kwargs: Further arguments are passed to scipy.signal.chirp. They are
`method` ('linear','quadratic','logarithmic'), `phi` (phase offset
in degrees), and `vertex_zero`.
Returns:
ndarray: The waveform.
"""
t0, t1 = -duration/2, duration/2
t = np.arange(t0, t1, dt)
f = np.asanyarray(f).reshape(-1, 1)
f1, f2 = f
c = [chirp(t, f1_+(f2_-f1_)/2., t1, f2_, **kwargs)
for f1_, f2_
in zip(f1, f2)]
if autocorrelate:
w = [np.correlate(c_, c_, mode='same') for c_ in c]
w = np.squeeze(w) / np.amax(w)
if taper:
funcs = {
'bartlett': np.bartlett,
'blackman': np.blackman,
'hamming': np.hamming,
'hanning': np.hanning,
'none': lambda x: x,
}
func = funcs.get(taper, taper)
w *= func(t.size)
if return_t:
Sweep = namedtuple('Sweep', ['amplitude', 'time'])
return Sweep(w, t)
else:
return w
def ormsby(duration, dt, f, return_t=False):
"""
The Ormsby wavelet requires four frequencies which together define a
trapezoid shape in the spectrum. The Ormsby wavelet has several sidelobes,
unlike Ricker wavelets.
Args:
duration (float): The length in seconds of the wavelet.
dt (float): The sample interval in seconds (usually 0.001, 0.002,
or 0.004).
f (ndarray): Sequence of form (f1, f2, f3, f4), or list of lists of
frequencies, which will return a 2D wavelet bank.
Returns:
ndarray: A vector containing the Ormsby wavelet, or a bank of them.
"""
f = np.asanyarray(f).reshape(-1, 1)
try:
f1, f2, f3, f4 = f
except ValueError:
raise ValueError("The last dimension must be 4")
def numerator(f, t):
return (np.sinc(f * t)**2) * ((np.pi * f) ** 2)
pf43 = (np.pi * f4) - (np.pi * f3)
pf21 = (np.pi * f2) - (np.pi * f1)
t = np.arange(-duration/2, duration/2, dt)
w = ((numerator(f4, t)/pf43) - (numerator(f3, t)/pf43) -
(numerator(f2, t)/pf21) + (numerator(f1, t)/pf21))
w = np.squeeze(w) / np.amax(w)
if return_t:
OrmsbyWavelet = namedtuple('OrmsbyWavelet', ['amplitude', 'time'])
return OrmsbyWavelet(w, t)
else:
return w
def rotate_phase(w, phi, degrees=False):
"""
Performs a phase rotation of wavelet or wavelet bank using:
The analytic signal can be written in the form S(t) = A(t)exp(j*theta(t))
where A(t) = magnitude(hilbert(w(t))) and theta(t) = angle(hilbert(w(t))
then a constant phase rotation phi would produce the analytic signal
S(t) = A(t)exp(j*(theta(t) + phi)). To get the non analytic signal
we take real(S(t)) == A(t)cos(theta(t) + phi)
== A(t)(cos(theta(t))cos(phi) - sin(theta(t))sin(phi)) <= trig idenity
== w(t)cos(phi) - h(t)sin(phi)
A = w(t)Cos(phi) - h(t)Sin(phi)
Where w(t) is the wavelet and h(t) is its Hilbert transform.
Args:
w (ndarray): The wavelet vector, can be a 2D wavelet bank.
phi (float): The phase rotation angle (in radians) to apply.
degrees (bool): If phi is in degrees not radians.
Returns:
The phase rotated signal (or bank of signals).
"""
if degrees:
phi = phi * np.pi / 180.0
a = hilbert(w, axis=0)
w = (np.real(a) * np.cos(phi) - np.imag(a) * np.sin(phi))
return w
|
[
"numpy.asanyarray",
"numpy.sinc",
"numpy.amax",
"numpy.imag",
"scipy.signal.chirp",
"numpy.arange",
"collections.namedtuple",
"scipy.signal.hilbert",
"numpy.sin",
"numpy.squeeze",
"numpy.exp",
"numpy.correlate",
"numpy.real",
"numpy.cos"
] |
[((1481, 1527), 'numpy.arange', 'np.arange', (['(-duration / 2.0)', '(duration / 2.0)', 'dt'], {}), '(-duration / 2.0, duration / 2.0, dt)\n', (1490, 1527), True, 'import numpy as np\n'), ((3024, 3066), 'numpy.arange', 'np.arange', (['(-duration / 2)', '(duration / 2)', 'dt'], {}), '(-duration / 2, duration / 2, dt)\n', (3033, 3066), True, 'import numpy as np\n'), ((4826, 4847), 'numpy.arange', 'np.arange', (['t0', 't1', 'dt'], {}), '(t0, t1, dt)\n', (4835, 4847), True, 'import numpy as np\n'), ((6512, 6554), 'numpy.arange', 'np.arange', (['(-duration / 2)', '(duration / 2)', 'dt'], {}), '(-duration / 2, duration / 2, dt)\n', (6521, 6554), True, 'import numpy as np\n'), ((7882, 7900), 'scipy.signal.hilbert', 'hilbert', (['w'], {'axis': '(0)'}), '(w, axis=0)\n', (7889, 7900), False, 'from scipy.signal import hilbert\n'), ((2003, 2053), 'collections.namedtuple', 'namedtuple', (['"""RickerWavelet"""', "['amplitude', 'time']"], {}), "('RickerWavelet', ['amplitude', 'time'])\n", (2013, 2053), False, 'from collections import namedtuple\n'), ((3188, 3238), 'collections.namedtuple', 'namedtuple', (['"""RickerWavelet"""', "['amplitude', 'time']"], {}), "('RickerWavelet', ['amplitude', 'time'])\n", (3198, 3238), False, 'from collections import namedtuple\n'), ((4914, 4966), 'scipy.signal.chirp', 'chirp', (['t', '(f1_ + (f2_ - f1_) / 2.0)', 't1', 'f2_'], {}), '(t, f1_ + (f2_ - f1_) / 2.0, t1, f2_, **kwargs)\n', (4919, 4966), False, 'from scipy.signal import chirp\n'), ((5099, 5112), 'numpy.squeeze', 'np.squeeze', (['w'], {}), '(w)\n', (5109, 5112), True, 'import numpy as np\n'), ((5115, 5125), 'numpy.amax', 'np.amax', (['w'], {}), '(w)\n', (5122, 5125), True, 'import numpy as np\n'), ((5445, 5487), 'collections.namedtuple', 'namedtuple', (['"""Sweep"""', "['amplitude', 'time']"], {}), "('Sweep', ['amplitude', 'time'])\n", (5455, 5487), False, 'from collections import namedtuple\n'), ((6682, 6695), 'numpy.squeeze', 'np.squeeze', (['w'], {}), '(w)\n', (6692, 6695), True, 
'import numpy as np\n'), ((6698, 6708), 'numpy.amax', 'np.amax', (['w'], {}), '(w)\n', (6705, 6708), True, 'import numpy as np\n'), ((6751, 6801), 'collections.namedtuple', 'namedtuple', (['"""OrmsbyWavelet"""', "['amplitude', 'time']"], {}), "('OrmsbyWavelet', ['amplitude', 'time'])\n", (6761, 6801), False, 'from collections import namedtuple\n'), ((1441, 1457), 'numpy.asanyarray', 'np.asanyarray', (['f'], {}), '(f)\n', (1454, 1457), True, 'import numpy as np\n'), ((1639, 1664), 'numpy.sin', 'np.sin', (['(2 * np.pi * f * t)'], {}), '(2 * np.pi * f * t)\n', (1645, 1664), True, 'import numpy as np\n'), ((2984, 3000), 'numpy.asanyarray', 'np.asanyarray', (['f'], {}), '(f)\n', (2997, 3000), True, 'import numpy as np\n'), ((3131, 3144), 'numpy.exp', 'np.exp', (['(-pft2)'], {}), '(-pft2)\n', (3137, 3144), True, 'import numpy as np\n'), ((4857, 4873), 'numpy.asanyarray', 'np.asanyarray', (['f'], {}), '(f)\n', (4870, 4873), True, 'import numpy as np\n'), ((5043, 5076), 'numpy.correlate', 'np.correlate', (['c_', 'c_'], {'mode': '"""same"""'}), "(c_, c_, mode='same')\n", (5055, 5076), True, 'import numpy as np\n'), ((6193, 6209), 'numpy.asanyarray', 'np.asanyarray', (['f'], {}), '(f)\n', (6206, 6209), True, 'import numpy as np\n'), ((7910, 7920), 'numpy.real', 'np.real', (['a'], {}), '(a)\n', (7917, 7920), True, 'import numpy as np\n'), ((7923, 7934), 'numpy.cos', 'np.cos', (['phi'], {}), '(phi)\n', (7929, 7934), True, 'import numpy as np\n'), ((7937, 7947), 'numpy.imag', 'np.imag', (['a'], {}), '(a)\n', (7944, 7947), True, 'import numpy as np\n'), ((7950, 7961), 'numpy.sin', 'np.sin', (['phi'], {}), '(phi)\n', (7956, 7961), True, 'import numpy as np\n'), ((6384, 6398), 'numpy.sinc', 'np.sinc', (['(f * t)'], {}), '(f * t)\n', (6391, 6398), True, 'import numpy as np\n')]
|
#! /usr/bin/python
# -*- coding: utf-8 -*-
import os
from tensorlayer import logging
from tensorlayer import visualize
from tensorlayer.files.utils import del_file
from tensorlayer.files.utils import folder_exists
from tensorlayer.files.utils import load_file_list
from tensorlayer.files.utils import maybe_download_and_extract
from tensorlayer.files.utils import natural_keys
from tensorlayer.files.utils import read_file
__all__ = ['load_flickr25k_dataset']
def load_flickr25k_dataset(tag='sky', path="data", n_threads=50, printable=False):
"""Load Flickr25K dataset.
Returns a list of images by a given tag from Flick25k dataset,
it will download Flickr25k from `the official website <http://press.liacs.nl/mirflickr/mirdownload.html>`__
at the first time you use it.
Parameters
------------
tag : str or None
What images to return.
- If you want to get images with tag, use string like 'dog', 'red', see `Flickr Search <https://www.flickr.com/search/>`__.
- If you want to get all images, set to ``None``.
path : str
The path that the data is downloaded to, defaults is ``data/flickr25k/``.
n_threads : int
The number of thread to read image.
printable : boolean
Whether to print infomation when reading images, default is ``False``.
Examples
-----------
Get images with tag of sky
>>> images = tl.files.load_flickr25k_dataset(tag='sky')
Get all images
>>> images = tl.files.load_flickr25k_dataset(tag=None, n_threads=100, printable=True)
"""
path = os.path.join(path, 'flickr25k')
filename = 'mirflickr25k.zip'
url = 'http://press.liacs.nl/mirflickr/mirflickr25k/'
# download dataset
if folder_exists(os.path.join(path, "mirflickr")) is False:
logging.info("[*] Flickr25k is nonexistent in {}".format(path))
maybe_download_and_extract(filename, path, url, extract=True)
del_file(os.path.join(path, filename))
# return images by the given tag.
# 1. image path list
folder_imgs = os.path.join(path, "mirflickr")
path_imgs = load_file_list(path=folder_imgs, regx='\\.jpg', printable=False)
path_imgs.sort(key=natural_keys)
# 2. tag path list
folder_tags = os.path.join(path, "mirflickr", "meta", "tags")
path_tags = load_file_list(path=folder_tags, regx='\\.txt', printable=False)
path_tags.sort(key=natural_keys)
# 3. select images
if tag is None:
logging.info("[Flickr25k] reading all images")
else:
logging.info("[Flickr25k] reading images with tag: {}".format(tag))
images_list = []
for idx, _v in enumerate(path_tags):
tags = read_file(os.path.join(folder_tags, path_tags[idx])).split('\n')
# logging.info(idx+1, tags)
if tag is None or tag in tags:
images_list.append(path_imgs[idx])
images = visualize.read_images(images_list, folder_imgs, n_threads=n_threads, printable=printable)
return images
|
[
"tensorlayer.logging.info",
"tensorlayer.files.utils.maybe_download_and_extract",
"tensorlayer.visualize.read_images",
"tensorlayer.files.utils.load_file_list",
"os.path.join"
] |
[((1596, 1627), 'os.path.join', 'os.path.join', (['path', '"""flickr25k"""'], {}), "(path, 'flickr25k')\n", (1608, 1627), False, 'import os\n'), ((2080, 2111), 'os.path.join', 'os.path.join', (['path', '"""mirflickr"""'], {}), "(path, 'mirflickr')\n", (2092, 2111), False, 'import os\n'), ((2128, 2192), 'tensorlayer.files.utils.load_file_list', 'load_file_list', ([], {'path': 'folder_imgs', 'regx': '"""\\\\.jpg"""', 'printable': '(False)'}), "(path=folder_imgs, regx='\\\\.jpg', printable=False)\n", (2142, 2192), False, 'from tensorlayer.files.utils import load_file_list\n'), ((2272, 2319), 'os.path.join', 'os.path.join', (['path', '"""mirflickr"""', '"""meta"""', '"""tags"""'], {}), "(path, 'mirflickr', 'meta', 'tags')\n", (2284, 2319), False, 'import os\n'), ((2336, 2400), 'tensorlayer.files.utils.load_file_list', 'load_file_list', ([], {'path': 'folder_tags', 'regx': '"""\\\\.txt"""', 'printable': '(False)'}), "(path=folder_tags, regx='\\\\.txt', printable=False)\n", (2350, 2400), False, 'from tensorlayer.files.utils import load_file_list\n'), ((2901, 2994), 'tensorlayer.visualize.read_images', 'visualize.read_images', (['images_list', 'folder_imgs'], {'n_threads': 'n_threads', 'printable': 'printable'}), '(images_list, folder_imgs, n_threads=n_threads,\n printable=printable)\n', (2922, 2994), False, 'from tensorlayer import visualize\n'), ((1889, 1950), 'tensorlayer.files.utils.maybe_download_and_extract', 'maybe_download_and_extract', (['filename', 'path', 'url'], {'extract': '(True)'}), '(filename, path, url, extract=True)\n', (1915, 1950), False, 'from tensorlayer.files.utils import maybe_download_and_extract\n'), ((2490, 2536), 'tensorlayer.logging.info', 'logging.info', (['"""[Flickr25k] reading all images"""'], {}), "('[Flickr25k] reading all images')\n", (2502, 2536), False, 'from tensorlayer import logging\n'), ((1766, 1797), 'os.path.join', 'os.path.join', (['path', '"""mirflickr"""'], {}), "(path, 'mirflickr')\n", (1778, 1797), False, 'import os\n'), 
((1968, 1996), 'os.path.join', 'os.path.join', (['path', 'filename'], {}), '(path, filename)\n', (1980, 1996), False, 'import os\n'), ((2710, 2751), 'os.path.join', 'os.path.join', (['folder_tags', 'path_tags[idx]'], {}), '(folder_tags, path_tags[idx])\n', (2722, 2751), False, 'import os\n')]
|
#!/usr/bin/python3
from distutils.core import setup
setup(name='termpdf.py',
version='0.1.0',
description='Graphical pdf reader that works inside the kitty terminal',
author='<NAME>',
author_email='<EMAIL>',
url='https://github.com/dsanson/termpdf.py',
scripts=['termpdf.py'],
requires=[
'PyMuPDF',
'pyperclip',
'pdfrw',
'pybtex',
'pynvim',
'roman',
'pagelabels'
]
)
|
[
"distutils.core.setup"
] |
[((54, 388), 'distutils.core.setup', 'setup', ([], {'name': '"""termpdf.py"""', 'version': '"""0.1.0"""', 'description': '"""Graphical pdf reader that works inside the kitty terminal"""', 'author': '"""<NAME>"""', 'author_email': '"""<EMAIL>"""', 'url': '"""https://github.com/dsanson/termpdf.py"""', 'scripts': "['termpdf.py']", 'requires': "['PyMuPDF', 'pyperclip', 'pdfrw', 'pybtex', 'pynvim', 'roman', 'pagelabels']"}), "(name='termpdf.py', version='0.1.0', description=\n 'Graphical pdf reader that works inside the kitty terminal', author=\n '<NAME>', author_email='<EMAIL>', url=\n 'https://github.com/dsanson/termpdf.py', scripts=['termpdf.py'],\n requires=['PyMuPDF', 'pyperclip', 'pdfrw', 'pybtex', 'pynvim', 'roman',\n 'pagelabels'])\n", (59, 388), False, 'from distutils.core import setup\n')]
|
# Copyright 2020 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Simple client to send profiling request to ModelServer."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
from tensorflow.python.profiler import profiler_client
def main(argv):
    """Send a profiling request to a running ModelServer.

    Args (all optional, positional on the command line):
        argv[1]: server address (default 'localhost:8500').
        argv[2]: log directory for the captured trace (default '/tmp').
        argv[3]: trace duration in milliseconds (default 2000).
    """
    server = argv[1] if len(argv) > 1 else 'localhost:8500'
    logdir = argv[2] if len(argv) > 2 else '/tmp'
    # argv entries are strings; profiler_client.trace expects an integer
    # duration, so convert explicitly (the old code passed the raw string
    # whenever argv[3] was supplied).
    duration_ms = int(argv[3]) if len(argv) > 3 else 2000
    profiler_client.trace(server, logdir, duration_ms)


if __name__ == '__main__':
    tf.compat.v1.app.run()
|
[
"tensorflow.python.profiler.profiler_client.trace",
"tensorflow.compat.v1.app.run"
] |
[((1107, 1157), 'tensorflow.python.profiler.profiler_client.trace', 'profiler_client.trace', (['server', 'logdir', 'duration_ms'], {}), '(server, logdir, duration_ms)\n', (1128, 1157), False, 'from tensorflow.python.profiler import profiler_client\n'), ((1189, 1211), 'tensorflow.compat.v1.app.run', 'tf.compat.v1.app.run', ([], {}), '()\n', (1209, 1211), True, 'import tensorflow as tf\n')]
|
#!/usr/bin/env python
# encoding: utf-8
"""Knowledge Management CLI."""
import sys
import os
import arrow
import logging
import sqlite3
from logging.handlers import TimedRotatingFileHandler
import click
from omegaconf import OmegaConf
from command import hourly, daily, robustify, summarize
from action import twitter, wayback, obsidian, mastodon
from source import pinboard
from source import hypothesis
class Details:  # pylint: disable=too-few-public-methods
    """Application-specific context shared by all kmtools CLI commands.

    Carries the logger, the parsed OmegaConf configuration, the registered
    source/action dispatchers, and a lazily-opened sqlite connection.
    """

    def __init__(
        self,
        logger_handle=None,
        dry_run=False,
        config=None,
        sources=None,
        actions=None,
    ):
        # NOTE(review): `config` must provide config.obsidian.* attributes;
        # passing the default None would raise AttributeError below.
        self.logger = logger_handle
        self.dry_run = dry_run
        self.config = config
        self.sources = sources
        self.actions = actions
        # The sqlite connection is opened lazily by the kmtools_db property.
        self.kmtools_db_conn = None
        self.obsidian = obsidian.Obsidian(
            config.obsidian.db_directory,
            config.obsidian.daily_directory,
            config.obsidian.source_directory,
        )

    @property
    def kmtools_db(self):
        """Lazily open (and cache) the KM-Tools sqlite connection.

        Uses sqlite3.Row rows, starts an exclusive transaction immediately,
        and routes SQL tracing to the debug logger.

        Raises:
            RuntimeError: if config.kmtools.dbfile is unset/empty.
        """
        if self.kmtools_db_conn:
            return self.kmtools_db_conn
        if self.config.kmtools.dbfile:
            self.kmtools_db_conn = sqlite3.connect(self.config.kmtools.dbfile)
            self.kmtools_db_conn.row_factory = sqlite3.Row
            # Exclusive lock up front so concurrent kmtools runs cannot
            # interleave writes to the same database.
            self.kmtools_db_conn.execute("BEGIN EXCLUSIVE")
            self.kmtools_db_conn.set_trace_callback(self.logger.debug)
        else:
            raise RuntimeError("KM-Tools database location not set")
        return self.kmtools_db_conn

    def output_fd(self, file):
        """Route output depending on whether this is a dry run or not
        :param file: full path to output file
        :return: file descriptor; a duplicate of stdout when dry_run,
                 otherwise the file opened in append mode
        """
        if self.dry_run:
            click.secho(f">>> Would write to {file} >>>", fg="green")
            # Duplicate stdout so the caller may close the handle safely
            # without closing the real sys.stdout.
            fd = os.fdopen(os.dup(sys.stdout.fileno()), "w")
        else:
            fd = open(file, "a")
        return fd
@click.group(context_settings={"help_option_names": ["-h", "--help"]})
@click.option("--dry-run", is_flag=True)
@click.option("-d", "--debug", is_flag=True, default=False, help="turn on debugging")
@click.option(
    "-v", "--verbose", is_flag=True, default=False, help="turn on verbose messages"
)
@click.option("-l", "--logfile", default=None, help="log file path")
@click.pass_context
def cli(ctx, dry_run, debug, verbose, logfile):
    """Root command line function: load config, set up logging, register
    source/action dispatchers, and stash everything in the click context."""
    config = OmegaConf.load("config.yml")
    OmegaConf.set_readonly(config, True)

    log = logging.getLogger(__name__)
    handler = None
    if sys.stdin and sys.stdin.isatty():
        # Interactive session: log to stderr unless a logfile was requested.
        if not logfile:
            handler = logging.StreamHandler(sys.stderr)
        else:
            try:
                handler = logging.FileHandler(logfile)
            except IOError:
                log.error("Could not write to %s, falling back to stdout", logfile)
    else:
        # Non-interactive run (e.g. cron): rotate nightly, keep 8 backups.
        logpath = logfile if logfile else config.kmtools.logfile
        if logpath:
            try:
                handler = TimedRotatingFileHandler(
                    logpath, when="midnight", backupCount=8
                )
            except IOError:
                log.error("Could not write to %s, falling back to stdout", logpath)
    if handler is None:
        # Bug fix: the fallback promised by the error messages above was
        # never implemented — `handler` stayed unbound and the next line
        # crashed with NameError. Fall back to stdout as advertised.
        handler = logging.StreamHandler(sys.stdout)
    formatter = logging.Formatter(
        "%(asctime)s - %(levelname)s - %(module)s@%(lineno)s - %(message)s"
    )
    handler.setFormatter(formatter)
    log.addHandler(handler)
    if debug:
        log.setLevel(logging.DEBUG)
    elif verbose:
        log.setLevel(logging.INFO)
    else:
        log.setLevel(logging.WARNING)

    # Register source dispatchers
    sources = {}
    sources["Pinboard"] = pinboard.register_source()
    sources["Hypothesis"] = hypothesis.register_source()

    # Register actions
    actions = {}
    actions["Twitter"] = twitter.register_hourly_action()
    actions["Wayback"] = wayback.register_hourly_action()
    actions["Mastodon"] = mastodon.register_hourly_action()

    ctx.obj = Details(log, dry_run, config, sources, actions)
# Register commands
# Each imported module exposes a click sub-command (or group) which is
# attached to the root `cli` group here.
cli.add_command(pinboard.pinboard)
cli.add_command(hypothesis.hypothesis)
cli.add_command(wayback.wayback)
cli.add_command(obsidian.obsidian)
cli.add_command(mastodon.mastodon)
cli.add_command(hourly.hourly)
cli.add_command(daily.daily)
cli.add_command(robustify.robustify)
cli.add_command(summarize.summarize_command)

# pylint: disable=no-value-for-parameter
if __name__ == "__main__":
    cli()  # click supplies the decorated parameters from sys.argv
|
[
"click.option",
"omegaconf.OmegaConf.set_readonly",
"logging.Formatter",
"source.hypothesis.register_source",
"action.obsidian.Obsidian",
"sys.stdin.isatty",
"logging.FileHandler",
"sys.stdout.fileno",
"source.pinboard.register_source",
"logging.handlers.TimedRotatingFileHandler",
"click.group",
"action.wayback.register_hourly_action",
"click.secho",
"logging.StreamHandler",
"sqlite3.connect",
"action.mastodon.register_hourly_action",
"action.twitter.register_hourly_action",
"omegaconf.OmegaConf.load",
"logging.getLogger"
] |
[((2083, 2152), 'click.group', 'click.group', ([], {'context_settings': "{'help_option_names': ['-h', '--help']}"}), "(context_settings={'help_option_names': ['-h', '--help']})\n", (2094, 2152), False, 'import click\n'), ((2154, 2193), 'click.option', 'click.option', (['"""--dry-run"""'], {'is_flag': '(True)'}), "('--dry-run', is_flag=True)\n", (2166, 2193), False, 'import click\n'), ((2195, 2284), 'click.option', 'click.option', (['"""-d"""', '"""--debug"""'], {'is_flag': '(True)', 'default': '(False)', 'help': '"""turn on debugging"""'}), "('-d', '--debug', is_flag=True, default=False, help=\n 'turn on debugging')\n", (2207, 2284), False, 'import click\n'), ((2281, 2379), 'click.option', 'click.option', (['"""-v"""', '"""--verbose"""'], {'is_flag': '(True)', 'default': '(False)', 'help': '"""turn on verbose messages"""'}), "('-v', '--verbose', is_flag=True, default=False, help=\n 'turn on verbose messages')\n", (2293, 2379), False, 'import click\n'), ((2382, 2449), 'click.option', 'click.option', (['"""-l"""', '"""--logfile"""'], {'default': 'None', 'help': '"""log file path"""'}), "('-l', '--logfile', default=None, help='log file path')\n", (2394, 2449), False, 'import click\n'), ((2568, 2596), 'omegaconf.OmegaConf.load', 'OmegaConf.load', (['"""config.yml"""'], {}), "('config.yml')\n", (2582, 2596), False, 'from omegaconf import OmegaConf\n'), ((2601, 2637), 'omegaconf.OmegaConf.set_readonly', 'OmegaConf.set_readonly', (['config', '(True)'], {}), '(config, True)\n', (2623, 2637), False, 'from omegaconf import OmegaConf\n'), ((2649, 2676), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (2666, 2676), False, 'import logging\n'), ((3367, 3458), 'logging.Formatter', 'logging.Formatter', (['"""%(asctime)s - %(levelname)s - %(module)s@%(lineno)s - %(message)s"""'], {}), "(\n '%(asctime)s - %(levelname)s - %(module)s@%(lineno)s - %(message)s')\n", (3384, 3458), False, 'import logging\n'), ((3762, 3788), 'source.pinboard.register_source', 
'pinboard.register_source', ([], {}), '()\n', (3786, 3788), False, 'from source import pinboard\n'), ((3817, 3845), 'source.hypothesis.register_source', 'hypothesis.register_source', ([], {}), '()\n', (3843, 3845), False, 'from source import hypothesis\n'), ((3912, 3944), 'action.twitter.register_hourly_action', 'twitter.register_hourly_action', ([], {}), '()\n', (3942, 3944), False, 'from action import twitter, wayback, obsidian, mastodon\n'), ((3970, 4002), 'action.wayback.register_hourly_action', 'wayback.register_hourly_action', ([], {}), '()\n', (4000, 4002), False, 'from action import twitter, wayback, obsidian, mastodon\n'), ((4029, 4062), 'action.mastodon.register_hourly_action', 'mastodon.register_hourly_action', ([], {}), '()\n', (4060, 4062), False, 'from action import twitter, wayback, obsidian, mastodon\n'), ((878, 997), 'action.obsidian.Obsidian', 'obsidian.Obsidian', (['config.obsidian.db_directory', 'config.obsidian.daily_directory', 'config.obsidian.source_directory'], {}), '(config.obsidian.db_directory, config.obsidian.\n daily_directory, config.obsidian.source_directory)\n', (895, 997), False, 'from action import twitter, wayback, obsidian, mastodon\n'), ((2698, 2716), 'sys.stdin.isatty', 'sys.stdin.isatty', ([], {}), '()\n', (2714, 2716), False, 'import sys\n'), ((1228, 1271), 'sqlite3.connect', 'sqlite3.connect', (['self.config.kmtools.dbfile'], {}), '(self.config.kmtools.dbfile)\n', (1243, 1271), False, 'import sqlite3\n'), ((1895, 1952), 'click.secho', 'click.secho', (['f""">>> Would write to {file} >>>"""'], {'fg': '"""green"""'}), "(f'>>> Would write to {file} >>>', fg='green')\n", (1906, 1952), False, 'import click\n'), ((2764, 2797), 'logging.StreamHandler', 'logging.StreamHandler', (['sys.stderr'], {}), '(sys.stderr)\n', (2785, 2797), False, 'import logging\n'), ((2855, 2883), 'logging.FileHandler', 'logging.FileHandler', (['logfile'], {}), '(logfile)\n', (2874, 2883), False, 'import logging\n'), ((3134, 3199), 
'logging.handlers.TimedRotatingFileHandler', 'TimedRotatingFileHandler', (['logpath'], {'when': '"""midnight"""', 'backupCount': '(8)'}), "(logpath, when='midnight', backupCount=8)\n", (3158, 3199), False, 'from logging.handlers import TimedRotatingFileHandler\n'), ((1987, 2006), 'sys.stdout.fileno', 'sys.stdout.fileno', ([], {}), '()\n', (2004, 2006), False, 'import sys\n')]
|
# -*- coding: utf-8 -*-
"""
Interactive EDX background refitter
Created on Wed Oct 11 00:44:29 2017
@author: tkc
"""
import sys
import numpy as np
import tkinter as tk
import os
import tkinter.messagebox as tkmess
from tkinter import filedialog
import matplotlib as mpl # using path, figure, rcParams
from matplotlib.backends.backend_tkagg import FigureCanvasTkAgg, NavigationToolbar2TkAgg
from matplotlib.widgets import Lasso
from matplotlib import path
# import defined data classes
if 'C:\\Users\\tkc\\Documents\\Python_Scripts\\EDX' not in sys.path:
sys.path.append('C:\\Users\\tkc\\Documents\\Python_Scripts\\EDX')
from EDX_data_classes import EDXfile, EDXdataset
# Figure size in inches (width, height) for the main spectrum plot.
PLOT_SIZE = (10,6) # 8, 5 or
# matplotlib rcParams overrides: dark text on white figure background,
# with lightblue tick marks.
MPL_STYLE = {
    "text.color":"k",
    "axes.labelcolor":"black",
    "axes.edgecolor":"0.4",
    "axes.facecolor":"white",
    "xtick.color": "lightblue",
    "ytick.color": "lightblue",
    "figure.facecolor":"white",
    "figure.edgecolor":"white",
    "text.usetex":False
    }
# Apply the style globally before any figures are created.
mpl.rcParams.update(MPL_STYLE)
#-------------------Misc.---------------------#
def launch_refitter():
    ''' Launcher function for tk refitter GUI.

    Creates the root window, sizes it to fill the screen (minus taskbar),
    asks for the EDX data directory, then hands off to GUIMain. '''
    root = tk.Tk()
    root.wm_title("EDX refitter")
    screen_w = root.winfo_screenwidth()
    screen_h = root.winfo_screenheight()
    width = int(1.0 * screen_w)
    height = int(1.0 * screen_h) - 100  # leave room for the taskbar
    xpos = int(0.00 * screen_w)  # top-left corner
    ypos = int(0.00 * screen_h)
    root.geometry('%dx%d+%d+%d' % (width, height, xpos, ypos))
    # choose EDX data directory (starting at current working directory)
    currdir = filedialog.askdirectory(initialdir=os.getcwd(),
        title='Select EDX data directory')
    GUIMain(root, currdir)
    root.mainloop()
    return
class GUIMain():
    ''' Main container for plotter, options (at right), and fileloader (bottom)
    pass current working directory as default directory'''
    def __init__(self,root, currdir):
        self.root = root
        self.root.wm_title("EDX refitter ")
        # Layout: the top row holds the plot (left) and the refit controls
        # (right); the bottom row holds the project loader.
        self.top_frame = tk.Frame(self.root)
        self.top_frame.pack(side=tk.TOP)
        self.bottom_frame = tk.Frame(self.root)
        self.bottom_frame.pack(side=tk.BOTTOM)
        self.plot_frame = tk.Frame(self.top_frame)
        self.plot_frame.pack(side=tk.LEFT)
        self.refit_frame = tk.Frame(self.top_frame)
        self.refit_frame .pack(side=tk.LEFT)
        self.loader_frame = tk.Frame(self.bottom_frame)
        self.loader_frame.pack(side=tk.LEFT,fill=tk.BOTH)
        # Child controllers; each gets its host frame plus this GUIMain so
        # they can reach one another through self.parent.
        self.plotter = GUIPlotter(self.plot_frame,self)
        self.refitter = GUIRefitter(self.refit_frame,self)
        self.loader = GUIprojectloader(self.loader_frame,self, currdir)
class NavSelectToolbar(NavigationToolbar2TkAgg):
    ''' Custom matplotlib toolbar w/ lasso pt remover and point picker
    parent is GUIplotter
    '''
    def __init__(self, canvas, root, parent):
        self.canvas = canvas
        self.root = root
        self.parent = parent  # plotter is parent
        self.ax = self.parent.ax  # axes needed for interaction
        self.xys = None  # xy vals of backfit points; set in onpresslasso
        # Bug fix: this attribute used to be initialized as `self.select`,
        # while callbacklasso() writes and removepts() reads `self.selected`,
        # so clicking "Remove pts" before any lasso crashed with
        # AttributeError instead of printing "No active lassoed points".
        self.selected = None  # lasso-selected point indices for removal
        # Generic mpl toolbar using tkagg (with standard buttons)
        NavigationToolbar2TkAgg.__init__(self, canvas, root)
        # Custom buttons: lasso select, remove selection, add point, and
        # show the fitted background segments.
        self.lasso_button = tk.Button(master=self, text='lasso', padx=2, pady=2,
            command=self.startlasso)
        self.lasso_button.pack(side=tk.LEFT, fill="y")
        self.remove_pts_button = tk.Button(master=self, text='Remove pts',
            padx=2, pady=2, command=self.removepts)
        self.remove_pts_button.pack(side=tk.LEFT, fill="y")
        self.picker_button = tk.Button(master=self, text='add point', padx=2,
            pady=2, command=self.startpicker)
        self.picker_button.pack(side=tk.LEFT, fill="y")
        self.show_button = tk.Button(master=self, text='Show backfit segments',
            padx=2, pady=2, command=self.showbackseg)
        self.show_button.pack(side=tk.LEFT, fill="y")
        print('toolbar loaded')

    def startlasso(self):
        ''' Arm the lasso: the next button press on the canvas starts it. '''
        print('startlasso called')
        self.cid = self.canvas.mpl_connect('button_press_event', self.onpresslasso)
        print('end of startlasso')

    def onpresslasso(self, event):
        ''' Create lasso when button pressed on active canvas/axes '''
        print('onpress lasso called')
        self.xys = self.parent.xy  # passed from plotter (parent)
        print('Length of xys is', len(self.xys))
        self.lasso = Lasso(event.inaxes, (event.xdata, event.ydata), self.callbacklasso)
        print('end of onpress lasso')

    def callbacklasso(self, verts):
        ''' Lasso finished: record indices of the enclosed backfit points. '''
        print('callback called')
        p = path.Path(verts)
        # boolean array: which of the scatter points fall inside the lasso
        ind = p.contains_points(self.xys)
        self.selected = [i for i in range(0, len(self.xys)) if ind[i] == True]
        print('Selected points are:', self.selected)
        self.canvas.draw_idle()
        del self.lasso
        self.canvas.mpl_disconnect(self.cid)  # disconnect lasso tool
        print('finished with callback')

    def startpicker(self):
        ''' Arm the point picker: the next canvas press adds the nearest
        spectral point to the background fit. '''
        print('startpicker called')
        self.cid = self.canvas.mpl_connect('button_press_event', self.onpresspick)
        print('end of startlasso')

    def onpresspick(self, event):
        ''' Forward the clicked x/y to the plotter, which adds the nearest
        spectral data point to the background points; one-shot connection. '''
        print('onpresspick called')
        print('X/y is', event.xdata, event.ydata)
        self.parent.point_add_callback(event.xdata, event.ydata)
        self.canvas.mpl_disconnect(self.cid)

    def showbackseg(self):
        ''' adds separate background fitted segments to plot in different colors
        '''
        print('showbackseg called')
        # Shows current fit values over different ranges in different colors
        self.parent.showfitsegments()

    def removepts(self):
        ''' Remove the points currently selected by the lasso (if any). '''
        print('remove pts called')
        if self.selected is None:
            print('No active lassoed points')
            return
        print('Chosen indices are', self.selected)
        # Call point removal method
        self.parent.points_removed_callback(self.selected)
        print('end of removepts call')
class GUIPlotter():
    ''' Spectrum plot window: draws the EDX spectrum, the stored background
    fit, and the background-fit points, and hosts the interactive toolbar. '''
    def __init__(self,root, parent):
        self.root = root
        self.parent = parent
        self.xy = None # used by lasso selector (init below in plot_backfitpts)
        self.backsubset = None  # dataframe subset of rows used in background fits
        self.figure = mpl.figure.Figure(figsize=PLOT_SIZE, dpi=100)
        self.ax = self.figure.add_subplot(111)
        self.figure.subplots_adjust(bottom=0.15,right=0.95,top=0.95)
        self.canvas = FigureCanvasTkAgg(self.figure,self.root)
        # Custom navselecttoolbar w/ interactive buttons
        self.toolbar = NavSelectToolbar(self.canvas,self.root,self)
        self.toolbar.update()
        self.plot_widget = self.canvas.get_tk_widget()
        self.plot_widget.pack(side=tk.TOP, fill=tk.BOTH, expand=1)
        self.toolbar.pack(side=tk.TOP, fill=tk.BOTH, expand=1)
        self.EDXfile = None
        self.canvas.show()
    def associate_EDXfile(self, EDXfile):
        ''' Associate EDX file created/loaded in GUIrefitter with GUIplotter
        called from GUIrefitter; triggers a full replot '''
        self.EDXfile = EDXfile
        self.plot() #
    def plot(self,**kwargs):
        ''' Redraw spectrum (black), existing background fit (red), and the
        background-fit points (blue scatter); no-op if no file loaded. '''
        if self.EDXfile is None:return
        self.ax.cla() # clear axes
        self.current_plot = self.ax.plot(self.EDXfile.EDXdf['Energy'],
            self.EDXfile.EDXdf['Counts'], color='k', picker=True,**kwargs)
        # add existing background fit in red
        self.ax.plot(self.EDXfile.EDXdf['Energy'], self.EDXfile.EDXdf['Backfit'],
            color='r', picker=True,**kwargs)
        # Now plot backfitpts as scatter
        self.plot_backfitpts()
        self.canvas.show()
    def plot_backfitpts(self):
        ''' Get subset of points used for background fits '''
        # Make big list of all energy vals (in eV) used across all fit regions
        bpts=[]
        for i, ptlist in enumerate(self.EDXfile.backfitpts):
            bpts.extend(ptlist)
        self.backsubset=self.EDXfile.EDXdf[self.EDXfile.EDXdf.index.isin(bpts)]
        # plot backfit subset from filtered EDXdf dataframe
        self.backsubset.plot.scatter(x='Energy', y='Counts', color='b', ax=self.ax)
        # Initialize xy vals for use with lasso or selector (list of x,y tuples)
        self.xy=[]
        # NOTE: self.xy must stay in the same row order as backsubset — the
        # lasso callback maps selected indices back through this list.
        for index, row in self.backsubset.iterrows():
            self.xy.append((row.Energy, row.Counts))
    def point_add_callback(self, xpoint, ypoint):
        ''' Linked to pick_button in NavSelectToolbar.. find nearest spectral
        datapoint and add to backfit points list (delegated to refitter) '''
        if self.EDXfile is None:return
        print('point_add_callback called')
        self.parent.refitter.add_pts(xpoint, ypoint)
    def points_removed_callback(self, inds):
        ''' Linked to lasso_button in NavSelectToolbar
        inds are lasso selected points (backpts xys in same order as original);
        translated here from scatter positions to dataframe index numbers '''
        if self.EDXfile is None:
            return
        print('lasso callback called')
        badxvals=[]
        for i, index in enumerate(inds):
            badxvals.append(self.xy[index][0])
        print('Bad xvals are:', ",".join([str(i) for i in badxvals]))
        # background points stored as index numbers (starting at 0)
        # conversion is xval (in keV) = indexnum*.01 +0.01)
        badslice=self.backsubset[self.backsubset['Energy'].isin(badxvals)]
        badinds=badslice.index.tolist() # Need to remove these from
        # TODO Use energy vals or use index numbers
        print('Bad ind #s are', ','.join([str(i) for i in badinds]))
        self.parent.refitter.remove_badpts(badinds)
    def showfitsegments(self):
        ''' Plot separate fitted segments in different colors on plot over
        appropriate ranges; fit coefficients come from EDXfile.backfitparams '''
        if self.EDXfile is None:
            return
        print('Running showfitsegments')
        colorlist=['g','c','m','olive','pink','purple']
        for i, ft in enumerate(self.EDXfile.fitorders):
            # Polynomial coefficients for this region (A..D); unused terms
            # are ignored by the lower-order fit types below.
            A=self.EDXfile.backfitparams[i][0]
            B=self.EDXfile.backfitparams[i][1]
            C=self.EDXfile.backfitparams[i][2]
            D=self.EDXfile.backfitparams[i][3]
            # x range in keV: backfitpts hold index numbers, /100 converts
            # to keV (consistent with the 0.01 keV/channel note above)
            xvals=np.arange(min(self.EDXfile.backfitpts[i])/100, max(self.EDXfile.backfitpts[i])/100, 0.1)
            if ft=='linear':
                print('plotting linear')
                self.ax.plot(xvals, A*xvals+B, color=colorlist[i])
            elif ft=='parabola':
                self.ax.plot(xvals, A*xvals**2+B*xvals+C, color=colorlist[i])
            elif ft=='cubic':
                self.ax.plot(xvals, A*xvals**3+B*xvals**2+C*xvals+D, color=colorlist[i])
        self.canvas.show() # Now show these lines
class GUIprojectloader():
    ''' Picks directory and loads main Auger param files
    needs current path (which should be set to working data directory) '''
    def __init__(self,root, parent, currdir):
        self.root = root
        self.parent = parent # GUImain is parent
        # Top row: directory label/entry + browse button.
        self.top_frame = tk.Frame(self.root)
        self.top_frame.pack(side=tk.TOP,anchor=tk.W)
        # Bottom row: explicit "load" button.
        self.bottom_frame = tk.Frame(self.root)
        self.bottom_frame.pack(side=tk.BOTTOM,fill=tk.BOTH,expand=1)
        tk.Label(self.top_frame,text="Directory:",padx=8,pady=2,
            height=1).pack(side=tk.LEFT,anchor=tk.W)
        self.directory_entry = tk.Entry(self.top_frame,width=90,bg="lightblue",
            fg="black",highlightcolor="lightblue",insertbackground="black",
            highlightthickness=2)
        self.directory_entry.pack(side=tk.LEFT,fill=tk.BOTH,expand=1,anchor=tk.W)
        self.directory_entry.insert(0,currdir)
        tk.Button(self.top_frame,text="Browse",command=self.launch_dir_finder
            ).pack(side=tk.LEFT,fill=tk.BOTH,expand=1,anchor=tk.W)
        self.load_button = tk.Button(self.bottom_frame,text="Load EDX project folder",
            width=60, command=self.load_EDXdataset)
        self.load_button.pack(side=tk.BOTTOM,expand=1,anchor=tk.CENTER)
        # Immediately load the directory chosen in the launcher dialog.
        self.autoload_EDXdataset(currdir)
    def autoload_EDXdataset(self, currdir):
        ''' Autoload directory chosen in launcher
        '''
        EDXdata = EDXdataset(currdir)
        # pass to GUIrefitter and set spectral selector spinbox values
        self.parent.refitter.associate_EDXdataset(EDXdata)
    def load_EDXdataset(self):
        ''' Load standard AES files (paramlog) with data returned to a DataManager '''
        directory = self.directory_entry.get()
        EDXdata = EDXdataset(directory)
        # pass to GUIrefitter and set spectral selector spinbox values
        self.parent.refitter.associate_EDXdataset(EDXdata)
        # TODO associate EDXdataset with GUIplotter (or just selected EDXfile)
    def launch_dir_finder(self):
        ''' Open a directory chooser and copy the result into the entry box. '''
        directory = filedialog.askdirectory()
        self.directory_entry.delete(0,tk.END)
        self.directory_entry.insert(0,directory)
class GUIRefitter():
''' Parent is GUImain, manages EDXfile displayed in GUIplotter
handles addition/removal of points for background (re)fitting'''
def __init__(self,root,parent):
self.root = root
self.parent = parent
self.EDXdataset = None # created in GUIprojectloader but associated here
# Instance of EDXfile local to the refitter
self.EDXfile = None
self.specselect_frame = tk.Frame(self.root,pady=10)
self.specselect_frame.pack(side=tk.TOP,fill=tk.X,expand=1)
self.currfile_frame = tk.Frame(self.root,pady=10)
self.currfile_frame.pack(side=tk.TOP,fill=tk.X,expand=1)
self.misc_opts_frame = tk.Frame(self.root,pady=10)
self.misc_opts_frame.pack(side=tk.TOP,fill=tk.X,expand=1)
# Frame for background fit ev ranges and points selected
self.backregs_frame = tk.Frame(self.root,pady=10)
self.backregs_frame.pack(side=tk.TOP,fill=tk.X,expand=1)
# Simple spinbox for file selection in specselect frame
self.specspin=tk.Spinbox(self.specselect_frame, command=self.on_specspin_change)
# TODO does this need config before EDXdataset is loaded??
self.specspin.pack(side=tk.TOP) # throw into specselect sub-frame
# bools list that become true if any fitranges or backfitpts are altered
self.fitflags = None
# for readback of manually changed fitrange values
self.tkbegins= None # list with starting evs of fitranges
self.tkends= None # list with ending evs of fitranges
self.tkbackpts= None # list with background points in each fitrange
# Replot button should link w/ plot in GUIplotter
self.replot_button = tk.Button(
self.misc_opts_frame,text="Replot",command=self.parent.plotter.plot(),
padx=2, pady=6)
self.replot_button.pack(side=tk.TOP,fill=tk.X,expand=1)
self.refit_button = tk.Button(
self.misc_opts_frame,text="Redo backfit", command=self.on_redo_backfit,
padx=2, pady=6)
self.refit_button.pack(side=tk.TOP,fill=tk.X,expand=1)
self.refit2_button = tk.Button(
self.misc_opts_frame,text="Redo backfit all", command=self.on_redo_backfit_all,
padx=2, pady=6)
self.refit2_button.pack(side=tk.TOP,fill=tk.X,expand=1)
self.readback_button = tk.Button(
self.misc_opts_frame,text="Readback fitranges", command=self.read_backregs,
padx=2, pady=6)
self.readback_button.pack(side=tk.TOP,fill=tk.X,expand=1)
self.train_button = self._custom_button(
self.misc_opts_frame,"Save backfit training", self.save_train)
self.train_button.pack(side=tk.TOP,fill=tk.X,expand=1)
self.save_button = self._custom_button(
self.misc_opts_frame,"Save EDXfile changes", self.on_save)
self.save_button.pack(side=tk.TOP,fill=tk.X,expand=1)
self.quit_button = self._custom_button(
self.misc_opts_frame,"Quit", self.on_quitapp)
self.quit_button.pack(side=tk.TOP,fill=tk.X,expand=1)
    def associate_EDXdataset(self, EDXdataset):
        ''' associate loaded EDXdataset with GUIrefitter
        (passed as arg from GUIprojectloader)
        called by GUIprojectloader '''
        self.EDXdataset= EDXdataset
        print('EDXdataset associated w/ GUIopts has ', len(EDXdataset.EDXlog),' files.')
        # Set specspin range (using zero-based indexing)
        self.specspin.config(from_=0, to=len(EDXdataset.EDXlog)-1)
        # clear any existing widgets in backreg frame
        for child in self.backregs_frame.winfo_children():
            child.destroy()
        # load first EDXfile (row zero, zero-based indexing)
        self.load_EDXfile(0)
        # pass EDXfile loaded/created to GUIplotter
        self.parent.plotter.associate_EDXfile(self.EDXfile)
        # load background regions info from EDXfile into backregs_frame
        self.display_backregs()
    def load_EDXfile(self, rowindex):
        ''' Load an EDXfile out of EDXdataset using dataframe row (.iloc) '''
        # Make instance of EDXfile class using parent (not EDXdataset itself) and rowindex
        self.EDXfile=EDXfile(self.EDXdataset, rowindex)
        # Update displayed filename
        self.display_filename()
        # create fitflags of correct length (all clean on load)
        self.fitflags=[False]*len(self.EDXfile.fitranges)
        # NOTE(review): the triple-quoted block below is a no-op expression
        # statement (dead debugging code), kept as found.
        ''' testing fit types problem
        for i, val in enumerate(self.EDXfile.fitorders):
            print('Region', i, 'fitorder is', val)
        '''
def display_filename(self):
''' Displays csv name of currently-active emsa/csv file
called after every new load '''
# clear filename display
self.currfile_frame.grid_forget()
tempstr='EDX Filename: '+self.EDXfile.filename
tk.Label(self.currfile_frame, text=tempstr).pack()
    def on_specspin_change(self):
        ''' Load and plot chosen file, update backfit ranges, points, etc.
        Bound to the spectrum-selection spinbox.
        '''
        # clear old entries from any prior file (widgets are rebuilt below)
        for child in self.backregs_frame.winfo_children():
            child.destroy()
        for child in self.currfile_frame.winfo_children():
            child.destroy()
        # EDXproject file must be loaded or no effect
        self.load_EDXfile(int(self.specspin.get()))
        # Update displayed fitregions, backfitpts
        self.display_backregs()
        # pass to GUIplotter (triggers replot)
        self.parent.plotter.associate_EDXfile(self.EDXfile)
def on_redo_backfit(self):
''' Update fitranges, backfitpts from display, then call refit method in EDXfile
linked to button '''
# Note .. read back of fitranges, backpts done separately with button
print('EDX background refitting initiated')
self.EDXfile.process_refit(self.fitflags)
print('EDX background refitting finished from GUIrefitter')
# Pass updated EDXfile to plotter
self.parent.plotter.associate_EDXfile(self.EDXfile)
# reset fitflags to False
self.fitflags=[False]*len(self.EDXfile.fitranges)
def on_redo_backfit_all(self):
''' Refit of all regions ignoring fit flags
Update fitranges, backfitpts from display, then call refit method in EDXfile
linked to button '''
# Note .. read back of fitranges, backpts done separately with button
print('EDX background refitting initiated')
# set all to true to force complete refit (screwed up for some reason)
self.EDXfile.process_refit([True]*len(self.fitflags))
print('EDX background refitting finished from GUIrefitter')
# Pass updated EDXfile to plotter
self.parent.plotter.associate_EDXfile(self.EDXfile)
# Reset fitflags to False
self.fitflags=[False]*len(self.EDXfile.fitranges)
def save_train(self):
''' call save training points method of currently-active EDXfile
training data about points added or removed from backfitpts
used to later improve fitting process
'''
# save any modified fitranges or backfitpts to backfitparamslog
print('GUIrefitter save_train called')
self.EDXfile.save_train()
def on_save(self):
''' call save method of currently-active EDXfile
changeflags?? '''
# save any modified fitranges or backfitpts to backfitparamslog
print('GUIrefitter on_save called')
self.EDXfile.save_backfits()
# save EDXfile itself (with modified background column)
self.EDXfile.save_csvfile()
def on_quitapp(self):
msg = "Quitting:\nUnsaved progress will be lost.\nDo you wish to Continue?"
if tkmess.askokcancel("EDX refitter",msg):
self.parent.root.destroy()
def _custom_button(self,root,text,command,**kwargs):
''' use for lasso and point picker '''
button = tk.Button(root, text=text,
command=command,padx=2, pady=2,height=1, width=15,**kwargs)
button.pack(side=tk.TOP,fill=None,expand=1)
return button
    def remove_badpts(self, badinds):
        ''' Remove lasso-selected points from the background fit.

        :param badinds: list of dataframe index #s (effectively the same as
            channel/eV positions) returned by GUIPlotter after lasso-ing.
        Removes them from each fit region's backfitpts, flags those regions
        as changed, and refreshes display and plot.
        '''
        print('GUIrefitter remove_badpts called')
        # add points removed to existing list (training data)
        self.EDXfile.removedpts.extend(badinds)
        print(len(self.EDXfile.removedpts), ' points removed.')
        for fitnum, vals in enumerate(self.EDXfile.backfitpts):
            # See if badpts lie in this fit range
            common=[i for i in badinds if i in vals]
            if len(common)>0:
                # Troubleshoot remove pts error
                try:
                    self.fitflags[fitnum]=True # reset existing flag if change is made
                except:
                    print('Problem resetting flag', str(fitnum))
                print(len(common),' values to remove for', fitnum)
                newvals=[i for i in vals if i not in badinds]
                # Check if rightmost or leftmost points in range have been removed
                [lowlim, hilim]=self.EDXfile.backptrange[fitnum]
                if lowlim in badinds or hilim in badinds:
                    # NOTE(review): fix_badrange rebuilds the point list and
                    # range itself; `newvals` is discarded on this branch —
                    # confirm that is intended.
                    self.fix_badrange(fitnum, badinds)
                else:
                    self.EDXfile.backfitpts[fitnum]=newvals # list of lists
        self.display_backregs() # Update fitrange, backpts tkinter variables display
        # Update guiplot
        self.parent.plotter.associate_EDXfile(self.EDXfile)
def fix_badrange(self, fitnum, badinds):
''' After bad point lasso removal, ensure that all regions still have
valid edge points
badinds are index numbers (should be same as vals stored in backfitparamslog)'''
print('Fixing bad range after endpoint removed')
# get all current backpoints from all regions
allbackpts=self.EDXfile.get_allbackpts()
# Make sure to remove bad points from list
allbackpts=[i for i in allbackpts if i not in badinds]
# current boundaries for this fit region
[lowlim, hilim]=self.EDXfile.backptrange[fitnum]
# Current backpoints in this fit range
currbackpts=self.EDXfile.backfitpts[fitnum]
# Remove bad points
currbackpts=[i for i in currbackpts if i not in badinds]
if lowlim in badinds:
# Find next smallest value
print('Removing lower limit', lowlim)
try:
# Largest of negative differences is next lowest
newmin=lowlim+max([i-lowlim for i in allbackpts if i-lowlim<0])
except:
newmin=0
# Reset backpts and associated range
oldrange=self.EDXfile.backptrange[fitnum]
self.EDXfile.backptrange[fitnum]=[newmin, oldrange[1]]
# add
print('adding ', newmin, ' to region', fitnum,' backpts list')
currbackpts.append(newmin)
if hilim in badinds:
print('Removing upper limit', lowlim)
# Find next largest value
try:
newmax=hilim+min([i-hilim for i in allbackpts if i-hilim>0])
except:
print('Problem removing largest background points value')
#TODO fix for this problem
# Reset backpts and associated range
oldrange=self.EDXfile.backptrange[fitnum]
self.EDXfile.backptrange[fitnum]=[oldrange[0], newmax]
# add
print('Adding ', newmax,' to region', fitnum,' backpts list')
currbackpts.append(newmax)
# Write changes back to this regions backfitpts
self.EDXfile.backfitpts[fitnum]=currbackpts
    def add_pts(self, xval, yval):
        ''' Add closest single point (in energy) to background fit ranges.

        Args:
            xval: x (energy) coordinate of the clicked point; the nearest
                data point in the Energy column is snapped to.
            yval: y coordinate of the click (unused; kept for the picker
                callback signature).
        '''
        # get index/eV of closest data point in energy col of EDX dataframe
        print('GUIrefitter add_pts called')
        # assumes EDXdf is a pandas DataFrame with an Energy column -- the
        # argsort of |Energy - xval| gives the row closest in energy
        newval=self.EDXfile.EDXdf.Energy[(self.EDXfile.EDXdf.Energy-xval).abs().argsort()[:1]].index[0]
        # print('newval is', str(newval))
        # add points removed to existing list
        self.EDXfile.addedpts.append(newval)
        # Add newval to each backfitpts if within its fitrange
        for i, [fmin, fmax] in enumerate(self.EDXfile.fitranges):
            # fmin/fmax are the numeric bounds of this fit region
            if fmin < newval < fmax:
                print('Newval', str(newval), 'added by addpts')
                self.fitflags[i]=True  # mark region for refit
                vals=self.EDXfile.backfitpts[i]
                vals.append(newval)
                vals.sort()
                self.EDXfile.backfitpts[i]=vals
            # Handle values greater than upper limit
            # NOTE(review): this extends EVERY region whose fmax < newval,
            # not just the closest one -- confirm that is intended
            elif newval> fmax:
                self.fitflags[i]=True
                vals=self.EDXfile.backfitpts[i]
                vals.append(newval)
                vals.sort()
                self.EDXfile.backfitpts[i]=vals
                # alter total fit range
                self.EDXfile.fitranges[i]=[fmin, newval]
        self.display_backregs() # Update display
        self.parent.plotter.associate_EDXfile(self.EDXfile) # update guiplot
def display_backregs(self):
''' Display fitranges, associated backpts for loaded EDXfile
'''
# Clear any existing widgets in backreg frame
for child in self.backregs_frame.winfo_children():
child.destroy()
# Write header row into backregs
rowframe=tk.Frame(self.backregs_frame)
tk.Label(rowframe, text='Min').pack(side=tk.LEFT)
tk.Label(rowframe, text='Max').pack(side=tk.LEFT)
tk.Label(rowframe, text='Ptmin').pack(side=tk.LEFT)
tk.Label(rowframe, text='Ptmax').pack(side=tk.LEFT)
tk.Label(rowframe, text='#pts').pack(side=tk.LEFT)
tk.Label(rowframe, text='Order').pack(side=tk.LEFT)
rowframe.pack(fill=tk.X, expand=1)
# Now display values associated w/ each
self.tkbegins=[] # list of tk string vars for fitrange beginnings
self.tkends=[] # fitrange ends
# self.tkbackpts=[] # list of tk string vars for backpts
self.tkptbegins=[]
self.tkptends=[]
self.tkfitorders=[] # fit types (linear (true) or parabola (false/default))
# Unfortunately tk/mpl combo requires use of pack (not grid)
for i, [fmin, fmax] in enumerate(self.EDXfile.fitranges):
# ev ranges are stored as "0-100" eV so needs parsing
self.tkbegins.append(tk.IntVar())
self.tkbegins[i].set(fmin)
self.tkends.append(tk.IntVar())
self.tkends[i].set(fmax)
# self.tkbackpts.append(tk.StringVar())
# bool var to keep track of linear (true) or parabola (false/default)
self.tkfitorders.append(tk.IntVar())
self.tkfitorders[i].set(self.EDXfile.fitorders[i])
# backpoints are list of ints
#print('backfitspts are of type ', type(self.EDXfile.backfitpts[i]))
templist=self.EDXfile.backfitpts[i]
# Something is rewriting EDXfile.backfitpts[i] to int
# Set beginning of points included range
self.tkptbegins.append(tk.StringVar())
self.tkptbegins[i].set(str(min(templist)))
# set end of points included range
self.tkptends.append(tk.StringVar())
self.tkptends[i].set(str(max(templist)))
# templist=[str(i) for i in templist]
# tempstr=', '.join(templist)
# self.tkbackpts[i].set(tempstr)
# Add new row (via frame) .. .textvariable can be ints, right?
rowframe=tk.Frame(self.backregs_frame)
tk.Entry(rowframe, textvariable=self.tkbegins[i], width=5).pack(side=tk.LEFT)
tk.Entry(rowframe, textvariable=self.tkends[i], width=5).pack(side=tk.LEFT)
tk.Entry(rowframe, textvariable=self.tkptbegins[i], width=5).pack(side=tk.LEFT)
tk.Entry(rowframe, textvariable=self.tkptends[i], width=5).pack(side=tk.LEFT)
numpts=str(len(self.EDXfile.backfitpts[i]))
tk.Label(rowframe, text=numpts, width=5).pack(side=tk.LEFT)
# tk.Entry(rowframe, textvariable=self.tkbackpts[i]).pack(side=tk.LEFT)
tk.Entry(rowframe, textvariable=self.tkfitorders[i], width=5).pack(side=tk.LEFT)
rowframe.pack(fill=tk.X, expand=1)
def read_backregs(self):
''' Readback manually altered fitranges, fitorders, and backpoints lists
allows on-the-fly fit tweaking '''
print('read_backregs started')
# Set of bools keeping track of any altered params
self.fitflags=[False]*len(self.EDXfile.fitranges)
# Get old values for compare w/ readback
for i, [oldmin, oldmax] in enumerate(self.EDXfile.fitranges):
# Also need to compare int lists of background points (readback vs current attributes)
oldvals=self.EDXfile.backfitpts[i]
oldbpmin=min(oldvals)
oldbpmax=max(oldvals)
# if backpoints ranges are changed, all backpts needed for every fitrange
if int(self.tkbegins[i].get())!=oldmin or int(self.tkends[i].get())!=oldmax:
# reset range and set of backpoints
self.EDXfile.fitranges[i]=[int(self.tkbegins[i].get()),
int(self.tkends[i].get())]
self.fitflags[i]=True # keeping track of altered fitregions
print('Background fit region', str(i),' changed')
# Also need to check for backpoints changes w/o fitrange changes
if int(self.tkptbegins[i].get())!=oldbpmin or int(self.tkptends[i].get())!=oldbpmax:
# Need to reset backpoints in this fit region
allback=self.EDXfile.get_allbackpts() # list of all backpts (ints)
newvals=[i for i in allback if i >= oldbpmin and i <= oldbpmax]
self.EDXfile.backfitpts[i]=newvals
# reset range and set of backpoints
self.fitflags[i]=True # keeping track of altered fitregions
print('Background fit points for region', str(i),' changed')
# Check if fitorder has been changed
if int(self.tkfitorders[i].get())!=self.EDXfile.fitorders[i]:
self.EDXfile.fitorders[i]=int(self.tkfitorders[i].get())
self.fitflags[i]=True
print('Region',str(i),' changed to order',
self.EDXfile.fitorders[i], ' polynomial')
def runmenucommand(self, kwargs):
''' Method call from menu launched popup '''
print('Running command', kwargs.get('command',''))
def populate_specselector(self, spelist):
''' On project load, regenerate list of tk bools from spelist, update specselect frame view '''
self.spec_tklist=[]
for i, name in self.spelist:
self.spec_tklist.append(tk.BooleanVar())
self.spec_tklist[i].set(0) # Default unselected
# Fill spectra selector frame w/ associated checkbuttons
tk.Checkbutton(self.specselect_frame, text=name, variable=self.spec_tklist[i]).pack(side=tk.TOP)
|
[
"tkinter.StringVar",
"tkinter.BooleanVar",
"tkinter.Frame",
"tkinter.Label",
"sys.path.append",
"tkinter.Spinbox",
"tkinter.Checkbutton",
"EDX_data_classes.EDXdataset",
"tkinter.Button",
"matplotlib.rcParams.update",
"tkinter.Entry",
"matplotlib.widgets.Lasso",
"matplotlib.figure.Figure",
"EDX_data_classes.EDXfile",
"tkinter.Tk",
"tkinter.filedialog.askdirectory",
"matplotlib.backends.backend_tkagg.NavigationToolbar2TkAgg.__init__",
"tkinter.messagebox.askokcancel",
"tkinter.IntVar",
"matplotlib.backends.backend_tkagg.FigureCanvasTkAgg",
"os.getcwd",
"matplotlib.path.Path"
] |
[((989, 1019), 'matplotlib.rcParams.update', 'mpl.rcParams.update', (['MPL_STYLE'], {}), '(MPL_STYLE)\n', (1008, 1019), True, 'import matplotlib as mpl\n'), ((561, 626), 'sys.path.append', 'sys.path.append', (['"""C:\\\\Users\\\\tkc\\\\Documents\\\\Python_Scripts\\\\EDX"""'], {}), "('C:\\\\Users\\\\tkc\\\\Documents\\\\Python_Scripts\\\\EDX')\n", (576, 626), False, 'import sys\n'), ((1154, 1161), 'tkinter.Tk', 'tk.Tk', ([], {}), '()\n', (1159, 1161), True, 'import tkinter as tk\n'), ((1931, 1950), 'tkinter.Frame', 'tk.Frame', (['self.root'], {}), '(self.root)\n', (1939, 1950), True, 'import tkinter as tk\n'), ((2021, 2040), 'tkinter.Frame', 'tk.Frame', (['self.root'], {}), '(self.root)\n', (2029, 2040), True, 'import tkinter as tk\n'), ((2114, 2138), 'tkinter.Frame', 'tk.Frame', (['self.top_frame'], {}), '(self.top_frame)\n', (2122, 2138), True, 'import tkinter as tk\n'), ((2209, 2233), 'tkinter.Frame', 'tk.Frame', (['self.top_frame'], {}), '(self.top_frame)\n', (2217, 2233), True, 'import tkinter as tk\n'), ((2308, 2335), 'tkinter.Frame', 'tk.Frame', (['self.bottom_frame'], {}), '(self.bottom_frame)\n', (2316, 2335), True, 'import tkinter as tk\n'), ((3154, 3206), 'matplotlib.backends.backend_tkagg.NavigationToolbar2TkAgg.__init__', 'NavigationToolbar2TkAgg.__init__', (['self', 'canvas', 'root'], {}), '(self, canvas, root)\n', (3186, 3206), False, 'from matplotlib.backends.backend_tkagg import FigureCanvasTkAgg, NavigationToolbar2TkAgg\n'), ((3290, 3367), 'tkinter.Button', 'tk.Button', ([], {'master': 'self', 'text': '"""lasso"""', 'padx': '(2)', 'pady': '(2)', 'command': 'self.startlasso'}), "(master=self, text='lasso', padx=2, pady=2, command=self.startlasso)\n", (3299, 3367), True, 'import tkinter as tk\n'), ((3492, 3578), 'tkinter.Button', 'tk.Button', ([], {'master': 'self', 'text': '"""Remove pts"""', 'padx': '(2)', 'pady': '(2)', 'command': 'self.removepts'}), "(master=self, text='Remove pts', padx=2, pady=2, command=self.\n removepts)\n", (3501, 3578), 
True, 'import tkinter as tk\n'), ((3704, 3791), 'tkinter.Button', 'tk.Button', ([], {'master': 'self', 'text': '"""add point"""', 'padx': '(2)', 'pady': '(2)', 'command': 'self.startpicker'}), "(master=self, text='add point', padx=2, pady=2, command=self.\n startpicker)\n", (3713, 3791), True, 'import tkinter as tk\n'), ((3907, 4005), 'tkinter.Button', 'tk.Button', ([], {'master': 'self', 'text': '"""Show backfit segments"""', 'padx': '(2)', 'pady': '(2)', 'command': 'self.showbackseg'}), "(master=self, text='Show backfit segments', padx=2, pady=2,\n command=self.showbackseg)\n", (3916, 4005), True, 'import tkinter as tk\n'), ((4818, 4885), 'matplotlib.widgets.Lasso', 'Lasso', (['event.inaxes', '(event.xdata, event.ydata)', 'self.callbacklasso'], {}), '(event.inaxes, (event.xdata, event.ydata), self.callbacklasso)\n', (4823, 4885), False, 'from matplotlib.widgets import Lasso\n'), ((5096, 5112), 'matplotlib.path.Path', 'path.Path', (['verts'], {}), '(verts)\n', (5105, 5112), False, 'from matplotlib import path\n'), ((7236, 7281), 'matplotlib.figure.Figure', 'mpl.figure.Figure', ([], {'figsize': 'PLOT_SIZE', 'dpi': '(100)'}), '(figsize=PLOT_SIZE, dpi=100)\n', (7253, 7281), True, 'import matplotlib as mpl\n'), ((7420, 7461), 'matplotlib.backends.backend_tkagg.FigureCanvasTkAgg', 'FigureCanvasTkAgg', (['self.figure', 'self.root'], {}), '(self.figure, self.root)\n', (7437, 7461), False, 'from matplotlib.backends.backend_tkagg import FigureCanvasTkAgg, NavigationToolbar2TkAgg\n'), ((12358, 12377), 'tkinter.Frame', 'tk.Frame', (['self.root'], {}), '(self.root)\n', (12366, 12377), True, 'import tkinter as tk\n'), ((12459, 12478), 'tkinter.Frame', 'tk.Frame', (['self.root'], {}), '(self.root)\n', (12467, 12478), True, 'import tkinter as tk\n'), ((12702, 12844), 'tkinter.Entry', 'tk.Entry', (['self.top_frame'], {'width': '(90)', 'bg': '"""lightblue"""', 'fg': '"""black"""', 'highlightcolor': '"""lightblue"""', 'insertbackground': '"""black"""', 'highlightthickness': 
'(2)'}), "(self.top_frame, width=90, bg='lightblue', fg='black',\n highlightcolor='lightblue', insertbackground='black', highlightthickness=2)\n", (12710, 12844), True, 'import tkinter as tk\n'), ((13176, 13280), 'tkinter.Button', 'tk.Button', (['self.bottom_frame'], {'text': '"""Load EDX project folder"""', 'width': '(60)', 'command': 'self.load_EDXdataset'}), "(self.bottom_frame, text='Load EDX project folder', width=60,\n command=self.load_EDXdataset)\n", (13185, 13280), True, 'import tkinter as tk\n'), ((13528, 13547), 'EDX_data_classes.EDXdataset', 'EDXdataset', (['currdir'], {}), '(currdir)\n', (13538, 13547), False, 'from EDX_data_classes import EDXfile, EDXdataset\n'), ((13876, 13897), 'EDX_data_classes.EDXdataset', 'EDXdataset', (['directory'], {}), '(directory)\n', (13886, 13897), False, 'from EDX_data_classes import EDXfile, EDXdataset\n'), ((14173, 14198), 'tkinter.filedialog.askdirectory', 'filedialog.askdirectory', ([], {}), '()\n', (14196, 14198), False, 'from tkinter import filedialog\n'), ((14736, 14764), 'tkinter.Frame', 'tk.Frame', (['self.root'], {'pady': '(10)'}), '(self.root, pady=10)\n', (14744, 14764), True, 'import tkinter as tk\n'), ((14861, 14889), 'tkinter.Frame', 'tk.Frame', (['self.root'], {'pady': '(10)'}), '(self.root, pady=10)\n', (14869, 14889), True, 'import tkinter as tk\n'), ((14985, 15013), 'tkinter.Frame', 'tk.Frame', (['self.root'], {'pady': '(10)'}), '(self.root, pady=10)\n', (14993, 15013), True, 'import tkinter as tk\n'), ((15174, 15202), 'tkinter.Frame', 'tk.Frame', (['self.root'], {'pady': '(10)'}), '(self.root, pady=10)\n', (15182, 15202), True, 'import tkinter as tk\n'), ((15353, 15419), 'tkinter.Spinbox', 'tk.Spinbox', (['self.specselect_frame'], {'command': 'self.on_specspin_change'}), '(self.specselect_frame, command=self.on_specspin_change)\n', (15363, 15419), True, 'import tkinter as tk\n'), ((16248, 16351), 'tkinter.Button', 'tk.Button', (['self.misc_opts_frame'], {'text': '"""Redo backfit"""', 'command': 
'self.on_redo_backfit', 'padx': '(2)', 'pady': '(6)'}), "(self.misc_opts_frame, text='Redo backfit', command=self.\n on_redo_backfit, padx=2, pady=6)\n", (16257, 16351), True, 'import tkinter as tk\n'), ((16472, 16583), 'tkinter.Button', 'tk.Button', (['self.misc_opts_frame'], {'text': '"""Redo backfit all"""', 'command': 'self.on_redo_backfit_all', 'padx': '(2)', 'pady': '(6)'}), "(self.misc_opts_frame, text='Redo backfit all', command=self.\n on_redo_backfit_all, padx=2, pady=6)\n", (16481, 16583), True, 'import tkinter as tk\n'), ((16699, 16806), 'tkinter.Button', 'tk.Button', (['self.misc_opts_frame'], {'text': '"""Readback fitranges"""', 'command': 'self.read_backregs', 'padx': '(2)', 'pady': '(6)'}), "(self.misc_opts_frame, text='Readback fitranges', command=self.\n read_backregs, padx=2, pady=6)\n", (16708, 16806), True, 'import tkinter as tk\n'), ((18605, 18639), 'EDX_data_classes.EDXfile', 'EDXfile', (['self.EDXdataset', 'rowindex'], {}), '(self.EDXdataset, rowindex)\n', (18612, 18639), False, 'from EDX_data_classes import EDXfile, EDXdataset\n'), ((22138, 22177), 'tkinter.messagebox.askokcancel', 'tkmess.askokcancel', (['"""EDX refitter"""', 'msg'], {}), "('EDX refitter', msg)\n", (22156, 22177), True, 'import tkinter.messagebox as tkmess\n'), ((22343, 22437), 'tkinter.Button', 'tk.Button', (['root'], {'text': 'text', 'command': 'command', 'padx': '(2)', 'pady': '(2)', 'height': '(1)', 'width': '(15)'}), '(root, text=text, command=command, padx=2, pady=2, height=1, width\n =15, **kwargs)\n', (22352, 22437), True, 'import tkinter as tk\n'), ((28233, 28262), 'tkinter.Frame', 'tk.Frame', (['self.backregs_frame'], {}), '(self.backregs_frame)\n', (28241, 28262), True, 'import tkinter as tk\n'), ((1516, 1527), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (1525, 1527), False, 'import os\n'), ((30431, 30460), 'tkinter.Frame', 'tk.Frame', (['self.backregs_frame'], {}), '(self.backregs_frame)\n', (30439, 30460), True, 'import tkinter as tk\n'), ((12556, 12625), 
'tkinter.Label', 'tk.Label', (['self.top_frame'], {'text': '"""Directory:"""', 'padx': '(8)', 'pady': '(2)', 'height': '(1)'}), "(self.top_frame, text='Directory:', padx=8, pady=2, height=1)\n", (12564, 12625), True, 'import tkinter as tk\n'), ((13006, 13078), 'tkinter.Button', 'tk.Button', (['self.top_frame'], {'text': '"""Browse"""', 'command': 'self.launch_dir_finder'}), "(self.top_frame, text='Browse', command=self.launch_dir_finder)\n", (13015, 13078), True, 'import tkinter as tk\n'), ((19244, 19287), 'tkinter.Label', 'tk.Label', (['self.currfile_frame'], {'text': 'tempstr'}), '(self.currfile_frame, text=tempstr)\n', (19252, 19287), True, 'import tkinter as tk\n'), ((28271, 28301), 'tkinter.Label', 'tk.Label', (['rowframe'], {'text': '"""Min"""'}), "(rowframe, text='Min')\n", (28279, 28301), True, 'import tkinter as tk\n'), ((28329, 28359), 'tkinter.Label', 'tk.Label', (['rowframe'], {'text': '"""Max"""'}), "(rowframe, text='Max')\n", (28337, 28359), True, 'import tkinter as tk\n'), ((28387, 28419), 'tkinter.Label', 'tk.Label', (['rowframe'], {'text': '"""Ptmin"""'}), "(rowframe, text='Ptmin')\n", (28395, 28419), True, 'import tkinter as tk\n'), ((28447, 28479), 'tkinter.Label', 'tk.Label', (['rowframe'], {'text': '"""Ptmax"""'}), "(rowframe, text='Ptmax')\n", (28455, 28479), True, 'import tkinter as tk\n'), ((28507, 28538), 'tkinter.Label', 'tk.Label', (['rowframe'], {'text': '"""#pts"""'}), "(rowframe, text='#pts')\n", (28515, 28538), True, 'import tkinter as tk\n'), ((28566, 28598), 'tkinter.Label', 'tk.Label', (['rowframe'], {'text': '"""Order"""'}), "(rowframe, text='Order')\n", (28574, 28598), True, 'import tkinter as tk\n'), ((29260, 29271), 'tkinter.IntVar', 'tk.IntVar', ([], {}), '()\n', (29269, 29271), True, 'import tkinter as tk\n'), ((29343, 29354), 'tkinter.IntVar', 'tk.IntVar', ([], {}), '()\n', (29352, 29354), True, 'import tkinter as tk\n'), ((29563, 29574), 'tkinter.IntVar', 'tk.IntVar', ([], {}), '()\n', (29572, 29574), True, 'import tkinter 
as tk\n'), ((29964, 29978), 'tkinter.StringVar', 'tk.StringVar', ([], {}), '()\n', (29976, 29978), True, 'import tkinter as tk\n'), ((30116, 30130), 'tkinter.StringVar', 'tk.StringVar', ([], {}), '()\n', (30128, 30130), True, 'import tkinter as tk\n'), ((33770, 33785), 'tkinter.BooleanVar', 'tk.BooleanVar', ([], {}), '()\n', (33783, 33785), True, 'import tkinter as tk\n'), ((30473, 30531), 'tkinter.Entry', 'tk.Entry', (['rowframe'], {'textvariable': 'self.tkbegins[i]', 'width': '(5)'}), '(rowframe, textvariable=self.tkbegins[i], width=5)\n', (30481, 30531), True, 'import tkinter as tk\n'), ((30563, 30619), 'tkinter.Entry', 'tk.Entry', (['rowframe'], {'textvariable': 'self.tkends[i]', 'width': '(5)'}), '(rowframe, textvariable=self.tkends[i], width=5)\n', (30571, 30619), True, 'import tkinter as tk\n'), ((30651, 30711), 'tkinter.Entry', 'tk.Entry', (['rowframe'], {'textvariable': 'self.tkptbegins[i]', 'width': '(5)'}), '(rowframe, textvariable=self.tkptbegins[i], width=5)\n', (30659, 30711), True, 'import tkinter as tk\n'), ((30743, 30801), 'tkinter.Entry', 'tk.Entry', (['rowframe'], {'textvariable': 'self.tkptends[i]', 'width': '(5)'}), '(rowframe, textvariable=self.tkptends[i], width=5)\n', (30751, 30801), True, 'import tkinter as tk\n'), ((30889, 30929), 'tkinter.Label', 'tk.Label', (['rowframe'], {'text': 'numpts', 'width': '(5)'}), '(rowframe, text=numpts, width=5)\n', (30897, 30929), True, 'import tkinter as tk\n'), ((31045, 31106), 'tkinter.Entry', 'tk.Entry', (['rowframe'], {'textvariable': 'self.tkfitorders[i]', 'width': '(5)'}), '(rowframe, textvariable=self.tkfitorders[i], width=5)\n', (31053, 31106), True, 'import tkinter as tk\n'), ((33928, 34006), 'tkinter.Checkbutton', 'tk.Checkbutton', (['self.specselect_frame'], {'text': 'name', 'variable': 'self.spec_tklist[i]'}), '(self.specselect_frame, text=name, variable=self.spec_tklist[i])\n', (33942, 34006), True, 'import tkinter as tk\n')]
|
# Copyright 2021 Hathor Labs
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import datetime
import random
import sys
import time
from enum import Enum
from typing import Any, Iterator, List, NamedTuple, Optional, Tuple, Union
from structlog import get_logger
from twisted.internet import defer
from twisted.internet.defer import Deferred
from twisted.internet.interfaces import IReactorCore
from twisted.python.threadpool import ThreadPool
import hathor.util
from hathor import daa
from hathor.checkpoint import Checkpoint
from hathor.conf import HathorSettings
from hathor.consensus import ConsensusAlgorithm
from hathor.exception import InvalidNewTransaction
from hathor.indexes import TokensIndex, WalletIndex
from hathor.mining import BlockTemplate, BlockTemplates
from hathor.p2p.peer_discovery import PeerDiscovery
from hathor.p2p.peer_id import PeerId
from hathor.p2p.protocol import HathorProtocol
from hathor.profiler import get_cpu_profiler
from hathor.pubsub import HathorEvents, PubSubManager
from hathor.transaction import BaseTransaction, Block, MergeMinedBlock, Transaction, TxVersion, sum_weights
from hathor.transaction.exceptions import TxValidationError
from hathor.transaction.storage import TransactionStorage
from hathor.wallet import BaseWallet
settings = HathorSettings()  # network-wide configuration constants
logger = get_logger()  # module-level structlog logger
cpu = get_cpu_profiler()  # process-wide CPU profiler singleton
class HathorManager:
""" HathorManager manages the node with the help of other specialized classes.
Its primary objective is to handle DAG-related matters, ensuring that the DAG is always valid and connected.
"""
    class NodeState(Enum):
        """Lifecycle states of a HathorManager node."""
        # This node is still initializing
        INITIALIZING = 'INITIALIZING'
        # This node is ready to establish new connections, sync, and exchange transactions.
        READY = 'READY'
    def __init__(self, reactor: IReactorCore, peer_id: Optional[PeerId] = None, network: Optional[str] = None,
                 hostname: Optional[str] = None, pubsub: Optional[PubSubManager] = None,
                 wallet: Optional[BaseWallet] = None, tx_storage: Optional[TransactionStorage] = None,
                 peer_storage: Optional[Any] = None, default_port: int = 40403, wallet_index: bool = False,
                 stratum_port: Optional[int] = None, ssl: bool = True,
                 capabilities: Optional[List[str]] = None, checkpoints: Optional[List[Checkpoint]] = None) -> None:
        """
        :param reactor: Twisted reactor which handles the mainloop and the events.
        :param peer_id: Id of this node. If not given, a new one is created.
        :param network: Name of the network this node participates. Usually it is either testnet or mainnet.
        :type network: string
        :param hostname: The hostname of this node. It is used to generate its entrypoints.
        :type hostname: string
        :param pubsub: If not given, a new one is created.
        :type pubsub: :py:class:`hathor.pubsub.PubSubManager`
        :param wallet: Wallet attached to this node, if any. Its pubsub and reactor are wired to this manager's.
        :type wallet: Optional[BaseWallet]
        :param tx_storage: If not given, a :py:class:`TransactionMemoryStorage` one is created.
        :type tx_storage: :py:class:`hathor.transaction.storage.transaction_storage.TransactionStorage`
        :param peer_storage: If not given, a new one is created.
        :type peer_storage: :py:class:`hathor.p2p.peer_storage.PeerStorage`
        :param default_port: Network default port. It is used when only ip addresses are discovered.
        :type default_port: int
        :param wallet_index: If should add a wallet index in the storage
        :type wallet_index: bool
        :param stratum_port: Stratum server port. Stratum server will only be created if it is not None.
        :type stratum_port: Optional[int]
        :param ssl: Whether to use SSL for peer connections.
        :type ssl: bool
        :param capabilities: Capabilities advertised to peers; defaults to whitelist + sync-v2.
        :type capabilities: Optional[List[str]]
        :param checkpoints: Known block checkpoints; the genesis checkpoint (height=0) is prepended if missing.
        :type checkpoints: Optional[List[Checkpoint]]
        """
        # deferred imports to avoid import cycles at module load time
        from hathor.metrics import Metrics
        from hathor.p2p.factory import HathorClientFactory, HathorServerFactory
        from hathor.p2p.manager import ConnectionsManager
        from hathor.transaction.storage.memory_storage import TransactionMemoryStorage
        self.log = logger.new()
        self.reactor = reactor
        # register shutdown hook when the reactor supports it (real reactors do; some test doubles don't)
        if hasattr(self.reactor, 'addSystemEventTrigger'):
            self.reactor.addSystemEventTrigger('after', 'shutdown', self.stop)
        self.state: Optional[HathorManager.NodeState] = None
        self.profiler: Optional[Any] = None
        # Hostname, used to be accessed by other peers.
        self.hostname = hostname
        # Remote address, which can be different from local address.
        self.remote_address = None
        self.my_peer = peer_id or PeerId()
        self.network = network or 'testnet'
        self.is_started: bool = False
        self.cpu = cpu
        # XXX: first checkpoint must be genesis (height=0)
        self.checkpoints: List[Checkpoint] = checkpoints or []
        self.checkpoints_ready: List[bool] = [False] * len(self.checkpoints)
        if not self.checkpoints or self.checkpoints[0].height > 0:
            self.checkpoints.insert(0, Checkpoint(0, settings.GENESIS_BLOCK_HASH))
            self.checkpoints_ready.insert(0, True)
        else:
            # genesis checkpoint is always considered reached
            self.checkpoints_ready[0] = True
        # XXX Should we use a singleton or a new PeerStorage? [msbrogli 2018-08-29]
        self.pubsub = pubsub or PubSubManager(self.reactor)
        self.tx_storage = tx_storage or TransactionMemoryStorage()
        self.tx_storage.pubsub = self.pubsub
        if wallet_index and self.tx_storage.with_index:
            self.tx_storage.wallet_index = WalletIndex(self.pubsub)
            self.tx_storage.tokens_index = TokensIndex()
        self.metrics = Metrics(
            pubsub=self.pubsub,
            avg_time_between_blocks=settings.AVG_TIME_BETWEEN_BLOCKS,
            tx_storage=self.tx_storage,
            reactor=self.reactor,
        )
        self.consensus_algorithm = ConsensusAlgorithm()
        self.peer_discoveries: List[PeerDiscovery] = []
        self.ssl = ssl
        self.server_factory = HathorServerFactory(self.network, self.my_peer, node=self, use_ssl=ssl)
        self.client_factory = HathorClientFactory(self.network, self.my_peer, node=self, use_ssl=ssl)
        self.connections = ConnectionsManager(self.reactor, self.my_peer, self.server_factory, self.client_factory,
                                              self.pubsub, self, ssl)
        self.wallet = wallet
        if self.wallet:
            self.wallet.pubsub = self.pubsub
            self.wallet.reactor = self.reactor
        if stratum_port:
            # XXX: only import if needed
            from hathor.stratum import StratumFactory
            self.stratum_factory: Optional[StratumFactory] = StratumFactory(manager=self, port=stratum_port)
        else:
            self.stratum_factory = None
        # Set stratum factory for metrics object
        self.metrics.stratum_factory = self.stratum_factory
        self._allow_mining_without_peers = False
        # Thread pool used to resolve pow when sending tokens
        self.pow_thread_pool = ThreadPool(minthreads=0, maxthreads=settings.MAX_POW_THREADS, name='Pow thread pool')
        # List of addresses to listen for new connections (eg: [tcp:8000])
        self.listen_addresses: List[str] = []
        # Full verification execute all validations for transactions and blocks when initializing the node
        # Can be activated on the command line with --full-verification
        self._full_verification = False
        # List of whitelisted peers
        self.peers_whitelist: List[str] = []
        # List of capabilities of the peer
        if capabilities is not None:
            self.capabilities = capabilities
        else:
            self.capabilities = [settings.CAPABILITY_WHITELIST, settings.CAPABILITY_SYNC_V2]
    def start(self) -> None:
        """ A factory must be started only once. And it is usually automatically started.

        Starts all components (connections, pow thread pool, metrics, wallet,
        stratum) after loading and verifying the stored transactions. Raises
        Exception if already started, and exits the process (sys.exit) when the
        storage is detected to be unreliable.
        """
        if self.is_started:
            raise Exception('HathorManager is already started')
        self.is_started = True
        self.log.info('start manager', network=self.network)
        # If it's a full verification, we save on the storage that we are starting it
        # this is required because if we stop the initialization in the middle, the metadata
        # saved on the storage is not reliable anymore, only if we finish it
        if self._full_verification:
            self.tx_storage.start_full_verification()
        else:
            # If it's a fast initialization and the last time a full initialization stopped in the middle
            # we can't allow the full node to continue, so we need to remove the storage and do a full sync
            # or execute an initialization with full verification
            if self.tx_storage.is_running_full_verification():
                self.log.error(
                    'Error initializing node. The last time you started your node you did a full verification '
                    'that was stopped in the middle. The storage is not reliable anymore and, because of that, '
                    'you must initialize with a full verification again or remove your storage and do a full sync.'
                )
                sys.exit()
        # If self.tx_storage.is_running_manager() is True, the last time the node was running it had a sudden crash
        # because of that, we must run a full verification because some storage data might be wrong.
        # The metadata is the only piece of the storage that may be wrong, not the blocks and transactions.
        if self.tx_storage.is_running_manager():
            self.log.error(
                'Error initializing node. The last time you executed your full node it wasn\'t stopped correctly. '
                'The storage is not reliable anymore and, because of that, so you must run a full verification '
                'or remove your storage and do a full sync.'
            )
            sys.exit()
        self.state = self.NodeState.INITIALIZING
        self.pubsub.publish(HathorEvents.MANAGER_ON_START)
        self.connections.start()
        self.pow_thread_pool.start()
        # Disable get transaction lock when initializing components
        self.tx_storage.disable_lock()
        # Initialize manager's components.
        self._initialize_components()
        if self._full_verification:
            # Before calling self._initialize_components() I start 'full verification' mode and after that I need to
            # finish it. It's just to know if the full node has stopped a full initialization in the middle
            self.tx_storage.finish_full_verification()
        self.tx_storage.enable_lock()
        # Metric starts to capture data
        self.metrics.start()
        # Open all listening sockets registered via add_listen_address
        for description in self.listen_addresses:
            self.listen(description)
        self.do_discovery()
        self.start_time = time.time()
        if self.wallet:
            self.wallet.start()
        if self.stratum_factory:
            self.stratum_factory.start()
        # Start running
        self.tx_storage.start_running_manager()
def stop(self) -> Deferred:
if not self.is_started:
raise Exception('HathorManager is already stopped')
self.is_started = False
waits = []
self.log.info('stop manager')
self.tx_storage.stop_running_manager()
self.connections.stop()
self.pubsub.publish(HathorEvents.MANAGER_ON_STOP)
if self.pow_thread_pool.started:
self.pow_thread_pool.stop()
# Metric stops to capture data
self.metrics.stop()
if self.wallet:
self.wallet.stop()
if self.stratum_factory:
wait_stratum = self.stratum_factory.stop()
if wait_stratum:
waits.append(wait_stratum)
return defer.DeferredList(waits)
def do_discovery(self) -> None:
"""
Do a discovery and connect on all discovery strategies.
"""
for peer_discovery in self.peer_discoveries:
peer_discovery.discover_and_connect(self.connections.connect_to)
def start_profiler(self) -> None:
"""
Start profiler. It can be activated from a web resource, as well.
"""
if not self.profiler:
import cProfile
self.profiler = cProfile.Profile()
self.profiler.enable()
def stop_profiler(self, save_to: Optional[str] = None) -> None:
"""
Stop the profile and optionally save the results for future analysis.
:param save_to: path where the results will be saved
:type save_to: str
"""
assert self.profiler is not None
self.profiler.disable()
if save_to:
self.profiler.dump_stats(save_to)
def _initialize_components(self) -> None:
"""You are not supposed to run this method manually. You should run `doStart()` to initialize the
manager.
This method runs through all transactions, verifying them and updating our wallet.
"""
self.log.info('initialize')
if self.wallet:
self.wallet._manually_initialize()
t0 = time.time()
t1 = t0
cnt = 0
cnt2 = 0
t2 = t0
h = 0
block_count = 0
tx_count = 0
if self.tx_storage.get_count_tx_blocks() > 3 and not self.tx_storage.is_db_clean():
# If has more than 3 txs on storage (the genesis txs that are always on storage by default)
# and the db is not clean (the db has old data before we cleaned the voided txs/blocks)
# then we can't move forward and ask the user to remove the old db
self.log.error(
'Error initializing the node. You can\'t use an old database right now. '
'Please remove your database or start your full node again with an empty data folder.'
)
sys.exit()
# If has reached this line, the db is clean, so we add this attribute to it
self.tx_storage.set_db_clean()
# self.start_profiler()
self.log.debug('load blocks and transactions')
for tx in self.tx_storage._topological_sort():
assert tx.hash is not None
tx_meta = tx.get_metadata()
t2 = time.time()
dt = hathor.util.LogDuration(t2 - t1)
dcnt = cnt - cnt2
tx_rate = '?' if dt == 0 else dcnt / dt
h = max(h, tx_meta.height)
if dt > 30:
ts_date = datetime.datetime.fromtimestamp(self.tx_storage.latest_timestamp)
if h == 0:
self.log.debug('start loading transactions...')
else:
self.log.info('load transactions...', tx_rate=tx_rate, tx_new=dcnt, dt=dt,
total=cnt, latest_ts=ts_date, height=h)
t1 = t2
cnt2 = cnt
cnt += 1
# It's safe to skip block weight verification during initialization because
# we trust the difficulty stored in metadata
skip_block_weight_verification = True
if block_count % settings.VERIFY_WEIGHT_EVERY_N_BLOCKS == 0:
skip_block_weight_verification = False
try:
assert self.on_new_tx(
tx,
quiet=True,
fails_silently=False,
skip_block_weight_verification=skip_block_weight_verification
)
except (InvalidNewTransaction, TxValidationError):
self.log.error('unexpected error when initializing', tx=tx, exc_info=True)
raise
if tx.is_block:
block_count += 1
if time.time() - t2 > 1:
dt = hathor.util.LogDuration(time.time() - t2)
self.log.warn('tx took too long to load', tx=tx.hash_hex, dt=dt)
self.log.debug('done loading transactions')
# self.stop_profiler(save_to='profiles/initializing.prof')
self.state = self.NodeState.READY
tdt = hathor.util.LogDuration(t2 - t0)
tx_rate = '?' if tdt == 0 else cnt / tdt
self.log.info('ready', tx_count=cnt, tx_rate=tx_rate, total_dt=tdt, height=h, blocks=block_count, txs=tx_count)
def add_listen_address(self, addr: str) -> None:
self.listen_addresses.append(addr)
def add_peer_discovery(self, peer_discovery: PeerDiscovery) -> None:
self.peer_discoveries.append(peer_discovery)
    def get_new_tx_parents(self, timestamp: Optional[float] = None) -> List[bytes]:
        """Select which transactions will be confirmed by a new transaction.
        :return: The hashes of the parents for a new transaction.
        :rtype: List[bytes(hash)]
        """
        # NOTE(review): `or` also replaces a timestamp of exactly 0 with the reactor
        # clock — confirm 0 is never a meaningful timestamp here.
        timestamp = timestamp or self.reactor.seconds()
        ret = list(self.tx_storage.get_tx_tips(timestamp - 1))
        # Randomly pick (up to) two of the current tips.
        random.shuffle(ret)
        ret = ret[:2]
        if len(ret) == 1:
            # If there is only one tip, let's randomly choose one of its parents.
            parents = list(self.tx_storage.get_tx_tips(ret[0].begin - 1))
            ret.append(random.choice(parents))
        assert len(ret) == 2, 'timestamp={} tips={}'.format(
            timestamp, [x.hex() for x in self.tx_storage.get_tx_tips(timestamp - 1)])
        # Tips are interval-like objects: `.data` carries the tx hash.
        return [x.data for x in ret]
    def generate_parent_txs(self, timestamp: Optional[float]) -> 'ParentTxs':
        """Select which transactions will be confirmed by a new block.
        This method tries to return a stable result, such that for a given timestamp and storage state it will always
        return the same.

        :param timestamp: reference time; when None the reactor clock is used
        :return: a ParentTxs holding the candidate (and possibly one mandatory) parent hashes
        """
        if timestamp is None:
            timestamp = self.reactor.seconds()
        # Sorting the tips keeps the result deterministic for a given storage state.
        can_include_intervals = sorted(self.tx_storage.get_tx_tips(timestamp - 1))
        assert can_include_intervals, 'tips cannot be empty'
        max_timestamp = max(int(i.begin) for i in can_include_intervals)
        must_include: List[bytes] = []
        assert len(can_include_intervals) > 0, f'invalid timestamp "{timestamp}", no tips found"'
        if len(can_include_intervals) < 2:
            # With a single tip it must be confirmed; widen the candidate set to the
            # tips right before it so a second parent can still be chosen.
            must_include_interval = can_include_intervals[0]
            must_include = [must_include_interval.data]
            can_include_intervals = sorted(self.tx_storage.get_tx_tips(must_include_interval.begin - 1))
        can_include = [i.data for i in can_include_intervals]
        return ParentTxs(max_timestamp, can_include, must_include)
    def allow_mining_without_peers(self) -> None:
        """Allow mining without being synced to at least one peer.
        It should be used only for debugging purposes.
        """
        # Flag read by can_start_mining() to bypass the synced-peer requirement.
        self._allow_mining_without_peers = True
def can_start_mining(self) -> bool:
""" Return whether we can start mining.
"""
if self._allow_mining_without_peers:
return True
return self.connections.has_synced_peer()
def get_block_templates(self, parent_block_hash: Optional[bytes] = None,
timestamp: Optional[int] = None) -> BlockTemplates:
""" Cached version of `make_block_templates`, cache is invalidated when latest_timestamp changes."""
if parent_block_hash is not None:
return BlockTemplates([self.make_block_template(parent_block_hash, timestamp)], storage=self.tx_storage)
return BlockTemplates(self.make_block_templates(timestamp), storage=self.tx_storage)
# FIXME: the following caching scheme breaks tests:
# cached_timestamp: Optional[int]
# cached_block_template: BlockTemplates
# cached_timestamp, cached_block_template = getattr(self, '_block_templates_cache', (None, None))
# if cached_timestamp == self.tx_storage.latest_timestamp:
# return cached_block_template
# block_templates = BlockTemplates(self.make_block_templates(), storage=self.tx_storage)
# setattr(self, '_block_templates_cache', (self.tx_storage.latest_timestamp, block_templates))
# return block_templates
def make_block_templates(self, timestamp: Optional[int] = None) -> Iterator[BlockTemplate]:
""" Makes block templates for all possible best tips as of the latest timestamp.
Each block template has all the necessary info to build a block to be mined without requiring further
information from the blockchain state. Which is ideal for use by external mining servers.
"""
for parent_block_hash in self.tx_storage.get_best_block_tips():
yield self.make_block_template(parent_block_hash, timestamp)
def make_block_template(self, parent_block_hash: bytes, timestamp: Optional[int] = None) -> BlockTemplate:
""" Makes a block template using the given parent block.
"""
parent_block = self.tx_storage.get_transaction(parent_block_hash)
assert isinstance(parent_block, Block)
parent_txs = self.generate_parent_txs(parent_block.timestamp + settings.MAX_DISTANCE_BETWEEN_BLOCKS)
if timestamp is None:
current_timestamp = int(max(self.tx_storage.latest_timestamp, self.reactor.seconds()))
else:
current_timestamp = timestamp
return self._make_block_template(parent_block, parent_txs, current_timestamp)
def make_custom_block_template(self, parent_block_hash: bytes, parent_tx_hashes: List[bytes],
timestamp: Optional[int] = None) -> BlockTemplate:
""" Makes a block template using the given parent block and txs.
"""
parent_block = self.tx_storage.get_transaction(parent_block_hash)
assert isinstance(parent_block, Block)
# gather the actual txs to query their timestamps
parent_tx_list: List[Transaction] = []
for tx_hash in parent_tx_hashes:
tx = self.tx_storage.get_transaction(tx_hash)
assert isinstance(tx, Transaction)
parent_tx_list.append(tx)
max_timestamp = max(tx.timestamp for tx in parent_tx_list)
parent_txs = ParentTxs(max_timestamp, parent_tx_hashes, [])
if timestamp is None:
current_timestamp = int(max(self.tx_storage.latest_timestamp, self.reactor.seconds()))
else:
current_timestamp = timestamp
return self._make_block_template(parent_block, parent_txs, current_timestamp)
    def _make_block_template(self, parent_block: Block, parent_txs: 'ParentTxs', current_timestamp: int,
                             with_weight_decay: bool = False) -> BlockTemplate:
        """ Further implementation of making block template, used by make_block_template and make_custom_block_template

        :param parent_block: the block the new block will extend
        :param parent_txs: candidate (and possibly mandatory) parent transactions
        :param current_timestamp: wall-clock reference, clamped into the valid range
        :param with_weight_decay: when True, cap timestamp_max at the next weight-decay boundary
        """
        assert parent_block.hash is not None
        # the absolute minimum would be the previous timestamp + 1
        timestamp_abs_min = parent_block.timestamp + 1
        # and absolute maximum limited by max time between blocks
        if not parent_block.is_genesis:
            timestamp_abs_max = parent_block.timestamp + settings.MAX_DISTANCE_BETWEEN_BLOCKS - 1
        else:
            # genesis has no distance constraint: use the largest 32-bit timestamp
            timestamp_abs_max = 0xffffffff
        assert timestamp_abs_max > timestamp_abs_min
        # actual minimum depends on the timestamps of the parent txs
        # it has to be at least the max timestamp of parents + 1
        timestamp_min = max(timestamp_abs_min, parent_txs.max_timestamp + 1)
        assert timestamp_min <= timestamp_abs_max
        # when we have weight decay, the max timestamp will be when the next decay happens
        if with_weight_decay and settings.WEIGHT_DECAY_ENABLED:
            # we either have passed the first decay or not, the range will vary depending on that
            if timestamp_min > timestamp_abs_min + settings.WEIGHT_DECAY_ACTIVATE_DISTANCE:
                timestamp_max_decay = timestamp_min + settings.WEIGHT_DECAY_WINDOW_SIZE
            else:
                timestamp_max_decay = timestamp_abs_min + settings.WEIGHT_DECAY_ACTIVATE_DISTANCE
            timestamp_max = min(timestamp_abs_max, timestamp_max_decay)
        else:
            timestamp_max = timestamp_abs_max
        # clamp the caller-provided timestamp into [timestamp_min, timestamp_max]
        timestamp = min(max(current_timestamp, timestamp_min), timestamp_max)
        weight = daa.calculate_next_weight(parent_block, timestamp)
        parent_block_metadata = parent_block.get_metadata()
        height = parent_block_metadata.height + 1
        # The parent block is always mandatory; parent_txs may add one more.
        parents = [parent_block.hash] + parent_txs.must_include
        parents_any = parent_txs.can_include
        # simplify representation when you only have one to choose from
        if len(parents) + len(parents_any) == 3:
            parents.extend(sorted(parents_any))
            parents_any = []
        assert len(parents) + len(parents_any) >= 3, 'There should be enough parents to choose from'
        assert 1 <= len(parents) <= 3, 'Impossible number of parents'
        if __debug__ and len(parents) == 3:
            assert len(parents_any) == 0, 'Extra parents to choose from that cannot be chosen'
        return BlockTemplate(
            versions={TxVersion.REGULAR_BLOCK.value, TxVersion.MERGE_MINED_BLOCK.value},
            reward=daa.get_tokens_issued_per_block(height),
            weight=weight,
            timestamp_now=current_timestamp,
            timestamp_min=timestamp_min,
            timestamp_max=timestamp_max,
            parents=parents,
            parents_any=parents_any,
            height=height,
            score=sum_weights(parent_block_metadata.score, weight),
        )
def generate_mining_block(self, timestamp: Optional[int] = None,
parent_block_hash: Optional[bytes] = None,
data: bytes = b'', address: Optional[bytes] = None,
merge_mined: bool = False) -> Union[Block, MergeMinedBlock]:
""" Generates a block ready to be mined. The block includes new issued tokens,
parents, and the weight.
:return: A block ready to be mined
:rtype: :py:class:`hathor.transaction.Block`
"""
if address is None:
if self.wallet is None:
raise ValueError('No wallet available and no mining address given')
address = self.wallet.get_unused_address_bytes(mark_as_used=False)
assert address is not None
block = self.get_block_templates(parent_block_hash, timestamp).generate_mining_block(
merge_mined=merge_mined,
address=address or None, # XXX: because we allow b'' for explicit empty output script
data=data,
)
return block
def get_tokens_issued_per_block(self, height: int) -> int:
"""Return the number of tokens issued (aka reward) per block of a given height."""
return daa.get_tokens_issued_per_block(height)
    def validate_new_tx(self, tx: BaseTransaction, skip_block_weight_verification: bool = False) -> bool:
        """ Process incoming transaction during initialization.
        These transactions came only from storage.

        :param tx: transaction or block to be validated
        :param skip_block_weight_verification: accepted for the caller's convenience,
            but currently not used inside this method
        :return: True when the tx passes validation
        :raises InvalidNewTransaction: genesis tx outside initialization, timestamp too
            far in the future, or missing dependencies
        """
        # NOTE(review): `skip_block_weight_verification` is never read in this body —
        # confirm whether it should be forwarded to `tx.validate_full()`.
        assert tx.hash is not None
        if self.state == self.NodeState.INITIALIZING:
            # Genesis txs are expected while loading from storage; accept them as-is.
            if tx.is_genesis:
                return True
        else:
            # Outside initialization a genesis tx must never arrive again.
            if tx.is_genesis:
                raise InvalidNewTransaction('Genesis? {}'.format(tx.hash_hex))
            now = self.reactor.seconds()
            # Reject txs whose timestamp is too far ahead of our clock.
            if tx.timestamp - now > settings.MAX_FUTURE_TIMESTAMP_ALLOWED:
                raise InvalidNewTransaction('Ignoring transaction in the future {} (timestamp={}, now={})'.format(
                    tx.hash_hex, tx.timestamp, now))
        if self.state != self.NodeState.INITIALIZING and not tx.can_validate_full():
            raise InvalidNewTransaction('Cannot validate, missing dependency')
        # validate transaction, raises a TxValidationError if tx is not valid
        tx.validate_full()
        return True
def submit_block(self, blk: Block, fails_silently: bool = True) -> bool:
"""Used by submit block from all mining APIs.
"""
tips = self.tx_storage.get_best_block_tips()
parent_hash = blk.get_block_parent_hash()
if parent_hash not in tips:
return False
return self.propagate_tx(blk, fails_silently=fails_silently)
def propagate_tx(self, tx: BaseTransaction, fails_silently: bool = True) -> bool:
"""Push a new transaction to the network. It is used by both the wallet and the mining modules.
:return: True if the transaction was accepted
:rtype: bool
"""
if tx.storage:
assert tx.storage == self.tx_storage, 'Invalid tx storage'
else:
tx.storage = self.tx_storage
return self.on_new_tx(tx, fails_silently=fails_silently)
    @cpu.profiler('on_new_tx')
    def on_new_tx(self, tx: BaseTransaction, *, conn: Optional[HathorProtocol] = None,
                  quiet: bool = False, fails_silently: bool = True, propagate_to_peers: bool = True,
                  skip_block_weight_verification: bool = False) -> bool:
        """This method is called when any transaction arrive.
        If `fails_silently` is False, it may raise either InvalidNewTransaction or TxValidationError.

        :param tx: the incoming transaction or block
        :param conn: protocol connection it arrived on, if any (unused in this body)
        :param quiet: suppress the 'new block'/'new tx' info log
        :param fails_silently: return False instead of raising on rejection
        :param propagate_to_peers: forward the accepted tx to connected peers
        :param skip_block_weight_verification: forwarded to validate_new_tx
        :return: True if the transaction was accepted
        :rtype: bool
        """
        assert tx.hash is not None
        # Duplicate check is skipped during initialization (txs come from storage).
        if self.state != self.NodeState.INITIALIZING:
            if self.tx_storage.transaction_exists(tx.hash):
                if not fails_silently:
                    raise InvalidNewTransaction('Transaction already exists {}'.format(tx.hash_hex))
                self.log.debug('on_new_tx(): Transaction already exists', tx=tx.hash_hex)
                return False
        # Full validation runs in normal operation, or during init with full verification on.
        if self.state != self.NodeState.INITIALIZING or self._full_verification:
            try:
                assert self.validate_new_tx(tx, skip_block_weight_verification=skip_block_weight_verification) is True
            except (InvalidNewTransaction, TxValidationError):
                # Discard invalid Transaction/block.
                self.log.debug('tx/block discarded', tx=tx, exc_info=True)
                if not fails_silently:
                    raise
                return False
        if self.state != self.NodeState.INITIALIZING:
            self.tx_storage.save_transaction(tx)
        else:
            # During init the tx is already persisted; only the caches/indexes are updated.
            self.tx_storage._add_to_cache(tx)
            if self._full_verification:
                tx.reset_metadata()
            else:
                # When doing a fast init, we don't update the consensus, so we must trust the data on the metadata
                # For transactions, we don't store them on the tips index if they are voided
                # We have to execute _add_to_cache before because _del_from_cache does not remove from all indexes
                metadata = tx.get_metadata()
                if not tx.is_block and metadata.voided_by:
                    self.tx_storage._del_from_cache(tx)
        if self.state != self.NodeState.INITIALIZING or self._full_verification:
            try:
                tx.update_initial_metadata()
                self.consensus_algorithm.update(tx)
            except Exception:
                # Roll back the stored tx so storage does not keep a half-processed entry.
                self.log.exception('unexpected error when processing tx', tx=tx)
                self.tx_storage.remove_transaction(tx)
                raise
        if not quiet:
            ts_date = datetime.datetime.fromtimestamp(tx.timestamp)
            now = datetime.datetime.fromtimestamp(self.reactor.seconds())
            if tx.is_block:
                self.log.info('new block', tx=tx, ts_date=ts_date, time_from_now=tx.get_time_from_now(now))
            else:
                self.log.info('new tx', tx=tx, ts_date=ts_date, time_from_now=tx.get_time_from_now(now))
        if propagate_to_peers:
            # Propagate to our peers.
            self.connections.send_tx_to_peers(tx)
        if self.wallet:
            # TODO Remove it and use pubsub instead.
            self.wallet.on_new_tx(tx)
        # Publish to pubsub manager the new tx accepted
        self.pubsub.publish(HathorEvents.NETWORK_NEW_TX_ACCEPTED, tx=tx)
        return True
def listen(self, description: str, use_ssl: Optional[bool] = None) -> None:
endpoint = self.connections.listen(description, use_ssl)
if self.hostname:
proto, _, _ = description.partition(':')
address = '{}://{}:{}'.format(proto, self.hostname, endpoint._port)
self.my_peer.entrypoints.append(address)
def add_peer_to_whitelist(self, peer_id):
if not settings.ENABLE_PEER_WHITELIST:
return
if peer_id in self.peers_whitelist:
self.log.info('peer already in whitelist', peer_id=peer_id)
else:
self.peers_whitelist.append(peer_id)
def remove_peer_from_whitelist_and_disconnect(self, peer_id: str) -> None:
if not settings.ENABLE_PEER_WHITELIST:
return
if peer_id in self.peers_whitelist:
self.peers_whitelist.remove(peer_id)
# disconnect from node
self.connections.drop_connection_by_peer_id(peer_id)
class ParentTxs(NamedTuple):
    """Candidate parent transactions for a new block.

    `must_include` holds at most one hash that has to appear in any chosen pair
    (this guarantees a lone tx tip is always confirmed), while `can_include`
    lists hashes from which the remaining parents may be drawn.
    """
    max_timestamp: int
    can_include: List[bytes]
    must_include: List[bytes]

    def get_random_parents(self) -> Tuple[bytes, bytes]:
        """Pick two parent hashes: everything in `must_include` plus a random
        draw from `can_include`, preserving the original tip ordering.

        Returning a tuple makes it explicit that exactly two hashes result.
        """
        assert len(self.must_include) <= 1
        needed = 2 - len(self.must_include)
        drawn = random.sample(list(enumerate(self.can_include)), needed)
        fill = [tx_hash for _, tx_hash in sorted(drawn)]
        p1, p2 = self.must_include[:] + fill
        return p1, p2

    def get_all_tips(self) -> List[bytes]:
        """All generated "tips": must_include followed by can_include."""
        return self.must_include + self.can_include
|
[
"hathor.p2p.manager.ConnectionsManager",
"random.shuffle",
"cProfile.Profile",
"hathor.p2p.factory.HathorClientFactory",
"hathor.pubsub.PubSubManager",
"hathor.checkpoint.Checkpoint",
"hathor.daa.calculate_next_weight",
"hathor.p2p.factory.HathorServerFactory",
"hathor.stratum.StratumFactory",
"hathor.transaction.sum_weights",
"twisted.python.threadpool.ThreadPool",
"hathor.daa.get_tokens_issued_per_block",
"hathor.p2p.peer_id.PeerId",
"hathor.exception.InvalidNewTransaction",
"hathor.indexes.WalletIndex",
"hathor.consensus.ConsensusAlgorithm",
"datetime.datetime.fromtimestamp",
"hathor.indexes.TokensIndex",
"sys.exit",
"hathor.transaction.storage.memory_storage.TransactionMemoryStorage",
"random.choice",
"hathor.profiler.get_cpu_profiler",
"time.time",
"hathor.conf.HathorSettings",
"hathor.metrics.Metrics",
"twisted.internet.defer.DeferredList",
"structlog.get_logger"
] |
[((1781, 1797), 'hathor.conf.HathorSettings', 'HathorSettings', ([], {}), '()\n', (1795, 1797), False, 'from hathor.conf import HathorSettings\n'), ((1807, 1819), 'structlog.get_logger', 'get_logger', ([], {}), '()\n', (1817, 1819), False, 'from structlog import get_logger\n'), ((1826, 1844), 'hathor.profiler.get_cpu_profiler', 'get_cpu_profiler', ([], {}), '()\n', (1842, 1844), False, 'from hathor.profiler import get_cpu_profiler\n'), ((6043, 6183), 'hathor.metrics.Metrics', 'Metrics', ([], {'pubsub': 'self.pubsub', 'avg_time_between_blocks': 'settings.AVG_TIME_BETWEEN_BLOCKS', 'tx_storage': 'self.tx_storage', 'reactor': 'self.reactor'}), '(pubsub=self.pubsub, avg_time_between_blocks=settings.\n AVG_TIME_BETWEEN_BLOCKS, tx_storage=self.tx_storage, reactor=self.reactor)\n', (6050, 6183), False, 'from hathor.metrics import Metrics\n'), ((6274, 6294), 'hathor.consensus.ConsensusAlgorithm', 'ConsensusAlgorithm', ([], {}), '()\n', (6292, 6294), False, 'from hathor.consensus import ConsensusAlgorithm\n'), ((6406, 6477), 'hathor.p2p.factory.HathorServerFactory', 'HathorServerFactory', (['self.network', 'self.my_peer'], {'node': 'self', 'use_ssl': 'ssl'}), '(self.network, self.my_peer, node=self, use_ssl=ssl)\n', (6425, 6477), False, 'from hathor.p2p.factory import HathorClientFactory, HathorServerFactory\n'), ((6508, 6579), 'hathor.p2p.factory.HathorClientFactory', 'HathorClientFactory', (['self.network', 'self.my_peer'], {'node': 'self', 'use_ssl': 'ssl'}), '(self.network, self.my_peer, node=self, use_ssl=ssl)\n', (6527, 6579), False, 'from hathor.p2p.factory import HathorClientFactory, HathorServerFactory\n'), ((6607, 6724), 'hathor.p2p.manager.ConnectionsManager', 'ConnectionsManager', (['self.reactor', 'self.my_peer', 'self.server_factory', 'self.client_factory', 'self.pubsub', 'self', 'ssl'], {}), '(self.reactor, self.my_peer, self.server_factory, self.\n client_factory, self.pubsub, self, ssl)\n', (6625, 6724), False, 'from hathor.p2p.manager import 
ConnectionsManager\n'), ((7449, 7539), 'twisted.python.threadpool.ThreadPool', 'ThreadPool', ([], {'minthreads': '(0)', 'maxthreads': 'settings.MAX_POW_THREADS', 'name': '"""Pow thread pool"""'}), "(minthreads=0, maxthreads=settings.MAX_POW_THREADS, name=\n 'Pow thread pool')\n", (7459, 7539), False, 'from twisted.python.threadpool import ThreadPool\n'), ((11335, 11346), 'time.time', 'time.time', ([], {}), '()\n', (11344, 11346), False, 'import time\n'), ((12291, 12316), 'twisted.internet.defer.DeferredList', 'defer.DeferredList', (['waits'], {}), '(waits)\n', (12309, 12316), False, 'from twisted.internet import defer\n'), ((13638, 13649), 'time.time', 'time.time', ([], {}), '()\n', (13647, 13649), False, 'import time\n'), ((17436, 17455), 'random.shuffle', 'random.shuffle', (['ret'], {}), '(ret)\n', (17450, 17455), False, 'import random\n'), ((24839, 24889), 'hathor.daa.calculate_next_weight', 'daa.calculate_next_weight', (['parent_block', 'timestamp'], {}), '(parent_block, timestamp)\n', (24864, 24889), False, 'from hathor import daa\n'), ((27382, 27421), 'hathor.daa.get_tokens_issued_per_block', 'daa.get_tokens_issued_per_block', (['height'], {}), '(height)\n', (27413, 27421), False, 'from hathor import daa\n'), ((5005, 5013), 'hathor.p2p.peer_id.PeerId', 'PeerId', ([], {}), '()\n', (5011, 5013), False, 'from hathor.p2p.peer_id import PeerId\n'), ((5698, 5725), 'hathor.pubsub.PubSubManager', 'PubSubManager', (['self.reactor'], {}), '(self.reactor)\n', (5711, 5725), False, 'from hathor.pubsub import HathorEvents, PubSubManager\n'), ((5766, 5792), 'hathor.transaction.storage.memory_storage.TransactionMemoryStorage', 'TransactionMemoryStorage', ([], {}), '()\n', (5790, 5792), False, 'from hathor.transaction.storage.memory_storage import TransactionMemoryStorage\n'), ((5937, 5961), 'hathor.indexes.WalletIndex', 'WalletIndex', (['self.pubsub'], {}), '(self.pubsub)\n', (5948, 5961), False, 'from hathor.indexes import TokensIndex, WalletIndex\n'), ((6005, 6018), 
'hathor.indexes.TokensIndex', 'TokensIndex', ([], {}), '()\n', (6016, 6018), False, 'from hathor.indexes import TokensIndex, WalletIndex\n'), ((7094, 7141), 'hathor.stratum.StratumFactory', 'StratumFactory', ([], {'manager': 'self', 'port': 'stratum_port'}), '(manager=self, port=stratum_port)\n', (7108, 7141), False, 'from hathor.stratum import StratumFactory\n'), ((12795, 12813), 'cProfile.Profile', 'cProfile.Profile', ([], {}), '()\n', (12811, 12813), False, 'import cProfile\n'), ((14398, 14408), 'sys.exit', 'sys.exit', ([], {}), '()\n', (14406, 14408), False, 'import sys\n'), ((14775, 14786), 'time.time', 'time.time', ([], {}), '()\n', (14784, 14786), False, 'import time\n'), ((28301, 28361), 'hathor.exception.InvalidNewTransaction', 'InvalidNewTransaction', (['"""Cannot validate, missing dependency"""'], {}), "('Cannot validate, missing dependency')\n", (28322, 28361), False, 'from hathor.exception import InvalidNewTransaction\n'), ((31986, 32031), 'datetime.datetime.fromtimestamp', 'datetime.datetime.fromtimestamp', (['tx.timestamp'], {}), '(tx.timestamp)\n', (32017, 32031), False, 'import datetime\n'), ((5427, 5469), 'hathor.checkpoint.Checkpoint', 'Checkpoint', (['(0)', 'settings.GENESIS_BLOCK_HASH'], {}), '(0, settings.GENESIS_BLOCK_HASH)\n', (5437, 5469), False, 'from hathor.checkpoint import Checkpoint\n'), ((9618, 9628), 'sys.exit', 'sys.exit', ([], {}), '()\n', (9626, 9628), False, 'import sys\n'), ((10388, 10398), 'sys.exit', 'sys.exit', ([], {}), '()\n', (10396, 10398), False, 'import sys\n'), ((15008, 15073), 'datetime.datetime.fromtimestamp', 'datetime.datetime.fromtimestamp', (['self.tx_storage.latest_timestamp'], {}), '(self.tx_storage.latest_timestamp)\n', (15039, 15073), False, 'import datetime\n'), ((17683, 17705), 'random.choice', 'random.choice', (['parents'], {}), '(parents)\n', (17696, 17705), False, 'import random\n'), ((25755, 25794), 'hathor.daa.get_tokens_issued_per_block', 'daa.get_tokens_issued_per_block', (['height'], {}), 
'(height)\n', (25786, 25794), False, 'from hathor import daa\n'), ((26061, 26109), 'hathor.transaction.sum_weights', 'sum_weights', (['parent_block_metadata.score', 'weight'], {}), '(parent_block_metadata.score, weight)\n', (26072, 26109), False, 'from hathor.transaction import BaseTransaction, Block, MergeMinedBlock, Transaction, TxVersion, sum_weights\n'), ((16265, 16276), 'time.time', 'time.time', ([], {}), '()\n', (16274, 16276), False, 'import time\n'), ((16332, 16343), 'time.time', 'time.time', ([], {}), '()\n', (16341, 16343), False, 'import time\n')]
|
import torch
from ._unpooling import Unpooling, Unpooling1d, Unpooling2d
from ._dense import Dense, Dense1d, Dense2d
from typing import Union, List, Tuple
class Upsampling(torch.nn.Module):
    """
    An upsampling layer is an 'UnpoolingNd' layer
    followed by a 'DenseNd' layer.
    """

    @classmethod
    def from_dump(cls, dump: dict) -> object:
        """Rebuild an 'Upsampling' object from its 'dump' dictionary."""
        obj = cls.__new__(cls)
        torch.nn.Module.__init__(obj)
        obj.stacked_channels = dump["stacked channels"]
        obj.unpooling = Unpooling.from_dump(dump["unpooling"])
        obj.dense = Dense.from_dump(dump["dense"])
        return obj

    def __init__(self, in_features: int,
                 dense_layer: Union[List[dict], dict],
                 upsampling_factor: Union[int, Tuple[int, int]],
                 upsampling_method: str = "nearest",
                 stacked_channels: int = 0,
                 **kwargs):
        """
        Parameters
        ----------
        in_features : int
            the number of channels of the input
        dense_layer : dict, or list of dict
            the parameters of all layers of the 'DenseNd'
        upsampling_factor : int, or tuple of int
            the upsampling factor
        upsampling_method : one of {'nearest', 'interpolate'}
            the method used to unpool
        stacked_channels : int
            The number of channels of the Xstack argument
            of the 'forward' method
        **kwargs
            additional kwargs passed to DenseNd
        """
        super().__init__()
        unpooling = self.UnpoolingNd(factor=upsampling_factor,
                                     method=upsampling_method)
        dense = self.DenseNd(in_features+stacked_channels,
                             dense_layer, **kwargs)
        self.unpooling = unpooling
        self.stacked_channels = stacked_channels
        self.dense = dense

    def forward(self, X: torch.Tensor,
                Xstack: Union[torch.Tensor, None] = None) -> torch.Tensor:
        """
        Upsample X then apply a dense layer.
        Optionally concatenate Xstack to X after upsampling,
        and before the dense layer

        Parameters:
        -----------
        X : torch.Tensor
            the input of the model
        Xstack : torch.Tensor or None
            if a tensor is provided, the channels of Xstack are concatenated
            to the channels of X after the upsampling layer.
            This is useful for UNet architectures

        Returns:
        -------
        torch.Tensor :
            result of the layer
        """
        X = self.unpooling(X)
        if Xstack is not None:
            X = self.concat(Xstack, X)
        X = self.dense(X)
        return X

    def shape_in(self, shape_out: list) -> list:
        # BUGFIX: was 'self.pooling', an attribute that is never set
        # ('__init__' and 'from_dump' store 'self.unpooling').
        # NOTE(review): the composition order (dense after unpooling) mirrors
        # shape_out, although forward applies unpooling first — confirm intended.
        return self.dense.shape_in(self.unpooling.shape_in(shape_out))

    def shape_out(self, shape_in: list) -> list:
        # BUGFIX: was 'self.pooling', an attribute that is never set.
        return self.dense.shape_out(self.unpooling.shape_out(shape_in))

    def in_features(self, out_features: int) -> int:
        """Number of input channels needed to produce 'out_features' channels."""
        return self.dense.in_features(out_features) - self.stacked_channels

    def out_features(self, in_features: int) -> int:
        """Number of output channels produced from 'in_features' channels."""
        return self.dense.out_features(in_features+self.stacked_channels)

    def concat(self, X1: torch.Tensor, X2: torch.Tensor) -> torch.Tensor:
        """
        return [X1, X2] concatenated along the channel axis
        if X2 is smaller than X1, it is padded with 0
        """
        # Pad each trailing spatial dim of X2 (right side) up to X1's size.
        padding = [[0, l1 - l2] for l1, l2 in
                   zip(X1.shape[-1:1:-1], X2.shape[-1:1:-1])]
        padding = sum(padding, [])
        if any(p > 0 for p in padding):
            X2 = torch.nn.functional.pad(X2, padding, value=0.)
        return torch.cat([X1, X2], dim=1)

    @property
    def dump(self) -> dict:
        """Serializable representation of this layer."""
        return {"type": type(self).__name__,
                "stacked channels": self.stacked_channels,
                "unpooling": self.unpooling.dump,
                "dense": self.dense.dump}
class Upsampling1d(Upsampling):
    """1D specialization of 'Upsampling' (1D unpooling + 1D dense)."""

    UnpoolingNd = Unpooling1d
    DenseNd = Dense1d

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

    @property
    def factor(self) -> int:
        """Overall upsampling factor: product of the dense strides
        and the unpooling window."""
        f = 1
        for activated in self.dense:
            f *= activated.weighting.stride
        # BUGFIX: was 'self.pooling', an attribute that is never set
        # ('__init__' stores 'self.unpooling'). Assumes Unpooling1d exposes
        # 'pooling_window' — TODO confirm.
        f *= self.unpooling.pooling_window
        return f
class Upsampling2d(Upsampling):
    """2D specialization of 'Upsampling' (2D unpooling + 2D dense)."""

    UnpoolingNd = Unpooling2d
    DenseNd = Dense2d

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

    @property
    def factor(self) -> Tuple[int, int]:
        """Overall (height, width) upsampling factors."""
        fh, fw = 1, 1
        for activated in self.dense:
            h, w = activated.weighting.stride
            fh *= h
            fw *= w
        # BUGFIX: was 'self.pooling', an attribute that is never set
        # ('__init__' stores 'self.unpooling'). Assumes Unpooling2d exposes
        # 'pooling_window' — TODO confirm.
        h, w = self.unpooling.pooling_window
        # NOTE(review): returns a list although annotated Tuple — kept as-is
        # for backward compatibility with existing callers.
        return [fh*h, fw*w]
|
[
"torch.nn.Module.__init__",
"torch.cat",
"torch.nn.functional.pad"
] |
[((396, 425), 'torch.nn.Module.__init__', 'torch.nn.Module.__init__', (['obj'], {}), '(obj)\n', (420, 425), False, 'import torch\n'), ((3702, 3728), 'torch.cat', 'torch.cat', (['[X1, X2]'], {'dim': '(1)'}), '([X1, X2], dim=1)\n', (3711, 3728), False, 'import torch\n'), ((3640, 3687), 'torch.nn.functional.pad', 'torch.nn.functional.pad', (['X2', 'padding'], {'value': '(0.0)'}), '(X2, padding, value=0.0)\n', (3663, 3687), False, 'import torch\n')]
|
#!/usr/bin/python3
import pytest
from brownie.convert import to_address
# Mixed-case (checksummed) hex address used as the canonical expected value.
addr = "0x14b0Ed2a7C4cC60DD8F676AE44D0831d3c9b2a9E"
# The same address as its raw 20-byte binary representation.
addr_encoded = b"\x14\xb0\xed*|L\xc6\r\xd8\xf6v\xaeD\xd0\x83\x1d<\x9b*\x9e"
def test_success():
    # Every casing/prefix variant must normalize to the checksummed address.
    for variant in (addr, addr.lower(), addr.upper(), addr[2:]):
        assert to_address(variant) == addr
def test_bytes_success():
    # A raw 20-byte value decodes to the checksummed hex address.
    assert to_address(addr_encoded) == addr
def test_wrong_length():
    # Too-short, truncated, and too-long inputs must all be rejected.
    for invalid in ("0x00", addr[:20], addr + "00"):
        with pytest.raises(ValueError):
            to_address(invalid)
|
[
"pytest.raises",
"brownie.convert.to_address"
] |
[((236, 252), 'brownie.convert.to_address', 'to_address', (['addr'], {}), '(addr)\n', (246, 252), False, 'from brownie.convert import to_address\n'), ((360, 380), 'brownie.convert.to_address', 'to_address', (['addr[2:]'], {}), '(addr[2:])\n', (370, 380), False, 'from brownie.convert import to_address\n'), ((428, 452), 'brownie.convert.to_address', 'to_address', (['addr_encoded'], {}), '(addr_encoded)\n', (438, 452), False, 'from brownie.convert import to_address\n'), ((497, 522), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (510, 522), False, 'import pytest\n'), ((532, 550), 'brownie.convert.to_address', 'to_address', (['"""0x00"""'], {}), "('0x00')\n", (542, 550), False, 'from brownie.convert import to_address\n'), ((560, 585), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (573, 585), False, 'import pytest\n'), ((595, 616), 'brownie.convert.to_address', 'to_address', (['addr[:20]'], {}), '(addr[:20])\n', (605, 616), False, 'from brownie.convert import to_address\n'), ((626, 651), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (639, 651), False, 'import pytest\n'), ((661, 684), 'brownie.convert.to_address', 'to_address', (["(addr + '00')"], {}), "(addr + '00')\n", (671, 684), False, 'from brownie.convert import to_address\n')]
|
"""make scheduler_params a separate JSONB field
Revision ID: f5f55452fa58
Revises: <PASSWORD>
Create Date: 2021-09-28 16:48:42.834962
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
# revision identifiers, used by Alembic.
revision = 'f5f55452fa58'
# NOTE(review): '<PASSWORD>' looks like a redacted/sanitized placeholder — the real
# parent revision id is required for this migration to be applied by Alembic.
down_revision = '<PASSWORD>'
branch_labels = None
depends_on = None
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    # Add a nullable JSONB column to hold per-KPI scheduler parameters.
    scheduler_params = sa.Column(
        'scheduler_params',
        postgresql.JSONB(astext_type=sa.Text()),
        nullable=True,
    )
    op.add_column('kpi', scheduler_params)
    # ### end Alembic commands ###
def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    # Reverse of upgrade(): drop the JSONB column from the 'kpi' table.
    op.drop_column('kpi', 'scheduler_params')
    # ### end Alembic commands ###
|
[
"alembic.op.drop_column",
"sqlalchemy.Text"
] |
[((685, 726), 'alembic.op.drop_column', 'op.drop_column', (['"""kpi"""', '"""scheduler_params"""'], {}), "('kpi', 'scheduler_params')\n", (699, 726), False, 'from alembic import op\n'), ((533, 542), 'sqlalchemy.Text', 'sa.Text', ([], {}), '()\n', (540, 542), True, 'import sqlalchemy as sa\n')]
|
#!/usr/bin/env python3
import logging
import asyncio
import platform
from loguru import logger
from bleak import BleakClient
from config import BLE_CHARACTERISTIC_UUID, BLE_ADDR
from push import ble_packet_event
async def ble_service(loop: asyncio.AbstractEventLoop,
                      tx: asyncio.Queue,
                      disconnected_event: asyncio.Event):
    """Connect to the configured BLE device and stream its notifications into `tx`.

    Runs until `disconnected_event` is set, then pushes a `None` sentinel into
    `tx` so the consumer knows the stream ended.
    """
    async def put_to_queue(data):
        # Count the packet for metrics before handing it to the consumer queue.
        ble_packet_event.inc(1)
        await tx.put(data)
    def notification_handler(sender, data):
        # Called by bleak outside our coroutine; schedule the async queue put
        # on the event loop instead of awaiting here.
        loop.create_task(put_to_queue(data))
    logger.info("scanning client...")
    client = BleakClient(BLE_ADDR, loop=loop)
    logger.info("connecting to device {0} ...".format(BLE_ADDR))
    await client.connect()
    x = await client.is_connected()
    logger.info("connected: {0}".format(x))
    await client.start_notify(BLE_CHARACTERISTIC_UUID, notification_handler)
    logger.info("notification registered")
    def disconnect_callback(client):
        # May run on another thread; set the event through the loop.
        loop.call_soon_threadsafe(disconnected_event.set)
    client.set_disconnected_callback(disconnect_callback)
    try:
        await disconnected_event.wait()
    # NOTE(review): bare 'except:' also swallows CancelledError, and stop_notify
    # only runs when the wait raises (not on a normal disconnect) — confirm intended.
    except:
        await client.stop_notify(BLE_CHARACTERISTIC_UUID, notification_handler)
    logger.info("disconnected from device")
    # Sentinel so the queue consumer can terminate.
    await tx.put(None)
    await asyncio.sleep(0.5)
|
[
"loguru.logger.info",
"bleak.BleakClient",
"asyncio.sleep",
"push.ble_packet_event.inc"
] |
[((556, 589), 'loguru.logger.info', 'logger.info', (['"""scanning client..."""'], {}), "('scanning client...')\n", (567, 589), False, 'from loguru import logger\n'), ((604, 636), 'bleak.BleakClient', 'BleakClient', (['BLE_ADDR'], {'loop': 'loop'}), '(BLE_ADDR, loop=loop)\n', (615, 636), False, 'from bleak import BleakClient\n'), ((892, 930), 'loguru.logger.info', 'logger.info', (['"""notification registered"""'], {}), "('notification registered')\n", (903, 930), False, 'from loguru import logger\n'), ((1233, 1272), 'loguru.logger.info', 'logger.info', (['"""disconnected from device"""'], {}), "('disconnected from device')\n", (1244, 1272), False, 'from loguru import logger\n'), ((410, 433), 'push.ble_packet_event.inc', 'ble_packet_event.inc', (['(1)'], {}), '(1)\n', (430, 433), False, 'from push import ble_packet_event\n'), ((1308, 1326), 'asyncio.sleep', 'asyncio.sleep', (['(0.5)'], {}), '(0.5)\n', (1321, 1326), False, 'import asyncio\n')]
|
import ee
import numpy as np
import pandas as pd
import geopandas as gpd
from shapely.geometry import box
import rabpro
from rabpro.basin_stats import Dataset
# coords_file = gpd.read_file(r"tests/data/Big Blue River.geojson")
# total_bounds = coords_file.total_bounds
# Hard-coded bounding box (min lon, min lat, max lon, max lat) taken from the
# Big Blue River test data, so the tests do not need the geojson file on disk.
total_bounds = np.array([-85.91331249, 39.42609864, -85.88453019, 39.46429816])
# Single-polygon GeoDataFrame used as the basin geometry for all tests below.
gdf = gpd.GeoDataFrame({"idx": [1], "geometry": [box(*total_bounds)]}, crs="EPSG:4326")
def clean_res(feature):
    """Flatten one GeoJSON feature into a single-row DataFrame.

    The feature's ``properties`` become columns and its ``id`` is
    appended as an extra ``id`` column.
    """
    row = pd.DataFrame(feature["properties"], index=[0])
    row["id"] = feature["id"]
    return row
def test_customreducer():
    """A custom reducer function should add a column named after it."""

    # the function name becomes the output column name, so it must stay "asdf"
    def asdf(feat):
        return feat.getNumber("max")

    data, task = rabpro.basin_stats.compute(
        [Dataset("JRC/GSW1_3/YearlyHistory", "waterClass", stats=["max"])],
        basins_gdf=gdf,
        reducer_funcs=[asdf],
        test=True,
    )
    frames = [clean_res(feature) for feature in data[0]["features"]]
    res = pd.concat(frames)
    assert all(res["asdf"] == res["max"])
def test_categorical_imgcol():
    """A frequency-histogram stat on a categorical band yields several columns."""
    datasets = [Dataset("MODIS/006/MCD12Q1", "LC_Type1", stats=["freqhist"])]
    urls, task = rabpro.basin_stats.compute(datasets, basins_gdf=gdf)
    res = rabpro.basin_stats.fetch_gee(urls, ["lulc"])
    assert res.shape[1] > 1
def test_timeindexed_imgcol():
    """A time-indexed image collection produces non-empty, positive means."""
    datasets = [Dataset("JRC/GSW1_3/YearlyHistory", "waterClass")]
    urls, tasks = rabpro.basin_stats.compute(datasets, basins_gdf=gdf)
    res = rabpro.basin_stats.fetch_gee(urls, ["waterclass"])
    assert res["waterclass_mean"].iloc[0] > 0
    assert res.shape[0] > 0
def test_timeindexedspecific_imgcol():
    """Restricting the date range to two years yields exactly two rows."""
    dataset = Dataset(
        "JRC/GSW1_3/YearlyHistory",
        "waterClass",
        start="2017-01-01",
        end="2019-01-01",
    )
    data, task = rabpro.basin_stats.compute([dataset], basins_gdf=gdf, test=True)
    frames = [clean_res(feature) for feature in data[0]["features"]]
    res = pd.concat(frames)
    assert res.shape[0] == 2
def test_nontimeindexed_imgcol():
    """A non-time-indexed image collection still produces at least one row."""
    dataset = Dataset("JRC/GSW1_3/MonthlyRecurrence", "monthly_recurrence")
    data, task = rabpro.basin_stats.compute([dataset], basins_gdf=gdf, test=True)
    frames = [clean_res(feature) for feature in data[0]["features"]]
    res = pd.concat(frames)
    assert res.shape[0] > 0
def test_img():
    """Requesting seven stats on a single image returns all stat columns."""
    dataset = Dataset(
        "JRC/GSW1_3/GlobalSurfaceWater",
        "occurrence",
        stats=["min", "max", "range", "std", "sum", "pct50", "pct3"],
    )
    data, task = rabpro.basin_stats.compute([dataset], basins_gdf=gdf, test=True)
    props = data[0]["features"][0]["properties"]
    res = pd.DataFrame(props, index=[0])
    assert float(res["mean"]) > 0
    assert res.shape[1] == 9
|
[
"pandas.DataFrame",
"rabpro.basin_stats.Dataset",
"rabpro.basin_stats.fetch_gee",
"numpy.array",
"shapely.geometry.box"
] |
[((287, 351), 'numpy.array', 'np.array', (['[-85.91331249, 39.42609864, -85.88453019, 39.46429816]'], {}), '([-85.91331249, 39.42609864, -85.88453019, 39.46429816])\n', (295, 351), True, 'import numpy as np\n'), ((476, 522), 'pandas.DataFrame', 'pd.DataFrame', (["feature['properties']"], {'index': '[0]'}), "(feature['properties'], index=[0])\n", (488, 522), True, 'import pandas as pd\n'), ((1157, 1201), 'rabpro.basin_stats.fetch_gee', 'rabpro.basin_stats.fetch_gee', (['urls', "['lulc']"], {}), "(urls, ['lulc'])\n", (1185, 1201), False, 'import rabpro\n'), ((1405, 1455), 'rabpro.basin_stats.fetch_gee', 'rabpro.basin_stats.fetch_gee', (['urls', "['waterclass']"], {}), "(urls, ['waterclass'])\n", (1433, 1455), False, 'import rabpro\n'), ((2624, 2685), 'pandas.DataFrame', 'pd.DataFrame', (["data[0]['features'][0]['properties']"], {'index': '[0]'}), "(data[0]['features'][0]['properties'], index=[0])\n", (2636, 2685), True, 'import pandas as pd\n'), ((401, 419), 'shapely.geometry.box', 'box', (['*total_bounds'], {}), '(*total_bounds)\n', (404, 419), False, 'from shapely.geometry import box\n'), ((708, 772), 'rabpro.basin_stats.Dataset', 'Dataset', (['"""JRC/GSW1_3/YearlyHistory"""', '"""waterClass"""'], {'stats': "['max']"}), "('JRC/GSW1_3/YearlyHistory', 'waterClass', stats=['max'])\n", (715, 772), False, 'from rabpro.basin_stats import Dataset\n'), ((1063, 1123), 'rabpro.basin_stats.Dataset', 'Dataset', (['"""MODIS/006/MCD12Q1"""', '"""LC_Type1"""'], {'stats': "['freqhist']"}), "('MODIS/006/MCD12Q1', 'LC_Type1', stats=['freqhist'])\n", (1070, 1123), False, 'from rabpro.basin_stats import Dataset\n'), ((1320, 1369), 'rabpro.basin_stats.Dataset', 'Dataset', (['"""JRC/GSW1_3/YearlyHistory"""', '"""waterClass"""'], {}), "('JRC/GSW1_3/YearlyHistory', 'waterClass')\n", (1327, 1369), False, 'from rabpro.basin_stats import Dataset\n'), ((1640, 1732), 'rabpro.basin_stats.Dataset', 'Dataset', (['"""JRC/GSW1_3/YearlyHistory"""', '"""waterClass"""'], {'start': '"""2017-01-01"""', 
'end': '"""2019-01-01"""'}), "('JRC/GSW1_3/YearlyHistory', 'waterClass', start='2017-01-01', end=\n '2019-01-01')\n", (1647, 1732), False, 'from rabpro.basin_stats import Dataset\n'), ((2066, 2127), 'rabpro.basin_stats.Dataset', 'Dataset', (['"""JRC/GSW1_3/MonthlyRecurrence"""', '"""monthly_recurrence"""'], {}), "('JRC/GSW1_3/MonthlyRecurrence', 'monthly_recurrence')\n", (2073, 2127), False, 'from rabpro.basin_stats import Dataset\n'), ((2373, 2493), 'rabpro.basin_stats.Dataset', 'Dataset', (['"""JRC/GSW1_3/GlobalSurfaceWater"""', '"""occurrence"""'], {'stats': "['min', 'max', 'range', 'std', 'sum', 'pct50', 'pct3']"}), "('JRC/GSW1_3/GlobalSurfaceWater', 'occurrence', stats=['min', 'max',\n 'range', 'std', 'sum', 'pct50', 'pct3'])\n", (2380, 2493), False, 'from rabpro.basin_stats import Dataset\n')]
|
# graph.py
# Graph Class
# By: <NAME>
class Graph:
    """A weighted graph stored as a mapping from each vertex label to a
    dictionary of {neighbour: edge weight}.  Supports both directed and
    undirected graphs.
    """

    def __init__(self, vertices, directed = False):
        """Create a graph with the given vertex labels and no edges.

        pre: vertices is a list of vertex labels; directed is a Boolean
             telling whether edges are one-way.
        post: every vertex has an (initially empty) adjacency dictionary
        """
        self.edges = {}
        for label in vertices:
            self.edges[label] = {}
        self.directed = directed

    def add_edge(self, vertex1, vertex2, weight = 1):
        """Join vertex1 to vertex2 with an edge of the given weight.

        For an undirected graph the symmetric edge is stored too, so the
        connection is visible from either endpoint.
        """
        self.edges[vertex1][vertex2] = weight
        if not self.directed:
            self.edges[vertex2][vertex1] = weight

    def has_edge(self, vertex1, vertex2):
        """Tell whether an edge runs from vertex1 to vertex2."""
        return vertex2 in self.edges[vertex1]

    def is_directed(self):
        """Tell whether this graph is directed."""
        return self.directed

    def weight(self, vertex1, vertex2):
        """Give the weight of the edge from vertex1 to vertex2."""
        return self.edges[vertex1][vertex2]

    def num_vertices(self):
        """Give the number of distinct vertices in the graph."""
        return len(self.edges)

    def vertex_iter(self):
        """Iterate over the vertex labels."""
        return iter(self.edges)

    def edge_iter(self):
        """Yield every stored edge as a (source, target) pair."""
        for source, neighbours in self.edges.items():
            for target in neighbours:
                yield (source, target)

    def adjacent_iter(self, vertex):
        """Iterate over the vertices adjacent to the given vertex."""
        return iter(self.edges[vertex])
def fromfile(filename):
    """Build a Graph from a plain-text description.

    pre: filename is the path of a file whose first line is
         "directed" or "undirected", whose second line lists the
         distinct vertex labels separated by spaces, and whose
         remaining lines each describe one edge as
         "vertex1 vertex2" or "vertex1 vertex2 weight"
    post: returns the Graph described by the file

    Fixes over the previous version:
    * the file is closed even when parsing raises (context manager)
    * the bare ``except`` used as control flow is replaced with an
      explicit check of the number of fields on each edge line
    * newline handling uses ``strip()``/``split()`` instead of slicing
      off the last character, so a missing trailing newline no longer
      corrupts the final edge line
    """
    with open(filename, "r") as openfile:
        directed = openfile.readline().strip().lower() == "directed"
        # split() discards the trailing newline and any extra whitespace
        vertices = openfile.readline().split()
        graph = Graph(vertices, directed)
        for line in openfile:
            fields = line.split()
            if not fields:
                continue  # ignore blank lines
            if len(fields) == 2:
                graph.add_edge(fields[0], fields[1])
            else:
                # NOTE: the weight is passed through as a string, matching
                # the original behavior; convert at the call site if a
                # numeric weight is required.
                graph.add_edge(fields[0], fields[1], fields[2])
    return graph
def main():
    """Interactively build graphs from user-chosen files and display
    their vertices and edges.
    """

    def wants(prompt):
        # True when the user's reply starts with "y" or "Y".
        return str(input(prompt)).lower()[0] == "y"

    print("This program allows the user to build a graph from a file")
    keep_going = wants("Would you like to start? (Y/N) ")
    # keeps running until the user says to stop
    while keep_going:
        # ask the user for the filename via the file-open dialog
        graph = fromfile(askopenfilename())
        print("Would you like to know the number of distinct vertices")
        if wants("in your graph? (Y/N)"):
            print(graph.num_vertices())
            print()
        print("Would you like to see the distinct vertices in the graph?")
        if wants("(Y/N) "):
            for vertex in graph.vertex_iter():
                print(vertex)
            print()
        print("Would you like to see all the edges in your graph?")
        if wants("(Y/N)"):
            for edge in graph.edge_iter():
                print(edge)
        print("Thank you for using this program!\n")
        keep_going = wants("Would you like to create a graph from another file? (Y/N) ")
if __name__ == "__main__":
    # The Tk file-open dialog used by main() is imported only when the
    # module is run as a script.
    from tkinter.filedialog import askopenfilename
    main()
|
[
"tkinter.filedialog.askopenfilename"
] |
[((4242, 4259), 'tkinter.filedialog.askopenfilename', 'askopenfilename', ([], {}), '()\n', (4257, 4259), False, 'from tkinter.filedialog import askopenfilename\n')]
|
"""SSD1351 demo (fonts)."""
from time import sleep
from ssd1351 import Display, color565
from machine import Pin, SPI
from xglcd_font import XglcdFont
def test():
    """Demo: draw every bundled font in portrait, landscape, and
    background-colour modes on an SSD1351 display.
    """
    spi = SPI(2, baudrate=14500000, sck=Pin(18), mosi=Pin(23))
    display = Display(spi, dc=Pin(17), cs=Pin(5), rst=Pin(16))
    print("Loading fonts, please wait.")
    # One entry per font: (label, font, offset, foreground, background).
    # The offset is the y coordinate in portrait passes and the x
    # coordinate in the landscape pass.
    demo_fonts = [
        ('Arcade Pix 9x11', XglcdFont('fonts/ArcadePix9x11.c', 9, 11),
         0, color565(255, 0, 0), color565(0, 255, 255)),
        ('Bally 7x9', XglcdFont('fonts/Bally7x9.c', 7, 9),
         12, color565(0, 255, 0), color565(0, 0, 128)),
        ('Broadway', XglcdFont('fonts/Broadway17x15.c', 17, 15),
         23, color565(0, 0, 255), color565(255, 255, 0)),
        ('Espresso', XglcdFont('fonts/EspressoDolce18x24.c', 18, 24),
         36, color565(0, 255, 255), color565(255, 0, 0)),
        ('Fixed Font 5x8', XglcdFont('fonts/FixedFont5x8.c', 5, 8),
         64, color565(255, 0, 255), color565(0, 128, 0)),
        ('Neato 5x7', XglcdFont('fonts/Neato5x7.c', 5, 7, letter_count=223),
         76, color565(255, 255, 0), color565(0, 0, 255)),
        ('Robotron 7x11', XglcdFont('fonts/Robotron7x11.c', 7, 11),
         85, color565(255, 255, 255), color565(128, 128, 128)),
        ('Unispace', XglcdFont('fonts/Unispace12x24.c', 12, 24),
         96, color565(255, 128, 0), color565(0, 128, 255)),
        ('Wendy 7x8', XglcdFont('fonts/Wendy7x8.c', 7, 8),
         120, color565(255, 0, 128), color565(255, 255, 255)),
    ]
    print("Fonts loaded.")
    # Pass 1: portrait orientation.
    for label, font, offset, fg, bg in demo_fonts:
        display.draw_text(0, offset, label, font, fg)
    sleep(9)
    display.clear()
    # Pass 2: landscape orientation.
    for label, font, offset, fg, bg in demo_fonts:
        display.draw_text(offset, 0, label, font, fg, landscape=True)
    sleep(9)
    display.clear()
    # Pass 3: portrait with a per-font background colour.
    for label, font, offset, fg, bg in demo_fonts:
        display.draw_text(0, offset, label, font, fg, background=bg)
    sleep(9)
    display.cleanup()
# Run the demo as soon as the module is loaded.
test()
|
[
"xglcd_font.XglcdFont",
"machine.Pin",
"ssd1351.color565",
"time.sleep"
] |
[((370, 411), 'xglcd_font.XglcdFont', 'XglcdFont', (['"""fonts/ArcadePix9x11.c"""', '(9)', '(11)'], {}), "('fonts/ArcadePix9x11.c', 9, 11)\n", (379, 411), False, 'from xglcd_font import XglcdFont\n'), ((424, 459), 'xglcd_font.XglcdFont', 'XglcdFont', (['"""fonts/Bally7x9.c"""', '(7)', '(9)'], {}), "('fonts/Bally7x9.c', 7, 9)\n", (433, 459), False, 'from xglcd_font import XglcdFont\n'), ((475, 517), 'xglcd_font.XglcdFont', 'XglcdFont', (['"""fonts/Broadway17x15.c"""', '(17)', '(15)'], {}), "('fonts/Broadway17x15.c', 17, 15)\n", (484, 517), False, 'from xglcd_font import XglcdFont\n'), ((539, 586), 'xglcd_font.XglcdFont', 'XglcdFont', (['"""fonts/EspressoDolce18x24.c"""', '(18)', '(24)'], {}), "('fonts/EspressoDolce18x24.c', 18, 24)\n", (548, 586), False, 'from xglcd_font import XglcdFont\n'), ((604, 643), 'xglcd_font.XglcdFont', 'XglcdFont', (['"""fonts/FixedFont5x8.c"""', '(5)', '(8)'], {}), "('fonts/FixedFont5x8.c', 5, 8)\n", (613, 643), False, 'from xglcd_font import XglcdFont\n'), ((656, 709), 'xglcd_font.XglcdFont', 'XglcdFont', (['"""fonts/Neato5x7.c"""', '(5)', '(7)'], {'letter_count': '(223)'}), "('fonts/Neato5x7.c', 5, 7, letter_count=223)\n", (665, 709), False, 'from xglcd_font import XglcdFont\n'), ((725, 765), 'xglcd_font.XglcdFont', 'XglcdFont', (['"""fonts/Robotron7x11.c"""', '(7)', '(11)'], {}), "('fonts/Robotron7x11.c', 7, 11)\n", (734, 765), False, 'from xglcd_font import XglcdFont\n'), ((781, 823), 'xglcd_font.XglcdFont', 'XglcdFont', (['"""fonts/Unispace12x24.c"""', '(12)', '(24)'], {}), "('fonts/Unispace12x24.c', 12, 24)\n", (790, 823), False, 'from xglcd_font import XglcdFont\n'), ((836, 871), 'xglcd_font.XglcdFont', 'XglcdFont', (['"""fonts/Wendy7x8.c"""', '(7)', '(8)'], {}), "('fonts/Wendy7x8.c', 7, 8)\n", (845, 871), False, 'from xglcd_font import XglcdFont\n'), ((1654, 1662), 'time.sleep', 'sleep', (['(9)'], {}), '(9)\n', (1659, 1662), False, 'from time import sleep\n'), ((2758, 2766), 'time.sleep', 'sleep', (['(9)'], {}), '(9)\n', (2763, 
2766), False, 'from time import sleep\n'), ((3998, 4006), 'time.sleep', 'sleep', (['(9)'], {}), '(9)\n', (4003, 4006), False, 'from time import sleep\n'), ((958, 977), 'ssd1351.color565', 'color565', (['(255)', '(0)', '(0)'], {}), '(255, 0, 0)\n', (966, 977), False, 'from ssd1351 import Display, color565\n'), ((1028, 1047), 'ssd1351.color565', 'color565', (['(0)', '(255)', '(0)'], {}), '(0, 255, 0)\n', (1036, 1047), False, 'from ssd1351 import Display, color565\n'), ((1100, 1119), 'ssd1351.color565', 'color565', (['(0)', '(0)', '(255)'], {}), '(0, 0, 255)\n', (1108, 1119), False, 'from ssd1351 import Display, color565\n'), ((1200, 1221), 'ssd1351.color565', 'color565', (['(0)', '(255)', '(255)'], {}), '(0, 255, 255)\n', (1208, 1221), False, 'from ssd1351 import Display, color565\n'), ((1304, 1325), 'ssd1351.color565', 'color565', (['(255)', '(0)', '(255)'], {}), '(255, 0, 255)\n', (1312, 1325), False, 'from ssd1351 import Display, color565\n'), ((1376, 1397), 'ssd1351.color565', 'color565', (['(255)', '(255)', '(0)'], {}), '(255, 255, 0)\n', (1384, 1397), False, 'from ssd1351 import Display, color565\n'), ((1477, 1500), 'ssd1351.color565', 'color565', (['(255)', '(255)', '(255)'], {}), '(255, 255, 255)\n', (1485, 1500), False, 'from ssd1351 import Display, color565\n'), ((1553, 1574), 'ssd1351.color565', 'color565', (['(255)', '(128)', '(0)'], {}), '(255, 128, 0)\n', (1561, 1574), False, 'from ssd1351 import Display, color565\n'), ((1626, 1647), 'ssd1351.color565', 'color565', (['(255)', '(0)', '(128)'], {}), '(255, 0, 128)\n', (1634, 1647), False, 'from ssd1351 import Display, color565\n'), ((1764, 1783), 'ssd1351.color565', 'color565', (['(255)', '(0)', '(0)'], {}), '(255, 0, 0)\n', (1772, 1783), False, 'from ssd1351 import Display, color565\n'), ((1872, 1891), 'ssd1351.color565', 'color565', (['(0)', '(255)', '(0)'], {}), '(0, 255, 0)\n', (1880, 1891), False, 'from ssd1351 import Display, color565\n'), ((1982, 2001), 'ssd1351.color565', 'color565', (['(0)', 
'(0)', '(255)'], {}), '(0, 0, 255)\n', (1990, 2001), False, 'from ssd1351 import Display, color565\n'), ((2120, 2141), 'ssd1351.color565', 'color565', (['(0)', '(255)', '(255)'], {}), '(0, 255, 255)\n', (2128, 2141), False, 'from ssd1351 import Display, color565\n'), ((2240, 2261), 'ssd1351.color565', 'color565', (['(255)', '(0)', '(255)'], {}), '(255, 0, 255)\n', (2248, 2261), False, 'from ssd1351 import Display, color565\n'), ((2328, 2349), 'ssd1351.color565', 'color565', (['(255)', '(255)', '(0)'], {}), '(255, 255, 0)\n', (2336, 2349), False, 'from ssd1351 import Display, color565\n'), ((2467, 2490), 'ssd1351.color565', 'color565', (['(255)', '(255)', '(255)'], {}), '(255, 255, 255)\n', (2475, 2490), False, 'from ssd1351 import Display, color565\n'), ((2581, 2602), 'ssd1351.color565', 'color565', (['(255)', '(128)', '(0)'], {}), '(255, 128, 0)\n', (2589, 2602), False, 'from ssd1351 import Display, color565\n'), ((2692, 2713), 'ssd1351.color565', 'color565', (['(255)', '(0)', '(128)'], {}), '(255, 0, 128)\n', (2700, 2713), False, 'from ssd1351 import Display, color565\n'), ((2846, 2865), 'ssd1351.color565', 'color565', (['(255)', '(0)', '(0)'], {}), '(255, 0, 0)\n', (2854, 2865), False, 'from ssd1351 import Display, color565\n'), ((2972, 2991), 'ssd1351.color565', 'color565', (['(0)', '(255)', '(0)'], {}), '(0, 255, 0)\n', (2980, 2991), False, 'from ssd1351 import Display, color565\n'), ((3098, 3117), 'ssd1351.color565', 'color565', (['(0)', '(0)', '(255)'], {}), '(0, 0, 255)\n', (3106, 3117), False, 'from ssd1351 import Display, color565\n'), ((3254, 3275), 'ssd1351.color565', 'color565', (['(0)', '(255)', '(255)'], {}), '(0, 255, 255)\n', (3262, 3275), False, 'from ssd1351 import Display, color565\n'), ((3390, 3411), 'ssd1351.color565', 'color565', (['(255)', '(0)', '(255)'], {}), '(255, 0, 255)\n', (3398, 3411), False, 'from ssd1351 import Display, color565\n'), ((3494, 3515), 'ssd1351.color565', 'color565', (['(255)', '(255)', '(0)'], {}), '(255, 255, 0)\n', 
(3502, 3515), False, 'from ssd1351 import Display, color565\n'), ((3649, 3672), 'ssd1351.color565', 'color565', (['(255)', '(255)', '(255)'], {}), '(255, 255, 255)\n', (3657, 3672), False, 'from ssd1351 import Display, color565\n'), ((3783, 3804), 'ssd1351.color565', 'color565', (['(255)', '(128)', '(0)'], {}), '(255, 128, 0)\n', (3791, 3804), False, 'from ssd1351 import Display, color565\n'), ((3912, 3933), 'ssd1351.color565', 'color565', (['(255)', '(0)', '(128)'], {}), '(255, 0, 128)\n', (3920, 3933), False, 'from ssd1351 import Display, color565\n'), ((226, 233), 'machine.Pin', 'Pin', (['(18)'], {}), '(18)\n', (229, 233), False, 'from machine import Pin, SPI\n'), ((240, 247), 'machine.Pin', 'Pin', (['(23)'], {}), '(23)\n', (243, 247), False, 'from machine import Pin, SPI\n'), ((279, 286), 'machine.Pin', 'Pin', (['(17)'], {}), '(17)\n', (282, 286), False, 'from machine import Pin, SPI\n'), ((291, 297), 'machine.Pin', 'Pin', (['(5)'], {}), '(5)\n', (294, 297), False, 'from machine import Pin, SPI\n'), ((303, 310), 'machine.Pin', 'Pin', (['(16)'], {}), '(16)\n', (306, 310), False, 'from machine import Pin, SPI\n'), ((2900, 2921), 'ssd1351.color565', 'color565', (['(0)', '(255)', '(255)'], {}), '(0, 255, 255)\n', (2908, 2921), False, 'from ssd1351 import Display, color565\n'), ((3026, 3045), 'ssd1351.color565', 'color565', (['(0)', '(0)', '(128)'], {}), '(0, 0, 128)\n', (3034, 3045), False, 'from ssd1351 import Display, color565\n'), ((3152, 3173), 'ssd1351.color565', 'color565', (['(255)', '(255)', '(0)'], {}), '(255, 255, 0)\n', (3160, 3173), False, 'from ssd1351 import Display, color565\n'), ((3288, 3307), 'ssd1351.color565', 'color565', (['(255)', '(0)', '(0)'], {}), '(255, 0, 0)\n', (3296, 3307), False, 'from ssd1351 import Display, color565\n'), ((3424, 3443), 'ssd1351.color565', 'color565', (['(0)', '(128)', '(0)'], {}), '(0, 128, 0)\n', (3432, 3443), False, 'from ssd1351 import Display, color565\n'), ((3550, 3569), 'ssd1351.color565', 'color565', (['(0)', 
'(0)', '(255)'], {}), '(0, 0, 255)\n', (3558, 3569), False, 'from ssd1351 import Display, color565\n'), ((3707, 3730), 'ssd1351.color565', 'color565', (['(128)', '(128)', '(128)'], {}), '(128, 128, 128)\n', (3715, 3730), False, 'from ssd1351 import Display, color565\n'), ((3839, 3860), 'ssd1351.color565', 'color565', (['(0)', '(128)', '(255)'], {}), '(0, 128, 255)\n', (3847, 3860), False, 'from ssd1351 import Display, color565\n'), ((3968, 3991), 'ssd1351.color565', 'color565', (['(255)', '(255)', '(255)'], {}), '(255, 255, 255)\n', (3976, 3991), False, 'from ssd1351 import Display, color565\n')]
|
# -*- coding: utf-8 -*-
"""Loading Data.ipynb
Automatically generated by Colaboratory.
Original file is located at
https://colab.research.google.com/drive/1CXQRx9Jfj4tmXZmu_DFiUzpPJqF1sdf3
"""
import random
import numpy as np
import pandas as pd
from datasets import Dataset
"""#Loading Data"""
"""A module for preparing the training data for the baselines."""
import logging
from typing import List, Tuple
import pandas as pd
def retrieve_instances_from_dataset(
    dataset: pd.DataFrame,
) -> Tuple[List[str], List[str], List[str]]:
    """Retrieve sentences with insertions from dataset.

    Each dataset row holds one sentence with a "______" gap and five
    candidate fillers; one instance is emitted per (sentence, filler)
    pair, with the gap replaced by "[MASK]".

    Fixes over the previous version: the unused ``before_context`` /
    ``after_context`` locals are removed, per-row work is no longer
    recomputed once per filler, and the return annotation and docstring
    now match the three lists that are actually returned.

    :param dataset: dataframe with labeled data
    :return: a tuple with
        * a list of id strs ("<row Id>_<filler index>")
        * a list of instance strs (pattern, header, title and the
          sentence in context)
        * a list of filler strs
    """
    # fill the empty values with empty strings so .strip() below is safe
    dataset = dataset.fillna("")
    ids = []
    instances = []
    fillers = []
    for _, row in dataset.iterrows():
        # the instance text is identical for all five fillers of a row,
        # so build it once outside the filler loop
        sent_with_filler = row["Sentence"].replace("______", "[MASK]").strip()
        resolved_pattern = row["Resolved pattern"].strip()
        article_title = row["Article title"].strip()
        section_header = row["Section header"].strip()
        text = (
            row["Previous context"].strip()
            + " "
            + sent_with_filler
            + " "
            + row["Follow-up context"].strip()
        )
        instance = f"Resolved pattern: {resolved_pattern}\nSection header: {section_header}\nArticle title: {article_title}\nText: {text}"
        for filler_index in range(1, 6):
            ids.append(f"{row['Id']}_{filler_index}")
            fillers.append(row[f"Filler{filler_index}"])
            instances.append(instance)
    return ids, instances, fillers
def retrieve_labels_from_dataset_for_ranking(label_set: pd.DataFrame) -> List[float]:
    """Retrieve plausibility rating labels from a dataset.

    The gold scores come pre-ordered like the training instances, so the
    "Label" column can be returned directly as a list.

    :param label_set: dataframe with plausibility gold scores
    :return: list of rating floats
    """
    return list(label_set["Label"])
def retrieve_labels_from_dataset_for_classification(
    label_set: pd.DataFrame,
) -> List[int]:
    """Retrieve plausibility class labels from a dataset.

    :param label_set: dataframe with class labels
    :return: list of int class labels 0, 1 or 2
        (IMPLAUSIBLE, NEUTRAL, PLAUSIBLE)
    """
    # labels arrive in the same order as the training instances
    class_to_index = {"IMPLAUSIBLE": 0, "NEUTRAL": 1, "PLAUSIBLE": 2}
    label_ints = []
    for label_str in list(label_set["Label"]):
        try:
            label_ints.append(class_to_index[label_str])
        except KeyError:
            raise ValueError(
                f"Label {label_str} is not a valid plausibility class.")
    return label_ints
def write_predictions_to_file(
    path_to_predictions: str, ids: List[str], predictions: List, subtask: str
) -> pd.DataFrame:
    """Write the instance indices and predictions to a tsv file.

    :param path_to_predictions: str path to file where to write the predictions
    :param ids: list of str instance indices
    :param predictions: list of predictions
    :param subtask: str indicating "ranking" or "classification"
    :return: pandas dataframe with ids and predictions
    """
    labels = predictions
    if subtask == "classification":
        # classification predictions are int class indices; map them to
        # their string labels before writing
        labels = convert_class_indices_to_labels(predictions)
    frame = pd.DataFrame({"Id": ids, "Label": labels})
    logging.info(f"--> Writing predictions to {path_to_predictions}")
    frame.to_csv(path_to_predictions, sep="\t", index=False, header=False)
    return frame
def convert_class_indices_to_labels(class_indices: List[int]) -> List[str]:
    """Convert integer class indices to str labels.

    :param class_indices: list of int class indices (0 to 2)
    :return: list of label strs from set "IMPLAUSIBLE" / "NEUTRAL" / "PLAUSIBLE"
    """
    index_to_label = ("IMPLAUSIBLE", "NEUTRAL", "PLAUSIBLE")
    return [index_to_label[index] for index in class_indices]
|
[
"pandas.DataFrame",
"logging.info"
] |
[((3582, 3629), 'pandas.DataFrame', 'pd.DataFrame', (["{'Id': ids, 'Label': predictions}"], {}), "({'Id': ids, 'Label': predictions})\n", (3594, 3629), True, 'import pandas as pd\n'), ((3634, 3699), 'logging.info', 'logging.info', (['f"""--> Writing predictions to {path_to_predictions}"""'], {}), "(f'--> Writing predictions to {path_to_predictions}')\n", (3646, 3699), False, 'import logging\n')]
|
from beamline.web.Beamline import Beamline
from beamline.miners.DiscoveryMiner import DiscoveryMiner
# Register a DiscoveryMiner instance on Beamline's miner list at import time.
Beamline.miners.append(DiscoveryMiner())
|
[
"beamline.miners.DiscoveryMiner.DiscoveryMiner"
] |
[((125, 141), 'beamline.miners.DiscoveryMiner.DiscoveryMiner', 'DiscoveryMiner', ([], {}), '()\n', (139, 141), False, 'from beamline.miners.DiscoveryMiner import DiscoveryMiner\n')]
|
import argparse
from os import get_terminal_size
from sys import stderr
from .cli_base import CliBaseClass
class StdoutFormat:
    # ANSI terminal escape sequences used to format CLI output.
    BOLD = "\033[1m"    # start bold text
    ENDC = "\033[0m"    # reset all attributes
    GREEN = "\033[92m"  # bright green foreground
class DeviceCLI(CliBaseClass):
    """CLI command "pitop devices": reports the active pi-top hub and any
    attached pi-top peripherals.
    """
    parser_help = "Get information about device and attached pi-top hardware"
    cli_name = "devices"
    def __init__(self, args) -> None:
        # args: parsed argparse namespace; the code below reads
        # devices_subcommand, quiet and name_only from it.
        self.args = args
    def run(self) -> int:
        """Print hub and/or peripheral info; return 0 on success, 1 if the
        hub could not be queried."""
        def print_header(section_name):
            # Bold section title padded with "=" up to the terminal width.
            print(
                f"{StdoutFormat.BOLD}{section_name}{StdoutFormat.ENDC} {'='*(get_terminal_size().columns - len(section_name) - 2)}"
            )
        def print_peripheral_line(data):
            # One line per peripheral: "[ ✓ ] name (vX.Y)".
            # --quiet hides disconnected peripherals entirely.
            if data.get("connected") is False and self.args.quiet:
                return
            # the checkbox column is omitted when both a subcommand and
            # --quiet were given
            if self.args.devices_subcommand is None or not self.args.quiet:
                print(
                    f"[ {StdoutFormat.GREEN}{'✓' if data.get('connected') else ' '}{StdoutFormat.ENDC} ]",
                    end=" ",
                )
            print(f"{data.get('name')}", end=" ")
            # firmware version is shown unless --name-only was given
            if not self.args.name_only and data.get("fw_version"):
                print(f"(v{data.get('fw_version')})", end="")
            print("")
        def print_hub_line(data):
            # Hub line: "name (vX.Y)" without a checkbox column.
            print(f"{data.get('name')}", end="")
            if not self.args.name_only and data.get("fw_version"):
                print(f" (v{data.get('fw_version')})", end="")
            print("")
        # Get host device from pi-topd
        try:
            from pitop.system import device_info
            device = device_info()
            if self.args.devices_subcommand in ("hub", None):
                # the header is only printed when listing everything
                if self.args.devices_subcommand is None:
                    print_header("HUB")
                print_hub_line(device)
        except Exception as e:
            # failing to reach pi-topd is fatal for the whole command
            print(
                f"Error on pitop-devices.run: Unable to get device type from pi-topd: {e}",
                file=stderr,
            )
            return 1
        if self.args.devices_subcommand in ("peripherals", None):
            if self.args.devices_subcommand is None:
                print_header("PERIPHERALS")
            try:
                # Get list of all pi-top peripherals
                from pitop.system import pitop_peripherals
                for periph in pitop_peripherals():
                    print_peripheral_line(periph)
            except Exception as e:
                # peripheral enumeration failure is reported but does not
                # change the exit status
                print(
                    f"Error on pitop-devices.run: Unable to get connected peripherals from pi-topd: {e}",
                    file=stderr,
                )
        return 0
    @classmethod
    def add_parser_arguments(cls, parser) -> None:
        """Attach the "devices" arguments and the hub/peripherals
        subcommands to the given argparse parser."""
        def add_common_arguments(parser):
            # Flags shared by "devices" itself and both subcommands.
            parser.add_argument(
                "--quiet",
                "-q",
                help="Display only the connected devices",
                action="store_true",
            )
            parser.add_argument(
                "--name-only",
                "-n",
                help="Display only the name of the devices, without further information",
                action="store_true",
            )
        # to use arguments with "devices" directly
        add_common_arguments(parser)
        # manage arguments common to subparser options (hub & peripherals)
        parent_parser = argparse.ArgumentParser(add_help=False)
        add_common_arguments(parent_parser)
        subparser = parser.add_subparsers(
            title="pi-top devices utility",
            description="Get information about pi-top attached devices",
            dest="devices_subcommand",
        )
        # "pitop devices hub" subcommand
        subparser.add_parser(
            "hub",
            help="Get the name of the active pi-top device",
            parents=[parent_parser],
        )
        # "pitop devices peripherals" subcommand
        subparser.add_parser(
            "peripherals",
            help="Get information about attached pi-top peripherals",
            parents=[parent_parser],
        )
def main():
    # Entry point used by the deprecated stand-alone command runner.
    from .deprecated_cli_runner import run
    run(DeviceCLI)
def host():
    # Back-compat entry point for the old "pt-host" command; runs the
    # equivalent of "pi-top devices hub" with --name-only behavior.
    from .deprecated_cli_runner import run_with_args
    args = {"devices_subcommand": "hub", "name_only": True}
    run_with_args(
        DeviceCLI,
        old_command="pt-host",
        new_command="pi-top devices hub",
        args_dict=args,
    )
if __name__ == "__main__":
    # Allow the module to be executed directly as a script.
    main()
|
[
"os.get_terminal_size",
"pitop.system.pitop_peripherals",
"pitop.system.device_info",
"argparse.ArgumentParser"
] |
[((3339, 3378), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'add_help': '(False)'}), '(add_help=False)\n', (3362, 3378), False, 'import argparse\n'), ((1574, 1587), 'pitop.system.device_info', 'device_info', ([], {}), '()\n', (1585, 1587), False, 'from pitop.system import device_info\n'), ((2317, 2336), 'pitop.system.pitop_peripherals', 'pitop_peripherals', ([], {}), '()\n', (2334, 2336), False, 'from pitop.system import pitop_peripherals\n'), ((558, 577), 'os.get_terminal_size', 'get_terminal_size', ([], {}), '()\n', (575, 577), False, 'from os import get_terminal_size\n')]
|
import pcon
def test_solid():
    """Round-trip a Solid through serialization and check bit get/set."""
    counter = pcon.Counter(5)
    counter.deserialize("python_counter.pcon")
    solid = pcon.Solid.from_counter(counter, 20)
    assert solid.get(108) == False
    solid.serialize("python_solid.pcon")
    restored = pcon.Solid.deserialize("python_solid.pcon")
    assert restored.get(108) == False
    restored.set(108, True)
    assert restored.get(108) == True
|
[
"pcon.Solid.from_counter",
"pcon.Counter",
"pcon.Solid.deserialize"
] |
[((39, 54), 'pcon.Counter', 'pcon.Counter', (['(5)'], {}), '(5)\n', (51, 54), False, 'import pcon\n'), ((105, 135), 'pcon.Solid.from_counter', 'pcon.Solid.from_counter', (['a', '(20)'], {}), '(a, 20)\n', (128, 135), False, 'import pcon\n'), ((215, 258), 'pcon.Solid.deserialize', 'pcon.Solid.deserialize', (['"""python_solid.pcon"""'], {}), "('python_solid.pcon')\n", (237, 258), False, 'import pcon\n')]
|
import sys, os
sys.path.append(os.path.join(os.path.dirname(__file__), ".."))
import math
from hypothesis import given, settings, strategies as st
from generators_2d.generators import generate_2d_line
@given(
    st.integers(min_value=-10 ** 5, max_value=10 ** 5),
    st.integers(min_value=-10 ** 5, max_value=10 ** 5),
    st.integers(min_value=100, max_value=10 ** 5),
    st.integers(min_value=-100, max_value=100),
)
def test_lines_towards_east(x0, y0, east, north):
    """Lines whose dominant direction is east emit one point per x column."""
    x1, y1 = x0 + east, y0 + north
    slope = (y1 - y0) / abs(x1 - x0)
    expected = []
    # accumulate the slope exactly like the implementation is expected to
    y_acc = y0
    for x in range(x0, x1 + 1):
        expected.append((x, math.floor(y_acc)))
        y_acc += slope
    actual = list(generate_2d_line(x0, y0, x1, y1))
    assert expected == actual
@given(
    st.integers(min_value=-10 ** 5, max_value=10 ** 5),
    st.integers(min_value=-10 ** 5, max_value=10 ** 5),
    st.integers(min_value=-10 ** 5, max_value=-100),
    st.integers(min_value=-100, max_value=100),
)
def test_lines_towards_west(x0, y0, east, north):
    """Lines whose dominant direction is west emit one point per x column."""
    x1, y1 = x0 + east, y0 + north
    slope = (y1 - y0) / abs(x1 - x0)
    expected = []
    # accumulate the slope exactly like the implementation is expected to
    y_acc = y0
    for x in range(x0, x1 - 1, -1):
        expected.append((x, math.floor(y_acc)))
        y_acc += slope
    actual = list(generate_2d_line(x0, y0, x1, y1))
    assert expected == actual
@given(
    st.integers(min_value=-10 ** 5, max_value=10 ** 5),
    st.integers(min_value=-10 ** 5, max_value=10 ** 5),
    st.integers(min_value=-100, max_value=-100),
    st.integers(min_value=101, max_value=10 ** 5),
)
def test_lines_towards_north(x0, y0, east, north):
    """Lines whose dominant direction is north emit one point per y row."""
    x1, y1 = x0 + east, y0 + north
    slope = (x1 - x0) / abs(y1 - y0)
    expected = []
    # accumulate the slope exactly like the implementation is expected to
    x_acc = x0
    for y in range(y0, y1 + 1):
        expected.append((math.floor(x_acc), y))
        x_acc += slope
    actual = list(generate_2d_line(x0, y0, x1, y1))
    assert expected == actual
@given(
    st.integers(min_value=-10 ** 5, max_value=10 ** 5),
    st.integers(min_value=-10 ** 5, max_value=10 ** 5),
    st.integers(min_value=-100, max_value=100),
    st.integers(min_value=-10 ** 5, max_value=-101),
)
def test_lines_towards_south(x0, y0, east, north):
    """Steep lines heading south match a naive incremental-DDA reference.

    Fix: the ``east`` strategy was ``integers(min_value=-100, max_value=-100)``
    — a degenerate range that always produced -100. It now covers [-100, 100],
    while abs(north) >= 101 > abs(east) still guarantees that y is the driving
    axis and ``abs(y1 - y0)`` is never zero.
    """
    x1, y1 = x0 + east, y0 + north
    correct_result = []
    m = (x1 - x0) / abs(y1 - y0)
    x_new = x0
    # y decreases towards the south endpoint.
    for y in range(y0, y1 - 1, -1):
        correct_result.append((math.floor(x_new), y))
        x_new += m
    function_result = list(generate_2d_line(x0, y0, x1, y1))
    assert correct_result == function_result
def test_simple_line_examples():
    """Hand-checked lines: degenerate, axis-aligned, diagonal and shallow."""
    # No line, start and end point are identical, dont divide by zero
    assert list(generate_2d_line(0, 0, 0, 0)) == [(0, 0)]
    # 2 examples to check vertical and horizontal
    assert list(generate_2d_line(0, 0, 1, 0)) == [(0, 0), (1, 0)]
    assert list(generate_2d_line(0, 0, 0, 1)) == [(0, 0), (0, 1)]
    # 3 examples to check diagonal
    assert list(generate_2d_line(0, 0, 1, 1)) == [(0, 0), (1, 1)]
    assert list(generate_2d_line(-1, -1, 1, 1)) == [(-1, -1), (0, 0), (1, 1)]
    assert list(generate_2d_line(-1, 1, 1, -1)) == [(-1, 1), (0, 0), (1, -1)]
    # Shallow lines where point2 lies mostly east/west/north/south of point1.
    cases = [
        ((0, 0, 4, 2), [(0, 0), (1, 0), (2, 1), (3, 1), (4, 2)]),
        ((4, 2, 0, 0), [(4, 2), (3, 1), (2, 1), (1, 0), (0, 0)]),
        ((0, 0, 2, 4), [(0, 0), (0, 1), (1, 2), (1, 3), (2, 4)]),
        ((2, 4, 0, 0), [(2, 4), (1, 3), (1, 2), (0, 1), (0, 0)]),
    ]
    for args, b in cases:
        a = list(generate_2d_line(*args))
        assert a == b, f"{a}\n{b}"
|
[
"hypothesis.strategies.integers",
"os.path.dirname",
"math.floor",
"generators_2d.generators.generate_2d_line"
] |
[((219, 269), 'hypothesis.strategies.integers', 'st.integers', ([], {'min_value': '(-10 ** 5)', 'max_value': '(10 ** 5)'}), '(min_value=-10 ** 5, max_value=10 ** 5)\n', (230, 269), True, 'from hypothesis import given, settings, strategies as st\n'), ((275, 325), 'hypothesis.strategies.integers', 'st.integers', ([], {'min_value': '(-10 ** 5)', 'max_value': '(10 ** 5)'}), '(min_value=-10 ** 5, max_value=10 ** 5)\n', (286, 325), True, 'from hypothesis import given, settings, strategies as st\n'), ((331, 376), 'hypothesis.strategies.integers', 'st.integers', ([], {'min_value': '(100)', 'max_value': '(10 ** 5)'}), '(min_value=100, max_value=10 ** 5)\n', (342, 376), True, 'from hypothesis import given, settings, strategies as st\n'), ((382, 424), 'hypothesis.strategies.integers', 'st.integers', ([], {'min_value': '(-100)', 'max_value': '(100)'}), '(min_value=-100, max_value=100)\n', (393, 424), True, 'from hypothesis import given, settings, strategies as st\n'), ((812, 862), 'hypothesis.strategies.integers', 'st.integers', ([], {'min_value': '(-10 ** 5)', 'max_value': '(10 ** 5)'}), '(min_value=-10 ** 5, max_value=10 ** 5)\n', (823, 862), True, 'from hypothesis import given, settings, strategies as st\n'), ((868, 918), 'hypothesis.strategies.integers', 'st.integers', ([], {'min_value': '(-10 ** 5)', 'max_value': '(10 ** 5)'}), '(min_value=-10 ** 5, max_value=10 ** 5)\n', (879, 918), True, 'from hypothesis import given, settings, strategies as st\n'), ((924, 971), 'hypothesis.strategies.integers', 'st.integers', ([], {'min_value': '(-10 ** 5)', 'max_value': '(-100)'}), '(min_value=-10 ** 5, max_value=-100)\n', (935, 971), True, 'from hypothesis import given, settings, strategies as st\n'), ((977, 1019), 'hypothesis.strategies.integers', 'st.integers', ([], {'min_value': '(-100)', 'max_value': '(100)'}), '(min_value=-100, max_value=100)\n', (988, 1019), True, 'from hypothesis import given, settings, strategies as st\n'), ((1411, 1461), 'hypothesis.strategies.integers', 
'st.integers', ([], {'min_value': '(-10 ** 5)', 'max_value': '(10 ** 5)'}), '(min_value=-10 ** 5, max_value=10 ** 5)\n', (1422, 1461), True, 'from hypothesis import given, settings, strategies as st\n'), ((1467, 1517), 'hypothesis.strategies.integers', 'st.integers', ([], {'min_value': '(-10 ** 5)', 'max_value': '(10 ** 5)'}), '(min_value=-10 ** 5, max_value=10 ** 5)\n', (1478, 1517), True, 'from hypothesis import given, settings, strategies as st\n'), ((1523, 1566), 'hypothesis.strategies.integers', 'st.integers', ([], {'min_value': '(-100)', 'max_value': '(-100)'}), '(min_value=-100, max_value=-100)\n', (1534, 1566), True, 'from hypothesis import given, settings, strategies as st\n'), ((1572, 1617), 'hypothesis.strategies.integers', 'st.integers', ([], {'min_value': '(101)', 'max_value': '(10 ** 5)'}), '(min_value=101, max_value=10 ** 5)\n', (1583, 1617), True, 'from hypothesis import given, settings, strategies as st\n'), ((2006, 2056), 'hypothesis.strategies.integers', 'st.integers', ([], {'min_value': '(-10 ** 5)', 'max_value': '(10 ** 5)'}), '(min_value=-10 ** 5, max_value=10 ** 5)\n', (2017, 2056), True, 'from hypothesis import given, settings, strategies as st\n'), ((2062, 2112), 'hypothesis.strategies.integers', 'st.integers', ([], {'min_value': '(-10 ** 5)', 'max_value': '(10 ** 5)'}), '(min_value=-10 ** 5, max_value=10 ** 5)\n', (2073, 2112), True, 'from hypothesis import given, settings, strategies as st\n'), ((2118, 2161), 'hypothesis.strategies.integers', 'st.integers', ([], {'min_value': '(-100)', 'max_value': '(-100)'}), '(min_value=-100, max_value=-100)\n', (2129, 2161), True, 'from hypothesis import given, settings, strategies as st\n'), ((2167, 2214), 'hypothesis.strategies.integers', 'st.integers', ([], {'min_value': '(-10 ** 5)', 'max_value': '(-101)'}), '(min_value=-10 ** 5, max_value=-101)\n', (2178, 2214), True, 'from hypothesis import given, settings, strategies as st\n'), ((45, 70), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), 
'(__file__)\n', (60, 70), False, 'import sys, os\n'), ((718, 750), 'generators_2d.generators.generate_2d_line', 'generate_2d_line', (['x0', 'y0', 'x1', 'y1'], {}), '(x0, y0, x1, y1)\n', (734, 750), False, 'from generators_2d.generators import generate_2d_line\n'), ((1317, 1349), 'generators_2d.generators.generate_2d_line', 'generate_2d_line', (['x0', 'y0', 'x1', 'y1'], {}), '(x0, y0, x1, y1)\n', (1333, 1349), False, 'from generators_2d.generators import generate_2d_line\n'), ((1912, 1944), 'generators_2d.generators.generate_2d_line', 'generate_2d_line', (['x0', 'y0', 'x1', 'y1'], {}), '(x0, y0, x1, y1)\n', (1928, 1944), False, 'from generators_2d.generators import generate_2d_line\n'), ((2513, 2545), 'generators_2d.generators.generate_2d_line', 'generate_2d_line', (['x0', 'y0', 'x1', 'y1'], {}), '(x0, y0, x1, y1)\n', (2529, 2545), False, 'from generators_2d.generators import generate_2d_line\n'), ((2711, 2739), 'generators_2d.generators.generate_2d_line', 'generate_2d_line', (['(0)', '(0)', '(0)', '(0)'], {}), '(0, 0, 0, 0)\n', (2727, 2739), False, 'from generators_2d.generators import generate_2d_line\n'), ((2830, 2858), 'generators_2d.generators.generate_2d_line', 'generate_2d_line', (['(0)', '(0)', '(1)', '(0)'], {}), '(0, 0, 1, 0)\n', (2846, 2858), False, 'from generators_2d.generators import generate_2d_line\n'), ((2907, 2935), 'generators_2d.generators.generate_2d_line', 'generate_2d_line', (['(0)', '(0)', '(0)', '(1)'], {}), '(0, 0, 0, 1)\n', (2923, 2935), False, 'from generators_2d.generators import generate_2d_line\n'), ((3019, 3047), 'generators_2d.generators.generate_2d_line', 'generate_2d_line', (['(0)', '(0)', '(1)', '(1)'], {}), '(0, 0, 1, 1)\n', (3035, 3047), False, 'from generators_2d.generators import generate_2d_line\n'), ((3096, 3126), 'generators_2d.generators.generate_2d_line', 'generate_2d_line', (['(-1)', '(-1)', '(1)', '(1)'], {}), '(-1, -1, 1, 1)\n', (3112, 3126), False, 'from generators_2d.generators import generate_2d_line\n'), ((3185, 
3215), 'generators_2d.generators.generate_2d_line', 'generate_2d_line', (['(-1)', '(1)', '(1)', '(-1)'], {}), '(-1, 1, 1, -1)\n', (3201, 3215), False, 'from generators_2d.generators import generate_2d_line\n'), ((3320, 3348), 'generators_2d.generators.generate_2d_line', 'generate_2d_line', (['(0)', '(0)', '(4)', '(2)'], {}), '(0, 0, 4, 2)\n', (3336, 3348), False, 'from generators_2d.generators import generate_2d_line\n'), ((3489, 3517), 'generators_2d.generators.generate_2d_line', 'generate_2d_line', (['(4)', '(2)', '(0)', '(0)'], {}), '(4, 2, 0, 0)\n', (3505, 3517), False, 'from generators_2d.generators import generate_2d_line\n'), ((3657, 3685), 'generators_2d.generators.generate_2d_line', 'generate_2d_line', (['(0)', '(0)', '(2)', '(4)'], {}), '(0, 0, 2, 4)\n', (3673, 3685), False, 'from generators_2d.generators import generate_2d_line\n'), ((3828, 3856), 'generators_2d.generators.generate_2d_line', 'generate_2d_line', (['(2)', '(4)', '(0)', '(0)'], {}), '(2, 4, 0, 0)\n', (3844, 3856), False, 'from generators_2d.generators import generate_2d_line\n'), ((651, 668), 'math.floor', 'math.floor', (['y_new'], {}), '(y_new)\n', (661, 668), False, 'import math\n'), ((1250, 1267), 'math.floor', 'math.floor', (['y_new'], {}), '(y_new)\n', (1260, 1267), False, 'import math\n'), ((1842, 1859), 'math.floor', 'math.floor', (['x_new'], {}), '(x_new)\n', (1852, 1859), False, 'import math\n'), ((2443, 2460), 'math.floor', 'math.floor', (['x_new'], {}), '(x_new)\n', (2453, 2460), False, 'import math\n')]
|
# -*- coding: utf-8 -*-
# Copyright (C) 2018, <NAME>. All rights reserved.
#
# You should have received a copy of the MIT License along with this program.
# If not, see https://opensource.org/licenses/MIT.
#
# 2018-07-11 CNHume Added Command and FileManager classes
# 2018-07-09 CNHume Created File for Eric Peterson of Rigetti Computing
#
# References
# ----------
# Hangman (game) from Wikipedia
# See https://en.wikipedia.org/wiki/Hangman_(game)
#
# 49 unbeatable words for the game 'hangman' from <NAME>
# See https://www.prdaily.com/Main/Articles/20880.aspx
#
import random
import sys
import traceback
from Command import Command
from Player import Player
from FileManager import FileManager
def main():
  """Parse the command line, load the art/word files and play one Hangman game.

  Fixes:
  - ``raw_input`` only exists on Python 2 and raised NameError on Python 3;
    a NameError fallback to ``input`` keeps both versions working.
  - Dropped the unused ``as ex`` exception binding and the commented-out
    debug line that referenced it.
  """
  # Command Line Defaults:
  SETUP_PATH = u''
  ART_FILE = u'art'                     # Hangman ASCII Art
  WORD_FILE = u'words'                  # Word File (Hangman Dictionary)
  FILE_EXT = u'txt'                     # Word File Extension
  TRIALS = 6                            # Head, Body, 2 Arms, 2 Legs
  try:
    command = Command(WORD_FILE, ART_FILE, FILE_EXT, TRIALS)
    if command.Parse(sys.argv):
      verbose = command.verbose
      # Gallows figures, one ASCII-art paragraph per wrong guess.
      artManager = FileManager(SETUP_PATH, command.file_ext, verbose)
      artManager.load(command.art_file)
      figures = artManager.paragraphs()
      # Dictionary of candidate words; one is chosen at random.
      wordManager = FileManager(SETUP_PATH, command.file_ext, verbose)
      wordManager.load(command.word_file)
      if wordManager.length > 0:
        choice = random.randrange(0, wordManager.length)
        word = wordManager.records[choice]
        player = Player(word, figures)
        result = player.play(command.trials)
        message = u'You win!' if result else u"You're hung."
        print(message)
      else:
        print(u'There are no words.')
  except Exception:
    # Show the full traceback instead of failing silently.
    trace = traceback.format_exc()
    print(trace)
    #[Debug] pause so the console window stays open until confirmed
    try:
      raw_input(u'Press Enter')
    except NameError:                   # Python 3: raw_input was renamed
      input(u'Press Enter')
# Run the game only when executed as a script (the trailing dead `pass`
# statement was removed).
if __name__ == '__main__':
  main()
|
[
"Player.Player",
"FileManager.FileManager",
"random.randrange",
"traceback.format_exc",
"Command.Command"
] |
[((1044, 1090), 'Command.Command', 'Command', (['WORD_FILE', 'ART_FILE', 'FILE_EXT', 'TRIALS'], {}), '(WORD_FILE, ART_FILE, FILE_EXT, TRIALS)\n', (1051, 1090), False, 'from Command import Command\n'), ((1174, 1224), 'FileManager.FileManager', 'FileManager', (['SETUP_PATH', 'command.file_ext', 'verbose'], {}), '(SETUP_PATH, command.file_ext, verbose)\n', (1185, 1224), False, 'from FileManager import FileManager\n'), ((1326, 1376), 'FileManager.FileManager', 'FileManager', (['SETUP_PATH', 'command.file_ext', 'verbose'], {}), '(SETUP_PATH, command.file_ext, verbose)\n', (1337, 1376), False, 'from FileManager import FileManager\n'), ((1848, 1870), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (1868, 1870), False, 'import traceback\n'), ((1472, 1511), 'random.randrange', 'random.randrange', (['(0)', 'wordManager.length'], {}), '(0, wordManager.length)\n', (1488, 1511), False, 'import random\n'), ((1573, 1594), 'Player.Player', 'Player', (['word', 'figures'], {}), '(word, figures)\n', (1579, 1594), False, 'from Player import Player\n')]
|
import math
from ezdxf.math.vector import Vector
def test_init_no_params():
    """The default constructor yields the null vector."""
    nullvec = Vector()
    assert nullvec == (0, 0, 0)
    assert nullvec == Vector()


def test_init_one_param():
    """A single iterable argument sets the components; z defaults to 0."""
    assert Vector((2, 3)) == (2, 3)  # z is 0.
    assert Vector((2, 3, 4)) == (2, 3, 4)


def test_init_two_params():
    """Two scalars set x and y; also exercises subtraction and from_deg_angle."""
    assert Vector(1, 2) == (1, 2)  # z is 0.
    assert Vector(5, 6, 7) - Vector(1, 1, 1) == (4, 5, 6)
    assert Vector.from_deg_angle(0) == (1, 0)
    length, angle = 7, 45
    vec = Vector.from_deg_angle(angle, length)
    expected_x = math.cos(math.radians(angle)) * length
    expected_y = math.sin(math.radians(angle)) * length
    assert vec == (expected_x, expected_y)
def test_init_three_params():
    """Three scalars map directly to x, y, z."""
    assert Vector(1, 2, 3) == (1, 2, 3)


def test_from_angle():
    """from_angle() builds a 2D direction vector scaled by length (z == 0)."""
    angle = math.radians(50)
    length = 3.
    expected = (math.cos(angle) * length, math.sin(angle) * length, 0)
    assert Vector.from_angle(angle, length) == expected
def test_vector_as_tuple():
    """Indexing, slicing and the xyz property expose tuple-like semantics."""
    vec = Vector(1, 2, 3)
    assert (vec[0], vec[1], vec[2]) == (1, 2, 3)
    assert tuple(vec) == (1, 2, 3)
    # Slices come back as plain tuples.
    assert isinstance(vec[:2], tuple)
    assert vec[:2] == (1, 2)
    assert vec[1:] == (2, 3)
    assert isinstance(vec.xyz, tuple)
    assert vec.xyz == (1, 2, 3)


def test_vec2():
    """vec2 drops the z component, yielding a 2-component vector."""
    vec = Vector(1, 2, 3)
    assert len(vec) == 3
    flat = vec.vec2
    assert len(flat) == 2
    assert flat == (1, 2)


def test_round():
    """round(ndigits) rounds each component individually."""
    assert Vector(1.123, 2.123, 3.123).round(1) == (1.1, 2.1, 3.1)
def test_iter():
    """Iterating a vector yields its components in x, y, z order."""
    assert sum(Vector(1, 2, 3)) == 6


def test_deep_copy():
    """Shallow copies share Vector instances; deep copies duplicate them."""
    import copy
    vec = Vector(1, 2, 3)
    shallow = copy.copy([vec, vec, vec])
    assert shallow[0] is shallow[1]
    assert shallow[1] is shallow[2]
    assert shallow[0] is vec
    deep = copy.deepcopy([vec, vec, vec])
    # deepcopy's memo keeps the three entries identical to each other ...
    assert deep[0] is deep[1]
    assert deep[1] is deep[2]
    # ... but distinct from the original instance.
    assert deep[0] is not vec
def test_get_angle():
    """angle/angle_deg report the direction in the xy-plane."""
    vec = Vector(3, 3)
    assert math.isclose(vec.angle_deg, 45)
    assert math.isclose(vec.angle, math.radians(45))


def test_spatial_angle():
    """spatial_angle measures the 3D angle against the x-axis."""
    vec = Vector(3, 3, 0)
    assert math.isclose(vec.spatial_angle_deg, 45)
    assert math.isclose(vec.spatial_angle, math.radians(45))


def test_compare_vectors():
    """Vectors compare equal to tuples and order component-wise."""
    small = Vector(1, 2, 3)
    assert small == (1, 2, 3)
    assert (1, 2, 3) == small
    big = Vector(2, 3, 4)
    assert big > small
    assert small < big
def test_xy():
    """xy zeroes the z component."""
    assert Vector(1, 2, 3).xy == Vector(1, 2)


def test_is_null():
    """is_null detects (0, 0, 0) within the internal tolerance."""
    assert Vector().is_null
    # Scaling by 1/k and dividing by k should agree up to float noise.
    scaled = Vector(23.56678, 56678.56778, 2.56677) * (1.0 / 14.5667)
    divided = Vector(23.56678, 56678.56778, 2.56677) / 14.5667
    assert (divided - scaled).is_null
    assert Vector(0, 0, 0).is_null


def test_bool():
    """bool(vector) mirrors 'not is_null'."""
    assert bool(Vector()) is False
    scaled = Vector(23.56678, 56678.56778, 2.56677) * (1.0 / 14.5667)
    divided = Vector(23.56678, 56678.56778, 2.56677) / 14.5667
    assert bool(divided - scaled) is False
    # actual precision is abs_tol=1e-9
    assert not Vector(1e-8, 0, 0).is_null


def test_magnitude():
    """abs() and magnitude both return the Euclidean length."""
    vec = Vector(3, 4, 5)
    assert math.isclose(abs(vec), 7.0710678118654755)
    assert math.isclose(vec.magnitude, 7.0710678118654755)


def test_magnitude_square():
    """magnitude_square is the squared length (no square root)."""
    assert math.isclose(Vector(3, 4, 5).magnitude_square, 50)
def test_normalize():
    """normalize() returns a unit vector in the same direction."""
    assert Vector(2, 0, 0).normalize() == (1, 0, 0)


def test_normalize_to_length():
    """normalize(length) scales the direction to the requested length."""
    assert Vector(2, 0, 0).normalize(4) == (4, 0, 0)


def test_orthogonal_ccw():
    """orthogonal() defaults to the counter-clockwise normal."""
    assert Vector(3, 4).orthogonal() == (-4, 3)


def test_orthogonal_cw():
    """orthogonal(False) returns the clockwise normal."""
    assert Vector(3, 4).orthogonal(False) == (4, -3)


def test_negative():
    """Unary minus negates every component."""
    assert -Vector(2, 3, 4) == (-2, -3, -4)
def test_add_scalar():
    """Scalar addition applies component-wise."""
    assert Vector(2, 3, 4) + 3 == (5, 6, 7)


def test_iadd_scalar():
    """In-place scalar addition."""
    vec = Vector(2, 3, 4)
    vec += 3
    assert vec == (5, 6, 7)


def test_sub_scalar():
    """Scalar subtraction applies component-wise."""
    assert Vector(2, 3, 4) - 3 == (-1, 0, 1)


def test_isub_scalar():
    """In-place scalar subtraction."""
    vec = Vector(2, 3, 4)
    vec -= 3
    assert vec == (-1, 0, 1)


def test_add_vector():
    """Vector + tuple adds component-wise."""
    assert Vector(2, 3, 4) + (7, 7, 7) == (9, 10, 11)


def test_iadd_vector():
    """In-place vector addition."""
    vec = Vector(2, 3, 4)
    vec += (7, 7, 7)
    assert vec == (9, 10, 11)


def test_radd_vector():
    """Reflected addition: tuple + Vector."""
    assert (7, 7, 7) + Vector(2, 3, 4) == (9, 10, 11)


def test_sub_vector():
    """Vector - tuple subtracts component-wise."""
    assert Vector(2, 3, 4) - (7, 7, 7) == (-5, -4, -3)


def test_isub_vector():
    """In-place vector subtraction."""
    vec = Vector(2, 3, 4)
    vec -= (7, 7, 7)
    assert vec == (-5, -4, -3)


def test_rsub_vector():
    """Reflected subtraction: tuple - Vector."""
    assert (7, 7, 7) - Vector(2, 3, 4) == (5, 4, 3)


def test_rsub_scalar_vector():
    """Reflected subtraction: scalar - Vector."""
    assert 7 - Vector(2, 3, 4) == (5, 4, 3)


def test_mul_scalar():
    """Scalar multiplication applies component-wise."""
    assert Vector(2, 3, 4) * 2 == (4, 6, 8)


def test_imul_scalar():
    """In-place scalar multiplication."""
    vec = Vector(2, 3, 4)
    vec *= 2
    assert vec == (4, 6, 8)


def test_rmul_scalar():
    """Reflected multiplication: scalar * Vector."""
    assert 2 * Vector(2, 3, 4) == (4, 6, 8)


def test_div_scalar():
    """Scalar division applies component-wise."""
    assert Vector(2, 3, 4) / 2 == (1, 1.5, 2)


def test_idiv_scalar():
    """In-place scalar division."""
    vec = Vector(2, 3, 4)
    vec /= 2
    assert vec == (1, 1.5, 2)


def test_rdiv_scalar():
    """Reflected division: scalar / Vector divides into each component."""
    assert 2 / Vector(2, 3, 4) == (1, 0.66666666667, 0.5)
def test_dot_product():
    """dot() returns the scalar product."""
    vec_a = Vector(2, 7, 1)
    vec_b = Vector(3, 9, 8)
    assert math.isclose(vec_a.dot(vec_b), 77)


def test_angle_deg():
    """angle_deg covers all four quadrants."""
    assert math.isclose(Vector(0, 1).angle_deg, 90)
    assert math.isclose(Vector(0, -1).angle_deg, -90)
    assert math.isclose(Vector(1, 1).angle_deg, 45)
    assert math.isclose(Vector(-1, 1).angle_deg, 135)


def test_angle_between():
    """angle_between() is symmetric and unsigned."""
    up = Vector(0, 1)
    diagonal = Vector(1, 1)
    assert math.isclose(up.angle_between(diagonal), math.pi / 4)
    # reverse order, same result
    assert math.isclose(diagonal.angle_between(up), math.pi / 4)
    assert math.isclose(up.angle_between(Vector(0, -1)), math.pi)
def test_angle_about():
    """angle_about() measures the signed angle around an extrusion axis."""
    # +z extrusion: same magnitude and sign as angle_between.
    axis = Vector(0, 0, 1)
    start = Vector(1, 0, 0)
    target = Vector(1, 1, 0)
    assert math.isclose(start.angle_between(target), math.pi / 4)
    assert math.isclose(axis.angle_about(start, target), math.pi / 4)
    # -z extrusion flips the orientation (result normalized into [0, tau)).
    axis = Vector(0, 0, -1)
    assert math.isclose(start.angle_between(target), math.pi / 4)
    assert math.isclose(axis.angle_about(start, target), (-math.pi / 4) % math.tau)
    # Identical vectors enclose a zero angle.
    axis = Vector(0, 0, 1)
    start = Vector(1, 1, 0)
    target = Vector(1, 1, 0)
    assert math.isclose(start.angle_between(target), 0, abs_tol=1e-5)
    assert math.isclose(axis.angle_about(start, target), 0)
    # Arbitrary axis: agrees with the cross-product normal.
    axis = Vector(0, 1, 0)
    start = Vector(1, 1, 0)
    target = Vector(0, 1, -1)
    assert math.isclose(start.angle_between(target), math.pi / 3, abs_tol=1e-5)
    normal = start.cross(target)
    assert math.isclose(start.angle_between(target), normal.angle_about(start, target))
    assert math.isclose(axis.angle_about(start, target), math.pi / 2)
def test_cross_product():
    """cross() returns the 3D vector product."""
    assert Vector(2, 7, 9).cross(Vector(3, 9, 1)) == (-74, 25, -3)


def test_rot_z():
    """rotate_deg() rotates around the z-axis, leaving z unchanged."""
    assert Vector(2, 2, 7).rotate_deg(90) == (-2, 2, 7)


def test_lerp():
    """lerp(other, t) interpolates linearly between two vectors."""
    start = Vector(1, 1, 1)
    end = Vector(4, 4, 4)
    assert start.lerp(end, .5) == (2.5, 2.5, 2.5)
    assert start.lerp(end, 0) == (1, 1, 1)
    assert start.lerp(end, 1) == (4, 4, 4)


def test_replace():
    """replace() swaps individual components, keeping the rest."""
    vec = Vector(1, 2, 3)
    assert vec.replace(x=7) == (7, 2, 3)
    assert vec.replace(y=7) == (1, 7, 3)
    assert vec.replace(z=7) == (1, 2, 7)
    assert vec.replace(x=7, z=7) == (7, 2, 7)


def test_project():
    """project() maps a point onto the direction of this vector."""
    axis = Vector(10, 0, 0)
    assert axis.project((5, 0, 0)) == (5, 0, 0)
    assert axis.project((5, 5, 0)) == (5, 0, 0)
    assert axis.project((5, 5, 5)) == (5, 0, 0)
    diagonal = Vector(10, 10, 0)
    assert diagonal.project((10, 0, 0)) == (5, 5, 0)
|
[
"ezdxf.math.vector.Vector.from_deg_angle",
"copy.deepcopy",
"math.radians",
"copy.copy",
"ezdxf.math.vector.Vector.from_angle",
"math.sin",
"ezdxf.math.vector.Vector",
"math.isclose",
"math.cos"
] |
[((86, 94), 'ezdxf.math.vector.Vector', 'Vector', ([], {}), '()\n', (92, 94), False, 'from ezdxf.math.vector import Vector\n'), ((183, 197), 'ezdxf.math.vector.Vector', 'Vector', (['(2, 3)'], {}), '((2, 3))\n', (189, 197), False, 'from ezdxf.math.vector import Vector\n'), ((241, 258), 'ezdxf.math.vector.Vector', 'Vector', (['(2, 3, 4)'], {}), '((2, 3, 4))\n', (247, 258), False, 'from ezdxf.math.vector import Vector\n'), ((323, 335), 'ezdxf.math.vector.Vector', 'Vector', (['(1)', '(2)'], {}), '(1, 2)\n', (329, 335), False, 'from ezdxf.math.vector import Vector\n'), ((448, 472), 'ezdxf.math.vector.Vector.from_deg_angle', 'Vector.from_deg_angle', (['(0)'], {}), '(0)\n', (469, 472), False, 'from ezdxf.math.vector import Vector\n'), ((531, 567), 'ezdxf.math.vector.Vector.from_deg_angle', 'Vector.from_deg_angle', (['angle', 'length'], {}), '(angle, length)\n', (552, 567), False, 'from ezdxf.math.vector import Vector\n'), ((725, 740), 'ezdxf.math.vector.Vector', 'Vector', (['(1)', '(2)', '(3)'], {}), '(1, 2, 3)\n', (731, 740), False, 'from ezdxf.math.vector import Vector\n'), ((804, 820), 'math.radians', 'math.radians', (['(50)'], {}), '(50)\n', (816, 820), False, 'import math\n'), ((978, 993), 'ezdxf.math.vector.Vector', 'Vector', (['(1)', '(2)', '(3)'], {}), '(1, 2, 3)\n', (984, 993), False, 'from ezdxf.math.vector import Vector\n'), ((1274, 1289), 'ezdxf.math.vector.Vector', 'Vector', (['(1)', '(2)', '(3)'], {}), '(1, 2, 3)\n', (1280, 1289), False, 'from ezdxf.math.vector import Vector\n'), ((1405, 1432), 'ezdxf.math.vector.Vector', 'Vector', (['(1.123)', '(2.123)', '(3.123)'], {}), '(1.123, 2.123, 3.123)\n', (1411, 1432), False, 'from ezdxf.math.vector import Vector\n'), ((1591, 1606), 'ezdxf.math.vector.Vector', 'Vector', (['(1)', '(2)', '(3)'], {}), '(1, 2, 3)\n', (1597, 1606), False, 'from ezdxf.math.vector import Vector\n'), ((1635, 1648), 'copy.copy', 'copy.copy', (['l1'], {}), '(l1)\n', (1644, 1648), False, 'import copy\n'), ((1733, 1750), 'copy.deepcopy', 
'copy.deepcopy', (['l1'], {}), '(l1)\n', (1746, 1750), False, 'import copy\n'), ((1861, 1873), 'ezdxf.math.vector.Vector', 'Vector', (['(3)', '(3)'], {}), '(3, 3)\n', (1867, 1873), False, 'from ezdxf.math.vector import Vector\n'), ((1885, 1914), 'math.isclose', 'math.isclose', (['v.angle_deg', '(45)'], {}), '(v.angle_deg, 45)\n', (1897, 1914), False, 'import math\n'), ((2002, 2017), 'ezdxf.math.vector.Vector', 'Vector', (['(3)', '(3)', '(0)'], {}), '(3, 3, 0)\n', (2008, 2017), False, 'from ezdxf.math.vector import Vector\n'), ((2029, 2066), 'math.isclose', 'math.isclose', (['v.spatial_angle_deg', '(45)'], {}), '(v.spatial_angle_deg, 45)\n', (2041, 2066), False, 'import math\n'), ((2165, 2180), 'ezdxf.math.vector.Vector', 'Vector', (['(1)', '(2)', '(3)'], {}), '(1, 2, 3)\n', (2171, 2180), False, 'from ezdxf.math.vector import Vector\n'), ((2245, 2260), 'ezdxf.math.vector.Vector', 'Vector', (['(2)', '(3)', '(4)'], {}), '(2, 3, 4)\n', (2251, 2260), False, 'from ezdxf.math.vector import Vector\n'), ((2392, 2400), 'ezdxf.math.vector.Vector', 'Vector', ([], {}), '()\n', (2398, 2400), False, 'from ezdxf.math.vector import Vector\n'), ((2639, 2647), 'ezdxf.math.vector.Vector', 'Vector', ([], {}), '()\n', (2645, 2647), False, 'from ezdxf.math.vector import Vector\n'), ((2968, 2983), 'ezdxf.math.vector.Vector', 'Vector', (['(3)', '(4)', '(5)'], {}), '(3, 4, 5)\n', (2974, 2983), False, 'from ezdxf.math.vector import Vector\n'), ((3047, 3092), 'math.isclose', 'math.isclose', (['v.magnitude', '(7.0710678118654755)'], {}), '(v.magnitude, 7.0710678118654755)\n', (3059, 3092), False, 'import math\n'), ((3132, 3147), 'ezdxf.math.vector.Vector', 'Vector', (['(3)', '(4)', '(5)'], {}), '(3, 4, 5)\n', (3138, 3147), False, 'from ezdxf.math.vector import Vector\n'), ((3159, 3195), 'math.isclose', 'math.isclose', (['v.magnitude_square', '(50)'], {}), '(v.magnitude_square, 50)\n', (3171, 3195), False, 'import math\n'), ((3228, 3243), 'ezdxf.math.vector.Vector', 'Vector', (['(2)', '(0)', 
'(0)'], {}), '(2, 0, 0)\n', (3234, 3243), False, 'from ezdxf.math.vector import Vector\n'), ((3324, 3339), 'ezdxf.math.vector.Vector', 'Vector', (['(2)', '(0)', '(0)'], {}), '(2, 0, 0)\n', (3330, 3339), False, 'from ezdxf.math.vector import Vector\n'), ((3416, 3428), 'ezdxf.math.vector.Vector', 'Vector', (['(3)', '(4)'], {}), '(3, 4)\n', (3422, 3428), False, 'from ezdxf.math.vector import Vector\n'), ((3502, 3514), 'ezdxf.math.vector.Vector', 'Vector', (['(3)', '(4)'], {}), '(3, 4)\n', (3508, 3514), False, 'from ezdxf.math.vector import Vector\n'), ((3588, 3603), 'ezdxf.math.vector.Vector', 'Vector', (['(2)', '(3)', '(4)'], {}), '(2, 3, 4)\n', (3594, 3603), False, 'from ezdxf.math.vector import Vector\n'), ((3667, 3682), 'ezdxf.math.vector.Vector', 'Vector', (['(2)', '(3)', '(4)'], {}), '(2, 3, 4)\n', (3673, 3682), False, 'from ezdxf.math.vector import Vector\n'), ((3747, 3762), 'ezdxf.math.vector.Vector', 'Vector', (['(2)', '(3)', '(4)'], {}), '(2, 3, 4)\n', (3753, 3762), False, 'from ezdxf.math.vector import Vector\n'), ((3833, 3848), 'ezdxf.math.vector.Vector', 'Vector', (['(2)', '(3)', '(4)'], {}), '(2, 3, 4)\n', (3839, 3848), False, 'from ezdxf.math.vector import Vector\n'), ((3914, 3929), 'ezdxf.math.vector.Vector', 'Vector', (['(2)', '(3)', '(4)'], {}), '(2, 3, 4)\n', (3920, 3929), False, 'from ezdxf.math.vector import Vector\n'), ((4001, 4016), 'ezdxf.math.vector.Vector', 'Vector', (['(2)', '(3)', '(4)'], {}), '(2, 3, 4)\n', (4007, 4016), False, 'from ezdxf.math.vector import Vector\n'), ((4091, 4106), 'ezdxf.math.vector.Vector', 'Vector', (['(2)', '(3)', '(4)'], {}), '(2, 3, 4)\n', (4097, 4106), False, 'from ezdxf.math.vector import Vector\n'), ((4188, 4203), 'ezdxf.math.vector.Vector', 'Vector', (['(2)', '(3)', '(4)'], {}), '(2, 3, 4)\n', (4194, 4203), False, 'from ezdxf.math.vector import Vector\n'), ((4277, 4292), 'ezdxf.math.vector.Vector', 'Vector', (['(2)', '(3)', '(4)'], {}), '(2, 3, 4)\n', (4283, 4292), False, 'from ezdxf.math.vector import 
Vector\n'), ((4368, 4383), 'ezdxf.math.vector.Vector', 'Vector', (['(2)', '(3)', '(4)'], {}), '(2, 3, 4)\n', (4374, 4383), False, 'from ezdxf.math.vector import Vector\n'), ((4466, 4481), 'ezdxf.math.vector.Vector', 'Vector', (['(2)', '(3)', '(4)'], {}), '(2, 3, 4)\n', (4472, 4481), False, 'from ezdxf.math.vector import Vector\n'), ((4561, 4576), 'ezdxf.math.vector.Vector', 'Vector', (['(2)', '(3)', '(4)'], {}), '(2, 3, 4)\n', (4567, 4576), False, 'from ezdxf.math.vector import Vector\n'), ((4640, 4655), 'ezdxf.math.vector.Vector', 'Vector', (['(2)', '(3)', '(4)'], {}), '(2, 3, 4)\n', (4646, 4655), False, 'from ezdxf.math.vector import Vector\n'), ((4720, 4735), 'ezdxf.math.vector.Vector', 'Vector', (['(2)', '(3)', '(4)'], {}), '(2, 3, 4)\n', (4726, 4735), False, 'from ezdxf.math.vector import Vector\n'), ((4807, 4822), 'ezdxf.math.vector.Vector', 'Vector', (['(2)', '(3)', '(4)'], {}), '(2, 3, 4)\n', (4813, 4822), False, 'from ezdxf.math.vector import Vector\n'), ((4886, 4901), 'ezdxf.math.vector.Vector', 'Vector', (['(2)', '(3)', '(4)'], {}), '(2, 3, 4)\n', (4892, 4901), False, 'from ezdxf.math.vector import Vector\n'), ((4968, 4983), 'ezdxf.math.vector.Vector', 'Vector', (['(2)', '(3)', '(4)'], {}), '(2, 3, 4)\n', (4974, 4983), False, 'from ezdxf.math.vector import Vector\n'), ((5057, 5072), 'ezdxf.math.vector.Vector', 'Vector', (['(2)', '(3)', '(4)'], {}), '(2, 3, 4)\n', (5063, 5072), False, 'from ezdxf.math.vector import Vector\n'), ((5152, 5167), 'ezdxf.math.vector.Vector', 'Vector', (['(2)', '(7)', '(1)'], {}), '(2, 7, 1)\n', (5158, 5167), False, 'from ezdxf.math.vector import Vector\n'), ((5177, 5192), 'ezdxf.math.vector.Vector', 'Vector', (['(3)', '(9)', '(8)'], {}), '(3, 9, 8)\n', (5183, 5192), False, 'from ezdxf.math.vector import Vector\n'), ((5506, 5518), 'ezdxf.math.vector.Vector', 'Vector', (['(0)', '(1)'], {}), '(0, 1)\n', (5512, 5518), False, 'from ezdxf.math.vector import Vector\n'), ((5528, 5540), 'ezdxf.math.vector.Vector', 'Vector', (['(1)', 
'(1)'], {}), '(1, 1)\n', (5534, 5540), False, 'from ezdxf.math.vector import Vector\n'), ((5585, 5617), 'math.isclose', 'math.isclose', (['angle', '(math.pi / 4)'], {}), '(angle, math.pi / 4)\n', (5597, 5617), False, 'import math\n'), ((5695, 5727), 'math.isclose', 'math.isclose', (['angle', '(math.pi / 4)'], {}), '(angle, math.pi / 4)\n', (5707, 5727), False, 'import math\n'), ((5783, 5811), 'math.isclose', 'math.isclose', (['angle', 'math.pi'], {}), '(angle, math.pi)\n', (5795, 5811), False, 'import math\n'), ((5854, 5869), 'ezdxf.math.vector.Vector', 'Vector', (['(0)', '(0)', '(1)'], {}), '(0, 0, 1)\n', (5860, 5869), False, 'from ezdxf.math.vector import Vector\n'), ((5878, 5893), 'ezdxf.math.vector.Vector', 'Vector', (['(1)', '(0)', '(0)'], {}), '(1, 0, 0)\n', (5884, 5893), False, 'from ezdxf.math.vector import Vector\n'), ((5902, 5917), 'ezdxf.math.vector.Vector', 'Vector', (['(1)', '(1)', '(0)'], {}), '(1, 1, 0)\n', (5908, 5917), False, 'from ezdxf.math.vector import Vector\n'), ((6058, 6074), 'ezdxf.math.vector.Vector', 'Vector', (['(0)', '(0)', '(-1)'], {}), '(0, 0, -1)\n', (6064, 6074), False, 'from ezdxf.math.vector import Vector\n'), ((6229, 6244), 'ezdxf.math.vector.Vector', 'Vector', (['(0)', '(0)', '(1)'], {}), '(0, 0, 1)\n', (6235, 6244), False, 'from ezdxf.math.vector import Vector\n'), ((6253, 6268), 'ezdxf.math.vector.Vector', 'Vector', (['(1)', '(1)', '(0)'], {}), '(1, 1, 0)\n', (6259, 6268), False, 'from ezdxf.math.vector import Vector\n'), ((6277, 6292), 'ezdxf.math.vector.Vector', 'Vector', (['(1)', '(1)', '(0)'], {}), '(1, 1, 0)\n', (6283, 6292), False, 'from ezdxf.math.vector import Vector\n'), ((6427, 6442), 'ezdxf.math.vector.Vector', 'Vector', (['(0)', '(1)', '(0)'], {}), '(0, 1, 0)\n', (6433, 6442), False, 'from ezdxf.math.vector import Vector\n'), ((6451, 6466), 'ezdxf.math.vector.Vector', 'Vector', (['(1)', '(1)', '(0)'], {}), '(1, 1, 0)\n', (6457, 6466), False, 'from ezdxf.math.vector import Vector\n'), ((6475, 6491), 
'ezdxf.math.vector.Vector', 'Vector', (['(0)', '(1)', '(-1)'], {}), '(0, 1, -1)\n', (6481, 6491), False, 'from ezdxf.math.vector import Vector\n'), ((6750, 6765), 'ezdxf.math.vector.Vector', 'Vector', (['(2)', '(7)', '(9)'], {}), '(2, 7, 9)\n', (6756, 6765), False, 'from ezdxf.math.vector import Vector\n'), ((6775, 6790), 'ezdxf.math.vector.Vector', 'Vector', (['(3)', '(9)', '(1)'], {}), '(3, 9, 1)\n', (6781, 6790), False, 'from ezdxf.math.vector import Vector\n'), ((6936, 6951), 'ezdxf.math.vector.Vector', 'Vector', (['(1)', '(1)', '(1)'], {}), '(1, 1, 1)\n', (6942, 6951), False, 'from ezdxf.math.vector import Vector\n'), ((6961, 6976), 'ezdxf.math.vector.Vector', 'Vector', (['(4)', '(4)', '(4)'], {}), '(4, 4, 4)\n', (6967, 6976), False, 'from ezdxf.math.vector import Vector\n'), ((7131, 7146), 'ezdxf.math.vector.Vector', 'Vector', (['(1)', '(2)', '(3)'], {}), '(1, 2, 3)\n', (7137, 7146), False, 'from ezdxf.math.vector import Vector\n'), ((7338, 7354), 'ezdxf.math.vector.Vector', 'Vector', (['(10)', '(0)', '(0)'], {}), '(10, 0, 0)\n', (7344, 7354), False, 'from ezdxf.math.vector import Vector\n'), ((7499, 7516), 'ezdxf.math.vector.Vector', 'Vector', (['(10)', '(10)', '(0)'], {}), '(10, 10, 0)\n', (7505, 7516), False, 'from ezdxf.math.vector import Vector\n'), ((137, 145), 'ezdxf.math.vector.Vector', 'Vector', ([], {}), '()\n', (143, 145), False, 'from ezdxf.math.vector import Vector\n'), ((379, 394), 'ezdxf.math.vector.Vector', 'Vector', (['(5)', '(6)', '(7)'], {}), '(5, 6, 7)\n', (385, 394), False, 'from ezdxf.math.vector import Vector\n'), ((397, 412), 'ezdxf.math.vector.Vector', 'Vector', (['(1)', '(1)', '(1)'], {}), '(1, 1, 1)\n', (403, 412), False, 'from ezdxf.math.vector import Vector\n'), ((848, 880), 'ezdxf.math.vector.Vector.from_angle', 'Vector.from_angle', (['angle', 'length'], {}), '(angle, length)\n', (865, 880), False, 'from ezdxf.math.vector import Vector\n'), ((1948, 1964), 'math.radians', 'math.radians', (['(45)'], {}), '(45)\n', (1960, 1964), 
False, 'import math\n'), ((2108, 2124), 'math.radians', 'math.radians', (['(45)'], {}), '(45)\n', (2120, 2124), False, 'import math\n'), ((2349, 2361), 'ezdxf.math.vector.Vector', 'Vector', (['(1)', '(2)'], {}), '(1, 2)\n', (2355, 2361), False, 'from ezdxf.math.vector import Vector\n'), ((2432, 2470), 'ezdxf.math.vector.Vector', 'Vector', (['(23.56678)', '(56678.56778)', '(2.56677)'], {}), '(23.56678, 56678.56778, 2.56677)\n', (2438, 2470), False, 'from ezdxf.math.vector import Vector\n'), ((2498, 2536), 'ezdxf.math.vector.Vector', 'Vector', (['(23.56678)', '(56678.56778)', '(2.56677)'], {}), '(23.56678, 56678.56778, 2.56677)\n', (2504, 2536), False, 'from ezdxf.math.vector import Vector\n'), ((2588, 2603), 'ezdxf.math.vector.Vector', 'Vector', (['(0)', '(0)', '(0)'], {}), '(0, 0, 0)\n', (2594, 2603), False, 'from ezdxf.math.vector import Vector\n'), ((2686, 2724), 'ezdxf.math.vector.Vector', 'Vector', (['(23.56678)', '(56678.56778)', '(2.56677)'], {}), '(23.56678, 56678.56778, 2.56677)\n', (2692, 2724), False, 'from ezdxf.math.vector import Vector\n'), ((2752, 2790), 'ezdxf.math.vector.Vector', 'Vector', (['(23.56678)', '(56678.56778)', '(2.56677)'], {}), '(23.56678, 56678.56778, 2.56677)\n', (2758, 2790), False, 'from ezdxf.math.vector import Vector\n'), ((5757, 5770), 'ezdxf.math.vector.Vector', 'Vector', (['(0)', '(-1)'], {}), '(0, -1)\n', (5763, 5770), False, 'from ezdxf.math.vector import Vector\n'), ((585, 604), 'math.radians', 'math.radians', (['angle'], {}), '(angle)\n', (597, 604), False, 'import math\n'), ((632, 651), 'math.radians', 'math.radians', (['angle'], {}), '(angle)\n', (644, 651), False, 'import math\n'), ((1520, 1535), 'ezdxf.math.vector.Vector', 'Vector', (['(1)', '(2)', '(3)'], {}), '(1, 2, 3)\n', (1526, 1535), False, 'from ezdxf.math.vector import Vector\n'), ((2327, 2342), 'ezdxf.math.vector.Vector', 'Vector', (['(1)', '(2)', '(3)'], {}), '(1, 2, 3)\n', (2333, 2342), False, 'from ezdxf.math.vector import Vector\n'), ((2909, 2928), 
'ezdxf.math.vector.Vector', 'Vector', (['(1e-08)', '(0)', '(0)'], {}), '(1e-08, 0, 0)\n', (2915, 2928), False, 'from ezdxf.math.vector import Vector\n'), ((5281, 5293), 'ezdxf.math.vector.Vector', 'Vector', (['(0)', '(1)'], {}), '(0, 1)\n', (5287, 5293), False, 'from ezdxf.math.vector import Vector\n'), ((5333, 5346), 'ezdxf.math.vector.Vector', 'Vector', (['(0)', '(-1)'], {}), '(0, -1)\n', (5339, 5346), False, 'from ezdxf.math.vector import Vector\n'), ((5387, 5399), 'ezdxf.math.vector.Vector', 'Vector', (['(1)', '(1)'], {}), '(1, 1)\n', (5393, 5399), False, 'from ezdxf.math.vector import Vector\n'), ((5439, 5452), 'ezdxf.math.vector.Vector', 'Vector', (['(-1)', '(1)'], {}), '(-1, 1)\n', (5445, 5452), False, 'from ezdxf.math.vector import Vector\n'), ((885, 900), 'math.cos', 'math.cos', (['angle'], {}), '(angle)\n', (893, 900), False, 'import math\n'), ((911, 926), 'math.sin', 'math.sin', (['angle'], {}), '(angle)\n', (919, 926), False, 'import math\n'), ((6863, 6878), 'ezdxf.math.vector.Vector', 'Vector', (['(2)', '(2)', '(7)'], {}), '(2, 2, 7)\n', (6869, 6878), False, 'from ezdxf.math.vector import Vector\n')]
|
"""
Slixmpp: The Slick XMPP Library
Implementation of xeps for Internet of Things
http://wiki.xmpp.org/web/Tech_pages/IoT_systems
Copyright (C) 2013 Sustainable Innovation, <EMAIL>, <EMAIL>
This file is part of Slixmpp.
See the file LICENSE for copying permission.
"""
from slixmpp import Iq, Message
from slixmpp.xmlstream import register_stanza_plugin, ElementBase, ET, JID
from re import match
class Control(ElementBase):
    """Namespace placeholder element; never used as a stanza itself."""

    namespace = 'urn:xmpp:iot:control'
    name = 'control'
    plugin_attrib = name
    # This element exposes no readable/writable interfaces.
    interfaces = set()
class ControlSet(ElementBase):
    """The ``<set/>`` stanza of the ``urn:xmpp:iot:control`` namespace.

    Carries the nodes to address and the typed data values that should
    be written to them.
    """
    namespace = 'urn:xmpp:iot:control'
    name = 'set'
    plugin_attrib = name
    interfaces = set(['nodes','datas'])

    def __init__(self, xml=None, parent=None):
        ElementBase.__init__(self, xml, parent)
        # Caches of node IDs / data names already added; used as a
        # duplicate guard by add_node()/add_data().
        self._nodes = set()
        self._datas = set()

    def setup(self, xml=None):
        """
        Populate the stanza object using an optional XML object.
        Overrides ElementBase.setup
        Caches item information.
        Arguments:
            xml -- Use an existing XML object for the stanza's values.
        """
        ElementBase.setup(self, xml)
        self._nodes = set([node['nodeId'] for node in self['nodes']])
        self._datas = set([data['name'] for data in self['datas']])

    def add_node(self, nodeId, sourceId=None, cacheType=None):
        """
        Add a new node element. Each item is required to have a
        nodeId, but may also specify a sourceId value and cacheType.
        Arguments:
            nodeId    -- The ID for the node.
            sourceId  -- [optional] identifying the data source controlling the device
            cacheType -- [optional] narrowing down the search to a specific kind of node
        Returns the new node, or None if nodeId was already present.
        """
        if nodeId in self._nodes:
            return None
        self._nodes.add(nodeId)
        node = RequestNode(parent=self)
        node['nodeId'] = nodeId
        node['sourceId'] = sourceId
        node['cacheType'] = cacheType
        self.iterables.append(node)
        return node

    def del_node(self, nodeId):
        """
        Remove a single node.
        Arguments:
            nodeId -- Node ID of the item to remove.
        Returns True if a node was removed, False otherwise.
        """
        if nodeId in self._nodes:
            nodes = [i for i in self.iterables if isinstance(i, RequestNode)]
            for node in nodes:
                if node['nodeId'] == nodeId:
                    self.xml.remove(node.xml)
                    self.iterables.remove(node)
                    # Keep the cache in sync so the ID can be re-added
                    # later (previously the stale entry blocked add_node).
                    self._nodes.discard(nodeId)
                    return True
        return False

    def get_nodes(self):
        """Return all nodes."""
        nodes = []
        for node in self['substanzas']:
            if isinstance(node, RequestNode):
                nodes.append(node)
        return nodes

    def set_nodes(self, nodes):
        """
        Set or replace all nodes. The given nodes must be in a
        list or set where each item is a tuple of the form:
            (nodeId, sourceId, cacheType)
        or an existing RequestNode element.
        Arguments:
            nodes -- A series of nodes in tuple format.
        """
        self.del_nodes()
        for node in nodes:
            if isinstance(node, RequestNode):
                self.add_node(node['nodeId'], node['sourceId'], node['cacheType'])
            else:
                nodeId, sourceId, cacheType = node
                self.add_node(nodeId, sourceId, cacheType)

    def del_nodes(self):
        """Remove all nodes."""
        self._nodes = set()
        nodes = [i for i in self.iterables if isinstance(i, RequestNode)]
        for node in nodes:
            self.xml.remove(node.xml)
            self.iterables.remove(node)

    def add_data(self, name, typename, value):
        """
        Add a new data element.
        Arguments:
            name     -- The name of the data element
            typename -- The type of data element
                        (boolean, color, string, date, dateTime,
                        double, duration, int, long, time)
            value    -- The value of the data element
        Returns the new data element, or None if the name is already in
        use or the typename is not recognized.
        """
        if name in self._datas:
            return None
        # Resolved at call time because the parameter classes are
        # defined later in this module.
        param_classes = {
            "boolean": BooleanParameter,
            "color": ColorParameter,
            "string": StringParameter,
            "date": DateParameter,
            "dateTime": DateTimeParameter,
            "double": DoubleParameter,
            "duration": DurationParameter,
            "int": IntParameter,
            "long": LongParameter,
            "time": TimeParameter,
        }
        data_cls = param_classes.get(typename)
        if data_cls is None:
            # An unknown typename previously crashed with a TypeError on
            # None; report the failure by returning None instead, which
            # matches the duplicate-name path above.
            return None
        dataObj = data_cls(parent=self)
        dataObj['name'] = name
        dataObj['value'] = value
        self._datas.add(name)
        self.iterables.append(dataObj)
        return dataObj

    def del_data(self, name):
        """
        Remove a single data element.
        Arguments:
            name -- The data element name to remove.
        Returns True if an element was removed, False otherwise.
        """
        if name in self._datas:
            datas = [i for i in self.iterables if isinstance(i, BaseParameter)]
            for data in datas:
                if data['name'] == name:
                    self.xml.remove(data.xml)
                    self.iterables.remove(data)
                    # Keep the cache in sync so the name can be re-added.
                    self._datas.discard(name)
                    return True
        return False

    def get_datas(self):
        """ Return all data elements. """
        datas = []
        for data in self['substanzas']:
            if isinstance(data, BaseParameter):
                datas.append(data)
        return datas

    def set_datas(self, datas):
        """
        Set or replace all data elements. The given elements must be in a
        list or set where each item is a data element (numeric, string, boolean, dateTime, timeSpan or enum)
        Arguments:
            datas -- A series of data elements.
        """
        self.del_datas()
        for data in datas:
            self.add_data(name=data['name'], typename=data._get_typename(), value=data['value'])

    def del_datas(self):
        """Remove all data elements."""
        self._datas = set()
        datas = [i for i in self.iterables if isinstance(i, BaseParameter)]
        for data in datas:
            self.xml.remove(data.xml)
            self.iterables.remove(data)
class RequestNode(ElementBase):
    """A single node reference carried inside a control request."""

    namespace = 'urn:xmpp:iot:control'
    name = 'node'
    plugin_attrib = name
    interfaces = set(['nodeId', 'sourceId', 'cacheType'])
class ControlSetResponse(ElementBase):
    """The <setResponse/> stanza of urn:xmpp:iot:control.

    Reports the outcome of a control set request, listing the affected
    nodes and parameters.
    """
    namespace = 'urn:xmpp:iot:control'
    name = 'setResponse'
    plugin_attrib = name
    interfaces = set(['responseCode'])
    def __init__(self, xml=None, parent=None):
        ElementBase.__init__(self, xml, parent)
        # Caches of node IDs / parameter names already added; used as a
        # duplicate guard by add_node()/add_data().
        self._nodes = set()
        self._datas = set()
    def setup(self, xml=None):
        """
        Populate the stanza object using an optional XML object.
        Overrides ElementBase.setup
        Caches item information.
        Arguments:
            xml -- Use an existing XML object for the stanza's values.
        """
        ElementBase.setup(self, xml)
        self._nodes = set([node['nodeId'] for node in self['nodes']])
        self._datas = set([data['name'] for data in self['datas']])
    def add_node(self, nodeId, sourceId=None, cacheType=None):
        """
        Add a new node element. Each item is required to have a
        nodeId, but may also specify a sourceId value and cacheType.
        Returns the new node, or None if nodeId was already present.
        Arguments:
            nodeId -- The ID for the node.
            sourceId -- [optional] identifying the data source controlling the device
            cacheType -- [optional] narrowing down the search to a specific kind of node
        """
        if nodeId not in self._nodes:
            self._nodes.add(nodeId)
            node = RequestNode(parent=self)
            node['nodeId'] = nodeId
            node['sourceId'] = sourceId
            node['cacheType'] = cacheType
            self.iterables.append(node)
            return node
        return None
    def del_node(self, nodeId):
        """
        Remove a single node. Returns True if a node was removed.
        Arguments:
            nodeId -- Node ID of the item to remove.
        """
        # NOTE(review): nodeId is not removed from self._nodes here, so a
        # deleted node cannot be re-added via add_node -- confirm intent.
        if nodeId in self._nodes:
            nodes = [i for i in self.iterables if isinstance(i, RequestNode)]
            for node in nodes:
                if node['nodeId'] == nodeId:
                    self.xml.remove(node.xml)
                    self.iterables.remove(node)
                    return True
        return False
    def get_nodes(self):
        """Return all nodes."""
        nodes = []
        for node in self['substanzas']:
            if isinstance(node, RequestNode):
                nodes.append(node)
        return nodes
    def set_nodes(self, nodes):
        """
        Set or replace all nodes. The given nodes must be in a
        list or set where each item is a tuple of the form:
            (nodeId, sourceId, cacheType)
        or an existing RequestNode element.
        Arguments:
            nodes -- A series of nodes in tuple format.
        """
        self.del_nodes()
        for node in nodes:
            if isinstance(node, RequestNode):
                self.add_node(node['nodeId'], node['sourceId'], node['cacheType'])
            else:
                nodeId, sourceId, cacheType = node
                self.add_node(nodeId, sourceId, cacheType)
    def del_nodes(self):
        """Remove all nodes."""
        self._nodes = set()
        nodes = [i for i in self.iterables if isinstance(i, RequestNode)]
        for node in nodes:
            self.xml.remove(node.xml)
            self.iterables.remove(node)
    def add_data(self, name):
        """
        Add a new ResponseParameter element.
        Returns the new element, or None if the name already exists.
        Arguments:
            name -- Name of the parameter
        """
        if name not in self._datas:
            self._datas.add(name)
            data = ResponseParameter(parent=self)
            data['name'] = name
            self.iterables.append(data)
            return data
        return None
    def del_data(self, name):
        """
        Remove a single ResponseParameter element.
        Returns True if an element was removed.
        Arguments:
            name -- The data element name to remove.
        """
        # NOTE(review): name stays cached in self._datas after removal, so
        # add_data(name) will refuse to re-add it -- confirm intent.
        if name in self._datas:
            datas = [i for i in self.iterables if isinstance(i, ResponseParameter)]
            for data in datas:
                if data['name'] == name:
                    self.xml.remove(data.xml)
                    self.iterables.remove(data)
                    return True
        return False
    def get_datas(self):
        """ Return all ResponseParameter elements (as a set). """
        datas = set()
        for data in self['substanzas']:
            if isinstance(data, ResponseParameter):
                datas.add(data)
        return datas
    def set_datas(self, datas):
        """
        Set or replace all data elements. The given elements must be in a
        list or set of ResponseParameter elements
        Arguments:
            datas -- A series of data element names.
        """
        self.del_datas()
        for data in datas:
            self.add_data(name=data['name'])
    def del_datas(self):
        """Remove all ResponseParameter elements."""
        self._datas = set()
        datas = [i for i in self.iterables if isinstance(i, ResponseParameter)]
        for data in datas:
            self.xml.remove(data.xml)
            self.iterables.remove(data)
class Error(ElementBase):
    """Error element carried inside a control response."""

    namespace = 'urn:xmpp:iot:control'
    name = 'error'
    plugin_attrib = name
    interfaces = set(['var', 'text'])

    def get_text(self):
        """Return the text content of the XML tag."""
        return self.xml.text

    def set_text(self, value):
        """Set the text content of the XML tag.

        Arguments:
            value -- string
        """
        self.xml.text = value
        return self

    def del_text(self):
        """Clear the text content of the XML tag."""
        self.xml.text = ""
        return self
class ResponseParameter(ElementBase):
    """A single named parameter reported back in a ControlSetResponse."""

    namespace = 'urn:xmpp:iot:control'
    name = 'parameter'
    plugin_attrib = name
    interfaces = set(['name'])
class BaseParameter(ElementBase):
    """Common base for the typed parameter elements used in a set command.

    Instances added to a SetCommand must be one of the concrete
    subclasses: BooleanParameter, ColorParameter, StringParameter,
    DateParameter, DateTimeParameter, DoubleParameter,
    DurationParameter, IntParameter, LongParameter or TimeParameter.
    """

    namespace = 'urn:xmpp:iot:control'
    name = 'baseParameter'
    plugin_attrib = name
    interfaces = set(['name', 'value'])

    def _get_typename(self):
        """Report the concrete type name (the element's tag name)."""
        return self.name
# Each subclass sets name to its XML tag, which also doubles as the
# typename reported by BaseParameter._get_typename().
class BooleanParameter(BaseParameter):
    """
    Field data of type boolean.
    Note that the value is expressed as a string.
    """
    name = 'boolean'
    plugin_attrib = name
class ColorParameter(BaseParameter):
    """
    Field data of type color.
    Note that the value is expressed as a string.
    """
    name = 'color'
    plugin_attrib = name
class StringParameter(BaseParameter):
    """
    Field data of type string.
    """
    name = 'string'
    plugin_attrib = name
class DateParameter(BaseParameter):
    """
    Field data of type date.
    Note that the value is expressed as a string.
    """
    name = 'date'
    plugin_attrib = name
class DateTimeParameter(BaseParameter):
    """
    Field data of type dateTime.
    Note that the value is expressed as a string.
    """
    name = 'dateTime'
    plugin_attrib = name
class DoubleParameter(BaseParameter):
    """
    Field data of type double.
    Note that the value is expressed as a string.
    """
    name = 'double'
    plugin_attrib = name
class DurationParameter(BaseParameter):
    """
    Field data of type duration.
    Note that the value is expressed as a string.
    """
    name = 'duration'
    plugin_attrib = name
class IntParameter(BaseParameter):
    """
    Field data of type int.
    Note that the value is expressed as a string.
    """
    name = 'int'
    plugin_attrib = name
class LongParameter(BaseParameter):
    """
    Field data of type long (64-bit int).
    Note that the value is expressed as a string.
    """
    name = 'long'
    plugin_attrib = name
class TimeParameter(BaseParameter):
    """
    Field data of type time.
    Note that the value is expressed as a string.
    """
    name = 'time'
    plugin_attrib = name
# Attach the control stanzas to their parent elements.
register_stanza_plugin(Iq, ControlSet)
register_stanza_plugin(Message, ControlSet)
register_stanza_plugin(ControlSet, RequestNode, iterable=True)
# Every typed parameter element may occur repeatedly inside a <set/>.
for _param in (BooleanParameter, ColorParameter, StringParameter,
               DateParameter, DateTimeParameter, DoubleParameter,
               DurationParameter, IntParameter, LongParameter,
               TimeParameter):
    register_stanza_plugin(ControlSet, _param, iterable=True)
del _param
register_stanza_plugin(Iq, ControlSetResponse)
register_stanza_plugin(ControlSetResponse, Error)
register_stanza_plugin(ControlSetResponse, RequestNode, iterable=True)
register_stanza_plugin(ControlSetResponse, ResponseParameter, iterable=True)
|
[
"slixmpp.xmlstream.ElementBase.setup",
"slixmpp.xmlstream.register_stanza_plugin",
"slixmpp.xmlstream.ElementBase.__init__"
] |
[((15156, 15194), 'slixmpp.xmlstream.register_stanza_plugin', 'register_stanza_plugin', (['Iq', 'ControlSet'], {}), '(Iq, ControlSet)\n', (15178, 15194), False, 'from slixmpp.xmlstream import register_stanza_plugin, ElementBase, ET, JID\n'), ((15195, 15238), 'slixmpp.xmlstream.register_stanza_plugin', 'register_stanza_plugin', (['Message', 'ControlSet'], {}), '(Message, ControlSet)\n', (15217, 15238), False, 'from slixmpp.xmlstream import register_stanza_plugin, ElementBase, ET, JID\n'), ((15240, 15302), 'slixmpp.xmlstream.register_stanza_plugin', 'register_stanza_plugin', (['ControlSet', 'RequestNode'], {'iterable': '(True)'}), '(ControlSet, RequestNode, iterable=True)\n', (15262, 15302), False, 'from slixmpp.xmlstream import register_stanza_plugin, ElementBase, ET, JID\n'), ((15304, 15371), 'slixmpp.xmlstream.register_stanza_plugin', 'register_stanza_plugin', (['ControlSet', 'BooleanParameter'], {'iterable': '(True)'}), '(ControlSet, BooleanParameter, iterable=True)\n', (15326, 15371), False, 'from slixmpp.xmlstream import register_stanza_plugin, ElementBase, ET, JID\n'), ((15372, 15437), 'slixmpp.xmlstream.register_stanza_plugin', 'register_stanza_plugin', (['ControlSet', 'ColorParameter'], {'iterable': '(True)'}), '(ControlSet, ColorParameter, iterable=True)\n', (15394, 15437), False, 'from slixmpp.xmlstream import register_stanza_plugin, ElementBase, ET, JID\n'), ((15438, 15504), 'slixmpp.xmlstream.register_stanza_plugin', 'register_stanza_plugin', (['ControlSet', 'StringParameter'], {'iterable': '(True)'}), '(ControlSet, StringParameter, iterable=True)\n', (15460, 15504), False, 'from slixmpp.xmlstream import register_stanza_plugin, ElementBase, ET, JID\n'), ((15505, 15569), 'slixmpp.xmlstream.register_stanza_plugin', 'register_stanza_plugin', (['ControlSet', 'DateParameter'], {'iterable': '(True)'}), '(ControlSet, DateParameter, iterable=True)\n', (15527, 15569), False, 'from slixmpp.xmlstream import register_stanza_plugin, ElementBase, ET, JID\n'), ((15570, 
15638), 'slixmpp.xmlstream.register_stanza_plugin', 'register_stanza_plugin', (['ControlSet', 'DateTimeParameter'], {'iterable': '(True)'}), '(ControlSet, DateTimeParameter, iterable=True)\n', (15592, 15638), False, 'from slixmpp.xmlstream import register_stanza_plugin, ElementBase, ET, JID\n'), ((15639, 15705), 'slixmpp.xmlstream.register_stanza_plugin', 'register_stanza_plugin', (['ControlSet', 'DoubleParameter'], {'iterable': '(True)'}), '(ControlSet, DoubleParameter, iterable=True)\n', (15661, 15705), False, 'from slixmpp.xmlstream import register_stanza_plugin, ElementBase, ET, JID\n'), ((15706, 15774), 'slixmpp.xmlstream.register_stanza_plugin', 'register_stanza_plugin', (['ControlSet', 'DurationParameter'], {'iterable': '(True)'}), '(ControlSet, DurationParameter, iterable=True)\n', (15728, 15774), False, 'from slixmpp.xmlstream import register_stanza_plugin, ElementBase, ET, JID\n'), ((15775, 15838), 'slixmpp.xmlstream.register_stanza_plugin', 'register_stanza_plugin', (['ControlSet', 'IntParameter'], {'iterable': '(True)'}), '(ControlSet, IntParameter, iterable=True)\n', (15797, 15838), False, 'from slixmpp.xmlstream import register_stanza_plugin, ElementBase, ET, JID\n'), ((15839, 15903), 'slixmpp.xmlstream.register_stanza_plugin', 'register_stanza_plugin', (['ControlSet', 'LongParameter'], {'iterable': '(True)'}), '(ControlSet, LongParameter, iterable=True)\n', (15861, 15903), False, 'from slixmpp.xmlstream import register_stanza_plugin, ElementBase, ET, JID\n'), ((15904, 15968), 'slixmpp.xmlstream.register_stanza_plugin', 'register_stanza_plugin', (['ControlSet', 'TimeParameter'], {'iterable': '(True)'}), '(ControlSet, TimeParameter, iterable=True)\n', (15926, 15968), False, 'from slixmpp.xmlstream import register_stanza_plugin, ElementBase, ET, JID\n'), ((15970, 16016), 'slixmpp.xmlstream.register_stanza_plugin', 'register_stanza_plugin', (['Iq', 'ControlSetResponse'], {}), '(Iq, ControlSetResponse)\n', (15992, 16016), False, 'from slixmpp.xmlstream 
import register_stanza_plugin, ElementBase, ET, JID\n'), ((16017, 16066), 'slixmpp.xmlstream.register_stanza_plugin', 'register_stanza_plugin', (['ControlSetResponse', 'Error'], {}), '(ControlSetResponse, Error)\n', (16039, 16066), False, 'from slixmpp.xmlstream import register_stanza_plugin, ElementBase, ET, JID\n'), ((16067, 16137), 'slixmpp.xmlstream.register_stanza_plugin', 'register_stanza_plugin', (['ControlSetResponse', 'RequestNode'], {'iterable': '(True)'}), '(ControlSetResponse, RequestNode, iterable=True)\n', (16089, 16137), False, 'from slixmpp.xmlstream import register_stanza_plugin, ElementBase, ET, JID\n'), ((16138, 16214), 'slixmpp.xmlstream.register_stanza_plugin', 'register_stanza_plugin', (['ControlSetResponse', 'ResponseParameter'], {'iterable': '(True)'}), '(ControlSetResponse, ResponseParameter, iterable=True)\n', (16160, 16214), False, 'from slixmpp.xmlstream import register_stanza_plugin, ElementBase, ET, JID\n'), ((840, 879), 'slixmpp.xmlstream.ElementBase.__init__', 'ElementBase.__init__', (['self', 'xml', 'parent'], {}), '(self, xml, parent)\n', (860, 879), False, 'from slixmpp.xmlstream import register_stanza_plugin, ElementBase, ET, JID\n'), ((1227, 1255), 'slixmpp.xmlstream.ElementBase.setup', 'ElementBase.setup', (['self', 'xml'], {}), '(self, xml)\n', (1244, 1255), False, 'from slixmpp.xmlstream import register_stanza_plugin, ElementBase, ET, JID\n'), ((7272, 7311), 'slixmpp.xmlstream.ElementBase.__init__', 'ElementBase.__init__', (['self', 'xml', 'parent'], {}), '(self, xml, parent)\n', (7292, 7311), False, 'from slixmpp.xmlstream import register_stanza_plugin, ElementBase, ET, JID\n'), ((7659, 7687), 'slixmpp.xmlstream.ElementBase.setup', 'ElementBase.setup', (['self', 'xml'], {}), '(self, xml)\n', (7676, 7687), False, 'from slixmpp.xmlstream import register_stanza_plugin, ElementBase, ET, JID\n')]
|
import sys


def count_factors_of_five(n):
    """Return the exponent of 5 in n! (= the number of trailing zeros of n!).

    Uses Legendre's formula -- sum of n // 5**k for k >= 1 -- which gives
    the same total as counting the multiplicity of 5 for every i in 1..n
    (the original loop), but runs in O(log n) instead of O(n) and stays
    in exact integer arithmetic.  The original used float division
    (i /= 5), which loses precision once i exceeds 2**53.
    """
    total = 0
    power = 5
    while power <= n:
        total += n // power
        power *= 5
    return total


if __name__ == '__main__':
    # Read n from stdin and print the count, as the original script did.
    n = int(sys.stdin.readline())
    print(count_factors_of_five(n))
|
[
"sys.stdin.readline"
] |
[((20, 40), 'sys.stdin.readline', 'sys.stdin.readline', ([], {}), '()\n', (38, 40), False, 'import sys\n')]
|
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
"""
UL call demonstrated: TmrDevice.pulse_out_start()
Purpose: Generate an output pulse using the
specified timer
Demonstration: Outputs user defined pulse on the
specified timer
Steps:
1. Call get_daq_device_inventory() to get the list of available DAQ devices
2. Call DaqDevice() to create a DaqDevice object
3. Call DaqDevice.get_tmr_device() to get the TmrDevice object for the timer
subsystem
4. Verify the TmrDevice object is valid
5. Call DaqDevice.connect() to connect to the device
6. Call TmrDevice.pulse_out_start() to start the output pulse for the specified
timer
7. Call TmrDevice.get_pulse_out_status() to get the output status and display
the status
8. Call TmrDevice.scan_stop() to stop the scan
9. Call DaqDevice.disconnect() and DaqDevice.release() before exiting the
process
"""
from __future__ import print_function
from time import sleep
from sys import stdout
from os import system
from uldaq import (get_daq_device_inventory, DaqDevice, InterfaceType,
TmrIdleState, PulseOutOption, TmrStatus)
# Constants
ERASE_LINE = '\x1b[2K'  # ANSI escape sequence: erase the current terminal line
def main():
    """Timer pulse output example.

    Finds a DAQ device, starts a continuous pulse train on one of its
    timer outputs, reports the output status until interrupted, then
    stops the timer and releases the device.
    """
    # Pulse-train configuration.
    timer_number = 0
    frequency = 1000.0 # Hz
    duty_cycle = 0.5 # 50 percent
    pulse_count = 0 # Continuous
    initial_delay = 0.0
    idle_state = TmrIdleState.LOW
    options = PulseOutOption.DEFAULT
    interface_type = InterfaceType.ANY
    daq_device = None
    tmr_device = None
    try:
        # Get descriptors for all of the available DAQ devices.
        devices = get_daq_device_inventory(interface_type)
        number_of_devices = len(devices)
        # Verify at least one DAQ device is detected.
        if number_of_devices == 0:
            raise RuntimeError('Error: No DAQ devices found')
        print('Found', number_of_devices, 'DAQ device(s):')
        for i in range(number_of_devices):
            print(' [', i, '] ', devices[i].product_name, ' (',
                  devices[i].unique_id, ')', sep='')
        descriptor_index = input('\nPlease select a DAQ device, enter a number'
                                 + ' between 0 and '
                                 + str(number_of_devices - 1) + ': ')
        descriptor_index = int(descriptor_index)
        if descriptor_index not in range(number_of_devices):
            raise RuntimeError('Error: Invalid descriptor index')
        # Create the DAQ device from the descriptor at the specified index.
        daq_device = DaqDevice(devices[descriptor_index])
        tmr_device = daq_device.get_tmr_device()
        # Verify the specified DAQ device supports timers.
        if tmr_device is None:
            raise RuntimeError('Error: The DAQ device does not support timers')
        # Establish a connection to the device.
        descriptor = daq_device.get_descriptor()
        print('\nConnecting to', descriptor.dev_string, '- please wait...')
        # For Ethernet devices using a connection_code other than the default
        # value of zero, change the line below to enter the desired code.
        daq_device.connect(connection_code=0)
        print('\n', descriptor.dev_string, 'ready')
        print(' Function demonstrated: TmrDevice.pulse_out_start')
        print(' Timer:', timer_number)
        print(' Frequency:', frequency, 'Hz')
        print(' Duty cycle:', duty_cycle)
        print(' Initial delay:', initial_delay)
        try:
            input('\nHit ENTER to continue')
        except (NameError, SyntaxError):
            pass
        # Start the timer pulse output.  The device may coerce the
        # requested values; the actual values are returned.
        (frequency,
         duty_cycle,
         initial_delay) = tmr_device.pulse_out_start(timer_number, frequency,
                                                    duty_cycle, pulse_count,
                                                    initial_delay, idle_state,
                                                    options)
        system('clear')
        print('Please enter CTRL + C to terminate the process\n')
        print('Active DAQ device: ', descriptor.dev_string, ' (',
              descriptor.unique_id, ')\n', sep='')
        print(' Actual frequency:', frequency, 'Hz')
        print(' Actual duty cycle:', duty_cycle, 'Hz')
        print(' Actual initial delay:', initial_delay, 'Hz')
        try:
            print('\n Outputting {0:.6f} Hz pulse with duty cycle {1:.3f} '
                  'for timer {2:d}'.format(frequency, duty_cycle, timer_number))
            status = tmr_device.get_pulse_out_status(timer_number)
            count = 0
            if status == TmrStatus.RUNNING:
                # If the status is RUNNING, then this timer does support the
                # get_pulse_out_status() function so the status is checked to
                # determine if the pulse output is stopped due to an error.
                while status == TmrStatus.RUNNING:
                    status = tmr_device.get_pulse_out_status(timer_number)
                    print_status_dots(count)
                    count += 1
            else:
                # If the status is IDLE, then this timer does not support the
                # get_pulse_out_status() function so we will wait for user
                # input to stop the pulse output.
                while True:
                    print_status_dots(count)
                    count += 1
        except KeyboardInterrupt:
            pass
    except RuntimeError as error:
        print('\n', error)
    finally:
        # Always stop the timer and release the device, even after an
        # error or CTRL + C.
        if daq_device:
            # Stop the scan.
            if tmr_device:
                tmr_device.pulse_out_stop(timer_number)
                stdout.write(ERASE_LINE)
                print('\r Status:', TmrStatus.IDLE)
            # Disconnect from the DAQ device.
            if daq_device.is_connected():
                daq_device.disconnect()
            # Release the DAQ device resource.
            daq_device.release()
def print_status_dots(count):
    """Print a progress indicator while the timer output is running.

    Every sixth call clears the line and re-prints the RUNNING status;
    all other calls append a single dot.  Sleeps 0.5 s between updates.
    """
    if count % 6:
        print('.', end='')
    else:
        stdout.write(ERASE_LINE)
        print('\r ', TmrStatus.RUNNING, end='')
    stdout.flush()
    sleep(0.5)
# Run the example only when executed as a script.
if __name__ == '__main__':
    main()
|
[
"sys.stdout.write",
"os.system",
"uldaq.DaqDevice",
"uldaq.get_daq_device_inventory",
"time.sleep",
"sys.stdout.flush"
] |
[((6327, 6341), 'sys.stdout.flush', 'stdout.flush', ([], {}), '()\n', (6339, 6341), False, 'from sys import stdout\n'), ((6346, 6356), 'time.sleep', 'sleep', (['(0.5)'], {}), '(0.5)\n', (6351, 6356), False, 'from time import sleep\n'), ((1700, 1740), 'uldaq.get_daq_device_inventory', 'get_daq_device_inventory', (['interface_type'], {}), '(interface_type)\n', (1724, 1740), False, 'from uldaq import get_daq_device_inventory, DaqDevice, InterfaceType, TmrIdleState, PulseOutOption, TmrStatus\n'), ((2634, 2670), 'uldaq.DaqDevice', 'DaqDevice', (['devices[descriptor_index]'], {}), '(devices[descriptor_index])\n', (2643, 2670), False, 'from uldaq import get_daq_device_inventory, DaqDevice, InterfaceType, TmrIdleState, PulseOutOption, TmrStatus\n'), ((4078, 4093), 'os.system', 'system', (['"""clear"""'], {}), "('clear')\n", (4084, 4093), False, 'from os import system\n'), ((6211, 6235), 'sys.stdout.write', 'stdout.write', (['ERASE_LINE'], {}), '(ERASE_LINE)\n', (6223, 6235), False, 'from sys import stdout\n'), ((5795, 5819), 'sys.stdout.write', 'stdout.write', (['ERASE_LINE'], {}), '(ERASE_LINE)\n', (5807, 5819), False, 'from sys import stdout\n')]
|
import os
import redis
import json
from flask import Flask, request, render_template, send_from_directory
from reporter import Reporter
# Redis host comes from the environment; default to the "redis" service
# name when the variable is unset (os.getenv's default argument applies
# exactly when the variable is absent, matching the old `== None` check).
host = os.getenv("REDIS_HOST", "redis")
app = Flask(__name__)
# Reporter wraps the Redis connection shared by all views.
r = Reporter(host, 6379)
def build_cache():
    """Collect the current readings for every known sensor member.

    Returns a list of dicts with keys "name", "temp", "temp.baseline",
    "motion" and "temp.diff" (absolute temp/baseline difference rounded
    to 2 decimals).  Members whose stored values cannot be parsed as
    floats are skipped with a message instead of crashing the request.
    """
    cache = []
    for member in r.find_members():
        # The float() conversions are the operations that can actually
        # fail (missing key -> None -> TypeError, bad text -> ValueError);
        # the original try block did not cover them.
        try:
            item = {
                "name": member,
                "temp": float(r.get_key(member + ".temp")),
                "temp.baseline": float(r.get_key(member + ".temp.baseline")),
                "motion": float(r.get_key(member + ".motion")),
            }
            item["temp.diff"] = round(abs(item["temp"] - item["temp.baseline"]), 2)
            cache.append(item)
        except (TypeError, ValueError):
            print("oops " + member + " has bad data")
    return cache
@app.route('/json', methods=['GET'])
def home_json():
    """Serve the current sensor cache as a JSON document."""
    return json.dumps({"sensors": build_cache()})
@app.route('/nodes/', methods=['GET'])
def home():
    """Render the nodes overview page."""
    return render_template("nodes.html", hosts=build_cache())
@app.route('/js/<path:path>')
def send_js(path):
    """Serve static JavaScript assets from the js/ directory."""
    return send_from_directory('js', path)
@app.route('/', methods=['GET'])
def sensors():
    """Render the main sensors page."""
    return render_template("sensors.html")
if __name__ == '__main__':
    # Bind to all interfaces so the app is reachable from outside a container.
    print("0.0.0.0")
    app.run(debug=True, host='0.0.0.0')
|
[
"flask.Flask",
"json.dumps",
"reporter.Reporter",
"flask.render_template",
"flask.send_from_directory",
"os.getenv"
] |
[((145, 168), 'os.getenv', 'os.getenv', (['"""REDIS_HOST"""'], {}), "('REDIS_HOST')\n", (154, 168), False, 'import os\n'), ((213, 228), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (218, 228), False, 'from flask import Flask, request, render_template, send_from_directory\n'), ((233, 253), 'reporter.Reporter', 'Reporter', (['host', '(6379)'], {}), '(host, 6379)\n', (241, 253), False, 'from reporter import Reporter\n'), ((927, 957), 'json.dumps', 'json.dumps', (["{'sensors': cache}"], {}), "({'sensors': cache})\n", (937, 957), False, 'import json\n'), ((1047, 1089), 'flask.render_template', 'render_template', (['"""nodes.html"""'], {'hosts': 'hosts'}), "('nodes.html', hosts=hosts)\n", (1062, 1089), False, 'from flask import Flask, request, render_template, send_from_directory\n'), ((1151, 1182), 'flask.send_from_directory', 'send_from_directory', (['"""js"""', 'path'], {}), "('js', path)\n", (1170, 1182), False, 'from flask import Flask, request, render_template, send_from_directory\n'), ((1243, 1274), 'flask.render_template', 'render_template', (['"""sensors.html"""'], {}), "('sensors.html')\n", (1258, 1274), False, 'from flask import Flask, request, render_template, send_from_directory\n')]
|
import logging
import numpy as np
import pytest
import xskillscore as xs
from climpred.exceptions import CoordinateError
from climpred.prediction import compute_hindcast
def test_same_inits_initializations(
    hind_ds_initialized_1d_cftime, reconstruction_ds_1d_cftime, caplog
):
    """Tests that inits are identical at all leads for `same_inits` alignment."""
    with caplog.at_level(logging.INFO):
        compute_hindcast(
            hind_ds_initialized_1d_cftime,
            reconstruction_ds_1d_cftime,
            alignment="same_inits",
        )
        # Records 0-1 are metadata; every later record should report the
        # same, full initialization window regardless of lead.
        for i, record in enumerate(caplog.record_tuples):
            if i >= 2:
                print(record)
                assert "inits: 1954-01-01 00:00:00-2007-01-01 00:00:00" in record[2]
def test_same_inits_verification_dates(
    hind_ds_initialized_1d_cftime, reconstruction_ds_1d_cftime, caplog
):
    """Tests that appropriate verifs are being used at each lead for `same_inits`
    alignment."""
    with caplog.at_level(logging.INFO):
        FIRST_INIT, LAST_INIT = 1954, 2007
        compute_hindcast(
            hind_ds_initialized_1d_cftime,
            reconstruction_ds_1d_cftime,
            alignment="same_inits",
        )
        nleads = hind_ds_initialized_1d_cftime["lead"].size
        # Skip the first two records (metadata); for later records the
        # verification window shifts forward with the loop index i.
        for i, record in zip(
            np.arange(nleads + 2),
            caplog.record_tuples,
        ):
            if i >= 2:
                print(record)
                assert (
                    f"verifs: {FIRST_INIT+i}-01-01 00:00:00-{LAST_INIT+i}-01-01"
                    in record[2]
                )
@pytest.mark.parametrize("alignment", ["same_inits", "same_verifs"])
def test_disjoint_verif_time(small_initialized_da, small_verif_da, alignment):
    """Alignment copes with gaps (non-continuous time sampling) in the
    verification data."""
    initialized = small_initialized_da
    observations = small_verif_da.drop_sel(time=1992)
    result = compute_hindcast(
        initialized, observations, alignment=alignment, metric="mse"
    )
    assert result.notnull().all()
    # hindcast inits: [1990, 1991, 1992, 1993]
    # verif times: [1990, 1991, 1993, 1994]
    forecast = initialized.sel(init=[1990, 1992, 1993]).rename({"init": "time"})
    target = observations.sel(time=[1991, 1993, 1994])
    forecast["time"] = target["time"]
    assert result == xs.mse(forecast, target, "time")
@pytest.mark.parametrize("alignment", ["same_inits", "same_verifs"])
def test_disjoint_inits(small_initialized_da, small_verif_da, alignment):
    """Alignment copes with gaps (non-continuous initializations) in the
    initialized data."""
    initialized = small_initialized_da.drop_sel(init=1991)
    observations = small_verif_da
    result = compute_hindcast(
        initialized, observations, alignment=alignment, metric="mse"
    )
    assert result.notnull().all()
    # hindcast inits: [1990, 1992, 1993]
    # verif times: [1990, 1991, 1992, 1993, 1994]
    forecast = initialized.rename({"init": "time"})
    target = observations.sel(time=[1991, 1993, 1994])
    forecast["time"] = target["time"]
    assert result == xs.mse(forecast, target, "time")
def test_same_verifs_verification_dates(
    hind_ds_initialized_1d_cftime, reconstruction_ds_1d_cftime, caplog
):
    """Tests that verifs are identical at all leads for `same_verifs` alignment."""
    with caplog.at_level(logging.INFO):
        compute_hindcast(
            hind_ds_initialized_1d_cftime,
            reconstruction_ds_1d_cftime,
            alignment="same_verifs",
        )
        # Records 0-1 are metadata; every later record should report the
        # same, common verification window regardless of lead.
        for i, record in enumerate(caplog.record_tuples):
            if i >= 2:
                print(record)
                assert "verifs: 1964-01-01 00:00:00-2017-01-01 00:00:00" in record[2]
def test_same_verifs_initializations(
    hind_ds_initialized_1d_cftime, reconstruction_ds_1d_cftime, caplog
):
    """Tests that appropriate inits are being used at each lead for `same_verifs`
    alignment."""
    with caplog.at_level(logging.INFO):
        FIRST_INIT, LAST_INIT = 1964, 2017
        compute_hindcast(
            hind_ds_initialized_1d_cftime,
            reconstruction_ds_1d_cftime,
            alignment="same_verifs",
        )
        nleads = hind_ds_initialized_1d_cftime["lead"].size
        # Skip the first two records (metadata); for later records the
        # init window shifts backward with the loop index i so that all
        # leads verify against the same dates.
        for i, record in zip(
            np.arange(nleads + 2),
            caplog.record_tuples,
        ):
            if i >= 2:
                print(record)
                assert (
                    f"inits: {FIRST_INIT-i}-01-01 00:00:00-{LAST_INIT-i}-01-01 00:00:00"
                    in record[2]
                )
def test_same_verifs_raises_error_when_not_possible(
    hind_ds_initialized_1d_cftime, reconstruction_ds_1d_cftime
):
    """A CoordinateError is raised when no common set of verification dates
    exists for the supplied initializations."""
    sparse_hind = hind_ds_initialized_1d_cftime.isel(
        lead=slice(0, 3), init=[1, 3, 5, 7, 9]
    )
    with pytest.raises(CoordinateError):
        compute_hindcast(
            sparse_hind, reconstruction_ds_1d_cftime, alignment="same_verifs"
        )
def test_maximize_alignment_inits(
    hind_ds_initialized_1d_cftime, reconstruction_ds_1d_cftime, caplog
):
    """Tests that appropriate inits are selected for `maximize` alignment."""
    with caplog.at_level(logging.INFO):
        compute_hindcast(
            hind_ds_initialized_1d_cftime,
            reconstruction_ds_1d_cftime,
            alignment="maximize",
        )
        # Add dummy values for the first two lines since they are just metadata.
        for i, record in zip(
            np.concatenate(([0, 0], hind_ds_initialized_1d_cftime.lead.values)),
            caplog.record_tuples,
        ):
            if i >= 1:
                print(record)
                # Longer leads drop one year off the end of the init window.
                assert (
                    f"inits: 1954-01-01 00:00:00-{2016-i}-01-01 00:00:00" in record[2]
                )
def test_maximize_alignment_verifs(
    hind_ds_initialized_1d_cftime, reconstruction_ds_1d_cftime, caplog
):
    """Tests that appropriate verifs are selected for `maximize` alignment."""
    with caplog.at_level(logging.INFO):
        compute_hindcast(
            hind_ds_initialized_1d_cftime,
            reconstruction_ds_1d_cftime,
            alignment="maximize",
        )
        # Pad two zeros so lead values line up with the log records; the first
        # two records are metadata and are skipped by the i >= 1 guard.
        padded_leads = np.concatenate(
            ([0, 0], hind_ds_initialized_1d_cftime.lead.values)
        )
        for i, rec in zip(padded_leads, caplog.record_tuples):
            if i >= 1:
                print(rec)
                expected = f"verifs: {1955 + i}-01-01 00:00:00-2017-01-01 00:00:00"
                assert expected in rec[2]
|
[
"climpred.prediction.compute_hindcast",
"pytest.raises",
"numpy.arange",
"xskillscore.mse",
"pytest.mark.parametrize",
"numpy.concatenate"
] |
[((1597, 1664), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""alignment"""', "['same_inits', 'same_verifs']"], {}), "('alignment', ['same_inits', 'same_verifs'])\n", (1620, 1664), False, 'import pytest\n'), ((2371, 2438), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""alignment"""', "['same_inits', 'same_verifs']"], {}), "('alignment', ['same_inits', 'same_verifs'])\n", (2394, 2438), False, 'import pytest\n'), ((1976, 2040), 'climpred.prediction.compute_hindcast', 'compute_hindcast', (['hind', 'verif'], {'alignment': 'alignment', 'metric': '"""mse"""'}), "(hind, verif, alignment=alignment, metric='mse')\n", (1992, 2040), False, 'from climpred.prediction import compute_hindcast\n'), ((2317, 2337), 'xskillscore.mse', 'xs.mse', (['a', 'b', '"""time"""'], {}), "(a, b, 'time')\n", (2323, 2337), True, 'import xskillscore as xs\n'), ((2742, 2806), 'climpred.prediction.compute_hindcast', 'compute_hindcast', (['hind', 'verif'], {'alignment': 'alignment', 'metric': '"""mse"""'}), "(hind, verif, alignment=alignment, metric='mse')\n", (2758, 2806), False, 'from climpred.prediction import compute_hindcast\n'), ((3054, 3074), 'xskillscore.mse', 'xs.mse', (['a', 'b', '"""time"""'], {}), "(a, b, 'time')\n", (3060, 3074), True, 'import xskillscore as xs\n'), ((415, 519), 'climpred.prediction.compute_hindcast', 'compute_hindcast', (['hind_ds_initialized_1d_cftime', 'reconstruction_ds_1d_cftime'], {'alignment': '"""same_inits"""'}), "(hind_ds_initialized_1d_cftime, reconstruction_ds_1d_cftime,\n alignment='same_inits')\n", (431, 519), False, 'from climpred.prediction import compute_hindcast\n'), ((1066, 1170), 'climpred.prediction.compute_hindcast', 'compute_hindcast', (['hind_ds_initialized_1d_cftime', 'reconstruction_ds_1d_cftime'], {'alignment': '"""same_inits"""'}), "(hind_ds_initialized_1d_cftime, reconstruction_ds_1d_cftime,\n alignment='same_inits')\n", (1082, 1170), False, 'from climpred.prediction import compute_hindcast\n'), ((3354, 3459), 
'climpred.prediction.compute_hindcast', 'compute_hindcast', (['hind_ds_initialized_1d_cftime', 'reconstruction_ds_1d_cftime'], {'alignment': '"""same_verifs"""'}), "(hind_ds_initialized_1d_cftime, reconstruction_ds_1d_cftime,\n alignment='same_verifs')\n", (3370, 3459), False, 'from climpred.prediction import compute_hindcast\n'), ((4005, 4110), 'climpred.prediction.compute_hindcast', 'compute_hindcast', (['hind_ds_initialized_1d_cftime', 'reconstruction_ds_1d_cftime'], {'alignment': '"""same_verifs"""'}), "(hind_ds_initialized_1d_cftime, reconstruction_ds_1d_cftime,\n alignment='same_verifs')\n", (4021, 4110), False, 'from climpred.prediction import compute_hindcast\n'), ((4902, 4932), 'pytest.raises', 'pytest.raises', (['CoordinateError'], {}), '(CoordinateError)\n', (4915, 4932), False, 'import pytest\n'), ((4942, 5018), 'climpred.prediction.compute_hindcast', 'compute_hindcast', (['hind', 'reconstruction_ds_1d_cftime'], {'alignment': '"""same_verifs"""'}), "(hind, reconstruction_ds_1d_cftime, alignment='same_verifs')\n", (4958, 5018), False, 'from climpred.prediction import compute_hindcast\n'), ((5256, 5358), 'climpred.prediction.compute_hindcast', 'compute_hindcast', (['hind_ds_initialized_1d_cftime', 'reconstruction_ds_1d_cftime'], {'alignment': '"""maximize"""'}), "(hind_ds_initialized_1d_cftime, reconstruction_ds_1d_cftime,\n alignment='maximize')\n", (5272, 5358), False, 'from climpred.prediction import compute_hindcast\n'), ((6061, 6163), 'climpred.prediction.compute_hindcast', 'compute_hindcast', (['hind_ds_initialized_1d_cftime', 'reconstruction_ds_1d_cftime'], {'alignment': '"""maximize"""'}), "(hind_ds_initialized_1d_cftime, reconstruction_ds_1d_cftime,\n alignment='maximize')\n", (6077, 6163), False, 'from climpred.prediction import compute_hindcast\n'), ((1316, 1337), 'numpy.arange', 'np.arange', (['(nleads + 2)'], {}), '(nleads + 2)\n', (1325, 1337), True, 'import numpy as np\n'), ((4256, 4277), 'numpy.arange', 'np.arange', (['(nleads + 2)'], {}), 
'(nleads + 2)\n', (4265, 4277), True, 'import numpy as np\n'), ((5525, 5592), 'numpy.concatenate', 'np.concatenate', (['([0, 0], hind_ds_initialized_1d_cftime.lead.values)'], {}), '(([0, 0], hind_ds_initialized_1d_cftime.lead.values))\n', (5539, 5592), True, 'import numpy as np\n'), ((6330, 6397), 'numpy.concatenate', 'np.concatenate', (['([0, 0], hind_ds_initialized_1d_cftime.lead.values)'], {}), '(([0, 0], hind_ds_initialized_1d_cftime.lead.values))\n', (6344, 6397), True, 'import numpy as np\n')]
|
'''
Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
SPDX-License-Identifier: MIT-0
'''
import json
import urllib.request
import os
import time
from neptune_python_utils.endpoints import Endpoints
class BulkLoad:
    """Submits a Neptune bulk-load job for an S3 source and tracks it."""

    def __init__(self, source, format='csv', role=None, region=None, endpoints=None):
        """Configure a bulk load.

        `role` and `region` fall back to the NEPTUNE_LOAD_FROM_S3_ROLE_ARN and
        AWS_REGION environment variables when not given; `endpoints` defaults
        to a fresh Endpoints() instance.
        """
        self.source = source
        self.format = format
        if role is not None:
            self.role = role
        else:
            assert 'NEPTUNE_LOAD_FROM_S3_ROLE_ARN' in os.environ, 'role is missing.'
            self.role = os.environ['NEPTUNE_LOAD_FROM_S3_ROLE_ARN']
        if region is not None:
            self.region = region
        else:
            assert 'AWS_REGION' in os.environ, 'region is missing.'
            self.region = os.environ['AWS_REGION']
        self.endpoints = endpoints if endpoints is not None else Endpoints()

    def __load_from(self, source, format, role, region):
        """Build the JSON payload for the Neptune loader endpoint."""
        return {
            'source' : source,
            'format' : format,
            'iamRoleArn' : role,
            'region' : region,
            'failOnError' : 'FALSE'
        }

    def __load(self, loader_url, data):
        """POST the load request and return the load id assigned by Neptune."""
        body = json.dumps(data).encode('utf8')
        request = urllib.request.Request(loader_url, data=body, headers={'Content-Type': 'application/json'})
        response = urllib.request.urlopen(request)
        parsed = json.loads(response.read().decode('utf8'))
        return parsed['payload']['loadId']

    def load_async(self):
        """Kick off a bulk load and return a BulkLoadStatus for polling."""
        localised_source = self.source.replace('${AWS_REGION}', self.region)
        loader_url = self.endpoints.loader_endpoint()
        json_payload = self.__load_from(localised_source, self.format, self.role, self.region)
        # Echo an equivalent curl command for reproducibility/debugging.
        print('''curl -X POST \\
            -H 'Content-Type: application/json' \\
            {} -d \'{}\''''.format(loader_url, json.dumps(json_payload, indent=4)))
        load_id = self.__load(loader_url, json_payload)
        return BulkLoadStatus(self.endpoints.load_status_endpoint(load_id))

    def load(self, interval=2):
        """Start a bulk load and block, polling every `interval` seconds."""
        status = self.load_async()
        print('status_uri: {}'.format(status.uri()))
        status.wait(interval)
class BulkLoadStatus:
    """Polls the Neptune loader status endpoint for a single load job."""

    def __init__(self, status_uri):
        self.status_uri = status_uri

    def uri(self):
        """Return the status endpoint URI for this load job."""
        return self.status_uri

    def status(self):
        """Fetch the current status; returns (status string, full JSON payload)."""
        request = urllib.request.Request(self.status_uri)
        response = urllib.request.urlopen(request)
        details = json.loads(response.read().decode('utf8'))
        return details['payload']['overallStatus']['status'], details

    def wait(self, interval=2):
        """Block until the load completes, polling every `interval` seconds.

        Raises an Exception carrying the full status payload for any state
        other than LOAD_COMPLETED or LOAD_IN_PROGRESS.
        """
        while True:
            current, details = self.status()
            if current == 'LOAD_COMPLETED':
                print('load completed')
                return
            if current == 'LOAD_IN_PROGRESS':
                inserted = details['payload']['overallStatus']['totalRecords']
                print('loading... {} records inserted'.format(inserted))
                time.sleep(interval)
            else:
                raise Exception(details)
|
[
"neptune_python_utils.endpoints.Endpoints",
"json.dumps",
"time.sleep"
] |
[((904, 915), 'neptune_python_utils.endpoints.Endpoints', 'Endpoints', ([], {}), '()\n', (913, 915), False, 'from neptune_python_utils.endpoints import Endpoints\n'), ((1323, 1339), 'json.dumps', 'json.dumps', (['data'], {}), '(data)\n', (1333, 1339), False, 'import json\n'), ((2006, 2040), 'json.dumps', 'json.dumps', (['json_payload'], {'indent': '(4)'}), '(json_payload, indent=4)\n', (2016, 2040), False, 'import json\n'), ((3183, 3203), 'time.sleep', 'time.sleep', (['interval'], {}), '(interval)\n', (3193, 3203), False, 'import time\n')]
|
#!/usr/bin/env python
"""
Plot signal heatmaps from TFBS across different bigwigs
@author: <NAME>
@contact: mette.bentsen (at) mpi-bn.mpg.de
@license: MIT
"""
import os
import sys
import argparse
import logging
import numpy as np
import matplotlib as mpl
mpl.use("Agg") #non-interactive backend
import matplotlib.pyplot as plt
from mpl_toolkits.axes_grid1.inset_locator import inset_axes
import matplotlib.gridspec as gridspec
from datetime import datetime
from sklearn import preprocessing
import pyBigWig
import pysam
import pybedtools as pb
from tobias.parsers import add_heatmap_arguments
from tobias.utils.regions import *
from tobias.utils.utilities import *
#----------------------------------------------------------------------------------------#
def run_heatmap(args):
	"""Plot per-site bigwig signal heatmaps across TFBS region sets.

	Builds a figure grid with one column per bigwig signal (plus optional
	extra columns of numeric scores read from the bed files) and one row per
	TFBS bed file. Each cell is a per-site heatmap; a mean aggregate profile
	is drawn above each signal column, and the figure is saved to args.output.

	Parameters:
		args (argparse.Namespace): parsed PlotHeatmap arguments; must include
			'TFBS' (list of lists of bed files) and 'signals' (bigwig files).
	"""
	#Start logger
	logger = TobiasLogger("PlotHeatmap", args.verbosity)
	logger.begin()
	parser = add_heatmap_arguments(argparse.ArgumentParser())
	logger.arguments_overview(parser, args)
	logger.output_files([args.output])
	check_required(args, ["TFBS", "signals"])
	#Setup TFBS names if not yet given: default labels are the bed basenames
	if args.TFBS_labels == None:
		args.TFBS_labels = [[os.path.basename(fil) for fil in args.TFBS[i]] for i in range(len(args.TFBS))]
	if args.signal_labels == None:
		args.signal_labels = [os.path.basename(fil) for fil in args.signals]
	########################################################
	#Check valid input parameters (number of input TFBS vs. bigwig etc.)
	no_signals = len(args.signals)
	no_columns = len(args.show_columns)
	no_TFBS_col = len(args.TFBS)
	if no_TFBS_col > 1 and len(args.show_columns) > 0:
		sys.exit("Error: option --show_columns is not available for multiple --TFBS inputs.")
	if no_TFBS_col > 1 and no_signals != no_TFBS_col:
		sys.exit("Error: Number of --TFBS does not match number of signals")
	elif no_TFBS_col == 1 and no_signals > 1:
		#copy bed_f to other columns: the single TFBS set is reused for every bigwig
		logger.info("Using bedfiles: {0} across all bigwigs".format(args.TFBS))
		for i in range(no_signals-1):
			args.TFBS.append(args.TFBS[0])
			args.TFBS_labels.append(args.TFBS_labels[0])
	else:
		for i, signal in enumerate(args.signals):
			logger.info("Using {0} with signal from {1}".format(args.TFBS[i], signal))
	#todo: logger overview of bedfiles per column?
	######################################################################################
	##################################### INPUT DATA #####################################
	######################################################################################
	#Setup info dict: heatmap_info[col][row] describes one grid cell (bigwig + bed)
	heatmap_info = {col:{row:{"bigwig_f": args.signals[col], "bed_f":args.TFBS[col][row]} for row in range(len(args.TFBS[col]))} for col in range(len(args.signals))}
	#Add extra columns (numeric bed columns shown as narrow score heatmaps)
	for i, bed_column in enumerate(args.show_columns):
		heatmap_info[no_signals+i] = {row:{"column": bed_column, "bed_f":args.TFBS[0][row]} for row in range(len(args.TFBS[0]))}
	#------------------------------------------------------------------------------------#
	#------------------------ Read input files to RegionLists ---------------------------#
	#------------------------------------------------------------------------------------#
	seen_bed = []
	#Read regions per heatmap in grid
	logger.comment("")
	logger.info("Reading bedfiles")
	for col in range(len(heatmap_info)):
		for row in range(len(heatmap_info[col])):
			heatmap_info[col][row]["regions"] = RegionList().from_bed(heatmap_info[col][row]["bed_f"])
			#Estimate region width (most common width is used if regions differ)
			distri = heatmap_info[col][row]["regions"].get_width_distri()
			if len(distri) > 1:
				logger.warning("Input regions have differing lengths: {0}".format(distri))
			heatmap_info[col][row]["width"] = list(distri.keys())[0]
			#Extend to flank: every region becomes 2*flank bp wide around its center
			heatmap_info[col][row]["regions"] = heatmap_info[col][row]["regions"].apply_method(OneRegion.set_width, 2*args.flank)
			#Sort if chosen: numeric sort preferred, falling back to lexicographic
			if args.sort_by != None:
				try:
					heatmap_info[col][row]["regions"].sort(key=lambda region: float(region[args.sort_by]), reverse=True)
				except:
					heatmap_info[col][row]["regions"].sort(key=lambda region: region[args.sort_by], reverse=True)
			#Get scores from file for the extra --show_columns; drop non-numeric columns
			invalid = []
			for i, bed_column in enumerate(args.show_columns):
				heatmap_info[no_signals+i][row]["column_{0}".format(bed_column)] = [region[bed_column] for region in heatmap_info[col][row]["regions"]]
				try:
					heatmap_info[no_signals+i][row]["column_{0}".format(bed_column)] = [float(element) for element in heatmap_info[no_signals+i][row]["column_{0}".format(bed_column)]]
				except:
					logger.info("Column {0} cannot be converted to float - excluding".format(bed_column))
					del heatmap_info[no_signals+i][row]["column_{0}".format(bed_column)]
					invalid.append(bed_column)
			for bed_column in invalid:
				args.show_columns.remove(bed_column)
			#Logger info about bedfile (only logged once per distinct bed file)
			if heatmap_info[col][row]["bed_f"] not in seen_bed:
				logger.info("- Read {1} sites from {0} of width {2}".format(heatmap_info[col][row]["bed_f"], len(heatmap_info[col][row]["regions"]), heatmap_info[col][row]["width"]))
				seen_bed.append(heatmap_info[col][row]["bed_f"])
	#------------------------------------------------------------------------------------#
	#------------------------------ Signals from all sites ------------------------------#
	#------------------------------------------------------------------------------------#
	logger.comment("")
	logger.info("Reading signals from bigwigs")
	for col in range(len(args.TFBS)):
		bigwig_f = heatmap_info[col][0]["bigwig_f"] #bigwig is the same for all rows, therefore row == 0
		pybw = pyBigWig.open(bigwig_f, "rb")
		for row in heatmap_info[col]:
			logger.info("- Reading {0} from {1}".format(heatmap_info[col][row]["bed_f"], bigwig_f))
			if len(heatmap_info[col][row]["regions"]) > 0:
				# signal_mat: one row per region, one column per bp; aggregate is its column-wise mean
				heatmap_info[col][row]["signal_mat"] = np.array([region.get_signal(pybw) for region in heatmap_info[col][row]["regions"]])
				heatmap_info[col][row]["aggregate"] = np.mean(heatmap_info[col][row]["signal_mat"], axis=0)
			else:
				heatmap_info[col][row]["signal_mat"] = None
				heatmap_info[col][row]["aggregate"] = None
		pybw.close()
	logger.comment("")
	#------------------------------------------------------------------------------------#
	#---------------------------------- Colorbar min/max --------------------------------#
	#------------------------------------------------------------------------------------#
	#Estimate min/max from all matrices (1st/99th percentile to clip outliers)
	if args.share_colorbar == True:
		mats = []
		for col, bigwig in enumerate(args.signals):
			for row in heatmap_info[col]:
				if heatmap_info[col][row]["signal_mat"] is not None:
					mats.append(heatmap_info[col][row]["signal_mat"])
		vmin, vmax = (0,0)
		if len(mats) > 0:
			joined = np.vstack(mats)
			vmin, vmax = np.percentile(joined, [1, 99])
		#Set vmin/vmax for all plots
		for col, bigwig in enumerate(args.signals):
			for row in heatmap_info[col]:
				heatmap_info[col][row].update({"vmin":vmin, "vmax":vmax})
	# Estimate min/max for each bigwig
	else:
		for col, bigwig in enumerate(args.signals):
			mats = [heatmap_info[col][row]["signal_mat"] for row in heatmap_info[col] if heatmap_info[col][row]["signal_mat"] is not None]
			vmin, vmax = (0,0)
			if len(mats) > 0:
				joined = np.vstack(mats)
				vmin, vmax = np.percentile(joined, [1, 99])
			for row in heatmap_info[col]:
				heatmap_info[col][row].update({"vmin":vmin, "vmax":vmax})
	# NOTE(review): 'joined' is only bound when at least one signal matrix
	# exists; with all-empty beds these dels would raise NameError — confirm.
	del mats
	del joined
	# Estimate min/max for extra columns
	for i, name in enumerate(args.show_columns):
		col = no_signals + i
		glob_values = []
		for row in range(len(args.TFBS[0])):
			glob_values.extend(heatmap_info[col][row]["column_{0}".format(name)])
		vmin, vmax = np.percentile(glob_values, [1, 99])
		for row in range(len(args.TFBS[0])):
			heatmap_info[col][row]["vmin"] = vmin
			heatmap_info[col][row]["vmax"] = vmax
		del glob_values
	######################################################################################
	##################################### PLOTTING #######################################
	######################################################################################
	#------------------------------------------------------------------------------------#
	#------------------------------------ Set up plots ----------------------------------#
	#------------------------------------------------------------------------------------#
	logger.info("Setting up plotting grid")
	total_columns = no_signals + no_columns
	xvals = np.arange(-args.flank, args.flank)
	fig = plt.figure(figsize = (no_signals*5, 5*5))
	h_ratios = [2,10,0.1]
	w_ratios = [1]*no_signals + [0.1]*no_columns
	gs = gridspec.GridSpec(3, total_columns, height_ratios=h_ratios, width_ratios=w_ratios, hspace=0.1, wspace=0.3) #aggregate + heatmaps (with sub heatmaps) + colorbar
	#Setup axarr fitting to grid
	axdict = {col:{row:"ax" for row in ["aggregate"] + list(heatmap_info[col]) + ["colorbar"]} for col in range(no_signals)}
	axdict.update({col:{row:"ax" for row in ["aggregate"] + list(heatmap_info[col]) + ["colorbar"]} for col in range(no_signals, no_signals+no_columns)})
	#Per signal column
	xvals = np.arange(-args.flank, args.flank)
	for col in range(no_signals):
		#Aggregates
		axdict[col]["aggregate"] = fig.add_subplot(gs[0,col])
		axdict[col]["aggregate"].set_xlim(left=-args.flank, right=args.flank)
		axdict[col]["aggregate"].set_xlabel('bp from center')
		axdict[col]["aggregate"].set_ylabel('Mean aggregate signal')
		axdict[col]["aggregate"].set_title("{0}".format(args.signal_labels[col]))
		#Heatmaps: one sub-row per bed file, height proportional to region count
		no_beds = len(args.TFBS[col])
		h_ratios = [len(heatmap_info[col][row]["regions"]) for row in heatmap_info[col]]
		h_ratios = [max(num,1) for num in h_ratios] #deal with empty beds
		gs_sub = gridspec.GridSpecFromSubplotSpec(no_beds, 1, subplot_spec=gs[1,col], height_ratios=h_ratios, hspace=0.05)
		for row in range(no_beds):
			axdict[col][row] = plt.Subplot(fig, gs_sub[row,0])
			fig.add_subplot(axdict[col][row])
			#Appearance
			plt.setp(axdict[col][row].get_yticklabels(), visible=False) #Hide y-axis ticks
			plt.setp(axdict[col][row].get_xticklabels(), visible=False) #Hide x-axis ticks
			axdict[col][row].tick_params(direction="in")
			axdict[col][row].set_ylabel("{0} ({1})".format(args.TFBS_labels[col][row], len(heatmap_info[col][row]["regions"])))
			#Last row
			if row == no_beds-1:
				axdict[col][row].set_xlabel('bp from center')
		#Colorbar
		axdict[col]["colorbar"] = fig.add_subplot(gs[2,col]) #row number 3
	# Extra-score columns reuse no_beds/h_ratios from the last signal column above
	for col in range(no_signals, no_signals + no_columns):
		gs_sub = gridspec.GridSpecFromSubplotSpec(no_beds, 1, subplot_spec=gs[1,col], height_ratios=h_ratios, hspace=0.05)
		for row in range(no_beds):
			axdict[col][row] = plt.Subplot(fig, gs_sub[row,0])
			plt.setp(axdict[col][row].get_yticklabels(), visible=False) #Hide y-axis ticks
			plt.setp(axdict[col][row].get_xticklabels(), visible=False) #Hide x-axis ticks
			axdict[col][row].tick_params(direction="in")
			fig.add_subplot(axdict[col][row])
	#------------------------------------------------------------------------------------#
	#--------------------------------- Fill in plots ------------------------------------#
	#------------------------------------------------------------------------------------#
	logger.info("Filling in grid")
	#Colormaps
	for col, bigwig in enumerate(args.signals):
		colors = mpl.cm.jet(np.linspace(0, 1, len(heatmap_info[col]))) #colors for aggregate plots
		for row in heatmap_info[col]:
			if heatmap_info[col][row]["signal_mat"] is not None:
				#Aggregate
				axdict[col]["aggregate"].plot(xvals, heatmap_info[col][row]["aggregate"], color=colors[row], linewidth=2, label=args.TFBS_labels[col][row])
				#Heatmap: symmetric color scale around 0 so the diverging cmap is centered
				lim = np.max([np.abs(heatmap_info[col][row]["vmin"]),np.abs(heatmap_info[col][row]["vmax"])])
				heatmap_info[col][row]["vmin"] = -lim
				heatmap_info[col][row]["vmax"] = lim
				heatmap = axdict[col][row].imshow(heatmap_info[col][row]["signal_mat"], aspect="auto", cmap="seismic", norm=mpl.colors.Normalize(vmin=heatmap_info[col][row]["vmin"], vmax=heatmap_info[col][row]["vmax"]))
				#Insert colorbar (inserted multiple times for each bigwig, but since it is shared for the same bigwig, it doesn't matter)
				fig.colorbar(heatmap, cax=axdict[col]["colorbar"], orientation="horizontal")
	#Extra columns w/ scores from bed
	for i, col in enumerate(range(no_signals, no_signals + no_columns)):
		bed_column = args.show_columns[i]
		for row in heatmap_info[col]:
			values = np.array(heatmap_info[col][row]["column_{0}".format(bed_column)])
			values = values.reshape(-1,1)
			vmin, vmax = np.percentile(values, [1, 99])
			lim = np.max([abs(vmin), abs(vmax)])
			axdict[col][row].imshow(values, aspect="auto", cmap="seismic", norm=mpl.colors.Normalize(vmin=-lim, vmax=lim))
	#------------------------------------------------------------------------------------#
	#-------------------------------- Plot decorations ----------------------------------#
	#------------------------------------------------------------------------------------#
	# Draw dashed lines at the original motif boundaries within the flanked window
	if args.plot_boundaries:
		for col in heatmap_info:
			motif_len = heatmap_info[col][0]["width"]
			mstart = int(-np.floor(motif_len/2.0))
			mend = int(np.ceil(motif_len/2.0))
			axdict[col]["aggregate"].axvline(mstart, color="black", linestyle="dashed", linewidth=1)
			axdict[col]["aggregate"].axvline(mend, color="black", linestyle="dashed", linewidth=1)
			for row in heatmap_info[col]:
				motif_len = heatmap_info[col][row]["width"]
				mstart = int(-np.floor(motif_len/2.0))
				mend = int(np.ceil(motif_len/2.0))
				axdict[col][row].axvline(mstart+args.flank, color="black", linestyle="dashed", linewidth=1)
				axdict[col][row].axvline(mend+args.flank, color="black", linestyle="dashed", linewidth=1)
	#Add legend to aggregate plots
	for col in range(len(args.signals)):
		axdict[col]["aggregate"].legend(loc=1, prop={"size":6})
	# With a shared colorbar, also share the y-range across aggregate plots
	if args.share_colorbar == True:
		ymin = min([axdict[col]["aggregate"].get_ylim()[0] for col in range(no_signals)])
		ymax = max([axdict[col]["aggregate"].get_ylim()[1] for col in range(no_signals)])
		for col in range(no_signals):
			axdict[col]["aggregate"].set_ylim([ymin, ymax])
	#------------------------------------------------------------------------------------#
	#----------------------------- Finish off and output --------------------------------#
	#------------------------------------------------------------------------------------#
	"""
	#For each heatmap
	for row in [1,2]:
		plt.setp(axarr[row].get_yticklabels(), visible=False) #Hide y-axis ticks
		plt.setp(axarr[row].get_xticklabels(), visible=False) #Hide x-axis ticks
		axarr[row].tick_params(direction="in")
	"""
	plt.subplots_adjust(top=0.95)
	plt.suptitle(args.title, fontsize=25)
	logger.info("Writing output file")
	plt.savefig(args.output, bbox_inches='tight')
	plt.close()
	logger.end()
#--------------------------------------------------------------------------------------------------------#
if __name__ == '__main__':
	# Build the PlotHeatmap argument parser and dispatch to run_heatmap.
	cli_parser = add_heatmap_arguments(argparse.ArgumentParser())
	cli_args = cli_parser.parse_args()
	# Without any command-line arguments, show usage and exit instead of running.
	if not sys.argv[1:]:
		cli_parser.print_help()
		sys.exit()
	run_heatmap(cli_args)
|
[
"numpy.abs",
"argparse.ArgumentParser",
"matplotlib.pyplot.suptitle",
"numpy.floor",
"matplotlib.pyplot.figure",
"numpy.mean",
"numpy.arange",
"tobias.parsers.add_heatmap_arguments",
"matplotlib.pyplot.Subplot",
"matplotlib.colors.Normalize",
"matplotlib.pyplot.close",
"numpy.ceil",
"os.path.basename",
"numpy.percentile",
"matplotlib.use",
"matplotlib.pyplot.subplots_adjust",
"numpy.vstack",
"sys.exit",
"pyBigWig.open",
"matplotlib.gridspec.GridSpec",
"matplotlib.gridspec.GridSpecFromSubplotSpec",
"matplotlib.pyplot.savefig"
] |
[((259, 273), 'matplotlib.use', 'mpl.use', (['"""Agg"""'], {}), "('Agg')\n", (266, 273), True, 'import matplotlib as mpl\n'), ((8567, 8601), 'numpy.arange', 'np.arange', (['(-args.flank)', 'args.flank'], {}), '(-args.flank, args.flank)\n', (8576, 8601), True, 'import numpy as np\n'), ((8610, 8653), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(no_signals * 5, 5 * 5)'}), '(figsize=(no_signals * 5, 5 * 5))\n', (8620, 8653), True, 'import matplotlib.pyplot as plt\n'), ((8727, 8838), 'matplotlib.gridspec.GridSpec', 'gridspec.GridSpec', (['(3)', 'total_columns'], {'height_ratios': 'h_ratios', 'width_ratios': 'w_ratios', 'hspace': '(0.1)', 'wspace': '(0.3)'}), '(3, total_columns, height_ratios=h_ratios, width_ratios=\n w_ratios, hspace=0.1, wspace=0.3)\n', (8744, 8838), True, 'import matplotlib.gridspec as gridspec\n'), ((9221, 9255), 'numpy.arange', 'np.arange', (['(-args.flank)', 'args.flank'], {}), '(-args.flank, args.flank)\n', (9230, 9255), True, 'import numpy as np\n'), ((14807, 14836), 'matplotlib.pyplot.subplots_adjust', 'plt.subplots_adjust', ([], {'top': '(0.95)'}), '(top=0.95)\n', (14826, 14836), True, 'import matplotlib.pyplot as plt\n'), ((14838, 14875), 'matplotlib.pyplot.suptitle', 'plt.suptitle', (['args.title'], {'fontsize': '(25)'}), '(args.title, fontsize=25)\n', (14850, 14875), True, 'import matplotlib.pyplot as plt\n'), ((14914, 14959), 'matplotlib.pyplot.savefig', 'plt.savefig', (['args.output'], {'bbox_inches': '"""tight"""'}), "(args.output, bbox_inches='tight')\n", (14925, 14959), True, 'import matplotlib.pyplot as plt\n'), ((14961, 14972), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (14970, 14972), True, 'import matplotlib.pyplot as plt\n'), ((15134, 15159), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (15157, 15159), False, 'import argparse\n'), ((15170, 15199), 'tobias.parsers.add_heatmap_arguments', 'add_heatmap_arguments', (['parser'], {}), '(parser)\n', (15191, 15199), False, 'from 
tobias.parsers import add_heatmap_arguments\n'), ((905, 930), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (928, 930), False, 'import argparse\n'), ((1607, 1702), 'sys.exit', 'sys.exit', (['"""Error: option --show_columns is not available for multiple --TFBS inputs."""'], {}), "(\n 'Error: option --show_columns is not available for multiple --TFBS inputs.'\n )\n", (1615, 1702), False, 'import sys\n'), ((1747, 1815), 'sys.exit', 'sys.exit', (['"""Error: Number of --TFBS does not match number of signals"""'], {}), "('Error: Number of --TFBS does not match number of signals')\n", (1755, 1815), False, 'import sys\n'), ((5630, 5659), 'pyBigWig.open', 'pyBigWig.open', (['bigwig_f', '"""rb"""'], {}), "(bigwig_f, 'rb')\n", (5643, 5659), False, 'import pyBigWig\n'), ((7767, 7802), 'numpy.percentile', 'np.percentile', (['glob_values', '[1, 99]'], {}), '(glob_values, [1, 99])\n', (7780, 7802), True, 'import numpy as np\n'), ((9834, 9944), 'matplotlib.gridspec.GridSpecFromSubplotSpec', 'gridspec.GridSpecFromSubplotSpec', (['no_beds', '(1)'], {'subplot_spec': 'gs[1, col]', 'height_ratios': 'h_ratios', 'hspace': '(0.05)'}), '(no_beds, 1, subplot_spec=gs[1, col],\n height_ratios=h_ratios, hspace=0.05)\n', (9866, 9944), True, 'import matplotlib.gridspec as gridspec\n'), ((10647, 10757), 'matplotlib.gridspec.GridSpecFromSubplotSpec', 'gridspec.GridSpecFromSubplotSpec', (['no_beds', '(1)'], {'subplot_spec': 'gs[1, col]', 'height_ratios': 'h_ratios', 'hspace': '(0.05)'}), '(no_beds, 1, subplot_spec=gs[1, col],\n height_ratios=h_ratios, hspace=0.05)\n', (10679, 10757), True, 'import matplotlib.gridspec as gridspec\n'), ((15281, 15291), 'sys.exit', 'sys.exit', ([], {}), '()\n', (15289, 15291), False, 'import sys\n'), ((1275, 1296), 'os.path.basename', 'os.path.basename', (['fil'], {}), '(fil)\n', (1291, 1296), False, 'import os\n'), ((6808, 6823), 'numpy.vstack', 'np.vstack', (['mats'], {}), '(mats)\n', (6817, 6823), True, 'import numpy as np\n'), ((6840, 
6870), 'numpy.percentile', 'np.percentile', (['joined', '[1, 99]'], {}), '(joined, [1, 99])\n', (6853, 6870), True, 'import numpy as np\n'), ((9992, 10024), 'matplotlib.pyplot.Subplot', 'plt.Subplot', (['fig', 'gs_sub[row, 0]'], {}), '(fig, gs_sub[row, 0])\n', (10003, 10024), True, 'import matplotlib.pyplot as plt\n'), ((10804, 10836), 'matplotlib.pyplot.Subplot', 'plt.Subplot', (['fig', 'gs_sub[row, 0]'], {}), '(fig, gs_sub[row, 0])\n', (10815, 10836), True, 'import matplotlib.pyplot as plt\n'), ((12706, 12736), 'numpy.percentile', 'np.percentile', (['values', '[1, 99]'], {}), '(values, [1, 99])\n', (12719, 12736), True, 'import numpy as np\n'), ((1138, 1159), 'os.path.basename', 'os.path.basename', (['fil'], {}), '(fil)\n', (1154, 1159), False, 'import os\n'), ((6014, 6067), 'numpy.mean', 'np.mean', (["heatmap_info[col][row]['signal_mat']"], {'axis': '(0)'}), "(heatmap_info[col][row]['signal_mat'], axis=0)\n", (6021, 6067), True, 'import numpy as np\n'), ((7322, 7337), 'numpy.vstack', 'np.vstack', (['mats'], {}), '(mats)\n', (7331, 7337), True, 'import numpy as np\n'), ((7356, 7386), 'numpy.percentile', 'np.percentile', (['joined', '[1, 99]'], {}), '(joined, [1, 99])\n', (7369, 7386), True, 'import numpy as np\n'), ((13315, 13339), 'numpy.ceil', 'np.ceil', (['(motif_len / 2.0)'], {}), '(motif_len / 2.0)\n', (13322, 13339), True, 'import numpy as np\n'), ((12849, 12890), 'matplotlib.colors.Normalize', 'mpl.colors.Normalize', ([], {'vmin': '(-lim)', 'vmax': 'lim'}), '(vmin=-lim, vmax=lim)\n', (12869, 12890), True, 'import matplotlib as mpl\n'), ((13276, 13301), 'numpy.floor', 'np.floor', (['(motif_len / 2.0)'], {}), '(motif_len / 2.0)\n', (13284, 13301), True, 'import numpy as np\n'), ((13664, 13688), 'numpy.ceil', 'np.ceil', (['(motif_len / 2.0)'], {}), '(motif_len / 2.0)\n', (13671, 13688), True, 'import numpy as np\n'), ((11824, 11862), 'numpy.abs', 'np.abs', (["heatmap_info[col][row]['vmin']"], {}), "(heatmap_info[col][row]['vmin'])\n", (11830, 11862), True, 
'import numpy as np\n'), ((11863, 11901), 'numpy.abs', 'np.abs', (["heatmap_info[col][row]['vmax']"], {}), "(heatmap_info[col][row]['vmax'])\n", (11869, 11901), True, 'import numpy as np\n'), ((12099, 12198), 'matplotlib.colors.Normalize', 'mpl.colors.Normalize', ([], {'vmin': "heatmap_info[col][row]['vmin']", 'vmax': "heatmap_info[col][row]['vmax']"}), "(vmin=heatmap_info[col][row]['vmin'], vmax=heatmap_info\n [col][row]['vmax'])\n", (12119, 12198), True, 'import matplotlib as mpl\n'), ((13624, 13649), 'numpy.floor', 'np.floor', (['(motif_len / 2.0)'], {}), '(motif_len / 2.0)\n', (13632, 13649), True, 'import numpy as np\n')]
|
from contextlib import contextmanager
import random
import pylibmc
# project
import ddtrace
from ddtrace import config
# 3p
from ddtrace.vendor.wrapt import ObjectProxy
from ...constants import ANALYTICS_SAMPLE_RATE_KEY
from ...constants import SPAN_MEASURED_KEY
from ...ext import SpanTypes
from ...ext import memcached
from ...ext import net
from ...internal.logger import get_logger
from .addrs import parse_addresses
# Original Client class, captured at import time; TracedClient.__init__ uses
# it both for isinstance checks and to construct real pylibmc clients.
_Client = pylibmc.Client
# Module-level logger for this integration.
log = get_logger(__name__)
class TracedClient(ObjectProxy):
""" TracedClient is a proxy for a pylibmc.Client that times it's network operations. """
    def __init__(self, client=None, service=memcached.SERVICE, tracer=None, *args, **kwargs):
        """ Create a traced client that wraps the given memcached client.

        If `client` is not already a pylibmc.Client, it is treated as the
        first constructor argument and a real client is built from it plus
        *args/**kwargs. A Pin carrying the service/tracer is attached to self.
        """
        # The client instance/service/tracer attributes are kept for compatibility
        # with the old interface: TracedClient(client=pylibmc.Client(['localhost:11211']))
        # TODO(Benjamin): Remove these in favor of patching.
        if not isinstance(client, _Client):
            # We are in the patched situation, just pass down all arguments to the pylibmc.Client
            # Note that, in that case, client isn't a real client (just the first argument)
            client = _Client(client, *args, **kwargs)
        else:
            log.warning('TracedClient instantiation is deprecated and will be remove '
                        'in future versions (0.6.0). Use patching instead (see the docs).')
        super(TracedClient, self).__init__(client)
        pin = ddtrace.Pin(service=service, tracer=tracer)
        pin.onto(self)
        # attempt to collect the pool of urls this client talks to; best-effort,
        # failures are only logged at debug level
        try:
            self._addresses = parse_addresses(client.addresses)
        except Exception:
            log.debug('error setting addresses', exc_info=True)
def clone(self, *args, **kwargs):
# rewrap new connections.
cloned = self.__wrapped__.clone(*args, **kwargs)
traced_client = TracedClient(cloned)
pin = ddtrace.Pin.get_from(self)
if pin:
pin.clone().onto(traced_client)
return traced_client
def get(self, *args, **kwargs):
return self._trace_cmd('get', *args, **kwargs)
def set(self, *args, **kwargs):
return self._trace_cmd('set', *args, **kwargs)
def delete(self, *args, **kwargs):
return self._trace_cmd('delete', *args, **kwargs)
def gets(self, *args, **kwargs):
return self._trace_cmd('gets', *args, **kwargs)
def touch(self, *args, **kwargs):
return self._trace_cmd('touch', *args, **kwargs)
def cas(self, *args, **kwargs):
return self._trace_cmd('cas', *args, **kwargs)
def incr(self, *args, **kwargs):
return self._trace_cmd('incr', *args, **kwargs)
def decr(self, *args, **kwargs):
return self._trace_cmd('decr', *args, **kwargs)
def append(self, *args, **kwargs):
return self._trace_cmd('append', *args, **kwargs)
def prepend(self, *args, **kwargs):
return self._trace_cmd('prepend', *args, **kwargs)
def get_multi(self, *args, **kwargs):
return self._trace_multi_cmd('get_multi', *args, **kwargs)
def set_multi(self, *args, **kwargs):
return self._trace_multi_cmd('set_multi', *args, **kwargs)
def delete_multi(self, *args, **kwargs):
return self._trace_multi_cmd('delete_multi', *args, **kwargs)
def _trace_cmd(self, method_name, *args, **kwargs):
""" trace the execution of the method with the given name and will
patch the first arg.
"""
method = getattr(self.__wrapped__, method_name)
with self._span(method_name) as span:
if span and args:
span.set_tag(memcached.QUERY, '%s %s' % (method_name, args[0]))
return method(*args, **kwargs)
def _trace_multi_cmd(self, method_name, *args, **kwargs):
""" trace the execution of the multi command with the given name. """
method = getattr(self.__wrapped__, method_name)
with self._span(method_name) as span:
pre = kwargs.get('key_prefix')
if span and pre:
span.set_tag(memcached.QUERY, '%s %s' % (method_name, pre))
return method(*args, **kwargs)
@contextmanager
def _no_span(self):
yield None
def _span(self, cmd_name):
""" Return a span timing the given command. """
pin = ddtrace.Pin.get_from(self)
if not pin or not pin.enabled():
return self._no_span()
span = pin.tracer.trace(
'memcached.cmd',
service=pin.service,
resource=cmd_name,
span_type=SpanTypes.CACHE,
)
span.set_tag(SPAN_MEASURED_KEY)
try:
self._tag_span(span)
except Exception:
log.debug('error tagging span', exc_info=True)
return span
def _tag_span(self, span):
# FIXME[matt] the host selection is buried in c code. we can't tell what it's actually
# using, so fallback to randomly choosing one. can we do better?
if self._addresses:
_, host, port, _ = random.choice(self._addresses)
span.set_meta(net.TARGET_HOST, host)
span.set_meta(net.TARGET_PORT, port)
# set analytics sample rate
span.set_tag(
ANALYTICS_SAMPLE_RATE_KEY,
config.pylibmc.get_analytics_sample_rate()
)
|
[
"ddtrace.config.pylibmc.get_analytics_sample_rate",
"ddtrace.Pin.get_from",
"random.choice",
"ddtrace.Pin"
] |
[((1597, 1640), 'ddtrace.Pin', 'ddtrace.Pin', ([], {'service': 'service', 'tracer': 'tracer'}), '(service=service, tracer=tracer)\n', (1608, 1640), False, 'import ddtrace\n'), ((2088, 2114), 'ddtrace.Pin.get_from', 'ddtrace.Pin.get_from', (['self'], {}), '(self)\n', (2108, 2114), False, 'import ddtrace\n'), ((4526, 4552), 'ddtrace.Pin.get_from', 'ddtrace.Pin.get_from', (['self'], {}), '(self)\n', (4546, 4552), False, 'import ddtrace\n'), ((5256, 5286), 'random.choice', 'random.choice', (['self._addresses'], {}), '(self._addresses)\n', (5269, 5286), False, 'import random\n'), ((5495, 5537), 'ddtrace.config.pylibmc.get_analytics_sample_rate', 'config.pylibmc.get_analytics_sample_rate', ([], {}), '()\n', (5535, 5537), False, 'from ddtrace import config\n')]
|
# Generated by Django 2.1.15 on 2020-02-25 12:03
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Requires the initial news schema so the 'news' and 'img' models exist.
    dependencies = [
        ('news', '0001_initial'),
    ]
    # Drops News.image and instead points each Img at a parent News row
    # (apparently inverting the relation — confirm against the current models).
    # default=1 backfills existing Img rows onto News pk 1; CASCADE removes
    # images when their News row is deleted.
    operations = [
        migrations.RemoveField(
            model_name='news',
            name='image',
        ),
        migrations.AddField(
            model_name='img',
            name='news',
            field=models.ForeignKey(default=1, on_delete=django.db.models.deletion.CASCADE, to='news.News'),
        ),
    ]
|
[
"django.db.migrations.RemoveField",
"django.db.models.ForeignKey"
] |
[((255, 310), 'django.db.migrations.RemoveField', 'migrations.RemoveField', ([], {'model_name': '"""news"""', 'name': '"""image"""'}), "(model_name='news', name='image')\n", (277, 310), False, 'from django.db import migrations, models\n'), ((449, 542), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'default': '(1)', 'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""news.News"""'}), "(default=1, on_delete=django.db.models.deletion.CASCADE,\n to='news.News')\n", (466, 542), False, 'from django.db import migrations, models\n')]
|
import json
import urllib.parse
import urllib.request
class YahooApi:
    """Minimal client for the Yahoo! JAPAN web APIs.

    Adds the application id to every request and decodes JSON responses.
    """

    def __init__(self, appid):
        # Application id issued by Yahoo!, appended to every request.
        self.appid = appid

    def api(self, apiurl, params, method='GET'):
        """Call *apiurl* with query *params* and return the decoded JSON object.

        :param apiurl: endpoint URL
        :param params: mapping of query parameters; not modified (``appid`` is
            added to a copy)
        :param method: 'GET' or 'POST'
        :raises NotImplementedError: for any other HTTP method
        :raises IOError: when the response is not a JSON object, or when the
            API signals an error via an 'Error' key
        """
        # Work on a copy so the caller's dict is not mutated.
        query = dict(params)
        query['appid'] = self.appid
        query = urllib.parse.urlencode(query, encoding='UTF-8')
        if method == 'GET':
            response = urllib.request.urlopen("%s?%s" % (apiurl, query))
        elif method == 'POST':
            # Encode the query string to bytes for use as the request body
            query = query.encode('UTF-8')
            response = urllib.request.urlopen(apiurl, query)
        else:
            raise NotImplementedError('Method %s is not supported.' % method)
        # Close the HTTP response instead of leaking the socket.
        with response:
            body = response.read().decode('UTF-8')
        result = json.loads(body)
        # Handle errors
        if not isinstance(result, dict):
            raise IOError('INVALID FORMAT "%s"' % result)
        if 'Error' in result:
            raise IOError('YAHOO! API ERROR "%s"' % result['Error'])
        return result
|
[
"json.loads"
] |
[((732, 752), 'json.loads', 'json.loads', (['response'], {}), '(response)\n', (742, 752), False, 'import json\n')]
|
from __future__ import annotations
from typing import Any, Callable, Optional, TYPE_CHECKING
import string
from subtypes import Dict
from iotools import Config
from .argument import Argument
from .enums import RunMode
from .hierarchy import Hierarchy
if TYPE_CHECKING:
from .declarative import Command, Group
class Handler:
    """Base container tracking arguments, nested groups, and reserved names."""

    def __init__(self, name: str, parent: Handler = None) -> None:
        self.name = name
        self.parent = parent
        self.arguments: list[Argument] = []
        self.groups: list[GroupHandler] = []
        self.names: set[str] = set()

    def __repr__(self) -> str:
        public = (f"{attr}={val!r}" for attr, val in self.__dict__.items() if not attr.startswith("_"))
        return f"{type(self).__name__}({', '.join(public)})"

    def __bool__(self) -> bool:
        # Truthy only when every argument and every nested group is itself truthy.
        return all(self.arguments) and all(self.groups)

    def add_argument(self, argument: Argument) -> None:
        """Add a new Argument object to this CommandHandler. Passes on its arguments to the Argument constructor."""
        self.register_name(argument.name)
        self.arguments.append(argument)

    def add_group(self, group: GroupHandler) -> None:
        """Attach *group* as a child of this handler and reserve its name."""
        self.register_name(group.name)
        self.groups.append(group)
        group.parent = self

    def register_name(self, name: str) -> None:
        """Reserve *name*, rejecting non-identifiers and duplicates."""
        if not name.isidentifier():
            raise ValueError(f"Name '{name}' is not a valid Python identifier.")
        if name in self.names:
            raise ValueError(f"Name '{name}' is already attached to this {type(self).__name__}.")
        self.names.add(name)
class CommandHandler(Handler):
    """
    A class that handles I/O by collecting arguments through the commandline, or generates a GUI to collect arguments if no commandline arguments are provided.
    The CommandHandler implicitly creates a dir structure in the directory of its script for storing the configuration of the previous run, and for providing output.
    """
    # Narrow the inherited attribute: a CommandHandler's parent is always another CommandHandler.
    parent: CommandHandler
    def __init__(self, name: str, desc: str = "", callback: Callable = None, run_mode: RunMode = RunMode.SMART, subtypes: bool = True, parent: CommandHandler = None, command: Command = None) -> None:
        super().__init__(name=name, parent=parent)
        self.desc, self.run_mode, self.callback = desc, run_mode, callback
        self.subhandlers: list[CommandHandler] = []
        self.command = command
        # Built lazily by process(); None until then.
        self.hierarchy: Optional[Hierarchy] = None
        # Letters still available as single-letter argument aliases;
        # 'h' is excluded (presumably reserved for -h/--help — TODO confirm).
        self.remaining_letters = set(string.ascii_lowercase)
        self.remaining_letters.discard("h")
    def __repr__(self) -> str:
        return f"{type(self).__name__}({', '.join([f'{attr}={repr(val)}' for attr, val in self.__dict__.items() if not attr.startswith('_')])})"
    def add_argument(self, argument: Argument) -> None:
        """Add a new Argument object to this CommandHandler. Passes on its arguments to the Argument constructor."""
        super().add_argument(argument)
        # Give the argument a one-letter alias when one is still available.
        if shortform := self.determine_shortform_alias(argument.name):
            argument.aliases.append(shortform)
    def add_subhandler(self, subhandler: CommandHandler) -> None:
        """Attach *subhandler* as a subcommand of this handler."""
        self.register_name(subhandler.name)
        self.subhandlers.append(subhandler)
        subhandler.parent = self
    def process(self, *args: Any, **kwargs: Any) -> CommandHandler:
        """Collect input using this CommandHandler's 'run_mode' and return a CallableDict holding the parsed arguments, coerced to appropriate python types."""
        self.pre_validate()
        self.hierarchize()
        self.hierarchy = Hierarchy(root_handler=self)
        return self.hierarchy.choose_strategy(*args, **kwargs)
    def pre_validate(self) -> None:
        """Recursively validate group configuration before any input is collected."""
        for group in self.groups:
            group.pre_validate()
        for child in self.subhandlers:
            child.pre_validate()
    def post_validate(self) -> None:
        """Validate the collected values of this handler's own groups (not recursive)."""
        for group in self.groups:
            group.post_validate()
    def hierarchize(self) -> None:
        """Configure this handler and, recursively, every subhandler below it."""
        self.configure()
        for child in self.subhandlers:
            child.hierarchize()
    def configure(self) -> None:
        # Root handlers own the Config/dir structure; children share the root's
        # config and get their own subdirectory inside the parent's folder.
        if self.parent:
            self.config = self.parent.config
            self.root = self.parent.root
            self.folder = self.parent.folder.new_dir(self.name)
            self.shared_namespace = self.parent.shared_namespace
        else:
            self.config = Config(author="command", name=self.name)
            self.root = self.folder = self.config.dir
            self.shared_namespace = Dict()
        # Pickled snapshot of the arguments used on the previous run.
        self.latest = self.folder.new_file("latest", "pkl")
    def save_latest_input_config(self, namespace: Dict) -> None:
        """Persist *namespace* so the next run can be pre-populated with it."""
        self.latest.write(namespace)
    def load_latest_input_config(self) -> dict[str, Any]:
        """Return the previous run's argument namespace, or None (printing a notice) when absent."""
        if self.latest:
            return self.latest.read()
        else:
            print(f"No prior configuration found for '{self.name}'")
    def determine_shortform_alias(self, name: str) -> str:
        """Return the first still-unused alphanumeric letter of *name* (consuming it), or None when exhausted."""
        for char in name:
            if char.isalnum():
                letter = char.lower()
                if letter in self.remaining_letters:
                    self.remaining_letters.remove(letter)
                    return letter
class GroupHandler(Handler):
    """Handler backing an ArgumentGroup with inclusive or exclusive membership rules."""
    def __init__(self, name: str, parent: Handler = None, group: Group = None) -> None:
        super().__init__(name=name, parent=parent)
        self.group = group
    def __repr__(self) -> str:
        return f"{type(self).__name__}({', '.join([f'{attr}={repr(val)}' for attr, val in self.__dict__.items() if not attr.startswith('_')])})"
    def __bool__(self) -> bool:
        from .declarative import ArgumentGroup
        # An exclusive group is satisfied when at most one member is truthy;
        # any other group falls back to Handler's all-members-truthy rule.
        if isinstance(self.group, ArgumentGroup.Exclusive):
            return len([*filter(None, [*self.arguments, *self.groups])]) <= 1
        return super().__bool__()
    def add_argument(self, argument: Argument) -> None:
        """Add a new Argument object to this CommandHandler. Passes on its arguments to the Argument constructor."""
        super().add_argument(argument)
        # Arguments in a group must also be registered on the owning
        # CommandHandler, so walk up the parent chain until we find it.
        parent = self.parent
        while not isinstance(parent, CommandHandler):
            parent = parent.parent
        parent.add_argument(argument)
    def pre_validate(self) -> None:
        """Reject group configurations that could never constrain anything."""
        from .declarative import ArgumentGroup
        if isinstance(self.group, ArgumentGroup.Inclusive):
            if all([*self.arguments, *self.groups]):
                raise RuntimeError(f"The {ArgumentGroup.Inclusive.__name__} {self.group._handler_.name} is valid in all circumstances due to argument nullability and defaults.")
        elif isinstance(self.group, ArgumentGroup.Exclusive):
            # Fix: the previous implementation built an unused 'truthiness'
            # mapping here before raising; it has been removed.
            if any([*self.arguments, *self.groups]):
                raise RuntimeError(f"The {ArgumentGroup.Exclusive.__name__} {self.group._handler_.name} contains at least one argument or group that is always valid due to argument nullability and defaults.")
    def post_validate(self) -> None:
        """Raise when the collected argument values violate this group's rule."""
        from .declarative import ArgumentGroup
        if not self:
            if isinstance(self.group, ArgumentGroup.Inclusive):
                for argument in self.arguments:
                    if not argument:
                        raise RuntimeError(f"Argument {argument.name} of {ArgumentGroup.Inclusive.__name__} {self.group._handler_.name} was not provided.")
                for group in self.groups:
                    if not group:
                        group.post_validate()
            elif isinstance(self.group, ArgumentGroup.Exclusive):
                provided_items = [item.name for item in (*self.arguments, *self.groups) if item]
                raise RuntimeError(f"At most one argument or argument group of {ArgumentGroup.Exclusive.__name__} {self.group._handler_.name} can be provided. Provided:\n\n{', '.join(provided_items)}")
|
[
"iotools.Config",
"subtypes.Dict"
] |
[((4374, 4414), 'iotools.Config', 'Config', ([], {'author': '"""command"""', 'name': 'self.name'}), "(author='command', name=self.name)\n", (4380, 4414), False, 'from iotools import Config\n'), ((4506, 4512), 'subtypes.Dict', 'Dict', ([], {}), '()\n', (4510, 4512), False, 'from subtypes import Dict\n')]
|
from __future__ import absolute_import, division, print_function
from libtbx import easy_run
import libtbx.load_env
import os.path
import time
# taken from phenix_regression/refinement/ncs/tst_ncs_0.py
pdb_str = """\
CRYST1 100.000 100.000 100.000 90.00 90.00 90.00 P 1
ATOM 1 N ALA A 1 27.344 16.348 30.784 1.00 10.00 N
ATOM 2 CA ALA A 1 26.429 15.281 31.335 1.00 10.00 C
ATOM 3 C ALA A 1 26.610 14.025 30.603 1.00 10.00 C
ATOM 4 O ALA A 1 26.479 13.979 29.356 1.00 10.00 O
ATOM 5 CB ALA A 1 24.874 15.800 31.300 1.00 10.00 C
ATOM 1 N ALA A 2 26.812 12.925 31.345 1.00 10.00 N
ATOM 2 CA ALA A 2 27.084 11.577 30.797 1.00 10.00 C
ATOM 3 C ALA A 2 25.856 10.737 30.707 1.00 10.00 C
ATOM 4 O ALA A 2 25.741 9.860 29.891 1.00 10.00 O
ATOM 5 CB ALA A 2 28.151 10.950 31.721 1.00 10.00 C
ATOM 1 N ALA A 3 25.009 10.973 31.714 1.00 10.00 N
ATOM 2 CA ALA A 3 23.621 10.543 31.560 1.00 10.00 C
ATOM 3 C ALA A 3 23.023 11.008 30.214 1.00 10.00 C
ATOM 4 O ALA A 3 22.786 10.233 29.249 1.00 10.00 O
ATOM 5 CB ALA A 3 22.760 11.040 32.654 1.00 10.00 C
ATOM 1 N ALA A 4 22.798 12.304 30.175 1.00 10.00 N
ATOM 2 CA ALA A 4 22.329 13.084 28.981 1.00 10.00 C
ATOM 3 C ALA A 4 23.116 12.816 27.721 1.00 10.00 C
ATOM 4 O ALA A 4 22.533 12.805 26.670 1.00 10.00 O
ATOM 5 CB ALA A 4 22.372 14.607 29.318 1.00 10.00 C
ATOM 1 N ALA A 5 24.448 12.622 27.823 1.00 10.00 N
ATOM 2 CA ALA A 5 25.228 12.407 26.573 1.00 10.00 C
ATOM 3 C ALA A 5 25.222 10.947 26.143 1.00 10.00 C
ATOM 4 O ALA A 5 25.386 10.664 24.983 1.00 10.00 O
ATOM 5 CB ALA A 5 26.634 12.906 26.746 1.00 10.00 C
ATOM 1 N ALA A 6 24.976 10.048 27.071 1.00 10.00 N
ATOM 2 CA ALA A 6 24.857 8.614 26.805 1.00 10.00 C
ATOM 3 C ALA A 6 23.537 8.349 26.054 1.00 10.00 C
ATOM 4 O ALA A 6 23.439 7.570 25.057 1.00 10.00 O
ATOM 5 CB ALA A 6 24.874 7.845 28.114 1.00 10.00 C
ATOM 1 N ALA A 7 22.542 9.039 26.580 1.00 10.00 N
ATOM 2 CA ALA A 7 21.228 8.903 25.942 1.00 10.00 C
ATOM 3 C ALA A 7 21.329 9.698 24.628 1.00 10.00 C
ATOM 4 O ALA A 7 20.707 9.383 23.632 1.00 10.00 O
ATOM 5 CB ALA A 7 20.146 9.465 26.862 1.00 10.00 C
ATOM 1 N ALA A 8 22.181 10.696 24.613 1.00 10.00 N
ATOM 2 CA ALA A 8 22.526 11.372 23.378 1.00 10.00 C
ATOM 3 C ALA A 8 23.351 10.555 22.448 1.00 10.00 C
ATOM 4 O ALA A 8 23.618 10.883 21.252 1.00 10.00 O
ATOM 5 CB ALA A 8 23.168 12.697 23.693 1.00 10.00 C
ATOM 1 N ALA A 9 23.864 9.423 22.961 1.00 10.00 N
ATOM 2 CA ALA A 9 24.785 8.541 22.264 1.00 10.00 C
ATOM 3 C ALA A 9 24.057 7.451 21.484 1.00 10.00 C
ATOM 4 O ALA A 9 24.127 7.381 20.257 1.00 10.00 O
ATOM 5 CB ALA A 9 25.815 7.975 23.249 1.00 10.00 C
ATOM 1 N ALA A 10 23.518 6.548 22.264 1.00 10.00 N
ATOM 2 CA ALA A 10 22.629 5.525 21.690 1.00 10.00 C
ATOM 3 C ALA A 10 21.549 6.308 21.009 1.00 10.00 C
ATOM 4 O ALA A 10 21.114 5.933 19.930 1.00 10.00 O
ATOM 5 CB ALA A 10 22.057 4.714 22.784 1.00 10.00 C
ATOM 1 N ALA A 11 21.120 7.452 21.541 1.00 10.00 N
ATOM 2 CA ALA A 11 20.186 8.260 20.874 1.00 10.00 C
ATOM 3 C ALA A 11 20.978 9.215 19.937 1.00 10.00 C
ATOM 4 O ALA A 11 20.386 10.177 19.507 1.00 10.00 O
ATOM 5 CB ALA A 11 19.295 9.031 21.867 1.00 10.00 C
ATOM 1 N ALA A 12 22.222 8.932 19.598 1.00 10.00 N
ATOM 2 CA ALA A 12 22.896 9.709 18.563 1.00 10.00 C
ATOM 3 C ALA A 12 22.924 8.925 17.308 1.00 10.00 C
ATOM 4 O ALA A 12 22.982 9.445 16.193 1.00 10.00 O
ATOM 5 CB ALA A 12 24.294 10.138 18.994 1.00 10.00 C
ATOM 1 N ALA A 13 22.951 7.633 17.508 1.00 10.00 N
ATOM 2 CA ALA A 13 22.709 6.629 16.554 1.00 10.00 C
ATOM 3 C ALA A 13 21.275 6.673 16.206 1.00 10.00 C
ATOM 4 O ALA A 13 20.870 6.521 15.092 1.00 10.00 O
ATOM 5 CB ALA A 13 23.077 5.254 17.025 1.00 10.00 C
ATOM 1 N ALA A 14 20.471 6.929 17.226 1.00 10.00 N
ATOM 2 CA ALA A 14 19.039 6.992 17.025 1.00 10.00 C
ATOM 3 C ALA A 14 18.676 8.380 16.528 1.00 10.00 C
ATOM 4 O ALA A 14 17.748 8.556 15.761 1.00 10.00 O
ATOM 5 CB ALA A 14 18.240 6.715 18.272 1.00 10.00 C
ATOM 1 N ALA A 15 19.381 9.390 17.055 1.00 10.00 N
ATOM 2 CA ALA A 15 19.204 10.743 16.669 1.00 10.00 C
ATOM 3 C ALA A 15 19.407 10.807 15.174 1.00 10.00 C
ATOM 4 O ALA A 15 18.402 10.987 14.424 1.00 10.00 O
ATOM 5 CB ALA A 15 20.190 11.665 17.493 1.00 10.00 C
ATOM 1 N ALA A 16 20.702 10.653 14.831 1.00 10.00 N
ATOM 2 CA ALA A 16 21.206 10.546 13.480 1.00 10.00 C
ATOM 3 C ALA A 16 20.484 9.612 12.585 1.00 10.00 C
ATOM 4 O ALA A 16 20.380 9.918 11.386 1.00 10.00 O
ATOM 5 CB ALA A 16 22.631 10.174 13.475 1.00 10.00 C
ATOM 1 N ALA A 17 20.064 8.475 13.175 1.00 10.00 N
ATOM 2 CA ALA A 17 19.355 7.473 12.426 1.00 10.00 C
ATOM 3 C ALA A 17 17.924 7.807 12.064 1.00 10.00 C
ATOM 4 O ALA A 17 17.535 7.721 10.871 1.00 10.00 O
ATOM 5 CB ALA A 17 19.359 6.123 13.216 1.00 10.00 C
ATOM 1 N ALA A 18 17.152 8.115 13.031 1.00 10.00 N
ATOM 2 CA ALA A 18 15.835 8.594 12.861 1.00 10.00 C
ATOM 3 C ALA A 18 15.811 9.835 11.861 1.00 10.00 C
ATOM 4 O ALA A 18 15.020 9.889 10.868 1.00 10.00 O
ATOM 5 CB ALA A 18 15.272 8.918 14.234 1.00 10.00 C
ATOM 1 N ALA A 19 16.661 10.845 12.100 1.00 10.00 N
ATOM 2 CA ALA A 19 16.435 12.061 11.275 1.00 10.00 C
ATOM 3 C ALA A 19 17.004 11.815 9.833 1.00 10.00 C
ATOM 4 O ALA A 19 16.334 12.117 8.857 1.00 10.00 O
ATOM 5 CB ALA A 19 17.059 13.242 11.866 1.00 10.00 C
ATOM 1 N ALA A 20 18.191 11.200 9.841 1.00 10.00 N
ATOM 2 CA ALA A 20 19.091 11.247 8.697 1.00 10.00 C
ATOM 3 C ALA A 20 19.549 9.835 8.231 1.00 10.00 C
ATOM 4 O ALA A 20 20.670 9.692 7.663 1.00 10.00 O
ATOM 5 CB ALA A 20 20.326 12.105 9.035 1.00 10.00 C
ATOM 1 N ALA A 21 18.654 8.850 8.523 1.00 10.00 N
ATOM 2 CA ALA A 21 18.827 7.437 8.168 1.00 10.00 C
ATOM 3 C ALA A 21 17.565 6.607 8.282 1.00 10.00 C
ATOM 4 O ALA A 21 16.485 6.992 7.820 1.00 10.00 O
ATOM 5 CB ALA A 21 19.888 6.838 8.983 1.00 10.00 C
TER
ATOM 1 N ALA B 1 16.348 17.420 35.897 1.00 50.00 N
ATOM 2 CA ALA B 1 16.783 16.083 36.351 1.00 50.00 C
ATOM 3 C ALA B 1 16.794 15.172 35.139 1.00 50.00 C
ATOM 4 O ALA B 1 16.167 15.477 34.133 1.00 50.00 O
ATOM 5 CB ALA B 1 15.785 15.534 37.468 1.00 50.00 C
ATOM 1 N ALA B 2 17.491 14.058 35.255 1.00 50.00 N
ATOM 2 CA ALA B 2 17.790 13.267 34.127 1.00 50.00 C
ATOM 3 C ALA B 2 16.716 12.232 33.688 1.00 50.00 C
ATOM 4 O ALA B 2 16.676 11.869 32.543 1.00 50.00 O
ATOM 5 CB ALA B 2 19.125 12.656 34.415 1.00 50.00 C
ATOM 1 N ALA B 3 15.904 11.687 34.605 1.00 50.00 N
ATOM 2 CA ALA B 3 14.798 10.901 34.173 1.00 50.00 C
ATOM 3 C ALA B 3 13.740 11.723 33.536 1.00 50.00 C
ATOM 4 O ALA B 3 13.398 11.501 32.356 1.00 50.00 O
ATOM 5 CB ALA B 3 14.148 10.176 35.403 1.00 50.00 C
ATOM 1 N ALA B 4 13.239 12.708 34.247 1.00 50.00 N
ATOM 2 CA ALA B 4 12.158 13.487 33.709 1.00 50.00 C
ATOM 3 C ALA B 4 12.674 14.248 32.495 1.00 50.00 C
ATOM 4 O ALA B 4 11.935 14.376 31.526 1.00 50.00 O
ATOM 5 CB ALA B 4 11.553 14.432 34.712 1.00 50.00 C
ATOM 1 N ALA B 5 13.947 14.627 32.479 1.00 50.00 N
ATOM 2 CA ALA B 5 14.416 15.490 31.405 1.00 50.00 C
ATOM 3 C ALA B 5 14.960 14.730 30.186 1.00 50.00 C
ATOM 4 O ALA B 5 14.575 14.940 29.054 1.00 50.00 O
ATOM 5 CB ALA B 5 15.464 16.431 31.928 1.00 50.00 C
ATOM 1 N ALA B 6 15.867 13.827 30.546 1.00 50.00 N
ATOM 2 CA ALA B 6 16.575 12.918 29.615 1.00 50.00 C
ATOM 3 C ALA B 6 15.465 12.002 28.975 1.00 50.00 C
ATOM 4 O ALA B 6 15.450 11.709 27.742 1.00 50.00 O
ATOM 5 CB ALA B 6 17.632 12.157 30.362 1.00 50.00 C
ATOM 1 N ALA B 7 14.542 11.597 29.783 1.00 50.00 N
ATOM 2 CA ALA B 7 13.529 10.701 29.277 1.00 50.00 C
ATOM 3 C ALA B 7 12.175 11.364 28.835 1.00 50.00 C
ATOM 4 O ALA B 7 11.466 10.770 27.969 1.00 50.00 O
ATOM 5 CB ALA B 7 13.161 9.644 30.376 1.00 50.00 C
ATOM 1 N ALA B 8 11.753 12.455 29.452 1.00 50.00 N
ATOM 2 CA ALA B 8 10.536 13.193 28.972 1.00 50.00 C
ATOM 3 C ALA B 8 10.919 13.923 27.670 1.00 50.00 C
ATOM 4 O ALA B 8 10.171 14.036 26.729 1.00 50.00 O
ATOM 5 CB ALA B 8 10.032 14.139 30.014 1.00 50.00 C
ATOM 1 N ALA B 9 12.185 14.247 27.579 1.00 50.00 N
ATOM 2 CA ALA B 9 12.754 14.849 26.385 1.00 50.00 C
ATOM 3 C ALA B 9 12.892 13.859 25.320 1.00 50.00 C
ATOM 4 O ALA B 9 12.234 13.980 24.290 1.00 50.00 O
ATOM 5 CB ALA B 9 14.108 15.448 26.695 1.00 50.00 C
ATOM 1 N ALA B 10 13.655 12.794 25.566 1.00 50.00 N
ATOM 2 CA ALA B 10 13.831 11.803 24.529 1.00 50.00 C
ATOM 3 C ALA B 10 12.551 10.987 24.319 1.00 50.00 C
ATOM 4 O ALA B 10 12.514 10.237 23.390 1.00 50.00 O
ATOM 5 CB ALA B 10 15.024 10.750 24.992 1.00 50.00 C
ATOM 1 N ALA B 11 11.558 11.184 25.126 1.00 50.00 N
ATOM 2 CA ALA B 11 10.334 10.457 24.931 1.00 50.00 C
ATOM 3 C ALA B 11 9.326 11.284 24.168 1.00 50.00 C
ATOM 4 O ALA B 11 8.566 10.707 23.476 1.00 50.00 O
ATOM 5 CB ALA B 11 9.644 10.042 26.251 1.00 50.00 C
ATOM 1 N ALA B 12 9.277 12.611 24.334 1.00 50.00 N
ATOM 2 CA ALA B 12 8.354 13.375 23.644 1.00 50.00 C
ATOM 3 C ALA B 12 9.019 13.546 22.264 1.00 50.00 C
ATOM 4 O ALA B 12 8.400 13.891 21.317 1.00 50.00 O
ATOM 5 CB ALA B 12 8.056 14.678 24.287 1.00 50.00 C
ATOM 1 N ALA B 13 10.333 13.339 22.264 1.00 50.00 N
ATOM 2 CA ALA B 13 11.239 13.471 21.127 1.00 50.00 C
ATOM 3 C ALA B 13 11.096 12.161 20.325 1.00 50.00 C
ATOM 4 O ALA B 13 11.145 12.175 19.123 1.00 50.00 O
ATOM 5 CB ALA B 13 12.584 13.665 21.596 1.00 50.00 C
ATOM 1 N ALA B 14 11.051 11.078 21.086 1.00 50.00 N
ATOM 2 CA ALA B 14 10.953 9.771 20.454 1.00 50.00 C
ATOM 3 C ALA B 14 9.550 9.463 20.117 1.00 50.00 C
ATOM 4 O ALA B 14 9.233 8.571 19.367 1.00 50.00 O
ATOM 5 CB ALA B 14 11.461 8.697 21.413 1.00 50.00 C
ATOM 1 N ALA B 15 8.669 10.215 20.743 1.00 50.00 N
ATOM 2 CA ALA B 15 7.282 10.010 20.486 1.00 50.00 C
ATOM 3 C ALA B 15 6.825 10.982 19.376 1.00 50.00 C
ATOM 4 O ALA B 15 5.855 10.783 18.619 1.00 50.00 O
ATOM 5 CB ALA B 15 6.367 10.306 21.797 1.00 50.00 C
ATOM 1 N ALA B 16 7.511 12.143 19.430 1.00 50.00 N
ATOM 2 CA ALA B 16 7.233 13.302 18.551 1.00 50.00 C
ATOM 3 C ALA B 16 7.912 13.082 17.205 1.00 50.00 C
ATOM 4 O ALA B 16 7.492 13.573 16.111 1.00 50.00 O
ATOM 5 CB ALA B 16 7.762 14.594 19.165 1.00 50.00 C
ATOM 1 N ALA B 17 9.071 12.427 17.269 1.00 50.00 N
ATOM 2 CA ALA B 17 9.595 11.771 16.091 1.00 50.00 C
ATOM 3 C ALA B 17 8.883 10.519 15.763 1.00 50.00 C
ATOM 4 O ALA B 17 8.890 10.193 14.597 1.00 50.00 O
ATOM 5 CB ALA B 17 11.046 11.518 16.265 1.00 50.00 C
ATOM 1 N ALA B 18 8.315 9.809 16.722 1.00 50.00 N
ATOM 2 CA ALA B 18 7.515 8.647 16.448 1.00 50.00 C
ATOM 3 C ALA B 18 6.253 9.063 15.707 1.00 50.00 C
ATOM 4 O ALA B 18 5.559 8.173 15.198 1.00 50.00 O
ATOM 5 CB ALA B 18 7.129 7.915 17.695 1.00 50.00 C
ATOM 1 N ALA B 19 5.866 10.332 15.772 1.00 50.00 N
ATOM 2 CA ALA B 19 4.686 10.808 15.089 1.00 50.00 C
ATOM 3 C ALA B 19 5.011 11.578 13.803 1.00 50.00 C
ATOM 4 O ALA B 19 4.291 11.514 12.837 1.00 50.00 O
ATOM 5 CB ALA B 19 3.854 11.710 15.960 1.00 50.00 C
ATOM 1 N ALA B 20 6.176 12.195 13.822 1.00 50.00 N
ATOM 2 CA ALA B 20 6.614 13.121 12.789 1.00 50.00 C
ATOM 3 C ALA B 20 7.933 12.759 12.098 1.00 50.00 C
ATOM 4 O ALA B 20 8.620 13.613 11.585 1.00 50.00 O
ATOM 5 CB ALA B 20 6.823 14.498 13.449 1.00 50.00 C
ATOM 1 N ALA B 21 8.284 11.511 12.050 1.00 50.00 N
ATOM 2 CA ALA B 21 9.513 11.117 11.323 1.00 50.00 C
ATOM 3 C ALA B 21 9.313 9.628 11.029 1.00 50.00 C
ATOM 4 O ALA B 21 9.731 8.751 11.795 1.00 50.00 O
ATOM 5 CB ALA B 21 10.799 11.332 12.178 1.00 50.00 C
TER
"""
def exercise_04(prefix="tst_mi_map_test_04"):
  """
  Run with reference map.
  Check if working with NCS in the model. Without symmetry.

  :param prefix: basename used for the generated input/output/log files
  """
  # without cryst
  # 'with' blocks guarantee the handles are closed even if a write fails.
  with open("%s_start.pdb" % prefix, "w") as pdb_file:
    pdb_file.write(pdb_str)
  cmd = " ".join([
    "phenix.model_idealization",
    "%s_start.pdb" % prefix,
    "use_map_for_reference=True",
    "loop_idealization.number_of_ccd_trials=1",
    "number_of_refinement_cycles=1",
    "n_macro=1",
    "debug=True",
    ">%s.log" % prefix])
  print(cmd)
  # easy_run.call returns a non-zero exit status on failure.
  assert not easy_run.call(cmd)
  assert os.path.isfile("%s_start.pdb_all_idealized.pdb" % prefix)
  with open("%s.log" % prefix, "r") as res_log:
    log_lines = res_log.readlines()
  # NCS constraints with map are not implemented yet
  for l in [
    # "Using ncs\n",
    "Using map as reference\n",
    " Minimizing... (NCS)\n",
    # "Ramachandran outliers: 0.00 0.00 0.00 0.00 0.00\n",
    "All done.\n"]:
    assert l in log_lines, "'%s' not in log file." % l
if (__name__ == "__main__"):
  t0 = time.time()
  # Skip gracefully when the optional 'probe' module is not configured
  # in this libtbx environment; otherwise run the regression test.
  if (not libtbx.env.has_module(name="probe")):
    print("Skipping: probe not configured")
  else:
    exercise_04()
  print("Time: %.2f" % (time.time() - t0))
  print("OK")
|
[
"libtbx.easy_run.call",
"time.time"
] |
[((17972, 17983), 'time.time', 'time.time', ([], {}), '()\n', (17981, 17983), False, 'import time\n'), ((17437, 17455), 'libtbx.easy_run.call', 'easy_run.call', (['cmd'], {}), '(cmd)\n', (17450, 17455), False, 'from libtbx import easy_run\n'), ((18126, 18137), 'time.time', 'time.time', ([], {}), '()\n', (18135, 18137), False, 'import time\n')]
|
import pytest
import jax.numpy as np
from pzflow.distributions import *
# Each case supplies: the distribution class, its constructor inputs, and the
# 'params' object forwarded to sample()/log_prob().
@pytest.mark.parametrize(
    "distribution,inputs,params",
    [
        (Normal, (2,), ()),
        (Tdist, (2,), np.log(30.0)),
        (Uniform, ((0, 1), (0, 1)), ()),
        (Joint, (Normal(1), Uniform((0, 1))), ((), ())),
        (Joint, (Normal(1), Tdist(1)), ((), np.log(30.0))),
        (Joint, (Joint(Normal(1), Uniform((0, 1))).info[1]), ((), ())),
    ],
)
class TestDistributions:
    def test_returns_correct_shapes(self, distribution, inputs, params):
        # sample() should yield (nsamples, 2); log_prob() should yield (nsamples,).
        dist = distribution(*inputs)
        nsamples = 8
        samples = dist.sample(params, nsamples)
        assert samples.shape == (nsamples, 2)
        log_prob = dist.log_prob(params, samples)
        assert log_prob.shape == (nsamples,)
    def test_control_sample_randomness(self, distribution, inputs, params):
        # Unseeded draws must differ; identical seeds must reproduce identical draws.
        dist = distribution(*inputs)
        nsamples = 8
        s1 = dist.sample(params, nsamples)
        s2 = dist.sample(params, nsamples)
        # NOTE(review): np.all returns a 0-d boolean array, so '~' acts as
        # elementwise NOT here; 'not' would read more clearly but behaves the same.
        assert ~np.all(np.isclose(s1, s2))
        s1 = dist.sample(params, nsamples, seed=0)
        s2 = dist.sample(params, nsamples, seed=0)
        assert np.allclose(s1, s2)
def test_normal_cov():
    """Normal.log_prob accepts a per-sample covariance and keeps the batch shape."""
    n_draws = 2
    normal = Normal(2)
    draws = normal.sample((), n_draws)
    covariances = np.array(
        [
            [[1, 0], [0, 1]],
            [[1, 1], [1, 1]],
        ]
    )
    assert normal.log_prob(4, draws, cov=covariances).shape == (n_draws,)
@pytest.mark.parametrize("inputs", [((-1, 1, 2),), ((2, 1),)])
def test_uniform_bad_inputs(inputs):
    """Uniform must raise ValueError for malformed range tuples."""
    # pytest.raises legacy callable form: calls Uniform(*inputs) and asserts it raises.
    pytest.raises(ValueError, Uniform, *inputs)
|
[
"jax.numpy.array",
"jax.numpy.log",
"jax.numpy.isclose",
"pytest.raises",
"jax.numpy.allclose",
"pytest.mark.parametrize"
] |
[((1453, 1514), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""inputs"""', '[((-1, 1, 2),), ((2, 1),)]'], {}), "('inputs', [((-1, 1, 2),), ((2, 1),)])\n", (1476, 1514), False, 'import pytest\n'), ((1312, 1358), 'jax.numpy.array', 'np.array', (['[[[1, 0], [0, 1]], [[1, 1], [1, 1]]]'], {}), '([[[1, 0], [0, 1]], [[1, 1], [1, 1]]])\n', (1320, 1358), True, 'import jax.numpy as np\n'), ((1176, 1195), 'jax.numpy.allclose', 'np.allclose', (['s1', 's2'], {}), '(s1, s2)\n', (1187, 1195), True, 'import jax.numpy as np\n'), ((1595, 1620), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (1608, 1620), False, 'import pytest\n'), ((190, 202), 'jax.numpy.log', 'np.log', (['(30.0)'], {}), '(30.0)\n', (196, 202), True, 'import jax.numpy as np\n'), ((1038, 1056), 'jax.numpy.isclose', 'np.isclose', (['s1', 's2'], {}), '(s1, s2)\n', (1048, 1056), True, 'import jax.numpy as np\n'), ((347, 359), 'jax.numpy.log', 'np.log', (['(30.0)'], {}), '(30.0)\n', (353, 359), True, 'import jax.numpy as np\n')]
|
# -*- coding: utf-8 -*-
""" 解压压缩包,支持zip, rar
"""
import os
import sys
import six
class CompressedFile(object):
    """A simple read-only wrapper class around ZipFile and RarFile."""
    # Archive extensions (without the dot) this wrapper understands.
    EXTS = ['zip', 'rar']

    def __init__(self, file):
        """Open *file* as a zip or rar archive based on its extension.

        :raises ValueError: if the extension is neither .zip nor .rar
        """
        self.file = file
        self._file = None
        _, ext = os.path.splitext(file)
        if ext == '.zip':
            import zipfile
            self._file = zipfile.ZipFile(self.file, 'r')
        elif ext == '.rar':
            import rarfile
            if sys.platform == 'win32':
                # if os system is windows,try to use built-in unrar
                rarfile.UNRAR_TOOL = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'unrar.exe')
            self._file = rarfile.RarFile(self.file, 'r')
        else:
            raise ValueError('CompressedFile doesnt support "{}"'.format(ext))

    @staticmethod
    def decode_file_name(name):
        """Best-effort decoding of an archive member name to readable text.

        Zip member names are often stored as cp437 bytes that actually hold
        GBK data; try cp437 -> gbk, then utf8, and return the original value
        unchanged when nothing decodes.
        """
        if six.PY3:
            try:
                name = name.encode('cp437')
            except UnicodeEncodeError:
                pass
        if isinstance(name, six.binary_type):
            try:
                name = name.decode('gbk')
            except UnicodeDecodeError:
                try:
                    name = name.decode('utf8')
                except UnicodeDecodeError:
                    pass
        return name

    @classmethod
    def is_compressed_file(cls, filename):
        """Return True when *filename* has a supported archive extension."""
        _, ext = os.path.splitext(filename)
        return ext[1:] in cls.EXTS

    def isdir(self, name):
        """Return True when archive member *name* is a directory entry."""
        info = self._file.getinfo(name)
        try:
            return info.isdir()
        except AttributeError:
            # Older zipfile/rarfile info objects lack isdir(); fall back to
            # the trailing-separator convention for directory entries.
            # (Was a bare 'except:' — narrowed so real errors propagate.)
            return name.endswith(os.path.sep)

    def namelist(self):
        """Return the list of member names in the archive."""
        return self._file.namelist()

    def extract(self, filename, dest):
        """Extract member *filename* to the path *dest*."""
        # Context managers close both handles even on error (the member
        # handle used to be leaked).
        with self._file.open(filename, 'r') as member, open(dest, 'wb') as fp:
            fp.write(member.read())

    def close(self):
        """Close the underlying archive."""
        self._file.close()
|
[
"os.path.abspath",
"rarfile.RarFile",
"zipfile.ZipFile",
"os.path.splitext"
] |
[((340, 362), 'os.path.splitext', 'os.path.splitext', (['file'], {}), '(file)\n', (356, 362), False, 'import os\n'), ((1510, 1536), 'os.path.splitext', 'os.path.splitext', (['filename'], {}), '(filename)\n', (1526, 1536), False, 'import os\n'), ((444, 475), 'zipfile.ZipFile', 'zipfile.ZipFile', (['self.file', '"""r"""'], {}), "(self.file, 'r')\n", (459, 475), False, 'import zipfile\n'), ((777, 808), 'rarfile.RarFile', 'rarfile.RarFile', (['self.file', '"""r"""'], {}), "(self.file, 'r')\n", (792, 808), False, 'import rarfile\n'), ((710, 735), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (725, 735), False, 'import os\n')]
|
from pyrr import matrix44
import moderngl
from demosys import geometry
from demosys.opengl.texture import helper
from demosys.effects import Effect
class DeferredRenderer(Effect):
runnable = False
    def __init__(self, width, height, gbuffer=None, lightbuffer=None):
        """Set up the g-buffer / light-buffer FBOs plus the shared geometry and shaders.

        :param width: render target width in pixels
        :param height: render target height in pixels
        :param gbuffer: optional pre-built geometry framebuffer (created here when None)
        :param lightbuffer: optional pre-built light accumulation framebuffer (created here when None)
        """
        self.width = width
        self.height = height
        self.size = (width, height)
        # FBOs
        self.gbuffer = gbuffer
        self.lightbuffer = lightbuffer
        # Light Info
        self.point_lights = []
        depth_texture = self.ctx.depth_texture(self.size)
        if not self.gbuffer:
            # Two color attachments sharing one depth attachment:
            # [0] 4-component 8-bit (used as the diffuse buffer in combine()),
            # [1] 3-component 16-bit float (bound as g_normal in render_lights()).
            self.gbuffer = self.ctx.framebuffer(
                (
                    self.ctx.texture(self.size, 4, dtype='f1'),
                    self.ctx.texture(self.size, 3, dtype='f2'),
                ),
                depth_attachment=depth_texture,
            )
        # Geometry pass renders with depth testing and face culling only.
        self.gbuffer_scope = self.ctx.scope(
            self.gbuffer,
            enable_only=moderngl.DEPTH_TEST | moderngl.CULL_FACE
        )
        if not self.lightbuffer:
            self.lightbuffer = self.ctx.framebuffer(
                self.ctx.texture(self.size, 4),
            )
        # Light pass blends light volumes together; no depth test.
        self.lightbuffer_scope = self.ctx.scope(
            self.lightbuffer,
            enable_only=moderngl.BLEND | moderngl.CULL_FACE
        )
        # Unit cube for point lights (cube with radius 1.0)
        self.unit_cube = geometry.cube(width=2, height=2, depth=2)
        self.point_light_shader = self.get_program("demosys.deferred.point_light")
        # Debug draw lights
        self.debug_shader = self.get_program("demosys.deferred.debug")
        # Combine shader
        self.combine_shader = self.get_program("demosys.deferred.combine")
        self.quad = geometry.quad_fs()
    def draw_buffers(self, near, far):
        """
        Draw framebuffers for debug purposes.
        We need to supply near and far plane so the depth buffer can be linearized when visualizing.
        :param near: Projection near value
        :param far: Projection far value
        """
        self.ctx.disable(moderngl.DEPTH_TEST)
        # Lay the buffers out in a row: diffuse, normals, linearized depth, light accumulation.
        helper.draw(self.gbuffer.color_attachments[0], pos=(0.0, 0.0), scale=(0.25, 0.25))
        helper.draw(self.gbuffer.color_attachments[1], pos=(0.5, 0.0), scale=(0.25, 0.25))
        helper.draw_depth(self.gbuffer.depth_attachment, near, far, pos=(1.0, 0.0), scale=(0.25, 0.25))
        helper.draw(self.lightbuffer.color_attachments[0], pos=(1.5, 0.0), scale=(0.25, 0.25))
def add_point_light(self, position, radius):
"""Add point light"""
self.point_lights.append(PointLight(position, radius))
def render_lights(self, camera_matrix, projection):
"""Render light volumes"""
# Draw light volumes from the inside
self.ctx.front_face = 'cw'
self.ctx.blend_func = moderngl.ONE, moderngl.ONE
helper._depth_sampler.use(location=1)
with self.lightbuffer_scope:
for light in self.point_lights:
# Calc light properties
light_size = light.radius
m_light = matrix44.multiply(light.matrix, camera_matrix)
# Draw the light volume
self.point_light_shader["m_proj"].write(projection.tobytes())
self.point_light_shader["m_light"].write(m_light.astype('f4').tobytes())
self.gbuffer.color_attachments[1].use(location=0)
self.point_light_shader["g_normal"].value = 0
self.gbuffer.depth_attachment.use(location=1)
self.point_light_shader["g_depth"].value = 1
self.point_light_shader["screensize"].value = (self.width, self.height)
self.point_light_shader["proj_const"].value = projection.projection_constants
self.point_light_shader["radius"].value = light_size
self.unit_cube.render(self.point_light_shader)
helper._depth_sampler.clear(location=1)
def render_lights_debug(self, camera_matrix, projection):
"""Render outlines of light volumes"""
self.ctx.enable(moderngl.BLEND)
self.ctx.blend_func = moderngl.SRC_ALPHA, moderngl.ONE_MINUS_SRC_ALPHA
for light in self.point_lights:
m_mv = matrix44.multiply(light.matrix, camera_matrix)
light_size = light.radius
self.debug_shader["m_proj"].write(projection.tobytes())
self.debug_shader["m_mv"].write(m_mv.astype('f4').tobytes())
self.debug_shader["size"].value = light_size
self.unit_cube.render(self.debug_shader, mode=moderngl.LINE_STRIP)
self.ctx.disable(moderngl.BLEND)
def render_geometry(self, cam_matrix, projection):
raise NotImplementedError("render_geometry() not implemented")
def combine(self):
"""Combine diffuse and light buffer"""
self.gbuffer.color_attachments[0].use(location=0)
self.combine_shader["diffuse_buffer"].value = 0
self.lightbuffer.color_attachments[0].use(location=1)
self.combine_shader["light_buffer"].value = 1
self.quad.render(self.combine_shader)
def clear(self):
"""clear all buffers"""
self.gbuffer.clear()
self.lightbuffer.clear()
class PointLight:
    """A point light: a world-space position plus an influence radius.

    The position is exposed as a property whose setter keeps a translation
    matrix (``self.matrix``) in sync with the stored position.
    """

    def __init__(self, position, radius):
        self.matrix = None
        self.radius = radius
        # Assign through the property so _position and the translation
        # matrix are both populated.
        self.position = position

    @property
    def position(self):
        """Current world-space position of the light."""
        return self._position

    @position.setter
    def position(self, value):
        self._position = value
        self.matrix = matrix44.create_from_translation(value)
|
[
"demosys.opengl.texture.helper._depth_sampler.use",
"demosys.opengl.texture.helper.draw_depth",
"pyrr.matrix44.create_from_translation",
"demosys.geometry.quad_fs",
"demosys.opengl.texture.helper.draw",
"pyrr.matrix44.multiply",
"demosys.geometry.cube",
"demosys.opengl.texture.helper._depth_sampler.clear"
] |
[((1404, 1445), 'demosys.geometry.cube', 'geometry.cube', ([], {'width': '(2)', 'height': '(2)', 'depth': '(2)'}), '(width=2, height=2, depth=2)\n', (1417, 1445), False, 'from demosys import geometry\n'), ((1750, 1768), 'demosys.geometry.quad_fs', 'geometry.quad_fs', ([], {}), '()\n', (1766, 1768), False, 'from demosys import geometry\n'), ((2120, 2206), 'demosys.opengl.texture.helper.draw', 'helper.draw', (['self.gbuffer.color_attachments[0]'], {'pos': '(0.0, 0.0)', 'scale': '(0.25, 0.25)'}), '(self.gbuffer.color_attachments[0], pos=(0.0, 0.0), scale=(0.25,\n 0.25))\n', (2131, 2206), False, 'from demosys.opengl.texture import helper\n'), ((2211, 2297), 'demosys.opengl.texture.helper.draw', 'helper.draw', (['self.gbuffer.color_attachments[1]'], {'pos': '(0.5, 0.0)', 'scale': '(0.25, 0.25)'}), '(self.gbuffer.color_attachments[1], pos=(0.5, 0.0), scale=(0.25,\n 0.25))\n', (2222, 2297), False, 'from demosys.opengl.texture import helper\n'), ((2302, 2401), 'demosys.opengl.texture.helper.draw_depth', 'helper.draw_depth', (['self.gbuffer.depth_attachment', 'near', 'far'], {'pos': '(1.0, 0.0)', 'scale': '(0.25, 0.25)'}), '(self.gbuffer.depth_attachment, near, far, pos=(1.0, 0.0),\n scale=(0.25, 0.25))\n', (2319, 2401), False, 'from demosys.opengl.texture import helper\n'), ((2406, 2497), 'demosys.opengl.texture.helper.draw', 'helper.draw', (['self.lightbuffer.color_attachments[0]'], {'pos': '(1.5, 0.0)', 'scale': '(0.25, 0.25)'}), '(self.lightbuffer.color_attachments[0], pos=(1.5, 0.0), scale=(\n 0.25, 0.25))\n', (2417, 2497), False, 'from demosys.opengl.texture import helper\n'), ((2874, 2911), 'demosys.opengl.texture.helper._depth_sampler.use', 'helper._depth_sampler.use', ([], {'location': '(1)'}), '(location=1)\n', (2899, 2911), False, 'from demosys.opengl.texture import helper\n'), ((3929, 3968), 'demosys.opengl.texture.helper._depth_sampler.clear', 'helper._depth_sampler.clear', ([], {'location': '(1)'}), '(location=1)\n', (3956, 3968), False, 'from 
demosys.opengl.texture import helper\n'), ((5651, 5688), 'pyrr.matrix44.create_from_translation', 'matrix44.create_from_translation', (['pos'], {}), '(pos)\n', (5683, 5688), False, 'from pyrr import matrix44\n'), ((4258, 4304), 'pyrr.matrix44.multiply', 'matrix44.multiply', (['light.matrix', 'camera_matrix'], {}), '(light.matrix, camera_matrix)\n', (4275, 4304), False, 'from pyrr import matrix44\n'), ((3101, 3147), 'pyrr.matrix44.multiply', 'matrix44.multiply', (['light.matrix', 'camera_matrix'], {}), '(light.matrix, camera_matrix)\n', (3118, 3147), False, 'from pyrr import matrix44\n')]
|
# -*- coding: utf-8 -*-
# file: squeeze_embedding.py
# author: songyouwei <<EMAIL>>
# Copyright (C) 2018. All Rights Reserved.
import torch
import torch.nn as nn
import numpy as np
class SqueezeEmbedding(nn.Module):
    """
    Squeeze sequence embedding length to the longest one in the batch
    """
    def __init__(self, batch_first=True):
        super(SqueezeEmbedding, self).__init__()
        self.batch_first = batch_first

    def forward(self, x, x_len):
        """Trim padded embeddings to the longest sequence in the batch.

        The batch is sorted by decreasing length (required by
        ``pack_padded_sequence``), packed, immediately unpacked — which drops
        padding beyond the longest sequence — and finally restored to the
        original sample order.

        :param x: padded sequence embedding vectors
        :param x_len: per-sample lengths (tensor)
        :return: embeddings with the time axis trimmed to ``max(x_len)``
        """
        # Indices that order samples by decreasing length, and the inverse
        # permutation that undoes that ordering afterwards.
        desc_idx = torch.sort(x_len, descending=True)[1].long()
        restore_idx = torch.sort(desc_idx)[1].long()
        sorted_len = x_len[desc_idx]
        sorted_x = x[desc_idx]
        # Pack then unpack: the round trip squeezes away excess padding.
        packed = torch.nn.utils.rnn.pack_padded_sequence(sorted_x, sorted_len, batch_first=self.batch_first)
        padded, _ = torch.nn.utils.rnn.pad_packed_sequence(packed, batch_first=self.batch_first)
        # Restore the caller's sample order.
        return padded[restore_idx]
|
[
"torch.sort",
"torch.nn.utils.rnn.pad_packed_sequence",
"torch.nn.utils.rnn.pack_padded_sequence"
] |
[((879, 958), 'torch.nn.utils.rnn.pack_padded_sequence', 'torch.nn.utils.rnn.pack_padded_sequence', (['x', 'x_len'], {'batch_first': 'self.batch_first'}), '(x, x_len, batch_first=self.batch_first)\n', (918, 958), False, 'import torch\n'), ((999, 1076), 'torch.nn.utils.rnn.pad_packed_sequence', 'torch.nn.utils.rnn.pad_packed_sequence', (['x_emb_p'], {'batch_first': 'self.batch_first'}), '(x_emb_p, batch_first=self.batch_first)\n', (1037, 1076), False, 'import torch\n'), ((697, 715), 'torch.sort', 'torch.sort', (['(-x_len)'], {}), '(-x_len)\n', (707, 715), False, 'import torch\n'), ((749, 771), 'torch.sort', 'torch.sort', (['x_sort_idx'], {}), '(x_sort_idx)\n', (759, 771), False, 'import torch\n')]
|
import json

# Default CommCare build configuration.
# Previously this was parsed from a JSON string literal with json.loads() at
# import time; an equivalent plain dict literal is clearer and avoids the
# parse. (``import json`` is kept — other code in this module may rely on it.)
commcare_build_config = {
    "_id": "config--commcare-builds",
    "doc_type": "CommCareBuildConfig",
    "preview": {"version": "1.2.1", "build_number": None, "latest": True},
    "defaults": [
        {"version": "1.2.1", "build_number": None, "latest": True},
        {"version": "2.0.0", "build_number": None, "latest": True},
    ],
    "application_versions": ["1.0", "2.0"],
    "menu": [
        {
            "build": {"version": "1.1.1", "build_number": None, "latest": True},
            "label": "CommCare 1.1.1",
        },
        {
            "build": {"version": "1.2.1", "build_number": None, "latest": True},
            "label": "CommCare 1.2.1",
        },
        {
            "build": {"version": "1.3.0", "build_number": None, "latest": True},
            "label": "CommCare 1.3 (RC5)",
        },
        {
            "build": {"version": "2.0.0", "build_number": None, "latest": True},
            "label": "CommCare 2.0 (unstable)",
        },
    ],
    "ID": "config--commcare-builds",
}
|
[
"json.loads"
] |
[((38, 1318), 'json.loads', 'json.loads', (['"""{\n "_id": "config--commcare-builds",\n "doc_type": "CommCareBuildConfig",\n "preview": {\n "version": "1.2.1",\n "build_number": null,\n "latest": true\n },\n "defaults": [{\n "version": "1.2.1",\n "build_number": null,\n "latest": true\n }, {\n "version": "2.0.0",\n "build_number": null,\n "latest": true\n }],\n "application_versions": ["1.0", "2.0"],\n "menu": [\n {\n "build": {\n "version": "1.1.1",\n "build_number": null,\n "latest": true\n },\n "label": "CommCare 1.1.1"\n },\n {\n "build": {\n "version": "1.2.1",\n "build_number": null,\n "latest": true\n },\n "label": "CommCare 1.2.1"\n },\n {\n "build": {\n "version": "1.3.0",\n "build_number": null,\n "latest": true\n },\n "label": "CommCare 1.3 (RC5)"\n },\n {\n "build": {\n "version": "2.0.0",\n "build_number": null,\n "latest": true\n },\n "label": "CommCare 2.0 (unstable)"\n }\n ],\n "ID": "config--commcare-builds"\n}"""'], {}), '(\n """{\n "_id": "config--commcare-builds",\n "doc_type": "CommCareBuildConfig",\n "preview": {\n "version": "1.2.1",\n "build_number": null,\n "latest": true\n },\n "defaults": [{\n "version": "1.2.1",\n "build_number": null,\n "latest": true\n }, {\n "version": "2.0.0",\n "build_number": null,\n "latest": true\n }],\n "application_versions": ["1.0", "2.0"],\n "menu": [\n {\n "build": {\n "version": "1.1.1",\n "build_number": null,\n "latest": true\n },\n "label": "CommCare 1.1.1"\n },\n {\n "build": {\n "version": "1.2.1",\n "build_number": null,\n "latest": true\n },\n "label": "CommCare 1.2.1"\n },\n {\n "build": {\n "version": "1.3.0",\n "build_number": null,\n "latest": true\n },\n "label": "CommCare 1.3 (RC5)"\n },\n {\n "build": {\n "version": "2.0.0",\n "build_number": null,\n "latest": true\n },\n "label": "CommCare 2.0 (unstable)"\n }\n ],\n "ID": "config--commcare-builds"\n}"""\n )\n', (48, 1318), False, 'import json\n')]
|
import setuptools

# Read the long description with an explicit encoding: without it, open()
# uses the platform default (e.g. cp1252 on Windows) and installation can
# fail on non-ASCII characters in the README.
with open("README.md", "r", encoding="utf-8") as fh:
    long_description = fh.read()

setuptools.setup(
    name="opentool",
    version="0.0.12",
    author="huutrinh",
    author_email="<EMAIL>",
    description="Tools for AI project",
    long_description=long_description,
    long_description_content_type="text/markdown",
    url="https://github.com/huutrinh68/opentool",
    packages=setuptools.find_packages(),
    install_requires=[
        'torch>=1.6.0',
        'torchvision>=0.7.0',
        'numpy>=1.19.1',
        'tensorboardX>=2.1',
        'easydict>=1.9',
        'addict>=2.2.1',
        'yapf>=0.30.0',
    ],
    classifiers=[
        "Programming Language :: Python :: 3",
        "License :: OSI Approved :: MIT License",
        "Operating System :: OS Independent",
    ],
    python_requires='>=3.6',
)
|
[
"setuptools.find_packages"
] |
[((393, 419), 'setuptools.find_packages', 'setuptools.find_packages', ([], {}), '()\n', (417, 419), False, 'import setuptools\n')]
|
#Object for handling numerical functions
from plotInt import Iplot
from re import split
class lightCurve:
    """A light curve: a time-sorted table of [time, value, error] rows with
    simple statistics, index/time zooming and a fractional-variability
    estimate."""

    # Column indices into each table row.
    x = 0
    y = 1
    dy = 2

    class lcOutOfBound(Exception):
        """Raised when a requested time lies outside the curve's time span."""
        pass

    def __init__(self, table=None):
        """Build from a whitespace-separated text file (``table`` is a path)
        or directly from an in-memory list of rows.

        Fixed: the parameter previously defaulted to a shared mutable list
        (``table=[]``), so every instance built without arguments aliased —
        and mutated, via sort()/__iadd__ — the same list.
        """
        self.table = []
        if table is None:
            table = []
        try:
            for line in open(table):
                try:
                    self.table.append([float(tok) for tok in
                                       split(r"\s+", line.strip())])
                # float() raises ValueError on non-numeric tokens; the
                # original only caught TypeError, so header/comment lines
                # crashed the load instead of being skipped as intended.
                except (TypeError, ValueError):
                    continue
        except TypeError:
            # ``table`` is not a path: treat it as a ready-made list of rows.
            self.table = table
        self.table.sort()

    def diff(self, i):
        """Three-point derivative at row ``i``; returns (t_i, dy/dx), or
        None at the first/last row. Currently only 3-point derivative."""
        if i <= 0 or i >= len(self.table) - 1:
            return None
        dy = self.table[i + 1][1] - self.table[i - 1][1]
        dx = self.table[i + 1][0] - self.table[i - 1][0]
        return self.table[i][0], dy / dx

    def avg(self, column, transform=lambda x: x):
        """Mean of ``transform(row[column])`` over all rows."""
        return sum([transform(row[column]) for row in self.table]) / len(self.table)

    def var(self, column):
        """Variance of a column: E[X^2] - E[X]^2."""
        return self.avg(column, lambda x: x ** 2) - self.avg(column) ** 2

    def resetzoom(self):
        """Restore the full table saved by zoom(); no-op if never zoomed.

        Fixed: a missing ``self.original`` attribute raises AttributeError —
        the original code caught KeyError, so calling resetzoom() before any
        zoom() crashed.
        """
        try:
            self.table = self.original
        except AttributeError:
            pass

    def slideAndAverage(self, windowSize, action, verbose=False):
        """Slide a time window of length ``windowSize`` across the curve,
        evaluate the zero-argument method named ``action`` on each window and
        return the mean of the results.

        Termination relies on find() raising lcOutOfBound past the last row.
        """
        if action in dir(self):
            start = -1
            res = 0
            count = 0
            try:
                while True:
                    start += 1
                    stop = self.find(self.table[start][0] + windowSize)
                    self.zoom(window=[start, stop])
                    current = getattr(self, action)()
                    self.resetzoom()
                    if verbose:
                        print("-I- Window [", start, "=", self.table[start][0], "-", stop, "=", self.table[stop][0], "] got", action, "of", current)
                    res += current
                    count += 1
            except lightCurve.lcOutOfBound:
                pass
            if verbose:
                print("-I- Got", count, "windows.")
            return res / count
        else:
            # Typo fixed in the message ("availble" -> "available").
            print("-E- Got bad action! use dir() to see available actions (no parameter functions).")

    def inPairs(self, column, action=lambda x, y: abs(x - y), after=lambda x: sum(x) / (len(x) - 1)):
        """Apply ``action`` to consecutive pairs of a column's values and
        reduce the list of results with ``after``."""
        return after([action(self.table[i][column], self.table[i - 1][column])
                      for i in range(1, len(self.table))])

    def zoom(self, timewindow=None, window=None):
        """Restrict the table to an index window ``[first, last]`` (inclusive)
        or to a time window ``[t0, t1]``; undo with resetzoom().

        Fixed: ``window`` previously defaulted to a shared mutable list that
        this method also mutated, so repeated timewindow zooms accumulated
        stale indices and mutations leaked back to callers. A fresh copy is
        now used on every call.
        """
        window = [] if window is None else list(window)
        if timewindow:
            window.append(self.find(timewindow[0]))
            window.append(self.find(timewindow[1]))
        if window[1] < 0:
            window[1] += len(self.table)
        window[1] += 1
        self.original = self.table
        self.table = self.table[window[0]:window[1]]

    def Fvar(self):
        """Fractional variability: sqrt((var(y) - <dy^2>) / <y>^2)."""
        return ((self.var(self.y) - self.avg(self.dy, lambda x: x ** 2)) /
                self.avg(self.y) ** 2) ** 0.5

    def find(self, time):
        """Index of the first row whose time is strictly greater than ``time``.

        Raises lcOutOfBound when ``time`` lies outside the covered span.
        NOTE(review): when ``time`` equals the very last timestamp the loop
        falls through and returns None — kept as-is to preserve behaviour.
        """
        if time > self.table[-1][0] or time < self.table[0][0]:
            raise lightCurve.lcOutOfBound()
        for i in range(0, len(self.table)):
            if self.table[i][0] > time:
                return i

    def dFvar(self):
        """Propagated uncertainty on Fvar() from the measurement errors."""
        N = len(self.table)
        s2 = self.avg(self.dy, lambda x: x ** 2)
        F = self.avg(self.y)
        return ((((s2 / N) ** 0.5) / F) ** 2 + ((s2 / F ** 2 / self.Fvar()) * (1 / (2 * N)) ** 0.5) ** 2) ** 0.5

    def plot(self):
        """Plot the curve using the Iplot helper."""
        Iplot.clearPlots()
        Iplot.plotCurves(self)

    def __getitem__(self, i):
        return self.table[i]

    def __iadd__(self, other):
        """In-place concatenation of another curve's rows."""
        self.table += other.table
        return self

    def __len__(self):
        return len(self.table)
|
[
"plotInt.Iplot.plotCurves",
"plotInt.Iplot.clearPlots"
] |
[((3343, 3361), 'plotInt.Iplot.clearPlots', 'Iplot.clearPlots', ([], {}), '()\n', (3359, 3361), False, 'from plotInt import Iplot\n'), ((3370, 3392), 'plotInt.Iplot.plotCurves', 'Iplot.plotCurves', (['self'], {}), '(self)\n', (3386, 3392), False, 'from plotInt import Iplot\n')]
|
import argparse
import io
import os
import shutil
import unittest
import wx
from pathlib import Path
from contextlib import redirect_stdout
from unittests import pfr
from pyfuzzyrenamer import args, config, filters, main_listctrl, main_dlg, masks
from pyfuzzyrenamer.config import get_config
from pyfuzzyrenamer.args import get_args, get_argparser
# ---------------------------------------------------------------------------
class args_Tests(pfr.PyFuzzyRenamerTestCaseCLI):
    """End-to-end CLI tests.

    Each test tweaks the global config, stages a fresh copy of the bundled
    test data, runs the application for one CLI action and checks either the
    captured stdout or the resulting file names.
    """

    def _stage(self, action):
        """Compile masks/filters from the current config, stage a fresh copy
        of the test data under ``self.outdir`` and parse CLI arguments for
        ``action`` against the multimatch folders.

        Returns the sources directory. Extracted from the four tests, which
        previously duplicated this staging block verbatim.
        """
        masks.FileMasked.masks = masks.CompileMasks(get_config()["masks"])
        filters.FileFiltered.filters = filters.CompileFilters(get_config()["filters"])
        if os.path.exists(self.outdir):
            shutil.rmtree(self.outdir)
        shutil.copytree(os.path.abspath(os.path.join(os.path.dirname(__file__), "./data")), self.outdir)
        sourcesDir = os.path.join(self.outdir, "sources_multimatch")
        choicesDir = os.path.join(self.outdir, "choices_multimatch")
        args.theArgs = args.theArgsParser.parse_args(["--sources", sourcesDir, "--choices", choicesDir, action])
        return sourcesDir

    def test_args_report_match(self):
        """report_match prints one 'source --> choice (score)' line per file."""
        get_config()["workers"] = 1
        get_config()["show_fullpath"] = False
        get_config()["hide_extension"] = True
        get_config()["masks"] = "+Ending Disk#\n" + r'"(\s?_disk\d)$"' + "\n"
        self._stage("report_match")
        with io.StringIO() as buf, redirect_stdout(buf):
            # Constructing the main frame drives the CLI action; its stdout
            # is captured into ``buf``.
            frame = main_dlg.MainFrame()
            shutil.rmtree(self.outdir)
            output = buf.getvalue()
        self.assertEqual(
            "acanthe à feuilles molles --> acanthus mollis (70.00)\n"
            "acanthe épineuse --> acanthus spinosus (73.00)\n"
            "aconit vénéneux --> aconitum anthora (52.00)\n"
            "violette cornue --> viola cornuta (71.00)\n"
            "volutaire à fleurs tubulées --> volutaria tubuliflora (54.00)\n",
            output,
        )

    def test_args_preview_rename(self):
        """preview_rename reports the planned rename/copy operations."""
        get_config()["workers"] = 1
        get_config()["masks"] = "+Ending Disk#\n" + r'"(\s?_disk\d)$"' + "\n"
        sourcesDir = self._stage("preview_rename")
        with io.StringIO() as buf, redirect_stdout(buf):
            frame = main_dlg.MainFrame()
            shutil.rmtree(self.outdir)
            output = buf.getvalue()
        self.assertEqual(
            "Renaming : "
            + os.path.join(sourcesDir, "Acanthe à feuilles molles_disk2.txt")
            + " --> "
            + os.path.join(sourcesDir, "Acanthus mollis_disk2.txt\n")
            + "Renaming : "
            + os.path.join(sourcesDir, "Acanthe épineuse.txt")
            + " --> "
            + os.path.join(sourcesDir, "Acanthus spinosus_disk1.txt\n")
            + "Copying : "
            + os.path.join(sourcesDir, "Acanthus spinosus_disk1.txt")
            + " --> "
            + os.path.join(sourcesDir, "Acanthus spinosus_disk2.txt\n")
            + "Renaming : "
            + os.path.join(sourcesDir, "Aconit vénéneux.txt")
            + " --> "
            + os.path.join(sourcesDir, "Aconitum anthora.txt\n")
            + "Copying : "
            + os.path.join(sourcesDir, "Aconitum anthora.txt")
            + " --> "
            + os.path.join(sourcesDir, "Aconitum anthora_disk2.txt\n")
            + "Renaming : "
            + os.path.join(sourcesDir, "Aconit vénéneux_disk1.txt")
            + " --> "
            + os.path.join(sourcesDir, "Aconitum anthora_disk1.txt\n")
            + "Renaming : "
            + os.path.join(sourcesDir, "Aconit vénéneux_disk3.txt")
            + " --> "
            + os.path.join(sourcesDir, "Aconitum anthora_disk3.txt\n")
            + "Renaming : "
            + os.path.join(sourcesDir, "Violette cornue_disk1.txt")
            + " --> "
            + os.path.join(sourcesDir, "Viola cornuta_disk1.txt\n")
            + "Renaming : "
            + os.path.join(sourcesDir, "Volutaire à fleurs tubulées_disk1.txt")
            + " --> "
            + os.path.join(sourcesDir, "Volutaria tubuliflora_disk1.txt\n"),
            output,
        )

    def test_args_preview_rename_nomultirename(self):
        """preview_rename with source_w_multiple_choice disabled: no copies."""
        get_config()["workers"] = 1
        get_config()["source_w_multiple_choice"] = False
        get_config()["masks"] = "+Ending Disk#\n" + r'"(\s?_disk\d)$"' + "\n"
        sourcesDir = self._stage("preview_rename")
        with io.StringIO() as buf, redirect_stdout(buf):
            frame = main_dlg.MainFrame()
            shutil.rmtree(self.outdir)
            output = buf.getvalue()
        self.maxDiff = None
        self.assertEqual(
            "Renaming : "
            + os.path.join(sourcesDir, "Acanthe à feuilles molles_disk2.txt")
            + " --> "
            + os.path.join(sourcesDir, "Acanthus mollis_disk2.txt\n")
            + "Renaming : "
            + os.path.join(sourcesDir, "Acanthe épineuse.txt")
            + " --> "
            + os.path.join(sourcesDir, "Acanthus spinosus.txt\n")
            + "Renaming : "
            + os.path.join(sourcesDir, "Aconit vénéneux.txt")
            + " --> "
            + os.path.join(sourcesDir, "Aconitum anthora.txt\n")
            + "Renaming : "
            + os.path.join(sourcesDir, "Aconit vénéneux_disk1.txt")
            + " --> "
            + os.path.join(sourcesDir, "Aconitum anthora_disk1.txt\n")
            + "Renaming : "
            + os.path.join(sourcesDir, "Aconit vénéneux_disk3.txt")
            + " --> "
            + os.path.join(sourcesDir, "Aconitum anthora_disk3.txt\n")
            + "Renaming : "
            + os.path.join(sourcesDir, "Violette cornue_disk1.txt")
            + " --> "
            + os.path.join(sourcesDir, "Viola cornuta_disk1.txt\n")
            + "Renaming : "
            + os.path.join(sourcesDir, "Volutaire à fleurs tubulées_disk1.txt")
            + " --> "
            + os.path.join(sourcesDir, "Volutaria tubuliflora_disk1.txt\n"),
            output,
        )

    def test_args_rename(self):
        """rename actually renames/copies files; check the resulting names."""
        get_config()["workers"] = 1
        get_config()["keep_original"] = False
        get_config()["masks"] = "+Ending Disk#\n" + r'"(\s?_disk\d)$"' + "\n"
        self._stage("rename")
        with io.StringIO() as buf, redirect_stdout(buf):
            frame = main_dlg.MainFrame()
            renamed = []
            for f in sorted(Path(os.path.join(self.outdir, "sources_multimatch")).resolve().glob("*"), key=os.path.basename):
                try:
                    if f.is_file():
                        renamed.append(f.name)
                except (OSError, IOError):
                    pass
            shutil.rmtree(self.outdir)
        self.assertEqual(
            [
                "Acanthus mollis_disk2.txt",
                "Acanthus spinosus_disk1.txt",
                "Acanthus spinosus_disk2.txt",
                "Aconitum anthora.txt",
                "Aconitum anthora_disk1.txt",
                "Aconitum anthora_disk2.txt",
                "Aconitum anthora_disk3.txt",
                "Viola cornuta_disk1.txt",
                "Volutaria tubuliflora_disk1.txt",
            ],
            renamed,
        )
# ---------------------------------------------------------------------------
# Allow running this test module directly (``python <file>``) in addition to
# test-runner discovery.
if __name__ == "__main__":
    unittest.main()
|
[
"unittest.main",
"io.StringIO",
"os.path.dirname",
"os.path.exists",
"pyfuzzyrenamer.main_dlg.MainFrame",
"pyfuzzyrenamer.config.get_config",
"contextlib.redirect_stdout",
"shutil.rmtree",
"os.path.join",
"pyfuzzyrenamer.args.theArgsParser.parse_args"
] |
[((9237, 9252), 'unittest.main', 'unittest.main', ([], {}), '()\n', (9250, 9252), False, 'import unittest\n'), ((897, 924), 'os.path.exists', 'os.path.exists', (['self.outdir'], {}), '(self.outdir)\n', (911, 924), False, 'import os\n'), ((1091, 1138), 'os.path.join', 'os.path.join', (['self.outdir', '"""sources_multimatch"""'], {}), "(self.outdir, 'sources_multimatch')\n", (1103, 1138), False, 'import os\n'), ((1160, 1207), 'os.path.join', 'os.path.join', (['self.outdir', '"""choices_multimatch"""'], {}), "(self.outdir, 'choices_multimatch')\n", (1172, 1207), False, 'import os\n'), ((1231, 1332), 'pyfuzzyrenamer.args.theArgsParser.parse_args', 'args.theArgsParser.parse_args', (["['--sources', sourcesDir, '--choices', choicesDir, 'report_match']"], {}), "(['--sources', sourcesDir, '--choices',\n choicesDir, 'report_match'])\n", (1260, 1332), False, 'from pyfuzzyrenamer import args, config, filters, main_listctrl, main_dlg, masks\n'), ((2251, 2278), 'os.path.exists', 'os.path.exists', (['self.outdir'], {}), '(self.outdir)\n', (2265, 2278), False, 'import os\n'), ((2445, 2492), 'os.path.join', 'os.path.join', (['self.outdir', '"""sources_multimatch"""'], {}), "(self.outdir, 'sources_multimatch')\n", (2457, 2492), False, 'import os\n'), ((2514, 2561), 'os.path.join', 'os.path.join', (['self.outdir', '"""choices_multimatch"""'], {}), "(self.outdir, 'choices_multimatch')\n", (2526, 2561), False, 'import os\n'), ((2585, 2688), 'pyfuzzyrenamer.args.theArgsParser.parse_args', 'args.theArgsParser.parse_args', (["['--sources', sourcesDir, '--choices', choicesDir, 'preview_rename']"], {}), "(['--sources', sourcesDir, '--choices',\n choicesDir, 'preview_rename'])\n", (2614, 2688), False, 'from pyfuzzyrenamer import args, config, filters, main_listctrl, main_dlg, masks\n'), ((5174, 5201), 'os.path.exists', 'os.path.exists', (['self.outdir'], {}), '(self.outdir)\n', (5188, 5201), False, 'import os\n'), ((5368, 5415), 'os.path.join', 'os.path.join', (['self.outdir', 
'"""sources_multimatch"""'], {}), "(self.outdir, 'sources_multimatch')\n", (5380, 5415), False, 'import os\n'), ((5437, 5484), 'os.path.join', 'os.path.join', (['self.outdir', '"""choices_multimatch"""'], {}), "(self.outdir, 'choices_multimatch')\n", (5449, 5484), False, 'import os\n'), ((5508, 5611), 'pyfuzzyrenamer.args.theArgsParser.parse_args', 'args.theArgsParser.parse_args', (["['--sources', sourcesDir, '--choices', choicesDir, 'preview_rename']"], {}), "(['--sources', sourcesDir, '--choices',\n choicesDir, 'preview_rename'])\n", (5537, 5611), False, 'from pyfuzzyrenamer import args, config, filters, main_listctrl, main_dlg, masks\n'), ((7684, 7711), 'os.path.exists', 'os.path.exists', (['self.outdir'], {}), '(self.outdir)\n', (7698, 7711), False, 'import os\n'), ((7878, 7925), 'os.path.join', 'os.path.join', (['self.outdir', '"""sources_multimatch"""'], {}), "(self.outdir, 'sources_multimatch')\n", (7890, 7925), False, 'import os\n'), ((7947, 7994), 'os.path.join', 'os.path.join', (['self.outdir', '"""choices_multimatch"""'], {}), "(self.outdir, 'choices_multimatch')\n", (7959, 7994), False, 'import os\n'), ((8018, 8113), 'pyfuzzyrenamer.args.theArgsParser.parse_args', 'args.theArgsParser.parse_args', (["['--sources', sourcesDir, '--choices', choicesDir, 'rename']"], {}), "(['--sources', sourcesDir, '--choices',\n choicesDir, 'rename'])\n", (8047, 8113), False, 'from pyfuzzyrenamer import args, config, filters, main_listctrl, main_dlg, masks\n'), ((525, 537), 'pyfuzzyrenamer.config.get_config', 'get_config', ([], {}), '()\n', (535, 537), False, 'from pyfuzzyrenamer.config import get_config\n'), ((561, 573), 'pyfuzzyrenamer.config.get_config', 'get_config', ([], {}), '()\n', (571, 573), False, 'from pyfuzzyrenamer.config import get_config\n'), ((607, 619), 'pyfuzzyrenamer.config.get_config', 'get_config', ([], {}), '()\n', (617, 619), False, 'from pyfuzzyrenamer.config import get_config\n'), ((653, 665), 'pyfuzzyrenamer.config.get_config', 'get_config', ([], 
{}), '()\n', (663, 665), False, 'from pyfuzzyrenamer.config import get_config\n'), ((938, 964), 'shutil.rmtree', 'shutil.rmtree', (['self.outdir'], {}), '(self.outdir)\n', (951, 964), False, 'import shutil\n'), ((1343, 1356), 'io.StringIO', 'io.StringIO', ([], {}), '()\n', (1354, 1356), False, 'import io\n'), ((1365, 1385), 'contextlib.redirect_stdout', 'redirect_stdout', (['buf'], {}), '(buf)\n', (1380, 1385), False, 'from contextlib import redirect_stdout\n'), ((1407, 1427), 'pyfuzzyrenamer.main_dlg.MainFrame', 'main_dlg.MainFrame', ([], {}), '()\n', (1425, 1427), False, 'from pyfuzzyrenamer import args, config, filters, main_listctrl, main_dlg, masks\n'), ((1440, 1466), 'shutil.rmtree', 'shutil.rmtree', (['self.outdir'], {}), '(self.outdir)\n', (1453, 1466), False, 'import shutil\n'), ((1971, 1983), 'pyfuzzyrenamer.config.get_config', 'get_config', ([], {}), '()\n', (1981, 1983), False, 'from pyfuzzyrenamer.config import get_config\n'), ((2007, 2019), 'pyfuzzyrenamer.config.get_config', 'get_config', ([], {}), '()\n', (2017, 2019), False, 'from pyfuzzyrenamer.config import get_config\n'), ((2292, 2318), 'shutil.rmtree', 'shutil.rmtree', (['self.outdir'], {}), '(self.outdir)\n', (2305, 2318), False, 'import shutil\n'), ((2699, 2712), 'io.StringIO', 'io.StringIO', ([], {}), '()\n', (2710, 2712), False, 'import io\n'), ((2721, 2741), 'contextlib.redirect_stdout', 'redirect_stdout', (['buf'], {}), '(buf)\n', (2736, 2741), False, 'from contextlib import redirect_stdout\n'), ((2763, 2783), 'pyfuzzyrenamer.main_dlg.MainFrame', 'main_dlg.MainFrame', ([], {}), '()\n', (2781, 2783), False, 'from pyfuzzyrenamer import args, config, filters, main_listctrl, main_dlg, masks\n'), ((2796, 2822), 'shutil.rmtree', 'shutil.rmtree', (['self.outdir'], {}), '(self.outdir)\n', (2809, 2822), False, 'import shutil\n'), ((4837, 4849), 'pyfuzzyrenamer.config.get_config', 'get_config', ([], {}), '()\n', (4847, 4849), False, 'from pyfuzzyrenamer.config import get_config\n'), ((4873, 4885), 
'pyfuzzyrenamer.config.get_config', 'get_config', ([], {}), '()\n', (4883, 4885), False, 'from pyfuzzyrenamer.config import get_config\n'), ((4930, 4942), 'pyfuzzyrenamer.config.get_config', 'get_config', ([], {}), '()\n', (4940, 4942), False, 'from pyfuzzyrenamer.config import get_config\n'), ((5215, 5241), 'shutil.rmtree', 'shutil.rmtree', (['self.outdir'], {}), '(self.outdir)\n', (5228, 5241), False, 'import shutil\n'), ((5622, 5635), 'io.StringIO', 'io.StringIO', ([], {}), '()\n', (5633, 5635), False, 'import io\n'), ((5644, 5664), 'contextlib.redirect_stdout', 'redirect_stdout', (['buf'], {}), '(buf)\n', (5659, 5664), False, 'from contextlib import redirect_stdout\n'), ((5686, 5706), 'pyfuzzyrenamer.main_dlg.MainFrame', 'main_dlg.MainFrame', ([], {}), '()\n', (5704, 5706), False, 'from pyfuzzyrenamer import args, config, filters, main_listctrl, main_dlg, masks\n'), ((5719, 5745), 'shutil.rmtree', 'shutil.rmtree', (['self.outdir'], {}), '(self.outdir)\n', (5732, 5745), False, 'import shutil\n'), ((7358, 7370), 'pyfuzzyrenamer.config.get_config', 'get_config', ([], {}), '()\n', (7368, 7370), False, 'from pyfuzzyrenamer.config import get_config\n'), ((7394, 7406), 'pyfuzzyrenamer.config.get_config', 'get_config', ([], {}), '()\n', (7404, 7406), False, 'from pyfuzzyrenamer.config import get_config\n'), ((7440, 7452), 'pyfuzzyrenamer.config.get_config', 'get_config', ([], {}), '()\n', (7450, 7452), False, 'from pyfuzzyrenamer.config import get_config\n'), ((7725, 7751), 'shutil.rmtree', 'shutil.rmtree', (['self.outdir'], {}), '(self.outdir)\n', (7738, 7751), False, 'import shutil\n'), ((8124, 8137), 'io.StringIO', 'io.StringIO', ([], {}), '()\n', (8135, 8137), False, 'import io\n'), ((8146, 8166), 'contextlib.redirect_stdout', 'redirect_stdout', (['buf'], {}), '(buf)\n', (8161, 8166), False, 'from contextlib import redirect_stdout\n'), ((8188, 8208), 'pyfuzzyrenamer.main_dlg.MainFrame', 'main_dlg.MainFrame', ([], {}), '()\n', (8206, 8208), False, 'from 
pyfuzzyrenamer import args, config, filters, main_listctrl, main_dlg, masks\n'), ((8544, 8570), 'shutil.rmtree', 'shutil.rmtree', (['self.outdir'], {}), '(self.outdir)\n', (8557, 8570), False, 'import shutil\n'), ((775, 787), 'pyfuzzyrenamer.config.get_config', 'get_config', ([], {}), '()\n', (785, 787), False, 'from pyfuzzyrenamer.config import get_config\n'), ((860, 872), 'pyfuzzyrenamer.config.get_config', 'get_config', ([], {}), '()\n', (870, 872), False, 'from pyfuzzyrenamer.config import get_config\n'), ((2129, 2141), 'pyfuzzyrenamer.config.get_config', 'get_config', ([], {}), '()\n', (2139, 2141), False, 'from pyfuzzyrenamer.config import get_config\n'), ((2214, 2226), 'pyfuzzyrenamer.config.get_config', 'get_config', ([], {}), '()\n', (2224, 2226), False, 'from pyfuzzyrenamer.config import get_config\n'), ((5052, 5064), 'pyfuzzyrenamer.config.get_config', 'get_config', ([], {}), '()\n', (5062, 5064), False, 'from pyfuzzyrenamer.config import get_config\n'), ((5137, 5149), 'pyfuzzyrenamer.config.get_config', 'get_config', ([], {}), '()\n', (5147, 5149), False, 'from pyfuzzyrenamer.config import get_config\n'), ((7562, 7574), 'pyfuzzyrenamer.config.get_config', 'get_config', ([], {}), '()\n', (7572, 7574), False, 'from pyfuzzyrenamer.config import get_config\n'), ((7647, 7659), 'pyfuzzyrenamer.config.get_config', 'get_config', ([], {}), '()\n', (7657, 7659), False, 'from pyfuzzyrenamer.config import get_config\n'), ((1018, 1043), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (1033, 1043), False, 'import os\n'), ((2372, 2397), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (2387, 2397), False, 'import os\n'), ((4673, 4734), 'os.path.join', 'os.path.join', (['sourcesDir', '"""Volutaria tubuliflora_disk1.txt\n"""'], {}), "(sourcesDir, 'Volutaria tubuliflora_disk1.txt\\n')\n", (4685, 4734), False, 'import os\n'), ((5295, 5320), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (5310, 
5320), False, 'import os\n'), ((7216, 7277), 'os.path.join', 'os.path.join', (['sourcesDir', '"""Volutaria tubuliflora_disk1.txt\n"""'], {}), "(sourcesDir, 'Volutaria tubuliflora_disk1.txt\\n')\n", (7228, 7277), False, 'import os\n'), ((7805, 7830), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (7820, 7830), False, 'import os\n'), ((4563, 4628), 'os.path.join', 'os.path.join', (['sourcesDir', '"""Volutaire à fleurs tubulées_disk1.txt"""'], {}), "(sourcesDir, 'Volutaire à fleurs tubulées_disk1.txt')\n", (4575, 4628), False, 'import os\n'), ((7106, 7171), 'os.path.join', 'os.path.join', (['sourcesDir', '"""Volutaire à fleurs tubulées_disk1.txt"""'], {}), "(sourcesDir, 'Volutaire à fleurs tubulées_disk1.txt')\n", (7118, 7171), False, 'import os\n'), ((4459, 4512), 'os.path.join', 'os.path.join', (['sourcesDir', '"""Viola cornuta_disk1.txt\n"""'], {}), "(sourcesDir, 'Viola cornuta_disk1.txt\\n')\n", (4471, 4512), False, 'import os\n'), ((7002, 7055), 'os.path.join', 'os.path.join', (['sourcesDir', '"""Viola cornuta_disk1.txt\n"""'], {}), "(sourcesDir, 'Viola cornuta_disk1.txt\\n')\n", (7014, 7055), False, 'import os\n'), ((8267, 8314), 'os.path.join', 'os.path.join', (['self.outdir', '"""sources_multimatch"""'], {}), "(self.outdir, 'sources_multimatch')\n", (8279, 8314), False, 'import os\n'), ((4361, 4414), 'os.path.join', 'os.path.join', (['sourcesDir', '"""Violette cornue_disk1.txt"""'], {}), "(sourcesDir, 'Violette cornue_disk1.txt')\n", (4373, 4414), False, 'import os\n'), ((6904, 6957), 'os.path.join', 'os.path.join', (['sourcesDir', '"""Violette cornue_disk1.txt"""'], {}), "(sourcesDir, 'Violette cornue_disk1.txt')\n", (6916, 6957), False, 'import os\n'), ((4254, 4310), 'os.path.join', 'os.path.join', (['sourcesDir', '"""Aconitum anthora_disk3.txt\n"""'], {}), "(sourcesDir, 'Aconitum anthora_disk3.txt\\n')\n", (4266, 4310), False, 'import os\n'), ((6797, 6853), 'os.path.join', 'os.path.join', (['sourcesDir', '"""Aconitum 
anthora_disk3.txt\n"""'], {}), "(sourcesDir, 'Aconitum anthora_disk3.txt\\n')\n", (6809, 6853), False, 'import os\n'), ((4156, 4209), 'os.path.join', 'os.path.join', (['sourcesDir', '"""Aconit vénéneux_disk3.txt"""'], {}), "(sourcesDir, 'Aconit vénéneux_disk3.txt')\n", (4168, 4209), False, 'import os\n'), ((6699, 6752), 'os.path.join', 'os.path.join', (['sourcesDir', '"""Aconit vénéneux_disk3.txt"""'], {}), "(sourcesDir, 'Aconit vénéneux_disk3.txt')\n", (6711, 6752), False, 'import os\n'), ((4049, 4105), 'os.path.join', 'os.path.join', (['sourcesDir', '"""Aconitum anthora_disk1.txt\n"""'], {}), "(sourcesDir, 'Aconitum anthora_disk1.txt\\n')\n", (4061, 4105), False, 'import os\n'), ((6592, 6648), 'os.path.join', 'os.path.join', (['sourcesDir', '"""Aconitum anthora_disk1.txt\n"""'], {}), "(sourcesDir, 'Aconitum anthora_disk1.txt\\n')\n", (6604, 6648), False, 'import os\n'), ((3951, 4004), 'os.path.join', 'os.path.join', (['sourcesDir', '"""Aconit vénéneux_disk1.txt"""'], {}), "(sourcesDir, 'Aconit vénéneux_disk1.txt')\n", (3963, 4004), False, 'import os\n'), ((6494, 6547), 'os.path.join', 'os.path.join', (['sourcesDir', '"""Aconit vénéneux_disk1.txt"""'], {}), "(sourcesDir, 'Aconit vénéneux_disk1.txt')\n", (6506, 6547), False, 'import os\n'), ((3844, 3900), 'os.path.join', 'os.path.join', (['sourcesDir', '"""Aconitum anthora_disk2.txt\n"""'], {}), "(sourcesDir, 'Aconitum anthora_disk2.txt\\n')\n", (3856, 3900), False, 'import os\n'), ((6393, 6443), 'os.path.join', 'os.path.join', (['sourcesDir', '"""Aconitum anthora.txt\n"""'], {}), "(sourcesDir, 'Aconitum anthora.txt\\n')\n", (6405, 6443), False, 'import os\n'), ((3751, 3799), 'os.path.join', 'os.path.join', (['sourcesDir', '"""Aconitum anthora.txt"""'], {}), "(sourcesDir, 'Aconitum anthora.txt')\n", (3763, 3799), False, 'import os\n'), ((6301, 6348), 'os.path.join', 'os.path.join', (['sourcesDir', '"""Aconit vénéneux.txt"""'], {}), "(sourcesDir, 'Aconit vénéneux.txt')\n", (6313, 6348), False, 'import os\n'), 
((3651, 3701), 'os.path.join', 'os.path.join', (['sourcesDir', '"""Aconitum anthora.txt\n"""'], {}), "(sourcesDir, 'Aconitum anthora.txt\\n')\n", (3663, 3701), False, 'import os\n'), ((6199, 6250), 'os.path.join', 'os.path.join', (['sourcesDir', '"""Acanthus spinosus.txt\n"""'], {}), "(sourcesDir, 'Acanthus spinosus.txt\\n')\n", (6211, 6250), False, 'import os\n'), ((3559, 3606), 'os.path.join', 'os.path.join', (['sourcesDir', '"""Aconit vénéneux.txt"""'], {}), "(sourcesDir, 'Aconit vénéneux.txt')\n", (3571, 3606), False, 'import os\n'), ((6106, 6154), 'os.path.join', 'os.path.join', (['sourcesDir', '"""Acanthe épineuse.txt"""'], {}), "(sourcesDir, 'Acanthe épineuse.txt')\n", (6118, 6154), False, 'import os\n'), ((3451, 3508), 'os.path.join', 'os.path.join', (['sourcesDir', '"""Acanthus spinosus_disk2.txt\n"""'], {}), "(sourcesDir, 'Acanthus spinosus_disk2.txt\\n')\n", (3463, 3508), False, 'import os\n'), ((6000, 6055), 'os.path.join', 'os.path.join', (['sourcesDir', '"""Acanthus mollis_disk2.txt\n"""'], {}), "(sourcesDir, 'Acanthus mollis_disk2.txt\\n')\n", (6012, 6055), False, 'import os\n'), ((3351, 3406), 'os.path.join', 'os.path.join', (['sourcesDir', '"""Acanthus spinosus_disk1.txt"""'], {}), "(sourcesDir, 'Acanthus spinosus_disk1.txt')\n", (3363, 3406), False, 'import os\n'), ((5892, 5955), 'os.path.join', 'os.path.join', (['sourcesDir', '"""Acanthe à feuilles molles_disk2.txt"""'], {}), "(sourcesDir, 'Acanthe à feuilles molles_disk2.txt')\n", (5904, 5955), False, 'import os\n'), ((3244, 3301), 'os.path.join', 'os.path.join', (['sourcesDir', '"""Acanthus spinosus_disk1.txt\n"""'], {}), "(sourcesDir, 'Acanthus spinosus_disk1.txt\\n')\n", (3256, 3301), False, 'import os\n'), ((3151, 3199), 'os.path.join', 'os.path.join', (['sourcesDir', '"""Acanthe épineuse.txt"""'], {}), "(sourcesDir, 'Acanthe épineuse.txt')\n", (3163, 3199), False, 'import os\n'), ((3045, 3100), 'os.path.join', 'os.path.join', (['sourcesDir', '"""Acanthus mollis_disk2.txt\n"""'], {}), 
"(sourcesDir, 'Acanthus mollis_disk2.txt\\n')\n", (3057, 3100), False, 'import os\n'), ((2937, 3000), 'os.path.join', 'os.path.join', (['sourcesDir', '"""Acanthe à feuilles molles_disk2.txt"""'], {}), "(sourcesDir, 'Acanthe à feuilles molles_disk2.txt')\n", (2949, 3000), False, 'import os\n')]
|
# Generated by Django 3.1.5 on 2021-04-27 15:08
from django.conf import settings
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add a self-referential many-to-many ``subscribers`` field to User."""
    dependencies = [
        ('authentication', '0001_initial'),
    ]
    operations = [
        migrations.AddField(
            model_name='user',
            name='subscribers',
            # related_name ending in '+' disables the reverse accessor.
            # NOTE(review): verbose_name 'Subscibers' is misspelled; do not
            # edit this historical migration — fix the model and generate a
            # follow-up migration instead.
            field=models.ManyToManyField(blank=True, related_name='_user_subscribers_+', to=settings.AUTH_USER_MODEL, verbose_name='Subscibers'),
        ),
    ]
|
[
"django.db.models.ManyToManyField"
] |
[((366, 497), 'django.db.models.ManyToManyField', 'models.ManyToManyField', ([], {'blank': '(True)', 'related_name': '"""_user_subscribers_+"""', 'to': 'settings.AUTH_USER_MODEL', 'verbose_name': '"""Subscibers"""'}), "(blank=True, related_name='_user_subscribers_+', to=\n settings.AUTH_USER_MODEL, verbose_name='Subscibers')\n", (388, 497), False, 'from django.db import migrations, models\n')]
|
# Copyright 2014 Rackspace Inc.
#
# Author: <NAME> <<EMAIL>>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
from designate.utils import DEFAULT_AGENT_PORT
# Option group for the agent service; all options below are registered under
# the "[service:agent]" section of the config file.
AGENT_GROUP = cfg.OptGroup(
    name='service:agent',
    title="Configuration for the Agent Service"
)
# oslo.config option definitions for the agent service.
AGENT_OPTS = [
    cfg.IntOpt('workers',
               help='Number of agent worker processes to spawn'),
    cfg.IntOpt('threads', default=1000,
               help='Number of agent greenthreads to spawn'),
    cfg.ListOpt('listen',
                default=['0.0.0.0:%d' % DEFAULT_AGENT_PORT],
                help='Agent host:port pairs to listen on'),
    cfg.IntOpt('tcp_backlog', default=100,
               help='The Agent TCP Backlog'),
    cfg.FloatOpt('tcp_recv_timeout', default=0.5,
                 help='Agent TCP Receive Timeout'),
    cfg.ListOpt('allow_notify', default=[],
                help='List of IP addresses allowed to NOTIFY The Agent'),
    cfg.ListOpt('masters', default=[],
                help='List of masters for the Agent, format ip:port'),
    cfg.StrOpt('backend_driver', default='bind9',
               help='The backend driver to use, e.g. bind9, djbdns, knot2'),
    cfg.StrOpt('transfer_source',
               help='An IP address to be used to fetch zones transferred in'),
    cfg.FloatOpt('notify_delay', default=0.0,
                 help='Delay after a NOTIFY arrives for a zone that the Agent '
                      'will pause and drop subsequent NOTIFYs for that zone'),
]
def register_opts(conf):
    """Register the agent option group and its options on *conf*.

    :param conf: an ``oslo_config.cfg.ConfigOpts`` instance
    """
    conf.register_group(AGENT_GROUP)
    conf.register_opts(AGENT_OPTS, group=AGENT_GROUP)
def list_opts():
    """Expose this module's options, keyed by their option group."""
    return {AGENT_GROUP: AGENT_OPTS}
|
[
"oslo_config.cfg.OptGroup",
"oslo_config.cfg.StrOpt",
"oslo_config.cfg.FloatOpt",
"oslo_config.cfg.IntOpt",
"oslo_config.cfg.ListOpt"
] |
[((699, 778), 'oslo_config.cfg.OptGroup', 'cfg.OptGroup', ([], {'name': '"""service:agent"""', 'title': '"""Configuration for the Agent Service"""'}), "(name='service:agent', title='Configuration for the Agent Service')\n", (711, 778), False, 'from oslo_config import cfg\n'), ((809, 880), 'oslo_config.cfg.IntOpt', 'cfg.IntOpt', (['"""workers"""'], {'help': '"""Number of agent worker processes to spawn"""'}), "('workers', help='Number of agent worker processes to spawn')\n", (819, 880), False, 'from oslo_config import cfg\n'), ((901, 987), 'oslo_config.cfg.IntOpt', 'cfg.IntOpt', (['"""threads"""'], {'default': '(1000)', 'help': '"""Number of agent greenthreads to spawn"""'}), "('threads', default=1000, help=\n 'Number of agent greenthreads to spawn')\n", (911, 987), False, 'from oslo_config import cfg\n'), ((1003, 1117), 'oslo_config.cfg.ListOpt', 'cfg.ListOpt', (['"""listen"""'], {'default': "['0.0.0.0:%d' % DEFAULT_AGENT_PORT]", 'help': '"""Agent host:port pairs to listen on"""'}), "('listen', default=['0.0.0.0:%d' % DEFAULT_AGENT_PORT], help=\n 'Agent host:port pairs to listen on')\n", (1014, 1117), False, 'from oslo_config import cfg\n'), ((1150, 1218), 'oslo_config.cfg.IntOpt', 'cfg.IntOpt', (['"""tcp_backlog"""'], {'default': '(100)', 'help': '"""The Agent TCP Backlog"""'}), "('tcp_backlog', default=100, help='The Agent TCP Backlog')\n", (1160, 1218), False, 'from oslo_config import cfg\n'), ((1239, 1318), 'oslo_config.cfg.FloatOpt', 'cfg.FloatOpt', (['"""tcp_recv_timeout"""'], {'default': '(0.5)', 'help': '"""Agent TCP Receive Timeout"""'}), "('tcp_recv_timeout', default=0.5, help='Agent TCP Receive Timeout')\n", (1251, 1318), False, 'from oslo_config import cfg\n'), ((1341, 1442), 'oslo_config.cfg.ListOpt', 'cfg.ListOpt', (['"""allow_notify"""'], {'default': '[]', 'help': '"""List of IP addresses allowed to NOTIFY The Agent"""'}), "('allow_notify', default=[], help=\n 'List of IP addresses allowed to NOTIFY The Agent')\n", (1352, 1442), False, 'from 
oslo_config import cfg\n'), ((1459, 1552), 'oslo_config.cfg.ListOpt', 'cfg.ListOpt', (['"""masters"""'], {'default': '[]', 'help': '"""List of masters for the Agent, format ip:port"""'}), "('masters', default=[], help=\n 'List of masters for the Agent, format ip:port')\n", (1470, 1552), False, 'from oslo_config import cfg\n'), ((1569, 1680), 'oslo_config.cfg.StrOpt', 'cfg.StrOpt', (['"""backend_driver"""'], {'default': '"""bind9"""', 'help': '"""The backend driver to use, e.g. bind9, djbdns, knot2"""'}), "('backend_driver', default='bind9', help=\n 'The backend driver to use, e.g. bind9, djbdns, knot2')\n", (1579, 1680), False, 'from oslo_config import cfg\n'), ((1696, 1793), 'oslo_config.cfg.StrOpt', 'cfg.StrOpt', (['"""transfer_source"""'], {'help': '"""An IP address to be used to fetch zones transferred in"""'}), "('transfer_source', help=\n 'An IP address to be used to fetch zones transferred in')\n", (1706, 1793), False, 'from oslo_config import cfg\n'), ((1809, 1976), 'oslo_config.cfg.FloatOpt', 'cfg.FloatOpt', (['"""notify_delay"""'], {'default': '(0.0)', 'help': '"""Delay after a NOTIFY arrives for a zone that the Agent will pause and drop subsequent NOTIFYs for that zone"""'}), "('notify_delay', default=0.0, help=\n 'Delay after a NOTIFY arrives for a zone that the Agent will pause and drop subsequent NOTIFYs for that zone'\n )\n", (1821, 1976), False, 'from oslo_config import cfg\n')]
|
import datetime
import logging
import localflavor
from paying_for_college.models.disclosures import (
DEFAULT_EXCLUSIONS, HIGHEST_DEGREES, School
)
# Two-letter abbreviations for contiguous and non-contiguous US states,
# plus Puerto Rico, sorted alphabetically.
STATES = sorted(
    [tup[0] for tup in localflavor.us.us_states.CONTIGUOUS_STATES] +
    [tup[0] for tup in localflavor.us.us_states.NON_CONTIGUOUS_STATES] +
    ['PR']
)
# One (initially empty) national cohort list per highest-degree level key.
DEGREE_COHORTS = {k: [] for k in HIGHEST_DEGREES.keys()}
logger = logging.getLogger(__name__)
def get_grad_level(school):
    """Return the school's highest-degree level as a string key.

    Degrees higher than graduate level '4' are folded into '4' so that
    all graduate-and-above schools share one cohort.
    """
    highest = school.degrees_highest
    return '4' if int(highest) > 4 else highest
def build_base_cohorts():
    """
    Pre-build the base highest-degree cohorts.

    DEFAULT_EXCLUSIONS are the primary keys for the home offices of schools
    or school systems, plus our fake demo school, 999999.
    """
    global DEGREE_COHORTS
    base_query = (
        School.objects.filter(operating=True, state__in=STATES)
        .exclude(pk__in=DEFAULT_EXCLUSIONS)
        .exclude(degrees_highest='')
    )
    # Group each school into its highest-degree cohort in a single pass;
    # schools whose level has no cohort key are dropped, as before.
    for school in base_query:
        level = get_grad_level(school)
        if level in DEGREE_COHORTS:
            DEGREE_COHORTS[level].append(school)
    return base_query
def calculate_percentile_rank(array, score):
    """Get a school score's percentile rank from an array of cohort scores.

    Returns None when the cohort is empty.
    """
    if not array:
        return None
    hits = sum(1 for value in array if value <= score)
    return int(round(hits / len(array) * 100))
def rank_by_metric(school, cohort, metric):
    """Rank *school* against *cohort* on a single metric.

    Returns a dict with the cohort size (schools that actually report the
    metric) and the school's percentile rank within it.
    """
    cohort_scores = [
        float(getattr(s, metric))
        for s in cohort
        if getattr(s, metric) is not None
    ]
    school_score = float(getattr(school, metric))
    return {
        'cohort_count': len(cohort_scores),
        'percentile_rank': calculate_percentile_rank(cohort_scores, school_score),
    }
def run(single_school=None):
    """Get percentile rankings for schools by degree, control, and state.

    :param single_school: optional primary key; when given, only that school
        is (re)ranked, but cohorts are still built from the full base query.
    """
    count = 0
    starter = datetime.datetime.now()
    # Cohorts must come from all schools even when ranking one school,
    # so build them first and filter afterwards.
    base_query = build_base_cohorts()
    if single_school:
        base_query = base_query.filter(pk=single_school)
    for school in base_query:
        by_degree = {}
        by_state = {}
        by_control = {}
        count += 1
        if count % 500 == 0:  # pragma: no cover
            logger.info("{} schools processed".format(count))
        # degree_cohort is the default, national base cohort
        # base query weeds out schools without state or degrees_highest values
        degree_cohort = DEGREE_COHORTS.get(get_grad_level(school))
        # State cohort: same-degree-level schools in the same state.
        state_cohort = [
            s for s in degree_cohort
            if s
            and s.state
            and s.state == school.state
        ]
        # For school control, we want cohorts only for public and private;
        # We do not want a special cohort of for-profit schools
        if not school.control:
            control_cohort = None
        elif school.control == 'Public':
            control_cohort = [
                s for s in degree_cohort if s.control == school.control
            ]
        else:
            # Private and for-profit schools are ranked against all non-public.
            control_cohort = [
                s for s in degree_cohort if s.control != 'Public'
            ]
        for metric in ['grad_rate', 'repay_3yr', 'median_total_debt']:
            if getattr(school, metric) is None:
                # No value for this metric: record None so consumers can
                # distinguish "no data" from a low rank.
                by_state.update({metric: None})
                by_control.update({metric: None})
                by_degree.update({metric: None})
            else:
                if state_cohort:
                    by_state.update({
                        metric: rank_by_metric(school, state_cohort, metric)
                    })
                if control_cohort:
                    by_control.update({
                        metric: rank_by_metric(school, control_cohort, metric)
                    })
                if degree_cohort:
                    by_degree.update({
                        metric: rank_by_metric(school, degree_cohort, metric)
                    })
        school.cohort_ranking_by_state = by_state
        school.cohort_ranking_by_control = by_control
        school.cohort_ranking_by_highest_degree = by_degree
        school.save()
    logger.info("\nCohort script took {} to process {} schools".format(
        datetime.datetime.now() - starter,
        count
    ))
|
[
"paying_for_college.models.disclosures.HIGHEST_DEGREES.keys",
"paying_for_college.models.disclosures.School.objects.filter",
"datetime.datetime.now",
"logging.getLogger"
] |
[((394, 421), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (411, 421), False, 'import logging\n'), ((2182, 2205), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (2203, 2205), False, 'import datetime\n'), ((361, 383), 'paying_for_college.models.disclosures.HIGHEST_DEGREES.keys', 'HIGHEST_DEGREES.keys', ([], {}), '()\n', (381, 383), False, 'from paying_for_college.models.disclosures import DEFAULT_EXCLUSIONS, HIGHEST_DEGREES, School\n'), ((4471, 4494), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (4492, 4494), False, 'import datetime\n'), ((907, 962), 'paying_for_college.models.disclosures.School.objects.filter', 'School.objects.filter', ([], {'operating': '(True)', 'state__in': 'STATES'}), '(operating=True, state__in=STATES)\n', (928, 962), False, 'from paying_for_college.models.disclosures import DEFAULT_EXCLUSIONS, HIGHEST_DEGREES, School\n')]
|
import sqlite3
class DBase:
    """Minimal read helper around a SQLite database file.

    Usable as a context manager so the cursor and connection are closed
    when the block exits.
    """

    def __init__(self, db_file):
        """Open a connection to *db_file* and create a cursor."""
        self.conn = sqlite3.connect(db_file)
        self.cur = self.conn.cursor()

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Close the cursor before the connection; guard both in case
        # construction failed part-way through.
        if self.cur:
            self.cur.close()
        if self.conn:
            self.conn.close()

    def query(self, hash_id, definition, identifier):
        """Return all ``json`` values from table *definition* where column
        *identifier* equals *hash_id*.

        *definition* and *identifier* name a table and a column; SQL
        identifiers cannot be bound parameters, so they are interpolated
        into the statement text and MUST come from trusted code, never
        from user input. *hash_id* is a value and is passed as a bound
        ``?`` parameter (the original interpolated it directly into the
        SQL string, which is injection-prone and breaks on strings).
        """
        sql = """
        SELECT json FROM {}
        WHERE {} = ?
        """
        self.cur.execute(sql.format(definition, identifier), (hash_id,))
        return self.cur.fetchall()
|
[
"sqlite3.connect"
] |
[((84, 108), 'sqlite3.connect', 'sqlite3.connect', (['db_file'], {}), '(db_file)\n', (99, 108), False, 'import sqlite3\n')]
|
import torch
import numpy as np
class Network(torch.nn.Module):
    """Torch module assembled from a graph description.

    ``structure`` supplies ``matrix`` (an adjacency matrix of layer
    connections) and ``layers_index_reverse`` (index -> Layer instance);
    layer 0 is treated as the entry point. Layers are initialised and
    executed in topological order by repeatedly deferring any layer whose
    inputs are not ready yet.
    """
    def __init__(self, structure):
        super(Network, self).__init__()
        self.structure = structure
        self.layers_pool_inited = self.init_layers(self.structure)
    def init_layers(self, structure):
        """Topologically initialise all layers of ``self.structure``.

        Returns a dict mapping layer index to an ``(input-reshapers,
        reshaper, torch-module)`` triple; also records invalid (hanging or
        rank-less) layers in ``self.layers_pool_removed``.
        """
        # pool of layers, which should be initialised and connected
        layers_pool = [0]
        # pool of initialised layers
        layers_pool_inited = {}
        # pool of broken (invalid) layers) such as inconsistent number of dimensions
        layers_pool_removed = []
        while layers_pool:
            # take first layer in a pool
            layer_index = layers_pool[0]
            # find all connections before this layer
            enter_layers = set(np.where(self.structure.matrix[:, layer_index] == 1)[0])
            # check if some of previous layers were not initialized
            # that means - we should initialise them first
            not_inited_layers = [i for i in enter_layers if i not in (layers_pool_inited.keys())]
            not_inited_layers_selected = [layer for layer in not_inited_layers if layer not in layers_pool_removed]
            if not_inited_layers_selected:
                # remove layers, which are in pool already
                # this is possible due to complex connections with different orders
                not_inited_layers_selected = [layer for layer in not_inited_layers_selected if layer not in layers_pool]
                # add not initialised layers to the pool
                layers_pool.extend(not_inited_layers_selected)
                # current layer should be shift to the end of the queue
                acc = layers_pool.pop(0)
                layers_pool.append(acc)
                continue
            # take Layer instance of the previous layers
            input_layers = [self.structure.layers_index_reverse[layer] for layer in enter_layers]
            # layer without rank is broken and we ignore that
            input_layers = [layer for layer in input_layers if layer.config.get('rank', False)]
            enter_layers = [i for i in enter_layers if i not in layers_pool_removed]
            # if curent layer is the Input - initialise without any input connections
            if not input_layers and self.structure.layers_index_reverse[layer_index].layer_type == 'input':
                inited_layer = (None, None, self.structure.layers_index_reverse[layer_index].init_layer(None))
            # detect hanging node - some of mutations could remove connection to the layer
            elif not input_layers:
                layers_pool_removed.append(layers_pool.pop(0))
                continue
            # if there are multiple input connections
            elif len(input_layers) > 1:
                # this case does not require additional processing - all logic is inside Layer instance,
                # which handles multiple connections
                inited_layer = self.structure.layers_index_reverse[layer_index]([None for _ in range(len(input_layers))], input_layers)
            else:
                # NOTE(review): input_layers_inited is computed but unused.
                input_layers_inited = [layers_pool_inited[layer] for layer in enter_layers][0]
                inited_layer = self.structure.layers_index_reverse[layer_index](None, input_layers[0])
            # add new initialised layer
            layers_pool_inited[layer_index] = inited_layer
            # register the torch module as an attribute so nn.Module tracks its parameters
            setattr(self, 'layer_{}'.format(layer_index), inited_layer[2])
            # find outgoing connections and add them to the pool
            output_layers = [layer for layer in np.where(self.structure.matrix[layer_index] == 1)[0]
                             if layer not in layers_pool and layer not in layers_pool_inited.keys()]
            layers_pool.extend(output_layers)
            # remove current layer from the pool
            layers_pool.pop(layers_pool.index(layer_index))
        self.layers_pool_removed = layers_pool_removed
        return layers_pool_inited
    def forward(self, x):
        """Run *x* through the layer graph in topological order.

        ``buffer_x`` caches each layer's output keyed by layer index
        (key -1 holds the network input); the return value is the output
        of the last layer processed.
        """
        # pool of layers, which should be initialised and connected
        layers_pool = [0]
        buffer_x = {-1: x}
        last_value = None
        while layers_pool:
            # take first layer in a pool
            layer_index = layers_pool[0]
            # find all connections before this layer
            enter_layers = set(np.where(self.structure.matrix[:, layer_index] == 1)[0])
            enter_layers = [i for i in enter_layers if i not in self.layers_pool_removed]
            # check if some of previous layers were not initialized
            # that means - we should initialise them first
            not_inited_layers = [i for i in enter_layers if i not in (buffer_x.keys())]
            not_inited_layers_selected = [layer for layer in not_inited_layers if layer not in self.layers_pool_removed]
            if not_inited_layers_selected:
                # remove layers, which are in pool already
                # this is possible due to complex connections with different orders
                not_inited_layers_selected = [layer for layer in not_inited_layers_selected if layer not in layers_pool]
                # add not initialised layers to the pool
                layers_pool.extend(not_inited_layers_selected)
                # current layer should be shift to the end of the queue
                layers_pool.append(layers_pool.pop(0))
                continue
            # take Layer instance of the previous layers
            temp_x = [buffer_x[layer] for layer in enter_layers]
            # if curent layer is the Input - initialise without any input connections
            if not enter_layers and self.structure.layers_index_reverse[layer_index].layer_type == 'input':
                if self.layers_pool_inited[layer_index][0] is not None:
                    # NOTE(review): raising a str is a TypeError in Python 3;
                    # this should raise an Exception subclass.
                    raise "Input layer is not the first one. Incorrect graph structure"
                if self.layers_pool_inited[layer_index][1] is not None:
                    reshaper = self.layers_pool_inited[layer_index][1]  # .init_layer(None)
                    temp_x = reshaper(buffer_x[-1])
                else:
                    temp_x = buffer_x[-1]
                result_x = self.process_layer_output(self.layers_pool_inited[layer_index][2](temp_x), self.structure.layers_index_reverse[layer_index].layer_type)
                buffer_x[layer_index] = result_x
            # detect hanging node - some of mutations could remove connection to the layer
            elif not enter_layers:
                # NOTE(review): this continues without removing layer_index
                # from layers_pool, so a hanging node here loops forever;
                # confirm this branch is unreachable after init_layers pruning.
                continue
            # if there are multiple input connections
            elif len(enter_layers) > 1:
                if self.layers_pool_inited[layer_index][0] is not None:
                    reshapers = self.layers_pool_inited[layer_index][0][0]
                    axis = self.layers_pool_inited[layer_index][0][1]
                    if reshapers is not None:
                        reshapers = [i.init_layer(None) for i in reshapers]
                        temp_x = [r(temp_x[i]) for i, r in enumerate(reshapers)]
                    # merge all incoming activations along the configured axis
                    temp_x = torch.cat(temp_x, axis)
                if self.layers_pool_inited[layer_index][1] is not None:
                    temp_x = self.layers_pool_inited[layer_index][1](temp_x)
                result_x = self.process_layer_output(self.layers_pool_inited[layer_index][2](temp_x), self.structure.layers_index_reverse[layer_index].layer_type)
                buffer_x[layer_index] = result_x
            else:
                temp_x = temp_x[0]
                if self.layers_pool_inited[layer_index][1] is not None:
                    reshaper = self.layers_pool_inited[layer_index][1]  # .init_layer(None)
                    temp_x = reshaper(temp_x)
                result_x = self.process_layer_output(self.layers_pool_inited[layer_index][2](temp_x), self.structure.layers_index_reverse[layer_index].layer_type)
                buffer_x[layer_index] = result_x
            # find outgoing connections and add them to the pool
            output_layers = [layer for layer in np.where(self.structure.matrix[layer_index] == 1)[0]
                             if layer not in layers_pool and layer not in buffer_x.keys()]
            last_value = result_x
            layers_pool.extend(output_layers)
            # remove current layer from the pool
            layers_pool.pop(layers_pool.index(layer_index))
        return last_value
    def process_layer_output(self, x, layer_type):
        """
        Some layer returns intermediate results, usually we dont need that
        """
        # torch LSTM presumably returns (output, hidden-state); keep the output only
        if layer_type == 'lstm':
            return x[0]
        else:
            return x
def recalculate_shapes(structure):
    """Re-derive layer shapes by walking the graph as ``Network.init_layers``
    does, but calling each Layer with ``init=False`` so no torch modules are
    created. Used after mutations to refresh shape metadata.
    """
    # pool of layers, which should be initialised and connected
    layers_pool = [0]
    # pool of initialised layers
    layers_pool_inited = {}
    # pool of broken (invalid) layers) such as inconsistent number of dimensions
    layers_pool_removed = []
    while layers_pool:
        # take first layer in a pool
        layer_index = layers_pool[0]
        # find all connections before this layer
        enter_layers = set(np.where(structure.matrix[:, layer_index] == 1)[0])
        # check if some of previous layers were not initialized
        # that means - we should initialise them first
        not_inited_layers = [i for i in enter_layers if i not in (layers_pool_inited.keys())]
        not_inited_layers_selected = [layer for layer in not_inited_layers if layer not in layers_pool_removed]
        if not_inited_layers_selected:
            # remove layers, which are in pool already
            # this is possible due to complex connections with different orders
            not_inited_layers_selected = [layer for layer in not_inited_layers_selected if layer not in layers_pool]
            # add not initialised layers to the pool
            layers_pool.extend(not_inited_layers_selected)
            # current layer should be shift to the end of the queue
            acc = layers_pool.pop(0)
            layers_pool.append(acc)
            continue
        # take Layer instance of the previous layers
        input_layers = [structure.layers_index_reverse[layer] for layer in enter_layers]
        # layer without rank is broken and we ignore that
        input_layers = [layer for layer in input_layers if layer.config.get('rank', False)]
        enter_layers = [i for i in enter_layers if i not in layers_pool_removed]
        # if curent layer is the Input - initialise without any input connections
        if not input_layers and structure.layers_index_reverse[layer_index].layer_type == 'input':
            inited_layer = (None, None, None)
        # detect hanging node - some of mutations could remove connection to the layer
        elif not input_layers:
            layers_pool_removed.append(layers_pool.pop(0))
            continue
        # if there are multiple input connections
        elif len(input_layers) > 1:
            # this case does not require additional processing - all logic is inside Layer instance,
            # which handles multiple connections
            inited_layer = structure.layers_index_reverse[layer_index]([None for _ in range(len(input_layers))], input_layers, init=False)
        else:
            # NOTE(review): input_layers_inited is computed but unused.
            input_layers_inited = [layers_pool_inited[layer] for layer in enter_layers][0]
            inited_layer = structure.layers_index_reverse[layer_index](None, input_layers[0], init=False)
        # add new initialised layer
        layers_pool_inited[layer_index] = inited_layer
        # find outgoing connections and add them to the pool
        output_layers = [layer for layer in np.where(structure.matrix[layer_index] == 1)[0]
                         if layer not in layers_pool and layer not in layers_pool_inited.keys()]
        layers_pool.extend(output_layers)
        # remove current layer from the pool
        layers_pool.pop(layers_pool.index(layer_index))
|
[
"numpy.where",
"torch.cat"
] |
[((9206, 9253), 'numpy.where', 'np.where', (['(structure.matrix[:, layer_index] == 1)'], {}), '(structure.matrix[:, layer_index] == 1)\n', (9214, 9253), True, 'import numpy as np\n'), ((768, 820), 'numpy.where', 'np.where', (['(self.structure.matrix[:, layer_index] == 1)'], {}), '(self.structure.matrix[:, layer_index] == 1)\n', (776, 820), True, 'import numpy as np\n'), ((4383, 4435), 'numpy.where', 'np.where', (['(self.structure.matrix[:, layer_index] == 1)'], {}), '(self.structure.matrix[:, layer_index] == 1)\n', (4391, 4435), True, 'import numpy as np\n'), ((11740, 11784), 'numpy.where', 'np.where', (['(structure.matrix[layer_index] == 1)'], {}), '(structure.matrix[layer_index] == 1)\n', (11748, 11784), True, 'import numpy as np\n'), ((3591, 3640), 'numpy.where', 'np.where', (['(self.structure.matrix[layer_index] == 1)'], {}), '(self.structure.matrix[layer_index] == 1)\n', (3599, 3640), True, 'import numpy as np\n'), ((8095, 8144), 'numpy.where', 'np.where', (['(self.structure.matrix[layer_index] == 1)'], {}), '(self.structure.matrix[layer_index] == 1)\n', (8103, 8144), True, 'import numpy as np\n'), ((7117, 7140), 'torch.cat', 'torch.cat', (['temp_x', 'axis'], {}), '(temp_x, axis)\n', (7126, 7140), False, 'import torch\n')]
|
import functools
import logging
from concurrent.futures import ThreadPoolExecutor
from pathlib import Path
from typing import Iterable
import frontmatter
from libcst import AnnAssign, parse_module
from flake8_codes.semanticize import semanticize
from flake8_codes.wemake_python_styleguide.constants.models import (
BodyStatement, GenerationFailed, NotAnAssignment, NotPublicConstant,
WPSConstant)
logger = logging.getLogger(__name__)
def construct_description(statement: BodyStatement) -> str:
    """Fetch description from the ``#:`` comment preceding *statement*.

    Raises ``NotPublicConstant`` when the preceding comment is not a
    ``#:`` documentation comment.
    """
    last_leading_line = statement.leading_lines[-1]
    description = last_leading_line.comment.value
    if not description.startswith('#:'):
        raise NotPublicConstant()
    # str.lstrip('#: ') strips a *character set*, so it would also eat a
    # '#' or ':' that belongs to the description text (e.g. "#: #tag").
    # Strip the exact '#:' prefix instead, then any leading spaces.
    return description[2:].lstrip(' ').replace('``', '`')
def construct_constant(
    statement: BodyStatement,
    module,
) -> WPSConstant:
    """Build a ``WPSConstant`` from a LibCST statement of *module*.

    Raises ``NotAnAssignment`` when the statement is not an annotated
    assignment.
    """
    assignment = statement.body[0]
    if not isinstance(assignment, AnnAssign):
        raise NotAnAssignment()
    name = assignment.target.value
    return WPSConstant(
        name=name,
        about=f'python://{module.__name__}.{name}',
        description=construct_description(statement),
        value=semanticize(getattr(module, name)),
    )
def construct_constants(constants) -> Iterable[WPSConstant]:
    """Yield a ``WPSConstant`` for each documented constant in *constants*."""
    source_text = Path(constants.__file__).read_text()
    for statement in parse_module(source_text).body:
        try:
            yield construct_constant(
                statement=statement,
                module=constants,
            )
        except GenerationFailed as err:
            logger.info(err)
def persist_constant(
    constant: WPSConstant,
    directory: Path,
):
    """Write *constant* as a Markdown file with YAML front matter."""
    metadata = constant.dict(
        exclude={'description', 'about'},
        exclude_none=True,
        by_alias=True,
    )
    post = frontmatter.Post(
        content=constant.description,
        handler=frontmatter.YAMLHandler(),
        # str() avoids yaml.representer.RepresenterError on non-str values
        about=str(constant.about),
        **metadata,
    )
    target_path = directory / f'{constant.name}.md'
    with target_path.open('wb+') as output_file:
        frontmatter.dump(
            post=post,
            fd=output_file,
        )
def persist_constants(
    constants: Iterable[WPSConstant],
    directory: Path,
):
    """Persist every constant into *directory*, creating it if needed.

    Writes run on a small thread pool; ``list(...)`` drains the lazy
    ``Executor.map`` result so every file is written (and any worker
    exception re-raised) before returning.
    """
    directory.mkdir(parents=True, exist_ok=True)
    # Use the executor as a context manager so its worker threads are
    # always joined; the original never shut the pool down.
    with ThreadPoolExecutor(max_workers=10) as executor:
        list(executor.map(
            functools.partial(
                persist_constant,
                directory=directory,
            ),
            constants,
        ))
def generate_constants(
    constants,
    destination: Path,
):
    """Generate Markdown docs for all WPS constants and write them to *destination*."""
    persist_constants(
        construct_constants(constants),
        directory=destination,
    )
|
[
"functools.partial",
"flake8_codes.wemake_python_styleguide.constants.models.NotPublicConstant",
"flake8_codes.wemake_python_styleguide.constants.models.NotAnAssignment",
"pathlib.Path",
"frontmatter.YAMLHandler",
"frontmatter.dump",
"libcst.parse_module",
"concurrent.futures.ThreadPoolExecutor",
"logging.getLogger"
] |
[((417, 444), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (434, 444), False, 'import logging\n'), ((1510, 1535), 'libcst.parse_module', 'parse_module', (['python_code'], {}), '(python_code)\n', (1522, 1535), False, 'from libcst import AnnAssign, parse_module\n'), ((717, 736), 'flake8_codes.wemake_python_styleguide.constants.models.NotPublicConstant', 'NotPublicConstant', ([], {}), '()\n', (734, 736), False, 'from flake8_codes.wemake_python_styleguide.constants.models import BodyStatement, GenerationFailed, NotAnAssignment, NotPublicConstant, WPSConstant\n'), ((1035, 1052), 'flake8_codes.wemake_python_styleguide.constants.models.NotAnAssignment', 'NotAnAssignment', ([], {}), '()\n', (1050, 1052), False, 'from flake8_codes.wemake_python_styleguide.constants.models import BodyStatement, GenerationFailed, NotAnAssignment, NotPublicConstant, WPSConstant\n'), ((2279, 2322), 'frontmatter.dump', 'frontmatter.dump', ([], {'post': 'post', 'fd': 'output_file'}), '(post=post, fd=output_file)\n', (2295, 2322), False, 'import frontmatter\n'), ((1460, 1484), 'pathlib.Path', 'Path', (['constants.__file__'], {}), '(constants.__file__)\n', (1464, 1484), False, 'from pathlib import Path\n'), ((1934, 1959), 'frontmatter.YAMLHandler', 'frontmatter.YAMLHandler', ([], {}), '()\n', (1957, 1959), False, 'import frontmatter\n'), ((2566, 2622), 'functools.partial', 'functools.partial', (['persist_constant'], {'directory': 'directory'}), '(persist_constant, directory=directory)\n', (2583, 2622), False, 'import functools\n'), ((2503, 2537), 'concurrent.futures.ThreadPoolExecutor', 'ThreadPoolExecutor', ([], {'max_workers': '(10)'}), '(max_workers=10)\n', (2521, 2537), False, 'from concurrent.futures import ThreadPoolExecutor\n')]
|
from copy import deepcopy
from interface.task import Task
from lib.state import State
from lib.stateeffortmap import StateEffortMap
class SimpleTask(Task):
_process_start_state = State.S0
_process_terminal_state = State.S9
_global_id = 0
    def __init__(self,
                 effort_map: StateEffortMap):
        """
        Construct a task in the process start state.

        :param effort_map: map used to draw the effort required in each state
        """
        self._state = SimpleTask._process_start_state
        # Allocate the next globally unique task id from the class counter.
        self._id = SimpleTask._global_id
        SimpleTask._global_id += 1
        self._effort_map = effort_map
        self._state_orig = self._process_start_state
        # Things with mutable state - given real values by reset() below.
        self._remaining_effort = None
        self._failed = None
        self._lead_time = None
        # The state setter draws the initial effort from the effort map.
        self.state = self._state_orig
        self.reset()
def reset(self) -> None:
"""
Return the Actor to the same state at which it was constructed
"""
self._remaining_effort = 0
self._failed = False
self._lead_time = float(0)
self.state = self._state_orig
return
@property
def id(self) -> int:
"""
The globally unique id of the task
:return: Globally Unique id of the task
"""
return deepcopy(self._id)
@property
def lead_time(self) -> State:
"""
The lead time between task starting and task finishing
:return: Lead Time
"""
return deepcopy(self._lead_time)
@property
def state(self) -> State:
"""
Current State of the task.
:return: Current State
"""
return deepcopy(self._state)
@state.setter
def state(self,
s: State) -> None:
"""
Set the tasks new state
:param s: the state to set the task to
"""
self._state = deepcopy(s)
self._remaining_effort = 0
if s.value != self._process_terminal_state.value:
self._remaining_effort = self._effort_map.effort()
return
@property
def failed(self) -> bool:
"""
True if task filed during processing
:return: Failure state of task
"""
return deepcopy(self._failed)
@failed.setter
def failed(self,
s: bool) -> None:
"""
Set the failed status of the task
:param s: the state to set the task to
"""
self._failed = deepcopy(s)
def do_work(self,
work: int) -> int:
"""
Do the given units of work, i.e. decrement the number of work units from the residual effort remaining
for the task. If the number of work units is greater than the residual then the difference of work
units is 'lost' as the task will absorb any additional.
:param work: The number of units of work to do.
:return: The remaining units of work, where 0 means the task ne
"""
self._remaining_effort = max(0, self._remaining_effort - work)
self._lead_time += 1
return deepcopy(self._remaining_effort)
def __str__(self) -> str:
"""
Render the task as a string
:return: Task as string
"""
return "Task id[{0}] in State[{1}] @ effort[{2}] - Lead Time[{3}]".format(str(self._id),
str(self._state),
str(self._remaining_effort),
str(self._lead_time))
@classmethod
def process_start_state(cls,
start_state: State = None) -> State:
if start_state is not None:
cls._process_start_state = start_state
return deepcopy(cls._process_start_state)
@classmethod
def process_end_state(cls,
end_state: State = None) -> State:
if end_state is not None:
cls._process_terminal_state = end_state
return deepcopy(cls._process_terminal_state)
|
[
"copy.deepcopy"
] |
[((1226, 1244), 'copy.deepcopy', 'deepcopy', (['self._id'], {}), '(self._id)\n', (1234, 1244), False, 'from copy import deepcopy\n'), ((1423, 1448), 'copy.deepcopy', 'deepcopy', (['self._lead_time'], {}), '(self._lead_time)\n', (1431, 1448), False, 'from copy import deepcopy\n'), ((1599, 1620), 'copy.deepcopy', 'deepcopy', (['self._state'], {}), '(self._state)\n', (1607, 1620), False, 'from copy import deepcopy\n'), ((1818, 1829), 'copy.deepcopy', 'deepcopy', (['s'], {}), '(s)\n', (1826, 1829), False, 'from copy import deepcopy\n'), ((2169, 2191), 'copy.deepcopy', 'deepcopy', (['self._failed'], {}), '(self._failed)\n', (2177, 2191), False, 'from copy import deepcopy\n'), ((2402, 2413), 'copy.deepcopy', 'deepcopy', (['s'], {}), '(s)\n', (2410, 2413), False, 'from copy import deepcopy\n'), ((3021, 3053), 'copy.deepcopy', 'deepcopy', (['self._remaining_effort'], {}), '(self._remaining_effort)\n', (3029, 3053), False, 'from copy import deepcopy\n'), ((3807, 3841), 'copy.deepcopy', 'deepcopy', (['cls._process_start_state'], {}), '(cls._process_start_state)\n', (3815, 3841), False, 'from copy import deepcopy\n'), ((4053, 4090), 'copy.deepcopy', 'deepcopy', (['cls._process_terminal_state'], {}), '(cls._process_terminal_state)\n', (4061, 4090), False, 'from copy import deepcopy\n')]
|
import mock
import os
from django.contrib.auth import get_user_model
from django.test.client import Client
from django.core.urlresolvers import reverse
from favit.models import Favorite
from firecares.firecares_core.tests.base import BaseFirecaresTestcase
from firecares.firestation.models import FireDepartment, FireStation
User = get_user_model()
class TestFavorites(BaseFirecaresTestcase):
@mock.patch('geopy.geocoders.base.urllib_urlopen')
def test_favorite_stations_list_view(self, urllib_urlopen):
"""
Tests the favorite stations list view.
"""
c = urllib_urlopen.return_value
c.read.return_value = open(os.path.join(os.path.dirname(__file__), 'mock/geocode.json')).read()
c.headers.getparam.return_value = 'utf-8'
fd = FireDepartment.objects.create(name='Fire Department 1')
fs1 = FireStation.create_station(department=fd, address_string='1', name='Fire Station 1')
fs2 = FireStation.create_station(department=fd, address_string='1', name='Fire Station 2')
fs3 = FireStation.create_station(department=fd, address_string='1', name='Fire Station 3')
# add these stations as favorites and remove the last one
user = User.objects.get(username='admin')
Favorite.objects.create(user, fs1)
Favorite.objects.create(user, fs2)
fav = Favorite.objects.create(user, fs3)
fav.delete()
c = Client()
c.login(**{'username': 'admin', 'password': '<PASSWORD>'})
response = c.get(reverse('firestation_favorite_list'))
self.assertTrue(fs1 in response.context['object_list'])
self.assertTrue(fs2 in response.context['object_list'])
self.assertTrue(fs3 not in response.context['object_list'])
self.assertEqual(response.status_code, 200)
def test_favorite_departments_list_view(self):
"""
Tests the favorite departments list view.
"""
fd1 = FireDepartment.objects.create(name='Fire Department 1')
fd2 = FireDepartment.objects.create(name='Fire Department 2')
fd3 = FireDepartment.objects.create(name='Fire Department 3')
# add these departments as favorites and remove the last one
user = User.objects.get(username='admin')
Favorite.objects.create(user, fd1)
Favorite.objects.create(user, fd2)
fav = Favorite.objects.create(user, fd3)
fav.delete()
c = Client()
c.login(**{'username': 'admin', 'password': '<PASSWORD>'})
response = c.get(reverse('firedepartment_list') + '?favorites=true')
self.assertTrue(fd1 in response.context['object_list'])
self.assertTrue(fd2 in response.context['object_list'])
self.assertTrue(fd3 not in response.context['object_list'])
self.assertEqual(response.status_code, 200)
c.logout()
try:
response = c.get(reverse('firedepartment_list') + '?favorites=true')
except:
self.fail('Logged-out user triggering favorites search should not throw exception')
|
[
"favit.models.Favorite.objects.create",
"django.core.urlresolvers.reverse",
"os.path.dirname",
"django.contrib.auth.get_user_model",
"mock.patch",
"django.test.client.Client",
"firecares.firestation.models.FireDepartment.objects.create",
"firecares.firestation.models.FireStation.create_station"
] |
[((334, 350), 'django.contrib.auth.get_user_model', 'get_user_model', ([], {}), '()\n', (348, 350), False, 'from django.contrib.auth import get_user_model\n'), ((402, 451), 'mock.patch', 'mock.patch', (['"""geopy.geocoders.base.urllib_urlopen"""'], {}), "('geopy.geocoders.base.urllib_urlopen')\n", (412, 451), False, 'import mock\n'), ((796, 851), 'firecares.firestation.models.FireDepartment.objects.create', 'FireDepartment.objects.create', ([], {'name': '"""Fire Department 1"""'}), "(name='Fire Department 1')\n", (825, 851), False, 'from firecares.firestation.models import FireDepartment, FireStation\n'), ((867, 956), 'firecares.firestation.models.FireStation.create_station', 'FireStation.create_station', ([], {'department': 'fd', 'address_string': '"""1"""', 'name': '"""Fire Station 1"""'}), "(department=fd, address_string='1', name=\n 'Fire Station 1')\n", (893, 956), False, 'from firecares.firestation.models import FireDepartment, FireStation\n'), ((966, 1055), 'firecares.firestation.models.FireStation.create_station', 'FireStation.create_station', ([], {'department': 'fd', 'address_string': '"""1"""', 'name': '"""Fire Station 2"""'}), "(department=fd, address_string='1', name=\n 'Fire Station 2')\n", (992, 1055), False, 'from firecares.firestation.models import FireDepartment, FireStation\n'), ((1065, 1154), 'firecares.firestation.models.FireStation.create_station', 'FireStation.create_station', ([], {'department': 'fd', 'address_string': '"""1"""', 'name': '"""Fire Station 3"""'}), "(department=fd, address_string='1', name=\n 'Fire Station 3')\n", (1091, 1154), False, 'from firecares.firestation.models import FireDepartment, FireStation\n'), ((1274, 1308), 'favit.models.Favorite.objects.create', 'Favorite.objects.create', (['user', 'fs1'], {}), '(user, fs1)\n', (1297, 1308), False, 'from favit.models import Favorite\n'), ((1317, 1351), 'favit.models.Favorite.objects.create', 'Favorite.objects.create', (['user', 'fs2'], {}), '(user, fs2)\n', (1340, 1351), 
False, 'from favit.models import Favorite\n'), ((1366, 1400), 'favit.models.Favorite.objects.create', 'Favorite.objects.create', (['user', 'fs3'], {}), '(user, fs3)\n', (1389, 1400), False, 'from favit.models import Favorite\n'), ((1435, 1443), 'django.test.client.Client', 'Client', ([], {}), '()\n', (1441, 1443), False, 'from django.test.client import Client\n'), ((1963, 2018), 'firecares.firestation.models.FireDepartment.objects.create', 'FireDepartment.objects.create', ([], {'name': '"""Fire Department 1"""'}), "(name='Fire Department 1')\n", (1992, 2018), False, 'from firecares.firestation.models import FireDepartment, FireStation\n'), ((2033, 2088), 'firecares.firestation.models.FireDepartment.objects.create', 'FireDepartment.objects.create', ([], {'name': '"""Fire Department 2"""'}), "(name='Fire Department 2')\n", (2062, 2088), False, 'from firecares.firestation.models import FireDepartment, FireStation\n'), ((2103, 2158), 'firecares.firestation.models.FireDepartment.objects.create', 'FireDepartment.objects.create', ([], {'name': '"""Fire Department 3"""'}), "(name='Fire Department 3')\n", (2132, 2158), False, 'from firecares.firestation.models import FireDepartment, FireStation\n'), ((2286, 2320), 'favit.models.Favorite.objects.create', 'Favorite.objects.create', (['user', 'fd1'], {}), '(user, fd1)\n', (2309, 2320), False, 'from favit.models import Favorite\n'), ((2329, 2363), 'favit.models.Favorite.objects.create', 'Favorite.objects.create', (['user', 'fd2'], {}), '(user, fd2)\n', (2352, 2363), False, 'from favit.models import Favorite\n'), ((2378, 2412), 'favit.models.Favorite.objects.create', 'Favorite.objects.create', (['user', 'fd3'], {}), '(user, fd3)\n', (2401, 2412), False, 'from favit.models import Favorite\n'), ((2447, 2455), 'django.test.client.Client', 'Client', ([], {}), '()\n', (2453, 2455), False, 'from django.test.client import Client\n'), ((1537, 1573), 'django.core.urlresolvers.reverse', 'reverse', (['"""firestation_favorite_list"""'], 
{}), "('firestation_favorite_list')\n", (1544, 1573), False, 'from django.core.urlresolvers import reverse\n'), ((2549, 2579), 'django.core.urlresolvers.reverse', 'reverse', (['"""firedepartment_list"""'], {}), "('firedepartment_list')\n", (2556, 2579), False, 'from django.core.urlresolvers import reverse\n'), ((2912, 2942), 'django.core.urlresolvers.reverse', 'reverse', (['"""firedepartment_list"""'], {}), "('firedepartment_list')\n", (2919, 2942), False, 'from django.core.urlresolvers import reverse\n'), ((676, 701), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (691, 701), False, 'import os\n')]
|
import datetime
import time
from typing import Callable
import math
from utils.ask_library import AskLibrary
from transitions import Machine
#from social_interaction_cloud.basic_connector import BasicSICConnector, RobotPosture
from social_interaction_cloud.action import ActionRunner
from social_interaction_cloud.basic_connector import BasicSICConnector
import pandas as pd
class NaoFit:
"""
The NaoFit main class.
Implements the whole flow of human-robot interaction using a state-machine scheme.
Nao-Fit does a workout with the user, depending on your BMI and age.
"""
states = ['asleep', 'wake_up', 'introduce', 'ask_workout', 'ask_name', 'ask_age', 'recognise', 'ask_weight',
'ask_height', 'workout', 'finish_workout', 'logging_off', 'ask_again']
def __init__(self, sic: BasicSICConnector):
"""
Initializes the BasicConnector, AskLibrary and state-machine transitions
"""
self.sic = sic
self.action_runner = ActionRunner(self.sic)
# self.name = namelist
self.ask_nao = AskLibrary(sic)
self.user_model = {}
self.recognition_manager = {'attempt_success': False, 'attempt_number': 0}
self.user_recognised = False
self.file = pd.read_csv('data/user_data.csv')
transitions = [
{'trigger': 'start', 'source': 'asleep', 'dest': 'wake_up'},
{'trigger': 'intro', 'source': 'wake_up', 'dest': 'introduce'},
{'trigger': 'work', 'source': 'introduce', 'dest': 'ask_workout'},
{'trigger': 'name', 'source': 'ask_workout', 'dest': 'ask_name'},
{'trigger': 'age', 'source': 'ask_name', 'dest': 'ask_age'},
{'trigger': 'rec', 'source': 'ask_age', 'dest': 'recognise'},
{'trigger': 'start_workout', 'source': 'recognise', 'dest': 'workout'},
{'trigger': 'height', 'source': 'recognise', 'dest': 'ask_height'},
{'trigger': 'weight', 'source': 'ask_height', 'dest': 'ask_weight'},
{'trigger': 'start_workout', 'source': 'ask_weight', 'dest': 'workout'},
{'trigger': 'workout_done', 'source': 'workout', 'dest': 'finish_workout'},
{'trigger': 'workout_done', 'source': 'ask_workout', 'dest': 'finish_workout'},
{'trigger': 'say_goodbye', 'source': 'finish_workout', 'dest': 'logging_off'}]
self.machine = Machine(model=self, states=NaoFit.states, transitions=transitions, initial='asleep')
# In the following, state transitions are defined
if self.state == 'asleep':
print(self.state)
self.start()
if self.state == 'wake_up':
print(self.state)
self.intro()
if self.state == 'introduce':
print(self.state)
self.handle_introduction()
self.work()
if self.state == 'ask_workout':
print(self.state)
ready = self.handle_ask_workout()
if ready == 'Yes' or ready == 'YES' or ready == 'yes':
self.name()
# assume answer is no and end
else:
# print('The answer was:', ready)
# print('Ending the workout..')
self.workout_done()
if self.state == 'ask_name':
print(self.state)
# print(name)
name = self.handle_ask_name()
if name is False:
self.handle_ask_again()
else:
# set the name and switch to next state
self.name = name
self.age()
if self.state == 'ask_age':
print(self.state)
age = self.handle_ask_age()
if age is False:
self.handle_ask_again()
else:
# set the age for later database usage
# TODO: add the possiblity to call parents if age is too low
self.age = int(age)
if self.age <= 8:
self.action_runner.run_waiting_action('say', 'Great, please ask your parents to come here.')
self.action_runner.run_waiting_action('say', 'I will wait until your parents are here')
time.sleep(5)
self.rec()
if self.state == 'recognise':
print(self.state)
# Todo Fix recognition
recognise = self.handle_recognise()
if recognise is False:
self.height()
else:
self.start_workout()
if self.state == 'ask_height':
print(self.state)
# save height for later
self.height = self.handle_ask_height()
# print('height:', self.height)
self.weight()
if self.state == 'ask_weight':
print(self.state)
# save weight for later
self.weight = self.handle_ask_weight()
# print('weight:', self.weight)
self.start_workout()
if self.state == 'workout':
print(self.state)
self.handle_workout()
#self.action_runner.run_waiting_action('say', 'I am here')
self.workout_done()
if self.state == 'finish_workout':
print(self.state)
self.handle_finish()
self.say_goodbye()
if self.state == 'logging_off':
print(self.state)
self.handle_saying_goodbye()
self.sic.stop()
exit()
if self.state == 'ask_again':
# for now this state is not needed and unreachable!
# However self.handle_ask_again() is used!
print(self.state)
# Now from here on are the functions that handle the interactions:
# Like asking questions and dealing with the replies.
def ask_until_answer(self, question_func: Callable):
"""
This function asks a question until it gets a valid answer.
It expects a function from the ask_library class as :str
"""
var = False
while var is False:
var = question_func()
if var is False:
self.handle_ask_again()
return var
def handle_wake_up(self) -> None:
"""
Handles the necessary tasks when nao wakes up
"""
self.action_runner.load_waiting_action('set_language', 'en-US')
self.action_runner.load_waiting_action('wake_up')
print("\n\n state: awake \n\n")
self.action_runner.run_loaded_actions()
def handle_introduction(self) -> None:
"""
Initiates an introduction
"""
self.action_runner.run_waiting_action('say_animated',
'Hi I am Nao-Fit. Your personal trainer. Let\'s be workout buddies!')
return None
def handle_ask_workout(self):
"""
Asks if the user is ready to work out
"""
self.action_runner.run_waiting_action('say', 'Are you ready for the workout?')
confirm = self.ask_until_answer(self.ask_nao.ask_confirmation)
return confirm
def handle_ask_name(self):
"""
Asks for the name of the user
"""
self.action_runner.run_waiting_action('say_animated', 'Could you please tell me your name?')
name = self.ask_until_answer(self.ask_nao.ask_name)
return name
def handle_ask_again(self):
"""
Asks if the user could repeat the answer
"""
self.action_runner.run_waiting_action('say_animated', 'I did not understand that. Could you repeat that?')
return
def handle_ask_age(self):
"""
Asks for the age of the user
"""
self.action_runner.run_waiting_action('say_animated', 'Awesome. And how old are you?')
age = self.ask_until_answer(self.ask_nao.ask_age)
return age
def handle_recognise(self):
"""
Checks if the user is already in the databse
"""
df_dummy_database = self.file
if self.name in df_dummy_database.loc[df_dummy_database['age'] == self.age].values:
self.action_runner.run_waiting_action('say_animated', f'Welcome Back {self.name}!')
return True
else:
return False
# name_list = ['max', 'julian', 'enrico']
# if name in namelist:
# self.recognise()
def handle_ask_height(self):
"""
Asks for the height of the user
"""
time.sleep(1)
self.action_runner.run_waiting_action('say_animated', 'Thank you! Now please tell me your height in centimeter?')
height = self.ask_until_answer(self.ask_nao.ask_height)
return height
def handle_ask_weight(self):
"""
Asks for the weight of the user
"""
self.action_runner.run_waiting_action('say_animated', f'Incredibble {self.name}! Lastly I would like to know'
'how much you weight in kilos? ')
weight = self.ask_until_answer(self.ask_nao.ask_weight)
return weight
def reset_recognition_management(self) -> None:
"""
Resets the recognition manager
"""
self.recognition_manager.update({'attempt_success': False, 'attempt_number': 0})
def handle_saying_goodbye(self) -> None:
"""
Says goodbye to the user
"""
print("\n\n NAO: \"Well this was fun.\"\n\"I will see you around.\" \n\n")
self.action_runner.run_waiting_action('say_animated', 'I will see you around.')
self.action_runner.run_waiting_action('rest')
return
def handle_workout(self) -> None:
"""
Calculates the BMI based on the values that are given by the user.
Initiates a workout based on the BMI.
"""
self.action_runner.run_waiting_action('say',
f'I will select a workout based on your personal information.')
try:
user_bmi = int(float(self.weight) / ((float(self.height)*0.01)**2))
if user_bmi > 30:
self.action_runner.run_waiting_action('do_gesture', "finalworkout_1/behavior_1")
else:
self.action_runner.run_waiting_action('do_gesture', 'finalworkout_2/behavior_1')
except:
self.action_runner.run_waiting_action('do_gesture', "finalworkout_1/behavior_1")
def handle_finish(self) -> None:
"""
Finishes the whole flow and stores information into the dataframe
"""
self.action_runner.run_waiting_action('say', ' This was so much fun!')
# now write everything into the database
#df_user_info = pd.DataFrame({'name': self.name, 'age': self.age, 'height': self.height, 'weight:': self.weight,
# 'date': datetime.date.today()})
#df_dummy_db = self.file.append(df_user_info)
#df_dummy_db.to_csv('data/user_data.csv')
return None
class StateMachineInit(object):
"""
A simple class that initiates the Nao-Fit when this file is executed from the console
"""
def __init__(self, server_ip: str, dialogflow_key_file: str, dialogflow_agent_id: str):
self.sic = BasicSICConnector(server_ip, 'en-US', dialogflow_key_file, dialogflow_agent_id)
def run(self) -> None:
"""
Starts the whole procedure and stops it when finished.
"""
self.sic.start()
self.robot = NaoFit(self.sic)
print('byeee')
self.sic.stop()
simple_nao_fit = StateMachineInit('127.0.0.1',
'testagent-nava-6ec5f3b4299a.json',
'testagent-nava')
simple_nao_fit.run()
|
[
"transitions.Machine",
"pandas.read_csv",
"utils.ask_library.AskLibrary",
"time.sleep",
"social_interaction_cloud.action.ActionRunner",
"social_interaction_cloud.basic_connector.BasicSICConnector"
] |
[((1001, 1023), 'social_interaction_cloud.action.ActionRunner', 'ActionRunner', (['self.sic'], {}), '(self.sic)\n', (1013, 1023), False, 'from social_interaction_cloud.action import ActionRunner\n'), ((1079, 1094), 'utils.ask_library.AskLibrary', 'AskLibrary', (['sic'], {}), '(sic)\n', (1089, 1094), False, 'from utils.ask_library import AskLibrary\n'), ((1265, 1298), 'pandas.read_csv', 'pd.read_csv', (['"""data/user_data.csv"""'], {}), "('data/user_data.csv')\n", (1276, 1298), True, 'import pandas as pd\n'), ((2402, 2491), 'transitions.Machine', 'Machine', ([], {'model': 'self', 'states': 'NaoFit.states', 'transitions': 'transitions', 'initial': '"""asleep"""'}), "(model=self, states=NaoFit.states, transitions=transitions, initial=\n 'asleep')\n", (2409, 2491), False, 'from transitions import Machine\n'), ((8539, 8552), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (8549, 8552), False, 'import time\n'), ((11304, 11383), 'social_interaction_cloud.basic_connector.BasicSICConnector', 'BasicSICConnector', (['server_ip', '"""en-US"""', 'dialogflow_key_file', 'dialogflow_agent_id'], {}), "(server_ip, 'en-US', dialogflow_key_file, dialogflow_agent_id)\n", (11321, 11383), False, 'from social_interaction_cloud.basic_connector import BasicSICConnector\n'), ((4244, 4257), 'time.sleep', 'time.sleep', (['(5)'], {}), '(5)\n', (4254, 4257), False, 'import time\n')]
|
import logging
from optparse import make_option
import re
import types
from django.core.management.base import BaseCommand
from smsc.api import sms_read
from smsapp import models
logger = logging.getLogger(__name__)
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option('--hours', action="store", type="int", dest="hours", default=3,
help="Number of hours to list SMS for"),
)
def handle(self, *args, **options):
hours = options.get('hours')
js = sms_read.get_sms_list(hours)
if not isinstance(js, types.ListType):
err = js.get('error')
if err:
logger.error("Error: {0}".format(err))
return
rx = re.compile('^\+')
for r in js:
code = r.get('message')
num = r.get('phone')
if not rx.match(num):
num = '+' + num
try:
p = models.PhoneData.objects.get(uniq_id=code)
logger.debug("Found matched phone for code {0}".format(code))
if rx.match(unicode(p.number)):
logger.debug("Phone {0} already has valid # set".format(p))
continue
p.number = num
p.save()
except models.PhoneData.DoesNotExist:
logger.error("Phone with code {0} does not exist".format(code))
continue
|
[
"smsapp.models.PhoneData.objects.get",
"optparse.make_option",
"smsc.api.sms_read.get_sms_list",
"logging.getLogger",
"re.compile"
] |
[((192, 219), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (209, 219), False, 'import logging\n'), ((538, 566), 'smsc.api.sms_read.get_sms_list', 'sms_read.get_sms_list', (['hours'], {}), '(hours)\n', (559, 566), False, 'from smsc.api import sms_read\n'), ((759, 777), 're.compile', 're.compile', (['"""^\\\\+"""'], {}), "('^\\\\+')\n", (769, 777), False, 'import re\n'), ((304, 423), 'optparse.make_option', 'make_option', (['"""--hours"""'], {'action': '"""store"""', 'type': '"""int"""', 'dest': '"""hours"""', 'default': '(3)', 'help': '"""Number of hours to list SMS for"""'}), "('--hours', action='store', type='int', dest='hours', default=3,\n help='Number of hours to list SMS for')\n", (315, 423), False, 'from optparse import make_option\n'), ((970, 1012), 'smsapp.models.PhoneData.objects.get', 'models.PhoneData.objects.get', ([], {'uniq_id': 'code'}), '(uniq_id=code)\n', (998, 1012), False, 'from smsapp import models\n')]
|
import os
from .firebase_authentication import firebase_auth
from itsdangerous import (
TimedJSONWebSignatureSerializer as Serializer,
BadSignature,
SignatureExpired,
)
SECRET_KEY = os.environ.get("SECRET_KEY")
def generate_auth_token(idToken, expiration=3600):
s = Serializer(SECRET_KEY, expires_in=expiration)
return s.dumps({"idToken": idToken})
def verify_auth_token(token):
s = Serializer(SECRET_KEY)
try:
data = s.loads(token)
except SignatureExpired:
return None # valid token, but expired
except BadSignature:
return None # invalid token
return True
|
[
"os.environ.get",
"itsdangerous.TimedJSONWebSignatureSerializer"
] |
[((195, 223), 'os.environ.get', 'os.environ.get', (['"""SECRET_KEY"""'], {}), "('SECRET_KEY')\n", (209, 223), False, 'import os\n'), ((285, 330), 'itsdangerous.TimedJSONWebSignatureSerializer', 'Serializer', (['SECRET_KEY'], {'expires_in': 'expiration'}), '(SECRET_KEY, expires_in=expiration)\n', (295, 330), True, 'from itsdangerous import TimedJSONWebSignatureSerializer as Serializer, BadSignature, SignatureExpired\n'), ((412, 434), 'itsdangerous.TimedJSONWebSignatureSerializer', 'Serializer', (['SECRET_KEY'], {}), '(SECRET_KEY)\n', (422, 434), True, 'from itsdangerous import TimedJSONWebSignatureSerializer as Serializer, BadSignature, SignatureExpired\n')]
|
# Generated by Django 3.2.8 on 2021-11-22 04:59
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("twoops", "0010_tweetsearch"),
]
operations = [
migrations.AlterModelOptions(
name="tweetsearch",
options={"verbose_name_plural": "Tweet Searches"},
),
]
|
[
"django.db.migrations.AlterModelOptions"
] |
[((219, 323), 'django.db.migrations.AlterModelOptions', 'migrations.AlterModelOptions', ([], {'name': '"""tweetsearch"""', 'options': "{'verbose_name_plural': 'Tweet Searches'}"}), "(name='tweetsearch', options={\n 'verbose_name_plural': 'Tweet Searches'})\n", (247, 323), False, 'from django.db import migrations\n')]
|
# coding: utf-8
import numpy as np
import torch
def convert_to_np(weights):
for k, v in weights.items():
if isinstance(v, torch.Tensor):
weights[k] = v.cpu().numpy()
elif isinstance(v, np.ndarray):
pass
elif isinstance(v, list):
weights[k] = np.array(v)
else:
raise SystemError("NOT SUPPORT THE DATATYPE", type(v))
return weights
def convert_to_tensor(weights):
for k, v in weights.items():
if isinstance(v, torch.Tensor):
pass
elif isinstance(v, np.ndarray):
weights[k] = torch.from_numpy(v)
elif isinstance(v, list):
weights[k] = torch.from_numpy(np.array(v))
else:
raise SystemError("NOT SUPPORT THE DATATYPE", type(v))
return weights
def cdw_feature_distance(old_model, new_model, device, train_loader):
"""cosine distance weight (cdw): calculate feature distance of
the features of a batch of data by cosine distance.
old_classifier,
"""
old_model = old_model.to(device)
# old_classifier = old_classifier.to(device)
for data in train_loader:
inputs, _ = data
inputs = inputs.to(device)
with torch.no_grad():
# old_out = old_classifier(old_model(inputs))
old_out = old_model(inputs)
new_out = new_model(inputs)
distance = 1 - torch.cosine_similarity(old_out, new_out)
return torch.mean(distance).cpu().numpy()
|
[
"torch.mean",
"numpy.array",
"torch.no_grad",
"torch.cosine_similarity",
"torch.from_numpy"
] |
[((1234, 1249), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (1247, 1249), False, 'import torch\n'), ((1413, 1454), 'torch.cosine_similarity', 'torch.cosine_similarity', (['old_out', 'new_out'], {}), '(old_out, new_out)\n', (1436, 1454), False, 'import torch\n'), ((610, 629), 'torch.from_numpy', 'torch.from_numpy', (['v'], {}), '(v)\n', (626, 629), False, 'import torch\n'), ((308, 319), 'numpy.array', 'np.array', (['v'], {}), '(v)\n', (316, 319), True, 'import numpy as np\n'), ((706, 717), 'numpy.array', 'np.array', (['v'], {}), '(v)\n', (714, 717), True, 'import numpy as np\n'), ((1470, 1490), 'torch.mean', 'torch.mean', (['distance'], {}), '(distance)\n', (1480, 1490), False, 'import torch\n')]
|
#! /usr/bin/python
# by <EMAIL> at Mon Nov 6 18:08:44 CET 2017
import struct
import zlib
def write_pgm(filename, img_data):
f = open(filename, 'wb')
try:
f.write('P5 %d %d 255\n' % (len(img_data[0]), len(img_data)))
for line in img_data:
f.write(line.replace('\1', '\xff'))
finally:
f.close()
def write_png(filename, img_data):
# https://tools.ietf.org/rfc/rfc2083.txt
def compress(data):
#return zlib.compress(data)
# No compression below, same as zlib.compress(data, 0).
# https://www.ietf.org/rfc/rfc1951.txt
data = str(data)
output = ['x\1']
max_block_size = 0xfb00 # zlib uses 0xfb00, we could use at most 65535.
for i in xrange(0, len(data), 65535):
size = len(data) - i
is_final = size <= 65535
if not is_final:
size = 65535
output.append(struct.pack('<BHH', is_final, size, 65535 & ~size))
output.append(data[i : i + size]) # TODO(pts): Don't copy slice.
output.append(struct.pack('>l', zlib.adler32(data)))
return ''.join(output)
def write_chunk(chunk_type, chunk_data):
f.write(struct.pack('>L', len(chunk_data)))
# This wastes memory on the string concatenation.
# TODO(pts): Optimize memory use.
f.write(chunk_type)
chunk_data = str(chunk_data)
f.write(chunk_data)
f.write(struct.pack('>l', zlib.crc32(
chunk_data, zlib.crc32(chunk_type, 0))))
f = open(filename, 'wb')
try:
f.write('\x89PNG\r\n\x1A\n') # PNG signature.
width, height = len(img_data[0]), len(img_data)
bpc = 8
# 0: 'gray',
# 2: 'rgb',
# 3: 'indexed-rgb',
# 4: 'gray-alpha',
# 6: 'rgb-alpha',
color_type = 3
compression = 0
filter = 0
is_interlaced = 0
plte = '\0\0\0\xff\xff\xff'
output = []
for line in img_data:
output.append('\0') # Predictor value for the specified line.
output.append(line)
write_chunk(
'IHDR', struct.pack(
'>LL5B', width, height, bpc, color_type,
compression, filter, is_interlaced))
#assert 0, f.tell() # 33.
if plte is not None:
write_chunk('PLTE', plte)
write_chunk('IDAT', compress(''.join(output)))
# "\0\0\0\0IEND\xae""B`\x82".
write_chunk('IEND', '')
finally:
f.close()
def work():
# A chessboard in a frame.
img_data = tuple(''.join(
'\0\1'[((x in (1, 82) or y in (1, 82)) and
1) or # not (x in (0, 83) or y in (0, 83))) or
(2 <= x < 82 and 2 <= y < 82 and
((x - 2) // 10 + (y - 2) // 10) % 2)]
for x in xrange(91)) for y in xrange(84))
write_pgm('chess.pgm', img_data)
write_png('chess.png', img_data)
if __name__ == '__main__':
work()
|
[
"zlib.crc32",
"zlib.adler32",
"struct.pack"
] |
[((1936, 2028), 'struct.pack', 'struct.pack', (['""">LL5B"""', 'width', 'height', 'bpc', 'color_type', 'compression', 'filter', 'is_interlaced'], {}), "('>LL5B', width, height, bpc, color_type, compression, filter,\n is_interlaced)\n", (1947, 2028), False, 'import struct\n'), ((842, 892), 'struct.pack', 'struct.pack', (['"""<BHH"""', 'is_final', 'size', '(65535 & ~size)'], {}), "('<BHH', is_final, size, 65535 & ~size)\n", (853, 892), False, 'import struct\n'), ((1002, 1020), 'zlib.adler32', 'zlib.adler32', (['data'], {}), '(data)\n', (1014, 1020), False, 'import zlib\n'), ((1377, 1402), 'zlib.crc32', 'zlib.crc32', (['chunk_type', '(0)'], {}), '(chunk_type, 0)\n', (1387, 1402), False, 'import zlib\n')]
|
""" Compose multiple datasets in a single loader. """
import numpy as np
from copy import deepcopy
from torch.utils.data import Dataset
from dataset.wireframe_dataset import WireframeDataset
from dataset.holicity_dataset import HolicityDataset
class MergeDataset(Dataset):
    """Dataset that draws samples from several sub-datasets with given weights."""

    def __init__(self, mode, config=None):
        super(MergeDataset, self).__init__()
        # Build one dataset object per requested dataset name. A single
        # mutable copy of the config is specialized per dataset, matching
        # how the sub-dataset constructors consume it.
        self._datasets = []
        base_config = deepcopy(config)
        for idx, name in enumerate(config['datasets']):
            base_config['dataset_name'] = name
            base_config['gt_source_train'] = config['gt_source_train'][idx]
            base_config['gt_source_test'] = config['gt_source_test'][idx]
            if name == "wireframe":
                self._datasets.append(WireframeDataset(mode, base_config))
            elif name == "holicity":
                base_config['train_split'] = config['train_splits'][idx]
                self._datasets.append(HolicityDataset(mode, base_config))
            else:
                raise ValueError("Unknown dataset: " + name)
        self._weights = config['weights']

    def __getitem__(self, item):
        # Pick a dataset according to the configured weights, then a
        # uniformly random sample from it; `item` is intentionally ignored.
        chosen = np.random.choice(range(len(self._datasets)), p=self._weights)
        dataset = self._datasets[chosen]
        return dataset[np.random.randint(len(dataset))]

    def __len__(self):
        return np.sum([len(d) for d in self._datasets])
|
[
"dataset.wireframe_dataset.WireframeDataset",
"copy.deepcopy",
"dataset.holicity_dataset.HolicityDataset"
] |
[((449, 465), 'copy.deepcopy', 'deepcopy', (['config'], {}), '(config)\n', (457, 465), False, 'from copy import deepcopy\n'), ((778, 813), 'dataset.wireframe_dataset.WireframeDataset', 'WireframeDataset', (['mode', 'spec_config'], {}), '(mode, spec_config)\n', (794, 813), False, 'from dataset.wireframe_dataset import WireframeDataset\n'), ((958, 992), 'dataset.holicity_dataset.HolicityDataset', 'HolicityDataset', (['mode', 'spec_config'], {}), '(mode, spec_config)\n', (973, 992), False, 'from dataset.holicity_dataset import HolicityDataset\n')]
|
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch import optim
from torch.autograd import Variable
from torch.nn.parameter import Parameter
from torchvision import datasets, transforms
from torch.utils.data import DataLoader, Dataset
from xbbo.utils.constants import MAXINT
from xbbo.core import TestFunction
class LossIsNaN(Exception):
    """Signals that a training loss evaluated to NaN."""
    pass
class Model(TestFunction):
    """Toy 2-D test function for exercising hyper-parameter strategies.

    The validation objective is ``1.2 - (theta ** 2).sum()`` and the training
    objective is modulated by two hyper-parameters ``h1``/``h2``; ``theta``
    is optimized with SGD between hyper-parameter updates.
    """
    def __init__(self, cfg, seed, **kwargs):
        # np.random.seed(cfg.GENERAL.random_seed)
        self.cfg = cfg
        # self.dim = 30
        # assert self.dim % 2 == 0
        super().__init__(seed=seed)
        self.api_config = self._load_api_config()
        # NOTE(review): torch.seed() takes no argument in current torch
        # releases -- confirm whether torch.manual_seed was intended here.
        torch.seed(self.rng.randint(MAXINT))
        torch.manual_seed(self.rng.randint(MAXINT))
        self.device = torch.device(kwargs.get('device', 'cpu'))
        self.theta = Parameter(torch.FloatTensor([0.9, 0.9]).to(self.device))
        # self.opt_wrap = lambda params: optim.SGD(self.net.parameters(), lr=lr, momentum=momentum)
        self.opt = optim.SGD([self.theta], lr=0.01)
        self.step_num = 0
        self.history_hp = [] # for record strategy
        self.trajectory_hp = []
        self.trajectory_loss = [] # scores recorded along this individual's training
        self.history_loss = [] # scores recorded after applying the hp strategy (with weight transfer)
        self.hp = torch.empty(2, device=self.device)
        self.obj_val_func = lambda theta: 1.2 - (theta ** 2).sum()
        self.obj_train_func = lambda theta, h: 1.2 - ((h * theta) ** 2).sum()
        self.trajectory_theta = []
    def __len__(self): # one epoch has how many batchs
        return 1
    def update_hp(self, params: dict):
        # Hyper-params change at this step; a recorded score at a step is
        # influenced by every hyper-param used *before* that step.
        self.history_hp.append((self.step_num, params))
        self.trajectory_hp.append((self.step_num, params))
        self.trajectory_theta.append(self.theta.detach().cpu().numpy())
        self.hp[0] = params['h1']
        self.hp[1] = params['h2']
    def step(self, num): # train need training(optimizer)
        """Run `num` SGD steps on the training objective; abort early on NaN loss."""
        for it in range(num):
            self.trajectory_theta.append(self.theta.detach().cpu().numpy())
            loss = self.obj_train_func(self.theta, self.hp)
            if np.isnan(loss.item()):
                print("Loss is NaN.")
                self.step_num += 1
                return
                # raise LossIsNaN
            self.opt.zero_grad()
            loss.backward()
            self.opt.step()
            self.step_num += 1
    def evaluate(self): # val no training need(optimizer)
        """Evaluate the validation objective; a NaN loss is mapped to +inf."""
        with torch.no_grad():
            loss = self.obj_val_func(self.theta).item()
        self.loss = np.inf if np.isnan(loss) else loss
        self.trajectory_loss.append((self.step_num, self.loss))
        self.history_loss.append((self.step_num, self.loss))
        return self.loss
    def load_checkpoint(self, checkpoint):
        # Restore theta in place; optimizer state is deliberately not restored.
        with torch.no_grad():
            self.theta.set_(checkpoint['model_state_dict'])
        # self.opt.load_state_dict(checkpoint['optim_state_dict'])
    def save_checkpoint(self):
        # Clone so later training steps do not mutate the saved tensor.
        checkpoint = dict(model_state_dict=self.theta.data.clone())
        return checkpoint
    def _load_api_config(self):
        # Search space: both hyper-parameters are floats in [0, 1].
        return {
            'h1': {
                'type': 'float', 'warp': 'linear', 'range': [0, 1]},
            'h2': {
                'type': 'float', 'warp': 'linear', 'range': [0, 1]
            }
        }
|
[
"torch.FloatTensor",
"torch.empty",
"numpy.isnan",
"torch.no_grad",
"torch.optim.SGD"
] |
[((1051, 1083), 'torch.optim.SGD', 'optim.SGD', (['[self.theta]'], {'lr': '(0.01)'}), '([self.theta], lr=0.01)\n', (1060, 1083), False, 'from torch import optim\n'), ((1326, 1360), 'torch.empty', 'torch.empty', (['(2)'], {'device': 'self.device'}), '(2, device=self.device)\n', (1337, 1360), False, 'import torch\n'), ((2542, 2557), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (2555, 2557), False, 'import torch\n'), ((2645, 2659), 'numpy.isnan', 'np.isnan', (['loss'], {}), '(loss)\n', (2653, 2659), True, 'import numpy as np\n'), ((2877, 2892), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (2890, 2892), False, 'import torch\n'), ((885, 914), 'torch.FloatTensor', 'torch.FloatTensor', (['[0.9, 0.9]'], {}), '([0.9, 0.9])\n', (902, 914), False, 'import torch\n')]
|
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""cloud tpu list command."""
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.compute import flags as compute_flags
from googlecloudsdk.command_lib.compute.tpus import util as cli_util
class List(base.ListCommand):
  """List Cloud TPUs."""
  @staticmethod
  def Args(parser):
    # Table layout for listed TPUs comes from the shared CLI util.
    parser.display_info.AddFormat(cli_util.LIST_FORMAT)
    compute_flags.AddZoneFlag(
        parser,
        resource_type='tpu',
        operation_type='list',
        explanation=(
            'List TPUs from this zone. '
            'If not specified, will list TPUs in `default` compute/zone.'))
    parser.display_info.AddCacheUpdater(None)
  def Run(self, args):
    # Delegate to the TPU util; respects --page-size/--limit/--zone flags.
    return cli_util.List(
        page_size=args.page_size,
        limit=args.limit,
        zone=args.zone)
|
[
"googlecloudsdk.command_lib.compute.flags.AddZoneFlag",
"googlecloudsdk.command_lib.compute.tpus.util.List"
] |
[((959, 1152), 'googlecloudsdk.command_lib.compute.flags.AddZoneFlag', 'compute_flags.AddZoneFlag', (['parser'], {'resource_type': '"""tpu"""', 'operation_type': '"""list"""', 'explanation': '"""List TPUs from this zone. If not specified, will list TPUs in `default` compute/zone."""'}), "(parser, resource_type='tpu', operation_type=\n 'list', explanation=\n 'List TPUs from this zone. If not specified, will list TPUs in `default` compute/zone.'\n )\n", (984, 1152), True, 'from googlecloudsdk.command_lib.compute import flags as compute_flags\n'), ((1282, 1355), 'googlecloudsdk.command_lib.compute.tpus.util.List', 'cli_util.List', ([], {'page_size': 'args.page_size', 'limit': 'args.limit', 'zone': 'args.zone'}), '(page_size=args.page_size, limit=args.limit, zone=args.zone)\n', (1295, 1355), True, 'from googlecloudsdk.command_lib.compute.tpus import util as cli_util\n')]
|
from django.db.models import Q
from django.contrib.auth import get_user_model
from .utils import sorted_standings
def percent(num, denom):
    """Return ``num`` as a percentage of ``denom`` (0.0 when ``denom`` is zero)."""
    if denom == 0:
        return 0.0
    return num / denom * 100.0
class RosterStats:
    """Aggregated pick statistics for one user in a league (optionally one season)."""
    def __init__(self, user, league, season=None):
        self.user = user
        self.season = season
        self.league = league
        self.correct = 0
        self.wrong = 0
        self.points_delta = 0
        # Only picksets with at least one graded pick (correct or wrong > 0).
        queryset = self.user.picksets.filter(gameset__league=league).select_related().filter(
            Q(correct__gt=0) | Q(wrong__gt=0)
        )
        if season:
            queryset = queryset.filter(gameset__season=season)
        self.picksets_played = 0
        self.picksets_won = 0
        for correct, wrong, is_winner, points, actual_points in queryset.values_list(
            'correct',
            'wrong',
            'is_winner',
            'points',
            'gameset__points'
        ):
            self.picksets_played += 1
            self.correct += correct
            self.wrong += wrong
            # points_delta accumulates only when the gameset has a final score.
            if actual_points:
                self.points_delta += abs(points - actual_points)
            if is_winner:
                self.picksets_won += 1
        self.is_active = self.user.is_active
        self.pct = percent(self.correct, self.correct + self.wrong)
        self.avg_points_delta = (
            self.points_delta / self.picksets_played
            if self.picksets_played
            else 0
        )
    def __str__(self):
        return '{}{}'.format(self.user, ' ({})'.format(self.season) if self.season else '')
    __repr__ = __str__
    @classmethod
    def get_details(cls, league, group, season=None):
        """Return ``[(season_stats, overall_stats), ...]`` sorted by standings."""
        season = season or league.current_season
        users = get_user_model().objects.filter(picker_memberships__group=group)
        def keyfn(rs):
            # Rank by most correct, then smallest points delta, then most played.
            return (rs.correct, -rs.points_delta, rs.picksets_played)
        stats = [cls(u, league) for u in users]
        by_user = {
            entry.user: entry for entry in sorted_standings(stats, key=keyfn)
        }
        stats = [cls(u, league, season) for u in users]
        results = [
            (e, by_user[e.user]) for e in sorted_standings(stats, key=keyfn)
        ]
        return results
|
[
"django.contrib.auth.get_user_model",
"django.db.models.Q"
] |
[((536, 552), 'django.db.models.Q', 'Q', ([], {'correct__gt': '(0)'}), '(correct__gt=0)\n', (537, 552), False, 'from django.db.models import Q\n'), ((555, 569), 'django.db.models.Q', 'Q', ([], {'wrong__gt': '(0)'}), '(wrong__gt=0)\n', (556, 569), False, 'from django.db.models import Q\n'), ((1755, 1771), 'django.contrib.auth.get_user_model', 'get_user_model', ([], {}), '()\n', (1769, 1771), False, 'from django.contrib.auth import get_user_model\n')]
|
'''OpenGL extension NV.conservative_raster_pre_snap
This module customises the behaviour of the
OpenGL.raw.GLES2.NV.conservative_raster_pre_snap to provide a more
Python-friendly API
Overview (from the spec)
NV_conservative_raster_pre_snap_triangles provides a new mode to achieve
rasterization of triangles that is conservative w.r.t the triangle at
infinite precision i.e. before it is snapped to the sub-pixel grid. This
extension provides a new mode that expands this functionality to lines and
points.
The official definition of this extension is available here:
http://www.opengl.org/registry/specs/NV/conservative_raster_pre_snap.txt
'''
from OpenGL import platform, constant, arrays
from OpenGL import extensions, wrapper
import ctypes
from OpenGL.raw.GLES2 import _types, _glgets
from OpenGL.raw.GLES2.NV.conservative_raster_pre_snap import *
from OpenGL.raw.GLES2.NV.conservative_raster_pre_snap import _EXTENSION_NAME
def glInitConservativeRasterPreSnapNV():
    '''Return boolean indicating whether this extension is available'''
    # Local import mirrors the autogenerated extension template.
    from OpenGL import extensions
    return extensions.hasGLExtension( _EXTENSION_NAME )
### END AUTOGENERATED SECTION
|
[
"OpenGL.extensions.hasGLExtension"
] |
[((1130, 1172), 'OpenGL.extensions.hasGLExtension', 'extensions.hasGLExtension', (['_EXTENSION_NAME'], {}), '(_EXTENSION_NAME)\n', (1155, 1172), False, 'from OpenGL import extensions\n')]
|
# -*- coding: utf-8 -*-
import os
import platform
import pytest
import yaml
import giraffez
from giraffez.constants import *
from giraffez.errors import *
from giraffez.types import Columns
from giraffez.utils import *
@pytest.mark.usefixtures('config', 'tmpfiles')
class TestConfig(object):
    """Exercises giraffez Config/Secret get/set, locking, and permission checks."""
    def test_get_set_list_value(self, tmpfiles):
        with giraffez.Config(tmpfiles.conf, "w", tmpfiles.key) as config:
            value = config.get_value("test")
            # Missing keys resolve to an empty dict rather than raising.
            assert value == {}
            value = config.get_value("connections.default")
            assert value == "db1"
            config.set_value("connections.default", "db2")
            value = config.get_value("connections.default")
            assert value == "db2"
            value = config.list_value(decrypt=False)
    def test_get_multi_value(self, tmpfiles):
        with giraffez.Config(tmpfiles.conf, "w", tmpfiles.key) as config:
            value = config.get_value("connections")
    def test_get_trailing_dot(self, tmpfiles):
        # A trailing dot in the key path should be ignored.
        with giraffez.Config(tmpfiles.conf, "w", tmpfiles.key) as config:
            value1 = config.get_value("connections")
            value2 = config.get_value("connections.")
            assert value1 == value2
    def test_unset_value(self, tmpfiles):
        expected_dsn = "db2"
        with giraffez.Config(tmpfiles.conf, "w", tmpfiles.key) as config:
            config.unset_value("connections.db1")
            value = config.get_value("connections.db1")
            assert value == {}
    def test_read_only(self, tmpfiles):
        # Writing through a config opened read-only must raise.
        with pytest.raises(ConfigReadOnly):
            with giraffez.Config(tmpfiles.conf, "r", tmpfiles.key) as config:
                config.set_value("connections.default", "db2")
                config.write()
    def test_config_conf_missing(self, tmpfiles):
        with pytest.raises(ConfigNotFound):
            with giraffez.Config("None", "r", tmpfiles.key) as config:
                pass
    def test_config_key_missing(self, tmpfiles):
        with pytest.raises(KeyNotFound):
            with giraffez.Config(tmpfiles.conf, "r", "None") as config:
                pass
    def test_config_conf_bad_permissions(self, tmpfiles):
        # Tests for permissions on linux or unix-like system only. Windows
        # requires the use of Windows-only APIs to determine and set the
        # permissions on files.
        if platform.system() == 'Windows':
            return
        with pytest.raises(ConfigurationError):
            os.chmod(tmpfiles.conf, 0o655)
            with giraffez.Config(tmpfiles.conf, "r", tmpfiles.key) as config:
                pass
        # Restore strict permissions so later tests can open the file.
        os.chmod(tmpfiles.conf, 0o600)
    def test_config_key_bad_permissions(self, tmpfiles):
        # Tests for permissions on linux or unix-like system only. Windows
        # requires the use of Windows-only APIs to determine and set the
        # permissions on files.
        if platform.system() == 'Windows':
            return
        with pytest.raises(ConfigurationError):
            os.chmod(tmpfiles.key, 0o655)
            with giraffez.Config(tmpfiles.conf, "r", tmpfiles.key) as config:
                pass
        # Restore strict permissions so later tests can open the key.
        os.chmod(tmpfiles.key, 0o400)
    def test_config_connections(self, tmpfiles):
        with giraffez.Config(tmpfiles.conf, "r", tmpfiles.key) as config:
            connections = config.connections
            dsn = config.get_connection("db1")
            assert dsn.get("host") == None
    def test_config_lock(self, tmpfiles):
        with giraffez.Config(tmpfiles.conf, "r", tmpfiles.key) as config:
            giraffez.Config.lock_connection(tmpfiles.conf, "db1", key=tmpfiles.key)
            giraffez.Config.lock_connection(tmpfiles.conf, "db1", key=tmpfiles.key)
            # Third lock attempt exceeds the limit and raises.
            with pytest.raises(ConnectionLock):
                giraffez.Config.lock_connection(tmpfiles.conf, "db1", key=tmpfiles.key)
            config.reload()
            lock_value = config.get_value("connections.db1.lock")
            assert lock_value == 2
            # Unlocking removes the lock entry entirely (empty dict).
            giraffez.Config.unlock_connection(tmpfiles.conf, "db1", key=tmpfiles.key)
            config.reload()
            lock_value = config.get_value("connections.db1.lock")
            assert lock_value == {}
    def test_secret_decrypt(self, tmpfiles):
        expected_username = "user123"
        expected_password = "<PASSWORD>"
        with giraffez.Config(tmpfiles.conf, "w", tmpfiles.key) as config:
            config.set_value("connections.db1.username", expected_username)
            config.set_value("connections.db1.password", expected_password)
            config.write()
        # Secret supports both fully-qualified and short key paths.
        with giraffez.Secret(tmpfiles.conf, "r", tmpfiles.key) as secret:
            username, password = secret("connections.db1.username, connections.db1.password")
            assert expected_username == username
            assert expected_password == password
        with giraffez.Secret(tmpfiles.conf, "w", tmpfiles.key) as secret:
            secret.set("db1.username", expected_username)
            secret.set("db1.password", expected_password)
            username, password = secret("db1.username, db1.password")
            assert expected_username == username
            assert expected_password == password
|
[
"os.chmod",
"giraffez.Config.unlock_connection",
"pytest.raises",
"giraffez.Config",
"giraffez.Config.lock_connection",
"giraffez.Secret",
"platform.system",
"pytest.mark.usefixtures"
] |
[((225, 270), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""config"""', '"""tmpfiles"""'], {}), "('config', 'tmpfiles')\n", (248, 270), False, 'import pytest\n'), ((2649, 2677), 'os.chmod', 'os.chmod', (['tmpfiles.conf', '(384)'], {}), '(tmpfiles.conf, 384)\n', (2657, 2677), False, 'import os\n'), ((3178, 3205), 'os.chmod', 'os.chmod', (['tmpfiles.key', '(256)'], {}), '(tmpfiles.key, 256)\n', (3186, 3205), False, 'import os\n'), ((359, 408), 'giraffez.Config', 'giraffez.Config', (['tmpfiles.conf', '"""w"""', 'tmpfiles.key'], {}), "(tmpfiles.conf, 'w', tmpfiles.key)\n", (374, 408), False, 'import giraffez\n'), ((860, 909), 'giraffez.Config', 'giraffez.Config', (['tmpfiles.conf', '"""w"""', 'tmpfiles.key'], {}), "(tmpfiles.conf, 'w', tmpfiles.key)\n", (875, 909), False, 'import giraffez\n'), ((1034, 1083), 'giraffez.Config', 'giraffez.Config', (['tmpfiles.conf', '"""w"""', 'tmpfiles.key'], {}), "(tmpfiles.conf, 'w', tmpfiles.key)\n", (1049, 1083), False, 'import giraffez\n'), ((1323, 1372), 'giraffez.Config', 'giraffez.Config', (['tmpfiles.conf', '"""w"""', 'tmpfiles.key'], {}), "(tmpfiles.conf, 'w', tmpfiles.key)\n", (1338, 1372), False, 'import giraffez\n'), ((1575, 1604), 'pytest.raises', 'pytest.raises', (['ConfigReadOnly'], {}), '(ConfigReadOnly)\n', (1588, 1604), False, 'import pytest\n'), ((1842, 1871), 'pytest.raises', 'pytest.raises', (['ConfigNotFound'], {}), '(ConfigNotFound)\n', (1855, 1871), False, 'import pytest\n'), ((2028, 2054), 'pytest.raises', 'pytest.raises', (['KeyNotFound'], {}), '(KeyNotFound)\n', (2041, 2054), False, 'import pytest\n'), ((2399, 2416), 'platform.system', 'platform.system', ([], {}), '()\n', (2414, 2416), False, 'import platform\n'), ((2463, 2496), 'pytest.raises', 'pytest.raises', (['ConfigurationError'], {}), '(ConfigurationError)\n', (2476, 2496), False, 'import pytest\n'), ((2510, 2538), 'os.chmod', 'os.chmod', (['tmpfiles.conf', '(429)'], {}), '(tmpfiles.conf, 429)\n', (2518, 2538), False, 'import os\n'), 
((2929, 2946), 'platform.system', 'platform.system', ([], {}), '()\n', (2944, 2946), False, 'import platform\n'), ((2993, 3026), 'pytest.raises', 'pytest.raises', (['ConfigurationError'], {}), '(ConfigurationError)\n', (3006, 3026), False, 'import pytest\n'), ((3040, 3067), 'os.chmod', 'os.chmod', (['tmpfiles.key', '(429)'], {}), '(tmpfiles.key, 429)\n', (3048, 3067), False, 'import os\n'), ((3271, 3320), 'giraffez.Config', 'giraffez.Config', (['tmpfiles.conf', '"""r"""', 'tmpfiles.key'], {}), "(tmpfiles.conf, 'r', tmpfiles.key)\n", (3286, 3320), False, 'import giraffez\n'), ((3523, 3572), 'giraffez.Config', 'giraffez.Config', (['tmpfiles.conf', '"""r"""', 'tmpfiles.key'], {}), "(tmpfiles.conf, 'r', tmpfiles.key)\n", (3538, 3572), False, 'import giraffez\n'), ((3596, 3667), 'giraffez.Config.lock_connection', 'giraffez.Config.lock_connection', (['tmpfiles.conf', '"""db1"""'], {'key': 'tmpfiles.key'}), "(tmpfiles.conf, 'db1', key=tmpfiles.key)\n", (3627, 3667), False, 'import giraffez\n'), ((3680, 3751), 'giraffez.Config.lock_connection', 'giraffez.Config.lock_connection', (['tmpfiles.conf', '"""db1"""'], {'key': 'tmpfiles.key'}), "(tmpfiles.conf, 'db1', key=tmpfiles.key)\n", (3711, 3751), False, 'import giraffez\n'), ((4029, 4102), 'giraffez.Config.unlock_connection', 'giraffez.Config.unlock_connection', (['tmpfiles.conf', '"""db1"""'], {'key': 'tmpfiles.key'}), "(tmpfiles.conf, 'db1', key=tmpfiles.key)\n", (4062, 4102), False, 'import giraffez\n'), ((4371, 4420), 'giraffez.Config', 'giraffez.Config', (['tmpfiles.conf', '"""w"""', 'tmpfiles.key'], {}), "(tmpfiles.conf, 'w', tmpfiles.key)\n", (4386, 4420), False, 'import giraffez\n'), ((4624, 4673), 'giraffez.Secret', 'giraffez.Secret', (['tmpfiles.conf', '"""r"""', 'tmpfiles.key'], {}), "(tmpfiles.conf, 'r', tmpfiles.key)\n", (4639, 4673), False, 'import giraffez\n'), ((4890, 4939), 'giraffez.Secret', 'giraffez.Secret', (['tmpfiles.conf', '"""w"""', 'tmpfiles.key'], {}), "(tmpfiles.conf, 'w', tmpfiles.key)\n", 
(4905, 4939), False, 'import giraffez\n'), ((1623, 1672), 'giraffez.Config', 'giraffez.Config', (['tmpfiles.conf', '"""r"""', 'tmpfiles.key'], {}), "(tmpfiles.conf, 'r', tmpfiles.key)\n", (1638, 1672), False, 'import giraffez\n'), ((1890, 1932), 'giraffez.Config', 'giraffez.Config', (['"""None"""', '"""r"""', 'tmpfiles.key'], {}), "('None', 'r', tmpfiles.key)\n", (1905, 1932), False, 'import giraffez\n'), ((2073, 2116), 'giraffez.Config', 'giraffez.Config', (['tmpfiles.conf', '"""r"""', '"""None"""'], {}), "(tmpfiles.conf, 'r', 'None')\n", (2088, 2116), False, 'import giraffez\n'), ((2558, 2607), 'giraffez.Config', 'giraffez.Config', (['tmpfiles.conf', '"""r"""', 'tmpfiles.key'], {}), "(tmpfiles.conf, 'r', tmpfiles.key)\n", (2573, 2607), False, 'import giraffez\n'), ((3087, 3136), 'giraffez.Config', 'giraffez.Config', (['tmpfiles.conf', '"""r"""', 'tmpfiles.key'], {}), "(tmpfiles.conf, 'r', tmpfiles.key)\n", (3102, 3136), False, 'import giraffez\n'), ((3769, 3798), 'pytest.raises', 'pytest.raises', (['ConnectionLock'], {}), '(ConnectionLock)\n', (3782, 3798), False, 'import pytest\n'), ((3816, 3887), 'giraffez.Config.lock_connection', 'giraffez.Config.lock_connection', (['tmpfiles.conf', '"""db1"""'], {'key': 'tmpfiles.key'}), "(tmpfiles.conf, 'db1', key=tmpfiles.key)\n", (3847, 3887), False, 'import giraffez\n')]
|
#! /usr/bin/env python3
import argparse
import collections.abc
import glob
import json
import os
import re
import shutil
import subprocess
import sys
import time
from ast import literal_eval
from collections import OrderedDict
from itertools import combinations
from pathlib import Path
import mne
import pandas as pd
import yaml
import pydeface.utils as pdu
from bids_validator import BIDSValidator
from mne_bids import make_dataset_description, write_raw_bids
import pkg_resources
# Mapping from scanner/device shorthand to its acquisition database root
# on the NeuroSpin shared filesystem.
NEUROSPIN_DATABASES = {
    'prisma': '/neurospin/acquisition/database/Prisma_fit',
    'trio': '/neurospin/acquisition/database/TrioTim',
    '7T': '/neurospin/acquisition/database/Investigational_Device_7T',
    'meg': '/neurospin/acquisition/neuromag/data',
}
class Bcolors:
    """Colors to improve print statements' readability
    Example:
        `print(f"{Bcolors.OKBLUE}Hello World!{Bcolors.ENDC}")`
    """
    # ANSI terminal escape sequences; ENDC resets all attributes.
    HEADER = '\033[95m'
    OKBLUE = '\033[94m'
    OKGREEN = '\033[92m'
    WARNING = '\033[93m'
    FAIL = '\033[91m'
    ENDC = '\033[0m'
    BOLD = '\033[1m'
    UNDERLINE = '\033[4m'
def yes_no(question: str, default: str = None) -> bool:
    """A simple yes/no prompt
    Args:
        question (str): The question to be answered.
        default (str, optional): Default answer to `question`, either
            "yes" or "no". Defaults to None (no default; keep prompting).
    Raises:
        ValueError: Raise `ValueError` when default answer is not
            `yes` or `no`.
    Returns:
        bool: Boolean answer to the yes/no question.
    """
    valid = {"yes": True, "y": True, "no": False, "n": False}
    if default is None:
        prompt = " [y/n] "
    elif default == "yes":
        prompt = " [Y/n] "
    elif default == "no":
        prompt = " [y/N] "
    else:
        raise ValueError(f"invalid default answer: '{default}'")
    while True:
        choice = input(question + prompt).lower()
        # Empty input selects the default answer, when one was given.
        if choice == '' and default is not None:
            return valid[default]
        if choice in valid:
            return valid[choice]
        print("Please respond with 'yes' or 'no' " "(or 'y' or 'n').\n")
def file_manager_default_file(main_path,
                              filter_list,
                              file_tag,
                              file_type='*',
                              allow_other_fields=True):
    """Path to the most specific file with respect to optional filters.

    Each filter is a pair [key, value], like [sub, 01] or [ses, 02].
    Following the BIDS standard, files can be of the form
    [key-value_]...[key-value_]file_tag.file_type.  Filter combinations are
    tried from most specific (all filters) down to no filter at all; the
    first combination that matches any file wins.
    """
    candidate_filters = []
    for size in range(len(filter_list), 0, -1):
        candidate_filters.extend(combinations(filter_list, size))
    candidate_filters.append([])
    for filt in candidate_filters:
        matches = get_bids_files(main_path,
                                 sub_folder=False,
                                 file_type=file_type,
                                 file_tag=file_tag,
                                 filters=filt,
                                 allow_other_fields=allow_other_fields)
        if matches:
            return matches[0]
    return None
def file_reference(img_path):
    """Parse a BIDS-style filename into a dict of its components.

    Returns a dict with 'file_path', 'file_basename', 'file_tag',
    'file_type', 'file_fields' (the joined 'key-value_' prefix),
    'fields_ordered' (field keys in order of appearance), plus one
    entry per field key mapping to its value.
    """
    basename = os.path.basename(img_path)
    info = {'file_path': img_path, 'file_basename': basename}
    # Everything before the last '_' is key-value fields; the remainder is
    # '<tag>.<type>' where the type may itself contain dots (e.g. nii.gz).
    *field_parts, tail = basename.split('_')
    info['file_tag'], info['file_type'] = tail.split('.', 1)
    info['file_fields'] = ''
    info['fields_ordered'] = []
    for part in field_parts:
        info['file_fields'] += part + '_'
        key, val = part.split('-')
        info['fields_ordered'].append(key)
        info[key] = val
    return info
def get_bids_files(main_path,
                   file_tag='*',
                   file_type='*',
                   sub_id='*',
                   file_folder='*',
                   filters=None,
                   ref=False,
                   sub_folder=True,
                   allow_other_fields=True):
    """Return files following bids spec
    Filters are of the form (key, value). Only one filter per key allowed.
    A file for which a filter do not apply will be discarded.
    """
    name_pattern = 'sub-%s*_%s.%s' % (sub_id, file_tag, file_type)
    if sub_folder:
        # Datasets may or may not have a session level under each subject.
        has_sessions = glob.glob(os.path.join(main_path, 'sub-*', 'ses-*'))
        if has_sessions:
            pattern = os.path.join(main_path, 'sub-%s' % sub_id, 'ses-*',
                                   file_folder, name_pattern)
        else:
            pattern = os.path.join(main_path, 'sub-%s' % sub_id,
                                   file_folder, name_pattern)
    else:
        pattern = os.path.join(main_path, '*%s.%s' % (file_tag, file_type))
    found = sorted(glob.glob(pattern))
    if filters:
        if not allow_other_fields:
            # Keep only files whose field count does not exceed the filters.
            max_parts = len(filters) + 1
            found = [
                path for path in found
                if len(os.path.basename(path).split('_')) <= max_parts
            ]
        references = [file_reference(path) for path in found]
        for key, value in filters:
            references = [
                entry for entry in references
                if key in entry and entry[key] == value
            ]
    else:
        references = [file_reference(path) for path in found]
    if ref:
        return references
    return [entry['file_path'] for entry in references]
def bids_copy_events(behav_path='exp_info/recorded_events',
                     data_root_path='',
                     dataset_name=None):
    """Copy recorded event files into the matching BIDS dataset folders.

    Looks for ``sub-*[/ses-*]/func`` folders under *behav_path* and copies
    every file found there into the same relative location inside the
    dataset returned by :func:`get_bids_default_path`.
    """
    dataset_name, data_path = get_bids_default_path(data_root_path,
                                                    dataset_name)
    # ~ print(os.path.join(data_root_path, behav_path, 'sub-*', 'ses-*'))
    if glob.glob(os.path.join(data_root_path, behav_path, 'sub-*', 'ses-*')):
        sub_folders = glob.glob(
            os.path.join(behav_path, 'sub-*', 'ses-*', 'func'))
    else:
        # ~ print(os.path.join(data_root_path, behav_path,'sub-*', 'func'))
        sub_folders = glob.glob(
            os.path.join(data_root_path, behav_path, 'sub-*', 'func'))
    # raise warning if no folder is found in recorded events
    if not sub_folders:
        print(
            f'{Bcolors.WARNING}BIDS IMPORT WARNING: NO EVENTS FILE{Bcolors.ENDC}'
        )
    else:
        for sub_folder in sub_folders:
            # ~ file_path = sub_folder.replace(behav_path + '/', '')
            file_path = sub_folder
            for file_name in os.listdir(os.path.join(sub_folder)):
                # ~ dest_directory = os.path.join(data_path, file_path)
                # ~ if not os.path.exists(dest_directory):
                # ~ os.makedirs(dest_directory)
                file_ext = []
                last = ''
                # Walk up from the func folder, collecting path components
                # until the 'recorded_events' root (or filesystem root).
                root, last = os.path.split(sub_folder)
                while last != 'recorded_events':
                    if last == '':
                        break
                    file_ext.append(last)
                    sub_folder = root
                    root, last = os.path.split(sub_folder)
                list_tmp = []
                # Rebuild the relative path ('sub-XX/ses-YY/func/') from the
                # collected components, in original (top-down) order.
                elements_path = [[item, '/'] for item in reversed(file_ext)]
                elements_path = [(list_tmp.append(item[0]),
                                  list_tmp.append(item[1]))
                                 for item in elements_path]
                ext = ''.join(list_tmp)
                shutil.copyfile(os.path.join(file_path, file_name),
                                os.path.join(data_path, ext, file_name))
def get_bids_path(data_root_path='',
                  subject_id='01',
                  folder='',
                  session_id=None):
    """Build the path ``<root>/sub-<id>[/ses-<session>]/<folder>``.

    When *session_id* is None the session component is omitted.
    """
    session_part = '' if session_id is None else 'ses-' + session_id
    return os.path.join(data_root_path, 'sub-' + subject_id, session_part, folder)
def _bids_label_given(value):
    """Return True when *value* is a usable BIDS label (not None, not NaN).

    NaN is detected via the ``value != value`` property so that missing
    cells coming from pandas are handled without relying on the ``pd.np``
    alias, which was removed in pandas 2.0 (the original ``is not
    pd.np.nan`` checks raise AttributeError on modern pandas).
    """
    if value is None:
        return False
    return not (isinstance(value, float) and value != value)


def get_bids_file_descriptor(subject_id,
                             task_id=None,
                             session_id=None,
                             acq_label=None,
                             dir_label=None,
                             rec_id=None,
                             run_id=None,
                             run_dir=None,
                             file_tag=None,
                             file_type=None):
    """ Creates a filename descriptor following BIDS.
    subject_id refers to the subject label
    task_id refers to the task label
    run_id refers to run index
    acq_label refers to acquisition parameters as a label
    rec_id refers to reconstruction parameters as a label
    """
    # BUG FIX: the original condition `if 'sub-' or 'sub' in subject_id:`
    # was always true (the non-empty literal 'sub-' is truthy), so the
    # 'sub-' prefix was never added for bare labels like '01'.
    if 'sub' in subject_id:
        descriptor = subject_id
    else:
        descriptor = 'sub-{0}'.format(subject_id)
    # Entity order follows BIDS: ses, task, acq, dir, rec, dir(run), run.
    if _bids_label_given(session_id):
        descriptor += '_ses-{0}'.format(session_id)
    if _bids_label_given(task_id):
        descriptor += '_task-{0}'.format(task_id)
    if _bids_label_given(acq_label):
        descriptor += '_acq-{0}'.format(acq_label)
    if _bids_label_given(dir_label):
        descriptor += '_dir-{0}'.format(dir_label)
    if _bids_label_given(rec_id):
        descriptor += '_rec-{0}'.format(rec_id)
    if _bids_label_given(run_dir):
        descriptor += '_dir-{0}'.format(run_dir)
    if _bids_label_given(run_id):
        descriptor += '_run-{0}'.format(run_id)
    # Suffix and extension are appended only when both are provided.
    if (file_tag is not None) and (file_type is not None):
        descriptor += '_{0}.{1}'.format(file_tag, file_type)
    return descriptor
def get_bids_default_path(data_root_path='', dataset_name=None):
    """Return the raw dataset folder name and its full path.

    Falls back to the conventional 'rawdata' folder name when
    *dataset_name* is None.
    """
    name = 'rawdata' if dataset_name is None else dataset_name
    return name, os.path.join(data_root_path, name)
def bids_init_dataset(data_root_path='',
                      dataset_name=None,
                      dataset_description=None,
                      readme='',
                      changes=''):
    """Create directories and files missing to follow bids.
    Files and folders already created will be left untouched.
    This is an utility to initialize all files that should be present
    according to the standard. Particularly those that should be filled
    manually like README files.
    dataset_description.json : interactive mode to fill in. Or later on if
    the user wants. By default:
        Name: dataset_name
        BidsVersion: 1.0.0
    README is quite free as a file
    CHANGES follow CPAN standards
    """
    # CHECK DATASET REPOSITORY
    dataset_name, dataset_name_path = get_bids_default_path(
        data_root_path, dataset_name)
    if not os.path.exists(dataset_name_path):
        os.makedirs(dataset_name_path)
    # CHECK dataset_description.json FILE
    description_file = os.path.exists(
        os.path.join(dataset_name_path, 'dataset_description.json'))
    overwrite_datadesc_file = True
    if description_file:
        # Existing file: ask the user before clobbering it.
        overwrite_datadesc_file = yes_no(
            '\nA dataset_description.json already exists, do you want to overwrite?',
            default="yes")
    if overwrite_datadesc_file or not description_file:
        data_descrip = yes_no(
            '\nDo you want to create or overwrite the dataset_description.json?',
            default="yes")
        if data_descrip:
            # Interactive mode: every metadata field is prompted for.
            print(
                '\nIf you do not know all information: pass and edit the file later.'
            )
            name = input("\nType the name of this BIDS dataset: ").capitalize()
            authors = input("\nA list of authors like `a, b, c`: ").capitalize()
            acknowledgements = input(
                "\nA list of acknowledgements like `a, b, c`: ").capitalize()
            how_to_acknowledge = input(
                "\nEither a str describing how to acknowledge this dataset OR a list of publications that should be cited: "
            )
            funding = input(
                '\nList of sources of funding (e.g., grant numbers). Must be a list of strings or a single comma separated string like `a, b, c`: '
            )
            references_and_links = input(
                "\nList of references to publication that contain information on the dataset, or links. Must be a list of strings or a single comma separated string like `a, b, c`: "
            )
            doi = input('\nThe DOI for the dataset: ')
            make_dataset_description(dataset_name_path,
                                     name=name,
                                     data_license=None,
                                     authors=authors,
                                     acknowledgements=str(acknowledgements),
                                     how_to_acknowledge=how_to_acknowledge,
                                     funding=str(funding),
                                     references_and_links=references_and_links,
                                     doi=doi,
                                     verbose=False)
        else:
            # Minimal fallback: description with just the dataset name.
            print(
                "\nYou may update the README file later on. A README file by default has been created."
            )
            make_dataset_description(dataset_name_path, name=dataset_name)
    # CHECK CHANGES FILE / TEXT FILE CPAN CONVENTION
    changes_file = os.path.join(dataset_name_path, 'CHANGES')
    changes_file_exists = os.path.exists(changes_file)
    overwrite_changes_file = True
    if changes_file_exists:
        overwrite_changes_file = yes_no(
            '\nA CHANGES file already exists, do you want to overwrite?',
            default="yes")
    if overwrite_changes_file or not changes_file_exists:
        changes = yes_no('\nDo you want to create/overwrite the CHANGES file?',
                         default="yes")
        if changes:
            changes_input = input("Type your text: ")
            with open(changes_file, 'w', encoding="utf-8") as fid:
                fid.write(str(changes_input))
    # CHECK README FILE / TEXT FILE
    readme_file = os.path.join(os.path.join(dataset_name_path, 'README'))
    readme_file_exist = os.path.exists(readme_file)
    overwrite_readme_file = True
    if readme_file_exist:
        overwrite_readme_file = yes_no(
            '\nA README file already exists, do you want to overwrite?',
            default="yes")
    if overwrite_readme_file or not readme_file_exist:
        readme = yes_no('\nDo you want to create/complete the README file?',
                        default="yes")
        if not readme:
            # Placeholder text so the mandatory README always exists.
            readme_input = "TO BE COMPLETED BY THE USER"
        else:
            readme_input = input("Type your text: ")
        with open(readme_file, 'w') as fid:
            fid.write(readme_input)
def bids_acquisition_download(data_root_path='',
dataset_name=None,
force_download=False,
behav_path='exp_info/recorded_events',
copy_events=False,
deface=False,
dry_run=False):
"""Automatically download files from neurospin server to a BIDS dataset.
Download-database is based on NeuroSpin server conventions.
Options are 'prisma', 'trio' and custom path.
Prisma db_path = '/neurospin/acquisition/database/Prisma_fit'
Trio db_path = '/neurospin/acquisition/database/TrioTim'
The bids dataset is created if necessary before download with some
empty mandatory files to be filled like README in case they don't exist.
The download depends on the file '[sub-*_][ses-*_]download.csv' contained
in the folder 'exp_info'.
NIP and acq date of the subjects will be taken automatically from
exp_info/participants_to_import.tsv file that follows bids standard. The
file will be copied in the dataset folder without the NIP column for
privacy.
Possible exceptions
1) exp_info directory not found
2) participants_to_import.tsv not found
3) download files not found
4) Acquisition directory in neurospin server not found
5) There is more than one acquisition directory (Have to ask manip for
extra digits for NIP, the NIP then would look like xxxxxxxx-ssss)
6) Event file corresponding to downloaded bold.nii not found
"""
####################################
# CHECK PATHS AND FILES
####################################
# exp_info path where is the participants_to_import.tsv
# ~ print(data_root_path)
exp_info_path = os.path.join(data_root_path, 'exp_info')
if not os.path.exists(exp_info_path):
raise Exception('exp_info directory not found')
if os.path.isfile(os.path.join(exp_info_path,
'participants_to_import.tsv')):
participants_to_import = os.path.join(exp_info_path,
'participants_to_import.tsv')
elif os.path.isfile(os.path.join(exp_info_path, 'participants.tsv')):
# Legacy name of participants_to_import.tsv
participants_to_import = os.path.join(exp_info_path,
'participants.tsv')
else:
raise Exception('exp_info/participants_to_import.tsv not found')
# Determine target path with the name of dataset
dataset_name, target_root_path = get_bids_default_path(
data_root_path, dataset_name)
# Create dataset directories and files if necessary
bids_init_dataset(data_root_path, dataset_name)
# Manage the report and download information
download_report = ('download_report_'
+ time.strftime("%d-%b-%Y-%H:%M:%S", time.gmtime())
+ '.csv')
report_path = os.path.join(data_root_path, 'report')
if not os.path.exists(report_path):
os.makedirs(report_path)
download_report = open(os.path.join(report_path, download_report), 'w')
# ~ report_line = '%s,%s,%s\n' % ('subject_id', 'session_id', 'download_file')
# ~ download_report.write(report_line)
list_imported = []
list_already_imported = []
list_warning = []
# Create a dataFrame to store participant information
# ~ df_participant = pd.DataFrame()
# Dict for info participant
# ~ list_all_participants = {}
dic_info_participants = OrderedDict()
# List for the bacth file for dc2nii_batch command
infiles_dcm2nii = []
# List for data to deface
files_for_pydeface = []
# Dict of descriptors to be added
dict_descriptors = {}
####################################
# GETTING INFORMATION TO DOWNLOAD
####################################
# Download command for each subject/session
# one line has the following information
# participant_id / NIP / infos_participant / session_label / acq_date / location / to_import
# Read the participants_to_import.tsv file for getting subjects/sessions to
# download
pop = pd.read_csv(participants_to_import,
dtype=str,
sep='\t',
na_filter=False,
index_col=False)
# ~ print(df_participant)
for _unused_index, subject_info in pop.iterrows():
subject_id = subject_info[0].strip()
# Fill the partcipant information for the participants_to_import.tsv
if subject_info['infos_participant'].strip():
info_participant = json.loads(
subject_info['infos_participant'].strip())
else:
info_participant = {}
if subject_id in dic_info_participants:
existing_items = dic_info_participants[subject_id]
# Existing items take precedence over new values
info_participant.update(existing_items)
dic_info_participants[subject_id] = info_participant
# Determine path to files in NeuroSpin server
download_database = subject_info['location'].strip()
db_path = NEUROSPIN_DATABASES.get(download_database, download_database)
# sub_path = target_root_path + subject_id + ses_path
# Mange the optional filters
# optional_filters = [('sub', subject_id)]
# if session_id is not None:
# optional_filters += [('ses', session_id)]
if 'session_label' in subject_info.index:
if subject_info['session_label'].strip():
session_id = subject_info['session_label'].strip()
else:
session_id = None
if session_id is None:
ses_path = ''
else:
ses_path = 'ses-' + session_id
if subject_id.isnumeric():
int(subject_id)
subject_id = 'sub-{0}'.format(subject_id)
else:
if 'sub-' not in subject_id:
print(
f'{Bcolors.WARNING}BIDS IMPORTATION WARNING: SUBJECT ID PROBABLY NOT CONFORM{Bcolors.ENDC}'
)
sub_path = os.path.join(target_root_path, subject_id, ses_path)
if not os.path.exists(sub_path):
os.makedirs(sub_path)
# Avoid redownloading subjects/sessions
if not force_download:
check_file = os.path.join(sub_path, 'downloaded')
if os.path.isfile(check_file):
continue
# DATE has to be transformed from BIDS to NeuroSpin server standard
# NeuroSpin standard is yyyymmdd -> Bids standard is YYYY-MM-DD
acq_date = subject_info['acq_date'].strip().replace('-', '')
# nip number
nip = subject_info['NIP'].strip()
# Get appropriate download file. As specific as possible
# ~ specs_path = file_manager_default_file(exp_info_path,
# ~ optional_filters, 'download',
# ~ file_type='tsv',
# ~ allow_other_fields=False)
# ~ report_line = '%s,%s,%s\n' % (subject_id, session_id, specs_path)
# ~ download_report.write(report_line)
# ~ specs = pd.read_csv(specs_path, dtype=str, sep='\t', index_col=False)
# Retrieve list of list for seqs to import
# One tuple is configured as :(file_to_import;acq_folder;acq_name)
# value[0] : num of seq
# value[1] : modality
# value[2] : part of ht file_name
to_import = subject_info['to_import'].strip()
if to_import:
seqs_to_retrieve = literal_eval(to_import)
if not isinstance(seqs_to_retrieve, collections.abc.Collection):
raise TypeError("seqs_to_retrieve must be a Collection")
else:
seqs_to_retrieve = []
print("Scans for ", nip)
print(json.dumps(to_import))
# Convert the first element if there is only one sequence, otherwise
# each value will be used as str and note tuple).
if len(seqs_to_retrieve) > 0 and isinstance(seqs_to_retrieve[0], str):
seqs_to_retrieve = [seqs_to_retrieve]
# download data, store information in batch files for anat/fmri
# download data for meg data
for value in seqs_to_retrieve:
# ~ print(seqs_to_retrieve)
def get_value(key, text):
m = re.search(key + '-(.+?)_', text)
if m:
return m.group(1)
else:
return None
run_task = get_value('task', value[2])
acq_label = get_value('acq', value[2])
run_id = get_value('run', value[2])
run_dir = get_value('dir', value[2])
run_session = session_id
tag = value[2].split('_')[-1]
target_path = os.path.join(sub_path, value[1])
if not os.path.exists(target_path):
os.makedirs(target_path)
# MEG CASE
if value[1] == 'meg':
# Create subject path if necessary
meg_path = os.path.join(sub_path, 'meg')
if not os.path.exists(meg_path):
os.makedirs(meg_path)
# Create the sub-emptyroom
# ~ sub-emptyroom_path = os.path.join(data_root_path, 'sub_emptyroom')
# ~ if not os.path.exists(sub-emptyroom_path):
# ~ os.makedirs(sub-emptyroom_path)
meg_file = os.path.join(db_path, nip, acq_date, value[0])
print(meg_file)
filename = get_bids_file_descriptor(subject_id,
task_id=run_task,
run_id=run_id,
run_dir=run_dir,
session_id=run_session,
file_tag=tag,
acq_label=acq_label,
file_type='tif')
# ~ output_path = os.path.join(target_path, filename)
# ~ print(output_path)
# ~ shutil.copyfile(meg_file, output_path)
raw = mne.io.read_raw_fif(meg_file, allow_maxshield=True)
write_raw_bids(raw, filename, target_path, overwrite=True)
# add event
# create json file
# copy the subject emptyroom
# ANAT and FUNC case
# todo: bad practices, to refactor for the sake of simplicity
elif value[1] in ('anat', 'func', 'dwi', 'fmap'):
download = True
dicom_paths = []
path_file_glob = ""
nip_dirs = glob.glob(
os.path.join(db_path, str(acq_date),
str(nip) + '*'))
# ~ print(os.path.join(db_path, str(acq_date), str(nip) + '*'))
if len(nip_dirs) < 1:
list_warning.append(
f"\n {Bcolors.WARNING}WARNING: No directory found for given NIP {nip} and SESSION {session_id}{Bcolors.ENDC}"
)
# ~ print(message)
# ~ download_report.write(message)
download = False
elif len(nip_dirs) > 1:
list_warning.append(
f"\n {Bcolors.WARNING}WARNING: Multiple path for given NIP {nip} \
SESSION {session_id} - please \
mention the session of the subject for this date, \
2 sessions for the same subject the same day are \
possible{Bcolors.ENDC}")
# ~ print(message)
# ~ download_report.write(message)
download = False
else:
path_file_glob = os.path.join(
nip_dirs[0], '{0:06d}_*'.format(int(value[0])))
# ~ print(path_file_glob)
dicom_paths = glob.glob(path_file_glob)
if not dicom_paths and download:
list_warning.append("\n WARNING: file not found "
+ path_file_glob)
# ~ print(message)
# ~ download_report.write(message)
elif download:
dicom_path = dicom_paths[0]
list_imported.append("\n IMPORTATION OF " + dicom_path)
# ~ print(message)
# ~ download_report.write(message)
# Expecting page 10 bids specification file name
filename = get_bids_file_descriptor(subject_id,
task_id=run_task,
run_id=run_id,
run_dir=run_dir,
session_id=run_session,
file_tag=tag,
acq_label=acq_label,
file_type='nii')
if value[1] == 'anat' and deface:
print("\n Deface with pydeface")
files_for_pydeface.append(
os.path.join(target_path, filename))
# append list for preparing the batch importation
file_to_convert = {
'in_dir': dicom_path,
'out_dir': target_path,
'filename': os.path.splitext(filename)[0]
}
is_file_to_import = os.path.join(
os.path.join(os.getcwd(), target_path, filename))
if os.path.isfile(is_file_to_import):
list_already_imported.append(
f" ALREADY IMPORTED: {is_file_to_import}")
else:
infiles_dcm2nii.append(file_to_convert)
# Add descriptor into the json file
if run_task:
filename_json = os.path.join(target_path,
filename[:-3] + 'json')
dict_descriptors.update(
{filename_json: {
'TaskName': run_task
}})
if len(value) == 4:
# ~ print('value[3]', value[3] )
filename_json = os.path.join(target_path,
filename[:-3] + 'json')
dict_descriptors.update({filename_json: value[3]})
# Importation and conversion of dicom files
dcm2nii_batch = dict(Options=dict(isGz='false',
isFlipY='false',
isVerbose='false',
isCreateBIDS='true',
isOnlySingleFile='false'),
Files=infiles_dcm2nii)
dcm2nii_batch_file = os.path.join(exp_info_path, 'batch_dcm2nii.yaml')
with open(dcm2nii_batch_file, 'w') as f:
_unused_data = yaml.dump(dcm2nii_batch, f)
print(
"\n------------------------------------------------------------------------------------"
)
print(
"------------------- SUMMARY OF IMPORTATION --------------------------------------"
)
print(
"--------------------------------------------------------------------------------------\n"
)
for i in list_already_imported:
print(i)
download_report.write(i)
print(
"\n------------------------------------------------------------------------------------"
)
for i in list_imported:
print(i)
download_report.write(i)
print(
"\n------------------------------------------------------------------------------------"
)
for i in list_warning:
print(i)
download_report.write(i)
print(
"\n------------------------------------------------------------------------------------"
)
print(
"------------------------------------------------------------------------------------\n"
)
download_report.close()
if dry_run:
print("\n NO IMPORTATION, DRY-RUN OPTION IS TRUE \n")
else:
print('\n')
cmd = ("dcm2niibatch", dcm2nii_batch_file)
subprocess.call(cmd)
# loop for checking if downloaded are ok and create the downloaded files
# done_file = open(os.path.join(sub_path, 'downloaded'), 'w')
# done_file.close()
# Data to deface
# ~ print(files_for_pydeface)
if files_for_pydeface:
try:
# warning: Isn't that too restrictive?
template = pkg_resources.resource_filename(
pkg_resources.Requirement.parse("neurospin_to_bids"),
"neurospin_to_bids/template_deface/mean_reg2mean.nii.gz")
facemask = pkg_resources.resource_filename(
pkg_resources.Requirement.parse("neurospin_to_bids"),
"neurospin_to_bids/template_deface/facemask.nii.gz")
except pkg_resources.DistributionNotFound:
template = (
"/neurospin/unicog/protocols/IRMf/Unicogfmri/BIDS/"
"unicog-dev/bids/template_deface/mean_reg2mean.nii.gz")
facemask = ("/neurospin/unicog/protocols/IRMf/Unicogfmri/BIDS/"
"unicog-dev/bids/template_deface/facemask.nii.gz")
print(template)
os.environ['FSLDIR'] = "/i2bm/local/fsl/bin/"
os.environ['FSLOUTPUTTYPE'] = "NIFTI_PAIR"
os.environ['PATH'] = os.environ['FSLDIR'] + ":" + os.environ['PATH']
for file_to_deface in files_for_pydeface:
print(f"\nDeface with pydeface {file_to_deface}")
pdu.deface_image(infile=file_to_deface,
outfile=file_to_deface,
facemask=facemask,
template=template,
force=True)
# Create participants.tsv in dataset folder (take out NIP column)
participants_path = os.path.join(target_root_path, 'participants.tsv')
df_participant = pd.DataFrame.from_dict(dic_info_participants,
orient="index")
df_participant.index.rename('participant_id', inplace=True)
df_participant.to_csv(participants_path, sep='\t', na_rep="n/a")
if dict_descriptors:
# ~ print(dict_descriptors)
# Adding a new key value pair in a json file such as taskname
for k, v in dict_descriptors.items():
with open(k, 'r+') as json_file:
for key, val in v.items():
temp_json = json.load(json_file)
temp_json[key] = val
json_file.seek(0)
json.dump(temp_json, json_file)
json_file.truncate()
# Copy recorded event files
if copy_events:
bids_copy_events(behav_path, data_root_path, dataset_name)
# Validate paths with BIDSValidator
# see also http://bids-standard.github.io/bids-validator/
validation_bids = yes_no('\nDo you want to use a bids validator?',
default=None)
if validation_bids:
bids_validation_report = os.path.join(report_path,
"report_bids_valisation.txt")
if shutil.which('bids-validator'):
cmd = f"bids-validator {target_root_path} > {bids_validation_report}"
subprocess.call(cmd, shell=True)
cmd = f"cat < {bids_validation_report}"
subprocess.call(cmd, shell=True)
print(
f'\n\nSee the summary of bids validator at {bids_validation_report}'
)
else:
validator = BIDSValidator()
os.chdir(target_root_path)
for file_to_test in Path('.').glob('./**/*'):
if file_to_test.is_file():
file_to_test = '/' + str(file_to_test)
print(
f'\nTest the following name of file : {file_to_test} with BIDSValidator'
)
print(validator.is_bids(file_to_test))
print('\n')
def main():
if sys.version_info < (3, 6):
sys.exit("error: neurospin_to_bids needs Python 3.6 or later")
# Parse arguments from console
parser = argparse.ArgumentParser(description='NeuroSpin to BIDS conversion')
parser.add_argument('--root-path', '-root_path',
default='.',
help='directory containing exp_info to download into')
parser.add_argument('--dataset-name', '-dataset_name',
type=str,
default='rawdata',
help='name of the directory created in ROOT_PATH')
parser.add_argument('--copy-events', '-copy_events',
action='store_true',
help='copy events from a directory with the same '
'structure')
parser.add_argument('--neurospin-database', '-neurospin_database',
type=str,
default='prisma',
help='neurospin server to download from')
parser.add_argument('--dry-run', '-n', '-dry-run',
action='store_true',
help='Test without importation of data')
# LOAD CONSOLE ARGUMENTS
args = parser.parse_args()
deface = yes_no('\nDo you want deface T1?', default=None)
bids_acquisition_download(data_root_path=args.root_path,
dataset_name=args.dataset_name,
force_download=False,
behav_path='exp_info/recorded_events',
copy_events=args.copy_events,
deface=deface,
dry_run=args.dry_run)
if __name__ == "__main__":
main()
|
[
"argparse.ArgumentParser",
"pandas.read_csv",
"yaml.dump",
"json.dumps",
"os.path.isfile",
"pathlib.Path",
"glob.glob",
"os.path.join",
"os.chdir",
"mne_bids.make_dataset_description",
"pydeface.utils.deface_image",
"pkg_resources.Requirement.parse",
"os.path.exists",
"re.search",
"bids_validator.BIDSValidator",
"json.dump",
"pandas.DataFrame.from_dict",
"os.path.basename",
"shutil.which",
"itertools.combinations",
"subprocess.call",
"collections.OrderedDict",
"sys.exit",
"json.load",
"mne.io.read_raw_fif",
"os.makedirs",
"mne_bids.write_raw_bids",
"time.gmtime",
"os.getcwd",
"os.path.splitext",
"ast.literal_eval",
"os.path.split"
] |
[((3269, 3295), 'os.path.basename', 'os.path.basename', (['img_path'], {}), '(img_path)\n', (3285, 3295), False, 'import os\n'), ((4777, 4793), 'glob.glob', 'glob.glob', (['files'], {}), '(files)\n', (4786, 4793), False, 'import glob\n'), ((7820, 7889), 'os.path.join', 'os.path.join', (['data_root_path', "('sub-' + subject_id)", 'session_id', 'folder'], {}), "(data_root_path, 'sub-' + subject_id, session_id, folder)\n", (7832, 7889), False, 'import os\n'), ((13427, 13469), 'os.path.join', 'os.path.join', (['dataset_name_path', '"""CHANGES"""'], {}), "(dataset_name_path, 'CHANGES')\n", (13439, 13469), False, 'import os\n'), ((13496, 13524), 'os.path.exists', 'os.path.exists', (['changes_file'], {}), '(changes_file)\n', (13510, 13524), False, 'import os\n'), ((14230, 14257), 'os.path.exists', 'os.path.exists', (['readme_file'], {}), '(readme_file)\n', (14244, 14257), False, 'import os\n'), ((16664, 16704), 'os.path.join', 'os.path.join', (['data_root_path', '"""exp_info"""'], {}), "(data_root_path, 'exp_info')\n", (16676, 16704), False, 'import os\n'), ((17872, 17910), 'os.path.join', 'os.path.join', (['data_root_path', '"""report"""'], {}), "(data_root_path, 'report')\n", (17884, 17910), False, 'import os\n'), ((18456, 18469), 'collections.OrderedDict', 'OrderedDict', ([], {}), '()\n', (18467, 18469), False, 'from collections import OrderedDict\n'), ((19093, 19187), 'pandas.read_csv', 'pd.read_csv', (['participants_to_import'], {'dtype': 'str', 'sep': '"""\t"""', 'na_filter': '(False)', 'index_col': '(False)'}), "(participants_to_import, dtype=str, sep='\\t', na_filter=False,\n index_col=False)\n", (19104, 19187), True, 'import pandas as pd\n'), ((30544, 30593), 'os.path.join', 'os.path.join', (['exp_info_path', '"""batch_dcm2nii.yaml"""'], {}), "(exp_info_path, 'batch_dcm2nii.yaml')\n", (30556, 30593), False, 'import os\n'), ((36317, 36384), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""NeuroSpin to BIDS conversion"""'}), 
"(description='NeuroSpin to BIDS conversion')\n", (36340, 36384), False, 'import argparse\n'), ((2703, 2731), 'itertools.combinations', 'combinations', (['filter_list', 'n'], {}), '(filter_list, n)\n', (2715, 2731), False, 'from itertools import combinations\n'), ((4270, 4311), 'os.path.join', 'os.path.join', (['main_path', '"""sub-*"""', '"""ses-*"""'], {}), "(main_path, 'sub-*', 'ses-*')\n", (4282, 4311), False, 'import os\n'), ((4323, 4339), 'glob.glob', 'glob.glob', (['files'], {}), '(files)\n', (4332, 4339), False, 'import glob\n'), ((4706, 4763), 'os.path.join', 'os.path.join', (['main_path', "('*%s.%s' % (file_tag, file_type))"], {}), "(main_path, '*%s.%s' % (file_tag, file_type))\n", (4718, 4763), False, 'import os\n'), ((5789, 5847), 'os.path.join', 'os.path.join', (['data_root_path', 'behav_path', '"""sub-*"""', '"""ses-*"""'], {}), "(data_root_path, behav_path, 'sub-*', 'ses-*')\n", (5801, 5847), False, 'import os\n'), ((9885, 9927), 'os.path.join', 'os.path.join', (['data_root_path', 'dataset_name'], {}), '(data_root_path, dataset_name)\n', (9897, 9927), False, 'import os\n'), ((10802, 10835), 'os.path.exists', 'os.path.exists', (['dataset_name_path'], {}), '(dataset_name_path)\n', (10816, 10835), False, 'import os\n'), ((10845, 10875), 'os.makedirs', 'os.makedirs', (['dataset_name_path'], {}), '(dataset_name_path)\n', (10856, 10875), False, 'import os\n'), ((10966, 11025), 'os.path.join', 'os.path.join', (['dataset_name_path', '"""dataset_description.json"""'], {}), "(dataset_name_path, 'dataset_description.json')\n", (10978, 11025), False, 'import os\n'), ((14163, 14204), 'os.path.join', 'os.path.join', (['dataset_name_path', '"""README"""'], {}), "(dataset_name_path, 'README')\n", (14175, 14204), False, 'import os\n'), ((16716, 16745), 'os.path.exists', 'os.path.exists', (['exp_info_path'], {}), '(exp_info_path)\n', (16730, 16745), False, 'import os\n'), ((16825, 16882), 'os.path.join', 'os.path.join', (['exp_info_path', 
'"""participants_to_import.tsv"""'], {}), "(exp_info_path, 'participants_to_import.tsv')\n", (16837, 16882), False, 'import os\n'), ((16953, 17010), 'os.path.join', 'os.path.join', (['exp_info_path', '"""participants_to_import.tsv"""'], {}), "(exp_info_path, 'participants_to_import.tsv')\n", (16965, 17010), False, 'import os\n'), ((17922, 17949), 'os.path.exists', 'os.path.exists', (['report_path'], {}), '(report_path)\n', (17936, 17949), False, 'import os\n'), ((17959, 17983), 'os.makedirs', 'os.makedirs', (['report_path'], {}), '(report_path)\n', (17970, 17983), False, 'import os\n'), ((18011, 18053), 'os.path.join', 'os.path.join', (['report_path', 'download_report'], {}), '(report_path, download_report)\n', (18023, 18053), False, 'import os\n'), ((21091, 21143), 'os.path.join', 'os.path.join', (['target_root_path', 'subject_id', 'ses_path'], {}), '(target_root_path, subject_id, ses_path)\n', (21103, 21143), False, 'import os\n'), ((30662, 30689), 'yaml.dump', 'yaml.dump', (['dcm2nii_batch', 'f'], {}), '(dcm2nii_batch, f)\n', (30671, 30689), False, 'import yaml\n'), ((31928, 31948), 'subprocess.call', 'subprocess.call', (['cmd'], {}), '(cmd)\n', (31943, 31948), False, 'import subprocess\n'), ((33820, 33870), 'os.path.join', 'os.path.join', (['target_root_path', '"""participants.tsv"""'], {}), "(target_root_path, 'participants.tsv')\n", (33832, 33870), False, 'import os\n'), ((33896, 33957), 'pandas.DataFrame.from_dict', 'pd.DataFrame.from_dict', (['dic_info_participants'], {'orient': '"""index"""'}), "(dic_info_participants, orient='index')\n", (33918, 33957), True, 'import pandas as pd\n'), ((36206, 36268), 'sys.exit', 'sys.exit', (['"""error: neurospin_to_bids needs Python 3.6 or later"""'], {}), "('error: neurospin_to_bids needs Python 3.6 or later')\n", (36214, 36268), False, 'import sys\n'), ((4361, 4479), 'os.path.join', 'os.path.join', (['main_path', "('sub-%s' % sub_id)", '"""ses-*"""', 'file_folder', "('sub-%s*_%s.%s' % (sub_id, file_tag, file_type))"], 
{}), "(main_path, 'sub-%s' % sub_id, 'ses-*', file_folder, \n 'sub-%s*_%s.%s' % (sub_id, file_tag, file_type))\n", (4373, 4479), False, 'import os\n'), ((4542, 4651), 'os.path.join', 'os.path.join', (['main_path', "('sub-%s' % sub_id)", 'file_folder', "('sub-%s*_%s.%s' % (sub_id, file_tag, file_type))"], {}), "(main_path, 'sub-%s' % sub_id, file_folder, 'sub-%s*_%s.%s' % (\n sub_id, file_tag, file_type))\n", (4554, 4651), False, 'import os\n'), ((5895, 5945), 'os.path.join', 'os.path.join', (['behav_path', '"""sub-*"""', '"""ses-*"""', '"""func"""'], {}), "(behav_path, 'sub-*', 'ses-*', 'func')\n", (5907, 5945), False, 'import os\n'), ((6078, 6135), 'os.path.join', 'os.path.join', (['data_root_path', 'behav_path', '"""sub-*"""', '"""func"""'], {}), "(data_root_path, behav_path, 'sub-*', 'func')\n", (6090, 6135), False, 'import os\n'), ((13291, 13353), 'mne_bids.make_dataset_description', 'make_dataset_description', (['dataset_name_path'], {'name': 'dataset_name'}), '(dataset_name_path, name=dataset_name)\n', (13315, 13353), False, 'from mne_bids import make_dataset_description, write_raw_bids\n'), ((17081, 17128), 'os.path.join', 'os.path.join', (['exp_info_path', '"""participants.tsv"""'], {}), "(exp_info_path, 'participants.tsv')\n", (17093, 17128), False, 'import os\n'), ((17216, 17263), 'os.path.join', 'os.path.join', (['exp_info_path', '"""participants.tsv"""'], {}), "(exp_info_path, 'participants.tsv')\n", (17228, 17263), False, 'import os\n'), ((21159, 21183), 'os.path.exists', 'os.path.exists', (['sub_path'], {}), '(sub_path)\n', (21173, 21183), False, 'import os\n'), ((21197, 21218), 'os.makedirs', 'os.makedirs', (['sub_path'], {}), '(sub_path)\n', (21208, 21218), False, 'import os\n'), ((21324, 21360), 'os.path.join', 'os.path.join', (['sub_path', '"""downloaded"""'], {}), "(sub_path, 'downloaded')\n", (21336, 21360), False, 'import os\n'), ((21376, 21402), 'os.path.isfile', 'os.path.isfile', (['check_file'], {}), '(check_file)\n', (21390, 21402), False, 
'import os\n'), ((22615, 22638), 'ast.literal_eval', 'literal_eval', (['to_import'], {}), '(to_import)\n', (22627, 22638), False, 'from ast import literal_eval\n'), ((22884, 22905), 'json.dumps', 'json.dumps', (['to_import'], {}), '(to_import)\n', (22894, 22905), False, 'import json\n'), ((23872, 23904), 'os.path.join', 'os.path.join', (['sub_path', 'value[1]'], {}), '(sub_path, value[1])\n', (23884, 23904), False, 'import os\n'), ((35112, 35167), 'os.path.join', 'os.path.join', (['report_path', '"""report_bids_valisation.txt"""'], {}), "(report_path, 'report_bids_valisation.txt')\n", (35124, 35167), False, 'import os\n'), ((35233, 35263), 'shutil.which', 'shutil.which', (['"""bids-validator"""'], {}), "('bids-validator')\n", (35245, 35263), False, 'import shutil\n'), ((6523, 6547), 'os.path.join', 'os.path.join', (['sub_folder'], {}), '(sub_folder)\n', (6535, 6547), False, 'import os\n'), ((6820, 6845), 'os.path.split', 'os.path.split', (['sub_folder'], {}), '(sub_folder)\n', (6833, 6845), False, 'import os\n'), ((17806, 17819), 'time.gmtime', 'time.gmtime', ([], {}), '()\n', (17817, 17819), False, 'import time\n'), ((23418, 23450), 're.search', 're.search', (["(key + '-(.+?)_')", 'text'], {}), "(key + '-(.+?)_', text)\n", (23427, 23450), False, 'import re\n'), ((23924, 23951), 'os.path.exists', 'os.path.exists', (['target_path'], {}), '(target_path)\n', (23938, 23951), False, 'import os\n'), ((23969, 23993), 'os.makedirs', 'os.makedirs', (['target_path'], {}), '(target_path)\n', (23980, 23993), False, 'import os\n'), ((24130, 24159), 'os.path.join', 'os.path.join', (['sub_path', '"""meg"""'], {}), "(sub_path, 'meg')\n", (24142, 24159), False, 'import os\n'), ((24529, 24575), 'os.path.join', 'os.path.join', (['db_path', 'nip', 'acq_date', 'value[0]'], {}), '(db_path, nip, acq_date, value[0])\n', (24541, 24575), False, 'import os\n'), ((25352, 25403), 'mne.io.read_raw_fif', 'mne.io.read_raw_fif', (['meg_file'], {'allow_maxshield': '(True)'}), '(meg_file, 
allow_maxshield=True)\n', (25371, 25403), False, 'import mne\n'), ((25421, 25479), 'mne_bids.write_raw_bids', 'write_raw_bids', (['raw', 'filename', 'target_path'], {'overwrite': '(True)'}), '(raw, filename, target_path, overwrite=True)\n', (25435, 25479), False, 'from mne_bids import make_dataset_description, write_raw_bids\n'), ((33471, 33589), 'pydeface.utils.deface_image', 'pdu.deface_image', ([], {'infile': 'file_to_deface', 'outfile': 'file_to_deface', 'facemask': 'facemask', 'template': 'template', 'force': '(True)'}), '(infile=file_to_deface, outfile=file_to_deface, facemask=\n facemask, template=template, force=True)\n', (33487, 33589), True, 'import pydeface.utils as pdu\n'), ((35367, 35399), 'subprocess.call', 'subprocess.call', (['cmd'], {'shell': '(True)'}), '(cmd, shell=True)\n', (35382, 35399), False, 'import subprocess\n'), ((35472, 35504), 'subprocess.call', 'subprocess.call', (['cmd'], {'shell': '(True)'}), '(cmd, shell=True)\n', (35487, 35504), False, 'import subprocess\n'), ((35681, 35696), 'bids_validator.BIDSValidator', 'BIDSValidator', ([], {}), '()\n', (35694, 35696), False, 'from bids_validator import BIDSValidator\n'), ((35713, 35739), 'os.chdir', 'os.chdir', (['target_root_path'], {}), '(target_root_path)\n', (35721, 35739), False, 'import os\n'), ((7073, 7098), 'os.path.split', 'os.path.split', (['sub_folder'], {}), '(sub_folder)\n', (7086, 7098), False, 'import os\n'), ((7459, 7493), 'os.path.join', 'os.path.join', (['file_path', 'file_name'], {}), '(file_path, file_name)\n', (7471, 7493), False, 'import os\n'), ((7527, 7566), 'os.path.join', 'os.path.join', (['data_path', 'ext', 'file_name'], {}), '(data_path, ext, file_name)\n', (7539, 7566), False, 'import os\n'), ((24183, 24207), 'os.path.exists', 'os.path.exists', (['meg_path'], {}), '(meg_path)\n', (24197, 24207), False, 'import os\n'), ((24229, 24250), 'os.makedirs', 'os.makedirs', (['meg_path'], {}), '(meg_path)\n', (24240, 24250), False, 'import os\n'), ((32382, 32434), 
'pkg_resources.Requirement.parse', 'pkg_resources.Requirement.parse', (['"""neurospin_to_bids"""'], {}), "('neurospin_to_bids')\n", (32413, 32434), False, 'import pkg_resources\n'), ((32594, 32646), 'pkg_resources.Requirement.parse', 'pkg_resources.Requirement.parse', (['"""neurospin_to_bids"""'], {}), "('neurospin_to_bids')\n", (32625, 32646), False, 'import pkg_resources\n'), ((34473, 34493), 'json.load', 'json.load', (['json_file'], {}), '(json_file)\n', (34482, 34493), False, 'import json\n'), ((34605, 34636), 'json.dump', 'json.dump', (['temp_json', 'json_file'], {}), '(temp_json, json_file)\n', (34614, 34636), False, 'import json\n'), ((35776, 35785), 'pathlib.Path', 'Path', (['"""."""'], {}), "('.')\n", (35780, 35785), False, 'from pathlib import Path\n'), ((27251, 27276), 'glob.glob', 'glob.glob', (['path_file_glob'], {}), '(path_file_glob)\n', (27260, 27276), False, 'import glob\n'), ((29126, 29159), 'os.path.isfile', 'os.path.isfile', (['is_file_to_import'], {}), '(is_file_to_import)\n', (29140, 29159), False, 'import os\n'), ((29506, 29555), 'os.path.join', 'os.path.join', (['target_path', "(filename[:-3] + 'json')"], {}), "(target_path, filename[:-3] + 'json')\n", (29518, 29555), False, 'import os\n'), ((29927, 29976), 'os.path.join', 'os.path.join', (['target_path', "(filename[:-3] + 'json')"], {}), "(target_path, filename[:-3] + 'json')\n", (29939, 29976), False, 'import os\n'), ((4948, 4971), 'os.path.basename', 'os.path.basename', (['file_'], {}), '(file_)\n', (4964, 4971), False, 'import os\n'), ((28644, 28679), 'os.path.join', 'os.path.join', (['target_path', 'filename'], {}), '(target_path, filename)\n', (28656, 28679), False, 'import os\n'), ((28922, 28948), 'os.path.splitext', 'os.path.splitext', (['filename'], {}), '(filename)\n', (28938, 28948), False, 'import os\n'), ((29065, 29076), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (29074, 29076), False, 'import os\n')]
|
from pymongo import MongoClient
from bson import ObjectId
import json
# Database connection information
host = 'localhost'
port = 27017
dbName = 'cellsideAssistance'
class JSONEncoder(json.JSONEncoder):
def default(self, o):
if isinstance(o, ObjectId):
return str(o)
return json.JSONEncoder.default(self, o)
class MongoConnection(object):
def __init__(self):
client = MongoClient(host, port)
self.db = client[dbName]
def get_collection(self, name):
self.collection = self.db[name]
class PatientCollection(MongoConnection):
def __init__(self):
super(PatientCollection, self).__init__()
self.get_collection('patients')
def getPatientById(self, patientId):
return self.collection.find_one({'patientId': patientId})
def getPatientByIdSpecific(self, patientId, field):
return self.collection.find_one({'patientId': patientId}, { field : 1, '_id': 0 })
def getPatientByName(self, name):
return self.collection.find_one({'name': name})
def getPatientByNameSpecific(self, name, field):
return self.collection.find_one({'name': name}, { field : 1, '_id': 0 })
def updatePatient(self, patientId, patient):
return self.collection.update_one({'id': patientId}, patient)
def deletePatient(self, patientId):
return self.collection.delete_one({'id': patientId})
def createPatient(self, patient):
return self.collection.insert(patient)
|
[
"pymongo.MongoClient",
"json.JSONEncoder.default"
] |
[((308, 341), 'json.JSONEncoder.default', 'json.JSONEncoder.default', (['self', 'o'], {}), '(self, o)\n', (332, 341), False, 'import json\n'), ((415, 438), 'pymongo.MongoClient', 'MongoClient', (['host', 'port'], {}), '(host, port)\n', (426, 438), False, 'from pymongo import MongoClient\n')]
|
__all__ = ['PyReplyDecoder']
import pickle
from tron import Misc
from .ReplyDecoder import ReplyDecoder
class PyReplyDecoder(ReplyDecoder):
    """ Decode Replys encoded as single-line pickled python objects.
    """

    def __init__(self, **argv):
        ReplyDecoder.__init__(self, **argv)

        # How do we terminate encoded lines?
        #
        self.EOL = argv.get('EOL', '\f')

    def decode(self, buf, newData):
        """ Find and extract a single complete reply in the input buffer.

        Args:
            buf      - previously buffered, not-yet-decoded input.
            newData  - newly arrived data to append to buf (may be empty).

        Returns:
            (reply, buf) - the unpickled reply object, or None if no
            complete (or parseable) reply is available yet, plus the
            remaining undecoded buffer.
        """

        if newData:
            buf += newData

        if self.debug > 3:
            Misc.log('PyReply.decoder', 'called with EOL=%r and buf=%r' % (self.EOL, buf))

        eol = buf.find(self.EOL)
        if self.debug > 2:
            Misc.log('PyReply.decoder', 'eol at %d in buffer %r' % (eol, buf))

        # No complete reply found. make sure to return
        # the unmolested buffer.
        #
        if eol == -1:
            return None, buf

        replyString = buf[:eol]
        buf = buf[eol + len(self.EOL):]

        # Make sure to consume unparseable junk up to the next EOL.
        #
        try:
            r = pickle.loads(replyString)
        except (pickle.UnpicklingError, AttributeError, EOFError,
                ImportError, IndexError):
            # Bug fix: this previously caught SyntaxError, which
            # pickle.loads() does not raise on corrupt input; bad replies
            # therefore crashed the decoder instead of being skipped.
            # These are the exceptions the pickle docs list for loads().
            Misc.log('PyReply.decoder', 'Failed to unpickle %r' % (replyString))
            return None, buf

        if self.debug > 5:
            Misc.log('PyReply.decoder', 'extracted %r, returning %r' % (r, buf))

        return r, buf
|
[
"pickle.loads",
"tron.Misc.log"
] |
[((600, 678), 'tron.Misc.log', 'Misc.log', (['"""PyReply.decoder"""', "('called with EOL=%r and buf=%r' % (self.EOL, buf))"], {}), "('PyReply.decoder', 'called with EOL=%r and buf=%r' % (self.EOL, buf))\n", (608, 678), False, 'from tron import Misc\n'), ((752, 818), 'tron.Misc.log', 'Misc.log', (['"""PyReply.decoder"""', "('eol at %d in buffer %r' % (eol, buf))"], {}), "('PyReply.decoder', 'eol at %d in buffer %r' % (eol, buf))\n", (760, 818), False, 'from tron import Misc\n'), ((1150, 1175), 'pickle.loads', 'pickle.loads', (['replyString'], {}), '(replyString)\n', (1162, 1175), False, 'import pickle\n'), ((1354, 1422), 'tron.Misc.log', 'Misc.log', (['"""PyReply.decoder"""', "('extracted %r, returning %r' % (r, buf))"], {}), "('PyReply.decoder', 'extracted %r, returning %r' % (r, buf))\n", (1362, 1422), False, 'from tron import Misc\n'), ((1216, 1282), 'tron.Misc.log', 'Misc.log', (['"""PyReply.decoder"""', "('Failed to unpickle %r' % replyString)"], {}), "('PyReply.decoder', 'Failed to unpickle %r' % replyString)\n", (1224, 1282), False, 'from tron import Misc\n')]
|
#! /usr/bin/python3
# -*- coding: utf-8 -*-
#--------------------------------------------------------------------------------------------------
# Script to parse Japanese Wiktionary XML stream and export word information
#
# Usage:
# parse_wiktionary_ja.py [--sampling num] [--max num] [--quiet]
# (It reads the standard input and prints the result on the standard output.)
#
# Example:
# $ bzcat jawiktionary-latest-pages-articles.xml.bz2 |
# ./parse_wikipedia_ja.py > wiktionary-ja.tsv
#
# Copyright 2020 Google LLC
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
# except in compliance with the License. You may obtain a copy of the License at
# https://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software distributed under the
# License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific language governing permissions
# and limitations under the License.
#--------------------------------------------------------------------------------------------------
import logging
import html
import random
import regex
import sys
import tkrzw_dict
import xml.sax
import xml.sax.handler
# Fixed seed so that the per-article sampling (random.random() in XMLHandler)
# is reproducible across runs.
random.seed(19780211)
# Shared module-level logger provided by tkrzw_dict.
logger = tkrzw_dict.GetLogger()
class XMLHandler(xml.sax.handler.ContentHandler):
  """SAX handler that extracts English word entries from Japanese Wiktionary.

  Each complete wikitext article is sampled with probability sampling_ratio;
  sampled articles whose title looks like an English headword are parsed and
  printed to stdout as one TSV record per word ("word=...", part-of-speech
  glosses, inflections, pronunciations and related-word attributes).
  """

  def __init__(self, sampling_ratio, max_outputs):
    """Initializes the parsing state.

    Args:
      sampling_ratio: probability in (0, 1] of processing each article.
      max_outputs: abort parsing after this many records have been printed.
    """
    self.sampling_ratio = sampling_ratio
    self.max_outputs = max_outputs
    self.num_articles = 0
    self.num_outputs = 0
    self.tags = []
    self.title = None
    self.is_redirect = False
    self.has_restrictions = False
    self.model = None
    self.format = None
    self.text = None

  def startDocument(self):
    logger.info("Start the document")

  def endDocument(self):
    logger.info("End the document")

  def startElement(self, name, attrs):
    # Track the path of open elements and (re)initialize the per-page and
    # per-revision accumulators when the corresponding element opens.
    self.tags.append(name)
    if self.tags == ['mediawiki', 'page']:
      self.title = None
      self.is_redirect = False
      self.has_restrictions = False
    if self.tags == ['mediawiki', 'page', 'title']:
      self.title = ""
    if self.tags == ['mediawiki', 'page', 'redirect']:
      self.is_redirect = True
    if self.tags == ['mediawiki', 'page', 'restrictions']:
      self.has_restrictions = True
    if self.tags == ['mediawiki', 'page', 'revision', 'model']:
      self.model = ""
    if self.tags == ['mediawiki', 'page', 'revision', 'format']:
      self.format = ""
    if self.tags == ['mediawiki', 'page', 'revision', 'text']:
      self.text = ""

  def endElement(self, name):
    # When a <revision> closes, process the accumulated article if it is an
    # ordinary editable wikitext page, then clear the per-revision state.
    if self.tags == ['mediawiki', 'page', 'revision']:
      if (self.title and not self.is_redirect and not self.has_restrictions and
          self.model == 'wikitext' and self.format == 'text/x-wiki' and self.text):
        self.num_articles += 1
        if self.num_articles % 1000 == 0:
          logger.info("Article {}".format(self.num_articles))
        if random.random() <= self.sampling_ratio:
          self.processText()
      self.model = None
      self.format = None
      self.text = None
    self.tags.pop()
    if self.num_outputs >= self.max_outputs:
      logger.info("reached max outputs ({})".format(self.max_outputs))
      raise xml.sax.SAXException("reached max articles")

  def characters(self, content):
    # SAX may deliver element text in several chunks; append to whichever
    # accumulator is currently open.
    if self.tags == ['mediawiki', 'page', 'title']:
      self.title += content
    if self.tags == ['mediawiki', 'page', 'revision', 'model']:
      self.model += content
    if self.tags == ['mediawiki', 'page', 'revision', 'format']:
      self.format += content
    if self.tags == ['mediawiki', 'page', 'revision', 'text']:
      self.text += content

  def processText(self):
    """Parses the current article's wikitext and prints one TSV record."""
    title = self.title
    if title.find(":") >= 0: return
    if not regex.search(r"^[-\p{Latin}0-9 ]+$", title): return
    fulltext = html.unescape(self.text)
    fulltext = regex.sub(r"<!--.*?-->", "", fulltext)
    fulltext = regex.sub(r"(\n==+[^=]+==+)", "\\1\n", fulltext)
    output = []
    is_eng_head = False
    is_eng_cat = False
    mode = ""
    submode = ""
    infl_modes = set()
    sections = []
    synonyms = []
    hypernyms = []
    hyponyms = []
    antonyms = []
    derivatives = []
    relations = []
    # First pass: split the article into (mode, lines) sections and collect
    # related-word lists, tracking whether we are inside the English entry.
    for line in fulltext.split("\n"):
      line = line.strip()
      if regex.search(r"^==([^=]+)==$", line):
        lang = regex.sub(r"^==([^=]+)==$", r"\1", line).strip()
        lang = lang.lower()
        if lang in ("{{en}}", "{{eng}}", "{{english}}", "英語", "english"):
          is_eng_head = True
        elif lang.startswith("{{") or lang.endswith("語"):
          is_eng_head = False
          is_eng_cat = False
          mode = ""
          submode = ""
      elif regex.search(r"^===([^=]+)===$", line):
        mode = regex.sub(r"^===([^=]+)===$", r"\1", line).strip()
        mode = regex.sub(r":.*", "", mode).strip()
        mode = mode.lower()
        sections.append((mode,[]))
        submode = ""
      elif regex.search(r"^====+([^=]+)=+===$", line):
        submode = regex.sub(r"^====+([^=]+)=+===$", r"\1", line).strip()
        submode = regex.sub(r":.*", "", submode).strip()
        submode = submode.lower()
        if submode in ("{{noun}}", "{{name}}", "noun", "名詞", "固有名詞", "人名", "地名",
                       "{{verb}}", "verb", "動詞", "自動詞", "他動詞",
                       "{{adj}}", "{{adjective}}", "adjective", "形容詞",
                       "{{adv}}", "{{adverb}}", "adverb", "副詞",
                       "{{pronoun}}", "{{auxverb}}", "{{prep}}", "{{article}}, {{interj}}",
                       "{{pron}}", "{{pron|en}}", "{{pron|eng}}", "発音"):
          mode = submode
          sections.append((mode,[]))
          submode = ""
      elif regex.search(r"^\[\[category:(.*)\]\]$", line, regex.IGNORECASE):
        lang = regex.sub(r"^\[\[category:(.*)\]\]$", r"\1", line, flags=regex.IGNORECASE)
        if lang in ("{{en}}", "{{eng}}") or lang.find("英語") >= 0:
          is_eng_cat = True
        elif regex.search(r"^\{\{[a-z]{2,3}\}\}$", lang) or lang.find("語") >= 0:
          is_eng_cat = False
      elif (is_eng_head or is_eng_cat):
        if sections and not submode:
          section = sections[-1]
          section[1].append(line)
        def CheckMode(labels):
          # True when the current section (or explicit sub-section heading)
          # is one of the given relation labels.
          if mode and submode in labels:
            return True
          if mode in labels and not submode:
            return True
          return False
        rel_words = None
        if CheckMode(("{{syn}}", "synonym", "類義語")):
          rel_words = synonyms
        elif CheckMode(("{{hyper}}", "hypernym", "上位語")):
          rel_words = hypernyms
        elif CheckMode(("{{hypo}}", "hyponym", "下位語")):
          rel_words = hyponyms
        elif CheckMode(("{{ant}}", "antonym", "対義語")):
          rel_words = antonyms
        elif CheckMode(("{{derived}}", "{{drv}}", "derived terms", "derived term", "派生語")):
          rel_words = derivatives
        elif CheckMode(("{{rel}}", "related terms", "related term", "関連語")):
          rel_words = relations
        if rel_words != None:
          for rel_word in regex.findall(r"\{\{l\|en\|([- \p{Latin}]+?)\}\}", line):
            rel_words.append(rel_word)
          for rel_word in regex.findall(r"\[\[([- \p{Latin}]+?)\]\]", line):
            rel_words.append(rel_word)
    pronunciation_ipa_us = ""
    pronunciation_ipa_misc = ""
    pronunciation_sampa_us = ""
    pronunciation_sampa_misc = ""
    alternatives = []
    # Second pass: normalize each section heading to a canonical mode name and
    # extract pronunciations, inflections and glosses.
    for mode, lines in sections:
      mode = regex.sub(r":.*", "", mode).strip()
      mode = regex.sub(r"[0-9]+$", "", mode).strip()
      if regex.search(r"^\{\{(pron|発音)(\|(en|eng))?[0-9]?\}\}[0-9]?$", mode) or mode == "発音":
        mode = "pronunciation"
      elif mode in ("{{noun}}", "{{name}}", "noun", "名詞", "固有名詞", "人名", "地名"):
        mode = "noun"
      elif mode in ("{{verb}}", "verb", "動詞", "自動詞", "他動詞"):
        mode = "verb"
      elif mode in ("{{adj}}", "{{adjective}}", "adjective", "形容詞"):
        mode = "adjective"
      elif mode in ("{{adv}}", "{{adverb}}", "adverb", "副詞"):
        mode = "adverb"
      elif mode in ("{{pronoun}}", "pronoun", "代名詞", "人称代名詞", "指示代名詞",
                    "疑問代名詞", "関係代名詞"):
        mode = "pronoun"
      elif mode in ("{{aux}}", "{{auxverb}}", "auxiliary verb", "助動詞"):
        mode = "auxverb"
      elif mode in ("{{prep}}", "{{preposition}}", "preposition", "前置詞"):
        mode = "preposition"
      elif mode in ("{{det}}", "{{determiner}}", "determiner", "限定詞"):
        mode = "determiner"
      elif mode in ("{{article}}", "冠詞"):
        mode = "article"
      elif mode in ("{{interj}}", "{{interjection}}", "interjection", "間投詞", "感動詞"):
        mode = "interjection"
      elif mode in ("{{conj}}", "{{conjunction}}", "conjunction", "接続詞"):
        mode = "conjunction"
      elif mode in ("{{pref}}", "{{prefix}}", "prefix", "接頭辞"):
        mode = "prefix"
      elif mode in ("{{suf}}", "{{suffix}}", "suffix", "接尾辞"):
        # Bug fix: the Japanese heading was the typo "設備時" ("at equipment
        # time"), so "接尾辞" (suffix) sections were never recognized.
        mode = "suffix"
      elif mode in ("{{abbr}}", "{{abbreviation}}", "abbreviation", "略語"):
        mode = "abbreviation"
      elif mode in ("{{alter}}", "alternative", "alternative forms", "alternative form",
                    "代替", "代替語", "別表記", "異表記", "異綴", "異体"):
        mode = "alternative"
      else:
        mode = ""
      if mode == "pronunciation":
        for line in lines:
          if regex.search(r"\{\{ipa[0-9]?\|([^}|]+)(\|[^}|]+)*\}\}", line, regex.IGNORECASE):
            value = regex.sub(r".*\{\{ipa[0-9]?\|([^}|]+)(\|[^}|]+)*\}\}.*", r"\1",
                              line, flags=regex.IGNORECASE)
            value = self.TrimPronunciation(value, True)
            if value:
              if regex.search(r"(アメリカ|米)", line):
                pronunciation_ipa_us = value
              else:
                pronunciation_ipa_misc = value
          if regex.search(r"\{\{sampa\|([^}]+)\}\}", line, regex.IGNORECASE):
            value = regex.sub(r".*\{\{sampa\|([^}]+)\}\}.*", r"\1", line, flags=regex.IGNORECASE)
            value = self.TrimPronunciation(value, False)
            if value:
              if regex.search(r"(アメリカ|米)", line):
                pronunciation_sampa_us = value
              else:
                pronunciation_sampa_misc = value
          if regex.search(r"\{\{pron-en1\|([^\}]+)\}\}", line, regex.IGNORECASE):
            values = regex.sub(r".*\{\{pron-en1\|([^\}]+)\}\}.*", r"\1", line).split("|")
            if len(values) == 3:
              output.append("pronunciation_ahd={}".format(values[0]))
              output.append("pronunciation_ipa={}".format(values[1]))
              output.append("pronunciation_sampa={}".format(values[2]))
      elif mode:
        cat_lines = []
        for line in lines:
          if cat_lines and line.startswith("|"):
            # Bug fix: was "cat_lines[:-1] += line", which spliced the line's
            # characters into the list one by one; template continuation
            # lines belong at the end of the previous line.
            cat_lines[-1] += line
          else:
            cat_lines.append(line)
        current_text = ""
        last_level = 0
        for line in cat_lines:
          if line.startswith("--"): continue
          if line.find("{{lb|en|obsolete}}") >= 0: continue
          if ((regex.search("[^は]廃(語|用)", line) or line.find("{{label|en|archaic}}") >= 0) and
              not regex.search("(または|又は)", line)):
            continue
          if mode == "alternative":
            for alt in regex.findall(r"\{\{l\|en\|([- \p{Latin}]+?)\}\}", line):
              alternatives.append(alt)
            for alt in regex.findall(r"\[\[([- \p{Latin}]+?)\]\]", line):
              alternatives.append(alt)
            continue
          if regex.search(r"\{\{en-noun\|?([^\}]*)\}\}", line):
            if "noun" in infl_modes: continue
            infl_modes.add("noun")
            value = regex.sub(r".*\{\{en-noun\|?([^\}]*)\}\}.*", r"\1", line).strip()
            values = value.split("|") if value else []
            values = self.TrimInflections(values)
            stop = False
            for value in values:
              if value.startswith("head="):
                stop = True
            if not stop:
              plural = title + "s"
              if len(values) == 1 and values[0] == "es":
                plural = title + "es"
              elif len(values) == 1 and values[0] == "~":
                pass
              elif len(values) == 1 and values[0] == "-":
                plural = None
              elif len(values) == 1 and values[0].startswith("pl="):
                # Bug fix: the explicit "pl="/"sg=" tests used to come after
                # the generic one- and two-value branches and were
                # unreachable; they must be tried first.
                plural = regex.sub(".*=", "", values[0])
              elif len(values) == 1:
                plural = values[0]
              elif len(values) == 2 and values[0].startswith("sg=") and values[1] == "es":
                plural = title + "es"
              elif (len(values) == 2 and
                    values[0].startswith("sg=") and values[1].startswith("pl=")):
                plural = regex.sub(".*=", "", values[1])
              elif (len(values) == 2 and values[0] in ("-", "~") and
                    values[1] != "s" and values[1] != "es" and values[1] != "?"):
                plural = values[1]
              elif len(values) == 2 and values[1] == "es":
                stem = title if values[0] in ("-", "~") else values[0]
                plural = stem + "es"
              elif len(values) == 2 and values[1] == "ies":
                stem = title if values[0] in ("-", "~") else values[0]
                plural = stem + "ies"
              if self.IsGoodInflection(plural):
                output.append("inflection_noun_plural={}".format(plural))
          if regex.search(r"\{\{en-verb\|?([^\}]*)\}\}", line):
            if "verb" in infl_modes: continue
            infl_modes.add("verb")
            value = regex.sub(r".*\{\{en-verb\|?([^\}]*)\}\}.*", r"\1", line).strip()
            values = value.split("|") if value else []
            values = self.TrimInflections(values)
            stop = False
            if values and values[0].startswith("head="):
              if values[0][5:] != title:
                stop = True
              values.pop(0)
            for value in values:
              if value.startswith("head="):
                stop = True
            if not stop:
              singular = title + "s"
              present_participle = title + "ing"
              past = title + "ed"
              past_participle = title + "ed"
              if len(values) == 1 and values[0] == "es":
                singular = title + "es"
              elif len(values) == 1 and values[0] == "d":
                past = title + "d"
                past_participle = title + "d"
              elif len(values) == 1 and values[0] == "ing":
                present_participle = title + "ing"
              elif len(values) == 1:
                present_participle = values[0] + "ing"
                past = values[0] + "ed"
                past_participle = values[0] + "ed"
              elif len(values) == 2 and values[1] == "es":
                singular = values[0] + "es"
                present_participle = values[0] + "ing"
                past = values[0] + "ed"
                past_participle = values[0] + "ed"
              elif len(values) == 2 and values[1] == "ies":
                singular = values[0] + "ies"
                present_participle = values[0] + "ying"
                past = values[0] + "ied"
                past_participle = values[0] + "ied"
              elif len(values) == 2 and values[1] == "d":
                singular = values[0] + "s"
                present_participle = values[0] + "ing"
                past = values[0] + "d"
                past_participle = values[0] + "d"
              elif len(values) == 2 and values[1] == "ing":
                singular = values[0] + "es"
                present_participle = values[0] + "ing"
                past = values[0] + "ed"
                past_participle = values[0] + "ed"
              elif len(values) == 2:
                singular = values[0]
                present_participle = values[1]
                stem = regex.sub(r"e$", "", title)
                past = stem + "ed"
                past_participle = stem + "ed"
              elif len(values) == 3 and values[2] == "es":
                singular = values[0] + values[1] + "es"
                present_participle = values[0] + values[1] + "ing"
                past = values[0] + values[1] + "ed"
                past_participle = values[0] + values[1] + "ed"
              elif len(values) == 3 and values[1] == "i" and values[2] == "ed":
                singular = values[0] + "ies"
                present_participle = values[0] + "ying"
                past = values[0] + "ied"
                past_participle = values[0] + "ied"
              elif len(values) == 3 and values[2] == "ed":
                present_participle = values[0] + values[1] + "ing"
                past = values[0] + values[1] + "ed"
                past_participle = values[0] + values[1] + "ed"
              elif len(values) == 3 and values[1] == "k" and values[2] == "ing":
                present_participle = values[0] + "king"
              elif len(values) == 3 and values[1] == "n" and values[2] == "ing":
                present_participle = values[0] + "ning"
              elif len(values) == 3 and values[1] == "y" and values[2] == "ing":
                singular = values[0] + "ies"
                present_participle = values[0] + "ying"
                past = values[0] + "ied"
                past_participle = values[0] + "ied"
              elif len(values) == 3:
                singular = values[0]
                present_participle = values[1]
                past = values[2]
                past_participle = values[2]
              elif len(values) == 4:
                singular = values[0]
                present_participle = values[1]
                past = values[2]
                past_participle = values[3]
              if self.IsGoodInflection(singular):
                output.append("inflection_verb_singular={}".format(singular))
              if self.IsGoodInflection(present_participle):
                output.append("inflection_verb_present_participle={}".format(present_participle))
              if self.IsGoodInflection(past):
                output.append("inflection_verb_past={}".format(past))
              if self.IsGoodInflection(past_participle):
                output.append("inflection_verb_past_participle={}".format(past_participle))
          if regex.search(r"\{\{en-adj\|?([^\}]*)\}\}", line):
            if "adjective" in infl_modes: continue
            infl_modes.add("adjective")
            value = regex.sub(r".*\{\{en-adj\|?([^\}]*)\}\}.*", r"\1", line).strip()
            values = value.split("|") if value else []
            values = self.TrimInflections(values)
            stop = False
            if values and values[0].startswith("head="):
              if values[0][5:] != title:
                stop = True
              values.pop(0)
            for value in values:
              if value.startswith("head="):
                stop = True
            if not stop:
              comparative = None
              superlative = None
              if len(values) == 1 and values[0] == "er":
                stem = title
                stem = regex.sub(r"e$", "", stem)
                stem = regex.sub(r"([^aeiou])y$", r"\1i", stem)
                comparative = stem + "er"
                superlative = stem + "est"
              elif len(values) == 1 and values[0].endswith("er"):
                comparative = values[0]
                superlative = values[0][:-2] + "est"
              elif len(values) == 2 and values[1] == "er":
                comparative = values[0] + "er"
                superlative = values[0] + "est"
              elif len(values) == 2 and values[0] == "r" and values[1] == "more":
                comparative = title + "r"
                superlative = ""
              elif len(values) == 2 and values[0] == "er" and values[1] == "more":
                comparative = title + "er"
                superlative = ""
              elif len(values) == 2:
                comparative = values[0]
                superlative = values[1]
              if self.IsGoodInflection(comparative):
                output.append("inflection_adjective_comparative={}".format(comparative))
              if self.IsGoodInflection(superlative):
                output.append("inflection_adjective_superlative={}".format(superlative))
          if regex.search(r"\{\{en-adv\|?([^\}]*)\}\}", line):
            if "adverb" in infl_modes: continue
            infl_modes.add("adverb")
            value = regex.sub(r".*\{\{en-adv\|?([^\}]*)\}\}.*", r"\1", line).strip()
            values = value.split("|") if value else []
            values = self.TrimInflections(values)
            stop = False
            if values and values[0].startswith("head="):
              if values[0][5:] != title:
                stop = True
              values.pop(0)
            for value in values:
              if value.startswith("head="):
                stop = True
            if not stop:
              comparative = None
              superlative = None
              if len(values) == 1 and values[0] == "er":
                stem = title
                stem = regex.sub(r"e$", "", stem)
                # Bug fix: the pattern had a stray "]" ("([^aeiou])y]$") and
                # could never match; now mirrors the adjective branch.
                stem = regex.sub(r"([^aeiou])y$", r"\1i", stem)
                comparative = stem + "er"
                superlative = stem + "est"
              elif len(values) == 2 and values[1] == "er":
                comparative = values[0] + "er"
                superlative = values[0] + "est"
              elif len(values) == 1 and values[0].endswith("er"):
                comparative = values[0]
                superlative = values[0][:-2] + "est"
              elif len(values) == 2 and values[0] == "r" and values[1] == "more":
                comparative = title + "r"
                superlative = ""
              elif len(values) == 2 and values[0] == "er" and values[1] == "more":
                comparative = title + "er"
                superlative = ""
              elif len(values) == 2:
                comparative = values[0]
                superlative = values[1]
              if self.IsGoodInflection(comparative):
                output.append("inflection_adverb_comparative={}".format(comparative))
              if self.IsGoodInflection(superlative):
                output.append("inflection_adverb_superlative={}".format(superlative))
          if mode == "noun":
            if regex.search(r"\{\{p\}\} *:.*\[\[([a-zA-Z ]+)\]\]", line):
              value = regex.sub(r".*\{\{p\}\} *:.*\[\[([a-zA-Z ]+)\]\].*", r"\1", line)
              if value:
                output.append("inflection_noun_plural={}".format(value))
          if mode in ("adjective", "adverb"):
            if regex.search(
                r"比較級 *:.*\[\[([a-zA-Z ]+)\]\].*[,、].*最上級 *: *\[\[([a-zA-Z ]+)\]\]", line):
              values = regex.sub(
                r".*比較級 *:.*\[\[([a-zA-Z ]+)\]\].*[,、].*最上級 *: *\[\[([a-zA-Z ]+)\]\].*",
                "\\1\t\\2", line).split("\t")
              if (len(values) == 2 and
                  self.IsGoodInflection(values[0]) and self.IsGoodInflection(values[1])):
                output.append("inflection_{}_comparative={}".format(mode, values[0]))
                output.append("inflection_{}_superlative={}".format(mode, values[1]))
          if not regex.search(r"^[#\*:]", line):
            last_level = 0
            continue
          prefix = regex.sub(r"^([#\*:]+).*", r"\1", line)
          level = len(prefix)
          text = line[level:]
          if level > last_level + 1:
            continue
          last_level = level
          if text.find("{{quote") >= 0: continue
          text = self.MakePlainText(text)
          eff_text = regex.sub(r"[\((].*?[\))]", "", text).strip()
          if not regex.search(r"(\p{Latin}{2,})|([\p{Han}\p{Hiragana}|\p{Katakana}ー])", eff_text):
            continue
          if level <= 1:
            if current_text:
              output.append("{}={}".format(mode, current_text))
            current_text = text
          elif current_text:
            if level == 2:
              sep = "[-]"
            elif level == 3:
              sep = "[--]"
            else:
              sep = "[---]"
            current_text += " " + sep + " " + text
        eff_text = regex.sub(r"[\((].*?[\))]", "", current_text).strip()
        if regex.search(r"([\p{Latin}0-9]{2,}|[\p{Han}\p{Hiragana}\p{Katakana}])", eff_text):
          output.append("{}={}".format(mode, current_text))
    pronunciation_ipa = pronunciation_ipa_us or pronunciation_ipa_misc
    if pronunciation_ipa:
      output.append("pronunciation_ipa={}".format(pronunciation_ipa))
    pronunciation_sampa = pronunciation_sampa_us or pronunciation_sampa_misc
    if pronunciation_sampa:
      output.append("pronunciation_sampa={}".format(pronunciation_sampa))
    # Count records that carry a real part-of-speech gloss (as opposed to
    # mere inflection cross-references); only such words are emitted.
    num_effective_records = 0
    for record in output:
      name, value = record.split("=", 1)
      if name not in (
          "noun", "verb", "adjective", "adverb",
          "pronoun", "auxverb", "preposition", "determiner", "article",
          "interjection", "conjunction",
          "prefix", "suffix", "abbreviation"):
        continue
      if regex.search(
          r"の(直接法|直説法|仮定法)?(現在|過去)?(第?[一二三]人称)?[ ・、]?" +
          r"(単数|複数|現在|過去|比較|最上|進行|完了|動名詞|単純)+[ ・、]?" +
          r"(形|型|分詞|級|動名詞)+", value):
        continue
      if regex.search(r"の(直接法|直説法|仮定法)(現在|過去)", value):
        continue
      if regex.search(r"の(動名詞|異綴|旧綴)", value):
        continue
      num_effective_records += 1
    if num_effective_records:
      if alternatives:
        uniq_alts = set()
        out_alts = []
        for alt in alternatives:
          if alt in uniq_alts: continue
          uniq_alts.add(alt)
          out_alts.append(alt)
        output.append("alternative={}".format(", ".join(out_alts)))
      for rel in ((synonyms, "synonym"), (hypernyms, "hypernym"), (hyponyms, "hyponym"),
                  (antonyms, "antonym"), (derivatives, "derivative"), (relations, "relation")):
        if rel[0]:
          output.append("{}={}".format(rel[1], ", ".join(rel[0])))
      print("word={}\t{}".format(title, "\t".join(output)))
      # Bug fix: the counter checked against --max in endElement was never
      # incremented, so the output limit could never trigger.
      self.num_outputs += 1

  def IsGoodInflection(self, text):
    """Returns True if text looks like a usable inflected form."""
    if not text: return False
    # Bug fix: was 'text in ("-" or "~")', which evaluates to 'text in "-"'
    # and therefore never rejected "~".
    if text in ("-", "~"): return False
    if regex.search("[\?\!=,/\(\)]", text): return False
    return True

  def MakePlainText(self, text):
    """Strips wiki markup from a gloss line, rendering templates as text."""
    text = regex.sub(r"^[#\*]+", "", text)
    text = regex.sub(r"^--+", "", text)
    text = regex.sub(r"\{\{w\|(lang=[a-z]+\|)?([^\}\|]*)(\|[^\}]*)?\}\}", r"\2", text)
    text = regex.sub(r"\{\{ふりがな\|([^\}\|]+)(\|[^\}]+)?\}\}", r"\1", text)
    text = regex.sub(r"\{\{おくりがな\|(.*?)\|(.*?)\|(.*?)}\}", r"\1\2", text)
    text = regex.sub(r"\{\{おくりがな2\|(.*?)\|(.*?)\|(.*?)\|(.*?)}\}", r"\1\3", text)
    text = regex.sub(r"\{\{おくりがな3\|(.*?)\|(.*?)\|(.*?)\|(.*?)\|(.*?)\|(.*?)\|(.*?)}\}",
                     r"\1\3\4\6", text)
    text = regex.sub(r"\{\{(en-)?(noun)\}\}", r"名詞", text)
    text = regex.sub(r"\{\{(en-)?(verb)\}\}", r"動詞", text)
    text = regex.sub(r"\{\{(en-)?(adj|adjective)\}\}", r"形容詞", text)
    text = regex.sub(r"\{\{(en-)?(adv|adverb)\}\}", r"副詞", text)
    text = regex.sub(r"\{\{(en-)?(pronoun)\}\}", r"代名詞", text)
    text = regex.sub(r"\{\{(en-)?(auxverb)\}\}", r"助動詞", text)
    text = regex.sub(r"\{\{(en-)?(prep|preposition)\}\}", r"前置詞", text)
    text = regex.sub(r"\{\{(en-)?(det)\}\}", r"限定詞", text)
    text = regex.sub(r"\{\{(en-)?(article)\}\}", r"冠詞", text)
    text = regex.sub(r"\{\{(en-)?(interj|interjection)\}\}", r"間投詞", text)
    text = regex.sub(r"\{\{(en-)?(conj|conjunction)\}\}", r"接続詞", text)
    text = regex.sub(r"\{\{(en-)?(prefix)\}\}", r"接頭辞", text)
    text = regex.sub(r"\{\{(en-)?(suffix)\}\}", r"接尾辞", text)
    text = regex.sub(r"\{\{(en-)?(abbr|abbreviation)\}\}", r"略語", text)
    text = regex.sub(r"\{\{(en-)?(drv|derivative)\}\}", r"派生語", text)
    text = regex.sub(r"\{\{(en-)?(alter)\}\}", r"代替語", text)
    text = regex.sub(r"\{\{(en-)?(syn)\}\}", r"類義語", text)
    text = regex.sub(r"\{\{(en-)?(ant)\}\}", r"対義語", text)
    text = regex.sub(r"\{\{(en-)?(rel)\}\}", r"関連語", text)
    text = regex.sub(r"\{\{countable\}\}", r"可算", text)
    text = regex.sub(r"\{\{uncountable\}\}", r"不可算", text)
    text = regex.sub(r"\{\{countable(\|[^\}]+)*\}\}", r"(可算)", text)
    text = regex.sub(r"\{\{uncountable(\|[^\}]+)*\}\}", r"(不可算)", text)
    text = regex.sub(r"\{\{lb\|\en(\|\w+)*(\|countable\+?)(\|\w+)*\}\}", r"(可算)", text)
    text = regex.sub(r"\{\{lb\|\en(\|\w+)*(\|uncountable\+?)(\|\w+)*\}\}", r"(不可算)", text)
    text = regex.sub(r"\{\{intransitive\}\}", r"自動詞", text)
    text = regex.sub(r"\{\{transitive\}\}", r"他動詞", text)
    text = regex.sub(r"\{\{v\.i\.\}\}", r"自動詞", text)
    text = regex.sub(r"\{\{v\.t\.\}\}", r"他動詞", text)
    text = regex.sub(r"\{\{intransitive(\|[^\}]+)*\}\}", r"(自動詞)", text)
    text = regex.sub(r"\{\{context\|transitive(\|[^\}]+)*\}\}", r"(他動詞)", text)
    # Bug fix: the next two replacements were swapped — "transitive" was
    # rendered as 自動詞 (intransitive) and vice versa.
    text = regex.sub(r"\{\{lb\|\en(\|\w+)*(\|transitive\+?)(\|\w+)*\}\}", r"(他動詞)", text)
    text = regex.sub(r"\{\{lb\|\en(\|\w+)*(\|intransitive\+?)(\|\w+)*\}\}", r"(自動詞)", text)
    text = regex.sub(r"\{\{タグ\|en\|自動詞\}\}", r"(自動詞)", text)
    text = regex.sub(r"\{\{タグ\|en\|他動詞\}\}", r"(他動詞)", text)
    text = regex.sub(r"\{\{\.\.\.\}\}", "...", text)
    text = regex.sub(r"(\{\{[^{}]+)\{\{[^{}]+\}\}([^}]*\}\})", r"\1\2", text)
    text = regex.sub(r"\{\{l\|[^\}\|]+\|([^\}]+)?\}\}", r"\1", text)
    text = regex.sub(r"\{\{(context|lb|タグ|tag|label|infl)\|[^\}]*\}\}", "", text)
    text = regex.sub(r"\{\{cat:[^\}]*\}\}", "", text)
    text = regex.sub(r"\{\{abbreviation of(\|en)?\|([^|}]+)\}\}", r"\2", text)
    text = regex.sub(r"\{\{(en-)?plural of(\|en)?\|([^|}]+)\}\}", r"\3の複数形", text)
    text = regex.sub(r"\{\{(en-)?third-person singular of(\|en)?\|([^|}]+)\}\}",
                     r"\3の三人称単数現在形", text)
    text = regex.sub(r"\{\{(en-)?past of(\|en)?\|([^|}]+)\}\}", r"\3の過去形", text)
    text = regex.sub(r"\{\{(en-)?present participle of(\|en)?\|([^|}]+)\}\}",
                     r"\3の現在分詞", text)
    text = regex.sub(r"\{\{(en-)?past participle of(\|en)?\|([^|}]+)\}\}", r"\3の過去分詞", text)
    # Bug fix: a duplicated "comparative of" rule that mislabeled the result
    # as 複数形 (plural) was removed; it shadowed the correct rule below.
    text = regex.sub(r"\{\{(en-)?comparative of(\|en)?\|([^|}]+)\}\}", r"\3の比較級", text)
    text = regex.sub(r"\{\{(en-)?superlative of(\|en)?\|([^|}]+)\}\}", r"\3の最上級", text)
    text = regex.sub(r"\{\{(m|ux|l)\|[a-z]+\|([^\|\}]+)(\|[^\}\|]+)*\}\}", r"\2", text)
    text = regex.sub(r"\{\{(n-g|non-gloss definition)\|([^\|\}]+)(\|[^\}\|]+)*\}\}", r"\2", text)
    text = regex.sub(r"\{\{&lit\|en\|(.*?)\|(.*?)\|(.*?)(\|.*?)*?\}\}", r"cf. \1, \2, \3 ", text)
    text = regex.sub(r"\{\{&lit\|en\|(.*?)\|(.*?)(\|.*?)*?\}\}", r"cf. \1, \2 ", text)
    text = regex.sub(r"\{\{&lit\|en\|(.*?)(\|.*?)*?\}\}", r"cf. \1", text)
    text = regex.sub(r"\{\{(vern|taxlink)\|(.*?)(\|.*?)*\}\}", r"\2", text)
    text = regex.sub(r"\{\{syn of\|en\|(.*?)(\|.*?)*\}\}", r"Synonym of \1", text)
    text = regex.sub(r"\{\{syn\|en\|(.*?)\|(.*?)\|(.*?)(\|.*?)*?\}\}",
                     r"Synonyms: \1, \2, \3 ", text)
    text = regex.sub(r"\{\{syn\|en\|(.*?)\|(.*?)(\|.*?)*?\}\}", r"Synonyms: \1, \2 ", text)
    text = regex.sub(r"\{\{syn\|en\|(.*?)(\|.*?)*?\}\}", r"Synonym: \1 ", text)
    text = regex.sub(r"\{\{rfdate[a-z]+\|[a-z]+\|([^\|\}]+)(\|[^\}\|]+)*\}\}", r"\1", text)
    text = regex.sub(r"\{\{(RQ|Q):([^\|\}]+)(\|[^\|\}]+)*\|passage=([^\|\}]+)(\|[^\|\}]+)*\}\}",
                     r"\2 -- \4", text)
    text = regex.sub(r"\{\{(RQ|R):([^\|\}]+)(\|[^\}\|]+)*\}\}", r"\2", text)
    text = regex.sub(r"\{\{([^\}\|]+\|)([^\}\|]+)(\|[^\}]+)?\}\}", r"\2", text)
    text = regex.sub(r"\{\{([^}]*)\}\}", r"", text)
    text = regex.sub(r"\{\}", r"", text)
    text = regex.sub(r"\}\}", r"", text)
    text = regex.sub(r"\[\[w:[a-z]+:([^\]\|]+)(\|[^\]\|]+)?\]\]", r"\1", text)
    # Bug fix: regex.sub's 4th positional parameter is "count", so the flag
    # was silently used as a substitution limit; pass it as flags=.
    text = regex.sub(r"\[\[(category|カテゴリ):[^\]]*\]\]", "", text, flags=regex.IGNORECASE)
    text = regex.sub(r"\[\[([^\]\|]+\|)?([^\]]*)\]\]", r"\2", text)
    text = regex.sub(r"\[(https?://[^ ]+ +)([^\]]+)\]", r"\2", text)
    text = regex.sub(r"\[https?://.*?\]", r"", text)
    text = regex.sub(r"\[\[", r"", text)
    text = regex.sub(r"\]\]", r"", text)
    text = regex.sub(r"'''", "", text)
    text = regex.sub(r"''", "", text)
    text = regex.sub(r"\( *\)", "", text)
    text = regex.sub(r"( *)", "", text)
    text = regex.sub(r"「 *」", "", text)
    text = regex.sub(r"<ref>.*?</ref>", "", text)
    text = regex.sub(r"</?[a-z]+[^>]*>", "", text)
    text = regex.sub(r"<!-- *", "(", text)
    text = regex.sub(r" *-->", ")", text)
    text = regex.sub(r"^ *[,:;] *", "", text)
    return regex.sub(r"\s+", " ", text).strip()

  def TrimPronunciation(self, value, is_ipa):
    """Normalizes a pronunciation template value; returns "" if unusable."""
    value = regex.sub(r"</?[a-z]+[^>]*>", "", value)
    value = regex.sub(r"^/(.*)/$", r"\1", value)
    value = regex.sub(r"lang=[a-z]*\|", "", value)
    value = regex.sub(r"[,\|].*", "", value)
    if is_ipa:
      value = regex.sub(r"^/(.*)/$", r"\1", value)
      value = regex.sub(r"/ ?\(.*", "", value)
      value = regex.sub(r"/", "", value)
    if value in ("...", "?"):
      return ""
    return value

  def TrimInflections(self, values):
    """Cleans up template parameter values of an inflection template."""
    trimmed_values = []
    for value in values:
      value = regex.sub(r"\[\[([^\]]+)\]\]", r"\1", value)
      value = value.replace(r"'''", "")
      value = value.replace(r"''", "")
      value = regex.sub(r"(又|また).*", "", value)
      value = regex.sub(r",.*", "", value)
      if regex.search("^[a-z_]+[234](_[a-z_]+)=", value):
        continue
      trimmed_values.append(value.strip())
    return trimmed_values
def main():
  """Command-line driver: parse Wiktionary XML on stdin, emit TSV on stdout.

  Flags:
    --sampling NUM  probability of processing each article (default 1.0)
    --max NUM       stop after this many output records (default unlimited)
    --quiet         suppress informational logging
  """
  cli_args = sys.argv[1:]
  sampling_ratio = float(tkrzw_dict.GetCommandFlag(cli_args, "--sampling", 1) or 1.0)
  max_outputs = int(tkrzw_dict.GetCommandFlag(cli_args, "--max", 1) or sys.maxsize)
  if tkrzw_dict.GetCommandFlag(cli_args, "--quiet", 0):
    logger.setLevel(logging.ERROR)
  if cli_args:
    raise RuntimeError("unknown arguments: {}".format(str(cli_args)))
  if sampling_ratio <= 0 or sampling_ratio > 1:
    raise ValueError("invalid sampling ratio")
  if max_outputs < 0:
    raise ValueError("invalid max outputs")
  logger.info("Process started")
  sax_parser = xml.sax.make_parser()
  sax_parser.setContentHandler(XMLHandler(sampling_ratio, max_outputs))
  try:
    sax_parser.parse(sys.stdin)
  except xml.sax.SAXException:
    # Raised by the handler to stop early once max_outputs is reached.
    pass
  logger.info("Process done")
logger.info("Process done")
# Script entry point: stream-parse the XML dump piped in on stdin.
if __name__=="__main__":
  main()
|
[
"html.unescape",
"tkrzw_dict.GetCommandFlag",
"regex.findall",
"regex.search",
"tkrzw_dict.GetLogger",
"random.random",
"regex.sub",
"random.seed"
] |
[((1290, 1311), 'random.seed', 'random.seed', (['(19780211)'], {}), '(19780211)\n', (1301, 1311), False, 'import random\n'), ((1321, 1343), 'tkrzw_dict.GetLogger', 'tkrzw_dict.GetLogger', ([], {}), '()\n', (1341, 1343), False, 'import tkrzw_dict\n'), ((33947, 33992), 'tkrzw_dict.GetCommandFlag', 'tkrzw_dict.GetCommandFlag', (['args', '"""--quiet"""', '(0)'], {}), "(args, '--quiet', 0)\n", (33972, 33992), False, 'import tkrzw_dict\n'), ((3868, 3892), 'html.unescape', 'html.unescape', (['self.text'], {}), '(self.text)\n', (3881, 3892), False, 'import html\n'), ((3908, 3945), 'regex.sub', 'regex.sub', (['"""<!--.*?-->"""', '""""""', 'fulltext'], {}), "('<!--.*?-->', '', fulltext)\n", (3917, 3945), False, 'import regex\n'), ((3962, 4010), 'regex.sub', 'regex.sub', (['"""(\\\\n==+[^=]+==+)"""', '"""\\\\1\n"""', 'fulltext'], {}), "('(\\\\n==+[^=]+==+)', '\\\\1\\n', fulltext)\n", (3971, 4010), False, 'import regex\n'), ((26342, 26381), 'regex.search', 'regex.search', (['"""[\\\\?\\\\!=,/\\\\(\\\\)]"""', 'text'], {}), "('[\\\\?\\\\!=,/\\\\(\\\\)]', text)\n", (26354, 26381), False, 'import regex\n'), ((26453, 26484), 'regex.sub', 'regex.sub', (['"""^[#\\\\*]+"""', '""""""', 'text'], {}), "('^[#\\\\*]+', '', text)\n", (26462, 26484), False, 'import regex\n'), ((26496, 26523), 'regex.sub', 'regex.sub', (['"""^--+"""', '""""""', 'text'], {}), "('^--+', '', text)\n", (26505, 26523), False, 'import regex\n'), ((26536, 26624), 'regex.sub', 'regex.sub', (['"""\\\\{\\\\{w\\\\|(lang=[a-z]+\\\\|)?([^\\\\}\\\\|]*)(\\\\|[^\\\\}]*)?\\\\}\\\\}"""', '"""\\\\2"""', 'text'], {}), "('\\\\{\\\\{w\\\\|(lang=[a-z]+\\\\|)?([^\\\\}\\\\|]*)(\\\\|[^\\\\}]*)?\\\\}\\\\}',\n '\\\\2', text)\n", (26545, 26624), False, 'import regex\n'), ((26623, 26693), 'regex.sub', 'regex.sub', (['"""\\\\{\\\\{ふりがな\\\\|([^\\\\}\\\\|]+)(\\\\|[^\\\\}]+)?\\\\}\\\\}"""', '"""\\\\1"""', 'text'], {}), "('\\\\{\\\\{ふりがな\\\\|([^\\\\}\\\\|]+)(\\\\|[^\\\\}]+)?\\\\}\\\\}', '\\\\1', text)\n", (26632, 26693), False, 'import 
regex\n'), ((26697, 26765), 'regex.sub', 'regex.sub', (['"""\\\\{\\\\{おくりがな\\\\|(.*?)\\\\|(.*?)\\\\|(.*?)}\\\\}"""', '"""\\\\1\\\\2"""', 'text'], {}), "('\\\\{\\\\{おくりがな\\\\|(.*?)\\\\|(.*?)\\\\|(.*?)}\\\\}', '\\\\1\\\\2', text)\n", (26706, 26765), False, 'import regex\n'), ((26771, 26848), 'regex.sub', 'regex.sub', (['"""\\\\{\\\\{おくりがな2\\\\|(.*?)\\\\|(.*?)\\\\|(.*?)\\\\|(.*?)}\\\\}"""', '"""\\\\1\\\\3"""', 'text'], {}), "('\\\\{\\\\{おくりがな2\\\\|(.*?)\\\\|(.*?)\\\\|(.*?)\\\\|(.*?)}\\\\}', '\\\\1\\\\3', text)\n", (26780, 26848), False, 'import regex\n'), ((26853, 26969), 'regex.sub', 'regex.sub', (['"""\\\\{\\\\{おくりがな3\\\\|(.*?)\\\\|(.*?)\\\\|(.*?)\\\\|(.*?)\\\\|(.*?)\\\\|(.*?)\\\\|(.*?)}\\\\}"""', '"""\\\\1\\\\3\\\\4\\\\6"""', 'text'], {}), "(\n '\\\\{\\\\{おくりがな3\\\\|(.*?)\\\\|(.*?)\\\\|(.*?)\\\\|(.*?)\\\\|(.*?)\\\\|(.*?)\\\\|(.*?)}\\\\}',\n '\\\\1\\\\3\\\\4\\\\6', text)\n", (26862, 26969), False, 'import regex\n'), ((26981, 27030), 'regex.sub', 'regex.sub', (['"""\\\\{\\\\{(en-)?(noun)\\\\}\\\\}"""', '"""名詞"""', 'text'], {}), "('\\\\{\\\\{(en-)?(noun)\\\\}\\\\}', '名詞', text)\n", (26990, 27030), False, 'import regex\n'), ((27040, 27089), 'regex.sub', 'regex.sub', (['"""\\\\{\\\\{(en-)?(verb)\\\\}\\\\}"""', '"""動詞"""', 'text'], {}), "('\\\\{\\\\{(en-)?(verb)\\\\}\\\\}', '動詞', text)\n", (27049, 27089), False, 'import regex\n'), ((27099, 27158), 'regex.sub', 'regex.sub', (['"""\\\\{\\\\{(en-)?(adj|adjective)\\\\}\\\\}"""', '"""形容詞"""', 'text'], {}), "('\\\\{\\\\{(en-)?(adj|adjective)\\\\}\\\\}', '形容詞', text)\n", (27108, 27158), False, 'import regex\n'), ((27168, 27223), 'regex.sub', 'regex.sub', (['"""\\\\{\\\\{(en-)?(adv|adverb)\\\\}\\\\}"""', '"""副詞"""', 'text'], {}), "('\\\\{\\\\{(en-)?(adv|adverb)\\\\}\\\\}', '副詞', text)\n", (27177, 27223), False, 'import regex\n'), ((27233, 27286), 'regex.sub', 'regex.sub', (['"""\\\\{\\\\{(en-)?(pronoun)\\\\}\\\\}"""', '"""代名詞"""', 'text'], {}), "('\\\\{\\\\{(en-)?(pronoun)\\\\}\\\\}', '代名詞', text)\n", (27242, 27286), False, 
'import regex\n'), ((27296, 27349), 'regex.sub', 'regex.sub', (['"""\\\\{\\\\{(en-)?(auxverb)\\\\}\\\\}"""', '"""助動詞"""', 'text'], {}), "('\\\\{\\\\{(en-)?(auxverb)\\\\}\\\\}', '助動詞', text)\n", (27305, 27349), False, 'import regex\n'), ((27359, 27421), 'regex.sub', 'regex.sub', (['"""\\\\{\\\\{(en-)?(prep|preposition)\\\\}\\\\}"""', '"""前置詞"""', 'text'], {}), "('\\\\{\\\\{(en-)?(prep|preposition)\\\\}\\\\}', '前置詞', text)\n", (27368, 27421), False, 'import regex\n'), ((27431, 27480), 'regex.sub', 'regex.sub', (['"""\\\\{\\\\{(en-)?(det)\\\\}\\\\}"""', '"""限定詞"""', 'text'], {}), "('\\\\{\\\\{(en-)?(det)\\\\}\\\\}', '限定詞', text)\n", (27440, 27480), False, 'import regex\n'), ((27490, 27542), 'regex.sub', 'regex.sub', (['"""\\\\{\\\\{(en-)?(article)\\\\}\\\\}"""', '"""冠詞"""', 'text'], {}), "('\\\\{\\\\{(en-)?(article)\\\\}\\\\}', '冠詞', text)\n", (27499, 27542), False, 'import regex\n'), ((27552, 27617), 'regex.sub', 'regex.sub', (['"""\\\\{\\\\{(en-)?(interj|interjection)\\\\}\\\\}"""', '"""間投詞"""', 'text'], {}), "('\\\\{\\\\{(en-)?(interj|interjection)\\\\}\\\\}', '間投詞', text)\n", (27561, 27617), False, 'import regex\n'), ((27627, 27689), 'regex.sub', 'regex.sub', (['"""\\\\{\\\\{(en-)?(conj|conjunction)\\\\}\\\\}"""', '"""接続詞"""', 'text'], {}), "('\\\\{\\\\{(en-)?(conj|conjunction)\\\\}\\\\}', '接続詞', text)\n", (27636, 27689), False, 'import regex\n'), ((27699, 27751), 'regex.sub', 'regex.sub', (['"""\\\\{\\\\{(en-)?(prefix)\\\\}\\\\}"""', '"""接頭辞"""', 'text'], {}), "('\\\\{\\\\{(en-)?(prefix)\\\\}\\\\}', '接頭辞', text)\n", (27708, 27751), False, 'import regex\n'), ((27761, 27813), 'regex.sub', 'regex.sub', (['"""\\\\{\\\\{(en-)?(suffix)\\\\}\\\\}"""', '"""接尾辞"""', 'text'], {}), "('\\\\{\\\\{(en-)?(suffix)\\\\}\\\\}', '接尾辞', text)\n", (27770, 27813), False, 'import regex\n'), ((27823, 27885), 'regex.sub', 'regex.sub', (['"""\\\\{\\\\{(en-)?(abbr|abbreviation)\\\\}\\\\}"""', '"""略語"""', 'text'], {}), "('\\\\{\\\\{(en-)?(abbr|abbreviation)\\\\}\\\\}', '略語', text)\n", 
(27832, 27885), False, 'import regex\n'), ((27895, 27955), 'regex.sub', 'regex.sub', (['"""\\\\{\\\\{(en-)?(drv|derivative)\\\\}\\\\}"""', '"""派生語"""', 'text'], {}), "('\\\\{\\\\{(en-)?(drv|derivative)\\\\}\\\\}', '派生語', text)\n", (27904, 27955), False, 'import regex\n'), ((27965, 28016), 'regex.sub', 'regex.sub', (['"""\\\\{\\\\{(en-)?(alter)\\\\}\\\\}"""', '"""代替語"""', 'text'], {}), "('\\\\{\\\\{(en-)?(alter)\\\\}\\\\}', '代替語', text)\n", (27974, 28016), False, 'import regex\n'), ((28026, 28075), 'regex.sub', 'regex.sub', (['"""\\\\{\\\\{(en-)?(syn)\\\\}\\\\}"""', '"""類義語"""', 'text'], {}), "('\\\\{\\\\{(en-)?(syn)\\\\}\\\\}', '類義語', text)\n", (28035, 28075), False, 'import regex\n'), ((28085, 28134), 'regex.sub', 'regex.sub', (['"""\\\\{\\\\{(en-)?(ant)\\\\}\\\\}"""', '"""対義語"""', 'text'], {}), "('\\\\{\\\\{(en-)?(ant)\\\\}\\\\}', '対義語', text)\n", (28094, 28134), False, 'import regex\n'), ((28144, 28193), 'regex.sub', 'regex.sub', (['"""\\\\{\\\\{(en-)?(rel)\\\\}\\\\}"""', '"""関連語"""', 'text'], {}), "('\\\\{\\\\{(en-)?(rel)\\\\}\\\\}', '関連語', text)\n", (28153, 28193), False, 'import regex\n'), ((28203, 28249), 'regex.sub', 'regex.sub', (['"""\\\\{\\\\{countable\\\\}\\\\}"""', '"""可算"""', 'text'], {}), "('\\\\{\\\\{countable\\\\}\\\\}', '可算', text)\n", (28212, 28249), False, 'import regex\n'), ((28259, 28308), 'regex.sub', 'regex.sub', (['"""\\\\{\\\\{uncountable\\\\}\\\\}"""', '"""不可算"""', 'text'], {}), "('\\\\{\\\\{uncountable\\\\}\\\\}', '不可算', text)\n", (28268, 28308), False, 'import regex\n'), ((28318, 28379), 'regex.sub', 'regex.sub', (['"""\\\\{\\\\{countable(\\\\|[^\\\\}]+)*\\\\}\\\\}"""', '"""(可算)"""', 'text'], {}), "('\\\\{\\\\{countable(\\\\|[^\\\\}]+)*\\\\}\\\\}', '(可算)', text)\n", (28327, 28379), False, 'import regex\n'), ((28387, 28451), 'regex.sub', 'regex.sub', (['"""\\\\{\\\\{uncountable(\\\\|[^\\\\}]+)*\\\\}\\\\}"""', '"""(不可算)"""', 'text'], {}), "('\\\\{\\\\{uncountable(\\\\|[^\\\\}]+)*\\\\}\\\\}', '(不可算)', text)\n", (28396, 28451), False, 
'import regex\n'), ((28459, 28549), 'regex.sub', 'regex.sub', (['"""\\\\{\\\\{lb\\\\|\\\\en(\\\\|\\\\w+)*(\\\\|countable\\\\+?)(\\\\|\\\\w+)*\\\\}\\\\}"""', '"""(可算)"""', 'text'], {}), "('\\\\{\\\\{lb\\\\|\\\\en(\\\\|\\\\w+)*(\\\\|countable\\\\+?)(\\\\|\\\\w+)*\\\\}\\\\}',\n '(可算)', text)\n", (28468, 28549), False, 'import regex\n'), ((28547, 28640), 'regex.sub', 'regex.sub', (['"""\\\\{\\\\{lb\\\\|\\\\en(\\\\|\\\\w+)*(\\\\|uncountable\\\\+?)(\\\\|\\\\w+)*\\\\}\\\\}"""', '"""(不可算)"""', 'text'], {}), "('\\\\{\\\\{lb\\\\|\\\\en(\\\\|\\\\w+)*(\\\\|uncountable\\\\+?)(\\\\|\\\\w+)*\\\\}\\\\}',\n '(不可算)', text)\n", (28556, 28640), False, 'import regex\n'), ((28638, 28688), 'regex.sub', 'regex.sub', (['"""\\\\{\\\\{intransitive\\\\}\\\\}"""', '"""自動詞"""', 'text'], {}), "('\\\\{\\\\{intransitive\\\\}\\\\}', '自動詞', text)\n", (28647, 28688), False, 'import regex\n'), ((28698, 28746), 'regex.sub', 'regex.sub', (['"""\\\\{\\\\{transitive\\\\}\\\\}"""', '"""他動詞"""', 'text'], {}), "('\\\\{\\\\{transitive\\\\}\\\\}', '他動詞', text)\n", (28707, 28746), False, 'import regex\n'), ((28756, 28802), 'regex.sub', 'regex.sub', (['"""\\\\{\\\\{v\\\\.i\\\\.\\\\}\\\\}"""', '"""自動詞"""', 'text'], {}), "('\\\\{\\\\{v\\\\.i\\\\.\\\\}\\\\}', '自動詞', text)\n", (28765, 28802), False, 'import regex\n'), ((28810, 28856), 'regex.sub', 'regex.sub', (['"""\\\\{\\\\{v\\\\.t\\\\.\\\\}\\\\}"""', '"""他動詞"""', 'text'], {}), "('\\\\{\\\\{v\\\\.t\\\\.\\\\}\\\\}', '他動詞', text)\n", (28819, 28856), False, 'import regex\n'), ((28864, 28929), 'regex.sub', 'regex.sub', (['"""\\\\{\\\\{intransitive(\\\\|[^\\\\}]+)*\\\\}\\\\}"""', '"""(自動詞)"""', 'text'], {}), "('\\\\{\\\\{intransitive(\\\\|[^\\\\}]+)*\\\\}\\\\}', '(自動詞)', text)\n", (28873, 28929), False, 'import regex\n'), ((28937, 29010), 'regex.sub', 'regex.sub', (['"""\\\\{\\\\{context\\\\|transitive(\\\\|[^\\\\}]+)*\\\\}\\\\}"""', '"""(他動詞)"""', 'text'], {}), "('\\\\{\\\\{context\\\\|transitive(\\\\|[^\\\\}]+)*\\\\}\\\\}', '(他動詞)', text)\n", (28946, 29010), False, 
'import regex\n'), ((29017, 29109), 'regex.sub', 'regex.sub', (['"""\\\\{\\\\{lb\\\\|\\\\en(\\\\|\\\\w+)*(\\\\|transitive\\\\+?)(\\\\|\\\\w+)*\\\\}\\\\}"""', '"""(自動詞)"""', 'text'], {}), "('\\\\{\\\\{lb\\\\|\\\\en(\\\\|\\\\w+)*(\\\\|transitive\\\\+?)(\\\\|\\\\w+)*\\\\}\\\\}',\n '(自動詞)', text)\n", (29026, 29109), False, 'import regex\n'), ((29107, 29201), 'regex.sub', 'regex.sub', (['"""\\\\{\\\\{lb\\\\|\\\\en(\\\\|\\\\w+)*(\\\\|intransitive\\\\+?)(\\\\|\\\\w+)*\\\\}\\\\}"""', '"""(他動詞)"""', 'text'], {}), "('\\\\{\\\\{lb\\\\|\\\\en(\\\\|\\\\w+)*(\\\\|intransitive\\\\+?)(\\\\|\\\\w+)*\\\\}\\\\}',\n '(他動詞)', text)\n", (29116, 29201), False, 'import regex\n'), ((29199, 29252), 'regex.sub', 'regex.sub', (['"""\\\\{\\\\{タグ\\\\|en\\\\|自動詞\\\\}\\\\}"""', '"""(自動詞)"""', 'text'], {}), "('\\\\{\\\\{タグ\\\\|en\\\\|自動詞\\\\}\\\\}', '(自動詞)', text)\n", (29208, 29252), False, 'import regex\n'), ((29260, 29313), 'regex.sub', 'regex.sub', (['"""\\\\{\\\\{タグ\\\\|en\\\\|他動詞\\\\}\\\\}"""', '"""(他動詞)"""', 'text'], {}), "('\\\\{\\\\{タグ\\\\|en\\\\|他動詞\\\\}\\\\}', '(他動詞)', text)\n", (29269, 29313), False, 'import regex\n'), ((29321, 29368), 'regex.sub', 'regex.sub', (['"""\\\\{\\\\{\\\\.\\\\.\\\\.\\\\}\\\\}"""', '"""..."""', 'text'], {}), "('\\\\{\\\\{\\\\.\\\\.\\\\.\\\\}\\\\}', '...', text)\n", (29330, 29368), False, 'import regex\n'), ((29374, 29448), 'regex.sub', 'regex.sub', (['"""(\\\\{\\\\{[^{}]+)\\\\{\\\\{[^{}]+\\\\}\\\\}([^}]*\\\\}\\\\})"""', '"""\\\\1\\\\2"""', 'text'], {}), "('(\\\\{\\\\{[^{}]+)\\\\{\\\\{[^{}]+\\\\}\\\\}([^}]*\\\\}\\\\})', '\\\\1\\\\2', text)\n", (29383, 29448), False, 'import regex\n'), ((29452, 29517), 'regex.sub', 'regex.sub', (['"""\\\\{\\\\{l\\\\|[^\\\\}\\\\|]+\\\\|([^\\\\}]+)?\\\\}\\\\}"""', '"""\\\\1"""', 'text'], {}), "('\\\\{\\\\{l\\\\|[^\\\\}\\\\|]+\\\\|([^\\\\}]+)?\\\\}\\\\}', '\\\\1', text)\n", (29461, 29517), False, 'import regex\n'), ((29521, 29596), 'regex.sub', 'regex.sub', (['"""\\\\{\\\\{(context|lb|タグ|tag|label|infl)\\\\|[^\\\\}]*\\\\}\\\\}"""', 
'""""""', 'text'], {}), "('\\\\{\\\\{(context|lb|タグ|tag|label|infl)\\\\|[^\\\\}]*\\\\}\\\\}', '', text)\n", (29530, 29596), False, 'import regex\n'), ((29603, 29649), 'regex.sub', 'regex.sub', (['"""\\\\{\\\\{cat:[^\\\\}]*\\\\}\\\\}"""', '""""""', 'text'], {}), "('\\\\{\\\\{cat:[^\\\\}]*\\\\}\\\\}', '', text)\n", (29612, 29649), False, 'import regex\n'), ((29657, 29729), 'regex.sub', 'regex.sub', (['"""\\\\{\\\\{abbreviation of(\\\\|en)?\\\\|([^|}]+)\\\\}\\\\}"""', '"""\\\\2"""', 'text'], {}), "('\\\\{\\\\{abbreviation of(\\\\|en)?\\\\|([^|}]+)\\\\}\\\\}', '\\\\2', text)\n", (29666, 29729), False, 'import regex\n'), ((29736, 29812), 'regex.sub', 'regex.sub', (['"""\\\\{\\\\{(en-)?plural of(\\\\|en)?\\\\|([^|}]+)\\\\}\\\\}"""', '"""\\\\3の複数形"""', 'text'], {}), "('\\\\{\\\\{(en-)?plural of(\\\\|en)?\\\\|([^|}]+)\\\\}\\\\}', '\\\\3の複数形', text)\n", (29745, 29812), False, 'import regex\n'), ((29819, 29919), 'regex.sub', 'regex.sub', (['"""\\\\{\\\\{(en-)?third-person singular of(\\\\|en)?\\\\|([^|}]+)\\\\}\\\\}"""', '"""\\\\3の三人称単数現在形"""', 'text'], {}), "('\\\\{\\\\{(en-)?third-person singular of(\\\\|en)?\\\\|([^|}]+)\\\\}\\\\}',\n '\\\\3の三人称単数現在形', text)\n", (29828, 29919), False, 'import regex\n'), ((29943, 30017), 'regex.sub', 'regex.sub', (['"""\\\\{\\\\{(en-)?past of(\\\\|en)?\\\\|([^|}]+)\\\\}\\\\}"""', '"""\\\\3の過去形"""', 'text'], {}), "('\\\\{\\\\{(en-)?past of(\\\\|en)?\\\\|([^|}]+)\\\\}\\\\}', '\\\\3の過去形', text)\n", (29952, 30017), False, 'import regex\n'), ((30024, 30117), 'regex.sub', 'regex.sub', (['"""\\\\{\\\\{(en-)?present participle of(\\\\|en)?\\\\|([^|}]+)\\\\}\\\\}"""', '"""\\\\3の現在分詞"""', 'text'], {}), "('\\\\{\\\\{(en-)?present participle of(\\\\|en)?\\\\|([^|}]+)\\\\}\\\\}',\n '\\\\3の現在分詞', text)\n", (30033, 30117), False, 'import regex\n'), ((30141, 30231), 'regex.sub', 'regex.sub', (['"""\\\\{\\\\{(en-)?past participle of(\\\\|en)?\\\\|([^|}]+)\\\\}\\\\}"""', '"""\\\\3の過去分詞"""', 'text'], {}), "('\\\\{\\\\{(en-)?past participle 
of(\\\\|en)?\\\\|([^|}]+)\\\\}\\\\}',\n '\\\\3の過去分詞', text)\n", (30150, 30231), False, 'import regex\n'), ((30234, 30319), 'regex.sub', 'regex.sub', (['"""\\\\{\\\\{(en-)?comparative of(\\\\|en)?\\\\|([^|}]+)\\\\}\\\\}"""', '"""\\\\3の複数形"""', 'text'], {}), "('\\\\{\\\\{(en-)?comparative of(\\\\|en)?\\\\|([^|}]+)\\\\}\\\\}', '\\\\3の複数形',\n text)\n", (30243, 30319), False, 'import regex\n'), ((30322, 30407), 'regex.sub', 'regex.sub', (['"""\\\\{\\\\{(en-)?comparative of(\\\\|en)?\\\\|([^|}]+)\\\\}\\\\}"""', '"""\\\\3の比較級"""', 'text'], {}), "('\\\\{\\\\{(en-)?comparative of(\\\\|en)?\\\\|([^|}]+)\\\\}\\\\}', '\\\\3の比較級',\n text)\n", (30331, 30407), False, 'import regex\n'), ((30410, 30495), 'regex.sub', 'regex.sub', (['"""\\\\{\\\\{(en-)?superlative of(\\\\|en)?\\\\|([^|}]+)\\\\}\\\\}"""', '"""\\\\3の最上級"""', 'text'], {}), "('\\\\{\\\\{(en-)?superlative of(\\\\|en)?\\\\|([^|}]+)\\\\}\\\\}', '\\\\3の最上級',\n text)\n", (30419, 30495), False, 'import regex\n'), ((30498, 30588), 'regex.sub', 'regex.sub', (['"""\\\\{\\\\{(m|ux|l)\\\\|[a-z]+\\\\|([^\\\\|\\\\}]+)(\\\\|[^\\\\}\\\\|]+)*\\\\}\\\\}"""', '"""\\\\2"""', 'text'], {}), "('\\\\{\\\\{(m|ux|l)\\\\|[a-z]+\\\\|([^\\\\|\\\\}]+)(\\\\|[^\\\\}\\\\|]+)*\\\\}\\\\}',\n '\\\\2', text)\n", (30507, 30588), False, 'import regex\n'), ((30586, 30690), 'regex.sub', 'regex.sub', (['"""\\\\{\\\\{(n-g|non-gloss definition)\\\\|([^\\\\|\\\\}]+)(\\\\|[^\\\\}\\\\|]+)*\\\\}\\\\}"""', '"""\\\\2"""', 'text'], {}), "(\n '\\\\{\\\\{(n-g|non-gloss definition)\\\\|([^\\\\|\\\\}]+)(\\\\|[^\\\\}\\\\|]+)*\\\\}\\\\}',\n '\\\\2', text)\n", (30595, 30690), False, 'import regex\n'), ((30684, 30784), 'regex.sub', 'regex.sub', (['"""\\\\{\\\\{&lit\\\\|en\\\\|(.*?)\\\\|(.*?)\\\\|(.*?)(\\\\|.*?)*?\\\\}\\\\}"""', '"""cf. \\\\1, \\\\2, \\\\3 """', 'text'], {}), "('\\\\{\\\\{&lit\\\\|en\\\\|(.*?)\\\\|(.*?)\\\\|(.*?)(\\\\|.*?)*?\\\\}\\\\}',\n 'cf. 
\\\\1, \\\\2, \\\\3 ', text)\n", (30693, 30784), False, 'import regex\n'), ((30782, 30869), 'regex.sub', 'regex.sub', (['"""\\\\{\\\\{&lit\\\\|en\\\\|(.*?)\\\\|(.*?)(\\\\|.*?)*?\\\\}\\\\}"""', '"""cf. \\\\1, \\\\2 """', 'text'], {}), "('\\\\{\\\\{&lit\\\\|en\\\\|(.*?)\\\\|(.*?)(\\\\|.*?)*?\\\\}\\\\}',\n 'cf. \\\\1, \\\\2 ', text)\n", (30791, 30869), False, 'import regex\n'), ((30869, 30938), 'regex.sub', 'regex.sub', (['"""\\\\{\\\\{&lit\\\\|en\\\\|(.*?)(\\\\|.*?)*?\\\\}\\\\}"""', '"""cf. \\\\1"""', 'text'], {}), "('\\\\{\\\\{&lit\\\\|en\\\\|(.*?)(\\\\|.*?)*?\\\\}\\\\}', 'cf. \\\\1', text)\n", (30878, 30938), False, 'import regex\n'), ((30944, 31013), 'regex.sub', 'regex.sub', (['"""\\\\{\\\\{(vern|taxlink)\\\\|(.*?)(\\\\|.*?)*\\\\}\\\\}"""', '"""\\\\2"""', 'text'], {}), "('\\\\{\\\\{(vern|taxlink)\\\\|(.*?)(\\\\|.*?)*\\\\}\\\\}', '\\\\2', text)\n", (30953, 31013), False, 'import regex\n'), ((31020, 31097), 'regex.sub', 'regex.sub', (['"""\\\\{\\\\{syn of\\\\|en\\\\|(.*?)(\\\\|.*?)*\\\\}\\\\}"""', '"""Synonym of \\\\1"""', 'text'], {}), "('\\\\{\\\\{syn of\\\\|en\\\\|(.*?)(\\\\|.*?)*\\\\}\\\\}', 'Synonym of \\\\1', text)\n", (31029, 31097), False, 'import regex\n'), ((31103, 31208), 'regex.sub', 'regex.sub', (['"""\\\\{\\\\{syn\\\\|en\\\\|(.*?)\\\\|(.*?)\\\\|(.*?)(\\\\|.*?)*?\\\\}\\\\}"""', '"""Synonyms: \\\\1, \\\\2, \\\\3 """', 'text'], {}), "('\\\\{\\\\{syn\\\\|en\\\\|(.*?)\\\\|(.*?)\\\\|(.*?)(\\\\|.*?)*?\\\\}\\\\}',\n 'Synonyms: \\\\1, \\\\2, \\\\3 ', text)\n", (31112, 31208), False, 'import regex\n'), ((31227, 31319), 'regex.sub', 'regex.sub', (['"""\\\\{\\\\{syn\\\\|en\\\\|(.*?)\\\\|(.*?)(\\\\|.*?)*?\\\\}\\\\}"""', '"""Synonyms: \\\\1, \\\\2 """', 'text'], {}), "('\\\\{\\\\{syn\\\\|en\\\\|(.*?)\\\\|(.*?)(\\\\|.*?)*?\\\\}\\\\}',\n 'Synonyms: \\\\1, \\\\2 ', text)\n", (31236, 31319), False, 'import regex\n'), ((31319, 31393), 'regex.sub', 'regex.sub', (['"""\\\\{\\\\{syn\\\\|en\\\\|(.*?)(\\\\|.*?)*?\\\\}\\\\}"""', '"""Synonym: \\\\1 """', 'text'], {}), 
"('\\\\{\\\\{syn\\\\|en\\\\|(.*?)(\\\\|.*?)*?\\\\}\\\\}', 'Synonym: \\\\1 ', text)\n", (31328, 31393), False, 'import regex\n'), ((31399, 31493), 'regex.sub', 'regex.sub', (['"""\\\\{\\\\{rfdate[a-z]+\\\\|[a-z]+\\\\|([^\\\\|\\\\}]+)(\\\\|[^\\\\}\\\\|]+)*\\\\}\\\\}"""', '"""\\\\1"""', 'text'], {}), "('\\\\{\\\\{rfdate[a-z]+\\\\|[a-z]+\\\\|([^\\\\|\\\\}]+)(\\\\|[^\\\\}\\\\|]+)*\\\\}\\\\}',\n '\\\\1', text)\n", (31408, 31493), False, 'import regex\n'), ((31491, 31620), 'regex.sub', 'regex.sub', (['"""\\\\{\\\\{(RQ|Q):([^\\\\|\\\\}]+)(\\\\|[^\\\\|\\\\}]+)*\\\\|passage=([^\\\\|\\\\}]+)(\\\\|[^\\\\|\\\\}]+)*\\\\}\\\\}"""', '"""\\\\2 -- \\\\4"""', 'text'], {}), "(\n '\\\\{\\\\{(RQ|Q):([^\\\\|\\\\}]+)(\\\\|[^\\\\|\\\\}]+)*\\\\|passage=([^\\\\|\\\\}]+)(\\\\|[^\\\\|\\\\}]+)*\\\\}\\\\}'\n , '\\\\2 -- \\\\4', text)\n", (31500, 31620), False, 'import regex\n'), ((31628, 31701), 'regex.sub', 'regex.sub', (['"""\\\\{\\\\{(RQ|R):([^\\\\|\\\\}]+)(\\\\|[^\\\\}\\\\|]+)*\\\\}\\\\}"""', '"""\\\\2"""', 'text'], {}), "('\\\\{\\\\{(RQ|R):([^\\\\|\\\\}]+)(\\\\|[^\\\\}\\\\|]+)*\\\\}\\\\}', '\\\\2', text)\n", (31637, 31701), False, 'import regex\n'), ((31705, 31783), 'regex.sub', 'regex.sub', (['"""\\\\{\\\\{([^\\\\}\\\\|]+\\\\|)([^\\\\}\\\\|]+)(\\\\|[^\\\\}]+)?\\\\}\\\\}"""', '"""\\\\2"""', 'text'], {}), "('\\\\{\\\\{([^\\\\}\\\\|]+\\\\|)([^\\\\}\\\\|]+)(\\\\|[^\\\\}]+)?\\\\}\\\\}', '\\\\2', text)\n", (31714, 31783), False, 'import regex\n'), ((31785, 31827), 'regex.sub', 'regex.sub', (['"""\\\\{\\\\{([^}]*)\\\\}\\\\}"""', '""""""', 'text'], {}), "('\\\\{\\\\{([^}]*)\\\\}\\\\}', '', text)\n", (31794, 31827), False, 'import regex\n'), ((31837, 31866), 'regex.sub', 'regex.sub', (['"""\\\\{\\\\}"""', '""""""', 'text'], {}), "('\\\\{\\\\}', '', text)\n", (31846, 31866), False, 'import regex\n'), ((31878, 31907), 'regex.sub', 'regex.sub', (['"""\\\\}\\\\}"""', '""""""', 'text'], {}), "('\\\\}\\\\}', '', text)\n", (31887, 31907), False, 'import regex\n'), ((31919, 31994), 'regex.sub', 
'regex.sub', (['"""\\\\[\\\\[w:[a-z]+:([^\\\\]\\\\|]+)(\\\\|[^\\\\]\\\\|]+)?\\\\]\\\\]"""', '"""\\\\1"""', 'text'], {}), "('\\\\[\\\\[w:[a-z]+:([^\\\\]\\\\|]+)(\\\\|[^\\\\]\\\\|]+)?\\\\]\\\\]', '\\\\1', text)\n", (31928, 31994), False, 'import regex\n'), ((31998, 32074), 'regex.sub', 'regex.sub', (['"""\\\\[\\\\[(category|カテゴリ):[^\\\\]]*\\\\]\\\\]"""', '""""""', 'text', 'regex.IGNORECASE'], {}), "('\\\\[\\\\[(category|カテゴリ):[^\\\\]]*\\\\]\\\\]', '', text, regex.IGNORECASE)\n", (32007, 32074), False, 'import regex\n'), ((32082, 32145), 'regex.sub', 'regex.sub', (['"""\\\\[\\\\[([^\\\\]\\\\|]+\\\\|)?([^\\\\]]*)\\\\]\\\\]"""', '"""\\\\2"""', 'text'], {}), "('\\\\[\\\\[([^\\\\]\\\\|]+\\\\|)?([^\\\\]]*)\\\\]\\\\]', '\\\\2', text)\n", (32091, 32145), False, 'import regex\n'), ((32150, 32209), 'regex.sub', 'regex.sub', (['"""\\\\[(https?://[^ ]+ +)([^\\\\]]+)\\\\]"""', '"""\\\\2"""', 'text'], {}), "('\\\\[(https?://[^ ]+ +)([^\\\\]]+)\\\\]', '\\\\2', text)\n", (32159, 32209), False, 'import regex\n'), ((32219, 32260), 'regex.sub', 'regex.sub', (['"""\\\\[https?://.*?\\\\]"""', '""""""', 'text'], {}), "('\\\\[https?://.*?\\\\]', '', text)\n", (32228, 32260), False, 'import regex\n'), ((32272, 32301), 'regex.sub', 'regex.sub', (['"""\\\\[\\\\["""', '""""""', 'text'], {}), "('\\\\[\\\\[', '', text)\n", (32281, 32301), False, 'import regex\n'), ((32313, 32342), 'regex.sub', 'regex.sub', (['"""\\\\]\\\\]"""', '""""""', 'text'], {}), "('\\\\]\\\\]', '', text)\n", (32322, 32342), False, 'import regex\n'), ((32354, 32380), 'regex.sub', 'regex.sub', (['"""\'\'\'"""', '""""""', 'text'], {}), '("\'\'\'", \'\', text)\n', (32363, 32380), False, 'import regex\n'), ((32393, 32418), 'regex.sub', 'regex.sub', (['"""\'\'"""', '""""""', 'text'], {}), '("\'\'", \'\', text)\n', (32402, 32418), False, 'import regex\n'), ((32431, 32462), 'regex.sub', 'regex.sub', (['"""\\\\( *\\\\)"""', '""""""', 'text'], {}), "('\\\\( *\\\\)', '', text)\n", (32440, 32462), False, 'import regex\n'), ((32473, 
32500), 'regex.sub', 'regex.sub', (['"""( *)"""', '""""""', 'text'], {}), "('( *)', '', text)\n", (32482, 32500), False, 'import regex\n'), ((32513, 32540), 'regex.sub', 'regex.sub', (['"""「 *」"""', '""""""', 'text'], {}), "('「 *」', '', text)\n", (32522, 32540), False, 'import regex\n'), ((32553, 32590), 'regex.sub', 'regex.sub', (['"""<ref>.*?</ref>"""', '""""""', 'text'], {}), "('<ref>.*?</ref>', '', text)\n", (32562, 32590), False, 'import regex\n'), ((32603, 32641), 'regex.sub', 'regex.sub', (['"""</?[a-z]+[^>]*>"""', '""""""', 'text'], {}), "('</?[a-z]+[^>]*>', '', text)\n", (32612, 32641), False, 'import regex\n'), ((32654, 32684), 'regex.sub', 'regex.sub', (['"""<!-- *"""', '"""("""', 'text'], {}), "('<!-- *', '(', text)\n", (32663, 32684), False, 'import regex\n'), ((32697, 32726), 'regex.sub', 'regex.sub', (['""" *-->"""', '""")"""', 'text'], {}), "(' *-->', ')', text)\n", (32706, 32726), False, 'import regex\n'), ((32739, 32772), 'regex.sub', 'regex.sub', (['"""^ *[,:;] *"""', '""""""', 'text'], {}), "('^ *[,:;] *', '', text)\n", (32748, 32772), False, 'import regex\n'), ((32881, 32920), 'regex.sub', 'regex.sub', (['"""</?[a-z]+[^>]*>"""', '""""""', 'value'], {}), "('</?[a-z]+[^>]*>', '', value)\n", (32890, 32920), False, 'import regex\n'), ((32934, 32969), 'regex.sub', 'regex.sub', (['"""^/(.*)/$"""', '"""\\\\1"""', 'value'], {}), "('^/(.*)/$', '\\\\1', value)\n", (32943, 32969), False, 'import regex\n'), ((32983, 33021), 'regex.sub', 'regex.sub', (['"""lang=[a-z]*\\\\|"""', '""""""', 'value'], {}), "('lang=[a-z]*\\\\|', '', value)\n", (32992, 33021), False, 'import regex\n'), ((33034, 33066), 'regex.sub', 'regex.sub', (['"""[,\\\\|].*"""', '""""""', 'value'], {}), "('[,\\\\|].*', '', value)\n", (33043, 33066), False, 'import regex\n'), ((3801, 3844), 'regex.search', 'regex.search', (['"""^[-\\\\p{Latin}0-9 ]+$"""', 'title'], {}), "('^[-\\\\p{Latin}0-9 ]+$', title)\n", (3813, 3844), False, 'import regex\n'), ((4355, 4390), 'regex.search', 'regex.search', 
(['"""^==([^=]+)==$"""', 'line'], {}), "('^==([^=]+)==$', line)\n", (4367, 4390), False, 'import regex\n'), ((25242, 25374), 'regex.search', 'regex.search', (["('の(直接法|直説法|仮定法)?(現在|過去)?(第?[一二三]人称)?[ ・、]?' +\n '(単数|複数|現在|過去|比較|最上|進行|完了|動名詞|単純)+[ ・、]?' + '(形|型|分詞|級|動名詞)+')", 'value'], {}), "('の(直接法|直説法|仮定法)?(現在|過去)?(第?[一二三]人称)?[ ・、]?' +\n '(単数|複数|現在|過去|比較|最上|進行|完了|動名詞|単純)+[ ・、]?' + '(形|型|分詞|級|動名詞)+', value)\n", (25254, 25374), False, 'import regex\n'), ((25432, 25476), 'regex.search', 'regex.search', (['"""の(直接法|直説法|仮定法)(現在|過去)"""', 'value'], {}), "('の(直接法|直説法|仮定法)(現在|過去)', value)\n", (25444, 25476), False, 'import regex\n'), ((25505, 25540), 'regex.search', 'regex.search', (['"""の(動名詞|異綴|旧綴)"""', 'value'], {}), "('の(動名詞|異綴|旧綴)', value)\n", (25517, 25540), False, 'import regex\n'), ((33096, 33131), 'regex.sub', 'regex.sub', (['"""^/(.*)/$"""', '"""\\\\1"""', 'value'], {}), "('^/(.*)/$', '\\\\1', value)\n", (33105, 33131), False, 'import regex\n'), ((33147, 33179), 'regex.sub', 'regex.sub', (['"""/ ?\\\\(.*"""', '""""""', 'value'], {}), "('/ ?\\\\(.*', '', value)\n", (33156, 33179), False, 'import regex\n'), ((33194, 33219), 'regex.sub', 'regex.sub', (['"""/"""', '""""""', 'value'], {}), "('/', '', value)\n", (33203, 33219), False, 'import regex\n'), ((33385, 33433), 'regex.sub', 'regex.sub', (['"""\\\\[\\\\[([^\\\\]]+)\\\\]\\\\]"""', '"""\\\\1"""', 'value'], {}), "('\\\\[\\\\[([^\\\\]]+)\\\\]\\\\]', '\\\\1', value)\n", (33394, 33433), False, 'import regex\n'), ((33523, 33555), 'regex.sub', 'regex.sub', (['"""(又|また).*"""', '""""""', 'value'], {}), "('(又|また).*', '', value)\n", (33532, 33555), False, 'import regex\n'), ((33571, 33598), 'regex.sub', 'regex.sub', (['""",.*"""', '""""""', 'value'], {}), "(',.*', '', value)\n", (33580, 33598), False, 'import regex\n'), ((33609, 33656), 'regex.search', 'regex.search', (['"""^[a-z_]+[234](_[a-z_]+)="""', 'value'], {}), "('^[a-z_]+[234](_[a-z_]+)=', value)\n", (33621, 33656), False, 'import regex\n'), ((33805, 33853), 
'tkrzw_dict.GetCommandFlag', 'tkrzw_dict.GetCommandFlag', (['args', '"""--sampling"""', '(1)'], {}), "(args, '--sampling', 1)\n", (33830, 33853), False, 'import tkrzw_dict\n'), ((33882, 33925), 'tkrzw_dict.GetCommandFlag', 'tkrzw_dict.GetCommandFlag', (['args', '"""--max"""', '(1)'], {}), "(args, '--max', 1)\n", (33907, 33925), False, 'import tkrzw_dict\n'), ((4755, 4792), 'regex.search', 'regex.search', (['"""^===([^=]+)===$"""', 'line'], {}), "('^===([^=]+)===$', line)\n", (4767, 4792), False, 'import regex\n'), ((7617, 7688), 'regex.search', 'regex.search', (['"""^\\\\{\\\\{(pron|発音)(\\\\|(en|eng))?[0-9]?\\\\}\\\\}[0-9]?$"""', 'mode'], {}), "('^\\\\{\\\\{(pron|発音)(\\\\|(en|eng))?[0-9]?\\\\}\\\\}[0-9]?$', mode)\n", (7629, 7688), False, 'import regex\n'), ((32785, 32813), 'regex.sub', 'regex.sub', (['"""\\\\s+"""', '""" """', 'text'], {}), "('\\\\s+', ' ', text)\n", (32794, 32813), False, 'import regex\n'), ((2980, 2995), 'random.random', 'random.random', ([], {}), '()\n', (2993, 2995), False, 'import random\n'), ((5007, 5048), 'regex.search', 'regex.search', (['"""^====+([^=]+)=+===$"""', 'line'], {}), "('^====+([^=]+)=+===$', line)\n", (5019, 5048), False, 'import regex\n'), ((7519, 7545), 'regex.sub', 'regex.sub', (['""":.*"""', '""""""', 'mode'], {}), "(':.*', '', mode)\n", (7528, 7545), False, 'import regex\n'), ((7568, 7598), 'regex.sub', 'regex.sub', (['"""[0-9]+$"""', '""""""', 'mode'], {}), "('[0-9]+$', '', mode)\n", (7577, 7598), False, 'import regex\n'), ((9380, 9469), 'regex.search', 'regex.search', (['"""\\\\{\\\\{ipa[0-9]?\\\\|([^}|]+)(\\\\|[^}|]+)*\\\\}\\\\}"""', 'line', 'regex.IGNORECASE'], {}), "('\\\\{\\\\{ipa[0-9]?\\\\|([^}|]+)(\\\\|[^}|]+)*\\\\}\\\\}', line, regex.\n IGNORECASE)\n", (9392, 9469), False, 'import regex\n'), ((9858, 9925), 'regex.search', 'regex.search', (['"""\\\\{\\\\{sampa\\\\|([^}]+)\\\\}\\\\}"""', 'line', 'regex.IGNORECASE'], {}), "('\\\\{\\\\{sampa\\\\|([^}]+)\\\\}\\\\}', line, regex.IGNORECASE)\n", (9870, 9925), False, 
'import regex\n'), ((10279, 10351), 'regex.search', 'regex.search', (['"""\\\\{\\\\{pron-en1\\\\|([^\\\\}]+)\\\\}\\\\}"""', 'line', 'regex.IGNORECASE'], {}), "('\\\\{\\\\{pron-en1\\\\|([^\\\\}]+)\\\\}\\\\}', line, regex.IGNORECASE)\n", (10291, 10351), False, 'import regex\n'), ((24397, 24485), 'regex.search', 'regex.search', (['"""([\\\\p{Latin}0-9]{2,}|[\\\\p{Han}\\\\p{Hiragana}\\\\p{Katakana}])"""', 'eff_text'], {}), "('([\\\\p{Latin}0-9]{2,}|[\\\\p{Han}\\\\p{Hiragana}\\\\p{Katakana}])',\n eff_text)\n", (24409, 24485), False, 'import regex\n'), ((4408, 4447), 'regex.sub', 'regex.sub', (['"""^==([^=]+)==$"""', '"""\\\\1"""', 'line'], {}), "('^==([^=]+)==$', '\\\\1', line)\n", (4417, 4447), False, 'import regex\n'), ((5755, 5822), 'regex.search', 'regex.search', (['"""^\\\\[\\\\[category:(.*)\\\\]\\\\]$"""', 'line', 'regex.IGNORECASE'], {}), "('^\\\\[\\\\[category:(.*)\\\\]\\\\]$', line, regex.IGNORECASE)\n", (5767, 5822), False, 'import regex\n'), ((9481, 9583), 'regex.sub', 'regex.sub', (['""".*\\\\{\\\\{ipa[0-9]?\\\\|([^}|]+)(\\\\|[^}|]+)*\\\\}\\\\}.*"""', '"""\\\\1"""', 'line'], {'flags': 'regex.IGNORECASE'}), "('.*\\\\{\\\\{ipa[0-9]?\\\\|([^}|]+)(\\\\|[^}|]+)*\\\\}\\\\}.*', '\\\\1', line,\n flags=regex.IGNORECASE)\n", (9490, 9583), False, 'import regex\n'), ((9943, 10029), 'regex.sub', 'regex.sub', (['""".*\\\\{\\\\{sampa\\\\|([^}]+)\\\\}\\\\}.*"""', '"""\\\\1"""', 'line'], {'flags': 'regex.IGNORECASE'}), "('.*\\\\{\\\\{sampa\\\\|([^}]+)\\\\}\\\\}.*', '\\\\1', line, flags=regex.\n IGNORECASE)\n", (9952, 10029), False, 'import regex\n'), ((11540, 11594), 'regex.search', 'regex.search', (['"""\\\\{\\\\{en-noun\\\\|?([^\\\\}]*)\\\\}\\\\}"""', 'line'], {}), "('\\\\{\\\\{en-noun\\\\|?([^\\\\}]*)\\\\}\\\\}', line)\n", (11552, 11594), False, 'import regex\n'), ((13479, 13533), 'regex.search', 'regex.search', (['"""\\\\{\\\\{en-verb\\\\|?([^\\\\}]*)\\\\}\\\\}"""', 'line'], {}), "('\\\\{\\\\{en-verb\\\\|?([^\\\\}]*)\\\\}\\\\}', line)\n", (13491, 13533), False, 'import 
regex\n'), ((18378, 18431), 'regex.search', 'regex.search', (['"""\\\\{\\\\{en-adj\\\\|?([^\\\\}]*)\\\\}\\\\}"""', 'line'], {}), "('\\\\{\\\\{en-adj\\\\|?([^\\\\}]*)\\\\}\\\\}', line)\n", (18390, 18431), False, 'import regex\n'), ((20412, 20465), 'regex.search', 'regex.search', (['"""\\\\{\\\\{en-adv\\\\|?([^\\\\}]*)\\\\}\\\\}"""', 'line'], {}), "('\\\\{\\\\{en-adv\\\\|?([^\\\\}]*)\\\\}\\\\}', line)\n", (20424, 20465), False, 'import regex\n'), ((23463, 23502), 'regex.sub', 'regex.sub', (['"""^([#\\\\*:]+).*"""', '"""\\\\1"""', 'line'], {}), "('^([#\\\\*:]+).*', '\\\\1', line)\n", (23472, 23502), False, 'import regex\n'), ((4810, 4851), 'regex.sub', 'regex.sub', (['"""^===([^=]+)===$"""', '"""\\\\1"""', 'line'], {}), "('^===([^=]+)===$', '\\\\1', line)\n", (4819, 4851), False, 'import regex\n'), ((4876, 4902), 'regex.sub', 'regex.sub', (['""":.*"""', '""""""', 'mode'], {}), "(':.*', '', mode)\n", (4885, 4902), False, 'import regex\n'), ((5836, 5913), 'regex.sub', 'regex.sub', (['"""^\\\\[\\\\[category:(.*)\\\\]\\\\]$"""', '"""\\\\1"""', 'line'], {'flags': 'regex.IGNORECASE'}), "('^\\\\[\\\\[category:(.*)\\\\]\\\\]$', '\\\\1', line, flags=regex.IGNORECASE)\n", (5845, 5913), False, 'import regex\n'), ((9700, 9730), 'regex.search', 'regex.search', (['"""(アメリカ|米)"""', 'line'], {}), "('(アメリカ|米)', line)\n", (9712, 9730), False, 'import regex\n'), ((10117, 10147), 'regex.search', 'regex.search', (['"""(アメリカ|米)"""', 'line'], {}), "('(アメリカ|米)', line)\n", (10129, 10147), False, 'import regex\n'), ((11296, 11358), 'regex.findall', 'regex.findall', (['"""\\\\{\\\\{l\\\\|en\\\\|([- \\\\p{Latin}]+?)\\\\}\\\\}"""', 'line'], {}), "('\\\\{\\\\{l\\\\|en\\\\|([- \\\\p{Latin}]+?)\\\\}\\\\}', line)\n", (11309, 11358), False, 'import regex\n'), ((11416, 11469), 'regex.findall', 'regex.findall', (['"""\\\\[\\\\[([- \\\\p{Latin}]+?)\\\\]\\\\]"""', 'line'], {}), "('\\\\[\\\\[([- \\\\p{Latin}]+?)\\\\]\\\\]', line)\n", (11429, 11469), False, 'import regex\n'), ((22466, 22530), 
'regex.search', 'regex.search', (['"""\\\\{\\\\{p\\\\}\\\\} *:.*\\\\[\\\\[([a-zA-Z ]+)\\\\]\\\\]"""', 'line'], {}), "('\\\\{\\\\{p\\\\}\\\\} *:.*\\\\[\\\\[([a-zA-Z ]+)\\\\]\\\\]', line)\n", (22478, 22530), False, 'import regex\n'), ((22771, 22874), 'regex.search', 'regex.search', (['"""比較級 *:.*\\\\[\\\\[([a-zA-Z ]+)\\\\]\\\\].*[,、].*最上級 *: *\\\\[\\\\[([a-zA-Z ]+)\\\\]\\\\]"""', 'line'], {}), "(\n '比較級 *:.*\\\\[\\\\[([a-zA-Z ]+)\\\\]\\\\].*[,、].*最上級 *: *\\\\[\\\\[([a-zA-Z ]+)\\\\]\\\\]',\n line)\n", (22783, 22874), False, 'import regex\n'), ((23364, 23394), 'regex.search', 'regex.search', (['"""^[#\\\\*:]"""', 'line'], {}), "('^[#\\\\*:]', line)\n", (23376, 23394), False, 'import regex\n'), ((23825, 23912), 'regex.search', 'regex.search', (['"""(\\\\p{Latin}{2,})|([\\\\p{Han}\\\\p{Hiragana}|\\\\p{Katakana}ー])"""', 'eff_text'], {}), "('(\\\\p{Latin}{2,})|([\\\\p{Han}\\\\p{Hiragana}|\\\\p{Katakana}ー])',\n eff_text)\n", (23837, 23912), False, 'import regex\n'), ((24332, 24378), 'regex.sub', 'regex.sub', (['"""[\\\\((].*?[\\\\))]"""', '""""""', 'current_text'], {}), "('[\\\\((].*?[\\\\))]', '', current_text)\n", (24341, 24378), False, 'import regex\n'), ((5069, 5114), 'regex.sub', 'regex.sub', (['"""^====+([^=]+)=+===$"""', '"""\\\\1"""', 'line'], {}), "('^====+([^=]+)=+===$', '\\\\1', line)\n", (5078, 5114), False, 'import regex\n'), ((5142, 5171), 'regex.sub', 'regex.sub', (['""":.*"""', '""""""', 'submode'], {}), "(':.*', '', submode)\n", (5151, 5171), False, 'import regex\n'), ((10369, 10431), 'regex.sub', 'regex.sub', (['""".*\\\\{\\\\{pron-en1\\\\|([^\\\\}]+)\\\\}\\\\}.*"""', '"""\\\\1"""', 'line'], {}), "('.*\\\\{\\\\{pron-en1\\\\|([^\\\\}]+)\\\\}\\\\}.*', '\\\\1', line)\n", (10378, 10431), False, 'import regex\n'), ((11085, 11117), 'regex.search', 'regex.search', (['"""[^は]廃(語|用)"""', 'line'], {}), "('[^は]廃(語|用)', line)\n", (11097, 11117), False, 'import regex\n'), ((11183, 11213), 'regex.search', 'regex.search', (['"""(または|又は)"""', 'line'], {}), "('(または|又は)', 
line)\n", (11195, 11213), False, 'import regex\n'), ((22547, 22619), 'regex.sub', 'regex.sub', (['""".*\\\\{\\\\{p\\\\}\\\\} *:.*\\\\[\\\\[([a-zA-Z ]+)\\\\]\\\\].*"""', '"""\\\\1"""', 'line'], {}), "('.*\\\\{\\\\{p\\\\}\\\\} *:.*\\\\[\\\\[([a-zA-Z ]+)\\\\]\\\\].*', '\\\\1', line)\n", (22556, 22619), False, 'import regex\n'), ((23762, 23800), 'regex.sub', 'regex.sub', (['"""[\\\\((].*?[\\\\))]"""', '""""""', 'text'], {}), "('[\\\\((].*?[\\\\))]', '', text)\n", (23771, 23800), False, 'import regex\n'), ((6018, 6064), 'regex.search', 'regex.search', (['"""^\\\\{\\\\{[a-z]{2,3}\\\\}\\\\}$"""', 'lang'], {}), "('^\\\\{\\\\{[a-z]{2,3}\\\\}\\\\}$', lang)\n", (6030, 6064), False, 'import regex\n'), ((7110, 7172), 'regex.findall', 'regex.findall', (['"""\\\\{\\\\{l\\\\|en\\\\|([- \\\\p{Latin}]+?)\\\\}\\\\}"""', 'line'], {}), "('\\\\{\\\\{l\\\\|en\\\\|([- \\\\p{Latin}]+?)\\\\}\\\\}', line)\n", (7123, 7172), False, 'import regex\n'), ((7233, 7286), 'regex.findall', 'regex.findall', (['"""\\\\[\\\\[([- \\\\p{Latin}]+?)\\\\]\\\\]"""', 'line'], {}), "('\\\\[\\\\[([- \\\\p{Latin}]+?)\\\\]\\\\]', line)\n", (7246, 7286), False, 'import regex\n'), ((11692, 11754), 'regex.sub', 'regex.sub', (['""".*\\\\{\\\\{en-noun\\\\|?([^\\\\}]*)\\\\}\\\\}.*"""', '"""\\\\1"""', 'line'], {}), "('.*\\\\{\\\\{en-noun\\\\|?([^\\\\}]*)\\\\}\\\\}.*', '\\\\1', line)\n", (11701, 11754), False, 'import regex\n'), ((13631, 13693), 'regex.sub', 'regex.sub', (['""".*\\\\{\\\\{en-verb\\\\|?([^\\\\}]*)\\\\}\\\\}.*"""', '"""\\\\1"""', 'line'], {}), "('.*\\\\{\\\\{en-verb\\\\|?([^\\\\}]*)\\\\}\\\\}.*', '\\\\1', line)\n", (13640, 13693), False, 'import regex\n'), ((18539, 18600), 'regex.sub', 'regex.sub', (['""".*\\\\{\\\\{en-adj\\\\|?([^\\\\}]*)\\\\}\\\\}.*"""', '"""\\\\1"""', 'line'], {}), "('.*\\\\{\\\\{en-adj\\\\|?([^\\\\}]*)\\\\}\\\\}.*', '\\\\1', line)\n", (18548, 18600), False, 'import regex\n'), ((19193, 19218), 'regex.sub', 'regex.sub', (['"""e$"""', '""""""', 'stem'], {}), "('e$', '', stem)\n", (19202, 
19218), False, 'import regex\n'), ((19243, 19282), 'regex.sub', 'regex.sub', (['"""([^aeiou])y$"""', '"""\\\\1i"""', 'stem'], {}), "('([^aeiou])y$', '\\\\1i', stem)\n", (19252, 19282), False, 'import regex\n'), ((20567, 20628), 'regex.sub', 'regex.sub', (['""".*\\\\{\\\\{en-adv\\\\|?([^\\\\}]*)\\\\}\\\\}.*"""', '"""\\\\1"""', 'line'], {}), "('.*\\\\{\\\\{en-adv\\\\|?([^\\\\}]*)\\\\}\\\\}.*', '\\\\1', line)\n", (20576, 20628), False, 'import regex\n'), ((21221, 21246), 'regex.sub', 'regex.sub', (['"""e$"""', '""""""', 'stem'], {}), "('e$', '', stem)\n", (21230, 21246), False, 'import regex\n'), ((21271, 21311), 'regex.sub', 'regex.sub', (['"""([^aeiou])y]$"""', '"""\\\\1i"""', 'stem'], {}), "('([^aeiou])y]$', '\\\\1i', stem)\n", (21280, 21311), False, 'import regex\n'), ((22900, 23017), 'regex.sub', 'regex.sub', (['""".*比較級 *:.*\\\\[\\\\[([a-zA-Z ]+)\\\\]\\\\].*[,、].*最上級 *: *\\\\[\\\\[([a-zA-Z ]+)\\\\]\\\\].*"""', '"""\\\\1\t\\\\2"""', 'line'], {}), "(\n '.*比較級 *:.*\\\\[\\\\[([a-zA-Z ]+)\\\\]\\\\].*[,、].*最上級 *: *\\\\[\\\\[([a-zA-Z ]+)\\\\]\\\\].*'\n , '\\\\1\\t\\\\2', line)\n", (22909, 23017), False, 'import regex\n'), ((13003, 13034), 'regex.sub', 'regex.sub', (['""".*="""', '""""""', 'values[0]'], {}), "('.*=', '', values[0])\n", (13012, 13034), False, 'import regex\n'), ((15948, 15974), 'regex.sub', 'regex.sub', (['"""e$"""', '""""""', 'title'], {}), "('e$', '', title)\n", (15957, 15974), False, 'import regex\n'), ((13312, 13343), 'regex.sub', 'regex.sub', (['""".*="""', '""""""', 'values[1]'], {}), "('.*=', '', values[1])\n", (13321, 13343), False, 'import regex\n')]
|
# import sys
#
# sys.path.insert(0, '/content/gdrive/MyDrive/Tese/code') # for colab
import time
import torch.nn.functional as F
from src.classification_scripts.finetune_abstract import *
class FineTuneSupCon(FineTune):
    """
    Fine-tunes the unfrozen EfficientNet encoder on RSICD data with a
    supervised-contrastive objective.  Each batch is expected to carry two
    augmented views of every image (``imgs`` is a 2-element list); the paired
    feature projections are scored against the class targets by
    ``self.criterion`` (presumably a SupCon loss — it is configured by the
    ``FineTune`` base class; confirm there).
    """
    def __init__(self, model_type, device, file, nr_classes=31, eff_net_version = 'v1'): # default is 31 classes (nr of RSICD classes)
        # All setup (model, optimizer, criterion, checkpoint restore) is done by FineTune.
        super().__init__(model_type, device, file, nr_classes, eff_net_version)
    def _train_step(self, imgs, targets):
        """
        Run one supervised-contrastive training step.

        imgs    -- 2-element list holding two augmented views of the same batch.
        targets -- class labels of shape (batch, 1) (squeezed on dim 1 below).
        Returns (loss, batch_size).
        """
        # Stack the two views along the batch axis: (2*bsz, ...).
        images = torch.cat([imgs[0], imgs[1]], dim = 0)
        if torch.cuda.is_available():
            images = images.cuda(non_blocking=True)
            targets = targets.cuda(non_blocking=True)
        bsz = targets.shape[0]
        features = self.model(images)
        # Split back into the two views and pair them per sample: (bsz, 2, feat_dim).
        f1, f2 = torch.split(features, [bsz, bsz], dim=0)
        features = torch.cat([f1.unsqueeze(1), f2.unsqueeze(1)], dim=1)
        loss = self.criterion(features, targets.squeeze(1))
        self.model.zero_grad()
        loss.backward()
        # Update weights
        self.optimizer.step()
        return loss, targets.shape[0]
    def val_step(self, imgs, targets):
        """
        Run one validation step (same forward/loss as ``_train_step`` but no
        backward/optimizer update; the caller wraps this in ``torch.no_grad()``).
        Returns (loss, batch_size).
        """
        # if doing diff views on the same batch need to iterate through the list first
        images = torch.cat([imgs[0], imgs[1]], dim=0)
        if torch.cuda.is_available():
            images = images.cuda(non_blocking=True)
            targets = targets.cuda(non_blocking=True)
        bsz = targets.shape[0]
        features = self.model(images)
        f1, f2 = torch.split(features, [bsz, bsz], dim=0)
        features = torch.cat([f1.unsqueeze(1), f2.unsqueeze(1)], dim=1)
        loss = self.criterion(features, targets.squeeze(1))
        return loss, targets.shape[0]
    def train(self, train_dataloader, val_dataloader):
        """
        Full training loop: iterates epochs, trains, validates, drives early
        stopping on the validation loss, and checkpoints the encoder.
        """
        early_stopping = EarlyStopping(
            epochs_limit_without_improvement=6,
            epochs_since_last_improvement=self.checkpoint_epochs_since_last_improvement
            if self.checkpoint_exists else 0,
            baseline=torch.FloatTensor([self.checkpoint_val_loss.val]) if self.checkpoint_exists else np.Inf,
            encoder_optimizer=self.optimizer, # only the encoder is being tuned here
            decoder_optimizer=None,
            period_decay_lr=2 # no decay lr!
        )
        batch_time = AverageMeter()
        # NOTE(review): these meters are created once, so the reported averages
        # accumulate across ALL epochs, not per epoch — confirm this is intended.
        train_losses = AverageMeter()
        val_losses = AverageMeter()
        start = time.time()
        # Resume from the checkpointed epoch when one exists.
        start_epoch = self.checkpoint_start_epoch if self.checkpoint_exists else 0
        # Iterate by epoch
        for epoch in range(start_epoch, int(self.setters["h_parameters"]['epochs'])):
            self.current_epoch = epoch
            if early_stopping.is_to_stop_training_early():
                break
            # Train by batch
            self.model.train()
            for batch_i, (imgs, targets) in enumerate(train_dataloader):
                train_loss, bsz = self._train_step(imgs, targets)
                train_losses.update(train_loss.item(), bsz)
                self._log_status("TRAIN", epoch, batch_i,
                                 train_dataloader, train_loss)
                # (only for debug: interrupt train after 1 step)
                if self.setters["DEBUG"]:
                    break
            batch_time.update(time.time() - start)
            # End training
            logging.info(' Batch Time {batch_time.val:.3f} ({batch_time.avg:.3f})\t sec'.format(
                batch_time=batch_time))
            logging.info('\n\n-----> TRAIN END! Epoch: {}\t'
                         'Loss {loss.val:.4f} ({loss.avg:.4f})\t'.format(epoch, loss=train_losses))
            # Start validation
            self.model.eval() # eval mode (no dropout or batchnorm)
            with torch.no_grad():
                for batch_i, (imgs, targets) in enumerate(val_dataloader):
                    val_loss, bsz = self.val_step(imgs, targets)
                    val_losses.update(val_loss.item(), bsz)
                    self._log_status("VAL", epoch, batch_i,
                                     val_dataloader, val_loss)
                    # (only for debug: interrupt val after 1 step)
                    if self.setters["DEBUG"]:
                        break
            # End validation
            early_stopping.check_improvement(torch.Tensor([val_losses.avg]))
            self._save_checkpoint_encoder(early_stopping.is_current_val_best(),
                                          epoch,
                                          early_stopping.get_number_of_epochs_without_improvement(),
                                          val_losses)
            logging.info(
                '\n-------------- END EPOCH:{}⁄{}\t Train Loss {train_loss.val:.4f} ({train_loss.avg:.4f})\t'
                'Val Loss {val_loss.val:.4f} ({val_loss.avg:.4f})\t'.format(
                    epoch, int(self.setters["h_parameters"]['epochs']), train_loss=train_losses, val_loss=val_losses))
    def _log_status(self, train_or_val, epoch, batch_i, dataloader, loss):
        """Log the current loss every ``print_freq`` batches for the given phase."""
        print_freq = int(self.setters["h_parameters"]['print_freq'])
        if batch_i % print_freq == 0:
            logging.info(
                "{} - Epoch: [{}/{}]; Batch: [{}/{}]\t Loss: {:.4f}\t".format(
                    train_or_val, epoch, int(self.setters["h_parameters"]['epochs']), batch_i,
                    len(dataloader), loss
                )
            )
|
[
"time.time"
] |
[((2726, 2737), 'time.time', 'time.time', ([], {}), '()\n', (2735, 2737), False, 'import time\n'), ((3626, 3637), 'time.time', 'time.time', ([], {}), '()\n', (3635, 3637), False, 'import time\n')]
|
'''
Created on January 5, 2020
Filer Guidelines: ESMA_ESEF Manual 2019.pdf
@author: Mark V Systems Limited
(c) Copyright 2020 Mark V Systems Limited, All rights reserved.
'''
from .Const import standardTaxonomyURIs, esefTaxonomyNamespaceURIs
from lxml.etree import XML, XMLSyntaxError
# check if a modelDocument URI is an extension URI (document URI)
# also works on a uri passed in as well as modelObject
def isExtension(val, modelObject):
    """Return True when *modelObject* (a model object, or a URI passed as a
    plain string) belongs to the filer's extension taxonomy: either it lives
    under the filing's own directory, or its URI matches none of the known
    standard-taxonomy URI prefixes."""
    if modelObject is None:
        return False
    uri = modelObject if isinstance(modelObject, str) else modelObject.modelDocument.uri
    if uri.startswith(val.modelXbrl.uriDir):
        return True
    return not any(uri.startswith(stdUri) for stdUri in standardTaxonomyURIs)
# check if in core esef taxonomy (based on namespace URI)
def isInEsefTaxonomy(val, modelObject):
    """Return True when *modelObject*'s qname namespace URI starts with one
    of the core ESEF taxonomy namespace prefixes."""
    if modelObject is None:
        return False
    namespaceURI = modelObject.qname.namespaceURI
    for esefNsPrefix in esefTaxonomyNamespaceURIs:
        if namespaceURI.startswith(esefNsPrefix):
            return True
    return False
# check image contents against mime/file ext and for Steganography
def checkImageContents(modelXbrl, imgElt, imgType, data):
    """
    Validate embedded image bytes *data* against the declared *imgType*
    (mime/file extension string) per ESEF rule 2.5.1, reporting problems on
    *modelXbrl*:
    - SVG: must parse as XML with an svg root and contain no executable
      content (script/object elements, javascript: hrefs).
    - gif/jpg/jpeg/png: the magic-byte header must match the declared type.
    - any other declared type is rejected as unsupported.
    """
    if "svg" in imgType:
        try:
            rootElement = True
            for elt in XML(data).iter():
                if rootElement:
                    # Only the first element visited is the document root.
                    if elt.tag != "{http://www.w3.org/2000/svg}svg":
                        modelXbrl.error("ESEF.2.5.1.imageFileCannotBeLoaded",
                            _("Image SVG has root element which is not svg"),
                            modelObject=imgElt)
                    rootElement = False
                eltTag = elt.tag.rpartition("}")[2] # strip namespace
                # Reject elements that can execute or embed active content.
                if ((eltTag in ("object", "script")) or
                    (eltTag in ("audio", "foreignObject", "iframe", "image", "script", "use", "video")
                     and "javascript:" in elt.get("href",""))):
                    modelXbrl.error("ESEF.2.5.1.executableCodePresent",
                        _("Inline XBRL images MUST NOT contain executable code: %(element)s"),
                        modelObject=imgElt, element=eltTag)
        except (XMLSyntaxError, UnicodeDecodeError) as err:
            modelXbrl.error("ESEF.2.5.1.imageFileCannotBeLoaded",
                _("Image SVG has XML error %(error)s"),
                modelObject=imgElt, error=err)
    elif not any(t in imgType for t in ("gif", "jpg", "jpeg", "png")):
        modelXbrl.error("ESEF.2.5.1.imageFileCannotBeLoaded",
            _("Image type %(imgType)s is not supported"),
            modelObject=imgElt, imgType=imgType)
    else:
        # Sniff the actual file format from its magic bytes.
        if data[:3] == b"GIF" and data[3:6] in (b'89a', b'89b', b'87a'):
            headerType = "gif"
        elif ((data[:4] == b'\xff\xd8\xff\xe0' and data[6:11] == b'JFIF\x00') or
              (data[:4] == b'\xff\xd8\xff\xe1' and data[6:11] == b'Exif\x00')):
            headerType = "jpg"
        elif data[:8] == b"\x89PNG\r\n\x1a\n":
            headerType = "png"
        elif data[:2] in (b"MM", b"II"):
            headerType = "tiff"
        elif data[:2] in (b"BM", b"BA"):
            headerType = "bmp"
        elif data[:4] == b"\x00\x00\x01\x00":
            headerType = "ico"
        elif data[:4] == b"\x00\x00\x02\x00":
            headerType = "cur"
        elif len(data) == 0:
            headerType = "none"
        else:
            headerType = "unrecognized"
        # Mismatch between declared type and sniffed header is an error;
        # other sniffed types (tiff/bmp/ico/...) fall through silently here.
        if (("gif" in imgType and headerType != "gif") or
            (("jpg" in imgType or "jpeg" in imgType) and headerType != "jpg") or
            ("png" in imgType and headerType != "png")):
            modelXbrl.error("ESEF.2.5.1.imageFileCannotBeLoaded",
                _("Image type %(imgType)s has wrong header type: %(headerType)s"),
                modelObject=imgElt, imgType=imgType, headerType=headerType)
|
[
"lxml.etree.XML"
] |
[((1270, 1279), 'lxml.etree.XML', 'XML', (['data'], {}), '(data)\n', (1273, 1279), False, 'from lxml.etree import XML, XMLSyntaxError\n')]
|
# (C) Copyright 2021 ECMWF.
#
# This software is licensed under the terms of the Apache Licence Version 2.0
# which can be obtained at http://www.apache.org/licenses/LICENSE-2.0.
# In applying this licence, ECMWF does not waive the privileges and immunities
# granted to it by virtue of its status as an intergovernmental organisation
# nor does it submit to any jurisdiction.
#
import inspect
import logging
import os
import re
import threading
from functools import wraps
from climetlab.utils import load_json_or_yaml
from climetlab.utils.availability import Availability
LOG = logging.getLogger(__name__)
def dict_args(func):
    """Decorator: every positional argument that is a dict is merged into the
    keyword arguments (in positional order, with explicit kwargs winning);
    all other positionals are forwarded unchanged."""
    @wraps(func)
    def wrapped(*args, **kwargs):
        positionals = [a for a in args if not isinstance(a, dict)]
        keywords = {}
        for a in args:
            if isinstance(a, dict):
                keywords.update(a)
        keywords.update(kwargs)
        return func(*positionals, **keywords)
    return wrapped
# Module-wide re-entrant lock shared by every function decorated with @locked.
LOCK = threading.RLock()
def locked(func):
    """Decorator serialising all calls to *func* behind the module LOCK."""
    @wraps(func)
    def wrapped(*args, **kwargs):
        LOCK.acquire()
        try:
            return func(*args, **kwargs)
        finally:
            LOCK.release()
    return wrapped
class Decorator:
    # Subclasses that wrap an Availability set this True (see availability below).
    is_availability = False
    def __call__(self, func):
        """
        Apply this decorator.  Two modes:
        - called on a non-callable: treat it as a plain value and normalize it
          immediately through an InputManager built from this decorator alone;
        - called on a function: stack this decorator with any decorators
          already applied to *func*, and return a single wrapper that runs the
          combined InputManager over the call's args/kwargs before invoking
          the original (innermost) function.
        """
        # Imported lazily; presumably to avoid a circular import — TODO confirm.
        from climetlab.arguments import InputManager
        if not callable(func):
            manager = InputManager(decorators=[self])
            return manager.apply_to_value(func)
        decorators = [self]
        def unwrap(f):
            # Walk down through previously-applied climetlab wrappers to the
            # original user function.
            if not hasattr(f, "_climetlab_decorators"):
                return f
            return unwrap(f.__wrapped__)
        unwrapped = unwrap(func)
        if hasattr(func, "_climetlab_decorators"):
            # Accumulate: this decorator first, then the ones already applied.
            decorators = decorators + func._climetlab_decorators
        manager = InputManager(decorators=decorators)
        @wraps(unwrapped)
        def newfunc(*args, **kwargs):
            args, kwargs = manager.apply_to_arg_kwargs(args, kwargs, func=unwrapped)
            return unwrapped(*args, **kwargs)
        # Record the full decorator stack so further decorators can unwrap us.
        newfunc._climetlab_decorators = decorators
        return newfunc
# Positional options accepted by each shorthand "type(arg, ...)" spec that
# normalize(values=...) understands: e.g. values="date(%Y)" maps "%Y" onto
# the "format" keyword of the "date" type.
OPTIONS = {
    "date": ("format",),
    "date-list": ("format",),
    "bounding-box": ("format",),
    "bbox": ("format",),
    "variable": ("convention",),
    "variable-list": ("convention",),
}
class normalize(Decorator):
    """
    Decorator that normalizes one named argument of the decorated function.

    Parameters
    ----------
    name : str or None
        Name of the argument to normalize.
    values : str or sequence, optional
        Either an explicit collection of allowed values, or a string
        shorthand naming a type, optionally with positional options,
        e.g. ``"date(%Y)"`` — the options map onto OPTIONS[type].
    **kwargs
        Extra normalization options.  If ``aliases`` is a relative
        .json/.yaml/.yml path it is resolved against the caller's
        directory and loaded eagerly.
    """
    def __init__(
        self,
        name,
        values=None,
        **kwargs,
    ):
        assert name is None or isinstance(name, str)
        self.name = name
        if isinstance(values, str):
            assert (
                kwargs.get("type") is None
            ), f"Cannot mix values={values} and type={kwargs.get('type')}"
            # Parse a "type(opt1,opt2)" shorthand.  NOTE: local renamed from
            # `type` to avoid shadowing the builtin.
            if "(" in values:
                m = re.match(r"(.+)\((.+)\)", values)
                values_type = m.group(1)
                type_args = m.group(2).split(",")
            else:
                values_type = values
                type_args = []
            # len(type_args) <= len(OPTIONS[values_type]); zip over an empty
            # list is a no-op, so no guard is needed.
            for option, value in zip(OPTIONS[values_type], type_args):
                kwargs[option] = value
            kwargs["type"] = values_type
        else:
            kwargs["values"] = values
        if "aliases" in kwargs and isinstance(kwargs["aliases"], str):
            _, ext = os.path.splitext(kwargs["aliases"])
            if ext in (".json", ".yaml", ".yml"):
                path = kwargs["aliases"]
                if not os.path.isabs(path):
                    # Resolve relative alias files against the directory of
                    # the module that applied the decorator (stack frame 1).
                    caller = os.path.dirname(inspect.stack()[1].filename)
                    path = os.path.join(caller, path)
                kwargs["aliases"] = load_json_or_yaml(path)
        self.kwargs = kwargs
    def register(self, manager):
        """Register this normalization with an InputManager."""
        manager.register_normalize(self)
class availability(Decorator):
    """Decorator attaching an Availability constraint (loaded from a file
    path or given directly) to the decorated function."""
    is_availability = True
    def __init__(self, availability, **kwargs):
        # A relative path is resolved against the *calling* module's
        # directory: stack frame 1 is whoever instantiated the decorator,
        # so this resolution must stay inline in __init__.
        if isinstance(availability, str) and not os.path.isabs(availability):
            caller_dir = os.path.dirname(inspect.stack()[1].filename)
            availability = os.path.join(caller_dir, availability)
        self.availability = Availability(availability, **kwargs)
    def register(self, manager):
        """Register this availability constraint with an InputManager."""
        manager.register_availability(self)
|
[
"os.path.isabs",
"inspect.stack",
"os.path.join",
"climetlab.utils.availability.Availability",
"climetlab.arguments.InputManager",
"threading.RLock",
"re.match",
"os.path.splitext",
"functools.wraps",
"climetlab.utils.load_json_or_yaml",
"logging.getLogger"
] |
[((582, 609), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (599, 609), False, 'import logging\n'), ((930, 947), 'threading.RLock', 'threading.RLock', ([], {}), '()\n', (945, 947), False, 'import threading\n'), ((638, 649), 'functools.wraps', 'wraps', (['func'], {}), '(func)\n', (643, 649), False, 'from functools import wraps\n'), ((973, 984), 'functools.wraps', 'wraps', (['func'], {}), '(func)\n', (978, 984), False, 'from functools import wraps\n'), ((1710, 1745), 'climetlab.arguments.InputManager', 'InputManager', ([], {'decorators': 'decorators'}), '(decorators=decorators)\n', (1722, 1745), False, 'from climetlab.arguments import InputManager\n'), ((1756, 1772), 'functools.wraps', 'wraps', (['unwrapped'], {}), '(unwrapped)\n', (1761, 1772), False, 'from functools import wraps\n'), ((4002, 4038), 'climetlab.utils.availability.Availability', 'Availability', (['availability'], {}), '(availability, **kwargs)\n', (4014, 4038), False, 'from climetlab.utils.availability import Availability\n'), ((1285, 1316), 'climetlab.arguments.InputManager', 'InputManager', ([], {'decorators': '[self]'}), '(decorators=[self])\n', (1297, 1316), False, 'from climetlab.arguments import InputManager\n'), ((3174, 3209), 'os.path.splitext', 'os.path.splitext', (["kwargs['aliases']"], {}), "(kwargs['aliases'])\n", (3190, 3209), False, 'import os\n'), ((2644, 2678), 're.match', 're.match', (['"""(.+)\\\\((.+)\\\\)"""', 'values'], {}), "('(.+)\\\\((.+)\\\\)', values)\n", (2652, 2678), False, 'import re\n'), ((3509, 3532), 'climetlab.utils.load_json_or_yaml', 'load_json_or_yaml', (['path'], {}), '(path)\n', (3526, 3532), False, 'from climetlab.utils import load_json_or_yaml\n'), ((3808, 3835), 'os.path.isabs', 'os.path.isabs', (['availability'], {}), '(availability)\n', (3821, 3835), False, 'import os\n'), ((3938, 3972), 'os.path.join', 'os.path.join', (['caller', 'availability'], {}), '(caller, availability)\n', (3950, 3972), False, 'import os\n'), ((3324, 
3343), 'os.path.isabs', 'os.path.isabs', (['path'], {}), '(path)\n', (3337, 3343), False, 'import os\n'), ((3446, 3472), 'os.path.join', 'os.path.join', (['caller', 'path'], {}), '(caller, path)\n', (3458, 3472), False, 'import os\n'), ((3878, 3893), 'inspect.stack', 'inspect.stack', ([], {}), '()\n', (3891, 3893), False, 'import inspect\n'), ((3390, 3405), 'inspect.stack', 'inspect.stack', ([], {}), '()\n', (3403, 3405), False, 'import inspect\n')]
|
import json
from flask import request
from simplyrestful.resources import Resource
from simplyrestful.exceptions import Conflict
from serializers import ProcessSerializer
from settings import PROCESS_SPECIFICATION_FIELD
class ProcessResource(Resource):
    """REST resource for creating/updating processes; the process
    specification arrives as a JSON string inside a multipart form field."""
    endpoint = 'processes'
    serializer = ProcessSerializer
    @staticmethod
    def _specification():
        # Deserialize the process specification carried in the form field.
        return json.loads(request.form.get(PROCESS_SPECIFICATION_FIELD))
    def post(self):
        self._validate_multipart()
        return self._serializer.create(self._specification()), 201
    def put(self, id):
        self._validate_multipart()
        return self._serializer.update(id, self._specification())
    @staticmethod
    def _validate_multipart():
        if not request.form:
            raise Conflict('This endpoint only accepts multipart/form-data')
|
[
"flask.request.form.get",
"simplyrestful.exceptions.Conflict"
] |
[((734, 792), 'simplyrestful.exceptions.Conflict', 'Conflict', (['"""This endpoint only accepts multipart/form-data"""'], {}), "('This endpoint only accepts multipart/form-data')\n", (742, 792), False, 'from simplyrestful.exceptions import Conflict\n'), ((589, 634), 'flask.request.form.get', 'request.form.get', (['PROCESS_SPECIFICATION_FIELD'], {}), '(PROCESS_SPECIFICATION_FIELD)\n', (605, 634), False, 'from flask import request\n'), ((423, 468), 'flask.request.form.get', 'request.form.get', (['PROCESS_SPECIFICATION_FIELD'], {}), '(PROCESS_SPECIFICATION_FIELD)\n', (439, 468), False, 'from flask import request\n')]
|
from django.db import models
from django.utils import timezone
from django.urls import reverse
# Create your models here.
class LogInfo(models.Model):
    """Log-analysis summary for one application run, keyed by AIMS id:
    identification fields, prediction/model metadata, per-pattern counts
    and ratios, and publication timestamps."""
    # --- identification ---
    aims_id = models.CharField(max_length=100)
    host_id = models.CharField(max_length=100)
    app_id = models.CharField(max_length=100)
    app_name = models.CharField(max_length=100)
    system_status = models.CharField(max_length=100)
    log_agent_name = models.CharField(max_length=100)
    description = models.TextField(blank=True, null=True)
    # NOTE(review): TextField does not enforce max_length at the DB level;
    # a CharField(max_length=1) flag may have been intended — confirm.
    use_yn = models.TextField(blank=True, default="Y", max_length=1)
    # --- prediction metadata ---
    prediction_qual = models.CharField(blank=True, null=True, max_length=100)
    prediction_model = models.CharField(blank=True, null=True, max_length=100)
    prediction_model_version = models.CharField(blank=True, null=True, max_length=100)
    # --- per-pattern counts/ratios (stored as strings) ---
    ptn001_cnt = models.CharField(blank=True, null=True, max_length=100)
    ptn001_ratio = models.CharField(blank=True, null=True, max_length=100)
    ptn002_cnt = models.CharField(blank=True, null=True, max_length=100)
    ptn002_ratio = models.CharField(blank=True, null=True, max_length=100)
    ptn003_cnt = models.CharField(blank=True, null=True, max_length=100)
    ptn003_ratio = models.CharField(blank=True, null=True, max_length=100)
    ptn004_cnt = models.CharField(blank=True, null=True, max_length=100)
    # --- timestamps ---
    created_date = models.DateTimeField(default=timezone.now)
    published_date = models.DateTimeField(blank=True, null=True)
    def publish(self):
        """Stamp the record as published now and persist it."""
        self.published_date = timezone.now()
        self.save()
    def __str__(self):
        return self.aims_id
    def get_absolute_url(self):
        """URL of this record's detail view, keyed by aims_id."""
        return reverse("loginfo:detail", kwargs={"aims_id": self.aims_id})
class HostInfo(models.Model):
    """Registered host (machine) metadata referenced by log records."""
    host_id = models.CharField(max_length=100)
    host_name = models.CharField(max_length=100)
    host_ip = models.CharField(max_length=100)
    host_desc = models.CharField(max_length=100)
    use_yn = models.CharField(max_length=100)  # active flag, presumably "Y"/"N" — confirm
    created_date = models.DateTimeField(default=timezone.now)
    updated_date = models.DateTimeField(blank=True, null=True)
class AppInfo(models.Model):
    """Registered application metadata referenced by log records."""
    app_id = models.CharField(max_length=100)
    app_name = models.CharField(max_length=100)
    app_desc = models.CharField(max_length=100)
    use_yn = models.CharField(max_length=100)  # active flag, presumably "Y"/"N" — confirm
    created_date = models.DateTimeField(default=timezone.now)
    updated_date = models.DateTimeField(blank=True, null=True)
class MonthlyTranInfo(models.Model):
    """Per-month transaction statistics, one group of four string-typed
    measures (count, error count, ratio, error ratio) for each of up to
    five weeks (w1..w5) of the month."""
    log_mid = models.CharField(max_length=100)  # identifier linking to the log record — confirm target
    # week 1
    tran_w1_cnt = models.CharField(blank=True, null=True, max_length=100)
    tran_w1_errcnt = models.CharField(blank=True, null=True, max_length=100)
    tran_w1_ratio = models.CharField(blank=True, null=True, max_length=100)
    tran_w1_errratio = models.CharField(blank=True, null=True, max_length=100)
    # week 2
    tran_w2_cnt = models.CharField(blank=True, null=True, max_length=100)
    tran_w2_errcnt = models.CharField(blank=True, null=True, max_length=100)
    tran_w2_ratio = models.CharField(blank=True, null=True, max_length=100)
    tran_w2_errratio = models.CharField(blank=True, null=True, max_length=100)
    # week 3
    tran_w3_cnt = models.CharField(blank=True, null=True, max_length=100)
    tran_w3_errcnt = models.CharField(blank=True, null=True, max_length=100)
    tran_w3_ratio = models.CharField(blank=True, null=True, max_length=100)
    tran_w3_errratio = models.CharField(blank=True, null=True, max_length=100)
    # week 4
    tran_w4_cnt = models.CharField(blank=True, null=True, max_length=100)
    tran_w4_errcnt = models.CharField(blank=True, null=True, max_length=100)
    tran_w4_ratio = models.CharField(blank=True, null=True, max_length=100)
    tran_w4_errratio = models.CharField(blank=True, null=True, max_length=100)
    # week 5
    tran_w5_cnt = models.CharField(blank=True, null=True, max_length=100)
    tran_w5_errcnt = models.CharField(blank=True, null=True, max_length=100)
    tran_w5_ratio = models.CharField(blank=True, null=True, max_length=100)
    tran_w5_errratio = models.CharField(blank=True, null=True, max_length=100)
    use_yn = models.CharField(max_length=100)  # active flag, presumably "Y"/"N" — confirm
    created_date = models.DateTimeField(default=timezone.now)
    updated_date = models.DateTimeField(blank=True, null=True)
class WeeklyTranInfo(models.Model):
    """Per-week transaction statistics, one group of four string-typed
    measures (count, error count, ratio, error ratio) for each of the
    seven days (d1..d7) of the week."""
    log_mid = models.CharField(max_length=100)  # identifier linking to the log record — confirm target
    # day 1
    tran_d1_cnt = models.CharField(blank=True, null=True, max_length=100)
    tran_d1_errcnt = models.CharField(blank=True, null=True, max_length=100)
    tran_d1_ratio = models.CharField(blank=True, null=True, max_length=100)
    tran_d1_errratio = models.CharField(blank=True, null=True, max_length=100)
    # day 2
    tran_d2_cnt = models.CharField(blank=True, null=True, max_length=100)
    tran_d2_errcnt = models.CharField(blank=True, null=True, max_length=100)
    tran_d2_ratio = models.CharField(blank=True, null=True, max_length=100)
    tran_d2_errratio = models.CharField(blank=True, null=True, max_length=100)
    # day 3
    tran_d3_cnt = models.CharField(blank=True, null=True, max_length=100)
    tran_d3_errcnt = models.CharField(blank=True, null=True, max_length=100)
    tran_d3_ratio = models.CharField(blank=True, null=True, max_length=100)
    tran_d3_errratio = models.CharField(blank=True, null=True, max_length=100)
    # day 4
    tran_d4_cnt = models.CharField(blank=True, null=True, max_length=100)
    tran_d4_errcnt = models.CharField(blank=True, null=True, max_length=100)
    tran_d4_ratio = models.CharField(blank=True, null=True, max_length=100)
    tran_d4_errratio = models.CharField(blank=True, null=True, max_length=100)
    # day 5
    tran_d5_cnt = models.CharField(blank=True, null=True, max_length=100)
    tran_d5_errcnt = models.CharField(blank=True, null=True, max_length=100)
    tran_d5_ratio = models.CharField(blank=True, null=True, max_length=100)
    tran_d5_errratio = models.CharField(blank=True, null=True, max_length=100)
    # day 6
    tran_d6_cnt = models.CharField(blank=True, null=True, max_length=100)
    tran_d6_errcnt = models.CharField(blank=True, null=True, max_length=100)
    tran_d6_ratio = models.CharField(blank=True, null=True, max_length=100)
    tran_d6_errratio = models.CharField(blank=True, null=True, max_length=100)
    # day 7
    tran_d7_cnt = models.CharField(blank=True, null=True, max_length=100)
    tran_d7_errcnt = models.CharField(blank=True, null=True, max_length=100)
    tran_d7_ratio = models.CharField(blank=True, null=True, max_length=100)
    tran_d7_errratio = models.CharField(blank=True, null=True, max_length=100)
    use_yn = models.CharField(max_length=100)  # active flag, presumably "Y"/"N" — confirm
    created_date = models.DateTimeField(default=timezone.now)
    updated_date = models.DateTimeField(blank=True, null=True)
|
[
"django.db.models.TextField",
"django.db.models.CharField",
"django.utils.timezone.now",
"django.urls.reverse",
"django.db.models.DateTimeField"
] |
[((168, 200), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)'}), '(max_length=100)\n', (184, 200), False, 'from django.db import models\n'), ((215, 247), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)'}), '(max_length=100)\n', (231, 247), False, 'from django.db import models\n'), ((261, 293), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)'}), '(max_length=100)\n', (277, 293), False, 'from django.db import models\n'), ((309, 341), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)'}), '(max_length=100)\n', (325, 341), False, 'from django.db import models\n'), ((362, 394), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)'}), '(max_length=100)\n', (378, 394), False, 'from django.db import models\n'), ((416, 448), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)'}), '(max_length=100)\n', (432, 448), False, 'from django.db import models\n'), ((467, 506), 'django.db.models.TextField', 'models.TextField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (483, 506), False, 'from django.db import models\n'), ((520, 575), 'django.db.models.TextField', 'models.TextField', ([], {'blank': '(True)', 'default': '"""Y"""', 'max_length': '(1)'}), "(blank=True, default='Y', max_length=1)\n", (536, 575), False, 'from django.db import models\n'), ((598, 653), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'null': '(True)', 'max_length': '(100)'}), '(blank=True, null=True, max_length=100)\n', (614, 653), False, 'from django.db import models\n'), ((677, 732), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'null': '(True)', 'max_length': '(100)'}), '(blank=True, null=True, max_length=100)\n', (693, 732), False, 'from django.db import models\n'), ((764, 819), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'null': 
'(True)', 'max_length': '(100)'}), '(blank=True, null=True, max_length=100)\n', (780, 819), False, 'from django.db import models\n'), ((837, 892), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'null': '(True)', 'max_length': '(100)'}), '(blank=True, null=True, max_length=100)\n', (853, 892), False, 'from django.db import models\n'), ((912, 967), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'null': '(True)', 'max_length': '(100)'}), '(blank=True, null=True, max_length=100)\n', (928, 967), False, 'from django.db import models\n'), ((985, 1040), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'null': '(True)', 'max_length': '(100)'}), '(blank=True, null=True, max_length=100)\n', (1001, 1040), False, 'from django.db import models\n'), ((1060, 1115), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'null': '(True)', 'max_length': '(100)'}), '(blank=True, null=True, max_length=100)\n', (1076, 1115), False, 'from django.db import models\n'), ((1133, 1188), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'null': '(True)', 'max_length': '(100)'}), '(blank=True, null=True, max_length=100)\n', (1149, 1188), False, 'from django.db import models\n'), ((1208, 1263), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'null': '(True)', 'max_length': '(100)'}), '(blank=True, null=True, max_length=100)\n', (1224, 1263), False, 'from django.db import models\n'), ((1281, 1336), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'null': '(True)', 'max_length': '(100)'}), '(blank=True, null=True, max_length=100)\n', (1297, 1336), False, 'from django.db import models\n'), ((1356, 1398), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'default': 'timezone.now'}), '(default=timezone.now)\n', (1376, 1398), False, 'from django.db import models\n'), ((1420, 1463), 
'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (1440, 1463), False, 'from django.db import models\n'), ((1761, 1793), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)'}), '(max_length=100)\n', (1777, 1793), False, 'from django.db import models\n'), ((1810, 1842), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)'}), '(max_length=100)\n', (1826, 1842), False, 'from django.db import models\n'), ((1857, 1889), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)'}), '(max_length=100)\n', (1873, 1889), False, 'from django.db import models\n'), ((1906, 1938), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)'}), '(max_length=100)\n', (1922, 1938), False, 'from django.db import models\n'), ((1952, 1984), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)'}), '(max_length=100)\n', (1968, 1984), False, 'from django.db import models\n'), ((2004, 2046), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'default': 'timezone.now'}), '(default=timezone.now)\n', (2024, 2046), False, 'from django.db import models\n'), ((2066, 2109), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (2086, 2109), False, 'from django.db import models\n'), ((2153, 2185), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)'}), '(max_length=100)\n', (2169, 2185), False, 'from django.db import models\n'), ((2201, 2233), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)'}), '(max_length=100)\n', (2217, 2233), False, 'from django.db import models\n'), ((2249, 2281), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)'}), '(max_length=100)\n', (2265, 2281), False, 'from django.db import models\n'), ((2295, 2327), 
'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)'}), '(max_length=100)\n', (2311, 2327), False, 'from django.db import models\n'), ((2347, 2389), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'default': 'timezone.now'}), '(default=timezone.now)\n', (2367, 2389), False, 'from django.db import models\n'), ((2409, 2452), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (2429, 2452), False, 'from django.db import models\n'), ((2506, 2538), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)'}), '(max_length=100)\n', (2522, 2538), False, 'from django.db import models\n'), ((2557, 2612), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'null': '(True)', 'max_length': '(100)'}), '(blank=True, null=True, max_length=100)\n', (2573, 2612), False, 'from django.db import models\n'), ((2634, 2689), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'null': '(True)', 'max_length': '(100)'}), '(blank=True, null=True, max_length=100)\n', (2650, 2689), False, 'from django.db import models\n'), ((2710, 2765), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'null': '(True)', 'max_length': '(100)'}), '(blank=True, null=True, max_length=100)\n', (2726, 2765), False, 'from django.db import models\n'), ((2789, 2844), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'null': '(True)', 'max_length': '(100)'}), '(blank=True, null=True, max_length=100)\n', (2805, 2844), False, 'from django.db import models\n'), ((2863, 2918), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'null': '(True)', 'max_length': '(100)'}), '(blank=True, null=True, max_length=100)\n', (2879, 2918), False, 'from django.db import models\n'), ((2940, 2995), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'null': 
'(True)', 'max_length': '(100)'}), '(blank=True, null=True, max_length=100)\n', (2956, 2995), False, 'from django.db import models\n'), ((3016, 3071), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'null': '(True)', 'max_length': '(100)'}), '(blank=True, null=True, max_length=100)\n', (3032, 3071), False, 'from django.db import models\n'), ((3095, 3150), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'null': '(True)', 'max_length': '(100)'}), '(blank=True, null=True, max_length=100)\n', (3111, 3150), False, 'from django.db import models\n'), ((3169, 3224), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'null': '(True)', 'max_length': '(100)'}), '(blank=True, null=True, max_length=100)\n', (3185, 3224), False, 'from django.db import models\n'), ((3246, 3301), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'null': '(True)', 'max_length': '(100)'}), '(blank=True, null=True, max_length=100)\n', (3262, 3301), False, 'from django.db import models\n'), ((3322, 3377), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'null': '(True)', 'max_length': '(100)'}), '(blank=True, null=True, max_length=100)\n', (3338, 3377), False, 'from django.db import models\n'), ((3401, 3456), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'null': '(True)', 'max_length': '(100)'}), '(blank=True, null=True, max_length=100)\n', (3417, 3456), False, 'from django.db import models\n'), ((3475, 3530), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'null': '(True)', 'max_length': '(100)'}), '(blank=True, null=True, max_length=100)\n', (3491, 3530), False, 'from django.db import models\n'), ((3552, 3607), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'null': '(True)', 'max_length': '(100)'}), '(blank=True, null=True, max_length=100)\n', (3568, 3607), False, 'from django.db import 
models\n'), ((3628, 3683), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'null': '(True)', 'max_length': '(100)'}), '(blank=True, null=True, max_length=100)\n', (3644, 3683), False, 'from django.db import models\n'), ((3707, 3762), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'null': '(True)', 'max_length': '(100)'}), '(blank=True, null=True, max_length=100)\n', (3723, 3762), False, 'from django.db import models\n'), ((3781, 3836), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'null': '(True)', 'max_length': '(100)'}), '(blank=True, null=True, max_length=100)\n', (3797, 3836), False, 'from django.db import models\n'), ((3858, 3913), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'null': '(True)', 'max_length': '(100)'}), '(blank=True, null=True, max_length=100)\n', (3874, 3913), False, 'from django.db import models\n'), ((3934, 3989), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'null': '(True)', 'max_length': '(100)'}), '(blank=True, null=True, max_length=100)\n', (3950, 3989), False, 'from django.db import models\n'), ((4013, 4068), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'null': '(True)', 'max_length': '(100)'}), '(blank=True, null=True, max_length=100)\n', (4029, 4068), False, 'from django.db import models\n'), ((4082, 4114), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)'}), '(max_length=100)\n', (4098, 4114), False, 'from django.db import models\n'), ((4134, 4176), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'default': 'timezone.now'}), '(default=timezone.now)\n', (4154, 4176), False, 'from django.db import models\n'), ((4196, 4239), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (4216, 4239), False, 'from django.db import models\n'), ((4292, 4324), 
'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)'}), '(max_length=100)\n', (4308, 4324), False, 'from django.db import models\n'), ((4343, 4398), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'null': '(True)', 'max_length': '(100)'}), '(blank=True, null=True, max_length=100)\n', (4359, 4398), False, 'from django.db import models\n'), ((4420, 4475), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'null': '(True)', 'max_length': '(100)'}), '(blank=True, null=True, max_length=100)\n', (4436, 4475), False, 'from django.db import models\n'), ((4496, 4551), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'null': '(True)', 'max_length': '(100)'}), '(blank=True, null=True, max_length=100)\n', (4512, 4551), False, 'from django.db import models\n'), ((4575, 4630), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'null': '(True)', 'max_length': '(100)'}), '(blank=True, null=True, max_length=100)\n', (4591, 4630), False, 'from django.db import models\n'), ((4649, 4704), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'null': '(True)', 'max_length': '(100)'}), '(blank=True, null=True, max_length=100)\n', (4665, 4704), False, 'from django.db import models\n'), ((4726, 4781), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'null': '(True)', 'max_length': '(100)'}), '(blank=True, null=True, max_length=100)\n', (4742, 4781), False, 'from django.db import models\n'), ((4802, 4857), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'null': '(True)', 'max_length': '(100)'}), '(blank=True, null=True, max_length=100)\n', (4818, 4857), False, 'from django.db import models\n'), ((4881, 4936), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'null': '(True)', 'max_length': '(100)'}), '(blank=True, null=True, max_length=100)\n', (4897, 4936), False, 
'from django.db import models\n'), ((4955, 5010), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'null': '(True)', 'max_length': '(100)'}), '(blank=True, null=True, max_length=100)\n', (4971, 5010), False, 'from django.db import models\n'), ((5032, 5087), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'null': '(True)', 'max_length': '(100)'}), '(blank=True, null=True, max_length=100)\n', (5048, 5087), False, 'from django.db import models\n'), ((5108, 5163), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'null': '(True)', 'max_length': '(100)'}), '(blank=True, null=True, max_length=100)\n', (5124, 5163), False, 'from django.db import models\n'), ((5187, 5242), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'null': '(True)', 'max_length': '(100)'}), '(blank=True, null=True, max_length=100)\n', (5203, 5242), False, 'from django.db import models\n'), ((5261, 5316), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'null': '(True)', 'max_length': '(100)'}), '(blank=True, null=True, max_length=100)\n', (5277, 5316), False, 'from django.db import models\n'), ((5338, 5393), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'null': '(True)', 'max_length': '(100)'}), '(blank=True, null=True, max_length=100)\n', (5354, 5393), False, 'from django.db import models\n'), ((5414, 5469), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'null': '(True)', 'max_length': '(100)'}), '(blank=True, null=True, max_length=100)\n', (5430, 5469), False, 'from django.db import models\n'), ((5493, 5548), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'null': '(True)', 'max_length': '(100)'}), '(blank=True, null=True, max_length=100)\n', (5509, 5548), False, 'from django.db import models\n'), ((5567, 5622), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 
'null': '(True)', 'max_length': '(100)'}), '(blank=True, null=True, max_length=100)\n', (5583, 5622), False, 'from django.db import models\n'), ((5644, 5699), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'null': '(True)', 'max_length': '(100)'}), '(blank=True, null=True, max_length=100)\n', (5660, 5699), False, 'from django.db import models\n'), ((5720, 5775), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'null': '(True)', 'max_length': '(100)'}), '(blank=True, null=True, max_length=100)\n', (5736, 5775), False, 'from django.db import models\n'), ((5799, 5854), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'null': '(True)', 'max_length': '(100)'}), '(blank=True, null=True, max_length=100)\n', (5815, 5854), False, 'from django.db import models\n'), ((5873, 5928), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'null': '(True)', 'max_length': '(100)'}), '(blank=True, null=True, max_length=100)\n', (5889, 5928), False, 'from django.db import models\n'), ((5950, 6005), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'null': '(True)', 'max_length': '(100)'}), '(blank=True, null=True, max_length=100)\n', (5966, 6005), False, 'from django.db import models\n'), ((6026, 6081), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'null': '(True)', 'max_length': '(100)'}), '(blank=True, null=True, max_length=100)\n', (6042, 6081), False, 'from django.db import models\n'), ((6105, 6160), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'null': '(True)', 'max_length': '(100)'}), '(blank=True, null=True, max_length=100)\n', (6121, 6160), False, 'from django.db import models\n'), ((6179, 6234), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'null': '(True)', 'max_length': '(100)'}), '(blank=True, null=True, max_length=100)\n', (6195, 6234), False, 'from django.db 
import models\n'), ((6256, 6311), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'null': '(True)', 'max_length': '(100)'}), '(blank=True, null=True, max_length=100)\n', (6272, 6311), False, 'from django.db import models\n'), ((6332, 6387), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'null': '(True)', 'max_length': '(100)'}), '(blank=True, null=True, max_length=100)\n', (6348, 6387), False, 'from django.db import models\n'), ((6411, 6466), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'null': '(True)', 'max_length': '(100)'}), '(blank=True, null=True, max_length=100)\n', (6427, 6466), False, 'from django.db import models\n'), ((6480, 6512), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)'}), '(max_length=100)\n', (6496, 6512), False, 'from django.db import models\n'), ((6532, 6574), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'default': 'timezone.now'}), '(default=timezone.now)\n', (6552, 6574), False, 'from django.db import models\n'), ((6594, 6637), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (6614, 6637), False, 'from django.db import models\n'), ((1518, 1532), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (1530, 1532), False, 'from django.utils import timezone\n'), ((1654, 1713), 'django.urls.reverse', 'reverse', (['"""loginfo:detail"""'], {'kwargs': "{'aims_id': self.aims_id}"}), "('loginfo:detail', kwargs={'aims_id': self.aims_id})\n", (1661, 1713), False, 'from django.urls import reverse\n')]
|
import argparse
import json
import os
import django
import logging
# Canvas account id for the FSW faculty (not referenced below in this script).
FSW_ACCOUNT = 18
CANVAS_URL = "https://canvas.vu.nl"
# Django must be configured and initialized *before* the project-model imports
# further down; do not reorder these statements.
os.environ['DJANGO_SETTINGS_MODULE'] = 'dejavu.settings'
django.setup()
logging.basicConfig(level=logging.INFO, format='[%(asctime)s %(name)-12s %(levelname)-5s] %(message)s')
# Imported after django.setup() on purpose; importing models earlier would
# fail because the app registry is not ready yet.
import canvasapi
from dejaviewer.models import Course, CourseField
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument('apikey')
args = parser.parse_args()
# Fetch the Canvas course linked to the local Course record "S_D1", including
# its syllabus body, and store that body as the course's "description" field.
canvas = canvasapi.Canvas(CANVAS_URL, args.apikey)
c = Course.objects.get(code="S_D1")
course = canvas.get_course(c.canvas_course, include=["syllabus_body"])
# NOTE(review): `f` is fetched but never used afterwards — presumably a sanity
# check that the 'description' field exists; confirm before removing.
f = CourseField.objects.get(field='description')
c.set_field("description", "canvas syllabus", course.syllabus_body)
c.save()
|
[
"django.setup",
"dejaviewer.models.Course.objects.get",
"argparse.ArgumentParser",
"logging.basicConfig",
"dejaviewer.models.CourseField.objects.get",
"canvasapi.Canvas"
] |
[((181, 195), 'django.setup', 'django.setup', ([], {}), '()\n', (193, 195), False, 'import django\n'), ((196, 304), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.INFO', 'format': '"""[%(asctime)s %(name)-12s %(levelname)-5s] %(message)s"""'}), "(level=logging.INFO, format=\n '[%(asctime)s %(name)-12s %(levelname)-5s] %(message)s')\n", (215, 304), False, 'import logging\n'), ((378, 422), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '__doc__'}), '(description=__doc__)\n', (401, 422), False, 'import argparse\n'), ((491, 532), 'canvasapi.Canvas', 'canvasapi.Canvas', (['CANVAS_URL', 'args.apikey'], {}), '(CANVAS_URL, args.apikey)\n', (507, 532), False, 'import canvasapi\n'), ((538, 569), 'dejaviewer.models.Course.objects.get', 'Course.objects.get', ([], {'code': '"""S_D1"""'}), "(code='S_D1')\n", (556, 569), False, 'from dejaviewer.models import Course, CourseField\n'), ((645, 689), 'dejaviewer.models.CourseField.objects.get', 'CourseField.objects.get', ([], {'field': '"""description"""'}), "(field='description')\n", (668, 689), False, 'from dejaviewer.models import Course, CourseField\n')]
|
from src.utils.readConfig import getGeneralConfig
from discord import Guild, TextChannel
from typing import List
# Loaded once at import time; fetchAnnouncementChannel below reads its
# 'announcement' section.
generalConfig = getGeneralConfig()
def fetchAnnouncementChannel(guild: Guild):
    """Look up the guild's configured announcement channel.

    Returns the first text channel whose name matches the configured
    ``announcement.channel`` entry, or ``None`` when no channel name is
    configured or no matching channel exists in the guild.
    """
    announcement = generalConfig['announcement']
    if 'channel' not in announcement:
        return None
    wanted = announcement['channel']
    channels: List[TextChannel] = guild.text_channels
    return next((ch for ch in channels if ch.name == wanted), None)
|
[
"src.utils.readConfig.getGeneralConfig"
] |
[((130, 148), 'src.utils.readConfig.getGeneralConfig', 'getGeneralConfig', ([], {}), '()\n', (146, 148), False, 'from src.utils.readConfig import getGeneralConfig\n')]
|
import torch as ch
import utils
import numpy as np
from tqdm import tqdm
# Script: run a pretrained model over a dataset, collect the latent
# (penultimate-layer) representation of every sample, then save the
# per-dimension mean and std of those representations to disk.
if __name__ == "__main__":
    import sys
    # Positional CLI args: architecture, model type, output prefix, dataset.
    model_arch = sys.argv[1]
    model_type = sys.argv[2]
    prefix = sys.argv[3]
    dataset = sys.argv[4]
    if dataset == 'cifar10':
        dx = utils.CIFAR10()
    elif dataset == 'imagenet':
        dx = utils.ImageNet1000()
    else:
        raise ValueError("Dataset not supported")
    ds = dx.get_dataset()
    model = dx.get_model(model_type, model_arch)
    batch_size = 128
    all_reps = []
    train_loader = None
    # CIFAR-10: use both train and validation splits; otherwise only val.
    if dataset == 'cifar10':
        train_loader, val_loader = ds.make_loaders(batch_size=batch_size, workers=8)
    else:
        _, val_loader = ds.make_loaders(batch_size=batch_size, workers=8, only_val=True)
    def get_reps(data_loader):
        # Accumulates latent reps into the enclosing `all_reps` list.
        for (im, label) in tqdm(data_loader):
            with ch.no_grad():
                # with_latent=True: model returns (output, latent_rep) pairs.
                (_, rep), _ = model(im, with_latent=True)
                all_reps.append(rep.cpu())
    if train_loader:
        get_reps(train_loader)
    get_reps(val_loader)
    all_reps = ch.cat(all_reps)
    ch_mean = ch.mean(all_reps, dim=0)
    ch_std = ch.std(all_reps, dim=0)
    # Dump mean, std vectors for later use:
    np_mean = ch_mean.cpu().numpy()
    np_std = ch_std.cpu().numpy()
    np.save(prefix + "feature_mean", np_mean)
    np.save(prefix + "feature_std", np_std)
|
[
"torch.mean",
"tqdm.tqdm",
"numpy.save",
"utils.ImageNet1000",
"utils.CIFAR10",
"torch.cat",
"torch.std",
"torch.no_grad"
] |
[((952, 968), 'torch.cat', 'ch.cat', (['all_reps'], {}), '(all_reps)\n', (958, 968), True, 'import torch as ch\n'), ((981, 1005), 'torch.mean', 'ch.mean', (['all_reps'], {'dim': '(0)'}), '(all_reps, dim=0)\n', (988, 1005), True, 'import torch as ch\n'), ((1018, 1041), 'torch.std', 'ch.std', (['all_reps'], {'dim': '(0)'}), '(all_reps, dim=0)\n', (1024, 1041), True, 'import torch as ch\n'), ((1150, 1191), 'numpy.save', 'np.save', (["(prefix + 'feature_mean')", 'np_mean'], {}), "(prefix + 'feature_mean', np_mean)\n", (1157, 1191), True, 'import numpy as np\n'), ((1193, 1232), 'numpy.save', 'np.save', (["(prefix + 'feature_std')", 'np_std'], {}), "(prefix + 'feature_std', np_std)\n", (1200, 1232), True, 'import numpy as np\n'), ((261, 276), 'utils.CIFAR10', 'utils.CIFAR10', ([], {}), '()\n', (274, 276), False, 'import utils\n'), ((755, 772), 'tqdm.tqdm', 'tqdm', (['data_loader'], {}), '(data_loader)\n', (759, 772), False, 'from tqdm import tqdm\n'), ((313, 333), 'utils.ImageNet1000', 'utils.ImageNet1000', ([], {}), '()\n', (331, 333), False, 'import utils\n'), ((782, 794), 'torch.no_grad', 'ch.no_grad', ([], {}), '()\n', (792, 794), True, 'import torch as ch\n')]
|
#!/usr/bin/python
# Copyright (c) 2017, 2020 Oracle and/or its affiliates.
# This software is made available to you under the terms of the GPL 3.0 license or the Apache 2.0 license.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# Apache License v2.0
# See LICENSE.TXT for details.
# GENERATED FILE - DO NOT EDIT - MANUAL CHANGES WILL BE OVERWRITTEN
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {
"metadata_version": "1.1",
"status": ["preview"],
"supported_by": "community",
}
DOCUMENTATION = """
---
module: oci_identity_smtp_credential
short_description: Manage a SmtpCredential resource in Oracle Cloud Infrastructure
description:
- This module allows the user to create, update and delete a SmtpCredential resource in Oracle Cloud Infrastructure
- "For I(state=present), creates a new SMTP credential for the specified user. An SMTP credential has an SMTP user name and an SMTP password.
You must specify a *description* for the SMTP credential (although it can be an empty string). It does not
have to be unique, and you can change it anytime with
L(UpdateSmtpCredential,https://docs.cloud.oracle.com/en-us/iaas/api/#/en/identity/20160918/SmtpCredentialSummary/UpdateSmtpCredential)."
version_added: "2.9"
author: Oracle (@oracle)
options:
description:
description:
- The description you assign to the SMTP credentials during creation. Does not have to be unique, and it's changeable.
- Required for create using I(state=present).
- This parameter is updatable.
type: str
user_id:
description:
- The OCID of the user.
type: str
required: true
smtp_credential_id:
description:
- The OCID of the SMTP credential.
- Required for update using I(state=present).
- Required for delete using I(state=absent).
type: str
aliases: ["id"]
state:
description:
- The state of the SmtpCredential.
- Use I(state=present) to create or update a SmtpCredential.
- Use I(state=absent) to delete a SmtpCredential.
type: str
required: false
default: 'present'
choices: ["present", "absent"]
extends_documentation_fragment: [ oracle.oci.oracle, oracle.oci.oracle_creatable_resource, oracle.oci.oracle_wait_options ]
"""
EXAMPLES = """
- name: Create smtp_credential
oci_identity_smtp_credential:
description: description_example
user_id: ocid1.user.oc1..xxxxxxEXAMPLExxxxxx
- name: Update smtp_credential
oci_identity_smtp_credential:
description: description_example
user_id: ocid1.user.oc1..xxxxxxEXAMPLExxxxxx
smtp_credential_id: ocid1.smtpcredential.oc1..xxxxxxEXAMPLExxxxxx
- name: Delete smtp_credential
oci_identity_smtp_credential:
user_id: ocid1.user.oc1..xxxxxxEXAMPLExxxxxx
smtp_credential_id: ocid1.smtpcredential.oc1..xxxxxxEXAMPLExxxxxx
state: absent
"""
RETURN = """
smtp_credential:
description:
- Details of the SmtpCredential resource acted upon by the current operation
returned: on success
type: complex
contains:
username:
description:
- The SMTP user name.
returned: on success
type: string
sample: username_example
id:
description:
- The OCID of the SMTP credential.
returned: on success
type: string
sample: ocid1.resource.oc1..xxxxxxEXAMPLExxxxxx
user_id:
description:
- The OCID of the user the SMTP credential belongs to.
returned: on success
type: string
sample: ocid1.user.oc1..xxxxxxEXAMPLExxxxxx
description:
description:
- The description you assign to the SMTP credential. Does not have to be unique, and it's changeable.
returned: on success
type: string
sample: description_example
time_created:
description:
- Date and time the `SmtpCredential` object was created, in the format defined by RFC3339.
- "Example: `2016-08-25T21:10:29.600Z`"
returned: on success
type: string
sample: 2016-08-25T21:10:29.600Z
time_expires:
description:
- Date and time when this credential will expire, in the format defined by RFC3339.
Null if it never expires.
- "Example: `2016-08-25T21:10:29.600Z`"
returned: on success
type: string
sample: 2016-08-25T21:10:29.600Z
lifecycle_state:
description:
- The credential's current state. After creating a SMTP credential, make sure its `lifecycleState` changes from
CREATING to ACTIVE before using it.
returned: on success
type: string
sample: CREATING
inactive_status:
description:
- The detailed status of INACTIVE lifecycleState.
returned: on success
type: int
sample: 56
sample: {
"username": "username_example",
"id": "ocid1.resource.oc1..xxxxxxEXAMPLExxxxxx",
"user_id": "ocid1.user.oc1..xxxxxxEXAMPLExxxxxx",
"description": "description_example",
"time_created": "2016-08-25T21:10:29.600Z",
"time_expires": "2016-08-25T21:10:29.600Z",
"lifecycle_state": "CREATING",
"inactive_status": 56
}
"""
from ansible.module_utils.basic import AnsibleModule
from ansible_collections.oracle.oci.plugins.module_utils import (
oci_common_utils,
oci_wait_utils,
)
from ansible_collections.oracle.oci.plugins.module_utils.oci_resource_utils import (
OCIResourceHelperBase,
get_custom_class,
)
# Import the OCI Python SDK defensively; when it is missing, main() fails the
# Ansible module with a clean error message instead of an ImportError.
try:
    from oci.identity import IdentityClient
    from oci.identity.models import CreateSmtpCredentialDetails
    from oci.identity.models import UpdateSmtpCredentialDetails
    HAS_OCI_PY_SDK = True
except ImportError:
    HAS_OCI_PY_SDK = False
class SmtpCredentialHelperGen(OCIResourceHelperBase):
    """Supported operations: create, update, list and delete"""

    def get_module_resource_id_param(self):
        """Name of the module parameter that carries the resource id."""
        return "smtp_credential_id"

    def get_module_resource_id(self):
        """Configured SMTP credential OCID, or None when not supplied."""
        return self.module.params.get("smtp_credential_id")

    def get_resource(self):
        """Locate this credential via the list call (no direct GET endpoint).

        Raises a does-not-exist service error when no listed credential
        matches the configured id.
        """
        wanted = self.get_module_resource_id()
        for candidate in self.list_resources():
            if candidate.id == wanted:
                return oci_common_utils.get_default_response_from_resource(candidate)
        oci_common_utils.raise_does_not_exist_service_error()

    def get_required_kwargs_for_list(self):
        """Kwargs the list call cannot work without."""
        return {"user_id": self.module.params["user_id"]}

    def get_optional_kwargs_for_list(self):
        """No optional list filters exist for SMTP credentials."""
        return {}

    def list_resources(self):
        """List all SMTP credentials belonging to the configured user."""
        list_kwargs = oci_common_utils.merge_dicts(
            self.get_required_kwargs_for_list(),
            self.get_optional_kwargs_for_list(),
        )
        return oci_common_utils.list_all_resources(
            self.client.list_smtp_credentials, **list_kwargs
        )

    def get_create_model_class(self):
        return CreateSmtpCredentialDetails

    def create_resource(self):
        """Create the credential and wait for its lifecycle state."""
        details = self.get_create_model()
        return oci_wait_utils.call_and_wait(
            call_fn=self.client.create_smtp_credential,
            call_fn_args=(),
            call_fn_kwargs={
                "create_smtp_credential_details": details,
                "user_id": self.module.params.get("user_id"),
            },
            waiter_type=oci_wait_utils.LIFECYCLE_STATE_WAITER_KEY,
            operation=oci_common_utils.CREATE_OPERATION_KEY,
            waiter_client=self.get_waiter_client(),
            resource_helper=self,
            wait_for_states=self.get_wait_for_states_for_operation(
                oci_common_utils.CREATE_OPERATION_KEY,
            ),
        )

    def get_update_model_class(self):
        return UpdateSmtpCredentialDetails

    def update_resource(self):
        """Update the credential; no lifecycle wait is used for updates."""
        details = self.get_update_model()
        return oci_wait_utils.call_and_wait(
            call_fn=self.client.update_smtp_credential,
            call_fn_args=(),
            call_fn_kwargs={
                "user_id": self.module.params.get("user_id"),
                "smtp_credential_id": self.module.params.get("smtp_credential_id"),
                "update_smtp_credential_details": details,
            },
            waiter_type=oci_wait_utils.NONE_WAITER_KEY,
            operation=oci_common_utils.UPDATE_OPERATION_KEY,
            waiter_client=self.get_waiter_client(),
            resource_helper=self,
            wait_for_states=self.get_wait_for_states_for_operation(
                oci_common_utils.UPDATE_OPERATION_KEY,
            ),
        )

    def delete_resource(self):
        """Delete the credential identified by user_id + smtp_credential_id."""
        return oci_wait_utils.call_and_wait(
            call_fn=self.client.delete_smtp_credential,
            call_fn_args=(),
            call_fn_kwargs={
                "user_id": self.module.params.get("user_id"),
                "smtp_credential_id": self.module.params.get("smtp_credential_id"),
            },
            waiter_type=oci_wait_utils.NONE_WAITER_KEY,
            operation=oci_common_utils.DELETE_OPERATION_KEY,
            waiter_client=self.get_waiter_client(),
            resource_helper=self,
            wait_for_states=self.get_wait_for_states_for_operation(
                oci_common_utils.DELETE_OPERATION_KEY,
            ),
        )
# Resolve the (optional) hand-written customization class by name.
SmtpCredentialHelperCustom = get_custom_class("SmtpCredentialHelperCustom")


class ResourceHelper(SmtpCredentialHelperCustom, SmtpCredentialHelperGen):
    # MRO: custom overrides (if any) take precedence over the generated helper.
    pass
def main():
    """Ansible module entry point: build the arg spec and dispatch by state."""
    arg_spec = oci_common_utils.get_common_arg_spec(
        supports_create=True, supports_wait=True
    )
    # Module-specific parameters on top of the common OCI spec.
    arg_spec.update(
        description=dict(type="str"),
        user_id=dict(type="str", required=True),
        smtp_credential_id=dict(aliases=["id"], type="str"),
        state=dict(type="str", default="present", choices=["present", "absent"]),
    )

    module = AnsibleModule(argument_spec=arg_spec, supports_check_mode=True)

    if not HAS_OCI_PY_SDK:
        module.fail_json(msg="oci python sdk required for this module.")

    helper = ResourceHelper(
        module=module,
        resource_type="smtp_credential",
        service_client_class=IdentityClient,
        namespace="identity",
    )

    # Dispatch in delete/update/create priority order.
    result = {"changed": False}
    if helper.is_delete():
        result = helper.delete()
    elif helper.is_update():
        result = helper.update()
    elif helper.is_create():
        result = helper.create()

    module.exit_json(**result)


if __name__ == "__main__":
    main()
|
[
"ansible_collections.oracle.oci.plugins.module_utils.oci_common_utils.raise_does_not_exist_service_error",
"ansible_collections.oracle.oci.plugins.module_utils.oci_common_utils.list_all_resources",
"ansible_collections.oracle.oci.plugins.module_utils.oci_common_utils.get_common_arg_spec",
"ansible_collections.oracle.oci.plugins.module_utils.oci_common_utils.get_default_response_from_resource",
"ansible.module_utils.basic.AnsibleModule",
"ansible_collections.oracle.oci.plugins.module_utils.oci_resource_utils.get_custom_class",
"ansible_collections.oracle.oci.plugins.module_utils.oci_common_utils.merge_dicts"
] |
[((9997, 10043), 'ansible_collections.oracle.oci.plugins.module_utils.oci_resource_utils.get_custom_class', 'get_custom_class', (['"""SmtpCredentialHelperCustom"""'], {}), "('SmtpCredentialHelperCustom')\n", (10013, 10043), False, 'from ansible_collections.oracle.oci.plugins.module_utils.oci_resource_utils import OCIResourceHelperBase, get_custom_class\n'), ((10162, 10240), 'ansible_collections.oracle.oci.plugins.module_utils.oci_common_utils.get_common_arg_spec', 'oci_common_utils.get_common_arg_spec', ([], {'supports_create': '(True)', 'supports_wait': '(True)'}), '(supports_create=True, supports_wait=True)\n', (10198, 10240), False, 'from ansible_collections.oracle.oci.plugins.module_utils import oci_common_utils, oci_wait_utils\n'), ((10569, 10635), 'ansible.module_utils.basic.AnsibleModule', 'AnsibleModule', ([], {'argument_spec': 'module_args', 'supports_check_mode': '(True)'}), '(argument_spec=module_args, supports_check_mode=True)\n', (10582, 10635), False, 'from ansible.module_utils.basic import AnsibleModule\n'), ((6832, 6885), 'ansible_collections.oracle.oci.plugins.module_utils.oci_common_utils.raise_does_not_exist_service_error', 'oci_common_utils.raise_does_not_exist_service_error', ([], {}), '()\n', (6883, 6885), False, 'from ansible_collections.oracle.oci.plugins.module_utils import oci_common_utils, oci_wait_utils\n'), ((7364, 7426), 'ansible_collections.oracle.oci.plugins.module_utils.oci_common_utils.merge_dicts', 'oci_common_utils.merge_dicts', (['required_kwargs', 'optional_kwargs'], {}), '(required_kwargs, optional_kwargs)\n', (7392, 7426), False, 'from ansible_collections.oracle.oci.plugins.module_utils import oci_common_utils, oci_wait_utils\n'), ((7442, 7527), 'ansible_collections.oracle.oci.plugins.module_utils.oci_common_utils.list_all_resources', 'oci_common_utils.list_all_resources', (['self.client.list_smtp_credentials'], {}), '(self.client.list_smtp_credentials, **kwargs\n )\n', (7477, 7527), False, 'from 
ansible_collections.oracle.oci.plugins.module_utils import oci_common_utils, oci_wait_utils\n'), ((6761, 6822), 'ansible_collections.oracle.oci.plugins.module_utils.oci_common_utils.get_default_response_from_resource', 'oci_common_utils.get_default_response_from_resource', (['resource'], {}), '(resource)\n', (6812, 6822), False, 'from ansible_collections.oracle.oci.plugins.module_utils import oci_common_utils, oci_wait_utils\n')]
|
""".. Ignore pydocstyle D400.
=========
Utilities
=========
Utilities for using global manager features.
"""
from django.test import override_settings
def disable_auto_calls():
    """Decorator/context manager which stops automatic manager calls.

    While active, the automatic
    :meth:`~resolwe.flow.managers.dispatcher.Manager.communicate` calls
    from the Django transaction signal are not done.

    Implemented via :func:`django.test.override_settings`, so the returned
    object can be used either as a decorator or entered as a context manager.
    """
    return override_settings(FLOW_MANAGER_DISABLE_AUTO_CALLS=True)
|
[
"django.test.override_settings"
] |
[((426, 481), 'django.test.override_settings', 'override_settings', ([], {'FLOW_MANAGER_DISABLE_AUTO_CALLS': '(True)'}), '(FLOW_MANAGER_DISABLE_AUTO_CALLS=True)\n', (443, 481), False, 'from django.test import override_settings\n')]
|
# -*- coding: utf-8 -*-
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://docs.scrapy.org/en/latest/topics/item-pipeline.html
import json
from scrapy.exceptions import DropItem
from data_pirate_cep.utils import beautify_item, validate_item, write_addresses
class DataPirateCepPipeline(object):
    """Scrapy pipeline: validate, normalise and collect address items per UF."""

    def open_spider(self, spider):
        # Pre-create one empty bucket per federal unit (UF) from uf.json.
        with open('uf.json', 'r', encoding='utf-8') as uf_file:
            first_line = uf_file.readline()
        for uf in json.loads(first_line).get('ufs'):
            spider.addresses[uf] = []

    def process_item(self, item, spider):
        # Guard clause: reject invalid items outright.
        if not validate_item(item):
            raise DropItem('Invalid Item')
        beautify_item(item)
        spider.addresses[item.get('uf')].append(
            [item.get('address'), item.get('range_cep')]
        )
        return item

    def close_spider(self, spider):
        # Persist everything collected during the crawl.
        write_addresses(spider.addresses)
        print('jsonlines file created')
|
[
"data_pirate_cep.utils.write_addresses",
"json.loads",
"data_pirate_cep.utils.validate_item",
"data_pirate_cep.utils.beautify_item",
"scrapy.exceptions.DropItem"
] |
[((682, 701), 'data_pirate_cep.utils.validate_item', 'validate_item', (['item'], {}), '(item)\n', (695, 701), False, 'from data_pirate_cep.utils import beautify_item, validate_item, write_addresses\n'), ((959, 992), 'data_pirate_cep.utils.write_addresses', 'write_addresses', (['spider.addresses'], {}), '(spider.addresses)\n', (974, 992), False, 'from data_pirate_cep.utils import beautify_item, validate_item, write_addresses\n'), ((715, 734), 'data_pirate_cep.utils.beautify_item', 'beautify_item', (['item'], {}), '(item)\n', (728, 734), False, 'from data_pirate_cep.utils import beautify_item, validate_item, write_addresses\n'), ((889, 913), 'scrapy.exceptions.DropItem', 'DropItem', (['"""Invalid Item"""'], {}), "('Invalid Item')\n", (897, 913), False, 'from scrapy.exceptions import DropItem\n'), ((525, 547), 'json.loads', 'json.loads', (['ufs_string'], {}), '(ufs_string)\n', (535, 547), False, 'import json\n')]
|
import requests
import json
import jsonlines
import time
import os
import sys
from retrying import retry
import traceback
class User():
    """Thin wrapper around the public bilibili web APIs for one account."""

    def __init__(self, uid):
        self.uid = str(uid)

    def get_info(self):
        """Return the account profile data (space/acc/info)."""
        return Get(f'https://api.bilibili.com/x/space/acc/info?mid={self.uid}')['data']

    def get_dynamic(self, offset):
        """Return one page of the dynamic feed starting at *offset*."""
        # need_top: 1 includes the pinned dynamic, 0 excludes it.
        url = (
            'https://api.vc.bilibili.com/dynamic_svr/v1/dynamic_svr/space_history'
            f'?host_uid={self.uid}&offset_dynamic_id={offset}&need_top=0'
        )
        return Get(url)['data']

    def get_live_info(self):
        """Return the account's live-room information."""
        return Get(f'https://api.live.bilibili.com/room/v1/Room/getRoomInfoOld?mid={self.uid}')['data']
def Get(url):
    """GET *url* with browser-like headers and return the parsed JSON body."""
    headers = {
        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) "
                      "Chrome/79.0.3945.130 Safari/537.36",
        "Referer": "https://www.bilibili.com/"
    }
    return requests.get(url, headers=headers).json()
def checkAndCreate(dirs):
    """Ensure that directory *dirs* exists, creating parents as needed.

    Uses ``os.makedirs(..., exist_ok=True)`` instead of the original
    check-then-create sequence, which had a TOCTOU race: another process
    (or thread) creating the directory between ``exists()`` and
    ``makedirs()`` would raise FileExistsError.
    """
    os.makedirs(dirs, exist_ok=True)
@retry(wait_random_min=1000, wait_random_max=3000)
def save_file(url, output_dir):
    """Download *url* into *output_dir* and return the local file path.

    Retried with a random 1-3 s backoff on any exception (network hiccups).
    """
    headers = {
        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) "
                      "Chrome/79.0.3945.130 Safari/537.36",
        "Referer": "https://www.bilibili.com/"
    }
    response = requests.get(url, headers=headers, timeout=10)
    # Destination name: last path segment of the URL, query string stripped.
    filename = url.split("?")[0].split("/")[-1]
    dest = os.path.join(output_dir, filename)
    with open(dest, 'wb') as f:
        f.write(response.content)
    print(f"保存 {url} 到文件 {dest}")
    return dest
class DynamicSaver():
    # Dispatches one bilibili dynamic (feed item) to per-type local storage:
    # metadata rows are appended to jsonlines files, and referenced images /
    # short videos are downloaded via save_file() into the configured dirs.
    def __init__(self, dynamic,path_dict):
        # Raw dynamic dict as returned by the space_history API.
        self.dynamic = dynamic
        self.type = dynamic['desc']['type']
        self.id = dynamic['desc']['dynamic_id']
        self.url = "https://t.bilibili.com/" + str(self.id)
        self.time = dynamic['desc']['timestamp']
        # self.origin_id = dynamic['desc']['orig_dy_id']
        self.name = dynamic['desc']['user_profile']['info']['uname']
        self.uid = dynamic['desc']['user_profile']['info']['uid']
        # 'card' arrives as a JSON string; decode it once here.
        self.card = json.loads(dynamic['card'])
        # Output locations, one jsonl file per dynamic type plus media dirs.
        self.forwards_file = path_dict['forwards_file']
        self.videos_file = path_dict['videos_file']
        self.short_videos_file = path_dict['short_videos_file']
        self.audios_file = path_dict['audios_file']
        self.dynamics_file = path_dict['dynamics_file']
        self.albums_file = path_dict['albums_file']
        self.articles_file = path_dict['articles_file']
        self.calendars_file = path_dict['calendars_file']
        self.images_dir = path_dict['images_dir']
        self.short_videos_dir = path_dict['short_videos_dir']
    def format(self):
        # Append a type-specific record to the matching jsonl file; unknown
        # types are only printed. Any error is logged and swallowed so one bad
        # dynamic does not abort the whole crawl.
        try:
            if self.type == 1:
                # Repost (forwarded dynamic)
                msgs = {
                    "dynamic_id": self.id,
                    "time": self.time,
                    "content": self.card['item']['content'],
                    "origin": self.dynamic['desc']['origin']['dynamic_id']
                }
                with jsonlines.open(self.forwards_file, "a") as f:
                    f.write(msgs)
            elif self.type == 2:
                # Album (image post); pictures are downloaded locally too.
                pictures_urls = [pic['img_src']
                                 for pic in self.card['item']['pictures']]
                pictures_urls_local = [save_file(
                    url, self.images_dir) for url in pictures_urls]
                msgs = {
                    "dynamic_id": self.id,
                    "time": self.time,
                    "content": {
                        "description": self.card['item']['description'],
                        "pictures": pictures_urls,
                        "pictures_local": pictures_urls_local
                    }
                }
                with jsonlines.open(self.albums_file, "a") as f:
                    f.write(msgs)
            elif self.type == 4:
                # Plain text dynamic
                msgs = {
                    "dynamic_id": self.id,
                    "time": self.time,
                    "content": self.card['item']['content']
                }
                with jsonlines.open(self.dynamics_file, "a") as f:
                    f.write(msgs)
            elif self.type == 8:
                # Video upload
                msgs = {
                    "dynamic_id": self.id,
                    "time": self.time,
                    "dynamic": self.card['dynamic'],
                    "video": {
                        "bvid": self.dynamic['desc']['bvid'],
                        "title": self.card['title'],
                        "desc": self.card['desc']
                    }
                }
                with jsonlines.open(self.videos_file, "a") as f:
                    f.write(msgs)
            elif self.type == 16:
                # Short video; the clip itself is downloaded locally.
                url = self.card['item']['video_playurl']
                local_url = save_file(url, self.short_videos_dir)
                msgs = {
                    "dynamic_id": self.id,
                    "time": self.time,
                    "short_video": {
                        "url": url,
                        "url_local": local_url,
                        "description": self.card['item']['description'],
                    }
                }
                with jsonlines.open(self.short_videos_file, "a") as f:
                    f.write(msgs)
            elif self.type == 64:
                # Article (column post)
                msgs = {
                    "dynamic_id": self.id,
                    "time": self.time,
                    "article": {
                        "cvid": self.card['id'],
                        "title": self.card['title'],
                        "summary": self.card['summary']
                    }
                }
                with jsonlines.open(self.articles_file, "a") as f:
                    f.write(msgs)
            elif self.type == 256:
                # Audio post
                msgs = {
                    "dynamic_id": self.id,
                    "time": self.time,
                    "audio": {
                        "auid": self.card['id'],
                        "title": self.card['title']
                    }
                }
                with jsonlines.open(self.audios_file, "a") as f:
                    f.write(msgs)
            elif self.type == 2048:
                # Live-stream calendar entry
                msgs = {
                    "dynamic_id": self.id,
                    "time": self.time,
                    "content": self.card['vest']['content']
                }
                with jsonlines.open(self.calendars_file, "a") as f:
                    f.write(msgs)
            else:
                # Unknown dynamic type: log and continue.
                print("未知 ", self.type, self.card)
        except Exception as exc:
            print(str(exc))
            print(self.dynamic)
            traceback.print_exc()
import argparse
if __name__ == "__main__":
    # Command-line interface: a required uid plus optional name/save-root overrides.
    parser = argparse.ArgumentParser(description='Save bilibili dynamic data including all images to local given a specific account.')
    parser.add_argument('uid', type=int, nargs=1,
                        help='UID of the account you want to save its dynamics')
    parser.add_argument('-n', dest='name', type=str,
                        help='The name to use in the local save. If not specified, the username of the bilibili account will be used')
    parser.add_argument('-o', dest='save_root', type=str,
                        help='The root directory of the local save')
    args = parser.parse_args()
    uid = args.uid[0]
    user = User(uid)
    # Fall back to the account's own username when no local name was given.
    name = args.name if args.name else user.get_info()['name']
    save_root = args.save_root if args.save_root else './'
    # Every output path is "<save_root>/<name><suffix>".
    suffixes = {
        'forwards_file': "_forwards.jsonl",
        'videos_file': "_videos.jsonl",
        'short_videos_file': "_short_videos.jsonl",
        'audios_file': "_audios.jsonl",
        'dynamics_file': "_dynamics.jsonl",
        'albums_file': "_albums.jsonl",
        'articles_file': "_articles.jsonl",
        'calendars_file': "_calendar.jsonl",
        'images_dir': "_images",
        'short_videos_dir': "_short_video",
    }
    save_paths = {key: os.path.join(save_root, name + suffix)
                  for key, suffix in suffixes.items()}
    checkAndCreate(save_paths['images_dir'])
    checkAndCreate(save_paths['short_videos_dir'])
    offset = 0
    while True:
        dynamics = user.get_dynamic(offset)  # fetch the next batch of dynamics
        nums = len(dynamics['cards'])
        print("获取动态条数:", nums)
        for d in dynamics['cards']:
            saver = DynamicSaver(d, path_dict=save_paths)
            saver.format()
        if dynamics['has_more'] == 1:
            offset = dynamics['next_offset']
            time.sleep(5)
        else:
            break
|
[
"traceback.print_exc",
"argparse.ArgumentParser",
"os.makedirs",
"json.loads",
"os.path.exists",
"time.sleep",
"jsonlines.open",
"requests.get",
"retrying.retry",
"os.path.join"
] |
[((1171, 1220), 'retrying.retry', 'retry', ([], {'wait_random_min': '(1000)', 'wait_random_max': '(3000)'}), '(wait_random_min=1000, wait_random_max=3000)\n', (1176, 1220), False, 'from retrying import retry\n'), ((1013, 1055), 'requests.get', 'requests.get', (['url'], {'headers': 'DEFAULT_HEADERS'}), '(url, headers=DEFAULT_HEADERS)\n', (1025, 1055), False, 'import requests\n'), ((1504, 1558), 'requests.get', 'requests.get', (['url'], {'headers': 'DEFAULT_HEADERS', 'timeout': '(10)'}), '(url, headers=DEFAULT_HEADERS, timeout=10)\n', (1516, 1558), False, 'import requests\n'), ((1808, 1842), 'os.path.join', 'os.path.join', (['output_dir', 'filename'], {}), '(output_dir, filename)\n', (1820, 1842), False, 'import os\n'), ((7342, 7473), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Save bilibili dynamic data including all images to local given a specific account."""'}), "(description=\n 'Save bilibili dynamic data including all images to local given a specific account.'\n )\n", (7365, 7473), False, 'import argparse\n'), ((1118, 1138), 'os.path.exists', 'os.path.exists', (['dirs'], {}), '(dirs)\n', (1132, 1138), False, 'import os\n'), ((1149, 1166), 'os.makedirs', 'os.makedirs', (['dirs'], {}), '(dirs)\n', (1160, 1166), False, 'import os\n'), ((2367, 2394), 'json.loads', 'json.loads', (["dynamic['card']"], {}), "(dynamic['card'])\n", (2377, 2394), False, 'import json\n'), ((8226, 8275), 'os.path.join', 'os.path.join', (['save_root', 'f"""{name}_forwards.jsonl"""'], {}), "(save_root, f'{name}_forwards.jsonl')\n", (8238, 8275), False, 'import os\n'), ((8299, 8346), 'os.path.join', 'os.path.join', (['save_root', 'f"""{name}_videos.jsonl"""'], {}), "(save_root, f'{name}_videos.jsonl')\n", (8311, 8346), False, 'import os\n'), ((8376, 8429), 'os.path.join', 'os.path.join', (['save_root', 'f"""{name}_short_videos.jsonl"""'], {}), "(save_root, f'{name}_short_videos.jsonl')\n", (8388, 8429), False, 'import os\n'), ((8453, 8500), 'os.path.join', 
'os.path.join', (['save_root', 'f"""{name}_audios.jsonl"""'], {}), "(save_root, f'{name}_audios.jsonl')\n", (8465, 8500), False, 'import os\n'), ((8526, 8575), 'os.path.join', 'os.path.join', (['save_root', 'f"""{name}_dynamics.jsonl"""'], {}), "(save_root, f'{name}_dynamics.jsonl')\n", (8538, 8575), False, 'import os\n'), ((8599, 8646), 'os.path.join', 'os.path.join', (['save_root', 'f"""{name}_albums.jsonl"""'], {}), "(save_root, f'{name}_albums.jsonl')\n", (8611, 8646), False, 'import os\n'), ((8672, 8721), 'os.path.join', 'os.path.join', (['save_root', 'f"""{name}_articles.jsonl"""'], {}), "(save_root, f'{name}_articles.jsonl')\n", (8684, 8721), False, 'import os\n'), ((8748, 8797), 'os.path.join', 'os.path.join', (['save_root', 'f"""{name}_calendar.jsonl"""'], {}), "(save_root, f'{name}_calendar.jsonl')\n", (8760, 8797), False, 'import os\n'), ((8822, 8863), 'os.path.join', 'os.path.join', (['save_root', 'f"""{name}_images"""'], {}), "(save_root, f'{name}_images')\n", (8834, 8863), False, 'import os\n'), ((8892, 8938), 'os.path.join', 'os.path.join', (['save_root', 'f"""{name}_short_video"""'], {}), "(save_root, f'{name}_short_video')\n", (8904, 8938), False, 'import os\n'), ((1661, 1695), 'os.path.join', 'os.path.join', (['output_dir', 'filename'], {}), '(output_dir, filename)\n', (1673, 1695), False, 'import os\n'), ((9432, 9445), 'time.sleep', 'time.sleep', (['(5)'], {}), '(5)\n', (9442, 9445), False, 'import time\n'), ((1768, 1802), 'os.path.join', 'os.path.join', (['output_dir', 'filename'], {}), '(output_dir, filename)\n', (1780, 1802), False, 'import os\n'), ((7253, 7274), 'traceback.print_exc', 'traceback.print_exc', ([], {}), '()\n', (7272, 7274), False, 'import traceback\n'), ((3351, 3390), 'jsonlines.open', 'jsonlines.open', (['self.forwards_file', '"""a"""'], {}), "(self.forwards_file, 'a')\n", (3365, 3390), False, 'import jsonlines\n'), ((4130, 4167), 'jsonlines.open', 'jsonlines.open', (['self.albums_file', '"""a"""'], {}), "(self.albums_file, 
'a')\n", (4144, 4167), False, 'import jsonlines\n'), ((4479, 4518), 'jsonlines.open', 'jsonlines.open', (['self.dynamics_file', '"""a"""'], {}), "(self.dynamics_file, 'a')\n", (4493, 4518), False, 'import jsonlines\n'), ((5046, 5083), 'jsonlines.open', 'jsonlines.open', (['self.videos_file', '"""a"""'], {}), "(self.videos_file, 'a')\n", (5060, 5083), False, 'import jsonlines\n'), ((5680, 5723), 'jsonlines.open', 'jsonlines.open', (['self.short_videos_file', '"""a"""'], {}), "(self.short_videos_file, 'a')\n", (5694, 5723), False, 'import jsonlines\n'), ((6191, 6230), 'jsonlines.open', 'jsonlines.open', (['self.articles_file', '"""a"""'], {}), "(self.articles_file, 'a')\n", (6205, 6230), False, 'import jsonlines\n'), ((6639, 6676), 'jsonlines.open', 'jsonlines.open', (['self.audios_file', '"""a"""'], {}), "(self.audios_file, 'a')\n", (6653, 6676), False, 'import jsonlines\n'), ((6991, 7031), 'jsonlines.open', 'jsonlines.open', (['self.calendars_file', '"""a"""'], {}), "(self.calendars_file, 'a')\n", (7005, 7031), False, 'import jsonlines\n')]
|
import tweepy
import time

# Authenticate against the Twitter API (credentials left blank on purpose).
auth = tweepy.OAuthHandler('', '')
auth.set_access_token('', '')
api = tweepy.API(auth, wait_on_rate_limit=True, wait_on_rate_limit_notify=True)

search_query = 'Busque por tweet'
tweet_limit = 5

# Reply to matching tweets; a tweet is echoed back at its author when it is
# short enough that the "@user " prefix still fits within the limit.
for tweet in tweepy.Cursor(api.search, search_query).items(tweet_limit):
    try:
        if len(tweet.text) <= 130:
            reply_text = '@' + tweet.user.screen_name + ' ' + tweet.text
            api.update_status(reply_text, in_reply_to_status_id=tweet.id)
            print('operação com sucesso')
        print(len(tweet.text))
        time.sleep(60)  # throttle between replies
    except tweepy.TweepError as e:
        print(e.reason)
    except StopIteration:
        break
|
[
"tweepy.OAuthHandler",
"tweepy.Cursor",
"tweepy.API",
"time.sleep"
] |
[((37, 64), 'tweepy.OAuthHandler', 'tweepy.OAuthHandler', (['""""""', '""""""'], {}), "('', '')\n", (56, 64), False, 'import tweepy\n'), ((104, 177), 'tweepy.API', 'tweepy.API', (['auth'], {'wait_on_rate_limit': '(True)', 'wait_on_rate_limit_notify': '(True)'}), '(auth, wait_on_rate_limit=True, wait_on_rate_limit_notify=True)\n', (114, 177), False, 'import tweepy\n'), ((239, 271), 'tweepy.Cursor', 'tweepy.Cursor', (['api.search', 'busca'], {}), '(api.search, busca)\n', (252, 271), False, 'import tweepy\n'), ((543, 557), 'time.sleep', 'time.sleep', (['(60)'], {}), '(60)\n', (553, 557), False, 'import time\n')]
|
# This is my main script
import json
import multiprocessing as mp
import os
import time
import matplotlib.cm
import matplotlib.pyplot
import matplotlib.pyplot as plt
import numpy as np
from sklearn import metrics
from sklearn.cluster import AgglomerativeClustering
from sklearn.cluster import OPTICS
import FeatureProcessing as fp
import malware_stats
import my_sorter as my_sort
import process_cuckoo_reports as pcr
def dist_metric(x, y):
    """Levenshtein ratio distance between two samples referenced by index.

    x and y are 1-element index arrays into the module-level `data` list;
    the raw edit distance is normalised by the longer sequence length.
    """
    idx_a, idx_b = int(x[0]), int(y[0])
    print("X = " + str(idx_a) + " Y = " + str(idx_b))
    seq_a = data[idx_a]
    seq_b = data[idx_b]
    longest = max(len(seq_a), len(seq_b))
    # Dividing by the max length turns the distance into a ratio in [0, 1].
    return fp.levenshtein_distance_dp(seq_a, seq_b) / longest
def dist_metric_alt(x, y, dm):
    """Levenshtein ratio distance between dm[x] and dm[y].

    Unlike dist_metric, the data container is passed explicitly as dm.
    """
    print("X = " + str(x) + " Y = " + str(y))
    seq_a = dm[x]
    seq_b = dm[y]
    # Normalise by the longer sequence so the result is a ratio in [0, 1].
    return fp.levenshtein_distance_dp(seq_a, seq_b) / max(len(seq_a), len(seq_b))
def alt_dist_metric(i):
    """Pairwise-distance task for pool.map.

    i is a task tuple [row, col, seq_a, seq_b]; returns [row, col, dist]
    where dist is the Levenshtein ratio distance between the sequences.

    Fix: the equal-index short-circuit used to return a bare 0.0 instead of
    the [row, col, dist] triple, which would crash the consumer
    (mp_calc_dist_matrix indexes r[0], r[1], r[2] on every result).
    """
    row, col = i[0], i[1]
    if row == col:
        # Same sample compared with itself: distance is 0 by definition.
        return [row, col, 0.0]
    seq_a = i[2]
    seq_b = i[3]
    max_len = max(len(seq_a), len(seq_b))
    # Divide by the max length to get the Levenshtein ratio in [0, 1].
    dist = fp.levenshtein_distance_dp(seq_a, seq_b) / max_len
    return [row, col, dist]
def mp_calc_dist_matrix(idxs, dm):
    """Compute a symmetric pairwise distance matrix over dm[0:len(idxs)].

    One task per unordered pair (i, j) with i < j is fanned out to a
    process pool running alt_dist_metric; the diagonal stays 0.

    Fixes: removed the always-true `i < len(idxs) and j < len(idxs)` guard
    (both indices come from ranges bounded by len(idxs)), and the pool is
    now managed by a context manager so it is shut down even on error.
    """
    n = len(idxs)
    # All unordered pairs with their payload sequences.
    tasks = [[i, j, dm[i], dm[j]]
             for i in range(n)
             for j in range(i + 1, n)]
    with mp.Pool() as pool:
        results = pool.map(alt_dist_metric, tasks)
    m_out = np.zeros((n, n))
    # Mirror each result so the matrix is symmetric.
    for row, col, dist in results:
        m_out[row][col] = dist
        m_out[col][row] = dist
    return m_out
def swap(dist_mat, i, j):
    """Exchange rows i and j and columns i and j of a square matrix in place."""
    # Columns first: exchange the i-th and j-th entry of every row.
    for r in range(len(dist_mat)):
        dist_mat[r][i], dist_mat[r][j] = dist_mat[r][j], dist_mat[r][i]
    # Then the two rows themselves; explicit copies keep this correct for
    # numpy arrays, where bare row indexing would yield views.
    row_i = dist_mat[i].copy()
    row_j = dist_mat[j].copy()
    dist_mat[i] = row_j
    dist_mat[j] = row_i
def order_dist_matrix(dist_matrix, labels):
    """Bubble-sort labels ascending in place, mirroring every exchange in dist_matrix.

    Keeping the matrix rows/columns aligned with their labels makes samples
    of the same cluster end up adjacent in the matrix.
    """
    for sweep_end in range(len(labels) - 1, 0, -1):
        for pos in range(sweep_end):
            if labels[pos] > labels[pos + 1]:
                labels[pos], labels[pos + 1] = labels[pos + 1], labels[pos]
                swap(dist_matrix, pos, pos + 1)
def store_ordered_dist_matrix_as_png(dist_matrix, labels, title):
    """Render a label-sorted distance matrix as images/Dist_matrix_<title>.png.

    Positions where consecutive labels differ become tick positions so the
    grid lines visually separate the clusters.
    """
    boundaries = [pos - 0.5
                  for pos in range(1, len(labels))
                  if labels[pos] != labels[pos - 1]]
    plt.clf()
    fig, ax = plt.subplots(figsize=(20, 20), sharey=True)
    colour_map = matplotlib.cm.get_cmap('CMRmap')
    image = ax.matshow(dist_matrix, interpolation='nearest', cmap=colour_map)
    ax.grid(False)
    plt.suptitle('Clustered Distance Matrix')
    plt.grid(True)
    plt.title(title)
    # White tick labels: only the grid lines at cluster boundaries should show.
    plt.xticks(boundaries, color="w")
    plt.yticks(boundaries, color="w")
    fig.colorbar(image, ticks=[0.0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.0])
    plt.savefig("images/Dist_matrix_" + str(title) + ".png", bbox_inches='tight')
    plt.close()
def do_hierarchical_cluster_analysis_routine(api_call_description, dist_matrix):
    """Sweep agglomerative cluster counts and report the best silhouette.

    Tries every cluster count in [3, min(301, n) - 2), plots the
    count-vs-silhouette curve, and saves the distance matrix sorted by the
    best labelling as a PNG.
    """
    print('DOING HIERARCHICAL AGGLOMERATIVE CLUSTERING:')
    n = len(dist_matrix)
    # Best-so-far trackers across the sweep.
    best_score = -1.0
    best_count = -1
    best_assignment = []
    tried_counts = []
    tried_scores = []
    upper = min(301, n)
    for count in range(3, upper - 1):
        print(f'Trying with {count} clusters:')
        model = AgglomerativeClustering(n_clusters=count, affinity='precomputed', linkage='average')
        assignment = model.fit_predict(dist_matrix)
        score = metrics.silhouette_score(dist_matrix, labels=assignment, metric="precomputed")
        print(f"For {count} clusters the mean silhouette score is: {score}")
        tried_counts.append(count)
        tried_scores.append(score)
        if score > best_score:
            best_count = count
            best_assignment = assignment
            best_score = score
    # Cluster-count vs. silhouette evaluation curve.
    plt.clf()
    plt.plot(tried_counts, tried_scores)
    plt.title('HIERARCHICAL - Cluster count to Silhouette score')
    plt.xlabel('Number of clusters')
    plt.ylabel('Silhouette score')
    plt.yticks(np.arange(0, 1, 0.1))
    plt.grid(True)
    plt.savefig("images/EVAL_HIERARCHICAL_" + api_call_description + ".png", bbox_inches='tight')
    plt.close()
    print('Best # of clusters: ' + str(best_count))
    print("The mean Silhouette score is: " + str(best_score))
    sorted_dm, sorted_labels = my_sort.optimal_sort(dist_matrix, best_assignment)
    store_ordered_dist_matrix_as_png(sorted_dm, sorted_labels, "HIERARCHICAL_analysis_n=" + str(n) + "_nCluster=" + str(
        best_count) + "_API_format" + api_call_description)
    print("Sorted dist matrix saved as image.")
def plot_optics_reachability(clust, X, title):
    """Save a reachability plot for a fitted OPTICS clusterer.

    clust: fitted sklearn OPTICS instance (reachability_, ordering_,
    labels_ are read); X: only its length is used for the x axis;
    title: suffix of the output file name.

    Fixes: the old `zip(range(len(labels)), colors)` loop only plotted
    cluster ids 0-4 because the colour list has five entries — any further
    clusters were silently missing from the figure; colours now repeat
    cyclically over all cluster ids. Also removed a redundant plt.title
    call whose text was immediately overwritten.
    """
    space = np.arange(len(X))
    reachability = clust.reachability_[clust.ordering_]
    labels = clust.labels_[clust.ordering_]
    plt.clf()
    plt.figure(figsize=(20, 10))
    ax1 = plt.subplot()
    colors = ['c.', 'b.', 'r.', 'y.', 'g.']
    # One series per actual cluster id; colours cycle when ids exceed five.
    for cls in sorted(set(labels)):
        if cls == -1:
            continue
        mask = labels == cls
        ax1.plot(space[mask], reachability[mask], colors[cls % len(colors)], alpha=0.3)
    # Noise samples in black, plus two reference threshold lines.
    ax1.plot(space[labels == -1], reachability[labels == -1], 'k.', alpha=0.3)
    ax1.plot(space, np.full_like(space, 2., dtype=float), 'k-', alpha=0.5)
    ax1.plot(space, np.full_like(space, 0.5, dtype=float), 'k-.', alpha=0.5)
    ax1.set_ylabel('Reachability Distance')
    plt.xlabel('Samples')
    plt.ylabel('Reachability (epsilon distance)')
    plt.title('Reachability Plot')
    plt.savefig("images/Reachability_plot_" + str(title) + ".png", bbox_inches='tight')
    plt.close()
def do_optics_cluster_analysis_routine(api_call_description, dist_matrix):
    """Sweep OPTICS min_samples over [2, 20] on a precomputed distance matrix.

    For every min_samples value, records the silhouette score (with and
    without noise samples), the cluster count and the noise count; writes a
    combined evaluation plot to images/, then saves the distance matrix
    sorted by the best labelling as a PNG.

    Parameters:
        api_call_description: tag used in output file names/plot titles.
        dist_matrix: square precomputed distance matrix.
    """
    print('DOING OPTICS ANALYSIS:')
    n = len(dist_matrix)
    # Best-so-far trackers across the min_samples sweep.
    best_ms = -1
    best_mean_silhouette = -1
    best_labels = []
    # Per-iteration series feeding the evaluation plot below.
    list_min_samples = []
    list_mean_silhouettes = []
    list_mean_silhouettes_no_noise = []
    list_clusters = []
    list_noise_count = []
    for ms in range(2, 21):
        try:
            cluster_analyzer = OPTICS(metric="precomputed", min_samples=ms)
            labels = cluster_analyzer.fit_predict(dist_matrix)
            # plot_optics_reachability(cluster_analyzer, dist_matrix, api_call_description + '_min_samples=' + str(ms))
            # Cluster count excludes the noise label (-1) when present.
            lbl_count = len(set(labels)) - (1 if -1 in labels else 0)
            mean_s_coefficient = metrics.silhouette_score(dist_matrix, labels=labels, metric="precomputed")
            print('For min_samples=' + str(ms) + ' found ' + str(lbl_count) + ' clusters, and mean_silhouette=' + str(
                mean_s_coefficient))
            list_noise_count.append(np.count_nonzero(labels == -1))
            list_min_samples.append(ms)
            list_mean_silhouettes.append(mean_s_coefficient)
            # Conditional-expression updates: best_ms and best_labels are
            # chosen against the OLD best_mean_silhouette, which is only
            # updated last — the ordering of these three lines matters.
            best_ms = ms if mean_s_coefficient > best_mean_silhouette else best_ms
            best_labels = labels.copy() if mean_s_coefficient > best_mean_silhouette else best_labels
            # NOTE(review): the noise check here uses best_labels rather than
            # the current iteration's labels — looks suspicious; confirm intent.
            list_clusters.append(len(set(labels)) - (1 if -1 in best_labels else 0))
            best_mean_silhouette = mean_s_coefficient if mean_s_coefficient > best_mean_silhouette else best_mean_silhouette
            # Recompute the silhouette with noise samples removed entirely.
            no_noise_dm, no_noise_labels = get_noise_free_dm_n_labels_copy(dist_matrix, labels)
            no_noise_mean_s_coefficient = metrics.silhouette_score(no_noise_dm, labels=no_noise_labels,
                                                                   metric="precomputed")
            print('For min_samples=' + str(ms) + ' found no_noise mean_silhouette=' + str(no_noise_mean_s_coefficient))
            list_mean_silhouettes_no_noise.append(no_noise_mean_s_coefficient)
        except Exception as e:
            # OPTICS/silhouette can fail (e.g. a single cluster); log and try the next ms.
            print("Could not do optics for min_samples=" + str(ms))
            print(e)
    # Display info about cluster to min_samples:
    # Triple-axis figure: silhouette (left), noise count and cluster count
    # on two separate right-hand axes.
    plt.clf()
    fig, axes = plt.subplots()
    axes.spines['left'].set_color('green')
    axes.set_ylim([0.0, 1.0])
    axes.xaxis.set_ticks(np.arange(0, 21, 1))
    axes.yaxis.set_ticks(np.arange(0, 1, 0.1))
    axes.grid(True)
    fig.subplots_adjust(right=0.75)
    twin_axes = axes.twinx()
    twin_axes.spines['right'].set_color('red')
    #twin_axes.set_ylim([100, 600])
    second_twin = axes.twinx()
    # Offset the second right-hand spine so both stay readable.
    second_twin.spines['right'].set_position(('axes', 1.2))
    second_twin.spines['right'].set_color('blue')
    #second_twin.set_ylim([0, 250])
    p1, = axes.plot(list_min_samples, list_mean_silhouettes, color='green', label='Silhouette score')
    p2, = axes.plot(list_min_samples, list_mean_silhouettes_no_noise, color='green', dashes=[6, 2], label="Silhouette without noise")
    axes.set_xlabel("Min samples")
    axes.set_ylabel("Silhouette score")
    p3, = twin_axes.plot(list_min_samples, list_noise_count, color='red', label='Noise')
    twin_axes.set_ylabel("Noise samples")
    p4, = second_twin.plot(list_min_samples, list_clusters, color='blue', label='Clusters')
    second_twin.set_ylabel('Cluster count')
    axes.legend(handles=[p1, p2, p3, p4], bbox_to_anchor=(0.5, 1.1), loc='lower center')
    plt.title('OPTICS - Min_samples size to Silhouette score')
    plt.savefig("images/EVAL_OPTICS_" + api_call_description + ".png", bbox_inches='tight')
    plt.close()
    print('')
    print('***** OPTICS Analysis done *****')
    print('Best min_sample=' + str(best_ms))
    print('Finds ' + str(len(set(best_labels)) - (1 if -1 in best_labels else 0)) + ' clusters')
    # print('Samples counted as noise: ' + str(best_labels.count(-1)))
    print('Samples counted as noise: ' + str(np.count_nonzero(best_labels == -1)))
    print('Mean silhouette: ' + str(best_mean_silhouette))
    # sorted_dm = dist_matrix.copy()
    # my_sort.tim_sort(best_labels, sorted_dm)
    # order_dist_matrix(sorted_dm, best_labels)
    sorted_dm, sorted_lbls = my_sort.optimal_sort(dist_matrix, best_labels)
    noise_count = np.count_nonzero(best_labels == -1)
    print("Samples considered noise: " + str(noise_count))
    store_ordered_dist_matrix_as_png(sorted_dm, sorted_lbls, "OPTICS_analysis_n=" + str(n) + "_min_samples=" + str(
        best_ms) + "_API_format=" + api_call_description)
    print("Sorted dist matrix saved as image.")
def get_noise_free_dm_n_labels_copy(dist_matrix, labels):
    """Return copies of dist_matrix and labels with noise samples (label == -1) dropped.

    Rows AND columns of the matrix belonging to noise samples are removed;
    both returned arrays are independent copies of the inputs.
    """
    keep = np.asarray(labels) != -1
    # Boolean fancy indexing always copies, so the caller gets fresh arrays.
    trimmed = np.asarray(dist_matrix)[keep][:, keep]
    return trimmed, np.asarray(labels)[keep]
def do_final_optics(dm, min_sampels):
    """Run OPTICS once on distance matrix dm and save the cluster-sorted matrix image."""
    print('')
    print('***** DOING FINAL OPTICS *****')
    model = OPTICS(metric="precomputed", min_samples=min_sampels)
    assignment = model.fit_predict(dm)
    # Cluster count excludes the noise label (-1) when present.
    n_found = len(set(assignment)) - (1 if -1 in assignment else 0)
    score = metrics.silhouette_score(dm, labels=assignment, metric="precomputed")
    print('For min_samples=' + str(min_sampels) + ' found ' + str(n_found) + ' clusters, and mean_silhouette=' + str(
        score))
    sorted_dm, sorted_assignment = my_sort.optimal_sort(dm, assignment)
    title = ('FINAL_OPTICS_min_samples=' + str(min_sampels)
             + '_n_clusters=' + str(n_found)
             + '_n_samples=' + str(len(assignment)))
    store_ordered_dist_matrix_as_png(sorted_dm, sorted_assignment, title)
    print('........... DONE!')
def do_final_hierarchical(dm, n_clusters):
    """Run agglomerative clustering once on dm and save the cluster-sorted matrix image."""
    print('')
    print('***** DOING FINAL Hierarchical *****')
    model = AgglomerativeClustering(n_clusters=n_clusters, affinity='precomputed', linkage='complete')
    assignment = model.fit_predict(dm)
    score = metrics.silhouette_score(dm, labels=assignment, metric="precomputed")
    print("For " + str(n_clusters) + " clusters the mean silhouette score is: " + str(score))
    sorted_dm, sorted_assignment = my_sort.optimal_sort(dm, assignment)
    title = ('FINAL_HIERARCHICAL'
             + '_n_clusters=' + str(n_clusters)
             + '_n_samples=' + str(len(assignment)))
    store_ordered_dist_matrix_as_png(sorted_dm, sorted_assignment, title)
    print('........... DONE!')
def find_optimal_values():
    """Run the full evaluation over the three module-level distance matrices `m`.

    For each of the three preprocessing variants, plots a histogram of the
    pairwise distances to images/, then runs both the OPTICS and the
    hierarchical parameter-sweep routines.

    Side effects: reads module-level `m`; declares `global j` and uses j as
    an inner loop variable, so the module-level `j` (the raw JSON text read
    in __main__) is clobbered after this call.
    """
    global j
    labels = ['FILTERED=FALSE_COLLAPSED=FALSE', 'FILTERED=TRUE_COLLAPSED=FALSE', 'FILTERED=TRUE_COLLAPSED=TRUE']
    for dm_id in range(0, 3):
        # Collect the strict upper triangle (i < j) of this distance matrix.
        dist_list = []
        for i in range(0, len(m[dm_id])):
            for j in range(i, len(m[dm_id])):
                if not i == j:
                    dist_list.append(m[dm_id][i][j])
        print("Calculating frequency of distances in distance matrix:")
        print("Dist counts: " + str(len(dist_list)))
        # Histogram of all pairwise distances for this variant.
        plt.clf()
        plt.hist(dist_list, bins=50)
        plt.gca().set(title='Frequency of Distances', ylabel='Frequency', xlabel='Levenshtein ratio distance')
        plt.title('Frequency of distances with: ' + labels[dm_id])
        #plt.ylim([0, 350000])
        plt.grid(True)
        plt.savefig("images/Dist_frequencies_" + labels[dm_id] + ".png")
        plt.close()
        print(".......... DONE")
        print('')
        # Parameter sweeps for both clustering algorithms on this variant.
        do_optics_cluster_analysis_routine(labels[dm_id] + '_API_SEQ', m[dm_id])
        do_hierarchical_cluster_analysis_routine(labels[dm_id] + '_API_SEQ', m[dm_id])
if __name__ == '__main__':
    # Overall wall-clock timer for the whole run.
    start = time.time()
    mp.freeze_support()  # required for multiprocessing in frozen Windows builds
    stored_dist_matrix = "data/dist_matrix.json"
    data = []  # module-level: read by dist_metric()
    print("###GO GO GO###")
    m = []
    if os.path.isfile(stored_dist_matrix):
        # Reuse the previously computed distance matrices.
        print("Loading stored distance matrix")
        with open(stored_dist_matrix, "r") as f:
            data = json.loads(f.read())
        m = np.array(data)
        print(".......... DONE")
        print('')
    else:
        # Build the three distance matrices from the raw cuckoo reports.
        workdir = "C:\\Users\\stegg\\OneDrive\\Documents\\Master Projekt\\Data\\Nov2020_First_250\\"
        d = pcr.mp_get_all_files_api_sequences(workdir)
        print("Creating a new distance matrices")
        for dms in d:
            X = np.arange(len(dms)).reshape(-1, 1)
            # Fix: per-matrix timing now uses its own names; previously it
            # reused `start`/`end`, clobbering the whole-run timer so the
            # final "Time taken" reported only the last fragment.
            loop_start = time.time()
            m = mp_calc_dist_matrix(X, dms)
            loop_end = time.time()
            print("Time take: " + str(loop_end - loop_start))
            print(".......... DONE")
            print('')
            data.append(m)
        # Persist all matrices so later runs can skip the computation.
        print("Saving distance matrix to file:")
        m_list = [matrix.tolist() for matrix in data]
        with open("data/dist_matrix.json", "w") as f:
            f.write(json.dumps(m_list))
        print(".......... DONE")
        print('')
        m = np.array(m_list)
    # m has the distance matrices of the three tracks:
    find_optimal_values()
    do_final_hierarchical(m[2], 45)
    do_final_hierarchical(m[2], 75)
    do_final_optics(m[2], 2)
    do_final_optics(m[2], 4)
    do_final_optics(m[2], 5)
    do_final_optics(m[2], 6)
    do_final_optics(m[2], 7)
    do_final_optics(m[2], 8)
    # malware_stats.do_api_analysis("C:\\Users\\stegg\\OneDrive\\Documents\\Master Projekt\\Data\\Nov2020_First_250\\")
    end = time.time()
    print('Time taken: ' + str(end - start) + " sec.")
    print('')
    print("********* ALL DONE **********")
|
[
"matplotlib.pyplot.title",
"matplotlib.pyplot.clf",
"matplotlib.pyplot.suptitle",
"json.dumps",
"matplotlib.pyplot.figure",
"os.path.isfile",
"numpy.arange",
"matplotlib.pyplot.gca",
"numpy.full_like",
"json.loads",
"matplotlib.pyplot.close",
"process_cuckoo_reports.mp_get_all_files_api_sequences",
"matplotlib.pyplot.yticks",
"FeatureProcessing.levenshtein_distance_dp",
"sklearn.cluster.AgglomerativeClustering",
"matplotlib.pyplot.xticks",
"matplotlib.pyplot.subplots",
"my_sorter.optimal_sort",
"sklearn.metrics.silhouette_score",
"multiprocessing.Pool",
"sklearn.cluster.OPTICS",
"matplotlib.pyplot.ylabel",
"matplotlib.pyplot.grid",
"matplotlib.pyplot.subplot",
"numpy.count_nonzero",
"matplotlib.pyplot.plot",
"matplotlib.pyplot.hist",
"time.time",
"numpy.where",
"numpy.array",
"matplotlib.pyplot.xlabel",
"multiprocessing.freeze_support",
"matplotlib.pyplot.savefig"
] |
[((1607, 1616), 'multiprocessing.Pool', 'mp.Pool', ([], {}), '()\n', (1614, 1616), True, 'import multiprocessing as mp\n'), ((2641, 2650), 'matplotlib.pyplot.clf', 'plt.clf', ([], {}), '()\n', (2648, 2650), True, 'import matplotlib.pyplot as plt\n'), ((2665, 2708), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'figsize': '(20, 20)', 'sharey': '(True)'}), '(figsize=(20, 20), sharey=True)\n', (2677, 2708), True, 'import matplotlib.pyplot as plt\n'), ((2949, 2990), 'matplotlib.pyplot.suptitle', 'plt.suptitle', (['"""Clustered Distance Matrix"""'], {}), "('Clustered Distance Matrix')\n", (2961, 2990), True, 'import matplotlib.pyplot as plt\n'), ((2995, 3009), 'matplotlib.pyplot.grid', 'plt.grid', (['(True)'], {}), '(True)\n', (3003, 3009), True, 'import matplotlib.pyplot as plt\n'), ((3014, 3030), 'matplotlib.pyplot.title', 'plt.title', (['title'], {}), '(title)\n', (3023, 3030), True, 'import matplotlib.pyplot as plt\n'), ((3035, 3063), 'matplotlib.pyplot.xticks', 'plt.xticks', (['ticks'], {'color': '"""w"""'}), "(ticks, color='w')\n", (3045, 3063), True, 'import matplotlib.pyplot as plt\n'), ((3068, 3096), 'matplotlib.pyplot.yticks', 'plt.yticks', (['ticks'], {'color': '"""w"""'}), "(ticks, color='w')\n", (3078, 3096), True, 'import matplotlib.pyplot as plt\n'), ((3268, 3279), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (3277, 3279), True, 'import matplotlib.pyplot as plt\n'), ((4392, 4401), 'matplotlib.pyplot.clf', 'plt.clf', ([], {}), '()\n', (4399, 4401), True, 'import matplotlib.pyplot as plt\n'), ((4406, 4424), 'matplotlib.pyplot.plot', 'plt.plot', (['ncs', 'mss'], {}), '(ncs, mss)\n', (4414, 4424), True, 'import matplotlib.pyplot as plt\n'), ((4429, 4490), 'matplotlib.pyplot.title', 'plt.title', (['"""HIERARCHICAL - Cluster count to Silhouette score"""'], {}), "('HIERARCHICAL - Cluster count to Silhouette score')\n", (4438, 4490), True, 'import matplotlib.pyplot as plt\n'), ((4495, 4527), 'matplotlib.pyplot.xlabel', 'plt.xlabel', 
(['"""Number of clusters"""'], {}), "('Number of clusters')\n", (4505, 4527), True, 'import matplotlib.pyplot as plt\n'), ((4532, 4562), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Silhouette score"""'], {}), "('Silhouette score')\n", (4542, 4562), True, 'import matplotlib.pyplot as plt\n'), ((4631, 4645), 'matplotlib.pyplot.grid', 'plt.grid', (['(True)'], {}), '(True)\n', (4639, 4645), True, 'import matplotlib.pyplot as plt\n'), ((4650, 4747), 'matplotlib.pyplot.savefig', 'plt.savefig', (["('images/EVAL_HIERARCHICAL_' + api_call_description + '.png')"], {'bbox_inches': '"""tight"""'}), "('images/EVAL_HIERARCHICAL_' + api_call_description + '.png',\n bbox_inches='tight')\n", (4661, 4747), True, 'import matplotlib.pyplot as plt\n'), ((4748, 4759), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (4757, 4759), True, 'import matplotlib.pyplot as plt\n'), ((5062, 5108), 'my_sorter.optimal_sort', 'my_sort.optimal_sort', (['dist_matrix', 'best_labels'], {}), '(dist_matrix, best_labels)\n', (5082, 5108), True, 'import my_sorter as my_sort\n'), ((5518, 5527), 'matplotlib.pyplot.clf', 'plt.clf', ([], {}), '()\n', (5525, 5527), True, 'import matplotlib.pyplot as plt\n'), ((5532, 5560), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(20, 10)'}), '(figsize=(20, 10))\n', (5542, 5560), True, 'import matplotlib.pyplot as plt\n'), ((5572, 5585), 'matplotlib.pyplot.subplot', 'plt.subplot', ([], {}), '()\n', (5583, 5585), True, 'import matplotlib.pyplot as plt\n'), ((6138, 6168), 'matplotlib.pyplot.title', 'plt.title', (['"""Reachability plot"""'], {}), "('Reachability plot')\n", (6147, 6168), True, 'import matplotlib.pyplot as plt\n'), ((6173, 6194), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Samples"""'], {}), "('Samples')\n", (6183, 6194), True, 'import matplotlib.pyplot as plt\n'), ((6199, 6244), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Reachability (epsilon distance)"""'], {}), "('Reachability (epsilon distance)')\n", (6209, 6244), True, 
'import matplotlib.pyplot as plt\n'), ((6249, 6279), 'matplotlib.pyplot.title', 'plt.title', (['"""Reachability Plot"""'], {}), "('Reachability Plot')\n", (6258, 6279), True, 'import matplotlib.pyplot as plt\n'), ((6372, 6383), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (6381, 6383), True, 'import matplotlib.pyplot as plt\n'), ((8598, 8607), 'matplotlib.pyplot.clf', 'plt.clf', ([], {}), '()\n', (8605, 8607), True, 'import matplotlib.pyplot as plt\n'), ((8624, 8638), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {}), '()\n', (8636, 8638), True, 'import matplotlib.pyplot as plt\n'), ((9828, 9886), 'matplotlib.pyplot.title', 'plt.title', (['"""OPTICS - Min_samples size to Silhouette score"""'], {}), "('OPTICS - Min_samples size to Silhouette score')\n", (9837, 9886), True, 'import matplotlib.pyplot as plt\n'), ((9891, 9982), 'matplotlib.pyplot.savefig', 'plt.savefig', (["('images/EVAL_OPTICS_' + api_call_description + '.png')"], {'bbox_inches': '"""tight"""'}), "('images/EVAL_OPTICS_' + api_call_description + '.png',\n bbox_inches='tight')\n", (9902, 9982), True, 'import matplotlib.pyplot as plt\n'), ((9983, 9994), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (9992, 9994), True, 'import matplotlib.pyplot as plt\n'), ((10571, 10617), 'my_sorter.optimal_sort', 'my_sort.optimal_sort', (['dist_matrix', 'best_labels'], {}), '(dist_matrix, best_labels)\n', (10591, 10617), True, 'import my_sorter as my_sort\n'), ((10637, 10672), 'numpy.count_nonzero', 'np.count_nonzero', (['(best_labels == -1)'], {}), '(best_labels == -1)\n', (10653, 10672), True, 'import numpy as np\n'), ((11385, 11438), 'sklearn.cluster.OPTICS', 'OPTICS', ([], {'metric': '"""precomputed"""', 'min_samples': 'min_sampels'}), "(metric='precomputed', min_samples=min_sampels)\n", (11391, 11438), False, 'from sklearn.cluster import OPTICS\n'), ((11572, 11637), 'sklearn.metrics.silhouette_score', 'metrics.silhouette_score', (['dm'], {'labels': 'labels', 'metric': 
'"""precomputed"""'}), "(dm, labels=labels, metric='precomputed')\n", (11596, 11637), False, 'from sklearn import metrics\n'), ((11891, 11923), 'my_sorter.optimal_sort', 'my_sort.optimal_sort', (['dm', 'labels'], {}), '(dm, labels)\n', (11911, 11923), True, 'import my_sorter as my_sort\n'), ((12295, 12389), 'sklearn.cluster.AgglomerativeClustering', 'AgglomerativeClustering', ([], {'n_clusters': 'n_clusters', 'affinity': '"""precomputed"""', 'linkage': '"""complete"""'}), "(n_clusters=n_clusters, affinity='precomputed',\n linkage='complete')\n", (12318, 12389), False, 'from sklearn.cluster import AgglomerativeClustering\n'), ((12441, 12506), 'sklearn.metrics.silhouette_score', 'metrics.silhouette_score', (['dm'], {'labels': 'labels', 'metric': '"""precomputed"""'}), "(dm, labels=labels, metric='precomputed')\n", (12465, 12506), False, 'from sklearn import metrics\n'), ((12643, 12675), 'my_sorter.optimal_sort', 'my_sort.optimal_sort', (['dm', 'labels'], {}), '(dm, labels)\n', (12663, 12675), True, 'import my_sorter as my_sort\n'), ((14049, 14060), 'time.time', 'time.time', ([], {}), '()\n', (14058, 14060), False, 'import time\n'), ((14066, 14085), 'multiprocessing.freeze_support', 'mp.freeze_support', ([], {}), '()\n', (14083, 14085), True, 'import multiprocessing as mp\n'), ((14236, 14270), 'os.path.isfile', 'os.path.isfile', (['stored_dist_matrix'], {}), '(stored_dist_matrix)\n', (14250, 14270), False, 'import os\n'), ((15918, 15929), 'time.time', 'time.time', ([], {}), '()\n', (15927, 15929), False, 'import time\n'), ((672, 714), 'FeatureProcessing.levenshtein_distance_dp', 'fp.levenshtein_distance_dp', (['data_x', 'data_y'], {}), '(data_x, data_y)\n', (698, 714), True, 'import FeatureProcessing as fp\n'), ((950, 992), 'FeatureProcessing.levenshtein_distance_dp', 'fp.levenshtein_distance_dp', (['data_x', 'data_y'], {}), '(data_x, data_y)\n', (976, 992), True, 'import FeatureProcessing as fp\n'), ((1213, 1255), 'FeatureProcessing.levenshtein_distance_dp', 
'fp.levenshtein_distance_dp', (['data_x', 'data_y'], {}), '(data_x, data_y)\n', (1239, 1255), True, 'import FeatureProcessing as fp\n'), ((3668, 3755), 'sklearn.cluster.AgglomerativeClustering', 'AgglomerativeClustering', ([], {'n_clusters': 'n_c', 'affinity': '"""precomputed"""', 'linkage': '"""average"""'}), "(n_clusters=n_c, affinity='precomputed', linkage=\n 'average')\n", (3691, 3755), False, 'from sklearn.cluster import AgglomerativeClustering\n'), ((3823, 3897), 'sklearn.metrics.silhouette_score', 'metrics.silhouette_score', (['dist_matrix'], {'labels': 'labels', 'metric': '"""precomputed"""'}), "(dist_matrix, labels=labels, metric='precomputed')\n", (3847, 3897), False, 'from sklearn import metrics\n'), ((4578, 4598), 'numpy.arange', 'np.arange', (['(0)', '(1)', '(0.1)'], {}), '(0, 1, 0.1)\n', (4587, 4598), True, 'import numpy as np\n'), ((5957, 5994), 'numpy.full_like', 'np.full_like', (['space', '(2.0)'], {'dtype': 'float'}), '(space, 2.0, dtype=float)\n', (5969, 5994), True, 'import numpy as np\n'), ((6032, 6069), 'numpy.full_like', 'np.full_like', (['space', '(0.5)'], {'dtype': 'float'}), '(space, 0.5, dtype=float)\n', (6044, 6069), True, 'import numpy as np\n'), ((8738, 8757), 'numpy.arange', 'np.arange', (['(0)', '(21)', '(1)'], {}), '(0, 21, 1)\n', (8747, 8757), True, 'import numpy as np\n'), ((8784, 8804), 'numpy.arange', 'np.arange', (['(0)', '(1)', '(0.1)'], {}), '(0, 1, 0.1)\n', (8793, 8804), True, 'import numpy as np\n'), ((11055, 11077), 'numpy.where', 'np.where', (['(labels == -1)'], {}), '(labels == -1)\n', (11063, 11077), True, 'import numpy as np\n'), ((11128, 11150), 'numpy.where', 'np.where', (['(labels == -1)'], {}), '(labels == -1)\n', (11136, 11150), True, 'import numpy as np\n'), ((11200, 11222), 'numpy.where', 'np.where', (['(labels == -1)'], {}), '(labels == -1)\n', (11208, 11222), True, 'import numpy as np\n'), ((13416, 13425), 'matplotlib.pyplot.clf', 'plt.clf', ([], {}), '()\n', (13423, 13425), True, 'import matplotlib.pyplot as 
plt\n'), ((13434, 13462), 'matplotlib.pyplot.hist', 'plt.hist', (['dist_list'], {'bins': '(50)'}), '(dist_list, bins=50)\n', (13442, 13462), True, 'import matplotlib.pyplot as plt\n'), ((13582, 13640), 'matplotlib.pyplot.title', 'plt.title', (["('Frequency of distances with: ' + labels[dm_id])"], {}), "('Frequency of distances with: ' + labels[dm_id])\n", (13591, 13640), True, 'import matplotlib.pyplot as plt\n'), ((13680, 13694), 'matplotlib.pyplot.grid', 'plt.grid', (['(True)'], {}), '(True)\n', (13688, 13694), True, 'import matplotlib.pyplot as plt\n'), ((13703, 13767), 'matplotlib.pyplot.savefig', 'plt.savefig', (["('images/Dist_frequencies_' + labels[dm_id] + '.png')"], {}), "('images/Dist_frequencies_' + labels[dm_id] + '.png')\n", (13714, 13767), True, 'import matplotlib.pyplot as plt\n'), ((13776, 13787), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (13785, 13787), True, 'import matplotlib.pyplot as plt\n'), ((14416, 14429), 'json.loads', 'json.loads', (['j'], {}), '(j)\n', (14426, 14429), False, 'import json\n'), ((14442, 14456), 'numpy.array', 'np.array', (['data'], {}), '(data)\n', (14450, 14456), True, 'import numpy as np\n'), ((14631, 14674), 'process_cuckoo_reports.mp_get_all_files_api_sequences', 'pcr.mp_get_all_files_api_sequences', (['workdir'], {}), '(workdir)\n', (14665, 14674), True, 'import process_cuckoo_reports as pcr\n'), ((15266, 15284), 'json.dumps', 'json.dumps', (['m_list'], {}), '(m_list)\n', (15276, 15284), False, 'import json\n'), ((15440, 15456), 'numpy.array', 'np.array', (['m_list'], {}), '(m_list)\n', (15448, 15456), True, 'import numpy as np\n'), ((6808, 6852), 'sklearn.cluster.OPTICS', 'OPTICS', ([], {'metric': '"""precomputed"""', 'min_samples': 'ms'}), "(metric='precomputed', min_samples=ms)\n", (6814, 6852), False, 'from sklearn.cluster import OPTICS\n'), ((7139, 7213), 'sklearn.metrics.silhouette_score', 'metrics.silhouette_score', (['dist_matrix'], {'labels': 'labels', 'metric': '"""precomputed"""'}), 
"(dist_matrix, labels=labels, metric='precomputed')\n", (7163, 7213), False, 'from sklearn import metrics\n'), ((8074, 8162), 'sklearn.metrics.silhouette_score', 'metrics.silhouette_score', (['no_noise_dm'], {'labels': 'no_noise_labels', 'metric': '"""precomputed"""'}), "(no_noise_dm, labels=no_noise_labels, metric=\n 'precomputed')\n", (8098, 8162), False, 'from sklearn import metrics\n'), ((14887, 14898), 'time.time', 'time.time', ([], {}), '()\n', (14896, 14898), False, 'import time\n'), ((14961, 14972), 'time.time', 'time.time', ([], {}), '()\n', (14970, 14972), False, 'import time\n'), ((7407, 7437), 'numpy.count_nonzero', 'np.count_nonzero', (['(labels == -1)'], {}), '(labels == -1)\n', (7423, 7437), True, 'import numpy as np\n'), ((10313, 10348), 'numpy.count_nonzero', 'np.count_nonzero', (['(best_labels == -1)'], {}), '(best_labels == -1)\n', (10329, 10348), True, 'import numpy as np\n'), ((13471, 13480), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (13478, 13480), True, 'import matplotlib.pyplot as plt\n')]
|
"""
SAMS umbrella sampling for DDR1 kinase DFG loop flip.
"""
__author__ = '<NAME>'
################################################################################
# IMPORTS
################################################################################
import os, os.path
import sys, math
import numpy as np
import time
from simtk import openmm, unit
from simtk.openmm import app
import mdtraj as md
import netCDF4
from sams import ThermodynamicState
################################################################################
# MAJOR SETTINGS AND PARAMETERS
################################################################################
# Define paths for explicitly-solvated complex
system_xml_filename = 'setup/system.xml'
state_xml_filename = 'setup/state_DFG_IN.xml'
state_pdb_filename = 'setup/state_DFG_IN.pdb'
# NOTE(review): the module docstring says DDR1, but this path points at an
# Abl-STI complex -- confirm which system is actually intended.
pdb_filename = 'setup/systems/Abl-STI/complex.pdb'
# Specify umbrellas for distance restraint
umbrella_sigma = 5*unit.degrees # umbrella stddev width in absence of external PMF (no Jacobian)
umbrella_atoms = [2817, 2815, 2825, 2830] # atoms involved in umbrella restraint
# Reference coordinates for the four dihedral atoms (copied from the PDB):
#ATOM 2818 CB ALA A 180 1.927 52.416 41.379 1.00 0.00 C
#ATOM 2816 CA ALA A 180 3.319 52.098 40.823 1.00 0.00 C
#ATOM 2826 CA ASP A 181 5.071 50.442 43.834 1.00 0.00 C
#ATOM 2831 CG ASP A 181 2.928 49.040 44.337 1.00 0.00 C
min_dihedral = -180*unit.degrees
max_dihedral = +180*unit.degrees
dihedral_unit = unit.degrees
# One umbrella window per sigma across the full dihedral range, plus two.
numbrellas = int((max_dihedral - min_dihedral) / umbrella_sigma + 2)
# Window centers, evenly spaced over [min_dihedral, max_dihedral].
umbrella_values = np.linspace(min_dihedral/dihedral_unit, max_dihedral/dihedral_unit, numbrellas) * dihedral_unit
# Output SAMS filename
netcdf_filename = 'output.nc'
pdb_trajectory_filename = 'trajectory.pdb' # first frame of trajectory to be written at end
dcd_trajectory_filename = 'trajectory.dcd' # DCD format for trajectory to be written at end
# Simulation conditions
temperature = 298.0 * unit.kelvin
pressure = 1.0 * unit.atmospheres
collision_rate = 1.0 / unit.picoseconds
timestep = 2.0 * unit.femtoseconds
#minimize = True # if True, will minimize the structure before simulation (highly recommended)
minimize = False
################################################################################
# SUBROUTINES
################################################################################
def read_file(filename):
    """Return the entire contents of a text file as a single string.

    Parameters
    ----------
    filename : str
        Path of the file to read.

    Returns
    -------
    str
        The file's contents.
    """
    # Use a context manager so the handle is closed even if read() raises;
    # the original opened the file and never closed it (resource leak).
    with open(filename, 'r') as infile:
        return infile.read()
################################################################################
# MAIN
################################################################################
from sams import kB
kT = kB * temperature
beta = 1.0 / kT
# Load system
print('Loading system...')
system = openmm.XmlSerializer.deserialize(read_file(system_xml_filename))
pdbfile = app.PDBFile(state_pdb_filename)
topology = pdbfile.topology
state = openmm.XmlSerializer.deserialize(read_file(state_xml_filename))
positions = state.getPositions(asNumpy=True)
box_vectors = state.getPeriodicBoxVectors()
print('System has %d atoms.' % system.getNumParticles())
# Map force class name -> force instance so we can check for an existing barostat.
forces = { force.__class__.__name__ : force for force in system.getForces() }
if (pressure is not None) and ('MonteCarloBarostat' not in forces):
    # Add a barostat
    print("Adding barostat...")
    barostat = openmm.MonteCarloBarostat(pressure, temperature)
    # BUGFIX: the original called reference_system.addForce(barostat), but
    # `reference_system` is never defined anywhere in this script (NameError
    # as soon as this branch runs); the deserialized `system` is the object
    # the barostat must be attached to.
    system.addForce(barostat)
else:
    # TODO: Update barostat
    pass
# Add umbrella restraint with global variable to control umbrella position
print('umbrella schedule for dihedral defined by atoms %s : %s' % (str(umbrella_atoms), str(umbrella_values)))
from numpy import pi
# Harmonic-on-a-circle restraint: penalize the periodic angular distance
# between the torsion and the umbrella center umbrella_r0.
energy_function = '- (umbrella_K/2) * cos(min(dtheta, 2*pi-dtheta)); dtheta = abs(theta-umbrella_r0);'
energy_function += 'pi = %f;' % pi
umbrella_force = openmm.CustomTorsionForce(energy_function)
umbrella_force.addGlobalParameter('umbrella_K', 0.0)
umbrella_force.addGlobalParameter('umbrella_r0', 0.0)
umbrella_force.addTorsion(*umbrella_atoms, [])
# Spring constant chosen so the restraint width matches umbrella_sigma at kT.
umbrella_K = kT/umbrella_sigma**2
system.addForce(umbrella_force)
# Create thermodynamic states
thermodynamic_states = list()
# Umbrella off state (K = 0 disables the restraint entirely)
parameters = {
    'umbrella_K' : 0.0, 'umbrella_r0' : 0.0, # umbrella parameters
}
thermodynamic_states.append( ThermodynamicState(system=system, temperature=temperature, pressure=pressure, parameters=parameters) )
# Umbrella on state: one ThermodynamicState per umbrella window center.
# NOTE(review): `alchemical_lambda` was assigned here in the original but is
# never read anywhere in this script; dropped as dead code.
for umbrella_value in umbrella_values:
    parameters = {
        'umbrella_K' : umbrella_K.value_in_unit_system(unit.md_unit_system), 'umbrella_r0' : umbrella_value.value_in_unit_system(unit.md_unit_system), # umbrella parameters
    }
    thermodynamic_states.append( ThermodynamicState(system=system, temperature=temperature, pressure=pressure, parameters=parameters) )
# Analyze
from sams import analysis
# States
from collections import namedtuple
MockTestsystem = namedtuple('MockTestsystem', ['description', 'thermodynamic_states'])
testsystem = MockTestsystem(description='DDR1 umbrella states', thermodynamic_states=thermodynamic_states)
analysis.analyze(netcdf_filename, testsystem, 'output.pdf')
# Write trajectory
reference_pdb_filename = 'trajectory.pdb'
trajectory_filename = 'trajectory.xtc'
analysis.write_trajectory(netcdf_filename, topology, reference_pdb_filename, trajectory_filename)
|
[
"sams.analysis.analyze",
"simtk.openmm.CustomTorsionForce",
"simtk.openmm.MonteCarloBarostat",
"sams.ThermodynamicState",
"collections.namedtuple",
"numpy.linspace",
"simtk.openmm.app.PDBFile",
"sams.analysis.write_trajectory"
] |
[((2874, 2905), 'simtk.openmm.app.PDBFile', 'app.PDBFile', (['state_pdb_filename'], {}), '(state_pdb_filename)\n', (2885, 2905), False, 'from simtk.openmm import app\n'), ((3863, 3905), 'simtk.openmm.CustomTorsionForce', 'openmm.CustomTorsionForce', (['energy_function'], {}), '(energy_function)\n', (3888, 3905), False, 'from simtk import openmm, unit\n'), ((4941, 5010), 'collections.namedtuple', 'namedtuple', (['"""MockTestsystem"""', "['description', 'thermodynamic_states']"], {}), "('MockTestsystem', ['description', 'thermodynamic_states'])\n", (4951, 5010), False, 'from collections import namedtuple\n'), ((5118, 5177), 'sams.analysis.analyze', 'analysis.analyze', (['netcdf_filename', 'testsystem', '"""output.pdf"""'], {}), "(netcdf_filename, testsystem, 'output.pdf')\n", (5134, 5177), False, 'from sams import analysis\n'), ((5278, 5379), 'sams.analysis.write_trajectory', 'analysis.write_trajectory', (['netcdf_filename', 'topology', 'reference_pdb_filename', 'trajectory_filename'], {}), '(netcdf_filename, topology, reference_pdb_filename,\n trajectory_filename)\n', (5303, 5379), False, 'from sams import analysis\n'), ((1621, 1708), 'numpy.linspace', 'np.linspace', (['(min_dihedral / dihedral_unit)', '(max_dihedral / dihedral_unit)', 'numbrellas'], {}), '(min_dihedral / dihedral_unit, max_dihedral / dihedral_unit,\n numbrellas)\n', (1632, 1708), True, 'import numpy as np\n'), ((3367, 3415), 'simtk.openmm.MonteCarloBarostat', 'openmm.MonteCarloBarostat', (['pressure', 'temperature'], {}), '(pressure, temperature)\n', (3392, 3415), False, 'from simtk import openmm, unit\n'), ((4322, 4427), 'sams.ThermodynamicState', 'ThermodynamicState', ([], {'system': 'system', 'temperature': 'temperature', 'pressure': 'pressure', 'parameters': 'parameters'}), '(system=system, temperature=temperature, pressure=\n pressure, parameters=parameters)\n', (4340, 4427), False, 'from sams import ThermodynamicState\n'), ((4740, 4845), 'sams.ThermodynamicState', 'ThermodynamicState', ([], 
{'system': 'system', 'temperature': 'temperature', 'pressure': 'pressure', 'parameters': 'parameters'}), '(system=system, temperature=temperature, pressure=\n pressure, parameters=parameters)\n', (4758, 4845), False, 'from sams import ThermodynamicState\n')]
|
import numpy as np
from numpy.random import beta
import sys
#sys.path.append('../h5hep')
#from write import *
import hepfile
################################################################################
def calc_energy(mass, px, py, pz):
    """Relativistic energy E = sqrt(m^2 + |p|^2) in natural units (c = 1)."""
    momentum_sq = px * px + py * py + pz * pz
    return np.sqrt(mass * mass + momentum_sq)
################################################################################
# Build the hepfile schema: one group per physics-object type, each with a
# per-event counter dataset and per-object kinematic datasets.
data = hepfile.initialize()
hepfile.create_group(data,'jet',counter='njet')
hepfile.create_dataset(data,['e','px','py','pz','btag'],group='jet',dtype=float)
hepfile.create_group(data,'muon',counter='nmuon')
hepfile.create_dataset(data,['e','px','py','pz','q'],group='muon',dtype=float)
hepfile.create_group(data,'electron',counter='nelectron')
hepfile.create_dataset(data,['e','px','py','pz','q'],group='electron',dtype=float)
hepfile.create_group(data,'photon',counter='nphoton')
hepfile.create_dataset(data,['e','px','py','pz'],group='photon',dtype=float)
hepfile.create_group(data,'MET',counter='nMET')
hepfile.create_dataset(data,['pt','phi'],group='MET',dtype=float)
# NOTE(review): the MET group is declared above but never filled inside the
# event loop below -- confirm whether MET values were meant to be generated.
# A single "bucket" accumulates one event's values before it is packed.
event = hepfile.create_single_bucket(data)
nentries = 10000
#print(data)
#print(event)
#'''
for i in range(0,nentries):
    if i%1000==0:
        print(i)  # progress indicator every 1000 events
    # Jets: random multiplicity in [0, 10); momenta drawn from scaled
    # Beta(2, 9) distributions; btag is uniform in [0, 1).
    njet = np.random.randint(10)
    event['jet/njet'] = njet
    for n in range(njet):
        px = 300*beta(2,9)
        py = 300*beta(2,9)
        pz = 300*beta(2,9)
        mass = 5*beta(2,9)
        energy = calc_energy(mass,px,py,pz)
        event['jet/px'].append(px)
        event['jet/py'].append(py)
        event['jet/pz'].append(pz)
        event['jet/e'].append(energy)
        event['jet/btag'].append(np.random.random())
    # Muons: fixed mass, random charge of +1 or -1.
    nmuon = np.random.randint(10)
    event['muon/nmuon'] = nmuon
    for n in range(nmuon):
        px = 300*beta(2,9)
        py = 300*beta(2,9)
        pz = 300*beta(2,9)
        mass = 0.105
        energy = calc_energy(mass,px,py,pz)
        event['muon/px'].append(px)
        event['muon/py'].append(py)
        event['muon/pz'].append(pz)
        event['muon/e'].append(energy)
        # 2*randint(2) - 1 maps {0, 1} -> {-1, +1}
        event['muon/q'].append(2*np.random.randint(2) - 1)
    # Electrons: fixed mass, random charge of +1 or -1.
    nelectron = np.random.randint(10)
    event['electron/nelectron'] = nelectron
    for n in range(nelectron):
        px = 300*beta(2,9)
        py = 300*beta(2,9)
        pz = 300*beta(2,9)
        mass = 0.000511
        energy = calc_energy(mass,px,py,pz)
        event['electron/px'].append(px)
        event['electron/py'].append(py)
        event['electron/pz'].append(pz)
        event['electron/e'].append(energy)
        event['electron/q'].append(2*np.random.randint(2) - 1)
    # Photons: massless, no charge dataset.
    nphoton = np.random.randint(10)
    event['photon/nphoton'] = nphoton
    for n in range(nphoton):
        px = 300*beta(2,9)
        py = 300*beta(2,9)
        pz = 300*beta(2,9)
        mass = 0.0
        energy = calc_energy(mass,px,py,pz)
        event['photon/px'].append(px)
        event['photon/py'].append(py)
        event['photon/pz'].append(pz)
        event['photon/e'].append(energy)
    # Flush this event's bucket into the growing dataset.
    hepfile.pack(data,event)
print("Writing the file...")
#hdfile = write_to_file('output.hdf5',data)
hdfile = hepfile.write_to_file('HEP_random_file_LARGE.hdf5',data,comp_type='gzip',comp_opts=9)
#'''
|
[
"hepfile.initialize",
"hepfile.pack",
"numpy.random.beta",
"hepfile.create_single_bucket",
"hepfile.write_to_file",
"numpy.random.randint",
"numpy.random.random",
"hepfile.create_group",
"numpy.sqrt",
"hepfile.create_dataset"
] |
[((405, 425), 'hepfile.initialize', 'hepfile.initialize', ([], {}), '()\n', (423, 425), False, 'import hepfile\n'), ((427, 476), 'hepfile.create_group', 'hepfile.create_group', (['data', '"""jet"""'], {'counter': '"""njet"""'}), "(data, 'jet', counter='njet')\n", (447, 476), False, 'import hepfile\n'), ((475, 566), 'hepfile.create_dataset', 'hepfile.create_dataset', (['data', "['e', 'px', 'py', 'pz', 'btag']"], {'group': '"""jet"""', 'dtype': 'float'}), "(data, ['e', 'px', 'py', 'pz', 'btag'], group='jet',\n dtype=float)\n", (497, 566), False, 'import hepfile\n'), ((557, 608), 'hepfile.create_group', 'hepfile.create_group', (['data', '"""muon"""'], {'counter': '"""nmuon"""'}), "(data, 'muon', counter='nmuon')\n", (577, 608), False, 'import hepfile\n'), ((607, 696), 'hepfile.create_dataset', 'hepfile.create_dataset', (['data', "['e', 'px', 'py', 'pz', 'q']"], {'group': '"""muon"""', 'dtype': 'float'}), "(data, ['e', 'px', 'py', 'pz', 'q'], group='muon',\n dtype=float)\n", (629, 696), False, 'import hepfile\n'), ((687, 746), 'hepfile.create_group', 'hepfile.create_group', (['data', '"""electron"""'], {'counter': '"""nelectron"""'}), "(data, 'electron', counter='nelectron')\n", (707, 746), False, 'import hepfile\n'), ((745, 838), 'hepfile.create_dataset', 'hepfile.create_dataset', (['data', "['e', 'px', 'py', 'pz', 'q']"], {'group': '"""electron"""', 'dtype': 'float'}), "(data, ['e', 'px', 'py', 'pz', 'q'], group='electron',\n dtype=float)\n", (767, 838), False, 'import hepfile\n'), ((829, 884), 'hepfile.create_group', 'hepfile.create_group', (['data', '"""photon"""'], {'counter': '"""nphoton"""'}), "(data, 'photon', counter='nphoton')\n", (849, 884), False, 'import hepfile\n'), ((883, 970), 'hepfile.create_dataset', 'hepfile.create_dataset', (['data', "['e', 'px', 'py', 'pz']"], {'group': '"""photon"""', 'dtype': 'float'}), "(data, ['e', 'px', 'py', 'pz'], group='photon', dtype\n =float)\n", (905, 970), False, 'import hepfile\n'), ((961, 1010), 
'hepfile.create_group', 'hepfile.create_group', (['data', '"""MET"""'], {'counter': '"""nMET"""'}), "(data, 'MET', counter='nMET')\n", (981, 1010), False, 'import hepfile\n'), ((1009, 1078), 'hepfile.create_dataset', 'hepfile.create_dataset', (['data', "['pt', 'phi']"], {'group': '"""MET"""', 'dtype': 'float'}), "(data, ['pt', 'phi'], group='MET', dtype=float)\n", (1031, 1078), False, 'import hepfile\n'), ((1084, 1118), 'hepfile.create_single_bucket', 'hepfile.create_single_bucket', (['data'], {}), '(data)\n', (1112, 1118), False, 'import hepfile\n'), ((3131, 3223), 'hepfile.write_to_file', 'hepfile.write_to_file', (['"""HEP_random_file_LARGE.hdf5"""', 'data'], {'comp_type': '"""gzip"""', 'comp_opts': '(9)'}), "('HEP_random_file_LARGE.hdf5', data, comp_type='gzip',\n comp_opts=9)\n", (3152, 3223), False, 'import hepfile\n'), ((253, 303), 'numpy.sqrt', 'np.sqrt', (['(mass * mass + px * px + py * py + pz * pz)'], {}), '(mass * mass + px * px + py * py + pz * pz)\n', (260, 303), True, 'import numpy as np\n'), ((1247, 1268), 'numpy.random.randint', 'np.random.randint', (['(10)'], {}), '(10)\n', (1264, 1268), True, 'import numpy as np\n'), ((1685, 1706), 'numpy.random.randint', 'np.random.randint', (['(10)'], {}), '(10)\n', (1702, 1706), True, 'import numpy as np\n'), ((2135, 2156), 'numpy.random.randint', 'np.random.randint', (['(10)'], {}), '(10)\n', (2152, 2156), True, 'import numpy as np\n'), ((2622, 2643), 'numpy.random.randint', 'np.random.randint', (['(10)'], {}), '(10)\n', (2639, 2643), True, 'import numpy as np\n'), ((3023, 3048), 'hepfile.pack', 'hepfile.pack', (['data', 'event'], {}), '(data, event)\n', (3035, 3048), False, 'import hepfile\n'), ((1341, 1351), 'numpy.random.beta', 'beta', (['(2)', '(9)'], {}), '(2, 9)\n', (1345, 1351), False, 'from numpy.random import beta\n'), ((1368, 1378), 'numpy.random.beta', 'beta', (['(2)', '(9)'], {}), '(2, 9)\n', (1372, 1378), False, 'from numpy.random import beta\n'), ((1395, 1405), 'numpy.random.beta', 'beta', 
(['(2)', '(9)'], {}), '(2, 9)\n', (1399, 1405), False, 'from numpy.random import beta\n'), ((1422, 1432), 'numpy.random.beta', 'beta', (['(2)', '(9)'], {}), '(2, 9)\n', (1426, 1432), False, 'from numpy.random import beta\n'), ((1652, 1670), 'numpy.random.random', 'np.random.random', ([], {}), '()\n', (1668, 1670), True, 'import numpy as np\n'), ((1783, 1793), 'numpy.random.beta', 'beta', (['(2)', '(9)'], {}), '(2, 9)\n', (1787, 1793), False, 'from numpy.random import beta\n'), ((1810, 1820), 'numpy.random.beta', 'beta', (['(2)', '(9)'], {}), '(2, 9)\n', (1814, 1820), False, 'from numpy.random import beta\n'), ((1837, 1847), 'numpy.random.beta', 'beta', (['(2)', '(9)'], {}), '(2, 9)\n', (1841, 1847), False, 'from numpy.random import beta\n'), ((2249, 2259), 'numpy.random.beta', 'beta', (['(2)', '(9)'], {}), '(2, 9)\n', (2253, 2259), False, 'from numpy.random import beta\n'), ((2276, 2286), 'numpy.random.beta', 'beta', (['(2)', '(9)'], {}), '(2, 9)\n', (2280, 2286), False, 'from numpy.random import beta\n'), ((2303, 2313), 'numpy.random.beta', 'beta', (['(2)', '(9)'], {}), '(2, 9)\n', (2307, 2313), False, 'from numpy.random import beta\n'), ((2728, 2738), 'numpy.random.beta', 'beta', (['(2)', '(9)'], {}), '(2, 9)\n', (2732, 2738), False, 'from numpy.random import beta\n'), ((2755, 2765), 'numpy.random.beta', 'beta', (['(2)', '(9)'], {}), '(2, 9)\n', (2759, 2765), False, 'from numpy.random import beta\n'), ((2782, 2792), 'numpy.random.beta', 'beta', (['(2)', '(9)'], {}), '(2, 9)\n', (2786, 2792), False, 'from numpy.random import beta\n'), ((2092, 2112), 'numpy.random.randint', 'np.random.randint', (['(2)'], {}), '(2)\n', (2109, 2112), True, 'import numpy as np\n'), ((2581, 2601), 'numpy.random.randint', 'np.random.randint', (['(2)'], {}), '(2)\n', (2598, 2601), True, 'import numpy as np\n')]
|
from django.contrib import admin
from django.urls import path, include
from django.conf import settings
from django.conf.urls.static import static
from rest_framework.authtoken.views import obtain_auth_token
urlpatterns = [
    # Django admin site
    path('admin/', admin.site.urls),
    # DRF token auth endpoint: POST credentials, receive an auth token
    path('token-auth/', obtain_auth_token, name='token_auth'),
    # Serve static files at STATIC_URL (static() is a no-op unless DEBUG=True)
] + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
|
[
"django.conf.urls.static.static",
"django.urls.path"
] |
[((326, 389), 'django.conf.urls.static.static', 'static', (['settings.STATIC_URL'], {'document_root': 'settings.STATIC_ROOT'}), '(settings.STATIC_URL, document_root=settings.STATIC_ROOT)\n', (332, 389), False, 'from django.conf.urls.static import static\n'), ((229, 260), 'django.urls.path', 'path', (['"""admin/"""', 'admin.site.urls'], {}), "('admin/', admin.site.urls)\n", (233, 260), False, 'from django.urls import path, include\n'), ((263, 320), 'django.urls.path', 'path', (['"""token-auth/"""', 'obtain_auth_token'], {'name': '"""token_auth"""'}), "('token-auth/', obtain_auth_token, name='token_auth')\n", (267, 320), False, 'from django.urls import path, include\n')]
|
"""Utility functions for scikit-learn-realted implementations"""
import os
from datetime import datetime
from sklearn.externals import joblib
from MLT.tools import prediction_entry as pe
def sklearn_train_model(model, training_data, training_labels, test_data, test_labels, model_savename):
    """Fit *model* on the training split, predict the test split, persist the
    model to disk, and return the results bundled as a PredictionEntry.

    Note: the recorded runtime covers the ``fit`` call only, not prediction.
    """
    fit_start = datetime.now()
    model.fit(training_data, training_labels)
    elapsed = datetime.now() - fit_start

    predictions = model.predict(test_data)
    # predict_proba yields one column per class and the columns sum to one,
    # so keeping only the second column (positive class) loses no information.
    probabilities = model.predict_proba(test_data)[:, 1]

    sklearn_persist_model(model, model_savename)

    return pe.PredictionEntry(test_labels, predictions, probabilities, elapsed)
def sklearn_persist_model(model, model_savename):
    """Serialize *model* to disk as ``<model_savename>.pkl``."""
    target_path = model_savename + '.pkl'
    joblib.dump(model, target_path)
def sklearn_load_model(dirpath, modelname):
    """Deserialize and return the scikit model stored at *dirpath*/*modelname*."""
    return joblib.load(os.path.join(dirpath, modelname))
def sklearn_load_modellist(model_filenames, model_path):
    """Load several scikit models from *model_path*.

    Returns a list of ``(name_without_extension, model)`` tuples, one per
    entry of *model_filenames*, preserving the input order.
    """
    return [
        (os.path.splitext(fname)[0], sklearn_load_model(model_path, fname))
        for fname in model_filenames
    ]
def predict_scikit(single_model, test_data, test_labels):
    """Run an already-fitted model on *test_data*, timing the prediction.

    Returns a PredictionEntry with labels, predictions, positive-class
    probabilities and the elapsed prediction time.
    """
    prediction_start = datetime.now()
    predictions = single_model.predict(test_data)
    # Keep only the positive-class probability column (columns sum to one).
    probabilities = single_model.predict_proba(test_data)[:, 1]
    elapsed = datetime.now() - prediction_start
    return pe.PredictionEntry(test_labels, predictions, probabilities, elapsed)
|
[
"sklearn.externals.joblib.dump",
"os.path.join",
"MLT.tools.prediction_entry.PredictionEntry",
"os.path.splitext",
"sklearn.externals.joblib.load",
"datetime.datetime.now"
] |
[((371, 385), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (383, 385), False, 'from datetime import datetime\n'), ((449, 463), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (461, 463), False, 'from datetime import datetime\n'), ((850, 944), 'MLT.tools.prediction_entry.PredictionEntry', 'pe.PredictionEntry', (['test_labels', 'test_predictions', 'test_predictions_probabilities', 'runtime'], {}), '(test_labels, test_predictions,\n test_predictions_probabilities, runtime)\n', (868, 944), True, 'from MLT.tools import prediction_entry as pe\n'), ((1035, 1078), 'sklearn.externals.joblib.dump', 'joblib.dump', (['model', "(model_savename + '.pkl')"], {}), "(model, model_savename + '.pkl')\n", (1046, 1078), False, 'from sklearn.externals import joblib\n'), ((1181, 1213), 'os.path.join', 'os.path.join', (['dirpath', 'modelname'], {}), '(dirpath, modelname)\n', (1193, 1213), False, 'import os\n'), ((1225, 1248), 'sklearn.externals.joblib.load', 'joblib.load', (['model_path'], {}), '(model_path)\n', (1236, 1248), False, 'from sklearn.externals import joblib\n'), ((1806, 1820), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (1818, 1820), False, 'from datetime import datetime\n'), ((1976, 1990), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (1988, 1990), False, 'from datetime import datetime\n'), ((2040, 2134), 'MLT.tools.prediction_entry.PredictionEntry', 'pe.PredictionEntry', (['test_labels', 'test_predictions', 'test_predictions_probabilities', 'runtime'], {}), '(test_labels, test_predictions,\n test_predictions_probabilities, runtime)\n', (2058, 2134), True, 'from MLT.tools import prediction_entry as pe\n'), ((1461, 1490), 'os.path.splitext', 'os.path.splitext', (['model_fname'], {}), '(model_fname)\n', (1477, 1490), False, 'import os\n')]
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
__all__ = [
'GetPrivateStoreCollectionResult',
'AwaitableGetPrivateStoreCollectionResult',
'get_private_store_collection',
]
@pulumi.output_type
class GetPrivateStoreCollectionResult:
"""
The Collection data structure.
"""
def __init__(__self__, all_subscriptions=None, claim=None, collection_id=None, collection_name=None, enabled=None, id=None, name=None, number_of_offers=None, subscriptions_list=None, system_data=None, type=None):
if all_subscriptions and not isinstance(all_subscriptions, bool):
raise TypeError("Expected argument 'all_subscriptions' to be a bool")
pulumi.set(__self__, "all_subscriptions", all_subscriptions)
if claim and not isinstance(claim, str):
raise TypeError("Expected argument 'claim' to be a str")
pulumi.set(__self__, "claim", claim)
if collection_id and not isinstance(collection_id, str):
raise TypeError("Expected argument 'collection_id' to be a str")
pulumi.set(__self__, "collection_id", collection_id)
if collection_name and not isinstance(collection_name, str):
raise TypeError("Expected argument 'collection_name' to be a str")
pulumi.set(__self__, "collection_name", collection_name)
if enabled and not isinstance(enabled, bool):
raise TypeError("Expected argument 'enabled' to be a bool")
pulumi.set(__self__, "enabled", enabled)
if id and not isinstance(id, str):
raise TypeError("Expected argument 'id' to be a str")
pulumi.set(__self__, "id", id)
if name and not isinstance(name, str):
raise TypeError("Expected argument 'name' to be a str")
pulumi.set(__self__, "name", name)
if number_of_offers and not isinstance(number_of_offers, float):
raise TypeError("Expected argument 'number_of_offers' to be a float")
pulumi.set(__self__, "number_of_offers", number_of_offers)
if subscriptions_list and not isinstance(subscriptions_list, list):
raise TypeError("Expected argument 'subscriptions_list' to be a list")
pulumi.set(__self__, "subscriptions_list", subscriptions_list)
if system_data and not isinstance(system_data, dict):
raise TypeError("Expected argument 'system_data' to be a dict")
pulumi.set(__self__, "system_data", system_data)
if type and not isinstance(type, str):
raise TypeError("Expected argument 'type' to be a str")
pulumi.set(__self__, "type", type)
@property
@pulumi.getter(name="allSubscriptions")
def all_subscriptions(self) -> Optional[bool]:
"""
Indicating whether all subscriptions are selected (=true) or not (=false).
"""
return pulumi.get(self, "all_subscriptions")
@property
@pulumi.getter
def claim(self) -> Optional[str]:
"""
Gets or sets the association with Commercial's Billing Account.
"""
return pulumi.get(self, "claim")
@property
@pulumi.getter(name="collectionId")
def collection_id(self) -> str:
"""
Gets collection Id.
"""
return pulumi.get(self, "collection_id")
@property
@pulumi.getter(name="collectionName")
def collection_name(self) -> Optional[str]:
"""
Gets or sets collection name.
"""
return pulumi.get(self, "collection_name")
@property
@pulumi.getter
def enabled(self) -> Optional[bool]:
"""
Indicating whether the collection is enabled or disabled.
"""
return pulumi.get(self, "enabled")
@property
@pulumi.getter
def id(self) -> str:
"""
The resource ID.
"""
return pulumi.get(self, "id")
@property
@pulumi.getter
def name(self) -> str:
"""
The name of the resource.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="numberOfOffers")
def number_of_offers(self) -> float:
"""
Gets the number of offers associated with the collection.
"""
return pulumi.get(self, "number_of_offers")
@property
@pulumi.getter(name="subscriptionsList")
def subscriptions_list(self) -> Optional[Sequence[str]]:
"""
Gets or sets subscription ids list. Empty list indicates all subscriptions are selected, null indicates no update is done, explicit list indicates the explicit selected subscriptions. On insert, null is considered as bad request
"""
return pulumi.get(self, "subscriptions_list")
@property
@pulumi.getter(name="systemData")
def system_data(self) -> 'outputs.SystemDataResponse':
"""
Metadata pertaining to creation and last modification of the resource
"""
return pulumi.get(self, "system_data")
@property
@pulumi.getter
def type(self) -> str:
"""
The type of the resource.
"""
return pulumi.get(self, "type")
class AwaitableGetPrivateStoreCollectionResult(GetPrivateStoreCollectionResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return GetPrivateStoreCollectionResult(
all_subscriptions=self.all_subscriptions,
claim=self.claim,
collection_id=self.collection_id,
collection_name=self.collection_name,
enabled=self.enabled,
id=self.id,
name=self.name,
number_of_offers=self.number_of_offers,
subscriptions_list=self.subscriptions_list,
system_data=self.system_data,
type=self.type)
def get_private_store_collection(collection_id: Optional[str] = None,
private_store_id: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetPrivateStoreCollectionResult:
"""
The Collection data structure.
:param str collection_id: The collection ID
:param str private_store_id: The store ID - must use the tenant ID
"""
__args__ = dict()
__args__['collectionId'] = collection_id
__args__['privateStoreId'] = private_store_id
if opts is None:
opts = pulumi.InvokeOptions()
if opts.version is None:
opts.version = _utilities.get_version()
__ret__ = pulumi.runtime.invoke('azure-native:marketplace/v20210601:getPrivateStoreCollection', __args__, opts=opts, typ=GetPrivateStoreCollectionResult).value
return AwaitableGetPrivateStoreCollectionResult(
all_subscriptions=__ret__.all_subscriptions,
claim=__ret__.claim,
collection_id=__ret__.collection_id,
collection_name=__ret__.collection_name,
enabled=__ret__.enabled,
id=__ret__.id,
name=__ret__.name,
number_of_offers=__ret__.number_of_offers,
subscriptions_list=__ret__.subscriptions_list,
system_data=__ret__.system_data,
type=__ret__.type)
|
[
"pulumi.get",
"pulumi.getter",
"pulumi.set",
"pulumi.InvokeOptions",
"pulumi.runtime.invoke"
] |
[((2915, 2953), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""allSubscriptions"""'}), "(name='allSubscriptions')\n", (2928, 2953), False, 'import pulumi\n'), ((3394, 3428), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""collectionId"""'}), "(name='collectionId')\n", (3407, 3428), False, 'import pulumi\n'), ((3586, 3622), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""collectionName"""'}), "(name='collectionName')\n", (3599, 3622), False, 'import pulumi\n'), ((4317, 4353), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""numberOfOffers"""'}), "(name='numberOfOffers')\n", (4330, 4353), False, 'import pulumi\n'), ((4557, 4596), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""subscriptionsList"""'}), "(name='subscriptionsList')\n", (4570, 4596), False, 'import pulumi\n'), ((4993, 5025), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""systemData"""'}), "(name='systemData')\n", (5006, 5025), False, 'import pulumi\n'), ((969, 1029), 'pulumi.set', 'pulumi.set', (['__self__', '"""all_subscriptions"""', 'all_subscriptions'], {}), "(__self__, 'all_subscriptions', all_subscriptions)\n", (979, 1029), False, 'import pulumi\n'), ((1156, 1192), 'pulumi.set', 'pulumi.set', (['__self__', '"""claim"""', 'claim'], {}), "(__self__, 'claim', claim)\n", (1166, 1192), False, 'import pulumi\n'), ((1343, 1395), 'pulumi.set', 'pulumi.set', (['__self__', '"""collection_id"""', 'collection_id'], {}), "(__self__, 'collection_id', collection_id)\n", (1353, 1395), False, 'import pulumi\n'), ((1552, 1608), 'pulumi.set', 'pulumi.set', (['__self__', '"""collection_name"""', 'collection_name'], {}), "(__self__, 'collection_name', collection_name)\n", (1562, 1608), False, 'import pulumi\n'), ((1743, 1783), 'pulumi.set', 'pulumi.set', (['__self__', '"""enabled"""', 'enabled'], {}), "(__self__, 'enabled', enabled)\n", (1753, 1783), False, 'import pulumi\n'), ((1901, 1931), 'pulumi.set', 'pulumi.set', (['__self__', '"""id"""', 'id'], {}), "(__self__, 'id', id)\n", (1911, 1931), 
False, 'import pulumi\n'), ((2055, 2089), 'pulumi.set', 'pulumi.set', (['__self__', '"""name"""', 'name'], {}), "(__self__, 'name', name)\n", (2065, 2089), False, 'import pulumi\n'), ((2253, 2311), 'pulumi.set', 'pulumi.set', (['__self__', '"""number_of_offers"""', 'number_of_offers'], {}), "(__self__, 'number_of_offers', number_of_offers)\n", (2263, 2311), False, 'import pulumi\n'), ((2479, 2541), 'pulumi.set', 'pulumi.set', (['__self__', '"""subscriptions_list"""', 'subscriptions_list'], {}), "(__self__, 'subscriptions_list', subscriptions_list)\n", (2489, 2541), False, 'import pulumi\n'), ((2688, 2736), 'pulumi.set', 'pulumi.set', (['__self__', '"""system_data"""', 'system_data'], {}), "(__self__, 'system_data', system_data)\n", (2698, 2736), False, 'import pulumi\n'), ((2860, 2894), 'pulumi.set', 'pulumi.set', (['__self__', '"""type"""', 'type'], {}), "(__self__, 'type', type)\n", (2870, 2894), False, 'import pulumi\n'), ((3127, 3164), 'pulumi.get', 'pulumi.get', (['self', '"""all_subscriptions"""'], {}), "(self, 'all_subscriptions')\n", (3137, 3164), False, 'import pulumi\n'), ((3348, 3373), 'pulumi.get', 'pulumi.get', (['self', '"""claim"""'], {}), "(self, 'claim')\n", (3358, 3373), False, 'import pulumi\n'), ((3532, 3565), 'pulumi.get', 'pulumi.get', (['self', '"""collection_id"""'], {}), "(self, 'collection_id')\n", (3542, 3565), False, 'import pulumi\n'), ((3748, 3783), 'pulumi.get', 'pulumi.get', (['self', '"""collection_name"""'], {}), "(self, 'collection_name')\n", (3758, 3783), False, 'import pulumi\n'), ((3964, 3991), 'pulumi.get', 'pulumi.get', (['self', '"""enabled"""'], {}), "(self, 'enabled')\n", (3974, 3991), False, 'import pulumi\n'), ((4115, 4137), 'pulumi.get', 'pulumi.get', (['self', '"""id"""'], {}), "(self, 'id')\n", (4125, 4137), False, 'import pulumi\n'), ((4272, 4296), 'pulumi.get', 'pulumi.get', (['self', '"""name"""'], {}), "(self, 'name')\n", (4282, 4296), False, 'import pulumi\n'), ((4500, 4536), 'pulumi.get', 'pulumi.get', (['self', 
'"""number_of_offers"""'], {}), "(self, 'number_of_offers')\n", (4510, 4536), False, 'import pulumi\n'), ((4934, 4972), 'pulumi.get', 'pulumi.get', (['self', '"""subscriptions_list"""'], {}), "(self, 'subscriptions_list')\n", (4944, 4972), False, 'import pulumi\n'), ((5202, 5233), 'pulumi.get', 'pulumi.get', (['self', '"""system_data"""'], {}), "(self, 'system_data')\n", (5212, 5233), False, 'import pulumi\n'), ((5368, 5392), 'pulumi.get', 'pulumi.get', (['self', '"""type"""'], {}), "(self, 'type')\n", (5378, 5392), False, 'import pulumi\n'), ((6669, 6691), 'pulumi.InvokeOptions', 'pulumi.InvokeOptions', ([], {}), '()\n', (6689, 6691), False, 'import pulumi\n'), ((6783, 6935), 'pulumi.runtime.invoke', 'pulumi.runtime.invoke', (['"""azure-native:marketplace/v20210601:getPrivateStoreCollection"""', '__args__'], {'opts': 'opts', 'typ': 'GetPrivateStoreCollectionResult'}), "(\n 'azure-native:marketplace/v20210601:getPrivateStoreCollection',\n __args__, opts=opts, typ=GetPrivateStoreCollectionResult)\n", (6804, 6935), False, 'import pulumi\n')]
|
import asyncio
import time
from rich.pretty import pprint
import aiomysql
import asyncmy
import MySQLdb
import pymysql
from benchmark import COUNT, connection_kwargs
from benchmark.decorators import cleanup, fill_data
# Number of UPDATE iterations per driver: a fifth of the shared benchmark COUNT.
count = int(COUNT / 5)
@cleanup
@fill_data
async def update_asyncmy():
    """Time `count` row UPDATEs through the asyncmy driver; return elapsed seconds."""
    conn = await asyncmy.connect(**connection_kwargs)
    async with conn.cursor() as cursor:
        started = time.time()
        for row_id in range(count):
            params = ("update", row_id + 1)
            await cursor.execute(
                "update test.asyncmy set `string`=%s where `id` = %s", params
            )
        return time.time() - started
@cleanup
@fill_data
async def update_aiomysql():
    """Time `count` row UPDATEs through the aiomysql driver; return elapsed seconds."""
    conn = await aiomysql.connect(**connection_kwargs)
    async with conn.cursor() as cursor:
        started = time.time()
        for row_id in range(count):
            params = ("update", row_id + 1)
            await cursor.execute(
                "update test.asyncmy set `string`=%s where `id` = %s", params
            )
        return time.time() - started
@cleanup
@fill_data
def update_mysqlclient():
    """Time `count` row UPDATEs through MySQLdb (mysqlclient); return elapsed seconds."""
    conn = MySQLdb.connect(**connection_kwargs)
    cursor = conn.cursor()
    started = time.time()
    for row_id in range(count):
        params = ("update", row_id + 1)
        cursor.execute(
            "update test.asyncmy set `string`=%s where `id` = %s", params
        )
    return time.time() - started
@cleanup
@fill_data
def update_pymysql():
    """Time `count` row UPDATEs through the pure-Python pymysql driver; return elapsed seconds."""
    conn = pymysql.connect(**connection_kwargs)
    cursor = conn.cursor()
    started = time.time()
    for row_id in range(count):
        params = ("update", row_id + 1)
        cursor.execute(
            "update test.asyncmy set `string`=%s where `id` = %s", params
        )
    return time.time() - started
def benchmark_update():
    """Run the UPDATE benchmark against all four drivers.

    Returns:
        A list of ``(driver_name, elapsed_seconds)`` tuples sorted
        fastest-first.
    """
    # asyncio.get_event_loop() is deprecated when no loop is running
    # (Python 3.10+); create a dedicated loop and make sure it is closed.
    loop = asyncio.new_event_loop()
    asyncio.set_event_loop(loop)
    try:
        update_mysqlclient_ret = update_mysqlclient()
        update_asyncmy_ret = loop.run_until_complete(update_asyncmy())
        update_pymysql_ret = update_pymysql()
        update_aiomysql_ret = loop.run_until_complete(update_aiomysql())
    finally:
        asyncio.set_event_loop(None)
        loop.close()
    return sorted(
        {
            "mysqlclient": update_mysqlclient_ret,
            "asyncmy": update_asyncmy_ret,
            "pymysql": update_pymysql_ret,
            "aiomysql": update_aiomysql_ret,
        }.items(),
        key=lambda x: x[1],
    )
if __name__ == "__main__":
pprint(benchmark_update())
|
[
"aiomysql.connect",
"asyncio.get_event_loop",
"MySQLdb.connect",
"asyncmy.connect",
"time.time",
"pymysql.connect"
] |
[((1179, 1215), 'MySQLdb.connect', 'MySQLdb.connect', ([], {}), '(**connection_kwargs)\n', (1194, 1215), False, 'import MySQLdb\n'), ((1248, 1259), 'time.time', 'time.time', ([], {}), '()\n', (1257, 1259), False, 'import time\n'), ((1545, 1581), 'pymysql.connect', 'pymysql.connect', ([], {}), '(**connection_kwargs)\n', (1560, 1581), False, 'import pymysql\n'), ((1614, 1625), 'time.time', 'time.time', ([], {}), '()\n', (1623, 1625), False, 'import time\n'), ((1893, 1917), 'asyncio.get_event_loop', 'asyncio.get_event_loop', ([], {}), '()\n', (1915, 1917), False, 'import asyncio\n'), ((311, 347), 'asyncmy.connect', 'asyncmy.connect', ([], {}), '(**connection_kwargs)\n', (326, 347), False, 'import asyncmy\n'), ((397, 408), 'time.time', 'time.time', ([], {}), '()\n', (406, 408), False, 'import time\n'), ((749, 786), 'aiomysql.connect', 'aiomysql.connect', ([], {}), '(**connection_kwargs)\n', (765, 786), False, 'import aiomysql\n'), ((836, 847), 'time.time', 'time.time', ([], {}), '()\n', (845, 847), False, 'import time\n'), ((1474, 1485), 'time.time', 'time.time', ([], {}), '()\n', (1483, 1485), False, 'import time\n'), ((1840, 1851), 'time.time', 'time.time', ([], {}), '()\n', (1849, 1851), False, 'import time\n'), ((665, 676), 'time.time', 'time.time', ([], {}), '()\n', (674, 676), False, 'import time\n'), ((1104, 1115), 'time.time', 'time.time', ([], {}), '()\n', (1113, 1115), False, 'import time\n')]
|
from discord.ext import commands
from discord.ext.commands import Cog
# from discord import Embed
# from collections import defaultdict, Counter
# from itertools import islice
# from nltk import pos_tag, CFG, Production
# from nltk import Nonterminal, nonterminals
# from nltk.corpus import brown
import json
from pathlib import Path
# import numpy as np
# import scipy
# from matplotlib import pyplot as plt
# category lists
# structural color texture
class CategoryWordGraph:
    """A small is-a / has-features word graph persisted as JSON.

    ``objects`` maps each word to a ``(category, [feature, ...])`` pair.
    ``is_a`` chains categories upward, so queries such as
    ``is_type_of("dog", "lifeform")`` walk the ancestor chain.
    """

    def __init__(self, objectlist):
        # Path of the JSON file backing the graph.
        self.objectlist = objectlist
        self.objects = {}  # {"object": (category, [feature, ...])}
        self.auto_categories = {}  # {"category": [words]} (not yet populated)

    def _obj_dist(self, o1, o2):
        # Placeholder: distance metric between two objects.
        pass

    def generate_graph(self):
        # Placeholder: derive a full graph from self.objects.
        pass

    def associated_graph(self, object, depth):
        # Placeholder: subgraph of words associated with `object` up to `depth`.
        pass

    # Is obj of the given type, directly or through the is-a chain?
    def is_type_of(self, obj, template_object):
        seen = set()  # guards against cycles in the is-a chain
        super_type = obj
        while super_type in self.objects and super_type not in seen:
            seen.add(super_type)
            super_type = self.is_a(super_type)
            if template_object == super_type:
                return True
        # Fixed: return an explicit False (previously fell through to None).
        return False

    # What is obj? Returns its immediate category, or None if unknown.
    def is_a(self, object):
        if object in self.objects:
            return self.objects[object][0]
        return None

    # What features characterize this object? Returns None for unknown objects.
    def features(self, object):
        try:
            return self.objects[object][1]
        except KeyError:
            return None

    # What are the features of the parent objects? It inherits these by nature.
    def super_features(self, object, depth=None):
        parent = object
        if depth is not None:
            # Fixed: the accumulator used to be re-created inside the loop,
            # so only the last ancestor's features survived (and depth=0
            # raised NameError on the return).
            features = []
            i = 0
            while parent is not None and i < depth:
                parent = self.is_a(parent)
                features.append(self.features(parent))
                i += 1
            return features
        return self.features(self.is_a(object))

    # What objects are in this category?
    def cat_objs(self, category):
        return [obj for obj in self.objects.keys() if self.is_type_of(obj, category)]

    # Find the intersection of the two objects' feature lists.
    def shared_obj_features(self, obj1, obj2):
        return list(set(self.objects[obj1][1]) & set(self.objects[obj2][1]))

    def interactive_add_words(self):
        """Interactively add words until '!!' is entered, then persist to JSON."""
        object = None
        while object != "!!":
            object, category = input("Enter a word and category: ").split(' ')
            if object == "!!":
                continue
            associations = input("Enter associated words: ")
            if object in self.objects:
                # Fixed: merge into the existing entry. The old tuple `+=`
                # produced a malformed 4-tuple that broke is_a()/features().
                existing = self.objects[object][1]
                self.objects[object] = (category, existing + associations.split(' '))
            else:
                self.objects[object] = (category, associations.split(' '))
        with open(self.objectlist, "w") as f:
            json.dump(self.objects, f)

    def definition_load(self):
        # Placeholder: load words from dictionary definitions.
        pass

    def sentence_load(self):
        # Placeholder: load words from example sentences.
        pass

    def load_wordlist(self, jsonlist=None):
        """Load the object graph from JSON, optionally switching the backing file."""
        if jsonlist is not None:
            self.objectlist = jsonlist
        with open(self.objectlist, "r") as f:
            self.objects = json.load(f)

    def __repr__(self):
        return f'{self.objects}'
class WordGraph:
    """Work-in-progress graph of POS-tagged words and their contexts."""
    def __init__(self):
        # word -> {'pos': pos_tag, 'edges': [...]} (edges not yet populated)
        self.words = {}
        # sentences registered via add_sentence (not yet implemented)
        self.sents = []
    def add_word(self, tagged_word):
        """Register a (word, pos_tag) pair in the graph.

        NOTE(review): the update logic below looks incomplete -- see the
        inline notes; left untouched pending clarification of intent.
        """
        # is the word in the graph?
        if tagged_word[0] in self.words:
            # NOTE(review): indexing self.words with the whole (word, tag)
            # tuple will raise KeyError here; this probably meant a lookup
            # inside the existing entry -- TODO confirm intent.
            if self.words[tagged_word] in self.words[tagged_word[0]]:
                pass
            else:
                # NOTE(review): because this `else` pairs with the inner
                # `if`, a brand-new word (outer condition false) is never
                # added to the graph -- verify whether that is intended.
                self.words[tagged_word[0]] = {'pos': tagged_word[1], 'edges': []}
    def add_sentence(self, tagged_sentence):
        # Placeholder: add a full tagged sentence to the graph.
        pass
    def give_contexts(self, word, contexts):
        # Placeholder: attach usage contexts to a word.
        pass
class WordGraphPod(Cog):
    """Discord cog exposing WordGraph features through the `wg` command group."""
    def __init__(self, bot):
        self.bot = bot
        self.graph = WordGraph()
    @commands.group(pass_context=True)
    async def wg(self, ctx):
        # Parent command group; the actual work lives in subcommands.
        pass
    @wg.command()
    async def test(self, ctx, *sent: str):
        # sentence = ' '.join(sent)
        # NOTE(review): sends an empty message, which Discord may reject --
        # presumably a placeholder. TODO confirm.
        await ctx.send('')
class Associate(Cog):
    """Placeholder cog for word-association commands (no commands yet)."""
    def __init__(self, bot):
        self.bot = bot
def setup(bot):
    # discord.py extension entry point: register both cogs when the
    # extension is loaded.
    bot.add_cog(Associate(bot))
    bot.add_cog(WordGraphPod(bot))
if __name__ == '__main__':
    # Interactive writing loop
    # Load the word list stored alongside this file.
    parent_dir = Path(__file__).resolve().parent
    cwg = CategoryWordGraph(Path(parent_dir, "resources/data/words.json"))
    cwg.load_wordlist()
    # cwg.interactive_add_words()
    # Dump each object's category and features, then exercise the query API.
    for object in cwg.objects:
        print(f"A {object} is a {cwg.is_a(object)}")
        print(f"It has these features {cwg.features(object)}")
        print("----------------")
    print(cwg.is_type_of("dog", "lifeform"))
    print(cwg.cat_objs("lifeform"))
    print(cwg.super_features("dog", depth=3), cwg.features("dog"))
    print(cwg.shared_obj_features("dog", "cat"))
|
[
"json.dump",
"pathlib.Path",
"json.load",
"discord.ext.commands.group"
] |
[((3963, 3996), 'discord.ext.commands.group', 'commands.group', ([], {'pass_context': '(True)'}), '(pass_context=True)\n', (3977, 3996), False, 'from discord.ext import commands\n'), ((4462, 4507), 'pathlib.Path', 'Path', (['parent_dir', '"""resources/data/words.json"""'], {}), "(parent_dir, 'resources/data/words.json')\n", (4466, 4507), False, 'from pathlib import Path\n'), ((2947, 2973), 'json.dump', 'json.dump', (['self.objects', 'f'], {}), '(self.objects, f)\n', (2956, 2973), False, 'import json\n'), ((3253, 3265), 'json.load', 'json.load', (['f'], {}), '(f)\n', (3262, 3265), False, 'import json\n'), ((4402, 4416), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (4406, 4416), False, 'from pathlib import Path\n')]
|
#!/usr/bin/env python3
from intcode import Computer
from itertools import permutations
with open("inputs/7") as f:
inputs = list(map(int, f.readline().strip().split(",")))
for bounds in ((0, 5), (5, 10)):
output = float("-inf")
for config in permutations(range(*bounds)):
amps = []
for i in range(len(config)):
amps.append(Computer(inputs))
amps[i].put(config[i])
x = 0
halt = False
while not halt:
for i in range(len(config)):
amps[i].put(x)
try:
x = amps[i].eval()
except StopIteration:
halt = True
output = max(output, x)
print(output)
|
[
"intcode.Computer"
] |
[((367, 383), 'intcode.Computer', 'Computer', (['inputs'], {}), '(inputs)\n', (375, 383), False, 'from intcode import Computer\n')]
|
from setuptools import setup
import ssllabs
# Package metadata for python-ssllabs: installs the ssllabs package plus the
# ssllabs-cli.py script; the version is single-sourced from the package itself.
setup(name='python-ssllabs',
      version=ssllabs.__version__,
      packages=['ssllabs'],
      scripts=['ssllabs-cli.py'],
      install_requires=['requests'],
      url='https://github.com/takeshixx/python-ssllabs',
      license='Apache 2.0',
      author='takeshix')
|
[
"setuptools.setup"
] |
[((45, 288), 'setuptools.setup', 'setup', ([], {'name': '"""python-ssllabs"""', 'version': 'ssllabs.__version__', 'packages': "['ssllabs']", 'scripts': "['ssllabs-cli.py']", 'install_requires': "['requests']", 'url': '"""https://github.com/takeshixx/python-ssllabs"""', 'license': '"""Apache 2.0"""', 'author': '"""takeshix"""'}), "(name='python-ssllabs', version=ssllabs.__version__, packages=[\n 'ssllabs'], scripts=['ssllabs-cli.py'], install_requires=['requests'],\n url='https://github.com/takeshixx/python-ssllabs', license='Apache 2.0',\n author='takeshix')\n", (50, 288), False, 'from setuptools import setup\n')]
|
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from datetime import datetime
from typing import Dict
import requests
from pandas import DataFrame
from lib.concurrent import thread_map
from lib.data_source import DataSource
from lib.time import date_range, date_today
# Endpoint template: the API serves one country-wide snapshot per "to" date.
_api_url_tpl = "https://api-covid19.rnbo.gov.ua/data?to={date}"


def _get_daily_records(date: str):
    """Fetch the 'ukraine' payload for one day and normalize each entry."""
    url = _api_url_tpl.format(date=date)
    daily_data = requests.get(url, timeout=60).json().get("ukraine", [])
    # One output row per entry in the payload, keyed for the match pipeline.
    return [
        {
            "date": date,
            "country_code": "UA",
            "match_string": entry.get("label", {}).get("en"),
            "total_confirmed": entry.get("confirmed"),
            "total_deceased": entry.get("deaths"),
            "total_recovered": entry.get("recovered"),
        }
        for entry in daily_data
    ]
class UkraineDataSource(DataSource):
    """Data source that pulls Ukraine's daily COVID-19 snapshots."""

    def parse(self, sources: Dict[str, str], aux: Dict[str, DataFrame], **parse_opts) -> DataFrame:
        """Fetch every day since the series start and combine into one frame."""
        # Data can only be retrieved one day at a time, and it starts on 2020-01-22
        start_date = "2020-01-22"
        dates = list(date_range(start_date, date_today()))
        # Fetch all days concurrently, then flatten the per-day record lists.
        daily_records = sum(thread_map(_get_daily_records, dates), [])
        return DataFrame.from_records(daily_records)
|
[
"requests.get",
"lib.concurrent.thread_map",
"lib.time.date_today",
"pandas.DataFrame.from_records"
] |
[((1845, 1876), 'pandas.DataFrame.from_records', 'DataFrame.from_records', (['records'], {}), '(records)\n', (1867, 1876), False, 'from pandas import DataFrame\n'), ((1784, 1824), 'lib.concurrent.thread_map', 'thread_map', (['_get_daily_records', 'map_iter'], {}), '(_get_daily_records, map_iter)\n', (1794, 1824), False, 'from lib.concurrent import thread_map\n'), ((1747, 1759), 'lib.time.date_today', 'date_today', ([], {}), '()\n', (1757, 1759), False, 'from lib.time import date_range, date_today\n'), ((975, 1004), 'requests.get', 'requests.get', (['url'], {'timeout': '(60)'}), '(url, timeout=60)\n', (987, 1004), False, 'import requests\n')]
|