code
stringlengths 1
25.8M
| language
stringclasses 18
values | source
stringclasses 4
values | repo
stringclasses 78
values | path
stringlengths 0
268
|
|---|---|---|---|---|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
$Id: Project.py 1092 2011-06-13 14:40:56Z sumpfralle $
Copyright 2010 Lars Kruse <devel@sumpfralle.de>
This file is part of PyCAM.
PyCAM is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
PyCAM is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with PyCAM. If not, see <http://www.gnu.org/licenses/>.
"""
from pycam.Exporters.GCodeExporter import PATH_MODES, GCodeGenerator
import pycam.Exporters.EMCToolExporter
import pycam.Gui.Settings
import pycam.Cutters
import pycam.Toolpath.Generator
import pycam.Toolpath
import pycam.Importers.CXFImporter
import pycam.Importers.TestModel
import pycam.Importers
from pycam.Geometry.Point import Point, Vector
from pycam.Geometry.Plane import Plane
import pycam.Geometry.Path
import pycam.Utils.log
import pycam.Utils
from pycam.Geometry.utils import sqrt
from pycam.Gui.OpenGLTools import ModelViewWindowGL
from pycam.Geometry.Letters import TEXT_ALIGN_LEFT, TEXT_ALIGN_CENTER, \
TEXT_ALIGN_RIGHT
import pycam.Geometry.Model
from pycam.Toolpath import Bounds
import pycam.Utils.FontCache
from pycam import VERSION
import pycam.Physics.ode_physics
# this requires ODE - we import it later, if necessary
#import pycam.Simulation.ODEBlocks
import gtk
import gobject
import webbrowser
import ConfigParser
import string
import StringIO
import pickle
import time
import logging
import datetime
import traceback
import random
import math
import re
import os
import sys
# Environment variables that may override the built-in search paths below.
DATA_DIR_ENVIRON_KEY = "PYCAM_DATA_DIR"
FONT_DIR_ENVIRON_KEY = "PYCAM_FONT_DIR"
# Candidate locations of PyCAM's "share" data directory: the source tree
# (three levels above this module) and the usual system-wide prefixes.
DATA_BASE_DIRS = [os.path.realpath(os.path.join(os.path.dirname(__file__),
        os.pardir, os.pardir, os.pardir, "share")),
        os.path.join(sys.prefix, "local", "share", "pycam"),
        os.path.join(sys.prefix, "share", "pycam")]
UI_SUBDIR = "ui"
FONTS_SUBDIR = "fonts"
# necessary for "pyinstaller"
if "_MEIPASS2" in os.environ:
    DATA_BASE_DIRS.insert(0, os.path.join(os.path.normpath(os.environ["_MEIPASS2"]), "share"))
# respect an override via an environment setting
if DATA_DIR_ENVIRON_KEY in os.environ:
    DATA_BASE_DIRS.insert(0, os.path.normpath(os.environ[DATA_DIR_ENVIRON_KEY]))
if FONT_DIR_ENVIRON_KEY in os.environ:
    FONT_DIR_OVERRIDE = os.path.normpath(os.environ[FONT_DIR_ENVIRON_KEY])
else:
    FONT_DIR_OVERRIDE = None
# System font directories tried when neither the override nor the data
# directory provides fonts (see get_font_dir below).
FONT_DIR_FALLBACKS = ["/usr/share/librecad/fonts", "/usr/share/qcad/fonts"]
# UI resource files, resolved via get_data_file_location at runtime.
GTKBUILD_FILE = os.path.join(UI_SUBDIR, "pycam-project.ui")
GTKMENU_FILE = os.path.join(UI_SUBDIR, "menubar.xml")
GTKRC_FILE_WINDOWS = os.path.join(UI_SUBDIR, "gtkrc_windows")
WINDOW_ICON_FILENAMES = ["logo_%dpx.png" % pixels for pixels in (16, 32, 48, 64, 128)]
HELP_WIKI_URL = "http://sourceforge.net/apps/mediawiki/pycam/index.php?title=%s"
# File-chooser filter definitions: (label, pattern-or-patterns) tuples.
FILTER_GCODE = (("GCode files", ("*.ngc", "*.nc", "*.gc", "*.gcode")),)
FILTER_MODEL = (("All supported model filetypes",
                ("*.stl", "*.dxf", "*.svg", "*.eps", "*.ps")),
        ("STL models", "*.stl"), ("DXF contours", "*.dxf"),
        ("SVG contours", "*.svg"), ("PS contours", ("*.eps", "*.ps")))
FILTER_CONFIG = (("Config files", "*.conf"),)
FILTER_EMC_TOOL = (("EMC tool files", "*.tbl"),)
# MIME targets used for clipboard copy/paste of models and drag-n-drop.
CLIPBOARD_TARGETS = {
        "dxf": ("image/vnd.dxf", ),
        "ps": ("application/postscript", ),
        "stl": ("application/sla", ),
        "svg": ("image/x-inkscape-svg", "image/svg+xml"),
        "filename_drag": ("text/uri-list", "text-plain"),
}
# (key, label, icon object name) rows for the extrusion type selector.
EXTRUSION_TYPES = (("radius_up", "Radius (bulge)", "ExtrusionRadiusUpIcon"),
        ("radius_down", "Radius (valley)", "ExtrusionRadiusDownIcon"),
        ("skewed", "Chamfer", "ExtrusionChamferIcon"),
        ("sine", "Sine", "ExtrusionSineIcon"),
        ("sigmoid", "Sigmoid", "ExtrusionSigmoidIcon"),
)
PREFERENCES_DEFAULTS = {
        "enable_ode": False,
        "boundary_mode": -1,
        "unit": "mm",
        "default_task_settings_file": "",
        "show_model": True,
        "show_support_grid": True,
        "show_axes": True,
        "show_dimensions": True,
        "show_bounding_box": True,
        "show_toolpath": True,
        "show_drill": False,
        "show_directions": False,
        "color_background": (0.0, 0.0, 0.0, 1.0),
        "color_model": (0.5, 0.5, 1.0, 1.0),
        "color_support_grid": (0.8, 0.8, 0.3, 1.0),
        "color_bounding_box": (0.3, 0.3, 0.3, 1.0),
        "color_cutter": (1.0, 0.2, 0.2, 1.0),
        "color_toolpath_cut": (1.0, 0.5, 0.5, 1.0),
        "color_toolpath_return": (0.9, 1.0, 0.1, 0.4),
        "color_material": (1.0, 0.5, 0.0, 1.0),
        "view_light": True,
        "view_shadow": True,
        "view_polygon": True,
        "view_perspective": True,
        "simulation_details_level": 3,
        "drill_progress_max_fps": 2,
        "gcode_safety_height": 25.0,
        "gcode_minimum_step_x": 0.0001,
        "gcode_minimum_step_y": 0.0001,
        "gcode_minimum_step_z": 0.0001,
        "gcode_path_mode": 0,
        "gcode_motion_tolerance": 0,
        "gcode_naive_tolerance": 0,
        "gcode_start_stop_spindle": True,
        "gcode_filename_extension": "",
        "gcode_spindle_delay": 3,
        "external_program_inkscape": "",
        "external_program_pstoedit": "",
        "server_auth_key": "",
        # NOTE(review): "pycam.Utils.threading" is not imported in this
        # module's import block — presumably pulled in as a side effect of
        # "import pycam.Utils"; verify before refactoring imports.
        "server_port_local": pycam.Utils.threading.DEFAULT_PORT,
        "server_port_remote": pycam.Utils.threading.DEFAULT_PORT,
        "server_hostname": "",
        "touch_off_on_startup": False,
        "touch_off_on_tool_change": False,
        "touch_off_position_type": "absolute",
        "touch_off_position_x": 0.0,
        "touch_off_position_y": 0.0,
        "touch_off_position_z": 0.0,
        "touch_off_rapid_move": 0.0,
        "touch_off_slow_move": 1.0,
        "touch_off_slow_feedrate": 20,
        "touch_off_height": 0.0,
        "touch_off_pause_execution": False,
}
""" the listed items will be loaded/saved via the preferences file in the
user's home directory on startup/shutdown"""
# Mapping of support grid type names to combobox indices.
GRID_TYPES = {"none": 0, "grid": 1, "automatic_edge": 2, "automatic_corner": 3}
POCKETING_TYPES = ["none", "holes", "enclosed"]
# Cap on the number of model snapshots kept for the "undo" feature.
MAX_UNDO_STATES = 10
# floating point color values are only available since gtk 2.16
GTK_COLOR_MAX = 65535.0
log = pycam.Utils.log.get_logger()
def get_data_file_location(filename, silent=False):
    """Resolve a resource file against the data directory search path.

    @param filename: path of the resource relative to a data base directory
    @param silent: suppress the error log message if the file is missing
    @return: the first existing absolute path, or None if nothing matched
    """
    for base_dir in DATA_BASE_DIRS:
        candidate = os.path.join(base_dir, filename)
        if os.path.exists(candidate):
            return candidate
    # nothing was found in any of the base directories
    if not silent:
        message = os.linesep.join((
                "Failed to locate a resource file (%s) in %s!" \
                        % (filename, DATA_BASE_DIRS),
                "You can extend the search path by setting the " \
                        + "environment variable '%s'." \
                        % str(DATA_DIR_ENVIRON_KEY)))
        log.error(message)
    return None
def report_exception():
    """Log the currently handled exception with a bug-report request.

    Must be called from within an "except" block: the traceback of the
    active exception is appended to the error message.
    """
    # fix: the original message misspelled "occurred" as "occoured"
    log.error("An unexpected exception occurred: please send the " \
            + "text below to the developers of PyCAM. Thanks a lot!" \
            + os.linesep + traceback.format_exc())
def get_filters_from_list(filter_list):
    """Turn (label, patterns) tuples into gtk.FileFilter objects.

    @param filter_list: iterable of (name, pattern) pairs; "pattern" may be
        a single glob string or a list/tuple of glob strings
    @return: list of configured gtk.FileFilter instances (same order)
    """
    filters = []
    for name, patterns in filter_list:
        file_filter = gtk.FileFilter()
        file_filter.set_name(name)
        # accept a bare string as a single-pattern shortcut
        if not isinstance(patterns, (list, tuple)):
            patterns = [patterns]
        for pattern in patterns:
            file_filter.add_pattern(pattern)
        filters.append(file_filter)
    return filters
def get_icons_pixbuffers():
    """Load the application window icons as gtk pixbuf objects.

    Missing or unreadable icon files are skipped with a debug message,
    so the returned list may contain fewer entries than
    WINDOW_ICON_FILENAMES (possibly none).
    """
    pixbufs = []
    for icon_filename in WINDOW_ICON_FILENAMES:
        icon_location = get_data_file_location(
                os.path.join(UI_SUBDIR, icon_filename), silent=True)
        if not icon_location:
            log.debug("Failed to locate window icon: %s" % icon_filename)
            continue
        try:
            pixbufs.append(gtk.gdk.pixbuf_new_from_file(icon_location))
        except gobject.GError as err_msg:
            # ignore icons that are not found
            log.debug("Failed to process window icon (%s): %s" \
                    % (icon_location, err_msg))
    return pixbufs
def get_font_dir():
    """Determine the directory containing the engraving fonts.

    Resolution order: the FONT_DIR_OVERRIDE environment setting (if it
    points at an existing directory), the "fonts" subdirectory of the data
    search path, and finally the hard-coded system fallbacks. Each failed
    stage emits a warning.

    @return: an existing font directory, or None if none was found
    """
    if FONT_DIR_OVERRIDE:
        if os.path.isdir(FONT_DIR_OVERRIDE):
            return FONT_DIR_OVERRIDE
        # the override exists but is unusable - warn and continue searching
        log.warn(("You specified a font dir that does not exist (%s). " \
                + "I will ignore it.") % FONT_DIR_OVERRIDE)
    font_dir = get_data_file_location(FONTS_SUBDIR, silent=True)
    if font_dir is not None:
        return font_dir
    log.warn(("Failed to locate the fonts directory '%s' below '%s'. " + \
            "Falling back to '%s'.") % \
            (FONTS_SUBDIR, DATA_BASE_DIRS, ", ".join(FONT_DIR_FALLBACKS)))
    for candidate in FONT_DIR_FALLBACKS:
        if os.path.isdir(candidate):
            return candidate
    log.warn(("None of the fallback font directories (%s) exists. " + \
            "No fonts will be available. Please install " + \
            "'qcad-data' or 'librecad-data' if you need fonts." ) % \
            ", ".join(FONT_DIR_FALLBACKS))
    return None
class ProjectGui:
    """Main window controller of the PyCAM GTK interface."""

    # Mapping of boundary-mode names to the numeric offsets used in the
    # settings ("boundary_mode"); the combobox index is this value + 1.
    BOUNDARY_MODES = {
            "inside": -1,
            "along": 0,
            "around": 1}
    # mapping of boundary types and GUI control elements
    BOUNDARY_TYPES = {
            Bounds.TYPE_RELATIVE_MARGIN: "BoundsTypeRelativeMargin",
            Bounds.TYPE_FIXED_MARGIN: "BoundsTypeFixedMargin",
            Bounds.TYPE_CUSTOM: "BoundsTypeCustom"}
    # Prefix marking embedded PyCAM metadata comments in saved files.
    META_DATA_PREFIX = "PYCAM-META-DATA:"
def __init__(self, no_dialog=False):
self.settings = pycam.Gui.Settings.Settings()
self.gui_is_active = False
self.view3d = None
# during initialization any dialog (e.g. "Unit change") is not allowed
# we set the final value later
self.no_dialog = True
self._batch_queue = []
self._progress_running = False
self._progress_cancel_requested = False
self._last_gtk_events_time = None
self._undo_states = []
self._fonts_cache = pycam.Utils.FontCache.FontCache(get_font_dir(),
callback=self.update_progress_bar)
self.gui = gtk.Builder()
gtk_build_file = get_data_file_location(GTKBUILD_FILE)
if gtk_build_file is None:
gtk.main_quit()
self.gui.add_from_file(gtk_build_file)
if pycam.Utils.get_platform() == pycam.Utils.PLATFORM_WINDOWS:
gtkrc_file = get_data_file_location(GTKRC_FILE_WINDOWS)
if gtkrc_file:
print "GTKRC: %s" % str(gtkrc_file)
gtk.rc_add_default_file(gtkrc_file)
gtk.rc_reparse_all_for_settings(gtk.settings_get_default(), True)
self.window = self.gui.get_object("ProjectWindow")
# show stock items on buttons
# increase the initial width of the window (due to hidden elements)
self.window.set_default_size(400, -1)
# initialize the RecentManager (TODO: check for Windows)
if False and pycam.Utils.get_platform() == pycam.Utils.PLATFORM_WINDOWS:
# The pyinstaller binary for Windows fails mysteriously when trying
# to display the stock item.
# Error message: Gtk:ERROR:gtkrecentmanager.c:1942:get_icon_fallback: assertion failed: (retval != NULL)
self.recent_manager = None
else:
try:
self.recent_manager = gtk.recent_manager_get_default()
except AttributeError:
# GTK 2.12.1 seems to have problems with "RecentManager" on
# Windows. Sadly this is the version, that is shipped with the
# "appunti" GTK packages for Windows (April 2010).
# see http://www.daa.com.au/pipermail/pygtk/2009-May/017052.html
self.recent_manager = None
# file loading
self.last_dirname = None
self.last_task_settings_uri = None
self.last_model_uri = None
self.last_toolpath_file = None
# define callbacks and accelerator keys for the menu actions
for objname, callback, data, accel_key in (
("LoadTaskSettings", self.load_task_settings_file, None, "<Control>t"),
("SaveTaskSettings", self.save_task_settings_file, lambda: self.last_task_settings_uri, None),
("SaveAsTaskSettings", self.save_task_settings_file, None, None),
("OpenModel", self.load_model_file, None, "<Control>o"),
("SaveModel", self.save_model, lambda: self.last_model_uri, "<Control>s"),
("SaveAsModel", self.save_model, None, "<Control><Shift>s"),
("ExportGCodeAll", self.save_toolpath, False, "<Control><Shift>e"),
("ExportGCodeVisible", self.save_toolpath, True, None),
("ExportEMCToolDefinition", self.export_emc_tools, None, None),
("Quit", self.destroy, None, "<Control>q"),
("GeneralSettings", self.toggle_preferences_window, None, "<Control>p"),
("Toggle3DView", self.toggle_3d_view, None, "<Control><Shift>v"),
("ToggleLogWindow", self.toggle_log_window, None, "<Control>l"),
("ToggleProcessPoolWindow", self.toggle_process_pool_window, None, None),
("ShowFontDialog", self.toggle_font_dialog_window, None, "<Control><Shift>t"),
("UndoButton", self._restore_undo_state, None, "<Control>z"),
("CopyModelToClipboard", self.copy_model_to_clipboard, None, "<Control>c"),
("PasteModelFromClipboard", self.paste_model_from_clipboard, None, "<Control>v"),
("HelpUserManual", self.show_help, "User_Manual", "F1"),
("HelpIntroduction", self.show_help, "Introduction", None),
("HelpSupportedFormats", self.show_help, "SupportedFormats", None),
("HelpModelTransformations", self.show_help, "ModelTransformations", None),
("HelpToolTypes", self.show_help, "ToolTypes", None),
("HelpProcessSettings", self.show_help, "ProcessSettings", None),
("HelpBoundsSettings", self.show_help, "BoundsSettings", None),
("HelpTaskSetup", self.show_help, "TaskSetup", None),
("HelpGCodeExport", self.show_help, "GCodeExport", None),
("HelpTouchOff", self.show_help, "TouchOff", None),
("HelpSimulation", self.show_help, "Simulation", None),
("Help3DView", self.show_help, "3D_View", None),
("HelpServerMode", self.show_help, "ServerMode", None),
("HelpCommandLine", self.show_help, "CommandlineExamples", None),
("HelpHotkeys", self.show_help, "KeyboardShortcuts", None),
("ProjectWebsite", self.show_help, "http://pycam.sourceforge.net", None),
("DevelopmentBlog", self.show_help, "http://fab.senselab.org/pycam", None),
("Forum", self.show_help, "http://sourceforge.net/projects/pycam/forums", None),
("BugTracker", self.show_help, "http://sourceforge.net/tracker/?group_id=237831&atid=1104176", None),
("FeatureRequest", self.show_help, "http://sourceforge.net/tracker/?group_id=237831&atid=1104179", None)):
item = self.gui.get_object(objname)
if objname in ("Toggle3DView", "ToggleLogWindow",
"ToggleProcessPoolWindow"):
action = "toggled"
else:
action = "activate"
if data is None:
item.connect(action, callback)
else:
item.connect(action, callback, data)
if accel_key:
key, mod = gtk.accelerator_parse(accel_key)
accel_path = "<pycam>/%s" % objname
item.set_accel_path(accel_path)
gtk.accel_map_change_entry(accel_path, key, mod, True)
# LinkButton does not work on Windows: https://bugzilla.gnome.org/show_bug.cgi?id=617874
if pycam.Utils.get_platform() == pycam.Utils.PLATFORM_WINDOWS:
def open_url(widget, data=None):
webbrowser.open(widget.get_uri())
gtk.link_button_set_uri_hook(open_url)
# no undo is allowed at the beginning
self.gui.get_object("UndoButton").set_sensitive(False)
# configure drag-n-drop for config files and models
self.configure_drag_and_drop(self.window)
self.clipboard = gtk.clipboard_get()
self.clipboard.connect("owner-change", self.update_clipboard_state)
# other events
self.window.connect("destroy", self.destroy)
# the settings window
self.gui.get_object("CloseSettingsWindow").connect("clicked", self.toggle_preferences_window, False)
self.gui.get_object("ResetPreferencesButton").connect("clicked", self.reset_preferences)
self.preferences_window = self.gui.get_object("GeneralSettingsWindow")
self.preferences_window.connect("delete-event", self.toggle_preferences_window, False)
self._preferences_window_position = None
self._preferences_window_visible = False
self._log_window_position = None
# "about" window
self.about_window = self.gui.get_object("AboutWindow")
self.about_window.set_version(VERSION)
self.gui.get_object("About").connect("activate", self.toggle_about_window, True)
# "unit change" window
self.unit_change_window = self.gui.get_object("UnitChangeDialog")
self.gui.get_object("UnitChangeApply").connect("clicked", self.change_unit_apply)
self.unit_change_window.connect("delete_event", self.change_unit_apply, False)
# we assume, that the last child of the window is the "close" button
# TODO: fix this ugly hack!
self.gui.get_object("AboutWindowButtons").get_children()[-1].connect("clicked", self.toggle_about_window, False)
self.about_window.connect("delete-event", self.toggle_about_window, False)
# "log" window
self.log_window = self.gui.get_object("LogWindow")
self.log_window.set_default_size(500, 400)
self.log_window.connect("delete-event", self.toggle_log_window, False)
self.log_window.connect("destroy", self.toggle_log_window, False)
self.gui.get_object("LogWindowClose").connect("clicked", self.toggle_log_window, False)
self.gui.get_object("LogWindowClear").connect("clicked", self.clear_log_window)
self.gui.get_object("LogWindowCopyToClipboard").connect("clicked",
self.copy_log_to_clipboard)
self.log_model = self.gui.get_object("LogWindowList")
# "process pool" window
self.process_pool_window = self.gui.get_object("ProcessPoolWindow")
self.process_pool_window.set_default_size(500, 400)
self.process_pool_window.connect("delete-event", self.toggle_process_pool_window, False)
self.process_pool_window.connect("destroy", self.toggle_process_pool_window, False)
self.gui.get_object("ProcessPoolWindowClose").connect("clicked", self.toggle_process_pool_window, False)
self.gui.get_object("ProcessPoolRefreshInterval").set_value(3)
self.process_pool_model = self.gui.get_object("ProcessPoolStatisticsModel")
# extrusion dialog
self._extrusion_dialog_position = None
self._extrusion_dialog_visible = False
self.extrusion_dialog_window = self.gui.get_object("ExtrusionDialog")
self.extrusion_dialog_window.connect("delete-event",
self.toggle_extrusion_dialog, False)
self.gui.get_object("ExtrusionCancel").connect("clicked",
self.toggle_extrusion_dialog, False)
self.gui.get_object("ExtrusionSubmit").connect("clicked",
self.extrude_model)
self.gui.get_object("ExtrusionHeight").set_value(1)
self.gui.get_object("ExtrusionWidth").set_value(1)
self.gui.get_object("ExtrusionGrid").set_value(0.5)
extrusion_model = self.gui.get_object("ExtrusionTypeModel")
for row in EXTRUSION_TYPES:
extrusion_model.append((row[0], row[1],
self.gui.get_object(row[2]).get_pixbuf()))
self.gui.get_object("ExtrusionTypeSelector").set_active(0)
# "font dialog" window
self.font_dialog_window = self.gui.get_object("FontDialog")
self.font_dialog_window.connect("delete-event",
self.toggle_font_dialog_window, False)
self.font_dialog_window.connect("destroy",
self.toggle_font_dialog_window, False)
self.gui.get_object("FontDialogCancel").connect("clicked",
self.toggle_font_dialog_window, False)
self.gui.get_object("FontDialogApply").connect("clicked",
self.import_from_font_dialog)
self.gui.get_object("FontDialogSave").connect("clicked",
self.export_from_font_dialog)
self.gui.get_object("FontDialogCopy").connect("clicked",
self.copy_font_dialog_to_clipboard)
self.gui.get_object("FontDialogInputBuffer").connect("changed",
self.update_font_dialog_preview)
self.gui.get_object("FontDialogPreview").connect("configure_event",
self.update_font_dialog_preview)
self.gui.get_object("FontDialogPreview").connect("expose_event",
self.update_font_dialog_preview)
for objname in ("FontSideSkewValue", "FontCharacterSpacingValue",
"FontLineSpacingValue"):
obj = self.gui.get_object(objname)
# set default value before connecting the change-handler
if objname != "FontSideSkewValue":
obj.set_value(1.0)
obj.connect("value-changed",
self.update_font_dialog_preview)
for objname in ("FontTextAlignLeft", "FontTextAlignCenter",
"FontTextAlignRight"):
self.gui.get_object(objname).connect("toggled",
self.update_font_dialog_preview)
self._font_dialog_window_visible = False
self._font_dialog_window_position = None
# set defaults
# fallback - in case of a failure when opening a model file
self.model = pycam.Importers.TestModel.get_test_model()
self.toolpath = pycam.Toolpath.ToolpathList()
self.cutter = None
self.tool_list = []
self.process_list = []
self.bounds_list = []
self.task_list = []
self.grid_adjustments_x = []
self.grid_adjustments_y = []
self.font_selector = None
self._last_unit = None
self._toolpath_for_grid_data = {}
# add some dummies - to be implemented later ...
self.settings.add_item("model", lambda: self.model)
self.settings.add_item("toolpath", lambda: self.toolpath)
self.settings.add_item("cutter", lambda: self.cutter)
# unit control (mm/inch)
unit_field = self.gui.get_object("unit_control")
unit_field.connect("changed", self.change_unit_init)
def set_unit(text):
unit_field.set_active(0 if text == "mm" else 1)
self._last_unit = text
self.settings.add_item("unit", unit_field.get_active_text, set_unit)
self.gui.get_object("UnitChangeSelectAll").connect("clicked",
self.change_unit_set_selection, True)
self.gui.get_object("UnitChangeSelectNone").connect("clicked",
self.change_unit_set_selection, False)
# autoload task settings file on startup
autoload_enable = self.gui.get_object("AutoLoadTaskFile")
autoload_box = self.gui.get_object("StartupTaskFileBox")
autoload_source = self.gui.get_object("StartupTaskFile")
for one_filter in get_filters_from_list(FILTER_CONFIG):
autoload_source.add_filter(one_filter)
autoload_source.set_filter(one_filter)
def get_autoload_task_file(autoload_source=autoload_source):
if autoload_enable.get_active():
return autoload_source.get_filename()
else:
return ""
def set_autoload_task_file(filename):
if filename:
autoload_enable.set_active(True)
autoload_box.show()
autoload_source.set_filename(filename)
else:
autoload_enable.set_active(False)
autoload_box.hide()
autoload_source.unselect_all()
def autoload_enable_switched(widget, box):
if not widget.get_active():
set_autoload_task_file(None)
else:
autoload_box.show()
autoload_enable.connect("toggled", autoload_enable_switched,
autoload_box)
self.settings.add_item("default_task_settings_file",
get_autoload_task_file, set_autoload_task_file)
# boundary mode (move inside/along/around the boundaries)
boundary_mode_control = self.gui.get_object("BoundaryModeControl")
def set_boundary_mode(value):
# we assume, that the items in the list are (-1, 0, +1)
boundary_mode_control.set_active(value + 1)
def get_boundary_mode():
return boundary_mode_control.get_active() - 1
self.settings.add_item("boundary_mode", get_boundary_mode,
set_boundary_mode)
# Trigger a re-calculation of the bounds values after changing its type.
for objname in ("BoundsTypeRelativeMargin", "BoundsTypeFixedMargin",
"BoundsTypeCustom"):
self.gui.get_object(objname).connect("toggled",
self.switch_bounds_type)
# Calculate the "minx, ..." settings based on a (potentially) selected
# bounds setting.
def get_absolute_limit(key):
if not self.model:
# avoid problems if no model is loaded
return 0
bounds = self.settings.get("current_bounds")
if bounds is None:
return getattr(self.model, key)
low, high = bounds.get_absolute_limits(reference=self.model.get_bounds())
index = "xyz".index(key[-1])
if key.startswith("min"):
return low[index]
else:
return high[index]
for key in ("minx", "maxx", "miny", "maxy", "minz", "maxz"):
# create a new variable "key" to avoid re-using the same object "key"
# (due to the lambda name scope)
self.settings.add_item(key, lambda key=key: get_absolute_limit(key))
# Transformations
self.gui.get_object("Rotate").connect("clicked", self.transform_model)
self.gui.get_object("Flip").connect("clicked", self.transform_model)
self.gui.get_object("Swap").connect("clicked", self.transform_model)
shift_model_button = self.gui.get_object("Shift Model")
shift_model_button.connect("clicked", self.shift_model, True)
# Make the "shift" button the default while one of the x/y/z values is
# active.
for objname in ("shift_x", "shift_y", "shift_z"):
self.gui.get_object(objname).connect("focus-in-event",
lambda widget, data: shift_model_button.grab_default())
self.gui.get_object(objname).connect("focus-out-event",
lambda widget, data: self.window.set_default(None))
self.gui.get_object("Shift To Origin").connect("clicked",
self.shift_model, False)
# scale model
scale_percent = self.gui.get_object("ScalePercent")
scale_button = self.gui.get_object("ScaleModelButton")
scale_percent.set_value(100)
scale_percent.connect("focus-in-event",
lambda widget, data: scale_button.grab_default())
scale_percent.connect("focus-out-event",
lambda widget, data: self.window.set_default(None))
scale_button.connect("clicked", self.scale_model)
# scale model to an axis dimension
self.gui.get_object("ScaleDimensionAxis").connect("changed",
self.update_model_dimensions)
scale_dimension_button = self.gui.get_object("ScaleDimensionButton")
scale_dimension_button.connect("clicked", self.scale_model_axis_fit)
scale_dimension_control = self.gui.get_object("ScaleDimensionControl")
scale_dimension_control.connect("focus-in-event",
lambda widget, data: scale_dimension_button.grab_default())
scale_dimension_control.connect("focus-out-event",
lambda widget, data: self.window.set_default(None))
self.gui.get_object("ToggleModelDirectionButton").connect("clicked",
self.reverse_model_direction)
self.gui.get_object("DirectionsGuessButton").connect("clicked",
self.guess_model_directions)
self.gui.get_object("ScaleInchMM").connect("clicked", self.scale_model,
100 * 25.4, False)
self.gui.get_object("ScaleMMInch").connect("clicked", self.scale_model,
100 / 25.4, False)
self.gui.get_object("Projection2D").connect("clicked",
self.projection_2d)
self.gui.get_object("ExtrudeButton").connect("clicked",
self.toggle_extrusion_dialog, True)
# support grid
support_grid_type_control = self.gui.get_object(
"SupportGridTypesControl")
support_grid_type_control.connect("changed",
self.update_support_controls)
self.settings.add_item("support_grid_type",
support_grid_type_control.get_active,
support_grid_type_control.set_active)
self.settings.set("support_grid_type", GRID_TYPES["none"])
grid_distance_x = self.gui.get_object("SupportGridDistanceX")
grid_distance_x.connect("value-changed", self.update_support_controls)
self.settings.add_item("support_grid_distance_x",
grid_distance_x.get_value, grid_distance_x.set_value)
grid_distance_square = self.gui.get_object("SupportGridDistanceSquare")
grid_distance_square.connect("clicked", self.update_support_controls)
grid_distance_y = self.gui.get_object("SupportGridDistanceY")
grid_distance_y.connect("value-changed", self.update_support_controls)
def get_support_grid_distance_y():
if grid_distance_square.get_active():
return self.settings.get("support_grid_distance_x")
else:
return grid_distance_y.get_value()
self.settings.add_item("support_grid_distance_y",
get_support_grid_distance_y, grid_distance_y.set_value)
grid_thickness = self.gui.get_object("SupportGridThickness")
grid_thickness.connect("value-changed", self.update_support_model)
self.settings.add_item("support_grid_thickness",
grid_thickness.get_value, grid_thickness.set_value)
grid_height = self.gui.get_object("SupportGridHeight")
grid_height.connect("value-changed", self.update_support_model)
self.settings.add_item("support_grid_height",
grid_height.get_value, grid_height.set_value)
grid_length = self.gui.get_object("SupportGridLength")
grid_length.connect("value-changed", self.update_support_model)
self.settings.add_item("support_grid_length",
grid_length.get_value, grid_length.set_value)
grid_offset_x = self.gui.get_object("SupportGridOffsetX")
grid_offset_x.connect("value-changed", self.update_support_model)
self.settings.add_item("support_grid_offset_x",
grid_offset_x.get_value, grid_offset_x.set_value)
grid_offset_y = self.gui.get_object("SupportGridOffsetY")
grid_offset_y.connect("value-changed", self.update_support_model)
self.settings.add_item("support_grid_offset_y",
grid_offset_y.get_value, grid_offset_y.set_value)
grid_average_distance = self.gui.get_object("GridAverageDistance")
grid_average_distance.connect("value-changed",
self.update_support_model)
self.settings.add_item("support_grid_average_distance",
grid_average_distance.get_value,
grid_average_distance.set_value)
grid_minimum_bridges = self.gui.get_object("GridMinBridgesPerPolygon")
grid_minimum_bridges.connect("value-changed", self.update_support_model)
self.settings.add_item("support_grid_minimum_bridges",
grid_minimum_bridges.get_value, grid_minimum_bridges.set_value)
# manual grid adjustments
self.grid_adjustment_axis_x = self.gui.get_object("SupportGridPositionManualAxisX")
self.grid_adjustment_axis_x.connect("toggled",
self.switch_support_grid_manual_selector)
self.gui.get_object("SupportGridPositionManualResetOne").connect(
"clicked", self.reset_support_grid_manual, False)
self.gui.get_object("SupportGridPositionManualResetAll").connect(
"clicked", self.reset_support_grid_manual, True)
self.grid_adjustment_model = self.gui.get_object(
"SupportGridPositionManualList")
self.grid_adjustment_selector = self.gui.get_object(
"SupportGridPositionManualSelector")
self.grid_adjustment_selector.connect("changed",
self.switch_support_grid_manual_selector)
self.grid_adjustment_value = self.gui.get_object(
"SupportGridPositionManualAdjustment")
self.grid_adjustment_value_control = self.gui.get_object(
"SupportGridPositionManualShiftControl")
self.grid_adjustment_value_control.connect("move-slider",
self.update_support_grid_manual_adjust)
self.grid_adjustment_value_control.connect("value-changed",
self.update_support_grid_manual_adjust)
self.gui.get_object("SupportGridPositionManualShiftControl2").connect(
"value-changed", self.update_support_grid_manual_adjust)
def get_set_grid_adjustment_value(value=None):
if self.grid_adjustment_axis_x.get_active():
adjustments = self.grid_adjustments_x
else:
adjustments = self.grid_adjustments_y
index = self.grid_adjustment_selector.get_active()
if value is None:
if 0 <= index < len(adjustments):
return adjustments[index]
else:
return 0
else:
while len(adjustments) <= index:
adjustments.append(0)
adjustments[index] = value
self.settings.add_item("support_grid_adjustment_value",
get_set_grid_adjustment_value, get_set_grid_adjustment_value)
# support grid defaults
grid_distance_square.set_active(True)
self.settings.set("support_grid_distance_x", 10.0)
self.settings.set("support_grid_thickness", 0.5)
self.settings.set("support_grid_height", 0.5)
self.settings.set("support_grid_average_distance", 30)
self.settings.set("support_grid_minimum_bridges", 2)
self.settings.set("support_grid_length", 5)
self.grid_adjustment_axis_x_last = True
# toolpath grid pattern
for objname in ("GridYCount", "GridXCount", "GridYDistance",
"GridXDistance"):
self.gui.get_object(objname).connect("value-changed",
self.update_toolpath_grid_window)
# visual and general settings
for name, objname in (("show_model", "ShowModelCheckBox"),
("show_support_grid", "ShowSupportGridCheckBox"),
("show_axes", "ShowAxesCheckBox"),
("show_dimensions", "ShowDimensionsCheckBox"),
("show_bounding_box", "ShowBoundingCheckBox"),
("show_toolpath", "ShowToolPathCheckBox"),
("show_drill", "ShowDrillCheckBox"),
("show_directions", "ShowDirectionsCheckBox")):
obj = self.gui.get_object(objname)
self.settings.add_item(name, obj.get_active, obj.set_active)
# all of the objects above should trigger redraw
obj.connect("toggled", self.update_view)
self.show_progress_button = self.gui.get_object("ShowToolpathProgressButton")
self.settings.add_item("show_drill_progress",
self.show_progress_button.get_active,
self.show_progress_button.set_active)
for name, objname in (
("view_light", "OpenGLLight"),
("view_shadow", "OpenGLShadow"),
("view_polygon", "OpenGLPolygon"),
("view_perspective", "OpenGLPerspective")):
obj = self.gui.get_object(objname)
self.settings.add_item(name, obj.get_active, obj.set_active)
# send "True" to trigger a re-setup of GL settings
obj.connect("toggled", self.update_view, True)
# color selectors
def get_color_wrapper(obj):
def gtk_color_to_float():
gtk_color = obj.get_color()
alpha = obj.get_alpha()
return (gtk_color.red / GTK_COLOR_MAX,
gtk_color.green / GTK_COLOR_MAX,
gtk_color.blue / GTK_COLOR_MAX,
alpha / GTK_COLOR_MAX)
return gtk_color_to_float
def set_color_wrapper(obj):
def set_gtk_color_by_float(components):
# use alpha if it was given
if len(components) == 3:
alpha = 1.0
else:
alpha = components[3]
red, green, blue = components[:3]
obj.set_color(gtk.gdk.Color(int(red * GTK_COLOR_MAX),
int(green * GTK_COLOR_MAX), int(blue * GTK_COLOR_MAX)))
obj.set_alpha(int(alpha * GTK_COLOR_MAX))
return set_gtk_color_by_float
for name, objname in (("color_background", "ColorBackground"),
("color_model", "ColorModel"),
("color_support_grid", "ColorSupportGrid"),
("color_bounding_box", "ColorBoundingBox"),
("color_cutter", "ColorDrill"),
("color_toolpath_cut", "ColorToolpathCut"),
("color_toolpath_return", "ColorToolpathReturn"),
("color_material", "ColorMaterial")):
obj = self.gui.get_object(objname)
self.settings.add_item(name, get_color_wrapper(obj), set_color_wrapper(obj))
# repaint the 3d view after a color change
obj.connect("color-set", self.update_view)
# set the availability of ODE
self.enable_ode_control = self.gui.get_object("SettingEnableODE")
self.settings.add_item("enable_ode", self.enable_ode_control.get_active,
self.enable_ode_control.set_active)
skip_obj = self.gui.get_object("DrillProgressFrameSkipControl")
self.settings.add_item("drill_progress_max_fps", skip_obj.get_value, skip_obj.set_value)
sim_detail_obj = self.gui.get_object("SimulationDetailsValue")
self.settings.add_item("simulation_details_level", sim_detail_obj.get_value, sim_detail_obj.set_value)
# drill settings
for objname in ("ToolDiameterControl", "TorusDiameterControl",
"FeedrateControl", "SpindleSpeedControl"):
self.gui.get_object(objname).connect("value-changed", self.handle_tool_settings_change)
for name in ("SphericalCutter", "CylindricalCutter", "ToroidalCutter"):
self.gui.get_object(name).connect("clicked", self.handle_tool_settings_change)
self.gui.get_object("ToolName").connect("changed", self.handle_tool_settings_change)
# connect the "consistency check" and the update-handler with all toolpath settings
for objname in ("PushRemoveStrategy", "ContourPolygonStrategy",
"ContourFollowStrategy", "SurfaceStrategy",
"EngraveStrategy", "GridDirectionX", "GridDirectionY",
"GridDirectionXY", "MillingStyleConventional",
"MillingStyleClimb", "MillingStyleIgnore"):
self.gui.get_object(objname).connect("toggled",
self.update_process_controls)
self.gui.get_object(objname).connect("toggled",
self.handle_process_settings_change)
for objname in ("OverlapPercentControl", "MaterialAllowanceControl",
"MaxStepDownControl", "EngraveOffsetControl"):
self.gui.get_object(objname).connect("value-changed",
self.handle_process_settings_change)
self.gui.get_object("ProcessSettingName").connect("changed",
self.handle_process_settings_change)
pocketing_selector = self.gui.get_object("PocketingControl")
self.settings.add_item("pocketing_type", pocketing_selector.get_active,
pocketing_selector.set_active)
pocketing_selector.connect("changed",
self.handle_process_settings_change)
# get/set functions for the current tool/process/bounds/task
def get_current_item(table, item_list):
index = self._treeview_get_active_index(table, item_list)
if index is None:
return None
else:
return item_list[index]
def set_current_item(table, item_list, item):
old_index = self._treeview_get_active_index(table, item_list)
try:
new_index = item_list.index(item)
except ValueError:
return
if old_index == new_index:
return
else:
self._treeview_set_active_index(table, new_index)
# update all controls related the (possibly changed) item
if item_list is self.tool_list:
self.append_to_queue(self.switch_tool_table_selection)
elif item_list is self.process_list:
self.append_to_queue(self.switch_process_table_selection)
elif item_list is self.task_list:
self.append_to_queue(self.switch_tasklist_table_selection)
# the boundary manager
self.settings.add_item("current_bounds",
lambda: get_current_item(self.bounds_editor_table, self.bounds_list),
lambda bounds: set_current_item(self.bounds_editor_table, self.bounds_list, bounds))
self.bounds_editor_table = self.gui.get_object("BoundsEditorTable")
self.bounds_editor_table.get_selection().connect("changed", self.switch_bounds_table_selection)
self.gui.get_object("BoundsListMoveUp").connect("clicked", self.handle_bounds_table_event, "move_up")
self.gui.get_object("BoundsListMoveDown").connect("clicked", self.handle_bounds_table_event, "move_down")
self.gui.get_object("BoundsListAdd").connect("clicked", self.handle_bounds_table_event, "add")
self.gui.get_object("BoundsListDelete").connect("clicked", self.handle_bounds_table_event, "delete")
self.gui.get_object("BoundsMarginIncreaseX").connect("clicked", self.adjust_bounds, "x", "+")
self.gui.get_object("BoundsMarginIncreaseY").connect("clicked", self.adjust_bounds, "y", "+")
self.gui.get_object("BoundsMarginIncreaseZ").connect("clicked", self.adjust_bounds, "z", "+")
self.gui.get_object("BoundsMarginDecreaseX").connect("clicked", self.adjust_bounds, "x", "-")
self.gui.get_object("BoundsMarginDecreaseY").connect("clicked", self.adjust_bounds, "y", "-")
self.gui.get_object("BoundsMarginDecreaseZ").connect("clicked", self.adjust_bounds, "z", "-")
self.gui.get_object("BoundsMarginResetX").connect("clicked", self.adjust_bounds, "x", "0")
self.gui.get_object("BoundsMarginResetY").connect("clicked", self.adjust_bounds, "y", "0")
self.gui.get_object("BoundsMarginResetZ").connect("clicked", self.adjust_bounds, "z", "0")
# connect change handler for boundary settings
self.gui.get_object("BoundsName").connect("changed",
self.handle_bounds_settings_change)
for objname in ("boundary_x_low", "boundary_x_high", "boundary_y_low",
"boundary_y_high", "boundary_z_low", "boundary_z_high"):
self.gui.get_object(objname).connect("value-changed",
self.handle_bounds_settings_change)
# the process manager
self.settings.add_item("current_process",
lambda: get_current_item(self.process_editor_table, self.process_list),
lambda process: set_current_item(self.process_editor_table, self.process_list, process))
self.process_editor_table = self.gui.get_object("ProcessEditorTable")
self.process_editor_table.get_selection().connect("changed", self.switch_process_table_selection)
self.gui.get_object("ProcessListMoveUp").connect("clicked", self.handle_process_table_event, "move_up")
self.gui.get_object("ProcessListMoveDown").connect("clicked", self.handle_process_table_event, "move_down")
self.gui.get_object("ProcessListAdd").connect("clicked", self.handle_process_table_event, "add")
self.gui.get_object("ProcessListDelete").connect("clicked", self.handle_process_table_event, "delete")
# progress bar and task pane
self.progress_bar = self.gui.get_object("ProgressBar")
self.progress_widget = self.gui.get_object("ProgressWidget")
self.task_pane = self.gui.get_object("MainTabs")
self.progress_cancel_button = self.gui.get_object("ProgressCancelButton")
self.progress_cancel_button.connect("clicked", self.cancel_progress)
# make sure that the toolpath settings are consistent
self.toolpath_table = self.gui.get_object("ToolPathTable")
self.toolpath_table.get_selection().connect("changed", self.toolpath_table_event, "update_buttons")
self.gui.get_object("toolpath_visible").connect("toggled", self.toolpath_table_event, "toggle_visibility")
self.gui.get_object("toolpath_up").connect("clicked", self.toolpath_table_event, "move_up")
self.gui.get_object("toolpath_down").connect("clicked", self.toolpath_table_event, "move_down")
self.gui.get_object("toolpath_delete").connect("clicked", self.toolpath_table_event, "delete")
self.gui.get_object("toolpath_simulate").connect("clicked", self.toolpath_table_event, "simulate")
self.gui.get_object("toolpath_crop").connect("clicked", self.toolpath_table_event, "crop")
self.gui.get_object("ToolpathGrid").connect("clicked", self.toolpath_table_event, "grid")
self.gui.get_object("ExitSimulationButton").connect("clicked", self.finish_toolpath_simulation)
self.gui.get_object("ExportAllToolpathsButton").connect("clicked",
self.save_toolpath, False)
self.gui.get_object("ExportVisibleToolpathsButton").connect("clicked",
self.save_toolpath, True)
speed_factor_widget = self.gui.get_object("SimulationSpeedFactor")
self.settings.add_item("simulation_speed_factor",
lambda: pow(10, speed_factor_widget.get_value()),
lambda value: speed_factor_widget.set_value(math.log10(max(0.001, value))))
simulation_progress = self.gui.get_object("SimulationProgressTimelineValue")
def update_simulation_progress(widget):
if widget.get_value() == 100:
# a negative value indicates, that the simulation is finished
self.settings.set("simulation_current_distance", -1)
else:
complete = self.settings.get("simulation_complete_distance")
partial = widget.get_value() / 100.0 * complete
self.settings.set("simulation_current_distance", partial)
simulation_progress.connect("value-changed", update_simulation_progress)
# update the speed factor label
speed_factor_widget.connect("value-changed",
lambda widget: self.gui.get_object("SimulationSpeedFactorValueLabel").set_label(
"%.2f" % self.settings.get("simulation_speed_factor")))
self.simulation_window = self.gui.get_object("SimulationDialog")
self.simulation_window.connect("delete-event", self.finish_toolpath_simulation)
# store the original content (for adding the number of current toolpaths in "update_toolpath_table")
self._original_toolpath_tab_label = self.gui.get_object("ToolPathTabLabel").get_text()
# tool editor
self.settings.add_item("current_tool",
lambda: get_current_item(self.tool_editor_table, self.tool_list),
lambda tool: set_current_item(self.tool_editor_table, self.tool_list, tool))
self.tool_editor_table = self.gui.get_object("ToolEditorTable")
self.tool_editor_table.get_selection().connect("changed", self.switch_tool_table_selection)
self.gui.get_object("ToolListMoveUp").connect("clicked", self._tool_editor_button_event, "move_up")
self.gui.get_object("ToolListMoveDown").connect("clicked", self._tool_editor_button_event, "move_down")
self.gui.get_object("ToolListAdd").connect("clicked", self._tool_editor_button_event, "add")
self.gui.get_object("ToolListDelete").connect("clicked", self._tool_editor_button_event, "delete")
# the task list manager
self.settings.add_item("current_task",
lambda: get_current_item(self.tasklist_table, self.task_list),
lambda task: set_current_item(self.tasklist_table, self.task_list, task))
self.tasklist_table = self.gui.get_object("TaskListTable")
self.tasklist_table.get_selection().connect("changed", self.switch_tasklist_table_selection)
self.gui.get_object("tasklist_enabled").connect("toggled", self._handle_tasklist_button_event, "toggle_enabled")
self.gui.get_object("TaskListMoveUp").connect("clicked", self._handle_tasklist_button_event, "move_up")
self.gui.get_object("TaskListMoveDown").connect("clicked", self._handle_tasklist_button_event, "move_down")
self.gui.get_object("TaskListAdd").connect("clicked", self._handle_tasklist_button_event, "add")
self.gui.get_object("TaskListDelete").connect("clicked", self._handle_tasklist_button_event, "delete")
self.gui.get_object("GenerateToolPathButton").connect("clicked", self._handle_tasklist_button_event, "generate_one_toolpath")
self.gui.get_object("GenerateAllToolPathsButton").connect("clicked", self._handle_tasklist_button_event, "generate_all_toolpaths")
# We need to collect the signal handles to block them during
# programmatical changes. The "self._task_property_signals" list allows
# us to track all handlers that need to be blocked.
self._task_property_signals = []
for objname in ("TaskNameControl", "TaskToolSelector",
"TaskProcessSelector", "TaskBoundsSelector"):
obj = self.gui.get_object(objname)
self._task_property_signals.append((obj,
obj.connect("changed", self._handle_task_setting_change)))
# gcode settings
gcode_minimum_step_x = self.gui.get_object("GCodeMinimumStep_x")
self.settings.add_item("gcode_minimum_step_x",
gcode_minimum_step_x.get_value, gcode_minimum_step_x.set_value)
gcode_minimum_step_y = self.gui.get_object("GCodeMinimumStep_y")
self.settings.add_item("gcode_minimum_step_y",
gcode_minimum_step_y.get_value, gcode_minimum_step_y.set_value)
gcode_minimum_step_z = self.gui.get_object("GCodeMinimumStep_z")
self.settings.add_item("gcode_minimum_step_z",
gcode_minimum_step_z.get_value, gcode_minimum_step_z.set_value)
gcode_safety_height = self.gui.get_object("SafetyHeightControl")
self.settings.add_item("gcode_safety_height",
gcode_safety_height.get_value, gcode_safety_height.set_value)
gcode_spindle_delay = self.gui.get_object("GCodeSpindleDelay")
self.settings.add_item("gcode_spindle_delay",
gcode_spindle_delay.get_value, gcode_spindle_delay.set_value)
for objname, setting in (
("GCodeTouchOffOnStartup", "touch_off_on_startup"),
("GCodeTouchOffOnToolChange", "touch_off_on_tool_change")):
obj = self.gui.get_object(objname)
obj.connect("toggled", self.update_gcode_controls)
self.settings.add_item(setting, obj.get_active, obj.set_active)
touch_off_pos_selector = self.gui.get_object("TouchOffLocationSelector")
def get_touch_off_position_type():
index = touch_off_pos_selector.get_active()
if index < 0:
return PREFERENCES_DEFAULTS["touch_off_position_type"]
else:
return touch_off_pos_selector.get_model()[index][0]
def set_touch_off_position_type(new_key):
model = touch_off_pos_selector.get_model()
for index, (key, value) in enumerate(model):
if key == new_key:
touch_off_pos_selector.set_active(index)
break
else:
touch_off_pos_selector.set_active(-1)
touch_off_pos_selector.connect("changed", self.update_gcode_controls)
self.settings.add_item("touch_off_position_type",
get_touch_off_position_type, set_touch_off_position_type)
for axis in "XYZ":
obj = self.gui.get_object("ToolChangePos%s" % axis.upper())
self.settings.add_item("touch_off_position_%s" % axis.lower(),
obj.get_value, obj.set_value)
for objname, setting in (
("ToolChangeRapidMoveDown", "touch_off_rapid_move"),
("ToolChangeSlowMoveDown", "touch_off_slow_move"),
("ToolChangeSlowMoveSpeed", "touch_off_slow_feedrate"),
("TouchOffHeight", "touch_off_height")):
obj = self.gui.get_object(objname)
self.settings.add_item(setting, obj.get_value, obj.set_value)
touch_off_pause = self.gui.get_object("TouchOffPauseExecution")
self.settings.add_item("touch_off_pause_execution",
touch_off_pause.get_active, touch_off_pause.set_active)
# redraw the toolpath if safety height changed
gcode_safety_height.connect("value-changed", self.update_view)
gcode_path_mode = self.gui.get_object("GCodeCornerStyleControl")
self.settings.add_item("gcode_path_mode", gcode_path_mode.get_active,
gcode_path_mode.set_active)
gcode_path_mode.connect("changed", self.update_gcode_controls)
gcode_motion_tolerance = self.gui.get_object(
"GCodeCornerStyleMotionTolerance")
self.settings.add_item("gcode_motion_tolerance",
gcode_motion_tolerance.get_value,
gcode_motion_tolerance.set_value)
gcode_naive_tolerance = self.gui.get_object(
"GCodeCornerStyleCAMTolerance")
self.settings.add_item("gcode_naive_tolerance",
gcode_naive_tolerance.get_value,
gcode_naive_tolerance.set_value)
gcode_start_stop_spindle = self.gui.get_object("GCodeStartStopSpindle")
self.settings.add_item("gcode_start_stop_spindle",
gcode_start_stop_spindle.get_active,
gcode_start_stop_spindle.set_active)
gcode_start_stop_spindle.connect("toggled", self.update_gcode_controls)
gcode_filename_extension = self.gui.get_object("GCodeFilenameExtension")
self.settings.add_item("gcode_filename_extension",
gcode_filename_extension.get_text,
gcode_filename_extension.set_text)
# configure locations of external programs
for auto_control_name, location_control_name, browse_button, key in (
("ExternalProgramInkscapeAuto",
"ExternalProgramInkscapeControl",
"ExternalProgramInkscapeBrowse", "inkscape"),
("ExternalProgramPstoeditAuto",
"ExternalProgramPstoeditControl",
"ExternalProgramPstoeditBrowse", "pstoedit")):
self.gui.get_object(auto_control_name).connect("clicked",
self._locate_external_program, key)
location_control = self.gui.get_object(location_control_name)
self.settings.add_item("external_program_%s" % key,
location_control.get_text, location_control.set_text)
self.gui.get_object(browse_button).connect("clicked",
self._browse_external_program_location, key)
# parallel processing settings
self.enable_parallel_processes = self.gui.get_object(
"EnableParallelProcesses")
if pycam.Utils.threading.is_multiprocessing_available():
self.gui.get_object("ParallelProcessingDisabledLabel").hide()
if pycam.Utils.threading.is_server_mode_available():
self.gui.get_object("ServerModeDisabledLabel").hide()
else:
self.gui.get_object("ServerModeSettingsFrame").hide()
else:
self.gui.get_object("ParallelProcessSettingsBox").hide()
self.gui.get_object("EnableParallelProcesses").hide()
self.enable_parallel_processes.set_active(
pycam.Utils.threading.is_multiprocessing_enabled())
self.enable_parallel_processes.connect("toggled",
self.handle_parallel_processes_settings)
self.number_of_processes = self.gui.get_object(
"NumberOfProcesses")
self.number_of_processes.set_value(
pycam.Utils.threading.get_number_of_processes())
server_port_local_obj = self.gui.get_object("ServerPortLocal")
self.settings.add_item("server_port_local",
server_port_local_obj.get_value,
server_port_local_obj.set_value)
server_port_remote_obj = self.gui.get_object("RemoteServerPort")
self.settings.add_item("server_port_remote",
server_port_remote_obj.get_value,
server_port_remote_obj.set_value)
self.number_of_processes.connect("value-changed",
self.handle_parallel_processes_settings)
self.gui.get_object("EnableServerMode").connect("toggled",
self.initialize_multiprocessing)
self.gui.get_object("ServerPasswordGenerate").connect("clicked",
self.generate_random_server_password)
self.gui.get_object("ServerPasswordShow").connect("toggled",
self.update_parallel_processes_settings)
auth_key_obj = self.gui.get_object("ServerPassword")
self.settings.add_item("server_auth_key", auth_key_obj.get_text,
auth_key_obj.set_text)
server_hostname = self.gui.get_object("RemoteServerHostname")
self.settings.add_item("server_hostname",
server_hostname.get_text,
server_hostname.set_text)
cpu_cores = pycam.Utils.threading.get_number_of_cores()
if cpu_cores is None:
cpu_cores = "unknown"
self.gui.get_object("AvailableCores").set_label(str(cpu_cores))
# status bar
self.status_bar = self.gui.get_object("StatusBar")
self.gui.get_object("StatusBarEventBox").connect("button-press-event",
self.toggle_log_window)
# menu bar
uimanager = gtk.UIManager()
self._accel_group = uimanager.get_accel_group()
for window in (self.window, self.about_window, self.preferences_window,
self.log_window, self.process_pool_window,
self.font_dialog_window):
window.add_accel_group(self._accel_group)
# set the icons (in different sizes) for all windows
gtk.window_set_default_icon_list(*get_icons_pixbuffers())
# load menu data
gtk_menu_file = get_data_file_location(GTKMENU_FILE)
if gtk_menu_file is None:
gtk.main_quit()
uimanager.add_ui_from_file(gtk_menu_file)
# make the actions defined in the GTKBUILD file available in the menu
actiongroup = gtk.ActionGroup("menubar")
for action in [action for action in self.gui.get_objects()
if isinstance(action, gtk.Action)]:
actiongroup.add_action(action)
# the "pos" parameter is optional since 2.12 - we can remove it later
uimanager.insert_action_group(actiongroup, pos=-1)
# the "recent files" sub-menu
if not self.recent_manager is None:
recent_files_menu = gtk.RecentChooserMenu(self.recent_manager)
recent_files_menu.set_name("RecentFilesMenu")
recent_menu_filter = gtk.RecentFilter()
for filter_name, filter_patterns in FILTER_MODEL:
if not isinstance(filter_patterns, (list, set, tuple)):
filter_patterns = [filter_patterns]
for pattern in filter_patterns:
recent_menu_filter.add_pattern(pattern)
recent_files_menu.add_filter(recent_menu_filter)
recent_files_menu.set_show_numbers(True)
# non-local files (without "file://") are not supported. yet
recent_files_menu.set_local_only(False)
# most recent files to the top
recent_files_menu.set_sort_type(gtk.RECENT_SORT_MRU)
# show only ten files
recent_files_menu.set_limit(10)
uimanager.get_widget("/MenuBar/FileMenu/OpenRecentModelMenu").set_submenu(recent_files_menu)
recent_files_menu.connect("item-activated",
self.load_recent_model_file)
else:
self.gui.get_object("OpenRecentModel").set_visible(False)
# load the menubar and connect functions to its items
self.menubar = uimanager.get_widget("/MenuBar")
window_box = self.gui.get_object("WindowBox")
window_box.pack_start(self.menubar, False)
window_box.reorder_child(self.menubar, 0)
# some more initialization
self.reset_preferences()
self.load_preferences()
self.load_task_settings()
# Without this "gkt.main_iteration" loop the task settings file
# control would not be updated in time.
while gtk.events_pending():
gtk.main_iteration()
autoload_task_filename = self.settings.get("default_task_settings_file")
if autoload_task_filename:
self.open_task_settings_file(autoload_task_filename)
self.update_all_controls()
self.no_dialog = no_dialog
if not self.no_dialog:
# register a logging handler for displaying error messages
pycam.Utils.log.add_gtk_gui(self.window, logging.ERROR)
# register a callback for the log window
pycam.Utils.log.add_hook(self.add_log_message)
self.window.show()
def update_all_controls(self):
self.update_toolpath_table()
self.update_tool_table()
self.update_process_controls()
self.update_process_table()
self.update_bounds_table()
self.update_tasklist_table()
self.update_save_actions()
self.update_unit_labels()
self.update_support_controls()
self.update_model_dimensions()
self.update_gcode_controls()
self.update_ode_settings()
self.update_parallel_processes_settings()
self.update_model_type_related_controls()
self.update_toolpath_related_controls()
self.update_clipboard_state()
    def update_gcode_controls(self, widget=None):
        """Adjust visibility/sensitivity of the GCode preference widgets.

        Connected to the "toggled"/"changed" signals of several GCode
        settings controls; the "widget" argument only satisfies the GTK
        signal handler signature and is ignored.
        """
        # path mode
        # the tolerance inputs are only meaningful for path mode 3
        # (presumably the "path blending with tolerance" mode - confirm
        # against PATH_MODES in the GCode exporter)
        path_mode = self.settings.get("gcode_path_mode")
        self.gui.get_object("GCodeToleranceTable").set_sensitive(path_mode == 3)
        # spindle delay
        # the delay input is pointless unless PyCAM starts/stops the spindle
        sensitive = self.settings.get("gcode_start_stop_spindle")
        self.gui.get_object("GCodeSpindleDelayLabel").set_sensitive(sensitive)
        self.gui.get_object("GCodeSpindleDelay").set_sensitive(sensitive)
        # tool change controls
        pos_control = self.gui.get_object("TouchOffLocationSelector")
        tool_change_pos_model = pos_control.get_model()
        active_pos_index = pos_control.get_active()
        if active_pos_index < 0:
            # no location type selected
            pos_key = None
        else:
            # the first model column holds the location type key
            pos_key = tool_change_pos_model[active_pos_index][0]
        # show or hide the vbox containing the absolute tool change location
        absolute_pos_box = self.gui.get_object("AbsoluteToolChangePositionBox")
        if pos_key == "absolute":
            absolute_pos_box.show()
        else:
            absolute_pos_box.hide()
        # disable/enable the touch off position controls
        position_controls_table = self.gui.get_object("TouchOffLocationTable")
        touch_off_enabled = any([self.gui.get_object(objname).get_active()
                for objname in ("GCodeTouchOffOnStartup",
                    "GCodeTouchOffOnToolChange")])
        position_controls_table.set_sensitive(touch_off_enabled)
        # disable/enable touch probe height
        # the probe height is only relevant for touch off on startup
        if self.gui.get_object("GCodeTouchOffOnStartup").get_active():
            update_func = "show"
        else:
            update_func = "hide"
        for objname in ("TouchOffHeight", "TouchOffHeightLabel",
                "LengthUnitTouchOffHeight"):
            getattr(self.gui.get_object(objname), update_func)()
def update_model_type_related_controls(self):
is_reversible = self.model and hasattr(self.model, "reverse_directions")
controls_2d = ("ToggleModelDirectionButton", "DirectionsGuessButton")
for control in controls_2d:
if is_reversible:
self.gui.get_object(control).show()
else:
self.gui.get_object(control).hide()
is_extrudable = self.model and hasattr(self.model, "extrude")
extrude_button = self.gui.get_object("ExtrudeButton")
if is_extrudable:
extrude_button.show()
else:
extrude_button.hide()
is_projectable = self.model and (self.model.minz != self.model.maxz)
if is_projectable:
self.gui.get_object("Projection2D").show()
else:
self.gui.get_object("Projection2D").hide()
# disable the lower boundary for contour models
is_contour = isinstance(self.model, pycam.Geometry.Model.ContourModel)
margin_type = self._load_bounds_settings_from_gui().get_type()
z_low_control = self.gui.get_object("boundary_z_low")
if is_contour and (margin_type != Bounds.TYPE_CUSTOM):
z_low_control.set_sensitive(False)
else:
z_low_control.set_sensitive(True)
# copy button
self.gui.get_object("CopyModelToClipboard").set_sensitive(
bool(self.model and self.model.is_export_supported()))
def update_ode_settings(self, widget=None):
if pycam.Utils.threading.is_multiprocessing_enabled() \
or not pycam.Physics.ode_physics.is_ode_available():
self.enable_ode_control.set_sensitive(False)
self.enable_ode_control.set_active(False)
else:
self.enable_ode_control.set_sensitive(True)
def progress_activity_guard(func):
def progress_activity_guard_wrapper(self, *args, **kwargs):
if self._progress_running:
return
self._progress_running = True
self._progress_cancel_requested = False
self.toggle_progress_bar(True)
result = func(self, *args, **kwargs)
self.toggle_progress_bar(False)
self._progress_running = False
return result
return progress_activity_guard_wrapper
def gui_activity_guard(func):
def gui_activity_guard_wrapper(self, *args, **kwargs):
if self.gui_is_active:
return
self.gui_is_active = True
try:
result = func(self, *args, **kwargs)
except Exception:
# Catch possible exceptions (except system-exit ones) and
# report them.
report_exception()
result = None
self.gui_is_active = False
while self._batch_queue:
batch_func, batch_args, batch_kwargs = self._batch_queue[0]
del self._batch_queue[0]
batch_func(*batch_args, **batch_kwargs)
return result
return gui_activity_guard_wrapper
def _store_undo_state(self):
# for now we only store the model
if not self.model:
return
self._undo_states.append(pickle.dumps(self.model))
log.debug("Stored the current state of the model for undo")
while len(self._undo_states) > MAX_UNDO_STATES:
self._undo_states.pop(0)
self.gui.get_object("UndoButton").set_sensitive(True)
def _restore_undo_state(self, widget=None, event=None):
if len(self._undo_states) > 0:
latest = StringIO.StringIO(self._undo_states.pop(-1))
self.model = pickle.Unpickler(latest).load()
self.gui.get_object("UndoButton").set_sensitive(
len(self._undo_states) > 0)
log.info("Restored the previous state of the model")
self._update_all_model_attributes()
else:
log.info("No previous undo state available - request ignored")
def show_help(self, widget=None, page="Main_Page"):
if not page.startswith("http"):
url = HELP_WIKI_URL % page
else:
url = page
webbrowser.open(url)
def update_view(self, widget=None, data=None):
if self.view3d and self.view3d.is_visible and not self.no_dialog:
if data:
self.view3d.glsetup()
self.view3d.paint()
def set_model_filename(self, filename):
""" Store the given filename for a possible later "save model" action.
Additionally the window's title is adjusted and the "save" buttons are
updated.
"""
uri = pycam.Utils.URIHandler(filename)
self.last_model_uri = uri
if not self.last_model_uri:
self.window.set_title("PyCAM")
else:
short_name = os.path.basename(uri.get_path())
self.window.set_title("%s - PyCAM" % short_name)
self.update_save_actions()
def update_save_actions(self):
self.gui.get_object("SaveTaskSettings").set_sensitive(
bool(self.last_task_settings_uri and \
self.last_task_settings_uri.is_writable()))
save_as_possible = self.model and self.model.is_export_supported()
self.gui.get_object("SaveAsModel").set_sensitive(save_as_possible)
save_possible = bool(self.last_model_uri and save_as_possible and \
self.last_model_uri.is_writable())
#TODO: fix this dirty hack to avoid silent overwrites of PS/DXF files as SVG
if save_possible:
extension = os.path.splitext(self.last_model_uri.get_path(
))[-1].lower()
# TODO: fix these hard-coded file extensions
if extension[1:] in ("eps", "ps", "dxf"):
# can't save 2D formats except SVG
save_possible = False
self.gui.get_object("SaveModel").set_sensitive(save_possible)
    @gui_activity_guard
    def update_support_controls(self, widget=None):
        """Show/hide the support grid option widgets for the current grid type.

        Also keeps dependent settings consistent (a "square" grid forces
        the y distance to track the x distance) and refreshes the support
        model preview afterwards.
        """
        # maps each expander widget to the grid types it is relevant for
        controls = {"GridProfileExpander": ("grid", "automatic_edge",
                    "automatic_corner"),
                "GridPatternExpander": ("grid", ),
                "GridPositionExpander": ("grid", ),
                "GridManualShiftExpander": ("grid", ),
                "GridAverageDistanceExpander": ("automatic_edge",
                    "automatic_corner"),
        }
        grid_type = self.settings.get("support_grid_type")
        if grid_type == GRID_TYPES["grid"]:
            grid_square = self.gui.get_object("SupportGridDistanceSquare")
            distance_y = self.gui.get_object("SupportGridDistanceYControl")
            # a square grid has no independent y distance
            distance_y.set_sensitive(not grid_square.get_active())
            if grid_square.get_active():
                # We let "distance_y" track the value of "distance_x".
                self.settings.set("support_grid_distance_y",
                        self.settings.get("support_grid_distance_x"))
            self.update_support_grid_manual_model()
            self.switch_support_grid_manual_selector()
        elif grid_type in (GRID_TYPES["automatic_edge"],
                GRID_TYPES["automatic_corner"], GRID_TYPES["none"]):
            # no extra consistency handling needed for these types
            pass
        elif grid_type < 0:
            # not initialized
            pass
        else:
            log.error("Invalid grid type: %d" % grid_type)
        # show and hide all controls according to the current type
        for key, grid_types in controls.iteritems():
            obj = self.gui.get_object(key)
            if grid_type in [GRID_TYPES[allowed] for allowed in grid_types]:
                obj.show()
            else:
                obj.hide()
        self.update_support_model()
        self.update_view()
    def update_support_model(self, widget=None):
        """Rebuild the support grid model according to the current settings.

        Stores the resulting model (or None, if no valid support grid is
        configured) in the "support_grid" setting and triggers a redraw.
        """
        grid_type = self.settings.get("support_grid_type")
        s = self.settings
        support_grid = None
        if grid_type == GRID_TYPES["grid"]:
            # a regular grid requires positive thickness and height and at
            # least one positive distance; any non-zero distance must exceed
            # the bridge thickness
            if (s.get("support_grid_thickness") > 0) \
                    and ((s.get("support_grid_distance_x") > 0) \
                    or (s.get("support_grid_distance_y") > 0)) \
                    and ((s.get("support_grid_distance_x") == 0) \
                    or (s.get("support_grid_distance_x") \
                    > s.get("support_grid_thickness"))) \
                    and ((s.get("support_grid_distance_y") == 0) \
                    or (s.get("support_grid_distance_y") \
                    > s.get("support_grid_thickness"))) \
                    and (s.get("support_grid_height") > 0):
                support_grid = pycam.Toolpath.SupportGrid.get_support_grid(
                        s.get("minx"), s.get("maxx"), s.get("miny"), s.get("maxy"),
                        s.get("minz"), s.get("support_grid_distance_x"),
                        s.get("support_grid_distance_y"),
                        s.get("support_grid_thickness"),
                        s.get("support_grid_height"),
                        offset_x=s.get("support_grid_offset_x"),
                        offset_y=s.get("support_grid_offset_y"),
                        adjustments_x=self.grid_adjustments_x,
                        adjustments_y=self.grid_adjustments_y)
        elif grid_type in (GRID_TYPES["automatic_edge"],
                GRID_TYPES["automatic_corner"]):
            if (s.get("support_grid_thickness") > 0) \
                    and (s.get("support_grid_height") > 0) \
                    and (s.get("support_grid_average_distance") > 0) \
                    and (s.get("support_grid_minimum_bridges") > 0):
                # get the minimum z value of the bounding box
                bounds = self.settings.get("current_bounds")
                if (bounds is None) and (len(self.bounds_list) > 0):
                    # fall back to the first defined bounds
                    bounds = self.bounds_list[0]
                if not bounds is None:
                    minz = bounds.get_absolute_limits(
                            reference=self.model.get_bounds())[0][2]
                    corner_start = (grid_type == GRID_TYPES["automatic_corner"])
                    support_grid = pycam.Toolpath.SupportGrid.get_support_distributed(
                            s.get("model"), minz,
                            s.get("support_grid_average_distance"),
                            s.get("support_grid_minimum_bridges"),
                            s.get("support_grid_thickness"),
                            s.get("support_grid_height"),
                            s.get("support_grid_length"),
                            bounds.get_referenced_bounds(s.get("model").get_bounds()),
                            start_at_corners=corner_start)
        elif grid_type == GRID_TYPES["none"]:
            # explicitly no support grid
            pass
        s.set("support_grid", support_grid)
        self.update_view()
    def switch_support_grid_manual_selector(self, widget=None):
        """Update the manual grid shift controls after an axis/line switch.

        Refreshes the grid line selector list whenever the active axis
        changed and limits the adjustment range to a fraction of the grid
        distance of the currently selected axis.
        """
        old_axis_was_x = self.grid_adjustment_axis_x_last
        self.grid_adjustment_axis_x_last = \
                self.grid_adjustment_axis_x.get_active()
        if self.grid_adjustment_axis_x.get_active():
            # x axis is selected
            if not old_axis_was_x:
                self.update_support_grid_manual_model()
            max_distance = self.settings.get("support_grid_distance_x")
        else:
            # y axis
            if old_axis_was_x:
                self.update_support_grid_manual_model()
            max_distance = self.settings.get("support_grid_distance_y")
        # we allow an individual adjustment of 66% of the distance
        max_distance /= 1.5
        if hasattr(self.grid_adjustment_value, "set_lower"):
            # gtk 2.14 is required for "set_lower" and "set_upper"
            self.grid_adjustment_value.set_lower(-max_distance)
            self.grid_adjustment_value.set_upper(max_distance)
        # adopt the stored adjustment value (avoid redundant signal storms)
        if self.grid_adjustment_value.get_value() \
                != self.settings.get("support_grid_adjustment_value"):
            self.grid_adjustment_value.set_value(self.settings.get(
                    "support_grid_adjustment_value"))
        # the shift controls are only usable with a selected grid line
        self.gui.get_object("SupportGridPositionManualShiftBox").set_sensitive(
                self.grid_adjustment_selector.get_active() >= 0)
    def update_support_grid_manual_adjust(self, widget=None, data1=None,
            data2=None):
        """Store the new manual adjustment value and refresh the selector
        label plus the 3D view.

        The extra "data1"/"data2" arguments only absorb the varying
        callback signatures of the connected GTK signals.
        """
        new_value = self.grid_adjustment_value.get_value()
        self.settings.set("support_grid_adjustment_value", new_value)
        tree_iter = self.grid_adjustment_selector.get_active_iter()
        if not tree_iter is None:
            # show the shift (e.g. "(+1.5)") next to the grid position
            value_string = "(%+.1f)" % new_value
            self.grid_adjustment_model.set(tree_iter, 1, value_string)
        self.update_support_model()
        self.update_view()
def reset_support_grid_manual(self, widget=None, reset_all=False):
if reset_all:
self.grid_adjustments_x = []
self.grid_adjustments_y = []
else:
self.settings.set("support_grid_adjustment_value", 0)
self.update_support_grid_manual_model()
self.switch_support_grid_manual_selector()
self.update_support_model()
self.update_view()
    def update_support_grid_manual_model(self):
        """Rebuild the list model of grid positions for the manual support
        grid adjustment selector.

        The positions are derived from the automatic grid layout; the
        per-position adjustment lists are padded with zeros so that every
        grid line has an entry.  The previous selection is restored if it
        is still valid.
        """
        old_index = self.grid_adjustment_selector.get_active()
        model = self.grid_adjustment_model
        model.clear()
        s = self.settings
        # get the toolpath without adjustments
        base_x, base_y = pycam.Toolpath.SupportGrid.get_support_grid_locations(
                s.get("minx"), s.get("maxx"), s.get("miny"), s.get("maxy"),
                s.get("support_grid_distance_x"),
                s.get("support_grid_distance_y"),
                offset_x=s.get("support_grid_offset_x"),
                offset_y=s.get("support_grid_offset_y"))
        # fill the adjustment lists
        while len(self.grid_adjustments_x) < len(base_x):
            self.grid_adjustments_x.append(0)
        while len(self.grid_adjustments_y) < len(base_y):
            self.grid_adjustments_y.append(0)
        # select the currently active list
        if self.grid_adjustment_axis_x.get_active():
            base = base_x
            adjustments = self.grid_adjustments_x
        else:
            base = base_y
            adjustments = self.grid_adjustments_y
        # generate the model content
        for index, base_value in enumerate(base):
            position = "%.2f%s" % (base_value, s.get("unit"))
            if (0 <= index < len(adjustments)) and (adjustments[index] != 0):
                diff = "(%+.1f)" % adjustments[index]
            else:
                diff = ""
            model.append((position, diff))
        # restore the previous selection, if possible
        if old_index < len(base):
            self.grid_adjustment_selector.set_active(old_index)
        else:
            self.grid_adjustment_selector.set_active(-1)
@gui_activity_guard
def generate_random_server_password(self, widget=None):
all_characters = string.letters + string.digits
random_pw = "".join([random.choice(all_characters) for i in range(12)])
self.gui.get_object("ServerPassword").set_text(random_pw)
    @gui_activity_guard
    def update_parallel_processes_settings(self, widget=None):
        """Refresh the sensitivity/visibility of the multiprocessing
        preference widgets according to the current checkbox states.
        """
        parallel_settings = self.gui.get_object("ParallelProcessSettingsBox")
        server_enabled = self.gui.get_object("EnableServerMode")
        server_mode_settings = self.gui.get_object("ServerModeSettingsTable")
        # update the show/hide state of the password
        # NOTE(review): the name "hide_password" is misleading -
        # set_visibility(True) makes the entry text readable; verify the
        # intended semantics of the "ServerPasswordShow" checkbox
        hide_password = self.gui.get_object("ServerPasswordShow").get_active()
        self.gui.get_object("ServerPassword").set_visibility(hide_password)
        # warn about "0 processes" with multiprocessing enabled
        if (self.gui.get_object("NumberOfProcesses").get_value() == 0) \
                and self.enable_parallel_processes.get_active():
            self.gui.get_object("ZeroProcessesWarning").show()
        else:
            self.gui.get_object("ZeroProcessesWarning").hide()
        if self.enable_parallel_processes.get_active():
            parallel_settings.set_sensitive(True)
            if server_enabled.get_active():
                # don't allow changes for an active connection
                server_mode_settings.set_sensitive(False)
            else:
                server_mode_settings.set_sensitive(True)
        else:
            parallel_settings.set_sensitive(False)
            server_enabled.set_active(False)
        # check availability of ODE again (conflicts with multiprocessing)
        self.update_ode_settings()
def handle_parallel_processes_settings(self, widget=None):
new_num_of_processes = self.number_of_processes.get_value()
new_enable_parallel = self.enable_parallel_processes.get_active()
old_num_of_processes = pycam.Utils.threading.get_number_of_processes()
old_enable_parallel = pycam.Utils.threading.is_multiprocessing_enabled()
if (old_num_of_processes != new_num_of_processes) \
or (old_enable_parallel != new_enable_parallel):
self.initialize_multiprocessing()
    @gui_activity_guard
    def initialize_multiprocessing(self, widget=None):
        """(Re)start or shut down the multiprocessing subsystem according
        to the current GUI settings.

        Handles local worker processes as well as the optional server mode
        (local or remote).  The surrounding frame is temporarily disabled
        while the connection is being established.
        """
        complete_area = self.gui.get_object("MultiprocessingFrame")
        # prevent any further actions while the connection is established
        complete_area.set_sensitive(False)
        # wait for the above "set_sensitive" to finish
        while gtk.events_pending():
            gtk.main_iteration()
        enable_parallel = self.enable_parallel_processes.get_active()
        enable_server_obj = self.gui.get_object("EnableServerMode")
        enable_server = enable_server_obj.get_active()
        remote_host = self.gui.get_object("RemoteServerHostname").get_text()
        if remote_host:
            remote_port = int(self.gui.get_object(
                    "RemoteServerPort").get_value())
            remote = "%s:%s" % (remote_host, remote_port)
        else:
            remote = None
        local_port = int(self.gui.get_object("ServerPortLocal").get_value())
        auth_key = self.gui.get_object("ServerPassword").get_text()
        # server mode strictly requires a password
        if not auth_key and enable_parallel and enable_server:
            log.error("You need to provide a password for this connection.")
            enable_server_obj.set_active(False)
        elif enable_parallel:
            if enable_server and \
                    (pycam.Utils.get_platform() == pycam.Utils.PLATFORM_WINDOWS):
                # Windows cannot mix local and remote worker processes
                if self.number_of_processes.get_value() > 0:
                    log.warn("Mixed local and remote processes are " + \
                            "currently not available on the Windows platform. " + \
                            "Setting the number of local processes to zero." + \
                            os.linesep + "See <a href=\"" + \
                            HELP_WIKI_URL % "Parallel_Processing_on_different_Platforms" + \
                            "\">platform feature matrix</a> for more details.")
                    self.number_of_processes.set_value(0)
                self.number_of_processes.set_sensitive(False)
            else:
                self.number_of_processes.set_sensitive(True)
            num_of_processes = int(self.number_of_processes.get_value())
            error = pycam.Utils.threading.init_threading(
                    number_of_processes=num_of_processes,
                    enable_server=enable_server, remote=remote,
                    server_credentials=auth_key, local_port=local_port)
            if error:
                log.error("Failed to start server: %s" % error)
                pycam.Utils.threading.cleanup()
                enable_server_obj.set_active(False)
        else:
            pycam.Utils.threading.cleanup()
            log.info("Multiprocessing disabled")
        # set the label of the "connect" button
        if enable_server_obj.get_active():
            info = gtk.stock_lookup(gtk.STOCK_DISCONNECT)
        else:
            info = gtk.stock_lookup(gtk.STOCK_CONNECT)
        enable_server_obj.set_label(info[0])
        complete_area.set_sensitive(True)
        self.append_to_queue(self.update_parallel_processes_settings)
def _browse_external_program_location(self, widget=None, key=None):
location = self.get_filename_via_dialog(title="Select the executable " \
+ "for '%s'" % key, mode_load=True,
parent=self.preferences_window)
if not location is None:
self.settings.set("external_program_%s" % key, location)
def _locate_external_program(self, widget=None, key=None):
# the button was just activated
location = pycam.Utils.get_external_program_location(key)
if not location:
log.error("Failed to locate the external program '%s'. " % key \
+ "Please install the program and try again." \
+ os.linesep \
+ "Or maybe you need to specify the location manually.")
else:
# store the new setting
self.settings.set("external_program_%s" % key, location)
@gui_activity_guard
def adjust_bounds(self, widget, axis, change):
bounds = self.settings.get("current_bounds")
abs_bounds_low, abs_bounds_high = bounds.get_absolute_limits(
reference=self.model.get_bounds())
# calculate the "change" for +/- (10% of this axis' model dimension)
if bounds is None:
return
if axis == "x":
change_value = (self.model.maxx - self.model.minx) * 0.1
elif axis == "y":
change_value = (self.model.maxy - self.model.miny) * 0.1
elif axis == "z":
change_value = (self.model.maxz - self.model.minz) * 0.1
else:
# not allowed
return
# calculate the new bounds
axis_index = "xyz".index(axis)
if change == "0":
abs_bounds_low[axis_index] = getattr(self.model, "min%s" % axis)
abs_bounds_high[axis_index] = getattr(self.model, "max%s" % axis)
elif change == "+":
abs_bounds_low[axis_index] -= change_value
abs_bounds_high[axis_index] += change_value
elif change == "-":
abs_bounds_low[axis_index] += change_value
abs_bounds_high[axis_index] -= change_value
else:
# not allowed
return
# transfer the new bounds values to the old settings
bounds.adjust_bounds_to_absolute_limits(abs_bounds_low, abs_bounds_high,
reference=self.model.get_bounds())
# update the controls
self._put_bounds_settings_to_gui(bounds)
# update the visualization
self.append_to_queue(self.update_boundary_limits)
    @gui_activity_guard
    def switch_bounds_type(self, widget=None):
        """Convert the current bounds item to the newly selected margin
        type while keeping the same absolute limits.
        """
        bounds = self.settings.get("current_bounds")
        new_type = self._load_bounds_settings_from_gui().get_type()
        if new_type == bounds.get_type():
            # no change
            return
        # calculate the absolute bounds of the previous configuration
        abs_bounds_low, abs_bounds_high = bounds.get_absolute_limits(
                reference=self.model.get_bounds())
        bounds.set_type(new_type)
        # re-express the unchanged absolute limits in the new type's terms
        bounds.adjust_bounds_to_absolute_limits(abs_bounds_low, abs_bounds_high,
                reference=self.model.get_bounds())
        self._put_bounds_settings_to_gui(bounds)
        # update the descriptive label for each margin type
        self.update_bounds_controls()
        self.append_to_queue(self.update_boundary_limits)
        # update the sensitivity of the lower z margin for contour models
        self.update_model_type_related_controls()
    @gui_activity_guard
    def update_boundary_limits(self, widget=None):
        """Propagate a boundary change to all dependent parts of the GUI
        (support grid list, support model, 3D view).
        """
        # update the values in the manual support grid adjustment list
        self.update_support_grid_manual_model()
        # the support grid depends on the boundary
        self.update_support_model()
        self.update_view()
    def update_tasklist_controls(self):
        """Adjust the sensitivity of the task list buttons according to
        the current selection and list contents.
        """
        # en/disable some buttons
        index = self._treeview_get_active_index(self.tasklist_table, self.task_list)
        selection_active = not index is None
        self.gui.get_object("TaskListDelete").set_sensitive(selection_active)
        self.gui.get_object("TaskListMoveUp").set_sensitive(selection_active and index > 0)
        self.gui.get_object("TaskListMoveDown").set_sensitive(selection_active and index < len(self.task_list) - 1)
        self.gui.get_object("GenerateToolPathButton").set_sensitive(selection_active)
        # "add" is only allowed, if there are any tools, processes and bounds
        self.gui.get_object("TaskListAdd").set_sensitive(
                (len(self.tool_list) > 0) \
                and (len(self.process_list) > 0) \
                and (len(self.bounds_list) > 0))
        details_box = self.gui.get_object("TaskDetails")
        if selection_active:
            details_box.show()
        else:
            details_box.hide()
        # check if any of the tasks is marked as "enabled"
        enabled_count = len([True for task in self.task_list if task["enabled"]])
        self.gui.get_object("GenerateAllToolPathsButton").set_sensitive(enabled_count > 0)
        # update the summary description of the currently active task
        self.update_task_description()
    def update_task_description(self):
        """Refresh the task detail controls and the textual summary of the
        currently selected task.

        The "changed" signals of the task controls are blocked while they
        are being updated to avoid recursive signal handling.
        """
        # update the task description
        lines = []
        task_index = self._treeview_get_active_index(self.tasklist_table, self.task_list)
        if (not task_index is None) and (task_index < len(self.task_list)):
            task = self.task_list[task_index]
            # block all "change" signals for the task controls
            for obj, signal_handler in self._task_property_signals:
                obj.handler_block(signal_handler)
            self.gui.get_object("TaskNameControl").set_text(task["name"])
            tool = task["tool"]
            self.gui.get_object("TaskToolSelector").set_active(self.tool_list.index(tool))
            process = task["process"]
            self.gui.get_object("TaskProcessSelector").set_active(self.process_list.index(process))
            bounds = task["bounds"]
            self.gui.get_object("TaskBoundsSelector").set_active(self.bounds_list.index(bounds))
            # unblock the signals again
            for obj, signal_handler in self._task_property_signals:
                obj.handler_unblock(signal_handler)
            unit = self.settings.get("unit")
            # tool description: diameter (plus torus diameter, if applicable)
            tool_desc = "Tool: %s " % tool["shape"]
            if tool["shape"] != "ToroidalCutter":
                tool_desc += "(%.4f%s)" % (2 * tool["tool_radius"], unit)
            else:
                tool_desc += "(%.4f%s / %.4f%s)" % ( 2 * tool["tool_radius"], unit, 2 * tool["torus_radius"], unit)
            lines.append(tool_desc)
            lines.append("Spindle speed: %drpm / Feedrate: %d%s/minute" % (tool["speed"], tool["feedrate"], unit))
            lines.append("Strategy: %s" % process["path_strategy"])
            # only show the settings that are relevant for this strategy
            if process["path_strategy"] == "EngraveStrategy":
                lines.append("Engrave offset: %.3f" % process["engrave_offset"])
            else:
                lines.append("Milling style: %s" % process["milling_style"])
                if process["path_strategy"] != "ContourFollowStrategy":
                    lines.append("Overlap: %d%%" % process["overlap_percent"])
                    lines.append("Material allowance: %.2f%s" \
                            % (process["material_allowance"], unit))
            if process["path_strategy"] != "SurfaceStrategy":
                lines.append("Maximum step down: %.2f%s" % (process["step_down"], unit))
        else:
            lines.append("No task selected")
        self.gui.get_object("CurrentTaskSummary").set_text(os.linesep.join(lines))
    def update_tasklist_table(self, new_index=None, skip_model_update=False):
        """Rebuild the task list tree model and restore the selection.

        @param new_index: row to select afterwards (None keeps the
            current selection)
        @param skip_model_update: if True, only the selection and the
            button states are refreshed
        """
        tasklist_model = self.gui.get_object("TaskList")
        if new_index is None:
            # keep the old selection - this may return "None" if nothing is selected
            new_index = self._treeview_get_active_index(self.tasklist_table, self.task_list)
        if not skip_model_update:
            tasklist_model.clear()
            # remove broken tasks from the list (tool or process was deleted)
            self.task_list = [task for task in self.task_list
                    if (task["tool"] in self.tool_list) \
                            and (task["process"] in self.process_list) \
                            and (task["bounds"] in self.bounds_list)]
            counter = 0
            for task in self.task_list:
                tasklist_model.append((counter, task["name"], task["enabled"]))
                counter += 1
            if not new_index is None:
                self._treeview_set_active_index(self.tasklist_table, new_index)
        self.update_tasklist_controls()
    def switch_tasklist_table_selection(self, widget=None):
        """Synchronize the tool/process/bounds selections with the newly
        selected task.
        """
        current_task = self.settings.get("current_task")
        if not current_task is None:
            self.settings.set("current_tool", current_task["tool"])
            self.update_tool_table(skip_model_update=True)
            self.settings.set("current_process", current_task["process"])
            self.update_process_table(skip_model_update=True)
            self.settings.set("current_bounds", current_task["bounds"])
            self.update_bounds_table(skip_model_update=True)
        self.update_tasklist_controls()
    @gui_activity_guard
    def _handle_task_setting_change(self, widget, data=None):
        """Transfer changed task detail controls (name, tool, process,
        bounds) into the currently selected task item.
        """
        # get the index of the currently selected task
        task = self.settings.get("current_task")
        if task is None:
            return
        task_name_obj = self.gui.get_object("TaskNameControl")
        old_name = task["name"]
        new_name = task_name_obj.get_text()
        if old_name != new_name:
            task["name"] = new_name
        tool_id = self.gui.get_object("TaskToolSelector").get_active()
        task["tool"] = self.tool_list[tool_id]
        process_id = self.gui.get_object("TaskProcessSelector").get_active()
        task["process"] = self.process_list[process_id]
        bounds_id = self.gui.get_object("TaskBoundsSelector").get_active()
        old_bounds_id = self.bounds_list.index(task["bounds"])
        task["bounds"] = self.bounds_list[bounds_id]
        # update the current boundary limit, if it was changed
        if bounds_id != old_bounds_id:
            self.append_to_queue(self.update_boundary_limits)
        # update the tasklist table (especially for name changes)
        self.update_tasklist_table()
        # the task_name input control seems to loose focus somehow
        if old_name != new_name:
            task_name_obj.grab_focus()
    @gui_activity_guard
    def _handle_tasklist_button_event(self, widget, data, action=None):
        """Dispatch the button events of the task list (add, delete, move,
        toggle, generate toolpaths).
        """
        # "toggle" uses two parameters - all other actions have only one
        if action is None:
            action = data
        # get the index of the currently selected task
        try:
            current_task_index = self._treeview_get_active_index(self.tasklist_table, self.task_list)
        except ValueError:
            current_task_index = None
        # generic handling (delete, move up/down) happens in the helper
        self._treeview_button_event(self.tasklist_table, self.task_list, action, self.update_tasklist_table)
        if action == "add":
            new_task = {}
            # look for the first unused default name
            prefix = "New Task "
            index = 1
            # loop while the current name is in use
            while [True for task in self.task_list if task["name"] == "%s%d" % (prefix, index)]:
                index += 1
            new_task["name"] = "%s%d" % (prefix, index)
            new_task["tool"] = self.tool_list[0]
            new_task["process"] = self.process_list[0]
            new_task["bounds"] = self.bounds_list[0]
            new_task["enabled"] = True
            self.task_list.append(new_task)
            self.update_tasklist_table(self.task_list.index(new_task))
        elif action == "toggle_enabled":
            # "data" contains the row of the clicked checkbox
            if not data is None:
                current_task_index = int(data)
                if (not current_task_index is None) and (current_task_index < len(self.task_list)):
                    self.task_list[current_task_index]["enabled"] = not self.task_list[current_task_index]["enabled"]
            # update the table values
            self.update_tasklist_table(current_task_index)
        elif action == "generate_all_toolpaths":
            self.process_multiple_tasks()
        elif action == "generate_one_toolpath":
            self.process_one_task(current_task_index)
        else:
            pass
    def process_one_task(self, task_index):
        """Generate the toolpath for a single task.

        @param task_index: index into self.task_list (negative indices are
            accepted, as with any Python list)
        """
        try:
            task = self.task_list[task_index]
        except IndexError:
            # this should only happen, if we were called in batch mode (command line)
            log.warn("The given task ID (%d) does not exist. Valid values are: %s." % (task_index, range(len(self.task_list))))
            return
        self.generate_toolpath(task["tool"], task["process"], task["bounds"])
def process_multiple_tasks(self, task_list=None):
if task_list is None:
task_list = self.task_list[:]
enabled_tasks = []
for index in range(len(task_list)):
task = task_list[index]
if task["enabled"]:
enabled_tasks.append(task)
progress_bar = self.gui.get_object("MultipleProgressBar")
progress_bar.show()
for index in range(len(enabled_tasks)):
progress_bar.set_fraction(float(index) / len(enabled_tasks))
progress_bar.set_text("Toolpath %d/%d" % (index, len(enabled_tasks)))
task = enabled_tasks[index]
if not self.generate_toolpath(task["tool"], task["process"],
task["bounds"]):
# break out of the loop, if cancel was requested
break
progress_bar.hide()
    def update_process_controls(self, widget=None, data=None):
        """Enable/disable the process parameter widgets according to the
        selected path strategy and enforce the strategy's constraints on
        the grid direction and milling style radio buttons.
        """
        # possible dependencies of the DropCutter
        get_obj = self.gui.get_object
        strategy = None
        for one_strategy in ("PushRemoveStrategy", "ContourPolygonStrategy",
                "ContourFollowStrategy", "SurfaceStrategy", "EngraveStrategy"):
            if get_obj(one_strategy).get_active():
                strategy = one_strategy
                break
        else:
            raise ValueError("Invalid path strategy selected")
        # ContourPolygon requires the "ignore" milling style ...
        if strategy == "ContourPolygonStrategy" \
                and not get_obj("MillingStyleIgnore").get_active():
            get_obj("MillingStyleIgnore").set_active(True)
        if strategy == "ContourPolygonStrategy" \
                and not get_obj("GridDirectionX").get_active():
            # only "x" direction for ContourPolygon
            get_obj("GridDirectionX").set_active(True)
        # ... while these strategies need an explicit milling style
        if strategy in ("ContourFollowStrategy", "EngraveStrategy") \
                and get_obj("MillingStyleIgnore").get_active():
            get_obj("MillingStyleConventional").set_active(True)
        all_controls = ("GridDirectionX", "GridDirectionY", "GridDirectionXY",
                "MillingStyleConventional", "MillingStyleClimb",
                "MillingStyleIgnore", "MaxStepDownControl",
                "MaterialAllowanceControl", "OverlapPercentControl",
                "EngraveOffsetControl", "PocketingControl")
        # mapping of strategy -> the controls that are meaningful for it
        active_controls = {
            "PushRemoveStrategy": ("GridDirectionX", "GridDirectionY",
                    "GridDirectionXY", "MillingStyleConventional",
                    "MillingStyleClimb", "MillingStyleIgnore",
                    "MaxStepDownControl", "MaterialAllowanceControl",
                    "OverlapPercentControl"),
            # TODO: direction y and xy currently don't work for ContourPolygonStrategy
            "ContourPolygonStrategy": ("GridDirectionX",
                    "MillingStyleIgnore", "MaxStepDownControl",
                    "MaterialAllowanceControl", "OverlapPercentControl"),
            "ContourFollowStrategy": ("MillingStyleConventional",
                    "MillingStyleClimb", "MaxStepDownControl"),
            "SurfaceStrategy": ("GridDirectionX", "GridDirectionY",
                    "GridDirectionXY", "MillingStyleConventional",
                    "MillingStyleClimb", "MillingStyleIgnore",
                    "MaterialAllowanceControl", "OverlapPercentControl"),
            "EngraveStrategy": ("MaxStepDownControl", "EngraveOffsetControl",
                    "MillingStyleConventional", "MillingStyleClimb",
                    "PocketingControl"),
        }
        for one_control in all_controls:
            get_obj(one_control).set_sensitive(one_control in active_controls[strategy])
def update_tool_controls(self, widget=None, data=None):
# disable the toroidal radius if the toroidal cutter is not enabled
if self.gui.get_object("ToroidalCutter").get_active():
self.gui.get_object("TorusDiameterControl").show()
self.gui.get_object("TorusDiameterLabel").show()
else:
self.gui.get_object("TorusDiameterControl").hide()
self.gui.get_object("TorusDiameterLabel").hide()
for objname, default_value in (("ToolDiameterControl", 1.0),
("TorusDiameterControl", 0.25),
("SpindleSpeedControl", 1000),
("FeedrateControl", 200)):
obj = self.gui.get_object(objname)
if obj.get_value() == 0:
# set the value to the configured minimum
obj.set_value(default_value)
self.gui.get_object("ExportEMCToolDefinition").set_sensitive(len(self.tool_list) > 0)
    @gui_activity_guard
    @progress_activity_guard
    def toggle_font_dialog_window(self, widget=None, event=None, state=None):
        """Show or hide the single-line font dialog window.

        The font selector combo box is created lazily on first use.  The
        window position is remembered between show/hide cycles.
        """
        # only "delete-event" uses four arguments
        # TODO: unify all these "toggle" functions for different windows into one single function (including storing the position)
        if state is None:
            state = event
        if state is None:
            # no explicit state given: toggle the current visibility
            state = not self._font_dialog_window_visible
        if state:
            if self.font_selector is None:
                # lazy initialization of the font list (may be slow)
                self.update_progress_bar("Initializing fonts")
                # create it manually to ease access
                font_selector = gtk.combo_box_new_text()
                self.gui.get_object("FontSelectionBox").pack_start(
                        font_selector, expand=False, fill=False)
                sorted_keys = list(self._fonts_cache.get_font_names())
                sorted_keys.sort(key=lambda x: x.upper())
                for name in sorted_keys:
                    font_selector.append_text(name)
                if sorted_keys:
                    font_selector.set_active(0)
                else:
                    log.warn("No single-line fonts found!")
                font_selector.connect("changed",
                        self.update_font_dialog_preview)
                font_selector.show()
                self.font_selector = font_selector
            if len(self._fonts_cache) > 0:
                # show the dialog only if fonts are available
                if self._font_dialog_window_position:
                    self.font_dialog_window.move(
                            *self._font_dialog_window_position)
                self.font_dialog_window.show()
                self._font_dialog_window_visible = True
            else:
                log.error("No fonts were found on your system. " \
                        + "Please check the Log Window for details.")
        else:
            # remember the position before hiding the window
            self._font_dialog_window_position = \
                    self.font_dialog_window.get_position()
            self.font_dialog_window.hide()
            self._font_dialog_window_visible = False
        # don't close the window - just hide it (for "delete-event")
        return True
def get_font_dialog_text_rendered(self):
input_field = self.gui.get_object("FontDialogInput")
text_buffer = input_field.get_buffer()
text = text_buffer.get_text(text_buffer.get_start_iter(),
text_buffer.get_end_iter())
text = unicode(text)
if text:
skew = self.gui.get_object("FontSideSkewValue").get_value()
line_space = self.gui.get_object("FontLineSpacingValue").get_value()
pitch = self.gui.get_object("FontCharacterSpacingValue").get_value()
# get the active align setting
for objname, value, justification in (
("FontTextAlignLeft", TEXT_ALIGN_LEFT, gtk.JUSTIFY_LEFT),
("FontTextAlignCenter", TEXT_ALIGN_CENTER, gtk.JUSTIFY_CENTER),
("FontTextAlignRight", TEXT_ALIGN_RIGHT, gtk.JUSTIFY_RIGHT)):
obj = self.gui.get_object(objname)
if obj.get_active():
align = value
input_field.set_justification(justification)
font_name = self.font_selector.get_active_text()
charset = self._fonts_cache.get_font(font_name)
return charset.render(text, skew=skew, line_spacing=line_space,
pitch=pitch, align=align)
else:
# empty text
return None
    @gui_activity_guard
    def import_from_font_dialog(self, widget=None):
        """Load the rendered font text as the current model and close the
        font dialog afterwards.
        """
        self.load_model(self.get_font_dialog_text_rendered())
        self.append_to_queue(self.toggle_font_dialog_window)
    def export_from_font_dialog(self, widget=None):
        """Save the rendered font text to a file (without remembering the
        filename); empty/degenerate renderings are ignored.
        """
        text_model = self.get_font_dialog_text_rendered()
        if text_model and (not text_model.maxx is None):
            self.save_model(model=text_model, store_filename=False)
def copy_font_dialog_to_clipboard(self, widget=None):
text_model = self.get_font_dialog_text_rendered()
if text_model and (not text_model.maxx is None):
text_buffer = StringIO.StringIO()
text_model.export(comment=self.get_meta_data(),
unit=self.settings.get("unit")).write(text_buffer)
text_buffer.seek(0)
text = text_buffer.read()
self._copy_text_to_clipboard(text, CLIPBOARD_TARGETS["svg"])
def update_clipboard_state(self, clipboard=None, event=None):
data, importer = self._get_data_and_importer_from_clipboard()
paste_button = self.gui.get_object("PasteModelFromClipboard")
paste_button.set_sensitive(not data is None)
    def _copy_text_to_clipboard(self, text, targets):
        """Put "text" on the clipboard, advertising the given target
        formats.

        Note: "get_func" uses Python 2 tuple parameter unpacking for the
        user data argument.
        """
        clip_targets = [(key, gtk.TARGET_OTHER_WIDGET, index)
                for index, key in enumerate(targets)]
        def get_func(clipboard, selectiondata, info, (text, clip_type)):
            # deliver the stored text when a clipboard consumer asks for it
            selectiondata.set(clip_type, 8, text)
        if "svg" in "".join(targets).lower():
            # Inkscape for Windows strictly requires the BITMAP type
            clip_type = gtk.gdk.SELECTION_TYPE_BITMAP
        else:
            clip_type = gtk.gdk.SELECTION_TYPE_STRING
        result = self.clipboard.set_with_data(clip_targets, get_func,
                lambda *args: None, (text, clip_type))
        # keep the clipboard content available after this program exits
        self.clipboard.store()
def copy_model_to_clipboard(self, widget=None):
if not self.model.is_export_supported():
return
text_buffer = StringIO.StringIO()
self.model.export(comment=self.get_meta_data(),
unit=self.settings.get("unit")).write(text_buffer)
text_buffer.seek(0)
is_contour = isinstance(self.model, pycam.Geometry.Model.ContourModel)
# TODO: this should not be decided here
if is_contour:
targets = CLIPBOARD_TARGETS["svg"]
else:
targets = CLIPBOARD_TARGETS["stl"]
self._copy_text_to_clipboard(text_buffer.read(), targets)
    def _get_data_and_importer_from_clipboard(self):
        """Check the clipboard for content in any of the supported model
        formats.

        @return: a tuple of (clipboard data, importer function) or
            (None, None) if no usable content was found
        """
        # the fake filenames only serve as input for the file type detection
        for targets, filename in ((CLIPBOARD_TARGETS["svg"], "foo.svg"),
                (CLIPBOARD_TARGETS["stl"], "foo.stl"),
                (CLIPBOARD_TARGETS["ps"], "foo.ps"),
                (CLIPBOARD_TARGETS["dxf"], "foo.dxf")):
            for target in targets:
                data = self.clipboard.wait_for_contents(target)
                if not data is None:
                    importer = pycam.Importers.detect_file_type(filename)[1]
                    return data, importer
        return None, None
    @progress_activity_guard
    @gui_activity_guard
    def paste_model_from_clipboard(self, widget=None):
        """Try to load a model from the clipboard content, using the
        importer that matches the detected data format.
        """
        data, importer = self._get_data_and_importer_from_clipboard()
        if data:
            self.update_progress_bar(text="Loading model from clipboard")
            # wrap the raw clipboard bytes in a file-like object
            text_buffer = StringIO.StringIO(data.data)
            model = importer(text_buffer,
                    program_locations=self._get_program_locations(),
                    unit=self.settings.get("unit"),
                    fonts_cache=self._fonts_cache,
                    callback=self.update_progress_bar)
            if model:
                log.info("Loaded a model from clipboard")
                self.load_model(model)
            else:
                log.warn("Failed to load a model from clipboard")
        else:
            log.warn("The clipboard does not contain suitable data")
    @gui_activity_guard
    def update_font_dialog_preview(self, widget=None, event=None):
        """Redraw the preview area of the font dialog with the currently
        rendered text.

        The rendered model is scaled uniformly to fit into the preview
        widget (minus a small border) and drawn into an off-screen pixmap
        before being copied to the screen.
        """
        if not self.font_selector:
            # not initialized
            return
        if len(self._fonts_cache) == 0:
            # empty
            return
        font_name = self.font_selector.get_active_text()
        font = self._fonts_cache.get_font(font_name)
        self.gui.get_object("FontAuthorText").set_label(
                os.linesep.join(font.get_authors()))
        preview_widget = self.gui.get_object("FontDialogPreview")
        final_drawing_area = preview_widget.window
        text_model = self.get_font_dialog_text_rendered()
        # always clean the background
        x, y, width, height = preview_widget.get_allocation()
        drawing_area = gtk.gdk.Pixmap(final_drawing_area, width, height)
        drawing_area.draw_rectangle(preview_widget.get_style().white_gc, True,
                0, 0, width, height)
        # carefully check if there are lines in the rendered text
        if text_model and (not text_model.maxx is None) and \
                (text_model.maxx > text_model.minx):
            # leave a small border around the preview
            border = 3
            x_fac = (width - 1 - 2 * border) / \
                    (text_model.maxx - text_model.minx)
            y_fac = (height - 1 - 2 * border) / \
                    (text_model.maxy - text_model.miny)
            # uniform scaling: keep the aspect ratio of the text
            factor = min(x_fac, y_fac)
            # vertically centered preview
            y_offset = int((height - 2 * border - \
                    factor * (text_model.maxy - text_model.miny)) // 2)
            gc = drawing_area.new_gc()
            # map model coordinates to widget pixels, depending on the
            # text alignment (minx == 0: left, maxx == 0: right, else center)
            if text_model.minx == 0:
                # left align
                get_virtual_x = lambda x: int(x * factor) + border
            elif text_model.maxx == 0:
                # right align
                get_virtual_x = lambda x: width + int(x * factor) - 1 - border
            else:
                # center align
                get_virtual_x = lambda x: \
                        int(width / 2.0 + x * factor) - 1 - border
            get_virtual_y = lambda y: -y_offset + \
                    height - int((y - text_model.miny) * factor) - 1 - border
            for polygon in text_model.get_polygons():
                draw_points = []
                points = polygon.get_points()
                if polygon.is_closed:
                    # add the first point again to close the polygon
                    points.append(points[0])
                for point in points:
                    x = get_virtual_x(point.x)
                    y = get_virtual_y(point.y)
                    draw_points.append((x, y))
                drawing_area.draw_lines(gc, draw_points)
        # copy the off-screen pixmap to the visible drawing area
        final_gc = final_drawing_area.new_gc()
        final_drawing_area.draw_drawable(final_gc, drawing_area, 0, 0, 0, 0,
                -1, -1)
    @gui_activity_guard
    def toggle_about_window(self, widget=None, event=None, state=None):
        """Show or hide the "about" window.

        A falsy/None state hides the window; anything truthy shows it.
        """
        # only "delete-event" uses four arguments
        # TODO: unify all these "toggle" functions for different windows into one single function (including storing the position)
        if state is None:
            state = event
        if state:
            self.about_window.show()
        else:
            self.about_window.hide()
        # don't close the window - just hide it (for "delete-event")
        return True
    @progress_activity_guard
    @gui_activity_guard
    def extrude_model(self, widget=None):
        """Extrude the current 2D model into a 3D model using the profile
        function selected in the extrusion dialog.

        Each profile function maps the distance from the contour (x) to a
        height value; the model is replaced on success.
        """
        self.update_progress_bar("Calculating extrusion")
        extrusion_type_selector = self.gui.get_object("ExtrusionTypeSelector")
        type_model = extrusion_type_selector.get_model()
        type_active = extrusion_type_selector.get_active()
        if type_active >= 0:
            type_string = type_model[type_active][0]
            height = self.gui.get_object("ExtrusionHeight").get_value()
            width = self.gui.get_object("ExtrusionWidth").get_value()
            grid_size = self.gui.get_object("ExtrusionGrid").get_value()
            # choose the height profile function for the extrusion
            if type_string == "radius_up":
                func = lambda x: height * math.sqrt((width ** 2 - max(0, width - x) ** 2))
            elif type_string == "radius_down":
                func = lambda x: height * (1 - math.sqrt((width ** 2 - min(width, x) ** 2)) / width)
            elif type_string == "skewed":
                func = lambda x: height * min(1, x / width)
            elif type_string == "sine":
                func = lambda x: height * math.sin(min(x, width) / width * math.pi / 2)
            elif type_string == "sigmoid":
                func = lambda x: height * ((math.sin(((min(x, width) / width) - 0.5) * math.pi) + 1) / 2)
            else:
                log.error("Unknown extrusion type selected: %s" % type_string)
                return
            self.toggle_extrusion_dialog(False)
            model = self.model.extrude(stepping=grid_size, func=func,
                    callback=self.update_progress_bar)
            if model and (not model.minx is None):
                self.load_model(model)
            else:
                # extrusion failed or was cancelled - re-open the dialog
                self.toggle_extrusion_dialog(True)
    def toggle_extrusion_dialog(self, widget=None, event=None, state=None):
        """Show or hide the extrusion dialog window, remembering its
        position between show/hide cycles.
        """
        if state is None:
            # the "delete-event" issues the additional "event" argument
            state = event
        if state is None:
            # no explicit state given: toggle the current visibility
            state = not self._extrusion_dialog_visible
        if state:
            if self._extrusion_dialog_position:
                self.extrusion_dialog_window.move(*self._extrusion_dialog_position)
            self.extrusion_dialog_window.show()
        else:
            self._extrusion_dialog_position = self.extrusion_dialog_window.get_position()
            self.extrusion_dialog_window.hide()
        self._extrusion_dialog_visible = state
        # don't close the window - just hide it (for "delete-event")
        return True
    @gui_activity_guard
    def toggle_preferences_window(self, widget=None, event=None, state=None):
        """Show or hide the preferences window, remembering its position
        between show/hide cycles.
        """
        if state is None:
            # the "delete-event" issues the additional "event" argument
            state = event
        if state is None:
            # no explicit state given: toggle the current visibility
            state = not self._preferences_window_visible
        if state:
            if self._preferences_window_position:
                self.preferences_window.move(*self._preferences_window_position)
            self.preferences_window.show()
        else:
            self._preferences_window_position = self.preferences_window.get_position()
            self.preferences_window.hide()
        self._preferences_window_visible = state
        # don't close the window - just hide it (for "delete-event")
        return True
    def add_log_message(self, title, message, record=None):
        """Append a log record to the log window model and show it in the
        status bar.

        @param record: the logging.LogRecord that produced the message
            (used for the timestamp and the severity level)
        """
        timestamp = datetime.datetime.fromtimestamp(
                record.created).strftime("%H:%M")
        # avoid the ugly character for a linefeed
        message = " ".join(message.splitlines())
        try:
            message = message.encode("utf-8")
        except UnicodeDecodeError:
            # Python 2: encode() on a byte string implicitly decodes it as
            # ASCII first - this is where the UnicodeDecodeError comes from
            # remove all non-ascii characters
            clean_char = lambda c: (32 <= ord(c) < 128) and c or " "
            message = "".join([clean_char(char) for char in message])
        self.log_model.append((timestamp, title, message))
        # update the status bar (if the GTK interface is still active)
        if not self.status_bar.window is None:
            # remove the last message from the stack (probably not necessary)
            self.status_bar.pop(0)
            # push the new message
            try:
                self.status_bar.push(0, message)
            except TypeError:
                # fall back to a stripped-down message if GTK rejects it
                new_message = re.sub("[^\w\s]", "", message)
                self.status_bar.push(0, new_message)
            # highlight the "warning" icon for warnings/errors
            if record and record.levelno > 20:
                self.gui.get_object("StatusBarWarning").show()
@gui_activity_guard
def copy_log_to_clipboard(self, widget=None):
content = []
def copy_row(model, path, it, content):
columns = []
for column in range(model.get_n_columns()):
columns.append(model.get_value(it, column))
content.append(" ".join(columns))
self.log_model.foreach(copy_row, content)
self.clipboard.set_text(os.linesep.join(content))
self.gui.get_object("StatusBarWarning").hide()
    @gui_activity_guard
    def clear_log_window(self, widget=None):
        # remove all entries from the log window and reset the warning icon
        self.log_model.clear()
        self.gui.get_object("StatusBarWarning").hide()
@gui_activity_guard
def toggle_log_window(self, widget=None, value=None, action=None):
toggle_log_checkbox = self.gui.get_object("ToggleLogWindow")
checkbox_state = toggle_log_checkbox.get_active()
if value is None:
new_state = checkbox_state
elif isinstance(value, gtk.gdk.Event):
# someone clicked at the status bar -> toggle the window state
new_state = not checkbox_state
else:
if action is None:
new_state = value
else:
new_state = action
if new_state:
if self._log_window_position:
self.log_window.move(*self._log_window_position)
self.log_window.show()
else:
self._log_window_position = self.log_window.get_position()
self.log_window.hide()
toggle_log_checkbox.set_active(new_state)
self.gui.get_object("StatusBarWarning").hide()
# don't destroy the window with a "destroy" event
return True
    @gui_activity_guard
    def toggle_process_pool_window(self, widget=None, value=None, action=None):
        """ show or hide the process pool statistics window

        The new state is taken from "action" (if given), then from "value";
        otherwise the checkbox state is used.  Returning True prevents the
        destruction of the window via a "destroy" event.
        """
        toggle_process_pool_checkbox = self.gui.get_object("ToggleProcessPoolWindow")
        checkbox_state = toggle_process_pool_checkbox.get_active()
        # determine the requested state from the given arguments
        if value is None:
            new_state = checkbox_state
        else:
            if action is None:
                new_state = value
            else:
                new_state = action
        if new_state:
            is_available = pycam.Utils.threading.is_pool_available()
            disabled_box = self.gui.get_object("ProcessPoolDisabledBox")
            statistics_box = self.gui.get_object("ProcessPoolStatisticsBox")
            if is_available:
                # a pool exists: show statistics and refresh them regularly
                disabled_box.hide()
                statistics_box.show()
                # start the refresh function
                interval = int(max(1, self.gui.get_object(
                        "ProcessPoolRefreshInterval").get_value()))
                gobject.timeout_add_seconds(interval,
                        self.update_process_pool_statistics, interval)
            else:
                # no process pool -> show only the explanation text
                disabled_box.show()
                statistics_box.hide()
            self.process_pool_window.show()
        else:
            self.process_pool_window.hide()
        toggle_process_pool_checkbox.set_active(new_state)
        # don't destroy the window with a "destroy" event
        return True
    def update_process_pool_statistics(self, original_interval):
        """ refresh the content of the process pool statistics window

        This is called periodically via gobject.timeout_add_seconds.
        @param original_interval: the refresh interval (seconds) that was
            active when this timeout was registered - used for detecting a
            changed interval setting
        @return: True if this timeout should trigger again, False otherwise
        """
        stats = pycam.Utils.threading.get_pool_statistics()
        model = self.process_pool_model
        model.clear()
        for item in stats:
            model.append(item)
        self.gui.get_object("ProcessPoolConnectedWorkersValue").set_text(
                str(len(stats)))
        details = pycam.Utils.threading.get_task_statistics()
        detail_text = os.linesep.join(["%s: %s" % (key, value)
                for (key, value) in details.iteritems()])
        self.gui.get_object("ProcessPoolDetails").set_text(detail_text)
        # re-read the configured interval - the user may have changed it
        current_interval = int(max(1, self.gui.get_object(
                "ProcessPoolRefreshInterval").get_value()))
        if original_interval != current_interval:
            # initiate a new repetition
            gobject.timeout_add_seconds(current_interval,
                    self.update_process_pool_statistics, current_interval)
            # stop the current repetition
            return False
        else:
            # don't repeat, if the window is hidden
            return self.gui.get_object("ToggleProcessPoolWindow").get_active()
    @gui_activity_guard
    def toggle_3d_view(self, widget=None, value=None):
        """ show or hide the OpenGL 3D view window

        The window is created lazily on the first request.  Nothing happens
        in non-interactive mode, without a loaded model, or after a failed
        OpenGL initialization.
        @param value: the requested state (None toggles the current state)
        """
        toggle_3d_checkbox = self.gui.get_object("Toggle3DView")
        # no interactive mode
        if self.no_dialog:
            return
        if self.view3d and not self.view3d.enabled:
            # initialization failed - don't do anything
            return
        if not self.model:
            # no model loaded - don't enable the window
            return
        current_state = not ((self.view3d is None) or (not self.view3d.is_visible))
        if value is None:
            new_state = not current_state
        else:
            new_state = value
        if new_state == current_state:
            return
        elif new_state:
            if self.view3d is None:
                # These buttons are replicated to appear in the 3D view - for
                # easier configuration of visible items without the Preferences
                # window.
                item_buttons = self.gui.get_object(
                        "PreferencesVisibleItemsBox").get_children()
                # do the gl initialization
                self.view3d = ModelViewWindowGL(self.gui, self.settings,
                        notify_destroy=self.toggle_3d_view,
                        accel_group=self._accel_group,
                        item_buttons=item_buttons,
                        context_menu_actions=[self.gui.get_object(name)
                                for name in ("GeneralSettings", "Help3DView")])
                if self.model and self.view3d.enabled:
                    self.view3d.reset_view()
                # configure drag-and-drop for the 3D window
                self.configure_drag_and_drop(self.view3d.window)
                # disable the "toggle" button, if the 3D view does not work
                toggle_3d_checkbox.set_sensitive(self.view3d.enabled)
            else:
                # the window is just hidden
                self.view3d.show()
            self.update_view()
        else:
            self.view3d.hide()
        # enable the toggle button only, if the 3d view is available
        # (e.g. disabled if no OpenGL support is available)
        toggle_3d_checkbox.set_active(self.view3d.enabled and new_state)
@progress_activity_guard
@gui_activity_guard
def transform_model(self, widget):
if widget is self.gui.get_object("Rotate"):
controls = (("x-axis", "x"), ("y-axis", "y"), ("z-axis", "z"))
elif widget is self.gui.get_object("Flip"):
controls = (("xy-plane", "xy"), ("xz-plane", "xz"), ("yz-plane", "yz"))
elif widget is self.gui.get_object("Swap"):
controls = (("x <-> y", "x_swap_y"), ("x <-> z", "x_swap_z"), ("y <-> z", "y_swap_z"))
else:
# broken gui
log.warn("Unknown button action: %s" % str(widget.get_name()))
return
for obj, value in controls:
if self.gui.get_object(obj).get_active():
self._store_undo_state()
self.disable_progress_cancel_button()
self.update_progress_bar("Transforming model")
self.model.transform_by_template(value,
callback=self.update_progress_bar)
self.append_to_queue(self.update_support_model)
self.append_to_queue(self.update_model_dimensions)
self.append_to_queue(self.update_view)
def _treeview_get_active_index(self, table, datalist):
if len(datalist) == 0:
result = None
else:
treeselection = table.get_selection()
(model, iteration) = treeselection.get_selected()
# the first item in the model is the index within the list
try:
result = model[iteration][0]
except TypeError:
result = None
return result
    def _treeview_set_active_index(self, table, index):
        # select the row carrying the given list index in the treeview
        treeselection = table.get_selection()
        treeselection.select_path((index,))
    def _treeview_button_event(self, table, datalist, action, update_func):
        """ shared handler for the buttons of the tool/process/bounds/task
        tables

        @param table: the gtk.TreeView that the action refers to
        @param datalist: the list backing the table (e.g. self.tool_list)
        @param action: one of "update_buttons", "move_up", "move_down" or
            "delete"
        @param update_func: table-specific refresh function that is called
            at the end with the new selection index
        """
        future_selection_index = None
        index = self._treeview_get_active_index(table, datalist)
        skip_model_update = False
        if action == "update_buttons":
            skip_model_update = True
        elif action == "move_up":
            if index > 0:
                # move an item one position up the list
                selected = datalist[index]
                above = datalist[index-1]
                datalist[index] = above
                datalist[index-1] = selected
                future_selection_index = index - 1
        elif action == "move_down":
            if index + 1 < len(datalist):
                # move an item one position down the list
                selected = datalist[index]
                below = datalist[index+1]
                datalist[index] = below
                datalist[index+1] = selected
                future_selection_index = index + 1
        elif action == "delete":
            # delete one item from the list
            item = datalist[index]
            # Check if we need to remove items that depended on the currently
            # deleted one.
            if not datalist in (self.tool_list, self.process_list,
                    self.bounds_list):
                # tasks do not depend on this list - just continue
                pass
            elif len(datalist) == 1:
                # There are no replacements available for this item.
                # Thus we need to remove _all_ tasks.
                while len(self.task_list) > 0:
                    self.task_list.remove(self.task_list[0])
            else:
                # pick any other item of the same list as the replacement
                if index > 0:
                    alternative = datalist[0]
                else:
                    alternative = datalist[1]
                # Replace all references to the to-be-deleted item with the
                # alternative.
                for task in self.task_list:
                    for sublist in ("tool", "process", "bounds"):
                        if item is task[sublist]:
                            task[sublist] = alternative
            # Delete the object. Maybe this is not necessary, if it was the
            # last remaining task item (see above).
            if item in datalist:
                datalist.remove(item)
            # don't set a new index, if the list is empty now
            if len(datalist) > 0:
                if index < len(datalist):
                    future_selection_index = index
                else:
                    # the last item was removed
                    future_selection_index = len(datalist) - 1
            # update the tasklist table (maybe we removed some items)
            self.update_tasklist_table()
        # also update the specific description of the tool/process/bounds
        if not future_selection_index is None:
            if datalist is self.tool_list:
                self.settings.set("current_tool",
                        self.tool_list[future_selection_index])
                self.switch_tool_table_selection()
            elif datalist is self.process_list:
                self.settings.set("current_process",
                        self.process_list[future_selection_index])
                self.switch_process_table_selection()
            elif datalist is self.bounds_list:
                self.settings.set("current_bounds",
                        self.bounds_list[future_selection_index])
                self.switch_bounds_table_selection()
            else:
                pass
        # any new item can influence the "New task" button
        self.append_to_queue(self.update_tasklist_controls)
        # removing or adding "bounds" may change the visualization
        self.append_to_queue(self.update_boundary_limits)
        update_func(new_index=future_selection_index,
                skip_model_update=skip_model_update)
def _put_tool_settings_to_gui(self, settings):
self.gui.get_object("ToolName").set_text(settings["name"])
# cutter shapes
def set_cutter_shape_name(value):
self.gui.get_object(value).set_active(True)
set_cutter_shape_name(settings["shape"])
for objname, key in (
("FeedrateControl", "feedrate"),
("SpindleSpeedControl", "speed")):
self.gui.get_object(objname).set_value(settings[key])
# radius -> diameter
for objname, key in (
("ToolDiameterControl", "tool_radius"),
("TorusDiameterControl", "torus_radius")):
self.gui.get_object(objname).set_value(2 * settings[key])
def _load_tool_settings_from_gui(self, settings=None):
if settings is None:
settings = {}
settings["name"] = self.gui.get_object("ToolName").get_text()
def get_cutter_shape_name():
for name in ("SphericalCutter", "CylindricalCutter", "ToroidalCutter"):
if self.gui.get_object(name).get_active():
return name
settings["shape"] = get_cutter_shape_name()
for objname, key in (
("FeedrateControl", "feedrate"),
("SpindleSpeedControl", "speed")):
settings[key] = self.gui.get_object(objname).get_value()
# diameter -> radius
for objname, key in (
("ToolDiameterControl", "tool_radius"),
("TorusDiameterControl", "torus_radius")):
settings[key] = 0.5 * self.gui.get_object(objname).get_value()
return settings
@gui_activity_guard
def handle_tool_settings_change(self, widget=None, data=None):
current_tool = self.settings.get("current_tool")
if not current_tool is None:
self._load_tool_settings_from_gui(current_tool)
self.update_tool_table()
self.update_tool_controls()
@gui_activity_guard
def switch_tool_table_selection(self, widget=None, data=None):
current_tool = self.settings.get("current_tool")
# hide all controls if no process is defined
if not current_tool is None:
self.gui.get_object("ToolSettingsControlsBox").show()
self._put_tool_settings_to_gui(current_tool)
self.update_tool_table()
else:
self.gui.get_object("ToolSettingsControlsBox").hide()
@gui_activity_guard
def _tool_editor_button_event(self, widget, data, action=None):
# "toggle" uses two parameters - all other actions have only one
if action is None:
action = data
self._treeview_button_event(self.tool_editor_table, self.tool_list, action, self.update_tool_table)
if action == "add":
# look for the first unused default name
prefix = "New Tool "
index = 1
# loop while the current name is in use
while [True for process in self.tool_list if process["name"] == "%s%d" % (prefix, index)]:
index += 1
new_settings = self._load_tool_settings_from_gui()
new_settings["name"] = "%s%d" % (prefix, index)
self.tool_list.append(new_settings)
self.update_tool_table(self.tool_list.index(new_settings))
self._put_tool_settings_to_gui(new_settings)
elif action == "delete":
self.append_to_queue(self.switch_tool_table_selection)
def update_tool_table(self, new_index=None, skip_model_update=False):
tool_model = self.gui.get_object("ToolList")
if new_index is None:
# keep the old selection - this may return "None" if nothing is selected
new_index = self._treeview_get_active_index(self.tool_editor_table, self.tool_list)
if not skip_model_update:
tool_model.clear()
counter = 0
for tool in self.tool_list:
# add the tool size to the descriptive text
description = "%s (d=%s)" % (tool["name"], 2 * tool["tool_radius"])
tool_model.append((counter, counter + 1, description))
counter += 1
if not new_index is None:
self._treeview_set_active_index(self.tool_editor_table, new_index)
# en/disable some buttons
selection_active = not new_index is None
self.gui.get_object("ToolListDelete").set_sensitive(selection_active)
self.gui.get_object("ToolListMoveUp").set_sensitive(selection_active and new_index > 0)
self.gui.get_object("ToolListMoveDown").set_sensitive(selection_active and new_index < len(self.tool_list) - 1)
# hide all controls if no process is defined
if new_index is None:
self.gui.get_object("ToolSettingsControlsBox").hide()
else:
self.gui.get_object("ToolSettingsControlsBox").show()
# remove any broken tasks and update changed names
self.update_tool_controls()
self.update_task_description()
def change_unit_init(self, widget=None):
new_unit = self.gui.get_object("unit_control").get_active_text()
if self._last_unit is None:
# first initialization
self._last_unit = new_unit
return
if self._last_unit == new_unit:
# don't show the dialog if the conversion would make no sense
return
if self.no_dialog:
# without the dialog we don't scale anything
return
# show a dialog asking for a possible model scaling due to the unit change
self.unit_change_window.show()
def change_unit_set_selection(self, widget, state):
for key in ("UnitChangeModel", "UnitChangeProcesses", "UnitChangeTools",
"UnitChangeBounds"):
self.gui.get_object(key).set_active(state)
    def change_unit_apply(self, widget=None, data=None, apply_scale=True):
        """ apply a unit change (mm <-> inch)

        Depending on the checkboxes of the unit change dialog, the model,
        the process settings, the bounds and the tool dimensions are scaled
        by the mm/inch conversion factor.
        @param apply_scale: if False, only the unit is switched without
            scaling anything
        """
        if self.no_dialog:
            # without the dialog we don't scale anything
            return
        new_unit = self.gui.get_object("unit_control").get_active_text()
        factors = {
                ("mm", "inch"): 1 / 25.4,
                ("inch", "mm"): 25.4,
        }
        conversion = (self._last_unit, new_unit)
        if conversion in factors.keys():
            factor = factors[conversion]
            if apply_scale:
                if self.gui.get_object("UnitChangeModel").get_active():
                    # transform the model if it is selected
                    # keep the original center of the model
                    old_center = self._get_model_center()
                    self._store_undo_state()
                    self.model.scale(factor)
                    self._set_model_center(old_center)
                if self.gui.get_object("UnitChangeProcesses").get_active():
                    # scale the process settings
                    for process in self.process_list:
                        for key in ("material_allowance", "step_down",
                                "engrave_offset"):
                            process[key] *= factor
                if self.gui.get_object("UnitChangeBounds").get_active():
                    # scale the boundaries and keep their center
                    for bounds in self.bounds_list:
                        low, high = bounds.get_bounds()
                        if bounds.get_type() == Bounds.TYPE_FIXED_MARGIN:
                            # fixed margins scale directly
                            low[0] *= factor
                            high[0] *= factor
                            low[1] *= factor
                            high[1] *= factor
                            low[2] *= factor
                            high[2] *= factor
                            bounds.set_bounds(low, high)
                        elif bounds.get_type() == Bounds.TYPE_CUSTOM:
                            # scale custom bounds around their center
                            center = [0, 0, 0]
                            for i in range(3):
                                center[i] = (high[i] + low[i]) / 2
                            for i in range(3):
                                low[i] = center[i] + (low[i] - center[i]) * factor
                                high[i] = center[i] + (high[i] - center[i]) * factor
                            bounds.set_bounds(low, high)
                        elif bounds.get_type() == Bounds.TYPE_RELATIVE_MARGIN:
                            # no need to change relative margins
                            pass
                if self.gui.get_object("UnitChangeTools").get_active():
                    # scale all tool dimensions
                    for tool in self.tool_list:
                        for key in ("tool_radius", "torus_radius"):
                            tool[key] *= factor
        self.unit_change_window.hide()
        # store the current unit (for the next run of this function)
        self._last_unit = new_unit
        # update all labels containing the unit size
        self.update_unit_labels()
        # update all controls and redraw the boundaries
        self.switch_tool_table_selection()
        self.switch_process_table_selection()
        self.switch_bounds_table_selection()
        self.switch_tasklist_table_selection()
        # redraw the model
        self.update_view()
def update_unit_labels(self, widget=None, data=None):
# don't use the "unit" setting, since we need the plural of "inch"
if self.settings.get("unit") == "mm":
base_unit = "mm"
else:
base_unit = "inches"
for key in ("SpeedUnit1", "SpeedUnit2"):
self.gui.get_object(key).set_text("%s/minute" % base_unit)
for key in ("LengthUnit1", "LengthUnit2", "LengthUnitTouchOffHeight"):
self.gui.get_object(key).set_text(base_unit)
def get_filename_with_suffix(self, filename, type_filter):
# use the first extension provided by the filter as the default
if isinstance(type_filter[0], (tuple, list)):
filter_ext = type_filter[0][1]
else:
filter_ext = type_filter[1]
if isinstance(filter_ext, (list, tuple)):
filter_ext = filter_ext[0]
if not filter_ext.startswith("*"):
# weird filter content
return filename
else:
filter_ext = filter_ext[1:]
basename = os.path.basename(filename)
if (basename.rfind(".") == -1) or (basename[-6:].rfind(".") == -1):
# The filename does not contain a dot or the dot is not within the
# last five characters. Dots within the start of the filename are
# ignored.
return filename + filter_ext
else:
# contains at least one dot
return filename
    @gui_activity_guard
    def save_model(self, widget=None, filename=None, model=None,
            store_filename=True):
        """ export the given (or the current) model to a file

        @param filename: target filename, a callable returning one, or None
            (a file dialog is shown in the latter case)
        @param model: the model to be saved (defaults to self.model)
        @param store_filename: remember the filename for later "save" calls
        """
        if model is None:
            model = self.model
        if not model.is_export_supported():
            log.warn(("Saving this type of model (%s) is currently not " \
                    + "implemented!") % str(type(model)))
            return
        # get the filename
        if callable(filename):
            filename = filename()
        uri = None
        if not isinstance(filename, (basestring, pycam.Utils.URIHandler)):
            # we open a dialog
            # determine the file type
            # TODO: this needs to be decided by the exporter code
            # NOTE(review): "type_filter" stays unbound if the model is
            # neither a Model nor a ContourModel - presumably all exportable
            # models are one of these; verify against is_export_supported().
            if isinstance(model, pycam.Geometry.Model.Model):
                # TODO: fix this extremely fragile filter
                type_filter = [(name, patterns)
                        for name, patterns in FILTER_MODEL
                        if "STL" in name.upper()]
            elif isinstance(model, pycam.Geometry.Model.ContourModel):
                type_filter = [(name, patterns)
                        for name, patterns in FILTER_MODEL
                        if "SVG" in name.upper()]
            filename = self.get_filename_via_dialog("Save model to ...",
                    mode_load=False, type_filter=type_filter,
                    filename_templates=(self.last_model_uri,))
            if filename:
                uri = pycam.Utils.URIHandler(filename)
                if uri.is_local() and store_filename:
                    self.set_model_filename(filename)
        else:
            uri = pycam.Utils.URIHandler(filename)
        # no filename given -> exit
        if not uri:
            return
        if not uri.is_local():
            log.error("Unable to write file to a non-local " + \
                    "destination: %s" % uri)
        else:
            try:
                # despite its name, "file_in" is opened for writing
                file_in = open(uri.get_local_path(), "w")
                model.export(comment=self.get_meta_data(),
                        unit=self.settings.get("unit")).write(file_in)
                file_in.close()
            except IOError, err_msg:
                log.error("Failed to save model file: %s" % err_msg)
            else:
                log.info("Successfully stored the current model as '%s'." % \
                        str(filename))
                self.update_save_actions()
                self.add_to_recent_file_list(filename)
@gui_activity_guard
def reset_preferences(self, widget=None):
""" reset all preferences to their default values """
for key, value in PREFERENCES_DEFAULTS.items():
self.settings.set(key, value)
# redraw the model due to changed colors, display items ...
self.update_view()
    def load_preferences(self):
        """ load all settings that are available in the Preferences window from
        a file in the user's home directory """
        config_filename = pycam.Gui.Settings.get_config_filename()
        if config_filename is None:
            # failed to create the personal preferences directory
            return
        config = ConfigParser.ConfigParser()
        if not config.read(config_filename):
            # no config file was read
            return
        # report any ignored (obsolete) preference keys present in the file
        for item, value in config.items("DEFAULT"):
            if not item in PREFERENCES_DEFAULTS.keys():
                log.warn("Skipping obsolete preference item: %s" % str(item))
        for item in PREFERENCES_DEFAULTS.keys():
            if not config.has_option("DEFAULT", item):
                # a new preference setting is missing in the (old) file
                continue
            value_raw = config.get("DEFAULT", item)
            old_value = self.settings.get(item)
            # the type of the default value defines how to parse the raw text
            value_type = type(PREFERENCES_DEFAULTS[item])
            if isinstance(value_type(), basestring):
                # keep strings as they are
                value = str(value_raw)
            else:
                # parse tuples, integers, bools, ...
                # SECURITY NOTE: "eval" executes arbitrary expressions from
                # the preferences file; this is only acceptable because the
                # file lives in the user's own home directory.
                value = eval(value_raw)
            self.settings.set(item, value)
def save_preferences(self):
""" save all settings that are available in the Preferences window to
a file in the user's home directory """
config_filename = pycam.Gui.Settings.get_config_filename()
if config_filename is None:
# failed to create the personal preferences directory
log.warn("Failed to create a preferences directory in " \
+ "your user's home directory.")
return
config = ConfigParser.ConfigParser()
for item in PREFERENCES_DEFAULTS.keys():
config.set("DEFAULT", item, self.settings.get(item))
try:
config_file = file(config_filename, "w")
config.write(config_file)
config_file.close()
except IOError, err_msg:
log.warn("Failed to write preferences file (%s): %s" % (config_filename, err_msg))
@progress_activity_guard
@gui_activity_guard
def shift_model(self, widget, use_form_values=True):
if use_form_values:
shift_x = self.gui.get_object("shift_x").get_value()
shift_y = self.gui.get_object("shift_y").get_value()
shift_z = self.gui.get_object("shift_z").get_value()
else:
shift_x = -self.model.minx
shift_y = -self.model.miny
shift_z = -self.model.minz
self._store_undo_state()
self.update_progress_bar("Shifting model")
self.disable_progress_cancel_button()
self.model.shift(shift_x, shift_y, shift_z,
callback=self.update_progress_bar)
self.append_to_queue(self.update_support_model)
self.append_to_queue(self.update_model_dimensions)
self.append_to_queue(self.update_view)
def _get_model_center(self):
if not self.model:
return None
else:
return ((self.model.maxx + self.model.minx) / 2,
(self.model.maxy + self.model.miny) / 2,
(self.model.maxz + self.model.minz) / 2)
def _set_model_center(self, center):
new_x, new_y, new_z = center
old_x, old_y, old_z = self._get_model_center()
self.update_progress_bar("Centering model")
# undo state should be stored in the caller function
self.model.shift(new_x - old_x, new_y - old_y, new_z - old_z,
callback=self.update_progress_bar)
def _get_projection_plane(self):
# determine projection plane
if (self.model.maxz < 0) or (self.model.minz > 0):
# completely above or below zero
plane_z = self.model.minz
else:
plane_z = 0
return Plane(Point(0, 0, plane_z), Vector(0, 0, 1))
@progress_activity_guard
@gui_activity_guard
def projection_2d(self, widget=None):
self.update_progress_bar("Calculating 2D projection")
plane = self._get_projection_plane()
log.info("Projecting current model at level z=%g" % plane.p.z)
if hasattr(self.model, "get_flat_projection"):
projection = self.model.get_flat_projection(plane)
else:
projection = self.model.get_waterline_contour(plane)
if projection:
self.load_model(projection)
else:
log.warn("The 2D projection at z=%g is empty. Aborted." % plane.p.z)
@progress_activity_guard
@gui_activity_guard
def scale_model(self, widget=None, percent=None, keep_center=True):
if percent is None:
percent = self.gui.get_object("ScalePercent").get_value()
factor = percent / 100.0
if (factor <= 0) or (factor == 1):
return
old_center = self._get_model_center()
self._store_undo_state()
self.update_progress_bar("Scaling model")
self.disable_progress_cancel_button()
self.model.scale(factor, callback=self.update_progress_bar)
if keep_center:
self._set_model_center(old_center)
self.append_to_queue(self.update_model_dimensions)
self.append_to_queue(self.update_support_model)
self.append_to_queue(self.update_view)
@gui_activity_guard
def update_model_dimensions(self, widget=None):
if not self.model:
return
# scale controls
axis_control = self.gui.get_object("ScaleDimensionAxis")
scale_button = self.gui.get_object("ScaleDimensionButton")
scale_value = self.gui.get_object("ScaleDimensionControl")
index = axis_control.get_active()
dims = (self.model.maxx - self.model.minx,
self.model.maxy - self.model.miny,
self.model.maxz - self.model.minz)
value = dims[index]
non_zero_dimensions = [i for i, dim in enumerate(dims) if dim > 0]
enable_controls = index in non_zero_dimensions
scale_button.set_sensitive(enable_controls)
scale_value.set_sensitive(enable_controls)
scale_value.set_value(value)
# model corners in 3D view
for attr, label_suffix in (("minx", "XMin"), ("miny", "YMin"),
("minz", "ZMin"), ("maxx", "XMax"), ("maxy", "YMax"),
("maxz", "ZMax")):
label_name = "ModelCorner%s" % label_suffix
value = "%.3f" % getattr(self.model, attr)
self.gui.get_object(label_name).set_label(value)
@progress_activity_guard
@gui_activity_guard
def guess_model_directions(self, widget=None):
if not (self.model and hasattr(self.model, "reverse_directions")):
return
self._store_undo_state()
self.update_progress_bar(text="Analyzing directions of contour model")
self.model.revise_directions(callback=self.update_progress_bar)
self.update_support_model()
@progress_activity_guard
@gui_activity_guard
def reverse_model_direction(self, widget=None):
if not (self.model and hasattr(self.model, "reverse_directions")):
return
self._store_undo_state()
self.update_progress_bar(text="Reversing directions of contour model")
self.model.reverse_directions(callback=self.update_progress_bar)
self.update_support_model()
@progress_activity_guard
@gui_activity_guard
def scale_model_axis_fit(self, widget=None):
proportionally = self.gui.get_object("ScaleDimensionsProportionally").get_active()
value = self.gui.get_object("ScaleDimensionValue").get_value()
index = self.gui.get_object("ScaleDimensionAxis").get_active()
axes = "xyz"
axis_suffix = axes[index]
factor = value / (getattr(self.model, "max" + axis_suffix) - getattr(self.model, "min" + axis_suffix))
# store the original center of the model
old_center = self._get_model_center()
self._store_undo_state()
self.update_progress_bar("Scaling model")
self.disable_progress_cancel_button()
if proportionally:
self.model.scale(factor, callback=self.update_progress_bar)
else:
factor_x, factor_y, factor_z = (1, 1, 1)
if index == 0:
factor_x = factor
elif index == 1:
factor_y = factor
elif index == 2:
factor_z = factor
else:
return
self.model.scale(factor_x, factor_y, factor_z,
callback=self.update_progress_bar)
# move the model to its previous center
self._set_model_center(old_center)
self.append_to_queue(self.update_support_model)
self.append_to_queue(self.update_model_dimensions)
self.append_to_queue(self.update_view)
    def destroy(self, widget=None, data=None):
        """ shut down the application

        A currently running operation is cancelled first (with a timeout);
        afterwards the GTK main loop is terminated and the preferences are
        persisted via "quit".
        """
        self.update_view()
        # check if there is a running process
        # BEWARE: this is useless without threading - but we keep it for now
        if self._progress_running:
            self.cancel_progress()
            # wait steps
            delay = 0.5
            # timeout in seconds
            timeout = 5
            # wait until if is finished
            while self._progress_running and (timeout > 0):
                time.sleep(delay)
                timeout -= delay
        gtk.main_quit()
        self.quit()
    def quit(self):
        # persist the preferences before the program exits
        self.save_preferences()
def configure_drag_and_drop(self, obj):
obj.connect("drag-data-received", self.handle_data_drop)
flags = gtk.DEST_DEFAULT_ALL
targets = [(key, gtk.TARGET_OTHER_APP, index)
for index, key in enumerate(CLIPBOARD_TARGETS["filename_drag"])]
actions = gtk.gdk.ACTION_COPY | gtk.gdk.ACTION_LINK | \
gtk.gdk.ACTION_DEFAULT | gtk.gdk.ACTION_PRIVATE | \
gtk.gdk.ACTION_ASK
obj.drag_dest_set(flags, targets, actions)
    def handle_data_drop(self, widget, drag_context, x, y, selection_data, info,
            timestamp):
        """ handle dropped files/URIs and try to load the first usable model

        @param info: the index of the matched drop target (0 means a plain
            URI list - see configure_drag_and_drop)
        @return: True if a model was loaded, False otherwise
        """
        if info != 0:
            # a non-URI-list target delivers a single filename string
            uris = [str(selection_data.data)]
        elif pycam.Utils.get_platform() == pycam.Utils.PLATFORM_WINDOWS:
            # Windows delivers the URI list as plain text lines
            uris = selection_data.data.splitlines()
        else:
            uris = selection_data.get_uris()
        if not uris:
            # empty selection
            return True
        for uri in uris:
            # skip empty entries and NUL terminators
            if not uri or (uri == chr(0)):
                continue
            uri = pycam.Utils.URIHandler(uri)
            file_type, importer = pycam.Importers.detect_file_type(uri,
                    quiet=True)
            if importer:
                # looks like the file can be loaded
                if self.load_model_file(filename=uri):
                    return True
        if len(uris) > 1:
            log.error("Failed to open any of the given models: %s" % \
                    str(uris))
        else:
            log.error("Failed to open the model: %s" % str(uris[0]))
        return False
def append_to_queue(self, func, *args, **kwargs):
# check if gui is currently active
if self.gui_is_active:
# queue the function call
self._batch_queue.append((func, args, kwargs))
else:
# call the function right now
func(*args, **kwargs)
    def load_recent_model_file(self, widget):
        # called via the "recent files" menu - the widget carries the URI
        uri = widget.get_current_uri()
        self.load_model_file(filename=uri)
def _get_program_locations(self):
# import all external program locations into a dict
program_locations = {}
prefix = "external_program_"
for key in self.settings.get_keys():
if key.startswith(prefix) and self.settings.get(key):
program_locations[key[len(prefix):]] = self.settings.get(key)
return program_locations
    @gui_activity_guard
    @progress_activity_guard
    def load_model_file(self, widget=None, filename=None, store_filename=True):
        """ load a model from a file

        @param filename: source filename, a callable returning one, or None
            (a file dialog is shown in the latter case)
        @param store_filename: remember the filename for later "save" calls
        @return: True on success, False otherwise
        """
        if callable(filename):
            filename = filename()
        if not filename:
            filename = self.get_filename_via_dialog("Loading model ...",
                    mode_load=True, type_filter=FILTER_MODEL)
        if filename:
            file_type, importer = pycam.Importers.detect_file_type(filename)
            if file_type and callable(importer):
                self.update_progress_bar(text="Loading model ...")
                # "cancel" is not allowed
                self.disable_progress_cancel_button()
                if self.load_model(importer(filename,
                        program_locations=self._get_program_locations(),
                        unit=self.settings.get("unit"),
                        fonts_cache=self._fonts_cache,
                        callback=self.update_progress_bar)):
                    if store_filename:
                        self.set_model_filename(filename)
                    self.add_to_recent_file_list(filename)
                    return True
                else:
                    return False
            else:
                log.error("Failed to detect filetype!")
                return False
@gui_activity_guard
def export_emc_tools(self, widget=None, filename=None):
if callable(filename):
filename = filename()
if not filename:
filename = self.get_filename_via_dialog("Exporting EMC tool definition ...",
mode_load=False, type_filter=FILTER_EMC_TOOL,
filename_templates=(self.last_model_uri,))
if filename:
export = pycam.Exporters.EMCToolExporter.EMCToolExporter(self.tool_list)
text = export.get_tool_definition_string()
try:
out = file(filename, "w")
out.write(text)
out.close()
except IOError, err_msg:
log.error("Failed to save EMC tool file: %s" % err_msg)
else:
self.add_to_recent_file_list(filename)
def finish_startup(self):
""" This function is called by the pycam script after everything is
set up properly.
"""
# empty the "undo" states (accumulated by loading the defualt model)
while self._undo_states:
self._undo_states.pop(0)
    def open_task_settings_file(self, filename):
        """ This function is used by the commandline handler """
        # remember the URI for the "save settings" action
        self.last_task_settings_uri = pycam.Utils.URIHandler(filename)
        self.load_task_settings_file(filename=filename)
        self.update_save_actions()
    @gui_activity_guard
    def load_task_settings_file(self, widget=None, filename=None):
        """ load tool/process/bounds/task definitions from a settings file

        @param filename: source filename, a callable returning one, or None
            (a file dialog is shown in the latter case)
        """
        if callable(filename):
            filename = filename()
        if not filename:
            filename = self.get_filename_via_dialog("Loading settings ...",
                    mode_load=True, type_filter=FILTER_CONFIG)
            # Only update the last_task_settings attribute if the task file was
            # loaded interactively. E.g. ignore the initial task file loading.
            if filename:
                self.last_task_settings_uri = pycam.Utils.URIHandler(filename)
        if filename:
            log.info("Loading task settings file: %s" % str(filename))
            self.load_task_settings(filename)
            self.add_to_recent_file_list(filename)
        self.update_save_actions()
def _update_all_model_attributes(self):
self.append_to_queue(self.update_model_dimensions)
self.append_to_queue(self.update_model_type_related_controls)
self.append_to_queue(self.update_save_actions)
self.append_to_queue(self.update_support_controls)
self.append_to_queue(self.toggle_3d_view, value=True)
self.append_to_queue(self.update_view)
def load_model(self, model):
# load the new model only if the import worked
if model:
self._store_undo_state()
self.model = model
self.last_model_uri = None
# do some initialization
self._update_all_model_attributes()
if self.model and self.view3d and self.view3d.enabled:
self.append_to_queue(self.view3d.reset_view)
return True
else:
return False
def load_task_settings(self, filename=None):
settings = pycam.Gui.Settings.ProcessSettings()
if not filename is None:
settings.load_file(filename)
# flush all tables (without re-assigning new objects)
for one_list in (self.tool_list, self.process_list, self.bounds_list, self.task_list):
while len(one_list) > 0:
one_list.pop()
self.tool_list.extend(settings.get_tools())
self.process_list.extend(settings.get_processes())
self.bounds_list.extend(settings.get_bounds())
self.task_list.extend(settings.get_tasks())
self.update_tool_table()
self.update_process_table()
self.update_bounds_table()
self.update_tasklist_table()
def _put_bounds_settings_to_gui(self, settings):
self.gui.get_object("BoundsName").set_text(settings.get_name())
self.gui.get_object(self.BOUNDARY_TYPES[settings.get_type()]).set_active(True)
low, high = settings.get_bounds()
# relative margins are given in percent
if settings.get_type() == Bounds.TYPE_RELATIVE_MARGIN:
factor = 100
else:
factor = 1
for index, axis in enumerate("xyz"):
self.gui.get_object("boundary_%s_low" % axis).set_value(low[index] * factor)
self.gui.get_object("boundary_%s_high" % axis).set_value(high[index] * factor)
def _load_bounds_settings_from_gui(self, settings=None):
def get_boundary_type_from_gui():
for key, objname in self.BOUNDARY_TYPES.items():
if self.gui.get_object(objname).get_active():
return key
if settings is None:
settings = pycam.Toolpath.Bounds()
settings.set_name(self.gui.get_object("BoundsName").get_text())
settings.set_type(get_boundary_type_from_gui())
low = [None] * 3
high = [None] * 3
# relative margins are given in percent
if settings.get_type() == Bounds.TYPE_RELATIVE_MARGIN:
factor = 0.01
else:
factor = 1
for index, axis in enumerate("xyz"):
low[index] = self.gui.get_object(
"boundary_%s_low" % axis).get_value() * factor
high[index] = self.gui.get_object(
"boundary_%s_high" % axis).get_value() * factor
settings.set_bounds(low, high)
return settings
    @gui_activity_guard
    def handle_bounds_settings_change(self, widget=None, data=None):
        """ Callback for changes of the bounds editor widgets: store the new
        values in the currently selected bounds item and refresh the table
        and the boundary visualization.
        """
        current_index = self._treeview_get_active_index(
                self.bounds_editor_table, self.bounds_list)
        if not current_index is None:
            self._load_bounds_settings_from_gui(self.bounds_list[current_index])
            self.update_bounds_table()
        self.append_to_queue(self.update_boundary_limits)
    def update_bounds_controls(self):
        """ Adjust the bounds editor widgets to the currently selected
        margin type.

        Shows the matching description label and disables input fields that
        are meaningless for the current combination of margin type and
        model (zero-size dimensions in relative mode; the lower z margin
        for 2D contour models).
        """
        current_index = self._treeview_get_active_index(
                self.bounds_editor_table, self.bounds_list)
        if current_index is None:
            # no bounds setting is active
            return
        # show the proper descriptive label for the current margin type
        current_settings = self._load_bounds_settings_from_gui()
        current_type = current_settings.get_type()
        type_labels = {
                Bounds.TYPE_RELATIVE_MARGIN: "BoundsMarginTypeRelativeLabel",
                Bounds.TYPE_FIXED_MARGIN: "BoundsMarginTypeFixedLabel",
                Bounds.TYPE_CUSTOM: "BoundsMarginTypeCustomLabel",
        }
        for type_key, label_name in type_labels.items():
            is_active = type_key == current_type
            if is_active:
                self.gui.get_object(label_name).show()
            else:
                self.gui.get_object(label_name).hide()
        # return the control for one of the axes (low/high)
        def get_control(index, side):
            return self.gui.get_object("boundary_%s_%s" % ("xyz"[index], side))
        # disable each zero-dimension in relative margin mode
        if current_type == Bounds.TYPE_RELATIVE_MARGIN:
            # NOTE(review): assumes a model is currently loaded - verify
            model_dims = (self.model.maxx - self.model.minx,
                    self.model.maxy - self.model.miny,
                    self.model.maxz - self.model.minz)
            # disable the low/high controls for each zero-dimension
            for index in range(3):
                # enabled, if dimension is non-zero
                state = model_dims[index] != 0
                get_control(index, "high").set_sensitive(state)
                if (index == 2) and isinstance(self.model,
                        pycam.Geometry.Model.ContourModel):
                    # disable lower z for contour models
                    state = False
                get_control(index, "low").set_sensitive(state)
        else:
            # non-relative margins: enable all controls
            for index in range(3):
                get_control(index, "high").set_sensitive(True)
                if (index == 2) and isinstance(self.model,
                        pycam.Geometry.Model.ContourModel) and \
                        (current_type != Bounds.TYPE_CUSTOM):
                    # disable lower z for contour models
                    state = False
                else:
                    state = True
                get_control(index, "low").set_sensitive(state)
    def update_bounds_table(self, new_index=None, skip_model_update=False):
        """ Rebuild the bounds table and synchronize the related buttons.

        @param new_index: index of the item to select afterwards (None
            keeps the current selection)
        @param skip_model_update: skip refilling the TreeModel (only the
            selection and the button states are updated)
        """
        # reset the model data and the selection
        if new_index is None:
            # keep the old selection - this may return "None" if nothing is selected
            new_index = self._treeview_get_active_index(self.bounds_editor_table, self.bounds_list)
        if not skip_model_update:
            # update the TreeModel data
            model = self.gui.get_object("BoundsList")
            model.clear()
            # columns: index, description
            for index, bounds in enumerate(self.bounds_list):
                items = (index, bounds.get_name())
                model.append(items)
            if not new_index is None:
                self._treeview_set_active_index(self.bounds_editor_table, new_index)
        selection_active = not new_index is None
        # enable/disable the modification buttons
        self.gui.get_object("BoundsListMoveUp").set_sensitive(selection_active \
                and (new_index > 0))
        self.gui.get_object("BoundsListDelete").set_sensitive(selection_active)
        self.gui.get_object("BoundsListMoveDown").set_sensitive(
                selection_active and (new_index + 1 < len(self.bounds_list)))
        # hide all controls if no bound is defined
        if selection_active:
            self.gui.get_object("BoundsSettingsControlsBox").show()
        else:
            self.gui.get_object("BoundsSettingsControlsBox").hide()
        self.update_bounds_controls()
        # remove any broken tasks and update changed names
        self.update_task_description()
    @gui_activity_guard
    def switch_bounds_table_selection(self, widget=None, data=None):
        """ Update the bounds editor after a selection change: show the
        selected item's values or hide the editor box if nothing is
        selected.
        """
        bounds = self.settings.get("current_bounds")
        if not bounds is None:
            self.gui.get_object("BoundsSettingsControlsBox").show()
            self._put_bounds_settings_to_gui(bounds)
            self.update_bounds_table()
        else:
            self.gui.get_object("BoundsSettingsControlsBox").hide()
        self.append_to_queue(self.update_boundary_limits)
@gui_activity_guard
def handle_bounds_table_event(self, widget, data, action=None):
# "toggle" uses two parameters - all other actions have only one
if action is None:
action = data
self._treeview_button_event(self.bounds_editor_table, self.bounds_list,
action, self.update_bounds_table)
# do some post-processing ...
if action == "add":
# look for the first unused default name
prefix = "New Bounds "
index = 1
# loop while the current name is in use
while [True for bounds in self.bounds_list
if bounds.get_name() == "%s%d" % (prefix, index)]:
index += 1
new_settings = self._load_bounds_settings_from_gui()
new_settings.set_name("%s%d" % (prefix, index))
self.bounds_list.append(new_settings)
self.update_bounds_table(self.bounds_list.index(new_settings))
self._put_bounds_settings_to_gui(new_settings)
elif action == "delete":
self.append_to_queue(self.switch_bounds_table_selection)
    def _load_process_settings_from_gui(self, settings=None):
        """ Read the process editor widgets into a settings dictionary.

        @param settings: update this dictionary in place (a new one is
            created if it is None)
        @return: the filled settings dictionary
        """
        if settings is None:
            settings = {}
        settings["name"] = self.gui.get_object("ProcessSettingName").get_text()
        # path generator
        # NOTE(review): the radio-button loops below assume that exactly one
        # button per group is active - otherwise the respective local
        # variable stays unbound (NameError); verify the widget defaults.
        for key in ("PushRemoveStrategy", "ContourPolygonStrategy",
                "ContourFollowStrategy", "SurfaceStrategy", "EngraveStrategy"):
            if self.gui.get_object(key).get_active():
                strategy = key
                break
        settings["path_strategy"] = strategy
        # path direction
        for obj, value in (("GridDirectionX", "x"), ("GridDirectionY", "y"),
                ("GridDirectionXY", "xy")):
            if self.gui.get_object(obj).get_active():
                direction = value
                break
        # milling style
        for obj, value in (("MillingStyleConventional", "conventional"),
                ("MillingStyleClimb", "climb"),
                ("MillingStyleIgnore", "ignore")):
            if self.gui.get_object(obj).get_active():
                milling_style = value
                break
        # post_processor and reverse
        settings["milling_style"] = milling_style
        settings["path_direction"] = direction
        for objname, key in (("OverlapPercentControl", "overlap_percent"),
                ("MaterialAllowanceControl", "material_allowance"),
                ("MaxStepDownControl", "step_down"),
                ("EngraveOffsetControl", "engrave_offset")):
            settings[key] = self.gui.get_object(objname).get_value()
        settings["pocketing_type"] = POCKETING_TYPES[
                self.gui.get_object("PocketingControl").get_active()]
        return settings
def _put_process_settings_to_gui(self, settings):
self.gui.get_object("ProcessSettingName").set_text(settings["name"])
# path direction
def set_path_direction(direction):
for obj, value in (("GridDirectionX", "x"), ("GridDirectionY", "y"),
("GridDirectionXY", "xy")):
if value == direction:
self.gui.get_object(obj).set_active(True)
return
set_path_direction(settings["path_direction"])
def set_path_strategy(value):
self.gui.get_object(value).set_active(True)
set_path_strategy(settings["path_strategy"])
# milling style
def set_milling_style(style):
STYLES = {"conventional": "MillingStyleConventional",
"climb": "MillingStyleClimb",
"ignore": "MillingStyleIgnore"}
self.gui.get_object(STYLES[style]).set_active(True)
set_milling_style(settings["milling_style"])
for objname, key in (("OverlapPercentControl", "overlap_percent"),
("MaterialAllowanceControl", "material_allowance"),
("MaxStepDownControl", "step_down"),
("EngraveOffsetControl", "engrave_offset")):
self.gui.get_object(objname).set_value(settings[key])
if settings["pocketing_type"] in POCKETING_TYPES:
self.gui.get_object("PocketingControl").set_active(
POCKETING_TYPES.index(settings["pocketing_type"]))
@gui_activity_guard
def handle_process_settings_change(self, widget=None, data=None):
current_process = self.settings.get("current_process")
if not current_process is None:
self._load_process_settings_from_gui(current_process)
self.update_process_table()
    def update_process_table(self, new_index=None, skip_model_update=False):
        """ Rebuild the process table and synchronize the related buttons.

        @param new_index: index of the item to select afterwards (None
            keeps the current selection)
        @param skip_model_update: skip refilling the TreeModel (only the
            selection and the button states are updated)
        """
        # reset the model data and the selection
        if new_index is None:
            # keep the old selection - this may return "None" if nothing is selected
            new_index = self._treeview_get_active_index(self.process_editor_table, self.process_list)
        if not skip_model_update:
            # update the TreeModel data
            model = self.gui.get_object("ProcessList")
            model.clear()
            # columns: index, description
            for index in range(len(self.process_list)):
                process = self.process_list[index]
                items = (index, process["name"])
                model.append(items)
            if not new_index is None:
                self._treeview_set_active_index(self.process_editor_table, new_index)
        # enable/disable the modification buttons
        self.gui.get_object("ProcessListMoveUp").set_sensitive((not new_index is None) and (new_index > 0))
        self.gui.get_object("ProcessListDelete").set_sensitive(not new_index is None)
        self.gui.get_object("ProcessListMoveDown").set_sensitive((not new_index is None) and (new_index + 1 < len(self.process_list)))
        # hide all controls if no process is defined
        if new_index is None:
            self.gui.get_object("ProcessSettingsControlsBox").hide()
        else:
            self.gui.get_object("ProcessSettingsControlsBox").show()
        # remove any broken tasks and update changed names
        self.update_task_description()
@gui_activity_guard
def switch_process_table_selection(self, widget=None, data=None):
current_process = self.settings.get("current_process")
if not current_process is None:
self.gui.get_object("ProcessSettingsControlsBox").show()
self._put_process_settings_to_gui(current_process)
self.update_process_table()
else:
self.gui.get_object("ProcessSettingsControlsBox").hide()
@gui_activity_guard
def handle_process_table_event(self, widget, data, action=None):
# "toggle" uses two parameters - all other actions have only one
if action is None:
action = data
self._treeview_button_event(self.process_editor_table, self.process_list, action, self.update_process_table)
# do some post-processing ...
if action == "add":
# look for the first unused default name
prefix = "New Process "
index = 1
# loop while the current name is in use
while [True for process in self.process_list if process["name"] == "%s%d" % (prefix, index)]:
index += 1
new_settings = self._load_process_settings_from_gui()
new_settings["name"] = "%s%d" % (prefix, index)
self.process_list.append(new_settings)
self.update_process_table(self.process_list.index(new_settings))
self._put_process_settings_to_gui(new_settings)
elif action == "delete":
self.append_to_queue(self.switch_process_table_selection)
    @gui_activity_guard
    def toolpath_table_event(self, widget, data, action=None):
        """ Handle the events of the toolpath table: visibility toggling,
        simulation, cropping, grid duplication and the default move/delete
        operations.
        """
        # "toggle" uses two parameters - all other actions have only one
        if action is None:
            action = data
        if action == "toggle_visibility":
            # get the id of the currently selected toolpath
            try:
                path = int(data)
            except ValueError:
                path = None
            if (not path is None) and (path < len(self.toolpath)):
                self.toolpath[path].visible = not self.toolpath[path].visible
                # keep the "visible" column of the TreeModel in sync
                tp_model = self.toolpath_table.get_model()
                tp_model[path][2] = self.toolpath[path].visible
                self.update_toolpath_related_controls()
        elif action == "simulate":
            index = self._treeview_get_active_index(self.toolpath_table,
                    self.toolpath)
            if not index is None:
                self.show_toolpath_simulation(self.toolpath[index])
        elif action == "crop":
            index = self._treeview_get_active_index(self.toolpath_table,
                    self.toolpath)
            if not index is None:
                self.crop_toolpath(self.toolpath[index])
                self.update_toolpath_table()
        elif action == "grid":
            index = self._treeview_get_active_index(self.toolpath_table,
                    self.toolpath)
            if not index is None:
                self.create_toolpath_grid(self.toolpath[index])
        else:
            # process the default operations (move, delete)
            self._treeview_button_event(self.toolpath_table, self.toolpath,
                    action, self.update_toolpath_table)
        # do some post-processing ...
        if action in ("toggle_visibility", "delete", "crop", "grid"):
            # these actions change the 3D scene
            self.update_view()
def update_toolpath_grid_window(self, widget=None):
data = self._toolpath_for_grid_data
x_dim = data["maxx"] - data["minx"]
y_dim = data["maxy"] - data["miny"]
x_count = self.gui.get_object("GridXCount").get_value()
x_space = self.gui.get_object("GridXDistance").get_value()
y_count = self.gui.get_object("GridYCount").get_value()
y_space = self.gui.get_object("GridYDistance").get_value()
x_width = x_dim * x_count + x_space * (x_count - 1)
y_width = y_dim * y_count + y_space * (y_count - 1)
self.gui.get_object("LabelGridXWidth").set_label("%g%s" % \
(x_width, self.settings.get("unit")))
self.gui.get_object("LabelGridYWidth").set_label("%g%s" % \
(y_width, self.settings.get("unit")))
    def create_toolpath_grid(self, toolpath):
        """ Open the "toolpath grid" dialog and - if confirmed - append a
        new toolpath consisting of shifted copies of the given one arranged
        in an x/y grid.

        The original toolpath is hidden afterwards.
        """
        dialog = self.gui.get_object("ToolpathGridDialog")
        data = self._toolpath_for_grid_data
        data["minx"] = toolpath.minx()
        data["maxx"] = toolpath.maxx()
        data["miny"] = toolpath.miny()
        data["maxy"] = toolpath.maxy()
        self.gui.get_object("GridXCount").set_value(1)
        self.gui.get_object("GridYCount").set_value(1)
        self.update_toolpath_grid_window()
        result = dialog.run()
        if result == 1:
            # "OK" was pressed
            new_tp = []
            x_count = int(self.gui.get_object("GridXCount").get_value())
            y_count = int(self.gui.get_object("GridYCount").get_value())
            x_space = self.gui.get_object("GridXDistance").get_value()
            y_space = self.gui.get_object("GridYDistance").get_value()
            x_dim = data["maxx"] - data["minx"]
            y_dim = data["maxy"] - data["miny"]
            for x in range(x_count):
                for y in range(y_count):
                    # shift by one item size plus one gap per grid position
                    shift = Point(x * (x_space + x_dim),
                            y * (y_space + y_dim), 0)
                    for path in toolpath.get_paths():
                        new_path = pycam.Geometry.Path.Path()
                        new_path.points = [shift.add(p) for p in path.points]
                        new_tp.append(new_path)
            new_toolpath = pycam.Toolpath.Toolpath(new_tp, toolpath.name,
                    toolpath.toolpath_settings)
            toolpath.visible = False
            new_toolpath.visible = True
            self.toolpath.append(new_toolpath)
            self.update_toolpath_table()
        dialog.hide()
    @progress_activity_guard
    def crop_toolpath(self, toolpath):
        """ Crop the given toolpath to the outline of the current model.

        2D contour models are used as the outline directly; models with a
        waterline contour are projected to 2D first and expanded by the
        tool diameter. Other model types cause a warning.
        """
        if hasattr(self.model, "get_polygons"):
            # a 2D contour model provides the outline directly
            contour = self.model
        elif hasattr(self.model, "get_waterline_contour"):
            plane = self._get_projection_plane()
            self.update_progress_bar("Calculating the 2D projection")
            contour = self.model.get_waterline_contour(plane)
            self.update_progress_bar("Applying the tool diameter offset")
            # grow the contour by the tool diameter (2 * radius)
            contour = contour.get_offset_model(
                    2 * toolpath.get_tool_settings()["tool_radius"])
        else:
            log.warn(("The current model (%s) does not support " \
                    + "projections") % str(type(self.model)))
            return
        self.update_progress_bar("Cropping the toolpath")
        toolpath.crop(contour.get_polygons(), callback=self.update_progress_bar)
def update_toolpath_related_controls(self):
# show or hide the "toolpath" tab
toolpath_tab = self.gui.get_object("ToolPathTab")
if not self.toolpath:
toolpath_tab.hide()
else:
self.gui.get_object("ToolPathTabLabel").set_text(
"%s (%d)" % (self._original_toolpath_tab_label, len(self.toolpath)))
toolpath_tab.show()
# enable/disable the export menu item
self.gui.get_object("ExportGCodeAll").set_sensitive(len(self.toolpath) > 0)
toolpaths_are_visible = any([tp.visible for tp in self.toolpath])
self.gui.get_object("ExportGCodeVisible").set_sensitive(
toolpaths_are_visible)
self.gui.get_object("ExportVisibleToolpathsButton").set_sensitive(
toolpaths_are_visible)
    def update_toolpath_table(self, new_index=None, skip_model_update=False):
        """ Rebuild the toolpath table and update the toolpath-related
        controls and buttons.

        @param new_index: index of the item to select afterwards (None
            keeps the current selection)
        @param skip_model_update: skip refilling the TreeModel (only the
            selection and the button states are updated)
        """
        def get_time_string(minutes):
            # format a duration with a resolution matching its magnitude
            if minutes > 180:
                return "%d hours" % int(round(minutes / 60))
            elif minutes > 3:
                return "%d minutes" % int(round(minutes))
            else:
                return "%d seconds" % int(round(minutes * 60))
        self.update_toolpath_related_controls()
        # reset the model data and the selection
        if new_index is None:
            # keep the old selection - this may return "None" if nothing is selected
            new_index = self._treeview_get_active_index(self.toolpath_table, self.toolpath)
        if not skip_model_update:
            # update the TreeModel data
            model = self.gui.get_object("ToolPathListModel")
            model.clear()
            # columns: name, visible, drill_size, drill_id, allowance, speed, feedrate
            for index in range(len(self.toolpath)):
                tp = self.toolpath[index]
                toolpath_settings = tp.get_toolpath_settings()
                tool = toolpath_settings.get_tool_settings()
                process = toolpath_settings.get_process_settings()
                items = (index, tp.name, tp.visible, tool["tool_radius"],
                        tool["id"], process["material_allowance"],
                        tool["speed"], tool["feedrate"],
                        get_time_string(tp.get_machine_time(
                            self.settings.get("gcode_safety_height"))))
                model.append(items)
            if not new_index is None:
                self._treeview_set_active_index(self.toolpath_table, new_index)
        # enable/disable the modification buttons
        self.gui.get_object("toolpath_up").set_sensitive((not new_index is None) and (new_index > 0))
        self.gui.get_object("toolpath_delete").set_sensitive(not new_index is None)
        self.gui.get_object("toolpath_down").set_sensitive((not new_index is None) and (new_index + 1 < len(self.toolpath)))
        self.gui.get_object("toolpath_simulate").set_sensitive(not new_index is None)
        self.gui.get_object("toolpath_crop").set_sensitive(not new_index is None)
        self.gui.get_object("ToolpathGrid").set_sensitive(not new_index is None)
    @gui_activity_guard
    def save_task_settings_file(self, widget=None, filename=None):
        """ Store the current tool/process/bounds/task lists in a settings
        file.

        "filename" may be a string/URI, a callable returning one or None -
        without a usable filename a file dialog is opened.
        """
        if callable(filename):
            filename = filename()
        if not isinstance(filename, (basestring, pycam.Utils.URIHandler)):
            # we open a dialog
            filename = self.get_filename_via_dialog("Save settings to ...",
                    mode_load=False, type_filter=FILTER_CONFIG,
                    filename_templates=(self.last_task_settings_uri, self.last_model_uri))
            if filename:
                self.last_task_settings_uri = pycam.Utils.URIHandler(filename)
                self.update_save_actions()
        # no filename given -> exit
        if not filename:
            return
        settings = pycam.Gui.Settings.ProcessSettings()
        if not settings.write_to_file(filename, self.tool_list,
                self.process_list, self.bounds_list, self.task_list):
            log.error("Failed to save settings file")
        else:
            log.info("Task settings written to %s" % filename)
            self.add_to_recent_file_list(filename)
        self.update_save_actions()
    def toggle_progress_bar(self, status):
        """ Show (status=True) or hide the progress bar widget and block or
        unblock the menubar and the task pane meanwhile.
        """
        # always hide the progress button - it will be enabled later
        self.show_progress_button.hide()
        if status:
            self.menubar.set_sensitive(False)
            self.task_pane.set_sensitive(False)
            # remember the start time for the "remaining time" estimation
            self._progress_start_time = time.time()
            self.update_progress_bar(text="", percent=0)
            self.progress_cancel_button.set_sensitive(True)
            # enable "pulse" mode for a start (in case of unknown ETA)
            self.progress_bar.pulse()
            self.progress_widget.show()
        else:
            self.progress_widget.hide()
            self.task_pane.set_sensitive(True)
            self.menubar.set_sensitive(True)
    def disable_progress_cancel_button(self):
        """ mainly useful for non-interruptable operations (e.g. model
        transformations)

        The button is re-enabled by the next "toggle_progress_bar(True)".
        """
        self.progress_cancel_button.set_sensitive(False)
    def update_progress_bar(self, text=None, percent=None):
        """ Update the progress bar text and/or its fraction and process
        pending GUI events.

        An "estimated time of arrival" line is appended to the text as soon
        as a measurable fraction of the work is done.
        @return: True if the user requested cancellation of the current
            operation (via the "cancel" button), False otherwise
        """
        # NOTE(review): this method uses "os.linesep" - it relies on a
        # module-level "import os" that is not part of the visible import
        # block at the top of the file; verify.
        if not percent is None:
            percent = min(max(percent, 0.0), 100.0)
            self.progress_bar.set_fraction(percent/100.0)
        if (not percent) and (self.progress_bar.get_fraction() == 0):
            # use "pulse" mode until we reach 1% of the work to be done
            self.progress_bar.pulse()
        # update the GUI
        current_time = time.time()
        # Don't update the GUI more often than once per second.
        # Exception: text-only updates
        # This restriction improves performance and reduces the
        # "snappiness" of the GUI.
        if (self._last_gtk_events_time is None) \
                or text \
                or (self._last_gtk_events_time + 1 <= current_time):
            # "estimated time of arrival" text
            time_estimation_suffix = " remaining ..."
            if self.progress_bar.get_fraction() > 0:
                # extrapolate the total duration from the elapsed time and
                # the completed fraction
                eta_full = (time.time() - self._progress_start_time) / self.progress_bar.get_fraction()
                if eta_full > 0:
                    eta_delta = eta_full - (time.time() - self._progress_start_time)
                    eta_delta = int(round(eta_delta))
                    if hasattr(self, "_last_eta_delta"):
                        previous_eta_delta = self._last_eta_delta
                        if eta_delta == previous_eta_delta + 1:
                            # We are currently toggling between two numbers.
                            # We want to avoid screen flicker, thus we just live
                            # with the slight inaccuracy.
                            eta_delta = self._last_eta_delta
                    self._last_eta_delta = eta_delta
                    eta_delta_obj = datetime.timedelta(seconds=eta_delta)
                    eta_text = "%s%s" % (eta_delta_obj, time_estimation_suffix)
                else:
                    eta_text = None
            else:
                eta_text = None
            if not text is None:
                lines = [text]
            else:
                old_lines = self.progress_bar.get_text().split(os.linesep)
                # skip the time estimation line
                lines = [line for line in old_lines
                        if not line.endswith(time_estimation_suffix)]
            if eta_text:
                lines.append(eta_text)
            self.progress_bar.set_text(os.linesep.join(lines))
            # show the "show_tool_button" ("hide" is called in the progress decorator)
            if self.settings.get("toolpath_in_progress"):
                self.show_progress_button.show()
            while gtk.events_pending():
                gtk.main_iteration()
            if not text or (self._progress_start_time + 5 < current_time):
                # We don't store the timing if the text was changed.
                # This is especially nice for the snappiness during font
                # initialization. This exception is only valid for the first
                # five seconds of the operation.
                self._last_gtk_events_time = current_time
        # return if the user requested a break
        return self._progress_cancel_requested
    def cancel_progress(self, widget=None):
        """ Request cancellation of the currently running operation.

        The flag is polled (and returned) by "update_progress_bar".
        """
        self._progress_cancel_requested = True
def finish_toolpath_simulation(self, widget=None, data=None):
# hide the simulation tab
self.simulation_window.hide()
# enable all other tabs again
self.toggle_tabs_for_simulation(True)
self.settings.set("simulation_object", None)
self.settings.set("simulation_toolpath_moves", None)
self.settings.set("show_simulation", False)
self.settings.set("simulation_toolpath", None)
self.update_view()
# don't destroy the simulation window (for "destroy" event)
return True
    def update_toolpath_simulation(self, widget=None, toolpath=None):
        """ Advance the current toolpath simulation by one frame.

        This function is called periodically (see gobject.timeout_add in
        "show_toolpath_simulation") and returns False to stop the timer as
        soon as the simulation window was closed.
        """
        s = self.settings
        # update the GUI
        while gtk.events_pending():
            gtk.main_iteration()
        if not s.get("show_simulation"):
            # cancel
            return False
        safety_height = s.get("gcode_safety_height")
        if not s.get("simulation_toolpath"):
            # get the currently selected toolpath, if none is given
            if toolpath is None:
                toolpath_index = self._treeview_get_active_index(self.toolpath_table, self.toolpath)
                if toolpath_index is None:
                    return
                else:
                    toolpath = self.toolpath[toolpath_index]
            s.set("simulation_toolpath", toolpath)
            # set the current cutter
            self.cutter = toolpath.toolpath_settings.get_tool()
            # calculate steps
            s.set("simulation_machine_time",
                    toolpath.get_machine_time(safety_height=safety_height))
            s.set("simulation_complete_distance",
                    toolpath.get_machine_movement_distance(
                        safety_height=safety_height))
            s.set("simulation_current_distance", 0)
        else:
            toolpath = s.get("simulation_toolpath")
        if (s.get("simulation_current_distance") \
                < s.get("simulation_complete_distance")):
            if s.get("simulation_current_distance") < 0:
                # "-1" -> simulation is finished
                updated_distance = s.get("simulation_complete_distance")
            else:
                # advance the covered distance according to the frame time,
                # the tool feedrate and the configured speed factor
                time_step = 1.0 / s.get("drill_progress_max_fps")
                feedrate = toolpath.toolpath_settings.get_tool_settings(
                        )["feedrate"]
                distance_step = s.get("simulation_speed_factor") * \
                        time_step * feedrate / 60
                updated_distance = min(distance_step + \
                        s.get("simulation_current_distance"),
                        s.get("simulation_complete_distance"))
            if updated_distance != s.get("simulation_current_distance"):
                s.set("simulation_current_distance", updated_distance)
                moves = toolpath.get_moves(safety_height=safety_height,
                        max_movement=updated_distance)
                s.set("simulation_toolpath_moves", moves)
                if moves:
                    self.cutter.moveto(moves[-1][0])
                self.update_view()
        progress_value_percent = 100.0 * s.get("simulation_current_distance") \
                / s.get("simulation_complete_distance")
        self.gui.get_object("SimulationProgressTimelineValue").set_value(
                progress_value_percent)
        return True
    @progress_activity_guard
    def update_toolpath_simulation_ode(self, widget=None, toolpath=None):
        """ Run a physics-based (ODE) simulation of the given or currently
        selected toolpath while updating the 3D view.
        """
        import pycam.Simulation.ODEBlocks as ODEBlocks
        # get the currently selected toolpath, if none is given
        if toolpath is None:
            toolpath_index = self._treeview_get_active_index(self.toolpath_table, self.toolpath)
            if toolpath_index is None:
                return
            else:
                toolpath = self.toolpath[toolpath_index]
        paths = toolpath.get_paths()
        # set the current cutter
        self.cutter = pycam.Cutters.get_tool_from_settings(
                toolpath.get_tool_settings())
        # calculate steps
        detail_level = self.gui.get_object("SimulationDetailsValue").get_value()
        grid_size = 100 * pow(2, detail_level - 1)
        bounding_box = toolpath.get_toolpath_settings().get_bounds()
        (minx, miny, minz), (maxx, maxy, maxz) = bounding_box.get_bounds()
        # proportion = dimension_x / dimension_y
        proportion = (maxx - minx) / (maxy - miny)
        # distribute the grid cells according to the x/y proportion
        x_steps = int(sqrt(grid_size) * proportion)
        y_steps = int(sqrt(grid_size) / proportion)
        simulation_backend = ODEBlocks.ODEBlocks(toolpath.get_tool_settings(),
                toolpath.get_bounding_box(), x_steps=x_steps, y_steps=y_steps)
        self.settings.set("simulation_object", simulation_backend)
        # disable the simulation widget (avoids confusion regarding "cancel")
        if not widget is None:
            self.gui.get_object("SimulationTab").set_sensitive(False)
        # update the view
        self.update_view()
        # calculate the simulation and show it simultaneously
        for path_index, path in enumerate(paths):
            progress_text = "Simulating path %d/%d" % (path_index, len(paths))
            progress_value_percent = 100.0 * path_index / len(paths)
            if self.update_progress_bar(progress_text, progress_value_percent):
                # break if the user pressed the "cancel" button
                break
            for index in range(len(path.points)):
                self.cutter.moveto(path.points[index])
                if index != 0:
                    start = path.points[index - 1]
                    end = path.points[index]
                    if start != end:
                        simulation_backend.process_cutter_movement(start, end)
                self.update_view()
                # break the loop if someone clicked the "cancel" button
                if self.update_progress_bar():
                    break
        # enable the simulation widget again (if we were started from the GUI)
        if not widget is None:
            self.gui.get_object("SimulationTab").set_sensitive(True)
def toggle_tabs_for_simulation(self, new_state):
for objname in ("ModelTab", "ModelTabLabel", "TasksTab",
"TasksTabLabel", "ToolPathTab", "ToolPathTabLabel", "ToolTab",
"ToolTabLabel", "ProcessTab", "ProcessTabLabel", "BoundsTab",
"BoundsTabLabel"):
self.gui.get_object(objname).set_sensitive(new_state)
    def show_toolpath_simulation(self, toolpath=None):
        """ Switch the GUI into simulation mode and start the periodic
        simulation updates (see "update_toolpath_simulation").
        """
        # disable the main controls
        self.toggle_tabs_for_simulation(False)
        # show the simulation controls
        self.simulation_window.show()
        # start the simulation
        self.settings.set("show_simulation", True)
        # frame interval in milliseconds, derived from the fps limit
        time_step = int(1000 / self.settings.get("drill_progress_max_fps"))
        # update the toolpath simulation repeatedly
        gobject.timeout_add(time_step, self.update_toolpath_simulation)
    @progress_activity_guard
    def generate_toolpath(self, tool_settings, process_settings, bounds):
        """ Generate a new toolpath for the given combination of tool,
        process and bounds settings and append it to the toolpath list.

        @return: False if the generation failed or was cancelled by the
            user, True otherwise (invalid settings are silently skipped
            with a True result)
        """
        start_time = time.time()
        self.update_progress_bar("Preparing toolpath generation")
        parent = self
        class UpdateView:
            # rate-limited draw callback that is handed to the generator
            def __init__(self, func, max_fps=1):
                self.last_update = time.time()
                self.max_fps = max_fps
                self.func = func
            def update(self, text=None, percent=None, tool_position=None,
                    toolpath=None):
                if parent.settings.get("show_drill_progress"):
                    if not tool_position is None:
                        parent.cutter.moveto(tool_position)
                    if not toolpath is None:
                        parent.settings.set("toolpath_in_progress", toolpath)
                    current_time = time.time()
                    if (current_time - self.last_update) > 1.0/self.max_fps:
                        self.last_update = current_time
                        if self.func:
                            self.func()
                # break the loop if someone clicked the "cancel" button
                return parent.update_progress_bar(text, percent)
        draw_callback = UpdateView(self.update_view,
                max_fps=self.settings.get("drill_progress_max_fps")).update
        self.update_progress_bar("Generating collision model")
        # turn the toolpath settings into a dict
        toolpath_settings = self.get_toolpath_settings(tool_settings,
                process_settings, bounds)
        if toolpath_settings is None:
            # behave as if "cancel" was requested
            return True
        self.cutter = toolpath_settings.get_tool()
        # run the toolpath generation
        self.update_progress_bar("Starting the toolpath generation")
        try:
            toolpath = pycam.Toolpath.Generator.generate_toolpath_from_settings(
                    self.model, toolpath_settings, callback=draw_callback)
        except Exception:
            # catch all non-system-exiting exceptions
            report_exception()
            return False
        log.info("Toolpath generation time: %f" % (time.time() - start_time))
        # don't show the new toolpath anymore
        self.settings.set("toolpath_in_progress", None)
        if toolpath is None:
            # user interruption
            # return "False" if the action was cancelled
            return not self.update_progress_bar()
        elif isinstance(toolpath, basestring):
            # an error occoured - "toolpath" contains the error message
            log.error("Failed to generate toolpath: %s" % toolpath)
            # we were not successful (similar to a "cancel" request)
            return False
        else:
            # hide the previous toolpath if it is the only visible one (automatic mode)
            if (len([True for path in self.toolpath if path.visible]) == 1) \
                    and self.toolpath[-1].visible:
                self.toolpath[-1].visible = False
            # add the new toolpath
            description = "%s / %s" % (tool_settings["name"],
                    process_settings["name"])
            # the tool id numbering should start with 1 instead of zero
            self.toolpath.add_toolpath(toolpath, description, toolpath_settings)
            self.update_toolpath_table()
            self.update_view()
            # return "False" if the action was cancelled
            return not self.update_progress_bar()
def get_toolpath_settings(self, tool_settings, process_settings, bounds):
    """Combine tool, process and bounds configuration into a single
    ToolpathSettings container.

    Returns the populated ToolpathSettings object, or None if the
    processing boundaries (after applying the tool-radius offset) are
    too small for a toolpath to be generated.
    """
    toolpath_settings = pycam.Gui.Settings.ToolpathSettings()
    # this offset allows to cut a model with a minimal boundary box correctly
    offset = tool_settings["tool_radius"]
    # check the configured direction of the offset (boundary mode)
    if self.settings.get("boundary_mode") == self.BOUNDARY_MODES["inside"]:
        # use the negative offset to stay inside the boundaries
        offset *= -1
    elif self.settings.get("boundary_mode") == self.BOUNDARY_MODES["along"]:
        # don't use any offset
        offset = 0
    elif self.settings.get("boundary_mode") == self.BOUNDARY_MODES["around"]:
        # just use the positive offset - no change required
        pass
    else:
        # this should never happen
        log.error("Assertion failed: invalid boundary_mode (%s)" % str(self.settings.get("boundary_mode")))
    # the offset is only applied horizontally (z border stays zero)
    border = (offset, offset, 0)
    bounds.set_reference(self.model.get_bounds())
    processing_bounds = Bounds(Bounds.TYPE_FIXED_MARGIN, border, border,
            reference=bounds)
    # check if the boundary limits are valid
    if not processing_bounds.is_valid():
        # don't generate a toolpath if the area is too small (e.g. due to the tool size)
        log.error("Processing boundaries are too small for this tool size.")
        return None
    toolpath_settings.set_bounds(processing_bounds)
    # put the tool settings together
    # the tool id numbering is 1-based (list position + 1)
    tool_id = self.tool_list.index(tool_settings) + 1
    toolpath_settings.set_tool(tool_id, tool_settings["shape"],
            tool_settings["tool_radius"], tool_settings["torus_radius"],
            tool_settings["speed"], tool_settings["feedrate"])
    # get the support grid options
    grid_type = self.settings.get("support_grid_type")
    if grid_type == GRID_TYPES["grid"]:
        toolpath_settings.set_support_grid(
                self.settings.get("support_grid_distance_x"),
                self.settings.get("support_grid_distance_y"),
                self.settings.get("support_grid_thickness"),
                self.settings.get("support_grid_height"),
                offset_x=self.settings.get("support_grid_offset_x"),
                offset_y=self.settings.get("support_grid_offset_y"),
                adjustments_x=self.grid_adjustments_x,
                adjustments_y=self.grid_adjustments_y)
    elif grid_type in (GRID_TYPES["automatic_edge"],
            GRID_TYPES["automatic_corner"]):
        corner_start = (grid_type == GRID_TYPES["automatic_corner"])
        toolpath_settings.set_support_distributed(
                self.settings.get("support_grid_average_distance"),
                self.settings.get("support_grid_minimum_bridges"),
                self.settings.get("support_grid_thickness"),
                self.settings.get("support_grid_height"),
                self.settings.get("support_grid_length"),
                start_at_corners=corner_start)
    elif grid_type == GRID_TYPES["none"]:
        pass
    else:
        log.error("Invalid support grid type: %d" % grid_type)
    # calculation backend: ODE / None
    if self.settings.get("enable_ode"):
        toolpath_settings.set_calculation_backend("ODE")
    # unit size
    toolpath_settings.set_unit_size(self.settings.get("unit"))
    # map each path strategy name to its (generator, postprocessor) pair
    STRATEGY_GENERATORS = {
            "PushRemoveStrategy": ("PushCutter", "SimpleCutter"),
            "ContourPolygonStrategy": ("PushCutter", "ContourCutter"),
            "ContourFollowStrategy": ("ContourFollow", "SimpleCutter"),
            "SurfaceStrategy": ("DropCutter", "PathAccumulator"),
            "EngraveStrategy": ("EngraveCutter", "SimpleCutter")}
    generator, postprocessor = STRATEGY_GENERATORS[
            process_settings["path_strategy"]]
    # process settings
    toolpath_settings.set_process_settings(
            generator, postprocessor, process_settings["path_direction"],
            process_settings["material_allowance"],
            process_settings["overlap_percent"],
            process_settings["step_down"],
            process_settings["engrave_offset"],
            process_settings["milling_style"],
            process_settings["pocketing_type"])
    return toolpath_settings
def get_filename_via_dialog(self, title, mode_load=False, type_filter=None,
        filename_templates=None, filename_extension=None, parent=None):
    """Show a GTK file chooser dialog and return the selected filename.

    "title" is the dialog caption.  With "mode_load" set to True an
    "open" dialog is shown, otherwise a "save" dialog.  "type_filter" is
    an optional list used to build file filters and to guess a default
    extension.  "filename_templates" is an optional sequence of previous
    filenames (or objects providing a get_path() method) used to derive
    a default filename.  Returns the chosen filename, or None if the
    dialog was cancelled.
    """
    if parent is None:
        parent = self.window
    # we open a dialog
    if mode_load:
        dialog = gtk.FileChooserDialog(title=title,
                parent=parent, action=gtk.FILE_CHOOSER_ACTION_OPEN,
                buttons=(gtk.STOCK_CANCEL, gtk.RESPONSE_CANCEL,
                        gtk.STOCK_OPEN, gtk.RESPONSE_OK))
    else:
        # NOTE(review): the save dialog uses self.window instead of the
        # "parent" argument - looks unintentional; confirm before changing.
        dialog = gtk.FileChooserDialog(title=title,
                parent=self.window, action=gtk.FILE_CHOOSER_ACTION_SAVE,
                buttons=(gtk.STOCK_CANCEL, gtk.RESPONSE_CANCEL,
                        gtk.STOCK_SAVE, gtk.RESPONSE_OK))
    # set the initial directory to the last one used
    if self.last_dirname and os.path.isdir(self.last_dirname):
        dialog.set_current_folder(self.last_dirname)
    # add filter for files
    if type_filter:
        for file_filter in get_filters_from_list(type_filter):
            dialog.add_filter(file_filter)
    # guess the export filename based on the model's filename
    valid_templates = []
    if filename_templates:
        for template in filename_templates:
            if not template:
                continue
            elif hasattr(template, "get_path"):
                valid_templates.append(template.get_path())
            else:
                valid_templates.append(template)
    if valid_templates:
        # only the first valid template is used as the default name
        filename_template = valid_templates[0]
        # remove the extension
        default_filename = os.path.splitext(filename_template)[0]
        if filename_extension:
            default_filename += os.path.extsep + filename_extension
        elif type_filter:
            for one_type in type_filter:
                extension = one_type[1]
                if isinstance(extension, (list, tuple, set)):
                    extension = extension[0]
                # use only the extension of the type filter string
                extension = os.path.splitext(extension)[1]
                if extension:
                    default_filename += extension
                # finish the loop
                break
        dialog.select_filename(default_filename)
        try:
            dialog.set_current_name(
                    os.path.basename(default_filename).encode("utf-8"))
        except UnicodeError:
            # ignore
            pass
    # add filter for all files
    ext_filter = gtk.FileFilter()
    ext_filter.set_name("All files")
    ext_filter.add_pattern("*")
    dialog.add_filter(ext_filter)
    # keep asking until the answer is usable (or the dialog is cancelled)
    done = False
    while not done:
        dialog.set_filter(dialog.list_filters()[0])
        response = dialog.run()
        filename = dialog.get_filename()
        uri = pycam.Utils.URIHandler(filename)
        dialog.hide()
        if response != gtk.RESPONSE_OK:
            dialog.destroy()
            return None
        if not mode_load and filename:
            # check if we want to add a default suffix
            filename = self.get_filename_with_suffix(filename, type_filter)
        if not mode_load and os.path.exists(filename):
            # ask for confirmation before overwriting an existing file
            overwrite_window = gtk.MessageDialog(self.window, type=gtk.MESSAGE_WARNING,
                    buttons=gtk.BUTTONS_YES_NO,
                    message_format="This file exists. Do you want to overwrite it?")
            overwrite_window.set_title("Confirm overwriting existing file")
            response = overwrite_window.run()
            overwrite_window.destroy()
            done = (response == gtk.RESPONSE_YES)
        elif mode_load and not uri.exists():
            # a non-existing file cannot be loaded - ask again
            not_found_window = gtk.MessageDialog(self.window, type=gtk.MESSAGE_ERROR,
                    buttons=gtk.BUTTONS_OK,
                    message_format="This file does not exist. Please choose a different filename.")
            not_found_window.set_title("Invalid filename selected")
            response = not_found_window.run()
            not_found_window.destroy()
            done = False
        else:
            done = True
    dialog.destroy()
    # add the file to the list of recently used ones
    if filename:
        self.add_to_recent_file_list(filename)
    return filename
def add_to_recent_file_list(self, filename):
    """Register *filename* with GTK's recent-files manager and remember
    its directory for the next file dialog.

    A file that does not exist yet is skipped; it is expected to be
    registered again after it has been written.
    """
    uri = pycam.Utils.URIHandler(filename)
    if not uri.exists():
        return
    # The recent manager may be unavailable (e.g. GTK 2.12.1 on Windows).
    if self.recent_manager:
        url = uri.get_url()
        if self.recent_manager.has_item(url):
            try:
                self.recent_manager.remove_item(url)
            except gobject.GError:
                pass
        self.recent_manager.add_item(url)
    # Remember the directory of the most recently used local file.
    if uri.is_local():
        self.last_dirname = os.path.dirname(uri.get_local_path())
@gui_activity_guard
def save_toolpath(self, widget=None, only_visible=False):
    """Export the current toolpath(s) to a GCode file.

    "widget" may be a target filename (string), a callable returning a
    filename, or any other value (in which case a save dialog is shown).
    If "only_visible" is True, only the currently visible toolpaths are
    exported.
    """
    if not self.toolpath:
        return
    # "widget" may be a factory for the target filename
    if callable(widget):
        widget = widget()
    if isinstance(widget, basestring):
        filename = widget
    else:
        # we open a dialog
        if self.settings.get("gcode_filename_extension"):
            filename_extension = self.settings.get("gcode_filename_extension")
        else:
            filename_extension = None
        filename = self.get_filename_via_dialog("Save toolpath to ...",
                mode_load=False, type_filter=FILTER_GCODE,
                filename_templates=(self.last_toolpath_file, self.last_model_uri),
                filename_extension=filename_extension)
        if filename:
            self.last_toolpath_file = filename
            self.update_save_actions()
    # no filename given -> exit
    if not filename:
        return
    # warn about possible tool/material collisions caused by a low safety height
    if self.settings.get("gcode_safety_height") < self.settings.get("maxz"):
        log.warn(("Safety height (%.4f) is below the top of the model " \
                + "(%.4f) - this can cause collisions of the tool with " \
                + "the material.") % (self.settings.get(
                "gcode_safety_height"), self.settings.get("maxz")))
    try:
        if only_visible:
            export_toolpaths = [tp for tp in self.toolpath if tp.visible]
        else:
            export_toolpaths = self.toolpath
        destination = open(filename, "w")
        safety_height = self.settings.get("gcode_safety_height")
        meta_data = self.get_meta_data()
        machine_time = 0
        # calculate the machine time and store it in the GCode header
        for toolpath in export_toolpaths:
            machine_time += toolpath.get_machine_time(safety_height)
        all_info = meta_data + os.linesep \
                + "Estimated machine time: %.0f minutes" % machine_time
        minimum_steps = [self.settings.get("gcode_minimum_step_x"),
                self.settings.get("gcode_minimum_step_y"),
                self.settings.get("gcode_minimum_step_z")]
        # an "absolute" touch off position is given as explicit coordinates;
        # otherwise the generator receives no position
        if self.settings.get("touch_off_position_type") == "absolute":
            pos_x = self.settings.get("touch_off_position_x")
            pos_y = self.settings.get("touch_off_position_y")
            pos_z = self.settings.get("touch_off_position_z")
            touch_off_pos = Point(pos_x, pos_y, pos_z)
        else:
            touch_off_pos = None
        generator = GCodeGenerator(destination,
                metric_units=(self.settings.get("unit") == "mm"),
                safety_height=safety_height,
                toggle_spindle_status=self.settings.get("gcode_start_stop_spindle"),
                spindle_delay=self.settings.get("gcode_spindle_delay"),
                comment=all_info, minimum_steps=minimum_steps,
                touch_off_on_startup=self.settings.get("touch_off_on_startup"),
                touch_off_on_tool_change=self.settings.get("touch_off_on_tool_change"),
                touch_off_position=touch_off_pos,
                touch_off_rapid_move=self.settings.get("touch_off_rapid_move"),
                touch_off_slow_move=self.settings.get("touch_off_slow_move"),
                touch_off_slow_feedrate=self.settings.get("touch_off_slow_feedrate"),
                touch_off_height=self.settings.get("touch_off_height"),
                touch_off_pause_execution=self.settings.get("touch_off_pause_execution"))
        # translate the numeric path mode setting into generator path modes
        path_mode = self.settings.get("gcode_path_mode")
        if path_mode == 0:
            generator.set_path_mode(PATH_MODES["exact_path"])
        elif path_mode == 1:
            generator.set_path_mode(PATH_MODES["exact_stop"])
        elif path_mode == 2:
            generator.set_path_mode(PATH_MODES["continuous"])
        else:
            # continuous mode with optional motion/naive tolerances
            naive_tolerance = self.settings.get("gcode_naive_tolerance")
            if naive_tolerance == 0:
                naive_tolerance = None
            generator.set_path_mode(PATH_MODES["continuous"],
                    self.settings.get("gcode_motion_tolerance"),
                    naive_tolerance)
        for toolpath in export_toolpaths:
            settings = toolpath.get_toolpath_settings()
            tool = settings.get_tool_settings()
            generator.set_speed(tool["feedrate"], tool["speed"])
            generator.add_moves(toolpath.get_moves(safety_height),
                    tool_id=tool["id"], comment=toolpath.get_meta_data())
        generator.finish()
        destination.close()
        log.info("GCode file successfully written: %s" % str(filename))
    except IOError, err_msg:
        log.error("Failed to save toolpath file: %s" % err_msg)
    else:
        # register the file only after it was written successfully
        self.add_to_recent_file_list(filename)
def get_meta_data(self):
    """Return a multi-line meta data header containing the model
    filename, the current timestamp and the program version - each line
    tagged with the meta data prefix.
    """
    info_lines = ("Filename: %s" % str(self.last_model_uri),
            "Timestamp: %s" % str(datetime.datetime.now()),
            "Version: %s" % VERSION)
    return os.linesep.join(["%s %s" % (self.META_DATA_PREFIX, text)
            for text in info_lines])
def mainloop(self):
    """Enter the GTK main loop unless the GUI was disabled.

    A KeyboardInterrupt (Ctrl-C) is turned into a regular shutdown.
    """
    if self.no_dialog:
        # no GUI was requested - nothing to do
        return
    gtk_prefs = gtk.settings_get_default()
    # force menu and button icons to be displayed
    gtk_prefs.props.gtk_menu_images = True
    gtk_prefs.props.gtk_button_images = True
    try:
        gtk.main()
    except KeyboardInterrupt:
        self.quit()
if __name__ == "__main__":
    # Start the GUI; an optional command line argument is treated as a
    # model file that should be loaded immediately.
    gui = ProjectGui()
    if len(sys.argv) > 1:
        gui.load_model_file(sys.argv[1])
    gui.mainloop()
|
unknown
|
codeparrot/codeparrot-clean
| ||
"""
Tests for Term.
"""
from collections import Counter
from itertools import product
from unittest import TestCase
from zipline.errors import (
DTypeNotSpecified,
InputTermNotAtomic,
InvalidDType,
TermInputsNotSpecified,
WindowLengthNotSpecified,
)
from zipline.pipeline import Factor, TermGraph
from zipline.pipeline.data import Column, DataSet
from zipline.pipeline.term import AssetExists, NotSpecified
from zipline.pipeline.expression import NUMEXPR_MATH_FUNCS
from zipline.utils.numpy_utils import (
datetime64ns_dtype,
float64_dtype,
)
class SomeDataSet(DataSet):
    """Minimal DataSet with three float columns, used as raw input for
    the factor classes in these tests."""
    foo = Column(float64_dtype)
    bar = Column(float64_dtype)
    buzz = Column(float64_dtype)
class SubDataSet(SomeDataSet):
    """Empty subclass - used to test column identity/dtype behaviour of
    DataSet subclassing (see SubDataSetTestCase)."""
    pass
class SubDataSetNewCol(SomeDataSet):
    """Subclass adding one extra column on top of SomeDataSet."""
    qux = Column(float64_dtype)
class SomeFactor(Factor):
    """Fully specified factor: dtype, window_length and inputs are all
    given as class-level defaults."""
    dtype = float64_dtype
    window_length = 5
    inputs = [SomeDataSet.foo, SomeDataSet.bar]
# Alias used to verify that an aliased class constructs the identical
# (cached) instances as the original class.
SomeFactorAlias = SomeFactor
class SomeOtherFactor(Factor):
    """Second fully specified factor with a different input set."""
    dtype = float64_dtype
    window_length = 5
    inputs = [SomeDataSet.bar, SomeDataSet.buzz]
class DateFactor(Factor):
    """Factor with a datetime64[ns] dtype instead of float64."""
    dtype = datetime64ns_dtype
    window_length = 5
    inputs = [SomeDataSet.bar, SomeDataSet.buzz]
class NoLookbackFactor(Factor):
    """Factor with a zero-length window (no historical lookback)."""
    dtype = float64_dtype
    window_length = 0
def gen_equivalent_factors():
    """
    Return an iterator of SomeFactor instances that should all be the same
    object.

    Each yielded expression spells out the same (inputs, window_length)
    configuration in a different way - via defaults, NotSpecified,
    positional or keyword arguments, and the SomeFactorAlias name - so
    instance caching is expected to collapse them all onto one object.
    """
    yield SomeFactor()
    yield SomeFactor(inputs=NotSpecified)
    yield SomeFactor(SomeFactor.inputs)
    yield SomeFactor(inputs=SomeFactor.inputs)
    yield SomeFactor([SomeDataSet.foo, SomeDataSet.bar])
    yield SomeFactor(window_length=SomeFactor.window_length)
    yield SomeFactor(window_length=NotSpecified)
    yield SomeFactor(
        [SomeDataSet.foo, SomeDataSet.bar],
        window_length=NotSpecified,
    )
    yield SomeFactor(
        [SomeDataSet.foo, SomeDataSet.bar],
        window_length=SomeFactor.window_length,
    )
    yield SomeFactorAlias()
def to_dict(l):
    """
    Convert a list to a dict with keys drawn from '0', '1', '2', ...

    Example
    -------
    >>> to_dict([2, 3, 4])
    {'0': 2, '1': 3, '2': 4}
    """
    return {str(index): value for index, value in enumerate(l)}
class DependencyResolutionTestCase(TestCase):
    """Tests for TermGraph dependency-resolution ordering and extra-rows
    bookkeeping."""

    def check_dependency_order(self, ordered_terms):
        # Every term must appear strictly after all of its dependencies.
        seen = set()
        for term in ordered_terms:
            for dep in term.dependencies:
                self.assertIn(dep, seen)
            seen.add(term)

    def test_single_factor(self):
        """
        Test dependency resolution for a single factor.
        """
        def check_output(graph):
            resolution_order = list(graph.ordered())
            # The factor, its two inputs, and AssetExists().
            self.assertEqual(len(resolution_order), 4)
            self.check_dependency_order(resolution_order)
            self.assertIn(AssetExists(), resolution_order)
            self.assertIn(SomeDataSet.foo, resolution_order)
            self.assertIn(SomeDataSet.bar, resolution_order)
            self.assertIn(SomeFactor(), resolution_order)
            # 4 extra rows - presumably window_length - 1 (5 - 1); confirm
            # against TermGraph's extra-rows semantics.
            self.assertEqual(graph.node[SomeDataSet.foo]['extra_rows'], 4)
            self.assertEqual(graph.node[SomeDataSet.bar]['extra_rows'], 4)
        # All equivalent spellings of SomeFactor must produce the same graph.
        for foobar in gen_equivalent_factors():
            check_output(TermGraph(to_dict([foobar])))

    def test_single_factor_instance_args(self):
        """
        Test dependency resolution for a single factor with arguments passed to
        the constructor.
        """
        bar, buzz = SomeDataSet.bar, SomeDataSet.buzz
        graph = TermGraph(to_dict([SomeFactor([bar, buzz], window_length=5)]))
        resolution_order = list(graph.ordered())
        # SomeFactor, its inputs, and AssetExists()
        self.assertEqual(len(resolution_order), 4)
        self.check_dependency_order(resolution_order)
        self.assertIn(AssetExists(), resolution_order)
        self.assertEqual(graph.extra_rows[AssetExists()], 4)
        self.assertIn(bar, resolution_order)
        self.assertIn(buzz, resolution_order)
        self.assertIn(SomeFactor([bar, buzz], window_length=5),
                      resolution_order)
        self.assertEqual(graph.extra_rows[bar], 4)
        self.assertEqual(graph.extra_rows[buzz], 4)

    def test_reuse_atomic_terms(self):
        """
        Test that raw inputs only show up in the dependency graph once.
        """
        f1 = SomeFactor([SomeDataSet.foo, SomeDataSet.bar])
        f2 = SomeOtherFactor([SomeDataSet.bar, SomeDataSet.buzz])
        graph = TermGraph(to_dict([f1, f2]))
        resolution_order = list(graph.ordered())
        # bar should only appear once.
        self.assertEqual(len(resolution_order), 6)
        self.assertEqual(len(set(resolution_order)), 6)
        self.check_dependency_order(resolution_order)

    def test_disallow_recursive_lookback(self):
        # A windowed factor may not take another factor as an input.
        with self.assertRaises(InputTermNotAtomic):
            SomeFactor(inputs=[SomeFactor(), SomeDataSet.foo])
class ObjectIdentityTestCase(TestCase):
    """Tests for Term instance caching: equivalent constructions must
    return the identical object, distinct constructions must not."""

    def assertSameObject(self, *objs):
        # All arguments must be the very same object (identity, not equality).
        first = objs[0]
        for obj in objs:
            self.assertIs(first, obj)

    def assertDifferentObjects(self, *objs):
        # No two arguments may share an id(); report the first duplicate.
        id_counts = Counter(map(id, objs))
        ((most_common_id, count),) = id_counts.most_common(1)
        if count > 1:
            dupe = [o for o in objs if id(o) == most_common_id][0]
            self.fail("%s appeared %d times in %s" % (dupe, count, objs))

    def test_instance_caching(self):
        # Equivalent spellings collapse onto one cached instance.
        self.assertSameObject(*gen_equivalent_factors())
        self.assertIs(
            SomeFactor(window_length=SomeFactor.window_length + 1),
            SomeFactor(window_length=SomeFactor.window_length + 1),
        )
        self.assertIs(
            SomeFactor(dtype=float64_dtype),
            SomeFactor(dtype=float64_dtype),
        )
        self.assertIs(
            SomeFactor(inputs=[SomeFactor.inputs[1], SomeFactor.inputs[0]]),
            SomeFactor(inputs=[SomeFactor.inputs[1], SomeFactor.inputs[0]]),
        )

    def test_instance_non_caching(self):
        f = SomeFactor()
        # Different window_length.
        self.assertIsNot(
            f,
            SomeFactor(window_length=SomeFactor.window_length + 1),
        )
        # Different dtype
        self.assertIsNot(
            f,
            SomeFactor(dtype=datetime64ns_dtype)
        )
        # Reordering inputs changes semantics.
        self.assertIsNot(
            f,
            SomeFactor(inputs=[SomeFactor.inputs[1], SomeFactor.inputs[0]]),
        )

    def test_instance_non_caching_redefine_class(self):
        orig_foobar_instance = SomeFactorAlias()
        # A lexically identical but newly defined class must not share the
        # cache of the module-level SomeFactor.
        class SomeFactor(Factor):
            dtype = float64_dtype
            window_length = 5
            inputs = [SomeDataSet.foo, SomeDataSet.bar]
        self.assertIsNot(orig_foobar_instance, SomeFactor())

    def test_instance_caching_binops(self):
        # Structurally identical arithmetic expressions are cached too,
        # for every combination of factor/factor and factor/scalar.
        f = SomeFactor()
        g = SomeOtherFactor()
        for lhs, rhs in product([f, g], [f, g]):
            self.assertIs((lhs + rhs), (lhs + rhs))
            self.assertIs((lhs - rhs), (lhs - rhs))
            self.assertIs((lhs * rhs), (lhs * rhs))
            self.assertIs((lhs / rhs), (lhs / rhs))
            self.assertIs((lhs ** rhs), (lhs ** rhs))
            self.assertIs((1 + rhs), (1 + rhs))
            self.assertIs((rhs + 1), (rhs + 1))
            self.assertIs((1 - rhs), (1 - rhs))
            self.assertIs((rhs - 1), (rhs - 1))
            self.assertIs((2 * rhs), (2 * rhs))
            self.assertIs((rhs * 2), (rhs * 2))
            self.assertIs((2 / rhs), (2 / rhs))
            self.assertIs((rhs / 2), (rhs / 2))
            self.assertIs((2 ** rhs), (2 ** rhs))
            self.assertIs((rhs ** 2), (rhs ** 2))
        self.assertIs((f + g) + (f + g), (f + g) + (f + g))

    def test_instance_caching_unary_ops(self):
        f = SomeFactor()
        self.assertIs(-f, -f)
        self.assertIs(--f, --f)
        self.assertIs(---f, ---f)

    def test_instance_caching_math_funcs(self):
        f = SomeFactor()
        for funcname in NUMEXPR_MATH_FUNCS:
            method = getattr(f, funcname)
            self.assertIs(method(), method())

    def test_parameterized_term(self):
        class SomeFactorParameterized(SomeFactor):
            params = ('a', 'b')
        f = SomeFactorParameterized(a=1, b=2)
        self.assertEqual(f.params, {'a': 1, 'b': 2})
        g = SomeFactorParameterized(a=1, b=3)
        h = SomeFactorParameterized(a=2, b=2)
        self.assertDifferentObjects(f, g, h)
        # Keyword argument order must not affect caching.
        f2 = SomeFactorParameterized(a=1, b=2)
        f3 = SomeFactorParameterized(b=2, a=1)
        self.assertSameObject(f, f2, f3)
        self.assertEqual(f.params['a'], 1)
        self.assertEqual(f.params['b'], 2)
        self.assertEqual(f.window_length, SomeFactor.window_length)
        self.assertEqual(f.inputs, tuple(SomeFactor.inputs))

    def test_bad_input(self):
        # Each missing piece of specification raises its dedicated error.
        class SomeFactor(Factor):
            dtype = float64_dtype
        class SomeFactorDefaultInputs(SomeFactor):
            inputs = (SomeDataSet.foo, SomeDataSet.bar)
        class SomeFactorDefaultLength(SomeFactor):
            window_length = 10
        class SomeFactorNoDType(SomeFactor):
            window_length = 10
            inputs = (SomeDataSet.foo,)
            dtype = NotSpecified
        with self.assertRaises(TermInputsNotSpecified):
            SomeFactor(window_length=1)
        with self.assertRaises(TermInputsNotSpecified):
            SomeFactorDefaultLength()
        with self.assertRaises(WindowLengthNotSpecified):
            SomeFactor(inputs=(SomeDataSet.foo,))
        with self.assertRaises(WindowLengthNotSpecified):
            SomeFactorDefaultInputs()
        with self.assertRaises(DTypeNotSpecified):
            SomeFactorNoDType()
        with self.assertRaises(InvalidDType):
            SomeFactor(dtype=1)
class SubDataSetTestCase(TestCase):
    """Tests for DataSet subclassing: subclasses get their own column
    objects (distinct identity) with the same names and dtypes."""

    def test_subdataset(self):
        some_dataset_map = {
            column.name: column for column in SomeDataSet.columns
        }
        sub_dataset_map = {
            column.name: column for column in SubDataSet.columns
        }
        # Same column names on parent and subclass.
        self.assertEqual(
            {column.name for column in SomeDataSet.columns},
            {column.name for column in SubDataSet.columns},
        )
        for k, some_dataset_column in some_dataset_map.items():
            sub_dataset_column = sub_dataset_map[k]
            self.assertIsNot(
                some_dataset_column,
                sub_dataset_column,
                'subclass column %r should not have the same identity as'
                ' the parent' % k,
            )
            self.assertEqual(
                some_dataset_column.dtype,
                sub_dataset_column.dtype,
                'subclass column %r should have the same dtype as the parent' %
                k,
            )

    def test_add_column(self):
        some_dataset_map = {
            column.name: column for column in SomeDataSet.columns
        }
        sub_dataset_new_col_map = {
            column.name: column for column in SubDataSetNewCol.columns
        }
        sub_col_names = {column.name for column in SubDataSetNewCol.columns}
        # check our extra col
        self.assertIn('qux', sub_col_names)
        self.assertEqual(
            sub_dataset_new_col_map['qux'].dtype,
            float64_dtype,
        )
        # Apart from 'qux', the columns match the parent's.
        self.assertEqual(
            {column.name for column in SomeDataSet.columns},
            sub_col_names - {'qux'},
        )
        for k, some_dataset_column in some_dataset_map.items():
            sub_dataset_column = sub_dataset_new_col_map[k]
            self.assertIsNot(
                some_dataset_column,
                sub_dataset_column,
                'subclass column %r should not have the same identity as'
                ' the parent' % k,
            )
            self.assertEqual(
                some_dataset_column.dtype,
                sub_dataset_column.dtype,
                'subclass column %r should have the same dtype as the parent' %
                k,
            )
|
unknown
|
codeparrot/codeparrot-clean
| ||
import datetime
from decimal import Decimal
from django.db.models.fields import (
AutoField, BinaryField, BooleanField, CharField, DateField, DateTimeField,
DecimalField, EmailField, FilePathField, FloatField, GenericIPAddressField,
IntegerField, IPAddressField, NullBooleanField, PositiveIntegerField,
PositiveSmallIntegerField, SlugField, SmallIntegerField, TextField,
TimeField, URLField,
)
from django.db.models.fields.files import FileField, ImageField
from django.test import SimpleTestCase
from django.utils.functional import lazy
class PromiseTest(SimpleTestCase):
    """Check that each model field's get_prep_value() resolves lazy
    ("promise") objects into concrete values of the expected type.

    Fields that store text additionally verify that a lazy non-string
    value (an int) is coerced to str.
    """

    def test_AutoField(self):
        lazy_func = lazy(lambda: 1, int)
        self.assertIsInstance(AutoField(primary_key=True).get_prep_value(lazy_func()), int)

    def test_BinaryField(self):
        lazy_func = lazy(lambda: b'', bytes)
        self.assertIsInstance(BinaryField().get_prep_value(lazy_func()), bytes)

    def test_BooleanField(self):
        lazy_func = lazy(lambda: True, bool)
        self.assertIsInstance(BooleanField().get_prep_value(lazy_func()), bool)

    def test_CharField(self):
        lazy_func = lazy(lambda: '', str)
        self.assertIsInstance(CharField().get_prep_value(lazy_func()), str)
        lazy_func = lazy(lambda: 0, int)
        self.assertIsInstance(CharField().get_prep_value(lazy_func()), str)

    def test_DateField(self):
        lazy_func = lazy(lambda: datetime.date.today(), datetime.date)
        self.assertIsInstance(DateField().get_prep_value(lazy_func()), datetime.date)

    def test_DateTimeField(self):
        lazy_func = lazy(lambda: datetime.datetime.now(), datetime.datetime)
        self.assertIsInstance(DateTimeField().get_prep_value(lazy_func()), datetime.datetime)

    def test_DecimalField(self):
        lazy_func = lazy(lambda: Decimal('1.2'), Decimal)
        self.assertIsInstance(DecimalField().get_prep_value(lazy_func()), Decimal)

    def test_EmailField(self):
        lazy_func = lazy(lambda: 'mailbox@domain.com', str)
        self.assertIsInstance(EmailField().get_prep_value(lazy_func()), str)

    def test_FileField(self):
        lazy_func = lazy(lambda: 'filename.ext', str)
        self.assertIsInstance(FileField().get_prep_value(lazy_func()), str)
        lazy_func = lazy(lambda: 0, int)
        self.assertIsInstance(FileField().get_prep_value(lazy_func()), str)

    def test_FilePathField(self):
        lazy_func = lazy(lambda: 'tests.py', str)
        self.assertIsInstance(FilePathField().get_prep_value(lazy_func()), str)
        lazy_func = lazy(lambda: 0, int)
        self.assertIsInstance(FilePathField().get_prep_value(lazy_func()), str)

    def test_FloatField(self):
        lazy_func = lazy(lambda: 1.2, float)
        self.assertIsInstance(FloatField().get_prep_value(lazy_func()), float)

    def test_ImageField(self):
        lazy_func = lazy(lambda: 'filename.ext', str)
        self.assertIsInstance(ImageField().get_prep_value(lazy_func()), str)

    def test_IntegerField(self):
        lazy_func = lazy(lambda: 1, int)
        self.assertIsInstance(IntegerField().get_prep_value(lazy_func()), int)

    def test_IPAddressField(self):
        lazy_func = lazy(lambda: '127.0.0.1', str)
        self.assertIsInstance(IPAddressField().get_prep_value(lazy_func()), str)
        lazy_func = lazy(lambda: 0, int)
        self.assertIsInstance(IPAddressField().get_prep_value(lazy_func()), str)

    def test_GenericIPAddressField(self):
        lazy_func = lazy(lambda: '127.0.0.1', str)
        self.assertIsInstance(GenericIPAddressField().get_prep_value(lazy_func()), str)
        lazy_func = lazy(lambda: 0, int)
        self.assertIsInstance(GenericIPAddressField().get_prep_value(lazy_func()), str)

    def test_NullBooleanField(self):
        lazy_func = lazy(lambda: True, bool)
        self.assertIsInstance(NullBooleanField().get_prep_value(lazy_func()), bool)

    def test_PositiveIntegerField(self):
        lazy_func = lazy(lambda: 1, int)
        self.assertIsInstance(PositiveIntegerField().get_prep_value(lazy_func()), int)

    def test_PositiveSmallIntegerField(self):
        lazy_func = lazy(lambda: 1, int)
        self.assertIsInstance(PositiveSmallIntegerField().get_prep_value(lazy_func()), int)

    def test_SlugField(self):
        lazy_func = lazy(lambda: 'slug', str)
        self.assertIsInstance(SlugField().get_prep_value(lazy_func()), str)
        lazy_func = lazy(lambda: 0, int)
        self.assertIsInstance(SlugField().get_prep_value(lazy_func()), str)

    def test_SmallIntegerField(self):
        lazy_func = lazy(lambda: 1, int)
        self.assertIsInstance(SmallIntegerField().get_prep_value(lazy_func()), int)

    def test_TextField(self):
        lazy_func = lazy(lambda: 'Abc', str)
        self.assertIsInstance(TextField().get_prep_value(lazy_func()), str)
        lazy_func = lazy(lambda: 0, int)
        self.assertIsInstance(TextField().get_prep_value(lazy_func()), str)

    def test_TimeField(self):
        lazy_func = lazy(lambda: datetime.datetime.now().time(), datetime.time)
        self.assertIsInstance(TimeField().get_prep_value(lazy_func()), datetime.time)

    def test_URLField(self):
        lazy_func = lazy(lambda: 'http://domain.com', str)
        self.assertIsInstance(URLField().get_prep_value(lazy_func()), str)
|
unknown
|
codeparrot/codeparrot-clean
| ||
#!/usr/bin/env python
"""
Main python eyetracking wrapper
- takes calibration and gaze video filenames as input
- controls calibration and gaze estimation workflow
Example
----
>>> mrgaze_batch.py <Calibration Video> <Gaze Video>
AUTHOR : Mike Tyszka
PLACE : Caltech
DATES : 2014-05-07 JMT From scratch
This file is part of mrgaze.
mrgaze is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
mrgaze is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with mrgaze. If not, see <http://www.gnu.org/licenses/>.
Copyright 2014 California Institute of Technology.
"""
import os
import sys
from mrgaze import utils, pupilometry, calibrate, report, config
def RunBatch(data_dir=None):
    """
    Run the gaze tracking pipeline over all sessions within a data directory

    Parameters
    ----
    data_dir : string or None
        Root data directory; each immediate subdirectory is treated as a
        subject/session and processed with RunSingle.

    Returns
    ----
    True after all sessions were processed, False if no data directory
    was provided.  Exits the process if the directory does not exist.
    """
    # Default data directory.  (The previous default of a mutable [] was
    # only ever used as a "not provided" marker; None is the safe sentinel
    # and is equally falsy, so callers are unaffected.)
    if not data_dir:
        print('* No data directory provided - exiting')
        return False
    # Check for missing directories
    if not os.path.isdir(data_dir):
        print('* Data directory does not exist - exiting')
        sys.exit(1)
    # Loop over all subject subdirectories of the data directory.
    # next(os.walk(...)) works on both Python 2 and 3, while the previous
    # os.walk(...).next() call was Python 2 only.
    for subj_sess in next(os.walk(data_dir))[1]:
        # Run single-session pipeline
        RunSingle(data_dir, subj_sess)
    # Clean exit
    return True
def RunSingle(data_dir, subj_sess):
    """
    Run the gaze tracking pipeline on a single gaze tracking session

    Expects <data_dir>/<subj_sess>/videos to exist; results and the
    report are written to <data_dir>/<subj_sess>/results.  Returns True
    on completion, False on missing arguments, missing configuration or
    an empty calibration matrix.
    """
    print('')
    print('Running single-session pipeline : ' + subj_sess)
    if not data_dir or not subj_sess:
        print('* Data or subject/session directory not provided - returning')
        return False
    # Subject/session directory name
    ss_dir = os.path.join(data_dir, subj_sess)
    # Video and results directory names for this subject/session
    ss_vid_dir = os.path.join(ss_dir, 'videos')
    ss_res_dir = os.path.join(ss_dir, 'results')
    # Load configuration from root directory or subj/sess video dir
    # If no config file exists, a default root config is created
    cfg = config.LoadConfig(data_dir, subj_sess)
    if not cfg:
        print('* Configuration file missing - returning')
        return False
    # Extract operational flags from config
    do_cal = cfg.getboolean('CALIBRATION', 'calibrate')
    # Run pipeline if video directory present
    if os.path.isdir(ss_vid_dir):
        # Create results subj/sess dir
        utils._mkdir(ss_res_dir)
        print('')
        print(' Calibration Pupilometry')
        print(' -----------------------')
        pupilometry.VideoPupilometry(data_dir, subj_sess, 'cal', cfg)
        if do_cal:
            print(' Create calibration model')
            C, central_fix = calibrate.AutoCalibrate(ss_res_dir, cfg)
            # C.any() is False for an all-zero calibration matrix
            if not C.any():
                print('* Empty calibration matrix detected - skipping')
                return False
        print('')
        print(' Gaze Pupilometry')
        print(' -----------------------')
        pupilometry.VideoPupilometry(data_dir, subj_sess, 'gaze', cfg)
        if do_cal:
            # C and central_fix are guaranteed to be bound here: they were
            # assigned in the do_cal branch above (same flag, no reassignment)
            print(' Calibrate pupilometry')
            calibrate.ApplyCalibration(ss_dir, C, central_fix, cfg)
        print('')
        print(' Generate Report')
        print(' ---------------')
        report.WriteReport(ss_dir, cfg)
    else:
        print('%s does not exist - skipping' % ss_vid_dir)
    print('')
    print('Completed single-session pipeline')
    return True
|
unknown
|
codeparrot/codeparrot-clean
| ||
#!/usr/bin/env python
# encoding: utf-8
"""
PyRoc.py
Created by Marcel Caraciolo on 2009-11-16.
Copyright (c) 2009 Federal University of Pernambuco. All rights reserved.
IMPORTANT:
Based on the original code by Eithon Cadag (http://www.eithoncadag.com/files/pyroc.txt)
Python Module for calculating the area under the receive operating characteristic curve, given a dataset.
0.1 - First Release
0.2 - Updated the code by adding new metrics for analysis with the confusion matrix.
"""
import random
import math
try:
import pylab
except:
print "error:\tcan't import pylab module, you must install the module:\n"
print "\tmatplotlib to plot charts!'\n"
def random_mixture_model(pos_mu=.6,pos_sigma=.1,neg_mu=.4,neg_sigma=.1,size=200):
    """Generate a random two-Gaussian score dataset.

    The first half of the returned list are positive examples (label 1)
    with scores drawn from N(pos_mu, pos_sigma); the second half are
    negative examples (label 0) drawn from N(neg_mu, neg_sigma).  Each
    entry is a (label, score) tuple.

    Note: range() and // replace the Python-2-only xrange() and integer
    "/" of the original; for integer "size" the results are identical.
    """
    half = size // 2
    pos = [(1, random.gauss(pos_mu, pos_sigma)) for _ in range(half)]
    neg = [(0, random.gauss(neg_mu, neg_sigma)) for _ in range(half)]
    return pos + neg
def plot_multiple_rocs_separate(rocList,title='', labels = None, equal_aspect = True):
    """ Plot multiples ROC curves as separate at the same painting area.

    Each curve is drawn into its own cell of a 4x4 subplot grid, so at
    most 16 curves can be displayed.  "labels" (one string per curve) is
    printed inside each cell; "equal_aspect" keeps a square aspect ratio.
    """
    pylab.clf()
    pylab.title(title)
    for ix, r in enumerate(rocList):
        # one subplot cell per curve (4x4 grid)
        ax = pylab.subplot(4,4,ix+1)
        pylab.ylim((0,1))
        pylab.xlim((0,1))
        # hide the per-cell tick labels to reduce clutter
        ax.set_yticklabels([])
        ax.set_xticklabels([])
        if equal_aspect:
            cax = pylab.gca()
            cax.set_aspect('equal')
        if not labels:
            labels = ['' for x in rocList]
        pylab.text(0.2,0.1,labels[ix],fontsize=8)
        pylab.plot([x[0] for x in r.derived_points],[y[1] for y in r.derived_points], 'r-',linewidth=2)
    pylab.show()
def _remove_duplicate_styles(rocList):
    """Ensure that every ROC curve in *rocList* uses a unique matplotlib
    linestyle.

    The first occurrence of each style is kept.  Duplicates are
    reassigned in place: a fixed list of preferred styles is consumed
    first (from its end), and once it is exhausted a random
    color/marker/line combination is assembled instead.
    """
    preferred = ['cx-', 'mx-', 'yx-', 'gx-', 'bx-', 'rx-']
    marker_chars = 'ov^>+xd'
    color_chars = 'bgrcmy'
    line_types = ['-', '-.', ':']
    used = []
    for curve in rocList:
        if curve.linestyle not in used:
            used.append(curve.linestyle)
            continue
        # Style already taken - pick a replacement that is still free.
        while True:
            if len(preferred) > 0:
                candidate = preferred.pop()
            else:
                candidate = ''.join(random.sample(color_chars, 1)
                        + random.sample(marker_chars, 1)
                        + random.sample(line_types, 1))
            if candidate not in used:
                curve.linestyle = candidate
                used.append(candidate)
                break
def plot_multiple_roc(rocList, title='', labels=None, include_baseline=False, equal_aspect=True):
    """Plot several ROC curves together on a single chart.

    Parameters:
        rocList: the list of ROCData objects
        title: the title of the chart
        labels: the legend labels of each ROC curve
        include_baseline: if True, also draw the random-classifier diagonal
        equal_aspect: keep an equal (square) aspect for the chart
    """
    pylab.clf()
    pylab.ylim((0, 1))
    pylab.xlim((0, 1))
    pylab.xticks(pylab.arange(0, 1.1, .1))
    pylab.yticks(pylab.arange(0, 1.1, .1))
    pylab.grid(True)
    if equal_aspect:
        pylab.gca().set_aspect('equal')
    pylab.xlabel("1 - Specificity")
    pylab.ylabel("Sensitivity")
    pylab.title(title)
    if not labels:
        labels = ['' for _ in rocList]
    # Make sure no two curves share a linestyle before drawing them.
    _remove_duplicate_styles(rocList)
    for idx, roc in enumerate(rocList):
        xs = [pt[0] for pt in roc.derived_points]
        ys = [pt[1] for pt in roc.derived_points]
        pylab.plot(xs, ys, roc.linestyle, linewidth=1, label=labels[idx])
    if include_baseline:
        pylab.plot([0.0, 1.0], [0.0, 1.0], 'k-', label='random')
    if labels:
        pylab.legend(loc='lower right')
    pylab.show()
def load_decision_function(path):
    """Load a decision-function dataset from *path*.

    Each non-empty line must contain two whitespace-separated fields: the
    class label (0 or 1) and the decision score.  Blank lines are skipped.

    Parameters:
        path: the dataset file path.
    Return:
        model_data: a list of (int class, float score) tuples.
    Raises:
        ValueError: on a malformed line (wrong field count or non-numeric).
    """
    model_data = []
    # `with` guarantees the handle is closed even when a line is malformed
    # (the original left the file open on any parse error).
    with open(path, 'r') as handle:
        for raw_line in handle:
            fields = raw_line.strip().split()
            if not fields:
                continue
            f_class, f_value = fields
            model_data.append((int(f_class), float(f_value)))
    return model_data
class ROCData(object):
    """Build and analyse an ROC curve for a scored, binary-labelled dataset.

    Data is in the following format: a list l of tuples t where:
        t[0] = 1 for positive class and t[0] = 0 for negative class
        t[1] = score
        t[2] = label (optional)
    """

    def __init__(self, data, linestyle='rx-'):
        """Store the data (sorted by descending score) and the plot style.

        Parameters:
            data: a list l of tuples t (l = [t_0, t_1, ... t_n]) where:
                t[0] = 1 for positive class and 0 for negative class
                t[1] = a score
                t[2] = any label (optional)
            linestyle: the matplotlib style string for plots.
        Note: the ROCData is still usable w/o matplotlib.  The AUC is still
        available, but plots cannot be generated.
        """
        # key=/reverse= replaces the Python-2-only cmp() sort.  Both sorts
        # are stable, so the resulting order is identical, and this also
        # works on Python 3.
        self.data = sorted(data, key=lambda record: record[1], reverse=True)
        self.linestyle = linestyle
        self.auc()  # seed self.derived_points with the default full ROC

    def auc(self, fpnum=0):
        """Return the area under the curve via the trapezoidal rule.

        If fpnum is supplied, it will calculate a partial AUC, up to the
        number of false positives in fpnum (the partial AUC is scaled to
        between 0 and 1).  It assumes the positive class is expected to
        have the higher of the scores.

        Parameters:
            fpnum: the cumulative FP count (0 means "use all data").
        Return:
            the (partial) AUC; also refreshes self.derived_points.
        """
        fps_count = 0
        relevant_pauc = []
        current_index = 0
        max_n = len([x for x in self.data if x[0] == 0])
        if fpnum == 0:
            relevant_pauc = list(self.data)
        else:
            # Clamp to the number of available negatives, then collect
            # records until that many false positives have been seen.
            # (The original fell through with an empty record list when
            # fpnum exceeded max_n, silently returning an AUC of 0.)
            fpnum = min(fpnum, max_n)
            while fps_count < fpnum:
                relevant_pauc.append(self.data[current_index])
                if self.data[current_index][0] == 0:
                    fps_count += 1
                current_index += 1
        total_n = len([x for x in relevant_pauc if x[0] == 0])
        total_p = len(relevant_pauc) - total_n

        # Convert to points in a ROC: a new point is emitted every time the
        # decision score changes, so ties collapse onto a single point.
        previous_df = -1000000.0
        current_index = 0
        points = []
        tp_count, fp_count = 0.0, 0.0
        tpr, fpr = 0, 0
        while current_index < len(relevant_pauc):
            df = relevant_pauc[current_index][1]
            if previous_df != df:
                points.append((fpr, tpr, fp_count))
            if relevant_pauc[current_index][0] == 0:
                fp_count += 1
            elif relevant_pauc[current_index][0] == 1:
                tp_count += 1
            fpr = fp_count / total_n
            tpr = tp_count / total_p
            previous_df = df
            current_index += 1
        points.append((fpr, tpr, fp_count))  # add the last point
        points.sort(key=lambda i: (i[0], i[1]))
        self.derived_points = points
        return self._trapezoidal_rule(points)

    def _trapezoidal_rule(self, curve_pts):
        """Calculate the area under the ROC polyline by trapezoids."""
        cum_area = 0.0
        for ix, cur_pt in enumerate(curve_pts[0:-1]):
            next_pt = curve_pts[ix + 1]
            cum_area += ((cur_pt[1] + next_pt[1]) / 2.0) * (next_pt[0] - cur_pt[0])
        return cum_area

    def calculateStandardError(self, fpnum=0):
        """Return the standard error associated with the curve.

        Uses the Hanley-style approximation built from the AUC and the
        positive / negative class sizes.

        Parameters:
            fpnum: the cumulative FP count (forwarded to auc()).
        Return:
            the standard error as a float.
        """
        area = self.auc(fpnum)
        # real positive cases
        Na = len([x for x in self.data if x[0] == 1])
        # real negative cases
        Nn = len([x for x in self.data if x[0] == 0])
        Q1 = area / (2.0 - area)
        Q2 = 2 * area * area / (1.0 + area)
        return math.sqrt((area * (1.0 - area) + (Na - 1.0) * (Q1 - area * area) +
                          (Nn - 1.0) * (Q2 - area * area)) / (Na * Nn))

    def plot(self, title='', include_baseline=False, equal_aspect=True):
        """Generate a plot of the ROC curve (requires matplotlib/pylab).

        Parameters:
            title: title of the chart.
            include_baseline: add the random-baseline diagonal if True.
            equal_aspect: force a square aspect ratio.
        """
        pylab.clf()
        pylab.plot([x[0] for x in self.derived_points],
                   [y[1] for y in self.derived_points], self.linestyle)
        if include_baseline:
            pylab.plot([0.0, 1.0], [0.0, 1.0], 'k-.')
        pylab.ylim((0, 1))
        pylab.xlim((0, 1))
        pylab.xticks(pylab.arange(0, 1.1, .1))
        pylab.yticks(pylab.arange(0, 1.1, .1))
        pylab.grid(True)
        if equal_aspect:
            cax = pylab.gca()
            cax.set_aspect('equal')
        pylab.xlabel('1 - Specificity')
        pylab.ylabel('Sensitivity')
        pylab.title(title)
        pylab.show()

    def confusion_matrix(self, threshold, do_print=False):
        """Return the confusion matrix (in dictionary form) for a threshold.

        All elements with a score >= threshold are predicted positive (1),
        all others negative (0).

        Parameters:
            threshold: threshold to check the decision function.
            do_print: if True, print the confusion matrix to the screen.
        Return:
            a dictionary with the 'TP', 'FP', 'FN', 'TN' counts.
        """
        pos_points = [x for x in self.data if x[1] >= threshold]
        neg_points = [x for x in self.data if x[1] < threshold]
        tp, fp, fn, tn = self._calculate_counts(pos_points, neg_points)
        if do_print:
            print("\t Actual class")
            print("\t+(1)\t-(0)")
            print("+(1)\t%i\t%i\tPredicted" % (tp, fp))
            print("-(0)\t%i\t%i\tclass" % (fn, tn))
        return {'TP': tp, 'FP': fp, 'FN': fn, 'TN': tn}

    def evaluateMetrics(self, matrix, metric=None, do_print=False):
        """Return the metrics evaluated from the confusion matrix.

        Parameters:
            matrix: the confusion matrix (as produced by confusion_matrix).
            metric: unused; kept for backward compatibility.
            do_print: if True, print the metrics to the screen.
        Return:
            a dictionary with Accuracy, Sensitivity, Specificity,
            Efficiency, Positive/Negative Predictive Value and the
            Phi Coefficient.
        """
        accuracy = (matrix['TP'] + matrix['TN']) / float(sum(matrix.values()))
        sensitivity = (matrix['TP']) / float(matrix['TP'] + matrix['FN'])
        specificity = (matrix['TN']) / float(matrix['TN'] + matrix['FP'])
        efficiency = (sensitivity + specificity) / 2.0
        positivePredictiveValue = matrix['TP'] / float(matrix['TP'] + matrix['FP'])
        NegativePredictiveValue = matrix['TN'] / float(matrix['TN'] + matrix['FN'])
        # Guard the *denominator* against zero.  The original applied
        # `or 1.0` to the whole quotient, which both failed to prevent
        # ZeroDivisionError and turned a legitimate phi of 0 into 1.0.
        denominator = math.sqrt((matrix['TP'] + matrix['FP']) *
                                (matrix['TP'] + matrix['FN']) *
                                (matrix['TN'] + matrix['FP']) *
                                (matrix['TN'] + matrix['FN'])) or 1.0
        PhiCoefficient = (matrix['TP'] * matrix['TN'] -
                          matrix['FP'] * matrix['FN']) / denominator
        if do_print:
            print('Sensitivity: %s' % sensitivity)
            print('Specificity: %s' % specificity)
            print('Efficiency: %s' % efficiency)
            print('Accuracy: %s' % accuracy)
            print('PositivePredictiveValue: %s' % positivePredictiveValue)
            print('NegativePredictiveValue %s' % NegativePredictiveValue)
            print('PhiCoefficient %s' % PhiCoefficient)
        return {'SENS': sensitivity, 'SPEC': specificity, 'ACC': accuracy, 'EFF': efficiency,
                'PPV': positivePredictiveValue, 'NPV': NegativePredictiveValue, 'PHI': PhiCoefficient}

    def _calculate_counts(self, pos_data, neg_data):
        """Count true/false positives and true/false negatives."""
        tp_count = len([x for x in pos_data if x[0] == 1])
        fp_count = len([x for x in pos_data if x[0] == 0])
        fn_count = len([x for x in neg_data if x[0] == 1])
        tn_count = len([x for x in neg_data if x[0] == 0])
        return tp_count, fp_count, fn_count, tn_count
if __name__ == '__main__':
    # Command-line entry point: load a dataset, report the (partial) AUC and
    # its standard error, dump the ROC points, and optionally plot the curve.
    # Print statements converted to single-argument print() calls so the
    # script runs on both Python 2 and Python 3.
    print("PyRoC - ROC Curve Generator")
    print("By Marcel Pinheiro Caraciolo (@marcelcaraciolo)")
    print("http://aimotion.bogspot.com\n")
    from optparse import OptionParser
    parser = OptionParser()
    parser.add_option('-f', '--file', dest='origFile', help="Path to a file with the class and decision function. The first column of each row is the class, and the second the decision score.")
    parser.add_option("-n", "--max fp", dest="fp_n", default=0, help="Maximum false positives to calculate up to (for partial AUC).")
    parser.add_option("-p", "--plot", action="store_true", dest='plotFlag', default=False, help="Plot the ROC curve (matplotlib required)")
    parser.add_option("-t", '--title', dest='ptitle', default='', help='Title of plot.')
    (options, args) = parser.parse_args()
    if (not options.origFile):
        # No input file given: show usage and stop.
        parser.print_help()
        exit()
    df_data = load_decision_function(options.origFile)
    roc = ROCData(df_data)
    roc_n = int(options.fp_n)
    print("ROC AUC: %s" % (str(roc.auc(roc_n)),))
    print('Standard Error: %s' % (str(roc.calculateStandardError(roc_n)),))
    print('')
    for pt in roc.derived_points:
        print("%s %s" % (pt[0], pt[1]))
    if options.plotFlag:
        roc.plot(options.ptitle, True, True)
|
unknown
|
codeparrot/codeparrot-clean
| ||
#!/usr/bin/env python
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
lastchange.py -- Chromium revision fetching utility.
"""
import re
import optparse
import os
import subprocess
import sys
class VersionInfo(object):
  """Value object holding a repository URL, root and revision."""

  def __init__(self, url, root, revision):
    self.url, self.root, self.revision = url, root, revision
def FetchSVNRevision(directory):
  """
  Fetch the Subversion branch and revision for a given directory.

  Errors are swallowed.

  Returns:
    a VersionInfo object or None on error.
  """
  try:
    proc = subprocess.Popen(['svn', 'info'],
                            stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE,
                            cwd=directory,
                            # presumably svn only resolves through the shell
                            # on Windows -- TODO confirm
                            shell=(sys.platform=='win32'))
  except OSError:
    # command is apparently either not installed or not executable.
    return None
  if not proc:
    return None
  attrs = {}
  # Parse `svn info`'s "Key: value" lines into a dict; blank lines skipped.
  for line in proc.stdout:
    line = line.strip()
    if not line:
      continue
    key, val = line.split(': ', 1)
    attrs[key] = val
  try:
    url = attrs['URL']
    root = attrs['Repository Root']
    revision = attrs['Revision']
  except KeyError:
    # Missing keys: not an svn checkout (or unexpected `svn info` output).
    return None
  return VersionInfo(url, root, revision)
def RunGitCommand(directory, command):
  """
  Launches git subcommand.

  Errors are swallowed.

  Returns:
    process object or None.
  """
  full_command = ['git'] + command
  # Force shell usage under cygwin & win32. This is a workaround for
  # mysterious loss of cwd while invoking cygwin's git.
  # We can't just pass shell=True to Popen, as under win32 this will
  # cause CMD to be used, while we explicitly want a cygwin shell.
  if sys.platform in ('cygwin', 'win32'):
    full_command = ['sh', '-c', ' '.join(full_command)]
  try:
    return subprocess.Popen(full_command,
                            stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE,
                            cwd=directory)
  except OSError:
    return None
def FetchGitRevision(directory):
  """
  Fetch the Git hash for a given directory.

  Errors are swallowed.

  Returns:
    a VersionInfo object or None on error.
  """
  proc = RunGitCommand(directory, ['rev-parse', 'HEAD'])
  if not proc:
    return None
  output = proc.communicate()[0].strip()
  if proc.returncode != 0 or not output:
    return None
  # Abbreviate to a 7-character short hash.
  return VersionInfo('git', 'git', output[:7])
def IsGitSVN(directory):
  """
  Checks whether git-svn has been set up.

  Errors are swallowed.

  Returns:
    whether git-svn has been set up.
  """
  # To test whether git-svn has been set up, query the config for any
  # svn-related configuration. This command exits with an error code
  # if there aren't any matches, so the exit status alone answers the
  # question and the output can be ignored.
  proc = RunGitCommand(directory, ['config', '--get-regexp', '^svn'])
  return bool(proc) and proc.wait() == 0
def FetchGitSVNURL(directory):
  """
  Fetch URL of SVN repository bound to git.

  Errors are swallowed.

  Returns:
    SVN URL ('' when it cannot be determined).
  """
  if not IsGitSVN(directory):
    return ''
  proc = RunGitCommand(directory, ['svn', 'info', '--url'])
  if not proc:
    return ''
  output = proc.communicate()[0].strip()
  if proc.returncode != 0:
    return ''
  # Take the first line that looks like a URL (scheme://...).
  match = re.search(r'^\w+://.*$', output, re.M)
  return match.group(0) if match else ''
def FetchGitSVNRoot(directory):
  """
  Fetch root of SVN repository bound to git.

  Errors are swallowed.

  Returns:
    SVN root repository ('' when it cannot be determined).
  """
  if not IsGitSVN(directory):
    return ''
  proc = RunGitCommand(directory,
                       ['config', '--get-regexp', '^svn-remote.svn.url$'])
  if not proc:
    return ''
  output = proc.communicate()[0].strip()
  if proc.returncode != 0:
    return ''
  # Zero return code implies presence of requested configuration variable.
  # Its value is the last whitespace-separated field of the output.
  match = re.search(r'\S+$', output)
  return match.group(0) if match else ''
def LookupGitSVNRevision(directory, depth):
  """
  Fetch the Git-SVN identifier for the local tree.

  Parses first |depth| commit messages.

  Errors are swallowed.
  """
  if not IsGitSVN(directory):
    return None
  proc = RunGitCommand(directory, ['log', '-' + str(depth)])
  if not proc:
    return None
  pattern = re.compile(r'^\s*git-svn-id:\s+(\S+)@(\d+)')
  for line in proc.stdout:
    match = pattern.match(line)
    if match and match.group(2):
      proc.stdout.close()  # Cut pipe for fast exit.
      return match.group(2)
  return None
def IsGitSVNDirty(directory):
  """
  Checks whether our git-svn tree contains clean trunk or some branch.

  Errors are swallowed.
  """
  # For git branches the last commit message is either
  # some local commit or a merge.
  # So a depth-1 lookup finding no git-svn-id line means "dirty".
  return LookupGitSVNRevision(directory, 1) is None
def FetchGitSVNRevision(directory):
  """
  Fetch the Git-SVN identifier for the local tree.

  Errors are swallowed.
  """
  # We assume that at least first 999 commit messages contain svn evidence.
  revision = LookupGitSVNRevision(directory, 999)
  if not revision:
    return None
  if IsGitSVNDirty(directory):
    revision += '-dirty'
  return VersionInfo(FetchGitSVNURL(directory),
                     FetchGitSVNRoot(directory),
                     revision)
def FetchVersionInfo(default_lastchange, directory=None):
  """
  Returns the last change (in the form of a branch, revision tuple),
  from some appropriate revision control system.

  Tries svn, then git-svn, then plain git; falls back to the
  default_lastchange file, then to a placeholder.
  """
  version_info = (FetchSVNRevision(directory) or
                  FetchGitSVNRevision(directory) or FetchGitRevision(directory))
  if version_info:
    return version_info
  if default_lastchange and os.path.exists(default_lastchange):
    # 'with' closes the handle; the original leaked the file object.
    with open(default_lastchange, 'r') as f:
      revision = f.read().strip()
    return VersionInfo(None, None, revision)
  return VersionInfo('unknown', '', '0')
def WriteIfChanged(file_name, contents):
  """
  Writes the specified contents to the specified file_name
  iff the contents are different than the current contents.

  Uses 'with' blocks so the file handles are closed deterministically
  (the original leaked both the read and write handles).
  """
  try:
    with open(file_name, 'r') as f:
      old_contents = f.read()
  except EnvironmentError:
    # File missing/unreadable: fall through and (re)create it.
    pass
  else:
    if contents == old_contents:
      return
    os.unlink(file_name)
  with open(file_name, 'w') as f:
    f.write(contents)
def main(argv=None):
  """Command-line driver: resolve the last change and print or write it.

  Returns 0 on success; exits with status 2 on bad arguments.
  """
  if argv is None:
    argv = sys.argv
  parser = optparse.OptionParser(usage="lastchange.py [options]")
  parser.add_option("-d", "--default-lastchange", metavar="FILE",
                    help="default last change input FILE")
  parser.add_option("-o", "--output", metavar="FILE",
                    help="write last change to FILE")
  parser.add_option("--revision-only", action='store_true',
                    help="just print the SVN revision number")
  opts, args = parser.parse_args(argv[1:])
  out_file = opts.output
  # A single positional argument may also name the output file.
  # (Replaces the original while-loop, which could only ever pop one arg.)
  if args and out_file is None:
    out_file = args.pop(0)
  if args:
    sys.stderr.write('Unexpected arguments: %r\n\n' % args)
    parser.print_help()
    sys.exit(2)
  version_info = FetchVersionInfo(opts.default_lastchange)
  if opts.revision_only:
    # Single-argument print() works on both Python 2 and Python 3.
    print(version_info.revision)
  else:
    contents = "LASTCHANGE=%s\n" % version_info.revision
    if out_file:
      WriteIfChanged(out_file, contents)
    else:
      sys.stdout.write(contents)
  return 0
if __name__ == '__main__':
  sys.exit(main())  # propagate main()'s return value as the exit status
|
unknown
|
codeparrot/codeparrot-clean
| ||
from kivy.app import App
from kivy.uix.gridlayout import GridLayout
from kivy.uix.boxlayout import BoxLayout
from kivy.uix.label import Label
from kivy.uix.behaviors import FocusBehavior
from kivy.graphics import Color, Rectangle
class FocusWithColor(FocusBehavior):
    '''Focusable mixin that tints its background red while focused and
    translucent white otherwise.
    '''

    _color = None  # Color canvas instruction controlling the tint
    _rect = None   # Rectangle canvas instruction tracking the geometry

    def __init__(self, **kwargs):
        super(FocusWithColor, self).__init__(**kwargs)
        with self.canvas:
            self._color = Color(1, 1, 1, .2)
            self._rect = Rectangle(size=self.size, pos=self.pos)
        self.bind(size=self._update_rect, pos=self._update_rect)

    def _update_rect(self, instance, value):
        # Keep the background rectangle glued to the widget.
        self._rect.pos = instance.pos
        self._rect.size = instance.size

    def on_focused(self, instance, value, *largs):
        focused_rgba = [1, 0, 0, .2]
        unfocused_rgba = [1, 1, 1, .2]
        self._color.rgba = focused_rgba if value else unfocused_rgba
class FocusLabel(FocusWithColor, Label):
    '''A label, which in addition to turn red when focused, it also sets the
    keyboard input to the text of the label.
    '''

    def keyboard_on_key_down(self, window, keycode, text, modifiers):
        '''Give super() first crack at the key so tab cycling by
        FocusBehavior keeps working; only keys it does not consume
        update the label text.
        '''
        handled = super(FocusLabel, self).keyboard_on_key_down(
            window, keycode, text, modifiers)
        if not handled:
            self.text = keycode[1]
        return True
class FocusGridLayout(FocusWithColor, GridLayout):
    # GridLayout variant that highlights itself when focused.
    pass
class FocusBoxLayout(FocusWithColor, BoxLayout):
    # BoxLayout variant that highlights itself when focused.
    pass
class FocusApp(App):
    # Demo application: two focusable 4-column grids of labels flanked by
    # two focusable side labels, with a customized tab-focus chain.

    def build(self):
        """Build the widget tree and wire up the focus behaviour."""
        root = FocusBoxLayout(padding=[10, 10], spacing=10)
        self.grid1 = grid1 = FocusGridLayout(cols=4, padding=[10, 10],
                                             spacing=10)
        self.grid2 = grid2 = FocusGridLayout(cols=4, padding=[10, 10],
                                             spacing=10)
        root.add_widget(FocusLabel(text='Left', size_hint_x=0.4))
        root.add_widget(grid1)
        root.add_widget(grid2)
        root.add_widget(FocusLabel(text='Right', size_hint_x=0.4))
        for i in range(40):
            grid1.add_widget(FocusLabel(text='l' + str(i)))
        for i in range(40):
            grid2.add_widget(FocusLabel(text='r' + str(i)))
        # make elements 29, 9 un-focusable. The widgets are displayed in
        # reverse order, so 9 = 39 - 10
        # NOTE(review): the child indices used below (30, 15, 14, 34) do not
        # all match the element numbers quoted in these comments -- verify
        # which widgets were actually meant to be skipped.
        grid2.children[30].text = grid1.children[14].text =\
            grid2.children[15].text = grid1.children[34].text = 'Skip me'
        grid2.children[15].is_focusable = False
        grid2.children[30].is_focusable = False
        # similarly, make 39 - 14 = 25, and 5 un-focusable
        grid1.children[14].is_focusable = False
        grid1.children[34].is_focusable = False
        # don't move focus passed this element
        # NOTE(review): assigning StopIteration to focus_next appears to be
        # the mechanism for terminating the focus chain -- confirm against
        # the FocusBehavior docs of the installed Kivy version.
        grid2.children[35].focus_next = StopIteration
        grid2.children[35].text = 'Stop forward'
        # exchange the links between the sides so that it'll skip to the other
        # side in the middle. Remember that children are displayed reversed
        # in layouts.
        grid1.children[10].focus_next = grid2.children[9]
        grid2.children[10].focus_next = grid1.children[9]
        grid1.children[10].text = '-->'
        grid2.children[10].text = '<--'
        return root
if __name__ == '__main__':
    FocusApp().run()  # start the Kivy event loop with this demo app
|
unknown
|
codeparrot/codeparrot-clean
| ||
# Copyright (c) 2005-2007 The Regents of The University of Michigan
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Nathan Binkert
from m5.params import *
from m5.proxy import *
from Device import BasicPioDevice
class AlphaBackdoor(BasicPioDevice):
    # m5/gem5 SimObject configuration class for the Alpha console backdoor
    # device.  The assignments below are config-DSL declarations, not plain
    # attributes: Param.* declares a typed simulation parameter and
    # Parent.* is a proxy resolved when the config tree is instantiated.
    type = 'AlphaBackdoor'
    cpu = Param.BaseCPU(Parent.cpu[0], "Processor")
    disk = Param.SimpleDisk("Simple Disk")
    terminal = Param.Terminal(Parent.any, "The console terminal")
    system = Param.AlphaSystem(Parent.any, "system object")
|
unknown
|
codeparrot/codeparrot-clean
| ||
/*
* Copyright 2010-2024 JetBrains s.r.o. and Kotlin Programming Language contributors.
* Use of this source code is governed by the Apache 2.0 license that can be found in the license/LICENSE.txt file.
*/
package org.jetbrains.kotlin.analysis.api.platform.projectStructure
import com.intellij.openapi.components.serviceOrNull
import com.intellij.openapi.project.Project
import org.jetbrains.kotlin.analysis.api.KaPlatformInterface
import org.jetbrains.kotlin.analysis.api.platform.KotlinOptionalPlatformComponent
import org.jetbrains.kotlin.analysis.api.projectStructure.KaModule
import org.jetbrains.kotlin.extensions.ExtensionPointDescriptor
import org.jetbrains.kotlin.extensions.ProjectExtensionDescriptor
/**
* [KotlinCompilerPluginsProvider] provides information about registered compiler plugins.
*
* The component is optional. If [KotlinCompilerPluginsProvider] is not implemented, the Analysis API engine will assume that no compiler
* plugins are registered.
*/
@KaPlatformInterface
public interface KotlinCompilerPluginsProvider : KotlinOptionalPlatformComponent {
    @KaPlatformInterface
    public enum class CompilerPluginType {
        /**
         * An assign expression alterer extension. See `FirAssignExpressionAltererExtension`.
         */
        ASSIGNMENT,
    }

    /**
     * Returns a possibly empty list of extensions of a base [extensionType] that compiler plugins have registered for [module].
     *
     * These extensions are used in addition to those provided by the extension descriptor's [ProjectExtensionDescriptor.getInstances].
     */
    public fun <T : Any> getRegisteredExtensions(module: KaModule, extensionType: ExtensionPointDescriptor<T>): List<T>

    /**
     * Returns `true` if at least one plugin with the requested [pluginType] is registered, and `false` otherwise.
     */
    public fun isPluginOfTypeRegistered(module: KaModule, pluginType: CompilerPluginType): Boolean

    @KaPlatformInterface
    public companion object {
        /** Returns the project-level provider, or `null` when none is registered (the component is optional). */
        public fun getInstance(project: Project): KotlinCompilerPluginsProvider? = project.serviceOrNull()
    }
}
|
kotlin
|
github
|
https://github.com/JetBrains/kotlin
|
analysis/analysis-api-platform-interface/src/org/jetbrains/kotlin/analysis/api/platform/projectStructure/KotlinCompilerPluginsProvider.kt
|
# synstructure
[](https://crates.io/crates/synstructure)
[](https://docs.rs/synstructure)
[](https://github.com/mystor/synstructure/actions)
[](https://blog.rust-lang.org/2018/12/06/Rust-1.31-and-rust-2018.html)
> NOTE: What follows is an excerpt from the module level documentation. For full
> details read the docs on [docs.rs](https://docs.rs/synstructure/)
This crate provides helper types for matching against enum variants, and
extracting bindings to each of the fields in the deriving Struct or Enum in
a generic way.
If you are writing a `#[derive]` which needs to perform some operation on
every field, then you have come to the right place!
# Example: `WalkFields`
### Trait Implementation
```rust
pub trait WalkFields: std::any::Any {
fn walk_fields(&self, walk: &mut FnMut(&WalkFields));
}
impl WalkFields for i32 {
fn walk_fields(&self, _walk: &mut FnMut(&WalkFields)) {}
}
```
### Custom Derive
```rust
#[macro_use]
extern crate synstructure;
#[macro_use]
extern crate quote;
extern crate proc_macro2;
fn walkfields_derive(s: synstructure::Structure) -> proc_macro2::TokenStream {
let body = s.each(|bi| quote!{
walk(#bi)
});
s.bound_impl(quote!(example_traits::WalkFields), quote!{
fn walk_fields(&self, walk: &mut FnMut(&example_traits::WalkFields)) {
match *self { #body }
}
})
}
decl_derive!([WalkFields] => walkfields_derive);
/*
* Test Case
*/
fn main() {
test_derive! {
walkfields_derive {
enum A<T> {
B(i32, T),
C(i32),
}
}
expands to {
const _: () = {
extern crate example_traits;
impl<T> example_traits::WalkFields for A<T>
where T: example_traits::WalkFields
{
fn walk_fields(&self, walk: &mut FnMut(&example_traits::WalkFields)) {
match *self {
A::B(ref __binding_0, ref __binding_1,) => {
{ walk(__binding_0) }
{ walk(__binding_1) }
}
A::C(ref __binding_0,) => {
{ walk(__binding_0) }
}
}
}
}
};
}
}
}
```
# Example: `Interest`
### Trait Implementation
```rust
pub trait Interest {
fn interesting(&self) -> bool;
}
impl Interest for i32 {
fn interesting(&self) -> bool { *self > 0 }
}
```
### Custom Derive
```rust
#[macro_use]
extern crate synstructure;
#[macro_use]
extern crate quote;
extern crate proc_macro2;
fn interest_derive(mut s: synstructure::Structure) -> proc_macro2::TokenStream {
let body = s.fold(false, |acc, bi| quote!{
#acc || example_traits::Interest::interesting(#bi)
});
s.bound_impl(quote!(example_traits::Interest), quote!{
fn interesting(&self) -> bool {
match *self {
#body
}
}
})
}
decl_derive!([Interest] => interest_derive);
/*
* Test Case
*/
fn main() {
test_derive!{
interest_derive {
enum A<T> {
B(i32, T),
C(i32),
}
}
expands to {
const _: () = {
extern crate example_traits;
impl<T> example_traits::Interest for A<T>
where T: example_traits::Interest
{
fn interesting(&self) -> bool {
match *self {
A::B(ref __binding_0, ref __binding_1,) => {
false ||
example_traits::Interest::interesting(__binding_0) ||
example_traits::Interest::interesting(__binding_1)
}
A::C(ref __binding_0,) => {
false ||
example_traits::Interest::interesting(__binding_0)
}
}
}
}
};
}
}
}
```
For more example usage, consider investigating the `abomonation_derive` crate,
which makes use of this crate, and is fairly simple.
|
unknown
|
github
|
https://github.com/nodejs/node
|
deps/crates/vendor/synstructure/README.md
|
# Packaging script for the oauth2-proxy-cookie distribution (a single
# module, not a package -- see py_modules below).
# NOTE(review): find_packages is imported but never used.
from setuptools import setup, find_packages
from codecs import open
from os import path

# Directory containing this setup.py; used to locate README.rst.
here = path.abspath(path.dirname(__file__))

# Get the long description from the README file
with open(path.join(here, 'README.rst'), encoding='utf-8') as f:
    long_description = f.read()

setup(
    name='oauth2-proxy-cookie',
    version='0.1.0',
    description='bitly/oauth2-proxy compatible library to decode and validate '
    'authenticated cookie.',
    long_description=long_description,
    url='https://github.com/isra17/oauth2-proxy-cookie',
    author='isra17',
    author_email='isra017@gmail.com',
    license='LGPLv3+',
    classifiers=[
        'Development Status :: 3 - Alpha',
        'Intended Audience :: Developers',
        'Topic :: Internet :: WWW/HTTP :: HTTP Servers',
        'License :: OSI Approved :: GNU Lesser General Public License v3 or later (LGPLv3+)',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 3',
    ],
    keywords='oauth2-proxy authentication',
    py_modules=['oauth2_proxy_cookie'],
    install_requires=['six', 'flask'], )
|
unknown
|
codeparrot/codeparrot-clean
| ||
/*
* Copyright 2012-present the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.boot.configurationmetadata;
import java.io.IOException;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.util.List;
import org.junit.jupiter.api.Test;
import org.springframework.boot.configurationmetadata.json.JSONException;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatIllegalStateException;
/**
* Tests for {@link JsonReader}.
*
* @author Stephane Nicoll
*/
class JsonReaderTests extends AbstractConfigurationMetadataTests {
private static final Charset DEFAULT_CHARSET = StandardCharsets.UTF_8;
private final JsonReader reader = new JsonReader();
	@Test
	void emptyMetadata() throws IOException {
		// An empty metadata file yields no sources and no items.
		RawConfigurationMetadata rawMetadata = readFor("empty");
		assertThat(rawMetadata.getSources()).isEmpty();
		assertThat(rawMetadata.getItems()).isEmpty();
	}
	@Test
	void invalidMetadata() {
		// Malformed JSON surfaces as IllegalStateException wrapping the
		// underlying JSONException.
		assertThatIllegalStateException().isThrownBy(() -> readFor("invalid")).withCauseInstanceOf(JSONException.class);
	}
	@Test
	void emptyGroupName() throws IOException {
		// Items declared under an empty group keep their bare property names.
		RawConfigurationMetadata rawMetadata = readFor("empty-groups");
		List<ConfigurationMetadataItem> items = rawMetadata.getItems();
		assertThat(items).hasSize(2);
		ConfigurationMetadataItem name = items.get(0);
		assertProperty(name, "name", "name", String.class, null);
		ConfigurationMetadataItem dotTitle = items.get(1);
		assertProperty(dotTitle, "title", "title", String.class, null);
	}
	@Test
	void simpleMetadata() throws IOException {
		// End-to-end check of a representative metadata file: sources, items
		// and hints are all parsed together with their attributes.
		RawConfigurationMetadata rawMetadata = readFor("foo");
		List<ConfigurationMetadataSource> sources = rawMetadata.getSources();
		assertThat(sources).hasSize(2);
		List<ConfigurationMetadataItem> items = rawMetadata.getItems();
		assertThat(items).hasSize(4);
		List<ConfigurationMetadataHint> hints = rawMetadata.getHints();
		assertThat(hints).hasSize(1);
		ConfigurationMetadataSource source = sources.get(0);
		assertSource(source, "spring.foo", "org.acme.Foo", "org.acme.config.FooApp");
		assertThat(source.getSourceMethod()).isEqualTo("foo()");
		assertThat(source.getDescription()).isEqualTo("This is Foo.");
		assertThat(source.getShortDescription()).isEqualTo("This is Foo.");
		ConfigurationMetadataItem item = items.get(0);
		assertProperty(item, "spring.foo.name", "name", String.class, null);
		assertItem(item, "org.acme.Foo");
		ConfigurationMetadataItem item2 = items.get(1);
		assertProperty(item2, "spring.foo.description", "description", String.class, "FooBar");
		assertThat(item2.getDescription()).isEqualTo("Foo description.");
		assertThat(item2.getShortDescription()).isEqualTo("Foo description.");
		assertThat(item2.getSourceMethod()).isNull();
		assertItem(item2, "org.acme.Foo");
		// The hint carries a value hint (whose multi-line description is
		// truncated to its first sentence for the short description) plus a
		// "handle-as" value provider.
		ConfigurationMetadataHint hint = hints.get(0);
		assertThat(hint.getId()).isEqualTo("spring.foo.counter");
		assertThat(hint.getValueHints()).hasSize(1);
		ValueHint valueHint = hint.getValueHints().get(0);
		assertThat(valueHint.getValue()).isEqualTo(42);
		assertThat(valueHint.getDescription())
			.isEqualTo("Because that's the answer to any question, choose it. \nReally.");
		assertThat(valueHint.getShortDescription()).isEqualTo("Because that's the answer to any question, choose it.");
		assertThat(hint.getValueProviders()).hasSize(1);
		ValueProvider valueProvider = hint.getValueProviders().get(0);
		assertThat(valueProvider.getName()).isEqualTo("handle-as");
		assertThat(valueProvider.getParameters()).hasSize(1);
		assertThat(valueProvider.getParameters()).containsEntry("target", Integer.class.getName());
	}
	@Test
	void metadataHints() throws IOException {
		// A hint may carry several value hints (with optional descriptions)
		// and several value providers (with optional parameters).
		RawConfigurationMetadata rawMetadata = readFor("bar");
		List<ConfigurationMetadataHint> hints = rawMetadata.getHints();
		assertThat(hints).hasSize(1);
		ConfigurationMetadataHint hint = hints.get(0);
		assertThat(hint.getId()).isEqualTo("spring.bar.description");
		assertThat(hint.getValueHints()).hasSize(2);
		ValueHint valueHint = hint.getValueHints().get(0);
		assertThat(valueHint.getValue()).isEqualTo("one");
		assertThat(valueHint.getDescription()).isEqualTo("One.");
		ValueHint valueHint2 = hint.getValueHints().get(1);
		assertThat(valueHint2.getValue()).isEqualTo("two");
		assertThat(valueHint2.getDescription()).isNull();
		assertThat(hint.getValueProviders()).hasSize(2);
		ValueProvider valueProvider = hint.getValueProviders().get(0);
		assertThat(valueProvider.getName()).isEqualTo("handle-as");
		assertThat(valueProvider.getParameters()).hasSize(1);
		assertThat(valueProvider.getParameters()).containsEntry("target", String.class.getName());
		ValueProvider valueProvider2 = hint.getValueProviders().get(1);
		assertThat(valueProvider2.getName()).isEqualTo("any");
		assertThat(valueProvider2.getParameters()).isEmpty();
	}
@Test
void rootMetadata() throws IOException {
RawConfigurationMetadata rawMetadata = readFor("root");
List<ConfigurationMetadataSource> sources = rawMetadata.getSources();
assertThat(sources).isEmpty();
List<ConfigurationMetadataItem> items = rawMetadata.getItems();
assertThat(items).hasSize(2);
ConfigurationMetadataItem item = items.get(0);
assertProperty(item, "spring.root.name", "spring.root.name", String.class, null);
}
@Test
void deprecatedMetadata() throws IOException {
RawConfigurationMetadata rawMetadata = readFor("deprecated");
List<ConfigurationMetadataItem> items = rawMetadata.getItems();
assertThat(items).hasSize(5);
ConfigurationMetadataItem item = items.get(0);
assertProperty(item, "server.port", "server.port", Integer.class, null);
assertThat(item.isDeprecated()).isTrue();
assertThat(item.getDeprecation().getReason()).isEqualTo("Server namespace has moved to spring.server");
assertThat(item.getDeprecation().getShortReason()).isEqualTo("Server namespace has moved to spring.server");
assertThat(item.getDeprecation().getReplacement()).isEqualTo("server.spring.port");
assertThat(item.getDeprecation().getLevel()).isEqualTo(Deprecation.Level.WARNING);
ConfigurationMetadataItem item2 = items.get(1);
assertProperty(item2, "server.cluster-name", "server.cluster-name", String.class, null);
assertThat(item2.isDeprecated()).isTrue();
assertThat(item2.getDeprecation().getReason()).isNull();
assertThat(item2.getDeprecation().getShortReason()).isNull();
assertThat(item2.getDeprecation().getReplacement()).isNull();
assertThat(item.getDeprecation().getLevel()).isEqualTo(Deprecation.Level.WARNING);
ConfigurationMetadataItem item3 = items.get(2);
assertProperty(item3, "spring.server.name", "spring.server.name", String.class, null);
assertThat(item3.isDeprecated()).isFalse();
assertThat(item3.getDeprecation()).isNull();
ConfigurationMetadataItem item4 = items.get(3);
assertProperty(item4, "spring.server-name", "spring.server-name", String.class, null);
assertThat(item4.isDeprecated()).isTrue();
assertThat(item4.getDeprecation().getReason()).isNull();
assertThat(item2.getDeprecation().getShortReason()).isNull();
assertThat(item4.getDeprecation().getReplacement()).isEqualTo("spring.server.name");
assertThat(item4.getDeprecation().getLevel()).isEqualTo(Deprecation.Level.ERROR);
ConfigurationMetadataItem item5 = items.get(4);
assertProperty(item5, "spring.server-name2", "spring.server-name2", String.class, null);
assertThat(item5.isDeprecated()).isTrue();
assertThat(item5.getDeprecation().getReason()).isNull();
assertThat(item2.getDeprecation().getShortReason()).isNull();
assertThat(item5.getDeprecation().getReplacement()).isEqualTo("spring.server.name");
assertThat(item5.getDeprecation().getLevel()).isEqualTo(Deprecation.Level.WARNING);
}
@Test
void multiGroupsMetadata() throws IOException {
RawConfigurationMetadata rawMetadata = readFor("multi-groups");
List<ConfigurationMetadataItem> items = rawMetadata.getItems();
assertThat(items).hasSize(3);
ConfigurationMetadataItem item = items.get(0);
assertThat(item.getName()).isEqualTo("enabled");
assertThat(item.getSourceType()).isEqualTo("com.example.Retry");
ConfigurationMetadataItem item2 = items.get(1);
assertThat(item2.getName()).isEqualTo("enabled");
assertThat(item2.getSourceType()).isEqualTo("com.example.Retry");
ConfigurationMetadataItem item3 = items.get(2);
assertThat(item3.getName()).isEqualTo("enabled");
assertThat(item3.getSourceType()).isEqualTo("com.example.Retry");
}
	/**
	 * Read the raw configuration metadata identified by the given resource path.
	 * @param path the resource suffix of the JSON metadata document
	 * @return the parsed raw metadata
	 * @throws IOException if the resource cannot be read
	 */
	RawConfigurationMetadata readFor(String path) throws IOException {
		return this.reader.read(getInputStreamFor(path), DEFAULT_CHARSET);
	}
}
|
java
|
github
|
https://github.com/spring-projects/spring-boot
|
configuration-metadata/spring-boot-configuration-metadata/src/test/java/org/springframework/boot/configurationmetadata/JsonReaderTests.java
|
# This source file is part of the Swift.org open source project
#
# Copyright (c) 2014 - 2015 Apple Inc. and the Swift project authors
# Licensed under Apache License v2.0 with Runtime Library Exception
#
# See http://swift.org/LICENSE.txt for license information
# See http://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
#
import json
from .path import Path
class Configuration:
    """Global build configuration, serializable to a JSON file.

    All settings live as class attributes so the object doubles as a
    process-wide singleton (see ``current``); ``write`` persists a snapshot.
    """

    # Symbolic names for the two supported build modes.
    Debug = "debug"
    Release = "release"

    version = 1
    command = None
    current = None
    project = None
    script_path = None
    build_script_path = None
    source_root = None
    target = None
    system_root = None
    toolchain = None
    build_directory = None
    intermediate_directory = None
    module_cache_directory = None
    install_directory = None
    prefix = None
    swift_install = None
    clang = None
    clangxx = None
    swift = None
    swiftc = None
    ar = None
    swift_sdk = None
    bootstrap_directory = None
    verbose = False
    extra_c_flags = None
    extra_swift_flags = None
    extra_ld_flags = None
    build_mode = None
    config_path = None # dont save this; else it would be recursive
    variables = {}

    def __init__(self):
        pass

    def _encode_path(self, path):
        """Return *path* made absolute, or None when the path is unset."""
        return path.absolute() if path is not None else None

    def write(self, path):
        """Serialize the current configuration to *path* as JSON."""
        enc = self._encode_path
        info = {
            'version' : self.version,
            'command' : self.command,
            'project' : self.project,
            'script_path' : enc(self.script_path),
            'build_script_path' : enc(self.build_script_path),
            'source_root' : enc(self.source_root),
            'target' : self.target.triple,
            'system_root' : enc(self.system_root),
            'toolchain' : self.toolchain,
            'build_directory' : enc(self.build_directory),
            'intermediate_directory' : enc(self.intermediate_directory),
            'module_cache_directory' : enc(self.module_cache_directory),
            'install_directory' : enc(self.install_directory),
            'prefix' : self.prefix,
            'swift_install' : self.swift_install,
            'clang' : self.clang,
            'clangxx' : self.clangxx,
            'swift' : self.swift,
            'swiftc' : self.swiftc,
            'ar' : self.ar,
            'swift_sdk' : self.swift_sdk,
            'bootstrap_directory' : enc(self.bootstrap_directory),
            'verbose' : self.verbose,
            'extra_c_flags' : self.extra_c_flags,
            'extra_swift_flags' : self.extra_swift_flags,
            'extra_ld_flags' : self.extra_ld_flags,
            'build_mode' : self.build_mode,
            'variables' : self.variables,
        }
        with open(path, 'w+') as fh:
            json.dump(info, fh)
|
unknown
|
codeparrot/codeparrot-clean
| ||
# Copyright 2012 VMware, Inc.
# All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import abc
from oslo.config import cfg
from neutron.api import extensions
from neutron.api.v2 import attributes as attr
from neutron.api.v2 import resource_helper
from neutron.common import exceptions as qexception
from neutron.plugins.common import constants
# L3 Exceptions
class RouterNotFound(qexception.NotFound):
    """Raised when the requested router id does not exist."""
    message = _("Router %(router_id)s could not be found")
class RouterInUse(qexception.InUse):
    """Raised when deleting a router that still has attached ports."""
    message = _("Router %(router_id)s still has ports")
class RouterInterfaceNotFound(qexception.NotFound):
    """Raised when the router has no interface with the given port id."""
    message = _("Router %(router_id)s does not have "
                "an interface with id %(port_id)s")
class RouterInterfaceNotFoundForSubnet(qexception.NotFound):
    """Raised when the router has no interface on the given subnet."""
    message = _("Router %(router_id)s has no interface "
                "on subnet %(subnet_id)s")
class RouterInterfaceInUseByFloatingIP(qexception.InUse):
    """Raised when a router interface removal would strand floating IPs."""
    message = _("Router interface for subnet %(subnet_id)s on router "
                "%(router_id)s cannot be deleted, as it is required "
                "by one or more floating IPs.")
class FloatingIPNotFound(qexception.NotFound):
    """Raised when the requested floating IP id does not exist."""
    message = _("Floating IP %(floatingip_id)s could not be found")
class ExternalGatewayForFloatingIPNotFound(qexception.NotFound):
    """Raised when no route exists between the port's subnet and the
    external network the floating IP belongs to."""
    message = _("External network %(external_network_id)s is not reachable "
                "from subnet %(subnet_id)s. Therefore, cannot associate "
                "Port %(port_id)s with a Floating IP.")
class FloatingIPPortAlreadyAssociated(qexception.InUse):
    """Raised when the target fixed IP already has a floating IP."""
    message = _("Cannot associate floating IP %(floating_ip_address)s "
                "(%(fip_id)s) with port %(port_id)s "
                "using fixed IP %(fixed_ip)s, as that fixed IP already "
                "has a floating IP on external network %(net_id)s.")
class L3PortInUse(qexception.InUse):
    """Raised when deleting a port owned by the L3 agent via the port API."""
    message = _("Port %(port_id)s has owner %(device_owner)s and therefore"
                " cannot be deleted directly via the port API.")
class RouterExternalGatewayInUseByFloatingIp(qexception.InUse):
    """Raised when clearing a gateway that floating IPs still depend on."""
    message = _("Gateway cannot be updated for router %(router_id)s, since a "
                "gateway to external network %(net_id)s is required by one or "
                "more floating IPs.")
# Collection name and attribute key constants for the extension resources.
ROUTERS = 'routers'
EXTERNAL_GW_INFO = 'external_gateway_info'
# Attribute maps drive the API layer: validation, visibility in responses,
# defaults, and policy enforcement for each resource field.
RESOURCE_ATTRIBUTE_MAP = {
    ROUTERS: {
        'id': {'allow_post': False, 'allow_put': False,
               'validate': {'type:uuid': None},
               'is_visible': True,
               'primary_key': True},
        'name': {'allow_post': True, 'allow_put': True,
                 'validate': {'type:string': None},
                 'is_visible': True, 'default': ''},
        'admin_state_up': {'allow_post': True, 'allow_put': True,
                           'default': True,
                           'convert_to': attr.convert_to_boolean,
                           'is_visible': True},
        'status': {'allow_post': False, 'allow_put': False,
                   'is_visible': True},
        'tenant_id': {'allow_post': True, 'allow_put': False,
                      'required_by_policy': True,
                      'validate': {'type:string': None},
                      'is_visible': True},
        EXTERNAL_GW_INFO: {'allow_post': True, 'allow_put': True,
                           'is_visible': True, 'default': None,
                           'enforce_policy': True}
    },
    'floatingips': {
        'id': {'allow_post': False, 'allow_put': False,
               'validate': {'type:uuid': None},
               'is_visible': True,
               'primary_key': True},
        'floating_ip_address': {'allow_post': False, 'allow_put': False,
                                'validate': {'type:ip_address_or_none': None},
                                'is_visible': True},
        'floating_network_id': {'allow_post': True, 'allow_put': False,
                                'validate': {'type:uuid': None},
                                'is_visible': True},
        'router_id': {'allow_post': False, 'allow_put': False,
                      'validate': {'type:uuid_or_none': None},
                      'is_visible': True, 'default': None},
        'port_id': {'allow_post': True, 'allow_put': True,
                    'validate': {'type:uuid_or_none': None},
                    'is_visible': True, 'default': None,
                    'required_by_policy': True},
        'fixed_ip_address': {'allow_post': True, 'allow_put': True,
                             'validate': {'type:ip_address_or_none': None},
                             'is_visible': True, 'default': None},
        'tenant_id': {'allow_post': True, 'allow_put': False,
                      'required_by_policy': True,
                      'validate': {'type:string': None},
                      'is_visible': True},
        'status': {'allow_post': False, 'allow_put': False,
                   'is_visible': True},
    },
}
# Per-tenant quota knobs for the L3 resources; registered under [QUOTAS].
l3_quota_opts = [
    cfg.IntOpt('quota_router',
               default=10,
               help=_('Number of routers allowed per tenant. '
                      'A negative value means unlimited.')),
    cfg.IntOpt('quota_floatingip',
               default=50,
               help=_('Number of floating IPs allowed per tenant. '
                      'A negative value means unlimited.')),
]
cfg.CONF.register_opts(l3_quota_opts, 'QUOTAS')
class L3(extensions.ExtensionDescriptor):
    """Extension descriptor advertising the L3 router API (alias: router)."""
    @classmethod
    def get_name(cls):
        """Return the human-readable extension name."""
        return "Neutron L3 Router"
    @classmethod
    def get_alias(cls):
        """Return the short alias used in API requests."""
        return "router"
    @classmethod
    def get_description(cls):
        """Return the one-paragraph extension description."""
        return ("Router abstraction for basic L3 forwarding"
                " between L2 Neutron networks and access to external"
                " networks via a NAT gateway.")
    @classmethod
    def get_namespace(cls):
        """Return the XML namespace URI of the extension."""
        return "http://docs.openstack.org/ext/neutron/router/api/v1.0"
    @classmethod
    def get_updated(cls):
        """Return the timestamp of the last extension update."""
        return "2012-07-20T10:00:00-00:00"
    @classmethod
    def get_resources(cls):
        """Returns Ext Resources."""
        plural_mappings = resource_helper.build_plural_mappings(
            {}, RESOURCE_ATTRIBUTE_MAP)
        attr.PLURALS.update(plural_mappings)
        # Member actions exposed on individual routers via PUT.
        action_map = {'router': {'add_router_interface': 'PUT',
                                 'remove_router_interface': 'PUT'}}
        return resource_helper.build_resource_info(plural_mappings,
                                                   RESOURCE_ATTRIBUTE_MAP,
                                                   constants.L3_ROUTER_NAT,
                                                   action_map=action_map,
                                                   register_quota=True)
    def update_attributes_map(self, attributes):
        """Merge this extension's attributes into the given attribute map."""
        super(L3, self).update_attributes_map(
            attributes, extension_attrs_map=RESOURCE_ATTRIBUTE_MAP)
    def get_extended_resources(self, version):
        """Return the extended resources for API v2.0, else an empty map."""
        if version == "2.0":
            return RESOURCE_ATTRIBUTE_MAP
        else:
            return {}
class RouterPluginBase(object):
    """Abstract interface an L3-capable plugin must implement.

    Router and floating-IP CRUD plus interface management are abstract;
    the two ``*_count`` methods are optional and raise NotImplementedError
    by default.
    """
    # BUG FIX: @abc.abstractmethod has no effect without an ABCMeta
    # metaclass; declare it (Python 2 style, matching the rest of this
    # module) so incomplete subclasses fail at instantiation time.
    __metaclass__ = abc.ABCMeta
    @abc.abstractmethod
    def create_router(self, context, router):
        pass
    @abc.abstractmethod
    def update_router(self, context, id, router):
        pass
    @abc.abstractmethod
    def get_router(self, context, id, fields=None):
        pass
    @abc.abstractmethod
    def delete_router(self, context, id):
        pass
    @abc.abstractmethod
    def get_routers(self, context, filters=None, fields=None,
                    sorts=None, limit=None, marker=None, page_reverse=False):
        pass
    @abc.abstractmethod
    def add_router_interface(self, context, router_id, interface_info):
        pass
    @abc.abstractmethod
    def remove_router_interface(self, context, router_id, interface_info):
        pass
    @abc.abstractmethod
    def create_floatingip(self, context, floatingip):
        pass
    @abc.abstractmethod
    def update_floatingip(self, context, id, floatingip):
        pass
    @abc.abstractmethod
    def get_floatingip(self, context, id, fields=None):
        pass
    @abc.abstractmethod
    def delete_floatingip(self, context, id):
        pass
    @abc.abstractmethod
    def get_floatingips(self, context, filters=None, fields=None,
                        sorts=None, limit=None, marker=None,
                        page_reverse=False):
        pass
    def get_routers_count(self, context, filters=None):
        """Optional hook: return the number of routers matching *filters*."""
        raise NotImplementedError()
    def get_floatingips_count(self, context, filters=None):
        """Optional hook: return the number of floating IPs matching *filters*."""
        raise NotImplementedError()
|
unknown
|
codeparrot/codeparrot-clean
| ||
import sys
import traceback
# Process exit codes: bad command line vs. unexpected exception.
ERROR_WRONG_USAGE = 1
ERROR_EXCEPTION = 4
def usage():
    """Print CLI usage to stderr and terminate with ERROR_WRONG_USAGE."""
    sys.stderr.write('Usage: conda_packaging_tool.py listall | channels | versions PACKAGE\n')
    sys.stderr.flush()
    # BUG FIX: use sys.exit() instead of the bare exit() builtin, which is
    # injected by the site module and is absent under `python -S` or frozen
    # interpreters.
    sys.exit(ERROR_WRONG_USAGE)
def do_list_available_packages():
    """Print every package in the conda index to stdout as TAB-separated
    `name<TAB>version<TAB>dep1:dep2:...` lines, choosing the index API that
    matches the installed conda version."""
    import conda
    version = conda.__version__
    version_splitted = version.split(".")
    if len(version_splitted) < 2:
        sys.stderr.write("Conda version %s" % version)
        sys.stderr.flush()
        return
    major_version = int(version_splitted[0])
    minor_version = int(version_splitted[1])
    # BUG FIX: the previous check `major >= 4 and minor >= 4` wrongly sent
    # conda 5.0-5.3 (minor < 4) down the legacy 4.x import paths; compare
    # the (major, minor) tuple instead.
    if (major_version, minor_version) >= (4, 4):
        init_context()
        from conda.core.index import get_index
        index = get_index()
    elif major_version == 4 and minor_version >= 2:
        from conda.api import get_index
        index = get_index()
    elif major_version == 4 and minor_version == 1:
        from conda.cli.main_search import get_index
        index = get_index()
    else:
        from conda.cli.main_search import common
        index = common.get_index_trap()
    for pkg in index.values():
        sys.stdout.write("\t".join([pkg["name"], pkg["version"], ":".join(pkg["depends"])]) + chr(10))
    sys.stdout.flush()
def do_list_channels():
    """Print the configured conda channel URLs to stdout, one per line."""
    context = init_context()
    if context:
        channels = context.channels
    else:
        # Fall back to the legacy config module on old conda releases.
        import conda.config as config
        channels = (config.get_channel_urls()
                    if hasattr(config, "get_channel_urls")
                    else config.context.channels)
    sys.stdout.write('\n'.join(channels) + '\n')
    sys.stdout.flush()
def fetch_versions(package):
    """Return the distinct known versions of *package*, newest first."""
    import json
    from distutils.version import LooseVersion
    from conda.cli.python_api import run_command, Commands
    stdout, stderr, ret_code = run_command(Commands.SEARCH, package, '--json')
    if ret_code != 0:
        raise Exception(stderr)
    matches = json.loads(stdout).get(package, [])
    # Drop records without a version and deduplicate before sorting.
    versions = {m.get('version') for m in matches if m.get('version')}
    return sorted(versions, key=LooseVersion, reverse=True)
def do_list_versions(package):
    """Print every known version of *package* to stdout, one per line."""
    sys.stdout.write('\n'.join(fetch_versions(package)))
    # BUG FIX: the trailing newline was written to stderr (copy-paste from a
    # sibling), leaving stdout without a final newline and polluting stderr.
    sys.stdout.write('\n')
    sys.stdout.flush()
def init_context():
try:
from conda.base.context import context
except ImportError:
return None
context.__init__()
return context
def main():
    """Dispatch the CLI sub-command; exit non-zero on misuse or error."""
    retcode = 0
    try:
        if len(sys.argv) < 2:
            usage()
        cmd = sys.argv[1]
        if cmd == 'listall':
            if len(sys.argv) != 2:
                usage()
                return
            do_list_available_packages()
        elif cmd == 'channels':
            if len(sys.argv) != 2:
                usage()
                return
            do_list_channels()
        elif cmd == 'versions':
            if len(sys.argv) != 3:
                usage()
                return
            do_list_versions(sys.argv[2])
        else:
            usage()
    except Exception:
        traceback.print_exc()
        # BUG FIX: sys.exit() instead of the site-provided exit() builtin,
        # which is absent under `python -S` or in frozen interpreters.
        sys.exit(ERROR_EXCEPTION)
    sys.exit(retcode)
if __name__ == '__main__':
    main()
|
unknown
|
codeparrot/codeparrot-clean
| ||
# ASCII transliteration table for one 256-code-point Unicode block, indexed
# by the low byte of the code point. Judging by the entries ('OU' at 0x22,
# IPA-style letters from 0x50, modifier letters near 0xb9) this appears to
# cover U+0200-U+02FF (Latin Extended-B tail, IPA Extensions, Spacing
# Modifier Letters) -- TODO confirm against the generator that produced it.
# '[?]' marks code points with no known ASCII approximation; '' means the
# character is dropped entirely.
data = (
'A', # 0x00
'a', # 0x01
'A', # 0x02
'a', # 0x03
'E', # 0x04
'e', # 0x05
'E', # 0x06
'e', # 0x07
'I', # 0x08
'i', # 0x09
'I', # 0x0a
'i', # 0x0b
'O', # 0x0c
'o', # 0x0d
'O', # 0x0e
'o', # 0x0f
'R', # 0x10
'r', # 0x11
'R', # 0x12
'r', # 0x13
'U', # 0x14
'u', # 0x15
'U', # 0x16
'u', # 0x17
'S', # 0x18
's', # 0x19
'T', # 0x1a
't', # 0x1b
'Y', # 0x1c
'y', # 0x1d
'H', # 0x1e
'h', # 0x1f
'N', # 0x20
'd', # 0x21
'OU', # 0x22
'ou', # 0x23
'Z', # 0x24
'z', # 0x25
'A', # 0x26
'a', # 0x27
'E', # 0x28
'e', # 0x29
'O', # 0x2a
'o', # 0x2b
'O', # 0x2c
'o', # 0x2d
'O', # 0x2e
'o', # 0x2f
'O', # 0x30
'o', # 0x31
'Y', # 0x32
'y', # 0x33
'l', # 0x34
'n', # 0x35
't', # 0x36
'j', # 0x37
'db', # 0x38
'qp', # 0x39
'A', # 0x3a
'C', # 0x3b
'c', # 0x3c
'L', # 0x3d
'T', # 0x3e
's', # 0x3f
'z', # 0x40
'[?]', # 0x41
'[?]', # 0x42
'B', # 0x43
'U', # 0x44
'^', # 0x45
'E', # 0x46
'e', # 0x47
'J', # 0x48
'j', # 0x49
'q', # 0x4a
'q', # 0x4b
'R', # 0x4c
'r', # 0x4d
'Y', # 0x4e
'y', # 0x4f
'a', # 0x50
'a', # 0x51
'a', # 0x52
'b', # 0x53
'o', # 0x54
'c', # 0x55
'd', # 0x56
'd', # 0x57
'e', # 0x58
'@', # 0x59
'@', # 0x5a
'e', # 0x5b
'e', # 0x5c
'e', # 0x5d
'e', # 0x5e
'j', # 0x5f
'g', # 0x60
'g', # 0x61
'g', # 0x62
'g', # 0x63
'u', # 0x64
'Y', # 0x65
'h', # 0x66
'h', # 0x67
'i', # 0x68
'i', # 0x69
'I', # 0x6a
'l', # 0x6b
'l', # 0x6c
'l', # 0x6d
'lZ', # 0x6e
'W', # 0x6f
'W', # 0x70
'm', # 0x71
'n', # 0x72
'n', # 0x73
'n', # 0x74
'o', # 0x75
'OE', # 0x76
'O', # 0x77
'F', # 0x78
'r', # 0x79
'r', # 0x7a
'r', # 0x7b
'r', # 0x7c
'r', # 0x7d
'r', # 0x7e
'r', # 0x7f
'R', # 0x80
'R', # 0x81
's', # 0x82
'S', # 0x83
'j', # 0x84
'S', # 0x85
'S', # 0x86
't', # 0x87
't', # 0x88
'u', # 0x89
'U', # 0x8a
'v', # 0x8b
'^', # 0x8c
'w', # 0x8d
'y', # 0x8e
'Y', # 0x8f
'z', # 0x90
'z', # 0x91
'Z', # 0x92
'Z', # 0x93
'?', # 0x94
'?', # 0x95
'?', # 0x96
'C', # 0x97
'@', # 0x98
'B', # 0x99
'E', # 0x9a
'G', # 0x9b
'H', # 0x9c
'j', # 0x9d
'k', # 0x9e
'L', # 0x9f
'q', # 0xa0
'?', # 0xa1
'?', # 0xa2
'dz', # 0xa3
'dZ', # 0xa4
'dz', # 0xa5
'ts', # 0xa6
'tS', # 0xa7
'tC', # 0xa8
'fN', # 0xa9
'ls', # 0xaa
'lz', # 0xab
'WW', # 0xac
']]', # 0xad
'h', # 0xae
'h', # 0xaf
'k', # 0xb0
'h', # 0xb1
'j', # 0xb2
'r', # 0xb3
'r', # 0xb4
'r', # 0xb5
'r', # 0xb6
'w', # 0xb7
'y', # 0xb8
'\'', # 0xb9
'"', # 0xba
'`', # 0xbb
'\'', # 0xbc
'`', # 0xbd
'`', # 0xbe
'\'', # 0xbf
'?', # 0xc0
'?', # 0xc1
'<', # 0xc2
'>', # 0xc3
'^', # 0xc4
'V', # 0xc5
'^', # 0xc6
'V', # 0xc7
'\'', # 0xc8
'-', # 0xc9
'/', # 0xca
'\\', # 0xcb
',', # 0xcc
'_', # 0xcd
'\\', # 0xce
'/', # 0xcf
':', # 0xd0
'.', # 0xd1
'`', # 0xd2
'\'', # 0xd3
'^', # 0xd4
'V', # 0xd5
'+', # 0xd6
'-', # 0xd7
'V', # 0xd8
'.', # 0xd9
'@', # 0xda
',', # 0xdb
'~', # 0xdc
'"', # 0xdd
'R', # 0xde
'X', # 0xdf
'G', # 0xe0
'l', # 0xe1
's', # 0xe2
'x', # 0xe3
'?', # 0xe4
'', # 0xe5
'', # 0xe6
'', # 0xe7
'', # 0xe8
'', # 0xe9
'', # 0xea
'', # 0xeb
'V', # 0xec
'=', # 0xed
'"', # 0xee
'[?]', # 0xef
'[?]', # 0xf0
'[?]', # 0xf1
'[?]', # 0xf2
'[?]', # 0xf3
'[?]', # 0xf4
'[?]', # 0xf5
'[?]', # 0xf6
'[?]', # 0xf7
'[?]', # 0xf8
'[?]', # 0xf9
'[?]', # 0xfa
'[?]', # 0xfb
'[?]', # 0xfc
'[?]', # 0xfd
'[?]', # 0xfe
)
|
unknown
|
codeparrot/codeparrot-clean
| ||
# Copyright 2014 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from oslo_serialization import jsonutils
from nova.api.openstack import api_version_request as api_version
from nova import test
from nova.tests.unit.api.openstack.compute import microversions
from nova.tests.unit.api.openstack import fakes
class LegacyMicroversionsTest(test.NoDBTestCase):
    """Exercise API microversion negotiation through the legacy
    X-OpenStack-Nova-API-Version request header.

    The modern-header variant (MicroversionsTest) subclasses this and only
    overrides ``header_name``; hence the repeated ``'nova' in
    self.header_name`` branches, which decide whether the version value is
    prefixed with the ``compute`` service type.
    """
    header_name = 'X-OpenStack-Nova-API-Version'
    def setUp(self):
        super(LegacyMicroversionsTest, self).setUp()
        # WSGI app with the fake versioned controllers wired in.
        self.app = fakes.wsgi_app_v21(custom_routes=microversions.ROUTES)
    def _test_microversions(self, app, req, ret_code, ret_header=None):
        """Send *req*, assert the status code and (optionally) the
        version header echoed back; return the response."""
        req.environ['CONTENT_TYPE'] = "application/json"
        res = req.get_response(app)
        self.assertEqual(ret_code, res.status_int)
        if ret_header:
            if 'nova' not in self.header_name.lower():
                ret_header = 'compute %s' % ret_header
            self.assertEqual(ret_header,
                             res.headers[self.header_name])
        return res
    def _make_header(self, req_header):
        """Build the request headers dict for the given version string,
        adding the 'compute' service prefix for the modern header."""
        if 'nova' in self.header_name.lower():
            headers = {self.header_name: req_header}
        else:
            headers = {self.header_name: 'compute %s' % req_header}
        return headers
    def test_microversions_no_header(self):
        req = fakes.HTTPRequest.blank('/v2/fake/microversions', method='GET')
        res = req.get_response(self.app)
        self.assertEqual(200, res.status_int)
        resp_json = jsonutils.loads(res.body)
        self.assertEqual('val', resp_json['param'])
    def test_microversions_return_header(self):
        # Without a request header the minimum version (2.1) is echoed back.
        req = fakes.HTTPRequest.blank('/v2/fake/microversions')
        res = req.get_response(self.app)
        self.assertEqual(200, res.status_int)
        resp_json = jsonutils.loads(res.body)
        self.assertEqual('val', resp_json['param'])
        if 'nova' in self.header_name.lower():
            self.assertEqual("2.1", res.headers[self.header_name])
        else:
            self.assertEqual("compute 2.1", res.headers[self.header_name])
        self.assertIn(self.header_name, res.headers.getall('Vary'))
    @mock.patch("nova.api.openstack.api_version_request.max_api_version")
    def test_microversions_return_header_non_default(self,
                                                     mock_maxver):
        mock_maxver.return_value = api_version.APIVersionRequest("2.3")
        req = fakes.HTTPRequest.blank('/v2/fake/microversions')
        req.headers = self._make_header('2.3')
        res = req.get_response(self.app)
        self.assertEqual(200, res.status_int)
        resp_json = jsonutils.loads(res.body)
        self.assertEqual('val2', resp_json['param'])
        if 'nova' in self.header_name.lower():
            self.assertEqual("2.3", res.headers[self.header_name])
        else:
            self.assertEqual("compute 2.3", res.headers[self.header_name])
        self.assertIn(self.header_name, res.headers.getall('Vary'))
    @mock.patch("nova.api.openstack.api_version_request.max_api_version")
    def test_microversions_return_header_fault(self, mock_maxver):
        # Even a 400 fault response carries the negotiated version header.
        mock_maxver.return_value = api_version.APIVersionRequest("3.0")
        req = fakes.HTTPRequest.blank('/v2/fake/microversions')
        req.headers = self._make_header('3.0')
        res = req.get_response(self.app)
        self.assertEqual(400, res.status_int)
        if 'nova' in self.header_name.lower():
            self.assertEqual("3.0", res.headers[self.header_name])
        else:
            self.assertEqual("compute 3.0", res.headers[self.header_name])
        self.assertIn(self.header_name, res.headers.getall('Vary'))
    @mock.patch("nova.api.openstack.api_version_request.max_api_version")
    def _check_microversion_response(self, url, req_version, resp_param,
                                     mock_maxver):
        """Request *url* at *req_version* and assert the controller
        selected by version negotiation returned *resp_param*."""
        mock_maxver.return_value = api_version.APIVersionRequest('2.3')
        req = fakes.HTTPRequest.blank(url)
        req.headers = self._make_header(req_version)
        res = req.get_response(self.app)
        self.assertEqual(200, res.status_int)
        resp_json = jsonutils.loads(res.body)
        self.assertEqual(resp_param, resp_json['param'])
    def test_microversions_with_header(self):
        self._check_microversion_response('/v2/fake/microversions',
                                          '2.3', 'val2')
    def test_microversions_with_header_exact_match(self):
        self._check_microversion_response('/v2/fake/microversions',
                                          '2.2', 'val2')
    def test_microversions2_no_2_1_version(self):
        self._check_microversion_response('/v2/fake/microversions2',
                                          '2.3', 'controller2_val1')
    @mock.patch("nova.api.openstack.api_version_request.max_api_version")
    def test_microversions2_later_version(self, mock_maxver):
        mock_maxver.return_value = api_version.APIVersionRequest("3.1")
        req = fakes.HTTPRequest.blank('/v2/fake/microversions2')
        req.headers = self._make_header('3.0')
        res = req.get_response(self.app)
        self.assertEqual(202, res.status_int)
        resp_json = jsonutils.loads(res.body)
        self.assertEqual('controller2_val2', resp_json['param'])
    @mock.patch("nova.api.openstack.api_version_request.max_api_version")
    def test_microversions2_version_too_high(self, mock_maxver):
        mock_maxver.return_value = api_version.APIVersionRequest("3.5")
        req = fakes.HTTPRequest.blank('/v2/fake/microversions2')
        req.headers = {self.header_name: '3.2'}
        res = req.get_response(self.app)
        self.assertEqual(404, res.status_int)
    def test_microversions2_version_too_low(self):
        req = fakes.HTTPRequest.blank('/v2/fake/microversions2')
        req.headers = {self.header_name: '2.1'}
        res = req.get_response(self.app)
        self.assertEqual(404, res.status_int)
    @mock.patch("nova.api.openstack.api_version_request.max_api_version")
    def test_microversions_global_version_too_high(self,
                                                   mock_maxver):
        mock_maxver.return_value = api_version.APIVersionRequest("3.5")
        req = fakes.HTTPRequest.blank('/v2/fake/microversions2')
        req.headers = self._make_header('3.7')
        res = req.get_response(self.app)
        self.assertEqual(406, res.status_int)
        res_json = jsonutils.loads(res.body)
        self.assertEqual("Version 3.7 is not supported by the API. "
                         "Minimum is 2.1 and maximum is 3.5.",
                         res_json['computeFault']['message'])
    @mock.patch("nova.api.openstack.api_version_request.max_api_version")
    def test_microversions_schema(self, mock_maxver):
        # Request-body schema validation is version-aware too.
        mock_maxver.return_value = api_version.APIVersionRequest("3.3")
        req = fakes.HTTPRequest.blank('/v2/fake/microversions3')
        req.method = 'POST'
        req.headers = self._make_header('2.2')
        req.environ['CONTENT_TYPE'] = "application/json"
        req.body = jsonutils.dump_as_bytes({'dummy': {'val': 'foo'}})
        res = req.get_response(self.app)
        self.assertEqual(200, res.status_int)
        resp_json = jsonutils.loads(res.body)
        self.assertEqual('create_val1', resp_json['param'])
        if 'nova' in self.header_name.lower():
            self.assertEqual("2.2", res.headers[self.header_name])
        else:
            self.assertEqual("compute 2.2", res.headers[self.header_name])
        self.assertIn(self.header_name, res.headers.getall('Vary'))
    @mock.patch("nova.api.openstack.api_version_request.max_api_version")
    def test_microversions_schema_fail(self, mock_maxver):
        mock_maxver.return_value = api_version.APIVersionRequest("3.3")
        req = fakes.HTTPRequest.blank('/v2/fake/microversions3')
        req.method = 'POST'
        req.headers = {self.header_name: '2.2'}
        req.environ['CONTENT_TYPE'] = "application/json"
        req.body = jsonutils.dump_as_bytes({'dummy': {'invalid_param': 'foo'}})
        res = req.get_response(self.app)
        self.assertEqual(400, res.status_int)
        resp_json = jsonutils.loads(res.body)
        self.assertTrue(resp_json['badRequest']['message'].startswith(
            "Invalid input for field/attribute dummy."))
    @mock.patch("nova.api.openstack.api_version_request.max_api_version")
    def test_microversions_schema_out_of_version_check(self,
                                                       mock_maxver):
        # A body invalid only under a schema outside the requested version
        # range must not be rejected.
        mock_maxver.return_value = api_version.APIVersionRequest("3.3")
        req = fakes.HTTPRequest.blank('/v2/fake/microversions3/1')
        req.method = 'PUT'
        req.headers = self._make_header('2.2')
        req.body = jsonutils.dump_as_bytes({'dummy': {'inv_val': 'foo'}})
        req.environ['CONTENT_TYPE'] = "application/json"
        res = req.get_response(self.app)
        self.assertEqual(200, res.status_int)
        resp_json = jsonutils.loads(res.body)
        self.assertEqual('update_val1', resp_json['param'])
        if 'nova' in self.header_name.lower():
            self.assertEqual("2.2", res.headers[self.header_name])
        else:
            self.assertEqual("compute 2.2", res.headers[self.header_name])
    @mock.patch("nova.api.openstack.api_version_request.max_api_version")
    def test_microversions_schema_second_version(self,
                                                 mock_maxver):
        mock_maxver.return_value = api_version.APIVersionRequest("3.3")
        req = fakes.HTTPRequest.blank('/v2/fake/microversions3/1')
        req.headers = self._make_header('2.10')
        req.environ['CONTENT_TYPE'] = "application/json"
        req.method = 'PUT'
        req.body = jsonutils.dump_as_bytes({'dummy': {'val2': 'foo'}})
        res = req.get_response(self.app)
        self.assertEqual(200, res.status_int)
        resp_json = jsonutils.loads(res.body)
        self.assertEqual('update_val1', resp_json['param'])
        if 'nova' in self.header_name.lower():
            self.assertEqual("2.10", res.headers[self.header_name])
        else:
            self.assertEqual("compute 2.10", res.headers[self.header_name])
    @mock.patch("nova.api.openstack.api_version_request.max_api_version")
    def _test_microversions_inner_function(self, version, expected_resp,
                                           mock_maxver):
        """POST to microversions4 at *version* and assert the versioned
        inner function selected matches *expected_resp*."""
        mock_maxver.return_value = api_version.APIVersionRequest("2.2")
        req = fakes.HTTPRequest.blank('/v2/fake/microversions4')
        req.headers = self._make_header(version)
        req.environ['CONTENT_TYPE'] = "application/json"
        req.method = 'POST'
        req.body = b''
        res = req.get_response(self.app)
        self.assertEqual(200, res.status_int)
        resp_json = jsonutils.loads(res.body)
        self.assertEqual(expected_resp, resp_json['param'])
        if 'nova' not in self.header_name.lower():
            version = 'compute %s' % version
        self.assertEqual(version, res.headers[self.header_name])
    def test_microversions_inner_function_v22(self):
        self._test_microversions_inner_function('2.2', 'controller4_val2')
    def test_microversions_inner_function_v21(self):
        self._test_microversions_inner_function('2.1', 'controller4_val1')
    @mock.patch("nova.api.openstack.api_version_request.max_api_version")
    def _test_microversions_actions(self, ret_code, ret_header, req_header,
                                    mock_maxver):
        """POST an action body and check status/header; on 202 also check
        the echoed action response body."""
        mock_maxver.return_value = api_version.APIVersionRequest("2.3")
        req = fakes.HTTPRequest.blank('/v2/fake/microversions3/1/action')
        if req_header:
            req.headers = self._make_header(req_header)
        req.method = 'POST'
        req.body = jsonutils.dump_as_bytes({'foo': None})
        res = self._test_microversions(self.app, req, ret_code,
                                       ret_header=ret_header)
        if ret_code == 202:
            resp_json = jsonutils.loads(res.body)
            self.assertEqual({'foo': 'bar'}, resp_json)
    def test_microversions_actions(self):
        self._test_microversions_actions(202, "2.1", "2.1")
    def test_microversions_actions_too_high(self):
        self._test_microversions_actions(404, "2.3", "2.3")
    def test_microversions_actions_no_header(self):
        self._test_microversions_actions(202, "2.1", None)
class MicroversionsTest(LegacyMicroversionsTest):
    """Re-run the whole suite using the modern OpenStack-API-Version header."""
    header_name = 'OpenStack-API-Version'
|
unknown
|
codeparrot/codeparrot-clean
| ||
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
from openerp.tools.translate import _
class stock_invoice_onshipping(osv.osv_memory):
    # Transient wizard: creates customer/supplier invoices (or refunds) from
    # the stock pickings currently selected in context['active_ids'].

    def _get_journal(self, cr, uid, context=None):
        """Default for 'journal_id': first journal whose type matches the
        journal type inferred from the selected pickings."""
        journal_obj = self.pool.get('account.journal')
        journal_type = self._get_journal_type(cr, uid, context=context)
        journals = journal_obj.search(cr, uid, [('type', '=', journal_type)])
        # False (not None) is the ORM convention for "no default value".
        return journals and journals[0] or False
    def _get_journal_type(self, cr, uid, context=None):
        """Infer the journal type from the FIRST selected picking.

        The decision is based on the picking direction (incoming/outgoing)
        combined with the usage of the source/destination location of the
        first move line; falls back to 'sale' whenever the picking has no
        moves or the combination is unrecognized.
        """
        if context is None:
            context = {}
        res_ids = context and context.get('active_ids', [])
        pick_obj = self.pool.get('stock.picking')
        pickings = pick_obj.browse(cr, uid, res_ids, context=context)
        vals = []
        # Only the first picking drives the default; other selected pickings
        # are assumed compatible (not validated here).
        pick = pickings and pickings[0]
        if not pick or not pick.move_lines:
            return 'sale'
        src_usage = pick.move_lines[0].location_id.usage
        dest_usage = pick.move_lines[0].location_dest_id.usage
        # NOTE: 'type' shadows the builtin; kept as-is (documentation-only pass).
        type = pick.picking_type_id.code
        if type == 'outgoing' and dest_usage == 'supplier':
            # Returning goods to a supplier -> refund of a purchase.
            journal_type = 'purchase_refund'
        elif type == 'outgoing' and dest_usage == 'customer':
            journal_type = 'sale'
        elif type == 'incoming' and src_usage == 'supplier':
            journal_type = 'purchase'
        elif type == 'incoming' and src_usage == 'customer':
            # Goods coming back from a customer -> refund of a sale.
            journal_type = 'sale_refund'
        else:
            journal_type = 'sale'
        return journal_type
    _name = "stock.invoice.onshipping"
    _description = "Stock Invoice Onshipping"
    _columns = {
        'journal_id': fields.many2one('account.journal', 'Destination Journal', required=True),
        'journal_type': fields.selection([('purchase_refund', 'Refund Purchase'), ('purchase', 'Create Supplier Invoice'),
                ('sale_refund', 'Refund Sale'), ('sale', 'Create Customer Invoice')], 'Journal Type', readonly=True),
        'group': fields.boolean("Group by partner"),
        'invoice_date': fields.date('Invoice Date'),
    }
    _defaults = {
        'journal_type': _get_journal_type,
        'journal_id' : _get_journal,
    }
    def view_init(self, cr, uid, fields_list, context=None):
        """Abort wizard opening when none of the selected pickings is in the
        '2binvoiced' state (nothing to invoice).

        :raises osv.except_osv: if every selected picking is already
            invoiced or not invoiceable.
        """
        if context is None:
            context = {}
        res = super(stock_invoice_onshipping, self).view_init(cr, uid, fields_list, context=context)
        pick_obj = self.pool.get('stock.picking')
        count = 0
        active_ids = context.get('active_ids',[])
        for pick in pick_obj.browse(cr, uid, active_ids, context=context):
            if pick.invoice_state != '2binvoiced':
                count += 1
        # All selected pickings are non-invoiceable -> warn and stop.
        if len(active_ids) == count:
            raise osv.except_osv(_('Warning!'), _('None of these picking lists require invoicing.'))
        return res
    def open_invoice(self, cr, uid, ids, context=None):
        """Create the invoices, then return the act_window action showing
        them (filtered by the created invoice ids), or True if no matching
        action was found.

        :raises osv.except_osv: if invoice creation produced nothing.
        """
        if context is None:
            context = {}
        invoice_ids = self.create_invoice(cr, uid, ids, context=context)
        if not invoice_ids:
            raise osv.except_osv(_('Error!'), _('No invoice created!'))
        data = self.browse(cr, uid, ids[0], context=context)
        action_model = False
        action = {}
        # Map the wizard's journal type to the invoice type, defaulting to a
        # customer invoice.
        journal2type = {'sale':'out_invoice', 'purchase':'in_invoice' , 'sale_refund':'out_refund', 'purchase_refund':'in_refund'}
        inv_type = journal2type.get(data.journal_type) or 'out_invoice'
        data_pool = self.pool.get('ir.model.data')
        # Each invoice type has its own standard list action in the account
        # module; resolve its database id from the XML id.
        if inv_type == "out_invoice":
            action_id = data_pool.xmlid_to_res_id(cr, uid, 'account.action_invoice_tree1')
        elif inv_type == "in_invoice":
            action_id = data_pool.xmlid_to_res_id(cr, uid, 'account.action_invoice_tree2')
        elif inv_type == "out_refund":
            action_id = data_pool.xmlid_to_res_id(cr, uid, 'account.action_invoice_tree3')
        elif inv_type == "in_refund":
            action_id = data_pool.xmlid_to_res_id(cr, uid, 'account.action_invoice_tree4')
        if action_id:
            action_pool = self.pool['ir.actions.act_window']
            action = action_pool.read(cr, uid, action_id, context=context)
            # Restrict the action's list view to the invoices just created.
            action['domain'] = "[('id','in', ["+','.join(map(str,invoice_ids))+"])]"
            return action
        return True
    def create_invoice(self, cr, uid, ids, context=None):
        """Invoke stock.picking's invoice creation for the selected pickings
        using the journal/date/grouping chosen in the wizard; returns the
        result of action_invoice_create (the created invoice ids)."""
        context = dict(context or {})
        picking_pool = self.pool.get('stock.picking')
        data = self.browse(cr, uid, ids[0], context=context)
        journal2type = {'sale':'out_invoice', 'purchase':'in_invoice', 'sale_refund':'out_refund', 'purchase_refund':'in_refund'}
        # Pass the chosen invoice date and type down through the context for
        # the picking-side invoice creation to pick up.
        context['date_inv'] = data.invoice_date
        acc_journal = self.pool.get("account.journal")
        inv_type = journal2type.get(data.journal_type) or 'out_invoice'
        context['inv_type'] = inv_type
        active_ids = context.get('active_ids', [])
        res = picking_pool.action_invoice_create(cr, uid, active_ids,
              journal_id = data.journal_id.id,
              group = data.group,
              type = inv_type,
              context=context)
        return res
|
unknown
|
codeparrot/codeparrot-clean
| ||
"""
The MIT License (MIT)
Copyright (c) 2015 Guillermo Romero Franco (AKA Gato)
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
import pygame
class Input:
    """Keyboard state tracker for the game loop.

    Movement keys (arrows / WASD) are level-triggered: held down means
    active.  Jump (space) and shoot (return / keypad enter) are
    edge-triggered: they read as 1 only for the frame in which the key
    went down.  A pygame.QUIT event latches the quit flag permanently.
    """

    def __init__(self):
        self._rot = 0
        self._fwd = 0
        self._back = 0
        self._left = 0
        self._right = 0
        self._jump = 0
        self._shoot = 0
        # Dispatch table: key of interest -> handler recording its state.
        self._actions = {
            pygame.K_LEFT: self._updateLeft,
            pygame.K_a: self._updateLeft,
            pygame.K_RIGHT: self._updateRight,
            pygame.K_d: self._updateRight,
            pygame.K_UP: self._updateFwd,
            pygame.K_w: self._updateFwd,
            pygame.K_DOWN: self._updateBack,
            pygame.K_s: self._updateBack,
            pygame.K_SPACE: self._updateJump,
            pygame.K_RETURN: self._updateShoot,
            pygame.K_KP_ENTER: self._updateShoot,
        }
        self._should_quit = False

    def shouldQuit(self):
        """True once a pygame.QUIT event has been seen."""
        return self._should_quit

    def wantJump(self):
        """1 if jump was pressed during the last process() call."""
        return self._jump

    def wantShoot(self):
        """1 if shoot was pressed during the last process() call."""
        return self._shoot

    def fwdMotion(self):
        """Net forward motion: +1 forward, -1 backward, 0 neither/both."""
        return self._fwd - self._back

    def rotMotion(self):
        """Net rotation: +1 left, -1 right, 0 neither/both."""
        return self._left - self._right

    def process(self):
        """Drain the pygame event queue and update the input state."""
        # Edge-triggered actions are cleared every frame and only re-set by
        # a fresh KEYDOWN below.
        self._jump = 0
        self._shoot = 0
        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                self._should_quit = True
            elif event.type in (pygame.KEYDOWN, pygame.KEYUP):
                # Keys we don't care about simply have no handler.
                handler = self._actions.get(event.key)
                if handler is not None:
                    handler(1 if event.type == pygame.KEYDOWN else 0)

    # --------- per-key handlers (pressed: 1 on key-down, 0 on key-up)

    def _updateFwd(self, pressed):
        self._fwd = 1 if pressed else 0

    def _updateBack(self, pressed):
        self._back = 1 if pressed else 0

    def _updateLeft(self, pressed):
        self._left = 1 if pressed else 0

    def _updateRight(self, pressed):
        self._right = 1 if pressed else 0

    def _updateJump(self, pressed):
        # Only the down-edge sets the flag; key-up is ignored.
        if pressed:
            self._jump = 1

    def _updateShoot(self, pressed):
        if pressed:
            self._shoot = 1
|
unknown
|
codeparrot/codeparrot-clean
| ||
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
from cryptography import utils
from cryptography.exceptions import UnsupportedAlgorithm, _Reasons
from cryptography.hazmat.backends.interfaces import (
CMACBackend, CipherBackend, DERSerializationBackend, DSABackend,
EllipticCurveBackend, HMACBackend, HashBackend, PBKDF2HMACBackend,
PEMSerializationBackend, RSABackend, X509Backend
)
@utils.register_interface(CMACBackend)
@utils.register_interface(CipherBackend)
@utils.register_interface(DERSerializationBackend)
@utils.register_interface(HashBackend)
@utils.register_interface(HMACBackend)
@utils.register_interface(PBKDF2HMACBackend)
@utils.register_interface(RSABackend)
@utils.register_interface(DSABackend)
@utils.register_interface(EllipticCurveBackend)
@utils.register_interface(PEMSerializationBackend)
@utils.register_interface(X509Backend)
class MultiBackend(object):
    """Composite backend that delegates every operation to its wrapped
    backends.

    Three delegation patterns are used below:

    * ``*_supported`` predicates return True if ANY wrapped backend
      implementing the relevant interface supports the algorithm;
    * some factory/loader methods try each candidate backend in turn,
      falling through on ``UnsupportedAlgorithm`` (ciphers, hashes, HMAC,
      PBKDF2, CMAC, elliptic curve);
    * the remaining methods delegate to the FIRST backend implementing
      the interface only (``return`` inside the ``for`` loop) — any
      ``UnsupportedAlgorithm`` it raises propagates directly.

    If no wrapped backend implements the needed interface at all, an
    ``UnsupportedAlgorithm`` with an appropriate ``_Reasons`` code is
    raised.
    """
    name = "multibackend"
    def __init__(self, backends):
        if len(backends) == 0:
            raise ValueError(
                "Multibackend cannot be initialized with no backends. If you "
                "are seeing this error when trying to use default_backend() "
                "please try uninstalling and reinstalling cryptography."
            )
        self._backends = backends
    def _filtered_backends(self, interface):
        """Yield the wrapped backends that implement *interface*."""
        for b in self._backends:
            if isinstance(b, interface):
                yield b
    # --- symmetric ciphers -------------------------------------------------
    def cipher_supported(self, cipher, mode):
        return any(
            b.cipher_supported(cipher, mode)
            for b in self._filtered_backends(CipherBackend)
        )
    def create_symmetric_encryption_ctx(self, cipher, mode):
        for b in self._filtered_backends(CipherBackend):
            try:
                return b.create_symmetric_encryption_ctx(cipher, mode)
            except UnsupportedAlgorithm:
                pass
        raise UnsupportedAlgorithm(
            "cipher {0} in {1} mode is not supported by this backend.".format(
                cipher.name, mode.name if mode else mode),
            _Reasons.UNSUPPORTED_CIPHER
        )
    def create_symmetric_decryption_ctx(self, cipher, mode):
        for b in self._filtered_backends(CipherBackend):
            try:
                return b.create_symmetric_decryption_ctx(cipher, mode)
            except UnsupportedAlgorithm:
                pass
        raise UnsupportedAlgorithm(
            "cipher {0} in {1} mode is not supported by this backend.".format(
                cipher.name, mode.name if mode else mode),
            _Reasons.UNSUPPORTED_CIPHER
        )
    # --- hashes / HMAC / PBKDF2 --------------------------------------------
    def hash_supported(self, algorithm):
        return any(
            b.hash_supported(algorithm)
            for b in self._filtered_backends(HashBackend)
        )
    def create_hash_ctx(self, algorithm):
        for b in self._filtered_backends(HashBackend):
            try:
                return b.create_hash_ctx(algorithm)
            except UnsupportedAlgorithm:
                pass
        raise UnsupportedAlgorithm(
            "{0} is not a supported hash on this backend.".format(
                algorithm.name),
            _Reasons.UNSUPPORTED_HASH
        )
    def hmac_supported(self, algorithm):
        return any(
            b.hmac_supported(algorithm)
            for b in self._filtered_backends(HMACBackend)
        )
    def create_hmac_ctx(self, key, algorithm):
        for b in self._filtered_backends(HMACBackend):
            try:
                return b.create_hmac_ctx(key, algorithm)
            except UnsupportedAlgorithm:
                pass
        raise UnsupportedAlgorithm(
            "{0} is not a supported hash on this backend.".format(
                algorithm.name),
            _Reasons.UNSUPPORTED_HASH
        )
    def pbkdf2_hmac_supported(self, algorithm):
        return any(
            b.pbkdf2_hmac_supported(algorithm)
            for b in self._filtered_backends(PBKDF2HMACBackend)
        )
    def derive_pbkdf2_hmac(self, algorithm, length, salt, iterations,
                           key_material):
        for b in self._filtered_backends(PBKDF2HMACBackend):
            try:
                return b.derive_pbkdf2_hmac(
                    algorithm, length, salt, iterations, key_material
                )
            except UnsupportedAlgorithm:
                pass
        raise UnsupportedAlgorithm(
            "{0} is not a supported hash on this backend.".format(
                algorithm.name),
            _Reasons.UNSUPPORTED_HASH
        )
    # --- RSA: first-backend-only delegation --------------------------------
    def generate_rsa_private_key(self, public_exponent, key_size):
        for b in self._filtered_backends(RSABackend):
            return b.generate_rsa_private_key(public_exponent, key_size)
        raise UnsupportedAlgorithm("RSA is not supported by the backend.",
                                   _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM)
    def generate_rsa_parameters_supported(self, public_exponent, key_size):
        for b in self._filtered_backends(RSABackend):
            return b.generate_rsa_parameters_supported(
                public_exponent, key_size
            )
        raise UnsupportedAlgorithm("RSA is not supported by the backend.",
                                   _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM)
    def rsa_padding_supported(self, padding):
        for b in self._filtered_backends(RSABackend):
            return b.rsa_padding_supported(padding)
        raise UnsupportedAlgorithm("RSA is not supported by the backend.",
                                   _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM)
    def load_rsa_private_numbers(self, numbers):
        for b in self._filtered_backends(RSABackend):
            return b.load_rsa_private_numbers(numbers)
        raise UnsupportedAlgorithm("RSA is not supported by the backend",
                                   _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM)
    def load_rsa_public_numbers(self, numbers):
        for b in self._filtered_backends(RSABackend):
            return b.load_rsa_public_numbers(numbers)
        raise UnsupportedAlgorithm("RSA is not supported by the backend",
                                   _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM)
    # --- DSA: first-backend-only delegation --------------------------------
    def generate_dsa_parameters(self, key_size):
        for b in self._filtered_backends(DSABackend):
            return b.generate_dsa_parameters(key_size)
        raise UnsupportedAlgorithm("DSA is not supported by the backend.",
                                   _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM)
    def generate_dsa_private_key(self, parameters):
        for b in self._filtered_backends(DSABackend):
            return b.generate_dsa_private_key(parameters)
        raise UnsupportedAlgorithm("DSA is not supported by the backend.",
                                   _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM)
    def generate_dsa_private_key_and_parameters(self, key_size):
        for b in self._filtered_backends(DSABackend):
            return b.generate_dsa_private_key_and_parameters(key_size)
        raise UnsupportedAlgorithm("DSA is not supported by the backend.",
                                   _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM)
    def dsa_hash_supported(self, algorithm):
        for b in self._filtered_backends(DSABackend):
            return b.dsa_hash_supported(algorithm)
        raise UnsupportedAlgorithm("DSA is not supported by the backend.",
                                   _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM)
    def dsa_parameters_supported(self, p, q, g):
        for b in self._filtered_backends(DSABackend):
            return b.dsa_parameters_supported(p, q, g)
        raise UnsupportedAlgorithm("DSA is not supported by the backend.",
                                   _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM)
    def load_dsa_public_numbers(self, numbers):
        for b in self._filtered_backends(DSABackend):
            return b.load_dsa_public_numbers(numbers)
        raise UnsupportedAlgorithm("DSA is not supported by the backend.",
                                   _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM)
    def load_dsa_private_numbers(self, numbers):
        for b in self._filtered_backends(DSABackend):
            return b.load_dsa_private_numbers(numbers)
        raise UnsupportedAlgorithm("DSA is not supported by the backend.",
                                   _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM)
    def load_dsa_parameter_numbers(self, numbers):
        for b in self._filtered_backends(DSABackend):
            return b.load_dsa_parameter_numbers(numbers)
        raise UnsupportedAlgorithm("DSA is not supported by the backend.",
                                   _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM)
    # --- CMAC ---------------------------------------------------------------
    def cmac_algorithm_supported(self, algorithm):
        return any(
            b.cmac_algorithm_supported(algorithm)
            for b in self._filtered_backends(CMACBackend)
        )
    def create_cmac_ctx(self, algorithm):
        for b in self._filtered_backends(CMACBackend):
            try:
                return b.create_cmac_ctx(algorithm)
            except UnsupportedAlgorithm:
                pass
        raise UnsupportedAlgorithm("This backend does not support CMAC.",
                                   _Reasons.UNSUPPORTED_CIPHER)
    # --- elliptic curve: fall-through on UnsupportedAlgorithm ---------------
    def elliptic_curve_supported(self, curve):
        return any(
            b.elliptic_curve_supported(curve)
            for b in self._filtered_backends(EllipticCurveBackend)
        )
    def elliptic_curve_signature_algorithm_supported(
        self, signature_algorithm, curve
    ):
        return any(
            b.elliptic_curve_signature_algorithm_supported(
                signature_algorithm, curve
            )
            for b in self._filtered_backends(EllipticCurveBackend)
        )
    def generate_elliptic_curve_private_key(self, curve):
        for b in self._filtered_backends(EllipticCurveBackend):
            try:
                return b.generate_elliptic_curve_private_key(curve)
            except UnsupportedAlgorithm:
                continue
        raise UnsupportedAlgorithm(
            "This backend does not support this elliptic curve.",
            _Reasons.UNSUPPORTED_ELLIPTIC_CURVE
        )
    def load_elliptic_curve_private_numbers(self, numbers):
        for b in self._filtered_backends(EllipticCurveBackend):
            try:
                return b.load_elliptic_curve_private_numbers(numbers)
            except UnsupportedAlgorithm:
                continue
        raise UnsupportedAlgorithm(
            "This backend does not support this elliptic curve.",
            _Reasons.UNSUPPORTED_ELLIPTIC_CURVE
        )
    def load_elliptic_curve_public_numbers(self, numbers):
        for b in self._filtered_backends(EllipticCurveBackend):
            try:
                return b.load_elliptic_curve_public_numbers(numbers)
            except UnsupportedAlgorithm:
                continue
        raise UnsupportedAlgorithm(
            "This backend does not support this elliptic curve.",
            _Reasons.UNSUPPORTED_ELLIPTIC_CURVE
        )
    def elliptic_curve_exchange_algorithm_supported(self, algorithm, curve):
        return any(
            b.elliptic_curve_exchange_algorithm_supported(algorithm, curve)
            for b in self._filtered_backends(EllipticCurveBackend)
        )
    # --- key serialization: first-backend-only delegation -------------------
    def load_pem_private_key(self, data, password):
        for b in self._filtered_backends(PEMSerializationBackend):
            return b.load_pem_private_key(data, password)
        raise UnsupportedAlgorithm(
            "This backend does not support this key serialization.",
            _Reasons.UNSUPPORTED_SERIALIZATION
        )
    def load_pem_public_key(self, data):
        for b in self._filtered_backends(PEMSerializationBackend):
            return b.load_pem_public_key(data)
        raise UnsupportedAlgorithm(
            "This backend does not support this key serialization.",
            _Reasons.UNSUPPORTED_SERIALIZATION
        )
    def load_der_private_key(self, data, password):
        for b in self._filtered_backends(DERSerializationBackend):
            return b.load_der_private_key(data, password)
        raise UnsupportedAlgorithm(
            "This backend does not support this key serialization.",
            _Reasons.UNSUPPORTED_SERIALIZATION
        )
    def load_der_public_key(self, data):
        for b in self._filtered_backends(DERSerializationBackend):
            return b.load_der_public_key(data)
        raise UnsupportedAlgorithm(
            "This backend does not support this key serialization.",
            _Reasons.UNSUPPORTED_SERIALIZATION
        )
    # --- X.509: first-backend-only delegation -------------------------------
    def load_pem_x509_certificate(self, data):
        for b in self._filtered_backends(X509Backend):
            return b.load_pem_x509_certificate(data)
        raise UnsupportedAlgorithm(
            "This backend does not support X.509.",
            _Reasons.UNSUPPORTED_X509
        )
    def load_der_x509_certificate(self, data):
        for b in self._filtered_backends(X509Backend):
            return b.load_der_x509_certificate(data)
        raise UnsupportedAlgorithm(
            "This backend does not support X.509.",
            _Reasons.UNSUPPORTED_X509
        )
    def load_pem_x509_crl(self, data):
        for b in self._filtered_backends(X509Backend):
            return b.load_pem_x509_crl(data)
        raise UnsupportedAlgorithm(
            "This backend does not support X.509.",
            _Reasons.UNSUPPORTED_X509
        )
    def load_der_x509_crl(self, data):
        for b in self._filtered_backends(X509Backend):
            return b.load_der_x509_crl(data)
        raise UnsupportedAlgorithm(
            "This backend does not support X.509.",
            _Reasons.UNSUPPORTED_X509
        )
    def load_der_x509_csr(self, data):
        for b in self._filtered_backends(X509Backend):
            return b.load_der_x509_csr(data)
        raise UnsupportedAlgorithm(
            "This backend does not support X.509.",
            _Reasons.UNSUPPORTED_X509
        )
    def load_pem_x509_csr(self, data):
        for b in self._filtered_backends(X509Backend):
            return b.load_pem_x509_csr(data)
        raise UnsupportedAlgorithm(
            "This backend does not support X.509.",
            _Reasons.UNSUPPORTED_X509
        )
    def create_x509_csr(self, builder, private_key, algorithm):
        for b in self._filtered_backends(X509Backend):
            return b.create_x509_csr(builder, private_key, algorithm)
        raise UnsupportedAlgorithm(
            "This backend does not support X.509.",
            _Reasons.UNSUPPORTED_X509
        )
    def create_x509_certificate(self, builder, private_key, algorithm):
        for b in self._filtered_backends(X509Backend):
            return b.create_x509_certificate(builder, private_key, algorithm)
        raise UnsupportedAlgorithm(
            "This backend does not support X.509.",
            _Reasons.UNSUPPORTED_X509
        )
    def create_x509_crl(self, builder, private_key, algorithm):
        for b in self._filtered_backends(X509Backend):
            return b.create_x509_crl(builder, private_key, algorithm)
        raise UnsupportedAlgorithm(
            "This backend does not support X.509.",
            _Reasons.UNSUPPORTED_X509
        )
    def create_x509_revoked_certificate(self, builder):
        for b in self._filtered_backends(X509Backend):
            return b.create_x509_revoked_certificate(builder)
        raise UnsupportedAlgorithm(
            "This backend does not support X.509.",
            _Reasons.UNSUPPORTED_X509
        )
|
unknown
|
codeparrot/codeparrot-clean
| ||
/*
Copyright 2015 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package validation
import (
"fmt"
apiequality "k8s.io/apimachinery/pkg/api/equality"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
metav1validation "k8s.io/apimachinery/pkg/apis/meta/v1/validation"
"k8s.io/apimachinery/pkg/util/validation/field"
authorizationapi "k8s.io/kubernetes/pkg/apis/authorization"
)
// ValidateSubjectAccessReviewSpec validates a SubjectAccessReviewSpec and returns an
// ErrorList with any errors.
// ValidateSubjectAccessReviewSpec validates a SubjectAccessReviewSpec and returns an
// ErrorList with any errors.
func ValidateSubjectAccessReviewSpec(spec authorizationapi.SubjectAccessReviewSpec, fldPath *field.Path) field.ErrorList {
	allErrs := field.ErrorList{}
	// Exactly one of resourceAttributes / nonResourceAttributes must be set.
	if spec.ResourceAttributes != nil && spec.NonResourceAttributes != nil {
		allErrs = append(allErrs, field.Invalid(fldPath.Child("nonResourceAttributes"), spec.NonResourceAttributes, `cannot be specified in combination with resourceAttributes`))
	}
	if spec.ResourceAttributes == nil && spec.NonResourceAttributes == nil {
		// NOTE(review): the reported bad value is spec.NonResourceAttributes,
		// which is necessarily nil in this branch — confirm that is intended.
		allErrs = append(allErrs, field.Invalid(fldPath.Child("resourceAttributes"), spec.NonResourceAttributes, `exactly one of nonResourceAttributes or resourceAttributes must be specified`))
	}
	// A subject must be identified by a user and/or at least one group.
	if len(spec.User) == 0 && len(spec.Groups) == 0 {
		allErrs = append(allErrs, field.Invalid(fldPath.Child("user"), spec.User, `at least one of user or group must be specified`))
	}
	// NOTE(review): uses a hard-coded "spec.resourceAttributes" path rather
	// than fldPath.Child("resourceAttributes") — verify against callers.
	allErrs = append(allErrs, validateResourceAttributes(spec.ResourceAttributes, field.NewPath("spec.resourceAttributes"))...)
	return allErrs
}
// ValidateSelfSubjectAccessReviewSpec validates a SelfSubjectAccessReviewSpec and returns an
// ErrorList with any errors.
// ValidateSelfSubjectAccessReviewSpec validates a SelfSubjectAccessReviewSpec and returns an
// ErrorList with any errors.
func ValidateSelfSubjectAccessReviewSpec(spec authorizationapi.SelfSubjectAccessReviewSpec, fldPath *field.Path) field.ErrorList {
	allErrs := field.ErrorList{}
	// Exactly one of resourceAttributes / nonResourceAttributes must be set;
	// unlike the non-self variant, no user/group check is needed (the subject
	// is the requester).
	if spec.ResourceAttributes != nil && spec.NonResourceAttributes != nil {
		allErrs = append(allErrs, field.Invalid(fldPath.Child("nonResourceAttributes"), spec.NonResourceAttributes, `cannot be specified in combination with resourceAttributes`))
	}
	if spec.ResourceAttributes == nil && spec.NonResourceAttributes == nil {
		allErrs = append(allErrs, field.Invalid(fldPath.Child("resourceAttributes"), spec.NonResourceAttributes, `exactly one of nonResourceAttributes or resourceAttributes must be specified`))
	}
	// NOTE(review): hard-coded "spec.resourceAttributes" path, not derived
	// from fldPath — same pattern as ValidateSubjectAccessReviewSpec.
	allErrs = append(allErrs, validateResourceAttributes(spec.ResourceAttributes, field.NewPath("spec.resourceAttributes"))...)
	return allErrs
}
// ValidateSubjectAccessReview validates a SubjectAccessReview and returns an
// ErrorList with any errors.
// ValidateSubjectAccessReview validates a SubjectAccessReview and returns an
// ErrorList with any errors.
func ValidateSubjectAccessReview(sar *authorizationapi.SubjectAccessReview) field.ErrorList {
	allErrs := ValidateSubjectAccessReviewSpec(sar.Spec, field.NewPath("spec"))
	// Metadata must be empty; managedFields is server-populated, so clear it
	// on a shallow copy before comparing against the zero ObjectMeta.
	objectMetaShallowCopy := sar.ObjectMeta
	objectMetaShallowCopy.ManagedFields = nil
	if !apiequality.Semantic.DeepEqual(metav1.ObjectMeta{}, objectMetaShallowCopy) {
		allErrs = append(allErrs, field.Invalid(field.NewPath("metadata"), sar.ObjectMeta, `must be empty`))
	}
	return allErrs
}
// ValidateSelfSubjectAccessReview validates a SelfSubjectAccessReview and returns an
// ErrorList with any errors.
// ValidateSelfSubjectAccessReview validates a SelfSubjectAccessReview and returns an
// ErrorList with any errors.
func ValidateSelfSubjectAccessReview(sar *authorizationapi.SelfSubjectAccessReview) field.ErrorList {
	allErrs := ValidateSelfSubjectAccessReviewSpec(sar.Spec, field.NewPath("spec"))
	// Metadata must be empty; ignore server-populated managedFields by
	// clearing it on a shallow copy before the zero-value comparison.
	objectMetaShallowCopy := sar.ObjectMeta
	objectMetaShallowCopy.ManagedFields = nil
	if !apiequality.Semantic.DeepEqual(metav1.ObjectMeta{}, objectMetaShallowCopy) {
		allErrs = append(allErrs, field.Invalid(field.NewPath("metadata"), sar.ObjectMeta, `must be empty`))
	}
	return allErrs
}
// ValidateLocalSubjectAccessReview validates a LocalSubjectAccessReview and returns an
// ErrorList with any errors.
// ValidateLocalSubjectAccessReview validates a LocalSubjectAccessReview and returns an
// ErrorList with any errors.
func ValidateLocalSubjectAccessReview(sar *authorizationapi.LocalSubjectAccessReview) field.ErrorList {
	allErrs := ValidateSubjectAccessReviewSpec(sar.Spec, field.NewPath("spec"))
	// Metadata must be empty EXCEPT for namespace (a local review is scoped
	// to one namespace); clear namespace and managedFields on a shallow copy
	// before comparing against the zero ObjectMeta.
	objectMetaShallowCopy := sar.ObjectMeta
	objectMetaShallowCopy.Namespace = ""
	objectMetaShallowCopy.ManagedFields = nil
	if !apiequality.Semantic.DeepEqual(metav1.ObjectMeta{}, objectMetaShallowCopy) {
		allErrs = append(allErrs, field.Invalid(field.NewPath("metadata"), sar.ObjectMeta, `must be empty except for namespace`))
	}
	// The namespace in the resource attributes must agree with the review's
	// own namespace, and non-resource requests are not namespaced at all.
	if sar.Spec.ResourceAttributes != nil && sar.Spec.ResourceAttributes.Namespace != sar.Namespace {
		allErrs = append(allErrs, field.Invalid(field.NewPath("spec.resourceAttributes.namespace"), sar.Spec.ResourceAttributes.Namespace, `must match metadata.namespace`))
	}
	if sar.Spec.NonResourceAttributes != nil {
		allErrs = append(allErrs, field.Invalid(field.NewPath("spec.nonResourceAttributes"), sar.Spec.NonResourceAttributes, `disallowed on this kind of request`))
	}
	return allErrs
}
// validateResourceAttributes validates the optional field/label selector
// attributes of a ResourceAttributes block; nil attributes are valid.
func validateResourceAttributes(resourceAttributes *authorizationapi.ResourceAttributes, fldPath *field.Path) field.ErrorList {
	if resourceAttributes == nil {
		return nil
	}
	allErrs := field.ErrorList{}
	allErrs = append(allErrs, validateFieldSelectorAttributes(resourceAttributes.FieldSelector, fldPath.Child("fieldSelector"))...)
	allErrs = append(allErrs, validateLabelSelectorAttributes(resourceAttributes.LabelSelector, fldPath.Child("labelSelector"))...)
	return allErrs
}
// validateFieldSelectorAttributes requires exactly one of rawSelector or
// requirements to be set, and validates each requirement; nil is valid.
func validateFieldSelectorAttributes(selector *authorizationapi.FieldSelectorAttributes, fldPath *field.Path) field.ErrorList {
	if selector == nil {
		return nil
	}
	allErrs := field.ErrorList{}
	// rawSelector and requirements are mutually exclusive, and at least one
	// of them is required.
	if len(selector.RawSelector) > 0 && len(selector.Requirements) > 0 {
		allErrs = append(allErrs, field.Invalid(fldPath.Child("rawSelector"), selector.RawSelector, "may not specified at the same time as requirements"))
	}
	if len(selector.RawSelector) == 0 && len(selector.Requirements) == 0 {
		allErrs = append(allErrs, field.Required(fldPath.Child("requirements"), fmt.Sprintf("when %s is specified, requirements or rawSelector is required", fldPath)))
	}
	// AllowUnknownOperatorInRequirement enables *SubjectAccessReview requests from newer skewed clients which understand operators kube-apiserver does not know about to be authorized.
	validationOptions := metav1validation.FieldSelectorValidationOptions{AllowUnknownOperatorInRequirement: true}
	for i, requirement := range selector.Requirements {
		allErrs = append(allErrs, metav1validation.ValidateFieldSelectorRequirement(requirement, validationOptions, fldPath.Child("requirements").Index(i))...)
	}
	return allErrs
}
// validateLabelSelectorAttributes mirrors validateFieldSelectorAttributes for
// label selectors: exactly one of rawSelector or requirements, each
// requirement validated; nil is valid.
func validateLabelSelectorAttributes(selector *authorizationapi.LabelSelectorAttributes, fldPath *field.Path) field.ErrorList {
	if selector == nil {
		return nil
	}
	allErrs := field.ErrorList{}
	// rawSelector and requirements are mutually exclusive, and at least one
	// of them is required.
	if len(selector.RawSelector) > 0 && len(selector.Requirements) > 0 {
		allErrs = append(allErrs, field.Invalid(fldPath.Child("rawSelector"), selector.RawSelector, "may not specified at the same time as requirements"))
	}
	if len(selector.RawSelector) == 0 && len(selector.Requirements) == 0 {
		allErrs = append(allErrs, field.Required(fldPath.Child("requirements"), fmt.Sprintf("when %s is specified, requirements or rawSelector is required", fldPath)))
	}
	// AllowUnknownOperatorInRequirement enables *SubjectAccessReview requests from newer skewed clients which understand operators kube-apiserver does not know about to be authorized.
	validationOptions := metav1validation.LabelSelectorValidationOptions{AllowUnknownOperatorInRequirement: true}
	for i, requirement := range selector.Requirements {
		allErrs = append(allErrs, metav1validation.ValidateLabelSelectorRequirement(requirement, validationOptions, fldPath.Child("requirements").Index(i))...)
	}
	return allErrs
}
|
go
|
github
|
https://github.com/kubernetes/kubernetes
|
pkg/apis/authorization/validation/validation.go
|
# coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import multiprocessing
import signal
import sys
import thread
import threading
from multiprocessing.pool import ThreadPool
from pants.reporting.report import Report
class Work(object):
  """A batch of concurrent invocations of one callable.

  The callable is invoked once per tuple in args_tuples (each tuple is
  splatted as positional args); the number of tuples is the cardinality
  of the work.  If workunit_name is given, each invocation is tracked
  under a workunit of that name.
  """

  def __init__(self, func, args_tuples, workunit_name=None):
    self.func, self.args_tuples, self.workunit_name = (
        func, args_tuples, workunit_name)
class WorkerPool(object):
"""A pool of workers.
Workers are threads, and so are subject to GIL constraints. Submitting CPU-bound work
may not be effective. Use this class primarily for IO-bound work.
"""
def __init__(self, parent_workunit, run_tracker, num_workers):
self._run_tracker = run_tracker
# All workers accrue work to the same root.
self._pool = ThreadPool(processes=num_workers,
initializer=self._run_tracker.register_thread,
initargs=(parent_workunit, ))
# We mustn't shutdown when there are pending workchains, as they may need to submit work
# in the future, and the pool doesn't know about this yet.
self._pending_workchains = 0
self._pending_workchains_cond = threading.Condition() # Protects self._pending_workchains.
self._shutdown_hooks = []
self.num_workers = num_workers
def add_shutdown_hook(self, hook):
self._shutdown_hooks.append(hook)
def submit_async_work(self, work, workunit_parent=None, on_success=None, on_failure=None):
"""Submit work to be executed in the background.
:param work: The work to execute.
:param workunit_parent: If specified, work is accounted for under this workunit.
:param on_success: If specified, a callable taking a single argument, which will be a list
of return values of each invocation, in order. Called only if all work succeeded.
:param on_failure: If specified, a callable taking a single argument, which is an exception
thrown in the work.
:return: `multiprocessing.pool.MapResult`
Don't do work in on_success: not only will it block the result handling thread, but
that thread is not a worker and doesn't have a logging context etc. Use it just to
submit further work to the pool.
"""
if work is None or len(work.args_tuples) == 0: # map_async hangs on 0-length iterables.
if on_success:
on_success([])
else:
def do_work(*args):
self._do_work(work.func, *args, workunit_name=work.workunit_name,
workunit_parent=workunit_parent, on_failure=on_failure)
return self._pool.map_async(do_work, work.args_tuples, chunksize=1, callback=on_success)
def submit_async_work_chain(self, work_chain, workunit_parent, done_hook=None):
"""Submit work to be executed in the background.
- work_chain: An iterable of Work instances. Will be invoked serially. Each instance may
have a different cardinality. There is no output-input chaining: the argument
tuples must already be present in each work instance. If any work throws an
exception no subsequent work in the chain will be attempted.
- workunit_parent: Work is accounted for under this workunit.
- done_hook: If not None, invoked with no args after all work is done, or on error.
"""
def done():
if done_hook:
done_hook()
with self._pending_workchains_cond:
self._pending_workchains -= 1
self._pending_workchains_cond.notify()
def error(e):
done()
self._run_tracker.log(Report.ERROR, '{}'.format(e))
# We filter out Nones defensively. There shouldn't be any, but if a bug causes one,
# Pants might hang indefinitely without this filtering.
work_iter = iter(filter(None, work_chain))
def submit_next():
try:
self.submit_async_work(work_iter.next(), workunit_parent=workunit_parent,
on_success=lambda x: submit_next(), on_failure=error)
except StopIteration:
done() # The success case.
with self._pending_workchains_cond:
self._pending_workchains += 1
try:
submit_next()
except Exception as e: # Handles errors in the submission code.
done()
self._run_tracker.log(Report.ERROR, '{}'.format(e))
raise
def submit_work_and_wait(self, work, workunit_parent=None):
"""Submit work to be executed on this pool, but wait for it to complete.
- work: The work to execute.
- workunit_parent: If specified, work is accounted for under this workunit.
Returns a list of return values of each invocation, in order. Throws if any invocation does.
"""
if work is None or len(work.args_tuples) == 0: # map hangs on 0-length iterables.
return []
else:
def do_work(*args):
return self._do_work(work.func, *args, workunit_name=work.workunit_name,
workunit_parent=workunit_parent)
# We need to specify a timeout explicitly, because otherwise python ignores SIGINT when waiting
# on a condition variable, so we won't be able to ctrl-c out.
return self._pool.map_async(do_work, work.args_tuples, chunksize=1).get(timeout=1000000000)
def _do_work(self, func, args_tuple, workunit_name, workunit_parent, on_failure=None):
try:
if workunit_name:
with self._run_tracker.new_workunit_under_parent(name=workunit_name, parent=workunit_parent):
return func(*args_tuple)
else:
return func(*args_tuple)
except KeyboardInterrupt:
# If a worker thread intercepts a KeyboardInterrupt, we want to propagate it to the main
# thread.
thread.interrupt_main()
raise
except Exception as e:
if on_failure:
# Note that here the work's workunit is closed. So, e.g., it's OK to use on_failure()
# to close an ancestor workunit.
on_failure(e)
raise
def shutdown(self):
    """Drain outstanding work chains, close the pool, then run shutdown hooks.

    Blocks until the pending-workchain count drops to zero before shutting
    the underlying pool down.
    """
    with self._pending_workchains_cond:
        # Sleep until the last in-flight work chain signals completion.
        while self._pending_workchains > 0:
            self._pending_workchains_cond.wait()
    self._pool.close()
    self._pool.join()
    for callback in self._shutdown_hooks:
        callback()
def abort(self):
    """Forcibly stop the pool, terminating worker processes immediately.

    Unlike shutdown(), this does not wait for outstanding work to finish.
    """
    self._pool.terminate()
class SubprocPool(object):
    """Singleton manager for the process-wide multiprocessing.Pool.

    Subprocesses (including multiprocessing.Pool workers) can inherit locks
    held by poorly written libraries (e.g. zlib) if other threads in the
    parent happen to hold them at the moment a worker is fork()'ed, so
    worker processes must be created BEFORE any threads start. Pants does
    this by initializing the pool early, when creating the RunTracker.

    Tests create RunTrackers repeatedly as short-lived Contexts come and go;
    recreating a subprocess pool every time is expensive and eventually makes
    os.fork fail. The pool therefore lives in this singleton and new
    RunTrackers re-use it.
    """
    _pool = None
    _lock = threading.Lock()
    _num_processes = multiprocessing.cpu_count()

    @staticmethod
    def worker_init():
        # Exit quietly on SIGINT so we do not get one KeyboardInterrupt
        # stacktrace per worker process.
        signal.signal(signal.SIGINT, lambda *args: sys.exit())

    @classmethod
    def set_num_processes(cls, num_processes):
        cls._num_processes = num_processes

    @classmethod
    def foreground(cls):
        with cls._lock:
            if cls._pool is None:
                cls._pool = multiprocessing.Pool(
                    processes=cls._num_processes,
                    initializer=SubprocPool.worker_init)
            return cls._pool

    @classmethod
    def shutdown(cls, force):
        with cls._lock:
            doomed = cls._pool
            cls._pool = None
            if doomed:
                if force:
                    doomed.terminate()
                else:
                    doomed.close()
                doomed.join()
|
unknown
|
codeparrot/codeparrot-clean
| ||
/*
* Copyright 2014-2021 JetBrains s.r.o and contributors. Use of this source code is governed by the Apache 2.0 license.
*/
package io.ktor.server.http.content
import io.ktor.http.content.*
import io.ktor.server.application.*
/**
 * Non-JVM `actual` implementation of the default-content transformation hook.
 *
 * Always returns `null`: no platform-specific transformation is applied to
 * outgoing content on non-JVM targets, so the common pipeline handles it.
 */
internal actual fun platformTransformDefaultContent(
    call: ApplicationCall,
    value: Any
): OutgoingContent? = null
|
kotlin
|
github
|
https://github.com/ktorio/ktor
|
ktor-server/ktor-server-core/nonJvm/src/io/ktor/server/http/content/DefaultTransform.nonJvm.kt
|
# Copyright 2014 eNovance.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import aioeventlet
import trollius
from oslo_messaging._executors import impl_eventlet
class AsyncioEventletExecutor(impl_eventlet.EventletExecutor):
    """Message executor integrating eventlet greenthreads with trollius.

    Built on the eventlet executor and therefore compatible with it. On top
    of eventlet's implicit asynchronous programming, it also supports
    explicit asynchronous programming with trollius coroutines.

    The thread executing this executor (usually the main thread) must be
    running an aioeventlet event loop. Example setup for such a loop::

        import aioeventlet
        import trollius

        policy = aioeventlet.EventLoopPolicy()
        trollius.set_event_loop_policy(policy)

        def run_loop(loop):
            loop.run_forever()
            loop.close()

        # Get the aioeventlet event loop (create it if needed), run it in a
        # new greenthread and close it when it is done.
        loop = trollius.get_event_loop()
        eventlet.spawn(run_loop, loop)
    """

    def __init__(self, conf, listener, dispatcher):
        super(AsyncioEventletExecutor, self).__init__(conf, listener, dispatcher)
        # Captured in start(); the aioeventlet loop used to resolve coroutines.
        self._loop = None

    def start(self):
        # yield_future() below requires an aioeventlet loop, so refuse to
        # start with any other kind of event loop.
        event_loop = trollius.get_event_loop()
        if not isinstance(event_loop, aioeventlet.EventLoop):
            raise RuntimeError("need an aioeventlet event loop")
        self._loop = event_loop
        super(AsyncioEventletExecutor, self).start()

    def _coroutine_wrapper(self, func, *args, **kw):
        # When func produced a coroutine, block this greenthread until the
        # coroutine resolves and return its result instead.
        outcome = func(*args, **kw)
        if trollius.iscoroutine(outcome):
            return aioeventlet.yield_future(outcome, loop=self._loop)
        return outcome

    _executor_callback = _coroutine_wrapper
|
unknown
|
codeparrot/codeparrot-clean
| ||
import os
import sys
import shutil
import os.path
import uuid
from future.utils import iteritems
from pandaharvester.harvestercore import core_utils
from .base_stager import BaseStager
from pandaharvester.harvestermover import mover_utils
from rucio.client import Client as RucioClient
from rucio.common.exception import RuleNotFound
# logger
baseLogger = core_utils.setup_logger('rucio_stager')
# plugin for stage-out with Rucio
class RucioStager(BaseStager):
    """Stage-out plugin transferring output files with Rucio replication rules.

    Files are copied into the local source-RSE path, registered in a hidden
    transfer dataset, and replicated to a per-file-type destination RSE via a
    Rucio rule. check_status() then polls the rule state.
    """

    def __init__(self, **kwarg):
        BaseStager.__init__(self, **kwarg)
        # Scope used for temporary (zipped) files and transfer datasets.
        if not hasattr(self, 'scopeForTmp'):
            self.scopeForTmp = 'panda'

    def check_status(self, jobspec):
        """Check the Rucio rule state for each not-yet-done output file.

        :param jobspec: job specification whose outFiles are checked.
        :return: (True, '') when all rules could be checked, otherwise
                 (False, <first error message>).
        """
        # make logger
        tmpLog = self.make_logger(baseLogger, 'PandaID={0}'.format(jobspec.PandaID),
                                  method_name='check_status')
        tmpLog.debug('start')
        # loop over all files
        allChecked = True
        oneErrMsg = None
        transferStatus = dict()
        for fileSpec in jobspec.outFiles:
            # skip files already done
            if fileSpec.status in ['finished', 'failed']:
                continue
            # get transfer ID
            transferID = fileSpec.fileAttributes['transferID']
            if transferID not in transferStatus:
                # look the rule state up only once per transfer ID
                try:
                    rucioAPI = RucioClient()
                    ruleInfo = rucioAPI.get_replication_rule(transferID)
                    tmpTransferStatus = ruleInfo['state']
                    tmpLog.debug('got state={0} for rule={1}'.format(tmpTransferStatus, transferID))
                except RuleNotFound:
                    tmpLog.error('rule {0} not found'.format(transferID))
                    tmpTransferStatus = 'FAILED'
                except Exception:
                    # FIX: was a bare "except:"; narrowed so SystemExit and
                    # KeyboardInterrupt are no longer swallowed.
                    err_type, err_value = sys.exc_info()[:2]
                    errMsg = "{0} {1}".format(err_type.__name__, err_value)
                    tmpLog.error('failed to get status for rule={0} with {1}'.format(transferID, errMsg))
                    # set dummy not to lookup again
                    tmpTransferStatus = None
                    allChecked = False
                    # keep one message
                    if oneErrMsg is None:
                        oneErrMsg = errMsg
                # NOTE(review): an unconditional `tmpTransferStatus = 'OK'` used
                # to follow here; it overwrote the FAILED/None states set above
                # and made every transfer look finished, so it was removed.
                transferStatus[transferID] = tmpTransferStatus
            # final status
            if transferStatus[transferID] == 'OK':
                fileSpec.status = 'finished'
            elif transferStatus[transferID] in ['FAILED', 'CANCELED']:
                fileSpec.status = 'failed'
        if allChecked:
            return True, ''
        return False, oneErrMsg

    def trigger_stage_out(self, jobspec):
        """Copy output files to the source RSE path and create transfer rules.

        :param jobspec: job specification whose outFiles are staged out.
        :return: (True, '') on success, (False, <error message>) on failure.
        """
        # make logger
        tmpLog = self.make_logger(baseLogger, 'PandaID={0}'.format(jobspec.PandaID),
                                  method_name='trigger_stage_out')
        tmpLog.debug('start')
        # loop over all files
        files = dict()
        transferIDs = dict()
        transferDatasets = dict()
        fileAttrs = jobspec.get_output_file_attributes()
        for fileSpec in jobspec.outFiles:
            # skip zipped files
            if fileSpec.zipFileID is not None:
                continue
            # skip if already processed
            if 'transferDataset' in fileSpec.fileAttributes:
                if fileSpec.fileType not in transferDatasets:
                    transferDatasets[fileSpec.fileType] = fileSpec.fileAttributes['transferDataset']
                if fileSpec.fileType not in transferIDs:
                    transferIDs[fileSpec.fileType] = fileSpec.fileAttributes['transferID']
                continue
            # set OS ID
            # FIX: was `fileSpec.fileType == ['es_output', 'zip_output']`, which
            # compares a string to a list and is always False; membership test
            # was clearly intended.
            if fileSpec.fileType in ['es_output', 'zip_output']:
                fileSpec.objstoreID = self.objStoreID_ES
            # make path where file is copied for transfer
            if fileSpec.fileType != 'zip_output':
                scope = fileAttrs[fileSpec.lfn]['scope']
                datasetName = fileAttrs[fileSpec.lfn]['dataset']  # (currently unused)
            else:
                # use panda scope for zipped files
                scope = self.scopeForTmp
                datasetName = 'dummy'  # (currently unused)
            srcPath = fileSpec.path
            dstPath = mover_utils.construct_file_path(self.srcBasePath, scope, fileSpec.lfn)
            # remove any stale copy before transferring
            if os.path.exists(dstPath):
                os.remove(dstPath)
            # copy
            tmpLog.debug('copy src={srcPath} dst={dstPath}'.format(srcPath=srcPath, dstPath=dstPath))
            dstDir = os.path.dirname(dstPath)
            if not os.path.exists(dstDir):
                os.makedirs(dstDir)
            shutil.copyfile(srcPath, dstPath)
            # collect files per file type
            tmpFile = dict()
            tmpFile['scope'] = scope
            tmpFile['name'] = fileSpec.lfn
            tmpFile['bytes'] = fileSpec.fsize
            files.setdefault(fileSpec.fileType, []).append(tmpFile)
        # loop over all file types to be registered to rucio
        rucioAPI = RucioClient()
        for fileType, fileList in iteritems(files):
            # set destination RSE
            if fileType in ['es_output', 'zip_output']:
                dstRSE = self.dstRSE_ES
            elif fileType == 'output':
                dstRSE = self.dstRSE_Out
            elif fileType == 'log':
                dstRSE = self.dstRSE_Log
            else:
                errMsg = 'unsupported file type {0}'.format(fileType)
                tmpLog.error(errMsg)
                return (False, errMsg)
            # skip if destination is None
            if dstRSE is None:
                continue
            # make datasets if missing
            if fileType not in transferDatasets:
                try:
                    tmpScope = self.scopeForTmp
                    tmpDS = 'panda.harvester_stage_out.{0}'.format(str(uuid.uuid4()))
                    rucioAPI.add_dataset(tmpScope, tmpDS,
                                         meta={'hidden': True},
                                         lifetime=30 * 24 * 60 * 60,
                                         files=fileList,
                                         rse=self.srcRSE
                                         )
                    transferDatasets[fileType] = tmpDS
                    # add rule
                    tmpDID = dict()
                    tmpDID['scope'] = tmpScope
                    tmpDID['name'] = tmpDS
                    tmpRet = rucioAPI.add_replication_rule([tmpDID], 1, dstRSE,
                                                           lifetime=30 * 24 * 60 * 60
                                                           )
                    tmpTransferIDs = tmpRet[0]
                    transferIDs[fileType] = tmpTransferIDs
                    tmpLog.debug('register dataset {0} with rule {1}'.format(tmpDS, str(tmpTransferIDs)))
                except Exception:
                    # FIX: was a bare "except:"
                    errMsg = core_utils.dump_error_message(tmpLog)
                    return (False, errMsg)
            else:
                # add files to existing dataset
                try:
                    tmpScope = self.scopeForTmp
                    tmpDS = transferDatasets[fileType]
                    rucioAPI.add_files_to_dataset(tmpScope, tmpDS, fileList, self.srcRSE)
                    tmpLog.debug('added files to {0}'.format(tmpDS))
                except Exception:
                    # FIX: was a bare "except:"
                    errMsg = core_utils.dump_error_message(tmpLog)
                    return (False, errMsg)
        # record transfer datasets and rules on the file specs
        for fileSpec in jobspec.outFiles:
            # skip zipped files
            if fileSpec.zipFileID is not None:
                continue
            # skip already done
            if fileSpec.status in ['finished', 'failed']:
                continue
            # skip if already processed
            if 'transferDataset' in fileSpec.fileAttributes:
                continue
            # no destination configured for this file type
            if fileSpec.fileType not in transferDatasets:
                fileSpec.status = 'finished'
                continue
            # set dataset
            fileSpec.fileAttributes['transferDataset'] = transferDatasets[fileSpec.fileType]
            # set rule
            fileSpec.fileAttributes['transferID'] = transferIDs[fileSpec.fileType]
            # force DB update of the mutable attribute dict
            fileSpec.force_update('fileAttributes')
        # return
        tmpLog.debug('done')
        return (True, '')

    def zip_output(self, jobspec):
        """Zip the job's output files using the common helper."""
        # make logger
        tmpLog = self.make_logger(baseLogger, 'PandaID={0}'.format(jobspec.PandaID),
                                  method_name='zip_output')
        return self.simple_zip_output(jobspec, tmpLog)
|
unknown
|
codeparrot/codeparrot-clean
| ||
"""
Reference mappings for isogeometric analysis.
"""
import numpy as nm
from sfepy.discrete.common.mappings import Mapping
from sfepy.discrete.common.extmods.mappings import CMapping
import sfepy.discrete.iga.extmods.igac as iga
class IGMapping(Mapping):
    """
    Reference mapping for isogeometric analysis based on Bezier extraction.

    Parameters
    ----------
    domain : IGDomain instance
        The mapping domain.
    cells : array
        The mapping region cells. (All domain cells required.)
    nurbs : NurbsPatch instance, optional
        If given, the `nurbs` is used instead of `domain.nurbs`. The `nurbs`
        has to be obtained by degree elevation of `domain.nurbs`.
    """

    def __init__(self, domain, cells, nurbs=None):
        self.domain = domain
        self.cells = cells
        self.nurbs = nurbs if nurbs is not None else domain.nurbs
        dim = self.domain.shape.dim
        # Per-cell shapes with a free quadrature-point axis (-1).
        self.v_shape = (len(cells), -1, dim)
        self.s_shape = (len(cells), -1, 1)

    def get_geometry(self):
        """
        Return reference element geometry as a GeometryElement instance.
        """
        return self.domain.gel

    def get_physical_qps(self, qp_coors):
        """
        Get physical quadrature points corresponding to given reference
        Bezier element quadrature points.

        Returns
        -------
        qps : array
            The physical quadrature points ordered element by element,
            i.e. with shape (n_el, n_qp, dim).
        """
        nurbs = self.nurbs
        # A unit "variable" makes eval_variable_in_qp return the qp locations.
        unit = nm.ones((nurbs.weights.shape[0], 1), dtype=nm.float64)
        phys_qps = iga.eval_variable_in_qp(unit, qp_coors, nurbs.cps,
                                           nurbs.weights, nurbs.degrees,
                                           nurbs.cs, nurbs.conn,
                                           self.cells)[0]
        return phys_qps.reshape(self.v_shape)

    def get_mapping(self, qp_coors, weights):
        """
        Get the mapping for given quadrature points and weights.

        Returns
        -------
        cmap : CMapping instance
            The reference mapping.

        Notes
        -----
        Does not set total volume of the C mapping structure!
        """
        nurbs = self.nurbs
        bfs, bfgs, dets = iga.eval_mapping_data_in_qp(qp_coors, nurbs.cps,
                                                      nurbs.weights,
                                                      nurbs.degrees, nurbs.cs,
                                                      nurbs.conn, self.cells)
        # Weight the |J| values by the quadrature point weights.
        jacobians = nm.abs(dets) * weights[None, :, None, None]
        # Cell volumes are the sums over the quadrature-point axis.
        volumes = jacobians.sum(axis=1)[..., None]

        cmap = CMapping(self.v_shape[0], qp_coors.shape[0], self.v_shape[2],
                        bfs.shape[3], mode='volume', flag=1)
        cmap.bf[:] = bfs
        cmap.bfg[:] = bfgs
        cmap.det[:] = jacobians
        cmap.volume[:] = volumes

        return cmap
|
unknown
|
codeparrot/codeparrot-clean
| ||
#pragma once
#include <ATen/core/Tensor.h>
namespace at::native {

// Out variant: prepacks `weight` into the byte embedding-bag format, writing
// into the caller-provided `output` and returning it. `rowwise_min_max_opt`
// optionally supplies precomputed per-row min/max values — presumably used
// for the per-row quantization parameters; confirm against the .cpp kernel.
Tensor& qembeddingbag_byte_prepack_out(
    Tensor& output,
    const Tensor& weight,
    const std::optional<Tensor>& rowwise_min_max_opt = std::nullopt);

// Allocating variant: returns a freshly allocated prepacked tensor.
Tensor qembeddingbag_byte_prepack(const Tensor& weight);

// Meta variant: computes output metadata (shape/dtype) without touching data.
Tensor qembeddingbag_byte_prepack_meta(const Tensor& weight);

} // namespace at::native
|
c
|
github
|
https://github.com/pytorch/pytorch
|
aten/src/ATen/native/quantized/cpu/qembeddingbag_prepack.h
|
#############################################################################
# Copyright 2016 Mass KonFuzion Games
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#############################################################################
# Import game objects (perhaps this can go into a "game manager" of some sort?)
import pygame
import sys
import os
from display_msg import DisplayMessage
from display_msg_manager import DisplayMessageManager
from message_queue import MessageQueue
import game_state_base
import game_state_main_menu
import menu_item_base
import menu_item_spinner
import menu_item_label
import menu_form
# NOTE: Looks like we have to use full import names, because we have circular imports (i.e. intro state imports playing state; but playing state imports intro state. Without using full import names, we run into namespace collisions and weird stuff)
class GameStateImpl(game_state_base.GameStateBase):
    """Pause-menu game state, implemented as a singleton.

    Direct construction is disabled (``__new__`` returns None); obtain the
    single shared instance via :meth:`Instance`.
    """
    __instance = None

    def __new__(cls):
        """Override the instantiation of this class. We're creating a singleton yeah"""
        # Returning None makes `GameStateImpl()` yield None on purpose: the
        # only supported way to get an instance is Instance() below, which
        # bypasses this __new__ via super().
        return None

    def __init__(self):
        """ This shouldn't run.. We call __init__ on the __instance member"""
        pass

    def Init(self, engineRef, takeWith=None):
        """Initialize the pause state: grab engine refs, build surfaces, UI and event queue.

        :param engineRef: the running engine; surfaces, mixer and sizes are borrowed from it.
        :param takeWith: unused here; part of the common state interface.
        """
        # Snag some vital object refs from the engine object
        self.game_size = engineRef.game_size
        self.screen_size = engineRef.screen_size
        self.cell_size = engineRef.cell_size
        self.surface_bg = engineRef.surface_bg  # Possibly won't use this surface for the pause menu. We want to overlay our own surface on top of this one
        self.game_viewport = engineRef.game_viewport  # Possibly won't use this surface for the pause menu. We want to overlay our own surface on top of this one
        self.bg_col = engineRef.bg_col
        self.mixer = engineRef.mixer
        self.surface_overlay = pygame.Surface((640, 480))
        #self.surface_overlay.set_colorkey((64,64,64)) # NOTE: To get a transparency effect, this colorkey value has to be the same as the fill color used during rendering this surface
        self.surface_overlay.set_colorkey((0,0,0))  # NOTE: To get a transparency effect, this colorkey value has to be the same as the fill color used during rendering this surface
        # Center the overlay on the background surface.
        self.blit_center = ( self.surface_bg.get_size()[0] / 2 - self.surface_overlay.get_size()[0] / 2, self.surface_bg.get_size()[1] / 2 - self.surface_overlay.get_size()[1] / 2 )
        self._eventQueue = MessageQueue()  # Event queue, e.g. user key/button presses, system events
        self._eventQueue.Initialize(16)
        # TODO remove self.mm and self.displayMsgScore (which doesn't hold the score.. That's an artifact of copy/paste rushing when trying to submit to low-rez-jam)
        self.mm = DisplayMessageManager()
        self.displayMsgScore = DisplayMessage()
        self.displayMsgScore.create(txtStr="Pause", position=[66, 5], color=(192,192,192))
        # TODO Allow customization of text colors in the UI
        self.ui = menu_form.UIForm(engineRef=engineRef)  # the LHS engineRef is the function param; the RHS engineRef is the object we're passing in
        self.ui._font = menu_form.UIForm.createFontObject( os.path.normpath(engineRef.exepath + '/../asset/font/ARCADE.TTF'), 32 )  # TODO maybe load one font obj at a higher-level scope than any menu or game state; then pass it in, instead of constructing one at each state change
        self.ui.addMenuItem( menu_item_label.MenuItemLabel([200, 200], self.ui._font, 'Paused'), kbSelectIdx=None )
        self.ui.addMenuItem( menu_item_label.MenuItemLabel([200, 250], self.ui._font, 'Resume Game'), kbSelectIdx=0, action="resumeGame" )
        self.ui.addMenuItem( menu_item_label.MenuItemLabel([200, 300], self.ui._font, 'Back to Main Menu'), kbSelectIdx=1, action="exitUI" )
        self.ui.synchronize(0, 1)
        # Register Event Listeners
        self._eventQueue.RegisterListener('self', self, 'UIControl')  # Register "myself" as an event listener
        self._eventQueue.RegisterListener('engine', engineRef, 'Application')  # Register the game engine to listen to messages with topic, "Application"

    def Cleanup(self):
        # NOTE this class is a port from a C++ class. Because Python is garbage-collected, Cleanup() is probably not necessary here. But it's included for completeness
        pass

    @staticmethod
    def Instance():
        """Return the instance reference. Create it if it doesn't exist

        This method is a static method because it does not use any object
        """
        if GameStateImpl.__instance is None:
            # super().__new__ bypasses the None-returning __new__ above.
            GameStateImpl.__instance = super(GameStateImpl, GameStateImpl).__new__(GameStateImpl)
            GameStateImpl.__instance.__init__()
            GameStateImpl.__instance.SetName("Pause State")
        return GameStateImpl.__instance

    # TODO Consider changing "pause" to "PushState" or something; doesn't HAVE to be 'pause'
    def Pause(self):
        pass

    # TODO Consider changing "resume" to "PopState" or something; doesn't HAVE to be 'resume'
    def Resume(self):
        pass

    def EnqueueApplicationQuitMessage(self):
        """Enqueue a message for the application to shut itself down
        """
        self._eventQueue.Enqueue( { 'topic': 'Application',
                                    'payload': { 'action': 'call_function'
                                               , 'function_name': 'setRunningFlagToFalse'
                                               , 'params' : ''
                                               }
                                  } )

    def EnqueueUICommandMessage(self, action):
        """Enqueue a UI command message for handling

        # NOTE: Every message must have an 'action' key/val. The message parser will look for the 'action' in order to know what to do
        """
        self._eventQueue.Enqueue( { 'topic': 'UIControl',
                                    'payload': { 'action': 'call_function'
                                               , 'function_name': 'DoUICommand'
                                               , 'params' : 'uiCommand="{}"'.format(action)
                                               }
                                  } )  # here, the call keyword says that the message payload is an instruction to call a function

    def DoUICommand(self, engineRef, argsDict):
        """Dispatch a UI command ('resumeGame' or 'exitUI').

        NOTE: This function assumes argsDict has one key only: uiCommand. The value of that key dictates what to do
        """
        # TODO process the args and figure out what to do
        try:
            if argsDict['uiCommand'] == 'resumeGame':
                engineRef.getState().Cleanup()
                engineRef.popState()  # NOTE: PopState() returns the state to the program; however, we don't assign it, because we don't care, because we're not going to use it for anything
            elif argsDict['uiCommand'] == 'exitUI':
                # Music # TODO along with the request to change states, make a request to start the music. This redundant, bifurcated logic is crap
                self.mixer.addMusicFileToMap('Theme', os.path.normpath(engineRef.exepath + '/../asset/audio/falldown_theme.ogg'))
                self.mixer.loadMusicFile('Theme')
                self.mixer.playMusic()  # Play loaded music file
                # TODO add better management of loaded music files; as usual, we're hack'n'slashing
                engineRef.changeState( game_state_main_menu.GameStateImpl.Instance() )
        except KeyError as e:
            # if there is no uiCommand defined, don't do anything
            # (could have also tested if argsDict['uiCommand'] exists, without exception handling, but I like the way the code looks here)
            pass

    def ProcessEvents(self, engineRef):
        """Translate pygame events into queued quit/UI-command messages."""
        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                # Create a quit request message to the application, to shut itself down. This allows the program to do any necessary cleanup before exiting
                self.EnqueueApplicationQuitMessage()
            if event.type == pygame.KEYDOWN:
                # TODO Perhaps add to the UI a way to determine what the "action activator buttons" should be. e.g., some menus should respond to ESC key, others only ENTER, SPACE, etc. The pause menu should respond to "p"
                action = self.ui.processKeyboardEvent(event, engineRef)
                if action:
                    self.EnqueueUICommandMessage(action)
            elif event.type == pygame.MOUSEBUTTONDOWN:
                action = self.ui.processMouseEvent(event, engineRef)
                if action:
                    self.EnqueueUICommandMessage(action)
        # NOTE: game_state_pause does not handle SONG_END_EVENT because the playing state is already configured to play music on an infinite loop. If we change to manual control of song playback in the game/pause states, then we'll need to handle SONG_END_EVENT here

    def ProcessCommands(self, engineRef):
        """Drain the event queue, dispatching each message to its registered listeners."""
        msg = self._eventQueue.Dequeue()
        while msg:
            #print "DEBUG Dequeued message: {}".format(msg)
            topic = msg['topic']
            for listener_obj_dict in self._eventQueue.RegisteredListeners(topic):
                #print "DEBUG Registered Listener {} processing message {}".format(listener_obj_dict['name'], msg['payload'])
                # Evaluate the 'action' key to know what to do. The action dictates what other information is required to be in the message
                if msg['payload']['action'] == 'call_function':
                    # The registered listener had better have the function call available heh... otherwise, kaboom
                    objRef = listener_obj_dict['ref']
                    fn_ptr = getattr(objRef, msg['payload']['function_name'])
                    # NOTE(review): eval() on message params would be dangerous
                    # if a payload could come from outside this process; here
                    # all payloads are built internally by the Enqueue* methods.
                    argsDict = eval("dict({})".format(msg['payload']['params']))
                    if objRef is engineRef:
                        fn_ptr(argsDict)  # If the object is the engine, we don't need to pass the engineRef to it. i.e., the obj will already have its own self reference. TODO make this logic standard across all game states?
                        # NOTE: Slight cheat here: because this menu is its own event listener, and it's the only one, we pass in engineRef (the application object reference), instead of passing self (as we do in other game states). fn_ptr already points to self.DoUICommand. Admittedly, this is probably over-complicated, but it works..
                    else:
                        fn_ptr(engineRef, argsDict)
            msg = self._eventQueue.Dequeue()

    def Update(self, engineRef, dt_s, cell_size):
        # No updates needed here
        pass

    def PreRenderScene(self, engineRef):
        pass

    def RenderScene(self, engineRef):
        """Render the pause UI onto the (transparent) overlay surface."""
        #self.surface_bg.fill((0,0,0)) # I think we want to NOT fill, so we can overlay.
        #self.game_viewport.fill(self.bg_col)
        self.surface_overlay.fill((0,0,0))  # This is one way to do transparency...: above, the colorkey for this surface is (0,0,0). That causes Pygame to NOT blit any pixels colored (0,0,0), thus causing anything NOT colored (0,0,0) on this surface to not be rendered, making this surface look transparent.
        # Render the UI
        self.ui.render(self.surface_overlay)

    def PostRenderScene(self, engineRef):
        """Composite the overlay onto the viewport/background and flip the display."""
        #self.displayGameStats() # TODO remove this jank. There are no game stats to display in the pause state (or, otherwise, display the actual game stats: # of tries, level, etc)
        self.game_viewport.blit(self.surface_overlay, self.blit_center)
        self.surface_bg.blit(self.game_viewport, (0, 0))
        pygame.display.flip()

    def displayGameStats(self):
        """Blit the 'Pause' caption onto the background surface."""
        # Janky hardcoding here... TODO fix the jankiness
        self.displayMsgScore.changeText("Pause")
        textSurfaceScore = self.displayMsgScore.getTextSurface(self.mm._font)
        self.surface_bg.blit(textSurfaceScore, (self.displayMsgScore._position[0] * self.cell_size[0], self.displayMsgScore._position[1] * self.cell_size[1] ))
|
unknown
|
codeparrot/codeparrot-clean
| ||
# postgresql/pypostgresql.py
# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""Support for the PostgreSQL database via py-postgresql.
Connecting
----------
URLs are of the form ``postgresql+pypostgresql://user:password@host:port/dbname[?key=value&key=value...]``.
"""
from sqlalchemy import util
from sqlalchemy import types as sqltypes
from sqlalchemy.dialects.postgresql.base import PGDialect, PGExecutionContext
from sqlalchemy import processors
class PGNumeric(sqltypes.Numeric):
    """Numeric type for py-postgresql: binds as string, optionally coerces results."""

    def bind_processor(self, dialect):
        # py-postgresql expects numeric bind parameters as strings.
        return processors.to_str

    def result_processor(self, dialect, coltype):
        # Decimal results come back natively; coerce to float only when the
        # column was configured with asdecimal=False.
        return None if self.asdecimal else processors.to_float
class PGExecutionContext_pypostgresql(PGExecutionContext):
    # No py-postgresql-specific execution behavior; exists so the dialect can
    # declare its own execution context class (see execution_ctx_cls below).
    pass
class PGDialect_pypostgresql(PGDialect):
    """PostgreSQL dialect backed by the py-postgresql DBAPI driver."""

    driver = 'pypostgresql'

    supports_unicode_statements = True
    supports_unicode_binds = True
    description_encoding = None
    default_paramstyle = 'pyformat'

    # requires trunk version to support sane rowcounts
    # TODO: use dbapi version information to set this flag appropriately
    supports_sane_rowcount = True
    supports_sane_multi_rowcount = False

    execution_ctx_cls = PGExecutionContext_pypostgresql
    colspecs = util.update_copy(
        PGDialect.colspecs,
        {
            sqltypes.Numeric : PGNumeric,
            sqltypes.Float: sqltypes.Float,  # prevents PGNumeric from being used
        }
    )

    @classmethod
    def dbapi(cls):
        # Imported lazily so the dialect module loads without the driver.
        from postgresql.driver import dbapi20
        return dbapi20

    def create_connect_args(self, url):
        opts = url.translate_connect_args(username='user')
        # py-postgresql wants an integer port; default to the standard 5432.
        opts['port'] = int(opts.get('port', 5432))
        opts.update(url.query)
        return ([], opts)

    def is_disconnect(self, e, connection, cursor):
        # py-postgresql reports a dead connection with this message text.
        return "connection is closed" in str(e)
dialect = PGDialect_pypostgresql
|
unknown
|
codeparrot/codeparrot-clean
| ||
import itertools
try:
from collections import OrderedDict
except ImportError:
from ordereddict import OrderedDict
from wtforms.compat import with_metaclass, iteritems, itervalues
from wtforms.meta import DefaultMeta
__all__ = (
'BaseForm',
'Form',
)
class BaseForm(object):
    """
    Base Form Class. Provides core behaviour like field construction,
    validation, and data and error proxying.
    """

    def __init__(self, fields, prefix='', meta=None):
        """
        :param fields:
            A dict or sequence of 2-tuples of partially-constructed fields.
        :param prefix:
            If provided, all fields will have their name prefixed with the
            value.
        :param meta:
            A meta instance which is used for configuration and customization
            of WTForms behaviors. Defaults to a fresh `DefaultMeta` instance.
        """
        # FIX: the default was previously the mutable default argument
        # `meta=DefaultMeta()`, i.e. one instance shared by every form that
        # did not pass its own meta. Create one per form instead.
        if meta is None:
            meta = DefaultMeta()
        if prefix and prefix[-1] not in '-_;:/.':
            prefix += '-'

        self.meta = meta
        self._prefix = prefix
        self._errors = None
        self._fields = OrderedDict()

        if hasattr(fields, 'items'):
            fields = fields.items()

        translations = self._get_translations()
        extra_fields = []
        if meta.csrf:
            self._csrf = meta.build_csrf(self)
            extra_fields.extend(self._csrf.setup_form(self))

        # Bind each (name, unbound field) pair onto this form instance.
        for name, unbound_field in itertools.chain(fields, extra_fields):
            options = dict(name=name, prefix=prefix, translations=translations)
            field = meta.bind_field(self, unbound_field, options)
            self._fields[name] = field

    def __iter__(self):
        """Iterate form fields in creation order."""
        return iter(itervalues(self._fields))

    def __contains__(self, name):
        """Return `True` if the named field is a member of this form."""
        return (name in self._fields)

    def __getitem__(self, name):
        """Dict-style access to this form's fields."""
        return self._fields[name]

    def __setitem__(self, name, value):
        """Bind a field to this form."""
        self._fields[name] = value.bind(form=self, name=name, prefix=self._prefix)

    def __delitem__(self, name):
        """Remove a field from this form."""
        del self._fields[name]

    def _get_translations(self):
        """
        .. deprecated:: 2.0
            `_get_translations` is being removed in WTForms 3.0, use
            `Meta.get_translations` instead.

        Override in subclasses to provide alternate translations factory.

        Must return an object that provides gettext() and ngettext() methods.
        """
        return self.meta.get_translations(self)

    def populate_obj(self, obj):
        """
        Populates the attributes of the passed `obj` with data from the form's
        fields.

        :note: This is a destructive operation; Any attribute with the same name
               as a field will be overridden. Use with caution.
        """
        for name, field in iteritems(self._fields):
            field.populate_obj(obj, name)

    def process(self, formdata=None, obj=None, data=None, **kwargs):
        """
        Take form, object data, and keyword arg input and have the fields
        process them.

        :param formdata:
            Used to pass data coming from the enduser, usually `request.POST` or
            equivalent.
        :param obj:
            If `formdata` is empty or not provided, this object is checked for
            attributes matching form field names, which will be used for field
            values.
        :param data:
            If provided, must be a dictionary of data. This is only used if
            `formdata` is empty or not provided and `obj` does not contain
            an attribute named the same as the field.
        :param `**kwargs`:
            If `formdata` is empty or not provided and `obj` does not contain
            an attribute named the same as a field, form will assign the value
            of a matching keyword argument to the field, if one exists.
        """
        formdata = self.meta.wrap_formdata(self, formdata)

        if data is not None:
            # XXX we want to eventually process 'data' as a new entity.
            #     Temporarily, this can simply be merged with kwargs.
            kwargs = dict(data, **kwargs)

        # Precedence per field: obj attribute, then kwargs, then formdata only.
        for name, field, in iteritems(self._fields):
            if obj is not None and hasattr(obj, name):
                field.process(formdata, getattr(obj, name))
            elif name in kwargs:
                field.process(formdata, kwargs[name])
            else:
                field.process(formdata)

    def validate(self, extra_validators=None):
        """
        Validates the form by calling `validate` on each field.

        :param extra_validators:
            If provided, is a dict mapping field names to a sequence of
            callables which will be passed as extra validators to the field's
            `validate` method.

        Returns `True` if no errors occur.
        """
        self._errors = None
        success = True
        for name, field in iteritems(self._fields):
            if extra_validators is not None and name in extra_validators:
                extra = extra_validators[name]
            else:
                extra = tuple()
            if not field.validate(self, extra):
                success = False
        return success

    @property
    def data(self):
        """A dict mapping field names to field data values."""
        return dict((name, f.data) for name, f in iteritems(self._fields))

    @property
    def errors(self):
        """A dict mapping field names to error lists, computed lazily."""
        if self._errors is None:
            self._errors = dict((name, f.errors) for name, f in iteritems(self._fields) if f.errors)
        return self._errors
class FormMeta(type):
    """
    The metaclass for `Form` and any subclasses of `Form`.

    `FormMeta`'s responsibility is to create the `_unbound_fields` list, which
    is a list of `UnboundField` instances sorted by their order of
    instantiation. The list is created at the first instantiation of the form.
    If any fields are added/removed from the form, the list is cleared to be
    re-generated on the next instantiation.

    Any properties which begin with an underscore or are not `UnboundField`
    instances are ignored by the metaclass.
    """
    def __init__(cls, name, bases, attrs):
        type.__init__(cls, name, bases, attrs)
        # Both caches are built lazily on the first instantiation (__call__)
        # and invalidated by __setattr__/__delattr__ below.
        cls._unbound_fields = None
        cls._wtforms_meta = None

    def __call__(cls, *args, **kwargs):
        """
        Construct a new `Form` instance.

        Creates the `_unbound_fields` list and the internal `_wtforms_meta`
        subclass of the class Meta in order to allow a proper inheritance
        hierarchy.
        """
        if cls._unbound_fields is None:
            fields = []
            # Collect every public attribute that marks itself as a form field
            # via the `_formfield` marker attribute.
            for name in dir(cls):
                if not name.startswith('_'):
                    unbound_field = getattr(cls, name)
                    if hasattr(unbound_field, '_formfield'):
                        fields.append((name, unbound_field))
            # We keep the name as the second element of the sort
            # to ensure a stable sort.
            fields.sort(key=lambda x: (x[1].creation_counter, x[0]))
            cls._unbound_fields = fields

        # Create a subclass of the 'class Meta' using all the ancestors.
        if cls._wtforms_meta is None:
            bases = []
            for mro_class in cls.__mro__:
                if 'Meta' in mro_class.__dict__:
                    bases.append(mro_class.Meta)
            cls._wtforms_meta = type('Meta', tuple(bases), {})
        return type.__call__(cls, *args, **kwargs)

    def __setattr__(cls, name, value):
        """
        Add an attribute to the class, clearing `_unbound_fields` if needed.
        """
        # Rebinding 'Meta' invalidates the combined meta subclass; adding a
        # new field invalidates the cached field list.
        if name == 'Meta':
            cls._wtforms_meta = None
        elif not name.startswith('_') and hasattr(value, '_formfield'):
            cls._unbound_fields = None
        type.__setattr__(cls, name, value)

    def __delattr__(cls, name):
        """
        Remove an attribute from the class, clearing `_unbound_fields` if
        needed.
        """
        if not name.startswith('_'):
            cls._unbound_fields = None
        type.__delattr__(cls, name)
class Form(with_metaclass(FormMeta, BaseForm)):
    """
    Declarative Form base class. Extends BaseForm's core behaviour allowing
    fields to be defined on Form subclasses as class attributes.
    In addition, form and instance input data are taken at construction time
    and passed to `process()`.
    """
    Meta = DefaultMeta

    def __init__(self, formdata=None, obj=None, prefix='', data=None, meta=None, **kwargs):
        """
        :param formdata:
            Used to pass data coming from the enduser, usually `request.POST` or
            equivalent. formdata should be some sort of request-data wrapper which
            can get multiple parameters from the form input, and values are unicode
            strings, e.g. a Werkzeug/Django/WebOb MultiDict
        :param obj:
            If `formdata` is empty or not provided, this object is checked for
            attributes matching form field names, which will be used for field
            values.
        :param prefix:
            If provided, all fields will have their name prefixed with the
            value.
        :param data:
            Accept a dictionary of data. This is only used if `formdata` and
            `obj` are not present.
        :param meta:
            If provided, this is a dictionary of values to override attributes
            on this form's meta instance.
        :param `**kwargs`:
            If `formdata` is empty or not provided and `obj` does not contain
            an attribute named the same as a field, form will assign the value
            of a matching keyword argument to the field, if one exists.
        """
        # _wtforms_meta is the merged Meta class built by FormMeta.__call__.
        meta_obj = self._wtforms_meta()
        if meta is not None and isinstance(meta, dict):
            meta_obj.update_values(meta)
        super(Form, self).__init__(self._unbound_fields, meta=meta_obj, prefix=prefix)
        for name, field in iteritems(self._fields):
            # Set all the fields to attributes so that they obscure the class
            # attributes with the same names.
            setattr(self, name, field)
        self.process(formdata, obj, data=data, **kwargs)

    def __setitem__(self, name, value):
        # Forms are declarative: fields are declared on the class, never
        # attached to an instance.
        raise TypeError('Fields may not be added to Form instances, only classes.')

    def __delitem__(self, name):
        # Remove the bound field and mask the class-level UnboundField with
        # an instance attribute set to None.
        del self._fields[name]
        setattr(self, name, None)

    def __delattr__(self, name):
        if name in self._fields:
            self.__delitem__(name)
        else:
            # This is done for idempotency, if we have a name which is a field,
            # we want to mask it by setting the value to None.
            unbound_field = getattr(self.__class__, name, None)
            if unbound_field is not None and hasattr(unbound_field, '_formfield'):
                setattr(self, name, None)
            else:
                super(Form, self).__delattr__(name)

    def validate(self):
        """
        Validates the form by calling `validate` on each field, passing any
        extra `Form.validate_<fieldname>` validators to the field validator.
        """
        extra = {}
        for name in self._fields:
            # A method named validate_<fieldname> acts as an inline validator
            # for that specific field.
            inline = getattr(self.__class__, 'validate_%s' % name, None)
            if inline is not None:
                extra[name] = [inline]
        return super(Form, self).validate(extra)
|
unknown
|
codeparrot/codeparrot-clean
| ||
<?php
namespace Illuminate\Cache;
use Illuminate\Contracts\Cache\Store;
abstract class TaggableStore implements Store
{
    /**
     * Begin executing a new tags operation.
     *
     * Accepts either an array of tag names or the names as variadic
     * arguments; both forms produce the same tag set.
     *
     * @param  mixed  $names
     * @return \Illuminate\Cache\TaggedCache
     */
    public function tags($names)
    {
        $tagNames = is_array($names) ? $names : func_get_args();

        return new TaggedCache($this, new TagSet($this, $tagNames));
    }
}
|
php
|
github
|
https://github.com/laravel/framework
|
src/Illuminate/Cache/TaggableStore.php
|
#
# (c) 2018 Red Hat Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = """
---
cliconf: edgeswitch
short_description: Use edgeswitch cliconf to run command on EdgeSwitch platform
description:
- This edgeswitch plugin provides low level abstraction apis for
sending and receiving CLI commands from Ubiquiti EdgeSwitch network devices.
version_added: "2.8"
"""
import re
import time
import json
from itertools import chain
from ansible.errors import AnsibleConnectionFailure
from ansible.module_utils._text import to_text
from ansible.module_utils.network.common.config import dumps
from ansible.module_utils.network.common.utils import to_list
from ansible.plugins.cliconf import CliconfBase, enable_mode
from ansible.module_utils.common._collections_compat import Mapping
class Cliconf(CliconfBase):
    """Cliconf plugin for Ubiquiti EdgeSwitch: low-level CLI primitives for
    fetching device facts, reading configuration and pushing config lines."""

    def get_device_info(self):
        """Parse 'show version' output into a network_os info dict."""
        device_info = {}
        device_info['network_os'] = 'edgeswitch'
        reply = self.get(command='show version')
        data = to_text(reply, errors='surrogate_or_strict').strip()
        # Banner lines use dotted padding, e.g. 'Software Version....... 1.2.3,'
        match = re.search(r'Software Version\.+ (.*)', data)
        if match:
            # Trim the trailing comma left over from the banner line.
            device_info['network_os_version'] = match.group(1).strip(',')
        match = re.search(r'^Machine Model\.+ (.*)', data, re.M)
        if match:
            device_info['network_os_model'] = match.group(1)
        match = re.search(r'System Name\.+ (.*)', data, re.M)
        if match:
            device_info['network_os_hostname'] = match.group(1)
        return device_info

    @enable_mode
    def get_config(self, source='running', flags=None):
        """Return the device configuration.

        :param source: 'running' or 'startup'; anything else raises ValueError.
        :param flags: optional extra tokens appended to the show command.
        """
        if source not in ('running', 'startup'):
            raise ValueError("fetching configuration from %s is not supported" % source)
        if source == 'running':
            cmd = 'show running-config '
        else:
            cmd = 'show startup-config '
        if flags:
            cmd += ' '.join(to_list(flags))
        cmd = cmd.strip()
        return self.send_command(cmd)

    @enable_mode
    def edit_config(self, commands):
        """Push configuration lines inside a configure/end session.

        :param commands: list of command strings or dicts accepted by
            send_command. 'end' and comment lines (leading '!') are skipped.
        :returns: dict with the 'request' commands sent and their 'response's.
        """
        resp = {}
        results = []
        requests = []
        self.send_command('configure')
        for line in to_list(commands):
            if not isinstance(line, Mapping):
                line = {'command': line}
            cmd = line['command']
            # Skip the explicit 'end' (sent below) and comment lines.
            if cmd != 'end' and cmd[0] != '!':
                results.append(self.send_command(**line))
                requests.append(cmd)
        self.send_command('end')
        resp['request'] = requests
        resp['response'] = results
        return resp

    def get(self, command=None, prompt=None, answer=None, sendonly=False, output=None, check_all=False):
        """Run a single show-style command; 'output' selection is unsupported."""
        if not command:
            raise ValueError('must provide value of command to execute')
        if output:
            raise ValueError("'output' value %s is not supported for get" % output)
        return self.send_command(command=command, prompt=prompt, answer=answer, sendonly=sendonly, check_all=check_all)

    def get_capabilities(self):
        """Advertise base capabilities plus the run_commands RPC, as JSON."""
        result = super(Cliconf, self).get_capabilities()
        result['rpc'] += ['run_commands']
        return json.dumps(result)

    def run_commands(self, commands=None, check_rc=True):
        """Execute a list of commands and collect their outputs.

        :param commands: list of strings/dicts; dict entries may not set
            'output' (not supported on this platform).
        :param check_rc: when False, connection errors are returned in-line
            instead of raised.
        """
        if commands is None:
            raise ValueError("'commands' value is required")
        responses = list()
        for cmd in to_list(commands):
            if not isinstance(cmd, Mapping):
                cmd = {'command': cmd}
            output = cmd.pop('output', None)
            if output:
                raise ValueError("'output' value %s is not supported for run_commands" % output)
            try:
                out = self.send_command(**cmd)
            except AnsibleConnectionFailure as e:
                if check_rc:
                    raise
                # Best effort: record the error text in place of the output.
                out = getattr(e, 'err', e)
            responses.append(out)
        return responses
|
unknown
|
codeparrot/codeparrot-clean
| ||
"""
We perform uniqueness checks explicitly on the serializer class, rather
than using Django's `.full_clean()`.
This gives us better separation of concerns, allows us to use single-step
object creation, and makes it possible to switch between using the implicit
`ModelSerializer` class and an equivalent explicit `Serializer` class.
"""
from __future__ import unicode_literals
from django.utils.translation import ugettext_lazy as _
from rest_framework.compat import unicode_to_repr
from rest_framework.exceptions import ValidationError
from rest_framework.utils.representation import smart_repr
class UniqueValidator:
    """
    Validator that corresponds to `unique=True` on a model field.
    Should be applied to an individual field on the serializer.
    """
    message = _('This field must be unique.')

    def __init__(self, queryset, message=None):
        self.queryset = queryset
        self.serializer_field = None
        self.message = message or self.message

    def set_context(self, serializer_field):
        """
        This hook is called by the serializer instance,
        prior to the validation call being made.
        """
        # The model field name may differ from the serializer field name
        # when `source=<>` is set, so resolve it from source_attrs.
        self.field_name = serializer_field.source_attrs[0]
        # On updates the serializer carries the instance being edited.
        self.instance = getattr(serializer_field.parent, 'instance', None)

    def filter_queryset(self, value, queryset):
        """
        Filter the queryset to all instances matching the given attribute.
        """
        return queryset.filter(**{self.field_name: value})

    def exclude_current_instance(self, queryset):
        """
        If an instance is being updated, then do not include
        that instance itself as a uniqueness conflict.
        """
        if self.instance is None:
            return queryset
        return queryset.exclude(pk=self.instance.pk)

    def __call__(self, value):
        candidates = self.exclude_current_instance(
            self.filter_queryset(value, self.queryset))
        if candidates.exists():
            raise ValidationError(self.message)

    def __repr__(self):
        description = '<%s(queryset=%s)>' % (
            self.__class__.__name__,
            smart_repr(self.queryset)
        )
        return unicode_to_repr(description)
class UniqueTogetherValidator:
    """
    Validator that corresponds to `unique_together = (...)` on a model class.
    Should be applied to the serializer class, not to an individual field.
    """
    message = _('The fields {field_names} must make a unique set.')
    missing_message = _('This field is required.')

    def __init__(self, queryset, fields, message=None):
        """
        :param queryset: Queryset checked for uniqueness conflicts.
        :param fields: Iterable of field names that must be unique together.
        :param message: Optional override for the error message.
        """
        self.queryset = queryset
        self.fields = fields
        self.serializer_field = None
        self.message = message or self.message

    def set_context(self, serializer):
        """
        This hook is called by the serializer instance,
        prior to the validation call being made.
        """
        # Determine the existing instance, if this is an update operation.
        self.instance = getattr(serializer, 'instance', None)

    def enforce_required_fields(self, attrs):
        """
        The `UniqueTogetherValidator` always forces an implied 'required'
        state on the fields it applies to.

        Raises a ValidationError mapping each missing field name to
        `missing_message`. Skipped on updates, where missing fields fall
        back to the existing instance values in `filter_queryset`.
        """
        if self.instance is not None:
            return
        missing = {
            field_name: self.missing_message
            for field_name in self.fields
            if field_name not in attrs
        }
        if missing:
            raise ValidationError(missing)

    def filter_queryset(self, attrs, queryset):
        """
        Filter the queryset to all instances matching the given attributes.
        """
        # If this is an update, then any unprovided field should
        # have its value set based on the existing instance attribute.
        # NOTE(review): this mutates the caller's `attrs` dict in place;
        # preserved as-is since downstream code may rely on the filled dict.
        if self.instance is not None:
            for field_name in self.fields:
                if field_name not in attrs:
                    attrs[field_name] = getattr(self.instance, field_name)
        # Determine the filter keyword arguments and filter the queryset.
        filter_kwargs = {
            field_name: attrs[field_name]
            for field_name in self.fields
        }
        return queryset.filter(**filter_kwargs)

    def exclude_current_instance(self, attrs, queryset):
        """
        If an instance is being updated, then do not include
        that instance itself as a uniqueness conflict.
        """
        if self.instance is not None:
            return queryset.exclude(pk=self.instance.pk)
        return queryset

    def __call__(self, attrs):
        self.enforce_required_fields(attrs)
        queryset = self.queryset
        queryset = self.filter_queryset(attrs, queryset)
        queryset = self.exclude_current_instance(attrs, queryset)
        if queryset.exists():
            field_names = ', '.join(self.fields)
            raise ValidationError(self.message.format(field_names=field_names))

    def __repr__(self):
        return unicode_to_repr('<%s(queryset=%s, fields=%s)>' % (
            self.__class__.__name__,
            smart_repr(self.queryset),
            smart_repr(self.fields)
        ))
class BaseUniqueForValidator:
    """Shared machinery for the `UniqueFor<Range>Validator` family; concrete
    subclasses supply `message` and implement `filter_queryset`."""
    message = None
    missing_message = _('This field is required.')

    def __init__(self, queryset, field, date_field, message=None):
        self.queryset = queryset
        self.field = field
        self.date_field = date_field
        self.message = message or self.message

    def set_context(self, serializer):
        """
        This hook is called by the serializer instance,
        prior to the validation call being made.
        """
        # Determine the underlying model field names. These may not be the
        # same as the serializer field names if `source=<>` is set.
        self.field_name = serializer.fields[self.field].source_attrs[0]
        self.date_field_name = serializer.fields[self.date_field].source_attrs[0]
        # Determine the existing instance, if this is an update operation.
        self.instance = getattr(serializer, 'instance', None)

    def enforce_required_fields(self, attrs):
        """
        The `UniqueFor<Range>Validator` classes always force an implied
        'required' state on the fields they are applied to.
        """
        missing = {
            name: self.missing_message
            for name in (self.field, self.date_field)
            if name not in attrs
        }
        if missing:
            raise ValidationError(missing)

    def filter_queryset(self, attrs, queryset):
        raise NotImplementedError('`filter_queryset` must be implemented.')

    def exclude_current_instance(self, attrs, queryset):
        """
        If an instance is being updated, then do not include
        that instance itself as a uniqueness conflict.
        """
        if self.instance is None:
            return queryset
        return queryset.exclude(pk=self.instance.pk)

    def __call__(self, attrs):
        self.enforce_required_fields(attrs)
        candidates = self.exclude_current_instance(
            attrs, self.filter_queryset(attrs, self.queryset))
        if candidates.exists():
            message = self.message.format(date_field=self.date_field)
            raise ValidationError({self.field: message})

    def __repr__(self):
        return unicode_to_repr('<%s(queryset=%s, field=%s, date_field=%s)>' % (
            self.__class__.__name__,
            smart_repr(self.queryset),
            smart_repr(self.field),
            smart_repr(self.date_field)
        ))
class UniqueForDateValidator(BaseUniqueForValidator):
    message = _('This field must be unique for the "{date_field}" date.')

    def filter_queryset(self, attrs, queryset):
        """Restrict `queryset` to rows with the same value on the same
        calendar day (day, month and year all match)."""
        date = attrs[self.date_field]
        lookup = {
            self.field_name: attrs[self.field],
            '%s__day' % self.date_field_name: date.day,
            '%s__month' % self.date_field_name: date.month,
            '%s__year' % self.date_field_name: date.year,
        }
        return queryset.filter(**lookup)
class UniqueForMonthValidator(BaseUniqueForValidator):
    message = _('This field must be unique for the "{date_field}" month.')

    def filter_queryset(self, attrs, queryset):
        """Restrict `queryset` to rows with the same value in the same month
        (month number only — the year is deliberately ignored)."""
        date = attrs[self.date_field]
        lookup = {
            self.field_name: attrs[self.field],
            '%s__month' % self.date_field_name: date.month,
        }
        return queryset.filter(**lookup)
class UniqueForYearValidator(BaseUniqueForValidator):
    message = _('This field must be unique for the "{date_field}" year.')

    def filter_queryset(self, attrs, queryset):
        """Restrict `queryset` to rows with the same value in the same year."""
        date = attrs[self.date_field]
        lookup = {
            self.field_name: attrs[self.field],
            '%s__year' % self.date_field_name: date.year,
        }
        return queryset.filter(**lookup)
|
unknown
|
codeparrot/codeparrot-clean
| ||
/*
* Copyright (C) 2010 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.common.collect.testing.testers;
import static com.google.common.collect.testing.Helpers.copyToList;
import static com.google.common.collect.testing.features.CollectionSize.ONE;
import static com.google.common.collect.testing.features.CollectionSize.SEVERAL;
import static com.google.common.collect.testing.features.CollectionSize.ZERO;
import static com.google.common.collect.testing.testers.ReflectionFreeAssertThrows.assertThrows;
import static java.util.Collections.sort;
import com.google.common.annotations.GwtCompatible;
import com.google.common.collect.testing.features.CollectionSize;
import java.util.List;
import java.util.NoSuchElementException;
import java.util.SortedSet;
import org.jspecify.annotations.NullMarked;
import org.jspecify.annotations.Nullable;
import org.junit.Ignore;
/**
 * A generic JUnit test which tests operations on a SortedSet. Can't be invoked directly; please see
 * {@code SortedSetTestSuiteBuilder}.
 *
 * @author Jesse Wilson
 * @author Louis Wasserman
 */
@GwtCompatible
@Ignore("test runners must not instantiate and run this directly, only via suites we build")
// @Ignore affects the Android test runner, which respects JUnit 4 annotations on JUnit 3 tests.
@SuppressWarnings("JUnit4ClassUsedInJUnit3")
@NullMarked
public class SortedSetNavigationTester<E extends @Nullable Object> extends AbstractSetTester<E> {
  private SortedSet<E> sortedSet;
  // Sample elements ordered by the set's comparator; populated in setUp().
  private List<E> values;
  // a, b, c are the first three elements in sorted order; b and c remain null
  // unless the collection size is at least 3 (see setUp()).
  private @Nullable E a;
  private @Nullable E b;
  private @Nullable E c;

  @Override
  public void setUp() throws Exception {
    super.setUp();
    sortedSet = (SortedSet<E>) getSet();
    values =
        copyToList(
            getSubjectGenerator()
                .getSampleElements(getSubjectGenerator().getCollectionSize().getNumElements()));
    // Order the samples exactly the way the set under test orders them.
    sort(values, sortedSet.comparator());
    // some tests assume SEVERAL == 3
    if (values.size() >= 1) {
      a = values.get(0);
      if (values.size() >= 3) {
        b = values.get(1);
        c = values.get(2);
      }
    }
  }

  // first() and last() must throw on an empty set per the SortedSet contract.
  @CollectionSize.Require(ZERO)
  public void testEmptySetFirst() {
    assertThrows(NoSuchElementException.class, () -> sortedSet.first());
  }

  @CollectionSize.Require(ZERO)
  public void testEmptySetLast() {
    assertThrows(NoSuchElementException.class, () -> sortedSet.last());
  }

  // With a single element, first() and last() are the same element.
  @CollectionSize.Require(ONE)
  public void testSingletonSetFirst() {
    assertEquals(a, sortedSet.first());
  }

  @CollectionSize.Require(ONE)
  public void testSingletonSetLast() {
    assertEquals(a, sortedSet.last());
  }

  @CollectionSize.Require(SEVERAL)
  public void testFirst() {
    assertEquals(a, sortedSet.first());
  }

  @CollectionSize.Require(SEVERAL)
  public void testLast() {
    assertEquals(c, sortedSet.last());
  }
}
|
java
|
github
|
https://github.com/google/guava
|
android/guava-testlib/src/com/google/common/collect/testing/testers/SortedSetNavigationTester.java
|
# Mercurial extension to provide the 'hg bookmark' command
#
# Copyright 2008 David Soria Parra <dsp@php.net>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
'''track a line of development with movable markers
Bookmarks are local movable markers to changesets. Every bookmark
points to a changeset identified by its hash. If you commit a
changeset that is based on a changeset that has a bookmark on it, the
bookmark shifts to the new changeset.
It is possible to use bookmark names in every revision lookup (e.g. hg
merge, hg update).
By default, when several bookmarks point to the same changeset, they
will all move forward together. It is possible to obtain a more
git-like experience by adding the following configuration option to
your .hgrc::
[bookmarks]
track.current = True
This will cause Mercurial to track the bookmark that you are currently
using, and only update it. This is similar to git's approach to
branching.
'''
from mercurial.i18n import _
from mercurial.node import nullid, nullrev, hex, short
from mercurial import util, commands, repair, extensions
import os
def write(repo):
    '''Write bookmarks
    Write the given bookmark => hash dictionary to the .hg/bookmarks file
    in a format equal to those of localtags.
    We also store a backup of the previous state in undo.bookmarks that
    can be copied back on rollback.
    '''
    refs = repo._bookmarks
    # Snapshot the current file first so rollback() can restore it.
    if os.path.exists(repo.join('bookmarks')):
        util.copyfile(repo.join('bookmarks'), repo.join('undo.bookmarks'))
    # If the active bookmark no longer exists, clear the 'current' marker.
    if repo._bookmarkcurrent not in refs:
        setcurrent(repo, None)
    wlock = repo.wlock()
    try:
        # atomictemp guarantees readers never see a half-written file.
        file = repo.opener('bookmarks', 'w', atomictemp=True)
        for refspec, node in refs.iteritems():
            file.write("%s %s\n" % (hex(node), refspec))
        file.rename()
    finally:
        wlock.release()
def setcurrent(repo, mark):
    '''Set the name of the bookmark that we are currently on
    Set the name of the bookmark that we are on (hg update <bookmark>).
    The name is recorded in .hg/bookmarks.current
    '''
    current = repo._bookmarkcurrent
    if current == mark:
        return
    refs = repo._bookmarks
    # do not update if we do update to a rev equal to the current bookmark
    if (mark and mark not in refs and
        current and refs[current] == repo.changectx('.').node()):
        return
    # Unknown names clear the marker instead of recording a dangling ref.
    if mark not in refs:
        mark = ''
    wlock = repo.wlock()
    try:
        # atomictemp: readers never observe a partially written marker file.
        file = repo.opener('bookmarks.current', 'w', atomictemp=True)
        file.write(mark)
        file.rename()
    finally:
        wlock.release()
    # Keep the cached property in sync with what was just written.
    repo._bookmarkcurrent = mark
def bookmark(ui, repo, mark=None, rev=None, force=False, delete=False, rename=None):
    '''track a line of development with movable markers
    Bookmarks are pointers to certain commits that move when
    committing. Bookmarks are local. They can be renamed, copied and
    deleted. It is possible to use bookmark names in 'hg merge' and
    'hg update' to merge and update respectively to a given bookmark.
    You can use 'hg bookmark NAME' to set a bookmark on the working
    directory's parent revision with the given name. If you specify
    a revision using -r REV (where REV may be an existing bookmark),
    the bookmark is assigned to that revision.
    '''
    # 'hex' is always truthy, so the old ``a and b or c`` idiom is replaced
    # by the equivalent, clearer conditional expression.
    hexfn = hex if ui.debugflag else short
    marks = repo._bookmarks
    cur = repo.changectx('.').node()
    if rename:
        # Rename an existing bookmark, carrying its target node over.
        if rename not in marks:
            raise util.Abort(_("a bookmark of this name does not exist"))
        if mark in marks and not force:
            raise util.Abort(_("a bookmark of the same name already exists"))
        if mark is None:
            raise util.Abort(_("new bookmark name required"))
        marks[mark] = marks[rename]
        del marks[rename]
        if repo._bookmarkcurrent == rename:
            setcurrent(repo, mark)
        write(repo)
        return
    if delete:
        if mark is None:
            raise util.Abort(_("bookmark name required"))
        if mark not in marks:
            raise util.Abort(_("a bookmark of this name does not exist"))
        if mark == repo._bookmarkcurrent:
            # Deleting the active bookmark also clears the 'current' marker.
            setcurrent(repo, None)
        del marks[mark]
        write(repo)
        return
    # PEP 8: identity comparison with None, not ``!=``.
    if mark is not None:
        # Create a new bookmark, or move an existing one (with --force).
        if "\n" in mark:
            raise util.Abort(_("bookmark name cannot contain newlines"))
        mark = mark.strip()
        if mark in marks and not force:
            raise util.Abort(_("a bookmark of the same name already exists"))
        if ((mark in repo.branchtags() or mark == repo.dirstate.branch())
            and not force):
            raise util.Abort(
                _("a bookmark cannot have the name of an existing branch"))
        if rev:
            marks[mark] = repo.lookup(rev)
        else:
            marks[mark] = repo.changectx('.').node()
        setcurrent(repo, mark)
        write(repo)
        return
    if mark is None:
        # No bookmark name given: list the existing bookmarks.
        if rev:
            raise util.Abort(_("bookmark name required"))
        if len(marks) == 0:
            ui.status(_("no bookmarks set\n"))
        else:
            for bmark, n in marks.iteritems():
                if ui.configbool('bookmarks', 'track.current'):
                    current = repo._bookmarkcurrent
                    # '*' marks the active bookmark when it sits on '.'.
                    prefix = '*' if (bmark == current and n == cur) else ' '
                else:
                    prefix = '*' if n == cur else ' '
                if ui.quiet:
                    ui.write("%s\n" % bmark)
                else:
                    ui.write(" %s %-25s %d:%s\n" % (
                        prefix, bmark, repo.changelog.rev(n), hexfn(n)))
        return
def _revstostrip(changelog, node):
    """Return the list of revisions to strip when stripping *node*.

    Walks every revision from *node*'s rev to tip, collecting descendants
    of the strip point. Merge parents that are newer than the strip point
    but not themselves stripped are recorded as heads to save and removed
    from the result.
    """
    srev = changelog.rev(node)
    tostrip = [srev]
    # Set mirror of `tostrip`: the original tested membership on the list,
    # which is O(len(tostrip)) per revision and quadratic overall.
    stripped = set(tostrip)
    saveheads = set()
    for r in xrange(srev, len(changelog)):
        parents = changelog.parentrevs(r)
        if parents[0] in stripped or parents[1] in stripped:
            tostrip.append(r)
            stripped.add(r)
            if parents[1] != nullrev:
                # Merge revision: keep any parent that survives the strip.
                for p in parents:
                    if p not in stripped and p > srev:
                        saveheads.add(p)
    return [r for r in tostrip if r not in saveheads]
def strip(oldstrip, ui, repo, node, backup="all"):
    """Strip bookmarks if revisions are stripped using
    the mercurial.strip method. This usually happens during
    qpush and qpop"""
    doomed = _revstostrip(repo.changelog, node)
    marks = repo._bookmarks
    # Bookmarks sitting on a stripped revision must be moved afterwards.
    movers = [name for name, n in marks.items()
              if repo.changelog.rev(n) in doomed]
    oldstrip(ui, repo, node, backup)
    if movers:
        for name in movers:
            marks[name] = repo.changectx('.').node()
        write(repo)
def reposetup(ui, repo):
    # Swap in a repository subclass so bookmarks follow commits, pulls,
    # lookups and rollbacks. Only local repositories are wrapped.
    if not repo.local():
        return
    class bookmark_repo(repo.__class__):
        @util.propertycache
        def _bookmarks(self):
            '''Parse .hg/bookmarks file and return a dictionary
            Bookmarks are stored as {HASH}\\s{NAME}\\n (localtags format) values
            in the .hg/bookmarks file. They are read returned as a dictionary
            with name => hash values.
            '''
            try:
                bookmarks = {}
                for line in self.opener('bookmarks'):
                    sha, refspec = line.strip().split(' ', 1)
                    bookmarks[refspec] = super(bookmark_repo, self).lookup(sha)
            except:
                # Best-effort read: a missing or malformed bookmarks file
                # yields whatever entries were parsed so far (possibly none).
                pass
            return bookmarks
        @util.propertycache
        def _bookmarkcurrent(self):
            '''Get the current bookmark
            If we use git-ish branches we have a current bookmark that
            we are on. This function returns the name of the bookmark. It
            is stored in .hg/bookmarks.current
            '''
            mark = None
            if os.path.exists(self.join('bookmarks.current')):
                file = self.opener('bookmarks.current')
                # No readline() in posixfile_nt, reading everything is cheap
                mark = (file.readlines() or [''])[0]
                if mark == '':
                    mark = None
                file.close()
            return mark
        def rollback(self):
            # Restore the snapshot taken by write() before undoing the
            # transaction itself.
            if os.path.exists(self.join('undo.bookmarks')):
                util.rename(self.join('undo.bookmarks'), self.join('bookmarks'))
            return super(bookmark_repo, self).rollback()
        def lookup(self, key):
            # Bookmark names resolve to the changeset they point at.
            if key in self._bookmarks:
                key = self._bookmarks[key]
            return super(bookmark_repo, self).lookup(key)
        def _bookmarksupdate(self, parents, node):
            # Advance bookmarks sitting on one of `parents` to `node`.
            marks = self._bookmarks
            update = False
            if ui.configbool('bookmarks', 'track.current'):
                # Git-like mode: only the currently active bookmark moves.
                mark = self._bookmarkcurrent
                if mark and marks[mark] in parents:
                    marks[mark] = node
                    update = True
            else:
                for mark, n in marks.items():
                    if n in parents:
                        marks[mark] = node
                        update = True
            if update:
                write(self)
        def commitctx(self, ctx, error=False):
            """Add a revision to the repository and
            move the bookmark"""
            wlock = self.wlock() # do both commit and bookmark with lock held
            try:
                node = super(bookmark_repo, self).commitctx(ctx, error)
                if node is None:
                    return None
                parents = self.changelog.parents(node)
                if parents[1] == nullid:
                    # Non-merge commit: only the first parent is relevant.
                    parents = (parents[0],)
                self._bookmarksupdate(parents, node)
                return node
            finally:
                wlock.release()
        def addchangegroup(self, source, srctype, url, emptyok=False):
            parents = self.dirstate.parents()
            result = super(bookmark_repo, self).addchangegroup(
                source, srctype, url, emptyok)
            if result > 1:
                # We have more heads than before
                return result
            node = self.changelog.tip()
            self._bookmarksupdate(parents, node)
            return result
        def _findtags(self):
            """Merge bookmarks with normal tags"""
            (tags, tagtypes) = super(bookmark_repo, self)._findtags()
            tags.update(self._bookmarks)
            return (tags, tagtypes)
        if hasattr(repo, 'invalidate'):
            def invalidate(self):
                super(bookmark_repo, self).invalidate()
                # Drop the cached bookmark properties so they are re-read.
                # NOTE(review): deletes from `repo`, not `self`; the two are
                # presumably the same object once repo.__class__ is swapped
                # below — confirm before changing.
                for attr in ('_bookmarks', '_bookmarkcurrent'):
                    if attr in self.__dict__:
                        delattr(repo, attr)
    repo.__class__ = bookmark_repo
def uisetup(ui):
    """Extension setup: wrap repair.strip so bookmarks move off stripped
    revisions, and (when track.current is set) wrap 'hg update' so the
    active bookmark is recorded."""
    extensions.wrapfunction(repair, "strip", strip)
    if ui.configbool('bookmarks', 'track.current'):
        extensions.wrapcommand(commands.table, 'update', updatecurbookmark)
def updatecurbookmark(orig, ui, repo, *args, **opts):
    '''Set the current bookmark
    If the user updates to a bookmark we update the .hg/bookmarks.current
    file.
    '''
    result = orig(ui, repo, *args, **opts)
    # The update target comes from --rev, falling back to the first
    # positional argument.
    target = opts['rev']
    if not target and args:
        target = args[0]
    setcurrent(repo, target)
    return result
# Mercurial command table: registers 'hg bookmarks' with its option flags.
cmdtable = {
    "bookmarks":
        (bookmark,
         [('f', 'force', False, _('force')),
          ('r', 'rev', '', _('revision')),
          ('d', 'delete', False, _('delete a given bookmark')),
          ('m', 'rename', '', _('rename a given bookmark'))],
         _('hg bookmarks [-f] [-d] [-m NAME] [-r REV] [NAME]')),
}
|
unknown
|
codeparrot/codeparrot-clean
| ||
#
# Copyright 2009, 2013, Shaheed Haque <srhaque@theiet.org>.
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of
# the License or (at your option) version 3 or any later version
# accepted by the membership of KDE e.V. (or its successor approved
# by the membership of KDE e.V.), which shall act as a proxy
# defined in Section 14 of version 3 of the license.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import print_function
import argparse
import atexit
import cmd
import re
import traceback
from IPython.frontend.terminal.console.interactiveshell import ZMQTerminalInteractiveShell
from IPython.lib.kernel import find_connection_file
from IPython.zmq.blockingkernelmanager import BlockingKernelManager
from PyQt4.QtCore import QCoreApplication, QObject
from gdb_command_db import GdbCommandDb
from qgdb import QGdbInterpreter
def dbg0(msg, *args):
    """Print an error-level diagnostic, formatting *msg* with *args*."""
    text = msg.format(*args)
    print("ERR-0", text)
def dbg1(msg, *args):
    """Print a level-1 debug diagnostic, formatting *msg* with *args*."""
    text = msg.format(*args)
    print("DBG-1", text)
def dbg2(msg, *args):
    """Print a level-2 debug diagnostic, formatting *msg* with *args*."""
    text = msg.format(*args)
    print("DBG-2", text)
class IPythonConsoleShell(ZMQTerminalInteractiveShell):
    """A simple console shell for IPython.
    References:
    - http://stackoverflow.com/questions/9977446/connecting-to-a-remote-ipython-instance
    - https://github.com/ipython/ipython/blob/master/IPython/zmq/blockingkernelmanager.py
    For the Qt version, see:
    - http://stackoverflow.com/questions/11513132/embedding-ipython-qt-console-in-a-pyqt-application
    """
    def __init__(self, *args, **kwargs):
        # Resolve the kernel's JSON connection file (popped from kwargs) and
        # attach a blocking kernel manager to it.
        connection_file = find_connection_file(kwargs.pop("connection_file"))
        km = BlockingKernelManager(connection_file=connection_file)
        km.load_connection_file()
        heartbeat = True
        km.start_channels(hb=heartbeat)
        # Remove the connection file when the interpreter exits.
        atexit.register(km.cleanup_connection_file)
        super(IPythonConsoleShell, self).__init__(kernel_manager = km)
        self.km = km
    def stop(self):
        # Ask the shell loop to exit, then shut the kernel itself down.
        print("IPythonConsoleShell stop()")
        self.exit_now = True
        #self.km.stop_channels()
        self.km.shutdown_kernel()
        self.ask_exit()
class MyArgs(argparse.ArgumentParser):
    """ArgumentParser variant that renders usage and help text with a
    4-column indent increment and without the default 'usage: ' prefix."""

    def __init__(self, **kwargs):
        super(MyArgs, self).__init__(**kwargs)

    def format_usage(self):
        """Return the usage line alone, indented by 4, with no prefix."""
        formatter = self._get_formatter()
        formatter._indent_increment = 4
        formatter.add_usage(
            self.usage, self._actions, self._mutually_exclusive_groups, "")
        return formatter.format_help()

    def format_help(self):
        """Return the full help: usage, description, each argument group,
        then the epilog."""
        formatter = self._get_formatter()
        formatter._indent_increment = 4
        # usage (with an empty prefix, like format_usage above)
        formatter.add_usage(
            self.usage, self._actions, self._mutually_exclusive_groups, "")
        # description
        formatter.add_text(self.description)
        # positionals, optionals and user-defined groups
        for group in self._action_groups:
            formatter.start_section(group.title)
            formatter.add_text(group.description)
            formatter.add_arguments(group._group_actions)
            formatter.end_section()
        # epilog
        formatter.add_text(self.epilog)
        return formatter.format_help()
class Cli(cmd.Cmd):
"""Python CLI for GDB."""
prompt = "(pygdb) "
#
# Our database of commands.
#
commandDb = None
#
# Commands which will have environment variable substitution applied.
#
filesCommands = None
#
# Output handling.
#
_out = None
def __init__(self, arguments, printLine):
cmd.Cmd.__init__(self)
self._out = printLine
self.gdb = QGdbInterpreter(arguments, printLine)
self.createCommandDb()
def createCommandDb(self):
"""Create a command database we can use to implement our CLI."""
#
# Ask GDB for all the commands it has.
#
helpText = self.gdb.consoleCommand("help all", True)
self.commandDb = GdbCommandDb(helpText)
self.findFilesCommand()
#
# Add in all our overrides; that's any routine starting doXXX.
#
customCommands = [c for c in dir(self) if c.startswith("do_")]
for cmd in customCommands:
self.commandDb.addCustom(getattr(self, cmd))
#dbg0(self.commandDb)
    def findFilesCommand(self):
        """Make a list of each command which takes a file/path."""
        def matchClass(clazz_exact, arg, indentation, prefix, keyword, apropos, clazz, function):
            """
            Add contents of the database which are in the given clazz_exact to
            the files set.
            """
            if clazz == clazz_exact:
                arg[prefix + keyword] = apropos
        def matchRegExp(regexp, arg, indentation, prefix, keyword, apropos, clazz, function):
            """
            Add contents of the database which match the given regexp to the
            files set.
            """
            if regexp.search(keyword) or regexp.search(apropos):
                arg[prefix + keyword] = apropos
        #
        # Put all the commands we want to wrap into a dictionary, to avoid duplicates.
        # The result is consulted by default() to decide which commands get
        # $VAR/${VAR} environment variable expansion applied to their arguments.
        #
        self.filesCommands = dict()
        self.commandDb.walk(matchClass, "files", self.filesCommands)
        self.commandDb.walk(matchRegExp, re.compile(" path", re.IGNORECASE), self.filesCommands)
        self.commandDb.walk(matchRegExp, re.compile(" file", re.IGNORECASE), self.filesCommands)
    #
    # See http://lists.baseurl.org/pipermail/yum-devel/2011-August/008495.html
    #
    def ____cmdloop(self):
        """ Sick hack for readline. """
        # Python 2 only: temporarily replace the global raw_input (and swap
        # stdout around each prompt) so readline talks to the real tty while
        # cmd.Cmd runs its loop; the original raw_input is restored on exit.
        import __builtin__
        oraw_input = raw_input
        owriter = sys.stdout
        _ostdout = owriter #.stream
        def _sick_hack_raw_input(prompt):
            # Route the prompt through the original stdout, then restore.
            sys.stdout = _ostdout
            #rret = oraw_input(to_utf8(prompt))
            rret = oraw_input(prompt)
            sys.stdout = owriter
            return rret
        __builtin__.raw_input = _sick_hack_raw_input
        try:
            cret = cmd.Cmd.cmdloop(self)
        finally:
            __builtin__.raw_input = oraw_input
        return cret
    def asyncWrapper(self, command, args):
        """Execute a command which causes the inferior to run.
        """
        # NOTE(review): the consoleCommand result is captured but never
        # returned or printed -- presumably output arrives asynchronously
        # via GDB's own streams; confirm.
        dbg0("asyncWrapper", command, args)
        command = "{} {}".format(command, args)
        dbg0("command", command)
        results = self.gdb.consoleCommand(command)
    ##########################
    ## Breakpoint commands  ##
    ##########################
    def do_break(self, args, getSynopsis = False):
        """
        breakpoints
        NAME
            break -- Set breakpoint at specified line or function
        DESCRIPTION
            LOCATION may be a probe point, line number, function name, or "*" and an address.
            If a line number is specified, break at start of code for that line.
            If a function is specified, break at start of code for that function.
            If an address is specified, break at that exact address.
            With no LOCATION, uses current execution address of the selected
            stack frame. This is useful for breaking on return to a stack frame.
            THREADNUM is the number from "info threads".
            CONDITION is a boolean expression.
            Multiple breakpoints at one place are permitted, and useful if their
            conditions are different.
            Do "help breakpoints" for info on other commands dealing with breakpoints.
        """
        # Flags mirror GDB's break switches.  With getSynopsis=True only the
        # generated usage text is returned (used by do_help).
        parser = MyArgs(prog = "break", add_help = False)
        parser.add_argument("-t", "--temporary", action = "store_true", dest = "temporary")
        parser.add_argument("-h", "--hardware", action = "store_true", dest = "hw")
        parser.add_argument("-d", "--disabled", action = "store_true", dest = "disabled")
        parser.add_argument("-a", "--after", type = int, dest = "after")
        parser.add_argument("-p", "--probe", choices = ["generic", "stab"], dest = "probe", help = "Generic or SystemTap probe")
        parser.add_argument("location", nargs='?')
        # TODO add these back when we have optional subcommands working.
        #subparsers = parser.add_subparsers()
        #if_parser = subparsers.add_parser("if", add_help = False, help = "if CONDITION")
        #if_parser.add_argument("condition")
        #thread_parser = subparsers.add_parser("thread", add_help = False, help = "thread TID")
        #thread_parser.add_argument("tid", type = int)
        if getSynopsis:
            return parser.format_help()
        args = parser.parse_args(args.split())
        results = self.gdb._breakpoints.breakpointCreate(**vars(args))
def do_info_breakpoints(self, args):
results = self.gdb._breakpoints.list(args)
if not len(results):
return
#
# Print rows.
#
fmt = "{:<7} {:<14} {:<4} {:<3} {}"
self._out(fmt.format("Num", "Type", "Disp", "Enb", "Where"))
for u in results:
try:
u = u[u'bkpt']
try:
location = u["fullname"]
except KeyError:
try:
location = u["file"]
except KeyError:
try:
location = u["original-location"]
except KeyError:
location = u["at"]
u["type"] = "";
u["disp"] = "";
try:
addr = u["addr"]
except KeyError:
addr = 0
try:
func = u["func"]
line = u["line"]
except KeyError:
func = ""
line = 0
location = "{} {} at {}:{}".format(addr, func, location, line)
self._out(fmt.format(u["number"], u["type"], u["disp"], u["enabled"], location))
try:
times = u["times"]
if times != "0":
self._out(" breakpoint already hit {} times".format(times))
except KeyError:
pass
except KeyError:
#
# Not a standalone breakpoint, just an overload of one.
#
location = "{} {}".format(u["addr"], u["at"])
self._out(fmt.format(u["number"], "", "", u["enabled"], location))
    ###################
    ## Data commands ##
    ###################
    def do_call(self, args, getSynopsis = False):
        # Evaluate a call expression in the inferior.  getSynopsis=True only
        # returns the generated usage text (used by do_help).
        parser = MyArgs(prog = "call", add_help = False)
        parser.add_argument("expr")
        if getSynopsis:
            return parser.format_help()
        args = parser.parse_args(args.split())
        # TODO assign to local var
        self.gdb._data.evalute(**vars(args))
    def do_disassemble(self, args, getSynopsis = False):
        # Disassemble a memory range or a source file region.
        parser = MyArgs(prog = "disassemble", add_help = False)
        parser.add_argument("-s", "--start-addr", type = int)
        parser.add_argument("-e", "--end-addr", type = int)
        parser.add_argument("-f", "--filename")
        parser.add_argument("-l", "--linenum", type = int)
        parser.add_argument("-n", "--lines", type = int)
        # ["disassembly_only", "with_source", "with_opcodes", "all"]
        parser.add_argument("mode", type = int, choices = [ 0, 1, 2, 3 ])
        if getSynopsis:
            return parser.format_help()
        args = parser.parse_args(args.split())
        result = self.gdb._data.disassemble(**vars(args))
        for u in result:
            self._out(u[u'address'], u[u'inst'])
    def do_output(self, args, getSynopsis = False):
        # Like print, but the value is not entered in the value history.
        parser = MyArgs(prog = "output", add_help = False)
        parser.add_argument("expr")
        if getSynopsis:
            return parser.format_help()
        args = parser.parse_args(args.split())
        self.gdb._data.evalute(**vars(args))
    def do_print(self, args, getSynopsis = False):
        # NOTE(review): this definition is shadowed by the later
        # do_print(self, args) further down the class body; Python keeps
        # only the last definition, so this version is effectively dead.
        parser = MyArgs(prog = "print", add_help = False)
        parser.add_argument("expr")
        if getSynopsis:
            return parser.format_help()
        args = parser.parse_args(args.split())
        # TODO assign to local var
        self.gdb._data.evalute(**vars(args))
    def do_print(self, args):
        """
        data
        NAME
            print -- Print value of expression EXP
        SYNOPSIS
            print EXP
        DESCRIPTION
            EXP can be any of:
            - Inferior variables of the lexical environment of the selected
              stack frame, plus all those whose scope is global or an entire file.
            - $NUM gets previous value number NUM. $ and $$ are the last two
              values. $$NUM refers to NUM'th value back from the last one.
            - Names starting with $ refer to registers (with the values they
              would have if the program were to return to the stack frame now
              selected, restoring all registers saved by frames farther in) or
              else to ...
            - GDB "convenience" variables. Use assignment expressions to give
              values to convenience variables.
            - {TYPE}ADREXP refers to a datum of data type TYPE, located at address
              ADREXP. @ is a binary operator for treating consecutive data objects
              anywhere in memory as an array. FOO@NUM gives an array whose first
              element is FOO, whose second element is stored in the space following
              where FOO is stored, etc. FOO must be an expression whose value
              resides in memory.
            - Python expressions. In case of ambiguity between an inferior
              variable and a python variable, use the "gdb print" or "py print"
              commands.
            EXP may be preceded with /FMT, where FMT is a format letter
            but no count or size letter (see "x" command).
        EXAMPLES
            print main+1		Print inferior expression.
            print $1			Print previous value.
            print $getenv("HOME")	Print convenience function
            print gdb.PYTHONDIR		Print Python expression
        """
        try:
            #
            # Assume its an object known to GDB.
            # Relies on do_gdb() accepting a name_errors keyword argument.
            #
            self.do_gdb("print " + args, name_errors = True)
        except NameError as e:
            #
            # Try a Python variable.
            #
            try:
                self._out(eval(args))
            except NameError as f:
                # str(e)[2:-1] strips the leading "No" and trailing "." from
                # GDB's 'No symbol ...' message so the two reports compose.
                self._out("No GDB" + str(e)[2:-1] + ", and Python " + str(f))
    def do_info_registers(self, args, getSynopsis = False):
        # Show register values, optionally for a single named register.
        parser = MyArgs(prog = "info registers", add_help = False)
        parser.add_argument("regName", nargs = "?")
        if getSynopsis:
            return parser.format_help()
        args = parser.parse_args(args.split())
        # TODO assign to local var
        results = self.gdb._data.listRegisterValues(**vars(args))
        #
        # Print rows.
        #
        for u in results:
            self._out(u[u'name'], u[u'value'])
    def do_info_all__registers(self, args, getSynopsis = False):
        # The double underscore maps back to the "info all-registers" command.
        parser = MyArgs(prog = "info all-registers", add_help = False)
        parser.add_argument("regName", nargs = "?")
        if getSynopsis:
            return parser.format_help()
        args = parser.parse_args(args.split())
        # TODO assign to local var
        results = self.gdb._data.listRegisterValues(**vars(args))
        #
        # Print rows.
        #
        for u in results:
            self._out(u[u'name'], u[u'value'])
    def do_x(self, args, getSynopsis = False):
        # Examine memory, GDB "x" style: address, format, size, rows, cols.
        parser = MyArgs(prog = "x", add_help = False)
        parser.add_argument("address", type = int)
        parser.add_argument("word_format", choices = ["x", "d", "u", "o", "t", "a", "c", "f"])
        parser.add_argument("word_size", type = int)
        parser.add_argument("nr_rows", type = int)
        parser.add_argument("nr_cols", type = int)
        parser.add_argument("aschar", nargs="?", default = ".")
        parser.add_argument("-o", "--offset-bytes", type = int)
        if getSynopsis:
            return parser.format_help()
        args = parser.parse_args(args.split())
        # TODO assign to local var
        results = self.gdb._data.readMemory(**vars(args))
        for u in results:
            self._out(u[u'addr'], u[u'data'])
    #####################
    ## Program control ##
    #####################
    # Handlers either funnel through asyncWrapper() (console command that
    # resumes the inferior) or issue the MI -exec-* command directly.
    def do_advance(self, args):
        """
        running
        NAME
            advance -- Continue the program up to the given location (same form as args for break command)
        SYNOPSIS
            advance [PROBE_MODIFIER] [LOCATION] [thread THREADNUM] [if CONDITION]
        DESCRIPTION
            Continue the program up to the given location (same form as args for break command).
            Execution will also stop upon exit from the current stack frame.
        """
        self.asyncWrapper("advance", args)
    def do_continue(self, args):
        """
        running
        NAME
            continue -- Continue program being debugged
        SYNOPSIS
            continue [N|-a]
        DESCRIPTION
            Continue program being debugged, after signal or breakpoint.
            If proceeding from breakpoint, a number N may be used as an argument,
            which means to set the ignore count of that breakpoint to N - 1 (so that
            the breakpoint won't break until the Nth time it is reached).
            If non-stop mode is enabled, continue only the current thread,
            otherwise all the threads in the program are continued. To
            continue all stopped threads in non-stop mode, use the -a option.
            Specifying -a and an ignore count simultaneously is an error.
        """
        self.gdb.miCommandExec("-exec-continue", args)
    def do_finish(self, args):
        """
        running
        NAME
            finish -- Execute until selected stack frame returns
        SYNOPSIS
            finish
        DESCRIPTION
            Execute until selected stack frame returns.
            Upon return, the value returned is printed and put in the value history.
        """
        self.gdb.miCommandExec("-exec-finish", args)
    def do_interrupt(self, args):
        # Interrupt the running inferior (MI -exec-interrupt).
        self.gdb.miCommandExec("-exec-interrupt", args)
    def do_jump(self, args):
        """
        running
        NAME
            jump -- Continue program being debugged at specified line or address
        SYNOPSIS
            jump LINENUM|*ADDR
        DESCRIPTION
            Continue program being debugged at specified line or address.
            Give as argument either LINENUM or *ADDR, where ADDR is an expression
            for an address to start at.
        """
        self.asyncWrapper("jump", args)
    def do_kill(self, args):
        # Kill the inferior (MI -exec-abort).
        self.gdb.miCommandExec("-exec-abort", args)
    def do_next(self, args):
        """
        running
        NAME
            next -- Step program
        SYNOPSIS
            next [N]
        DESCRIPTION
            Step program, proceeding through subroutine calls.
            Like the "step" command as long as subroutine calls do not happen;
            when they do, the call is treated as one instruction.
            Argument N means do this N times (or till program stops for another reason).
        """
        self.gdb.miCommandExec("-exec-next", args)
    def do_nexti(self, args):
        """
        running
        NAME
            nexti -- Step one instruction
        SYNOPSIS
            nexti [N]
        DESCRIPTION
            Step one instruction, but proceed through subroutine calls.
            Argument N means do this N times (or till program stops for another reason).
        """
        self.gdb.miCommandExec("-exec-next-instruction", args)
    def do_return(self, args):
        # Pop the selected frame without executing it (MI -exec-return).
        self.gdb.miCommandExec("-exec-return", args)
    def do_reverse_continue(self, args):
        """
        running
        NAME
            reverse-continue -- Continue program being debugged but run it in reverse
        SYNOPSIS
            reverse-continue [N]
        DESCRIPTION
            Continue program being debugged but run it in reverse.
            If proceeding from breakpoint, a number N may be used as an argument,
            which means to set the ignore count of that breakpoint to N - 1 (so that
            the breakpoint won't break until the Nth time it is reached).
        """
        self.asyncWrapper("reverse-continue", args)
    # Reverse-execution wrappers; all funnel through asyncWrapper().
    def do_reverse_finish(self, args):
        """
        running
        NAME
            reverse-finish -- Execute backward until just before selected stack frame is called
        SYNOPSIS
            reverse-finish
        DESCRIPTION
            Execute backward until just before selected stack frame is called.
        """
        self.asyncWrapper("reverse-finish", args)
    def do_reverse_next(self, args):
        """
        running
        NAME
            reverse-next -- Step program backward
        SYNOPSIS
            reverse-next [N]
        DESCRIPTION
            Step program backward, proceeding through subroutine calls.
            Like the "reverse-step" command as long as subroutine calls do not happen;
            when they do, the call is treated as one instruction.
            Argument N means do this N times (or till program stops for another reason).
        """
        self.asyncWrapper("reverse-next", args)
    def do_reverse_nexti(self, args):
        """
        running
        NAME
            reverse-nexti -- Step backward one instruction
        SYNOPSIS
            reverse-nexti [N]
        DESCRIPTION
            Step backward one instruction, but proceed through called subroutines.
            Argument N means do this N times (or till program stops for another reason).
        """
        self.asyncWrapper("reverse-nexti", args)
    def do_reverse_step(self, args):
        """
        running
        NAME
            reverse-step -- Step program backward until it reaches the beginning of another source line
        SYNOPSIS
            reverse-step [N]
        DESCRIPTION
            Step program backward until it reaches the beginning of another source line.
            Argument N means do this N times (or till program stops for another reason).
        """
        self.asyncWrapper("reverse-step", args)
    def do_reverse_stepi(self, args):
        """
        running
        NAME
            reverse-stepi -- Step backward exactly one instruction
        SYNOPSIS
            reverse-stepi [N]
        DESCRIPTION
            Step backward exactly one instruction.
            Argument N means do this N times (or till program stops for another reason).
        """
        self.asyncWrapper("reverse-stepi", args)
    def do_run(self, args):
        """
        running
        NAME
            run -- Start debugged program
        SYNOPSIS
            run [ARGS]
        DESCRIPTION
            Start debugged program. You may specify arguments to give it.
            Args may include "*", or "[...]"; they are expanded using "sh".
            Input and output redirection with ">", "<", or ">>" are also allowed.
            With no arguments, uses arguments last specified (with "run" or "set args").
            To cancel previous arguments and run with no arguments,
            use "set args" without arguments.
        """
        # Give the inferior its own tty before launching it.
        tty = self.gdb.startIoThread()
        self.gdb.miCommandOne("-inferior-tty-set {}".format(tty))
        if args:
            self.do_set_args(args)
        self.gdb.miCommandExec("-exec-run", args)
    def do_set_args(self, args):
        # Set the argument list for the next "run" (MI -exec-arguments).
        self.gdb.miCommandExec("-exec-arguments", args)
    def do_show_args(self, args):
        # Show the current inferior argument list.
        self.gdb.miCommandExec("-exec-show-arguments", args)
    def do_signal(self, args):
        """
        running
        NAME
            signal -- Continue program giving it signal specified by the argument
        SYNOPSIS
            signal N
        DESCRIPTION
            Continue program giving it signal specified by the argument.
            An argument of "0" means continue program without giving it a signal.
        """
        self.asyncWrapper("signal", args)
    def do_start(self, args):
        """
        running
        NAME
            start -- Run the debugged program until the beginning of the main procedure
        SYNOPSIS
            start [ARGS]
        DESCRIPTION
            Run the debugged program until the beginning of the main procedure.
            You may specify arguments to give to your program, just as with the
            "run" command.
        """
        # Plant a temporary breakpoint on main, then run.  If the breakpoint
        # could only be created pending, main was not found: clean up and bail.
        results = self.gdb._breakpoints.breakpointCreate("main", temporary = True)
        if "pending" in results:
            results = self.gdb._breakpoints.breakpointDelete(results["number"])
            self._out("Cannot set breakpoint at 'main'")
            return
        self.do_run(args)
    def do_step(self, args):
        """
        running
        NAME
            step -- Step program until it reaches a different source line
        SYNOPSIS
            step [N]
        DESCRIPTION
            Step program until it reaches a different source line.
            Argument N means do this N times (or till program stops for another reason).
        """
        self.gdb.miCommandExec("-exec-step", args)
    def do_stepi(self, args):
        """
        running
        NAME
            stepi -- Step one instruction exactly
        SYNOPSIS
            stepi [N]
        DESCRIPTION
            Step one instruction exactly.
            Argument N means do this N times (or till program stops for another reason).
        """
        self.gdb.miCommandExec("-exec-step-instruction", args)
    def do_until(self, args):
        """
        running
        NAME
            until -- Execute until the program reaches a source line greater than the current
        SYNOPSIS
            until [PROBE_MODIFIER] [LOCATION] [thread THREADNUM] [if CONDITION]
        DESCRIPTION
            Execute until the program reaches a source line greater than the current
            or a specified location (same args as break command) within the current frame.
        """
        self.gdb.miCommandExec("-exec-until", args)
    def do_info_source(self, args):
        # Describe the source file of the current location.
        u = self.gdb._programControl.currentSource()
        self._out("Current source file is {}:{}".format(u["file"], u[u'line']))
        try:
            file = u["fullname"]
        except KeyError:
            file = u["file"]
        self._out("Located in {}".format(file))
        if u[u'macro-info'] != "0":
            self._out("Does include preprocessor macro info.")
        else:
            self._out("Does not include preprocessor macro info.")
    def do_info_sources(self, args):
        # List every source file GDB knows about, preferring full paths.
        results = self.gdb._programControl.allSources()
        for u in results:
            try:
                file = u["fullname"]
            except KeyError:
                file = u["file"]
            self._out(file)
    def do_info_files(self, args):
        #self.gdb._programControl.execSections()
        self.gdb._programControl.symbolFiles()
    def do_info_target(self, args):
        # "info target" is an alias for "info files".
        self.do_info_files(args)
    def do_file(self, filename):
        # Load FILENAME as both the executable and the symbol source.
        self.gdb._programControl.setExecAndSymbols(filename)
    #def do_exec_file(self, filename):
    #    self.gdb._programControl.setExecOnly(filename)
    #def do_symbol_file(self, filename):
    #    self.gdb._programControl.setSymbolsOnly(filename)
    ####################
    ## Stack commands ##
    ####################
    def do_bt(self, args):
        # Backtrace; the thread id is hardwired to 1 here.
        results = self.gdb._stack.stackFrames(1)
        #
        # Print rows.
        #
        for f in results:
            u = f[u'frame']
            try:
                location = u["from"]
            except KeyError:
                try:
                    location = u["fullname"] + ":" + u["line"]
                except KeyError:
                    try:
                        location = u["file"] + ":" + u["line"]
                    except KeyError:
                        # No location information at all for this frame.
                        self._out("#{} {} in {} ()".format(u["level"], u["addr"], u["func"]))
                        continue
            self._out("#{} {} in {} () from {}".format(u["level"], u["addr"], u["func"], location))
    def do_backtrace(self, args):
        # Alias for bt.
        self.do_bt(args)
    def do_where(self, args):
        # Alias for bt.
        self.do_bt(args)
    #def do_depth(self, tid, maxFrames = None):
    def do_frame(self, args):
        # NOTE(review): both branches ignore the parsed args -- the non-empty
        # branch passes a literal (1, 3) tuple; looks unfinished, confirm.
        if not args:
            self.do_info_frame(args)
        else:
            self.do_info_frame((1, 3))
    def do_info_frame(self, args):
        # Describe the selected frame (thread id hardwired to 1).
        u = self.gdb._stack.frameInfo(1)
        self._out("#{} {} in {} () from {}".format(u["level"], u["addr"], u["func"], u["from"]))
    def do_info_locals(self, args):
        # Dump arguments and locals of thread 1, frame 1, to depth 8.
        #self.gdb._stack.stackArguments(1, 1)
        results = self.gdb._stack.frameVariables(1, 1, 8)
        for u in results:
            try:
                self._out("arg {} {} = {} = {}".format(u["arg"], u["name"], u["type"], u["value"]))
            except KeyError:
                try:
                    self._out("{} = {} = {}".format(u["name"], u["type"], u["value"]))
                except KeyError:
                    self._out("{} = {}".format(u["name"], u["value"]))
    #####################
    ## Target commands ##
    #####################
    #'-target-attach'
    #'-target-compare-sections'
    #'-target-detach'
    #'-target-disconnect'
    #'-target-download'
    #'-target-exec-status'
    #'-target-list-available-targets'
    #'-target-list-current-targets'
    #'-target-list-parameters'
    #'-target-list-parameters'
    ######################
    ## Thread commands  ##
    #####################
    #'-thread-select'
    def do_info_threads(self, args):
        # One row per thread: active marker, id, target id (+name), frame.
        currentThread, results = self.gdb._threads.list(args)
        if not len(results):
            return
        #
        # Print rows.
        #
        fmt = "{:<1} {:<4} {:<37} {}"
        self._out(fmt.format(" ", "Id", "Target Id", "Where"))
        for v in results:
            if currentThread == v["id"]:
                active = "*"
            else:
                active = " "
            frame = v["frame"]
            # Note: the "args" parameter is reused here as a scratch variable
            # holding the frame's argument list.
            args = frame["args"]
            args = ", ".join(["{}={}".format(d["name"], d["value"]) for d in args])
            try:
                location = frame["fullname"]
            except KeyError:
                try:
                    location = frame["file"]
                except KeyError:
                    location = frame["from"]
            try:
                line = frame["line"]
            except KeyError:
                line = ""
            location = "{}: {}({}) at {}:{}".format(frame["addr"], frame["func"], args, location, line)
            name = v["name"]
            if name:
                name += ", "
            else:
                name = ""
            self._out(fmt.format(active, v["id"], name + v["target-id"], location))
    ######################
    ## General commands ##
    ######################
    #'-enable-timings'
    #'-environment-cd'
    #'-environment-directory'
    #'-environment-path'
    #'-environment-pwd'
    #'-gdb-exit'
    #'-gdb-set'
    #'-gdb-show'
    #'-gdb-version'
    #'-inferior-tty-set'
    #'-inferior-tty-show'
    #'-interpreter-exec'
    #'-list-features'
    def do_apropos(self, args):
        """
        support
        NAME
            apropos -- Search for commands matching a REGEXP
        SYNOPSIS
            apropos REGEXP
        DESCRIPTION
            Type "apropos word" to search for commands related to "word".
        """
        def printAproposEntry(regexp, arg, indentation, prefix, keyword, apropos, clazz, function):
            """Dump the contents of the database as help text.
            Only leaf items which match the given regexp are emitted.
            """
            if regexp.search(keyword) or regexp.search(apropos):
                self._out("\t" + prefix + keyword + " -- " + apropos)
        #
        # We emit our help database, so that we can override GDB if needed.
        #
        if args == "":
            self._out("REGEXP string is empty")
            return
        self._out("LIST OF COMMANDS MATCHING '" + args + "'")
        self.commandDb.walk(printAproposEntry, re.compile(args, re.IGNORECASE), None, "\t")
        self._out("")
    def do_EOF(self, args):
        """
        alias
        NAME
            <Ctrl-D> -- Exit GDB.
        SYNOPSIS
            <Ctrl-D>
        DESCRIPTION
            Shortcut for "quit".
        """
        # Returning True tells cmd.Cmd to leave its command loop.
        return True
    def do_quit(self, args):
        """
        support
        NAME
            quit -- Exit GDB.
        SYNOPSIS
            quit
        DESCRIPTION
            Exit the interpreter. Shortcut: <Ctrl-D>
        """
        return True
def do_gdb(self, args):
"""
support
NAME
gdb -- Execute a GDB command directly.
SYNOPSIS
gdb NATIVE-GDB-COMMAND
DESCRIPTION
The command is executed directly, bypassing any overrides in this wrapper.
EXAMPLES
gdb help Get GDB's native help.
"""
results = self.gdb.consoleCommand(args, True)
for line in results:
self._out(line)
    def do_help(self, args):
        """
        support
        NAME
            help -- Print list of commands
        SYNOPSIS
            help [COMMAND|COMMAND-CLASS]
        DESCRIPTION
            Type "help" followed by a class name for a list of commands in that class.
            Type "help all" for the list of all commands.
            Type "help" followed by command name for full documentation.
            Type "apropos word" to search for commands related to "word".
            Command name abbreviations are allowed if unambiguous.
        """
        def printManHeader(command, apropos, synopsis, description):
            # Emit the NAME/SYNOPSIS/section header shared by all help output.
            if apropos:
                self._out("NAME\n\t" + command + " -- " + apropos)
            else:
                self._out("NAME\n\t" + command)
            if synopsis:
                self._out("\nSYNOPSIS\n\t" + synopsis.replace("\n", "\n\t"))
            if description:
                self._out("\n" + description)
        def printClassHelp(keyword):
            #
            # Now check if the user asked for class-based help.
            #
            if keyword == "all":
                #
                # We emit our help database, so that we can override GDB if needed.
                #
                self._out("LIST OF COMMANDS")
                self.commandDb.walk(printAproposEntry, "", None, "\t")
                self._out("")
                return True
            else:
                classes = [name for name in self.commandDb.classes_db if name.startswith(keyword)]
                if len(classes) == 1:
                    #
                    # Emit GDB help for the class.
                    #
                    error, helpText = self.gdb.consoleCommand("help " + classes[0], True)
                    apropos = helpText[0]
                    synopsis = None
                    # Accumulate synopsis lines until the first blank line,
                    # which separates them from the command list proper.
                    for i in range(1, len(helpText)):
                        if helpText[i] == "":
                            #
                            # Skip the "List of commands"
                            #
                            helpText = helpText[i + 1:]
                            break
                        if synopsis:
                            synopsis = "\n\t".join((synopsis, helpText[i]))
                        else:
                            synopsis = helpText[i]
                    printManHeader(classes[0], apropos, synopsis, "LIST OF COMMANDS")
                    for line in helpText[2:]:
                        self._out("\t" + line)
                    return True
                elif len(classes) > 1:
                    message = "Ambiguous keyword: help"
                    self._out(" ".join((message, keywords[0], str(sorted(classes)))))
                    self._out("^".rjust(len(message) + 2))
                    return True
            return False
        def printAproposEntry(clazzPrefix, arg, indentation, prefix, keyword, apropos, clazz, function):
            """Dump the contents of the database as help text.
            Only leaf items which match the given classification prefix are emitted.
            """
            if clazz.startswith(clazzPrefix) :
                self._out(indentation + keyword + " -- " + apropos)
        keywords = args.split()
        if (keywords):
            #
            # First try to find command-specific help.
            #
            (matched, unmatched, completions, lastMatchedEntry) = self.commandDb.lookup(args)
            if unmatched:
                if isinstance(completions, dict):
                    if printClassHelp(keywords[0]):
                        return
                    #
                    # It was not a class-based request for help...
                    #
                    message = " ".join(("Keyword not found: help", matched)).rstrip()
                    self._out(" ".join((message, unmatched, str(sorted(completions.keys())))))
                    self._out("^".rjust(len(message) + 2))
                else:
                    message = " ".join(("Ambiguous keyword: help", matched)).rstrip()
                    self._out(" ".join((message, unmatched, str(sorted(completions)))))
                    self._out("^".rjust(len(message) + 2))
                return
            #
            # We got a match!
            #
            (oldApropos, oldLevel, oldClazz, oldFunction) = completions
            if oldFunction and oldFunction.__doc__:
                #
                # Emit help for our implementation if we have it.
                # The do_xxx docstrings follow a fixed layout; line 6 holds
                # the SYNOPSIS body when one is present.
                #
                helpText = oldFunction.__doc__.split("\n")
                synopsis = helpText[6].lstrip()
                if synopsis.startswith(matched):
                    helpText = [line[2:] for line in helpText[11:]]
                else:
                    helpText = [line[2:] for line in helpText[8:]]
                    synopsis = matched
            else:
                #
                # Emit help for the GDB implementation.
                #
                error, helpText = self.gdb.consoleCommand("help " + matched, True)
                if len(helpText) > 1 and (helpText[1].startswith(matched) or helpText[1].startswith("Usage:")):
                    synopsis = helpText[1]
                    helpText = ["\t" + line for line in helpText[2:]]
                elif len(helpText) > 2 and (helpText[2].startswith(matched) or helpText[2].startswith("Usage:")):
                    synopsis = helpText[2]
                    helpText = ["\t" + line for line in helpText[3:]]
                else:
                    helpText = ["\t" + line for line in helpText]
                    synopsis = matched
                #
                # If we have a dynamically generated synopsis, use it.
                #
                try:
                    synopsis = oldFunction(None, getSynopsis = True)
                    synopsis = synopsis[:-1]
                except TypeError:
                    pass
            printManHeader(matched, oldApropos, synopsis, "DESCRIPTION")
            for line in helpText:
                self._out(line)
        else:
            #
            # Emit summary help from GDB.
            #
            helpText = self.gdb.consoleCommand("help", True)
            self._out("LIST OF CLASSES OF COMMANDS")
            for line in helpText[2:]:
                self._out("\t" + line)
    # Lazily-created IPython console shell, recreated per "python" session.
    pythonShell = None
    def do_python(self, args):
        # Enter the GDB-side Python kernel, attach an interactive IPython
        # console to its connection file, and tear both down afterwards.
        print("do_python(), calling enter", self.pythonShell)
        connectionFile = self.gdb._python.enter(args)
        if not self.pythonShell:
            self.pythonShell = IPythonConsoleShell(connection_file = connectionFile)
        self.pythonShell.interact()
        print("do_python(), pythonShell.interact done!")
        self.pythonShell.stop()
        self.gdb._python.exit()
        # del exposes the class-level None again, so the next call recreates
        # the shell from scratch.
        del self.pythonShell
#################################
## Fallthrough command handler ##
#################################
def default(self, args):
"""
Default command handler, for all commands not matched by a hand-crafted
do_xxx() handler, and any special handlers.
"""
def getenv(name):
from ctypes import CDLL, cChar_p, stringAt
libc = CDLL("libc.so.6")
libc.getenv.argtypes = [cChar_p]
libc.getenv.restype = cChar_p
return libc.getenv(name)
def expandEnvironmentVariables(line):
"""
Fetch any environment variabled, i.e. $FOO or ${FOO}
"""
regexp = re.compile(r"\${(\w+)}|\$(\w+)")
match = regexp.search(line)
while match:
#
# Extract the name of the environment variable.
#
envVar = match.group(1)
if not envVar:
envVar = match.group(2)
#
# Substitute value.
#
envVar = getenv(envVar)
if not envVar:
envVar = ""
line = line[:match.start()] + envVar + line[match.end():]
#
# No recursive resolution for us, so continue from after the
# substitution...
#
match = regexp.search(line, match.start() + len(envVar))
return line
#
# Did we get a command?
#
(matched, unmatched, completions, lastMatchedEntry) = self.commandDb.lookup(args)
if isinstance(completions, list):
self._out("Ambiguous command \"{}\": {}.".format(unmatched, ", ".join(completions)))
return
elif isinstance(completions, tuple) and completions[1]:
subcommands = completions[1]
self._out("\"{}\" must be followed by the name of an {} command.\nList of {} subcommands:\n".format(matched, matched, matched))
for k in sorted(subcommands.keys()):
self._out("{} {} -- {}".format(matched, k, subcommands[k][0]))
return
#
# Extract the arguments.
#
matchedFrags = matched.count(" ") + 1
frags = args.split(None, matchedFrags);
if matchedFrags >= len(frags):
args = ""
else:
args = frags[matchedFrags]
if matched in self.filesCommands:
dbg0("is files command {}", matched)
#
# Does the command which takes files/paths? If so, expand
# any embedded environment variables.
#
args = " ".join(expandEnvironmentVariables(args))
try:
func = getattr(self, "do_" + "_".join(matched.split()))
except AttributeError:
#
# Invoke GDB...
#
self.do_gdb(args)
else:
func(args)
    def complete(self, text, state):
        """Use the command database to provide completions."""
        # NOTE(review): cmd.Cmd expects complete() to return the state-th
        # candidate string, not the whole list; this debug-style version
        # prints the candidates and returns the list -- confirm intent.
        matchedKeywords, unmatchedKeyword, completions, lastMatchedEntry = self.commandDb.lookup(text)
        #self.stdout.write("=={}==\n".format((matched, unmatched, completions, lastMatchedEntry)))
        self.stdout.write("\n{}\n{}{}".format("\t".join(completions), self.prompt, text))
        return completions
    def completedefault(self, *ignored):
        # Debug stub: just record that we were called.
        self.stdout.write("completedefault {}".format(ignored))
    def completenames(self, text, *ignored):
        # Debug stub overriding cmd.Cmd's default name completion.
        self.stdout.write("completenames {} {}".format(text, ignored))
if __name__ == "__main__":
    import sys
    class Test(QObject):
        # NOTE(review): QObject/QCoreApplication are not imported in this
        # chunk -- presumably Qt bindings are imported earlier in the file.
        def __init__(self, parent = None):
            # Smoke test: drive the CLI against /usr/local/bin/kate.
            gdb = Cli(["gdb"], print)
            gdb.do_file("/usr/local/bin/kate")
            gdb.do_start(None)
            gdb.do_break("QWidget::QWidget")
            gdb.do_info_breakpoints(None)
            gdb.do_continue(None)
            gdb.do_x("140737488346128 x 4 8 2") # 0x7fffffffdc10
            gdb.do_disassemble("-s 140737488346128 -e 140737488346140 0") # 0x7fffffffdc10
            gdb.cmdloop()
    app = QCoreApplication(sys.argv)
    foo = Test()
    #sys.exit(app.exec_())
|
unknown
|
codeparrot/codeparrot-clean
| ||
#!/usr/bin/env python
# (c) 2013, Sebastien Goasguen <runseb@gmail.com>
#
# This file is part of Ansible,
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
######################################################################
'''
Apache Libcloud generic external inventory script
=================================
Generates inventory that Ansible can understand by making API request to
Cloud providers using the Apache libcloud library.
This script also assumes there is a libcloud.ini file alongside it
'''
import sys
import os
import argparse
import re
from time import time
import ConfigParser
from libcloud.compute.types import Provider
from libcloud.compute.providers import get_driver
import libcloud.security as sec
try:
import json
except ImportError:
import simplejson as json
class LibcloudInventory(object):
    def __init__(self):
        ''' Main execution path '''
        # Inventory grouped by instance IDs, tags, security groups, regions,
        # and availability zones
        self.inventory = {}
        # Index of hostname (address) to instance ID
        self.index = {}
        # Read settings and parse CLI arguments
        self.read_settings()
        self.parse_cli_args()
        # Cache
        # Refresh on request, or whenever the on-disk cache has expired.
        if self.args.refresh_cache:
            self.do_api_calls_update_cache()
        elif not self.is_cache_valid():
            self.do_api_calls_update_cache()
        # Data to print
        # NOTE(review): if neither --host nor --list is given,
        # data_to_print is unbound when the print below runs -- confirm
        # the argument parser guarantees one of them.
        if self.args.host:
            data_to_print = self.get_host_info()
        elif self.args.list:
            # Display list of instances for inventory
            if len(self.inventory) == 0:
                data_to_print = self.get_inventory_from_cache()
            else:
                data_to_print = self.json_format_dict(self.inventory, True)
        print data_to_print
def is_cache_valid(self):
''' Determines if the cache files have expired, or if it is still valid '''
if os.path.isfile(self.cache_path_cache):
mod_time = os.path.getmtime(self.cache_path_cache)
current_time = time()
if (mod_time + self.cache_max_age) > current_time:
if os.path.isfile(self.cache_path_index):
return True
return False
def read_settings(self):
''' Reads the settings from the libcloud.ini file '''
config = ConfigParser.SafeConfigParser()
libcloud_default_ini_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'libcloud.ini')
libcloud_ini_path = os.environ.get('LIBCLOUD_INI_PATH', libcloud_default_ini_path)
config.read(libcloud_ini_path)
if not config.has_section('driver'):
raise ValueError('libcloud.ini file must contain a [driver] section')
if config.has_option('driver', 'provider'):
self.provider = config.get('driver','provider')
else:
raise ValueError('libcloud.ini does not have a provider defined')
if config.has_option('driver', 'key'):
self.key = config.get('driver','key')
else:
raise ValueError('libcloud.ini does not have a key defined')
if config.has_option('driver', 'secret'):
self.secret = config.get('driver','secret')
else:
raise ValueError('libcloud.ini does not have a secret defined')
if config.has_option('driver', 'host'):
self.host = config.get('driver', 'host')
if config.has_option('driver', 'secure'):
self.secure = config.get('driver', 'secure')
if config.has_option('driver', 'verify_ssl_cert'):
self.verify_ssl_cert = config.get('driver', 'verify_ssl_cert')
if config.has_option('driver', 'port'):
self.port = config.get('driver', 'port')
if config.has_option('driver', 'path'):
self.path = config.get('driver', 'path')
if config.has_option('driver', 'api_version'):
self.api_version = config.get('driver', 'api_version')
Driver = get_driver(getattr(Provider, self.provider))
self.conn = Driver(key=self.key, secret=self.secret, secure=self.secure,
host=self.host, path=self.path)
# Cache related
cache_path = config.get('cache', 'cache_path')
self.cache_path_cache = cache_path + "/ansible-libcloud.cache"
self.cache_path_index = cache_path + "/ansible-libcloud.index"
self.cache_max_age = config.getint('cache', 'cache_max_age')
def parse_cli_args(self):
'''
Command line argument processing
'''
parser = argparse.ArgumentParser(description='Produce an Ansible Inventory file based on libcloud supported providers')
parser.add_argument('--list', action='store_true', default=True,
help='List instances (default: True)')
parser.add_argument('--host', action='store',
help='Get all the variables about a specific instance')
parser.add_argument('--refresh-cache', action='store_true', default=False,
help='Force refresh of cache by making API requests to libcloud supported providers (default: False - use cache files)')
self.args = parser.parse_args()
def do_api_calls_update_cache(self):
'''
Do API calls to a location, and save data in cache files
'''
self.get_nodes()
self.write_to_cache(self.inventory, self.cache_path_cache)
self.write_to_cache(self.index, self.cache_path_index)
def get_nodes(self):
'''
Gets the list of all nodes
'''
for node in self.conn.list_nodes():
self.add_node(node)
def get_node(self, node_id):
'''
Gets details about a specific node
'''
return [node for node in self.conn.list_nodes() if node.id == node_id][0]
def add_node(self, node):
'''
Adds a node to the inventory and index, as long as it is
addressable
'''
# Only want running instances
if node.state != 0:
return
# Select the best destination address
if not node.public_ips == []:
dest = node.public_ips[0]
if not dest:
# Skip instances we cannot address (e.g. private VPC subnet)
return
# Add to index
self.index[dest] = node.name
# Inventory: Group by instance ID (always a group of 1)
self.inventory[node.name] = [dest]
'''
# Inventory: Group by region
self.push(self.inventory, region, dest)
# Inventory: Group by availability zone
self.push(self.inventory, node.placement, dest)
# Inventory: Group by instance type
self.push(self.inventory, self.to_safe('type_' + node.instance_type), dest)
'''
# Inventory: Group by key pair
if node.extra['key_name']:
self.push(self.inventory, self.to_safe('key_' + node.extra['key_name']), dest)
# Inventory: Group by security group, quick thing to handle single sg
if node.extra['security_group']:
self.push(self.inventory, self.to_safe('sg_' + node.extra['security_group'][0]), dest)
# Inventory: Group by tag
if node.extra['tags']:
for tagkey in node.extra['tags'].keys():
self.push(self.inventory, self.to_safe('tag_' + tagkey + '_' + node.extra['tags'][tagkey]), dest)
def get_host_info(self):
'''
Get variables about a specific host
'''
if len(self.index) == 0:
# Need to load index from cache
self.load_index_from_cache()
if not self.args.host in self.index:
# try updating the cache
self.do_api_calls_update_cache()
if not self.args.host in self.index:
# host migh not exist anymore
return self.json_format_dict({}, True)
node_id = self.index[self.args.host]
node = self.get_node(node_id)
instance_vars = {}
for key in vars(instance):
value = getattr(instance, key)
key = self.to_safe('ec2_' + key)
# Handle complex types
if type(value) in [int, bool]:
instance_vars[key] = value
elif type(value) in [str, unicode]:
instance_vars[key] = value.strip()
elif type(value) == type(None):
instance_vars[key] = ''
elif key == 'ec2_region':
instance_vars[key] = value.name
elif key == 'ec2_tags':
for k, v in value.iteritems():
key = self.to_safe('ec2_tag_' + k)
instance_vars[key] = v
elif key == 'ec2_groups':
group_ids = []
group_names = []
for group in value:
group_ids.append(group.id)
group_names.append(group.name)
instance_vars["ec2_security_group_ids"] = ','.join(group_ids)
instance_vars["ec2_security_group_names"] = ','.join(group_names)
else:
pass
# TODO Product codes if someone finds them useful
#print key
#print type(value)
#print value
return self.json_format_dict(instance_vars, True)
def push(self, my_dict, key, element):
'''
Pushed an element onto an array that may not have been defined in
the dict
'''
if key in my_dict:
my_dict[key].append(element);
else:
my_dict[key] = [element]
def get_inventory_from_cache(self):
'''
Reads the inventory from the cache file and returns it as a JSON
object
'''
cache = open(self.cache_path_cache, 'r')
json_inventory = cache.read()
return json_inventory
def load_index_from_cache(self):
'''
Reads the index from the cache file sets self.index
'''
cache = open(self.cache_path_index, 'r')
json_index = cache.read()
self.index = json.loads(json_index)
def write_to_cache(self, data, filename):
'''
Writes data in JSON format to a file
'''
json_data = self.json_format_dict(data, True)
cache = open(filename, 'w')
cache.write(json_data)
cache.close()
def to_safe(self, word):
'''
Converts 'bad' characters in a string to underscores so they can be
used as Ansible groups
'''
return re.sub("[^A-Za-z0-9\-]", "_", word)
def json_format_dict(self, data, pretty=False):
'''
Converts a dict to a JSON object and dumps it as a formatted
string
'''
if pretty:
return json.dumps(data, sort_keys=True, indent=2)
else:
return json.dumps(data)
def main():
    '''Entry point: constructing the inventory object does all the work
    (reads settings, queries the provider, prints JSON).'''
    LibcloudInventory()


if __name__ == '__main__':
    main()
|
unknown
|
codeparrot/codeparrot-clean
| ||
// Directory name of this example inside the Next.js examples tree.
export const EXAMPLE_PATH = "cms-prismic";
// Human-readable name of the CMS backing this blog example.
export const CMS_NAME = "Prismic";
// Link target for the CMS name shown in the UI.
export const CMS_URL = "https://prismic.io/";
// Pre-rendered Open Graph image for the home page (og-image.vercel.app).
export const HOME_OG_IMAGE_URL =
  "https://og-image.vercel.app/Next.js%20Blog%20Example%20with%20**Prismic**.png?theme=light&md=1&fontSize=75px&images=https%3A%2F%2Fassets.vercel.com%2Fimage%2Fupload%2Ffront%2Fassets%2Fdesign%2Fnextjs-black-logo.svg&images=https%3A%2F%2Fi.imgur.com%2FGVmKYul.png&widths=undefined&widths=auto&heights=undefined&heights=100";
|
typescript
|
github
|
https://github.com/vercel/next.js
|
examples/cms-prismic/lib/constants.ts
|
// Copyright 2023 The Cockroach Authors.
//
// Use of this software is governed by the CockroachDB Software License
// included in the /LICENSE file.
package backup
import (
"context"
"github.com/cockroachdb/cockroach/pkg/kv"
"github.com/cockroachdb/cockroach/pkg/kv/kvpb"
"github.com/cockroachdb/cockroach/pkg/roachpb"
"github.com/cockroachdb/cockroach/pkg/storage"
"github.com/cockroachdb/cockroach/pkg/util/admission/admissionpb"
"github.com/cockroachdb/cockroach/pkg/util/hlc"
"github.com/cockroachdb/cockroach/pkg/util/timeutil"
)
// VersionedValues is similar to roachpb.KeyValue except instead of just the
// value at one time, it contains all the retrieved revisions of the value for
// the key, with the value timestamps set accordingly.
type VersionedValues struct {
	Key    roachpb.Key     // the key shared by every revision below
	Values []roachpb.Value // revisions of Key; each Value carries its MVCC timestamp
}
// GetAllRevisions scans all keys between startKey and endKey getting all
// revisions between startTime and endTime.
//
// Revisions are delivered on allRevs in batches, one batch per
// ExportRequest page; the function returns when the scan completes, an
// error occurs, or ctx is canceled.
func GetAllRevisions(
	ctx context.Context,
	db *kv.DB,
	startKey, endKey roachpb.Key,
	startTime, endTime hlc.Timestamp,
	allRevs chan []VersionedValues,
) error {
	// One iteration per ExportRequest page; startKey advances to the
	// resume span's key after each page.
	for {
		header := kvpb.Header{
			Timestamp: endTime,
			// Ask KV to return a resume span (rather than keep going) when
			// elastic CPU limits are reached, enabling pagination below.
			ReturnElasticCPUResumeSpans: true,
		}
		req := &kvpb.ExportRequest{
			RequestHeader: kvpb.RequestHeader{Key: startKey, EndKey: endKey},
			StartTime:     startTime,
			// Export every revision of each key, not just the latest.
			MVCCFilter: kvpb.MVCCFilter_All,
		}
		// Non-transactional send with bulk-priority admission control.
		resp, pErr := kv.SendWrappedWithAdmission(ctx, db.NonTransactionalSender(), header, kvpb.AdmissionHeader{
			Priority:                 int32(admissionpb.BulkNormalPri),
			CreateTime:               timeutil.Now().UnixNano(),
			Source:                   kvpb.AdmissionHeader_FROM_SQL,
			NoMemoryReservedAtSource: true,
		}, req)
		if pErr != nil {
			return pErr.GoError()
		}
		exportResp := resp.(*kvpb.ExportResponse)
		var res []VersionedValues
		for _, file := range exportResp.Files {
			// Per-file closure so the deferred iter.Close() runs after each
			// SST instead of accumulating until the function returns.
			err := func() error {
				iterOpts := storage.IterOptions{
					KeyTypes:   storage.IterKeyTypePointsOnly,
					LowerBound: file.Span.Key,
					UpperBound: file.Span.EndKey,
				}
				iter, err := storage.NewMemSSTIterator(file.SST, true, iterOpts)
				if err != nil {
					return err
				}
				defer iter.Close()
				iter.SeekGE(storage.MVCCKey{Key: startKey})
				for ; ; iter.Next() {
					if valid, err := iter.Valid(); !valid || err != nil {
						if err != nil {
							return err
						}
						break
					} else if iter.UnsafeKey().Key.Compare(endKey) >= 0 {
						break
					}
					key := iter.UnsafeKey()
					// Copy key and value out of the iterator's buffers; the
					// Unsafe* contents are invalidated by the next step.
					keyCopy := make([]byte, len(key.Key))
					copy(keyCopy, key.Key)
					key.Key = keyCopy
					v, err := iter.UnsafeValue()
					if err != nil {
						return err
					}
					value := make([]byte, len(v))
					copy(value, v)
					// Revisions of a key arrive consecutively, so a new
					// VersionedValues entry is started only when the key
					// changes from the previous one.
					if len(res) == 0 || !res[len(res)-1].Key.Equal(key.Key) {
						res = append(res, VersionedValues{Key: key.Key})
					}
					res[len(res)-1].Values = append(res[len(res)-1].Values, roachpb.Value{Timestamp: key.Timestamp, RawBytes: value})
				}
				return nil
			}()
			if err != nil {
				return err
			}
		}
		// Deliver this page's revisions, honoring cancellation.
		select {
		case <-ctx.Done():
			return ctx.Err()
		case allRevs <- res:
		}

		// Check if the ExportRequest paginated with a resume span.
		if exportResp.ResumeSpan == nil {
			return nil
		}
		startKey = exportResp.ResumeSpan.Key
	}
}
|
go
|
github
|
https://github.com/cockroachdb/cockroach
|
pkg/backup/revision_reader.go
|
/*
* Copyright 2002-present the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.beans.factory
import io.mockk.every
import io.mockk.mockk
import io.mockk.verify
import org.junit.jupiter.api.Test
import kotlin.reflect.full.createInstance
/**
 * Mock object based tests for ListableBeanFactory Kotlin extensions.
 *
 * Each test invokes a reified extension function on a mocked
 * [ListableBeanFactory] and verifies it delegates to the matching
 * Class-based Java API method with the expected default arguments.
 *
 * @author Sebastien Deleuze
 */
class ListableBeanFactoryExtensionsTests {

	// Relaxed mock: unstubbed calls return defaults instead of throwing.
	val lbf = mockk<ListableBeanFactory>(relaxed = true)

	@Test
	fun `getBeanNamesForType with reified type parameters`() {
		lbf.getBeanNamesForType<Foo>()
		// Both flags (includeNonSingletons, allowEagerInit) default to true.
		verify { lbf.getBeanNamesForType(Foo::class.java, true , true) }
	}

	@Test
	fun `getBeanNamesForType with reified type parameters and Boolean`() {
		lbf.getBeanNamesForType<Foo>(false)
		verify { lbf.getBeanNamesForType(Foo::class.java, false , true) }
	}

	@Test
	fun `getBeanNamesForType with reified type parameters, Boolean and Boolean`() {
		lbf.getBeanNamesForType<Foo>(false, false)
		verify { lbf.getBeanNamesForType(Foo::class.java, false , false) }
	}

	@Test
	fun `getBeansOfType with reified type parameters`() {
		lbf.getBeansOfType<Foo>()
		// Both flags default to true, mirroring getBeanNamesForType.
		verify { lbf.getBeansOfType(Foo::class.java, true , true) }
	}

	@Test
	fun `getBeansOfType with reified type parameters and Boolean`() {
		lbf.getBeansOfType<Foo>(false)
		verify { lbf.getBeansOfType(Foo::class.java, false , true) }
	}

	@Test
	fun `getBeansOfType with reified type parameters, Boolean and Boolean`() {
		lbf.getBeansOfType<Foo>(false, false)
		verify { lbf.getBeansOfType(Foo::class.java, false , false) }
	}

	@Test
	fun `getBeanNamesForAnnotation with reified type parameters`() {
		lbf.getBeanNamesForAnnotation<Bar>()
		verify { lbf.getBeanNamesForAnnotation(Bar::class.java) }
	}

	@Test
	fun `getBeansWithAnnotation with reified type parameters`() {
		lbf.getBeansWithAnnotation<Bar>()
		verify { lbf.getBeansWithAnnotation(Bar::class.java) }
	}

	@Suppress("UNUSED_VARIABLE")
	@Test
	fun `findAnnotationOnBean with String and reified type parameters`() {
		val name = "bar"
		every { lbf.findAnnotationOnBean(name, Bar::class.java) } returns Bar::class.createInstance()
		val annotation: Bar? = lbf.findAnnotationOnBean(name)
		verify { lbf.findAnnotationOnBean(name, Bar::class.java) }
	}

	// Minimal fixture types used as reified type arguments above.
	class Foo

	annotation class Bar
}
|
kotlin
|
github
|
https://github.com/spring-projects/spring-framework
|
spring-beans/src/test/kotlin/org/springframework/beans/factory/ListableBeanFactoryExtensionsTests.kt
|
#pragma once
#ifdef USE_VULKAN_API
#include <ATen/native/vulkan/ops/Common.h>
#include <ATen/native/vulkan/ops/VulkanPackedContext.h>
namespace at {
namespace native {
namespace vulkan {
namespace ops {
// Algorithm variant used to implement a 2d convolution; the chosen value is
// stored in the packed list at Conv2dPackedContext::Packed::ConvMethod.
enum Conv2dMethod {
  Conv2dDepthwise,     // depthwise convolution
  Conv2dPointwise,     // pointwise (1x1) convolution
  Conv2dSlidingWindow, // general sliding-window convolution
};
namespace conv2d {

// Rearranges a depthwise convolution's weight tensor into the memory layout
// used by the Vulkan kernels (see the definitions for the exact layout).
Tensor rearrange_weights_dw(const Tensor& weight_in);
// Rearranges a general 2d convolution's weight tensor; tconv selects the
// transposed-convolution layout.
Tensor rearrange_weights_2d(const Tensor& weight_in, bool tconv);
// Packs the optional bias into a tensor sized to match the rearranged
// weights (weight_in supplies the dimensions; tconv selects the transposed
// layout).
Tensor rearrange_bias(
    const std::optional<Tensor>& bias_in,
    const at::Tensor& weight_in,
    bool tconv);

} // namespace conv2d
namespace qconv2d_vk {

// Parameter block for the quantized conv2d Vulkan shaders.
// NOTE(review): the field order presumably mirrors the shader's uniform
// block layout — confirm against the GLSL before reordering anything.
struct QParams final {
  api::utils::uvec3 out_extents; // output texture extents
  int32_t ic4;                   // presumably input channels / 4 (texel packing) — TODO confirm
  api::utils::ivec4 sizes_2d;
  float output_scale;            // quantization scale of the output
  float input_scale;             // quantization scale of the input
  int32_t output_zero_point;     // quantization zero point of the output
  int32_t input_zero_point;      // quantization zero point of the input
  float weight_scale;
  float bias_scale;
  int32_t weight_zero_point;
  int32_t bias_zero_point;
  api::utils::ivec2 kernel_size;
  api::utils::ivec2 stride;
  api::utils::ivec2 padding;
  api::utils::ivec2 dilate;
  api::utils::vec2 clamp;        // presumably (min, max) output clamp — TODO confirm
  api::utils::ivec4 src_filter;
};

} // namespace qconv2d_vk
// Holds a conv2d op's arguments. unpack() returns the original (unpacked)
// argument list, indexed by Unpacked::*; the packed, GPU-ready form is
// indexed by Packed::*.
class Conv2dPackedContext final : virtual public VulkanPackedContext,
                                  public torch::jit::CustomClassHolder {
 private:
  // Original constructor arguments, in Unpacked:: order.
  c10::impl::GenericList unpacked_;
  // Compute shader associated with this context.
  api::ShaderInfo compute_shader_{};

 public:
  Conv2dPackedContext(
      const Tensor& weight,
      const std::optional<Tensor>& bias,
      const IntArrayRef stride_arg,
      const IntArrayRef padding_arg,
      const IntArrayRef dilation_arg,
      const bool transposed,
      const bool quantized,
      const IntArrayRef output_padding_arg,
      const int64_t groups,
      const std::optional<Scalar>& output_min = std::nullopt,
      const std::optional<Scalar>& output_max = std::nullopt);

  /*
   * Assigns a name to each index in the unpacked list.
   */
  struct Unpacked final {
    static constexpr uint32_t Weight = 0u;
    static constexpr uint32_t Bias = 1u;
    static constexpr uint32_t Stride = 2u;
    static constexpr uint32_t Padding = 3u;
    static constexpr uint32_t Dilation = 4u;
    static constexpr uint32_t isTransposed = 5u;
    static constexpr uint32_t isQuantized = 6u;
    static constexpr uint32_t OutputPadding = 7u;
    static constexpr uint32_t Groups = 8u;
    static constexpr uint32_t OutputMin = 9u;
    static constexpr uint32_t OutputMax = 10u;

    static constexpr uint32_t NumArgs = 11u;
  };

  /*
   * Assigns a name to each index in the packed list.
   */
  struct Packed final {
    static constexpr uint32_t Weight = 0u;
    static constexpr uint32_t Bias = 1u;
    static constexpr uint32_t OverlayRegion = 2u;
    static constexpr uint32_t Stride = 3u;
    static constexpr uint32_t Padding = 4u;
    static constexpr uint32_t OutputPadding = 5u;
    static constexpr uint32_t Dilation = 6u;
    static constexpr uint32_t isTransposed = 7u;
    static constexpr uint32_t isQuantized = 8u;
    static constexpr uint32_t Groups = 9u;
    static constexpr uint32_t OutputMin = 10u;
    static constexpr uint32_t OutputMax = 11u;
    static constexpr uint32_t ConvMethod = 12u;
    static constexpr uint32_t WeightSizes = 13u;

    static constexpr uint32_t NumArgs = 14u;
  };

  // Reconstructs a context from an unpacked argument list (Unpacked:: order).
  static Conv2dPackedContext pack(c10::impl::GenericList);

  const c10::impl::GenericList unpack() const override {
    TORCH_CHECK(!unpacked_.empty(), "unpacked_ does not have any elements!");

    return unpacked_;
  }

  inline api::ShaderInfo& compute_shader() {
    return compute_shader_;
  }
};
// Factory/run pairs for each conv2d flavor. Each create_* packs arguments
// into a Conv2dPackedContext; the matching run_* applies the packed
// convolution to an input tensor.

// Standard (non-transposed, non-quantized) conv2d.
c10::intrusive_ptr<Conv2dPackedContext> create_conv2d_context(
    Tensor&& weight,
    std::optional<Tensor>&& bias,
    std::vector<int64_t>&& stride,
    std::vector<int64_t>&& padding,
    std::vector<int64_t>&& dilation,
    const int64_t groups,
    const std::optional<Scalar>& output_min = std::nullopt,
    const std::optional<Scalar>& output_max = std::nullopt);

Tensor run_conv2d_context(
    const Tensor& input,
    const c10::intrusive_ptr<Conv2dPackedContext>& context);

// Transposed conv2d; takes the extra output_padding argument.
c10::intrusive_ptr<Conv2dPackedContext> create_tconv2d_context(
    Tensor&& weight,
    std::optional<Tensor>&& bias,
    std::vector<int64_t>&& stride,
    std::vector<int64_t>&& padding,
    std::vector<int64_t>&& output_padding,
    std::vector<int64_t>&& dilation,
    const int64_t groups,
    const std::optional<Scalar>& output_min = std::nullopt,
    const std::optional<Scalar>& output_max = std::nullopt);

Tensor run_tconv2d_context(
    const Tensor& input,
    const c10::intrusive_ptr<Conv2dPackedContext>& context);

// Quantized conv2d.
c10::intrusive_ptr<Conv2dPackedContext> create_qconv2d_context(
    Tensor&& weight,
    std::optional<Tensor>&& bias,
    std::vector<int64_t>&& stride,
    std::vector<int64_t>&& padding,
    std::vector<int64_t>&& dilation,
    const int64_t groups,
    const std::optional<Scalar>& output_min = std::nullopt,
    const std::optional<Scalar>& output_max = std::nullopt);

// scale/zero_point are presumably the output quantization parameters —
// confirm against the definition.
Tensor run_qconv2d_context(
    const Tensor& input_arg,
    double scale,
    int64_t zero_point,
    const c10::intrusive_ptr<Conv2dPackedContext>& conv_context);

// Quantized transposed conv2d (no dedicated run_* is declared here).
c10::intrusive_ptr<Conv2dPackedContext> create_qtconv2d_context(
    Tensor&& weight,
    std::optional<Tensor>&& bias,
    std::vector<int64_t>&& stride,
    std::vector<int64_t>&& padding,
    std::vector<int64_t>&& output_padding,
    std::vector<int64_t>&& dilation,
    const int64_t groups,
    const std::optional<Scalar>& output_min = std::nullopt,
    const std::optional<Scalar>& output_max = std::nullopt);
// Backwards compatibility: legacy custom-class wrapper that delegates to a
// Conv2dPackedContext.
class Conv2dOpContext final : public torch::jit::CustomClassHolder {
 public:
  // Builds a context from the given arguments (mirrors the
  // Conv2dPackedContext constructor, minus the `quantized` flag).
  static Conv2dOpContext create(
      const Tensor& weight,
      const std::optional<Tensor>& bias,
      IntArrayRef stride,
      IntArrayRef padding,
      IntArrayRef dilation,
      bool transposed,
      IntArrayRef output_padding,
      int64_t groups,
      const std::optional<Scalar>& output_min = std::nullopt,
      const std::optional<Scalar>& output_max = std::nullopt);

  // Serialized form: (weight, bias, stride, padding, dilation, groups,
  // output_min, output_max).
  using State = std::tuple<
      Tensor,
      std::optional<Tensor>,
      std::vector<int64_t>,
      std::vector<int64_t>,
      std::vector<int64_t>,
      int64_t,
      std::optional<Scalar>,
      std::optional<Scalar>>;

  // Applies the packed convolution to input.
  Tensor run(const Tensor& input) const;
  // Returns the State tuple for serialization.
  State unpack() const;

 private:
  explicit Conv2dOpContext(Conv2dPackedContext conv_context);
  // The modern packed context this legacy wrapper delegates to.
  Conv2dPackedContext conv_context_;
};
// Legacy entry points paired with Conv2dOpContext (kept for backwards
// compatibility with the conv2d_clamp op names).
Tensor conv2d_clamp_run(
    const Tensor& input,
    const c10::intrusive_ptr<Conv2dOpContext>& context);

c10::intrusive_ptr<Conv2dOpContext> conv2d_clamp_prepack(
    Tensor&& weight,
    std::optional<Tensor>&& bias,
    std::vector<int64_t>&& stride,
    std::vector<int64_t>&& padding,
    std::vector<int64_t>&& dilation,
    const int64_t groups,
    const std::optional<Scalar>& output_min,
    const std::optional<Scalar>& output_max);
// Packed representation of a conv1d op's arguments; same structure as
// Conv2dPackedContext but without transposed/quantized/output-padding or
// clamp arguments.
class Conv1dPackedContext final : virtual public VulkanPackedContext,
                                  public torch::jit::CustomClassHolder {
 private:
  // Original constructor arguments, in Unpacked:: order.
  c10::impl::GenericList unpacked_;
  // Compute shader associated with this context.
  api::ShaderInfo compute_shader_{};

 public:
  Conv1dPackedContext(
      const Tensor& weight,
      const std::optional<Tensor>& bias,
      const IntArrayRef stride_arg,
      const IntArrayRef padding_arg,
      const IntArrayRef dilation_arg,
      const int64_t groups);

  /*
   * Assigns a name to each index in the unpacked list.
   */
  struct Unpacked final {
    static constexpr uint32_t Weight = 0u;
    static constexpr uint32_t Bias = 1u;
    static constexpr uint32_t Stride = 2u;
    static constexpr uint32_t Padding = 3u;
    static constexpr uint32_t Dilation = 4u;
    static constexpr uint32_t Groups = 5u;

    static constexpr uint32_t NumArgs = 6u;
  };

  /*
   * Assigns a name to each index in the packed list.
   */
  struct Packed final {
    static constexpr uint32_t Weight = 0u;
    static constexpr uint32_t Bias = 1u;
    static constexpr uint32_t Stride = 2u;
    static constexpr uint32_t Padding = 3u;
    static constexpr uint32_t Dilation = 4u;
    static constexpr uint32_t Groups = 5u;
    static constexpr uint32_t WeightSizes = 6u;

    static constexpr uint32_t NumArgs = 7u;
  };

  // Reconstructs a context from an unpacked argument list (Unpacked:: order).
  static Conv1dPackedContext pack(c10::impl::GenericList);

  const c10::impl::GenericList unpack() const override {
    TORCH_CHECK(!unpacked_.empty(), "unpacked_ does not have any elements!");

    return unpacked_;
  }

  inline api::ShaderInfo& compute_shader() {
    return compute_shader_;
  }
};
// Factory/run pair for conv1d, mirroring the conv2d pair above.
c10::intrusive_ptr<Conv1dPackedContext> create_conv1d_context(
    Tensor&& weight,
    std::optional<Tensor>&& bias,
    std::vector<int64_t>&& stride,
    std::vector<int64_t>&& padding,
    std::vector<int64_t>&& dilation,
    const int64_t groups);

Tensor run_conv1d_context(
    const Tensor& input,
    const c10::intrusive_ptr<Conv1dPackedContext>& context);
} // namespace ops
} // namespace vulkan
} // namespace native
} // namespace at
#endif /* USE_VULKAN_API */
|
c
|
github
|
https://github.com/pytorch/pytorch
|
aten/src/ATen/native/vulkan/ops/Convolution.h
|
/* Constrain the demo list groups and give them horizontal breathing room. */
.list-group {
  width: 100%;
  max-width: 460px;
  margin-inline: 1.5rem;
}

/* Dim an item's label content once its checkbox is checked. */
.form-check-input:checked + .form-checked-content {
  opacity: .5;
}

/* Dashed outline marks the "add new item" placeholder control. */
.form-check-input-placeholder {
  border-style: dashed;
}
/* Suppress the focus ring on inline-editable labels. */
[contenteditable]:focus {
  outline: 0;
}

.list-group-checkable .list-group-item {
  cursor: pointer;
}
/* Visually hide the real input; the sibling .list-group-item acts as its
   label/control surface. */
.list-group-item-check {
  position: absolute;
  clip: rect(0, 0, 0, 0);
}
.list-group-item-check:hover + .list-group-item {
  background-color: var(--bs-secondary-bg);
}
.list-group-item-check:checked + .list-group-item {
  color: #fff;
  background-color: var(--bs-primary);
  border-color: var(--bs-primary);
}
/* Disabled state: inert and dimmed. */
.list-group-item-check[disabled] + .list-group-item,
.list-group-item-check:disabled + .list-group-item {
  pointer-events: none;
  filter: none;
  opacity: .5;
}

/* Radio variant: visible radio control overlaid on a rounded item. */
.list-group-radio .list-group-item {
  cursor: pointer;
  border-radius: .5rem;
}
.list-group-radio .form-check-input {
  z-index: 2;
  margin-top: -.5em;
}
.list-group-radio .list-group-item:hover,
.list-group-radio .list-group-item:focus {
  background-color: var(--bs-secondary-bg);
}
/* Checked item gets a primary-colored ring instead of a fill. */
.list-group-radio .form-check-input:checked + .list-group-item {
  background-color: var(--bs-body);
  border-color: var(--bs-primary);
  box-shadow: 0 0 0 2px var(--bs-primary);
}
.list-group-radio .form-check-input[disabled] + .list-group-item,
.list-group-radio .form-check-input:disabled + .list-group-item {
  pointer-events: none;
  filter: none;
  opacity: .5;
}
|
css
|
github
|
https://github.com/twbs/bootstrap
|
site/src/assets/examples/list-groups/list-groups.css
|
from typing import Any, Union
from ..utils import add_end_docstrings, is_vision_available
from .base import GenericTensor, Pipeline, build_pipeline_init_args
if is_vision_available():
from PIL import Image
from ..image_utils import load_image
@add_end_docstrings(
    build_pipeline_init_args(has_image_processor=True),
    """
        image_processor_kwargs (`dict`, *optional*):
            Additional dictionary of keyword arguments passed along to the image processor e.g.
            {"size": {"height": 100, "width": 100}}
        pool (`bool`, *optional*, defaults to `False`):
            Whether or not to return the pooled output. If `False`, the model will return the raw hidden states.
    """,
)
class ImageFeatureExtractionPipeline(Pipeline):
    """
    Image feature extraction pipeline uses no model head. This pipeline extracts the hidden states from the base
    transformer, which can be used as features in downstream tasks.

    Example:

    ```python
    >>> from transformers import pipeline

    >>> extractor = pipeline(model="google/vit-base-patch16-224", task="image-feature-extraction")
    >>> result = extractor("https://huggingface.co/datasets/Narsil/image_dummy/raw/main/parrots.png", return_tensors=True)
    >>> result.shape  # This is a tensor of shape [1, sequence_length, hidden_dimension] representing the input image.
    torch.Size([1, 197, 768])
    ```

    Learn more about the basics of using a pipeline in the [pipeline tutorial](../pipeline_tutorial)

    This image feature extraction pipeline can currently be loaded from [`pipeline`] using the task identifier:
    `"image-feature-extraction"`.

    All vision models may be used for this pipeline. See a list of all models, including community-contributed models on
    [huggingface.co/models](https://huggingface.co/models).
    """

    # Component-loading flags for the Pipeline base class: this task only
    # needs an image processor.
    _load_processor = False
    _load_image_processor = True
    _load_feature_extractor = False
    _load_tokenizer = False

    def _sanitize_parameters(self, image_processor_kwargs=None, return_tensors=None, pool=None, **kwargs):
        # Split caller kwargs into the (preprocess, forward, postprocess)
        # parameter dicts that the Pipeline base class expects.
        preprocess_params = image_processor_kwargs if image_processor_kwargs is not None else {}
        postprocess_params = {}
        for name, value in (("pool", pool), ("return_tensors", return_tensors)):
            if value is not None:
                postprocess_params[name] = value
        if "timeout" in kwargs:
            preprocess_params["timeout"] = kwargs["timeout"]
        return preprocess_params, {}, postprocess_params

    def preprocess(self, image, timeout=None, **image_processor_kwargs) -> dict[str, GenericTensor]:
        # Resolve a URL / local path / PIL image into a PIL image, then
        # tensorize it with the image processor and cast to the pipeline dtype.
        loaded = load_image(image, timeout=timeout)
        processed = self.image_processor(loaded, return_tensors="pt", **image_processor_kwargs)
        return processed.to(self.dtype)

    def _forward(self, model_inputs):
        return self.model(**model_inputs)

    def postprocess(self, model_outputs, pool=None, return_tensors=False):
        if pool:
            if "pooler_output" not in model_outputs:
                raise ValueError(
                    "No pooled output was returned. Make sure the model has a `pooler` layer when using the `pool` option."
                )
            features = model_outputs["pooler_output"]
        else:
            # [0] is the first available tensor, logits or last_hidden_state.
            features = model_outputs[0]
        return features if return_tensors else features.tolist()

    def __call__(self, *args: Union[str, "Image.Image", list["Image.Image"], list[str]], **kwargs: Any) -> list[Any]:
        """
        Extract the features of the input(s).

        Args:
            images (`str`, `list[str]`, `PIL.Image` or `list[PIL.Image]`):
                The pipeline handles three types of images:

                - A string containing a http link pointing to an image
                - A string containing a local path to an image
                - An image loaded in PIL directly

                The pipeline accepts either a single image or a batch of images, which must then be passed as a string.
                Images in a batch must all be in the same format: all as http links, all as local paths, or all as PIL
                images.
            timeout (`float`, *optional*, defaults to None):
                The maximum time in seconds to wait for fetching images from the web. If None, no timeout is used and
                the call may block forever.
        Return:
            A nested list of `float`: The features computed by the model.
        """
        return super().__call__(*args, **kwargs)
|
python
|
github
|
https://github.com/huggingface/transformers
|
src/transformers/pipelines/image_feature_extraction.py
|
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Small utility library of python functions used during SDK building.
"""
import os
import re
import sys
# pylint: disable=E0602
# (E0602 fires on MAJOR/MINOR/BUILD/PATCH, which exec() injects below.)

# Reuse last change utility code.
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
# Three directory levels up from this script is the Chromium src/ root.
SRC_DIR = os.path.dirname(os.path.dirname(os.path.dirname(SCRIPT_DIR)))
# Make src/build/util importable so we can reuse the lastchange helpers.
sys.path.append(os.path.join(SRC_DIR, 'build/util'))
import lastchange

# Location of chrome's version file.
VERSION_PATH = os.path.join(SRC_DIR, 'chrome', 'VERSION')
def ChromeVersion():
    '''Extract chrome version from src/chrome/VERSION + svn.

    Returns:
        Chrome version string or trunk + svn rev.
    '''
    info = FetchGitCommitPosition()
    # A 'git' sentinel url means a Cr-Commit-Position footer was found.
    if info.url == 'git':
        _, ref, revision = ParseCommitPosition(info.revision)
        if ref == 'refs/heads/master':
            # Master builds are reported as trunk.<commit position>.
            return 'trunk.%s' % revision
    # Branch builds (or no git info) fall back to the VERSION file.
    return ChromeVersionNoTrunk()
def ChromeVersionNoTrunk():
    '''Extract the chrome version from src/chrome/VERSION.
    Ignore whether this is a trunk build.

    Returns:
        Chrome version string.
    '''
    # The VERSION file consists of exec()-able NAME=VALUE assignments;
    # running it defines MAJOR/MINOR/BUILD/PATCH as locals. (This relies on
    # Python 2 exec semantics — exec into function locals is unreliable in
    # Python 3.)
    exec(open(VERSION_PATH).read())
    return '%s.%s.%s.%s' % (MAJOR, MINOR, BUILD, PATCH)
def ChromeMajorVersion():
    '''Extract chrome major version from src/chrome/VERSION.

    Returns:
        Chrome major version.
    '''
    # Same exec()-based trick as ChromeVersionNoTrunk: defines MAJOR (and
    # friends) as locals. Python 2 only — see note there.
    exec(open(VERSION_PATH, 'r').read())
    return str(MAJOR)
def ChromeRevision():
    '''Extract chrome revision from svn.

    Now that the Chrome source-of-truth is git, this returns the
    Cr-Commit-Position number instead; that value equals the SVN revision
    when one exists.

    Returns:
        The Chrome revision as a string. e.g. "12345"
    '''
    position = FetchGitCommitPosition().revision
    # ParseCommitPosition yields (hash, ref, number); keep only the number.
    return ParseCommitPosition(position)[2]
def ChromeCommitPosition():
    '''Return the full git sha and commit position.

    Returns:
        A value like:
        0178d4831bd36b5fb9ff477f03dc43b11626a6dc-refs/heads/master@{#292238}
    '''
    info = FetchGitCommitPosition()
    return info.revision
def NaClRevision():
    '''Extract NaCl revision from svn.

    Returns:
        The NaCl revision as a string. e.g. "12345"
    '''
    # native_client is checked out inside the Chromium src/ tree.
    return lastchange.FetchVersionInfo(
        None, os.path.join(SRC_DIR, 'native_client'), 'native_client').revision
def FetchGitCommitPosition(directory=None):
    '''Return the "commit-position" of the Chromium git repo. This should be
    equivalent to the SVN revision if one exists.

    Walks backwards from HEAD (up to SEARCH_LIMIT commits) until a commit
    message carrying a Cr-Commit-Position footer is found.

    Raises:
        Exception: if no commit in the search window has the footer.
    '''
    SEARCH_LIMIT = 100
    # xrange: this module targets Python 2.
    for i in xrange(SEARCH_LIMIT):
        # %H = full commit hash, %B = raw commit message body, for HEAD~i.
        cmd = ['show', '-s', '--format=%H%n%B', 'HEAD~%d' % i]
        proc = lastchange.RunGitCommand(directory, cmd)
        if not proc:
            break
        output = proc.communicate()[0]
        if not (proc.returncode == 0 and output):
            break
        lines = output.splitlines()

        # First line is the hash.
        hsh = lines[0]
        if not re.match(r'[0-9a-fA-F]+', hsh):
            break

        # Commit footers live at the end of the message, so scan bottom-up.
        for line in reversed(lines):
            if line.startswith('Cr-Commit-Position:'):
                pos = line.rsplit()[-1].strip()
                # Encode as "<hash>-<position>"; the url field is the
                # sentinel value 'git' (checked by ChromeVersion).
                return lastchange.VersionInfo('git', '%s-%s' % (hsh, pos))

    raise Exception('Unable to fetch a Git Commit Position.')
def ParseCommitPosition(commit_position):
    '''Parse a Chrome commit position into its components.

    Given a commit position like:
        0178d4831bd36b5fb9ff477f03dc43b11626a6dc-refs/heads/master@{#292238}

    Returns:
        ("0178d4831bd36b5fb9ff477f03dc43b11626a6dc", "refs/heads/master", "292238")
        The ref and number groups are None when only a bare hash is given;
        returns None when the input does not start with a hex hash.
    '''
    # Group 1: hex hash; groups 2-3 (optional): ref and position number.
    match = re.match(r'([0-9a-fA-F]+)(?:-([^@]+)@{#(\d+)})?', commit_position)
    return match.groups() if match else None
|
unknown
|
codeparrot/codeparrot-clean
| ||
#!"C:\Users\hog\Documents\Visual Studio 2010\Projects\ArdupilotMega\ArdupilotMega\bin\Debug\ipy.exe"
"""
setup.py for installing F2PY
Usage:
python setup.py install
Copyright 2001-2005 Pearu Peterson all rights reserved,
Pearu Peterson <pearu@cens.ioc.ee>
Permission to use, modify, and distribute this software is given under the
terms of the NumPy License.
NO WARRANTY IS EXPRESSED OR IMPLIED. USE AT YOUR OWN RISK.
$Revision: 1.32 $
$Date: 2005/01/30 17:22:14 $
Pearu Peterson
"""
__version__ = "$Id: setup.py,v 1.32 2005/01/30 17:22:14 pearu Exp $"
import os
import sys
from distutils.dep_util import newer
from numpy.distutils import log
from numpy.distutils.core import setup
from numpy.distutils.misc_util import Configuration
from __version__ import version
def configuration(parent_package='',top_path=None):
    '''Build the numpy.distutils Configuration for the f2py package:
    registers data files and a generated f2py launcher script.'''
    config = Configuration('f2py', parent_package, top_path)

    config.add_data_dir('docs')

    config.add_data_files('src/fortranobject.c',
                          'src/fortranobject.h',
                          'f2py.1'
                          )

    config.make_svn_version_py()

    def generate_f2py_py(build_dir):
        # Derive the launcher name from the interpreter name: strip the
        # leading "python" (6 chars) so e.g. python2.4 -> f2py2.4.
        f2py_exe = 'f2py'+os.path.basename(sys.executable)[6:]
        if f2py_exe[-4:]=='.exe':
            f2py_exe = f2py_exe[:-4] + '.py'
        if 'bdist_wininst' in sys.argv and f2py_exe[-3:] != '.py':
            f2py_exe = f2py_exe + '.py'
        target = os.path.join(build_dir,f2py_exe)
        # Only regenerate when this setup.py is newer than the target.
        if newer(__file__,target):
            log.info('Creating %s', target)
            f = open(target,'w')
            # The template below is the launcher's source (Python 2 syntax,
            # including print-statement lines); %s is filled with the
            # interpreter basename for the shebang.
            f.write('''\
#!/usr/bin/env %s
# See http://cens.ioc.ee/projects/f2py2e/
import os, sys
for mode in ["g3-numpy", "2e-numeric", "2e-numarray", "2e-numpy"]:
    try:
        i=sys.argv.index("--"+mode)
        del sys.argv[i]
        break
    except ValueError: pass
os.environ["NO_SCIPY_IMPORT"]="f2py"
if mode=="g3-numpy":
    print >> sys.stderr, "G3 f2py support is not implemented, yet."
    sys.exit(1)
elif mode=="2e-numeric":
    from f2py2e import main
elif mode=="2e-numarray":
    sys.argv.append("-DNUMARRAY")
    from f2py2e import main
elif mode=="2e-numpy":
    from numpy.f2py import main
else:
    print >> sys.stderr, "Unknown mode:",`mode`
    sys.exit(1)
main()
'''%(os.path.basename(sys.executable)))
            f.close()
        return target

    # The script is generated at build time, into the build directory.
    config.add_scripts(generate_f2py_py)

    return config
if __name__ == "__main__":
config = configuration(top_path='')
version = config.get_version()
print 'F2PY Version',version
config = config.todict()
if sys.version[:3]>='2.3':
config['download_url'] = "http://cens.ioc.ee/projects/f2py2e/2.x"\
"/F2PY-2-latest.tar.gz"
config['classifiers'] = [
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: NumPy License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: C',
'Programming Language :: Fortran',
'Programming Language :: Python',
'Topic :: Scientific/Engineering',
'Topic :: Software Development :: Code Generators',
]
setup(version=version,
description = "F2PY - Fortran to Python Interface Generaton",
author = "Pearu Peterson",
author_email = "pearu@cens.ioc.ee",
maintainer = "Pearu Peterson",
maintainer_email = "pearu@cens.ioc.ee",
license = "BSD",
platforms = "Unix, Windows (mingw|cygwin), Mac OSX",
long_description = """\
The Fortran to Python Interface Generator, or F2PY for short, is a
command line tool (f2py) for generating Python C/API modules for
wrapping Fortran 77/90/95 subroutines, accessing common blocks from
Python, and calling Python functions from Fortran (call-backs).
Interfacing subroutines/data from Fortran 90/95 modules is supported.""",
url = "http://cens.ioc.ee/projects/f2py2e/",
keywords = ['Fortran','f2py'],
**config)
|
unknown
|
codeparrot/codeparrot-clean
| ||
The test directory contains tests of the Go tool chain and runtime.
It includes black box tests, regression tests, and error output tests.
They are run as part of all.bash.
To run just these tests, execute:
../bin/go test cmd/internal/testdir
To run just tests from specified files in this directory, execute:
../bin/go test cmd/internal/testdir -run='Test/(file1.go|file2.go|...)'
Standard library tests should be written as regular Go tests in the appropriate package.
The tool chain and runtime also have regular Go tests in their packages.
The main reasons to add a new test to this directory are:
* it is most naturally expressed using the test runner; or
* it is also applicable to `gccgo` and other Go tool chains.
|
unknown
|
github
|
https://github.com/golang/go
|
test/README.md
|
from __future__ import absolute_import
from kombu import transport
from kombu.tests.case import Case, Mock, patch
class test_supports_librabbitmq(Case):
    """Tests for transport.supports_librabbitmq()."""

    def test_eventlet(self):
        # librabbitmq is a blocking C client, so it must be reported as
        # unsupported when running under an eventlet-monkeypatched env.
        with patch('kombu.transport._detect_environment') as de:
            de.return_value = 'eventlet'
            self.assertFalse(transport.supports_librabbitmq())
class test_transport(Case):
    """Tests for transport.resolve_transport()."""

    def test_resolve_transport(self):
        # Both a "module:Class" path string and the class itself resolve
        # to the same Transport class object.
        from kombu.transport.memory import Transport
        self.assertIs(transport.resolve_transport(
            'kombu.transport.memory:Transport'),
            Transport)
        self.assertIs(transport.resolve_transport(Transport), Transport)

    def test_resolve_transport_alias_callable(self):
        # A callable alias entry is invoked lazily at resolution time.
        m = transport.TRANSPORT_ALIASES['George'] = Mock(name='lazyalias')
        try:
            transport.resolve_transport('George')
            m.assert_called_with()
        finally:
            # Always remove the fake alias so other tests see a clean table.
            transport.TRANSPORT_ALIASES.pop('George')

    def test_resolve_transport_alias(self):
        # A plain string alias ('pyamqp') resolves to something truthy.
        self.assertTrue(transport.resolve_transport('pyamqp'))
class test_transport_ghettoq(Case):
    """Tests for the deprecated ghettoq compatibility alias helper."""

    @patch('warnings.warn')
    def test_compat(self, warn):
        # _ghettoq returns a callable producing the new transport path
        # and must emit a deprecation warning when used.
        x = transport._ghettoq('Redis', 'redis', 'redis')
        self.assertEqual(x(), 'kombu.transport.redis.Transport')
        self.assertTrue(warn.called)
|
unknown
|
codeparrot/codeparrot-clean
| ||
% This is generated by ESQL's AbstractFunctionTestCase. Do not edit it. See ../README.md for how to regenerate it.
```esql
FROM sample_data
| WHERE @timestamp > NOW() - 1 hour
```
|
unknown
|
github
|
https://github.com/elastic/elasticsearch
|
docs/reference/query-languages/esql/_snippets/commands/examples/date.csv-spec/docsNowWhere.md
|
from __future__ import print_function, unicode_literals
import importlib
import os
import sys
from django.apps import apps
from django.db.models.fields import NOT_PROVIDED
from django.utils import datetime_safe, six, timezone
from django.utils.six.moves import input
from .loader import MigrationLoader
class MigrationQuestioner(object):
    """
    Supplies answers to the questions the migration autodetector asks.

    This base class answers noninteractively from a ``defaults`` mapping;
    the interactive subclass used by the command-line arguments overrides
    the hooks to prompt the user instead.
    """

    def __init__(self, defaults=None, specified_apps=None, dry_run=None):
        self.defaults = defaults or {}
        self.specified_apps = specified_apps or set()
        self.dry_run = dry_run

    def ask_initial(self, app_label):
        "Should we create an initial migration for the app?"
        # Apps named explicitly on the command line always get one.
        if app_label in self.specified_apps:
            return True
        fallback = self.defaults.get("ask_initial", False)
        # Otherwise only say yes when the app ships an (effectively) empty
        # migrations package, as the new app template does; South-style
        # apps keep real .py files there and are skipped.
        try:
            app_config = apps.get_app_config(app_label)
        except LookupError:
            # A fake app - fall back to the configured default.
            return fallback
        module_name = MigrationLoader.migrations_module(app_config.label)
        try:
            migrations_module = importlib.import_module(module_name)
        except ImportError:
            return fallback
        if hasattr(migrations_module, "__file__"):
            entries = os.listdir(os.path.dirname(migrations_module.__file__))
        elif hasattr(migrations_module, "__path__"):
            if len(migrations_module.__path__) > 1:
                return False
            entries = os.listdir(list(migrations_module.__path__)[0])
        return all(not entry.endswith(".py") or entry == "__init__.py"
                   for entry in entries)

    def ask_not_null_addition(self, field_name, model_name):
        "Adding a NOT NULL field to a model"
        return None  # None means quit

    def ask_not_null_alteration(self, field_name, model_name):
        "Changing a NULL field to NOT NULL"
        return None  # None means quit

    def ask_rename(self, model_name, old_name, new_name, field_instance):
        "Was this field really renamed?"
        return self.defaults.get("ask_rename", False)

    def ask_rename_model(self, old_model_state, new_model_state):
        "Was this model really renamed?"
        return self.defaults.get("ask_rename_model", False)

    def ask_merge(self, app_label):
        "Do you really want to merge these migrations?"
        return self.defaults.get("ask_merge", False)
class InteractiveMigrationQuestioner(MigrationQuestioner):
    # Command-line questioner: prompts on stdin/stdout instead of using
    # the noninteractive defaults of the base class.

    def _boolean_input(self, question, default=None):
        # Ask a yes/no question; an empty answer picks *default* when one
        # was supplied, otherwise re-prompt until the reply starts with y/n.
        result = input("%s " % question)
        if not result and default is not None:
            return default
        while len(result) < 1 or result[0].lower() not in "yn":
            result = input("Please answer yes or no: ")
        return result[0].lower() == "y"

    def _choice_input(self, question, choices):
        # Present numbered *choices* and loop until a valid 1-based index
        # is entered; returns that index.
        print(question)
        for i, choice in enumerate(choices):
            print(" %s) %s" % (i + 1, choice))
        result = input("Select an option: ")
        while True:
            try:
                value = int(result)
                if 0 < value <= len(choices):
                    return value
            except ValueError:
                pass
            result = input("Please select a valid option: ")

    def _ask_default(self):
        # Read a Python expression to use as a one-off default; typing
        # 'exit' aborts. NOTE: the expression is eval()'d - acceptable
        # here because it is the developer's own interactive input.
        print("Please enter the default value now, as valid Python")
        print("The datetime and django.utils.timezone modules are available, so you can do e.g. timezone.now()")
        while True:
            if six.PY3:
                # Six does not correctly abstract over the fact that
                # py3 input returns a unicode string, while py2 raw_input
                # returns a bytestring.
                code = input(">>> ")
            else:
                code = input(">>> ").decode(sys.stdin.encoding)
            if not code:
                print("Please enter some code, or 'exit' (with no quotes) to exit.")
            elif code == "exit":
                sys.exit(1)
            else:
                try:
                    return eval(code, {}, {"datetime": datetime_safe, "timezone": timezone})
                except (SyntaxError, NameError) as e:
                    print("Invalid input: %s" % e)

    def ask_not_null_addition(self, field_name, model_name):
        "Adding a NOT NULL field to a model"
        # During a dry run no prompt is shown and None is returned.
        if not self.dry_run:
            choice = self._choice_input(
                "You are trying to add a non-nullable field '%s' to %s without a default; "
                "we can't do that (the database needs something to populate existing rows).\n"
                "Please select a fix:" % (field_name, model_name),
                [
                    "Provide a one-off default now (will be set on all existing rows)",
                    "Quit, and let me add a default in models.py",
                ]
            )
            if choice == 2:
                sys.exit(3)
            else:
                return self._ask_default()
        return None

    def ask_not_null_alteration(self, field_name, model_name):
        "Changing a NULL field to NOT NULL"
        if not self.dry_run:
            choice = self._choice_input(
                "You are trying to change the nullable field '%s' on %s to non-nullable "
                "without a default; we can't do that (the database needs something to "
                "populate existing rows).\n"
                "Please select a fix:" % (field_name, model_name),
                [
                    "Provide a one-off default now (will be set on all existing rows)",
                    ("Ignore for now, and let me handle existing rows with NULL myself "
                     "(e.g. because you added a RunPython or RunSQL operation to handle "
                     "NULL values in a previous data migration)"),
                    "Quit, and let me add a default in models.py",
                ]
            )
            # Choice 2 leaves existing rows to the developer, 3 aborts,
            # anything else (1) prompts for a one-off default.
            if choice == 2:
                return NOT_PROVIDED
            elif choice == 3:
                sys.exit(3)
            else:
                return self._ask_default()
        return None

    def ask_rename(self, model_name, old_name, new_name, field_instance):
        "Was this field really renamed?"
        msg = "Did you rename %s.%s to %s.%s (a %s)? [y/N]"
        return self._boolean_input(msg % (model_name, old_name, model_name, new_name,
                                          field_instance.__class__.__name__), False)

    def ask_rename_model(self, old_model_state, new_model_state):
        "Was this model really renamed?"
        msg = "Did you rename the %s.%s model to %s? [y/N]"
        return self._boolean_input(msg % (old_model_state.app_label, old_model_state.name,
                                          new_model_state.name), False)

    def ask_merge(self, app_label):
        # Defaults to "no" unless the user explicitly answers yes.
        return self._boolean_input(
            "\nMerging will only work if the operations printed above do not conflict\n" +
            "with each other (working on different fields or models)\n" +
            "Do you want to merge these migration branches? [y/N]",
            False,
        )
class NonInteractiveMigrationQuestioner(MigrationQuestioner):
    # Used when prompting is impossible (e.g. --noinput): always picks
    # the conservative answer instead of asking.

    def ask_not_null_addition(self, field_name, model_name):
        # We can't ask the user, so act like the user aborted.
        sys.exit(3)

    def ask_not_null_alteration(self, field_name, model_name):
        # We can't ask the user, so set as not provided.
        return NOT_PROVIDED
|
unknown
|
codeparrot/codeparrot-clean
| ||
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import frappe
import frappe.defaults
import datetime
from frappe.utils import get_datetime
from six import string_types
# global values -- used for caching
# Maps the user-facing date format identifiers to the corresponding
# strptime/strftime patterns. Used (and cached at module level) by the
# conversion helpers below.
dateformats = {
    'yyyy-mm-dd': '%Y-%m-%d',
    'mm/dd/yyyy': '%m/%d/%Y',
    'mm-dd-yyyy': '%m-%d-%Y',
    "mm/dd/yy": "%m/%d/%y",
    'dd-mmm-yyyy': '%d-%b-%Y',  # numbers app format
    'dd/mm/yyyy': '%d/%m/%Y',
    'dd.mm.yyyy': '%d.%m.%Y',
    'dd-mm-yyyy': '%d-%m-%Y',
    "dd/mm/yy": "%d/%m/%y",
}
def user_to_str(date, date_format=None):
    """Convert *date* from the user's date format to ISO 'yyyy-mm-dd'.

    Falls back to the system-wide user date format when *date_format* is
    not given; empty/None input is returned unchanged. Raises ValueError
    when the string does not match the format.
    """
    if not date:
        return date
    if not date_format:
        date_format = get_user_date_format()
    strptime_fmt = dateformats[date_format]
    try:
        parsed = datetime.datetime.strptime(date, strptime_fmt)
        return parsed.strftime('%Y-%m-%d')
    except ValueError:
        raise ValueError("Date %s must be in format %s" % (date, date_format))
def parse_date(date):
    """tries to parse given date to system's format i.e. yyyy-mm-dd. returns a string"""
    if " " in date:
        # as date-timestamp, remove the time part
        date = date.split(" ")[0]

    # Try the user's own format first (None), then every known format in a
    # predictable order: day-first formats come first when the user's
    # preferred format starts with "dd".
    prefer_day_first = get_user_date_format().startswith("dd")
    formats_to_try = [None] + sorted(dateformats.keys(),
                                     reverse=not prefer_day_first)

    parsed_date = None
    for candidate in formats_to_try:
        try:
            parsed_date = user_to_str(date, candidate)
        except ValueError:
            continue
        if parsed_date:
            break

    if not parsed_date:
        raise Exception("""Cannot understand date - '%s'.
Try formatting it like your default format - '%s'""" % (date, get_user_date_format())
        )

    return parsed_date
def get_user_date_format():
    """Return the user's date format identifier, caching it on frappe.local."""
    cached = getattr(frappe.local, "user_date_format", None)
    if cached is None:
        cached = frappe.defaults.get_global_default("date_format") or "yyyy-mm-dd"
        frappe.local.user_date_format = cached
    return cached
def datetime_in_user_format(date_time):
    """Format a datetime (or datetime string) as '<user-format date> HH:MM'."""
    if not date_time:
        return ""
    if isinstance(date_time, string_types):
        # Accept string timestamps and coerce them first.
        date_time = get_datetime(date_time)
    from frappe.utils import formatdate
    date_part = formatdate(date_time.date())
    time_part = date_time.strftime("%H:%M")
    return date_part + " " + time_part
|
unknown
|
codeparrot/codeparrot-clean
| ||
# coding: utf-8
import os
import json
import glob
# import dataset
from normality import slugify
# from datetime import datetime
from pprint import pprint # noqa
from common import DATA_PATH
# Input (raw scraper dumps) and output (GeoJSON layers) directories.
# The bare "except:" clauses here also swallowed KeyboardInterrupt and
# SystemExit; narrowed to OSError, which is what makedirs raises when the
# directory already exists (a real permission problem will surface again
# when the files are opened).
SOURCE_PATH = os.path.join(DATA_PATH, 'flexicadastre', 'raw')
try:
    os.makedirs(SOURCE_PATH)
except OSError:
    pass

DEST_PATH = os.path.join(DATA_PATH, 'flexicadastre', 'geo_layers')
try:
    os.makedirs(DEST_PATH)
except OSError:
    pass
def get_attrs(feature):
    """Return the feature's attributes with lowercased, whitespace-trimmed keys."""
    return {
        key.lower().strip(): value
        for key, value in feature.get('attributes').items()
    }
def parse_file(path):
    """Convert one raw FlexiCadastre dump into per-layer GeoJSON files."""
    with open(path, 'rb') as fh:
        ctx = json.load(fh)

    # Only the Tanzania ('TZ') source is converted for now.
    if ctx['source_name'] not in ['TZ']:
        return

    for layer in ctx.get('layers'):
        collection = {
            "type": "FeatureCollection",
            "features": []
        }
        for fdata in layer.pop('data').get('features'):
            attrs = get_attrs(fdata)
            geometry = fdata.get('geometry', {})
            # Features without polygon rings carry no usable geometry.
            if not geometry.get('rings'):
                continue
            props = dict(attrs)
            props['layer'] = layer.get('name')
            collection['features'].append({
                'type': 'Feature',
                'geometry': {
                    'type': 'Polygon',
                    'coordinates': geometry.get('rings')
                },
                'properties': props
            })

        file_name = slugify('%s %s' % (ctx['source_name'], layer.get('name')),
                            sep='_') + '.json'
        with open(os.path.join(DEST_PATH, file_name), 'wb') as fh:
            json.dump(collection, fh)
if __name__ == '__main__':
    # Convert every raw dump found in the source directory.
    for file_path in glob.glob(os.path.join(SOURCE_PATH, '*')):
        parse_file(file_path)
|
unknown
|
codeparrot/codeparrot-clean
| ||
An invalid number of arguments was passed when calling a function.
Erroneous code example:
```compile_fail,E0061
fn f(u: i32) {}
f(); // error!
```
The number of arguments passed to a function must match the number of arguments
specified in the function signature.
For example, a function like:
```
fn f(a: u16, b: &str) {}
```
Must always be called with exactly two arguments, e.g., `f(2, "test")`.
Note that Rust does not have a notion of optional function arguments or
variadic functions (except for its C-FFI).
|
unknown
|
github
|
https://github.com/rust-lang/rust
|
compiler/rustc_error_codes/src/error_codes/E0061.md
|
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
GoGrid driver
"""
import time
import hashlib
import copy
from libcloud.utils.py3 import b
from libcloud.common.types import InvalidCredsError, LibcloudError
from libcloud.common.gogrid import GoGridConnection, BaseGoGridDriver
from libcloud.compute.providers import Provider
from libcloud.compute.types import NodeState
from libcloud.compute.base import Node, NodeDriver
from libcloud.compute.base import NodeSize, NodeImage, NodeLocation
# Maps GoGrid API server state names to libcloud NodeState values.
# "Off" and the save/restore states are reported as PENDING.
STATE = {
    "Starting": NodeState.PENDING,
    "On": NodeState.RUNNING,
    "On/Saving": NodeState.RUNNING,
    "Off": NodeState.PENDING,
    "Restarting": NodeState.REBOOTING,
    "Saving": NodeState.PENDING,
    "Restoring": NodeState.PENDING,
}

# Fixed GoGrid instance sizes keyed by RAM label; disk is in GB and
# bandwidth is not published by the API (hence None).
GOGRID_INSTANCE_TYPES = {
    '512MB': {'id': '512MB',
              'name': '512MB',
              'ram': 512,
              'disk': 30,
              'bandwidth': None},
    '1GB': {'id': '1GB',
            'name': '1GB',
            'ram': 1024,
            'disk': 60,
            'bandwidth': None},
    '2GB': {'id': '2GB',
            'name': '2GB',
            'ram': 2048,
            'disk': 120,
            'bandwidth': None},
    '4GB': {'id': '4GB',
            'name': '4GB',
            'ram': 4096,
            'disk': 240,
            'bandwidth': None},
    '8GB': {'id': '8GB',
            'name': '8GB',
            'ram': 8192,
            'disk': 480,
            'bandwidth': None},
    '16GB': {'id': '16GB',
             'name': '16GB',
             'ram': 16384,
             'disk': 960,
             'bandwidth': None},
    '24GB': {'id': '24GB',
             'name': '24GB',
             'ram': 24576,
             'disk': 960,
             'bandwidth': None},
}
class GoGridNode(Node):
    # The GoGrid create call does not return a server id, so the uuid is
    # derived from the public IP instead: the IP is chosen at create time
    # and never mutates, which keeps the uuid stable for the node's
    # entire lifetime.
    def get_uuid(self):
        key = "%s:%s" % (self.public_ips, self.driver.type)
        return hashlib.sha1(b(key)).hexdigest()
class GoGridNodeDriver(BaseGoGridDriver, NodeDriver):
    """
    GoGrid node driver
    """

    connectionCls = GoGridConnection
    type = Provider.GOGRID
    api_name = 'gogrid'
    name = 'GoGrid'
    website = 'http://www.gogrid.com/'
    features = {"create_node": ["generates_password"]}

    _instance_types = GOGRID_INSTANCE_TYPES

    def __init__(self, *args, **kwargs):
        """
        @inherits: :class:`NodeDriver.__init__`
        """
        super(GoGridNodeDriver, self).__init__(*args, **kwargs)

    def _get_state(self, element):
        # Map the API state name onto a NodeState; a missing or malformed
        # 'state' entry falls through to UNKNOWN. (Previously a bare
        # "except:" which also swallowed KeyboardInterrupt/SystemExit.)
        try:
            return STATE[element['state']['name']]
        except (KeyError, TypeError):
            pass
        return NodeState.UNKNOWN

    def _get_ip(self, element):
        return element.get('ip').get('ip')

    def _get_id(self, element):
        return element.get('id')

    def _to_node(self, element, password=None):
        # Build a GoGridNode from one API 'server' element; the password
        # (when known) is exposed via node.extra['password'].
        state = self._get_state(element)
        ip = self._get_ip(element)
        id = self._get_id(element)
        n = GoGridNode(id=id,
                       name=element['name'],
                       state=state,
                       public_ips=[ip],
                       private_ips=[],
                       extra={'ram': element.get('ram').get('name'),
                              'description': element.get('description', '')},
                       driver=self.connection.driver)
        if password:
            n.extra['password'] = password

        return n

    def _to_image(self, element):
        n = NodeImage(id=element['id'],
                      name=element['friendlyName'],
                      driver=self.connection.driver)
        return n

    def _to_images(self, object):
        return [self._to_image(el)
                for el in object['list']]

    def _to_location(self, element):
        location = NodeLocation(id=element['id'],
                                name=element['name'],
                                country="US",
                                driver=self.connection.driver)
        return location

    def _to_locations(self, object):
        return [self._to_location(el)
                for el in object['list']]

    def list_images(self, location=None):
        params = {}
        if location is not None:
            params["datacenter"] = location.id
        images = self._to_images(
            self.connection.request('/api/grid/image/list', params).object)
        return images

    def list_nodes(self):
        """
        @inherits: :class:`NodeDriver.list_nodes`
        :rtype: ``list`` of :class:`GoGridNode`
        """
        passwords_map = {}

        res = self._server_list()
        try:
            for password in self._password_list()['list']:
                try:
                    passwords_map[password['server']['id']] = \
                        password['password']
                except KeyError:
                    pass
        except InvalidCredsError:
            # some gogrid API keys don't have permission to access the
            # password list.
            pass

        return [self._to_node(el, passwords_map.get(el.get('id')))
                for el in res['list']]

    def reboot_node(self, node):
        """
        @inherits: :class:`NodeDriver.reboot_node`
        :type node: :class:`GoGridNode`
        """
        id = node.id
        power = 'restart'
        res = self._server_power(id, power)
        if not res.success():
            raise Exception(res.parse_error())
        return True

    def destroy_node(self, node):
        """
        @inherits: :class:`NodeDriver.destroy_node`
        :type node: :class:`GoGridNode`
        """
        id = node.id
        res = self._server_delete(id)
        if not res.success():
            raise Exception(res.parse_error())
        return True

    def _server_list(self):
        return self.connection.request('/api/grid/server/list').object

    def _password_list(self):
        return self.connection.request('/api/support/password/list').object

    def _server_power(self, id, power):
        # power in ['start', 'stop', 'restart']
        params = {'id': id, 'power': power}
        return self.connection.request("/api/grid/server/power", params,
                                       method='POST')

    def _server_delete(self, id):
        params = {'id': id}
        return self.connection.request("/api/grid/server/delete", params,
                                       method='POST')

    def _get_first_ip(self, location=None):
        # Pick the first unassigned public IP in the datacenter.
        ips = self.ex_list_ips(public=True, assigned=False, location=location)
        try:
            return ips[0].ip
        except IndexError:
            raise LibcloudError('No public unassigned IPs left',
                                GoGridNodeDriver)

    def list_sizes(self, location=None):
        sizes = []
        for key, values in self._instance_types.items():
            attributes = copy.deepcopy(values)
            attributes.update({'price': self._get_size_price(size_id=key)})
            sizes.append(NodeSize(driver=self.connection.driver, **attributes))

        return sizes

    def list_locations(self):
        locations = self._to_locations(
            self.connection.request('/api/common/lookup/list',
                                    params={'lookup': 'ip.datacenter'}).object)
        return locations

    def ex_create_node_nowait(self, **kwargs):
        """Don't block until GoGrid allocates id for a node
        but return right away with id == None.

        The existence of this method is explained by the fact
        that GoGrid assigns id to a node only few minutes after
        creation.

        :keyword    name: String with a name for this new node (required)
        :type       name: ``str``

        :keyword    size: The size of resources allocated to this node.
                    (required)
        :type       size: :class:`NodeSize`

        :keyword    image: OS Image to boot on node. (required)
        :type       image: :class:`NodeImage`

        :keyword    ex_description: Description of a Node
        :type       ex_description: ``str``

        :keyword    ex_ip: Public IP address to use for a Node. If not
                    specified, first available IP address will be picked
        :type       ex_ip: ``str``

        :rtype: :class:`GoGridNode`
        """
        name = kwargs['name']
        image = kwargs['image']
        size = kwargs['size']
        try:
            ip = kwargs['ex_ip']
        except KeyError:
            ip = self._get_first_ip(kwargs.get('location'))

        params = {'name': name,
                  'image': image.id,
                  'description': kwargs.get('ex_description', ''),
                  'server.ram': size.id,
                  'ip': ip}

        object = self.connection.request('/api/grid/server/add',
                                         params=params, method='POST').object
        node = self._to_node(object['list'][0])

        return node

    def create_node(self, **kwargs):
        """Create a new GoGrid node

        @inherits: :class:`NodeDriver.create_node`

        :keyword    ex_description: Description of a Node
        :type       ex_description: ``str``

        :keyword    ex_ip: Public IP address to use for a Node. If not
                    specified, first available IP address will be picked
        :type       ex_ip: ``str``

        :rtype: :class:`GoGridNode`
        """
        node = self.ex_create_node_nowait(**kwargs)

        timeout = 60 * 20
        waittime = 0
        interval = 2 * 60

        # GoGrid assigns the node id only a few minutes after creation:
        # poll the node list (matching by public IP, which is fixed at
        # create time) until the id shows up or the timeout elapses.
        while node.id is None and waittime < timeout:
            nodes = self.list_nodes()

            for i in nodes:
                if i.public_ips[0] == node.public_ips[0] and i.id is not None:
                    return i

            waittime += interval
            time.sleep(interval)

        # Bug fix: this previously tested the *builtin* ``id`` (never
        # None), so a timed-out wait was silently returned as success.
        if node.id is None:
            raise Exception(
                "Wasn't able to wait for id allocation for the node %s"
                % str(node))

        return node

    def ex_save_image(self, node, name):
        """Create an image for node.

        Please refer to GoGrid documentation to get info
        how prepare a node for image creation:

        http://wiki.gogrid.com/wiki/index.php/MyGSI

        :keyword    node: node to use as a base for image
        :type       node: :class:`GoGridNode`

        :keyword    name: name for new image
        :type       name: ``str``

        :rtype: :class:`NodeImage`
        """
        params = {'server': node.id,
                  'friendlyName': name}
        object = self.connection.request('/api/grid/image/save', params=params,
                                         method='POST').object

        return self._to_images(object)[0]

    def ex_edit_node(self, **kwargs):
        """Change attributes of a node.

        :keyword    node: node to be edited (required)
        :type       node: :class:`GoGridNode`

        :keyword    size: new size of a node (required)
        :type       size: :class:`NodeSize`

        :keyword    ex_description: new description of a node
        :type       ex_description: ``str``

        :rtype: :class:`Node`
        """
        node = kwargs['node']
        size = kwargs['size']
        params = {'id': node.id,
                  'server.ram': size.id}

        if 'ex_description' in kwargs:
            params['description'] = kwargs['ex_description']

        object = self.connection.request('/api/grid/server/edit',
                                         params=params).object

        return self._to_node(object['list'][0])

    def ex_edit_image(self, **kwargs):
        """Edit metadata of a server image.

        :keyword    image: image to be edited (required)
        :type       image: :class:`NodeImage`

        :keyword    public: should be the image public (required)
        :type       public: ``bool``

        :keyword    ex_description: description of the image (optional)
        :type       ex_description: ``str``

        :keyword    name: name of the image
        :type       name: ``str``

        :rtype: :class:`NodeImage`
        """
        image = kwargs['image']
        public = kwargs['public']

        params = {'id': image.id,
                  'isPublic': str(public).lower()}

        if 'ex_description' in kwargs:
            params['description'] = kwargs['ex_description']

        if 'name' in kwargs:
            params['friendlyName'] = kwargs['name']

        object = self.connection.request('/api/grid/image/edit',
                                         params=params).object

        return self._to_image(object['list'][0])

    def ex_list_ips(self, **kwargs):
        """Return list of IP addresses assigned to
        the account.

        :keyword    public: set to True to list only
                    public IPs or False to list only
                    private IPs. Set to None or not specify
                    at all not to filter by type
        :type       public: ``bool``

        :keyword    assigned: set to True to list only addresses
                    assigned to servers, False to list unassigned
                    addresses and set to None or don't set at all
                    not no filter by state
        :type       assigned: ``bool``

        :keyword    location: filter IP addresses by location
        :type       location: :class:`NodeLocation`

        :rtype: ``list`` of :class:`GoGridIpAddress`
        """
        params = {}

        if "public" in kwargs and kwargs["public"] is not None:
            params["ip.type"] = {True: "Public",
                                 False: "Private"}[kwargs["public"]]
        if "assigned" in kwargs and kwargs["assigned"] is not None:
            params["ip.state"] = {True: "Assigned",
                                  False: "Unassigned"}[kwargs["assigned"]]
        if "location" in kwargs and kwargs['location'] is not None:
            params['datacenter'] = kwargs['location'].id

        ips = self._to_ips(
            self.connection.request('/api/grid/ip/list',
                                    params=params).object)
        return ips
|
unknown
|
codeparrot/codeparrot-clean
| ||
// Copyright The Prometheus Authors
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package labels
import (
"fmt"
"math/rand"
"strings"
"testing"
"time"
"unicode/utf8"
"github.com/grafana/regexp"
"github.com/grafana/regexp/syntax"
"github.com/stretchr/testify/require"
)
var (
asciiRunes = []rune("abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_")
regexes = []string{
"",
"foo",
"^foo",
"(foo|bar)",
"foo.*",
".*foo",
"^.*foo$",
"^.+foo$",
".?",
".*",
".+",
"foo.+",
".+foo",
"foo\n.+",
"foo\n.*",
".*foo.*",
".+foo.+",
"(?s:.*)",
"(?s:.+)",
"(?s:^.*foo$)",
"(?i:foo)",
"(?i:(foo|bar))",
"(?i:(foo1|foo2|bar))",
"^(?i:foo|oo)|(bar)$",
"(?i:(foo1|foo2|aaa|bbb|ccc|ddd|eee|fff|ggg|hhh|iii|lll|mmm|nnn|ooo|ppp|qqq|rrr|sss|ttt|uuu|vvv|www|xxx|yyy|zzz))",
"((.*)(bar|b|buzz)(.+)|foo)$",
"^$",
"(prometheus|api_prom)_api_v1_.+",
"10\\.0\\.(1|2)\\.+",
"10\\.0\\.(1|2).+",
"((fo(bar))|.+foo)",
// A long case sensitive alternation.
"zQPbMkNO|NNSPdvMi|iWuuSoAl|qbvKMimS|IecrXtPa|seTckYqt|NxnyHkgB|fIDlOgKb|UhlWIygH|OtNoJxHG|cUTkFVIV|mTgFIHjr|jQkoIDtE|PPMKxRXl|AwMfwVkQ|CQyMrTQJ|BzrqxVSi|nTpcWuhF|PertdywG|ZZDgCtXN|WWdDPyyE|uVtNQsKk|BdeCHvPZ|wshRnFlH|aOUIitIp|RxZeCdXT|CFZMslCj|AVBZRDxl|IzIGCnhw|ythYuWiz|oztXVXhl|VbLkwqQx|qvaUgyVC|VawUjPWC|ecloYJuj|boCLTdSU|uPrKeAZx|hrMWLWBq|JOnUNHRM|rYnujkPq|dDEdZhIj|DRrfvugG|yEGfDxVV|YMYdJWuP|PHUQZNWM|AmKNrLis|zTxndVfn|FPsHoJnc|EIulZTua|KlAPhdzg|ScHJJCLt|NtTfMzME|eMCwuFdo|SEpJVJbR|cdhXZeCx|sAVtBwRh|kVFEVcMI|jzJrxraA|tGLHTell|NNWoeSaw|DcOKSetX|UXZAJyka|THpMphDP|rizheevl|kDCBRidd|pCZZRqyu|pSygkitl|SwZGkAaW|wILOrfNX|QkwVOerj|kHOMxPDr|EwOVycJv|AJvtzQFS|yEOjKYYB|LizIINLL|JBRSsfcG|YPiUqqNl|IsdEbvee|MjEpGcBm|OxXZVgEQ|xClXGuxa|UzRCGFEb|buJbvfvA|IPZQxRet|oFYShsMc|oBHffuHO|bzzKrcBR|KAjzrGCl|IPUsAVls|OGMUMbIU|gyDccHuR|bjlalnDd|ZLWjeMna|fdsuIlxQ|dVXtiomV|XxedTjNg|XWMHlNoA|nnyqArQX|opfkWGhb|wYtnhdYb",
// An extremely long case sensitive alternation. This is a special
// case because the values share common prefixes rather than being
// entirely random. This is common in the real world. For example, the
// values of a label like kubernetes pod will often include the
// deployment name as a prefix.
"jyyfj00j0061|jyyfj00j0062|jyyfj94j0093|jyyfj99j0093|jyyfm01j0021|jyyfm02j0021|jyefj00j0192|jyefj00j0193|jyefj00j0194|jyefj00j0195|jyefj00j0196|jyefj00j0197|jyefj00j0290|jyefj00j0291|jyefj00j0292|jyefj00j0293|jyefj00j0294|jyefj00j0295|jyefj00j0296|jyefj00j0297|jyefj89j0394|jyefj90j0394|jyefj91j0394|jyefj95j0347|jyefj96j0322|jyefj96j0347|jyefj97j0322|jyefj97j0347|jyefj98j0322|jyefj98j0347|jyefj99j0320|jyefj99j0322|jyefj99j0323|jyefj99j0335|jyefj99j0336|jyefj99j0344|jyefj99j0347|jyefj99j0349|jyefj99j0351|jyeff00j0117|lyyfm01j0025|lyyfm01j0028|lyyfm01j0041|lyyfm01j0133|lyyfm01j0701|lyyfm02j0025|lyyfm02j0028|lyyfm02j0041|lyyfm02j0133|lyyfm02j0701|lyyfm03j0701|lyefj00j0775|lyefj00j0776|lyefj00j0777|lyefj00j0778|lyefj00j0779|lyefj00j0780|lyefj00j0781|lyefj00j0782|lyefj50j3807|lyefj50j3852|lyefj51j3807|lyefj51j3852|lyefj52j3807|lyefj52j3852|lyefj53j3807|lyefj53j3852|lyefj54j3807|lyefj54j3852|lyefj54j3886|lyefj55j3807|lyefj55j3852|lyefj55j3886|lyefj56j3807|lyefj56j3852|lyefj56j3886|lyefj57j3807|lyefj57j3852|lyefj57j3886|lyefj58j3807|lyefj58j3852|lyefj58j3886|lyefj59j3807|lyefj59j3852|lyefj59j3886|lyefj60j3807|lyefj60j3852|lyefj60j3886|lyefj61j3807|lyefj61j3852|lyefj61j3886|lyefj62j3807|lyefj62j3852|lyefj62j3886|lyefj63j3807|lyefj63j3852|lyefj63j3886|lyefj64j3807|lyefj64j3852|lyefj64j3886|lyefj65j3807|lyefj65j3852|lyefj65j3886|lyefj66j3807|lyefj66j3852|lyefj66j3886|lyefj67j3807|lyefj67j3852|lyefj67j3886|lyefj68j3807|lyefj68j3852|lyefj68j3886|lyefj69j3807|lyefj69j3846|lyefj69j3852|lyefj69j3886|lyefj70j3807|lyefj70j3846|lyefj70j3852|lyefj70j3886|lyefj71j3807|lyefj71j3846|lyefj71j3852|lyefj71j3886|lyefj72j3807|lyefj72j3846|lyefj72j3852|lyefj72j3886|lyefj73j3807|lyefj73j3846|lyefj73j3852|lyefj73j3886|lyefj74j3807|lyefj74j3846|lyefj74j3852|lyefj74j3886|lyefj75j3807|lyefj75j3808|lyefj75j3846|lyefj75j3852|lyefj75j3886|lyefj76j3732|lyefj76j3807|lyefj76j3808|lyefj76j3846|lyefj76j3852|lyefj76j3886|lyefj77j3732|lyefj77j3807|lyefj77j3808|lyefj77j3846|lyefj77j3852|lyefj77j3886|lyefj78j32
78|lyefj78j3732|lyefj78j3807|lyefj78j3808|lyefj78j3846|lyefj78j3852|lyefj78j3886|lyefj79j3732|lyefj79j3807|lyefj79j3808|lyefj79j3846|lyefj79j3852|lyefj79j3886|lyefj80j3732|lyefj80j3807|lyefj80j3808|lyefj80j3846|lyefj80j3852|lyefj80j3886|lyefj81j3732|lyefj81j3807|lyefj81j3808|lyefj81j3846|lyefj81j3852|lyefj81j3886|lyefj82j3732|lyefj82j3807|lyefj82j3808|lyefj82j3846|lyefj82j3852|lyefj82j3886|lyefj83j3732|lyefj83j3807|lyefj83j3808|lyefj83j3846|lyefj83j3852|lyefj83j3886|lyefj84j3732|lyefj84j3807|lyefj84j3808|lyefj84j3846|lyefj84j3852|lyefj84j3886|lyefj85j3732|lyefj85j3807|lyefj85j3808|lyefj85j3846|lyefj85j3852|lyefj85j3886|lyefj86j3278|lyefj86j3732|lyefj86j3807|lyefj86j3808|lyefj86j3846|lyefj86j3852|lyefj86j3886|lyefj87j3278|lyefj87j3732|lyefj87j3807|lyefj87j3808|lyefj87j3846|lyefj87j3852|lyefj87j3886|lyefj88j3732|lyefj88j3807|lyefj88j3808|lyefj88j3846|lyefj88j3852|lyefj88j3886|lyefj89j3732|lyefj89j3807|lyefj89j3808|lyefj89j3846|lyefj89j3852|lyefj89j3886|lyefj90j3732|lyefj90j3807|lyefj90j3808|lyefj90j3846|lyefj90j3852|lyefj90j3886|lyefj91j3732|lyefj91j3807|lyefj91j3808|lyefj91j3846|lyefj91j3852|lyefj91j3886|lyefj92j3732|lyefj92j3807|lyefj92j3808|lyefj92j3846|lyefj92j3852|lyefj92j3886|lyefj93j3732|lyefj93j3807|lyefj93j3808|lyefj93j3846|lyefj93j3852|lyefj93j3885|lyefj93j3886|lyefj94j3525|lyefj94j3732|lyefj94j3807|lyefj94j3808|lyefj94j3846|lyefj94j3852|lyefj94j3885|lyefj94j3886|lyefj95j3525|lyefj95j3732|lyefj95j3807|lyefj95j3808|lyefj95j3846|lyefj95j3852|lyefj95j3886|lyefj96j3732|lyefj96j3803|lyefj96j3807|lyefj96j3808|lyefj96j3846|lyefj96j3852|lyefj96j3886|lyefj97j3333|lyefj97j3732|lyefj97j3792|lyefj97j3803|lyefj97j3807|lyefj97j3808|lyefj97j3838|lyefj97j3843|lyefj97j3846|lyefj97j3852|lyefj97j3886|lyefj98j3083|lyefj98j3333|lyefj98j3732|lyefj98j3807|lyefj98j3808|lyefj98j3838|lyefj98j3843|lyefj98j3846|lyefj98j3852|lyefj98j3873|lyefj98j3877|lyefj98j3882|lyefj98j3886|lyefj99j2984|lyefj99j3083|lyefj99j3333|lyefj99j3732|lyefj99j3807|lyefj99j3808|lyefj99j3846|lyefj99j3849|lyefj99j
3852|lyefj99j3873|lyefj99j3877|lyefj99j3882|lyefj99j3884|lyefj99j3886|lyeff00j0106|lyeff00j0107|lyeff00j0108|lyeff00j0129|lyeff00j0130|lyeff00j0131|lyeff00j0132|lyeff00j0133|lyeff00j0134|lyeff00j0444|lyeff00j0445|lyeff91j0473|lyeff92j0473|lyeff92j3877|lyeff93j3877|lyeff94j0501|lyeff94j3525|lyeff94j3877|lyeff95j0501|lyeff95j3525|lyeff95j3877|lyeff96j0503|lyeff96j3877|lyeff97j3877|lyeff98j3333|lyeff98j3877|lyeff99j2984|lyeff99j3333|lyeff99j3877|mfyr9149ej|mfyr9149ek|mfyr9156ej|mfyr9156ek|mfyr9157ej|mfyr9157ek|mfyr9159ej|mfyr9159ek|mfyr9203ej|mfyr9204ej|mfyr9205ej|mfyr9206ej|mfyr9207ej|mfyr9207ek|mfyr9217ej|mfyr9217ek|mfyr9222ej|mfyr9222ek|mfyu0185ej|mfye9187ej|mfye9187ek|mfye9188ej|mfye9188ek|mfye9189ej|mfye9189ek|mfyf0185ej|oyefj87j0007|oyefj88j0007|oyefj89j0007|oyefj90j0007|oyefj91j0007|oyefj95j0001|oyefj96j0001|oyefj98j0004|oyefj99j0004|oyeff91j0004|oyeff92j0004|oyeff93j0004|oyeff94j0004|oyeff95j0004|oyeff96j0004|rklvyaxmany|ryefj93j0001|ryefj94j0001|tyyfj00a0001|tyyfj84j0005|tyyfj85j0005|tyyfj86j0005|tyyfj87j0005|tyyfj88j0005|tyyfj89j0005|tyyfj90j0005|tyyfj91j0005|tyyfj92j0005|tyyfj93j0005|tyyfj94j0005|tyyfj95j0005|tyyfj96j0005|tyyfj97j0005|tyyfj98j0005|tyyfj99j0005|tyefj50j0015|tyefj50j0017|tyefj50j0019|tyefj50j0020|tyefj50j0021|tyefj51j0015|tyefj51j0017|tyefj51j0019|tyefj51j0020|tyefj51j0021|tyefj52j0015|tyefj52j0017|tyefj52j0019|tyefj52j0020|tyefj52j0021|tyefj53j0015|tyefj53j0017|tyefj53j0019|tyefj53j0020|tyefj53j0021|tyefj54j0015|tyefj54j0017|tyefj54j0019|tyefj54j0020|tyefj54j0021|tyefj55j0015|tyefj55j0017|tyefj55j0019|tyefj55j0020|tyefj55j0021|tyefj56j0015|tyefj56j0017|tyefj56j0019|tyefj56j0020|tyefj56j0021|tyefj57j0015|tyefj57j0017|tyefj57j0019|tyefj57j0020|tyefj57j0021|tyefj58j0015|tyefj58j0017|tyefj58j0019|tyefj58j0020|tyefj58j0021|tyefj59j0015|tyefj59j0017|tyefj59j0019|tyefj59j0020|tyefj59j0021|tyefj60j0015|tyefj60j0017|tyefj60j0019|tyefj60j0020|tyefj60j0021|tyefj61j0015|tyefj61j0017|tyefj61j0019|tyefj61j0020|tyefj61j0021|tyefj62j0015|tyefj62j0017|tyefj62
j0019|tyefj62j0020|tyefj62j0021|tyefj63j0015|tyefj63j0017|tyefj63j0019|tyefj63j0020|tyefj63j0021|tyefj64j0015|tyefj64j0017|tyefj64j0019|tyefj64j0020|tyefj64j0021|tyefj65j0015|tyefj65j0017|tyefj65j0019|tyefj65j0020|tyefj65j0021|tyefj66j0015|tyefj66j0017|tyefj66j0019|tyefj66j0020|tyefj66j0021|tyefj67j0015|tyefj67j0017|tyefj67j0019|tyefj67j0020|tyefj67j0021|tyefj68j0015|tyefj68j0017|tyefj68j0019|tyefj68j0020|tyefj68j0021|tyefj69j0015|tyefj69j0017|tyefj69j0019|tyefj69j0020|tyefj69j0021|tyefj70j0015|tyefj70j0017|tyefj70j0019|tyefj70j0020|tyefj70j0021|tyefj71j0015|tyefj71j0017|tyefj71j0019|tyefj71j0020|tyefj71j0021|tyefj72j0015|tyefj72j0017|tyefj72j0019|tyefj72j0020|tyefj72j0021|tyefj72j0022|tyefj73j0015|tyefj73j0017|tyefj73j0019|tyefj73j0020|tyefj73j0021|tyefj73j0022|tyefj74j0015|tyefj74j0017|tyefj74j0019|tyefj74j0020|tyefj74j0021|tyefj74j0022|tyefj75j0015|tyefj75j0017|tyefj75j0019|tyefj75j0020|tyefj75j0021|tyefj75j0022|tyefj76j0015|tyefj76j0017|tyefj76j0019|tyefj76j0020|tyefj76j0021|tyefj76j0022|tyefj76j0119|tyefj77j0015|tyefj77j0017|tyefj77j0019|tyefj77j0020|tyefj77j0021|tyefj77j0022|tyefj77j0119|tyefj78j0015|tyefj78j0017|tyefj78j0019|tyefj78j0020|tyefj78j0021|tyefj78j0022|tyefj78j0119|tyefj79j0015|tyefj79j0017|tyefj79j0019|tyefj79j0020|tyefj79j0021|tyefj79j0022|tyefj79j0119|tyefj80j0015|tyefj80j0017|tyefj80j0019|tyefj80j0020|tyefj80j0021|tyefj80j0022|tyefj80j0114|tyefj80j0119|tyefj81j0015|tyefj81j0017|tyefj81j0019|tyefj81j0020|tyefj81j0021|tyefj81j0022|tyefj81j0114|tyefj81j0119|tyefj82j0015|tyefj82j0017|tyefj82j0019|tyefj82j0020|tyefj82j0021|tyefj82j0022|tyefj82j0119|tyefj83j0015|tyefj83j0017|tyefj83j0019|tyefj83j0020|tyefj83j0021|tyefj83j0022|tyefj83j0119|tyefj84j0014|tyefj84j0015|tyefj84j0017|tyefj84j0019|tyefj84j0020|tyefj84j0021|tyefj84j0022|tyefj84j0119|tyefj85j0014|tyefj85j0015|tyefj85j0017|tyefj85j0019|tyefj85j0020|tyefj85j0021|tyefj85j0022|tyefj85j0119|tyefj86j0014|tyefj86j0015|tyefj86j0017|tyefj86j0019|tyefj86j0020|tyefj86j0021|tyefj86j0022|tyefj87j0014|tyefj
87j0015|tyefj87j0017|tyefj87j0019|tyefj87j0020|tyefj87j0021|tyefj87j0022|tyefj88j0014|tyefj88j0015|tyefj88j0017|tyefj88j0019|tyefj88j0020|tyefj88j0021|tyefj88j0022|tyefj88j0100|tyefj88j0115|tyefj89j0003|tyefj89j0014|tyefj89j0015|tyefj89j0017|tyefj89j0019|tyefj89j0020|tyefj89j0021|tyefj89j0022|tyefj89j0100|tyefj89j0115|tyefj90j0014|tyefj90j0015|tyefj90j0016|tyefj90j0017|tyefj90j0018|tyefj90j0019|tyefj90j0020|tyefj90j0021|tyefj90j0022|tyefj90j0100|tyefj90j0111|tyefj90j0115|tyefj91j0014|tyefj91j0015|tyefj91j0016|tyefj91j0017|tyefj91j0018|tyefj91j0019|tyefj91j0020|tyefj91j0021|tyefj91j0022|tyefj91j0100|tyefj91j0111|tyefj91j0115|tyefj92j0014|tyefj92j0015|tyefj92j0016|tyefj92j0017|tyefj92j0018|tyefj92j0019|tyefj92j0020|tyefj92j0021|tyefj92j0022|tyefj92j0100|tyefj92j0105|tyefj92j0115|tyefj92j0121|tyefj93j0004|tyefj93j0014|tyefj93j0015|tyefj93j0017|tyefj93j0018|tyefj93j0019|tyefj93j0020|tyefj93j0021|tyefj93j0022|tyefj93j0100|tyefj93j0105|tyefj93j0115|tyefj93j0121|tyefj94j0002|tyefj94j0004|tyefj94j0008|tyefj94j0014|tyefj94j0015|tyefj94j0017|tyefj94j0019|tyefj94j0020|tyefj94j0021|tyefj94j0022|tyefj94j0084|tyefj94j0088|tyefj94j0100|tyefj94j0106|tyefj94j0116|tyefj94j0121|tyefj94j0123|tyefj95j0002|tyefj95j0004|tyefj95j0008|tyefj95j0014|tyefj95j0015|tyefj95j0017|tyefj95j0019|tyefj95j0020|tyefj95j0021|tyefj95j0022|tyefj95j0084|tyefj95j0088|tyefj95j0100|tyefj95j0101|tyefj95j0106|tyefj95j0112|tyefj95j0116|tyefj95j0121|tyefj95j0123|tyefj96j0014|tyefj96j0015|tyefj96j0017|tyefj96j0019|tyefj96j0020|tyefj96j0021|tyefj96j0022|tyefj96j0082|tyefj96j0084|tyefj96j0100|tyefj96j0101|tyefj96j0112|tyefj96j0117|tyefj96j0121|tyefj96j0124|tyefj97j0014|tyefj97j0015|tyefj97j0017|tyefj97j0019|tyefj97j0020|tyefj97j0021|tyefj97j0022|tyefj97j0081|tyefj97j0087|tyefj97j0098|tyefj97j0100|tyefj97j0107|tyefj97j0109|tyefj97j0113|tyefj97j0117|tyefj97j0118|tyefj97j0121|tyefj98j0003|tyefj98j0006|tyefj98j0014|tyefj98j0015|tyefj98j0017|tyefj98j0019|tyefj98j0020|tyefj98j0021|tyefj98j0022|tyefj98j0083|tyefj98j0085|tye
fj98j0086|tyefj98j0100|tyefj98j0104|tyefj98j0118|tyefj98j0121|tyefj99j0003|tyefj99j0006|tyefj99j0007|tyefj99j0014|tyefj99j0015|tyefj99j0017|tyefj99j0019|tyefj99j0020|tyefj99j0021|tyefj99j0022|tyefj99j0023|tyefj99j0100|tyefj99j0108|tyefj99j0110|tyefj99j0121|tyefj99j0125|tyeff94j0002|tyeff94j0008|tyeff94j0010|tyeff94j0011|tyeff94j0035|tyeff95j0002|tyeff95j0006|tyeff95j0008|tyeff95j0010|tyeff95j0011|tyeff95j0035|tyeff96j0003|tyeff96j0006|tyeff96j0009|tyeff96j0010|tyeff97j0004|tyeff97j0009|tyeff97j0116|tyeff98j0007|tyeff99j0007|tyeff99j0125|uyyfj00j0484|uyyfj00j0485|uyyfj00j0486|uyyfj00j0487|uyyfj00j0488|uyyfj00j0489|uyyfj00j0490|uyyfj00j0491|uyyfj00j0492|uyyfj00j0493|uyyfj00j0494|uyyfj00j0495|uyyfj00j0496|uyyfj00j0497|uyyfj00j0498|uyyfj00j0499|uyyfj00j0500|uyyfj00j0501|uyyfj00j0502|uyyfj00j0503|uyyfj00j0504|uyyfj00j0505|uyyfj00j0506|uyyfj00j0507|uyyfj00j0508|uyyfj00j0509|uyyfj00j0510|uyyfj00j0511|uyyfj00j0512|uyyfj00j0513|uyyfj00j0514|uyyfj00j0515|uyyfj00j0516|uyyfj00j0517|uyyfj00j0518|uyyfj00j0519|uyyfj00j0520|uyyfj00j0521|uyyfj00j0522|uyyfj00j0523|uyyfj00j0524|uyyfj00j0525|uyyfj00j0526|uyyfj00j0527|uyyfj00j0528|uyyfj00j0529|uyyfj00j0530|uyyfj00j0531|uyyfj00j0532|uyyfj00j0533|uyyfj00j0534|uyyfj00j0535|uyyfj00j0536|uyyfj00j0537|uyyfj00j0538|uyyfj00j0539|uyyfj00j0540|uyyfj00j0541|uyyfj00j0542|uyyfj00j0543|uyyfj00j0544|uyyfj00j0545|uyyfj00j0546|uyyfj00j0547|uyyfj00j0548|uyyfj00j0549|uyyfj00j0550|uyyfj00j0551|uyyfj00j0553|uyyfj00j0554|uyyfj00j0555|uyyfj00j0556|uyyfj00j0557|uyyfj00j0558|uyyfj00j0559|uyyfj00j0560|uyyfj00j0561|uyyfj00j0562|uyyfj00j0563|uyyfj00j0564|uyyfj00j0565|uyyfj00j0566|uyyfj00j0614|uyyfj00j0615|uyyfj00j0616|uyyfj00j0617|uyyfj00j0618|uyyfj00j0619|uyyfj00j0620|uyyfj00j0621|uyyfj00j0622|uyyfj00j0623|uyyfj00j0624|uyyfj00j0625|uyyfj00j0626|uyyfj00j0627|uyyfj00j0628|uyyfj00j0629|uyyfj00j0630|uyyfj00j0631|uyyfj00j0632|uyyfj00j0633|uyyfj00j0634|uyyfj00j0635|uyyfj00j0636|uyyfj00j0637|uyyfj00j0638|uyyfj00j0639|uyyfj00j0640|uyyfj00j0641|uyyfj00j0642|uyyfj00j0643|u
yyfj00j0644|uyyfj00j0645|uyyfj00j0646|uyyfj00j0647|uyyfj00j0648|uyyfj00j0649|uyyfj00j0650|uyyfj00j0651|uyyfj00j0652|uyyfj00j0653|uyyfj00j0654|uyyfj00j0655|uyyfj00j0656|uyyfj00j0657|uyyfj00j0658|uyyfj00j0659|uyyfj00j0660|uyyfj00j0661|uyyfj00j0662|uyyfj00j0663|uyyfj00j0664|uyyfj00j0665|uyyfj00j0666|uyyfj00j0667|uyyfj00j0668|uyyfj00j0669|uyyfj00j0670|uyyfj00j0671|uyyfj00j0672|uyyfj00j0673|uyyfj00j0674|uyyfj00j0675|uyyfj00j0676|uyyfj00j0677|uyyfj00j0678|uyyfj00j0679|uyyfj00j0680|uyyfj00j0681|uyyfj00j0682|uyyfj00j0683|uyyfj00j0684|uyyfj00j0685|uyyfj00j0686|uyyfj00j0687|uyyfj00j0688|uyyfj00j0689|uyyfj00j0690|uyyfj00j0691|uyyfj00j0692|uyyfj00j0693|uyyfj00j0694|uyyfj00j0695|uyyfj00j0696|uyyfj00j0697|uyyfj00j0698|uyyfj00j0699|uyyfj00j0700|uyyfj00j0701|uyyfj00j0702|uyyfj00j0703|uyyfj00j0704|uyyfj00j0705|uyyfj00j0706|uyyfj00j0707|uyyfj00j0708|uyyfj00j0709|uyyfj00j0710|uyyfj00j0711|uyyfj00j0712|uyyfj00j0713|uyyfj00j0714|uyyfj00j0715|uyyfj00j0716|uyyfj00j0717|uyyfj00j0718|uyyfj00j0719|uyyfj00j0720|uyyfj00j0721|uyyfj00j0722|uyyfj00j0723|uyyfj00j0724|uyyfj00j0725|uyyfj00j0726|uyyfj00j0727|uyyfj00j0728|uyyfj00j0729|uyyfj00j0730|uyyfj00j0731|uyyfj00j0732|uyyfj00j0733|uyyfj00j0734|uyyfj00j0735|uyyfj00j0736|uyyfj00j0737|uyyfj00j0738|uyyfj00j0739|uyyfj00j0740|uyyfj00j0741|uyyfj00j0742|uyyfj00j0743|uyyfj00j0744|uyyfj00j0745|uyyfj00j0746|uyyfj00j0747|uyyfj00j0748|uyyfj00j0749|uyyfj00j0750|uyyfj00j0751|uyyfj00j0752|uyyfj00j0753|uyyfj00j0754|uyyfj00j0755|uyyfj00j0756|uyyfj00j0757|uyyfj00j0758|uyyfj00j0759|uyyfj00j0760|uyyfj00j0761|uyyfj00j0762|uyyfj00j0763|uyyfj00j0764|uyyfj00j0765|uyyfj00j0766|uyyfj00j0767|uyyfj00j0768|uyyfj00j0769|uyyfj00j0770|uyyfj00j0771|uyyfj00j0772|uyyfj00j0773|uyyfj00j0774|uyyfj00j0775|uyyfj00j0776|uyyfj00j0777|uyyfj00j0778|uyyfj00j0779|uyyfj00j0780|uyyfj00j0781|uyyfj00j0782|uyyff00j0011|uyyff00j0031|uyyff00j0032|uyyff00j0033|uyyff00j0034|uyyff99j0012|uyefj00j0071|uyefj00j0455|uyefj00j0456|uyefj00j0582|uyefj00j0583|uyefj00j0584|uyefj00j0585|uyefj00j0586|uyefj00j0590
|uyeff00j0188|xyrly-f-jyy-y01|xyrly-f-jyy-y02|xyrly-f-jyy-y03|xyrly-f-jyy-y04|xyrly-f-jyy-y05|xyrly-f-jyy-y06|xyrly-f-jyy-y07|xyrly-f-jyy-y08|xyrly-f-jyy-y09|xyrly-f-jyy-y10|xyrly-f-jyy-y11|xyrly-f-jyy-y12|xyrly-f-jyy-y13|xyrly-f-jyy-y14|xyrly-f-jyy-y15|xyrly-f-jyy-y16|xyrly-f-url-y01|xyrly-f-url-y02|yyefj97j0005|ybyfcy4000|ybyfcy4001|ayefj99j0035|by-b-y-bzu-l01|by-b-y-bzu-l02|by-b-e-079|by-b-e-080|by-b-e-082|by-b-e-083|byefj72j0002|byefj73j0002|byefj74j0002|byefj75j0002|byefj76j0002|byefj77j0002|byefj78j0002|byefj79j0002|byefj91j0007|byefj92j0007|byefj98j0003|byefj99j0003|byefj99j0005|byefj99j0006|byeff88j0002|byeff89j0002|byeff90j0002|byeff91j0002|byeff92j0002|byeff93j0002|byeff96j0003|byeff97j0003|byeff98j0003|byeff99j0003|fymfj98j0001|fymfj99j0001|fyyaj98k0297|fyyaj99k0297|fyyfj00j0109|fyyfj00j0110|fyyfj00j0122|fyyfj00j0123|fyyfj00j0201|fyyfj00j0202|fyyfj00j0207|fyyfj00j0208|fyyfj00j0227|fyyfj00j0228|fyyfj00j0229|fyyfj00j0230|fyyfj00j0231|fyyfj00j0232|fyyfj00j0233|fyyfj00j0234|fyyfj00j0235|fyyfj00j0236|fyyfj00j0237|fyyfj00j0238|fyyfj00j0239|fyyfj00j0240|fyyfj00j0241|fyyfj00j0242|fyyfj00j0243|fyyfj00j0244|fyyfj00j0245|fyyfj00j0246|fyyfj00j0247|fyyfj00j0248|fyyfj00j0249|fyyfj00j0250|fyyfj00j0251|fyyfj00j0252|fyyfj00j0253|fyyfj00j0254|fyyfj00j0255|fyyfj00j0256|fyyfj00j0257|fyyfj00j0258|fyyfj00j0259|fyyfj00j0260|fyyfj00j0261|fyyfj00j0262|fyyfj00j0263|fyyfj00j0264|fyyfj00j0265|fyyfj00j0266|fyyfj00j0267|fyyfj00j0268|fyyfj00j0290|fyyfj00j0291|fyyfj00j0292|fyyfj00j0293|fyyfj00j0294|fyyfj00j0295|fyyfj00j0296|fyyfj00j0297|fyyfj00j0298|fyyfj00j0299|fyyfj00j0300|fyyfj00j0301|fyyfj00j0302|fyyfj00j0303|fyyfj00j0304|fyyfj00j0305|fyyfj00j0306|fyyfj00j0307|fyyfj00j0308|fyyfj00j0309|fyyfj00j0310|fyyfj00j0311|fyyfj00j0312|fyyfj00j0313|fyyfj00j0314|fyyfj00j0315|fyyfj00j0316|fyyfj00j0317|fyyfj00j0318|fyyfj00j0319|fyyfj00j0320|fyyfj00j0321|fyyfj00j0322|fyyfj00j0323|fyyfj00j0324|fyyfj00j0325|fyyfj00j0326|fyyfj00j0327|fyyfj00j0328|fyyfj00j0329|fyyfj00j0330|fyyfj00j0331|fyyfj00j0332|fyy
fj00j0333|fyyfj00j0334|fyyfj00j0335|fyyfj00j0340|fyyfj00j0341|fyyfj00j0342|fyyfj00j0343|fyyfj00j0344|fyyfj00j0345|fyyfj00j0346|fyyfj00j0347|fyyfj00j0348|fyyfj00j0349|fyyfj00j0367|fyyfj00j0368|fyyfj00j0369|fyyfj00j0370|fyyfj00j0371|fyyfj00j0372|fyyfj00j0373|fyyfj00j0374|fyyfj00j0375|fyyfj00j0376|fyyfj00j0377|fyyfj00j0378|fyyfj00j0379|fyyfj00j0380|fyyfj00j0381|fyyfj00j0382|fyyfj00j0383|fyyfj00j0384|fyyfj00j0385|fyyfj00j0386|fyyfj00j0387|fyyfj00j0388|fyyfj00j0415|fyyfj00j0416|fyyfj00j0417|fyyfj00j0418|fyyfj00j0419|fyyfj00j0420|fyyfj00j0421|fyyfj00j0422|fyyfj00j0423|fyyfj00j0424|fyyfj00j0425|fyyfj00j0426|fyyfj00j0427|fyyfj00j0428|fyyfj00j0429|fyyfj00j0430|fyyfj00j0431|fyyfj00j0432|fyyfj00j0433|fyyfj00j0434|fyyfj00j0435|fyyfj00j0436|fyyfj00j0437|fyyfj00j0438|fyyfj00j0439|fyyfj00j0440|fyyfj00j0441|fyyfj00j0446|fyyfj00j0447|fyyfj00j0448|fyyfj00j0449|fyyfj00j0451|fyyfj00j0452|fyyfj00j0453|fyyfj00j0454|fyyfj00j0455|fyyfj00j0456|fyyfj00j0457|fyyfj00j0459|fyyfj00j0460|fyyfj00j0461|fyyfj00j0462|fyyfj00j0463|fyyfj00j0464|fyyfj00j0465|fyyfj00j0466|fyyfj00j0467|fyyfj00j0468|fyyfj00j0469|fyyfj00j0470|fyyfj00j0471|fyyfj00j0474|fyyfj00j0475|fyyfj00j0476|fyyfj00j0477|fyyfj00j0478|fyyfj00j0479|fyyfj00j0480|fyyfj00j0481|fyyfj00j0482|fyyfj00j0483|fyyfj00j0484|fyyfj00j0485|fyyfj00j0486|fyyfj00j0487|fyyfj00j0488|fyyfj00j0489|fyyfj00j0490|fyyfj00j0491|fyyfj00j0492|fyyfj00j0493|fyyfj00j0494|fyyfj00j0495|fyyfj00j0496|fyyfj00j0497|fyyfj00j0498|fyyfj00j0499|fyyfj00j0500|fyyfj00j0501|fyyfj00j0502|fyyfj00j0503|fyyfj00j0504|fyyfj00j0505|fyyfj00j0506|fyyfj00j0507|fyyfj00j0508|fyyfj00j0509|fyyfj00j0510|fyyfj00j0511|fyyfj00j0512|fyyfj00j0513|fyyfj00j0514|fyyfj00j0515|fyyfj00j0516|fyyfj00j0517|fyyfj00j0518|fyyfj00j0521|fyyfj00j0522|fyyfj00j0523|fyyfj00j0524|fyyfj00j0526|fyyfj00j0527|fyyfj00j0528|fyyfj00j0529|fyyfj00j0530|fyyfj00j0531|fyyfj00j0532|fyyfj00j0533|fyyfj00j0534|fyyfj00j0535|fyyfj00j0536|fyyfj00j0537|fyyfj00j0538|fyyfj00j0539|fyyfj00j0540|fyyfj00j0541|fyyfj00j0542|fyyfj00j0543|fyyfj00j0544|f
yyfj00j0545|fyyfj00j0546|fyyfj00j0564|fyyfj00j0565|fyyfj00j0566|fyyfj00j0567|fyyfj00j0568|fyyfj00j0569|fyyfj00j0570|fyyfj00j0571|fyyfj00j0572|fyyfj00j0574|fyyfj00j0575|fyyfj00j0576|fyyfj00j0577|fyyfj00j0578|fyyfj00j0579|fyyfj00j0580|fyyfj01j0473|fyyfj02j0473|fyyfj36j0289|fyyfj37j0209|fyyfj37j0289|fyyfj38j0209|fyyfj38j0289|fyyfj39j0209|fyyfj39j0289|fyyfj40j0209|fyyfj40j0289|fyyfj41j0209|fyyfj41j0289|fyyfj42j0209|fyyfj42j0289|fyyfj43j0209|fyyfj43j0289|fyyfj44j0209|fyyfj44j0289|fyyfj45j0104|fyyfj45j0209|fyyfj45j0289|fyyfj46j0104|fyyfj46j0209|fyyfj46j0289|fyyfj47j0104|fyyfj47j0209|fyyfj47j0289|fyyfj48j0104|fyyfj48j0209|fyyfj48j0289|fyyfj49j0104|fyyfj49j0209|fyyfj49j0289|fyyfj50j0104|fyyfj50j0209|fyyfj50j0289|fyyfj50j0500|fyyfj51j0104|fyyfj51j0209|fyyfj51j0289|fyyfj51j0500|fyyfj52j0104|fyyfj52j0209|fyyfj52j0289|fyyfj52j0500|fyyfj53j0104|fyyfj53j0209|fyyfj53j0289|fyyfj53j0500|fyyfj54j0104|fyyfj54j0209|fyyfj54j0289|fyyfj54j0500|fyyfj55j0104|fyyfj55j0209|fyyfj55j0289|fyyfj55j0500|fyyfj56j0104|fyyfj56j0209|fyyfj56j0289|fyyfj56j0500|fyyfj57j0104|fyyfj57j0209|fyyfj57j0289|fyyfj57j0500|fyyfj58j0104|fyyfj58j0209|fyyfj58j0289|fyyfj58j0500|fyyfj59j0104|fyyfj59j0209|fyyfj59j0289|fyyfj59j0500|fyyfj60j0104|fyyfj60j0209|fyyfj60j0289|fyyfj60j0500|fyyfj61j0104|fyyfj61j0209|fyyfj61j0289|fyyfj61j0500|fyyfj62j0104|fyyfj62j0209|fyyfj62j0289|fyyfj62j0500|fyyfj63j0104|fyyfj63j0209|fyyfj63j0289|fyyfj63j0500|fyyfj64j0104|fyyfj64j0107|fyyfj64j0209|fyyfj64j0289|fyyfj64j0500|fyyfj64j0573|fyyfj65j0104|fyyfj65j0107|fyyfj65j0209|fyyfj65j0289|fyyfj65j0500|fyyfj65j0573|fyyfj66j0104|fyyfj66j0107|fyyfj66j0209|fyyfj66j0289|fyyfj66j0500|fyyfj66j0573|fyyfj67j0104|fyyfj67j0107|fyyfj67j0209|fyyfj67j0289|fyyfj67j0500|fyyfj67j0573|fyyfj68j0104|fyyfj68j0107|fyyfj68j0209|fyyfj68j0289|fyyfj68j0500|fyyfj68j0573|fyyfj69j0104|fyyfj69j0107|fyyfj69j0209|fyyfj69j0289|fyyfj69j0500|fyyfj69j0573|fyyfj70j0104|fyyfj70j0107|fyyfj70j0209|fyyfj70j0289|fyyfj70j0472|fyyfj70j0500|fyyfj70j0573|fyyfj71j0104|fyyfj71j0107|fyyfj71j0209
|fyyfj71j0289|fyyfj71j0472|fyyfj71j0500|fyyfj71j0573|fyyfj72j0104|fyyfj72j0107|fyyfj72j0209|fyyfj72j0289|fyyfj72j0472|fyyfj72j0500|fyyfj72j0573|fyyfj73j0104|fyyfj73j0107|fyyfj73j0209|fyyfj73j0289|fyyfj73j0472|fyyfj73j0500|fyyfj73j0573|fyyfj74j0104|fyyfj74j0107|fyyfj74j0209|fyyfj74j0289|fyyfj74j0472|fyyfj74j0500|fyyfj74j0573|fyyfj75j0104|fyyfj75j0107|fyyfj75j0108|fyyfj75j0209|fyyfj75j0289|fyyfj75j0472|fyyfj75j0500|fyyfj75j0573|fyyfj76j0104|fyyfj76j0107|fyyfj76j0108|fyyfj76j0209|fyyfj76j0289|fyyfj76j0472|fyyfj76j0500|fyyfj76j0573|fyyfj77j0104|fyyfj77j0107|fyyfj77j0108|fyyfj77j0209|fyyfj77j0289|fyyfj77j0472|fyyfj77j0500|fyyfj77j0573|fyyfj78j0104|fyyfj78j0107|fyyfj78j0108|fyyfj78j0209|fyyfj78j0289|fyyfj78j0472|fyyfj78j0500|fyyfj78j0573|fyyfj79j0104|fyyfj79j0107|fyyfj79j0108|fyyfj79j0209|fyyfj79j0289|fyyfj79j0339|fyyfj79j0472|fyyfj79j0500|fyyfj79j0573|fyyfj80j0104|fyyfj80j0107|fyyfj80j0108|fyyfj80j0209|fyyfj80j0289|fyyfj80j0339|fyyfj80j0352|fyyfj80j0472|fyyfj80j0500|fyyfj80j0573|fyyfj81j0104|fyyfj81j0107|fyyfj81j0108|fyyfj81j0209|fyyfj81j0289|fyyfj81j0339|fyyfj81j0352|fyyfj81j0472|fyyfj81j0500|fyyfj81j0573|fyyfj82j0104|fyyfj82j0107|fyyfj82j0108|fyyfj82j0209|fyyfj82j0289|fyyfj82j0339|fyyfj82j0352|fyyfj82j0472|fyyfj82j0500|fyyfj82j0573|fyyfj83j0104|fyyfj83j0107|fyyfj83j0108|fyyfj83j0209|fyyfj83j0289|fyyfj83j0339|fyyfj83j0352|fyyfj83j0472|fyyfj83j0500|fyyfj83j0573|fyyfj84j0104|fyyfj84j0107|fyyfj84j0108|fyyfj84j0209|fyyfj84j0289|fyyfj84j0339|fyyfj84j0352|fyyfj84j0472|fyyfj84j0500|fyyfj84j0573|fyyfj85j0104|fyyfj85j0107|fyyfj85j0108|fyyfj85j0209|fyyfj85j0289|fyyfj85j0301|fyyfj85j0339|fyyfj85j0352|fyyfj85j0472|fyyfj85j0500|fyyfj85j0573|fyyfj86j0104|fyyfj86j0107|fyyfj86j0108|fyyfj86j0209|fyyfj86j0289|fyyfj86j0301|fyyfj86j0339|fyyfj86j0352|fyyfj86j0472|fyyfj86j0500|fyyfj86j0573|fyyfj87j0067|fyyfj87j0104|fyyfj87j0107|fyyfj87j0108|fyyfj87j0209|fyyfj87j0289|fyyfj87j0301|fyyfj87j0339|fyyfj87j0352|fyyfj87j0472|fyyfj87j0500|fyyfj87j0573|fyyfj88j0067|fyyfj88j0104|fyyfj88j0107|fyyfj88j01
08|fyyfj88j0209|fyyfj88j0289|fyyfj88j0301|fyyfj88j0339|fyyfj88j0352|fyyfj88j0472|fyyfj88j0500|fyyfj88j0573|fyyfj89j0067|fyyfj89j0104|fyyfj89j0107|fyyfj89j0108|fyyfj89j0209|fyyfj89j0289|fyyfj89j0301|fyyfj89j0339|fyyfj89j0352|fyyfj89j0358|fyyfj89j0472|fyyfj89j0500|fyyfj89j0573|fyyfj90j0067|fyyfj90j0104|fyyfj90j0107|fyyfj90j0108|fyyfj90j0209|fyyfj90j0289|fyyfj90j0301|fyyfj90j0321|fyyfj90j0339|fyyfj90j0352|fyyfj90j0358|fyyfj90j0452|fyyfj90j0472|fyyfj90j0500|fyyfj90j0573|fyyfj91j0067|fyyfj91j0104|fyyfj91j0107|fyyfj91j0108|fyyfj91j0209|fyyfj91j0289|fyyfj91j0301|fyyfj91j0321|fyyfj91j0339|fyyfj91j0352|fyyfj91j0358|fyyfj91j0452|fyyfj91j0472|fyyfj91j0500|fyyfj91j0573|fyyfj92j0067|fyyfj92j0104|fyyfj92j0107|fyyfj92j0108|fyyfj92j0209|fyyfj92j0289|fyyfj92j0301|fyyfj92j0321|fyyfj92j0339|fyyfj92j0352|fyyfj92j0358|fyyfj92j0452|fyyfj92j0472|fyyfj92j0500|fyyfj92j0573|fyyfj93j0067|fyyfj93j0099|fyyfj93j0104|fyyfj93j0107|fyyfj93j0108|fyyfj93j0209|fyyfj93j0289|fyyfj93j0301|fyyfj93j0321|fyyfj93j0352|fyyfj93j0358|fyyfj93j0452|fyyfj93j0472|fyyfj93j0500|fyyfj93j0573|fyyfj94j0067|fyyfj94j0099|fyyfj94j0104|fyyfj94j0107|fyyfj94j0108|fyyfj94j0209|fyyfj94j0211|fyyfj94j0289|fyyfj94j0301|fyyfj94j0321|fyyfj94j0352|fyyfj94j0358|fyyfj94j0359|fyyfj94j0452|fyyfj94j0472|fyyfj94j0500|fyyfj94j0573|fyyfj95j0067|fyyfj95j0099|fyyfj95j0104|fyyfj95j0107|fyyfj95j0108|fyyfj95j0209|fyyfj95j0211|fyyfj95j0289|fyyfj95j0298|fyyfj95j0301|fyyfj95j0321|fyyfj95j0339|fyyfj95j0352|fyyfj95j0358|fyyfj95j0359|fyyfj95j0414|fyyfj95j0452|fyyfj95j0472|fyyfj95j0500|fyyfj95j0573|fyyfj96j0067|fyyfj96j0099|fyyfj96j0104|fyyfj96j0107|fyyfj96j0108|fyyfj96j0209|fyyfj96j0211|fyyfj96j0289|fyyfj96j0298|fyyfj96j0301|fyyfj96j0321|fyyfj96j0339|fyyfj96j0352|fyyfj96j0358|fyyfj96j0359|fyyfj96j0414|fyyfj96j0452|fyyfj96j0472|fyyfj96j0500|fyyfj96j0573|fyyfj97j0067|fyyfj97j0099|fyyfj97j0100|fyyfj97j0104|fyyfj97j0107|fyyfj97j0108|fyyfj97j0209|fyyfj97j0211|fyyfj97j0289|fyyfj97j0298|fyyfj97j0301|fyyfj97j0321|fyyfj97j0339|fyyfj97j0352|fyyfj97j0358|fyyfj97j
0359|fyyfj97j0414|fyyfj97j0445|fyyfj97j0452|fyyfj97j0472|fyyfj97j0500|fyyfj97j0573|fyyfj98j0067|fyyfj98j0099|fyyfj98j0100|fyyfj98j0104|fyyfj98j0107|fyyfj98j0108|fyyfj98j0178|fyyfj98j0209|fyyfj98j0211|fyyfj98j0289|fyyfj98j0298|fyyfj98j0301|fyyfj98j0303|fyyfj98j0321|fyyfj98j0339|fyyfj98j0352|fyyfj98j0358|fyyfj98j0359|fyyfj98j0413|fyyfj98j0414|fyyfj98j0445|fyyfj98j0452|fyyfj98j0472|fyyfj98j0500|fyyfj98j0573|fyyfj99j0067|fyyfj99j0099|fyyfj99j0100|fyyfj99j0104|fyyfj99j0107|fyyfj99j0108|fyyfj99j0131|fyyfj99j0209|fyyfj99j0211|fyyfj99j0285|fyyfj99j0289|fyyfj99j0298|fyyfj99j0301|fyyfj99j0303|fyyfj99j0321|fyyfj99j0339|fyyfj99j0352|fyyfj99j0358|fyyfj99j0359|fyyfj99j0413|fyyfj99j0414|fyyfj99j0445|fyyfj99j0452|fyyfj99j0472|fyyfj99j0500|fyyfj99j0573|fyyfm01j0064|fyyfm01j0070|fyyfm01j0071|fyyfm01j0088|fyyfm01j0091|fyyfm01j0108|fyyfm01j0111|fyyfm01j0112|fyyfm01j0114|fyyfm01j0115|fyyfm01j0133|fyyfm01j0140|fyyfm01j0141|fyyfm01j0142|fyyfm01j0143|fyyfm01j0148|fyyfm01j0149|fyyfm01j0152|fyyfm01j0153|fyyfm01j0155|fyyfm01j0159|fyyfm01j0160|fyyfm01j0163|fyyfm01j0165|fyyfm01j0168|fyyfm01j0169|fyyfm01j0221|fyyfm01j0223|fyyfm01j0268|fyyfm01j0271|fyyfm01j0285|fyyfm01j0299|fyyfm01j0320|fyyfm01j0321|fyyfm01j0360|fyyfm01j0369|fyyfm01j0400|fyyfm01j0401|fyyfm01j0411|fyyfm01j0572|fyyfm01j0765|fyyfm02j0064|fyyfm02j0069|fyyfm02j0070|fyyfm02j0071|fyyfm02j0088|fyyfm02j0091|fyyfm02j0108|fyyfm02j0111|fyyfm02j0112|fyyfm02j0114|fyyfm02j0115|fyyfm02j0133|fyyfm02j0140|fyyfm02j0141|fyyfm02j0142|fyyfm02j0143|fyyfm02j0148|fyyfm02j0149|fyyfm02j0152|fyyfm02j0153|fyyfm02j0155|fyyfm02j0159|fyyfm02j0160|fyyfm02j0163|fyyfm02j0165|fyyfm02j0168|fyyfm02j0169|fyyfm02j0221|fyyfm02j0223|fyyfm02j0268|fyyfm02j0271|fyyfm02j0285|fyyfm02j0299|fyyfm02j0320|fyyfm02j0321|fyyfm02j0360|fyyfm02j0369|fyyfm02j0400|fyyfm02j0572|fyyfm02j0765|fyyfm03j0064|fyyfm03j0070|fyyfm03j0091|fyyfm03j0108|fyyfm03j0111|fyyfm03j0115|fyyfm03j0160|fyyfm03j0165|fyyfm03j0299|fyyfm03j0400|fyyfm03j0572|fyyfm04j0111|fyyfm51j0064|fyyfm51j0369|fyyfm52j0064|fyyfm5
2j0369|fyyfr88j0003|fyyfr89j0003|fyyff98j0071|fyyff98j0303|fyyff99j0029|fyyff99j0303|fyefj00j0112|fyefj00j0545|fyefj00j0546|fyefj00j0633|fyefj00j0634|fyefj00j0635|fyefj00j0636|fyefj00j0637|fyefj00j0649|fyefj00j0651|fyefj00j0652|fyefj00j0656|fyefj00j0657|fyefj00j0658|fyefj00j0659|fyefj00j0660|fyefj00j0685|fyefj00j0686|fyefj00j0688|fyefj00j0701|fyefj00j0702|fyefj00j0703|fyefj00j0715|fyefj00j0720|fyefj00j0721|fyefj00j0722|fyefj00j0724|fyefj00j0725|fyefj00j0726|fyefj00j0731|fyefj00j0751|fyefj00j0752|fyefj00j0756|fyefj00j0757|fyefj00j0758|fyefj00j0759|fyefj00j0761|fyefj00j0762|fyefj00j0763|fyefj00j0764|fyefj00j0768|fyefj00j0769|fyefj00j0785|fyefj00j0786|fyefj00j0789|fyefj00j0790|fyefj00j0793|fyefj00j0794|fyefj00j0803|fyefj00j0811|fyefj00j0821|fyefj00j0822|fyefj00j0823|fyefj00j0824|fyefj00j0825|fyefj00j0826|fyefj00j0827|fyefj00j0828|fyefj00j0829|fyefj00j0831|fyefj00j0832|fyefj00j0833|fyefj00j0838|fyefj00j0839|fyefj00j0840|fyefj00j0854|fyefj00j0855|fyefj00j0856|fyefj00j0859|fyefj00j0860|fyefj00j0861|fyefj00j0869|fyefj00j0870|fyefj00j0879|fyefj00j0887|fyefj00j0888|fyefj00j0889|fyefj00j0900|fyefj00j0901|fyefj00j0903|fyefj00j0904|fyefj00j0905|fyefj00j0959|fyefj00j0960|fyefj00j0961|fyefj00j1004|fyefj00j1005|fyefj00j1012|fyefj00j1013|fyefj00j1014|fyefj00j1015|fyefj00j1016|fyefj00j1017|fyefj00j1018|fyefj00j1019|fyefj00j1020|fyefj00j1021|fyefj00j1218|fyefj00j1219|fyefj00j1220|fyefj00j1221|fyefj00j1222|fyefj00j1811|fyefj00j1854|fyefj00j1855|fyefj00j1856|fyefj01j0707|fyefj02j0707|fyefj03j0707|fyefj66j0001|fyefj67j0001|fyefj68j0001|fyefj68j1064|fyefj69j0001|fyefj69j1064|fyefj70j0001|fyefj70j0859|fyefj70j1064|fyefj71j0001|fyefj71j1064|fyefj72j0001|fyefj72j1064|fyefj73j0001|fyefj73j1064|fyefj74j0001|fyefj74j1064|fyefj75j0001|fyefj75j1064|fyefj75j1092|fyefj76j0001|fyefj76j1064|fyefj76j1092|fyefj77j0001|fyefj77j1064|fyefj77j1092|fyefj78j0001|fyefj78j1064|fyefj78j1092|fyefj79j0001|fyefj79j1064|fyefj79j1092|fyefj80j0001|fyefj80j0859|fyefj80j1064|fyefj80j1077|fyefj80j1092|fyefj81j0001|fyef
j81j1064|fyefj81j1077|fyefj81j1092|fyefj82j0001|fyefj82j1064|fyefj82j1092|fyefj83j0001|fyefj83j1064|fyefj83j1092|fyefj84j0001|fyefj84j1064|fyefj84j1092|fyefj85j0001|fyefj85j0356|fyefj85j1064|fyefj85j1092|fyefj86j0001|fyefj86j0356|fyefj86j1064|fyefj87j0001|fyefj87j0356|fyefj87j1064|fyefj88j0001|fyefj88j0356|fyefj88j1064|fyefj89j0001|fyefj89j0356|fyefj89j1064|fyefj89j1067|fyefj90j0001|fyefj90j0758|fyefj90j1021|fyefj90j1064|fyefj90j1067|fyefj91j0001|fyefj91j0758|fyefj91j0791|fyefj91j1021|fyefj91j1064|fyefj91j1067|fyefj91j1077|fyefj92j0001|fyefj92j0359|fyefj92j0678|fyefj92j0758|fyefj92j0791|fyefj92j0867|fyefj92j1021|fyefj92j1064|fyefj92j1077|fyefj93j0001|fyefj93j0359|fyefj93j0678|fyefj93j0758|fyefj93j0791|fyefj93j0867|fyefj93j1010|fyefj93j1021|fyefj93j1049|fyefj93j1064|fyefj93j1077|fyefj94j0001|fyefj94j0678|fyefj94j0758|fyefj94j0791|fyefj94j0867|fyefj94j1010|fyefj94j1021|fyefj94j1049|fyefj94j1064|fyefj94j1070|fyefj94j1077|fyefj94j1085|fyefj95j0001|fyefj95j0678|fyefj95j0758|fyefj95j0791|fyefj95j0867|fyefj95j0965|fyefj95j0966|fyefj95j1010|fyefj95j1011|fyefj95j1021|fyefj95j1055|fyefj95j1064|fyefj95j1069|fyefj95j1077|fyefj95j1085|fyefj95j1089|fyefj96j0001|fyefj96j0106|fyefj96j0671|fyefj96j0678|fyefj96j0758|fyefj96j0791|fyefj96j0814|fyefj96j0836|fyefj96j0867|fyefj96j0931|fyefj96j0965|fyefj96j0966|fyefj96j0976|fyefj96j1010|fyefj96j1021|fyefj96j1051|fyefj96j1055|fyefj96j1064|fyefj96j1068|fyefj96j1070|fyefj96j1077|fyefj96j1079|fyefj96j1081|fyefj96j1086|fyefj96j1088|fyefj96j1091|fyefj96j1093|fyefj96j1094|fyefj97j0001|fyefj97j0106|fyefj97j0584|fyefj97j0586|fyefj97j0671|fyefj97j0678|fyefj97j0758|fyefj97j0791|fyefj97j0814|fyefj97j0825|fyefj97j0836|fyefj97j0863|fyefj97j0865|fyefj97j0867|fyefj97j0914|fyefj97j0931|fyefj97j0952|fyefj97j0965|fyefj97j0966|fyefj97j0969|fyefj97j0971|fyefj97j0972|fyefj97j0976|fyefj97j0985|fyefj97j1010|fyefj97j1021|fyefj97j1051|fyefj97j1052|fyefj97j1055|fyefj97j1058|fyefj97j1059|fyefj97j1064|fyefj97j1068|fyefj97j1077|fyefj97j1079|fyefj97j1081|fyefj97j1086|fy
efj97j1088|fyefj97j1095|fyefj98j0001|fyefj98j0243|fyefj98j0326|fyefj98j0329|fyefj98j0343|fyefj98j0344|fyefj98j0380|fyefj98j0472|fyefj98j0584|fyefj98j0586|fyefj98j0604|fyefj98j0671|fyefj98j0673|fyefj98j0676|fyefj98j0677|fyefj98j0678|fyefj98j0694|fyefj98j0758|fyefj98j0814|fyefj98j0825|fyefj98j0836|fyefj98j0863|fyefj98j0865|fyefj98j0867|fyefj98j0896|fyefj98j0898|fyefj98j0901|fyefj98j0906|fyefj98j0910|fyefj98j0913|fyefj98j0914|fyefj98j0922|fyefj98j0931|fyefj98j0934|fyefj98j0936|fyefj98j0951|fyefj98j0952|fyefj98j0963|fyefj98j0965|fyefj98j0966|fyefj98j0969|fyefj98j0971|fyefj98j0972|fyefj98j0974|fyefj98j0975|fyefj98j0976|fyefj98j0977|fyefj98j0978|fyefj98j0985|fyefj98j0992|fyefj98j1008|fyefj98j1009|fyefj98j1010|fyefj98j1011|fyefj98j1012|fyefj98j1019|fyefj98j1021|fyefj98j1028|fyefj98j1034|fyefj98j1039|fyefj98j1046|fyefj98j1047|fyefj98j1048|fyefj98j1054|fyefj98j1055|fyefj98j1064|fyefj98j1068|fyefj98j1077|fyefj98j1079|fyefj98j1080|fyefj98j1081|fyefj98j1082|fyefj98j1084|fyefj98j1087|fyefj98j1088|fyefj98j1090|fyefj99j0010|fyefj99j0188|fyefj99j0243|fyefj99j0268|fyefj99j0280|fyefj99j0301|fyefj99j0329|fyefj99j0343|fyefj99j0344|fyefj99j0380|fyefj99j0552|fyefj99j0573|fyefj99j0584|fyefj99j0586|fyefj99j0604|fyefj99j0671|fyefj99j0673|fyefj99j0676|fyefj99j0677|fyefj99j0678|fyefj99j0694|fyefj99j0722|fyefj99j0757|fyefj99j0758|fyefj99j0771|fyefj99j0772|fyefj99j0804|fyefj99j0806|fyefj99j0809|fyefj99j0814|fyefj99j0825|fyefj99j0836|fyefj99j0862|fyefj99j0863|fyefj99j0865|fyefj99j0866|fyefj99j0867|fyefj99j0875|fyefj99j0896|fyefj99j0898|fyefj99j0901|fyefj99j0906|fyefj99j0907|fyefj99j0908|fyefj99j0910|fyefj99j0912|fyefj99j0913|fyefj99j0914|fyefj99j0921|fyefj99j0922|fyefj99j0923|fyefj99j0931|fyefj99j0934|fyefj99j0936|fyefj99j0937|fyefj99j0949|fyefj99j0951|fyefj99j0952|fyefj99j0962|fyefj99j0963|fyefj99j0965|fyefj99j0966|fyefj99j0969|fyefj99j0971|fyefj99j0972|fyefj99j0974|fyefj99j0975|fyefj99j0976|fyefj99j0977|fyefj99j0978|fyefj99j0982|fyefj99j0985|fyefj99j0986|fyefj99j0988|fyefj99j0991|fyefj99j0992|
fyefj99j0995|fyefj99j0997|fyefj99j0999|fyefj99j1003|fyefj99j1006|fyefj99j1008|fyefj99j1009|fyefj99j1010|fyefj99j1011|fyefj99j1016|fyefj99j1019|fyefj99j1020|fyefj99j1021|fyefj99j1024|fyefj99j1026|fyefj99j1028|fyefj99j1031|fyefj99j1033|fyefj99j1034|fyefj99j1036|fyefj99j1039|fyefj99j1042|fyefj99j1045|fyefj99j1046|fyefj99j1048|fyefj99j1053|fyefj99j1054|fyefj99j1055|fyefj99j1061|fyefj99j1062|fyefj99j1063|fyefj99j1064|fyefj99j1068|fyefj99j1072|fyefj99j1076|fyefj99j1077|fyefj99j1079|fyefj99j1080|fyefj99j1081|fyefj99j1083|fyefj99j1084|fyefj99j1087|fyefj99j1088|fyefm00j0113|fyefm01j0057|fyefm01j0088|fyefm01j0091|fyefm01j0101|fyefm01j0104|fyefm01j0107|fyefm01j0112|fyefm01j0379|fyefm02j0057|fyefm02j0101|fyefm02j0104|fyefm02j0107|fyefm02j0112|fyefm02j0379|fyefm98j0066|fyefm99j0066|fyefm99j0090|fyefm99j0093|fyefm99j0110|fyefm99j0165|fyefm99j0208|fyefm99j0209|fyefm99j0295|fyefm99j0401|fyefm99j0402|fyefm99j0907|fyefm99j1054|fyefn98j0015|fyefn98j0024|fyefn98j0030|fyefn99j0015|fyefn99j0024|fyefn99j0030|fyefr94j0559|fyefr95j0559|fyefr96j0559|fyefr97j0559|fyefr98j0559|fyefr99j0012|fyefr99j0559|fyefb01305|fyeff00j0170|fyeff00j0224|fyeff00j0227|fyeff00j0228|fyeff00j0229|fyeff00j0280|fyeff00j0281|fyeff00j0282|fyeff00j0283|fyeff00j0288|fyeff00j0289|fyeff00j0331|fyeff00j0332|fyeff00j0333|fyeff00j0334|fyeff00j0335|fyeff00j0336|fyeff00j0337|fyeff00j0338|fyeff00j0346|fyeff00j0347|fyeff00j0348|fyeff00j0349|fyeff00j0350|fyeff00j0351|fyeff00j0357|fyeff00j0358|fyeff00j0371|fyeff00j0372|fyeff00j0396|fyeff00j0397|fyeff00j0424|fyeff00j0425|fyeff01j0416|fyeff02j0416|fyeff78j0418|fyeff79j0418|fyeff79j1051|fyeff80j1051|fyeff81j1051|fyeff82j1051|fyeff83j1051|fyeff84j1051|fyeff85j1051|fyeff86j1051|fyeff87j1051|fyeff88j0422|fyeff89j0422|fyeff90j0422|fyeff90j0434|fyeff90j0440|fyeff91j0422|fyeff91j0434|fyeff91j0440|fyeff92j0440|fyeff93j0440|fyeff93j1045|fyeff93j1067|fyeff94j0392|fyeff94j0440|fyeff94j0443|fyeff94j1045|fyeff94j1067|fyeff95j0219|fyeff95j0392|fyeff95j0439|fyeff95j0440|fyeff95j0443|fyeff96j0053|
fyeff96j0219|fyeff96j0392|fyeff96j0429|fyeff96j0434|fyeff96j0950|fyeff96j1019|fyeff96j1028|fyeff97j0053|fyeff97j0178|fyeff97j0191|fyeff97j0219|fyeff97j0221|fyeff97j0258|fyeff97j0324|fyeff97j0355|fyeff97j0370|fyeff97j0377|fyeff97j0392|fyeff97j0429|fyeff97j0434|fyeff97j0950|fyeff97j1019|fyeff98j0053|fyeff98j0065|fyeff98j0101|fyeff98j0144|fyeff98j0156|fyeff98j0178|fyeff98j0191|fyeff98j0193|fyeff98j0196|fyeff98j0197|fyeff98j0209|fyeff98j0210|fyeff98j0211|fyeff98j0214|fyeff98j0215|fyeff98j0218|fyeff98j0219|fyeff98j0221|fyeff98j0258|fyeff98j0260|fyeff98j0279|fyeff98j0284|fyeff98j0295|fyeff98j0296|fyeff98j0298|fyeff98j0324|fyeff98j0355|fyeff98j0370|fyeff98j0376|fyeff98j0379|fyeff98j0381|fyeff98j0392|fyeff98j0401|fyeff98j0404|fyeff98j0405|fyeff98j0407|fyeff98j0411|fyeff98j0418|fyeff98j0421|fyeff98j0423|fyeff98j0433|fyeff98j0436|fyeff98j0673|fyeff98j0896|fyeff98j0950|fyeff98j0985|fyeff98j1012|fyeff99j0053|fyeff99j0065|fyeff99j0152|fyeff99j0156|fyeff99j0159|fyeff99j0178|fyeff99j0191|fyeff99j0193|fyeff99j0196|fyeff99j0197|fyeff99j0209|fyeff99j0210|fyeff99j0211|fyeff99j0214|fyeff99j0215|fyeff99j0218|fyeff99j0219|fyeff99j0220|fyeff99j0221|fyeff99j0260|fyeff99j0279|fyeff99j0284|fyeff99j0291|fyeff99j0295|fyeff99j0296|fyeff99j0297|fyeff99j0298|fyeff99j0324|fyeff99j0339|fyeff99j0355|fyeff99j0370|fyeff99j0376|fyeff99j0379|fyeff99j0381|fyeff99j0392|fyeff99j0401|fyeff99j0404|fyeff99j0405|fyeff99j0407|fyeff99j0410|fyeff99j0411|fyeff99j0413|fyeff99j0414|fyeff99j0415|fyeff99j0418|fyeff99j0421|fyeff99j0423|fyeff99j0436|fyeff99j0673|fyeff99j0896|fyeff99j0950|fyeff99j0962|fyeff99j0985|fyeff99j1010|fyeff99j1012|fyeff99j1028|fyeff99j1090|fyeff99j1370|fayfm01j0148|fayfm01j0149|fayfm01j0155|fayfm02j0148|fayfm02j0149|fayfm02j0155|faefj00j0594|faefj00j0595|faefj00j0596|faefj00j0597|faefj01j0707|faefj02j0707|faefj03j0707|faefj90j1023|faefj91j1023|faefj92j1023|faefj94j1056|faefj95j1023|faefj95j1056|faefj96j1056|faefj98j1038|faefj99j1078|fdeff99j9001|fdeff99j9002|gyefj99j0005",
// A long case insensitive alternation.
"(?i:(zQPbMkNO|NNSPdvMi|iWuuSoAl|qbvKMimS|IecrXtPa|seTckYqt|NxnyHkgB|fIDlOgKb|UhlWIygH|OtNoJxHG|cUTkFVIV|mTgFIHjr|jQkoIDtE|PPMKxRXl|AwMfwVkQ|CQyMrTQJ|BzrqxVSi|nTpcWuhF|PertdywG|ZZDgCtXN|WWdDPyyE|uVtNQsKk|BdeCHvPZ|wshRnFlH|aOUIitIp|RxZeCdXT|CFZMslCj|AVBZRDxl|IzIGCnhw|ythYuWiz|oztXVXhl|VbLkwqQx|qvaUgyVC|VawUjPWC|ecloYJuj|boCLTdSU|uPrKeAZx|hrMWLWBq|JOnUNHRM|rYnujkPq|dDEdZhIj|DRrfvugG|yEGfDxVV|YMYdJWuP|PHUQZNWM|AmKNrLis|zTxndVfn|FPsHoJnc|EIulZTua|KlAPhdzg|ScHJJCLt|NtTfMzME|eMCwuFdo|SEpJVJbR|cdhXZeCx|sAVtBwRh|kVFEVcMI|jzJrxraA|tGLHTell|NNWoeSaw|DcOKSetX|UXZAJyka|THpMphDP|rizheevl|kDCBRidd|pCZZRqyu|pSygkitl|SwZGkAaW|wILOrfNX|QkwVOerj|kHOMxPDr|EwOVycJv|AJvtzQFS|yEOjKYYB|LizIINLL|JBRSsfcG|YPiUqqNl|IsdEbvee|MjEpGcBm|OxXZVgEQ|xClXGuxa|UzRCGFEb|buJbvfvA|IPZQxRet|oFYShsMc|oBHffuHO|bzzKrcBR|KAjzrGCl|IPUsAVls|OGMUMbIU|gyDccHuR|bjlalnDd|ZLWjeMna|fdsuIlxQ|dVXtiomV|XxedTjNg|XWMHlNoA|nnyqArQX|opfkWGhb|wYtnhdYb))",
"(?i:(AAAAAAAAAAAAAAAAAAAAAAAA|BBBBBBBBBBBBBBBBBBBBBBBB|cccccccccccccccccccccccC|ſſſſſſſſſſſſſſſſſſſſſſſſS|SSSSSSSSSSSSSSSSSSSSSSSSſ))",
// A short case insensitive alternation where each entry ends with ".*".
"(?i:(zQPbMkNO.*|NNSPdvMi.*|iWuuSoAl.*))",
// A long case insensitive alternation where each entry ends with ".*".
"(?i:(zQPbMkNO.*|NNSPdvMi.*|iWuuSoAl.*|qbvKMimS.*|IecrXtPa.*|seTckYqt.*|NxnyHkgB.*|fIDlOgKb.*|UhlWIygH.*|OtNoJxHG.*|cUTkFVIV.*|mTgFIHjr.*|jQkoIDtE.*|PPMKxRXl.*|AwMfwVkQ.*|CQyMrTQJ.*|BzrqxVSi.*|nTpcWuhF.*|PertdywG.*|ZZDgCtXN.*|WWdDPyyE.*|uVtNQsKk.*|BdeCHvPZ.*|wshRnFlH.*|aOUIitIp.*|RxZeCdXT.*|CFZMslCj.*|AVBZRDxl.*|IzIGCnhw.*|ythYuWiz.*|oztXVXhl.*|VbLkwqQx.*|qvaUgyVC.*|VawUjPWC.*|ecloYJuj.*|boCLTdSU.*|uPrKeAZx.*|hrMWLWBq.*|JOnUNHRM.*|rYnujkPq.*|dDEdZhIj.*|DRrfvugG.*|yEGfDxVV.*|YMYdJWuP.*|PHUQZNWM.*|AmKNrLis.*|zTxndVfn.*|FPsHoJnc.*|EIulZTua.*|KlAPhdzg.*|ScHJJCLt.*|NtTfMzME.*|eMCwuFdo.*|SEpJVJbR.*|cdhXZeCx.*|sAVtBwRh.*|kVFEVcMI.*|jzJrxraA.*|tGLHTell.*|NNWoeSaw.*|DcOKSetX.*|UXZAJyka.*|THpMphDP.*|rizheevl.*|kDCBRidd.*|pCZZRqyu.*|pSygkitl.*|SwZGkAaW.*|wILOrfNX.*|QkwVOerj.*|kHOMxPDr.*|EwOVycJv.*|AJvtzQFS.*|yEOjKYYB.*|LizIINLL.*|JBRSsfcG.*|YPiUqqNl.*|IsdEbvee.*|MjEpGcBm.*|OxXZVgEQ.*|xClXGuxa.*|UzRCGFEb.*|buJbvfvA.*|IPZQxRet.*|oFYShsMc.*|oBHffuHO.*|bzzKrcBR.*|KAjzrGCl.*|IPUsAVls.*|OGMUMbIU.*|gyDccHuR.*|bjlalnDd.*|ZLWjeMna.*|fdsuIlxQ.*|dVXtiomV.*|XxedTjNg.*|XWMHlNoA.*|nnyqArQX.*|opfkWGhb.*|wYtnhdYb.*))",
// A long case insensitive alternation where each entry starts with ".*".
"(?i:(.*zQPbMkNO|.*NNSPdvMi|.*iWuuSoAl|.*qbvKMimS|.*IecrXtPa|.*seTckYqt|.*NxnyHkgB|.*fIDlOgKb|.*UhlWIygH|.*OtNoJxHG|.*cUTkFVIV|.*mTgFIHjr|.*jQkoIDtE|.*PPMKxRXl|.*AwMfwVkQ|.*CQyMrTQJ|.*BzrqxVSi|.*nTpcWuhF|.*PertdywG|.*ZZDgCtXN|.*WWdDPyyE|.*uVtNQsKk|.*BdeCHvPZ|.*wshRnFlH|.*aOUIitIp|.*RxZeCdXT|.*CFZMslCj|.*AVBZRDxl|.*IzIGCnhw|.*ythYuWiz|.*oztXVXhl|.*VbLkwqQx|.*qvaUgyVC|.*VawUjPWC|.*ecloYJuj|.*boCLTdSU|.*uPrKeAZx|.*hrMWLWBq|.*JOnUNHRM|.*rYnujkPq|.*dDEdZhIj|.*DRrfvugG|.*yEGfDxVV|.*YMYdJWuP|.*PHUQZNWM|.*AmKNrLis|.*zTxndVfn|.*FPsHoJnc|.*EIulZTua|.*KlAPhdzg|.*ScHJJCLt|.*NtTfMzME|.*eMCwuFdo|.*SEpJVJbR|.*cdhXZeCx|.*sAVtBwRh|.*kVFEVcMI|.*jzJrxraA|.*tGLHTell|.*NNWoeSaw|.*DcOKSetX|.*UXZAJyka|.*THpMphDP|.*rizheevl|.*kDCBRidd|.*pCZZRqyu|.*pSygkitl|.*SwZGkAaW|.*wILOrfNX|.*QkwVOerj|.*kHOMxPDr|.*EwOVycJv|.*AJvtzQFS|.*yEOjKYYB|.*LizIINLL|.*JBRSsfcG|.*YPiUqqNl|.*IsdEbvee|.*MjEpGcBm|.*OxXZVgEQ|.*xClXGuxa|.*UzRCGFEb|.*buJbvfvA|.*IPZQxRet|.*oFYShsMc|.*oBHffuHO|.*bzzKrcBR|.*KAjzrGCl|.*IPUsAVls|.*OGMUMbIU|.*gyDccHuR|.*bjlalnDd|.*ZLWjeMna|.*fdsuIlxQ|.*dVXtiomV|.*XxedTjNg|.*XWMHlNoA|.*nnyqArQX|.*opfkWGhb|.*wYtnhdYb))",
// Quest ".?".
"fo.?",
"foo.?",
"f.?o",
".*foo.?",
".?foo.+",
"foo.?|bar",
"ſſs",
// Concat of literals and wildcards.
".*-.*-.*-.*-.*",
".+-.*-.*-.*-.+",
"-.*-.*-.*-.*",
".*-.*-.*-.*-",
"(.+)-(.+)-(.+)-(.+)-(.+)",
"((.*))(?i:f)((.*))o((.*))o((.*))",
"((.*))f((.*))(?i:o)((.*))o((.*))",
"(.*0.*)",
}
values = []string{
"foo", " foo bar", "bar", "buzz\nbar", "bar foo", "bfoo", "\n", "\nfoo", "foo\n", "hello foo world", "hello foo\n world", "",
"FOO", "Foo", "fOo", "foO", "OO", "Oo", "\nfoo\n", strings.Repeat("f", 20), "prometheus", "prometheus_api_v1", "prometheus_api_v1_foo",
"10.0.1.20", "10.0.2.10", "10.0.3.30", "10.0.4.40",
"foofoo0", "foofoo", "😀foo0", "ſſs", "ſſS", "AAAAAAAAAAAAAAAAAAAAAAAA", "BBBBBBBBBBBBBBBBBBBBBBBB", "cccccccccccccccccccccccC", "ſſſſſſſſſſſſſſſſſſſſſſſſS", "SSSSSSSSSSSSSSSSSSSSSSSSſ",
"a-b-c-d-e",
"aaaaaa-bbbbbb-cccccc-dddddd-eeeeee",
"aaaaaa----eeeeee",
"----",
"-a-a-a-",
// Values matching / not matching the test regexps on long alternations.
"zQPbMkNO", "zQPbMkNo", "jyyfj00j0061", "jyyfj00j006", "jyyfj00j00612", "NNSPdvMi", "NNSPdvMiXXX", "NNSPdvMixxx", "nnSPdvMi", "nnSPdvMiXXX",
// Invalid utf8
"\xfefoo",
"foo\xfe",
"\xfd",
"\xff\xff",
}
)
// TestFastRegexMatcher_MatchString verifies that FastRegexMatcher agrees with
// the standard library regexp engine for every (pattern, input) combination,
// running against both the predefined corpus and randomly generated inputs.
func TestFastRegexMatcher_MatchString(t *testing.T) {
	// Build the full input corpus: predefined values plus random ones.
	inputs := make([]string, 0, len(values))
	inputs = append(inputs, values...)
	inputs = append(inputs, generateRandomValues()...)
	for _, pattern := range regexes {
		for _, input := range inputs {
			name := readable(pattern) + ` on "` + readable(input) + `"`
			t.Run(name, func(t *testing.T) {
				t.Parallel()
				fast, err := NewFastRegexMatcher(pattern)
				require.NoError(t, err)
				// Reference implementation: fully anchored stdlib regexp with
				// "." matching newline, mirroring the matcher's semantics.
				reference := regexp.MustCompile("^(?s:" + pattern + ")$")
				require.Equal(t, reference.MatchString(input), fast.MatchString(input))
			})
		}
	}
}
func readable(s string) string {
const maxReadableStringLen = 40
if len(s) < maxReadableStringLen {
return s
}
return s[:maxReadableStringLen] + "..."
}
// TestOptimizeConcatRegex checks which literal prefix, literal suffix and
// "contains" substrings optimizeConcatRegex extracts from a parsed regexp.
// Empty prefix/suffix and nil contains mean "nothing extractable".
func TestOptimizeConcatRegex(t *testing.T) {
cases := []struct {
regex string
prefix string
suffix string
contains []string
}{
{regex: "foo(hello|bar)", prefix: "foo", suffix: "", contains: nil},
{regex: "foo(hello|bar)world", prefix: "foo", suffix: "world", contains: nil},
{regex: "foo.*", prefix: "foo", suffix: "", contains: nil},
{regex: "foo.*hello.*bar", prefix: "foo", suffix: "bar", contains: []string{"hello"}},
{regex: ".*foo", prefix: "", suffix: "foo", contains: nil},
{regex: "^.*foo$", prefix: "", suffix: "foo", contains: nil},
{regex: ".*foo.*", prefix: "", suffix: "", contains: []string{"foo"}},
{regex: ".*foo.*bar.*", prefix: "", suffix: "", contains: []string{"foo", "bar"}},
{regex: ".*(foo|bar).*", prefix: "", suffix: "", contains: nil},
{regex: ".*[abc].*", prefix: "", suffix: "", contains: nil},
{regex: ".*((?i)abc).*", prefix: "", suffix: "", contains: nil},
{regex: ".*(?i:abc).*", prefix: "", suffix: "", contains: nil},
{regex: "(?i:abc).*", prefix: "", suffix: "", contains: nil},
{regex: ".*(?i:abc)", prefix: "", suffix: "", contains: nil},
{regex: ".*(?i:abc)def.*", prefix: "", suffix: "", contains: []string{"def"}},
{regex: "(?i).*(?-i:abc)def", prefix: "", suffix: "", contains: []string{"abc"}},
{regex: ".*(?msU:abc).*", prefix: "", suffix: "", contains: []string{"abc"}},
{regex: "[aA]bc.*", prefix: "", suffix: "", contains: []string{"bc"}},
{regex: "^5..$", prefix: "5", suffix: "", contains: nil},
{regex: "^release.*", prefix: "release", suffix: "", contains: nil},
{regex: "^env-[0-9]+laio[1]?[^0-9].*", prefix: "env-", suffix: "", contains: []string{"laio"}},
{regex: ".*-.*-.*-.*-.*", prefix: "", suffix: "", contains: []string{"-", "-", "-", "-"}},
}
// Parse each regexp the same way FastRegexMatcher does (Perl syntax with
// "." matching newline) and compare the extracted parts.
for _, c := range cases {
parsed, err := syntax.Parse(c.regex, syntax.Perl|syntax.DotNL)
require.NoError(t, err)
prefix, suffix, contains := optimizeConcatRegex(parsed)
require.Equal(t, c.prefix, prefix)
require.Equal(t, c.suffix, suffix)
require.Equal(t, c.contains, contains)
}
}
// Refer to https://github.com/prometheus/prometheus/issues/2651.
// TestFindSetMatches checks which finite sets of literal strings
// findSetMatches extracts from a parsed regexp, and whether the extracted set
// is case sensitive. nil expected matches mean the pattern is not optimizable
// into a set.
func TestFindSetMatches(t *testing.T) {
for _, c := range []struct {
pattern string
expMatches []string
expCaseSensitive bool
}{
// Single value, coming from a `bar=~"foo"` selector.
{"foo", []string{"foo"}, true},
{"^foo", []string{"foo"}, true},
{"^foo$", []string{"foo"}, true},
// Simple sets alternates.
{"foo|bar|zz", []string{"foo", "bar", "zz"}, true},
// Simple sets alternate and concat (bar|baz is parsed as "ba[rz]").
{"foo|bar|baz", []string{"foo", "bar", "baz"}, true},
// Simple sets alternate and concat and capture
{"foo|bar|baz|(zz)", []string{"foo", "bar", "baz", "zz"}, true},
// Simple sets alternate and concat and alternates with empty matches
// parsed as b(ar|(?:)|uzz) where b(?:) means literal b.
{"bar|b|buzz", []string{"bar", "b", "buzz"}, true},
// Skip nested capture groups.
{"^((bar|b|buzz))$", []string{"bar", "b", "buzz"}, true},
// Skip outer anchors (it's enforced anyway at the root).
{"^(bar|b|buzz)$", []string{"bar", "b", "buzz"}, true},
{"^(?:prod|production)$", []string{"prod", "production"}, true},
// Do not optimize regexp with inner anchors.
{"(bar|b|b^uz$z)", nil, false},
// Do not optimize regexp with empty string matcher.
{"^$|Running", nil, false},
// Simple sets containing escaped characters.
{"fo\\.o|bar\\?|\\^baz", []string{"fo.o", "bar?", "^baz"}, true},
// using charclass
{"[abc]d", []string{"ad", "bd", "cd"}, true},
// high low charset different => A(B[CD]|EF)|BC[XY]
{"ABC|ABD|AEF|BCX|BCY", []string{"ABC", "ABD", "AEF", "BCX", "BCY"}, true},
// triple concat
{"api_(v1|prom)_push", []string{"api_v1_push", "api_prom_push"}, true},
// triple concat with multiple alternates
{"(api|rpc)_(v1|prom)_push", []string{"api_v1_push", "api_prom_push", "rpc_v1_push", "rpc_prom_push"}, true},
{"(api|rpc)_(v1|prom)_(push|query)", []string{"api_v1_push", "api_v1_query", "api_prom_push", "api_prom_query", "rpc_v1_push", "rpc_v1_query", "rpc_prom_push", "rpc_prom_query"}, true},
// class starting with "-"
{"[-1-2][a-c]", []string{"-a", "-b", "-c", "1a", "1b", "1c", "2a", "2b", "2c"}, true},
{"[1^3]", []string{"1", "3", "^"}, true},
// OpPlus with concat
{"(.+)/(foo|bar)", nil, false},
// Simple sets containing special characters without escaping.
{"fo.o|bar?|^baz", nil, false},
// case sensitive wrapper.
{"(?i)foo", []string{"FOO"}, false},
// case sensitive wrapper on alternate.
{"(?i)foo|bar|baz", []string{"FOO", "BAR", "BAZ", "BAr", "BAz"}, false},
// mixed case sensitivity.
{"(api|rpc)_(v1|prom)_((?i)push|query)", nil, false},
// mixed case sensitivity concatenation only without capture group.
{"api_v1_(?i)push", nil, false},
// mixed case sensitivity alternation only without capture group.
{"api|(?i)rpc", nil, false},
// case sensitive after unsetting insensitivity.
{"rpc|(?i)(?-i)api", []string{"rpc", "api"}, true},
// case sensitive after unsetting insensitivity in all alternation options.
{"(?i)((?-i)api|(?-i)rpc)", []string{"api", "rpc"}, true},
// mixed case sensitivity after unsetting insensitivity.
{"(?i)rpc|(?-i)api", nil, false},
// too high charset combination
{"(api|rpc)_[^0-9]", nil, false},
// too many combinations
{"[a-z][a-z]", nil, false},
} {
t.Run(c.pattern, func(t *testing.T) {
t.Parallel()
parsed, err := syntax.Parse(c.pattern, syntax.Perl|syntax.DotNL)
require.NoError(t, err)
matches, actualCaseSensitive := findSetMatches(parsed)
require.Equal(t, c.expMatches, matches)
require.Equal(t, c.expCaseSensitive, actualCaseSensitive)
if c.expCaseSensitive {
// When the regexp is case sensitive, we want to ensure that the
// set matches are maintained in the final matcher.
r, err := NewFastRegexMatcher(c.pattern)
require.NoError(t, err)
require.Equal(t, c.expMatches, r.SetMatches())
}
})
}
}
// TestFastRegexMatcher_SetMatches_ShouldReturnACopy ensures that mutating the
// slice returned by SetMatches() does not corrupt the matcher's internal state.
func TestFastRegexMatcher_SetMatches_ShouldReturnACopy(t *testing.T) {
	matcher, err := NewFastRegexMatcher("a|b")
	require.NoError(t, err)
	expected := []string{"a", "b"}
	require.Equal(t, expected, matcher.SetMatches())
	// Clobber the returned slice in place...
	returned := matcher.SetMatches()
	returned[0] = "xxx"
	returned[1] = "yyy"
	// ...and verify a fresh call still yields the original values.
	require.Equal(t, expected, matcher.SetMatches())
}
// BenchmarkFastRegexMatcher measures MatchString throughput of every test
// regexp against a corpus of randomly generated input strings.
func BenchmarkFastRegexMatcher(b *testing.B) {
	inputs := generateRandomValues()
	for _, pattern := range regexes {
		b.Run(getTestNameFromRegexp(pattern), func(b *testing.B) {
			matcher, err := NewFastRegexMatcher(pattern)
			require.NoError(b, err)
			// Exclude matcher construction from the measured time.
			b.ResetTimer()
			for b.Loop() {
				for _, input := range inputs {
					_ = matcher.MatchString(input)
				}
			}
		})
	}
}
// BenchmarkToNormalizedLower measures toNormalisedLower across a grid of input
// shapes: length, position of uppercase characters (none/first/last/all) and
// whether the alphabet is pure ASCII or mixed with multi-byte (Cyrillic) runes.
func BenchmarkToNormalizedLower(b *testing.B) {
// benchCase builds one input string of length l for the given uppercase
// placement; alt rotates the alphabet so successive inputs differ.
benchCase := func(l int, uppercase string, asciiOnly bool, alt int) string {
chars := "abcdefghijklmnopqrstuvwxyz"
if !asciiOnly {
chars = "aаbбcвdгeдfеgёhжiзjиkйlкmлnмoнpоqпrрsсtтuуvфwхxцyчzш"
}
// Swap the alphabet to make alternatives.
chars = chars[alt%len(chars):] + chars[:alt%len(chars)]
str := strings.Repeat(chars, l/len(chars)+1)[:l]
switch uppercase {
case "first":
return strings.ToUpper(str[:1]) + str[1:]
case "last":
return str[:len(str)-1] + strings.ToUpper(str[len(str)-1:])
case "all":
return strings.ToUpper(str)
case "none":
return str
default:
panic("invalid uppercase")
}
}
for _, l := range []int{10, 100, 1000, 4000} {
b.Run(fmt.Sprintf("length=%d", l), func(b *testing.B) {
for _, uppercase := range []string{"none", "first", "last", "all"} {
b.Run("uppercase="+uppercase, func(b *testing.B) {
for _, asciiOnly := range []bool{true, false} {
b.Run(fmt.Sprintf("ascii=%t", asciiOnly), func(b *testing.B) {
// Pre-build a small pool of distinct inputs so the benchmarked
// loop cycles through them without re-generating strings.
inputs := make([]string, 10)
for i := range inputs {
inputs[i] = benchCase(l, uppercase, asciiOnly, i)
}
b.ResetTimer()
for n := 0; b.Loop(); n++ {
// Stack-allocated scratch buffer, as used by the real callers.
var a [256]byte
toNormalisedLower(inputs[n%len(inputs)], a[:])
}
})
}
})
}
})
}
}
// TestNewFastRegexMatcher checks which optimized StringMatcher (if any)
// NewFastRegexMatcher builds for each pattern. A nil expectation means the
// pattern falls back to the generic regexp engine with no string matcher.
func TestNewFastRegexMatcher(t *testing.T) {
for _, c := range []struct {
pattern string
exp StringMatcher
}{
{".*", trueMatcher{}},
{".*?", trueMatcher{}},
{"(?s:.*)", trueMatcher{}},
{"(.*)", trueMatcher{}},
{"^.*$", trueMatcher{}},
{".+", &anyNonEmptyStringMatcher{matchNL: true}},
{"(?s:.+)", &anyNonEmptyStringMatcher{matchNL: true}},
{"^.+$", &anyNonEmptyStringMatcher{matchNL: true}},
{"(.+)", &anyNonEmptyStringMatcher{matchNL: true}},
{"", emptyStringMatcher{}},
{"^$", emptyStringMatcher{}},
{"^foo$", &equalStringMatcher{s: "foo", caseSensitive: true}},
{"^(?i:foo)$", &equalStringMatcher{s: "FOO", caseSensitive: false}},
{"^((?i:foo)|(bar))$", orStringMatcher([]StringMatcher{&equalStringMatcher{s: "FOO", caseSensitive: false}, &equalStringMatcher{s: "bar", caseSensitive: true}})},
{`(?i:((foo|bar)))`, orStringMatcher([]StringMatcher{&equalStringMatcher{s: "FOO", caseSensitive: false}, &equalStringMatcher{s: "BAR", caseSensitive: false}})},
{`(?i:((foo1|foo2|bar)))`, orStringMatcher([]StringMatcher{orStringMatcher([]StringMatcher{&equalStringMatcher{s: "FOO1", caseSensitive: false}, &equalStringMatcher{s: "FOO2", caseSensitive: false}}), &equalStringMatcher{s: "BAR", caseSensitive: false}})},
{"^((?i:foo|oo)|(bar))$", orStringMatcher([]StringMatcher{&equalStringMatcher{s: "FOO", caseSensitive: false}, &equalStringMatcher{s: "OO", caseSensitive: false}, &equalStringMatcher{s: "bar", caseSensitive: true}})},
{"(?i:(foo1|foo2|bar))", orStringMatcher([]StringMatcher{orStringMatcher([]StringMatcher{&equalStringMatcher{s: "FOO1", caseSensitive: false}, &equalStringMatcher{s: "FOO2", caseSensitive: false}}), &equalStringMatcher{s: "BAR", caseSensitive: false}})},
{".*foo.*", trueMatcher{}}, // The containsInOrder check done in the function returned by compileMatchStringFunction is sufficient.
{"(.*)foo.*", trueMatcher{}}, // The containsInOrder check done in the function returned by compileMatchStringFunction is sufficient.
{"(.*)foo(.*)", trueMatcher{}}, // The containsInOrder check done in the function returned by compileMatchStringFunction is sufficient.
{"(.+)foo(.*)", &containsStringMatcher{substrings: []string{"foo"}, left: &anyNonEmptyStringMatcher{matchNL: true}, right: trueMatcher{}}},
{"^.+foo.+", &containsStringMatcher{substrings: []string{"foo"}, left: &anyNonEmptyStringMatcher{matchNL: true}, right: &anyNonEmptyStringMatcher{matchNL: true}}},
{"^(.*)(foo)(.*)$", trueMatcher{}}, // The containsInOrder check done in the function returned by compileMatchStringFunction is sufficient.
{"^(.*)(foo|foobar)(.*)$", &containsStringMatcher{substrings: []string{"foo", "foobar"}, left: trueMatcher{}, right: trueMatcher{}}},
{"^(.*)(foo|foobar)(.+)$", &containsStringMatcher{substrings: []string{"foo", "foobar"}, left: trueMatcher{}, right: &anyNonEmptyStringMatcher{matchNL: true}}},
{"^(.*)(bar|b|buzz)(.+)$", &containsStringMatcher{substrings: []string{"bar", "b", "buzz"}, left: trueMatcher{}, right: &anyNonEmptyStringMatcher{matchNL: true}}},
{"10\\.0\\.(1|2)\\.+", nil},
{"10\\.0\\.(1|2).+", &containsStringMatcher{substrings: []string{"10.0.1", "10.0.2"}, left: nil, right: &anyNonEmptyStringMatcher{matchNL: true}}},
{"^.+foo", &literalSuffixStringMatcher{left: &anyNonEmptyStringMatcher{matchNL: true}, suffix: "foo", suffixCaseSensitive: true}},
{"foo-.*$", &literalPrefixSensitiveStringMatcher{prefix: "foo-", right: trueMatcher{}}},
{"(prometheus|api_prom)_api_v1_.+", &containsStringMatcher{substrings: []string{"prometheus_api_v1_", "api_prom_api_v1_"}, left: nil, right: &anyNonEmptyStringMatcher{matchNL: true}}},
{"^((.*)(bar|b|buzz)(.+)|foo)$", orStringMatcher([]StringMatcher{&containsStringMatcher{substrings: []string{"bar", "b", "buzz"}, left: trueMatcher{}, right: &anyNonEmptyStringMatcher{matchNL: true}}, &equalStringMatcher{s: "foo", caseSensitive: true}})},
{"((fo(bar))|.+foo)", orStringMatcher([]StringMatcher{orStringMatcher([]StringMatcher{&equalStringMatcher{s: "fobar", caseSensitive: true}}), &literalSuffixStringMatcher{suffix: "foo", suffixCaseSensitive: true, left: &anyNonEmptyStringMatcher{matchNL: true}}})},
{"(.+)/(gateway|cortex-gw|cortex-gw-internal)", &containsStringMatcher{substrings: []string{"/gateway", "/cortex-gw", "/cortex-gw-internal"}, left: &anyNonEmptyStringMatcher{matchNL: true}, right: nil}},
// we don't support case insensitive matching for contains.
// This is because there's no strings.IndexOfFold function.
// We can revisit later if this is really popular by using strings.ToUpper.
{"^(.*)((?i)foo|foobar)(.*)$", nil},
{"(api|rpc)_(v1|prom)_((?i)push|query)", nil},
{"[a-z][a-z]", nil},
{"[1^3]", nil},
{".*foo.*bar.*", trueMatcher{}}, // The containsInOrder check done in the function returned by compileMatchStringFunction is sufficient.
{`\d*`, nil},
{".", nil},
{"/|/bar.*", &literalPrefixSensitiveStringMatcher{prefix: "/", right: orStringMatcher{emptyStringMatcher{}, &literalPrefixSensitiveStringMatcher{prefix: "bar", right: trueMatcher{}}}}},
// This one is not supported because `stringMatcherFromRegexp` is not reentrant for syntax.OpConcat.
// It would make the code too complex to handle it.
{"(.+)/(foo.*|bar$)", nil},
// Case sensitive alternate with same literal prefix and .* suffix.
{"(xyz-016a-ixb-dp.*|xyz-016a-ixb-op.*)", &literalPrefixSensitiveStringMatcher{prefix: "xyz-016a-ixb-", right: orStringMatcher{&literalPrefixSensitiveStringMatcher{prefix: "dp", right: trueMatcher{}}, &literalPrefixSensitiveStringMatcher{prefix: "op", right: trueMatcher{}}}}},
// Case insensitive alternate with same literal prefix and .* suffix.
{"(?i:(xyz-016a-ixb-dp.*|xyz-016a-ixb-op.*))", &literalPrefixInsensitiveStringMatcher{prefix: "XYZ-016A-IXB-", right: orStringMatcher{&literalPrefixInsensitiveStringMatcher{prefix: "DP", right: trueMatcher{}}, &literalPrefixInsensitiveStringMatcher{prefix: "OP", right: trueMatcher{}}}}},
{"(?i)(xyz-016a-ixb-dp.*|xyz-016a-ixb-op.*)", &literalPrefixInsensitiveStringMatcher{prefix: "XYZ-016A-IXB-", right: orStringMatcher{&literalPrefixInsensitiveStringMatcher{prefix: "DP", right: trueMatcher{}}, &literalPrefixInsensitiveStringMatcher{prefix: "OP", right: trueMatcher{}}}}},
// Concatenated variable length selectors are not supported.
{"foo.*.*", nil},
{"foo.+.+", nil},
{".*.*foo", nil},
{".+.+foo", nil},
{"aaa.?.?", nil},
{"aaa.?.*", nil},
// Regexps with ".?".
{"ext.?|xfs", orStringMatcher{&literalPrefixSensitiveStringMatcher{prefix: "ext", right: &zeroOrOneCharacterStringMatcher{matchNL: true}}, &equalStringMatcher{s: "xfs", caseSensitive: true}}},
{"(?s)(ext.?|xfs)", orStringMatcher{&literalPrefixSensitiveStringMatcher{prefix: "ext", right: &zeroOrOneCharacterStringMatcher{matchNL: true}}, &equalStringMatcher{s: "xfs", caseSensitive: true}}},
{"foo.?", &literalPrefixSensitiveStringMatcher{prefix: "foo", right: &zeroOrOneCharacterStringMatcher{matchNL: true}}},
{"f.?o", nil},
} {
t.Run(c.pattern, func(t *testing.T) {
t.Parallel()
matcher, err := NewFastRegexMatcher(c.pattern)
require.NoError(t, err)
// Compare the internal matcher structure, not just behavior.
require.Equal(t, c.exp, matcher.stringMatcher)
})
}
}
// TestStringMatcherFromRegexp_LiteralPrefix checks that stringMatcherFromRegexp
// builds literal-prefix matchers (sensitive or insensitive) for patterns with
// common literal prefixes, and that the resulting matcher agrees with the
// stdlib regexp engine on matching and non-matching inputs.
func TestStringMatcherFromRegexp_LiteralPrefix(t *testing.T) {
for _, c := range []struct {
pattern string
expectedLiteralPrefixMatchers int
expectedMatches []string
expectedNotMatches []string
}{
// Case sensitive.
{
pattern: "(xyz-016a-ixb-dp.*|xyz-016a-ixb-op.*)",
expectedLiteralPrefixMatchers: 3,
expectedMatches: []string{"xyz-016a-ixb-dp", "xyz-016a-ixb-dpXXX", "xyz-016a-ixb-op", "xyz-016a-ixb-opXXX", "xyz-016a-ixb-dp\n"},
expectedNotMatches: []string{"XYZ-016a-ixb-dp", "xyz-016a-ixb-d", "XYZ-016a-ixb-op", "xyz-016a-ixb-o", "xyz", "dp"},
},
// Case insensitive.
{
pattern: "(?i)(xyz-016a-ixb-dp.*|xyz-016a-ixb-op.*)",
expectedLiteralPrefixMatchers: 3,
expectedMatches: []string{"xyz-016a-ixb-dp", "XYZ-016a-ixb-dpXXX", "xyz-016a-ixb-op", "XYZ-016a-ixb-opXXX", "xyz-016a-ixb-dp\n"},
expectedNotMatches: []string{"xyz-016a-ixb-d", "xyz", "dp"},
},
// Nested literal prefixes, case sensitive.
{
pattern: "(xyz-(aaa-(111.*)|bbb-(222.*)))|(xyz-(aaa-(333.*)|bbb-(444.*)))",
expectedLiteralPrefixMatchers: 10,
expectedMatches: []string{"xyz-aaa-111", "xyz-aaa-111XXX", "xyz-aaa-333", "xyz-aaa-333XXX", "xyz-bbb-222", "xyz-bbb-222XXX", "xyz-bbb-444", "xyz-bbb-444XXX"},
expectedNotMatches: []string{"XYZ-aaa-111", "xyz-aaa-11", "xyz-aaa-222", "xyz-bbb-111"},
},
// Nested literal prefixes, case insensitive.
{
pattern: "(?i)(xyz-(aaa-(111.*)|bbb-(222.*)))|(xyz-(aaa-(333.*)|bbb-(444.*)))",
expectedLiteralPrefixMatchers: 10,
expectedMatches: []string{"xyz-aaa-111", "XYZ-aaa-111XXX", "xyz-aaa-333", "xyz-AAA-333XXX", "xyz-bbb-222", "xyz-BBB-222XXX", "XYZ-bbb-444", "xyz-bbb-444XXX"},
expectedNotMatches: []string{"xyz-aaa-11", "xyz-aaa-222", "xyz-bbb-111"},
},
// Mixed case sensitivity.
{
pattern: "(xyz-((?i)(aaa.*|bbb.*)))",
expectedLiteralPrefixMatchers: 3,
expectedMatches: []string{"xyz-aaa", "xyz-AAA", "xyz-aaaXXX", "xyz-AAAXXX", "xyz-bbb", "xyz-BBBXXX"},
expectedNotMatches: []string{"XYZ-aaa", "xyz-aa", "yz-aaa", "aaa"},
},
} {
t.Run(c.pattern, func(t *testing.T) {
parsed, err := syntax.Parse(c.pattern, syntax.Perl|syntax.DotNL)
require.NoError(t, err)
matcher := stringMatcherFromRegexp(parsed)
require.NotNil(t, matcher)
re := regexp.MustCompile("^(?s:" + c.pattern + ")$")
// Pre-condition check: ensure it contains literalPrefixSensitiveStringMatcher or literalPrefixInsensitiveStringMatcher.
numPrefixMatchers := 0
visitStringMatcher(matcher, func(matcher StringMatcher) {
if _, ok := matcher.(*literalPrefixSensitiveStringMatcher); ok {
numPrefixMatchers++
}
if _, ok := matcher.(*literalPrefixInsensitiveStringMatcher); ok {
numPrefixMatchers++
}
})
require.Equal(t, c.expectedLiteralPrefixMatchers, numPrefixMatchers)
for _, value := range c.expectedMatches {
require.Truef(t, matcher.Matches(value), "Value: %s", value)
// Ensure the golang regexp engine would return the same.
require.Truef(t, re.MatchString(value), "Value: %s", value)
}
for _, value := range c.expectedNotMatches {
require.Falsef(t, matcher.Matches(value), "Value: %s", value)
// Ensure the golang regexp engine would return the same.
require.Falsef(t, re.MatchString(value), "Value: %s", value)
}
})
}
}
// TestStringMatcherFromRegexp_LiteralSuffix checks that stringMatcherFromRegexp
// builds literalSuffixStringMatcher instances for ".*literal" style patterns,
// and that the resulting matcher agrees with the stdlib regexp engine.
func TestStringMatcherFromRegexp_LiteralSuffix(t *testing.T) {
for _, c := range []struct {
pattern string
expectedLiteralSuffixMatchers int
expectedMatches []string
expectedNotMatches []string
}{
// Case sensitive.
{
pattern: "(.*xyz-016a-ixb-dp|.*xyz-016a-ixb-op)",
expectedLiteralSuffixMatchers: 2,
expectedMatches: []string{"xyz-016a-ixb-dp", "XXXxyz-016a-ixb-dp", "xyz-016a-ixb-op", "XXXxyz-016a-ixb-op", "\nxyz-016a-ixb-dp"},
expectedNotMatches: []string{"XYZ-016a-ixb-dp", "yz-016a-ixb-dp", "XYZ-016a-ixb-op", "xyz-016a-ixb-o", "xyz", "dp"},
},
// Case insensitive.
{
pattern: "(?i)(.*xyz-016a-ixb-dp|.*xyz-016a-ixb-op)",
expectedLiteralSuffixMatchers: 2,
expectedMatches: []string{"xyz-016a-ixb-dp", "XYZ-016a-ixb-dp", "XXXxyz-016a-ixb-dp", "XyZ-016a-ixb-op", "XXXxyz-016a-ixb-op", "\nxyz-016a-ixb-dp"},
expectedNotMatches: []string{"yz-016a-ixb-dp", "xyz-016a-ixb-o", "xyz", "dp"},
},
// Nested literal suffixes, case sensitive.
{
pattern: "(.*aaa|.*bbb(.*ccc|.*ddd))",
expectedLiteralSuffixMatchers: 3,
expectedMatches: []string{"aaa", "XXXaaa", "bbbccc", "XXXbbbccc", "XXXbbbXXXccc", "bbbddd", "bbbddd", "XXXbbbddd", "XXXbbbXXXddd", "bbbXXXccc", "aaabbbccc", "aaabbbddd"},
expectedNotMatches: []string{"AAA", "aa", "Xaa", "BBBCCC", "bb", "Xbb", "bbccc", "bbbcc", "bbbdd"},
},
// Mixed case sensitivity.
{
pattern: "(.*aaa|.*bbb((?i)(.*ccc|.*ddd)))",
expectedLiteralSuffixMatchers: 3,
expectedMatches: []string{"aaa", "XXXaaa", "bbbccc", "bbbCCC", "bbbXXXCCC", "bbbddd", "bbbDDD", "bbbXXXddd", "bbbXXXDDD"},
expectedNotMatches: []string{"AAA", "XXXAAA", "BBBccc", "BBBCCC", "aaaBBB"},
},
} {
t.Run(c.pattern, func(t *testing.T) {
parsed, err := syntax.Parse(c.pattern, syntax.Perl|syntax.DotNL)
require.NoError(t, err)
matcher := stringMatcherFromRegexp(parsed)
require.NotNil(t, matcher)
re := regexp.MustCompile("^(?s:" + c.pattern + ")$")
// Pre-condition check: ensure it contains literalSuffixStringMatcher.
numSuffixMatchers := 0
visitStringMatcher(matcher, func(matcher StringMatcher) {
if _, ok := matcher.(*literalSuffixStringMatcher); ok {
numSuffixMatchers++
}
})
require.Equal(t, c.expectedLiteralSuffixMatchers, numSuffixMatchers)
for _, value := range c.expectedMatches {
require.Truef(t, matcher.Matches(value), "Value: %s", value)
// Ensure the golang regexp engine would return the same.
require.Truef(t, re.MatchString(value), "Value: %s", value)
}
for _, value := range c.expectedNotMatches {
require.Falsef(t, matcher.Matches(value), "Value: %s", value)
// Ensure the golang regexp engine would return the same.
require.Falsef(t, re.MatchString(value), "Value: %s", value)
}
})
}
}
// TestStringMatcherFromRegexp_Quest checks that stringMatcherFromRegexp builds
// zeroOrOneCharacterStringMatcher instances for ".?" patterns, honouring the
// matchNL flag, and that the resulting matcher agrees with the stdlib engine.
func TestStringMatcherFromRegexp_Quest(t *testing.T) {
for _, c := range []struct {
pattern string
expectedZeroOrOneMatchers int
expectedMatches []string
expectedNotMatches []string
}{
// Not match newline.
{
pattern: "test.?",
expectedZeroOrOneMatchers: 1,
expectedMatches: []string{"test\n", "test", "test!"},
expectedNotMatches: []string{"tes", "test!!"},
},
{
pattern: ".?test",
expectedZeroOrOneMatchers: 1,
expectedMatches: []string{"\ntest", "test", "!test"},
expectedNotMatches: []string{"tes", "test!"},
},
{
pattern: "(aaa.?|bbb.?)",
expectedZeroOrOneMatchers: 2,
expectedMatches: []string{"aaa", "aaaX", "bbb", "bbbX", "aaa\n", "bbb\n"},
expectedNotMatches: []string{"aa", "aaaXX", "bb", "bbbXX"},
},
{
pattern: ".*aaa.?",
expectedZeroOrOneMatchers: 1,
expectedMatches: []string{"aaa", "Xaaa", "aaaX", "XXXaaa", "XXXaaaX", "XXXaaa\n"},
expectedNotMatches: []string{"aa", "aaaXX", "XXXaaaXXX"},
},
// Match newline.
{
pattern: "(?s)test.?",
expectedZeroOrOneMatchers: 1,
expectedMatches: []string{"test", "test!", "test\n"},
expectedNotMatches: []string{"tes", "test!!", "test\n\n"},
},
// Mixed flags (a part matches newline another doesn't).
{
pattern: "(aaa.?|((?s).?bbb.+))",
expectedZeroOrOneMatchers: 2,
expectedMatches: []string{"aaa", "aaaX", "bbbX", "XbbbX", "bbbXXX", "\nbbbX", "aaa\n"},
expectedNotMatches: []string{"aa", "Xbbb", "\nbbb"},
},
} {
t.Run(c.pattern, func(t *testing.T) {
parsed, err := syntax.Parse(c.pattern, syntax.Perl|syntax.DotNL)
require.NoError(t, err)
matcher := stringMatcherFromRegexp(parsed)
require.NotNil(t, matcher)
re := regexp.MustCompile("^(?s:" + c.pattern + ")$")
// Pre-condition check: ensure it contains zeroOrOneCharacterStringMatcher.
numZeroOrOneMatchers := 0
visitStringMatcher(matcher, func(matcher StringMatcher) {
if _, ok := matcher.(*zeroOrOneCharacterStringMatcher); ok {
numZeroOrOneMatchers++
}
})
require.Equal(t, c.expectedZeroOrOneMatchers, numZeroOrOneMatchers)
for _, value := range c.expectedMatches {
require.Truef(t, matcher.Matches(value), "Value: %s", value)
// Ensure the golang regexp engine would return the same.
require.Truef(t, re.MatchString(value), "Value: %s", value)
}
for _, value := range c.expectedNotMatches {
require.Falsef(t, matcher.Matches(value), "Value: %s", value)
// Ensure the golang regexp engine would return the same.
require.Falsef(t, re.MatchString(value), "Value: %s", value)
}
})
}
}
// randString builds a pseudo-random string of `length` characters drawn from
// the asciiRunes alphabet, using the provided generator.
func randString(randGenerator *rand.Rand, length int) string {
	var builder strings.Builder
	builder.Grow(length)
	for i := 0; i < length; i++ {
		builder.WriteRune(asciiRunes[randGenerator.Intn(len(asciiRunes))])
	}
	return builder.String()
}
func randStrings(randGenerator *rand.Rand, many, length int) []string {
out := make([]string, 0, many)
for range many {
out = append(out, randString(randGenerator, length))
}
return out
}
// randStringsWithSuffix returns `many` random strings of `length` characters,
// each with `suffix` appended.
func randStringsWithSuffix(randGenerator *rand.Rand, many, length int, suffix string) []string {
	out := randStrings(randGenerator, many, length)
	for i, s := range out {
		out[i] = s + suffix
	}
	return out
}
// TestOptimizeEqualOrPrefixStringMatchers checks when
// optimizeEqualOrPrefixStringMatchers() collapses an orStringMatcher of
// equality matchers into a single equalMultiStringSliceMatcher. The
// optimization requires every entry (including nested orStringMatchers) to be
// an equality matcher with the SAME case sensitivity; a nil expectedValues
// means "the optimization must be skipped and the input returned as-is".
func TestOptimizeEqualOrPrefixStringMatchers(t *testing.T) {
	tests := map[string]struct {
		input                 StringMatcher
		expectedValues        []string
		expectedCaseSensitive bool
	}{
		"should skip optimization on orStringMatcher with containsStringMatcher": {
			input: orStringMatcher{
				&equalStringMatcher{s: "FOO", caseSensitive: true},
				&containsStringMatcher{substrings: []string{"a", "b", "c"}},
			},
			expectedValues: nil,
		},
		"should run optimization on orStringMatcher with equalStringMatcher and same case sensitivity": {
			input: orStringMatcher{
				&equalStringMatcher{s: "FOO", caseSensitive: true},
				&equalStringMatcher{s: "bar", caseSensitive: true},
				&equalStringMatcher{s: "baz", caseSensitive: true},
			},
			expectedValues:        []string{"FOO", "bar", "baz"},
			expectedCaseSensitive: true,
		},
		"should skip optimization on orStringMatcher with equalStringMatcher but different case sensitivity": {
			input: orStringMatcher{
				&equalStringMatcher{s: "FOO", caseSensitive: true},
				&equalStringMatcher{s: "bar", caseSensitive: false},
				&equalStringMatcher{s: "baz", caseSensitive: true},
			},
			expectedValues: nil,
		},
		"should run optimization on orStringMatcher with nested orStringMatcher and equalStringMatcher, and same case sensitivity": {
			input: orStringMatcher{
				&equalStringMatcher{s: "FOO", caseSensitive: true},
				orStringMatcher{
					&equalStringMatcher{s: "bar", caseSensitive: true},
					&equalStringMatcher{s: "xxx", caseSensitive: true},
				},
				&equalStringMatcher{s: "baz", caseSensitive: true},
			},
			expectedValues:        []string{"FOO", "bar", "xxx", "baz"},
			expectedCaseSensitive: true,
		},
		"should skip optimization on orStringMatcher with nested orStringMatcher and equalStringMatcher, but different case sensitivity": {
			input: orStringMatcher{
				&equalStringMatcher{s: "FOO", caseSensitive: true},
				orStringMatcher{
					// Case sensitivity is different within items at the same level.
					&equalStringMatcher{s: "bar", caseSensitive: true},
					&equalStringMatcher{s: "xxx", caseSensitive: false},
				},
				&equalStringMatcher{s: "baz", caseSensitive: true},
			},
			expectedValues: nil,
		},
		"should skip optimization on orStringMatcher with nested orStringMatcher and equalStringMatcher, but different case sensitivity in the nested one": {
			input: orStringMatcher{
				&equalStringMatcher{s: "FOO", caseSensitive: true},
				// Case sensitivity is different between the parent and child.
				orStringMatcher{
					&equalStringMatcher{s: "bar", caseSensitive: false},
					&equalStringMatcher{s: "xxx", caseSensitive: false},
				},
				&equalStringMatcher{s: "baz", caseSensitive: true},
			},
			expectedValues: nil,
		},
		"should return unchanged values on few case insensitive matchers": {
			input: orStringMatcher{
				&equalStringMatcher{s: "FOO", caseSensitive: false},
				orStringMatcher{
					&equalStringMatcher{s: "bAr", caseSensitive: false},
				},
				&equalStringMatcher{s: "baZ", caseSensitive: false},
			},
			expectedValues:        []string{"FOO", "bAr", "baZ"},
			expectedCaseSensitive: false,
		},
	}
	for testName, testData := range tests {
		t.Run(testName, func(t *testing.T) {
			actualMatcher := optimizeEqualOrPrefixStringMatchers(testData.input, 0)
			if testData.expectedValues == nil {
				// Not optimizable: the input matcher type must be preserved.
				require.IsType(t, testData.input, actualMatcher)
			} else {
				require.IsType(t, &equalMultiStringSliceMatcher{}, actualMatcher)
				require.Equal(t, testData.expectedValues, actualMatcher.(*equalMultiStringSliceMatcher).values)
				require.Equal(t, testData.expectedCaseSensitive, actualMatcher.(*equalMultiStringSliceMatcher).caseSensitive)
			}
		})
	}
}
// TestNewEqualMultiStringMatcher verifies that newEqualMultiStringMatcher()
// picks the right backing implementation: a slice-based matcher for few plain
// values, and a map-based matcher when prefixes are involved or the number of
// values is large. Exactly one of expectedValuesMap/expectedPrefixesMap or
// expectedValuesList is set per case, selecting which implementation is
// asserted.
func TestNewEqualMultiStringMatcher(t *testing.T) {
	tests := map[string]struct {
		values                []string
		caseSensitivePrefixes []*literalPrefixSensitiveStringMatcher
		caseSensitive         bool
		expectedValuesMap     map[string]struct{}
		expectedPrefixesMap   map[string][]StringMatcher
		expectedValuesList    []string
	}{
		"few case sensitive values": {
			values:             []string{"a", "B"},
			caseSensitive:      true,
			expectedValuesList: []string{"a", "B"},
		},
		"few case insensitive values": {
			values:             []string{"a", "B"},
			caseSensitive:      false,
			expectedValuesList: []string{"a", "B"},
		},
		"few case sensitive values and prefixes": {
			values:                []string{"a"},
			caseSensitivePrefixes: []*literalPrefixSensitiveStringMatcher{{prefix: "B", right: anyStringWithoutNewlineMatcher{}}},
			caseSensitive:         true,
			expectedValuesMap:     map[string]struct{}{"a": {}},
			expectedPrefixesMap:   map[string][]StringMatcher{"B": {&literalPrefixSensitiveStringMatcher{prefix: "B", right: anyStringWithoutNewlineMatcher{}}}},
		},
		"many case sensitive values": {
			values:              []string{"a", "B", "c", "D", "e", "F", "g", "H", "i", "L", "m", "N", "o", "P", "q", "r"},
			caseSensitive:       true,
			expectedValuesMap:   map[string]struct{}{"a": {}, "B": {}, "c": {}, "D": {}, "e": {}, "F": {}, "g": {}, "H": {}, "i": {}, "L": {}, "m": {}, "N": {}, "o": {}, "P": {}, "q": {}, "r": {}},
			expectedPrefixesMap: map[string][]StringMatcher{},
		},
		"many case insensitive values": {
			values: []string{"a", "B", "c", "D", "e", "F", "g", "H", "i", "L", "m", "N", "o", "P", "q", "r"},
			caseSensitive: false,
			// Case-insensitive map matcher stores values lower-cased.
			expectedValuesMap:   map[string]struct{}{"a": {}, "b": {}, "c": {}, "d": {}, "e": {}, "f": {}, "g": {}, "h": {}, "i": {}, "l": {}, "m": {}, "n": {}, "o": {}, "p": {}, "q": {}, "r": {}},
			expectedPrefixesMap: map[string][]StringMatcher{},
		},
	}
	for testName, testData := range tests {
		t.Run(testName, func(t *testing.T) {
			// Use a min prefix length of 1 when the case has prefixes, and 0
			// otherwise, to keep the test simple.
			minPrefixLength := 0
			if len(testData.caseSensitivePrefixes) > 0 {
				minPrefixLength = 1
			}
			matcher := newEqualMultiStringMatcher(testData.caseSensitive, len(testData.values), len(testData.caseSensitivePrefixes), minPrefixLength)
			for _, v := range testData.values {
				matcher.add(v)
			}
			for _, p := range testData.caseSensitivePrefixes {
				matcher.addPrefix(p.prefix, true, p)
			}
			if testData.expectedValuesMap != nil || testData.expectedPrefixesMap != nil {
				require.IsType(t, &equalMultiStringMapMatcher{}, matcher)
				require.Equal(t, testData.expectedValuesMap, matcher.(*equalMultiStringMapMatcher).values)
				require.Equal(t, testData.expectedPrefixesMap, matcher.(*equalMultiStringMapMatcher).prefixes)
				require.Equal(t, testData.caseSensitive, matcher.(*equalMultiStringMapMatcher).caseSensitive)
			}
			if testData.expectedValuesList != nil {
				require.IsType(t, &equalMultiStringSliceMatcher{}, matcher)
				require.Equal(t, testData.expectedValuesList, matcher.(*equalMultiStringSliceMatcher).values)
				require.Equal(t, testData.caseSensitive, matcher.(*equalMultiStringSliceMatcher).caseSensitive)
			}
		})
	}
}
// TestEqualMultiStringMapMatcher_addPrefix asserts that addPrefix() panics
// when the case sensitivity of the added prefix disagrees with the case
// sensitivity of the matcher it is added to.
func TestEqualMultiStringMapMatcher_addPrefix(t *testing.T) {
	t.Run("should panic if the matcher is case sensitive but the prefix is not case sensitive", func(t *testing.T) {
		sensitiveMatcher := newEqualMultiStringMatcher(true, 0, 1, 1)
		require.Panics(t, func() {
			sensitiveMatcher.addPrefix("a", false, &literalPrefixInsensitiveStringMatcher{prefix: "a"})
		})
	})

	t.Run("should panic if the matcher is not case sensitive but the prefix is case sensitive", func(t *testing.T) {
		insensitiveMatcher := newEqualMultiStringMatcher(false, 0, 1, 1)
		require.Panics(t, func() {
			insensitiveMatcher.addPrefix("a", true, &literalPrefixSensitiveStringMatcher{prefix: "a"})
		})
	})
}
// TestEqualMultiStringMatcher_Matches checks Matches() behaviour of the
// multi-string matcher across exact values and literal-prefix matchers, in
// both case-sensitive and case-insensitive configurations, and for both the
// slice-backed ("few") and map-backed ("many") implementations.
func TestEqualMultiStringMatcher_Matches(t *testing.T) {
	tests := map[string]struct {
		values             []string
		prefixes           []StringMatcher
		caseSensitive      bool
		expectedMatches    []string
		expectedNotMatches []string
	}{
		"few case sensitive values": {
			values:             []string{"a", "B"},
			caseSensitive:      true,
			expectedMatches:    []string{"a", "B"},
			expectedNotMatches: []string{"A", "b"},
		},
		"few case insensitive values": {
			values:             []string{"a", "B"},
			caseSensitive:      false,
			expectedMatches:    []string{"a", "A", "b", "B"},
			expectedNotMatches: []string{"c", "C"},
		},
		"few case sensitive prefixes": {
			prefixes: []StringMatcher{
				&literalPrefixSensitiveStringMatcher{prefix: "a", right: anyStringWithoutNewlineMatcher{}},
				&literalPrefixSensitiveStringMatcher{prefix: "B", right: anyStringWithoutNewlineMatcher{}},
			},
			caseSensitive:      true,
			expectedMatches:    []string{"a", "aX", "B", "BX"},
			expectedNotMatches: []string{"A", "b"},
		},
		"few case insensitive prefixes": {
			prefixes: []StringMatcher{
				&literalPrefixInsensitiveStringMatcher{prefix: "a", right: anyStringWithoutNewlineMatcher{}},
				&literalPrefixInsensitiveStringMatcher{prefix: "B", right: anyStringWithoutNewlineMatcher{}},
			},
			caseSensitive:      false,
			expectedMatches:    []string{"a", "aX", "A", "AX", "b", "bX", "B", "BX"},
			expectedNotMatches: []string{"c", "cX", "C", "CX"},
		},
		"many case sensitive values": {
			values:             []string{"a", "B", "c", "D", "e", "F", "g", "H", "i", "L", "m", "N", "o", "P", "q", "r"},
			caseSensitive:      true,
			expectedMatches:    []string{"a", "B"},
			expectedNotMatches: []string{"A", "b"},
		},
		"many case insensitive values": {
			values:             []string{"a", "B", "c", "D", "e", "F", "g", "H", "i", "L", "m", "N", "o", "P", "q", "r"},
			caseSensitive:      false,
			expectedMatches:    []string{"a", "A", "b", "B"},
			expectedNotMatches: []string{"x", "X"},
		},
		"mixed values and prefixes": {
			values:             []string{"a"},
			prefixes:           []StringMatcher{&literalPrefixSensitiveStringMatcher{prefix: "B", right: anyStringWithoutNewlineMatcher{}}},
			caseSensitive:      true,
			expectedMatches:    []string{"a", "B", "BX"},
			expectedNotMatches: []string{"aX", "A", "b", "bX"},
		},
	}
	for testName, testData := range tests {
		t.Run(testName, func(t *testing.T) {
			// Use a min prefix length of 1 when the case has prefixes, and 0
			// otherwise, to keep the test simple.
			minPrefixLength := 0
			if len(testData.prefixes) > 0 {
				minPrefixLength = 1
			}
			matcher := newEqualMultiStringMatcher(testData.caseSensitive, len(testData.values), len(testData.prefixes), minPrefixLength)
			for _, v := range testData.values {
				matcher.add(v)
			}
			// Register each prefix with the case sensitivity implied by its
			// concrete matcher type.
			for _, p := range testData.prefixes {
				switch m := p.(type) {
				case *literalPrefixSensitiveStringMatcher:
					matcher.addPrefix(m.prefix, true, p)
				case *literalPrefixInsensitiveStringMatcher:
					matcher.addPrefix(m.prefix, false, p)
				default:
					panic("Unexpected type in test case")
				}
			}
			for _, v := range testData.expectedMatches {
				require.True(t, matcher.Matches(v), "value: %s", v)
			}
			for _, v := range testData.expectedNotMatches {
				require.False(t, matcher.Matches(v), "value: %s", v)
			}
		})
	}
}
// TestFindEqualOrPrefixStringMatchers checks that findEqualOrPrefixStringMatchers()
// visits every equality and literal-prefix matcher in an orStringMatcher tree
// (reporting each value/prefix with its case sensitivity), and returns false
// for matcher types it cannot decompose.
func TestFindEqualOrPrefixStringMatchers(t *testing.T) {
	type match struct {
		s             string
		caseSensitive bool
	}
	// Utility to call findEqualOrPrefixStringMatchers() and collect all callback invocations.
	findEqualOrPrefixStringMatchersAndCollectMatches := func(input StringMatcher) (matches []match, ok bool) {
		ok = findEqualOrPrefixStringMatchers(input, func(matcher *equalStringMatcher) bool {
			matches = append(matches, match{matcher.s, matcher.caseSensitive})
			return true
		}, func(prefix string, prefixCaseSensitive bool, _ StringMatcher) bool {
			matches = append(matches, match{prefix, prefixCaseSensitive})
			return true
		})
		return matches, ok
	}
	t.Run("empty matcher", func(t *testing.T) {
		actualMatches, actualOk := findEqualOrPrefixStringMatchersAndCollectMatches(emptyStringMatcher{})
		require.False(t, actualOk)
		require.Empty(t, actualMatches)
	})
	t.Run("concat of literal matchers (case sensitive)", func(t *testing.T) {
		actualMatches, actualOk := findEqualOrPrefixStringMatchersAndCollectMatches(
			orStringMatcher{
				&equalStringMatcher{s: "test-1", caseSensitive: true},
				&equalStringMatcher{s: "test-2", caseSensitive: true},
			},
		)
		require.True(t, actualOk)
		require.Equal(t, []match{{"test-1", true}, {"test-2", true}}, actualMatches)
	})
	t.Run("concat of literal matchers (case insensitive)", func(t *testing.T) {
		actualMatches, actualOk := findEqualOrPrefixStringMatchersAndCollectMatches(
			orStringMatcher{
				&equalStringMatcher{s: "test-1", caseSensitive: false},
				&equalStringMatcher{s: "test-2", caseSensitive: false},
			},
		)
		require.True(t, actualOk)
		require.Equal(t, []match{{"test-1", false}, {"test-2", false}}, actualMatches)
	})
	t.Run("concat of literal matchers (mixed case)", func(t *testing.T) {
		actualMatches, actualOk := findEqualOrPrefixStringMatchersAndCollectMatches(
			orStringMatcher{
				&equalStringMatcher{s: "test-1", caseSensitive: false},
				&equalStringMatcher{s: "test-2", caseSensitive: true},
			},
		)
		require.True(t, actualOk)
		require.Equal(t, []match{{"test-1", false}, {"test-2", true}}, actualMatches)
	})
	t.Run("concat of literal prefix matchers (case sensitive)", func(t *testing.T) {
		actualMatches, actualOk := findEqualOrPrefixStringMatchersAndCollectMatches(
			orStringMatcher{
				&literalPrefixSensitiveStringMatcher{prefix: "test-1"},
				&literalPrefixSensitiveStringMatcher{prefix: "test-2"},
			},
		)
		require.True(t, actualOk)
		require.Equal(t, []match{{"test-1", true}, {"test-2", true}}, actualMatches)
	})
	t.Run("concat of literal prefix matchers (case insensitive)", func(t *testing.T) {
		actualMatches, actualOk := findEqualOrPrefixStringMatchersAndCollectMatches(
			orStringMatcher{
				&literalPrefixInsensitiveStringMatcher{prefix: "test-1"},
				&literalPrefixInsensitiveStringMatcher{prefix: "test-2"},
			},
		)
		require.True(t, actualOk)
		require.Equal(t, []match{{"test-1", false}, {"test-2", false}}, actualMatches)
	})
	t.Run("concat of literal prefix matchers (mixed case)", func(t *testing.T) {
		actualMatches, actualOk := findEqualOrPrefixStringMatchersAndCollectMatches(
			orStringMatcher{
				&literalPrefixInsensitiveStringMatcher{prefix: "test-1"},
				&literalPrefixSensitiveStringMatcher{prefix: "test-2"},
			},
		)
		require.True(t, actualOk)
		require.Equal(t, []match{{"test-1", false}, {"test-2", true}}, actualMatches)
	})
	t.Run("concat of literal string and prefix matchers (case sensitive)", func(t *testing.T) {
		actualMatches, actualOk := findEqualOrPrefixStringMatchersAndCollectMatches(
			orStringMatcher{
				&equalStringMatcher{s: "test-1", caseSensitive: true},
				&literalPrefixSensitiveStringMatcher{prefix: "test-2"},
			},
		)
		require.True(t, actualOk)
		require.Equal(t, []match{{"test-1", true}, {"test-2", true}}, actualMatches)
	})
}
// This benchmark is used to find a good threshold to use to apply the optimization
// done by optimizeEqualOrPrefixStringMatchers(). It compares the unoptimized
// orStringMatcher against the optimized multi-string matcher across a grid of
// alternation counts, case sensitivity, and prefix-vs-exact matching.
func BenchmarkOptimizeEqualOrPrefixStringMatchers(b *testing.B) {
	randGenerator := rand.New(rand.NewSource(time.Now().UnixNano()))
	// Generate variable lengths random texts to match against.
	texts := append([]string{}, randStrings(randGenerator, 10, 10)...)
	texts = append(texts, randStrings(randGenerator, 5, 30)...)
	texts = append(texts, randStrings(randGenerator, 1, 100)...)
	for numAlternations := 2; numAlternations <= 256; numAlternations *= 2 {
		for _, caseSensitive := range []bool{true, false} {
			for _, prefixMatcher := range []bool{true, false} {
				b.Run(fmt.Sprintf("alternations: %d case sensitive: %t prefix matcher: %t", numAlternations, caseSensitive, prefixMatcher), func(b *testing.B) {
					// If the test should run on prefix matchers, we add a wildcard matcher as suffix (prefix will be a literal).
					suffix := ""
					if prefixMatcher {
						suffix = ".*"
					}
					// Generate a regex with the expected number of alternations.
					re := strings.Join(randStringsWithSuffix(randGenerator, numAlternations, 10, suffix), "|")
					if !caseSensitive {
						re = "(?i:(" + re + "))"
					}
					b.Logf("regexp: %s", re)
					parsed, err := syntax.Parse(re, syntax.Perl|syntax.DotNL)
					require.NoError(b, err)
					unoptimized := stringMatcherFromRegexpInternal(parsed)
					require.IsType(b, orStringMatcher{}, unoptimized)
					optimized := optimizeEqualOrPrefixStringMatchers(unoptimized, 0)
					// Sanity-check which optimized implementation we got, so the
					// two sub-benchmarks below compare what we think they compare.
					if numAlternations < minEqualMultiStringMatcherMapThreshold && !prefixMatcher {
						require.IsType(b, &equalMultiStringSliceMatcher{}, optimized)
					} else {
						require.IsType(b, &equalMultiStringMapMatcher{}, optimized)
					}
					b.Run("without optimizeEqualOrPrefixStringMatchers()", func(b *testing.B) {
						for b.Loop() {
							for _, t := range texts {
								unoptimized.Matches(t)
							}
						}
					})
					b.Run("with optimizeEqualOrPrefixStringMatchers()", func(b *testing.B) {
						for b.Loop() {
							for _, t := range texts {
								optimized.Matches(t)
							}
						}
					})
				})
			}
		}
	}
}
// TestZeroOrOneCharacterStringMatcher covers the ".?" matcher: empty input,
// single character, newline handling, multi-rune emoji, and invalid UTF-8.
func TestZeroOrOneCharacterStringMatcher(t *testing.T) {
	t.Run("match newline", func(t *testing.T) {
		withNL := &zeroOrOneCharacterStringMatcher{matchNL: true}
		for input, expected := range map[string]bool{
			"":     true,
			"x":    true,
			"\n":   true,
			"xx":   false,
			"\n\n": false,
		} {
			require.Equal(t, expected, withNL.Matches(input), "input: %q", input)
		}
	})

	t.Run("do not match newline", func(t *testing.T) {
		withoutNL := &zeroOrOneCharacterStringMatcher{matchNL: false}
		for input, expected := range map[string]bool{
			"":     true,
			"x":    true,
			"\n":   false,
			"xx":   false,
			"\n\n": false,
		} {
			require.Equal(t, expected, withoutNL.Matches(input), "input: %q", input)
		}
	})

	t.Run("unicode", func(t *testing.T) {
		// Just for documentation purposes, emoji1 is 1 rune, emoji2 is 2 runes.
		// Having this in mind, will make future readers fixing tests easier.
		emoji1 := "😀"
		emoji2 := "❤️"
		require.Equal(t, 1, utf8.RuneCountInString(emoji1))
		require.Equal(t, 2, utf8.RuneCountInString(emoji2))

		matcher := &zeroOrOneCharacterStringMatcher{matchNL: true}
		require.True(t, matcher.Matches(emoji1))
		for _, input := range []string{emoji2, emoji1 + emoji1, "x" + emoji1, emoji1 + "x", emoji1 + emoji2} {
			require.False(t, matcher.Matches(input), "input: %q", input)
		}
	})

	t.Run("invalid unicode", func(t *testing.T) {
		// Just for reference, we also compare to what `^.?$` regular expression matches.
		re := regexp.MustCompile("^.?$")
		matcher := &zeroOrOneCharacterStringMatcher{matchNL: true}

		requireMatches := func(s string, expected bool) {
			t.Helper()
			require.Equal(t, expected, matcher.Matches(s))
			require.Equal(t, re.MatchString(s), matcher.Matches(s))
		}

		requireMatches("\xff", true)
		requireMatches("x\xff", false)
		requireMatches("\xffx", false)
		requireMatches("\xff\xfe", false)
	})
}
// BenchmarkZeroOrOneCharacterStringMatcher measures Matches() across a mix of
// empty, ASCII, newline, emoji, and long inputs, verifying the expected result
// on every iteration so a regression cannot silently skew the numbers.
func BenchmarkZeroOrOneCharacterStringMatcher(b *testing.B) {
	emoji1 := "😀"
	emoji2 := "❤️"

	inputs := []string{
		"",
		"x",
		"\n",
		"xx",
		"\n\n",
		emoji1,
		emoji2,
		emoji1 + emoji1,
		strings.Repeat("x", 100),
		strings.Repeat(emoji1, 100),
		strings.Repeat(emoji2, 100),
	}
	expected := []bool{
		true,
		true,
		true,
		false,
		false,
		true,
		false,
		false,
		false,
		false,
		false,
	}

	matcher := &zeroOrOneCharacterStringMatcher{matchNL: true}
	for n := 0; b.Loop(); n++ {
		idx := n % len(inputs)
		if got := matcher.Matches(inputs[idx]); got != expected[idx] {
			b.Fatalf("unexpected result for %q: got %t, want %t", inputs[idx], got, expected[idx])
		}
	}
}
// TestLiteralPrefixSensitiveStringMatcher checks the case-sensitive literal
// prefix matcher combined with different "right side" matchers.
func TestLiteralPrefixSensitiveStringMatcher(t *testing.T) {
	// Prefix "mar" followed by the empty string: only exactly "mar" matches.
	m := &literalPrefixSensitiveStringMatcher{prefix: "mar", right: &emptyStringMatcher{}}
	for input, expected := range map[string]bool{
		"mar":   true,
		"marco": false,
		"ma":    false,
		"mAr":   false,
	} {
		require.Equal(t, expected, m.Matches(input), "input: %q", input)
	}

	// Prefix "mar" followed by a case-insensitive "co".
	m = &literalPrefixSensitiveStringMatcher{prefix: "mar", right: &equalStringMatcher{s: "co", caseSensitive: false}}
	for input, expected := range map[string]bool{
		"marco":         true,
		"marCO":         true,
		"MARco":         false,
		"mar":           false,
		"marcopracucci": false,
	} {
		require.Equal(t, expected, m.Matches(input), "input: %q", input)
	}
}
// TestLiteralPrefixInsensitiveStringMatcher checks that the prefix is matched
// ignoring case while the remainder must satisfy the "right side" matcher.
func TestLiteralPrefixInsensitiveStringMatcher(t *testing.T) {
	m := &literalPrefixInsensitiveStringMatcher{prefix: "mar", right: &emptyStringMatcher{}}
	for input, expected := range map[string]bool{
		"mar":   true,
		"marco": false,
		"ma":    false,
		"mAr":   true,
	} {
		require.Equal(t, expected, m.Matches(input), "input: %q", input)
	}
}
// TestLiteralSuffixStringMatcher checks the literal suffix matcher with both
// case-sensitive and case-insensitive suffixes and different "left" matchers.
func TestLiteralSuffixStringMatcher(t *testing.T) {
	for _, tc := range []struct {
		matcher  *literalSuffixStringMatcher
		expected map[string]bool
	}{
		{
			matcher:  &literalSuffixStringMatcher{left: &emptyStringMatcher{}, suffix: "co", suffixCaseSensitive: true},
			expected: map[string]bool{"co": true, "marco": false, "coo": false, "Co": false},
		},
		{
			matcher:  &literalSuffixStringMatcher{left: &emptyStringMatcher{}, suffix: "co", suffixCaseSensitive: false},
			expected: map[string]bool{"co": true, "marco": false, "coo": false, "Co": true},
		},
		{
			matcher:  &literalSuffixStringMatcher{left: &equalStringMatcher{s: "mar", caseSensitive: false}, suffix: "co", suffixCaseSensitive: true},
			expected: map[string]bool{"marco": true, "MARco": true, "marCO": false, "mar": false, "marcopracucci": false},
		},
		{
			matcher:  &literalSuffixStringMatcher{left: &equalStringMatcher{s: "mar", caseSensitive: false}, suffix: "co", suffixCaseSensitive: false},
			expected: map[string]bool{"marco": true, "MARco": true, "marCO": true, "mar": false, "marcopracucci": false},
		},
	} {
		for input, expected := range tc.expected {
			require.Equal(t, expected, tc.matcher.Matches(input), "input: %q", input)
		}
	}
}
// TestHasPrefixCaseInsensitive verifies case-insensitive prefix detection.
func TestHasPrefixCaseInsensitive(t *testing.T) {
	for _, tc := range []struct {
		s, prefix string
		want      bool
	}{
		{"marco", "mar", true},
		{"mArco", "mar", true},
		{"marco", "MaR", true},
		{"marco", "marco", true},
		{"mArco", "marco", true},
		{"marco", "a", false},
		{"marco", "abcdefghi", false},
	} {
		require.Equal(t, tc.want, hasPrefixCaseInsensitive(tc.s, tc.prefix), "s: %q prefix: %q", tc.s, tc.prefix)
	}
}
// TestHasSuffixCaseInsensitive verifies case-insensitive suffix detection.
func TestHasSuffixCaseInsensitive(t *testing.T) {
	for _, tc := range []struct {
		s, suffix string
		want      bool
	}{
		{"marco", "rco", true},
		{"marco", "RcO", true},
		{"marco", "marco", true},
		{"marco", "a", false},
		{"marco", "abcdefghi", false},
	} {
		require.Equal(t, tc.want, hasSuffixCaseInsensitive(tc.s, tc.suffix), "s: %q suffix: %q", tc.s, tc.suffix)
	}
}
// TestContainsInOrder verifies that containsInOrder() only reports true when
// every substring occurs in the text, in the given order (each match searched
// after the previous one, as the {"ab","cd","de"} case shows).
func TestContainsInOrder(t *testing.T) {
	for _, tc := range []struct {
		text     string
		subs     []string
		expected bool
	}{
		{"abcdefghilmno", []string{"ab", "cd", "no"}, true},
		{"abcdefghilmno", []string{"def", "hil"}, true},
		{"abcdefghilmno", []string{"ac"}, false},
		{"abcdefghilmno", []string{"ab", "cd", "de"}, false},
		{"abcdefghilmno", []string{"cd", "ab"}, false},
	} {
		require.Equal(t, tc.expected, containsInOrder(tc.text, tc.subs))
	}
}
// getTestNameFromRegexp shortens a regexp pattern so it can be used as a
// readable (sub)test name, keeping at most the first 32 bytes.
func getTestNameFromRegexp(re string) string {
	const maxLen = 32
	if len(re) <= maxLen {
		return re
	}
	return re[:maxLen]
}
// generateRandomValues returns a deterministic set of pseudo-random strings of
// mixed lengths, plus two values carrying a literal "foo" prefix/suffix.
func generateRandomValues() []string {
	// Init the random seed with a constant, so that it doesn't change between runs.
	gen := rand.New(rand.NewSource(1))

	var texts []string
	texts = append(texts, randStrings(gen, 10, 10)...)
	texts = append(texts, randStrings(gen, 5, 30)...)
	texts = append(texts, randStrings(gen, 1, 100)...)
	texts = append(texts, "foo"+randString(gen, 50))
	texts = append(texts, randString(gen, 50)+"foo")
	return texts
}
// visitStringMatcher walks the matcher tree depth-first, invoking callback for
// every matcher encountered (the root included) and recursing into any nested
// matchers.
func visitStringMatcher(matcher StringMatcher, callback func(matcher StringMatcher)) {
	callback(matcher)
	switch casted := matcher.(type) {
	case *containsStringMatcher:
		if casted.left != nil {
			visitStringMatcher(casted.left, callback)
		}
		if casted.right != nil {
			visitStringMatcher(casted.right, callback)
		}
	case *literalPrefixSensitiveStringMatcher:
		visitStringMatcher(casted.right, callback)
	case *literalPrefixInsensitiveStringMatcher:
		visitStringMatcher(casted.right, callback)
	case *literalSuffixStringMatcher:
		visitStringMatcher(casted.left, callback)
	case orStringMatcher:
		for _, entry := range casted {
			visitStringMatcher(entry, callback)
		}
	case *equalMultiStringMapMatcher:
		// Recurse into the prefix matchers stored in the map.
		for _, prefixes := range casted.prefixes {
			for _, matcher := range prefixes {
				visitStringMatcher(matcher, callback)
			}
		}
	// No nested matchers for the following ones.
	case emptyStringMatcher:
	case *equalStringMatcher:
	case *equalMultiStringSliceMatcher:
	case anyStringWithoutNewlineMatcher:
	case *anyNonEmptyStringMatcher:
	case trueMatcher:
	}
}
// TestToNormalisedLower checks ASCII lower-casing plus Unicode folding
// (e.g. the long s 'ſ' folds to 's').
func TestToNormalisedLower(t *testing.T) {
	for _, tc := range []struct{ input, expected string }{
		{"foo", "foo"},
		{"FOO", "foo"},
		{"Foo", "foo"},
		{"foO", "foo"},
		{"fOo", "foo"},
		{"AAAAAAAAAAAAAAAAAAAAAAAA", "aaaaaaaaaaaaaaaaaaaaaaaa"},
		{"cccccccccccccccccccccccC", "cccccccccccccccccccccccc"},
		{"ſſſſſſſſſſſſſſſſſſſſſſſſS", "sssssssssssssssssssssssss"},
		{"ſſAſſa", "ssassa"},
	} {
		require.Equal(t, tc.expected, toNormalisedLower(tc.input, nil))
	}
}
// TestIsSimpleConcatenationPattern exercises isSimpleConcatenationPattern() on
// a few accepted and rejected parsed patterns.
func TestIsSimpleConcatenationPattern(t *testing.T) {
	for _, tc := range []struct {
		pattern  string
		expected bool
	}{
		{".*-.*-.*-.*-.*", true},
		{".+-.*-.*-.*-.+", false},
		{"-.*-.*-.*-.*", false},
		{".*-.*-.*-.*-", false},
		{"-", false},
		{".*", false},
	} {
		t.Run(tc.pattern, func(t *testing.T) {
			parsed, err := syntax.Parse(tc.pattern, syntax.Perl|syntax.DotNL)
			require.NoError(t, err)
			require.Equal(t, tc.expected, isSimpleConcatenationPattern(parsed))
		})
	}
}
// BenchmarkFastRegexMatcher_ConcatenatedPattern measures MatchString() for the
// ".*-.*-.*-.*-.*" pattern against a mix of matching and non-matching inputs.
func BenchmarkFastRegexMatcher_ConcatenatedPattern(b *testing.B) {
	matcher, err := NewFastRegexMatcher(".*-.*-.*-.*-.*")
	require.NoError(b, err)

	inputs := []string{
		"a-b-c-d-e",
		"aaaaaa-bbbbbb-cccccc-dddddd-eeeeee",
		"aaaaaa----eeeeee",
		"----",
		"-a-a-a-",
		"abcd",
	}

	for b.Loop() {
		for _, input := range inputs {
			matcher.MatchString(input)
		}
	}
}
|
go
|
github
|
https://github.com/prometheus/prometheus
|
model/labels/regexp_test.go
|
import Vapor
import XCTest
/// Base test case for Vapor applications.
///
/// Subclasses get an `Application` created from the static `XCTVapor.app`
/// factory before each test, and the application is shut down automatically
/// after each test.
open class XCTVaporTests: XCTestCase {
    /// The application under test; assigned in `setUpWithError()`/`setUp()`
    /// and shut down and cleared in `tearDown()`.
    open var app: Application!
    open override func setUpWithError() throws {
        // The XCTest runner calls this function before setUp()
        try super.setUpWithError()
        // this optional check due to prior usage by users
        // see: https://github.com/vapor/vapor/pull/2585#issuecomment-802144636
        if let _app = XCTVapor.app {
            self.app = try _app()
        }
    }
    open override func setUp() {
        // The XCTest runner calls this after setUpWithError()
        super.setUp()
        guard let _app = XCTVapor.app else {
            fatalError("implement static app generator")
        }
        if self.app == nil {
            // this was the behavior of this class pre 4.41.5
            // keeping for compatibility however it will crash if
            // the function throws. Provided the user assigns to
            // XCTVapor.app in the class setUp or setUpWithError everything will work
            // as we intend.
            self.app = try! _app()
        }
    }
    open override func tearDown() {
        super.tearDown()
        // Release the app's resources between tests.
        self.app?.shutdown()
        self.app = nil
    }
}
|
swift
|
github
|
https://github.com/vapor/vapor
|
Sources/XCTVapor/XCTVaporTests.swift
|
#!/usr/bin/env python
"""
NAME:
sparser.py
SYNOPSIS:
sparser.py [options] filename
DESCRIPTION:
The sparser.py script is a Specified PARSER. It is unique (as far as I can
tell) because it doesn't care about the delimiter(s). The user specifies
what is expected, and the order, for each line of text. All of the heavy
lifting is handled by pyparsing (http://pyparsing.sf.net).
OPTIONS:
-h,--help this message
-v,--version version
-d,--debug turn on debug messages
EXAMPLES:
1. As standalone
sparser.py myfile
2. As library
import sparser
...
#Copyright (C) 2006 Tim Cera timcera@earthlink.net
#
#
# This program is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the Free
# Software Foundation; either version 2 of the License, or (at your option)
# any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
# or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
# for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 675 Mass Ave, Cambridge, MA 02139, USA.
"""
#===imports======================
import sys
import os
import getopt
import re
import gzip
from pyparsing import *
#===globals======================
# Module identity; used as the prefix of debug()/fatal() messages.
modname = "sparser"
__version__ = "0.1"
#--option args--
# Non-zero enables debug() output (intended to be set via -d/--debug).
debug_p = 0
#opt_b=None #string arg, default is undefined
#---positional args, default is empty---
# Positional command-line arguments collected by the option parser.
pargs = []
#---other---
#===utilities====================
def msg(txt):
    """Write *txt* to stdout and flush immediately for unbuffered feedback."""
    out = sys.stdout
    out.write(txt)
    out.flush()
def debug(ftn, txt):
    """Print a '<module>.<function>:<text>' debug line when debugging is on.

    Output is produced only when the module-level ``debug_p`` flag is truthy.
    """
    if not debug_p:
        return
    sys.stdout.write("%s.%s:%s\n" % (modname, ftn, txt))
    sys.stdout.flush()
def fatal(ftn, txt):
    """Abort the program with a namespaced FATAL message.

    Parameters:
        ftn -- name of the function reporting the error
        txt -- description of the fatal condition

    Raises:
        SystemExit -- always; carries the formatted message.
    """
    # Use the call form of raise (valid in both Python 2 and 3) instead of
    # the Python-2-only `raise SystemExit, msg` statement, and avoid binding
    # a local `msg` that shadowed the module-level msg() helper.
    raise SystemExit("%s.%s:FATAL:%s\n" % (modname, ftn, txt))
def usage():
    """Print the module docstring (the command-line help text) to stdout."""
    # print(__doc__) works under both Python 2 and 3 (in Python 2 the
    # parentheses are plain grouping), unlike the former `print __doc__`
    # statement, which is a syntax error under Python 3.
    print(__doc__)
#====================================
class ToInteger(TokenConverter):
    """Converter to make token into an integer."""
    def postParse( self, instring, loc, tokenlist ):
        # pyparsing hook: replace the matched token list with its first token
        # coerced to int (raises ValueError on non-numeric text).
        return int(tokenlist[0])
class ToFloat(TokenConverter):
    """Converter to make token into a float."""
    def postParse( self, instring, loc, tokenlist ):
        # pyparsing hook: replace the matched token list with its first token
        # coerced to float (raises ValueError on non-numeric text).
        return float(tokenlist[0])
class ParseFileLineByLine:
    """
    Bring data from text files into a program, optionally parsing each line
    according to specifications in a parse definition file.
    ParseFileLineByLine instances can be used like normal file objects (i.e. by
    calling readline(), readlines(), and write()), but can also be used as
    sequences of lines in for-loops.
    ParseFileLineByLine objects also handle compression transparently. i.e. it
    is possible to read lines from a compressed text file as if it were not
    compressed. Compression is deduced from the file name suffixes '.Z'
    (compress/uncompress), '.gz' (gzip/gunzip), and '.bz2' (bzip2).
    The parse definition file name is developed based on the input file name.
    If the input file name is 'basename.ext', then the definition file is
    'basename_def.ext'. If a definition file specific to the input file is not
    found, then the program searches for the file 'sparse.def' which would be
    the definition file for all files in that directory without a file specific
    definition file.
    Finally, ParseFileLineByLine objects accept file names that start with '~'
    or '~user' to indicate a home directory, as well as URLs (for reading
    only).
    Constructor:
    ParseFileLineByLine(|filename|, |mode|='"r"'), where |filename| is the name
    of the file (or a URL) and |mode| is one of '"r"' (read), '"w"' (write) or
    '"a"' (append, not supported for .Z files).
    """
    # NOTE: this class uses Python 2 only constructs (``raise E, v``,
    # ``exec``/``execfile`` statements) and will not run under Python 3.
    def __init__(self, filename, mode = 'r'):
        """Opens input file, and if available the definition file. If the
        definition file is available __init__ will then create some pyparsing
        helper variables. """
        if mode not in ['r', 'w', 'a']:
            raise IOError, (0, 'Illegal mode: ' + repr(mode))
        # A ':/ ' at position > 1 is taken to mean a URL scheme separator
        # (position 1 would be a Windows drive letter like 'C:/').
        if string.find(filename, ':/') > 1: # URL
            if mode == 'w':
                raise IOError, "can't write to a URL"
            import urllib
            self.file = urllib.urlopen(filename)
        else:
            filename = os.path.expanduser(filename)
            if mode == 'r' or mode == 'a':
                if not os.path.exists(filename):
                    raise IOError, (2, 'No such file or directory: ' + filename)
            filen, file_extension = os.path.splitext(filename)
            # Table of open statements keyed by (suffix, mode); the matching
            # entry is exec'd below to set self.file.
            command_dict = {
              ('.Z', 'r'):
                "self.file = os.popen('uncompress -c ' + filename, mode)",
              ('.gz', 'r'):
                "self.file = gzip.GzipFile(filename, 'rb')",
              ('.bz2', 'r'):
                "self.file = os.popen('bzip2 -dc ' + filename, mode)",
              ('.Z', 'w'):
                "self.file = os.popen('compress > ' + filename, mode)",
              ('.gz', 'w'):
                "self.file = gzip.GzipFile(filename, 'wb')",
              ('.bz2', 'w'):
                "self.file = os.popen('bzip2 > ' + filename, mode)",
              ('.Z', 'a'):
                "raise IOError, (0, 'Can\'t append to .Z files')",
              ('.gz', 'a'):
                "self.file = gzip.GzipFile(filename, 'ab')",
              ('.bz2', 'a'):
                "raise IOError, (0, 'Can\'t append to .bz2 files')",
              }
            # SECURITY: the command string is exec'd and 'filename' is
            # interpolated into shell commands via os.popen -- unsafe for
            # untrusted file names (shell injection).
            exec command_dict.get((file_extension, mode),
                'self.file = open(filename, mode)')
        self.grammar = None
        # Try to find a parse ('*_def.ext') definition file.  First try to find
        # a file specific parse definition file, then look for 'sparse.def'
        # that would be the definition file for all files within the directory.
        # The definition file is pure Python.  The one variable that needs to
        # be specified is 'parse'.  The 'parse' variable is a list of tuples
        # defining the name, type, and because it is a list, the order of
        # variables on each line in the data file.  The variable name is a
        # string, the type variable is defined as integer, real, and qString.
        # parse = [
        #          ('year', integer),
        #          ('month', integer),
        #          ('day', integer),
        #          ('value', real),
        #         ]
        # NOTE(review): for URL inputs the else-branch above is skipped, so
        # 'filen'/'file_extension' are unbound and the next line raises
        # NameError -- URLs only work if this is considered acceptable; confirm.
        definition_file_one = filen + "_def" + file_extension
        definition_file_two = os.path.dirname(filen) + os.sep + "sparse.def"
        if os.path.exists(definition_file_one):
            self.parsedef = definition_file_one
        elif os.path.exists(definition_file_two):
            self.parsedef = definition_file_two
        else:
            # No definition file: lines are returned unparsed.
            self.parsedef = None
            return None
        # Create some handy pyparsing constructs.  I kept 'decimal_sep' so that
        # could easily change to parse if the decimal separator is a ",".
        decimal_sep = "."
        sign = oneOf("+ -")
        # part of printables without decimal_sep, +, -
        special_chars = string.replace('!"#$%&\'()*,./:;<=>?@[\\]^_`{|}~',
            decimal_sep, "")
        integer = ToInteger(
            Combine(Optional(sign) +
                    Word(nums))).setName("integer")
        positive_integer = ToInteger(
            Combine(Optional("+") +
                    Word(nums))).setName("integer")
        negative_integer = ToInteger(
            Combine("-" +
                    Word(nums))).setName("integer")
        real = ToFloat(
            Combine(Optional(sign) +
                    Word(nums) +
                    decimal_sep +
                    Optional(Word(nums)) +
                    Optional(oneOf("E e") +
                             Word(nums)))).setName("real")
        positive_real = ToFloat(
            Combine(Optional("+") +
                    Word(nums) +
                    decimal_sep +
                    Optional(Word(nums)) +
                    Optional(oneOf("E e") +
                             Word(nums)))).setName("real")
        negative_real = ToFloat(
            Combine("-" +
                    Word(nums) +
                    decimal_sep +
                    Optional(Word(nums)) +
                    Optional(oneOf("E e") +
                             Word(nums)))).setName("real")
        qString = ( sglQuotedString | dblQuotedString ).setName("qString")
        # add other characters we should skip over between interesting fields
        integer_junk = Optional(
            Suppress(
                Word(alphas +
                     special_chars +
                     decimal_sep))).setName("integer_junk")
        real_junk = Optional(
            Suppress(
                Word(alphas +
                     special_chars))).setName("real_junk")
        qString_junk = SkipTo(qString).setName("qString_junk")
        # Now that 'integer', 'real', and 'qString' have been assigned I can
        # execute the definition file.
        # SECURITY: execfile runs arbitrary Python from the definition file;
        # only use with trusted definition files.
        execfile(self.parsedef)
        # Build the grammar, combination of the 'integer', 'real, 'qString',
        # and '*_junk' variables assigned above in the order specified in the
        # definition file.
        grammar = []
        for nam, expr in parse:
            # eval looks up the matching '<type>_junk' helper defined above.
            grammar.append( eval(expr.name + "_junk"))
            grammar.append( expr.setResultsName(nam) )
        # grammar[1:] drops the junk element before the first field.
        self.grammar = And( grammar[1:] + [restOfLine] )
    def __del__(self):
        """Delete (close) the file wrapper."""
        self.close()
    def __getitem__(self, item):
        """Used in 'for line in fp:' idiom."""
        line = self.readline()
        if not line:
            raise IndexError
        return line
    def readline(self):
        """Reads (and optionally parses) a single line."""
        line = self.file.readline()
        if self.grammar and line:
            try:
                return self.grammar.parseString(line).asDict()
            except ParseException:
                # Skip unparsable lines by recursing to the next one.
                # NOTE(review): many consecutive bad lines could exhaust the
                # recursion limit -- confirm acceptable for expected inputs.
                return self.readline()
        else:
            return line
    def readlines(self):
        """Returns a list of all lines (optionally parsed) in the file."""
        if self.grammar:
            tot = []
            # Used this way instead of a 'for' loop against
            # self.file.readlines() so that there wasn't two copies of the file
            # in memory.
            # NOTE(review): unlike readline(), this branch returns the raw
            # lines without applying the grammar -- confirm intended.
            while 1:
                line = self.file.readline()
                if not line:
                    break
                tot.append(line)
            return tot
        return self.file.readlines()
    def write(self, data):
        """Write to a file."""
        self.file.write(data)
    def writelines(self, list):
        """Write a list to a file. Each item in the list is a line in the
        file.
        """
        for line in list:
            self.file.write(line)
    def close(self):
        """Close the file."""
        self.file.close()
    def flush(self):
        """Flush in memory contents to file."""
        self.file.flush()
#=============================
def main(pargs):
"""This should only be used for testing. The primary mode of operation is
as an imported library.
"""
input_file = sys.argv[1]
fp = ParseFileLineByLine(input_file)
for i in fp:
print i
#-------------------------
if __name__ == '__main__':
    # NOTE(review): 'modname', '__version__', 'usage', 'getopt' and 'sys'
    # are expected to be defined/imported earlier in this file -- confirm.
    ftn = "main"
    opts, pargs = getopt.getopt(sys.argv[1:], 'hvd',
                 ['help', 'version', 'debug', 'bb='])
    # Process command line options; positional arguments remain in 'pargs'.
    for opt in opts:
        if opt[0] == '-h' or opt[0] == '--help':
            print modname+": version="+__version__
            usage()
            sys.exit(0)
        elif opt[0] == '-v' or opt[0] == '--version':
            print modname+": version="+__version__
            sys.exit(0)
        elif opt[0] == '-d' or opt[0] == '--debug':
            # Debug flag; assigned here but not read in this block.
            debug_p = 1
        elif opt[0] == '--bb':
            # Value of the '--bb' option; assigned but not read in this block.
            opt_b = opt[1]
    #---make the object and run it---
    main(pargs)
#===Revision Log===
#Created by mkpythonproj:
#2006-02-06 Tim Cera
#
|
unknown
|
codeparrot/codeparrot-clean
| ||
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.clients.admin;
import java.util.Collection;
/**
 * Options for {@link Admin#deleteTopics(Collection)}.
 */
public class DeleteTopicsOptions extends AbstractOptions<DeleteTopicsOptions> {

    private boolean retryOnQuotaViolation = true;

    /**
     * Set the timeout in milliseconds for this operation or {@code null} if the default api timeout for the
     * AdminClient should be used.
     */
    // Kept here (rather than relying solely on AbstractOptions) to preserve
    // binary compatibility with 0.11.
    public DeleteTopicsOptions timeoutMs(Integer timeoutMs) {
        this.timeoutMs = timeoutMs;
        return this;
    }

    /**
     * Set whether a quota violation should be automatically retried.
     */
    public DeleteTopicsOptions retryOnQuotaViolation(boolean retryOnQuotaViolation) {
        this.retryOnQuotaViolation = retryOnQuotaViolation;
        return this;
    }

    /**
     * @return whether a quota violation should be automatically retried.
     */
    public boolean shouldRetryOnQuotaViolation() {
        return retryOnQuotaViolation;
    }
}
|
java
|
github
|
https://github.com/apache/kafka
|
clients/src/main/java/org/apache/kafka/clients/admin/DeleteTopicsOptions.java
|
# Meta package aggregating the OCA "manufacture" addons for Odoo 12.
import setuptools

# The distribution version is tracked in VERSION.txt next to this script.
with open('VERSION.txt', 'r') as version_file:
    version = version_file.read().strip()

setuptools.setup(
    name="odoo12-addons-oca-manufacture",
    description="Meta package for oca-manufacture Odoo addons",
    version=version,
    # Each entry pins one addon distribution from the oca-manufacture project.
    install_requires=[
        'odoo12-addon-account_move_line_manufacture_info',
        'odoo12-addon-base_repair',
        'odoo12-addon-base_repair_config',
        'odoo12-addon-mrp_auto_assign',
        'odoo12-addon-mrp_auto_create_lot',
        'odoo12-addon-mrp_bom_component_menu',
        'odoo12-addon-mrp_bom_line_sequence',
        'odoo12-addon-mrp_bom_location',
        'odoo12-addon-mrp_bom_multi_company',
        'odoo12-addon-mrp_bom_note',
        'odoo12-addon-mrp_bom_tracking',
        'odoo12-addon-mrp_multi_level',
        'odoo12-addon-mrp_multi_level_estimate',
        'odoo12-addon-mrp_planned_order_matrix',
        'odoo12-addon-mrp_production_auto_post_inventory',
        'odoo12-addon-mrp_production_grouped_by_product',
        'odoo12-addon-mrp_production_hierarchy',
        'odoo12-addon-mrp_production_note',
        'odoo12-addon-mrp_production_putaway_strategy',
        'odoo12-addon-mrp_production_quant_manual_assign',
        'odoo12-addon-mrp_production_request',
        'odoo12-addon-mrp_production_show_post_inventory',
        'odoo12-addon-mrp_progress_button',
        'odoo12-addon-mrp_request_bom_structure',
        'odoo12-addon-mrp_request_workcenter_cycle',
        'odoo12-addon-mrp_sale_info',
        'odoo12-addon-mrp_stock_orderpoint_manual_procurement',
        'odoo12-addon-mrp_subcontracting',
        'odoo12-addon-mrp_subcontracting_purchase_link',
        'odoo12-addon-mrp_unbuild_tracked_raw_material',
        'odoo12-addon-mrp_warehouse_calendar',
        'odoo12-addon-mrp_workorder_sequence',
        'odoo12-addon-product_mrp_info',
        'odoo12-addon-product_quick_bom',
        'odoo12-addon-quality_control',
        'odoo12-addon-quality_control_issue',
        'odoo12-addon-quality_control_mrp',
        'odoo12-addon-quality_control_stock',
        'odoo12-addon-quality_control_team',
        'odoo12-addon-repair_calendar_view',
        'odoo12-addon-repair_default_terms_conditions',
        'odoo12-addon-repair_discount',
        'odoo12-addon-repair_payment_term',
        'odoo12-addon-repair_refurbish',
        'odoo12-addon-repair_timeline',
        'odoo12-addon-stock_picking_product_kit_helper',
        'odoo12-addon-stock_whole_kit_constraint',
    ],
    classifiers=[
        'Programming Language :: Python',
        'Framework :: Odoo',
    ]
)
|
unknown
|
codeparrot/codeparrot-clean
| ||
# -*- coding: utf-8 -*-
#
# XCint documentation build configuration file, created by
# sphinx-quickstart on Thu Jun 12 14:59:50 2014.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) live in another
# directory, add it to sys.path here, made absolute via os.path.abspath.
#sys.path.insert(0, os.path.abspath('.'))

# -- General configuration -----------------------------------------------------

# Sphinx extension modules (builtin 'sphinx.ext.*' or custom ones).
extensions = ['sphinx.ext.mathjax', 'sphinx.ext.ifconfig']

# Paths that contain templates, relative to this directory.
templates_path = ['_templates']

# The suffix of source filenames.
source_suffix = '.rst'

# The master toctree document.
master_doc = 'index'

# General information about the project.
project = u'XCint'
copyright = u'2014, Radovan Bast'

# The short X.Y version, and the full version including alpha/beta/rc tags.
version = '0.0'
release = '0.0.0'

# Patterns, relative to the source directory, ignored when looking for sources.
exclude_patterns = ['_build']

# The Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'

# -- Options for HTML output ---------------------------------------------------

# The theme to use for HTML and HTML Help pages.
html_theme = 'default'

# Paths that contain custom static files (such as style sheets), relative to
# this directory; copied after the builtin static files.
html_static_path = ['_static']

# Output file base name for the HTML help builder.
htmlhelp_basename = 'XCintdoc'

# -- Options for LaTeX output --------------------------------------------------

# No LaTeX overrides ('papersize', 'pointsize', 'preamble', ...) are set.
latex_elements = {
}

# Grouping the document tree into LaTeX files.  List of tuples
# (source start file, target name, title, author, documentclass).
latex_documents = [
  ('index', 'XCint.tex', u'XCint Documentation',
   u'Radovan Bast', 'manual'),
]

# -- Options for manual page output --------------------------------------------

# One entry per manual page.  List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    ('index', 'xcint', u'XCint Documentation',
     [u'Radovan Bast'], 1)
]

# -- Options for Texinfo output ------------------------------------------------

# Grouping the document tree into Texinfo files.  List of tuples
# (source start file, target name, title, author,
#  dir menu entry, description, category).
texinfo_documents = [
  ('index', 'XCint', u'XCint Documentation',
   u'Radovan Bast', 'XCint', 'One line description of project.',
   'Miscellaneous'),
]
|
unknown
|
codeparrot/codeparrot-clean
| ||
# Copyright 2012 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import socket
import sys
import django
from django.utils import html_parser
import xstatic.main
import xstatic.pkg.jquery
from horizon.test import patches
# Patch django.utils.html_parser.HTMLParser as a workaround for bug 1273943
if django.get_version() == '1.4' and sys.version_info[:3] > (2, 7, 3):
    html_parser.HTMLParser.parse_starttag = patches.parse_starttag_patched
# 1-second global socket timeout so stray network calls fail quickly in tests.
socket.setdefaulttimeout(1)
LOGIN_URL = '/auth/login/'
LOGOUT_URL = '/auth/logout/'
LOGIN_REDIRECT_URL = '/'
ROOT_PATH = os.path.dirname(os.path.abspath(__file__))
DEBUG = False
TEMPLATE_DEBUG = DEBUG
TESTSERVER = 'http://testserver'
# Hard-coded key is acceptable here: this settings module is test-only.
SECRET_KEY = 'elj1IWiLoWHgcyYxFVLj7cM5rGOOxWl0'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# sqlite3 backend with no NAME -- presumably an in-memory test DB; confirm.
DATABASES = {'default': {'ENGINE': 'django.db.backends.sqlite3'}}
DEFAULT_EXCEPTION_REPORTER_FILTER = 'horizon.exceptions.HorizonReporterFilter'
INSTALLED_APPS = (
    'django.contrib.sessions',
    'django.contrib.staticfiles',
    'django.contrib.messages',
    'django.contrib.humanize',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django_nose',
    'compressor',
    'horizon',
    'horizon.test',
    'horizon.test.test_dashboards.cats',
    'horizon.test.test_dashboards.dogs'
)
MIDDLEWARE_CLASSES = (
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.doc.XViewMiddleware',
    'django.middleware.locale.LocaleMiddleware',
    'horizon.middleware.HorizonMiddleware')
TEMPLATE_CONTEXT_PROCESSORS = (
    'django.core.context_processors.debug',
    'django.core.context_processors.i18n',
    'django.core.context_processors.request',
    'django.core.context_processors.media',
    'django.core.context_processors.static',
    'django.contrib.messages.context_processors.messages',
    'horizon.context_processors.horizon')
TEMPLATE_LOADERS = (
    'django.template.loaders.filesystem.Loader',
    'django.template.loaders.app_directories.Loader',
    'horizon.loaders.TemplateLoader'
)
STATIC_URL = '/static/'
MESSAGE_STORAGE = 'django.contrib.messages.storage.cookie.CookieStorage'
ROOT_URLCONF = 'horizon.test.urls'
TEMPLATE_DIRS = (os.path.join(ROOT_PATH, 'tests', 'templates'),)
SITE_ID = 1
SITE_BRANDING = 'Horizon'
# Tests run through django-nose with coverage over the horizon package.
TEST_RUNNER = 'django_nose.NoseTestSuiteRunner'
NOSE_ARGS = ['--nocapture',
             '--nologcapture',
             '--exclude-dir=horizon/conf/',
             '--exclude-dir=horizon/test/customization',
             '--cover-package=horizon',
             '--cover-inclusive',
             '--all-modules']
EMAIL_BACKEND = 'django.core.mail.backends.locmem.EmailBackend'
SESSION_ENGINE = 'django.contrib.sessions.backends.signed_cookies'
SESSION_COOKIE_HTTPONLY = True
SESSION_EXPIRE_AT_BROWSER_CLOSE = True
SESSION_COOKIE_SECURE = False
# Horizon dashboard configuration used by the test dashboards.
HORIZON_CONFIG = {
    'dashboards': ('cats', 'dogs'),
    'default_dashboard': 'cats',
    "password_validator": {
        "regex": '^.{8,18}$',
        "help_text": "Password must be between 8 and 18 characters."
    },
    'user_home': None,
    'help_url': "http://example.com",
}
COMPRESS_ENABLED = True
COMPRESS_OFFLINE = False
COMPRESS_ROOT = "/tmp/"
COMPRESS_PARSER = 'compressor.parser.HtmlParser'
STATICFILES_FINDERS = (
    'django.contrib.staticfiles.finders.FileSystemFinder',
    'django.contrib.staticfiles.finders.AppDirectoriesFinder',
    'compressor.finders.CompressorFinder',
)
STATICFILES_DIRS = (
    ('horizon/lib/jquery', xstatic.main.XStatic(xstatic.pkg.jquery).base_dir),
)
# Silence noisy loggers; surface only errors from horizon itself.
LOGGING = {
    'version': 1,
    'disable_existing_loggers': False,
    'handlers': {
        'null': {
            'level': 'DEBUG',
            'class': 'django.utils.log.NullHandler',
        },
        'test': {
            'level': 'ERROR',
            'class': 'logging.StreamHandler',
        }
    },
    'loggers': {
        'django.db.backends': {
            'handlers': ['null'],
            'propagate': False,
        },
        'horizon': {
            'handlers': ['test'],
            'propagate': False,
        },
        'nose.plugins.manager': {
            'handlers': ['null'],
            'propagate': False,
        },
        'selenium': {
            'handlers': ['null'],
            'propagate': False,
        }
    }
}
|
unknown
|
codeparrot/codeparrot-clean
| ||
// Copyright IBM Corp. 2016, 2025
// SPDX-License-Identifier: BUSL-1.1
package mfa
// SentinelGet exposes selected Config fields to Sentinel by key name.
// A nil receiver or an unrecognized key yields (nil, nil).
func (c *Config) SentinelGet(key string) (interface{}, error) {
	if c == nil {
		return nil, nil
	}
	var value interface{}
	switch key {
	case "type":
		value = c.Type
	case "name":
		value = c.Name
	case "mount_accessor":
		value = c.MountAccessor
	}
	return value, nil
}
// SentinelKeys lists the key names understood by SentinelGet.
func (c *Config) SentinelKeys() []string {
	keys := make([]string, 0, 3)
	keys = append(keys, "type", "name", "mount_accessor")
	return keys
}
|
go
|
github
|
https://github.com/hashicorp/vault
|
helper/identity/mfa/sentinel.go
|
# Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
#
# SPDX-License-Identifier: curl
# Checks that URLs referenced from the markdown docs are reachable.
name: 'URLs'
# Run on pushes/PRs targeting master (and '*/ci' push branches), plus a
# nightly scheduled run at 05:10 UTC.
'on':
  push:
    branches:
      - master
      - '*/ci'
  pull_request:
    branches:
      - master
  schedule:
    - cron: '10 5 * * *'
# Cancel superseded runs of the same PR/commit.
concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.sha }}
  cancel-in-progress: true
permissions: {}
jobs:
  linkcheck:
    # Scheduled runs are restricted to the upstream 'curl' org to avoid
    # burning CI minutes on forks.
    if: ${{ github.repository_owner == 'curl' || github.event_name != 'schedule' }}
    name: 'linkcheck'
    runs-on: ubuntu-slim
    steps:
      - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
        with:
          persist-credentials: false
      # PR/push runs only dry-run; the real network check runs nightly.
      - name: 'mdlinkcheck (dry run)'
        if: ${{ github.event_name != 'schedule' }}
        run: ./scripts/mdlinkcheck --dry-run
      - name: 'mdlinkcheck'
        if: ${{ github.event_name == 'schedule' }}
        run: ./scripts/mdlinkcheck
|
unknown
|
github
|
https://github.com/curl/curl
|
.github/workflows/checkurls.yml
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import pytest
from pytest import mark
from translate.misc import wStringIO
from translate.storage import po, xliff
from translate.tools import pomerge
def test_str2bool():
    """test the str2bool function"""
    for truthy in ("yes", "true", "1"):
        assert pomerge.str2bool(truthy)
    for falsy in ("no", "false", "0"):
        assert not pomerge.str2bool(falsy)
    # Anything outside the recognized vocabulary must raise.
    pytest.raises(ValueError, pomerge.str2bool, "2")
class TestPOMerge:
xliffskeleton = '''<?xml version="1.0" ?>
<xliff version="1.1" xmlns="urn:oasis:names:tc:xliff:document:1.1">
<file original="filename.po" source-language="en-US" datatype="po">
<body>
%s
</body>
</file>
</xliff>'''
def mergestore(self, templatesource, inputsource, mergeblanks="yes",
mergefuzzy="yes",
mergecomments="yes"):
"""merges the sources of the given files and returns a new pofile
object"""
templatefile = wStringIO.StringIO(templatesource)
inputfile = wStringIO.StringIO(inputsource)
outputfile = wStringIO.StringIO()
assert pomerge.mergestore(inputfile, outputfile, templatefile,
mergeblanks=mergeblanks,
mergefuzzy=mergefuzzy,
mergecomments=mergecomments,)
outputpostring = outputfile.getvalue()
outputpofile = po.pofile(outputpostring)
return outputpofile
def mergexliff(self, templatesource, inputsource, mergeblanks="yes",
mergefuzzy="yes",
mergecomments="yes"):
"""merges the sources of the given files and returns a new xlifffile
object"""
templatefile = wStringIO.StringIO(templatesource)
inputfile = wStringIO.StringIO(inputsource)
outputfile = wStringIO.StringIO()
assert pomerge.mergestore(inputfile, outputfile, templatefile,
mergeblanks=mergeblanks,
mergefuzzy=mergefuzzy,
mergecomments=mergecomments)
outputxliffstring = outputfile.getvalue()
print("Generated XML:")
print(outputxliffstring)
outputxlifffile = xliff.xlifffile(outputxliffstring)
return outputxlifffile
def countunits(self, pofile):
"""returns the number of non-header items"""
if pofile.units[0].isheader():
return len(pofile.units) - 1
else:
return len(pofile.units)
def singleunit(self, pofile):
"""checks that the pofile contains a single non-header unit, and
returns it"""
assert self.countunits(pofile) == 1
return pofile.units[-1]
def test_mergesore_bad_data(self):
"""Test that we catch bad options sent to mergestore"""
templatefile = wStringIO.StringIO("")
inputfile = wStringIO.StringIO("")
outputfile = wStringIO.StringIO()
pytest.raises(ValueError, pomerge.mergestore, inputfile, outputfile,
templatefile, mergeblanks="yay")
pytest.raises(ValueError, pomerge.mergestore, inputfile, outputfile,
templatefile, mergecomments="yay")
def test_simplemerge(self):
"""checks that a simple po entry merges OK"""
templatepo = '''#: simple.test\nmsgid "Simple String"\nmsgstr ""\n'''
inputpo = '''#: simple.test\nmsgid "Simple String"\nmsgstr "Dimpled Ring"\n'''
pofile = self.mergestore(templatepo, inputpo)
pounit = self.singleunit(pofile)
assert pounit.source == "Simple String"
assert pounit.target == "Dimpled Ring"
    def test_simplemerge_no_locations(self):
        """A simple po entry without location comments merges OK."""
        templatepo = '''msgid "Simple String"
msgstr ""'''
        inputpo = '''msgid "Simple String"
msgstr "Dimpled Ring"'''
        pofile = self.mergestore(templatepo, inputpo)
        pounit = self.singleunit(pofile)
        assert pounit.source == "Simple String"
        assert pounit.target == "Dimpled Ring"
def test_replacemerge(self):
"""checks that a simple po entry merges OK"""
templatepo = '''#: simple.test\nmsgid "Simple String"\nmsgstr "Dimpled Ring"\n'''
inputpo = '''#: simple.test\nmsgid "Simple String"\nmsgstr "Dimpled King"\n'''
pofile = self.mergestore(templatepo, inputpo)
pounit = self.singleunit(pofile)
assert pounit.source == "Simple String"
assert pounit.target == "Dimpled King"
def test_merging_blanks(self):
"""By default we will merge blanks, but we can also override that"""
templatepo = '''#: simple.test\nmsgid "Simple String"\nmsgstr "Dimpled Ring"\n'''
inputpo = '''#: simple.test\nmsgid "Simple String"\nmsgstr ""\n'''
pofile = self.mergestore(templatepo, inputpo)
pounit = self.singleunit(pofile)
assert pounit.source == "Simple String"
assert pounit.target == ""
pofile = self.mergestore(templatepo, inputpo, mergeblanks="no")
pounit = self.singleunit(pofile)
assert pounit.source == "Simple String"
assert pounit.target == "Dimpled Ring"
def test_merging_fuzzies(self):
"""By default we will merge fuzzies, but can can also override that."""
templatepo = '''#: simple.test\nmsgid "Simple String"\nmsgstr "Dimpled Ring"\n'''
inputpo = '''#: simple.test\n#, fuzzy\nmsgid "Simple String"\nmsgstr "changed fish"\n'''
pofile = self.mergestore(templatepo, inputpo)
pounit = self.singleunit(pofile)
assert pounit.source == "Simple String"
assert pounit.target == "changed fish"
assert pounit.isfuzzy()
pofile = self.mergestore(templatepo, inputpo, mergefuzzy="no")
pounit = self.singleunit(pofile)
assert pounit.source == "Simple String"
assert pounit.target == "Dimpled Ring"
assert not pounit.isfuzzy()
def test_merging_locations(self):
"""check that locations on separate lines are output in Gettext form
of all on one line"""
templatepo = '''#: location.c:1\n#: location.c:2\nmsgid "Simple String"\nmsgstr ""\n'''
inputpo = '''#: location.c:1\n#: location.c:2\nmsgid "Simple String"\nmsgstr "Dimpled Ring"\n'''
expectedpo = '''#: location.c:1%slocation.c:2\nmsgid "Simple String"\nmsgstr "Dimpled Ring"\n''' % po.lsep
pofile = self.mergestore(templatepo, inputpo)
print(pofile)
assert str(pofile) == expectedpo
    def test_unit_missing_in_template_with_locations(self):
        """Units missing from the template are dropped from the merged
        output (variant with location comments)."""
        templatepo = '''#: location.c:1
msgid "Simple String"
msgstr ""'''
        inputpo = '''#: location.c:1
msgid "Simple String"
msgstr "Dimpled Ring"
#: location.c:1
msgid "Extra string"
msgstr "Perplexa ring"'''
        # "Extra string" has no counterpart in the template, so only the
        # first unit survives.
        expectedpo = '''#: location.c:1
msgid "Simple String"
msgstr "Dimpled Ring"
'''
        pofile = self.mergestore(templatepo, inputpo)
        print(pofile)
        assert str(pofile) == expectedpo
    def test_unit_missing_in_template_no_locations(self):
        """Units missing from the template are dropped from the merged
        output (variant without location comments)."""
        templatepo = '''msgid "Simple String"
msgstr ""'''
        inputpo = '''msgid "Simple String"
msgstr "Dimpled Ring"
msgid "Extra string"
msgstr "Perplexa ring"'''
        # "Extra string" has no counterpart in the template, so only the
        # first unit survives.
        expectedpo = '''msgid "Simple String"
msgstr "Dimpled Ring"
'''
        pofile = self.mergestore(templatepo, inputpo)
        print(pofile)
        assert str(pofile) == expectedpo
def test_reflowed_source_comments(self):
"""ensure that we don't duplicate source comments (locations) if they
have been reflowed"""
templatepo = '''#: newMenu.label\n#: newMenu.accesskey\nmsgid "&New"\nmsgstr ""\n'''
newpo = '''#: newMenu.label newMenu.accesskey\nmsgid "&New"\nmsgstr "&Nuwe"\n'''
expectedpo = '''#: newMenu.label%snewMenu.accesskey\nmsgid "&New"\nmsgstr "&Nuwe"\n''' % po.lsep
pofile = self.mergestore(templatepo, newpo)
pounit = self.singleunit(pofile)
print(pofile)
assert str(pofile) == expectedpo
    def test_comments_with_blank_lines(self):
        """ensure that we don't lose empty newlines in comments"""
        templatepo = '''# # ***** BEGIN LICENSE BLOCK *****
# Version: MPL 1.1/GPL 2.0/LGPL 2.1
#
# bla bla
msgid "bla"
msgstr "blabla"
'''
        # Merging a file into itself must round-trip the comment block
        # (including the bare '#' line) unchanged.
        newpo = templatepo
        expectedpo = templatepo
        pofile = self.mergestore(templatepo, newpo)
        # singleunit's return value is unused; the call still asserts the
        # merged file holds exactly one unit.
        pounit = self.singleunit(pofile)
        print(pofile)
        assert str(pofile) == expectedpo
def test_merge_dont_delete_unassociated_comments(self):
"""ensure that we do not delete comments in the PO file that are not
assocaited with a message block"""
templatepo = '''# Lonely comment\n\n# Translation comment\nmsgid "Bob"\nmsgstr "Toolmaker"\n'''
mergepo = '''# Translation comment\nmsgid "Bob"\nmsgstr "Builder"\n'''
expectedpo = '''# Lonely comment\n# Translation comment\nmsgid "Bob"\nmsgstr "Builder"\n'''
pofile = self.mergestore(templatepo, mergepo)
# pounit = self.singleunit(pofile)
print(pofile)
assert str(pofile) == expectedpo
def test_preserve_format_trailing_newlines(self):
    """Test that we can merge messages correctly that end with a newline."""
    templatepo = '''msgid "Simple string\\n"\nmsgstr ""\n'''
    mergepo = '''msgid "Simple string\\n"\nmsgstr "Dimpled ring\\n"\n'''
    expectedpo = '''msgid "Simple string\\n"\nmsgstr "Dimpled ring\\n"\n'''
    pofile = self.mergestore(templatepo, mergepo)
    print("Expected:\n%s\n\nMerged:\n%s" % (expectedpo, str(pofile)))
    assert str(pofile) == expectedpo
    # Same message but with the msgid split over two lines; either the
    # split or the joined single-line serialisation is acceptable output.
    templatepo = '''msgid ""\n"Simple string\\n"\nmsgstr ""\n'''
    mergepo = '''msgid ""\n"Simple string\\n"\nmsgstr ""\n"Dimpled ring\\n"\n'''
    expectedpo = '''msgid ""\n"Simple string\\n"\nmsgstr "Dimpled ring\\n"\n'''
    expectedpo2 = '''msgid "Simple string\\n"\nmsgstr "Dimpled ring\\n"\n'''
    pofile = self.mergestore(templatepo, mergepo)
    print("Expected:\n%s\n\nMerged:\n%s" % (expectedpo, str(pofile)))
    assert str(pofile) == expectedpo or str(pofile) == expectedpo2
def test_preserve_format_minor_start_and_end_of_sentence_changes(self):
    """Test that we are not too fussy about large diffs for simple
    changes at the start or end of a sentence."""
    # Trailing colon added to the translation.
    templatepo = '''msgid "Target type:"\nmsgstr "Doelsoort"\n\n'''
    mergepo = '''msgid "Target type:"\nmsgstr "Doelsoort:"\n'''
    expectedpo = mergepo
    pofile = self.mergestore(templatepo, mergepo)
    print("Expected:\n%s\n\nMerged:\n%s" % (expectedpo, str(pofile)))
    assert str(pofile) == expectedpo
    # Leading accelerator marker ("&") added to the translation.
    templatepo = '''msgid "&Select"\nmsgstr "Kies"\n\n'''
    mergepo = '''msgid "&Select"\nmsgstr "&Kies"\n'''
    expectedpo = mergepo
    pofile = self.mergestore(templatepo, mergepo)
    print("Expected:\n%s\n\nMerged:\n%s" % (expectedpo, str(pofile)))
    assert str(pofile) == expectedpo
    # Locale list extended at the start of the string.
    templatepo = '''msgid "en-us, en"\nmsgstr "en-us, en"\n'''
    mergepo = '''msgid "en-us, en"\nmsgstr "af-za, af, en-za, en-gb, en-us, en"\n'''
    expectedpo = mergepo
    pofile = self.mergestore(templatepo, mergepo)
    print("Expected:\n%s\n\nMerged:\n%s" % (expectedpo, str(pofile)))
    assert str(pofile) == expectedpo
def test_preserve_format_last_entry_in_a_file(self):
    """The last entry in a PO file is usually not followed by an empty
    line. Test that we preserve this."""
    # Template without a trailing blank line...
    templatepo = '''msgid "First"\nmsgstr ""\n\nmsgid "Second"\nmsgstr ""\n'''
    mergepo = '''msgid "First"\nmsgstr "Eerste"\n\nmsgid "Second"\nmsgstr "Tweede"\n'''
    expectedpo = '''msgid "First"\nmsgstr "Eerste"\n\nmsgid "Second"\nmsgstr "Tweede"\n'''
    pofile = self.mergestore(templatepo, mergepo)
    print("Expected:\n%s\n\nMerged:\n%s" % (expectedpo, str(pofile)))
    assert str(pofile) == expectedpo
    # ...and with a trailing blank line: output must be identical.
    templatepo = '''msgid "First"\nmsgstr ""\n\nmsgid "Second"\nmsgstr ""\n\n'''
    mergepo = '''msgid "First"\nmsgstr "Eerste"\n\nmsgid "Second"\nmsgstr "Tweede"\n'''
    expectedpo = '''msgid "First"\nmsgstr "Eerste"\n\nmsgid "Second"\nmsgstr "Tweede"\n'''
    pofile = self.mergestore(templatepo, mergepo)
    print("Expected:\n%s\n\nMerged:\n%s" % (expectedpo, str(pofile)))
    assert str(pofile) == expectedpo
@mark.xfail(reason="Not Implemented")
def test_escape_tabs(self):
    """Ensure that input tabs are escaped in the output, like
    gettext does."""
    # The strings below contains the tab character, not spaces.
    templatepo = '''msgid "First	Second"\nmsgstr ""\n\n'''
    mergepo = '''msgid "First	Second"\nmsgstr "Eerste	Tweede"\n'''
    # Raw string: the expected output contains the two-character
    # escape sequence backslash-t, not a literal tab.
    expectedpo = r'''msgid "First\tSecond"
msgstr "Eerste\tTweede"
'''
    pofile = self.mergestore(templatepo, mergepo)
    print("Expected:\n%s\n\nMerged:\n%s" % (expectedpo, str(pofile)))
    assert str(pofile) == expectedpo
def test_preserve_comments_layout(self):
    """Ensure that when we merge with new '# (poconflict)' or other
    comments that we don't mess formatting."""
    templatepo = '''#: filename\nmsgid "Desktop Background.bmp"\nmsgstr "Desktop Background.bmp"\n\n'''
    mergepo = '''# (pofilter) unchanged: please translate\n#: filename\nmsgid "Desktop Background.bmp"\nmsgstr "Desktop Background.bmp"\n'''
    expectedpo = mergepo
    pofile = self.mergestore(templatepo, mergepo)
    print("Expected:\n%s\n\nMerged:\n%s" % (expectedpo, str(pofile)))
    assert str(pofile) == expectedpo
def test_merge_dos2unix(self):
    """Test that merging a comment line with dos newlines doesn't add a
    new line."""
    # Merge input uses CRLF line endings throughout; output must be
    # normalised to LF only, with no extra blank lines introduced.
    templatepo = '''# User comment\n# (pofilter) Translate Toolkit comment\n#. Automatic comment\n#: location_comment.c:110\nmsgid "File"\nmsgstr "File"\n\n'''
    mergepo = '''# User comment\r\n# (pofilter) Translate Toolkit comment\r\n#. Automatic comment\r\n#: location_comment.c:110\r\nmsgid "File"\r\nmsgstr "Ifayile"\r\n\r\n'''
    expectedpo = '''# User comment\n# (pofilter) Translate Toolkit comment\n#. Automatic comment\n#: location_comment.c:110\nmsgid "File"\nmsgstr "Ifayile"\n'''
    pofile = self.mergestore(templatepo, mergepo)
    assert str(pofile) == expectedpo
    # Unassociated comment
    templatepo = '''# Lonely comment\n\n#: location_comment.c:110\nmsgid "Bob"\nmsgstr "Toolmaker"\n'''
    mergepo = '''# Lonely comment\r\n\r\n#: location_comment.c:110\r\nmsgid "Bob"\r\nmsgstr "Builder"\r\n\r\n'''
    expectedpo = '''# Lonely comment\n#: location_comment.c:110\nmsgid "Bob"\nmsgstr "Builder"\n'''
    pofile = self.mergestore(templatepo, mergepo)
    assert str(pofile) == expectedpo
    # New comment
    templatepo = '''#: location_comment.c:110\nmsgid "File"\nmsgstr "File"\n\n'''
    mergepo = '''# User comment\r\n# (pofilter) Translate Toolkit comment\r\n#: location_comment.c:110\r\nmsgid "File"\r\nmsgstr "Ifayile"\r\n\r\n'''
    expectedpo = '''# User comment\n# (pofilter) Translate Toolkit comment\n#: location_comment.c:110\nmsgid "File"\nmsgstr "Ifayile"\n'''
    pofile = self.mergestore(templatepo, mergepo)
    assert str(pofile) == expectedpo
def test_xliff_into_xliff(self):
    """Merging an XLIFF file into an XLIFF template carries the target over."""
    skeleton = self.xliffskeleton
    template = skeleton % '''<trans-unit>
<source>red</source>
<target></target>
</trans-unit>'''
    translated = skeleton % '''<trans-unit>
<source>red</source>
<target>rooi</target>
</trans-unit>'''
    merged = self.mergexliff(template, translated)
    assert len(merged.units) == 1
    first_unit = merged.units[0]
    assert first_unit.source == "red"
    assert first_unit.target == "rooi"
def test_po_into_xliff(self):
    """Merging PO content into an XLIFF template fills in the target."""
    templatexliff = self.xliffskeleton % '''<trans-unit>
<source>red</source>
<target></target>
</trans-unit>'''
    mergepo = 'msgid "red"\nmsgstr "rooi"'
    xlifffile = self.mergexliff(templatexliff, mergepo)
    assert len(xlifffile.units) == 1
    unit = xlifffile.units[0]
    assert unit.source == "red"
    assert unit.target == "rooi"
def test_xliff_into_po(self):
    """Merging XLIFF content into a PO template fills in msgstr while
    keeping the translator comment."""
    templatepo = '# my comment\nmsgid "red"\nmsgstr ""'
    mergexliff = self.xliffskeleton % '''<trans-unit>
<source>red</source>
<target>rooi</target>
</trans-unit>'''
    expectedpo = '# my comment\nmsgid "red"\nmsgstr "rooi"\n'
    pofile = self.mergestore(templatepo, mergexliff)
    assert str(pofile) == expectedpo
def test_merging_dont_merge_kde_comments_found_in_translation(self):
    """If we find a KDE comment in the translation (target) then do not
    merge it."""
    templatepo = '''msgid "_: KDE comment\\n"\n"File"\nmsgstr "File"\n\n'''
    mergepo = '''msgid "_: KDE comment\\n"\n"File"\nmsgstr "_: KDE comment\\n"\n"Ifayile"\n\n'''
    expectedpo = '''msgid ""\n"_: KDE comment\\n"\n"File"\nmsgstr "Ifayile"\n'''
    pofile = self.mergestore(templatepo, mergepo)
    print("Expected:\n%s\n\nMerged:\n%s" % (expectedpo, str(pofile)))
    assert str(pofile) == expectedpo
    # Translated kde comment.
    mergepo = '''msgid "_: KDE comment\\n"\n"File"\nmsgstr "_: KDE kommentaar\\n"\n"Ifayile"\n\n'''
    # Bug fix: the original never re-ran the merge after reassigning
    # mergepo above, so the assertion below re-checked the stale result
    # of the previous scenario and this case was never actually tested.
    pofile = self.mergestore(templatepo, mergepo)
    print("Expected:\n%s\n\nMerged:\n%s" % (expectedpo, str(pofile)))
    assert str(pofile) == expectedpo
    # multiline KDE comment
    templatepo = '''msgid "_: KDE "\n"comment\\n"\n"File"\nmsgstr "File"\n\n'''
    mergepo = '''msgid "_: KDE "\n"comment\\n"\n"File"\nmsgstr "_: KDE "\n"comment\\n"\n"Ifayile"\n\n'''
    expectedpo = '''msgid ""\n"_: KDE comment\\n"\n"File"\nmsgstr "Ifayile"\n'''
    pofile = self.mergestore(templatepo, mergepo)
    print("Expected:\n%s\n\nMerged:\n%s" % (expectedpo, str(pofile)))
    assert str(pofile) == expectedpo
def test_merging_untranslated_with_kde_disambiguation(self):
    """Test merging untranslated messages that are the same except for
    KDE disambiguation."""
    # NOTE(review): real PO files separate units with a blank line; the
    # fixtures below appear to be missing blank lines between units --
    # possibly lost in transit.  Confirm against the upstream test suite.
    templatepo = r'''#: sendMsgTitle
#: sendMsgTitle.accesskey
msgid "_: sendMsgTitle sendMsgTitle.accesskey\n"
"Send Message"
msgstr ""
#: sendMessageCheckWindowTitle
#: sendMessageCheckWindowTitle.accesskey
msgid "_: sendMessageCheckWindowTitle sendMessageCheckWindowTitle.accesskey\n"
"Send Message"
msgstr ""
'''
    mergepo = r'''#: sendMsgTitle%ssendMsgTitle.accesskey
msgid ""
"_: sendMsgTitle sendMsgTitle.accesskey\n"
"Send Message"
msgstr "Stuur"
#: sendMessageCheckWindowTitle%ssendMessageCheckWindowTitle.accesskey
msgid ""
"_: sendMessageCheckWindowTitle sendMessageCheckWindowTitle.accesskey\n"
"Send Message"
msgstr "Stuur"
''' % (po.lsep, po.lsep)
    expectedpo = mergepo
    pofile = self.mergestore(templatepo, mergepo)
    print("Expected:\n%s\n---\nMerged:\n%s\n---" % (expectedpo, str(pofile)))
    assert str(pofile) == expectedpo
def test_merging_header_entries(self):
    """Check that we do the right thing if we have header entries in the
    input PO."""
    # Fields owned by the template (bug address, POT creation date,
    # generator) must win; fields owned by the translation (revision
    # date, translator, team, plural forms) must come from the merge.
    # NOTE(review): the blank line between the header and the first unit
    # may have been lost in transit -- confirm against upstream.
    templatepo = r'''#, fuzzy
msgid ""
msgstr ""
"Project-Id-Version: PACKAGE VERSION\n"
"Report-Msgid-Bugs-To: new@example.com\n"
"POT-Creation-Date: 2006-11-11 11:11+0000\n"
"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
"Language-Team: LANGUAGE <LL@li.org>\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
"Plural-Forms: nplurals=INTEGER; plural=EXPRESSION;\n"
"X-Generator: Translate Toolkit 0.10rc2\n"
#: simple.test
msgid "Simple String"
msgstr ""
'''
    mergepo = r'''msgid ""
msgstr ""
"Project-Id-Version: Pootle 0.10\n"
"Report-Msgid-Bugs-To: old@example.com\n"
"POT-Creation-Date: 2006-01-01 01:01+0100\n"
"PO-Revision-Date: 2006-09-09 09:09+0900\n"
"Last-Translator: Joe Translate <joe@example.com>\n"
"Language-Team: Pig Latin <piglatin@example.com>\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
"Plural-Forms: nplurals=2; plural=(n != 1);\n"
"X-Generator: Translate Toolkit 0.9\n"
#: simple.test
msgid "Simple String"
msgstr "Dimpled Ring"
'''
    expectedpo = r'''msgid ""
msgstr ""
"Project-Id-Version: Pootle 0.10\n"
"Report-Msgid-Bugs-To: new@example.com\n"
"POT-Creation-Date: 2006-11-11 11:11+0000\n"
"PO-Revision-Date: 2006-09-09 09:09+0900\n"
"Last-Translator: Joe Translate <joe@example.com>\n"
"Language-Team: Pig Latin <piglatin@example.com>\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
"Plural-Forms: nplurals=2; plural=(n != 1);\n"
"X-Generator: Translate Toolkit 0.10rc2\n"
#: simple.test
msgid "Simple String"
msgstr "Dimpled Ring"
'''
    pofile = self.mergestore(templatepo, mergepo)
    print("Expected:\n%s\n---\nMerged:\n%s\n---" % (expectedpo, str(pofile)))
    assert str(pofile) == expectedpo
def test_merging_different_locations(self):
    """Test when merging units that are unchanged except for changed
    locations that we don't go fuzzy (bug 1583)."""
    # The merged unit gains an extra "#:" location; the translation must
    # stay as-is (not become fuzzy).  Either the two-line or the joined
    # single-line location serialisation is accepted.
    # NOTE(review): blank lines between units may have been lost in
    # transit -- confirm against upstream.
    templatepo = r'''#, fuzzy
msgid ""
msgstr ""
"Content-Type: text/plain; charset=utf-8\n"
"Content-Transfer-Encoding: 8bit\n"
#: sentinelheadline
msgctxt "sentinelheadline"
msgid "DESTROY SENTINELS"
msgstr "ZERSTÖRE WACHPOSTEN"
#: sentinelheadline1
msgctxt "sentinelheadline1"
msgid "DESTROY SENTINELS"
msgstr "ZERSTÖRE WACHPOSTEN"
'''
    mergepo = r'''msgid ""
msgstr ""
"Content-Type: text/plain; charset=utf-8\n"
"Content-Transfer-Encoding: 8bit\n"
#: sentinelheadline
#: sentinelheadline1
msgctxt "sentinelheadline"
msgid "DESTROY SENTINELS"
msgstr "ZERSTÖRE WACHPOSTEN"
#: sentinelheadline1
msgctxt "sentinelheadline1"
msgid "DESTROY SENTINELS"
msgstr "ZERSTÖRE WACHPOSTEN"
'''
    expectedpo2 = r'''msgid ""
msgstr ""
"Content-Type: text/plain; charset=utf-8\n"
"Content-Transfer-Encoding: 8bit\n"
#: sentinelheadline sentinelheadline1
msgctxt "sentinelheadline"
msgid "DESTROY SENTINELS"
msgstr "ZERSTÖRE WACHPOSTEN"
#: sentinelheadline1
msgctxt "sentinelheadline1"
msgid "DESTROY SENTINELS"
msgstr "ZERSTÖRE WACHPOSTEN"
'''
    expectedpo = mergepo
    pofile = self.mergestore(templatepo, mergepo)
    print("Expected:\n%s\n---\nMerged:\n%s\n---" % (expectedpo, str(pofile)))
    assert str(pofile) == expectedpo or str(pofile) == expectedpo2
|
unknown
|
codeparrot/codeparrot-clean
| ||
import os
import sys
from setuptools import setup, find_packages
version = '0.1.5'  # single source of truth for the package version used by setup() below
def read(f):
    """Return the whitespace-stripped contents of file *f*, resolved
    relative to this setup script's directory (so builds work from any
    working directory)."""
    # Use a context manager so the file handle is closed deterministically
    # (the original left it open until garbage collection).
    path = os.path.join(os.path.dirname(__file__), f)
    with open(path) as handle:
        return handle.read().strip()
# Package metadata for android-resource-remover.
# NOTE(review): the classifier declares "BSD License" while the license
# field says "Apache" -- confirm which one is correct.
setup(name='android-resource-remover',
      version=version,
      description=('Android resource remover'),
      # README plus changelog form the long description shown on PyPI.
      long_description='\n\n'.join((read('README.md'), read('CHANGELOG'))),
      keywords=['android'],
      classifiers=[
          'License :: OSI Approved :: BSD License',
          'Intended Audience :: Developers',
          'Programming Language :: Python'],
      author='Keepsafe',
      author_email='support@getkeepsafe.com',
      url='https://github.com/KeepSafe/android-resource-remover/',
      license='Apache',
      py_modules=['android_clean_app'],
      namespace_packages=[],
      install_requires=['lxml >= 3.3.3'],
      data_files=[('.', ['AUTHORS', 'CHANGELOG', 'LICENSE', 'README.md'])],
      # Single console script, dispatching to android_clean_app.main().
      entry_points={
          'console_scripts': [
              'android-resource-remover = android_clean_app:main']
      },
      include_package_data = False)
|
unknown
|
codeparrot/codeparrot-clean
| ||
use std::{collections::BTreeMap, sync::LazyLock};
use anyhow::{Context, Result};
use either::Either;
use rustc_hash::FxHashMap;
use turbo_rcstr::{RcStr, rcstr};
use turbo_tasks::{FxIndexMap, ResolvedVc, Vc, fxindexmap};
use turbo_tasks_fs::{FileSystem, FileSystemPath, to_sys_path};
use turbopack_core::{
issue::{Issue, IssueExt, IssueSeverity, IssueStage, StyledString},
reference_type::{CommonJsReferenceSubType, ReferenceType},
resolve::{
AliasPattern, ExternalTraced, ExternalType, ResolveAliasMap, SubpathValue,
node::node_cjs_resolve_options,
options::{ConditionValue, ImportMap, ImportMapping, ResolvedMap},
parse::Request,
pattern::Pattern,
resolve,
},
source::Source,
};
use turbopack_node::execution_context::ExecutionContext;
use crate::{
app_structure::CollectedRootParams,
embed_js::{VIRTUAL_PACKAGE_NAME, next_js_fs},
mode::NextMode,
next_client::context::ClientContextType,
next_config::{NextConfig, OptionFileSystemPath},
next_edge::unsupported::NextEdgeUnsupportedModuleReplacer,
next_font::google::{
GOOGLE_FONTS_INTERNAL_PREFIX, NextFontGoogleCssModuleReplacer,
NextFontGoogleFontFileReplacer, NextFontGoogleReplacer,
},
next_root_params::insert_next_root_params_mapping,
next_server::context::ServerContextType,
util::NextRuntime,
};
/// List of node.js internals that are not supported by edge runtime.
/// If these imports are used & user does not provide alias for the polyfill,
/// runtime error will be thrown.
/// This is not identical to the list of entire node.js internals, refer
/// https://vercel.com/docs/functions/runtimes/edge-runtime#compatible-node.js-modules
/// for the allowed imports.
// The array length (44) must match the number of entries below.
// NOTE(review): "pnpapi" (Yarn PnP's runtime API) is included although it is
// not a Node.js internal -- confirm that is intentional.
static EDGE_UNSUPPORTED_NODE_INTERNALS: LazyLock<[RcStr; 44]> = LazyLock::new(|| {
    [
        rcstr!("child_process"),
        rcstr!("cluster"),
        rcstr!("console"),
        rcstr!("constants"),
        rcstr!("crypto"),
        rcstr!("dgram"),
        rcstr!("diagnostics_channel"),
        rcstr!("dns"),
        rcstr!("dns/promises"),
        rcstr!("domain"),
        rcstr!("fs"),
        rcstr!("fs/promises"),
        rcstr!("http"),
        rcstr!("http2"),
        rcstr!("https"),
        rcstr!("inspector"),
        rcstr!("module"),
        rcstr!("net"),
        rcstr!("os"),
        rcstr!("path"),
        rcstr!("path/posix"),
        rcstr!("path/win32"),
        rcstr!("perf_hooks"),
        rcstr!("process"),
        rcstr!("punycode"),
        rcstr!("querystring"),
        rcstr!("readline"),
        rcstr!("repl"),
        rcstr!("stream"),
        rcstr!("stream/promises"),
        rcstr!("stream/web"),
        rcstr!("string_decoder"),
        rcstr!("sys"),
        rcstr!("timers"),
        rcstr!("timers/promises"),
        rcstr!("tls"),
        rcstr!("trace_events"),
        rcstr!("tty"),
        rcstr!("v8"),
        rcstr!("vm"),
        rcstr!("wasi"),
        rcstr!("worker_threads"),
        rcstr!("zlib"),
        rcstr!("pnpapi"),
    ]
});
// Make sure to not add any external requests here.
/// Computes the Next-specific client import map.
#[turbo_tasks::function]
pub async fn get_next_client_import_map(
    project_path: FileSystemPath,
    ty: ClientContextType,
    next_config: Vc<NextConfig>,
    next_mode: Vc<NextMode>,
    execution_context: Vc<ExecutionContext>,
) -> Result<Vc<ImportMap>> {
    let mut import_map = ImportMap::empty();
    // Shared aliases used by both client and server maps (last arg `false`:
    // this is not the edge variant).
    insert_next_shared_aliases(
        &mut import_map,
        project_path.clone(),
        execution_context,
        next_config,
        next_mode,
        false,
    )
    .await?;
    insert_optimized_module_aliases(&mut import_map, project_path.clone()).await?;
    // User-configured aliases, resolved with the "browser" condition since
    // this map is for client bundles.
    insert_alias_option(
        &mut import_map,
        &project_path,
        next_config.resolve_alias_options(),
        ["browser"],
    )
    .await?;
    match &ty {
        ClientContextType::Pages { .. } => {}
        ClientContextType::App { app_dir } => {
            // Keep in sync with file:///./../../../packages/next/src/lib/needs-experimental-react.ts
            let taint = *next_config.enable_taint().await?;
            let transition_indicator = *next_config.enable_transition_indicator().await?;
            let gesture_transition = *next_config.enable_gesture_transition().await?;
            // Any of these experimental features requires the experimental
            // React channel ("react-experimental" builds).
            let react_channel = if taint || transition_indicator || gesture_transition {
                "-experimental"
            } else {
                ""
            };
            import_map.insert_exact_alias(
                rcstr!("react"),
                request_to_import_mapping(
                    app_dir.clone(),
                    format!("next/dist/compiled/react{react_channel}").into(),
                ),
            );
            import_map.insert_wildcard_alias(
                rcstr!("react/"),
                request_to_import_mapping(
                    app_dir.clone(),
                    format!("next/dist/compiled/react{react_channel}/*").into(),
                ),
            );
            import_map.insert_exact_alias(
                rcstr!("react-dom"),
                request_to_import_mapping(
                    app_dir.clone(),
                    format!("next/dist/compiled/react-dom{react_channel}").into(),
                ),
            );
            // The static rendering entry points always come from the
            // experimental channel, independent of `react_channel`.
            import_map.insert_exact_alias(
                rcstr!("react-dom/static"),
                request_to_import_mapping(
                    app_dir.clone(),
                    rcstr!("next/dist/compiled/react-dom-experimental/static"),
                ),
            );
            import_map.insert_exact_alias(
                rcstr!("react-dom/static.edge"),
                request_to_import_mapping(
                    app_dir.clone(),
                    rcstr!("next/dist/compiled/react-dom-experimental/static.edge"),
                ),
            );
            import_map.insert_exact_alias(
                rcstr!("react-dom/static.browser"),
                request_to_import_mapping(
                    app_dir.clone(),
                    rcstr!("next/dist/compiled/react-dom-experimental/static.browser"),
                ),
            );
            let react_client_package = get_react_client_package(next_config).await?;
            import_map.insert_exact_alias(
                rcstr!("react-dom/client"),
                request_to_import_mapping(
                    app_dir.clone(),
                    format!("next/dist/compiled/react-dom{react_channel}/{react_client_package}")
                        .into(),
                ),
            );
            import_map.insert_wildcard_alias(
                rcstr!("react-dom/"),
                request_to_import_mapping(
                    app_dir.clone(),
                    format!("next/dist/compiled/react-dom{react_channel}/*").into(),
                ),
            );
            // Webpack-flavoured RSC package names are redirected to the
            // turbopack equivalents.
            import_map.insert_wildcard_alias(
                rcstr!("react-server-dom-webpack/"),
                request_to_import_mapping(app_dir.clone(), rcstr!("react-server-dom-turbopack/*")),
            );
            import_map.insert_wildcard_alias(
                rcstr!("react-server-dom-turbopack/"),
                request_to_import_mapping(
                    app_dir.clone(),
                    format!("next/dist/compiled/react-server-dom-turbopack{react_channel}/*")
                        .into(),
                ),
            );
            // App Router replacements for pages-era modules.
            insert_exact_alias_or_js(
                &mut import_map,
                rcstr!("next/head"),
                request_to_import_mapping(
                    project_path.clone(),
                    rcstr!("next/dist/client/components/noop-head"),
                ),
            );
            insert_exact_alias_or_js(
                &mut import_map,
                rcstr!("next/dynamic"),
                request_to_import_mapping(
                    project_path.clone(),
                    rcstr!("next/dist/shared/lib/app-dynamic"),
                ),
            );
            insert_exact_alias_or_js(
                &mut import_map,
                rcstr!("next/link"),
                request_to_import_mapping(
                    project_path.clone(),
                    rcstr!("next/dist/client/app-dir/link"),
                ),
            );
            insert_exact_alias_or_js(
                &mut import_map,
                rcstr!("next/form"),
                request_to_import_mapping(
                    project_path.clone(),
                    rcstr!("next/dist/client/app-dir/form"),
                ),
            );
        }
        ClientContextType::Fallback => {}
        ClientContextType::Other => {}
    }
    // see https://github.com/vercel/next.js/blob/8013ef7372fc545d49dbd060461224ceb563b454/packages/next/src/build/webpack-config.ts#L1449-L1531
    insert_exact_alias_map(
        &mut import_map,
        project_path.clone(),
        fxindexmap! {
            rcstr!("server-only") => rcstr!("next/dist/compiled/server-only/index"),
            rcstr!("client-only") => rcstr!("next/dist/compiled/client-only/index"),
            rcstr!("next/dist/compiled/server-only") => rcstr!("next/dist/compiled/server-only/index"),
            rcstr!("next/dist/compiled/client-only") => rcstr!("next/dist/compiled/client-only/index"),
        },
    );
    insert_next_root_params_mapping(
        &mut import_map,
        next_config.enable_root_params(),
        Either::Right(ty.clone()),
        None,
    )
    .await?;
    match ty {
        ClientContextType::Pages { .. }
        | ClientContextType::App { .. }
        | ClientContextType::Fallback => {
            // Map "node:"-prefixed builtins to the bundled browser polyfills.
            for (original, alias) in NEXT_ALIASES.iter() {
                import_map.insert_exact_alias(
                    format!("node:{original}"),
                    request_to_import_mapping(project_path.clone(), alias.clone()),
                );
            }
        }
        ClientContextType::Other => {}
    }
    insert_turbopack_dev_alias(&mut import_map).await?;
    insert_instrumentation_client_alias(&mut import_map, project_path).await?;
    Ok(import_map.cell())
}
/// Computes the Next-specific client fallback import map, which provides
/// polyfills to Node.js externals.
#[turbo_tasks::function]
pub async fn get_next_client_fallback_import_map(ty: ClientContextType) -> Result<Vc<ImportMap>> {
    let mut import_map = ImportMap::empty();
    match ty {
        // Pages and App both alias bare Node builtin names (e.g. "path") to
        // the bundled browser polyfills, resolved from their context dir.
        ClientContextType::Pages {
            pages_dir: context_dir,
        }
        | ClientContextType::App {
            app_dir: context_dir,
        } => {
            for (original, alias) in NEXT_ALIASES.iter() {
                import_map.insert_exact_alias(
                    original.clone(),
                    request_to_import_mapping(context_dir.clone(), alias.clone()),
                );
            }
        }
        ClientContextType::Fallback => {}
        ClientContextType::Other => {}
    }
    insert_turbopack_dev_alias(&mut import_map).await?;
    Ok(import_map.cell())
}
/// Computes the Next-specific server-side import map.
#[turbo_tasks::function]
pub async fn get_next_server_import_map(
    project_path: FileSystemPath,
    ty: ServerContextType,
    next_config: Vc<NextConfig>,
    next_mode: Vc<NextMode>,
    execution_context: Vc<ExecutionContext>,
    collected_root_params: Option<Vc<CollectedRootParams>>,
) -> Result<Vc<ImportMap>> {
    let mut import_map = ImportMap::empty();
    insert_next_shared_aliases(
        &mut import_map,
        project_path.clone(),
        execution_context,
        next_config,
        next_mode,
        false,
    )
    .await?;
    // User-configured aliases; no extra resolve conditions on the server.
    insert_alias_option(
        &mut import_map,
        &project_path,
        next_config.resolve_alias_options(),
        [],
    )
    .await?;
    // Mapping that marks a request as a traced CommonJS external: resolved by
    // Node.js at runtime but still traced for output file collection.
    let external = ImportMapping::External(None, ExternalType::CommonJs, ExternalTraced::Traced)
        .resolved_cell();
    import_map.insert_exact_alias(rcstr!("next/dist/server/require-hook"), external);
    match ty {
        ServerContextType::Pages { .. } | ServerContextType::PagesApi { .. } => {
            // In pages, react / react-dom / styled-jsx are kept external
            // (unbundled) CommonJS requests.
            import_map.insert_exact_alias(rcstr!("react"), external);
            import_map.insert_wildcard_alias(rcstr!("react/"), external);
            import_map.insert_exact_alias(rcstr!("react-dom"), external);
            import_map.insert_exact_alias(rcstr!("react-dom/client"), external);
            import_map.insert_wildcard_alias(rcstr!("react-dom/"), external);
            import_map.insert_exact_alias(rcstr!("styled-jsx"), external);
            import_map.insert_exact_alias(
                rcstr!("styled-jsx/style"),
                ImportMapping::External(
                    Some(rcstr!("styled-jsx/style.js")),
                    ExternalType::CommonJs,
                    ExternalTraced::Traced,
                )
                .resolved_cell(),
            );
            import_map.insert_wildcard_alias(rcstr!("styled-jsx/"), external);
            // TODO: we should not bundle next/dist/build/utils in the pages renderer at all
            import_map.insert_wildcard_alias(rcstr!("next/dist/build/utils"), external);
        }
        ServerContextType::AppSSR { .. }
        | ServerContextType::AppRSC { .. }
        | ServerContextType::AppRoute { .. } => {
            // App Router replacements for pages-era modules.
            insert_exact_alias_or_js(
                &mut import_map,
                rcstr!("next/head"),
                request_to_import_mapping(
                    project_path.clone(),
                    rcstr!("next/dist/client/components/noop-head"),
                ),
            );
            insert_exact_alias_or_js(
                &mut import_map,
                rcstr!("next/dynamic"),
                request_to_import_mapping(
                    project_path.clone(),
                    rcstr!("next/dist/shared/lib/app-dynamic"),
                ),
            );
            insert_exact_alias_or_js(
                &mut import_map,
                rcstr!("next/link"),
                request_to_import_mapping(
                    project_path.clone(),
                    rcstr!("next/dist/client/app-dir/link"),
                ),
            );
            insert_exact_alias_or_js(
                &mut import_map,
                rcstr!("next/form"),
                request_to_import_mapping(
                    project_path.clone(),
                    rcstr!("next/dist/client/app-dir/form"),
                ),
            );
        }
        ServerContextType::Middleware { .. } | ServerContextType::Instrumentation { .. } => {}
    }
    insert_next_server_special_aliases(
        &mut import_map,
        project_path.clone(),
        ty,
        NextRuntime::NodeJs,
        next_config,
        collected_root_params,
    )
    .await?;
    Ok(import_map.cell())
}
/// Computes the Next-specific edge-side import map.
#[turbo_tasks::function]
pub async fn get_next_edge_import_map(
    project_path: FileSystemPath,
    ty: ServerContextType,
    next_config: Vc<NextConfig>,
    next_mode: Vc<NextMode>,
    execution_context: Vc<ExecutionContext>,
    collected_root_params: Option<Vc<CollectedRootParams>>,
) -> Result<Vc<ImportMap>> {
    let mut import_map = ImportMap::empty();
    // https://github.com/vercel/next.js/blob/786ef25e529e1fb2dda398aebd02ccbc8d0fb673/packages/next/src/build/webpack-config.ts#L815-L861
    // Alias next/dist imports to next/dist/esm assets
    insert_wildcard_alias_map(
        &mut import_map,
        project_path.clone(),
        fxindexmap! {
            rcstr!("next/dist/build/") => rcstr!("next/dist/esm/build/*"),
            rcstr!("next/dist/client/") => rcstr!("next/dist/esm/client/*"),
            rcstr!("next/dist/shared/") => rcstr!("next/dist/esm/shared/*"),
            rcstr!("next/dist/pages/") => rcstr!("next/dist/esm/pages/*"),
            rcstr!("next/dist/lib/") => rcstr!("next/dist/esm/lib/*"),
            rcstr!("next/dist/server/") => rcstr!("next/dist/esm/server/*"),
            rcstr!("next/dist/api/") => rcstr!("next/dist/esm/api/*"),
        },
    );
    // Alias the usage of next public APIs
    insert_exact_alias_map(
        &mut import_map,
        project_path.clone(),
        fxindexmap! {
            rcstr!("next/app") => rcstr!("next/dist/api/app"),
            rcstr!("next/document") => rcstr!("next/dist/api/document"),
            rcstr!("next/dynamic") => rcstr!("next/dist/api/dynamic"),
            rcstr!("next/form") => rcstr!("next/dist/api/form"),
            rcstr!("next/head") => rcstr!("next/dist/api/head"),
            rcstr!("next/headers") => rcstr!("next/dist/api/headers"),
            rcstr!("next/image") => rcstr!("next/dist/api/image"),
            rcstr!("next/link") => rcstr!("next/dist/api/link"),
            rcstr!("next/navigation") => rcstr!("next/dist/api/navigation"),
            rcstr!("next/router") => rcstr!("next/dist/api/router"),
            rcstr!("next/script") => rcstr!("next/dist/api/script"),
            rcstr!("next/server") => rcstr!("next/dist/api/server"),
            rcstr!("next/og") => rcstr!("next/dist/api/og"),
            // Alias built-in @vercel/og to edge bundle for edge runtime
            rcstr!("next/dist/compiled/@vercel/og/index.node.js") => rcstr!("next/dist/compiled/@vercel/og/index.edge.js"),
        },
    );
    // Shared aliases, edge variant (last arg `true`).
    insert_next_shared_aliases(
        &mut import_map,
        project_path.clone(),
        execution_context,
        next_config,
        next_mode,
        true,
    )
    .await?;
    insert_optimized_module_aliases(&mut import_map, project_path.clone()).await?;
    insert_alias_option(
        &mut import_map,
        &project_path,
        next_config.resolve_alias_options(),
        [],
    )
    .await?;
    match &ty {
        ServerContextType::Pages { .. }
        | ServerContextType::PagesApi { .. }
        | ServerContextType::Middleware { .. }
        | ServerContextType::Instrumentation { .. } => {}
        ServerContextType::AppSSR { .. }
        | ServerContextType::AppRSC { .. }
        | ServerContextType::AppRoute { .. } => {
            // App Router replacements for pages-era modules (note: no
            // next/form alias here, unlike the node.js server map).
            insert_exact_alias_or_js(
                &mut import_map,
                rcstr!("next/head"),
                request_to_import_mapping(
                    project_path.clone(),
                    rcstr!("next/dist/client/components/noop-head"),
                ),
            );
            insert_exact_alias_or_js(
                &mut import_map,
                rcstr!("next/dynamic"),
                request_to_import_mapping(
                    project_path.clone(),
                    rcstr!("next/dist/shared/lib/app-dynamic"),
                ),
            );
            insert_exact_alias_or_js(
                &mut import_map,
                rcstr!("next/link"),
                request_to_import_mapping(
                    project_path.clone(),
                    rcstr!("next/dist/client/app-dir/link"),
                ),
            );
        }
    }
    insert_next_server_special_aliases(
        &mut import_map,
        project_path.clone(),
        ty.clone(),
        NextRuntime::Edge,
        next_config,
        collected_root_params,
    )
    .await?;
    // Look for where 'server/web/globals.ts` are imported to find out corresponding
    // context
    match ty {
        // Exhaustive on purpose: every current context gets the unsupported
        // node-internal aliases; new variants must opt in explicitly.
        ServerContextType::AppSSR { .. }
        | ServerContextType::AppRSC { .. }
        | ServerContextType::AppRoute { .. }
        | ServerContextType::Middleware { .. }
        | ServerContextType::Instrumentation { .. }
        | ServerContextType::Pages { .. }
        | ServerContextType::PagesApi { .. } => {
            insert_unsupported_node_internal_aliases(&mut import_map).await?;
        }
    }
    Ok(import_map.cell())
}
/// Computes the Next-specific server-side and edge-side fallback import map.
#[turbo_tasks::function]
pub async fn get_next_edge_and_server_fallback_import_map(
    project_path: FileSystemPath,
    runtime: NextRuntime,
) -> Result<Vc<ImportMap>> {
    // @opentelemetry/api should prefer the application's local copy; the
    // precompiled copy is only a fallback, hence it lives in the fallback
    // import map. On node.js it stays a traced CommonJS external.
    let otel_mapping = match runtime {
        NextRuntime::Edge => request_to_import_mapping(
            project_path,
            rcstr!("next/dist/compiled/@opentelemetry/api"),
        ),
        NextRuntime::NodeJs => external_request_to_cjs_import_mapping(
            project_path,
            rcstr!("next/dist/compiled/@opentelemetry/api"),
        ),
    };
    let mut fallback_import_map = ImportMap::empty();
    fallback_import_map.insert_exact_alias(
        rcstr!("@opentelemetry/api"),
        ImportMapping::Alternatives(vec![otel_mapping]).resolved_cell(),
    );
    Ok(fallback_import_map.cell())
}
/// Insert default aliases for node.js internals so that importing them raises
/// an "unsupported runtime" error on the edge runtime. Users may provide their
/// own polyfills by configuring an alias.
async fn insert_unsupported_node_internal_aliases(import_map: &mut ImportMap) -> Result<()> {
    let replacer = NextEdgeUnsupportedModuleReplacer::new().to_resolved().await?;
    let unsupported_mapping = ImportMapping::Dynamic(ResolvedVc::upcast(replacer)).resolved_cell();
    // One shared replacer instance serves every unsupported module name.
    for module in EDGE_UNSUPPORTED_NODE_INTERNALS.iter() {
        import_map.insert_alias(AliasPattern::exact(module.clone()), unsupported_mapping);
    }
    Ok(())
}
/// Returns the (currently empty) glob-based resolved map for the client.
pub fn get_next_client_resolved_map(
    _context: FileSystemPath,
    _root: FileSystemPath,
    _mode: NextMode,
) -> Vc<ResolvedMap> {
    // No glob mappings are registered at present; all parameters are unused.
    ResolvedMap {
        by_glob: Vec::new(),
    }
    .cell()
}
// Mapping from Node.js builtin module names to the browser polyfills bundled
// with Next.js; used for client fallback and "node:"-prefixed aliases above.
static NEXT_ALIASES: LazyLock<[(RcStr, RcStr); 23]> = LazyLock::new(|| {
    [
        (rcstr!("assert"), rcstr!("next/dist/compiled/assert")),
        (rcstr!("buffer"), rcstr!("next/dist/compiled/buffer")),
        (
            rcstr!("constants"),
            rcstr!("next/dist/compiled/constants-browserify"),
        ),
        (
            rcstr!("crypto"),
            rcstr!("next/dist/compiled/crypto-browserify"),
        ),
        (
            rcstr!("domain"),
            rcstr!("next/dist/compiled/domain-browser"),
        ),
        (rcstr!("http"), rcstr!("next/dist/compiled/stream-http")),
        (
            rcstr!("https"),
            rcstr!("next/dist/compiled/https-browserify"),
        ),
        (rcstr!("os"), rcstr!("next/dist/compiled/os-browserify")),
        (rcstr!("path"), rcstr!("next/dist/compiled/path-browserify")),
        (rcstr!("punycode"), rcstr!("next/dist/compiled/punycode")),
        (
            rcstr!("process"),
            rcstr!("next/dist/build/polyfills/process"),
        ),
        (
            rcstr!("querystring"),
            rcstr!("next/dist/compiled/querystring-es3"),
        ),
        (
            rcstr!("stream"),
            rcstr!("next/dist/compiled/stream-browserify"),
        ),
        (
            rcstr!("string_decoder"),
            rcstr!("next/dist/compiled/string_decoder"),
        ),
        // "sys" is a legacy alias of "util" in Node.js, hence the same polyfill.
        (rcstr!("sys"), rcstr!("next/dist/compiled/util")),
        (
            rcstr!("timers"),
            rcstr!("next/dist/compiled/timers-browserify"),
        ),
        (rcstr!("tty"), rcstr!("next/dist/compiled/tty-browserify")),
        (rcstr!("url"), rcstr!("next/dist/compiled/native-url")),
        (rcstr!("util"), rcstr!("next/dist/compiled/util")),
        (rcstr!("vm"), rcstr!("next/dist/compiled/vm-browserify")),
        (rcstr!("zlib"), rcstr!("next/dist/compiled/browserify-zlib")),
        (rcstr!("events"), rcstr!("next/dist/compiled/events")),
        (
            rcstr!("setImmediate"),
            rcstr!("next/dist/compiled/setimmediate"),
        ),
    ]
});
/// Inserts aliases that only apply to server contexts (pages, app SSR/RSC,
/// routes, middleware, instrumentation): runtime-dependent externals for
/// `@vercel/og`, the `ReactDOMServerPages` alternatives, styled-jsx pinning,
/// vendored React (via `rsc_aliases`), the `server-only`/`client-only` stubs,
/// the root-params mapping and the devtools overlay shim.
async fn insert_next_server_special_aliases(
    import_map: &mut ImportMap,
    project_path: FileSystemPath,
    ty: ServerContextType,
    runtime: NextRuntime,
    next_config: Vc<NextConfig>,
    collected_root_params: Option<Vc<CollectedRootParams>>,
) -> Result<()> {
    // On Node.js these requests stay external (traced CJS); the Edge runtime
    // cannot load externals, so there they are bundled via a normal import.
    let external_cjs_if_node = move |context_dir: FileSystemPath, request: RcStr| match runtime {
        NextRuntime::Edge => request_to_import_mapping(context_dir, request),
        NextRuntime::NodeJs => external_request_to_cjs_import_mapping(context_dir, request),
    };
    // Same idea, but the Node.js external is treated as an ES module.
    let external_esm_if_node = move |context_dir: FileSystemPath, request: RcStr| match runtime {
        NextRuntime::Edge => request_to_import_mapping(context_dir, request),
        NextRuntime::NodeJs => external_request_to_esm_import_mapping(context_dir, request),
    };
    import_map.insert_exact_alias(
        rcstr!("next/dist/compiled/@vercel/og/index.node.js"),
        external_esm_if_node(
            project_path.clone(),
            rcstr!("next/dist/compiled/@vercel/og/index.node.js"),
        ),
    );
    // Prefer the edge build of react-dom/server, falling back to the browser
    // build when the edge entrypoint is not resolvable.
    import_map.insert_exact_alias(
        rcstr!("next/dist/server/ReactDOMServerPages"),
        ImportMapping::Alternatives(vec![
            request_to_import_mapping(project_path.clone(), rcstr!("react-dom/server.edge")),
            request_to_import_mapping(project_path.clone(), rcstr!("react-dom/server.browser")),
        ])
        .resolved_cell(),
    );
    match &ty {
        ServerContextType::Pages { .. } | ServerContextType::PagesApi { .. } => {}
        // the logic closely follows the one in createRSCAliases in webpack-config.ts
        ServerContextType::AppSSR { app_dir }
        | ServerContextType::AppRSC { app_dir, .. }
        | ServerContextType::AppRoute { app_dir, .. } => {
            // Pin styled-jsx (exact and wildcard) to the copy resolvable from
            // the `next` package so app and framework share one instance.
            let next_package = get_next_package(app_dir.clone()).await?;
            import_map.insert_exact_alias(
                rcstr!("styled-jsx"),
                request_to_import_mapping(next_package.clone(), rcstr!("styled-jsx")),
            );
            import_map.insert_wildcard_alias(
                rcstr!("styled-jsx/"),
                request_to_import_mapping(next_package.clone(), rcstr!("styled-jsx/*")),
            );
            rsc_aliases(
                import_map,
                project_path.clone(),
                ty.clone(),
                runtime,
                next_config,
            )
            .await?;
        }
        ServerContextType::Middleware { .. } | ServerContextType::Instrumentation { .. } => {
            rsc_aliases(
                import_map,
                project_path.clone(),
                ty.clone(),
                runtime,
                next_config,
            )
            .await?;
        }
    }
    // see https://github.com/vercel/next.js/blob/8013ef7372fc545d49dbd060461224ceb563b454/packages/next/src/build/webpack-config.ts#L1449-L1531
    // Sets runtime aliases for imports of `server-only`/`client-only`.
    // Depending on the context, each resolves either to the no-op stub (where
    // the import is allowed) or to the variant that throws a runtime error.
    // This works in combination with the build-time error, see
    // https://github.com/vercel/next.js/blob/0060de1c4905593ea875fa7250d4b5d5ce10897d/packages/next-swc/crates/next-core/src/next_server/context.rs#L103
    match &ty {
        ServerContextType::Pages { .. } => {
            insert_exact_alias_map(
                import_map,
                project_path.clone(),
                fxindexmap! {
                    rcstr!("server-only") => rcstr!("next/dist/compiled/server-only/empty"),
                    rcstr!("client-only") => rcstr!("next/dist/compiled/client-only/index"),
                    rcstr!("next/dist/compiled/server-only") => rcstr!("next/dist/compiled/server-only/empty"),
                    rcstr!("next/dist/compiled/client-only") => rcstr!("next/dist/compiled/client-only/index"),
                },
            );
        }
        ServerContextType::PagesApi { .. }
        | ServerContextType::AppRSC { .. }
        | ServerContextType::AppRoute { .. }
        | ServerContextType::Middleware { .. }
        | ServerContextType::Instrumentation { .. } => {
            insert_exact_alias_map(
                import_map,
                project_path.clone(),
                fxindexmap! {
                    rcstr!("server-only") => rcstr!("next/dist/compiled/server-only/empty"),
                    rcstr!("client-only") => rcstr!("next/dist/compiled/client-only/error"),
                    rcstr!("next/dist/compiled/server-only") => rcstr!("next/dist/compiled/server-only/empty"),
                    rcstr!("next/dist/compiled/client-only") => rcstr!("next/dist/compiled/client-only/error"),
                },
            );
        }
        ServerContextType::AppSSR { .. } => {
            insert_exact_alias_map(
                import_map,
                project_path.clone(),
                fxindexmap! {
                    rcstr!("server-only") => rcstr!("next/dist/compiled/server-only/index"),
                    rcstr!("client-only") => rcstr!("next/dist/compiled/client-only/index"),
                    rcstr!("next/dist/compiled/server-only") => rcstr!("next/dist/compiled/server-only/index"),
                    rcstr!("next/dist/compiled/client-only") => rcstr!("next/dist/compiled/client-only/index"),
                },
            );
        }
    }
    insert_next_root_params_mapping(
        import_map,
        next_config.enable_root_params(),
        Either::Left(ty),
        collected_root_params,
    )
    .await?;
    // `@vercel/og` resolves to the image-response shim; kept as a CJS
    // external on Node.js (see `external_cjs_if_node` above).
    import_map.insert_exact_alias(
        rcstr!("@vercel/og"),
        external_cjs_if_node(
            project_path.clone(),
            rcstr!("next/dist/server/og/image-response"),
        ),
    );
    import_map.insert_exact_alias(
        rcstr!("next/dist/compiled/next-devtools"),
        request_to_import_mapping(
            project_path.clone(),
            rcstr!("next/dist/next-devtools/dev-overlay.shim.js"),
        ),
    );
    Ok(())
}
/// Selects the `react-dom` client entrypoint flavor: the `profiling` build
/// when React production profiling is enabled in the Next.js config,
/// otherwise the regular `client` build.
async fn get_react_client_package(next_config: Vc<NextConfig>) -> Result<&'static str> {
    let profiling_enabled = *next_config.enable_react_production_profiling().await?;
    Ok(if profiling_enabled {
        "profiling"
    } else {
        "client"
    })
}
/// Aliases `react`, `react-dom` and `react-server-dom-webpack`/`-turbopack`
/// (including subpaths) to the builds vendored inside the `next` package.
/// Which build is chosen depends on the runtime (Node.js vs. Edge) and on
/// whether the React server condition applies to this context.
///
/// Use createVendoredReactAliases in
/// file:///./../../../packages/next/src/build/create-compiler-aliases.ts
/// as the source of truth.
async fn apply_vendored_react_aliases_server(
    import_map: &mut ImportMap,
    project_path: FileSystemPath,
    ty: ServerContextType,
    runtime: NextRuntime,
    next_config: Vc<NextConfig>,
) -> Result<()> {
    let taint = *next_config.enable_taint().await?;
    let transition_indicator = *next_config.enable_transition_indicator().await?;
    let gesture_transition = *next_config.enable_gesture_transition().await?;
    // Any of these experimental features requires the experimental React channel.
    let react_channel = if taint || transition_indicator || gesture_transition {
        "-experimental"
    } else {
        ""
    };
    let react_condition = if ty.should_use_react_server_condition() {
        "server"
    } else {
        "client"
    };
    // ✅ Correct alias
    // ❌ Incorrect alias i.e. importing this entrypoint should throw an error.
    // ❔ Alias that may produce correct code in certain conditions. Keep until
    // react-markup is available.
    let mut react_alias = FxIndexMap::default();
    if runtime == NextRuntime::NodeJs && react_condition == "client" {
        react_alias.extend(fxindexmap! {
            // file:///./../../../packages/next/src/compiled/react/package.json
            rcstr!("react") => /* ✅ */ rcstr!("next/dist/server/route-modules/app-page/vendored/ssr/react"),
            rcstr!("react/compiler-runtime") => /* ✅ */ rcstr!("next/dist/server/route-modules/app-page/vendored/ssr/react-compiler-runtime"),
            rcstr!("react/jsx-dev-runtime") => /* ✅ */ rcstr!("next/dist/server/route-modules/app-page/vendored/ssr/react-jsx-dev-runtime"),
            rcstr!("react/jsx-runtime") => /* ✅ */ rcstr!("next/dist/server/route-modules/app-page/vendored/ssr/react-jsx-runtime"),
            // file:///./../../../packages/next/src/compiled/react-dom/package.json
            rcstr!("react-dom") => /* ✅ */ rcstr!("next/dist/server/route-modules/app-page/vendored/ssr/react-dom"),
            rcstr!("react-dom/client") => /* ❔ */ format!("next/dist/compiled/react-dom{react_channel}/client").into(),
            rcstr!("react-dom/server") => /* ❔ */ format!("next/dist/compiled/react-dom{react_channel}/server.node").into(),
            rcstr!("react-dom/server.browser") => /* ❔ */ format!("next/dist/compiled/react-dom{react_channel}/server.browser").into(),
            // TODO: Use build without legacy APIs
            rcstr!("react-dom/server.edge") => /* ❔ */ format!("next/dist/compiled/react-dom{react_channel}/server.edge").into(),
            rcstr!("react-dom/static") => /* ❔ */ format!("next/dist/compiled/react-dom{react_channel}/static.node").into(),
            rcstr!("react-dom/static.browser") => /* ❔ */ format!("next/dist/compiled/react-dom{react_channel}/static.browser").into(),
            rcstr!("react-dom/static.edge") => /* ❔ */ format!("next/dist/compiled/react-dom{react_channel}/static.edge").into(),
            // file:///./../../../packages/next/src/compiled/react-server-dom-webpack/package.json
            rcstr!("react-server-dom-webpack/client") => /* ✅ */ rcstr!("next/dist/server/route-modules/app-page/vendored/ssr/react-server-dom-turbopack-client"),
            rcstr!("react-server-dom-webpack/server") => /* ❌ */ format!("next/dist/compiled/react-server-dom-turbopack{react_channel}/server.node").into(),
            rcstr!("react-server-dom-webpack/server.node") => /* ❌ */ format!("next/dist/compiled/react-server-dom-turbopack{react_channel}/server.node").into(),
            rcstr!("react-server-dom-webpack/static") => /* ❌ */ format!("next/dist/compiled/react-server-dom-turbopack{react_channel}/static.node").into(),
            rcstr!("react-server-dom-turbopack/client") => /* ✅ */ rcstr!("next/dist/server/route-modules/app-page/vendored/ssr/react-server-dom-turbopack-client"),
            rcstr!("react-server-dom-turbopack/server") => /* ❌ */ format!("next/dist/compiled/react-server-dom-turbopack{react_channel}/server.node").into(),
            rcstr!("react-server-dom-turbopack/server.node") => /* ❌ */ format!("next/dist/compiled/react-server-dom-turbopack{react_channel}/server.node").into(),
            rcstr!("react-server-dom-turbopack/static.edge") => /* ❌ */ format!("next/dist/compiled/react-server-dom-turbopack{react_channel}/static.edge").into(),
        })
    } else if runtime == NextRuntime::NodeJs && react_condition == "server" {
        react_alias.extend(fxindexmap! {
            // file:///./../../../packages/next/src/compiled/react/package.json
            rcstr!("react") => /* ✅ */ rcstr!("next/dist/server/route-modules/app-page/vendored/rsc/react"),
            rcstr!("react/compiler-runtime") => /* ✅ */ rcstr!("next/dist/server/route-modules/app-page/vendored/rsc/react-compiler-runtime"),
            rcstr!("react/jsx-dev-runtime") => /* ✅ */ rcstr!("next/dist/server/route-modules/app-page/vendored/rsc/react-jsx-dev-runtime"),
            rcstr!("react/jsx-runtime") => /* ✅ */ rcstr!("next/dist/server/route-modules/app-page/vendored/rsc/react-jsx-runtime"),
            // file:///./../../../packages/next/src/compiled/react-dom/package.json
            rcstr!("react-dom") => /* ✅ */ rcstr!("next/dist/server/route-modules/app-page/vendored/rsc/react-dom"),
            rcstr!("react-dom/client") => /* ❔ */ format!("next/dist/compiled/react-dom{react_channel}/client").into(),
            rcstr!("react-dom/server") => /* ❔ */ format!("next/dist/compiled/react-dom{react_channel}/server.node").into(),
            rcstr!("react-dom/server.browser") => /* ❔ */ format!("next/dist/compiled/react-dom{react_channel}/server.browser").into(),
            // TODO: Use build without legacy APIs
            rcstr!("react-dom/server.edge") => /* ❔ */ format!("next/dist/compiled/react-dom{react_channel}/server.edge").into(),
            rcstr!("react-dom/static") => /* ❔ */ format!("next/dist/compiled/react-dom{react_channel}/static.node").into(),
            rcstr!("react-dom/static.browser") => /* ❔ */ format!("next/dist/compiled/react-dom{react_channel}/static.browser").into(),
            rcstr!("react-dom/static.edge") => /* ❔ */ format!("next/dist/compiled/react-dom{react_channel}/static.edge").into(),
            // file:///./../../../packages/next/src/compiled/react-server-dom-webpack/package.json
            rcstr!("react-server-dom-webpack/client") => /* ❔ */ format!("next/dist/compiled/react-server-dom-turbopack{react_channel}/client.node").into(),
            rcstr!("react-server-dom-webpack/server") => /* ✅ */ rcstr!("next/dist/server/route-modules/app-page/vendored/rsc/react-server-dom-turbopack-server"),
            rcstr!("react-server-dom-webpack/server.node") => /* ✅ */ rcstr!("next/dist/server/route-modules/app-page/vendored/rsc/react-server-dom-turbopack-server"),
            rcstr!("react-server-dom-webpack/static") => /* ✅ */ rcstr!("next/dist/server/route-modules/app-page/vendored/rsc/react-server-dom-turbopack-static"),
            rcstr!("react-server-dom-turbopack/client") => /* ❔ */ format!("next/dist/compiled/react-server-dom-turbopack{react_channel}/client.node").into(),
            rcstr!("react-server-dom-turbopack/server") => /* ✅ */ rcstr!("next/dist/server/route-modules/app-page/vendored/rsc/react-server-dom-turbopack-server"),
            rcstr!("react-server-dom-turbopack/server.node") => /* ✅ */ rcstr!("next/dist/server/route-modules/app-page/vendored/rsc/react-server-dom-turbopack-server"),
            rcstr!("react-server-dom-turbopack/static") => /* ✅ */ rcstr!("next/dist/server/route-modules/app-page/vendored/rsc/react-server-dom-turbopack-static"),
            // Needed to make `react-dom/server` work.
            // TODO: really?
            rcstr!("next/dist/compiled/react") => rcstr!("next/dist/compiled/react/index.js"),
        })
    } else if runtime == NextRuntime::Edge && react_condition == "client" {
        react_alias.extend(fxindexmap! {
            // file:///./../../../packages/next/src/compiled/react/package.json
            rcstr!("react") => /* ✅ */ format!("next/dist/compiled/react{react_channel}").into(),
            rcstr!("react/compiler-runtime") => /* ✅ */ format!("next/dist/compiled/react{react_channel}/compiler-runtime").into(),
            rcstr!("react/jsx-dev-runtime") => /* ✅ */ format!("next/dist/compiled/react{react_channel}/jsx-dev-runtime").into(),
            rcstr!("react/jsx-runtime") => /* ✅ */ format!("next/dist/compiled/react{react_channel}/jsx-runtime").into(),
            // file:///./../../../packages/next/src/compiled/react-dom/package.json
            rcstr!("react-dom") => /* ✅ */ format!("next/dist/compiled/react-dom{react_channel}").into(),
            rcstr!("react-dom/client") => /* ✅ */ format!("next/dist/compiled/react-dom{react_channel}/client").into(),
            rcstr!("react-dom/server") => /* ✅ */ format!("next/dist/compiled/react-dom{react_channel}/server.edge").into(),
            rcstr!("react-dom/server.browser") => /* ✅ */ format!("next/dist/compiled/react-dom{react_channel}/server.browser").into(),
            // TODO: Use build without legacy APIs
            rcstr!("react-dom/server.edge") => /* ✅ */ format!("next/dist/compiled/react-dom{react_channel}/server.edge").into(),
            rcstr!("react-dom/static") => /* ✅ */ format!("next/dist/compiled/react-dom{react_channel}/static.edge").into(),
            rcstr!("react-dom/static.browser") => /* ✅ */ format!("next/dist/compiled/react-dom{react_channel}/static.browser").into(),
            rcstr!("react-dom/static.edge") => /* ✅ */ format!("next/dist/compiled/react-dom{react_channel}/static.edge").into(),
            // file:///./../../../packages/next/src/compiled/react-server-dom-webpack/package.json
            rcstr!("react-server-dom-webpack/client") => /* ✅ */ format!("next/dist/compiled/react-server-dom-turbopack{react_channel}/client.edge").into(),
            rcstr!("react-server-dom-webpack/server") => /* ❌ */ format!("next/dist/compiled/react-server-dom-turbopack{react_channel}/server.edge").into(),
            rcstr!("react-server-dom-webpack/server.node") => /* ❌ */ format!("next/dist/compiled/react-server-dom-turbopack{react_channel}/server.node").into(),
            rcstr!("react-server-dom-webpack/static") => /* ❌ */ format!("next/dist/compiled/react-server-dom-turbopack{react_channel}/static.edge").into(),
            rcstr!("react-server-dom-turbopack/client") => /* ✅ */ format!("next/dist/compiled/react-server-dom-turbopack{react_channel}/client.edge").into(),
            rcstr!("react-server-dom-turbopack/server") => /* ❌ */ format!("next/dist/compiled/react-server-dom-turbopack{react_channel}/server.edge").into(),
            rcstr!("react-server-dom-turbopack/server.node") => /* ❌ */ format!("next/dist/compiled/react-server-dom-turbopack{react_channel}/server.node").into(),
            rcstr!("react-server-dom-turbopack/static") => /* ❌ */ format!("next/dist/compiled/react-server-dom-turbopack{react_channel}/static.edge").into(),
        })
    } else if runtime == NextRuntime::Edge && react_condition == "server" {
        react_alias.extend(fxindexmap! {
            // file:///./../../../packages/next/src/compiled/react/package.json
            rcstr!("react") => /* ✅ */ format!("next/dist/compiled/react{react_channel}/react.react-server").into(),
            rcstr!("react/compiler-runtime") => /* ❌ */ format!("next/dist/compiled/react{react_channel}/compiler-runtime").into(),
            rcstr!("react/jsx-dev-runtime") => /* ✅ */ format!("next/dist/compiled/react{react_channel}/jsx-dev-runtime.react-server").into(),
            rcstr!("react/jsx-runtime") => /* ✅ */ format!("next/dist/compiled/react{react_channel}/jsx-runtime.react-server").into(),
            // file:///./../../../packages/next/src/compiled/react-dom/package.json
            rcstr!("react-dom") => /* ✅ */ format!("next/dist/compiled/react-dom{react_channel}/react-dom.react-server").into(),
            rcstr!("react-dom/client") => /* ❌ */ format!("next/dist/compiled/react-dom{react_channel}/client").into(),
            rcstr!("react-dom/server") => /* ❌ */ format!("next/dist/compiled/react-dom{react_channel}/server.edge").into(),
            rcstr!("react-dom/server.browser") => /* ❌ */ format!("next/dist/compiled/react-dom{react_channel}/server.browser").into(),
            // TODO: Use build without legacy APIs
            rcstr!("react-dom/server.edge") => /* ❌ */ format!("next/dist/compiled/react-dom{react_channel}/server.edge").into(),
            rcstr!("react-dom/static") => /* ❌ */ format!("next/dist/compiled/react-dom{react_channel}/static.edge").into(),
            rcstr!("react-dom/static.browser") => /* ❌ */ format!("next/dist/compiled/react-dom{react_channel}/static.browser").into(),
            rcstr!("react-dom/static.edge") => /* ❌ */ format!("next/dist/compiled/react-dom{react_channel}/static.edge").into(),
            // file:///./../../../packages/next/src/compiled/react-server-dom-webpack/package.json
            rcstr!("react-server-dom-webpack/client") => /* ❔ */ format!("next/dist/compiled/react-server-dom-turbopack{react_channel}/client.edge").into(),
            rcstr!("react-server-dom-webpack/server") => /* ✅ */ format!("next/dist/compiled/react-server-dom-turbopack{react_channel}/server.edge").into(),
            rcstr!("react-server-dom-webpack/server.node") => /* ✅ */ format!("next/dist/compiled/react-server-dom-turbopack{react_channel}/server.node").into(),
            rcstr!("react-server-dom-webpack/static") => /* ✅ */ format!("next/dist/compiled/react-server-dom-turbopack{react_channel}/static.edge").into(),
            rcstr!("react-server-dom-turbopack/client") => /* ❔ */ format!("next/dist/compiled/react-server-dom-turbopack{react_channel}/client.edge").into(),
            rcstr!("react-server-dom-turbopack/server") => /* ✅ */ format!("next/dist/compiled/react-server-dom-turbopack{react_channel}/server.edge").into(),
            rcstr!("react-server-dom-turbopack/server.node") => /* ✅ */ format!("next/dist/compiled/react-server-dom-turbopack{react_channel}/server.node").into(),
            rcstr!("react-server-dom-turbopack/static") => /* ✅ */ format!("next/dist/compiled/react-server-dom-turbopack{react_channel}/static.edge").into(),
        });
        // NOTE(review): this fan-out of the `next/dist/compiled/react*`
        // specifiers onto the aliases chosen above only runs in the
        // Edge + react-server branch — confirm that this is intentional for
        // the other three branches.
        react_alias.extend(fxindexmap! {
            // This should just be `next/dist/compiled/react${react_channel}` but how to Rust.
            rcstr!("next/dist/compiled/react") => react_alias["react"].clone(),
            rcstr!("next/dist/compiled/react-experimental") => react_alias["react"].clone(),
            rcstr!("next/dist/compiled/react/compiler-runtime") => react_alias["react/compiler-runtime"].clone(),
            rcstr!("next/dist/compiled/react-experimental/compiler-runtime") => react_alias["react/compiler-runtime"].clone(),
            rcstr!("next/dist/compiled/react/jsx-dev-runtime") => react_alias["react/jsx-dev-runtime"].clone(),
            rcstr!("next/dist/compiled/react-experimental/jsx-dev-runtime") => react_alias["react/jsx-dev-runtime"].clone(),
            rcstr!("next/dist/compiled/react/jsx-runtime") => react_alias["react/jsx-runtime"].clone(),
            rcstr!("next/dist/compiled/react-experimental/jsx-runtime") => react_alias["react/jsx-runtime"].clone(),
            rcstr!("next/dist/compiled/react-dom") => react_alias["react-dom"].clone(),
            rcstr!("next/dist/compiled/react-dom-experimental") => react_alias["react-dom"].clone(),
        });
    }
    let react_client_package = get_react_client_package(next_config).await?;
    // Overrides any branch-specific `react-dom/client` alias above with the
    // compiled client (or profiling) build.
    react_alias.extend(fxindexmap! {
        rcstr!("react-dom/client") => RcStr::from(format!("next/dist/compiled/react-dom{react_channel}/{react_client_package}")),
    });
    let mut alias = react_alias;
    if react_condition == "server" {
        // This is used in the server runtime to import React Server Components.
        alias.extend(fxindexmap! {
            rcstr!("next/navigation") => rcstr!("next/dist/api/navigation.react-server"),
            rcstr!("next/link") => rcstr!("next/dist/client/app-dir/link.react-server"),
        });
    }
    insert_exact_alias_map(import_map, project_path, alias);
    Ok(())
}
/// Applies the vendored-React alias set for the given server context, then
/// adds the react-server variants of `next/navigation` and `next/link` when
/// the React server condition is active for `ty`.
///
/// NOTE(review): the navigation/link aliases here duplicate the ones added at
/// the end of `apply_vendored_react_aliases_server` (same condition, same
/// targets) — harmless, but redundant.
async fn rsc_aliases(
    import_map: &mut ImportMap,
    project_path: FileSystemPath,
    ty: ServerContextType,
    runtime: NextRuntime,
    next_config: Vc<NextConfig>,
) -> Result<()> {
    apply_vendored_react_aliases_server(
        import_map,
        project_path.clone(),
        ty.clone(),
        runtime,
        next_config,
    )
    .await?;
    let mut alias = FxIndexMap::default();
    if ty.should_use_react_server_condition() {
        // This is used in the server runtime to import React Server Components.
        alias.extend(fxindexmap! {
            rcstr!("next/navigation") => rcstr!("next/dist/api/navigation.react-server"),
            rcstr!("next/link") => rcstr!("next/dist/client/app-dir/link.react-server"),
        });
    }
    insert_exact_alias_map(import_map, project_path.clone(), alias);
    Ok(())
}
/// Returns the virtual module specifier used as the MDX import source
/// (resolved via `insert_alias_to_alternatives` in
/// `insert_next_shared_aliases`).
pub fn mdx_import_source_file() -> RcStr {
    let specifier = format!("{VIRTUAL_PACKAGE_NAME}/mdx-import-source");
    specifier.into()
}
/// Insert aliases for Next.js stubs of fetch, object-assign, and url
/// polyfill packages, so common dependencies resolve to the lighter builds
/// shipped with Next.js.
/// Keep in sync with getOptimizedModuleAliases in webpack-config.ts.
async fn insert_optimized_module_aliases(
    import_map: &mut ImportMap,
    project_path: FileSystemPath,
) -> Result<()> {
    insert_exact_alias_map(
        import_map,
        project_path,
        fxindexmap! {
            rcstr!("unfetch") => rcstr!("next/dist/build/polyfills/fetch/index.js"),
            rcstr!("isomorphic-unfetch") => rcstr!("next/dist/build/polyfills/fetch/index.js"),
            rcstr!("whatwg-fetch") => rcstr!("next/dist/build/polyfills/fetch/whatwg-fetch.js"),
            rcstr!("object-assign") => rcstr!("next/dist/build/polyfills/object-assign.js"),
            rcstr!("object.assign/auto") => rcstr!("next/dist/build/polyfills/object.assign/auto.js"),
            rcstr!("object.assign/implementation") => rcstr!("next/dist/build/polyfills/object.assign/implementation.js"),
            rcstr!("object.assign/polyfill") => rcstr!("next/dist/build/polyfills/object.assign/polyfill.js"),
            rcstr!("object.assign/shim") => rcstr!("next/dist/build/polyfills/object.assign/shim.js"),
            rcstr!("url") => rcstr!("next/dist/compiled/native-url"),
            rcstr!("node:url") => rcstr!("next/dist/compiled/native-url"),
        },
    );
    Ok(())
}
// Make sure to not add any external requests here.
/// Inserts aliases shared by all contexts: the MDX import-source fallbacks,
/// the virtual `@vercel/turbopack-next` package, the `next/font` replacers,
/// singleton pinning for next/react/react-dom/styled-jsx, the RSC loader
/// shims, and the user-configured image loader.
async fn insert_next_shared_aliases(
    import_map: &mut ImportMap,
    project_path: FileSystemPath,
    execution_context: Vc<ExecutionContext>,
    next_config: Vc<NextConfig>,
    next_mode: Vc<NextMode>,
    is_runtime_edge: bool,
) -> Result<()> {
    let package_root = next_js_fs().root().owned().await?;
    // MDX components are looked up in the project root first, then in `src/`,
    // then `@mdx-js/react`, finally the stub shipped with `@next/mdx`.
    insert_alias_to_alternatives(
        import_map,
        mdx_import_source_file(),
        vec![
            request_to_import_mapping(project_path.clone(), rcstr!("./mdx-components")),
            request_to_import_mapping(project_path.clone(), rcstr!("./src/mdx-components")),
            request_to_import_mapping(project_path.clone(), rcstr!("@mdx-js/react")),
            request_to_import_mapping(project_path.clone(), rcstr!("@next/mdx/mdx-components.js")),
        ],
    );
    insert_package_alias(
        import_map,
        &format!("{VIRTUAL_PACKAGE_NAME}/"),
        package_root,
    );
    // NOTE: `@next/font/local` has moved to a BeforeResolve Plugin, so it does not
    // have ImportMapping replacers here.
    //
    // TODO: Add BeforeResolve plugins for `@next/font/google`
    let next_font_google_replacer_mapping = ImportMapping::Dynamic(ResolvedVc::upcast(
        NextFontGoogleReplacer::new(project_path.clone())
            .to_resolved()
            .await?,
    ))
    .resolved_cell();
    import_map.insert_alias(
        // Request path from js via next-font swc transform
        AliasPattern::exact(rcstr!("next/font/google/target.css")),
        next_font_google_replacer_mapping,
    );
    import_map.insert_alias(
        // Request path from js via next-font swc transform
        AliasPattern::exact(rcstr!("@next/font/google/target.css")),
        next_font_google_replacer_mapping,
    );
    let fetch_client = next_config.fetch_client();
    import_map.insert_alias(
        AliasPattern::exact(rcstr!(
            "@vercel/turbopack-next/internal/font/google/cssmodule.module.css"
        )),
        ImportMapping::Dynamic(ResolvedVc::upcast(
            NextFontGoogleCssModuleReplacer::new(
                project_path.clone(),
                execution_context,
                next_mode,
                fetch_client,
            )
            .to_resolved()
            .await?,
        ))
        .resolved_cell(),
    );
    import_map.insert_alias(
        AliasPattern::exact(rcstr!(GOOGLE_FONTS_INTERNAL_PREFIX)),
        ImportMapping::Dynamic(ResolvedVc::upcast(
            NextFontGoogleFontFileReplacer::new(project_path.clone(), fetch_client)
                .to_resolved()
                .await?,
        ))
        .resolved_cell(),
    );
    // Pin packages that must resolve to a single copy across the build.
    let next_package = get_next_package(project_path.clone()).await?;
    import_map.insert_singleton_alias(rcstr!("@swc/helpers"), next_package.clone());
    import_map.insert_singleton_alias(rcstr!("styled-jsx"), next_package.clone());
    import_map.insert_singleton_alias(rcstr!("next"), project_path.clone());
    import_map.insert_singleton_alias(rcstr!("react"), project_path.clone());
    import_map.insert_singleton_alias(rcstr!("react-dom"), project_path.clone());
    // `react-dom/client` resolves to the profiling build when React
    // production profiling is enabled (see `get_react_client_package`).
    let react_client_package = get_react_client_package(next_config).await?;
    import_map.insert_exact_alias(
        rcstr!("react-dom/client"),
        request_to_import_mapping(
            project_path.clone(),
            format!("react-dom/{react_client_package}").into(),
        ),
    );
    import_map.insert_alias(
        // Make sure you can't import custom server as it'll cause all Next.js internals to be
        // bundled which doesn't work.
        AliasPattern::exact(rcstr!("next")),
        ImportMapping::Empty.resolved_cell(),
    );
    //https://github.com/vercel/next.js/blob/f94d4f93e4802f951063cfa3351dd5a2325724b3/packages/next/src/build/webpack-config.ts#L1196
    import_map.insert_exact_alias(
        rcstr!("setimmediate"),
        request_to_import_mapping(
            project_path.clone(),
            rcstr!("next/dist/compiled/setimmediate"),
        ),
    );
    // Internal `private-next-rsc-*` shims emitted by the RSC transforms.
    import_map.insert_exact_alias(
        rcstr!("private-next-rsc-server-reference"),
        request_to_import_mapping(
            project_path.clone(),
            rcstr!("next/dist/build/webpack/loaders/next-flight-loader/server-reference"),
        ),
    );
    import_map.insert_exact_alias(
        rcstr!("private-next-rsc-action-client-wrapper"),
        request_to_import_mapping(
            project_path.clone(),
            rcstr!("next/dist/build/webpack/loaders/next-flight-loader/action-client-wrapper"),
        ),
    );
    import_map.insert_exact_alias(
        rcstr!("private-next-rsc-action-validate"),
        request_to_import_mapping(
            project_path.clone(),
            rcstr!("next/dist/build/webpack/loaders/next-flight-loader/action-validate"),
        ),
    );
    import_map.insert_exact_alias(
        rcstr!("private-next-rsc-action-encryption"),
        request_to_import_mapping(
            project_path.clone(),
            rcstr!("next/dist/server/app-render/encryption"),
        ),
    );
    import_map.insert_exact_alias(
        rcstr!("private-next-rsc-cache-wrapper"),
        request_to_import_mapping(
            project_path.clone(),
            rcstr!("next/dist/build/webpack/loaders/next-flight-loader/cache-wrapper"),
        ),
    );
    import_map.insert_exact_alias(
        rcstr!("private-next-rsc-track-dynamic-import"),
        request_to_import_mapping(
            project_path.clone(),
            rcstr!("next/dist/build/webpack/loaders/next-flight-loader/track-dynamic-import"),
        ),
    );
    insert_turbopack_dev_alias(import_map).await?;
    insert_package_alias(
        import_map,
        "@vercel/turbopack-node/",
        turbopack_node::embed_js::embed_fs().root().owned().await?,
    );
    // A custom image loader (next.config images.loaderFile) replaces the
    // built-in image-loader module; on edge also the ESM variant.
    let image_config = next_config.image_config().await?;
    if let Some(loader_file) = image_config.loader_file.as_deref().map(RcStr::from) {
        import_map.insert_exact_alias(
            rcstr!("next/dist/shared/lib/image-loader"),
            request_to_import_mapping(project_path.clone(), loader_file.clone()),
        );
        if is_runtime_edge {
            import_map.insert_exact_alias(
                rcstr!("next/dist/esm/shared/lib/image-loader"),
                request_to_import_mapping(project_path.clone(), loader_file),
            );
        }
    }
    Ok(())
}
/// Resolves the directory containing the `next` package relative to
/// `context_directory`, failing with an error when it cannot be found.
pub async fn get_next_package(context_directory: FileSystemPath) -> Result<FileSystemPath> {
    let resolved = try_get_next_package(context_directory).owned().await?;
    resolved.context("Next.js package not found")
}
/// Issue emitted when the `next` package cannot be resolved from the inferred
/// workspace root (see `try_get_next_package`).
#[turbo_tasks::value(shared)]
struct MissingNextFolderIssue {
    // Directory from which resolution of `next/package.json` was attempted.
    path: FileSystemPath,
}
#[turbo_tasks::value_impl]
impl Issue for MissingNextFolderIssue {
    // Location reported for the issue: the directory resolution started from.
    #[turbo_tasks::function]
    fn file_path(&self) -> Vc<FileSystemPath> {
        self.path.clone().cell()
    }
    // Fatal: the build cannot proceed without the `next` package.
    fn severity(&self) -> IssueSeverity {
        IssueSeverity::Fatal
    }
    #[turbo_tasks::function]
    fn stage(&self) -> Vc<IssueStage> {
        IssueStage::Resolve.cell()
    }
    // Builds the multi-line, user-facing error message, including the system
    // path of the project directory when one is available.
    #[turbo_tasks::function]
    async fn title(&self) -> Result<Vc<StyledString>> {
        // Fall back to a placeholder when the virtual path has no
        // system-filesystem representation.
        let system_path = match to_sys_path(self.path.clone()).await? {
            Some(path) => path.to_str().unwrap_or("{unknown}").to_string(),
            _ => "{unknown}".to_string(),
        };
        Ok(StyledString::Stack(vec![
            StyledString::Line(vec![
                StyledString::Text(
                    "Error: Next.js inferred your workspace root, but it may not be correct.".into(),
                ),
            ]),
            StyledString::Line(vec![
                StyledString::Text("We couldn't find the Next.js package (".into()),
                StyledString::Strong("next/package.json".into()),
                StyledString::Text(") from the project directory: ".into()),
                StyledString::Strong(system_path.into()),
            ]),
            StyledString::Line(vec![
                StyledString::Text(" To fix this, set ".into()),
                StyledString::Code("turbopack.root".into()),
                StyledString::Text(
                    " in your Next.js config, or ensure the Next.js package is resolvable from this directory.".into(),
                ),
            ]),
            StyledString::Line(vec![
                StyledString::Text("Note: For security and performance reasons, files outside of the project directory will not be compiled.".into()),
            ]),
            StyledString::Line(vec![
                StyledString::Text("See ".into()),
                StyledString::Strong("https://nextjs.org/docs/app/api-reference/config/next-config-js/turbopack#root-directory".into()),
                StyledString::Text(" for more information.".into())
            ]),
        ])
        .cell())
    }
}
/// Resolves `next/package.json` from `context_directory` using Node.js CJS
/// resolution and returns the containing package directory. Emits a
/// `MissingNextFolderIssue` and returns `None` when resolution fails.
#[turbo_tasks::function]
pub async fn try_get_next_package(
    context_directory: FileSystemPath,
) -> Result<Vc<OptionFileSystemPath>> {
    let root = context_directory.root().owned().await?;
    let result = resolve(
        context_directory.clone(),
        ReferenceType::CommonJs(CommonJsReferenceSubType::Undefined),
        Request::parse(Pattern::Constant(rcstr!("next/package.json"))),
        node_cjs_resolve_options(root),
    );
    if let Some(source) = &*result.first_source().await? {
        // The package directory is the parent of the resolved package.json.
        Ok(Vc::cell(Some(source.ident().path().await?.parent())))
    } else {
        MissingNextFolderIssue {
            path: context_directory,
        }
        .resolved_cell()
        .emit();
        Ok(Vc::cell(None))
    }
}
/// Applies the user-configured resolve-alias map to `import_map`, evaluating
/// each subpath value under the given export `conditions`. Entries whose
/// value yields no result under these conditions are skipped.
pub async fn insert_alias_option<const N: usize>(
    import_map: &mut ImportMap,
    project_path: &FileSystemPath,
    alias_options: Vc<ResolveAliasMap>,
    conditions: [&'static str; N],
) -> Result<()> {
    let condition_set: BTreeMap<_, _> = conditions
        .into_iter()
        .map(|name| (name.into(), ConditionValue::Set))
        .collect();
    for (alias, value) in &alias_options.await? {
        let mapping = export_value_to_import_mapping(value, &condition_set, project_path);
        if let Some(mapping) = mapping {
            import_map.insert_alias(alias, mapping);
        }
    }
    Ok(())
}
/// Converts a single alias-config `SubpathValue` into an `ImportMapping`,
/// honoring the active export `conditions`. Returns `None` when the value
/// produces no results under these conditions; a single result maps directly,
/// multiple results become ordered alternatives.
fn export_value_to_import_mapping(
    value: &SubpathValue,
    conditions: &BTreeMap<RcStr, ConditionValue>,
    project_path: &FileSystemPath,
) -> Option<ResolvedVc<ImportMapping>> {
    let mut results = Vec::new();
    value.add_results(
        conditions,
        &ConditionValue::Unset,
        &mut FxHashMap::default(),
        &mut results,
    );
    match results.as_slice() {
        [] => None,
        [(single, _)] => Some(
            ImportMapping::PrimaryAlternative((*single).into(), Some(project_path.clone()))
                .resolved_cell(),
        ),
        multiple => {
            let alternatives = multiple
                .iter()
                .map(|(request, _)| {
                    ImportMapping::PrimaryAlternative((*request).into(), Some(project_path.clone()))
                        .resolved_cell()
                })
                .collect();
            Some(ImportMapping::Alternatives(alternatives).resolved_cell())
        }
    }
}
/// Registers every `(alias, request)` pair in `map` as an exact alias whose
/// target is resolved relative to `project_path`. Insertion order follows the
/// map's iteration order.
fn insert_exact_alias_map(
    import_map: &mut ImportMap,
    project_path: FileSystemPath,
    map: FxIndexMap<RcStr, RcStr>,
) {
    for (alias, target) in map {
        let mapping = request_to_import_mapping(project_path.clone(), target);
        import_map.insert_exact_alias(alias, mapping);
    }
}
/// Registers every `(alias, request)` pair in `map` as a wildcard alias whose
/// target is resolved relative to `project_path`. Insertion order follows the
/// map's iteration order.
fn insert_wildcard_alias_map(
    import_map: &mut ImportMap,
    project_path: FileSystemPath,
    map: FxIndexMap<RcStr, RcStr>,
) {
    for (alias, target) in map {
        let mapping = request_to_import_mapping(project_path.clone(), target);
        import_map.insert_wildcard_alias(alias, mapping);
    }
}
/// Inserts an exact alias that resolves to the first matching entry of
/// `alternatives`, tried in order.
fn insert_alias_to_alternatives<'a>(
    import_map: &mut ImportMap,
    alias: impl Into<RcStr> + 'a,
    alternatives: Vec<ResolvedVc<ImportMapping>>,
) {
    let mapping = ImportMapping::Alternatives(alternatives).resolved_cell();
    import_map.insert_exact_alias(alias.into(), mapping);
}
/// Maps every request starting with `prefix` onto a relative (`./*`) lookup
/// inside `package_root`.
fn insert_package_alias(import_map: &mut ImportMap, prefix: &str, package_root: FileSystemPath) {
    let mapping =
        ImportMapping::PrimaryAlternative(rcstr!("./*"), Some(package_root)).resolved_cell();
    import_map.insert_wildcard_alias(prefix, mapping);
}
/// Aliases `@vercel/turbopack-ecmascript-runtime/` to the runtime filesystem
/// embedded in the turbopack binary.
async fn insert_turbopack_dev_alias(import_map: &mut ImportMap) -> Result<()> {
    let runtime_root = turbopack_ecmascript_runtime::embed_fs()
        .root()
        .owned()
        .await?;
    insert_package_alias(
        import_map,
        "@vercel/turbopack-ecmascript-runtime/",
        runtime_root,
    );
    Ok(())
}
/// Handles instrumentation-client.ts bundling logic.
///
/// `private-next-instrumentation-client` resolves to the first existing
/// candidate (src/ variants before root-level, with and without the `.ts`
/// extension) and finally falls back to an ignored module when the project
/// has no instrumentation-client file.
async fn insert_instrumentation_client_alias(
    import_map: &mut ImportMap,
    project_path: FileSystemPath,
) -> Result<()> {
    insert_alias_to_alternatives(
        import_map,
        rcstr!("private-next-instrumentation-client"),
        vec![
            request_to_import_mapping(project_path.clone(), rcstr!("./src/instrumentation-client")),
            request_to_import_mapping(
                project_path.clone(),
                rcstr!("./src/instrumentation-client.ts"),
            ),
            request_to_import_mapping(project_path.clone(), rcstr!("./instrumentation-client")),
            request_to_import_mapping(project_path.clone(), rcstr!("./instrumentation-client.ts")),
            ImportMapping::Ignore.resolved_cell(),
        ],
    );
    Ok(())
}
/// Aliases both the bare specifier (e.g. `next/link`) and its `.js`-suffixed
/// form (`next/link.js`) to the same mapping.
fn insert_exact_alias_or_js(
    import_map: &mut ImportMap,
    pattern: RcStr,
    mapping: ResolvedVc<ImportMapping>,
) {
    let with_js_suffix = format!("{pattern}.js");
    import_map.insert_exact_alias(with_js_suffix, mapping);
    import_map.insert_exact_alias(pattern, mapping);
}
/// Creates a direct import mapping that resolves `request` within
/// `context_path`.
fn request_to_import_mapping(
    context_path: FileSystemPath,
    request: RcStr,
) -> ResolvedVc<ImportMapping> {
    let mapping = ImportMapping::PrimaryAlternative(request, Some(context_path));
    mapping.resolved_cell()
}
/// Creates an import mapping that keeps `request` as a traced CommonJS
/// external, looked up from `context_dir`.
fn external_request_to_cjs_import_mapping(
    context_dir: FileSystemPath,
    request: RcStr,
) -> ResolvedVc<ImportMapping> {
    let mapping = ImportMapping::PrimaryAlternativeExternal {
        name: Some(request),
        ty: ExternalType::CommonJs,
        traced: ExternalTraced::Traced,
        lookup_dir: context_dir,
    };
    mapping.resolved_cell()
}
/// Creates a direct import mapping to the result of resolving an external
/// request.
fn external_request_to_esm_import_mapping(
    context_dir: FileSystemPath,
    request: RcStr,
) -> ResolvedVc<ImportMapping> {
    // Same as the CJS variant above, but the external is imported as an
    // ECMAScript module at runtime.
    let mapping = ImportMapping::PrimaryAlternativeExternal {
        name: Some(request),
        ty: ExternalType::EcmaScriptModule,
        traced: ExternalTraced::Traced,
        lookup_dir: context_dir,
    };
    mapping.resolved_cell()
}
|
rust
|
github
|
https://github.com/vercel/next.js
|
crates/next-core/src/next_import_map.rs
|
"""
A general resource system.
Use the classes in this file to create a resource system that supports
registering of resource classes, lazy validation of resource attributes, and
resource caching. You would not typically have users create `Resource` instances
directly; instead, some factory would be responsible for creating resources,
and would probably contain a `ResourcePool` to manage the resource instances.
Resources have attributes whose names match the entries in `Resource.schema`.
When a resource attribute is accessed for the first time, data validation (and
possibly conversion) is applied and cached. Resource data is also loaded lazily
- when the first attribute is accessed.
Resources themselves are also cached, according to the `cache_size` argument of
`ResourcePool`.
Use the `ResourceWrapper` class to implement classes that provide a public API
for a resource. This extra layer is useful because the same resource type may
be available from several sources or in different formats. In this situation
you can implement multiple `Resource` subclasses, but wrap them with a single
`ResourceWrapper` class.
Another reason to use `ResourceWrapper` is when you have attributes on your
`Resource` that don't require its data to be loaded - instead, some attributes
can be derived directly from the resource's variables. If you provide properties
in your resource wrapper for these attributes, then the unnecessary resource
data load is avoided.
See the 'pets' unit test in tests/test_resources.py for a complete example.
"""
from rez.utils.data_utils import cached_property, AttributeForwardMeta, \
LazyAttributeMeta
from rez.config import config
from rez.exceptions import ResourceError
from rez.backport.lru_cache import lru_cache
from rez.utils.logging_ import print_debug
class Resource(object):
    """Abstract base class for a data resource.

    A resource is an object uniquely identified by a 'key' (the resource type),
    and a dict of variables. For example, a very simple banking system might
    have a resource type with key 'account.checking', and a single variable
    'account_owner' that uniquely identifies each checking account.

    Resources may have a schema, which describes the data associated with the
    resource. For example, a checking account might have a current balance (an
    integer) and a social security number (also an integer).

    Keys in a resource's schema are mapped onto the resource class. So a
    checking account instance 'account' would have attributes 'account.balance',
    'account.ssn' etc. Attributes are lazily validated, using the schema, on
    first access.

    A resource's data is loaded lazily, on first attribute access. This,
    combined with lazy attribute validation, means that many resources can be
    iterated, while potentially expensive operations (data loading, attribute
    validation) are put off as long as possible.

    Note:
        You can access the entire validated resource data dict using the
        `validated_data` function, and test full validation using `validate_data`.

    Attributes:
        key (str): Unique identifier of the resource type.
        schema (Schema): Schema for the resource data. Must validate a dict.
            Can be None, in which case the resource does not load any data.
        schema_error (Exception): The exception type to raise on key
            validation failure.
    """
    __metaclass__ = LazyAttributeMeta
    key = None
    schema = None
    schema_error = Exception

    @classmethod
    def normalize_variables(cls, variables):
        """Give subclasses a chance to standardize values for certain variables
        """
        return variables

    def __init__(self, variables=None):
        self.variables = self.normalize_variables(variables or {})

    @cached_property
    def handle(self):
        """Get the resource handle."""
        return ResourceHandle(self.key, self.variables)

    @cached_property
    def _data(self):
        # A None schema signifies the resource carries no data; `_load` is
        # never called in that case. The result is cached by cached_property.
        if not self.schema:
            return None
        data = self._load()
        if config.debug("resources"):
            print_debug("Loaded resource: %s" % str(self))
        return data

    def get(self, key, default=None):
        """Get the value of a resource variable."""
        return self.variables.get(key, default)

    def __str__(self):
        return "%s%r" % (self.key, self.variables)

    def __repr__(self):
        return "%s(%r)" % (self.__class__.__name__, self.variables)

    def __hash__(self):
        return hash((self.__class__, self.handle))

    def __eq__(self, other):
        # Guard against comparison with arbitrary objects; previously this
        # raised AttributeError when `other` had no `handle` attribute.
        if not isinstance(other, Resource):
            return NotImplemented
        return (self.handle == other.handle)

    def __ne__(self, other):
        # Python 2 does not derive __ne__ from __eq__; define it explicitly so
        # inequality agrees with equality.
        result = self.__eq__(other)
        if result is NotImplemented:
            return result
        return not result

    def _load(self):
        """Load the data associated with the resource.

        You are not expected to cache this data - the resource system does this
        for you.

        If `schema` is None, this signifies that the resource does not load any
        data. In this case you don't need to implement this function - it will
        never be called.

        Returns:
            dict.
        """
        raise NotImplementedError
class ResourceHandle(object):
    """A `Resource` handle.

    A handle uniquely identifies a resource. A handle can be stored and used
    with a `ResourcePool` to retrieve the same resource at a later date.
    """
    def __init__(self, key, variables=None):
        self.key = key
        self.variables = variables or {}

    def get(self, key, default=None):
        """Get the value of a resource variable."""
        return self.variables.get(key, default)

    def to_dict(self):
        """Serialize the contents of this resource handle to a dictionary
        representation.
        """
        return dict(key=self.key, variables=self.variables)

    @classmethod
    def from_dict(cls, d):
        """Return a `ResourceHandle` instance from a serialized dict

        This should ONLY be used with dicts created with ResourceHandle.to_dict;
        if you wish to create a "new" ResourceHandle, you should do it through
        PackageRepository.make_resource_handle
        """
        return cls(**d)

    def __str__(self):
        return str(self.to_dict())

    def __repr__(self):
        return "%s(%r, %r)" % (self.__class__.__name__, self.key, self.variables)

    def __eq__(self, other):
        # Guard against comparison with arbitrary objects; previously this
        # raised AttributeError when `other` had no `key` attribute.
        if not isinstance(other, ResourceHandle):
            return NotImplemented
        return (self.key == other.key) and (self.variables == other.variables)

    def __ne__(self, other):
        # Python 2 does not derive __ne__ from __eq__; define it explicitly so
        # inequality agrees with equality.
        result = self.__eq__(other)
        if result is NotImplemented:
            return result
        return not result

    def __hash__(self):
        # Variable values must be hashable for a handle to be hashable (they
        # are expected to be simple identifying values such as strings).
        return hash((self.key, frozenset(self.variables.items())))
class ResourcePool(object):
    """A resource pool.

    A resource pool manages a set of registered resource types, and acts as a
    resource cache. It will create any resource you ask for - typically
    resources are created via some factory class, which first checks for the
    existence of the resource before creating one from a pool.
    """
    def __init__(self, cache_size=None):
        """
        Args:
            cache_size (int): Maximum number of resources to keep cached;
                None means the cache is unbounded.
        """
        self.resource_classes = {}
        cache = lru_cache(maxsize=cache_size)
        self.cached_get_resource = cache(self._get_resource)

    def register_resource(self, resource_class):
        """Register a `Resource` subclass under its `key`.

        Re-registering the same class is a no-op; registering a different
        class under an already-used key raises `ResourceError`.
        """
        resource_key = resource_class.key
        assert issubclass(resource_class, Resource)
        assert resource_key is not None

        cls_ = self.resource_classes.get(resource_key)
        if cls_:
            if cls_ == resource_class:
                return  # already registered
            else:
                # Report the class names themselves. Using `__class__.__name__`
                # on a class (as before) yields the metaclass name (e.g.
                # 'type'), which made this message useless for debugging.
                raise ResourceError(
                    "Error registering resource class %s: Resource pool has "
                    "already registered %r to %s"
                    % (resource_class.__name__, resource_key, cls_.__name__))

        self.resource_classes[resource_key] = resource_class

    def get_resource_from_handle(self, resource_handle):
        """Get a (possibly cached) resource for the given handle."""
        return self.cached_get_resource(resource_handle)

    def clear_caches(self):
        """Drop all cached resources."""
        self.cached_get_resource.cache_clear()

    def get_resource_class(self, resource_key):
        """Return the resource class registered under `resource_key`.

        Raises:
            ResourceError: If no class is registered under the key.
        """
        resource_class = self.resource_classes.get(resource_key)
        if resource_class is None:
            raise ResourceError("Error getting resource from pool: Unknown "
                                "resource type %r" % resource_key)
        return resource_class

    def _get_resource(self, resource_handle):
        # Uncached resource construction - only reached via the lru_cache
        # wrapper set up in __init__.
        resource_class = self.get_resource_class(resource_handle.key)
        return resource_class(resource_handle.variables)
class ResourceWrapper(object):
    """An object that wraps a resource instance.

    A resource wrapper is useful for two main reasons. First, we can wrap
    several different resources with the one class, giving them a common
    interface. This is useful when the same resource can be loaded from various
    different sources (perhaps a database and the filesystem for example), and
    further common functionality needs to be supplied.

    Second, some resource attributes can be derived from the resource's
    variables, which means the resource's data doesn't need to be loaded to get
    these attributes. The wrapper can provide its own properties that do this,
    avoiding unnecessary data loads.

    You must subclass this class and provide `keys` - the list of attributes in
    the resource that you want to expose in the wrapper. The `schema_keys`
    function is provided to help get a list of keys from a resource schema.
    """
    __metaclass__ = AttributeForwardMeta
    keys = None

    def __init__(self, resource):
        self.wrapped = resource

    @property
    def resource(self):
        # The underlying `Resource` instance this wrapper forwards to.
        return self.wrapped

    @property
    def handle(self):
        # Delegate to the wrapped resource's handle.
        return self.resource.handle

    @property
    def data(self):
        # Raw (lazily loaded) data of the wrapped resource.
        return self.resource._data

    def validated_data(self):
        return self.resource.validated_data()

    def validate_data(self):
        self.resource.validate_data()

    def __eq__(self, other):
        if self.__class__ != other.__class__:
            return False
        return self.resource == other.resource

    def __str__(self):
        cls_name = self.__class__.__name__
        return "%s(%s)" % (cls_name, str(self.resource))

    def __repr__(self):
        cls_name = self.__class__.__name__
        return "%s(%r)" % (cls_name, self.resource)

    def __hash__(self):
        return hash((self.__class__, self.resource))
# Copyright 2013-2016 Allan Johns.
#
# This library is free software: you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation, either
# version 3 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library. If not, see <http://www.gnu.org/licenses/>.
|
unknown
|
codeparrot/codeparrot-clean
| ||
//! #[diplomat::attr] and other attributes
use crate::ast;
use crate::ast::attrs::{AttrInheritContext, DiplomatBackendAttrCfg, StandardAttribute};
use crate::hir::lowering::ErrorStore;
use crate::hir::{
EnumVariant, LoweringError, Method, Mutability, OpaqueId, ReturnType, SelfType, SuccessType,
TraitDef, Type, TypeDef, TypeId,
};
use syn::Meta;
pub use crate::ast::attrs::RenameAttr;
/// Diplomat attribute that can be specified on items, methods, and enum variants. These
/// can be used to control the codegen in a particular backend.
///
/// Most of these are specified via `#[diplomat::attr(some cfg here, attrname)]`, where `some cfg here`
/// can be used to pick which backends something applies to.
#[non_exhaustive]
#[derive(Clone, Default, Debug)]
pub struct Attrs {
    /// "disable" this item: do not generate code for it in the backend
    ///
    /// This attribute is always inherited except to variants
    pub disable: bool,
    /// Mark this item deprecated in FFI.
    ///
    /// NOTE(review): the `String` payload is presumably the deprecation
    /// message surfaced to users - confirm against backend usage.
    pub deprecated: Option<String>,
    /// An optional namespace. None is equivalent to the root namespace.
    ///
    /// This attribute is inherited to types (and is not allowed elsewhere)
    pub namespace: Option<String>,
    /// Rename this item/method/variant
    ///
    /// This attribute is inherited except through methods and variants (and is not allowed on variants)
    pub rename: RenameAttr,
    /// Rename this item in the C ABI. This *must* be respected by backends.
    ///
    /// This attribute is inherited except through variants
    pub abi_rename: RenameAttr,
    /// This method is "special": it should generate something other than a regular method on the other side.
    /// This can be something like a constructor, an accessor, a stringifier etc.
    ///
    /// This attribute does not participate in inheritance and must always
    /// be specified on individual methods
    pub special_method: Option<SpecialMethod>,
    /// This user-defined type can be used as the error type in a Result.
    /// Set by `#[diplomat::attr(..., error)]`.
    pub custom_errors: bool,
    /// This user-defined type has a "default" state that can be computed purely foreign-language-side
    ///
    /// Can be applied to enum variants to signal the default
    pub default: bool,
    /// From #[diplomat::demo()]. Created from [`crate::ast::attrs::Attrs::demo_attrs`].
    /// List of attributes specific to automatic demo generation.
    /// Currently just for demo_gen in diplomat-tool (which generates sample webpages), but could be used for broader purposes (i.e., demo Android apps)
    pub demo_attrs: DemoInfo,
    /// From #[diplomat::attr()]. If true, generates a mocking interface for this type.
    pub generate_mocking_interface: bool,
    /// From #[diplomat::attr()]. If true, Diplomat will check that this struct has the same memory layout in backends which support it. Allows this struct to be used in slices ([`super::Slice::Struct`]) and to be borrowed in function parameters.
    pub abi_compatible: bool,
}
// #region: Demo specific attributes.
/// For `#[diplomat::demo(input(...))]`, stored in [DemoInfo::input_cfg].
#[non_exhaustive]
#[derive(Clone, Default, Debug)]
pub struct DemoInputCFG {
    /// `#[diplomat::demo(input(label = "..."))]`
    /// Label that this input parameter should have. Let demo_gen pick a valid name if this is empty.
    ///
    /// For instance <label for="v">Number Here</label><input name="v"/>
    pub label: String,
    /// `#[diplomat::demo(input(default_value = "..."))]`
    /// Sets the default value for a parameter.
    ///
    /// Should ALWAYS be a string. The HTML renderer is expected to do validation for us.
    /// (Numeric literals in the attribute are stringified during lowering.)
    pub default_value: String,
}
/// Attributes from `#[diplomat::demo(...)]` controlling demo generation.
#[non_exhaustive]
#[derive(Clone, Default, Debug)]
pub struct DemoInfo {
    /// `#[diplomat::demo(generate)]`. If automatic generation is disabled by default (see [`diplomat_tool::demo_gen::DemoConfig`]), then the below render terminus will be allowed to generate.
    pub generate: bool,
    /// `#[diplomat::demo(default_constructor)]`
    /// We search for any methods specially tagged with `Constructor`, but if there's are no default Constructors and there's NamedConstructor that you want to be default instead, use this.
    /// TODO: Should probably ignore other `Constructors` if a default has been set.
    pub default_constructor: bool,
    /// `#[diplomat::demo(external)]` represents an item that we will not evaluate, and should be passed to the rendering engine to provide.
    pub external: bool,
    /// `#[diplomat::demo(custom_func = "/file/name/here.mjs")]` can be used above any `struct` definition in the bridge. The linked `.mjs` should contain a JS definition of functions that should be bundled with demo_gen's output.
    ///
    /// We call these functions "custom functions", as they are JS functions that are not automagically generated by demo_gen, but rather included as part of its JS output in the `RenderInfo` object.
    ///
    /// For more information on custom functions (and their use), see the relevant chapter in [the book](https://rust-diplomat.github.io/diplomat/demo_gen/custom_functions.html).
    ///
    /// Files are located relative to lib.rs.
    ///
    pub custom_func: Option<String>,
    /// `#[diplomat::demo(input(...))]` represents configuration options for anywhere we might expect user input.
    pub input_cfg: DemoInputCFG,
}
// #endregion
/// Attributes that mark methods as "special"
///
/// Parsed from attribute paths by [`SpecialMethod::from_path_and_meta`];
/// validated against the method signature in `Attrs::validate`.
#[non_exhaustive]
#[derive(Clone, Debug)]
pub enum SpecialMethod {
    /// A constructor.
    ///
    /// Must return Self (or Result<Self> for backends with `fallible_constructors` enabled )
    Constructor,
    /// A named constructor, with optional name. If the name isn't specified, it will be derived
    /// from the method name
    ///
    /// Must return Self (or Result<Self> for backends with `fallible_constructors` enabled )
    NamedConstructor(Option<String>),
    /// A getter, with optional name. If the name isn't specified, it will be derived
    /// from the method name
    ///
    /// Must have no parameters and must return something.
    Getter(Option<String>),
    /// A setter, with optional name. If the name isn't specified, it will be derived
    /// from the method name
    ///
    /// Must have no return type (aside from potentially a `Result<(), _>`) and must have one parameter
    Setter(Option<String>),
    /// A stringifier. Must have no parameters and return a string (DiplomatWrite)
    Stringifier,
    /// A comparison operator. Currently not universally supported
    Comparison,
    /// An iterator (a type that is mutated to produce new values)
    Iterator,
    /// An iterable (a type that can produce an iterator)
    Iterable,
    /// Indexes into the type using an integer
    Indexer,
    /// Arithmetic operators. May not return references
    Add,
    Sub,
    Mul,
    Div,
    /// In-place arithmetic operators. Must not return a value
    AddAssign,
    SubAssign,
    MulAssign,
    DivAssign,
}
impl SpecialMethod {
    /// Parse a special-method marker from an attribute path and its meta payload.
    ///
    /// Returns `Ok(None)` when the path does not name a special method, and an
    /// error when a marker taking an optional name is given anything other than
    /// a single string parameter (or nothing).
    pub fn from_path_and_meta(path: &str, meta: &Meta) -> Result<Option<Self>, LoweringError> {
        // Helper for markers that accept an optional single-string rename.
        let optional_name = |meta| match StandardAttribute::from_meta(meta) {
            Ok(StandardAttribute::String(s)) => Ok(Some(s)),
            Ok(StandardAttribute::Empty) => Ok(None),
            _ => Err(LoweringError::Other(format!(
                "`{path}` must have a single string parameter or no parameter"
            ))),
        };
        let parsed = match path {
            "constructor" => Self::Constructor,
            "named_constructor" => Self::NamedConstructor(optional_name(meta)?),
            "getter" => Self::Getter(optional_name(meta)?),
            "setter" => Self::Setter(optional_name(meta)?),
            "stringifier" => Self::Stringifier,
            "comparison" => Self::Comparison,
            "iterator" => Self::Iterator,
            "iterable" => Self::Iterable,
            "indexer" => Self::Indexer,
            "add" => Self::Add,
            "sub" => Self::Sub,
            "mul" => Self::Mul,
            "div" => Self::Div,
            "add_assign" => Self::AddAssign,
            "sub_assign" => Self::SubAssign,
            "mul_assign" => Self::MulAssign,
            "div_assign" => Self::DivAssign,
            _ => return Ok(None),
        };
        Ok(Some(parsed))
    }

    /// Returns the standard operator string (if any) associated with this special method
    pub fn operator_str(&self) -> Option<&str> {
        let op = match self {
            SpecialMethod::Add => "+",
            SpecialMethod::Sub => "-",
            SpecialMethod::Mul => "*",
            SpecialMethod::Div => "/",
            SpecialMethod::AddAssign => "+=",
            SpecialMethod::SubAssign => "-=",
            SpecialMethod::MulAssign => "*=",
            SpecialMethod::DivAssign => "/=",
            SpecialMethod::Indexer => "[]",
            _ => return None,
        };
        Some(op)
    }
}
/// For special methods that affect type semantics, whether this type has this method.
///
/// This will likely only contain a subset of special methods, but feel free to add more as needed.
#[derive(Debug, Default)]
#[non_exhaustive]
pub struct SpecialMethodPresence {
    /// Whether the type defines a comparator (at most one is allowed).
    pub comparator: bool,
    /// If it is an iterator, the type it iterates over
    pub iterator: Option<SuccessType>,
    /// If it is an iterable, the iterator type it returns (*not* the type it iterates over,
    /// perform lookup on that type to access)
    pub iterable: Option<OpaqueId>,
}
/// Where the attribute was found. Some attributes are only allowed in some contexts
/// (e.g. namespaces cannot be specified on methods)
#[non_exhaustive] // might add module attrs in the future
#[derive(Debug)]
pub enum AttributeContext<'a, 'b> {
    Type(TypeDef<'a>),
    Trait(&'a TraitDef),
    EnumVariant(&'a EnumVariant),
    /// A method, plus the id of its self type and the mutable record of
    /// special methods seen so far on that type.
    Method(&'a Method, TypeId, &'b mut SpecialMethodPresence),
    Function(&'a Method),
    Module,
    Param,
    SelfParam,
    Field,
}
/// Report an "unsupported attribute" error for `backend`, unless the attribute
/// was only enabled via `auto` (in which case skipping it silently is the
/// intended, graceful behavior).
fn maybe_error_unsupported(
    auto_found: bool,
    attribute: &str,
    backend: &str,
    errors: &mut ErrorStore,
) {
    if auto_found {
        return;
    }
    errors.push(LoweringError::Other(format!(
        "`{attribute}` not supported in backend {backend}"
    )));
}
impl Attrs {
    /// Lower an `ast::Attrs` into backend-resolved HIR `Attrs`.
    ///
    /// Starts from `parent_attrs` (attribute inheritance), then applies every
    /// `#[diplomat::attr(...)]` whose cfg matches this backend's `validator`,
    /// and finally the `#[diplomat::demo(...)]` attributes. Problems are
    /// accumulated into `errors` rather than aborting early.
    pub fn from_ast(
        ast: &ast::Attrs,
        validator: &(impl AttributeValidator + ?Sized),
        parent_attrs: &Attrs,
        errors: &mut ErrorStore,
    ) -> Self {
        let mut this = parent_attrs.clone();
        // Backends must support this since it applies to the macro/C code.
        // No special inheritance, was already appropriately inherited in AST
        this.abi_rename = ast.abi_rename.clone();
        this.deprecated = ast.deprecated.clone();
        let support = validator.attrs_supported();
        let backend = validator.primary_name();
        for attr in &ast.attrs {
            // Set to true by satisfies_cfg() when the cfg matched only via `auto`.
            let mut auto_found = false;
            match validator.satisfies_cfg(&attr.cfg, Some(&mut auto_found)) {
                Ok(satisfies) if !satisfies => continue,
                Err(e) => {
                    errors.push(e);
                    continue;
                }
                Ok(_) => {}
            };
            let path = attr.meta.path();
            if let Some(path) = path.get_ident() {
                let path = path.to_string();
                // Attributes that don't interact with `auto` should warn when
                // gated on it; called by the arms below where appropriate.
                let warn_auto = |errors: &mut ErrorStore| {
                    if auto_found {
                        errors.push(LoweringError::Other(format!(
                            "Diplomat attribute {path:?} gated on 'auto' but is not one that works with 'auto'"
                        )));
                    }
                };
                // Check against the set of attributes that can have platform support
                if support.check_string(&path) == Some(false) {
                    maybe_error_unsupported(auto_found, &path, backend, errors);
                    continue;
                }
                match SpecialMethod::from_path_and_meta(&path, &attr.meta) {
                    Ok(Some(kind)) => {
                        if let Some(ref existing) = this.special_method {
                            errors.push(LoweringError::Other(format!(
                                "Multiple special method markers found on the same method, found {path} and {existing:?}"
                            )));
                        } else {
                            this.special_method = Some(kind);
                        }
                    }
                    Err(error) => errors.push(error),
                    Ok(None) => match path.as_str() {
                        // No match found in the special methods, check the other keywords
                        "disable" => {
                            if let Meta::Path(_) = attr.meta {
                                if this.disable {
                                    errors.push(LoweringError::Other(
                                        "Duplicate `disable` attribute".into(),
                                    ));
                                } else {
                                    this.disable = true;
                                }
                            } else {
                                errors.push(LoweringError::Other(
                                    "`disable` must be a simple path".into(),
                                ))
                            }
                            warn_auto(errors);
                        }
                        "default" => {
                            if let Meta::Path(_) = attr.meta {
                                if this.default {
                                    errors.push(LoweringError::Other(
                                        "Duplicate `default` attribute".into(),
                                    ));
                                } else {
                                    this.default = true;
                                }
                            } else {
                                errors.push(LoweringError::Other(
                                    "`default` must be a simple path".into(),
                                ))
                            }
                        }
                        "rename" => {
                            match RenameAttr::from_meta(&attr.meta) {
                                Ok(rename) => {
                                    // We use the override extend mode: a single ast::Attrs
                                    // will have had these attributes inherited into the list by appending
                                    // to the end; so a later attribute in the list is more pertinent.
                                    this.rename.extend(&rename);
                                }
                                Err(e) => errors.push(LoweringError::Other(format!(
                                    "`rename` attr failed to parse: {e:?}"
                                ))),
                            }
                            warn_auto(errors);
                        }
                        "namespace" => match StandardAttribute::from_meta(&attr.meta) {
                            // An empty string resets to the root namespace.
                            Ok(StandardAttribute::String(s)) if s.is_empty() => {
                                this.namespace = None
                            }
                            Ok(StandardAttribute::String(s)) => this.namespace = Some(s),
                            Ok(_) | Err(_) => {
                                errors.push(LoweringError::Other(
                                    "`namespace` must have a single string parameter".to_string(),
                                ));
                            }
                        },
                        "error" => {
                            this.custom_errors = true;
                        }
                        "generate_mocking_interface" => {
                            if !support.generate_mocking_interface {
                                maybe_error_unsupported(
                                    auto_found,
                                    "generate_mocking_interface",
                                    backend,
                                    errors,
                                );
                                continue;
                            }
                            this.generate_mocking_interface = true;
                        }
                        "abi_compatible" => {
                            if !support.abi_compatibles {
                                maybe_error_unsupported(
                                    auto_found,
                                    "abi_compatible",
                                    backend,
                                    errors,
                                );
                                continue;
                            }
                            this.abi_compatible = true;
                        }
                        _ => {
                            errors.push(LoweringError::Other(format!(
                                "Unknown diplomat attribute {path}: expected one of: `disable, rename, namespace, constructor, stringifier, comparison, named_constructor, getter, setter, indexer, error`"
                            )));
                        }
                    },
                }
            }
        }
        // Demo attributes live in their own `#[diplomat::demo(...)]` namespace
        // and are not cfg-gated per backend.
        for attr in &ast.demo_attrs {
            let path = attr.meta.path();
            if let Some(path_ident) = path.get_ident() {
                if path_ident == "external" {
                    this.demo_attrs.external = true;
                } else if path_ident == "default_constructor" {
                    this.demo_attrs.default_constructor = true;
                } else if path_ident == "generate" {
                    this.demo_attrs.generate = true;
                } else if path_ident == "input" {
                    let meta_list = attr
                        .meta
                        .require_list()
                        .expect("Could not get MetaList, expected #[diplomat::demo(input(...))]");
                    meta_list
                        .parse_nested_meta(|meta| {
                            if meta.path.is_ident("label") {
                                let value = meta.value()?;
                                let s: syn::LitStr = value.parse()?;
                                this.demo_attrs.input_cfg.label = s.value();
                                Ok(())
                            } else if meta.path.is_ident("default_value") {
                                let value = meta.value()?;
                                let str_val: String;
                                // Numeric literals are accepted and stringified,
                                // since default_value is always stored as a string.
                                let ahead = value.lookahead1();
                                if ahead.peek(syn::LitFloat) {
                                    let s: syn::LitFloat = value.parse()?;
                                    str_val = s.base10_parse::<f64>()?.to_string();
                                } else if ahead.peek(syn::LitInt) {
                                    let s: syn::LitInt = value.parse()?;
                                    str_val = s.base10_parse::<i64>()?.to_string();
                                } else {
                                    let s: syn::LitStr = value.parse()?;
                                    str_val = s.value();
                                }
                                this.demo_attrs.input_cfg.default_value = str_val;
                                Ok(())
                            } else {
                                Err(meta.error(format!(
                                    "Unsupported ident {:?}",
                                    meta.path.get_ident()
                                )))
                            }
                        })
                        .expect("Could not read input(...)");
                } else if path_ident == "custom_func" {
                    let v = &attr.meta.require_name_value().unwrap().value;
                    if let syn::Expr::Lit(s) = v {
                        if let syn::Lit::Str(string) = &s.lit {
                            this.demo_attrs.custom_func = Some(string.value());
                        } else {
                            errors.push(LoweringError::Other(format!(
                                "#[diplomat::demo(custom_func={s:?}) must be a literal string."
                            )));
                        }
                    } else {
                        errors.push(LoweringError::Other(format!(
                            "#[diplomat::demo(custom_func={v:?}) must be a literal string."
                        )));
                    }
                } else {
                    errors.push(LoweringError::Other(format!(
                        "Unknown demo_attr: {path_ident:?}"
                    )));
                }
            } else {
                errors.push(LoweringError::Other(format!("Unknown demo_attr: {path:?}")));
            }
        }
        this
    }
/// Validate that this attribute is allowed in this context
pub(crate) fn validate(
&self,
validator: &(impl AttributeValidator + ?Sized),
mut context: AttributeContext,
errors: &mut ErrorStore,
) {
// use an exhaustive destructure so new attributes are handled
let Attrs {
disable,
deprecated: _deprecated,
namespace,
rename,
abi_rename,
special_method,
custom_errors,
default,
demo_attrs: _,
generate_mocking_interface,
abi_compatible,
} = &self;
if *disable && matches!(context, AttributeContext::EnumVariant(..)) {
errors.push(LoweringError::Other(
"`disable` cannot be used on enum variants".into(),
))
}
if let Some(ref special) = special_method {
if let AttributeContext::Method(method, self_id, ref mut special_method_presence) =
context
{
let check_param_count = |name: &str, count: usize, errors: &mut ErrorStore| {
if method.params.len() != count {
errors.push(LoweringError::Other(format!(
"{name} must have exactly {count} parameter{}",
if count == 1 { "" } else { "s" }
)))
}
};
let check_self_param = |name: &str, need_self: bool, errors: &mut ErrorStore| {
if method.param_self.is_some() != need_self {
errors.push(LoweringError::Other(format!(
"{name} must{} accept a self parameter",
if need_self { "" } else { " not" }
)));
}
};
match special {
SpecialMethod::Constructor | SpecialMethod::NamedConstructor(..) => {
check_self_param("Constructors", false, errors);
let output = method.output.success_type();
match method.output {
ReturnType::Infallible(_) => (),
ReturnType::Fallible(..) => {
// Only report an error if constructors *are* supported but failable constructors *arent*
if validator.attrs_supported().constructors
&& !validator.attrs_supported().fallible_constructors
{
errors.push(LoweringError::Other(
"This backend doesn't support fallible constructors"
.to_string(),
))
}
}
ReturnType::Nullable(..) => {
errors.push(LoweringError::Other("Diplomat doesn't support turning nullable methods into constructors".to_string()));
}
}
if let SuccessType::OutType(t) = &output {
if t.id() != Some(self_id) {
errors.push(LoweringError::Other(
"Constructors must return Self!".to_string(),
));
}
} else {
errors.push(LoweringError::Other(
"Constructors must return Self!".to_string(),
));
}
}
SpecialMethod::Getter(_) => {
if !method.params.is_empty() {
errors
.push(LoweringError::Other("Getter cannot have parameters".into()));
}
if method.param_self.is_none()
&& !validator.attrs_supported().static_accessors
{
errors.push(LoweringError::Other(format!("No self parameter on Getter {} but static_acessors are not supported",method.name.as_str())));
}
// Currently does not forbid nullable getters, could if desired
}
SpecialMethod::Setter(_) => {
if !matches!(method.output.success_type(), SuccessType::Unit) {
errors.push(LoweringError::Other("Setters must return unit".into()));
}
if method.param_self.is_none()
&& !validator.attrs_supported().static_accessors
{
errors.push(LoweringError::Other(format!("No self parameter on Setter {} but static_acessors are not supported",method.name.as_str())));
}
check_param_count("Setter", 1, errors);
// Currently does not forbid fallible setters, could if desired
}
SpecialMethod::Stringifier => {
if !method.params.is_empty() {
errors
.push(LoweringError::Other("Getter cannot have parameters".into()));
}
if !matches!(method.output.success_type(), SuccessType::Write) {
errors.push(LoweringError::Other(
"Stringifier must return string".into(),
));
}
}
SpecialMethod::Comparison => {
check_param_count("Comparator", 1, errors);
if special_method_presence.comparator {
errors.push(LoweringError::Other(
"Cannot define two comparators on the same type".into(),
));
}
special_method_presence.comparator = true;
// In the long run we can actually support heterogeneous comparators. Not a priority right now.
const COMPARATOR_ERROR: &str =
"Comparator's parameter must be identical to self";
check_self_param("Comparators", true, errors);
if let Some(ref selfty) = method.param_self {
if let Some(param) = method.params.first() {
match (&selfty.ty, ¶m.ty) {
(SelfType::Opaque(p), Type::Opaque(p2)) => {
if p.tcx_id != p2.tcx_id {
errors.push(LoweringError::Other(
COMPARATOR_ERROR.into(),
));
}
if p.owner.mutability != Mutability::Immutable
|| p2.owner.mutability != Mutability::Immutable
{
errors.push(LoweringError::Other(
"comparators must accept immutable parameters"
.into(),
));
}
if p2.optional.0 {
errors.push(LoweringError::Other(
"comparators must accept non-optional parameters"
.into(),
));
}
}
(SelfType::Struct(p), Type::Struct(p2)) => {
if p.tcx_id != p2.tcx_id {
errors.push(LoweringError::Other(
COMPARATOR_ERROR.into(),
));
}
if p.owner
.as_borrowed()
.map(|o| !o.mutability.is_immutable())
.unwrap_or(false)
|| p2
.owner
.as_borrowed()
.map(|o| !o.mutability.is_immutable())
.unwrap_or(false)
{
errors.push(LoweringError::Other(
"comparators must accept immutable parameters"
.into(),
));
}
}
(SelfType::Enum(p), Type::Enum(p2)) => {
if p.tcx_id != p2.tcx_id {
errors.push(LoweringError::Other(
COMPARATOR_ERROR.into(),
));
}
}
_ => {
errors.push(LoweringError::Other(COMPARATOR_ERROR.into()));
}
}
}
}
}
SpecialMethod::Iterator => {
if special_method_presence.iterator.is_some() {
errors.push(LoweringError::Other(
"Cannot mark type as iterator twice".into(),
));
}
check_param_count("Iterator", 0, errors);
// In theory we could support struct and enum iterators. The benefit is slight:
// it generates probably inefficient code whilst being rather weird when it comes to the
// "structs and enums convert across the boundary" norm for backends.
//
// Essentially, the `&mut self` behavior won't work right.
//
// Furthermore, in some backends (like Dart) defining an iterator may requiring adding fields,
// which may not be possible for enums, and would still be an odd-one-out field for structs.g s
check_self_param("Iterator", true, errors);
if let Some(this) = &method.param_self {
if !matches!(this.ty, SelfType::Opaque(..)) {
errors.push(LoweringError::Other(
"Iterators only allowed on opaques".into(),
))
}
}
if let ReturnType::Nullable(ref o) = method.output {
if let SuccessType::Unit = o {
errors.push(LoweringError::Other(
"Iterator method must return something".into(),
));
}
special_method_presence.iterator = Some(o.clone());
} else if let ReturnType::Infallible(SuccessType::OutType(
crate::hir::OutType::Opaque(
ref o @ crate::hir::OpaquePath {
optional: crate::hir::Optional(true),
..
},
),
)) = method.output
{
let mut o = o.clone();
o.optional = crate::hir::Optional(false);
special_method_presence.iterator =
Some(SuccessType::OutType(crate::hir::OutType::Opaque(o)));
} else {
errors.push(LoweringError::Other(
"Iterator method must return nullable value".into(),
));
}
}
SpecialMethod::Iterable => {
if special_method_presence.iterable.is_some() {
errors.push(LoweringError::Other(
"Cannot mark type as iterable twice".into(),
));
}
check_param_count("Iterator", 0, errors);
check_self_param("Iterables", true, errors);
match method.output.success_type() {
SuccessType::OutType(ty) => {
if let Some(TypeId::Opaque(id)) = ty.id() {
special_method_presence.iterable = Some(id);
} else {
errors.push(LoweringError::Other(
"Iterables must return a custom opaque type".into(),
))
}
}
_ => errors.push(LoweringError::Other(
"Iterables must return a custom type".into(),
)),
}
}
SpecialMethod::Indexer => {
check_param_count("Indexer", 1, errors);
check_self_param("Indexer", true, errors);
if method.output.success_type().is_unit() {
errors.push(LoweringError::Other("Indexer must return a value".into()));
}
}
e @ (SpecialMethod::Add
| SpecialMethod::Sub
| SpecialMethod::Mul
| SpecialMethod::Div) => {
let name = match e {
SpecialMethod::Add => "Add",
SpecialMethod::Sub => "Sub",
SpecialMethod::Mul => "Mul",
SpecialMethod::Div => "Div",
_ => unreachable!(),
};
check_param_count(name, 1, errors);
check_self_param(name, true, errors);
if method.output.success_type().is_unit() {
errors
.push(LoweringError::Other(format!("{name} must return a value")));
}
}
e @ (SpecialMethod::AddAssign
| SpecialMethod::SubAssign
| SpecialMethod::MulAssign
| SpecialMethod::DivAssign) => {
let name = match e {
SpecialMethod::AddAssign => "AddAssign",
SpecialMethod::SubAssign => "SubAssign",
SpecialMethod::MulAssign => "MulAssign",
SpecialMethod::DivAssign => "DivAssign",
_ => unreachable!(),
};
check_param_count(name, 1, errors);
check_self_param(name, true, errors);
if let Some(self_param) = &method.param_self {
if matches!(self_param.ty, SelfType::Struct(_) | SelfType::Enum(_)) {
errors.push(LoweringError::Other("*Assign arithmetic operations not allowed on non-opaque types. \
Use the non-mutating arithmetic operators instead".to_string()));
} else if self_param.ty.is_immutably_borrowed() {
errors.push(LoweringError::Other(format!(
"{name} must take self by mutable reference"
)));
}
}
if !method.output.success_type().is_unit() {
errors.push(LoweringError::Other(format!(
"{name} must not return a value"
)));
}
}
}
} else {
errors.push(LoweringError::Other(format!("Special method (type {special:?}) not allowed on non-method context {context:?}")))
}
}
if namespace.is_some()
&& matches!(
context,
AttributeContext::Method(..) | AttributeContext::EnumVariant(..)
)
{
errors.push(LoweringError::Other(
"`namespace` can only be used on types".to_string(),
));
}
if *default && !matches!(context, AttributeContext::EnumVariant(..)) {
errors.push(LoweringError::Other(
"`default` can only be used on types and enum variants".to_string(),
));
}
if matches!(
context,
AttributeContext::Param | AttributeContext::SelfParam | AttributeContext::Field
) {
if *disable {
errors.push(LoweringError::Other(format!(
"`disable`s cannot be used on an {context:?}."
)));
}
if namespace.is_some() {
errors.push(LoweringError::Other(format!(
"`namespace` cannot be used on an {context:?}."
)));
}
if !rename.is_empty() || !abi_rename.is_empty() {
errors.push(LoweringError::Other(format!(
"`rename`s cannot be used on an {context:?}."
)));
}
if special_method.is_some() {
errors.push(LoweringError::Other(format!(
"{context:?} cannot be special methods."
)));
}
}
if *custom_errors
&& !matches!(
context,
AttributeContext::Type(..)
| AttributeContext::Trait(..)
| AttributeContext::Function(..)
)
{
errors.push(LoweringError::Other(
"`error` can only be used on types".to_string(),
));
}
if *generate_mocking_interface
&& !matches!(context, AttributeContext::Type(TypeDef::Opaque(..)))
{
errors.push(LoweringError::Other(
"`generate_mocking_interface` can only be used on opaque types".to_string(),
));
}
if *abi_compatible && !matches!(context, AttributeContext::Type(TypeDef::Struct(..))) {
errors.push(LoweringError::Other(
"`abi_compatible` can only be used on non-output-only struct types.".into(),
));
}
}
pub(crate) fn for_inheritance(&self, context: AttrInheritContext) -> Attrs {
let rename = self.rename.attrs_for_inheritance(context, false);
// Disabling shouldn't inherit to variants
let disable = if context == AttrInheritContext::Variant {
false
} else {
self.disable
};
let namespace = if matches!(
context,
AttrInheritContext::Module | AttrInheritContext::Type
) {
self.namespace.clone()
} else {
None
};
Attrs {
disable,
deprecated: None,
rename,
namespace,
// Should not inherit from enums to their variants
default: false,
// Was already inherited on the AST side
abi_rename: Default::default(),
// Never inherited
special_method: None,
// Not inherited
custom_errors: false,
demo_attrs: Default::default(),
// Not inherited
generate_mocking_interface: false,
abi_compatible: false,
}
}
}
/// Non-exhaustive list of what attributes and other features your backend is able to handle, based on #[diplomat::attr(...)] contents.
/// Set this through an [`AttributeValidator`].
///
/// See [`SpecialMethod`] and [`Attrs`] for your specific implementation needs.
///
/// For example, the current dart backend supports [`BackendAttrSupport::constructors`]. So when it encounters:
/// ```ignore
/// struct Sample {}
/// impl Sample {
///     #[diplomat::attr(constructor)]
///     pub fn new() -> Box<Self> {
///         Box::new(Sample{})
///     }
/// }
///
/// ```
///
/// It generates
/// ```dart
/// factory Sample()
/// ```
///
/// If a backend does not support a specific `#[diplomat::attr(...)]`, it may error.
#[non_exhaustive]
#[derive(Copy, Clone, Debug, Default)]
pub struct BackendAttrSupport {
    /// Namespacing types, e.g. C++ `namespace`.
    pub namespacing: bool,
    /// Rust can directly access the memory of this language, like C and C++.
    /// This is not supported in any garbage-collected language.
    pub memory_sharing: bool,
    /// This language's structs are non-exhaustive by default, i.e. adding
    /// fields is not a breaking change.
    pub non_exhaustive_structs: bool,
    /// Whether the language supports method overloading
    pub method_overloading: bool,
    /// Whether the language uses UTF-8 strings
    pub utf8_strings: bool,
    /// Whether the language uses UTF-16 strings
    pub utf16_strings: bool,
    /// Whether the language supports using slices with 'static lifetimes.
    pub static_slices: bool,
    /// Whether the language supports marking types as having a default value
    pub defaults: bool,

    // Special methods
    /// Marking a method as a constructor to generate special constructor methods.
    pub constructors: bool,
    /// Marking a method as a named constructor to generate special named constructor methods.
    pub named_constructors: bool,
    /// Marking constructors as being able to return errors. This is possible in languages where
    /// errors are thrown as exceptions (Dart), but not for example in C++, where errors are
    /// returned as values (constructors usually have to return the type itself).
    pub fallible_constructors: bool,
    /// Marking methods as field getters and setters, see [`SpecialMethod::Getter`] and [`SpecialMethod::Setter`]
    pub accessors: bool,
    /// Marking *static* methods as field getters and setters, see [`SpecialMethod::Getter`] and [`SpecialMethod::Setter`]
    pub static_accessors: bool,
    /// Marking a method as the `to_string` method, which is special in this language.
    pub stringifiers: bool,
    /// Marking a method as the `compare_to` method, which is special in this language.
    pub comparators: bool,
    /// Marking a method as the `next` method, which is special in this language.
    pub iterators: bool,
    /// Marking a method as the `iterator` method, which is special in this language.
    pub iterables: bool,
    /// Marking a method as the `[]` operator, which is special in this language.
    pub indexing: bool,
    /// Marking a method as an arithmetic operator (+-*/[=])
    pub arithmetic: bool,
    /// Support for Option<Struct> and Option<Primitive>
    pub option: bool,
    /// Allowing callback arguments
    pub callbacks: bool,
    /// Allowing traits
    pub traits: bool,
    /// Marking a user-defined type as being a valid error result type.
    pub custom_errors: bool,
    /// Traits are safe to Send between threads (safe to mark as std::marker::Send)
    pub traits_are_send: bool,
    /// Traits are safe to Sync between threads (safe to mark as std::marker::Sync)
    pub traits_are_sync: bool,
    /// Whether to generate mocking interface.
    pub generate_mocking_interface: bool,
    /// Passing of structs that only hold (non-slice) primitive types
    /// (for use in slices and languages that support taking direct pointers to structs):
    pub abi_compatibles: bool,
    /// Whether or not the language supports &Struct or &mut Struct
    pub struct_refs: bool,
    /// Whether the language supports generating functions not associated with any type.
    pub free_functions: bool,
}
impl BackendAttrSupport {
    /// Test-only helper: a support set with every capability enabled.
    #[cfg(test)]
    fn all_true() -> Self {
        Self {
            namespacing: true,
            memory_sharing: true,
            non_exhaustive_structs: true,
            method_overloading: true,
            utf8_strings: true,
            utf16_strings: true,
            static_slices: true,
            defaults: true,
            constructors: true,
            named_constructors: true,
            fallible_constructors: true,
            static_accessors: true,
            accessors: true,
            stringifiers: true,
            comparators: true,
            iterators: true,
            iterables: true,
            indexing: true,
            arithmetic: true,
            option: true,
            callbacks: true,
            traits: true,
            custom_errors: true,
            traits_are_send: true,
            traits_are_sync: true,
            generate_mocking_interface: true,
            abi_compatibles: true,
            struct_refs: true,
            free_functions: true,
        }
    }

    /// Looks up a support flag by its string name.
    ///
    /// Returns `None` for unknown names so the caller can report the error.
    fn check_string(&self, v: &str) -> Option<bool> {
        match v {
            "namespacing" => Some(self.namespacing),
            "memory_sharing" => Some(self.memory_sharing),
            "non_exhaustive_structs" => Some(self.non_exhaustive_structs),
            "method_overloading" => Some(self.method_overloading),
            "utf8_strings" => Some(self.utf8_strings),
            "utf16_strings" => Some(self.utf16_strings),
            "static_slices" => Some(self.static_slices),
            // Accept both spellings: `is_name_value` matches "defaults",
            // while this table historically only accepted "default".
            "default" | "defaults" => Some(self.defaults),
            "constructors" => Some(self.constructors),
            "named_constructors" => Some(self.named_constructors),
            "fallible_constructors" => Some(self.fallible_constructors),
            "accessors" => Some(self.accessors),
            // Previously missing; keep this table in sync with the struct fields.
            "static_accessors" => Some(self.static_accessors),
            "stringifiers" => Some(self.stringifiers),
            "comparators" => Some(self.comparators),
            "iterators" => Some(self.iterators),
            "iterables" => Some(self.iterables),
            "indexing" => Some(self.indexing),
            "arithmetic" => Some(self.arithmetic),
            "option" => Some(self.option),
            "callbacks" => Some(self.callbacks),
            "traits" => Some(self.traits),
            "custom_errors" => Some(self.custom_errors),
            "traits_are_send" => Some(self.traits_are_send),
            "traits_are_sync" => Some(self.traits_are_sync),
            // Previously missing; keep this table in sync with the struct fields.
            "generate_mocking_interface" => Some(self.generate_mocking_interface),
            "abi_compatibles" => Some(self.abi_compatibles),
            "struct_refs" => Some(self.struct_refs),
            "free_functions" => Some(self.free_functions),
            _ => None,
        }
    }
}
/// Defined by backends when validating attributes
pub trait AttributeValidator {
    /// The primary name of the backend, for use in diagnostics
    fn primary_name(&self) -> &str;
    /// Does this backend satisfy `cfg(backend_name)`?
    /// (Backends are allowed to satisfy multiple backend names, useful when there
    /// are multiple backends for a language)
    fn is_backend(&self, backend_name: &str) -> bool;
    /// does this backend satisfy cfg(name = value)?
    fn is_name_value(&self, name: &str, value: &str) -> Result<bool, LoweringError>;
    /// What backend attrs does this support?
    fn attrs_supported(&self) -> BackendAttrSupport;
    /// Provided, checks if type satisfies a `DiplomatBackendAttrCfg`
    ///
    /// auto_found helps check for `auto`, which is only allowed within `any` and at the top level. When `None`,
    /// `auto` is not allowed.
    fn satisfies_cfg(
        &self,
        cfg: &DiplomatBackendAttrCfg,
        mut auto_found: Option<&mut bool>,
    ) -> Result<bool, LoweringError> {
        Ok(match *cfg {
            // `auto` is disallowed (None) inside `not`/`all`: it only makes
            // sense where finding it can unconditionally satisfy the cfg.
            DiplomatBackendAttrCfg::Not(ref c) => !self.satisfies_cfg(c, None)?,
            DiplomatBackendAttrCfg::Any(ref cs) => {
                #[allow(clippy::needless_option_as_deref)]
                // False positive: we need this for reborrowing
                for c in cs {
                    // Reborrow `auto_found` so each arm of the `any` can flag it.
                    if self.satisfies_cfg(c, auto_found.as_deref_mut())? {
                        return Ok(true);
                    }
                }
                false
            }
            DiplomatBackendAttrCfg::All(ref cs) => {
                for c in cs {
                    if !self.satisfies_cfg(c, None)? {
                        return Ok(false);
                    }
                }
                true
            }
            DiplomatBackendAttrCfg::Auto => {
                if let Some(found) = auto_found {
                    // Record that `auto` fired; callers use this to apply defaults.
                    *found = true;
                    return Ok(true);
                } else {
                    return Err(LoweringError::Other("auto in diplomat::attr() is only allowed at the top level and within `any`".into()));
                }
            }
            // `*` matches every backend unconditionally.
            DiplomatBackendAttrCfg::Star => true,
            DiplomatBackendAttrCfg::BackendName(ref n) => self.is_backend(n),
            DiplomatBackendAttrCfg::NameValue(ref n, ref v) => self.is_name_value(n, v)?,
        })
    }

    // Provided, constructs an attribute
    fn attr_from_ast(
        &self,
        ast: &ast::Attrs,
        parent_attrs: &Attrs,
        errors: &mut ErrorStore,
    ) -> Attrs {
        Attrs::from_ast(ast, self, parent_attrs, errors)
    }

    // Provided: validates an attribute in the context in which it was constructed
    fn validate(&self, attrs: &Attrs, context: AttributeContext, errors: &mut ErrorStore) {
        attrs.validate(self, context, errors)
    }
}
/// A basic attribute validator
#[non_exhaustive]
#[derive(Default)]
pub struct BasicAttributeValidator {
    /// The primary name of this backend (should be unique, ideally)
    pub backend_name: String,
    /// The attributes supported
    pub support: BackendAttrSupport,
    /// Additional names for this backend
    pub other_backend_names: Vec<String>,
    /// override is_name_value(); when `None`, unknown name=value cfgs are unsatisfied
    #[allow(clippy::type_complexity)] // dyn fn is not that complex
    pub is_name_value: Option<Box<dyn Fn(&str, &str) -> bool>>,
}
impl BasicAttributeValidator {
pub fn new(backend_name: &str) -> Self {
BasicAttributeValidator {
backend_name: backend_name.into(),
..Self::default()
}
}
}
impl AttributeValidator for BasicAttributeValidator {
    /// Returns the backend name used in diagnostics.
    fn primary_name(&self) -> &str {
        &self.backend_name
    }
    /// Matches the primary name or any of the configured aliases.
    fn is_backend(&self, backend_name: &str) -> bool {
        self.backend_name == backend_name
            || self.other_backend_names.iter().any(|n| n == backend_name)
    }
    /// Handles the built-in `supports = ...` cfg; anything else is delegated
    /// to the optional `is_name_value` override (or is unsatisfied).
    fn is_name_value(&self, name: &str, value: &str) -> Result<bool, LoweringError> {
        Ok(if name == "supports" {
            // destructure so new fields are forced to be added
            let BackendAttrSupport {
                namespacing,
                memory_sharing,
                non_exhaustive_structs,
                method_overloading,
                utf8_strings,
                utf16_strings,
                static_slices,
                defaults,
                constructors,
                named_constructors,
                fallible_constructors,
                accessors,
                static_accessors,
                stringifiers,
                comparators,
                iterators,
                iterables,
                indexing,
                arithmetic,
                option,
                callbacks,
                traits,
                custom_errors,
                traits_are_send,
                traits_are_sync,
                generate_mocking_interface,
                abi_compatibles,
                struct_refs,
                free_functions,
            } = self.support;
            match value {
                "namespacing" => namespacing,
                "memory_sharing" => memory_sharing,
                "non_exhaustive_structs" => non_exhaustive_structs,
                "method_overloading" => method_overloading,
                "utf8_strings" => utf8_strings,
                "utf16_strings" => utf16_strings,
                "static_slices" => static_slices,
                "defaults" => defaults,
                "constructors" => constructors,
                "named_constructors" => named_constructors,
                "fallible_constructors" => fallible_constructors,
                "accessors" => accessors,
                "static_accessors" => static_accessors,
                "stringifiers" => stringifiers,
                "comparators" => comparators,
                "iterators" => iterators,
                "iterables" => iterables,
                "indexing" => indexing,
                "arithmetic" => arithmetic,
                "option" => option,
                "callbacks" => callbacks,
                "traits" => traits,
                "custom_errors" => custom_errors,
                "traits_are_send" => traits_are_send,
                "traits_are_sync" => traits_are_sync,
                "generate_mocking_interface" => generate_mocking_interface,
                "abi_compatibles" => abi_compatibles,
                "struct_refs" => struct_refs,
                "free_functions" => free_functions,
                _ => {
                    // Unknown `supports = ...` names are hard errors rather than
                    // silently-unsatisfied cfgs, to catch typos.
                    return Err(LoweringError::Other(format!(
                        "Unknown supports = value found: {value}"
                    )))
                }
            }
        } else if let Some(ref nv) = self.is_name_value {
            nv(name, value)
        } else {
            false
        })
    }
    fn attrs_supported(&self) -> BackendAttrSupport {
        self.support
    }
}
#[cfg(test)]
mod tests {
    use crate::hir;
    use std::fmt::Write;

    /// Lowers the given bridge module with the given backend support flags,
    /// collects every lowering error into a string, and snapshot-tests that
    /// string with `insta` (an empty snapshot means "no errors").
    macro_rules! uitest_lowering_attr {
        ($attrs:expr, $($file:tt)*) => {
            let parsed: syn::File = syn::parse_quote! { $($file)* };
            let mut output = String::new();
            let mut attr_validator = hir::BasicAttributeValidator::new("tests");
            attr_validator.support = $attrs;
            match hir::TypeContext::from_syn(&parsed, Default::default(), attr_validator) {
                Ok(_context) => (),
                Err(e) => {
                    for (ctx, err) in e {
                        writeln!(&mut output, "Lowering error in {ctx}: {err}").unwrap();
                    }
                }
            };
            insta::with_settings!({}, {
                insta::assert_snapshot!(output)
            });
        }
    }

    /// `auto` should apply supported special methods, silently skip
    /// unsupported ones, and be rejected on renames/disables.
    #[test]
    fn test_auto() {
        uitest_lowering_attr! { hir::BackendAttrSupport { comparators: true, ..Default::default()},
            #[diplomat::bridge]
            mod ffi {
                use std::cmp;

                #[diplomat::opaque]
                #[diplomat::attr(auto, namespace = "should_not_show_up")]
                struct Opaque;

                impl Opaque {
                    #[diplomat::attr(auto, comparison)]
                    pub fn comparator_static(&self, other: &Opaque) -> cmp::Ordering {
                        todo!()
                    }

                    #[diplomat::attr(*, iterator)]
                    pub fn next(&mut self) -> Option<u8> {
                        self.0.next()
                    }

                    #[diplomat::attr(auto, rename = "bar")]
                    pub fn auto_doesnt_work_on_renames(&self) {
                    }

                    #[diplomat::attr(auto, disable)]
                    pub fn auto_doesnt_work_on_disables(&self) {
                    }
                }
            }
        }
    }

    /// Exercises every validation rule for `comparison`: self parameter,
    /// parameter count/type, return type, mutability, and optionality.
    #[test]
    fn test_comparator() {
        uitest_lowering_attr! { hir::BackendAttrSupport::all_true(),
            #[diplomat::bridge]
            mod ffi {
                use std::cmp;

                #[diplomat::opaque]
                struct Opaque;

                struct Struct {
                    field: u8
                }

                impl Opaque {
                    #[diplomat::attr(auto, comparison)]
                    pub fn comparator_static(other: &Opaque) -> cmp::Ordering {
                        todo!()
                    }
                    #[diplomat::attr(auto, comparison)]
                    pub fn comparator_none(&self) -> cmp::Ordering {
                        todo!()
                    }
                    #[diplomat::attr(auto, comparison)]
                    pub fn comparator_othertype(other: Struct) -> cmp::Ordering {
                        todo!()
                    }
                    #[diplomat::attr(auto, comparison)]
                    pub fn comparator_badreturn(&self, other: &Opaque) -> u8 {
                        todo!()
                    }
                    #[diplomat::attr(auto, comparison)]
                    pub fn comparison_correct(&self, other: &Opaque) -> cmp::Ordering {
                        todo!()
                    }

                    pub fn comparison_unmarked(&self, other: &Opaque) -> cmp::Ordering {
                        todo!()
                    }
                    pub fn ordering_wrong(&self, other: cmp::Ordering) {
                        todo!()
                    }
                    #[diplomat::attr(auto, comparison)]
                    pub fn comparison_mut(&self, other: &mut Opaque) -> cmp::Ordering {
                        todo!()
                    }
                    #[diplomat::attr(auto, comparison)]
                    pub fn comparison_opt(&self, other: Option<&Opaque>) -> cmp::Ordering {
                        todo!()
                    }
                }

                impl Struct {
                    #[diplomat::attr(auto, comparison)]
                    pub fn comparison_other(self, other: &Opaque) -> cmp::Ordering {
                        todo!()
                    }
                    #[diplomat::attr(auto, comparison)]
                    pub fn comparison_correct(self, other: Self) -> cmp::Ordering {
                        todo!()
                    }
                    #[diplomat::attr(auto, comparison)]
                    pub fn comparison_ref(&self, other: &Self) -> cmp::Ordering {
                        todo!()
                    }
                    #[diplomat::attr(auto, comparison)]
                    pub fn comparison_mut(&mut self, other: &Self) -> cmp::Ordering {
                        todo!()
                    }
                }
            }
        }
    }

    /// A valid iterable/iterator pair, plus the broken variants (missing
    /// return, missing self, non-opaque/non-Option return types).
    #[test]
    fn test_iterator() {
        uitest_lowering_attr! { hir::BackendAttrSupport::all_true(),
            #[diplomat::bridge]
            mod ffi {
                #[diplomat::opaque]
                struct Opaque(Vec<u8>);

                #[diplomat::opaque]
                struct OpaqueIterator<'a>(std::slice::Iter<'a>);

                impl Opaque {
                    #[diplomat::attr(auto, iterable)]
                    pub fn iterable<'a>(&'a self) -> Box<OpaqueIterator<'a>> {
                        Box::new(OpaqueIterator(self.0.iter()))
                    }
                }

                impl OpaqueIterator {
                    #[diplomat::attr(auto, iterator)]
                    pub fn next(&mut self) -> Option<u8> {
                        self.0.next()
                    }
                }

                #[diplomat::opaque]
                struct Broken;

                impl Broken {
                    #[diplomat::attr(auto, iterable)]
                    pub fn iterable_no_return(&self) {}
                    #[diplomat::attr(auto, iterable)]
                    pub fn iterable_no_self() -> Box<BrokenIterator> { todo!() }
                    #[diplomat::attr(auto, iterable)]
                    pub fn iterable_non_custom(&self) -> u8 { todo!() }
                }

                #[diplomat::opaque]
                struct BrokenIterator;

                impl BrokenIterator {
                    #[diplomat::attr(auto, iterator)]
                    pub fn iterator_no_return(&self) {}
                    #[diplomat::attr(auto, iterator)]
                    pub fn iterator_no_self() -> Option<u8> { todo!() }
                    #[diplomat::attr(auto, iterator)]
                    pub fn iterator_no_option(&self) -> u8 { todo!() }
                }
            }
        }
    }

    /// With a default (all-false) support set, option-typed params/fields
    /// should be flagged, while Option return values stay allowed.
    #[test]
    fn test_unsupported_features() {
        uitest_lowering_attr! { hir::BackendAttrSupport::default(),
            #[diplomat::bridge]
            mod ffi {
                use std::cmp;
                use diplomat_runtime::DiplomatOption;

                #[diplomat::opaque]
                struct Opaque;

                struct Struct {
                    pub a: u8,
                    pub b: u8,
                    pub c: DiplomatOption<u8>,
                }

                struct Struct2 {
                    pub a: DiplomatOption<Struct>,
                }

                #[diplomat::out]
                struct OutStruct {
                    pub option: DiplomatOption<u8>
                }

                impl Opaque {
                    pub fn take_option(&self, option: DiplomatOption<u8>) {
                        todo!()
                    }
                    // Always ok since this translates to a Resulty return
                    pub fn returning_option_is_ok(&self) -> Option<u8> {
                        todo!()
                    }
                }
            }
        }
    }

    /// `generate_mocking_interface` is valid on an opaque type.
    #[test]
    fn test_mocking_interface_for_opaque_type() {
        uitest_lowering_attr! { hir::BackendAttrSupport::all_true(),
            #[diplomat::bridge]
            mod ffi {
                #[diplomat::opaque]
                #[diplomat::attr(tests, generate_mocking_interface)]
                pub struct Foo {
                    pub x: u32,
                    pub y: u32,
                }

                impl Foo {
                    pub fn new() -> Box<Self> {
                        Box::new(Self { x: 0, y: 0 })
                    }

                    pub fn get_x(&self) -> u32 {
                        self.x
                    }

                    pub fn get_y(&self) -> u32 {
                        self.y
                    }
                }
            }
        }
    }

    /// `generate_mocking_interface` must be rejected on a non-opaque type.
    #[test]
    fn test_mocking_interface_for_non_opaque_type() {
        uitest_lowering_attr! { hir::BackendAttrSupport::all_true(),
            #[diplomat::bridge]
            mod ffi {
                #[diplomat::attr(tests, generate_mocking_interface)]
                pub struct Foo {
                    pub x: u32,
                    pub y: u32,
                }

                impl Foo {
                    pub fn new() -> Self {
                        Self { x: 0, y: 0 }
                    }

                    pub fn get_x(self) -> u32 {
                        self.x
                    }

                    pub fn get_y(self) -> u32 {
                        self.y
                    }
                }
            }
        }
    }

    /// `generate_mocking_interface` with a backend that doesn't support it.
    #[test]
    fn test_mocking_interface_for_unsupported_backend() {
        uitest_lowering_attr! { hir::BackendAttrSupport::default(),
            #[diplomat::bridge]
            mod ffi {
                #[diplomat::attr(tests, generate_mocking_interface)]
                pub struct Foo {
                    pub x: u32,
                    pub y: u32,
                }

                impl Foo {
                    pub fn new() -> Self {
                        Self { x: 0, y: 0 }
                    }

                    pub fn get_x(self) -> u32 {
                        self.x
                    }

                    pub fn get_y(self) -> u32 {
                        self.y
                    }
                }
            }
        }
    }

    /// `abi_compatible` structs may be passed in slices when supported.
    #[test]
    fn test_primitive_struct_slices() {
        uitest_lowering_attr! { hir::BackendAttrSupport::all_true(),
            #[diplomat::bridge]
            mod ffi {
                #[diplomat::attr(auto, abi_compatible)]
                pub struct Foo {
                    pub x: u32,
                    pub y: u32
                }

                impl Foo {
                    pub fn takes_slice(sl : &[Foo]) {
                        todo!()
                    }
                }
            }
        }
    }

    /// Struct slices should error when the backend lacks `abi_compatibles`.
    #[test]
    fn test_primitive_struct_slices_for_unsupported_backend() {
        uitest_lowering_attr! { hir::BackendAttrSupport::default(),
            #[diplomat::bridge]
            mod ffi {
                #[diplomat::attr(auto, abi_compatible)]
                pub struct Foo {
                    pub x: u32,
                    pub y: u32
                }

                impl Foo {
                    pub fn takes_slice(sl : &[Foo]) {
                        todo!()
                    }
                }
            }
        }
    }

    /// `&mut self` on a struct should error when the backend lacks `struct_refs`.
    #[test]
    fn test_struct_ref_for_unsupported_backend() {
        uitest_lowering_attr! { hir::BackendAttrSupport::default(),
            #[diplomat::bridge]
            mod ffi {
                #[diplomat::attr(auto, abi_compatible)]
                pub struct Foo {
                    pub x: u32,
                    pub y: u32
                }

                impl Foo {
                    pub fn takes_mut(&mut self) {
                        todo!()
                    }
                }
            }
        }
    }
}
|
rust
|
github
|
https://github.com/nodejs/node
|
deps/crates/vendor/diplomat_core/src/hir/attrs.rs
|
/**
 * Error used to reject a `CancelablePromise` when it is cancelled.
 */
export class CancelError extends Error {
  /** A CancelError always reports itself as cancelled. */
  public get isCancelled(): boolean {
    return true;
  }

  constructor(message: string) {
    super(message);
    this.name = 'CancelError';
  }
}
/**
 * Callable passed to a `CancelablePromise` executor: invoke it with a handler
 * to run on cancellation, and read its properties to inspect promise state.
 */
export interface OnCancel {
  readonly isResolved: boolean;
  readonly isRejected: boolean;
  readonly isCancelled: boolean;

  // Registers a handler to be invoked when the promise is cancelled.
  (cancelHandler: () => void): void;
}
/**
 * A Promise wrapper that supports cooperative cancellation via `cancel()`.
 *
 * The executor receives an extra `onCancel` callable for registering cleanup
 * handlers; once the promise is resolved, rejected, or cancelled, all further
 * state transitions are ignored.
 */
export class CancelablePromise<T> implements Promise<T> {
  private _isResolved: boolean;
  private _isRejected: boolean;
  private _isCancelled: boolean;
  // Handlers registered via onCancel(); run (in order) when cancel() is called.
  readonly cancelHandlers: (() => void)[];
  // The underlying promise that then/catch/finally delegate to.
  readonly promise: Promise<T>;
  private _resolve?: (value: T | PromiseLike<T>) => void;
  private _reject?: (reason?: unknown) => void;

  constructor(
    executor: (
      resolve: (value: T | PromiseLike<T>) => void,
      reject: (reason?: unknown) => void,
      onCancel: OnCancel
    ) => void
  ) {
    this._isResolved = false;
    this._isRejected = false;
    this._isCancelled = false;
    this.cancelHandlers = [];
    this.promise = new Promise<T>((resolve, reject) => {
      this._resolve = resolve;
      this._reject = reject;

      // Wrapped resolve: a no-op once any terminal state has been reached.
      const onResolve = (value: T | PromiseLike<T>): void => {
        if (this._isResolved || this._isRejected || this._isCancelled) {
          return;
        }
        this._isResolved = true;
        if (this._resolve) this._resolve(value);
      };

      // Wrapped reject: same guard as onResolve.
      const onReject = (reason?: unknown): void => {
        if (this._isResolved || this._isRejected || this._isCancelled) {
          return;
        }
        this._isRejected = true;
        if (this._reject) this._reject(reason);
      };

      // Handlers registered after settlement/cancellation are dropped.
      const onCancel = (cancelHandler: () => void): void => {
        if (this._isResolved || this._isRejected || this._isCancelled) {
          return;
        }
        this.cancelHandlers.push(cancelHandler);
      };

      // Expose live state on the onCancel callable itself (see OnCancel).
      Object.defineProperty(onCancel, 'isResolved', {
        get: (): boolean => this._isResolved,
      });

      Object.defineProperty(onCancel, 'isRejected', {
        get: (): boolean => this._isRejected,
      });

      Object.defineProperty(onCancel, 'isCancelled', {
        get: (): boolean => this._isCancelled,
      });

      // The executor's return value is ignored by the Promise constructor;
      // `return` here only ends this callback early.
      return executor(onResolve, onReject, onCancel as OnCancel);
    });
  }

  get [Symbol.toStringTag]() {
    return "Cancellable Promise";
  }

  public then<TResult1 = T, TResult2 = never>(
    onFulfilled?: ((value: T) => TResult1 | PromiseLike<TResult1>) | null,
    onRejected?: ((reason: unknown) => TResult2 | PromiseLike<TResult2>) | null
  ): Promise<TResult1 | TResult2> {
    return this.promise.then(onFulfilled, onRejected);
  }

  public catch<TResult = never>(
    onRejected?: ((reason: unknown) => TResult | PromiseLike<TResult>) | null
  ): Promise<T | TResult> {
    return this.promise.catch(onRejected);
  }

  public finally(onFinally?: (() => void) | null): Promise<T> {
    return this.promise.finally(onFinally);
  }

  /**
   * Cancels the promise: runs all registered cancel handlers, then rejects
   * the underlying promise with a `CancelError`. No-op if already settled.
   */
  public cancel(): void {
    if (this._isResolved || this._isRejected || this._isCancelled) {
      return;
    }
    this._isCancelled = true;
    if (this.cancelHandlers.length) {
      try {
        for (const cancelHandler of this.cancelHandlers) {
          cancelHandler();
        }
      } catch (error) {
        console.warn('Cancellation threw an error', error);
        // NOTE(review): when a handler throws, this returns WITHOUT rejecting,
        // leaving the promise pending forever — presumably intentional
        // best-effort behavior in this generated code; verify upstream.
        return;
      }
    }
    this.cancelHandlers.length = 0;
    if (this._reject) this._reject(new CancelError('Request aborted'));
  }

  public get isCancelled(): boolean {
    return this._isCancelled;
  }
}
|
typescript
|
github
|
https://github.com/apache/airflow
|
airflow-core/src/airflow/ui/openapi-gen/requests/core/CancelablePromise.ts
|
# Gnome15 - Suite of tools for the Logitech G series keyboards and headsets
# Copyright (C) 2010 Brett Smith <tanktarta@blueyonder.co.uk>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import threading
class Runnable(object):
    '''Helper object to create thread content objects doing periodic tasks, or
    tasks supporting premature termination.

    Override execute() in inherited class. This will be called until the
    thread is stopped. A Runnable can be started multiple times opposed to
    threading.Thread.

    To write a non-periodic task that should support premature termination,
    simply override run() and call is_about_to_stop() at possible termination
    points.
    '''

    def __init__(self):
        # Guarded by __mutex: True while the runnable should keep executing.
        self.__keep_running = True
        self.__mutex = threading.Lock()

    def execute(self):
        '''This method must be implemented and will be executed in an infinite
        loop as long as stop() was not called.

        An implementation is free to check is_about_to_stop() at any time to
        allow a clean termination of current processing before reaching the end
        of execute().
        '''
        pass

    def is_about_to_stop(self):
        '''Returns whether this thread will terminate after completing the
        current execution cycle.

        @return True if thread will terminate after current execution cycle.
        '''
        # `with` guarantees the lock is released even if an exception occurs,
        # unlike manual acquire()/release() pairs.
        with self.__mutex:
            return not self.__keep_running

    def run(self):
        '''Implements the infinite loop. Do not override, but override
        execute() instead.
        '''
        while not self.is_about_to_stop():
            self.execute()

    def start(self):
        '''Starts the thread. If stop() was called, but start() was not, run()
        will do nothing.
        '''
        with self.__mutex:
            self.__keep_running = True

    def stop(self):
        '''Flags this thread to be terminated after next completed execution
        cycle. Calling this method will NOT stop the thread instantaniously,
        but will complete the current operation and terminate in a clean way.
        '''
        with self.__mutex:
            self.__keep_running = False
|
unknown
|
codeparrot/codeparrot-clean
| ||
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import random
import unittest
from pyspark import SparkContext, SparkConf
class ConfTests(unittest.TestCase):
    """Tests for PySpark worker configuration settings."""

    def test_memory_conf(self):
        """Sorting an RDD should succeed under several worker-memory limits."""
        for memory in ["1T", "1G", "1M", "1024K"]:
            sc = SparkContext(conf=SparkConf().set("spark.python.worker.memory", memory))
            try:
                data = list(range(1024))
                random.shuffle(data)
                rdd = sc.parallelize(data, 4)
                self.assertEqual(sorted(data), rdd.sortBy(lambda x: x).collect())
            finally:
                # Always stop the context — even on assertion failure — so the
                # next loop iteration (or test) can create a fresh one.
                sc.stop()
if __name__ == "__main__":
from pyspark.tests.test_conf import * # noqa: F401
try:
import xmlrunner # type: ignore[import]
testRunner = xmlrunner.XMLTestRunner(output='target/test-reports', verbosity=2)
except ImportError:
testRunner = None
unittest.main(testRunner=testRunner, verbosity=2)
|
unknown
|
codeparrot/codeparrot-clean
| ||
<!--Copyright 2020 The HuggingFace Team. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
⚠️ Note that this file is in Markdown but contain specific syntax for our doc-builder (similar to MDX) that may not be
rendered properly in your Markdown viewer.
-->
<div style="float: right;">
<div class="flex flex-wrap space-x-1">
<img alt="PyTorch" src="https://img.shields.io/badge/PyTorch-DE3412?style=flat&logo=pytorch&logoColor=white">
<img alt="TensorFlow" src="https://img.shields.io/badge/TensorFlow-FF6F00?style=flat&logo=tensorflow&logoColor=white">
<img alt="Flax" src="https://img.shields.io/badge/Flax-29a79b.svg?style=flat&logo=data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAC0AAAAtCAMAAAANxBKoAAAC7lBMVEUAAADg5vYHPVgAoJH+/v76+v39/f9JbLP///9+AIgAnY3///+mcqzt8fXy9fgkXa3Ax9709fr+///9/f8qXq49qp5AaLGMwrv8/P0eW60VWawxYq8yqJzG2dytt9Wyu9elzci519Lf3O3S2efY3OrY0+Xp7PT///////+dqNCexMc6Z7AGpJeGvbenstPZ5ejQ1OfJzOLa7ejh4+/r8fT29vpccbklWK8PVa0AS6ghW63O498vYa+lsdKz1NDRt9Kw1c672tbD3tnAxt7R6OHp5vDe7OrDyuDn6vLl6/EAQKak0MgATakkppo3ZK/Bz9y8w9yzu9jey97axdvHzeG21NHH4trTwthKZrVGZLSUSpuPQJiGAI+GAI8SWKydycLL4d7f2OTi1+S9xNzL0ePT6OLGzeEAo5U0qJw/aLEAo5JFa7JBabEAp5Y4qZ2QxLyKmsm3kL2xoMOehrRNb7RIbbOZgrGre68AUqwAqZqNN5aKJ5N/lMq+qsd8kMa4pcWzh7muhLMEV69juq2kbKqgUaOTR5uMMZWLLZSGAI5VAIdEAH+ovNDHuNCnxcy3qcaYx8K8msGplrx+wLahjbYdXrV6vbMvYK9DrZ8QrZ8tqJuFms+Sos6sw8ecy8RffsNVeMCvmb43aLltv7Q4Y7EZWK4QWa1gt6meZKUdr6GOAZVeA4xPAISyveLUwtivxtKTpNJ2jcqfvcltiMiwwcfAoMVxhL+Kx7xjdrqTe60tsaNQs6KaRKACrJ6UTZwkqpqTL5pkHY4AloSgsd2ptNXPvNOOncuxxsqFl8lmg8apt8FJcr9EbryGxLqlkrkrY7dRa7ZGZLQ5t6iXUZ6PPpgVpZeJCJFKAIGareTa0+KJod3H0deY2M+esM25usmYu8d2zsJOdcBVvrCLbqcAOaaHaKQAMaScWqKBXqCXMJ2RHpiLF5NmJZAdAHN2kta11dKu1M+DkcZLdb+Mcql3TppyRJdzQ5ZtNZNlIY+DF4+voCOQAAAAZ3RSTlMABAT+MEEJ/RH+/TP+Zlv+pUo6Ifz8+fco/fz6+evr39S9nJmOilQaF/7+/f38+smmoYp6b1T+/v7++vj189zU0tDJxsGzsrKSfv34+Pf27dDOysG9t6+n/vv6+vr59uzr1tG+tZ6Qg9Ym3QAABR5JREFUSMeNlVVUG1EQhpcuxEspXqS0SKEtxQp1d3d332STTRpIQhIISQgJhODu7lAoDoUCpe7u7u7+1puGpqnCPOyZvffbOXPm/PsP9JfQgyCC+tmTABTOcbxDz/heENS7/1F+9nhvkHePG0wNDLbGWwdXL+rbLWvpmZHXD8+gMfBjTh+aSe6Gnn7lwQIOTR0c8wfX3PWgv7avbdKwf/ZoBp1Gp/PvuvXW3vw5ib7emnTW4OR+3D4jB9vjNJ/7gNvfWWeH/TO/JyYrsiKCRjVEZA3UB+96kON+DxOQ/NLE8PE5iUYgIXjFnCOlxEQMaSGVxjg4gxOnEycGz8bptuNjVx08LscIgrzH3umcn+KKtiBIyvzOO2O99aAdR8cF19oZalnCtvREUw79tCd5sow1g1UKM6kXqUx4T8wsi3sTjJ3yzDmmhenLXLpo8u45eG5y4Vvbk6kkC4LLtJMowkSQxmk4ggVJEG+7c6QpHT8vvW9X7/o7+3ELmiJi2mEzZJiz8cT6TBlanBk70cB5GGIGC1gRDdZ00yADLW1FL6gqhtvNXNG5S9gdSrk4M1qu7JAsmYshzDS4peoMrU/gT7qQdqYGZaYhxZmVbGJAm/CS/HloWyhRUlknQ9KYcExTwS80d3VNOxUZJpITYyspl0LbhArhpZCD9cRWEQuh
YkNGMHToQ/2Cs6swJlb39CsllxdXX6IUKh/H5jbnSsPKjgmoaFQ1f8wRLR0UnGE/RcDEjj2jXG1WVTwUs8+zxfcrVO+vSsuOpVKxCfYZiQ0/aPKuxQbQ8lIz+DClxC8u+snlcJ7Yr1z1JPqUH0V+GDXbOwAib931Y4Imaq0NTIXPXY+N5L18GJ37SVWu+hwXff8l72Ds9XuwYIBaXPq6Shm4l+Vl/5QiOlV+uTk6YR9PxKsI9xNJny31ygK1e+nIRC1N97EGkFPI+jCpiHe5PCEy7oWqWSwRrpOvhFzcbTWMbm3ZJAOn1rUKpYIt/lDhW/5RHHteeWFN60qo98YJuoq1nK3uW5AabyspC1BcIEpOhft+SZAShYoLSvnmSfnYADUERP5jJn2h5XtsgCRuhYQqAvwTwn33+YWEKUI72HX5AtfSAZDe8F2DtPPm77afhl0EkthzuCQU0BWApgQIH9+KB0JhopMM7bJrdTRoleM2JAVNMyPF+wdoaz+XJpGoVAQ7WXUkcV7gT3oUZyi/ISIJAVKhgNp+4b4veCFhYVJw4locdSjZCp9cPUhLF9EZ3KKzURepMEtCDPP3VcWFx4UIiZIklIpFNfHpdEafIF2aRmOcrUmjohbT2WUllbmRvgfbythbQO3222fpDJoufaQPncYYuqoGtUEsCJZL6/3PR5b4syeSjZMQG/T2maGANlXT2v8S4AULWaUkCxfLyW8iW4kdka+nEMjxpL2NCwsYNBp+Q61PF43zyDg9Bm9+3NNySn78jMZUUkumqE4Gp7JmFOdP1vc8PpRrzj9+wPinCy8K1PiJ4aYbnTYpCCbDkBSbzhu2QJ1Gd82t8jI8TH51+OzvXoWbnXUOBkNW+0mWFwGcGOUVpU81/n3TOHb5oMt2FgYGjzau0Nif0Ss7Q3XB33hjjQHjHA5E5aOyIQc8CBrLdQSs3j92VG+3nNEjbkbdbBr9zm04ruvw37vh0QKOdeGIkckc80fX3KH/h7PT4BOjgCty8VZ5ux1MoO5Cf5naca2LAsEgehI+drX8o/0Nu+W0m6K/I9gGPd/dfx/EN/wN62AhsBWuAAAAAElFTkSuQmCC
">
<img alt="SDPA" src="https://img.shields.io/badge/SDPA-DE3412?style=flat&logo=pytorch&logoColor=white">
</div>
</div>
# BERT
[BERT](https://huggingface.co/papers/1810.04805) 是一个在无标签的文本数据上预训练的双向 transformer,用于预测句子中被掩码的(masked) token,以及预测一个句子是否跟随在另一个句子之后。其主要思想是,在预训练过程中,通过随机掩码一些 token,让模型利用左右上下文的信息预测它们,从而获得更全面深入的理解。此外,BERT 具有很强的通用性,其学习到的语言表示可以通过额外的层或头进行微调,从而适配其他下游 NLP 任务。
你可以在 [BERT](https://huggingface.co/collections/google/bert-release-64ff5e7a4be99045d1896dbc) 集合下找到 BERT 的所有原始 checkpoint。
> [!TIP]
> 点击右侧边栏中的 BERT 模型,以查看将 BERT 应用于不同语言任务的更多示例。
下面的示例演示了如何使用 [`Pipeline`], [`AutoModel`] 和命令行预测 `[MASK]` token。
<hfoptions id="usage">
<hfoption id="Pipeline">
```py
import torch
from transformers import pipeline
pipeline = pipeline(
task="fill-mask",
model="google-bert/bert-base-uncased",
dtype=torch.float16,
device=0
)
pipeline("Plants create [MASK] through a process known as photosynthesis.")
```
</hfoption>
<hfoption id="AutoModel">
```py
import torch
from transformers import AutoModelForMaskedLM, AutoTokenizer
tokenizer = AutoTokenizer.from_pretrained(
"google-bert/bert-base-uncased",
)
model = AutoModelForMaskedLM.from_pretrained(
"google-bert/bert-base-uncased",
dtype=torch.float16,
device_map="auto",
attn_implementation="sdpa"
)
inputs = tokenizer("Plants create [MASK] through a process known as photosynthesis.", return_tensors="pt").to("cuda")
with torch.no_grad():
outputs = model(**inputs)
predictions = outputs.logits
masked_index = torch.where(inputs['input_ids'] == tokenizer.mask_token_id)[1]
predicted_token_id = predictions[0, masked_index].argmax(dim=-1)
predicted_token = tokenizer.decode(predicted_token_id)
print(f"The predicted token is: {predicted_token}")
```
</hfoption>
<hfoption id="transformers">
```bash
echo -e "Plants create [MASK] through a process known as photosynthesis." | transformers run --task fill-mask --model google-bert/bert-base-uncased --device 0
```
</hfoption>
</hfoptions>
## 注意
- 输入内容应在右侧进行填充,因为 BERT 使用绝对位置嵌入。
## BertConfig
[[autodoc]] BertConfig
- all
## BertTokenizer
[[autodoc]] BertTokenizer
- get_special_tokens_mask
- save_vocabulary
## BertTokenizerLegacy
[[autodoc]] BertTokenizerLegacy
## BertTokenizerFast
[[autodoc]] BertTokenizerFast
## BertModel
[[autodoc]] BertModel
- forward
## BertForPreTraining
[[autodoc]] BertForPreTraining
- forward
## BertLMHeadModel
[[autodoc]] BertLMHeadModel
- forward
## BertForMaskedLM
[[autodoc]] BertForMaskedLM
- forward
## BertForNextSentencePrediction
[[autodoc]] BertForNextSentencePrediction
- forward
## BertForSequenceClassification
[[autodoc]] BertForSequenceClassification
- forward
## BertForMultipleChoice
[[autodoc]] BertForMultipleChoice
- forward
## BertForTokenClassification
[[autodoc]] BertForTokenClassification
- forward
## BertForQuestionAnswering
[[autodoc]] BertForQuestionAnswering
- forward
## Bert specific outputs
[[autodoc]] models.bert.modeling_bert.BertForPreTrainingOutput
|
unknown
|
github
|
https://github.com/huggingface/transformers
|
docs/source/zh/model_doc/bert.md
|
import os
from six.moves.urllib.parse import urljoin
from abc import ABCMeta, abstractmethod, abstractproperty
def get_source_file(source_files, tests_root, manifest, path):
    """Look *path* up in the *source_files* cache, creating and caching a
    SourceFile on a miss.  When no cache is supplied, always build a
    fresh SourceFile."""
    def _create():
        # Imported lazily so the module is only loaded when actually needed.
        from .sourcefile import SourceFile
        return SourceFile(tests_root, path, manifest.url_base)

    if source_files is None:
        return _create()
    if path not in source_files:
        source_files[path] = _create()
    return source_files[path]
class ManifestItem(object):
    """Abstract base class for a single entry in the test manifest."""

    __metaclass__ = ABCMeta

    # Subclasses override this with their manifest type string (e.g. "reftest").
    item_type = None

    def __init__(self, source_file, manifest=None):
        self.manifest = manifest
        self.source_file = source_file

    @abstractproperty
    def id(self):
        """The test's id (usually its url)"""
        pass

    @property
    def path(self):
        """The test path relative to the test_root"""
        return self.source_file.rel_path

    @property
    def https(self):
        # True when the source file's metadata flags include "https".
        return "https" in self.source_file.meta_flags

    def key(self):
        """A unique identifier for the test"""
        return (self.item_type, self.id)

    def meta_key(self):
        """Extra metadata that doesn't form part of the test identity, but for
        which changes mean regenerating the manifest (e.g. the test timeout)."""
        return ()

    def __eq__(self, other):
        if not hasattr(other, "key"):
            return False
        return self.key() == other.key()

    def __hash__(self):
        # Hash only the identity key.  __eq__ ignores meta_key(), so the
        # previous hash(self.key() + self.meta_key()) let items that compare
        # equal hash differently, violating the eq/hash contract and breaking
        # set/dict membership for such items.
        return hash(self.key())

    def __repr__(self):
        return "<%s.%s id=%s, path=%s>" % (self.__module__, self.__class__.__name__, self.id, self.path)

    def to_json(self):
        # Base serialisation: just an (empty) extras dict.
        return [{}]

    @classmethod
    def from_json(cls, manifest, tests_root, path, obj, source_files=None):
        source_file = get_source_file(source_files, tests_root, manifest, path)
        return cls(source_file,
                   manifest=manifest)
class URLManifestItem(ManifestItem):
    """A manifest item addressed by a URL relative to a base."""

    def __init__(self, source_file, url, url_base="/", manifest=None):
        ManifestItem.__init__(self, source_file, manifest=manifest)
        self._url = url
        self.url_base = url_base

    @property
    def id(self):
        # The resolved URL doubles as the unique id.
        return self.url

    @property
    def url(self):
        # Resolve the stored relative URL against the configured base.
        return urljoin(self.url_base, self._url)

    def to_json(self):
        # Serialised form: [relative_url, {extras}].
        return [self._url, {}]

    @classmethod
    def from_json(cls, manifest, tests_root, path, obj, source_files=None):
        source_file = get_source_file(source_files, tests_root, manifest, path)
        url, extras = obj
        return cls(source_file, url,
                   url_base=manifest.url_base,
                   manifest=manifest)
class TestharnessTest(URLManifestItem):
    """A testharness.js test, optionally carrying a timeout override."""

    item_type = "testharness"

    def __init__(self, source_file, url, url_base="/", timeout=None, manifest=None):
        URLManifestItem.__init__(self, source_file, url,
                                 url_base=url_base, manifest=manifest)
        self.timeout = timeout

    def meta_key(self):
        # A timeout change must trigger manifest regeneration.
        return (self.timeout,)

    def to_json(self):
        serialised = URLManifestItem.to_json(self)
        if self.timeout is not None:
            serialised[-1]["timeout"] = self.timeout
        return serialised

    @classmethod
    def from_json(cls, manifest, tests_root, path, obj, source_files=None):
        source_file = get_source_file(source_files, tests_root, manifest, path)
        url, extras = obj
        return cls(source_file, url,
                   url_base=manifest.url_base,
                   timeout=extras.get("timeout"),
                   manifest=manifest)
class RefTestNode(URLManifestItem):
    """A node in a reftest comparison graph: a URL plus its references.

    Each reference is a (ref_url, ref_type) pair where ref_type is
    "==" (match) or "!=" (mismatch).
    """

    item_type = "reftest_node"

    def __init__(self, source_file, url, references, url_base="/", timeout=None,
                 viewport_size=None, dpi=None, manifest=None):
        URLManifestItem.__init__(self, source_file, url, url_base=url_base, manifest=manifest)
        # Reject anything other than the two recognised comparison operators.
        for _, ref_type in references:
            if ref_type not in ["==", "!="]:
                raise ValueError("Unrecognised ref_type %s" % ref_type)
        # Stored as an immutable tuple.
        self.references = tuple(references)
        self.timeout = timeout
        self.viewport_size = viewport_size
        self.dpi = dpi

    def meta_key(self):
        # Any of these changing requires regenerating the manifest.
        return (self.timeout, self.viewport_size, self.dpi)

    def to_json(self):
        # Serialised form: [url, references, {extras}] with only the
        # non-default extras included.
        rv = [self.url, self.references, {}]
        extras = rv[-1]
        if self.timeout is not None:
            extras["timeout"] = self.timeout
        if self.viewport_size is not None:
            extras["viewport_size"] = self.viewport_size
        if self.dpi is not None:
            extras["dpi"] = self.dpi
        return rv

    @classmethod
    def from_json(cls, manifest, tests_root, path, obj, source_files=None):
        source_file = get_source_file(source_files, tests_root, manifest, path)
        url, references, extras = obj
        return cls(source_file,
                   url,
                   references,
                   url_base=manifest.url_base,
                   timeout=extras.get("timeout"),
                   viewport_size=extras.get("viewport_size"),
                   dpi=extras.get("dpi"),
                   manifest=manifest)

    def to_RefTest(self):
        # Reinterpret this node as a RefTest without re-running __init__
        # validation: copy the instance dict onto a bare RefTest.
        if type(self) == RefTest:
            return self
        rv = RefTest.__new__(RefTest)
        rv.__dict__.update(self.__dict__)
        return rv

    def to_RefTestNode(self):
        # Inverse of to_RefTest: downcast back to a plain node.
        if type(self) == RefTestNode:
            return self
        rv = RefTestNode.__new__(RefTestNode)
        rv.__dict__.update(self.__dict__)
        return rv
class RefTest(RefTestNode):
    # A reftest node that is itself enumerated as a runnable test.
    item_type = "reftest"
class ManualTest(URLManifestItem):
    # Item of manifest type "manual".
    item_type = "manual"
class ConformanceCheckerTest(URLManifestItem):
    # Item of manifest type "conformancechecker".
    item_type = "conformancechecker"
class VisualTest(URLManifestItem):
    # Item of manifest type "visual".
    item_type = "visual"
class Stub(URLManifestItem):
    # Item of manifest type "stub".
    item_type = "stub"
class WebdriverSpecTest(URLManifestItem):
    """A webdriver spec ("wdspec") test with an optional timeout."""

    item_type = "wdspec"

    def __init__(self, source_file, url, url_base="/", timeout=None, manifest=None):
        URLManifestItem.__init__(self, source_file, url, url_base=url_base, manifest=manifest)
        # Optional per-test timeout override.
        self.timeout = timeout
class SupportFile(ManifestItem):
    """A manifest item of type "support", identified by its path rather
    than by a URL."""

    item_type = "support"

    @property
    def id(self):
        # No URL: the path relative to the tests root serves as the id.
        return self.source_file.rel_path
|
unknown
|
codeparrot/codeparrot-clean
| ||
This directory contains all Redis dependencies, except for the libc that
should be provided by the operating system.
* **Jemalloc** is our memory allocator, used as replacement for libc malloc on Linux by default. It has good performances and excellent fragmentation behavior. This component is upgraded from time to time.
* **hiredis** is the official C client library for Redis. It is used by redis-cli, redis-benchmark and Redis Sentinel. It is part of the Redis official ecosystem but is developed externally from the Redis repository, so we just upgrade it as needed.
* **linenoise** is a readline replacement. It is developed by the same authors of Redis but is managed as a separated project and updated as needed.
* **lua** is Lua 5.1 with minor changes for security and additional libraries.
* **hdr_histogram** Used for per-command latency tracking histograms.
How to upgrade the above dependencies
===
Jemalloc
---
Jemalloc is modified with changes that allow us to implement the Redis
active defragmentation logic. However this feature of Redis is not mandatory
and Redis is able to understand if the Jemalloc version it is compiled
against supports such Redis-specific modifications. So in theory, if you
are not interested in the active defragmentation, you can replace Jemalloc
just following these steps:
1. Remove the jemalloc directory.
2. Substitute it with the new jemalloc source tree.
3. Edit the Makefile located in the same directory as the README you are
reading, and change the --with-version in the Jemalloc configure script
options with the version you are using. This is required because otherwise
Jemalloc configuration script is broken and will not work nested in another
git repository.
However note that we change Jemalloc settings via the `configure` script of Jemalloc using the `--with-lg-quantum` option, setting it to the value of 3 instead of 4. This provides us with more size classes that better suit the Redis data structures, in order to gain memory efficiency.
If you want to upgrade Jemalloc while also providing support for
active defragmentation, in addition to the above steps you need to perform
the following additional steps:
4. In Jemalloc tree, file `include/jemalloc/jemalloc_macros.h.in`, make sure
to add `#define JEMALLOC_FRAG_HINT`.
5. Implement the function `je_get_defrag_hint()` inside `src/jemalloc.c`. You
can see how it is implemented in the current Jemalloc source tree shipped
with Redis, and rewrite it according to the new Jemalloc internals, if they
changed, otherwise you could just copy the old implementation if you are
upgrading just to a similar version of Jemalloc.
#### Updating/upgrading jemalloc
The jemalloc directory is pulled as a subtree from the upstream jemalloc github repo. To update it you should run from the project root:
1. `git subtree pull --prefix deps/jemalloc https://github.com/jemalloc/jemalloc.git <version-tag> --squash`<br>
This should hopefully merge the local changes into the new version.
2. In case any conflicts arise (due to our changes) you'll need to resolve them and commit.
3. Reconfigure jemalloc:<br>
```sh
rm deps/jemalloc/VERSION deps/jemalloc/configure
cd deps/jemalloc
./autogen.sh --with-version=<version-tag>-0-g0
```
4. Update jemalloc's version in `deps/Makefile`: search for "`--with-version=<old-version-tag>-0-g0`" and update it accordingly.
5. Commit the changes (VERSION,configure,Makefile).
Hiredis
---
Hiredis is used by Sentinel, `redis-cli` and `redis-benchmark`. Like Redis, uses the SDS string library, but not necessarily the same version. In order to avoid conflicts, this version has all SDS identifiers prefixed by `hi`.
1. `git subtree pull --prefix deps/hiredis https://github.com/redis/hiredis.git <version-tag> --squash`<br>
This should hopefully merge the local changes into the new version.
2. In case any conflicts arise (due to our changes), you'll need to resolve them and commit.
Linenoise
---
Linenoise is rarely upgraded as needed. The upgrade process is trivial since
Redis uses a non modified version of linenoise, so to upgrade just do the
following:
1. Remove the linenoise directory.
2. Substitute it with the new linenoise source tree.
Lua
---
We use Lua 5.1 and no upgrade is planned currently, since we don't want to break
Lua scripts for new Lua features: in the context of Redis Lua scripts the
capabilities of 5.1 are usually more than enough, the release is rock solid,
and we definitely don't want to break old scripts.
So upgrading of Lua is up to the Redis project maintainers and should be a
manual procedure performed by taking a diff between the different versions.
Currently we have at least the following differences between official Lua 5.1
and our version:
1. Makefile is modified to allow a different compiler than GCC.
2. We have the implementation source code, and directly link to the following external libraries: `lua_cjson.o`, `lua_struct.o`, `lua_cmsgpack.o` and `lua_bit.o`.
3. There is a security fix in `ldo.c`, line 498: The check for `LUA_SIGNATURE[0]` is removed in order to avoid direct bytecode execution.
Hdr_Histogram
---
Updated source can be found here: https://github.com/HdrHistogram/HdrHistogram_c
We use a customized version based on master branch commit e4448cf6d1cd08fff519812d3b1e58bd5a94ac42.
1. Compare all changes under /hdr_histogram directory to upstream master commit e4448cf6d1cd08fff519812d3b1e58bd5a94ac42
2. Copy updated files from newer version onto files in /hdr_histogram.
3. Apply the changes from 1 above to the updated files.
|
unknown
|
github
|
https://github.com/redis/redis
|
deps/README.md
|
from chanutils import get_doc, select_all, select_one
from chanutils import get_attr, get_text, get_text_content
from playitem import PlayItem, PlayItemList, MoreEpisodesAction
# Base site URL and the search endpoint derived from it.
_PREFIX = 'https://www.itv.com'
_SEARCH_URL = _PREFIX + '/itvplayer/search/term/'
# Category browse pages exposed as feeds, in display order; feed(idx)
# indexes directly into this list.
_FEEDLIST = [
    {'title':'Popular', 'url':'https://www.itv.com/itvplayer/categories/browse/popular'},
    {'title':'Children', 'url':'https://www.itv.com/itvplayer/categories/children/popular'},
    {'title':'Comedy', 'url':'https://www.itv.com/itvplayer/categories/comedy/popular'},
    {'title':'Drama & Soaps', 'url':'https://www.itv.com/itvplayer/categories/drama-soaps/popular'},
    {'title':'Entertainment', 'url':'https://www.itv.com/itvplayer/categories/entertainment/popular'},
    {'title':'Factual', 'url':'https://www.itv.com/itvplayer/categories/factual/popular'},
    {'title':'Films', 'url':'https://www.itv.com/itvplayer/categories/films/popular'},
    {'title':'Lifestyle', 'url':'https://www.itv.com/itvplayer/categories/lifestyle/popular'},
    {'title':'Sport', 'url':'https://www.itv.com/itvplayer/categories/sport/popular'},
]
def name():
    """Return the channel's display name."""
    return 'ITV Player'
def image():
    """Return the channel's icon file name."""
    return 'icon.png'
def description():
    """Return the channel's HTML description (including the geo-restriction note)."""
    return "ITV Player Channel (<a target='_blank' href='https://www.itv.com/itvplayer/'>https://www.itv.com/itvplayer</a>). Geo-restricted to UK."
def feedlist():
    """Return the list of available category feeds (title/url dicts)."""
    return _FEEDLIST
def feed(idx):
    """Fetch the category feed at index *idx* of _FEEDLIST and scrape it
    into a PlayItemList of programmes."""
    doc = get_doc(_FEEDLIST[idx]['url'])
    items = PlayItemList()
    for entry in select_all(doc, 'li.programme'):
        link = select_one(entry, '.programme-title a')
        prog_url = _PREFIX + get_attr(link, 'href')
        title = get_text(link)
        img = get_attr(select_one(entry, 'img'), 'src')
        subtitle = get_text(select_one(entry, '.episode-info span.episode-free'))
        item = PlayItem(title, img, prog_url, subtitle)
        # More than one episode available: offer a "more episodes" action.
        if (subtitle is not None) and (not subtitle.startswith('1 ')):
            item.add_action(MoreEpisodesAction(prog_url, title))
        items.add(item)
    return items
def search(q):
    """Search ITV Player for *q* and return a PlayItemList of episodes.

    When the site reports more episodes than it matched inline, only the
    first inline episode is emitted, carrying a "more episodes" action
    that links to the full listing.
    """
    # The site expects hyphenated terms with apostrophes stripped.
    q = q.replace(' ', '-')
    q = q.replace("'", '')
    doc = get_doc(_SEARCH_URL + q)
    rtree = select_all(doc, 'div.search-wrapper')
    results = PlayItemList()
    for l in rtree:
        el = select_one(l, 'h4 a')
        url = get_attr(el, 'href')
        title = get_text(el)
        el = select_one(l, "div.search-result-image a img")
        img = get_attr(el, 'src')
        el = select_one(l, ".search-episode-count")
        matched = int(get_attr(el, 'data-matched_episodes'))
        episodes = get_text(el)
        # Take the leading integer (text up to the first space).
        episodes = int(episodes[0:episodes.find(' ')])
        action = None
        if episodes > matched:
            action = MoreEpisodesAction(url, title)
        eps = select_all(l, ".episode")
        for e in eps:
            el = select_one(e, ".episode-title a")
            url = _PREFIX + get_attr(el, 'href')
            subtitle = get_text(el)
            el = select_one(e, ".description")
            synopsis = get_text_content(el)
            item = PlayItem(title, img, url, subtitle, synopsis)
            results.add(item)
            if action:
                # Stop after the first episode; the action covers the rest.
                item.add_action(action)
                break
    return results
def showmore(link):
    """Scrape an episode-listing page at *link* into a PlayItemList."""
    doc = get_doc(link)
    results = PlayItemList()
    for row in select_all(doc, 'div.views-row'):
        anchor = select_one(row, 'a')
        url = _PREFIX + get_attr(anchor, 'href')
        # The thumbnail lives inside the anchor element.
        img = get_attr(select_one(anchor, 'img'), 'src')
        subtitle = get_text(select_one(row, 'span.date-display-single'))
        season = get_text_content(select_one(row, 'div.field-season-number'))
        episode = get_text_content(select_one(row, 'div.field-episode-number'))
        title = season + " " + episode
        synopsis = get_text_content(select_one(row, 'div.field-name-field-short-synopsis'))
        results.add(PlayItem(title, img, url, subtitle, synopsis))
    return results
|
unknown
|
codeparrot/codeparrot-clean
| ||
#!venv/bin/python
# -*- coding: utf-8 -*-
import tornado
import tornado.websocket
import tornado.wsgi
import logging
import time
import json
import random
from app import app, db
from app.models import User, Game, Fact, Deck, ROLE_USER, ROLE_ADMIN, get_object_or_404
# Hiragana -> Hepburn romaji lookup used to seed the flashcard deck.
# Entry order is preserved from the original: basic kana rows, then
# voiced/semi-voiced rows, then the digraphs.
a = {
    'あ': 'a', 'い': 'i', 'う': 'u', 'え': 'e', 'お': 'o',
    'か': 'ka', 'き': 'ki', 'く': 'ku', 'け': 'ke', 'こ': 'ko',
    'さ': 'sa', 'し': 'shi', 'す': 'su', 'せ': 'se', 'そ': 'so',
    'た': 'ta', 'ち': 'chi', 'つ': 'tsu', 'て': 'te', 'と': 'to',
    'な': 'na', 'に': 'ni', 'ぬ': 'nu', 'ね': 'ne', 'の': 'no',
    'は': 'ha', 'ひ': 'hi', 'ふ': 'fu', 'へ': 'he', 'ほ': 'ho',
    'ま': 'ma', 'み': 'mi', 'む': 'mu', 'め': 'me', 'も': 'mo',
    'や': 'ya', 'ゆ': 'yu', 'よ': 'yo',
    'ら': 'ra', 'り': 'ri', 'る': 'ru', 'れ': 're', 'ろ': 'ro',
    'わ': 'wa', 'を': 'o', 'ん': 'n',
    'が': 'ga', 'ぎ': 'gi', 'ぐ': 'gu', 'げ': 'ge', 'ご': 'go',
    'ざ': 'za', 'じ': 'ji', 'ず': 'zu', 'ぜ': 'ze', 'ぞ': 'zo',
    'だ': 'da', 'で': 'de', 'ど': 'do',
    'ば': 'ba', 'び': 'bi', 'ぶ': 'bu', 'べ': 'be', 'ぼ': 'bo',
    'ぱ': 'pa', 'ぴ': 'pi', 'ぷ': 'pu', 'ぺ': 'pe', 'ぽ': 'po',
    'きゃ': 'kya', 'きゅ': 'kyu', 'きょ': 'kyo',
    'しゃ': 'sha', 'しゅ': 'shu', 'しょ': 'sho',
    'ちゃ': 'cha', 'ちゅ': 'chu', 'ちょ': 'cho',
    'にゃ': 'nya', 'にゅ': 'nyu', 'にょ': 'nyo',
    'ひゃ': 'hya', 'ひゅ': 'hyu', 'ひょ': 'hyo',
    'みゃ': 'mya', 'みゅ': 'myu', 'みょ': 'myo',
    'りゃ': 'rya', 'りゅ': 'ryu', 'りょ': 'ryo',
    'ぎゃ': 'gya', 'ぎゅ': 'gyu', 'ぎょ': 'gyo',
    'じゃ': 'ja', 'じゅ': 'ju', 'じょ': 'jo',
    'びゃ': 'bya', 'びゅ': 'byu', 'びょ': 'byo',
    'ぴゃ': 'pya', 'ぴゅ': 'pyu', 'ぴょ': 'pyo',
}

# Seed deck 1 with one Fact per kana.  The commit stays inside the loop
# to preserve the original per-row transaction behaviour.
d = Deck.query.get(1)
for front, back in a.items():
    fact = Fact(front=front, back=back, deck=d)
    db.session.add(fact)
    db.session.commit()
|
unknown
|
codeparrot/codeparrot-clean
| ||
#!/usr/bin/python
#
# Copyright (C) Citrix Systems Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation; version 2.1 only.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# Functions to read and write SR metadata
#
import util
import metadata
import os
import sys
sys.path.insert(0,'/opt/xensource/sm/snapwatchd')
import xs_errors
import lvutil
import xml.sax.saxutils
# On-disk layout constants for the SR metadata volume.
SECTOR_SIZE = 512
XML_HEADER = "<?xml version=\"1.0\" ?>"
SECTOR2_FMT= "%s%s%s"
# Width of the fixed-size decimal length field in the header.
MAX_METADATA_LENGTH_SIZE = 10
LEN_FMT = "%" + "-%ds" % MAX_METADATA_LENGTH_SIZE
# Left-justifies a string, space-padded to one full sector.
SECTOR_STRUCT = "%-512s"
OFFSET_TAG = 'offset'
# define xml tags for metadata
ALLOCATION_TAG = 'allocation'
NAME_LABEL_TAG = 'name_label'
NAME_DESCRIPTION_TAG = 'name_description'
VDI_TAG = 'vdi'
VDI_CLOSING_TAG = '</%s>' % VDI_TAG
VDI_DELETED_TAG = 'deleted'
UUID_TAG = 'uuid'
IS_A_SNAPSHOT_TAG = 'is_a_snapshot'
SNAPSHOT_OF_TAG = 'snapshot_of'
TYPE_TAG = 'type'
VDI_TYPE_TAG = 'vdi_type'
READ_ONLY_TAG = 'read_only'
MANAGED_TAG = 'managed'
SNAPSHOT_TIME_TAG = 'snapshot_time'
METADATA_OF_POOL_TAG = 'metadata_of_pool'
SVID_TAG = 'svid'
LUN_LABEL_TAG = 'll'
# Template for the first sector of a VDI record: label + description,
# with '%s' placeholders left for later interpolation.
VDI_SECTOR_1 = "<%s><%s>%s</%s><%s>%s</%s>" % (VDI_TAG,
                                               NAME_LABEL_TAG,
                                               '%s',
                                               NAME_LABEL_TAG,
                                               NAME_DESCRIPTION_TAG,
                                               '%s',
                                               NAME_DESCRIPTION_TAG)
# Space left in a sector after the wrapping tags are accounted for.
MAX_VDI_NAME_LABEL_DESC_LENGTH = SECTOR_SIZE - 2*len(NAME_LABEL_TAG) - \
    2*len(NAME_DESCRIPTION_TAG) - len(VDI_TAG) - 12
# Sector index (within a VDI record) holding each atomically-updatable field.
ATOMIC_UPDATE_PARAMS_AND_OFFSET = {NAME_LABEL_TAG: 2,
                                   NAME_DESCRIPTION_TAG: 3}
SR_INFO_SIZE_IN_SECTORS = 4
HEADER_SEP = ':'
METADATA_UPDATE_OBJECT_TYPE_TAG = 'objtype'
METADATA_OBJECT_TYPE_SR = 'sr'
METADATA_OBJECT_TYPE_VDI = 'vdi'
METADATA_BLK_SIZE = 512
# ----------------- # General helper functions - begin # -----------------
def get_min_blk_size_wrapper(fd):
    """Return the minimum write block size to use for *fd*.

    Currently a constant (512); *fd* is accepted for interface
    compatibility but unused.
    """
    return METADATA_BLK_SIZE
def open_file(path, write = False):
    """Open *path* and return a raw OS-level file descriptor.

    path  -- file to open
    write -- open read-write when True, read-only otherwise

    Raises OSError (with the original errno embedded in the message)
    when the underlying os.open fails.
    """
    # Single code path instead of the previous duplicated try/except pair.
    flags = os.O_RDWR if write else os.O_RDONLY
    mode = "read-write" if write else "read"
    try:
        return os.open(path, flags)
    except OSError as e:
        raise OSError("Failed to open file %s for %s. Error: %s" %
                      (path, mode, e.errno))
def file_write_wrapper(fd, offset, blocksize, data, length):
    """Write *data* (whose payload length is *length*) to *fd* at
    *offset*, space-padding up to the next multiple of *blocksize*.

    Returns the number of bytes written.  Raises OSError on failure.
    """
    try:
        newlength = length
        if length % blocksize:
            newlength = length + (blocksize - length % blocksize)
        # Pad with the same type as the payload: on Python 2, bytes is
        # str so behaviour is unchanged; this also keeps bytes payloads
        # working under Python 3.
        pad = b' ' if isinstance(data, bytes) else ' '
        os.lseek(fd, offset, os.SEEK_SET)
        result = os.write(fd, data + pad * (newlength - length))
    except OSError as e:
        raise OSError("Failed to write file with params %s. Error: %s" %
                      ([fd, offset, blocksize, data, length], e.errno))
    return result
def file_read_wrapper(fd, offset, bytesToRead, min_block_size):
    """Read *bytesToRead* bytes from *fd* starting at *offset*.

    min_block_size is unused by the read itself but kept for interface
    symmetry with file_write_wrapper (it still appears in the error
    message).  Raises OSError on failure.
    """
    try:
        os.lseek(fd, offset, os.SEEK_SET)
        return os.read(fd, bytesToRead)
    except OSError as e:
        raise OSError("Failed to read file with params %s. Error: %s" %
                      ([fd, offset, min_block_size, bytesToRead], e.errno))
def close(fd):
    """Close *fd* unless it is the -1 sentinel meaning "not open"."""
    if fd == -1:
        return
    os.close(fd)
def getBlockAlignedRange(block_size, offset, length):
    """Return (lower, upper): a block-aligned byte range containing
    *offset* with room for *length* bytes to be written.

    The range is always at least one block wide.
    """
    lower = offset - (offset % block_size)
    upper = lower + block_size
    # Grow by whole blocks until the requested length fits.
    while upper < lower + length:
        upper += block_size
    return (lower, upper)
def buildHeader(len, major = metadata.MD_MAJOR, minor = metadata.MD_MINOR):
    """Build the metadata header (first sector): magic string, a
    fixed-width length field and the major/minor format version, joined
    by HEADER_SEP.  *len* is interpolated into the length field.
    """
    template = HEADER_SEP.join(
        (metadata.HDR_STRING, LEN_FMT, str(major), str(minor)))
    return template % len
def unpackHeader(input):
    """Split a header sector into its (magic, length, major, minor)
    fields, validating the field count and the magic string."""
    fields = input.split(HEADER_SEP)
    if len(fields) != 4 or fields[0] != metadata.HDR_STRING:
        util.SMlog("Exception unpacking metadata header: "
                   "Error: Bad header '%s'" % (input))
        raise xs_errors.XenError('MetadataError',
                                 opterr='Bad header')
    return tuple(fields)
def getSector(str):
    """Left-justify *str* into one full 512-byte, space-padded sector."""
    return SECTOR_STRUCT % str
def getSectorAlignedXML(tagName, value):
    """Return "<tagName>value</tagName>", truncating *value* so the whole
    element fits in one 512-byte sector."""
    # truncate data if we breach the 512 limit
    if len("<%s>%s</%s>" % (tagName, value, tagName)) > SECTOR_SIZE:
        # 2*len(tagName) + 5 accounts for the "<>", "</>" markup bytes.
        length = util.unictrunc(value, SECTOR_SIZE - 2*len(tagName) - 5)
        util.SMlog('warning: SR ' + tagName + ' truncated from ' \
            + str(len(value)) + ' to ' + str(length) + ' bytes')
        value = value[:length]
    return "<%s>%s</%s>" % (tagName, value, tagName)
def getXMLTag(tagName):
    """Return an XML element template for *tagName* whose text content is
    left as a '%s' placeholder for later interpolation."""
    return "<{0}>%s</{0}>".format(tagName)
def updateLengthInHeader(fd, length, major = metadata.MD_MAJOR, \
    minor = metadata.MD_MINOR):
    """Rewrite the header sector of *fd* with a new *length*, keeping the
    rest of the first block intact.  Errors are logged and re-raised."""
    try:
        min_block_size = get_min_blk_size_wrapper(fd)
        md = ''
        md = file_read_wrapper(fd, 0, min_block_size, min_block_size)
        # New header for sector 0, original content after it preserved.
        updated_md = buildHeader(length, major, minor)
        updated_md += md[SECTOR_SIZE:]
        # Now write the new length
        file_write_wrapper(fd, 0, min_block_size, updated_md, len(updated_md))
    except Exception, e:
        util.SMlog("Exception updating metadata length with length: %d."
                   "Error: %s" % (length, str(e)))
        raise
def getMetadataLength(fd):
    """Return the metadata length (in bytes) recorded in the header
    sector of *fd*.

    Reads the first sector, parses it with unpackHeader and returns the
    length field as an int.  Errors are logged and re-raised.
    """
    try:
        min_blk_size = get_min_blk_size_wrapper(fd)
        sector1 = \
            file_read_wrapper(fd, 0, SECTOR_SIZE, min_blk_size).strip()
        hdr = unpackHeader(sector1)
        # hdr = (magic, length, major, minor); previously this assigned
        # to a local named "len", shadowing the builtin.
        metadata_length = int(hdr[1])
        return metadata_length
    except Exception as e:
        util.SMlog("Exception getting metadata length: "
                   "Error: %s" % str(e))
        raise
def requiresUpgrade(path):
    """Return True when the metadata at *path* needs upgrading, either by
    the pre-6.0 ("pre-Boston") check or by comparing the 6.0+ header
    version against the current MD_MAJOR/MD_MINOR."""
    # if the metadata requires upgrade either by pre-Boston logic
    # or Boston logic upgrade it
    # First check if this is a pre-6.0 pool using the old metadata
    pre_boston_upgrade = False
    boston_upgrade = False
    try:
        if metadata.requiresUpgrade(path):
            pre_boston_upgrade = True
    except Exception, e:
        util.SMlog("This looks like a 6.0 or later pool, try checking " \
            "for upgrade using the new metadata header format. " \
            "Error: %s" % str(e))
    try:
        # Now check for upgrade using the header format for 6.0/post-6.0
        try:
            fd = -1
            fd = open_file(path)
            min_blk_size = get_min_blk_size_wrapper(fd)
            sector1 = \
                file_read_wrapper(fd, 0, SECTOR_SIZE, min_blk_size).strip()
            hdr = unpackHeader(sector1)
            mdmajor = int(hdr[2])
            mdminor = int(hdr[3])
            # Older on-disk version in either component => upgrade needed.
            if mdmajor < metadata.MD_MAJOR:
                boston_upgrade = True
            elif mdmajor == metadata.MD_MAJOR and mdminor < metadata.MD_MINOR:
                boston_upgrade = True
        except Exception, e:
            # An unreadable/unparsable header is treated as "needs upgrade".
            util.SMlog("Exception checking header version, upgrading metadata."\
                " Error: %s" % str(e))
            return True
    finally:
        close(fd)
    return pre_boston_upgrade or boston_upgrade
# ----------------- # General helper functions - end # -----------------
class MetadataHandler:
    # Number of 512-byte sectors used per VDI record; set by subclasses.
    VDI_INFO_SIZE_IN_SECTORS = None
    # constructor
    def __init__(self, path = None, write = True):
        """Open the metadata file at *path* (read-write by default).

        With no path, the handler starts without an open descriptor (-1).
        """
        self.fd = -1
        self.path = path
        if self.path != None:
            self.fd = open_file(self.path, write)
    def __del__(self):
        # Best-effort close of the descriptor when the handler is collected.
        if self.fd != -1:
            close(self.fd)
    def spaceAvailableForVdis(self, count):
        # Subclass responsibility: raise unless *count* more VDI records fit.
        raise NotImplementedError("spaceAvailableForVdis is undefined")
    # common utility functions
    def getMetadata(self,params = {}):
        """Return a (sr_info, vdi_info) tuple read from the metadata file.

        Failures inside getMetadataInternal are treated as "no metadata
        yet" and yield empty dicts; other errors are wrapped in a
        MetadataError.  NOTE(review): *params* is a shared mutable
        default — callers must not mutate it.
        """
        try:
            sr_info = {}
            vdi_info = {}
            try:
                md = self.getMetadataInternal(params)
                sr_info = md['sr_info']
                vdi_info = md['vdi_info']
            except:
                # Maybe there is no metadata yet
                pass
        except Exception, e:
            util.SMlog('Exception getting metadata. Error: %s' % str(e))
            raise xs_errors.XenError('MetadataError', \
                opterr='%s' % str(e))
        return (sr_info, vdi_info)
    def writeMetadata(self, sr_info, vdi_info):
        """Write the full SR + VDI metadata, wrapping any failure in a
        MetadataError."""
        try:
            self.writeMetadataInternal(sr_info, vdi_info)
        except Exception, e:
            util.SMlog('Exception writing metadata. Error: %s' % str(e))
            raise xs_errors.XenError('MetadataError', \
                opterr='%s' % str(e))
    # read metadata for this SR and find if a metadata VDI exists
    def findMetadataVDI(self):
        """Return the uuid of the non-snapshot VDI of type 'metadata',
        or None when no such VDI is recorded."""
        try:
            vdi_info = self.getMetadata()[1]
            for offset in vdi_info.keys():
                if vdi_info[offset][TYPE_TAG] == 'metadata' and \
                    vdi_info[offset][IS_A_SNAPSHOT_TAG] == '0':
                    return vdi_info[offset][UUID_TAG]
            return None
        except Exception, e:
            util.SMlog('Exception checking if SR metadata a metadata VDI.'\
                'Error: %s' % str(e))
            raise xs_errors.XenError('MetadataError', \
                opterr='%s' % str(e))
    # update the SR information or one of the VDIs information
    # the passed in map would have a key 'objtype', either sr or vdi.
    # if the key is sr, the following might be passed in
    #   SR name-label
    #   SR name_description
    # if the key is vdi, the following information per VDI may be passed in
    #   uuid - mandatory
    #   name-label
    #   name_description
    #   is_a_snapshot
    #   snapshot_of, if snapshot status is true
    #   snapshot time
    #   type: system, user or metadata etc
    #   vdi_type: raw or vhd
    #   read_only
    #   location
    #   managed
    #   metadata_of_pool
    def updateMetadata(self, update_map = {}):
        """Dispatch an update to the SR record or to a VDI record, keyed
        by the 'objtype' entry of *update_map*.

        NOTE: the 'objtype' key is removed from the caller's dict.
        """
        util.SMlog("Updating metadata : %s" % update_map)
        try:
            objtype = update_map[METADATA_UPDATE_OBJECT_TYPE_TAG]
            del update_map[METADATA_UPDATE_OBJECT_TYPE_TAG]
            if objtype == METADATA_OBJECT_TYPE_SR:
                self.updateSR(update_map)
            elif objtype == METADATA_OBJECT_TYPE_VDI:
                self.updateVdi(update_map)
        except Exception, e:
            util.SMlog('Error updating Metadata Volume with update' \
                'map: %s. Error: %s' % (update_map, str(e)))
            raise xs_errors.XenError('MetadataError', \
                opterr='%s' % str(e))
    def deleteVdiFromMetadata(self, vdi_uuid):
        """Remove the record for *vdi_uuid*, wrapping any failure in a
        MetadataError."""
        util.SMlog("Deleting vdi: %s" % vdi_uuid)
        try:
            self.deleteVdi(vdi_uuid)
        except Exception, e:
            util.SMlog('Error deleting vdi %s from the metadata. '\
                'Error: %s' % (vdi_uuid, str(e)))
            raise xs_errors.XenError('MetadataError', \
                opterr='%s' % str(e))
    def addVdi(self, vdi_info = {}):
        """Append a VDI record described by *vdi_info*, wrapping any
        failure in a MetadataError.  NOTE(review): shared mutable default."""
        util.SMlog("Adding VDI with info: %s" % vdi_info)
        try:
            self.addVdiInternal(vdi_info)
        except Exception, e:
            util.SMlog('Error adding VDI to Metadata Volume with '\
                'update map: %s. Error: %s' % (vdi_info, str(e)))
            raise xs_errors.XenError('MetadataError', \
                opterr='%s' % (str(e)))
    def ensureSpaceIsAvailableForVdis(self, count):
        """Raise MetadataError unless *count* more VDI records fit
        (delegates the check to the subclass's spaceAvailableForVdis)."""
        util.SMlog("Checking if there is space in the metadata for %d VDI." % \
            count)
        try:
            self.spaceAvailableForVdis(count)
        except Exception, e:
            raise xs_errors.XenError('MetadataError', \
                opterr='%s' % str(e))
    # common functions
    def deleteVdi(self, vdi_uuid, offset = 0):
        """Mark the record for *vdi_uuid* as deleted; when it is the
        trailing record, shrink the recorded metadata length so the
        space can be reused."""
        util.SMlog("Entering deleteVdi")
        try:
            md = self.getMetadataInternal({'vdi_uuid': vdi_uuid})
            if not md.has_key('offset'):
                util.SMlog("Metadata for VDI %s not present, or already removed, " \
                    "no further deletion action required." % vdi_uuid)
                return
            # Soft delete: flip the 'deleted' flag and persist the record.
            md['vdi_info'][md['offset']][VDI_DELETED_TAG] = '1'
            self.updateVdi(md['vdi_info'][md['offset']])
            try:
                mdlength = getMetadataLength(self.fd)
                # Only the trailing record can be reclaimed in place.
                if (mdlength - md['offset']) == \
                    self.VDI_INFO_SIZE_IN_SECTORS * SECTOR_SIZE:
                    updateLengthInHeader(self.fd, (mdlength - \
                        self.VDI_INFO_SIZE_IN_SECTORS * SECTOR_SIZE))
            except:
                raise
        except Exception, e:
            raise Exception("VDI delete operation failed for "\
                "parameters: %s, %s. Error: %s" % \
                (self.path, vdi_uuid, str(e)))
    # common functions with some details derived from the child class
    def generateVDIsForRange(self, vdi_info, lower, upper, update_map = {}, \
        offset = 0):
        """Serialise the VDI records falling within [lower, upper),
        applying *update_map* to the record located at *offset*.

        When there are no records (or none at *offset*), just serialises
        *update_map* itself via getVdiInfo.
        """
        value = ''
        if not len(vdi_info.keys()) or not vdi_info.has_key(offset):
            return self.getVdiInfo(update_map)
        for vdi_offset in vdi_info.keys():
            if vdi_offset < lower:
                continue
            # Stop once the output fills the requested byte range.
            if len(value) >= (upper - lower):
                break
            vdi_map = vdi_info[vdi_offset]
            if vdi_offset == offset:
                # write passed in VDI info
                for key in update_map.keys():
                    vdi_map[key] = update_map[key]
            # Emit each sector of the record while room remains.
            for i in range(1, self.VDI_INFO_SIZE_IN_SECTORS + 1):
                if len(value) < (upper - lower):
                    value += self.getVdiInfo(vdi_map, i)
        return value
    def addVdiInternal(self, Dict):
        """Write a new VDI record described by *Dict*, reusing the first
        deleted slot when one exists, otherwise appending at the end
        (and growing the recorded metadata length)."""
        util.SMlog("Entering addVdiInternal")
        try:
            value = ''
            Dict[VDI_DELETED_TAG] = '0'
            min_block_size = get_min_blk_size_wrapper(self.fd)
            mdlength = getMetadataLength(self.fd)
            md = self.getMetadataInternal({'firstDeleted': 1, 'includeDeletedVdis': 1})
            if not md.has_key('foundDeleted'):
                # No reusable slot: target the current end of the metadata.
                md['offset'] = mdlength
                (md['lower'], md['upper']) = \
                    getBlockAlignedRange(min_block_size, mdlength, \
                    SECTOR_SIZE * self.VDI_INFO_SIZE_IN_SECTORS)
            # If this has created a new VDI, update metadata length
            if md.has_key('foundDeleted'):
                value = self.getMetadataToWrite(md['sr_info'], md['vdi_info'], \
                    md['lower'], md['upper'], Dict, md['offset'])
            else:
                value = self.getMetadataToWrite(md['sr_info'], md['vdi_info'], \
                    md['lower'], md['upper'], Dict, mdlength)
            file_write_wrapper(self.fd, md['lower'], min_block_size, \
                value, len(value))
            # Reusing a slot keeps the length; appending grows it by one record.
            if md.has_key('foundDeleted'):
                updateLengthInHeader(self.fd, mdlength)
            else:
                updateLengthInHeader(self.fd, mdlength + \
                    SECTOR_SIZE * self.VDI_INFO_SIZE_IN_SECTORS)
            return True
        except Exception, e:
            util.SMlog("Exception adding vdi with info: %s. Error: %s" % \
                (Dict, str(e)))
            raise
# Get metadata from the file name passed in
# additional params:
# includeDeletedVdis - include deleted VDIs in the returned metadata
# vdi_uuid - only fetch metadata till a particular VDI
# offset - only fetch metadata till a particular offset
# firstDeleted - get the first deleted VDI
# indexByUuid - index VDIs by uuid
# the return value of this function is a dictionary having the following keys
# sr_info: dictionary containing sr information
# vdi_info: dictionary containing vdi information indexed by offset
# offset: when passing in vdi_uuid/firstDeleted below
# deleted - true if deleted VDI found to be replaced
def getMetadataInternal(self, params = {}):
try:
lower = 0; upper = 0
retmap = {}; sr_info_map = {}; ret_vdi_info = {}
length = getMetadataLength(self.fd)
min_blk_size = get_min_blk_size_wrapper(self.fd)
# Read in the metadata fil
metadataxml = ''
metadataxml = file_read_wrapper(self.fd, 0, length, min_blk_size)
# At this point we have the complete metadata in metadataxml
offset = SECTOR_SIZE + len(XML_HEADER)
sr_info = metadataxml[offset: SECTOR_SIZE * 4]
offset = SECTOR_SIZE * 4
sr_info = sr_info.replace('\x00','')
parsable_metadata = '%s<%s>%s</%s>' % (XML_HEADER, metadata.XML_TAG,
sr_info, metadata.XML_TAG)
retmap['sr_info'] = metadata._parseXML(parsable_metadata)
# At this point we check if an offset has been passed in
if params.has_key('offset'):
upper = getBlockAlignedRange(min_blk_size, params['offset'], \
0)[1]
else:
upper = length
# Now look at the VDI objects
while offset < upper:
vdi_info = metadataxml[offset:
offset +
(SECTOR_SIZE * self.VDI_INFO_SIZE_IN_SECTORS)]
vdi_info = vdi_info.replace('\x00','')
parsable_metadata = '%s<%s>%s</%s>' % (XML_HEADER, metadata.XML_TAG,
vdi_info, metadata.XML_TAG)
vdi_info_map = metadata._parseXML(parsable_metadata)[VDI_TAG]
vdi_info_map[OFFSET_TAG] = offset
if not params.has_key('includeDeletedVdis') and \
vdi_info_map[VDI_DELETED_TAG] == '1':
offset += SECTOR_SIZE * self.VDI_INFO_SIZE_IN_SECTORS
continue
if params.has_key('indexByUuid'):
ret_vdi_info[vdi_info_map[UUID_TAG]] = vdi_info_map
else:
ret_vdi_info[offset] = vdi_info_map
if params.has_key('vdi_uuid'):
if vdi_info_map[UUID_TAG] == params['vdi_uuid']:
retmap['offset'] = offset
(lower, upper) = \
getBlockAlignedRange(min_blk_size, offset, \
SECTOR_SIZE * self.VDI_INFO_SIZE_IN_SECTORS)
elif params.has_key('firstDeleted'):
if vdi_info_map[VDI_DELETED_TAG] == '1':
retmap['foundDeleted'] = 1
retmap['offset'] = offset
(lower, upper) = \
getBlockAlignedRange(min_blk_size, offset, \
SECTOR_SIZE * self.VDI_INFO_SIZE_IN_SECTORS)
offset += SECTOR_SIZE * self.VDI_INFO_SIZE_IN_SECTORS
retmap['lower'] = lower
retmap['upper'] = upper
retmap['vdi_info'] = ret_vdi_info
return retmap
except Exception, e:
util.SMlog("Exception getting metadata with params" \
"%s. Error: %s" % (params, str(e)))
raise
# This function expects both sr name_label and sr name_description to be
# passed in
def updateSR(self, Dict):
    # Update the SR name_label / name_description sectors in place.
    # Dict must contain exactly the keys of ATOMIC_UPDATE_PARAMS_AND_OFFSET;
    # any extra key makes the operation fail.
    util.SMlog('entering updateSR')
    value = ''
    # Find the offset depending on what we are updating
    diff = set(Dict.keys()) - set(ATOMIC_UPDATE_PARAMS_AND_OFFSET.keys())
    if diff == set([]):
        # name_label / name_description live in sectors 2-3.
        offset = SECTOR_SIZE * 2
        (lower, upper) = getBlockAlignedRange(get_min_blk_size_wrapper( \
            self.fd), offset, SECTOR_SIZE * 2)
        md = self.getMetadataInternal({'offset': \
            SECTOR_SIZE * (SR_INFO_SIZE_IN_SECTORS - 1)})
        sr_info = md['sr_info']
        vdi_info_by_offset = md['vdi_info']
        # update SR info with Dict
        for key in Dict.keys():
            sr_info[key] = Dict[key]
        # if lower is less than SR header size
        if lower < SR_INFO_SIZE_IN_SECTORS * SECTOR_SIZE:
            # if upper is less than SR header size
            if upper <= SR_INFO_SIZE_IN_SECTORS * SECTOR_SIZE:
                for i in range(lower/SECTOR_SIZE, upper/SECTOR_SIZE):
                    value += self.getSRInfoForSectors(sr_info, range(i, i + 1))
            else:
                for i in range(lower/SECTOR_SIZE, SR_INFO_SIZE_IN_SECTORS):
                    value += self.getSRInfoForSectors(sr_info, range(i, i + 1))
                # generate the remaining VDI
                # NOTE(review): the lower bound passed here is
                # SR_INFO_SIZE_IN_SECTORS (a sector count), while
                # getMetadataToWrite passes SECTOR_SIZE *
                # SR_INFO_SIZE_IN_SECTORS (a byte offset) -- confirm
                # which unit generateVDIsForRange expects.
                value += self.generateVDIsForRange(vdi_info_by_offset,
                    SR_INFO_SIZE_IN_SECTORS, upper)
        else:
            # generate the remaining VDI
            value += self.generateVDIsForRange(vdi_info_by_offset, lower, upper)
        file_write_wrapper(self.fd, lower, \
            get_min_blk_size_wrapper(self.fd), value, len(value))
    else:
        raise Exception("SR Update operation not supported for "
            "parameters: %s" % diff)
def updateVdi(self, Dict):
util.SMlog('entering updateVdi')
try:
value = ''
min_block_size = get_min_blk_size_wrapper(self.fd)
mdlength = getMetadataLength(self.fd)
md = self.getMetadataInternal({'vdi_uuid': Dict[UUID_TAG]})
value = self.getMetadataToWrite(md['sr_info'], md['vdi_info'], \
md['lower'], md['upper'], Dict, md['offset'])
file_write_wrapper(self.fd, md['lower'], min_block_size, value, len(value))
return True
except Exception, e:
util.SMlog("Exception updating vdi with info: %s. Error: %s" % \
(Dict, str(e)))
raise
# This should be called only in the cases where we are initially writing
# metadata, the function would expect a dictionary which had all information
# about the SRs and all its VDIs
def writeMetadataInternal(self, sr_info, vdi_info):
try:
md = ''
md = self.getSRInfoForSectors(sr_info, range(0, SR_INFO_SIZE_IN_SECTORS))
# Go over the VDIs passed and for each
for key in vdi_info.keys():
md += self.getVdiInfo(vdi_info[key])
# Now write the metadata on disk.
min_block_size = get_min_blk_size_wrapper(self.fd)
file_write_wrapper(self.fd, 0, min_block_size, md, len(md))
updateLengthInHeader(self.fd, len(md))
except Exception, e:
util.SMlog("Exception writing metadata with info: %s, %s. "\
"Error: %s" % (sr_info, vdi_info, str(e)))
raise
# generates metadata info to write taking the following parameters:
# a range, lower - upper
# sr and vdi information
# VDI information to update
# an optional offset to the VDI to update
def getMetadataToWrite(self, sr_info, vdi_info, lower, upper, update_map, \
offset):
util.SMlog("Entering getMetadataToWrite")
try:
value = ''
vdi_map = {}
# if lower is less than SR info
if lower < SECTOR_SIZE * SR_INFO_SIZE_IN_SECTORS:
# generate SR info
for i in range(lower/SECTOR_SIZE, SR_INFO_SIZE_IN_SECTORS):
value += self.getSRInfoForSectors(sr_info, range(i, i + 1))
# generate the rest of the VDIs till upper
value += self.generateVDIsForRange(vdi_info, \
SECTOR_SIZE * SR_INFO_SIZE_IN_SECTORS, upper, update_map, offset)
else:
# skip till you get a VDI with lower as the offset, then generate
value += self.generateVDIsForRange(vdi_info, lower, upper, \
update_map, offset)
return value
except Exception, e:
util.SMlog("Exception generating metadata to write with info: "\
"sr_info: %s, vdi_info: %s, lower: %d, upper: %d, "\
"update_map: %s, offset: %d. Error: %s" % \
(sr_info, vdi_info, lower, upper, update_map, offset,str(e)))
raise
# specific functions, to be implement by the child classes
def getVdiInfo(self, Dict, generateSector = 0):
    """Abstract hook: build the on-disk record for one VDI.

    Implemented by subclasses; this base version does nothing and
    returns None.
    """
    return None
def getSRInfoForSectors(self, sr_info, range):
    """Abstract hook: build the requested SR header sectors.

    Implemented by subclasses; this base version does nothing and returns
    None. (The 'range' parameter shadows the builtin, kept for interface
    compatibility.)
    """
    return None
class LVMMetadataHandler(MetadataHandler):
VDI_INFO_SIZE_IN_SECTORS = 2
# constructor
def __init__(self, path = None, write = True):
lvutil.ensurePathExists(path)
MetadataHandler.__init__(self, path, write)
def spaceAvailableForVdis(self, count):
try:
created = False
try:
# The easiest way to do this, is to create a dummy vdi and write it
uuid = util.gen_uuid()
vdi_info = { UUID_TAG: uuid,
NAME_LABEL_TAG: 'dummy vdi for space check',
NAME_DESCRIPTION_TAG: 'dummy vdi for space check',
IS_A_SNAPSHOT_TAG: 0,
SNAPSHOT_OF_TAG: '',
SNAPSHOT_TIME_TAG: '',
TYPE_TAG: 'user',
VDI_TYPE_TAG: 'vhd',
READ_ONLY_TAG: 0,
MANAGED_TAG: 0,
'metadata_of_pool': ''
}
created = self.addVdiInternal(vdi_info)
except IOError, e:
raise
finally:
if created:
# Now delete the dummy VDI created above
self.deleteVdi(uuid)
return
# This function generates VDI info based on the passed in information
# it also takes in a parameter to determine whether both the sector
# or only one sector needs to be generated, and which one
# generateSector - can be 1 or 2, defaults to 0 and generates both sectors
def getVdiInfo(self, Dict, generateSector = 0):
util.SMlog("Entering VDI info")
try:
vdi_info = ''
# HP split into 2 functions, 1 for generating the first 2 sectors,
# which will be called by all classes
# and one specific to this class
if generateSector == 1 or generateSector == 0:
Dict[NAME_LABEL_TAG] = \
xml.sax.saxutils.escape(Dict[NAME_LABEL_TAG])
Dict[NAME_DESCRIPTION_TAG] = \
xml.sax.saxutils.escape(Dict[NAME_DESCRIPTION_TAG])
if len(Dict[NAME_LABEL_TAG]) + len(Dict[NAME_DESCRIPTION_TAG]) > \
MAX_VDI_NAME_LABEL_DESC_LENGTH:
if len(Dict[NAME_LABEL_TAG]) > MAX_VDI_NAME_LABEL_DESC_LENGTH/2:
length = util.unictrunc(Dict[NAME_LABEL_TAG], \
MAX_VDI_NAME_LABEL_DESC_LENGTH/2)
util.SMlog('warning: name-label truncated from ' \
+ str(len(Dict[NAME_LABEL_TAG])) + ' to ' \
+ str(length) + ' bytes')
Dict[NAME_LABEL_TAG] = Dict[NAME_LABEL_TAG][:length]
if len(Dict[NAME_DESCRIPTION_TAG]) > \
MAX_VDI_NAME_LABEL_DESC_LENGTH/2: \
length = util.unictrunc(Dict[NAME_DESCRIPTION_TAG], \
MAX_VDI_NAME_LABEL_DESC_LENGTH/2)
util.SMlog('warning: description truncated from ' \
+ str(len(Dict[NAME_DESCRIPTION_TAG])) + \
' to ' + str(length) + ' bytes')
Dict[NAME_DESCRIPTION_TAG] = \
Dict[NAME_DESCRIPTION_TAG][:length]
# Fill the open struct and write it
vdi_info += getSector(VDI_SECTOR_1 % (Dict[NAME_LABEL_TAG],
Dict[NAME_DESCRIPTION_TAG]))
if generateSector == 2 or generateSector == 0:
sector2 = ''
if not Dict.has_key(VDI_DELETED_TAG):
Dict.update({VDI_DELETED_TAG:'0'})
for tag in Dict.keys():
if tag == NAME_LABEL_TAG or tag == NAME_DESCRIPTION_TAG:
continue
sector2 += getXMLTag(tag) % Dict[tag]
sector2 += VDI_CLOSING_TAG
vdi_info += getSector(sector2)
return vdi_info
except Exception, e:
util.SMlog("Exception generating vdi info: %s. Error: %s" % \
(Dict, str(e)))
raise
def getSRInfoForSectors(self, sr_info, range):
srinfo = ''
try:
# write header, name_labael and description in that function
# as its common to all
# Fill up the first sector
if 0 in range:
srinfo = getSector(buildHeader(SECTOR_SIZE))
if 1 in range:
uuid = getXMLTag(UUID_TAG) % sr_info[UUID_TAG]
allocation = getXMLTag(ALLOCATION_TAG) % sr_info[ALLOCATION_TAG]
second = SECTOR2_FMT % (XML_HEADER, uuid, allocation)
srinfo += getSector(second)
if 2 in range:
# Fill up the SR name_label
srinfo += getSector(getSectorAlignedXML(NAME_LABEL_TAG,
xml.sax.saxutils.escape(sr_info[NAME_LABEL_TAG])))
if 3 in range:
# Fill the name_description
srinfo += getSector(getSectorAlignedXML(NAME_DESCRIPTION_TAG,
xml.sax.saxutils.escape(sr_info[NAME_DESCRIPTION_TAG])))
return srinfo
except Exception, e:
util.SMlog("Exception getting SR info with parameters: sr_info: %s," \
"range: %s. Error: %s" % (sr_info, range, str(e)))
raise
class SLMetadataHandler(MetadataHandler):
VDI_INFO_SIZE_IN_SECTORS = 2
SECTOR2_FMT= "%s%s"
# constructor
def __init__(self, path = None, write = True):
MetadataHandler.__init__(self, path, write)
def spaceAvailableForVdis(self, count):
try:
created = False
try:
# The easiest way to do this, is to create a dummy vdi and write it
uuid = util.gen_uuid()
vdi_info = { UUID_TAG: uuid,
NAME_LABEL_TAG: 'dummy vdi for space check',
NAME_DESCRIPTION_TAG: 'dummy vdi for space check',
IS_A_SNAPSHOT_TAG: 0,
SNAPSHOT_OF_TAG: '',
SNAPSHOT_TIME_TAG: '',
TYPE_TAG: 'user',
VDI_TYPE_TAG: 'vhd',
READ_ONLY_TAG: 0,
MANAGED_TAG: 0,
'metadata_of_pool': ''
}
created = self.addVdiInternal(vdi_info)
except IOError, e:
raise
finally:
if created:
# Now delete the dummy VDI created above
self.deleteVdi(uuid)
return
# This function generates VDI info based on the passed in information
# it also takes in a parameter to determine whether both the sector
# or only one sector needs to be generated, and which one
# generateSector - can be 1 or 2, defaults to 0 and generates both sectors
def getVdiInfo(self, Dict, generateSector = 0):
util.SMlog("Entering VDI info")
try:
vdi_info = ''
# HP split into 2 functions, 1 for generating the first 2 sectors,
# which will be called by all classes
# and one specific to this class
if generateSector == 1 or generateSector == 0:
Dict[NAME_LABEL_TAG] = \
xml.sax.saxutils.escape(Dict[NAME_LABEL_TAG])
Dict[NAME_DESCRIPTION_TAG] = \
xml.sax.saxutils.escape(Dict[NAME_DESCRIPTION_TAG])
if len(Dict[NAME_LABEL_TAG]) + len(Dict[NAME_DESCRIPTION_TAG]) > \
MAX_VDI_NAME_LABEL_DESC_LENGTH:
if len(Dict[NAME_LABEL_TAG]) > MAX_VDI_NAME_LABEL_DESC_LENGTH/2:
length = util.unictrunc(Dict[NAME_LABEL_TAG], \
MAX_VDI_NAME_LABEL_DESC_LENGTH/2)
util.SMlog('warning: name-label truncated from ' \
+ str(len(Dict[NAME_LABEL_TAG])) + ' to ' \
+ str(length) + ' bytes')
Dict[NAME_LABEL_TAG] = Dict[NAME_LABEL_TAG][:length]
if len(Dict[NAME_DESCRIPTION_TAG]) > \
MAX_VDI_NAME_LABEL_DESC_LENGTH/2: \
length = util.unictrunc(Dict[NAME_DESCRIPTION_TAG], \
MAX_VDI_NAME_LABEL_DESC_LENGTH/2)
util.SMlog('warning: description truncated from ' \
+ str(len(Dict[NAME_DESCRIPTION_TAG])) + \
' to ' + str(length) + ' bytes')
Dict[NAME_DESCRIPTION_TAG] = \
Dict[NAME_DESCRIPTION_TAG][:length]
# Fill the open struct and write it
vdi_info += getSector(VDI_SECTOR_1 % (Dict[NAME_LABEL_TAG],
Dict[NAME_DESCRIPTION_TAG]))
if generateSector == 2 or generateSector == 0:
sector2 = ''
if not Dict.has_key(VDI_DELETED_TAG):
Dict.update({VDI_DELETED_TAG:'0'})
for tag in Dict.keys():
if tag == NAME_LABEL_TAG or tag == NAME_DESCRIPTION_TAG:
continue
sector2 += getXMLTag(tag) % Dict[tag]
sector2 += VDI_CLOSING_TAG
vdi_info += getSector(sector2)
return vdi_info
except Exception, e:
util.SMlog("Exception generating vdi info: %s. Error: %s" % \
(Dict, str(e)))
raise
def getSRInfoForSectors(self, sr_info, range):
srinfo = ''
try:
# write header, name_labael and description in that function
# as its common to all
# Fill up the first sector
if 0 in range:
srinfo = getSector(buildHeader(SECTOR_SIZE))
if 1 in range:
uuid = getXMLTag(UUID_TAG) % sr_info[UUID_TAG]
second = self.SECTOR2_FMT % (XML_HEADER, uuid)
srinfo += getSector(second)
if 2 in range:
# Fill up the SR name_label
srinfo += getSector(getSectorAlignedXML(NAME_LABEL_TAG,
xml.sax.saxutils.escape(sr_info[NAME_LABEL_TAG])))
if 3 in range:
# Fill the name_description
srinfo += getSector(getSectorAlignedXML(NAME_DESCRIPTION_TAG,
xml.sax.saxutils.escape(sr_info[NAME_DESCRIPTION_TAG])))
return srinfo
except Exception, e:
util.SMlog("Exception getting SR info with parameters: sr_info: %s," \
"range: %s. Error: %s" % (sr_info, range, str(e)))
raise
|
unknown
|
codeparrot/codeparrot-clean
| ||
##############################################################################
# MDTraj: A Python Library for Loading, Saving, and Manipulating
# Molecular Dynamics Trajectories.
# Copyright 2012-2013 Stanford University and the Authors
#
# Authors: Robert McGibbon
# Contributors:
#
# MDTraj is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation, either version 2.1
# of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with MDTraj. If not, see <http://www.gnu.org/licenses/>.
##############################################################################
"""The mdtraj package contains tools for loading and saving molecular dynamics
trajectories in a variety of formats, including Gromacs XTC & TRR, CHARMM/NAMD
DCD, AMBER BINPOS, PDB, and HDF5.
"""
import numpy as _ # silence cython related numpy warnings, see github.com/numpy/numpy/pull/432
from .formats.registry import FormatRegistry
from .formats.xtc import load_xtc
from .formats.trr import load_trr
from .formats.hdf5 import load_hdf5
from .formats.lh5 import load_lh5
from .formats.netcdf import load_netcdf
from .formats.mdcrd import load_mdcrd
from .formats.dcd import load_dcd
from .formats.binpos import load_binpos
from .formats.pdb import load_pdb
from .formats.arc import load_arc
from .formats.openmmxml import load_xml
from .formats.prmtop import load_prmtop
from .formats.psf import load_psf
from .formats.mol2 import load_mol2
from .formats.amberrst import load_restrt, load_ncrestrt
from .formats.lammpstrj import load_lammpstrj
from .formats.dtr import load_dtr, load_stk
from .formats.xyzfile import load_xyz
from .formats.hoomdxml import load_hoomdxml
from .formats.tng import load_tng
from .core import element
from ._rmsd import rmsd, rmsf
from ._lprmsd import lprmsd
from .core.topology import Topology, Single, Double, Triple, Amide, Aromatic
from .geometry import *
from .core.trajectory import *
from .nmr import *
from . import reporters
def capi():
    """Return the locations of MDTraj's compiled C library and headers.

    Both the 'lib_dir' and 'include_dir' entries point at the package's
    core/lib directory.
    """
    import os
    import sys

    pkg_root = sys.modules['mdtraj'].__path__[0]
    lib_path = os.path.join(pkg_root, 'core', 'lib')
    return {'lib_dir': lib_path, 'include_dir': lib_path}
|
unknown
|
codeparrot/codeparrot-clean
| ||
"""
Get API information encoded in C files.
See ``find_function`` for how functions should be formatted, and
``read_order`` for how the order of the functions should be
specified.
"""
import sys, os, re
try:
import hashlib
md5new = hashlib.md5
except ImportError:
import md5
md5new = md5.new
if sys.version_info[:2] < (2, 6):
from sets import Set as set
import textwrap
from os.path import join
__docformat__ = 'restructuredtext'
# The files under src/ that are scanned for API functions
API_FILES = [join('multiarray', 'methods.c'),
             join('multiarray', 'arrayobject.c'),
             join('multiarray', 'flagsobject.c'),
             join('multiarray', 'descriptor.c'),
             join('multiarray', 'iterators.c'),
             join('multiarray', 'getset.c'),
             join('multiarray', 'number.c'),
             join('multiarray', 'sequence.c'),
             join('multiarray', 'ctors.c'),
             join('multiarray', 'convert.c'),
             join('multiarray', 'shape.c'),
             join('multiarray', 'item_selection.c'),
             join('multiarray', 'convert_datatype.c'),
             join('multiarray', 'arraytypes.c.src'),
             join('multiarray', 'multiarraymodule.c'),
             join('multiarray', 'scalartypes.c.src'),
             join('multiarray', 'scalarapi.c'),
             join('multiarray', 'calculation.c'),
             join('multiarray', 'usertypes.c'),
             join('multiarray', 'refcount.c'),
             join('multiarray', 'conversion_utils.c'),
             join('multiarray', 'buffer.c'),
             join('multiarray', 'datetime.c'),
             # join('libnumpy', 'npy_arraytypes.c.src'),
             join('umath', 'ufunc_object.c'),
             join('umath', 'loops.c.src'),
             ]
# Resolve the relative entries above against this script's location:
# the sources live in ../src relative to the code generators directory.
THIS_DIR = os.path.dirname(__file__)
API_FILES = [os.path.join(THIS_DIR, '..', 'src', a) for a in API_FILES]
def file_in_this_dir(filename):
    """Return *filename* resolved relative to this script's directory."""
    base = THIS_DIR
    return os.path.join(base, filename)
def remove_whitespace(s):
    """Return a copy of *s* with every whitespace character deleted."""
    return ''.join(ch for ch in s if not ch.isspace())
def _repl(str):
return str.replace(' intp', ' npy_intp').replace('Bool','npy_bool')
class Function(object):
    """One tagged C function: name, normalized return type, argument list
    and an optional ReST doc comment."""
    def __init__(self, name, return_type, args, doc=''):
        self.name = name
        # Normalize legacy type spellings (' intp' -> ' npy_intp', ...).
        self.return_type = _repl(return_type)
        self.args = args
        self.doc = doc

    def _format_arg(self, typename, name):
        # Keep a pointer '*' attached to the name: 'int *' + 'x' -> 'int *x'.
        if typename.endswith('*'):
            return typename + name
        else:
            return typename + ' ' + name

    def __str__(self):
        # C-style rendering: optional /* doc */ comment, then the signature.
        argstr = ', '.join([self._format_arg(*a) for a in self.args])
        if self.doc:
            doccomment = '/* %s */\n' % self.doc
        else:
            doccomment = ''
        return '%s%s %s(%s)' % (doccomment, self.return_type, self.name, argstr)

    def to_ReST(self):
        """Render the signature (plus doc comment) as a ReST literal block."""
        lines = ['::', '', ' ' + self.return_type]
        # '\000' marks argument separators so textwrap never breaks inside
        # an argument; it is turned back into a space after wrapping.
        argstr = ',\000'.join([self._format_arg(*a) for a in self.args])
        name = ' %s' % (self.name,)
        s = textwrap.wrap('(%s)' % (argstr,), width=72,
                          initial_indent=name,
                          subsequent_indent=' ' * (len(name)+1),
                          break_long_words=False)
        for l in s:
            lines.append(l.replace('\000', ' ').rstrip())
        lines.append('')
        if self.doc:
            lines.append(textwrap.dedent(self.doc))
        return '\n'.join(lines)

    def api_hash(self):
        # Short (8 hex chars) md5 over the whitespace-stripped signature.
        # NOTE(review): feeds str objects to md5 -- fine on Python 2, would
        # need .encode() on Python 3.
        m = md5new()
        m.update(remove_whitespace(self.return_type))
        m.update('\000')
        m.update(self.name)
        m.update('\000')
        for typename, name in self.args:
            m.update(remove_whitespace(typename))
            m.update('\000')
        return m.hexdigest()[:8]
class ParseError(Exception):
    """Signals a malformed tagged function; rendered as file:line:message."""
    def __init__(self, filename, lineno, msg):
        self.filename = filename
        self.lineno = lineno
        self.msg = msg

    def __str__(self):
        return '{0}:{1}:{2}'.format(self.filename, self.lineno, self.msg)
def skip_brackets(s, lbrac, rbrac):
    """Return the index in *s* where bracket nesting first returns to zero.

    Note: if the first character is not a bracket the depth is already
    zero and index 0 is returned (callers always pass a string starting
    with *lbrac*). Raises ValueError when the brackets never balance.
    """
    depth = 0
    idx = 0
    while idx < len(s):
        ch = s[idx]
        depth += (ch == lbrac) - (ch == rbrac)
        if depth == 0:
            return idx
        idx += 1
    raise ValueError("no match '%s' for '%s' (%r)" % (lbrac, rbrac, s))
def split_arguments(argstr):
    """Split a C argument-list string into (typename, name) pairs.

    Parenthesized groups (e.g. function-pointer arguments) are copied
    whole so their inner commas do not split arguments. An argument whose
    tail is not a simple identifier yields name ''. Removed the unused
    locals 'bracket_counts' and 'state', and renamed the inner variable
    that shadowed 'argstr'.
    """
    arguments = []
    current_argument = []
    i = 0

    def finish_arg():
        # Flush the accumulated characters as one (typename, name) pair.
        if current_argument:
            arg_text = ''.join(current_argument).strip()
            m = re.match(r'(.*(\s+|[*]))(\w+)$', arg_text)
            if m:
                typename = m.group(1).strip()
                name = m.group(3)
            else:
                typename = arg_text
                name = ''
            arguments.append((typename, name))
            del current_argument[:]

    while i < len(argstr):
        c = argstr[i]
        if c == ',':
            finish_arg()
        elif c == '(':
            # Copy the whole bracketed group; i advances past it below.
            p = skip_brackets(argstr[i:], '(', ')')
            current_argument += argstr[i:i+p]
            i += p-1
        else:
            current_argument += c
        i += 1
    finish_arg()
    return arguments
def find_functions(filename, tag='API'):
    """
    Scan the file, looking for tagged functions.
    Assuming ``tag=='API'``, a tagged function looks like::
        /*API*/
        static returntype*
        function_name(argtype1 arg1, argtype2 arg2)
        {
        }
    where the return type must be on a separate line, the function
    name must start the line, and the opening ``{`` must start the line.
    An optional documentation comment in ReST format may follow the tag,
    as in::
        /*API
        This function does foo...
        */
    """
    fo = open(filename, 'r')
    functions = []
    return_type = None
    function_name = None
    function_args = []
    doclist = []
    # Line-oriented state machine over the C source.
    SCANNING, STATE_DOC, STATE_RETTYPE, STATE_NAME, STATE_ARGS = range(5)
    state = SCANNING
    tagcomment = '/*' + tag
    for lineno, line in enumerate(fo):
        try:
            line = line.strip()
            if state == SCANNING:
                # Waiting for a '/*<tag>' marker; a one-line '/*API*/'
                # skips straight to the return-type state.
                if line.startswith(tagcomment):
                    if line.endswith('*/'):
                        state = STATE_RETTYPE
                    else:
                        state = STATE_DOC
            elif state == STATE_DOC:
                # Accumulate the ReST doc comment until its closing '*/'.
                if line.startswith('*/'):
                    state = STATE_RETTYPE
                else:
                    line = line.lstrip(' *')
                    doclist.append(line)
            elif state == STATE_RETTYPE:
                # first line of declaration with return type
                m = re.match(r'NPY_NO_EXPORT\s+(.*)$', line)
                if m:
                    line = m.group(1)
                return_type = line
                state = STATE_NAME
            elif state == STATE_NAME:
                # second line, with function name
                m = re.match(r'(\w+)\s*\(', line)
                if m:
                    function_name = m.group(1)
                else:
                    raise ParseError(filename, lineno+1,
                                     'could not find function name')
                function_args.append(line[m.end():])
                state = STATE_ARGS
            elif state == STATE_ARGS:
                # Keep collecting argument lines until the opening '{'.
                if line.startswith('{'):
                    # finished
                    fargs_str = ' '.join(function_args).rstrip(' )')
                    fargs = split_arguments(fargs_str)
                    f = Function(function_name, return_type, fargs,
                                 '\n'.join(doclist))
                    functions.append(f)
                    return_type = None
                    function_name = None
                    function_args = []
                    doclist = []
                    state = SCANNING
                else:
                    function_args.append(line)
        except:
            # NOTE(review): bare except -- reports the position, then
            # re-raises the original error.
            print(filename, lineno+1)
            raise
    fo.close()
    return functions
def should_rebuild(targets, source_files):
    """Return True when any target is missing, or when any source (the
    scanned API files, the given sources, or this script) is newer than
    the first target."""
    from distutils.dep_util import newer_group
    for target in targets:
        if not os.path.exists(target):
            return True
    sources = API_FILES + list(source_files) + [__file__]
    return bool(newer_group(sources, targets[0], missing='newer'))
# Those *Api classes instances know how to output strings for the generated code
class TypeApi:
    """Code-generation stanzas for a PyTypeObject exported via the C-API
    pointer table."""
    def __init__(self, name, index, ptr_cast, api_name):
        self.index = index
        self.name = name
        self.ptr_cast = ptr_cast
        self.api_name = api_name

    def define_from_array_api_string(self):
        """#define that dereferences the table slot with the right cast."""
        return "#define {0} (*({1} *){2}[{3:d}])".format(
            self.name, self.ptr_cast, self.api_name, self.index)

    def array_api_define(self):
        """Initializer entry for the API pointer table."""
        return " (void *) &" + self.name

    def internal_define(self):
        """extern declaration used when compiling numpy itself."""
        return "extern NPY_NO_EXPORT PyTypeObject %s;\n" % self.name
class GlobalVarApi:
    """Code-generation stanzas for an exported global variable of a given
    C type in the C-API pointer table."""
    def __init__(self, name, index, type, api_name):
        self.name = name
        self.index = index
        self.type = type
        self.api_name = api_name

    def define_from_array_api_string(self):
        """#define that dereferences the table slot as '<type> *'."""
        return "#define {0} (*({1} *){2}[{3:d}])".format(
            self.name, self.type, self.api_name, self.index)

    def array_api_define(self):
        """Initializer entry for the API pointer table."""
        return " ({0} *) &{1}".format(self.type, self.name)

    def internal_define(self):
        """extern declaration used when compiling numpy itself."""
        return "extern NPY_NO_EXPORT %s %s;\n" % (self.type, self.name)
# Dummy to be able to consistently use *Api instances for all items in the
# array api
class BoolValuesApi:
    """Dummy to be able to consistently use *Api instances for all items
    in the array api: the pair of canonical bool scalar singletons."""
    def __init__(self, name, index, api_name):
        self.name = name
        self.index = index
        self.type = 'PyBoolScalarObject'
        self.api_name = api_name

    def define_from_array_api_string(self):
        """#define that casts the table slot (no dereference here)."""
        return "#define {0} (({1} *){2}[{3:d}])".format(
            self.name, self.type, self.api_name, self.index)

    def array_api_define(self):
        """Initializer entry for the API pointer table."""
        return " (void *) &" + self.name

    def internal_define(self):
        """extern declaration; the symbol name is fixed by numpy's core."""
        return ("extern NPY_NO_EXPORT PyBoolScalarObject "
                "_PyArrayScalar_BoolValues[2];\n")
class FunctionApi:
    """Code-generation stanzas for an exported function in the C-API
    pointer table."""
    def __init__(self, name, index, return_type, args, api_name):
        self.name = name
        self.index = index
        self.return_type = return_type
        self.args = args
        self.api_name = api_name

    def _argtypes_string(self):
        # 'void' for a no-argument prototype; otherwise the comma-joined,
        # legacy-name-normalized argument types.
        if not self.args:
            return 'void'
        argstr = ', '.join([_repl(a[0]) for a in self.args])
        return argstr

    def define_from_array_api_string(self):
        # Multi-line #define with backslash continuations; the generated
        # header's layout depends on the embedded '\\\n' sequences.
        define = """\
#define %s \\\n        (*(%s (*)(%s)) \\
        %s[%d])""" % (self.name,
                      self.return_type,
                      self._argtypes_string(),
                      self.api_name,
                      self.index)
        return define

    def array_api_define(self):
        # Initializer entry for the API pointer table.
        return " (void *) %s" % self.name

    def internal_define(self):
        # Prototype used when compiling numpy itself.
        astr = """\
NPY_NO_EXPORT %s %s \\\n       (%s);""" % (self.return_type,
                                           self.name,
                                           self._argtypes_string())
        return astr
def order_dict(d):
    """Order dict by its values (ties broken by key)."""
    return sorted(d.items(), key=lambda item: (item[1], item[0]))
def merge_api_dicts(dicts):
    """Merge a sequence of api dicts into one; later dicts win on key
    collisions."""
    merged = {}
    for d in dicts:
        merged.update(d)
    return merged
def check_api_dict(d):
    """Check that an api dict is valid: no index used twice, and the
    indexes form a contiguous range starting at 0."""
    # Invert the mapping; duplicate indexes collapse, so a shorter inverse
    # means some index was used more than once.
    index_to_names = {}
    for name, index in d.items():
        index_to_names.setdefault(index, []).append(name)
    if len(index_to_names) != len(d):
        dupes = ['index %d -> %s' % (index, names)
                 for index, names in index_to_names.items()
                 if len(names) != 1]
        raise ValueError("""\
Same index has been used twice in api definition: %s
""" % dupes)
    # No 'hole' in the indexes may be allowed, and it must start at 0.
    indexes = set(d.values())
    expected = set(range(len(indexes)))
    if indexes != expected:
        diff = expected.symmetric_difference(indexes)
        raise ValueError("There are some holes in the API indexing: "
                         "(symmetric diff is %s)" % diff)
def get_api_functions(tagname, api_dict):
    """Parse the API source files for functions tagged *tagname* and
    return them ordered by their api_dict[name] index."""
    tagged = []
    for fname in API_FILES:
        tagged.extend(find_functions(fname, tagname))
    ordered = [(api_dict[func.name], func) for func in tagged]
    ordered.sort()
    return [pair[1] for pair in ordered]
def fullapi_hash(api_dicts):
    """Given a list of api dicts defining the numpy C API, compute a
    checksum of the list of items in the API (as a string)."""
    parts = []
    for d in api_dicts:
        # Deterministic order: sort each dict's items by index, then name.
        for name, index in sorted(d.items(), key=lambda kv: (kv[1], kv[0])):
            parts.append(name)
            parts.append(str(index))
    return md5new(''.join(parts).encode('ascii')).hexdigest()
# To parse strings like 'hex = checksum' where hex is e.g. 0x1234567F and
# checksum a 128 bits md5 checksum (hex format as well)
VERRE = re.compile('(^0x[\da-f]{8})\s*=\s*([\da-f]{32})')


def get_versions_hash():
    """Parse cversions.txt (next to this script) into a dict mapping the
    integer API version to its recorded md5 checksum string."""
    entries = []
    fname = os.path.join(os.path.dirname(__file__), 'cversions.txt')
    fid = open(fname, 'r')
    try:
        for line in fid:
            match = VERRE.match(line)
            if match:
                entries.append((int(match.group(1), 16), match.group(2)))
    finally:
        fid.close()
    return dict(entries)
def main():
    # CLI entry point: genapi.py <tagname> <order-file>.
    # NOTE(review): get_api_functions expects an api *dict* as its second
    # argument (it indexes api_dict[func.name]), but is handed the order
    # file *path* here -- this entry point looks stale/broken.
    tagname = sys.argv[1]
    order_file = sys.argv[2]
    functions = get_api_functions(tagname, order_file)
    # NOTE(review): md5new(tagname) / m.update(str) are Python-2-only
    # usages; Python 3 would require bytes.
    m = md5new(tagname)
    for func in functions:
        print(func)
        ah = func.api_hash()
        m.update(ah)
        print(hex(int(ah,16)))
    print(hex(int(m.hexdigest()[:8],16)))

if __name__ == '__main__':
    main()
|
unknown
|
codeparrot/codeparrot-clean
| ||
# resmoke suite: concurrency FSM workloads on a 3-node replica set with the
# SimulateCrash hook exercising unclean-shutdown recovery between tests.
test_kind: fsm_workload_test

selector:
  roots:
  - jstests/concurrency/fsm_workloads/**/*.js
  exclude_files:
  ##
  # Disabled due to MongoDB restrictions and/or workload restrictions
  ##
  exclude_with_any_tags:
  - requires_standalone
  - requires_sharding

executor:
  archive:
    hooks:
    - SimulateCrash
    tests: true
  config: {}
  # Hooks run between tests in the order listed.
  hooks:
  - class: SimulateCrash
  - class: CleanupConcurrencyWorkloads
  - class: CleanEveryN
    n: 10
  fixture:
    class: ReplicaSetFixture
    mongod_options:
      oplogSize: 1024
      syncdelay: 10
      set_parameters:
        enableTestCommands: 1
        queryAnalysisSamplerConfigurationRefreshSecs: 1
        queryAnalysisWriterIntervalSecs: 1
    num_nodes: 3
|
unknown
|
github
|
https://github.com/mongodb/mongo
|
buildscripts/resmokeconfig/suites/simulate_crash_concurrency_replication.yml
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.conf import settings
from django.utils.translation import ugettext_lazy as _
# Default configuration dictionary. Do not mutate.
HORIZON_CONFIG = {
    # Allow for ordering dashboards; list or tuple if provided.
    'dashboards': None,

    # Name of a default dashboard; defaults to first alphabetically if None
    'default_dashboard': None,

    # Default redirect url for users' home
    'user_home': settings.LOGIN_REDIRECT_URL,

    # AJAX settings for JavaScript
    'ajax_queue_limit': 10,
    # poll interval (presumably milliseconds -- confirm against the JS side)
    'ajax_poll_interval': 2500,

    # URL for additional help with this site.
    'help_url': None,

    # Exception configuration: exception classes routed to each handler.
    'exceptions': {'unauthorized': [],
                   'not_found': [],
                   'recoverable': []},

    # Password configuration.
    'password_validator': {'regex': '.*',
                           'help_text': _("Password is not accepted")},
    'password_autocomplete': 'off',

    # Enable or disable simplified floating IP address management.
    'simple_ip_management': True
}
|
unknown
|
codeparrot/codeparrot-clean
| ||
import os
import sys
import obd
import shutil
from PiHud import PiHud
from PyQt4 import QtGui
from GlobalConfig import GlobalConfig
try:
import RPi.GPIO as GPIO
except:
print "[pihud] Warning: RPi.GPIO library not found"
# file paths
running_dir = os.path.dirname(os.path.realpath(__file__))
default_config_path = os.path.join(running_dir, 'default.rc')
config_path = os.path.join(os.path.expanduser('~'), 'pihud.rc')
def main():
""" entry point """
# ============================ Config loading =============================
if not os.path.isfile(config_path):
# copy the default config
if not os.path.isfile(default_config_path):
print "[pihud] Fatal: Missing default config file. Try reinstalling"
sys.exit(1)
else:
shutil.copyfile(default_config_path, config_path)
global_config = GlobalConfig(config_path)
# =========================== OBD-II Connection ===========================
if global_config["debug"]:
obd.logger.setLevel(obd.logging.DEBUG) # enables all debug information
connection = obd.Async(global_config["port"])
# if global_config["debug"]:
# for i in range(32):
# connection.supported_commands.append(obd.commands[1][i])
# ============================ QT Application =============================
app = QtGui.QApplication(sys.argv)
pihud = PiHud(global_config, connection)
# ============================== GPIO Setup ===============================
try:
pin = self.config.page_adv_pin
GPIO.setmode(GPIO.BCM)
GPIO.setup(pin,
GPIO.IN,
pull_up_down=GPIO.PUD_UP)
GIO.add_event_detect(pin,
GPIO.FALLING,
callback=pihud.next_page,
bouncetime=200)
except:
pass
# ================================= Start =================================
status = app.exec_() # blocks until application quit
# ================================= Exit ==================================
connection.close()
sys.exit(status)
if __name__ == "__main__":
main()
|
unknown
|
codeparrot/codeparrot-clean
| ||
from cms.utils.compat.dj import is_installed
from django.conf.urls import include, url
from cms.apphook_pool import apphook_pool
from cms.views import details
from django.conf import settings
# Page-detail route: when APPEND_SLASH is on, the slug pattern requires a
# trailing slash; otherwise it matches without one.
if settings.APPEND_SLASH:
    reg = url(r'^(?P<slug>[0-9A-Za-z-_.//]+)/$', details, name='pages-details-by-slug')
else:
    reg = url(r'^(?P<slug>[0-9A-Za-z-_.//]+)$', details, name='pages-details-by-slug')

urlpatterns = [
    # Public pages
    url(r'^example/',
        include('cms.test_utils.project.sampleapp.urls_example', namespace="example1", app_name='example_app')),
    url(r'^example2/',
        include('cms.test_utils.project.sampleapp.urls_example', namespace="example2", app_name='example_app')),
    url(r'^$', details, {'slug': ''}, name='pages-root'),
    reg,
]

if apphook_pool.get_apphooks():
    """If there are some application urls, add special resolver, so we will
    have standard reverse support.
    """
    from cms.appresolver import get_app_patterns
    urlpatterns = get_app_patterns() + urlpatterns

# Expose the debug toolbar only when DEBUG is on and the app is installed.
if settings.DEBUG and is_installed('debug_toolbar'):
    import debug_toolbar
    urlpatterns += [
        url(r'^__debug__/', include(debug_toolbar.urls)),
    ]
|
unknown
|
codeparrot/codeparrot-clean
| ||
# Demonstrates OptionParser's TrueClass argument coercion: the switch value
# is converted to a boolean before the handler block runs.
require 'optparse'
parser = OptionParser.new
parser.on('--true_class=TRUE_CLASS', TrueClass) do |value|
# Print the coerced value together with its class (TrueClass or FalseClass).
p [value, value.class]
end
parser.parse!
|
ruby
|
github
|
https://github.com/ruby/ruby
|
doc/optparse/ruby/true_class.rb
|
# Copyright (C) 2015, Carlo de Franchis <carlo.de-franchis@cmla.ens-cachan.fr>
# Copyright (C) 2015, Gabriele Facciolo <facciolo@cmla.ens-cachan.fr>
# Copyright (C) 2015, Enric Meinhardt <enric.meinhardt@cmla.ens-cachan.fr>
# Copyright (C) 2015, Julien Michel <julien.michel@cnes.fr>
import os
import numpy as np
import rpcm
from s2p import sift
from s2p import rpc_utils
from s2p import estimation
from s2p.config import cfg
def error_vectors(m, F, ind='ref'):
    """
    Computes the error vectors for a list of matches and a given fundamental
    matrix.

    Args:
        m: Nx4 numpy array containing a list of matches, one per line. Each
            match is given by (x, y, x', y') where (x, y) is a point of the
            reference view and (x', y') is the corresponding point in the
            secondary view.
        F: fundamental matrix between the two views
        ind (optional, default is 'ref'): index of the image on which the lines
            are plotted to compute the error vectors. Must be either 'ref' or
            'sec' (reference or secondary image)

    Returns:
        Nx2 numpy array containing a list of planar vectors. Each vector is
        obtained as the difference between x' and its projection on the
        epipolar line Fx.

    Raises:
        ValueError: if `ind` is neither 'ref' nor 'sec'.
    """
    # BUG FIX: an invalid `ind` used to only print a message and then crash
    # later with a NameError (`l` never assigned). Fail fast instead.
    if ind not in ('ref', 'sec'):
        raise ValueError("pointing_accuracy.error_vectors: invalid 'ind' "
                         "argument: %r" % (ind,))

    # divide keypoints in two lists: x (first image) and xx (second image), and
    # convert them to homogeneous coordinates
    N = len(m)
    x = np.ones((N, 3))
    xx = np.ones((N, 3))
    x[:, 0:2] = m[:, 0:2]
    xx[:, 0:2] = m[:, 2:4]

    # epipolar lines: 2D array of size Nx3, one epipolar line per row
    if ind == 'sec':
        l = np.dot(x, F.T)
    else:
        l = np.dot(xx, F)

    # compute the error vectors (going from the projection of x or xx on l to x
    # or xx): n/d is the signed distance to the line, scaled back onto the
    # line normal (l[:, 0], l[:, 1]).
    if ind == 'sec':
        n = np.multiply(xx[:, 0], l[:, 0]) + np.multiply(xx[:, 1], l[:, 1]) + l[:, 2]
    else:
        n = np.multiply(x[:, 0], l[:, 0]) + np.multiply(x[:, 1], l[:, 1]) + l[:, 2]
    d = np.square(l[:, 0]) + np.square(l[:, 1])
    a = np.divide(n, d)
    return np.vstack((np.multiply(a, l[:, 0]), np.multiply(a, l[:, 1]))).T
def local_translation(r1, r2, x, y, w, h, m):
    """
    Estimates the optimal translation to minimise the relative pointing error
    on a given tile.

    Args:
        r1, r2: two instances of the rpcm.RPCModel class
        x, y, w, h: region of interest in the reference image (r1)
        m: Nx4 numpy array containing a list of matches, one per line. Each
            match is given by (p1, p2, q1, q2) where (p1, p2) is a point of the
            reference view and (q1, q2) is the corresponding point in the
            secondary view.

    Returns:
        3x3 numpy array containing the homogeneous representation of the
        optimal planar translation, to be applied to the secondary image in
        order to correct the pointing error.
    """
    # estimate the affine fundamental matrix between the two views
    n = cfg['n_gcp_per_axis']
    rpc_matches = rpc_utils.matches_from_rpc(r1, r2, x, y, w, h, n)
    F = estimation.affine_fundamental_matrix(rpc_matches)

    # compute the error vectors (in the secondary image)
    e = error_vectors(m, F, 'sec')

    # compute the median: as the vectors are collinear (because F is affine)
    # computing the median of each component independently is correct
    # NOTE(review): for even N this picks the upper-middle element rather than
    # averaging the two middle elements — acceptable for a robust estimate.
    N = len(e)
    out_x = np.sort(e[:, 0])[int(N/2)]
    out_y = np.sort(e[:, 1])[int(N/2)]

    # the correction to be applied to the second view is the opposite
    A = np.array([[1, 0, -out_x],
                  [0, 1, -out_y],
                  [0, 0, 1]])
    return A
def compute_correction(img1, img2, rpc1, rpc2, x, y, w, h):
    """
    Computes pointing correction matrix for specific ROI

    Args:
        img1 (str): path to the reference image
        img2 (str): path to the secondary image
        rpc1 (rpcm.RPCModel): camera model of the reference image
        rpc2 (rpcm.RPCModel): camera model of the secondary image
        x, y, w, h: four integers defining the rectangular ROI in the reference
            image. (x, y) is the top-left corner, and (w, h) are the dimensions
            of the rectangle. The ROI may be as big as you want. If bigger than
            1 Mpix, only five crops will be used to compute sift matches.

    Returns:
        a 3x3 matrix representing the planar transformation to apply to img2 in
        order to correct the pointing error, and the list of sift matches used
        to compute this correction.
    """
    matches = sift.matches_on_rpc_roi(img1, img2, rpc1, rpc2, x, y, w, h)
    # without matches there is nothing to estimate a correction from
    correction = None
    if matches is not None:
        correction = local_translation(rpc1, rpc2, x, y, w, h, matches)
    return correction, matches
def global_from_local(tiles):
    """
    Computes the pointing correction of a full roi using local corrections on
    tiles.

    Args:
        tiles: list of paths to folders associated to each tile

    Returns:
        the estimated pointing correction for the specified tile

    In each folder we expect to find the files pointing.txt and center.txt. The
    file pointing.txt contains the local correction (a projective transform
    given in homogeneous coordinates), and the file center.txt contains the
    coordinates of the mean of the keypoints of the secondary image.
    """
    # lists of matching points
    x = []
    xx = []

    # correction matrix of the last tile that passed the sanity checks,
    # remembered for the single-tile case below
    last_valid_A = None

    # loop over all the tiles
    for f in tiles:
        center = os.path.join(f, 'center_keypts_sec.txt')
        pointing = os.path.join(f, 'pointing.txt')
        if os.path.isfile(center) and os.path.isfile(pointing):
            A = np.loadtxt(pointing)
            p = np.loadtxt(center)
            if A.shape == (3, 3) and p.shape == (2,):
                q = np.dot(A, np.array([p[0], p[1], 1]))
                x.append(p)
                xx.append(q[0:2])
                last_valid_A = A

    if not x:
        return np.eye(3)
    elif len(x) == 1:
        # BUG FIX: return the correction of the (single) valid tile.  The
        # original returned the loop variable `A`, which could belong to a
        # later tile that failed the shape checks above.
        return last_valid_A
    elif len(x) == 2:
        #TODO: replace translation with similarity
        return estimation.translation(np.array(x), np.array(xx))
    else:
        # estimate an affine transformation transforming x in xx
        return estimation.affine_transformation(np.array(x), np.array(xx))
|
unknown
|
codeparrot/codeparrot-clean
| ||
// Copyright 2017 The Abseil Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef ABSL_BASE_INTERNAL_ATOMIC_HOOK_TEST_HELPER_H_
#define ABSL_BASE_INTERNAL_ATOMIC_HOOK_TEST_HELPER_H_

#include "absl/base/internal/atomic_hook.h"

namespace absl {
ABSL_NAMESPACE_BEGIN
namespace atomic_hook_internal {

// Function-pointer signature stored in the hook under test: no arguments,
// no result.
using VoidF = void (*)();

// The AtomicHook instance exercised by the atomic_hook tests.
extern absl::base_internal::AtomicHook<VoidF> func;

// Counter shared with the tests; presumably incremented by DefaultFunc()
// (defined in the accompanying .cc — confirm there).
extern int default_func_calls;

// Default hook target used by the tests.
void DefaultFunc();

// Registers `func` as the hook's target.
void RegisterFunc(VoidF func);

}  // namespace atomic_hook_internal
ABSL_NAMESPACE_END
}  // namespace absl

#endif  // ABSL_BASE_INTERNAL_ATOMIC_HOOK_TEST_HELPER_H_
|
c
|
github
|
https://github.com/mysql/mysql-server
|
extra/abseil/abseil-cpp-20230802.1/absl/base/internal/atomic_hook_test_helper.h
|
package multierror
import (
"errors"
"fmt"
"testing"
"gotest.tools/v3/assert"
)
// TestErrorJoin checks the string rendering produced by Join: a single
// (possibly wrapped) error is rendered inline, while multiple errors are
// rendered as a bulleted list with nested joins indented.
func TestErrorJoin(t *testing.T) {
	t.Run("single", func(t *testing.T) {
		err := Join(fmt.Errorf("invalid config: %w", Join(errors.New("foo"))))
		const expected = `invalid config: foo`
		assert.Equal(t, err.Error(), expected)
	})
	t.Run("multiple", func(t *testing.T) {
		err := Join(errors.New("foobar"), fmt.Errorf("invalid config: \n%w", Join(errors.New("foo"), errors.New("bar"))))
		const expected = `* foobar
* invalid config:
	* foo
	* bar`
		assert.Equal(t, err.Error(), expected)
	})
}
|
go
|
github
|
https://github.com/moby/moby
|
daemon/internal/multierror/multierror_test.go
|
from opencog.cogserver import MindAgent
from opencog.atomspace import types
from pln.chainers import Chainer
from pln.rules import *
class InferenceAgent(MindAgent):
    """CogServer MindAgent that runs PLN forward/backward chaining.

    The Chainer and its rule set are created lazily on the first run()
    call, because the atomspace is only supplied at run time.
    """

    def __init__(self):
        # Chainer is created lazily in run(); see the note in create_chainer.
        self.chainer = None
        # Number of forward+backward inference steps performed per cycle.
        self.num_steps_per_cycle = 100

    def create_chainer(self, atomspace):
        """Instantiate the Chainer and register the full PLN rule set."""
        # Note: using stimulateAtoms will cause a segfault if you
        # create the Agent from the Python shell (use the agents-start
        # command in the cogserver shell). It's because giving atoms
        # stimulus only works if the MindAgent is added to the
        # CogServer's list of agents.
        self.chainer = Chainer(atomspace,
                               stimulateAtoms=False,
                               agent=self,
                               learnRuleFrequencies=True)

        # Todo: Cleanup the following section where rules are added
        # ImplicationLink is MixedImplicationLink, you could also have
        # Extensional and Intensional Implication. etc. but that's a bit
        # much.
        # similarity_types =
        # [types.SimilarityLink,
        #  types.ExtensionalSimilarityLink,
        #  types.IntensionalSimilarityLink,
        #  types.EquivalenceLink]
        #
        # conditional_probability_types =
        # [types.InheritanceLink,
        #  types.SubsetLink,
        #  types.IntensionalInheritanceLink,
        #  types.ImplicationLink]

        # always use the mixed inheritance types, because human inference
        # is normally a mix of intensional and extensional
        conditional_probability_types = [types.InheritanceLink,
                                         types.ImplicationLink,
                                         types.PredictiveImplicationLink]
        similarity_types = [types.SimilarityLink,
                            types.EquivalenceLink]

        # first-order probabilistic rules, one instance per link type
        for link_type in conditional_probability_types:
            self.chainer.add_rule(InversionRule(self.chainer, link_type))
            self.chainer.add_rule(DeductionRule(self.chainer, link_type))
            self.chainer.add_rule(InductionRule(self.chainer, link_type))
            self.chainer.add_rule(AbductionRule(self.chainer, link_type))
            # Seems better than Modus Ponens - it doesn't make anything up
            self.chainer.add_rule(TermProbabilityRule(self.chainer, link_type))
            self.chainer.add_rule(ModusPonensRule(self.chainer, link_type))
            self.chainer.add_rule(PreciseModusPonensRule(self.chainer, link_type))

        for link_type in similarity_types:
            # SimilarityLinks don't require an InversionRule obviously
            self.chainer.add_rule(TransitiveSimilarityRule(self.chainer,
                                                           link_type))
            self.chainer.add_rule(SymmetricModusPonensRule(self.chainer,
                                                           link_type))

        self.chainer.add_rule(EvaluationImplicationRule(self.chainer))

        # These two Rules create mixed links out of intensional and
        # extensional links
        self.chainer.add_rule(InheritanceRule(self.chainer))
        self.chainer.add_rule(SimilarityRule(self.chainer))

        for link_type in conditional_probability_types:
            self.chainer.add_rule(AndToSubsetRule1(self.chainer, link_type))
            for N in xrange(2, 8):
                self.chainer.add_rule(AndToSubsetRuleN(self.chainer, link_type, N))

        # boolean links
        for rule in create_and_or_rules(self.chainer, 2, 8):
            self.chainer.add_rule(rule)
        for N in xrange(2, 8):
            self.chainer.add_rule(
                boolean_rules.AndBulkEvaluationRule(self.chainer, N))
        for N in xrange(3, 8):
            self.chainer.add_rule(
                boolean_rules.NegatedAndBulkEvaluationRule(self.chainer, N))

        # create probabilistic logical links out of MemberLinks
        self.chainer.add_rule(AndEvaluationRule(self.chainer))
        self.chainer.add_rule(OrEvaluationRule(self.chainer))

        # These two "macro rules" make the individual rules redundant
        self.chainer.add_rule(ExtensionalLinkEvaluationRule(self.chainer))
        self.chainer.add_rule(IntensionalLinkEvaluationRule(self.chainer))
        #self.chainer.add_rule(SubsetEvaluationRule(self.chainer))
        self.chainer.add_rule(NegatedSubsetEvaluationRule(self.chainer))
        #self.chainer.add_rule(
        #    ExtensionalSimilarityEvaluationRule(self.chainer))
        #self.chainer.add_rule(
        #    IntensionalInheritanceEvaluationRule(self.chainer))
        #self.chainer.add_rule(
        #    IntensionalSimilarityEvaluationRule(self.chainer))

        # rules converting between EvaluationLinks and MemberLinks
        self.member_rules = [GeneralEvaluationToMemberRule(self.chainer),
                             MemberToEvaluationRule(self.chainer)]
        self.member_rules += \
            create_general_evaluation_to_member_rules(self.chainer)
        for rule in self.member_rules:
            self.chainer.add_rule(rule)

        # It's important to have both of these
        self.chainer.add_rule(MemberToInheritanceRule(self.chainer))
        self.chainer.add_rule(InheritanceToMemberRule(self.chainer))

        # AttractionLink could be useful for causality
        self.chainer.add_rule(AttractionRule(self.chainer))

        self.chainer.add_rule(ScholemRule(self.chainer))

        boolean_transformation_rules = create_boolean_transformation_rules(self.chainer)
        for rule in boolean_transformation_rules:
            self.chainer.add_rule(rule)

        #self.chainer.add_rule(OntologicalInheritanceRule(self.chainer))

        #for rule in temporal_create_temporal_rules(self.chainer):
            #self.chainer.add_rule(rule)

        #higher_order_rules = []
        #for rule in self.chainer.rules:
            #higher_order_append(HigherOrderRule(self.chainer, rule))

        #contextual_rules = []
        #for rule in self.chainer.rules:
        #    contextual_append(ContextualRule(self.chainer, rule))

        #for rule in higher_order_rules + contextual_rules:
        #    self.chainer.add_rule(rule)

        #self.chainer.add_rule(AndToContextRule(self.chainer,
        #    types.InheritanceLink))

    def run(self, atomspace):
        """Called by the CogServer every cycle; performs inference steps."""
        if self.chainer is None:
            self.create_chainer(atomspace)
            # For simplicity, do nothing the first time. Silly APIs
            # mean you have to call run to set the atomspace
            return

        # Update all of the node probabilities at each step
        #self.chainer.update_all_node_probabilities()
        for i in xrange(0, self.num_steps_per_cycle):
            self.step()

    # Todo: The variable 'result' is never used
    def step(self):
        """One inference cycle: a forward step followed by a backward step."""
        result = self.chainer.forward_step()
        result = self.chainer.backward_step()
'''
# test it with forgetting, updating and diffusion
scm-eval (load-scm-from-file "../wordpairs.scm")
loadpy pln
agents-start pln.InferenceAgent opencog::ForgettingAgent opencog::ImportanceUpdatingAgent opencog::ImportanceDiffusionAgent
'''
class TestInferenceAgent(InferenceAgent):
    """Variant that searches for a specific query atom instead of chaining
    an open-ended number of steps."""
    def run(self, atomspace):
        if self.chainer is None:
            self.create_chainer(atomspace)
        # time_allowed is in the units expected by Chainer.find_atom
        # (presumably seconds — confirm against the Chainer API).
        self.chainer.find_atom(self.chainer.get_query(), time_allowed=300)
class ForwardInferenceAgent(InferenceAgent):
    """Variant that performs only forward chaining steps."""
    # Todo: The variable 'result' is never used
    def step(self):
        result = self.chainer.forward_step()
class BackwardInferenceAgent(InferenceAgent):
    """Variant that performs only backward chaining steps."""
    # Todo: The variable 'result' is never used
    def step(self):
        result = self.chainer.backward_step()
|
unknown
|
codeparrot/codeparrot-clean
| ||
/*
* Copyright 2002-present the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.beans.factory.config;
import java.beans.PropertyEditor;
import java.beans.PropertyEditorSupport;
import java.text.DateFormat;
import java.text.ParseException;
import java.util.Date;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
import org.junit.jupiter.api.Test;
import org.springframework.beans.MutablePropertyValues;
import org.springframework.beans.PropertyEditorRegistrar;
import org.springframework.beans.factory.support.DefaultListableBeanFactory;
import org.springframework.beans.factory.support.RootBeanDefinition;
import org.springframework.beans.propertyeditors.CustomDateEditor;
import org.springframework.beans.testfixture.beans.TestBean;
import static org.assertj.core.api.Assertions.assertThat;
/**
* @author Juergen Hoeller
* @author Chris Beams
* @since 31.07.2004
*/
class CustomEditorConfigurerTests {

	// Editors registered through a PropertyEditorRegistrar apply to plain
	// properties as well as typed values nested in collections.
	@Test
	void testCustomEditorConfigurerWithPropertyEditorRegistrar() throws ParseException {
		DefaultListableBeanFactory bf = new DefaultListableBeanFactory();
		CustomEditorConfigurer cec = new CustomEditorConfigurer();
		final DateFormat df = DateFormat.getDateInstance(DateFormat.SHORT, Locale.GERMAN);
		cec.setPropertyEditorRegistrars(new PropertyEditorRegistrar[] {
				registry -> registry.registerCustomEditor(Date.class, new CustomDateEditor(df, true))});
		cec.postProcessBeanFactory(bf);

		MutablePropertyValues pvs = new MutablePropertyValues();
		pvs.add("date", "2.12.1975");
		RootBeanDefinition bd1 = new RootBeanDefinition(TestBean.class);
		bd1.setPropertyValues(pvs);
		bf.registerBeanDefinition("tb1", bd1);
		pvs = new MutablePropertyValues();
		// TypedStringValue inside a map entry must also be converted
		pvs.add("someMap[myKey]", new TypedStringValue("2.12.1975", Date.class));
		RootBeanDefinition bd2 = new RootBeanDefinition(TestBean.class);
		bd2.setPropertyValues(pvs);
		bf.registerBeanDefinition("tb2", bd2);

		TestBean tb1 = (TestBean) bf.getBean("tb1");
		assertThat(tb1.getDate()).isEqualTo(df.parse("2.12.1975"));
		TestBean tb2 = (TestBean) bf.getBean("tb2");
		assertThat(tb2.getSomeMap().get("myKey")).isEqualTo(df.parse("2.12.1975"));
	}

	// Editors may also be registered by Class; the configurer instantiates them.
	@Test
	void testCustomEditorConfigurerWithEditorAsClass() throws ParseException {
		DefaultListableBeanFactory bf = new DefaultListableBeanFactory();
		CustomEditorConfigurer cec = new CustomEditorConfigurer();
		Map<Class<?>, Class<? extends PropertyEditor>> editors = new HashMap<>();
		editors.put(Date.class, MyDateEditor.class);
		cec.setCustomEditors(editors);
		cec.postProcessBeanFactory(bf);

		MutablePropertyValues pvs = new MutablePropertyValues();
		pvs.add("date", "2.12.1975");
		RootBeanDefinition bd = new RootBeanDefinition(TestBean.class);
		bd.setPropertyValues(pvs);
		bf.registerBeanDefinition("tb", bd);

		TestBean tb = (TestBean) bf.getBean("tb");
		DateFormat df = DateFormat.getDateInstance(DateFormat.SHORT, Locale.GERMAN);
		assertThat(tb.getDate()).isEqualTo(df.parse("2.12.1975"));
	}

	// A custom editor can target an array type (String[]) as well.
	@Test
	void testCustomEditorConfigurerWithRequiredTypeArray() {
		DefaultListableBeanFactory bf = new DefaultListableBeanFactory();
		CustomEditorConfigurer cec = new CustomEditorConfigurer();
		Map<Class<?>, Class<? extends PropertyEditor>> editors = new HashMap<>();
		editors.put(String[].class, MyTestEditor.class);
		cec.setCustomEditors(editors);
		cec.postProcessBeanFactory(bf);

		MutablePropertyValues pvs = new MutablePropertyValues();
		pvs.add("stringArray", "xxx");
		RootBeanDefinition bd = new RootBeanDefinition(TestBean.class);
		bd.setPropertyValues(pvs);
		bf.registerBeanDefinition("tb", bd);

		TestBean tb = (TestBean) bf.getBean("tb");
		assertThat(tb.getStringArray() != null && tb.getStringArray().length == 1).isTrue();
		assertThat(tb.getStringArray()[0]).isEqualTo("test");
	}

	// Lenient German short-date editor used by the class-registration test.
	public static class MyDateEditor extends CustomDateEditor {

		public MyDateEditor() {
			super(DateFormat.getDateInstance(DateFormat.SHORT, Locale.GERMAN), true);
		}
	}

	// Ignores its input and always produces {"test"}.
	public static class MyTestEditor extends PropertyEditorSupport {

		@Override
		public void setAsText(String text) {
			setValue(new String[] {"test"});
		}
	}

}
|
java
|
github
|
https://github.com/spring-projects/spring-framework
|
spring-beans/src/test/java/org/springframework/beans/factory/config/CustomEditorConfigurerTests.java
|
# tempfile.py unit tests.
import tempfile
import os
import sys
import re
import warnings
import unittest
from test import support
# Feature probes: some platforms lack os.stat or a text/binary distinction.
if hasattr(os, 'stat'):
    import stat
    has_stat = 1
else:
    has_stat = 0

has_textmode = (tempfile._text_openflags != tempfile._bin_openflags)
has_spawnl = hasattr(os, 'spawnl')

# TEST_FILES may need to be tweaked for systems depending on the maximum
# number of files that can be opened at one time (see ulimit -n)
if sys.platform in ('openbsd3', 'openbsd4'):
    TEST_FILES = 48
else:
    TEST_FILES = 100
# This is organized as one test for each chunk of code in tempfile.py,
# in order of their appearance in the file. Testing which requires
# threads is not done here.
# Common functionality.
class TC(unittest.TestCase):
    """Base class with helpers shared by all tempfile test cases."""

    # Matches the six-character random component generated by tempfile.
    str_check = re.compile(r"[a-zA-Z0-9_-]{6}$")

    def setUp(self):
        # Suppress the RuntimeWarning that mktemp() emits.
        self._warnings_manager = support.check_warnings()
        self._warnings_manager.__enter__()
        warnings.filterwarnings("ignore", category=RuntimeWarning,
                                message="mktemp", module=__name__)

    def tearDown(self):
        self._warnings_manager.__exit__(None, None, None)

    def failOnException(self, what, ei=None):
        # Fail the current test, reporting the exception being handled.
        if ei is None:
            ei = sys.exc_info()
        self.fail("%s raised %s: %s" % (what, ei[0], ei[1]))

    def nameCheck(self, name, dir, pre, suf):
        # Check that `name` lives in `dir` and is pre + <6 random chars> + suf.
        (ndir, nbase) = os.path.split(name)
        npre = nbase[:len(pre)]
        nsuf = nbase[len(nbase)-len(suf):]

        # check for equality of the absolute paths!
        self.assertEqual(os.path.abspath(ndir), os.path.abspath(dir),
                         "file '%s' not in directory '%s'" % (name, dir))
        self.assertEqual(npre, pre,
                         "file '%s' does not begin with '%s'" % (nbase, pre))
        self.assertEqual(nsuf, suf,
                         "file '%s' does not end with '%s'" % (nbase, suf))

        nbase = nbase[len(pre):len(nbase)-len(suf)]
        self.assertTrue(self.str_check.match(nbase),
                        "random string '%s' does not match /^[a-zA-Z0-9_-]{6}$/"
                        % nbase)

# Registry of test case classes collected for the test runner.
test_classes = []
class test_exports(TC):
    """Check that tempfile exposes exactly the expected public names."""
    def test_exports(self):
        # There are no surprising symbols in the tempfile module
        dict = tempfile.__dict__

        # Expected public API; any other non-underscored name is flagged.
        expected = {
            "NamedTemporaryFile" : 1,
            "TemporaryFile" : 1,
            "mkstemp" : 1,
            "mkdtemp" : 1,
            "mktemp" : 1,
            "TMP_MAX" : 1,
            "gettempprefix" : 1,
            "gettempdir" : 1,
            "tempdir" : 1,
            "template" : 1,
            "SpooledTemporaryFile" : 1,
            "TemporaryDirectory" : 1,
        }

        unexp = []
        for key in dict:
            if key[0] != '_' and key not in expected:
                unexp.append(key)
        self.assertTrue(len(unexp) == 0,
                        "unexpected keys: %s" % unexp)

test_classes.append(test_exports)
class test__RandomNameSequence(TC):
    """Test the internal iterator object _RandomNameSequence."""

    def setUp(self):
        self.r = tempfile._RandomNameSequence()
        super().setUp()

    def test_get_six_char_str(self):
        # _RandomNameSequence returns a six-character string
        s = next(self.r)
        self.nameCheck(s, '', '', '')

    def test_many(self):
        # _RandomNameSequence returns no duplicate strings (stochastic)
        dict = {}
        r = self.r
        for i in range(TEST_FILES):
            s = next(r)
            self.nameCheck(s, '', '', '')
            self.assertNotIn(s, dict)
            dict[s] = 1

    def test_supports_iter(self):
        # BUG FIX: this method was named `supports_iter`, so unittest never
        # discovered or ran it; test methods must start with `test_`.
        # _RandomNameSequence supports the iterator protocol
        i = 0
        r = self.r
        try:
            for s in r:
                i += 1
                if i == 20:
                    break
        except:
            self.failOnException("iteration")

test_classes.append(test__RandomNameSequence)
class test__candidate_tempdir_list(TC):
    """Test the internal function _candidate_tempdir_list."""

    def test_nonempty_list(self):
        # _candidate_tempdir_list returns a nonempty list of strings
        cand = tempfile._candidate_tempdir_list()

        self.assertFalse(len(cand) == 0)
        for c in cand:
            self.assertIsInstance(c, str)

    def test_wanted_dirs(self):
        # _candidate_tempdir_list contains the expected directories

        # Make sure the interesting environment variables are all set.
        with support.EnvironmentVarGuard() as env:
            for envname in 'TMPDIR', 'TEMP', 'TMP':
                dirname = os.getenv(envname)
                if not dirname:
                    env[envname] = os.path.abspath(envname)

            cand = tempfile._candidate_tempdir_list()

            for envname in 'TMPDIR', 'TEMP', 'TMP':
                dirname = os.getenv(envname)
                if not dirname: raise ValueError
                self.assertIn(dirname, cand)

            # The current directory (or os.curdir) is always a candidate.
            try:
                dirname = os.getcwd()
            except (AttributeError, os.error):
                dirname = os.curdir

            self.assertIn(dirname, cand)

        # Not practical to try to verify the presence of OS-specific
        # paths in this list.

test_classes.append(test__candidate_tempdir_list)
# We test _get_default_tempdir by testing gettempdir.

class test__get_candidate_names(TC):
    """Test the internal function _get_candidate_names."""

    def test_retval(self):
        # _get_candidate_names returns a _RandomNameSequence object
        obj = tempfile._get_candidate_names()
        self.assertIsInstance(obj, tempfile._RandomNameSequence)

    def test_same_thing(self):
        # _get_candidate_names always returns the same object
        # (identity, not mere equality: the module caches one sequence)
        a = tempfile._get_candidate_names()
        b = tempfile._get_candidate_names()

        self.assertTrue(a is b)

test_classes.append(test__get_candidate_names)
class test__mkstemp_inner(TC):
    """Test the internal function _mkstemp_inner."""

    class mkstemped:
        # Helper wrapper: creates a temp file via _mkstemp_inner and removes
        # it when garbage-collected.  os.close/os.unlink are bound as class
        # attributes so __del__ still works during interpreter shutdown.
        _bflags = tempfile._bin_openflags
        _tflags = tempfile._text_openflags
        _close = os.close
        _unlink = os.unlink

        def __init__(self, dir, pre, suf, bin):
            if bin: flags = self._bflags
            else: flags = self._tflags

            (self.fd, self.name) = tempfile._mkstemp_inner(dir, pre, suf, flags)

        def write(self, str):
            os.write(self.fd, str)

        def __del__(self):
            self._close(self.fd)
            self._unlink(self.name)

    def do_create(self, dir=None, pre="", suf="", bin=1):
        # Create a temp file (binary mode by default) and check its name.
        if dir is None:
            dir = tempfile.gettempdir()
        try:
            file = self.mkstemped(dir, pre, suf, bin)
        except:
            self.failOnException("_mkstemp_inner")

        self.nameCheck(file.name, dir, pre, suf)
        return file

    def test_basic(self):
        # _mkstemp_inner can create files
        self.do_create().write(b"blat")
        self.do_create(pre="a").write(b"blat")
        self.do_create(suf="b").write(b"blat")
        self.do_create(pre="a", suf="b").write(b"blat")
        self.do_create(pre="aa", suf=".txt").write(b"blat")

    def test_basic_many(self):
        # _mkstemp_inner can create many files (stochastic)
        extant = list(range(TEST_FILES))
        for i in extant:
            extant[i] = self.do_create(pre="aa")

    def test_choose_directory(self):
        # _mkstemp_inner can create files in a user-selected directory
        dir = tempfile.mkdtemp()
        try:
            self.do_create(dir=dir).write(b"blat")
        finally:
            os.rmdir(dir)

    def test_file_mode(self):
        # _mkstemp_inner creates files with the proper mode
        if not has_stat:
            return # ugh, can't use SkipTest.

        file = self.do_create()
        mode = stat.S_IMODE(os.stat(file.name).st_mode)
        expected = 0o600
        if sys.platform in ('win32', 'os2emx'):
            # There's no distinction among 'user', 'group' and 'world';
            # replicate the 'user' bits.
            user = expected >> 6
            expected = user * (1 + 8 + 64)
        self.assertEqual(mode, expected)

    def test_noinherit(self):
        # _mkstemp_inner file handles are not inherited by child processes
        if not has_spawnl:
            return # ugh, can't use SkipTest.

        if support.verbose:
            v="v"
        else:
            v="q"

        file = self.do_create()
        fd = "%d" % file.fd

        try:
            me = __file__
        except NameError:
            me = sys.argv[0]

        # We have to exec something, so that FD_CLOEXEC will take
        # effect. The core of this test is therefore in
        # tf_inherit_check.py, which see.
        tester = os.path.join(os.path.dirname(os.path.abspath(me)),
                              "tf_inherit_check.py")

        # On Windows a spawn* /path/ with embedded spaces shouldn't be quoted,
        # but an arg with embedded spaces should be decorated with double
        # quotes on each end
        if sys.platform in ('win32',):
            decorated = '"%s"' % sys.executable
            tester = '"%s"' % tester
        else:
            decorated = sys.executable

        retval = os.spawnl(os.P_WAIT, sys.executable, decorated, tester, v, fd)
        self.assertFalse(retval < 0,
                         "child process caught fatal signal %d" % -retval)
        self.assertFalse(retval > 0, "child process reports failure %d"%retval)

    def test_textmode(self):
        # _mkstemp_inner can create files in text mode
        if not has_textmode:
            return # ugh, can't use SkipTest.

        # A text file is truncated at the first Ctrl+Z byte
        f = self.do_create(bin=0)
        f.write(b"blat\x1a")
        f.write(b"extra\n")
        os.lseek(f.fd, 0, os.SEEK_SET)
        self.assertEqual(os.read(f.fd, 20), b"blat")

test_classes.append(test__mkstemp_inner)
class test_gettempprefix(TC):
    """Test gettempprefix()."""

    def test_sane_template(self):
        # gettempprefix returns a nonempty prefix string
        p = tempfile.gettempprefix()

        self.assertIsInstance(p, str)
        self.assertTrue(len(p) > 0)

    def test_usable_template(self):
        # gettempprefix returns a usable prefix string

        # Create a temp directory, avoiding use of the prefix.
        # Then attempt to create a file whose name is
        # prefix + 'xxxxxx.xxx' in that directory.
        p = tempfile.gettempprefix() + "xxxxxx.xxx"
        d = tempfile.mkdtemp(prefix="")
        try:
            p = os.path.join(d, p)
            try:
                fd = os.open(p, os.O_RDWR | os.O_CREAT)
            except:
                self.failOnException("os.open")
            os.close(fd)
            os.unlink(p)
        finally:
            os.rmdir(d)

test_classes.append(test_gettempprefix)
class test_gettempdir(TC):
    """Test gettempdir()."""

    def test_directory_exists(self):
        # gettempdir returns a directory which exists
        dir = tempfile.gettempdir()
        self.assertTrue(os.path.isabs(dir) or dir == os.curdir,
                        "%s is not an absolute path" % dir)
        self.assertTrue(os.path.isdir(dir),
                        "%s is not a directory" % dir)

    def test_directory_writable(self):
        # gettempdir returns a directory writable by the user

        # sneaky: just instantiate a NamedTemporaryFile, which
        # defaults to writing into the directory returned by
        # gettempdir.
        try:
            file = tempfile.NamedTemporaryFile()
            file.write(b"blat")
            file.close()
        except:
            self.failOnException("create file in %s" % tempfile.gettempdir())

    def test_same_thing(self):
        # gettempdir always returns the same object
        # (identity, not mere equality: the result is cached)
        a = tempfile.gettempdir()
        b = tempfile.gettempdir()

        self.assertTrue(a is b)

test_classes.append(test_gettempdir)
class test_mkstemp(TC):
    """Test mkstemp()."""

    def do_create(self, dir=None, pre="", suf=""):
        # Create a (fd, name) pair via mkstemp and verify the name; the file
        # is always closed and removed before returning.
        if dir is None:
            dir = tempfile.gettempdir()
        try:
            (fd, name) = tempfile.mkstemp(dir=dir, prefix=pre, suffix=suf)
            (ndir, nbase) = os.path.split(name)
            adir = os.path.abspath(dir)
            self.assertEqual(adir, ndir,
                "Directory '%s' incorrectly returned as '%s'" % (adir, ndir))
        except:
            self.failOnException("mkstemp")

        try:
            self.nameCheck(name, dir, pre, suf)
        finally:
            os.close(fd)
            os.unlink(name)

    def test_basic(self):
        # mkstemp can create files
        self.do_create()
        self.do_create(pre="a")
        self.do_create(suf="b")
        self.do_create(pre="a", suf="b")
        self.do_create(pre="aa", suf=".txt")
        self.do_create(dir=".")

    def test_choose_directory(self):
        # mkstemp can create directories in a user-selected directory
        dir = tempfile.mkdtemp()
        try:
            self.do_create(dir=dir)
        finally:
            os.rmdir(dir)

test_classes.append(test_mkstemp)
class test_mkdtemp(TC):
    """Test mkdtemp()."""

    def do_create(self, dir=None, pre="", suf=""):
        # Create a directory via mkdtemp and verify its name; the caller is
        # responsible for removing the returned directory.
        if dir is None:
            dir = tempfile.gettempdir()
        try:
            name = tempfile.mkdtemp(dir=dir, prefix=pre, suffix=suf)
        except:
            self.failOnException("mkdtemp")

        try:
            self.nameCheck(name, dir, pre, suf)
            return name
        except:
            os.rmdir(name)
            raise

    def test_basic(self):
        # mkdtemp can create directories
        os.rmdir(self.do_create())
        os.rmdir(self.do_create(pre="a"))
        os.rmdir(self.do_create(suf="b"))
        os.rmdir(self.do_create(pre="a", suf="b"))
        os.rmdir(self.do_create(pre="aa", suf=".txt"))

    def test_basic_many(self):
        # mkdtemp can create many directories (stochastic)
        extant = list(range(TEST_FILES))
        try:
            for i in extant:
                extant[i] = self.do_create(pre="aa")
        finally:
            for i in extant:
                if(isinstance(i, str)):
                    os.rmdir(i)

    def test_choose_directory(self):
        # mkdtemp can create directories in a user-selected directory
        dir = tempfile.mkdtemp()
        try:
            os.rmdir(self.do_create(dir=dir))
        finally:
            os.rmdir(dir)

    def test_mode(self):
        # mkdtemp creates directories with the proper mode
        if not has_stat:
            return # ugh, can't use SkipTest.

        dir = self.do_create()
        try:
            mode = stat.S_IMODE(os.stat(dir).st_mode)
            mode &= 0o777 # Mask off sticky bits inherited from /tmp
            expected = 0o700
            if sys.platform in ('win32', 'os2emx'):
                # There's no distinction among 'user', 'group' and 'world';
                # replicate the 'user' bits.
                user = expected >> 6
                expected = user * (1 + 8 + 64)
            self.assertEqual(mode, expected)
        finally:
            os.rmdir(dir)

test_classes.append(test_mkdtemp)
class test_mktemp(TC):
    """Test mktemp()."""

    # For safety, all use of mktemp must occur in a private directory.
    # We must also suppress the RuntimeWarning it generates.
    def setUp(self):
        self.dir = tempfile.mkdtemp()
        super().setUp()

    def tearDown(self):
        if self.dir:
            os.rmdir(self.dir)
            self.dir = None
        super().tearDown()

    class mktemped:
        # Helper wrapper: picks a name via mktemp, creates the file, and
        # removes it when garbage-collected.  os.unlink is bound as a class
        # attribute so __del__ still works during interpreter shutdown.
        _unlink = os.unlink
        _bflags = tempfile._bin_openflags

        def __init__(self, dir, pre, suf):
            self.name = tempfile.mktemp(dir=dir, prefix=pre, suffix=suf)
            # Create the file. This will raise an exception if it's
            # mysteriously appeared in the meanwhile.
            os.close(os.open(self.name, self._bflags, 0o600))

        def __del__(self):
            self._unlink(self.name)

    def do_create(self, pre="", suf=""):
        try:
            file = self.mktemped(self.dir, pre, suf)
        except:
            self.failOnException("mktemp")

        self.nameCheck(file.name, self.dir, pre, suf)
        return file

    def test_basic(self):
        # mktemp can choose usable file names
        self.do_create()
        self.do_create(pre="a")
        self.do_create(suf="b")
        self.do_create(pre="a", suf="b")
        self.do_create(pre="aa", suf=".txt")

    def test_many(self):
        # mktemp can choose many usable file names (stochastic)
        extant = list(range(TEST_FILES))
        for i in extant:
            extant[i] = self.do_create(pre="aa")

##     def test_warning(self):
##         # mktemp issues a warning when used
##         warnings.filterwarnings("error",
##                                 category=RuntimeWarning,
##                                 message="mktemp")
##         self.assertRaises(RuntimeWarning,
##                           tempfile.mktemp, dir=self.dir)

test_classes.append(test_mktemp)
# We test _TemporaryFileWrapper by testing NamedTemporaryFile.
class test_NamedTemporaryFile(TC):
    """Test NamedTemporaryFile().

    NamedTemporaryFile is the public face of _TemporaryFileWrapper, so this
    also exercises that wrapper.
    """
    def do_create(self, dir=None, pre="", suf="", delete=True):
        # Create a NamedTemporaryFile and sanity-check its generated name;
        # failOnException() fails the test on any creation error.
        if dir is None:
            dir = tempfile.gettempdir()
        try:
            file = tempfile.NamedTemporaryFile(dir=dir, prefix=pre, suffix=suf,
                                               delete=delete)
        except:
            self.failOnException("NamedTemporaryFile")
        self.nameCheck(file.name, dir, pre, suf)
        return file
    def test_basic(self):
        # NamedTemporaryFile can create files
        self.do_create()
        self.do_create(pre="a")
        self.do_create(suf="b")
        self.do_create(pre="a", suf="b")
        self.do_create(pre="aa", suf=".txt")
    def test_creates_named(self):
        # NamedTemporaryFile creates files with names
        f = tempfile.NamedTemporaryFile()
        self.assertTrue(os.path.exists(f.name),
                        "NamedTemporaryFile %s does not exist" % f.name)
    def test_del_on_close(self):
        # A NamedTemporaryFile is deleted when closed
        dir = tempfile.mkdtemp()
        try:
            f = tempfile.NamedTemporaryFile(dir=dir)
            f.write(b'blat')
            f.close()
            self.assertFalse(os.path.exists(f.name),
                        "NamedTemporaryFile %s exists after close" % f.name)
        finally:
            os.rmdir(dir)
    def test_dis_del_on_close(self):
        # Tests that delete-on-close can be disabled
        dir = tempfile.mkdtemp()
        tmp = None
        try:
            f = tempfile.NamedTemporaryFile(dir=dir, delete=False)
            tmp = f.name
            f.write(b'blat')
            f.close()
            self.assertTrue(os.path.exists(f.name),
                        "NamedTemporaryFile %s missing after close" % f.name)
        finally:
            # Manual cleanup is required precisely because delete=False.
            if tmp is not None:
                os.unlink(tmp)
            os.rmdir(dir)
    def test_multiple_close(self):
        # A NamedTemporaryFile can be closed many times without error
        f = tempfile.NamedTemporaryFile()
        f.write(b'abc\n')
        f.close()
        try:
            f.close()
            f.close()
        except:
            self.failOnException("close")
    def test_context_manager(self):
        # A NamedTemporaryFile can be used as a context manager
        with tempfile.NamedTemporaryFile() as f:
            self.assertTrue(os.path.exists(f.name))
        self.assertFalse(os.path.exists(f.name))
        # Re-entering the context manager after close must raise ValueError.
        def use_closed():
            with f:
                pass
        self.assertRaises(ValueError, use_closed)
    # How to test the mode and bufsize parameters?
test_classes.append(test_NamedTemporaryFile)
class test_SpooledTemporaryFile(TC):
    """Test SpooledTemporaryFile().

    These tests inspect the private ``_rolled`` flag to verify exactly when
    the in-memory buffer is rolled over to a real on-disk file.
    """
    def do_create(self, max_size=0, dir=None, pre="", suf=""):
        # Create a SpooledTemporaryFile; failOnException() fails the test on
        # any creation error.  No nameCheck(): a spooled file has no name
        # until it rolls over.
        if dir is None:
            dir = tempfile.gettempdir()
        try:
            file = tempfile.SpooledTemporaryFile(max_size=max_size, dir=dir, prefix=pre, suffix=suf)
        except:
            self.failOnException("SpooledTemporaryFile")
        return file
    def test_basic(self):
        # SpooledTemporaryFile can create files
        f = self.do_create()
        self.assertFalse(f._rolled)
        f = self.do_create(max_size=100, pre="a", suf=".txt")
        self.assertFalse(f._rolled)
    def test_del_on_close(self):
        # A SpooledTemporaryFile is deleted when closed
        dir = tempfile.mkdtemp()
        try:
            f = tempfile.SpooledTemporaryFile(max_size=10, dir=dir)
            self.assertFalse(f._rolled)
            # 25 bytes > max_size of 10, so this forces a rollover.
            f.write(b'blat ' * 5)
            self.assertTrue(f._rolled)
            filename = f.name
            f.close()
            # After rollover f.name is a real path; it must be gone on close.
            self.assertFalse(isinstance(filename, str) and os.path.exists(filename),
                             "SpooledTemporaryFile %s exists after close" % filename)
        finally:
            os.rmdir(dir)
    def test_rewrite_small(self):
        # A SpooledTemporaryFile can be written to multiple within the max_size
        f = self.do_create(max_size=30)
        self.assertFalse(f._rolled)
        # Repeated overwrites never exceed max_size, so no rollover.
        for i in range(5):
            f.seek(0, 0)
            f.write(b'x' * 20)
        self.assertFalse(f._rolled)
    def test_write_sequential(self):
        # A SpooledTemporaryFile should hold exactly max_size bytes, and roll
        # over afterward
        f = self.do_create(max_size=30)
        self.assertFalse(f._rolled)
        f.write(b'x' * 20)
        self.assertFalse(f._rolled)
        f.write(b'x' * 10)
        self.assertFalse(f._rolled)
        # Byte 31 crosses max_size and triggers the rollover.
        f.write(b'x')
        self.assertTrue(f._rolled)
    def test_writelines(self):
        # Verify writelines with a SpooledTemporaryFile
        f = self.do_create()
        f.writelines((b'x', b'y', b'z'))
        f.seek(0)
        buf = f.read()
        self.assertEqual(buf, b'xyz')
    def test_writelines_sequential(self):
        # A SpooledTemporaryFile should hold exactly max_size bytes, and roll
        # over afterward
        f = self.do_create(max_size=35)
        f.writelines((b'x' * 20, b'x' * 10, b'x' * 5))
        self.assertFalse(f._rolled)
        f.write(b'x')
        self.assertTrue(f._rolled)
    def test_sparse(self):
        # A SpooledTemporaryFile that is written late in the file will extend
        # when that occurs
        f = self.do_create(max_size=30)
        self.assertFalse(f._rolled)
        # Seeking past max_size alone must NOT roll over...
        f.seek(100, 0)
        self.assertFalse(f._rolled)
        # ...but the first write at that offset must.
        f.write(b'x')
        self.assertTrue(f._rolled)
    def test_fileno(self):
        # A SpooledTemporaryFile should roll over to a real file on fileno()
        f = self.do_create(max_size=30)
        self.assertFalse(f._rolled)
        self.assertTrue(f.fileno() > 0)
        self.assertTrue(f._rolled)
    def test_multiple_close_before_rollover(self):
        # A SpooledTemporaryFile can be closed many times without error
        f = tempfile.SpooledTemporaryFile()
        f.write(b'abc\n')
        self.assertFalse(f._rolled)
        f.close()
        try:
            f.close()
            f.close()
        except:
            self.failOnException("close")
    def test_multiple_close_after_rollover(self):
        # A SpooledTemporaryFile can be closed many times without error
        f = tempfile.SpooledTemporaryFile(max_size=1)
        f.write(b'abc\n')
        self.assertTrue(f._rolled)
        f.close()
        try:
            f.close()
            f.close()
        except:
            self.failOnException("close")
    def test_bound_methods(self):
        # It should be OK to steal a bound method from a SpooledTemporaryFile
        # and use it independently; when the file rolls over, those bound
        # methods should continue to function
        f = self.do_create(max_size=30)
        read = f.read
        write = f.write
        seek = f.seek
        write(b"a" * 35)
        write(b"b" * 35)
        seek(0, 0)
        self.assertEqual(read(70), b'a'*35 + b'b'*35)
    def test_text_mode(self):
        # Creating a SpooledTemporaryFile with a text mode should produce
        # a file object reading and writing (Unicode) text strings.
        f = tempfile.SpooledTemporaryFile(mode='w+', max_size=10)
        f.write("abc\n")
        f.seek(0)
        self.assertEqual(f.read(), "abc\n")
        f.write("def\n")
        f.seek(0)
        self.assertEqual(f.read(), "abc\ndef\n")
        f.write("xyzzy\n")
        f.seek(0)
        self.assertEqual(f.read(), "abc\ndef\nxyzzy\n")
        # Check that Ctrl+Z doesn't truncate the file
        f.write("foo\x1abar\n")
        f.seek(0)
        self.assertEqual(f.read(), "abc\ndef\nxyzzy\nfoo\x1abar\n")
    def test_text_newline_and_encoding(self):
        # newline='' preserves '\r\n' verbatim across the rollover.
        f = tempfile.SpooledTemporaryFile(mode='w+', max_size=10,
                                          newline='', encoding='utf-8')
        f.write("\u039B\r\n")
        f.seek(0)
        self.assertEqual(f.read(), "\u039B\r\n")
        self.assertFalse(f._rolled)
        f.write("\u039B" * 20 + "\r\n")
        f.seek(0)
        self.assertEqual(f.read(), "\u039B\r\n" + ("\u039B" * 20) + "\r\n")
        self.assertTrue(f._rolled)
    def test_context_manager_before_rollover(self):
        # A SpooledTemporaryFile can be used as a context manager
        with tempfile.SpooledTemporaryFile(max_size=1) as f:
            self.assertFalse(f._rolled)
            self.assertFalse(f.closed)
        self.assertTrue(f.closed)
        def use_closed():
            with f:
                pass
        self.assertRaises(ValueError, use_closed)
    def test_context_manager_during_rollover(self):
        # A SpooledTemporaryFile can be used as a context manager
        with tempfile.SpooledTemporaryFile(max_size=1) as f:
            self.assertFalse(f._rolled)
            f.write(b'abc\n')
            f.flush()
            self.assertTrue(f._rolled)
            self.assertFalse(f.closed)
        self.assertTrue(f.closed)
        def use_closed():
            with f:
                pass
        self.assertRaises(ValueError, use_closed)
    def test_context_manager_after_rollover(self):
        # A SpooledTemporaryFile can be used as a context manager
        f = tempfile.SpooledTemporaryFile(max_size=1)
        f.write(b'abc\n')
        f.flush()
        self.assertTrue(f._rolled)
        with f:
            self.assertFalse(f.closed)
        self.assertTrue(f.closed)
        def use_closed():
            with f:
                pass
        self.assertRaises(ValueError, use_closed)
test_classes.append(test_SpooledTemporaryFile)
class test_TemporaryFile(TC):
    """Test TemporaryFile().

    On platforms where TemporaryFile is distinct from NamedTemporaryFile
    the file is unlinked immediately, so it has no usable name.
    """
    def test_basic(self):
        # TemporaryFile can create files
        # No point in testing the name params - the file has no name.
        try:
            tempfile.TemporaryFile()
        except:
            self.failOnException("TemporaryFile")
    def test_has_no_name(self):
        # TemporaryFile creates files with no names (on this system)
        dir = tempfile.mkdtemp()
        f = tempfile.TemporaryFile(dir=dir)
        f.write(b'blat')
        # Sneaky: because this file has no name, it should not prevent
        # us from removing the directory it was created in.
        try:
            os.rmdir(dir)
        except:
            # Capture the rmdir failure first, then clean up so the temp
            # area is left tidy before failing the test.
            ei = sys.exc_info()
            # cleanup
            f.close()
            os.rmdir(dir)
            self.failOnException("rmdir", ei)
    def test_multiple_close(self):
        # A TemporaryFile can be closed many times without error
        f = tempfile.TemporaryFile()
        f.write(b'abc\n')
        f.close()
        try:
            f.close()
            f.close()
        except:
            self.failOnException("close")
    # How to test the mode and bufsize parameters?
    def test_mode_and_encoding(self):
        # Round-trip data through TemporaryFile in several text/binary modes.
        def roundtrip(input, *args, **kwargs):
            with tempfile.TemporaryFile(*args, **kwargs) as fileobj:
                fileobj.write(input)
                fileobj.seek(0)
                self.assertEqual(input, fileobj.read())
        roundtrip(b"1234", "w+b")
        roundtrip("abdc\n", "w+")
        roundtrip("\u039B", "w+", encoding="utf-16")
        roundtrip("foo\r\n", "w+", newline="")
# Only run this class where TemporaryFile is a distinct function; on some
# platforms it is simply an alias of NamedTemporaryFile and the tests above
# already cover it.
if tempfile.NamedTemporaryFile is not tempfile.TemporaryFile:
    test_classes.append(test_TemporaryFile)
# Helper for test_del_on_shutdown
class NulledModules:
    """Context manager that temporarily replaces every attribute of the
    given modules with None and restores the originals on exit.

    Mimics the way module globals are nulled out during interpreter
    shutdown (used by test_del_on_shutdown).
    """
    def __init__(self, *modules):
        # Keep the live __dict__ of each module plus a snapshot of its
        # current contents so __exit__ can restore everything.
        self.refs = [mod.__dict__ for mod in modules]
        self.contents = [ref.copy() for ref in self.refs]
    def __enter__(self):
        # Null every existing attribute.  The key set never changes, so
        # mutating the dict while iterating it is safe here.
        for ref in self.refs:
            for name in ref:
                ref[name] = None
    def __exit__(self, *exc_info):
        # Restore each module dictionary from its snapshot.
        for ref, snapshot in zip(self.refs, self.contents):
            ref.clear()
            ref.update(snapshot)
class test_TemporaryDirectory(TC):
    """Test TemporaryDirectory()."""
    def do_create(self, dir=None, pre="", suf="", recurse=1):
        # Create a TemporaryDirectory, optionally populated with one level
        # of nested temp directory plus a test.txt file so cleanup has real
        # content to remove.
        if dir is None:
            dir = tempfile.gettempdir()
        try:
            tmp = tempfile.TemporaryDirectory(dir=dir, prefix=pre, suffix=suf)
        except:
            self.failOnException("TemporaryDirectory")
        self.nameCheck(tmp.name, dir, pre, suf)
        # Create a subdirectory and some files
        if recurse:
            self.do_create(tmp.name, pre, suf, recurse-1)
        with open(os.path.join(tmp.name, "test.txt"), "wb") as f:
            f.write(b"Hello world!")
        return tmp
    def test_mkdtemp_failure(self):
        # Check no additional exception if mkdtemp fails
        # Previously would raise AttributeError instead
        # (noted as part of Issue #10188)
        # Exiting the with-block removes the directory, leaving a path that
        # is guaranteed not to exist.
        with tempfile.TemporaryDirectory() as nonexistent:
            pass
        with self.assertRaises(os.error):
            tempfile.TemporaryDirectory(dir=nonexistent)
    def test_explicit_cleanup(self):
        # A TemporaryDirectory is deleted when cleaned up
        dir = tempfile.mkdtemp()
        try:
            d = self.do_create(dir=dir)
            self.assertTrue(os.path.exists(d.name),
                            "TemporaryDirectory %s does not exist" % d.name)
            d.cleanup()
            self.assertFalse(os.path.exists(d.name),
                        "TemporaryDirectory %s exists after cleanup" % d.name)
        finally:
            os.rmdir(dir)
    @support.skip_unless_symlink
    def test_cleanup_with_symlink_to_a_directory(self):
        # cleanup() should not follow symlinks to directories (issue #12464)
        d1 = self.do_create()
        d2 = self.do_create()
        # Symlink d1/foo -> d2
        os.symlink(d2.name, os.path.join(d1.name, "foo"))
        # This call to cleanup() should not follow the "foo" symlink
        d1.cleanup()
        self.assertFalse(os.path.exists(d1.name),
                         "TemporaryDirectory %s exists after cleanup" % d1.name)
        self.assertTrue(os.path.exists(d2.name),
                        "Directory pointed to by a symlink was deleted")
        self.assertEqual(os.listdir(d2.name), ['test.txt'],
                         "Contents of the directory pointed to by a symlink "
                         "were deleted")
        d2.cleanup()
    @support.cpython_only
    def test_del_on_collection(self):
        # A TemporaryDirectory is deleted when garbage collected
        # (CPython-only: relies on refcounting collecting immediately).
        dir = tempfile.mkdtemp()
        try:
            d = self.do_create(dir=dir)
            name = d.name
            del d # Rely on refcounting to invoke __del__
            self.assertFalse(os.path.exists(name),
                        "TemporaryDirectory %s exists after __del__" % name)
        finally:
            os.rmdir(dir)
    @unittest.expectedFailure # See issue #10188
    def test_del_on_shutdown(self):
        # A TemporaryDirectory may be cleaned up during shutdown
        # Make sure it works with the relevant modules nulled out
        with self.do_create() as dir:
            d = self.do_create(dir=dir)
            # Mimic the nulling out of modules that
            # occurs during system shutdown
            modules = [os, os.path]
            if has_stat:
                modules.append(stat)
            # Currently broken, so suppress the warning
            # that is otherwise emitted on stdout
            with support.captured_stderr() as err:
                with NulledModules(*modules):
                    d.cleanup()
            # Currently broken, so stop spurious exception by
            # indicating the object has already been closed
            d._closed = True
            # And this assert will fail, as expected by the
            # unittest decorator...
            self.assertFalse(os.path.exists(d.name),
                        "TemporaryDirectory %s exists after cleanup" % d.name)
    def test_warnings_on_cleanup(self):
        # Two kinds of warning on shutdown
        # Issue 10888: may write to stderr if modules are nulled out
        # ResourceWarning will be triggered by __del__
        with self.do_create() as dir:
            if os.sep != '\\':
                # Embed a backslash in order to make sure string escaping
                # in the displayed error message is dealt with correctly
                suffix = '\\check_backslash_handling'
            else:
                suffix = ''
            d = self.do_create(dir=dir, suf=suffix)
            #Check for the Issue 10888 message
            modules = [os, os.path]
            if has_stat:
                modules.append(stat)
            with support.captured_stderr() as err:
                with NulledModules(*modules):
                    d.cleanup()
            # Collapse doubled backslashes so the path comparison below
            # works regardless of how the message was escaped.
            message = err.getvalue().replace('\\\\', '\\')
            self.assertIn("while cleaning up", message)
            self.assertIn(d.name, message)
            # Check for the resource warning
            with support.check_warnings(('Implicitly', ResourceWarning), quiet=False):
                warnings.filterwarnings("always", category=ResourceWarning)
                d.__del__()
            self.assertFalse(os.path.exists(d.name),
                        "TemporaryDirectory %s exists after __del__" % d.name)
    def test_multiple_close(self):
        # Can be cleaned-up many times without error
        d = self.do_create()
        d.cleanup()
        try:
            d.cleanup()
            d.cleanup()
        except:
            self.failOnException("cleanup")
    def test_context_manager(self):
        # Can be used as a context manager
        d = self.do_create()
        with d as name:
            self.assertTrue(os.path.exists(name))
            self.assertEqual(name, d.name)
        self.assertFalse(os.path.exists(name))
test_classes.append(test_TemporaryDirectory)
def test_main():
    """Run every TestCase registered in test_classes."""
    support.run_unittest(*test_classes)
if __name__ == "__main__":
    test_main()
|
unknown
|
codeparrot/codeparrot-clean
| ||
def main(request, response):
    """Serve an HTML page asserting that a ``font-src *`` CSP policy permits
    loading a font from another origin.

    Reads the wptserve host/port configuration from config.json, sets the
    Content-Security-Policy header (plus the legacy X- prefixed variants)
    and returns the test document with the cross-origin font URL
    substituted into its @font-face rule.
    """
    import simplejson as json
    # Use open() in a context manager instead of the Python-2-only file()
    # builtin: the handle is closed deterministically instead of leaking
    # until garbage collection, and open() works on Python 2 and 3 alike.
    with open('config.json') as config_file:
        source = config_file.read()
    s = json.JSONDecoder().decode(source)
    # Second configured HTTP port -> a different origin from the test page.
    url1 = "http://" + s['host'] + ":" + str(s['ports']['http'][1])
    response.headers.set("Content-Security-Policy", "font-src *")
    response.headers.set("X-Content-Security-Policy", "font-src *")
    response.headers.set("X-WebKit-CSP", "font-src *")
    return """<!DOCTYPE html>
<!--
Copyright (c) 2013 Intel Corporation.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
* Redistributions of works must retain the original copyright notice, this list
  of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the original copyright notice,
  this list of conditions and the following disclaimer in the documentation
  and/or other materials provided with the distribution.
* Neither the name of Intel Corporation nor the names of its contributors
  may be used to endorse or promote products derived from this work without
  specific prior written permission.
THIS SOFTWARE IS PROVIDED BY INTEL CORPORATION "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL INTEL CORPORATION BE LIABLE FOR ANY DIRECT,
INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
Authors:
        Hao, Yunfei <yunfeix.hao@intel.com>
-->
<html>
  <head>
    <title>CSP Test: csp_font-src_asterisk_allowed_ext</title>
    <link rel="author" title="Intel" href="http://www.intel.com"/>
    <link rel="help" href="http://www.w3.org/TR/2012/CR-CSP-20121115/#font-src"/>
    <meta name="flags" content=""/>
    <meta name="assert" content="font-src *"/>
    <meta charset="utf-8"/>
    <style>
      @font-face {
        font-family: Canvas;
        src: url('""" + url1 + """/tests/csp/support/w3c/CanvasTest.ttf');
      }
      #test {
        font-family: Canvas;
      }
    </style>
  </head>
  <body>
    <p>Test passes if the two lines are different in font</p>
    <div id="test">1234 ABCD</div>
    <div>1234 ABCD</div>
  </body>
</html> """
|
unknown
|
codeparrot/codeparrot-clean
| ||
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
An implementation of
U{Web Resource Gateway Interface<http://www.python.org/dev/peps/pep-0333/>}.
"""
__metaclass__ = type
from sys import exc_info
from zope.interface import implements
from twisted.python.log import msg, err
from twisted.python.failure import Failure
from twisted.web.resource import IResource
from twisted.web.server import NOT_DONE_YET
from twisted.web.http import INTERNAL_SERVER_ERROR
class _ErrorStream:
    """
    Write-only, file-like object handed to WSGI applications as the value
    of C{environ['wsgi.errors']}.

    Every write is forwarded straight to the
    L{logging<twisted.python.log>} system as an error event tagged with the
    C{'wsgi'} system name; nothing is ever buffered.  In the future, it may
    be desirable to expose more information in the events it logs, such as
    the application object which generated the message.
    """
    def write(self, bytes):
        """
        Forward the given bytes to the logging system as a C{'wsgi'} error
        event.

        This is called in a WSGI application thread, not the I/O thread.
        """
        msg(bytes, system='wsgi', isError=True)
    def writelines(self, iovec):
        """
        Concatenate the given lines and hand the result to L{write} so they
        are logged as a single event.

        This is called in a WSGI application thread, not the I/O thread.
        @param iovec: A C{list} of C{'\\n'}-terminated C{str} which will be
            logged.
        """
        joined = ''.join(iovec)
        self.write(joined)
    def flush(self):
        """
        A no-op: nothing is buffered.  PEP 333 requires C{flush} to exist,
        though.

        This is called in a WSGI application thread, not the I/O thread.
        """
class _InputStream:
"""
File-like object instances of which are used as the value for the
C{'wsgi.input'} key in the C{environ} dictionary passed to the application
object.
This only exists to make the handling of C{readline(-1)} consistent across
different possible underlying file-like object implementations. The other
supported methods pass through directly to the wrapped object.
"""
def __init__(self, input):
"""
Initialize the instance.
This is called in the I/O thread, not a WSGI application thread.
"""
self._wrapped = input
def read(self, size=None):
"""
Pass through to the underlying C{read}.
This is called in a WSGI application thread, not the I/O thread.
"""
# Avoid passing None because cStringIO and file don't like it.
if size is None:
return self._wrapped.read()
return self._wrapped.read(size)
def readline(self, size=None):
"""
Pass through to the underlying C{readline}, with a size of C{-1} replaced
with a size of C{None}.
This is called in a WSGI application thread, not the I/O thread.
"""
# Check for -1 because StringIO doesn't handle it correctly. Check for
# None because files and tempfiles don't accept that.
if size == -1 or size is None:
return self._wrapped.readline()
return self._wrapped.readline(size)
def readlines(self, size=None):
"""
Pass through to the underlying C{readlines}.
This is called in a WSGI application thread, not the I/O thread.
"""
# Avoid passing None because cStringIO and file don't like it.
if size is None:
return self._wrapped.readlines()
return self._wrapped.readlines(size)
def __iter__(self):
"""
Pass through to the underlying C{__iter__}.
This is called in a WSGI application thread, not the I/O thread.
"""
return iter(self._wrapped)
class _WSGIResponse:
    """
    Helper for L{WSGIResource} which drives the WSGI application using a
    threadpool and hooks it up to the L{Request}.
    @ivar started: A C{bool} indicating whether or not the response status and
        headers have been written to the request yet.  This may only be read
        or written in the WSGI application thread.
    @ivar reactor: An L{IReactorThreads} provider which is used to call methods
        on the request in the I/O thread.
    @ivar threadpool: A L{ThreadPool} which is used to call the WSGI
        application object in a non-I/O thread.
    @ivar application: The WSGI application object.
    @ivar request: The L{Request} upon which the WSGI environment is based and
        to which the application's output will be sent.
    @ivar environ: The WSGI environment C{dict}.
    @ivar status: The HTTP response status C{str} supplied to the WSGI
        I{start_response} callable by the application.
    @ivar headers: A list of HTTP response headers supplied to the WSGI
        I{start_response} callable by the application.
    @ivar _requestFinished: A flag which indicates whether it is possible to
        generate more response data or not.  This is C{False} until
        L{Request.notifyFinish} tells us the request is done, then C{True}.
    """
    _requestFinished = False
    def __init__(self, reactor, threadpool, application, request):
        # Build the complete WSGI environ from the Twisted request.  Runs in
        # the I/O thread.
        self.started = False
        self.reactor = reactor
        self.threadpool = threadpool
        self.application = application
        self.request = request
        # Flip _requestFinished when the client disconnects or the response
        # completes, so the iteration loop in run() can stop early.
        self.request.notifyFinish().addBoth(self._finished)
        # SCRIPT_NAME is the already-traversed path prefix; PATH_INFO is the
        # remainder handed to the WSGI application.
        if request.prepath:
            scriptName = '/' + '/'.join(request.prepath)
        else:
            scriptName = ''
        if request.postpath:
            pathInfo = '/' + '/'.join(request.postpath)
        else:
            pathInfo = ''
        parts = request.uri.split('?', 1)
        if len(parts) == 1:
            queryString = ''
        else:
            queryString = parts[1]
        # CONTENT_TYPE / CONTENT_LENGTH fall back to '' when the client sent
        # no such header.
        self.environ = {
            'REQUEST_METHOD': request.method,
            'REMOTE_ADDR': request.getClientIP(),
            'SCRIPT_NAME': scriptName,
            'PATH_INFO': pathInfo,
            'QUERY_STRING': queryString,
            'CONTENT_TYPE': request.getHeader('content-type') or '',
            'CONTENT_LENGTH': request.getHeader('content-length') or '',
            'SERVER_NAME': request.getRequestHostname(),
            'SERVER_PORT': str(request.getHost().port),
            'SERVER_PROTOCOL': request.clientproto}
        # The application object is entirely in control of response headers;
        # disable the default Content-Type value normally provided by
        # twisted.web.server.Request.
        self.request.defaultContentType = None
        # Expose each request header under the CGI-style HTTP_* name, with
        # '-' mapped to '_' and multiple values comma-joined.
        for name, values in request.requestHeaders.getAllRawHeaders():
            name = 'HTTP_' + name.upper().replace('-', '_')
            # It might be preferable for http.HTTPChannel to clear out
            # newlines.
            self.environ[name] = ','.join([
                    v.replace('\n', ' ') for v in values])
        self.environ.update({
                'wsgi.version': (1, 0),
                'wsgi.url_scheme': request.isSecure() and 'https' or 'http',
                'wsgi.run_once': False,
                'wsgi.multithread': True,
                'wsgi.multiprocess': False,
                'wsgi.errors': _ErrorStream(),
                # Attend: request.content was owned by the I/O thread up until
                # this point.  By wrapping it and putting the result into the
                # environment dictionary, it is effectively being given to
                # another thread.  This means that whatever it is, it has to be
                # safe to access it from two different threads.  The access
                # *should* all be serialized (first the I/O thread writes to
                # it, then the WSGI thread reads from it, then the I/O thread
                # closes it).  However, since the request is made available to
                # arbitrary application code during resource traversal, it's
                # possible that some other code might decide to use it in the
                # I/O thread concurrently with its use in the WSGI thread.
                # More likely than not, this will break.  This seems like an
                # unlikely possibility to me, but if it is to be allowed,
                # something here needs to change. -exarkun
                'wsgi.input': _InputStream(request.content)})
    def _finished(self, ignored):
        """
        Record the end of the response generation for the request being
        serviced.
        """
        self._requestFinished = True
    def startResponse(self, status, headers, excInfo=None):
        """
        The WSGI I{start_response} callable.  The given values are saved until
        they are needed to generate the response.
        This will be called in a non-I/O thread.
        """
        # Per PEP 333: if the application passes exc_info after the headers
        # have already been sent, re-raise the original exception.  This is
        # Python 2 three-argument raise syntax (type, value, traceback).
        if self.started and excInfo is not None:
            raise excInfo[0], excInfo[1], excInfo[2]
        self.status = status
        self.headers = headers
        return self.write
    def write(self, bytes):
        """
        The WSGI I{write} callable returned by the I{start_response} callable.
        The given bytes will be written to the response body, possibly flushing
        the status and headers first.
        This will be called in a non-I/O thread.
        """
        # Capture the pre-call value of self.started so the I/O thread knows
        # whether this particular write must flush the headers first; then
        # mark the response started for subsequent writes.
        def wsgiWrite(started):
            if not started:
                self._sendResponseHeaders()
            self.request.write(bytes)
        self.reactor.callFromThread(wsgiWrite, self.started)
        self.started = True
    def _sendResponseHeaders(self):
        """
        Set the response code and response headers on the request object, but
        do not flush them.  The caller is responsible for doing a write in
        order for anything to actually be written out in response to the
        request.
        This must be called in the I/O thread.
        """
        code, message = self.status.split(None, 1)
        code = int(code)
        self.request.setResponseCode(code, message)
        for name, value in self.headers:
            # Don't allow the application to control these required headers.
            if name.lower() not in ('server', 'date'):
                self.request.responseHeaders.addRawHeader(name, value)
    def start(self):
        """
        Start the WSGI application in the threadpool.
        This must be called in the I/O thread.
        """
        self.threadpool.callInThread(self.run)
    def run(self):
        """
        Call the WSGI application object, iterate it, and handle its output.
        This must be called in a non-I/O thread (ie, a WSGI application
        thread).
        """
        try:
            appIterator = self.application(self.environ, self.startResponse)
            for elem in appIterator:
                # PEP 333: empty chunks must not trigger a (header-flushing)
                # write.
                if elem:
                    self.write(elem)
                if self._requestFinished:
                    break
            close = getattr(appIterator, 'close', None)
            if close is not None:
                close()
        except:
            def wsgiError(started, type, value, traceback):
                err(Failure(value, type, traceback), "WSGI application error")
                # If headers already went out we can only abort the
                # connection; otherwise report a proper 500.
                if started:
                    self.request.transport.loseConnection()
                else:
                    self.request.setResponseCode(INTERNAL_SERVER_ERROR)
                    self.request.finish()
            self.reactor.callFromThread(wsgiError, self.started, *exc_info())
        else:
            def wsgiFinish(started):
                if not self._requestFinished:
                    # An application that produced no body still needs its
                    # status line and headers sent before finishing.
                    if not started:
                        self._sendResponseHeaders()
                    self.request.finish()
            self.reactor.callFromThread(wsgiFinish, self.started)
        self.started = True
class WSGIResource:
    """
    An L{IResource} implementation which delegates responsibility for all
    resources hierarchically inferior to it to a WSGI application.
    @ivar _reactor: An L{IReactorThreads} provider which will be passed on to
        L{_WSGIResponse} to schedule calls in the I/O thread.
    @ivar _threadpool: A L{ThreadPool} which will be passed on to
        L{_WSGIResponse} to run the WSGI application object.
    @ivar _application: The WSGI application object.
    """
    # Python 2 / zope.interface class-advice style interface declaration.
    implements(IResource)
    # Further resource segments are left up to the WSGI application object to
    # handle.
    isLeaf = True
    def __init__(self, reactor, threadpool, application):
        self._reactor = reactor
        self._threadpool = threadpool
        self._application = application
    def render(self, request):
        """
        Turn the request into the appropriate C{environ} C{dict} suitable to be
        passed to the WSGI application object and then pass it on.
        The WSGI application object is given almost complete control of the
        rendering process.  C{NOT_DONE_YET} will always be returned in order
        and response completion will be dictated by the application object, as
        will the status, headers, and the response body.
        """
        response = _WSGIResponse(
            self._reactor, self._threadpool, self._application, request)
        response.start()
        return NOT_DONE_YET
    def getChildWithDefault(self, name, request):
        """
        Reject attempts to retrieve a child resource.  All path segments beyond
        the one which refers to this resource are handled by the WSGI
        application object.
        """
        raise RuntimeError("Cannot get IResource children from WSGIResource")
    def putChild(self, path, child):
        """
        Reject attempts to add a child resource to this resource.  The WSGI
        application object handles all path segments beneath this resource, so
        L{IResource} children can never be found.
        """
        raise RuntimeError("Cannot put IResource children under WSGIResource")
__all__ = ['WSGIResource']
|
unknown
|
codeparrot/codeparrot-clean
| ||
# From EderSantana's https://github.com/EderSantana/seya repository
import numpy as np
import theano
import theano.tensor as T
import keras.backend as K
from keras.layers.core import Layer
floatX = theano.config.floatX
class SpatialTransformer(Layer):
"""Spatial Transformer Layer
Implements a spatial transformer layer as described in [1]_.
Borrowed from [2]_:
downsample_fator : float
A value of 1 will keep the orignal size of the image.
Values larger than 1 will down sample the image. Values below 1 will
upsample the image.
example image: height= 100, width = 200
downsample_factor = 2
output image will then be 50, 100
References
----------
.. [1] Spatial Transformer Networks
Max Jaderberg, Karen Simonyan, Andrew Zisserman, Koray Kavukcuoglu
Submitted on 5 Jun 2015
.. [2] https://github.com/skaae/transformer_network/blob/master/transformerlayer.py
"""
def __init__(self,
localization_net,
downsample_factor=1,
return_theta=False,
**kwargs):
self.downsample_factor = downsample_factor
self.locnet = localization_net
self.return_theta = return_theta
super(SpatialTransformer, self).__init__(**kwargs)
def build(self, input_shape):
self.locnet.build(input_shape)
self.trainable_weights = self.locnet.trainable_weights
self.regularizers = self.locnet.regularizers
self.constraints = self.locnet.constraints
def get_output_shape_for(self, input_shape):
return (None, int(input_shape[1]),
int(input_shape[2] / self.downsample_factor),
int(input_shape[3] / self.downsample_factor))
def call(self, X, mask=None):
theta = self.locnet.call(X)
theta = theta.reshape((X.shape[0], 2, 3))
output = self._transform(theta, X, self.downsample_factor)
if self.return_theta:
return theta.reshape((X.shape[0], 6))
else:
return output
@staticmethod
def _repeat(x, n_repeats):
rep = T.ones((n_repeats,), dtype='int32').dimshuffle('x', 0)
x = T.dot(x.reshape((-1, 1)), rep)
return x.flatten()
@staticmethod
def _interpolate(im, x, y, downsample_factor):
# constants
num_batch, height, width, channels = im.shape
height_f = T.cast(height, floatX)
width_f = T.cast(width, floatX)
out_height = T.cast(height_f // downsample_factor, 'int64')
out_width = T.cast(width_f // downsample_factor, 'int64')
zero = T.zeros([], dtype='int64')
max_y = T.cast(im.shape[1] - 1, 'int64')
max_x = T.cast(im.shape[2] - 1, 'int64')
# scale indices from [-1, 1] to [0, width/height]
x = (x + 1.0)*(width_f) / 2.0
y = (y + 1.0)*(height_f) / 2.0
# do sampling
x0 = T.cast(T.floor(x), 'int64')
x1 = x0 + 1
y0 = T.cast(T.floor(y), 'int64')
y1 = y0 + 1
x0 = T.clip(x0, zero, max_x)
x1 = T.clip(x1, zero, max_x)
y0 = T.clip(y0, zero, max_y)
y1 = T.clip(y1, zero, max_y)
dim2 = width
dim1 = width*height
base = SpatialTransformer._repeat(
T.arange(num_batch, dtype='int32')*dim1, out_height*out_width)
base_y0 = base + y0*dim2
base_y1 = base + y1*dim2
idx_a = base_y0 + x0
idx_b = base_y1 + x0
idx_c = base_y0 + x1
idx_d = base_y1 + x1
# use indices to lookup pixels in the flat
# image and restore channels dim
im_flat = im.reshape((-1, channels))
Ia = im_flat[idx_a]
Ib = im_flat[idx_b]
Ic = im_flat[idx_c]
Id = im_flat[idx_d]
# and finanly calculate interpolated values
x0_f = T.cast(x0, floatX)
x1_f = T.cast(x1, floatX)
y0_f = T.cast(y0, floatX)
y1_f = T.cast(y1, floatX)
wa = ((x1_f-x) * (y1_f-y)).dimshuffle(0, 'x')
wb = ((x1_f-x) * (y-y0_f)).dimshuffle(0, 'x')
wc = ((x-x0_f) * (y1_f-y)).dimshuffle(0, 'x')
wd = ((x-x0_f) * (y-y0_f)).dimshuffle(0, 'x')
output = T.sum([wa*Ia, wb*Ib, wc*Ic, wd*Id], axis=0)
return output
    @staticmethod
    def _linspace(start, stop, num):
        """Symbolic (Theano) equivalent of ``np.linspace(start, stop, num)``."""
        # produces results identical to:
        # np.linspace(start, stop, num)
        start = T.cast(start, floatX)
        stop = T.cast(stop, floatX)
        num = T.cast(num, floatX)
        step = (stop-start)/(num-1)
        return T.arange(num, dtype=floatX)*step+start
    @staticmethod
    def _meshgrid(height, width):
        """Return a (3, height*width) sampling grid in homogeneous coords."""
        # This should be equivalent to:
        # x_t, y_t = np.meshgrid(np.linspace(-1, 1, width),
        #                        np.linspace(-1, 1, height))
        # ones = np.ones(np.prod(x_t.shape))
        # grid = np.vstack([x_t.flatten(), y_t.flatten(), ones])
        x_t = T.dot(T.ones((height, 1)),
                    SpatialTransformer._linspace(-1.0, 1.0, width).dimshuffle('x', 0))
        y_t = T.dot(SpatialTransformer._linspace(-1.0, 1.0, height).dimshuffle(0, 'x'),
                    T.ones((1, width)))
        x_t_flat = x_t.reshape((1, -1))
        y_t_flat = y_t.reshape((1, -1))
        # homogeneous third row of ones lets a 2x3/3x3 matrix apply translation
        ones = T.ones_like(x_t_flat)
        grid = T.concatenate([x_t_flat, y_t_flat, ones], axis=0)
        return grid
    @staticmethod
    def _transform(theta, input, downsample_factor):
        """Warp ``input`` by per-sample 2x3 affine matrices ``theta``.

        input: (num_batch, num_channels, height, width) tensor.
        Returns the warped batch in the same channels-first layout, with
        spatial dims divided by ``downsample_factor``.
        """
        num_batch, num_channels, height, width = input.shape
        theta = theta.reshape((num_batch, 2, 3))  # T.reshape(theta, (-1, 2, 3))
        # grid of (x_t, y_t, 1), eq (1) in ref [1]
        height_f = T.cast(height, floatX)
        width_f = T.cast(width, floatX)
        out_height = T.cast(height_f // downsample_factor, 'int64')
        out_width = T.cast(width_f // downsample_factor, 'int64')
        grid = SpatialTransformer._meshgrid(out_height, out_width)
        # Transform A x (x_t, y_t, 1)^T -> (x_s, y_s)
        T_g = T.dot(theta, grid)
        x_s, y_s = T_g[:, 0], T_g[:, 1]
        x_s_flat = x_s.flatten()
        y_s_flat = y_s.flatten()
        # dimshuffle input to (bs, height, width, channels)
        input_dim = input.dimshuffle(0, 2, 3, 1)
        input_transformed = SpatialTransformer._interpolate(
            input_dim, x_s_flat, y_s_flat,
            downsample_factor)
        output = T.reshape(input_transformed,
                           (num_batch, out_height, out_width, num_channels))
        output = output.dimshuffle(0, 3, 1, 2)
        return output
class Homography(Layer):
    """Homography layer.

    Applies one learned affine warp to every sample in the batch: the 2x3
    parameter matrix ``W`` is a layer weight (elementwise-masked by ``M``)
    rather than being predicted by a localization network.
    """
    def __init__(self,
                 downsample_factor=1,
                 **kwargs):
        self.downsample_factor = downsample_factor
        super(Homography, self).__init__(**kwargs)

    def build(self, input_shape):
        # Initialize near-identity: 0.9 on the diagonal of the 2x2 part.
        W = np.zeros((2, 3), dtype='float32')
        W[0, 0] = .9
        W[1, 1] = .9
        self.W = K.variable(W, name='{}_W'.format(self.name))
        # M is an elementwise mask over W; currently all ones (no masking).
        M = np.ones((2, 3), dtype='float32')
        # M[0, 0] = 8.
        # M[1, 1] = 5.
        # M[1, 0] = 0.
        # M[0, 1] = 0.
        # M[0, 2] = 0.
        # M[1, 2] = 1.
        self.M = K.variable(M, name="{}_mask".format(self.name))
        # Only W is trained; the mask M stays fixed.
        self.trainable_weights = [self.W]

    def call(self, X, mask=None):
        theta = self.W * self.M
        # Broadcast the single learned transform over the whole batch.
        theta = T.repeat(theta.dimshuffle('x', 0, 1), X.shape[0], axis=0)
        output = SpatialTransformer._transform(theta, X, self.downsample_factor)
        return output

    def output_shape_for(self, input_shape):
        # NOTE(review): both spatial dims are derived from input_shape[2];
        # this is only correct for square inputs — confirm whether
        # input_shape[3] was intended for the width.
        return (None, input_shape[1],
                int(input_shape[2] / self.downsample_factor),
                int(input_shape[2] / self.downsample_factor))
class Cropper(Layer):
    """Learnable crop layer.

    Learns a scale/translate-only affine transform (no rotation or shear):
    ``W`` holds (sx, sy, tx, ty), initialized to (init_scale, init_scale, 0, 0).
    """
    def __init__(self,
                 downsample_factor=1,
                 init_scale=1.,
                 ratio=1.,
                 **kwargs):
        self.downsample_factor = downsample_factor
        self.init_scale = init_scale
        # NOTE(review): `ratio` is stored but never used in this class.
        self.ratio = ratio
        super(Cropper, self).__init__(**kwargs)

    def build(self, input_shape):
        # W = [sx, sy, tx, ty]; scales start at init_scale, offsets at 0.
        W = np.zeros((4,), dtype='float32')
        W[0] = self.init_scale
        W[1] = self.init_scale
        self.W = K.variable(W, name='{}_W'.format(self.name))
        self.trainable_weights = [self.W]

    def call(self, X, mask=None):
        # Assemble the 2x3 affine matrix [[sx, 0, tx], [0, sy, ty]].
        sx = self.W[0:1]
        sy = self.W[1:2]
        tx = self.W[2:3]
        ty = self.W[3:]
        zero = K.zeros((1,))
        first_row = K.reshape(K.concatenate([sx, zero, tx]), (1, 3))
        second_row = K.reshape(K.concatenate([zero, sy, ty]), (1, 3))
        theta = K.concatenate([first_row, second_row], axis=0)
        # Broadcast the single learned transform over the whole batch.
        theta = T.repeat(theta.dimshuffle('x', 0, 1), X.shape[0], axis=0)
        output = SpatialTransformer._transform(theta, X, self.downsample_factor)
        return output

    def output_shape_for(self, input_shape):
        # NOTE(review): both spatial dims are derived from input_shape[2];
        # confirm input_shape[3] was not intended for the width.
        return (None, input_shape[1],
                int(input_shape[2] / self.downsample_factor),
                int(input_shape[2] / self.downsample_factor))
class DifferentiableRAM(Layer):
    """DifferentiableRAM uses Gaussian attention mechanism from DRAW [1]_

    Reads an ``out_grid``-sized glimpse from the input through per-sample
    Gaussian filterbanks whose parameters are predicted by
    ``localization_net``.

    out_grid: list (height, width)

    References
    ----------
    .. [1] presumably "DRAW: A Recurrent Neural Network For Image
       Generation" (Gregor et al., 2015) — TODO confirm citation.
    """
    def __init__(self,
                 localization_net,
                 out_grid,
                 return_theta=False,
                 **kwargs):
        self.out_grid = out_grid
        self.locnet = localization_net
        self.return_theta = return_theta
        super(DifferentiableRAM, self).__init__(**kwargs)

    def build(self, input_shape):
        # All trainable state comes from the localization net, so this layer
        # adopts its weights, regularizers and constraints wholesale.
        self.locnet.build(input_shape)
        self.trainable_weights = self.locnet.trainable_weights
        self.regularizers = self.locnet.regularizers
        self.constraints = self.locnet.constraints
        self.width = input_shape[3]
        self.height = input_shape[2]

    def output_shape_for(self, input_shape):
        return (None, input_shape[1],
                int(self.out_grid[0]),
                int(self.out_grid[1]))

    def call(self, X, mask=None):
        # p holds 5 raw attention parameters per sample:
        # (gx, gy, log sigma^2, log delta, log gamma).
        p = self.locnet.call(X)
        gx, gy, sigma2, delta, gamma = self._get_attention_params(p)
        Fx, Fy = self._get_filterbank(gx, gy, sigma2, delta)
        output = self._read(X, gamma, Fx, Fy)
        if self.return_theta:
            return p
        else:
            return output

    def _get_attention_params(self, p):
        """Decode raw net outputs into DRAW-style attention parameters."""
        N = np.min(self.out_grid)
        # Grid centre mapped from [-1, 1].
        # NOTE(review): DRAW maps the centre into *image* coordinates
        # (width/height); scaling by out_grid here looks suspicious — confirm.
        gx = self.out_grid[0] * (p[:, 0]+1) / 2.
        gy = self.out_grid[1] * (p[:, 1]+1) / 2.
        # exp() keeps variance, stride and intensity strictly positive.
        sigma2 = T.exp(p[:, 2])
        delta = T.exp(p[:, 3]) * (max(self.width, self.height) - 1) / (N - 1.)
        gamma = T.exp(p[:, 4])
        return gx, gy, sigma2, delta, gamma

    def _get_filterbank(self, gx, gy, sigma2, delta):
        """Build the row-normalized Gaussian filterbanks Fx and Fy."""
        N = np.min(self.out_grid)
        small = 1e-4  # avoids division by zero when a filter row sums to ~0
        i1 = T.arange(self.out_grid[0]).astype("float32")
        i2 = T.arange(self.out_grid[1]).astype("float32")
        a = T.arange(self.width).astype("float32")
        b = T.arange(self.height).astype("float32")
        # Filter centres spread around (gx, gy) with stride delta.
        mx = gx[:, None] + delta[:, None] * (i1 - N/2. - .5)
        my = gy[:, None] + delta[:, None] * (i2 - N/2. - .5)
        Fx = T.exp(-(a - mx[:, :, None])**2 / 2. / sigma2[:, None, None])
        Fx /= (Fx.sum(axis=-1)[:, :, None] + small)
        Fy = K.exp(-(b - my[:, :, None])**2 / 2. / sigma2[:, None, None])
        Fy /= (Fy.sum(axis=-1)[:, :, None] + small)
        return Fx, Fy

    def _read(self, x, gamma, Fx, Fy):
        """Extract the glimpse: gamma * (Fy . x . Fx^T), applied per channel."""
        Fyx = (Fy[:, None, :, :, None] * x[:, :, None, :, :]).sum(axis=3)
        FxT = Fx.dimshuffle(0, 2, 1)
        FyxFx = (Fyx[:, :, :, :, None] * FxT[:, None, None, :, :]).sum(axis=3)
        return gamma[:, None, None, None] * FyxFx
class Translate(Layer):
    """Translation-only spatial transformer.

    Builds a per-sample affine matrix [[1, 0, tx], [0, 1, ty]] from two
    outputs of the localization net and warps the input with it.
    """
    def __init__(self,
                 localization_net,
                 downsample_factor=1,
                 scale=[1., 1.],
                 **kwargs):
        # NOTE(review): `scale` has a mutable default and the attribute is
        # never read below — confirm whether scaling was meant to be applied.
        self.downsample_factor = downsample_factor
        self.locnet = localization_net
        self.scale = scale
        # Hard-coded False, so the theta branch in call() is currently dead.
        self.return_theta = False
        super(Translate, self).__init__(**kwargs)

    def build(self, input_shape):
        self.locnet.build(input_shape)
        self.trainable_weights = self.locnet.trainable_weights
        self.regularizers = self.locnet.regularizers
        self.constraints = self.locnet.constraints

    def output_shape_for(self, input_shape):
        # NOTE(review): channel count is hard-coded to 3 although _transform
        # preserves the input's channels, and both spatial dims derive from
        # input_shape[2] — confirm for non-square / non-RGB inputs.
        return (None, 3,
                int(input_shape[2] / self.downsample_factor),
                int(input_shape[2] / self.downsample_factor))

    def call(self, X, mask=None):
        vals = self.locnet.call(X)
        # Only outputs 4 and 5 of the localization net are used (tx, ty).
        tx = vals[:, 4:5]
        ty = vals[:, 5:6]
        # sx = self.W[0:1]
        # sy = self.W[1:2]
        zero = K.zeros_like(tx)
        one = K.ones_like(tx)
        first_row = K.reshape(K.concatenate([one, zero, tx], axis=1), (-1, 1, 3))
        second_row = K.reshape(K.concatenate([zero, one, ty], axis=1), (-1, 1, 3))
        theta = K.concatenate([first_row, second_row], axis=1)
        theta = theta.reshape((X.shape[0], 2, 3))
        output = SpatialTransformer._transform(theta, X, self.downsample_factor)
        if self.return_theta:
            return theta.reshape((X.shape[0], 6))
        else:
            return output
class ProjectiveTransformer(Layer):
    """Projective Transformer Layer.

    Implements a spatial transformer layer as described in [1]_, but using
    the full 3x3 homography (with perspective divide) instead of the 2x3
    affine transform used by ``SpatialTransformer``.

    downsample_factor : float
        A value of 1 will keep the original size of the image.
        Values larger than 1 will down sample the image. Values below 1 will
        upsample the image.
        example image: height=100, width=200; downsample_factor=2 ->
        output image will then be 50, 100.

    References
    ----------
    .. [1] Spatial Transformer Networks
           Max Jaderberg, Karen Simonyan, Andrew Zisserman, Koray Kavukcuoglu
           Submitted on 5 Jun 2015
    .. [2] https://github.com/skaae/transformer_network/blob/master/transformerlayer.py
    """
    def __init__(self,
                 localization_net,
                 downsample_factor=1,
                 return_theta=False,
                 **kwargs):
        self.downsample_factor = downsample_factor
        self.locnet = localization_net
        self.return_theta = return_theta
        # BUG FIX: the original called super(SpatialTransformer, self),
        # which skips ProjectiveTransformer in the MRO and breaks
        # cooperative initialization for any subclass of this layer.
        super(ProjectiveTransformer, self).__init__(**kwargs)

    def build(self, input_shape):
        # Build the localization net and adopt its trainable state.
        self.locnet.build(input_shape)
        self.trainable_weights = self.locnet.trainable_weights
        self.regularizers = self.locnet.regularizers
        self.constraints = self.locnet.constraints

    def output_shape_for(self, input_shape):
        return (input_shape[0], input_shape[1],
                int(input_shape[2] / self.downsample_factor),
                int(input_shape[3] / self.downsample_factor))

    def call(self, X, mask=None):
        """Predict a 3x3 homography per sample and warp X with it.

        Returns the warped batch, or the flattened 9 homography
        parameters per sample when ``return_theta`` is set.
        """
        theta = self.locnet.call(X)
        theta = theta.reshape((X.shape[0], 3, 3))
        output = self._transform(theta, X, self.downsample_factor)
        if self.return_theta:
            return theta.reshape((X.shape[0], 9))
        else:
            return output

    @staticmethod
    def _transform(theta, input, downsample_factor):
        """Warp ``input`` by per-sample 3x3 homographies ``theta``."""
        num_batch, num_channels, height, width = input.shape
        theta = theta.reshape((num_batch, 3, 3))
        # grid of (x_t, y_t, 1), eq (1) in ref [1]
        height_f = T.cast(height, floatX)
        width_f = T.cast(width, floatX)
        out_height = T.cast(height_f // downsample_factor, 'int64')
        out_width = T.cast(width_f // downsample_factor, 'int64')
        grid = SpatialTransformer._meshgrid(out_height, out_width)
        # Transform A x (x_t, y_t, 1)^T -> (x_s / z_s, y_s / z_s).
        # The divide by z_s is the perspective step that distinguishes this
        # from the affine SpatialTransformer._transform.
        T_g = T.dot(theta, grid)
        x_s, y_s = T_g[:, 0] / T_g[:, 2], T_g[:, 1] / T_g[:, 2]
        x_s_flat = x_s.flatten()
        y_s_flat = y_s.flatten()
        # Sample in (batch, height, width, channels) layout, then restore
        # channels-first for the caller.
        input_dim = input.dimshuffle(0, 2, 3, 1)
        input_transformed = SpatialTransformer._interpolate(
            input_dim, x_s_flat, y_s_flat,
            downsample_factor)
        output = T.reshape(input_transformed,
                           (num_batch, out_height, out_width, num_channels))
        output = output.dimshuffle(0, 3, 1, 2)
        return output
|
unknown
|
codeparrot/codeparrot-clean
| ||
from sqlalchemy import (
Table, Column, INTEGER, String, Text, TIMESTAMP, DateTime, func)
from .basic import metadata
# login table: crawler account credentials; `enable` toggles an account
login_info = Table("login_info", metadata,
    Column("id", INTEGER, primary_key=True, autoincrement=True),
    Column("name", String(100), unique=True),
    Column("password", String(200)),
    Column("enable", INTEGER, default=1, server_default='1'),
)

# weibo user info: one row per crawled profile, keyed by the weibo uid
wbuser = Table("wbuser", metadata,
    Column("id", INTEGER, primary_key=True, autoincrement=True),
    Column("uid", String(20), unique=True),
    Column("name", String(200), default='', server_default=''),
    Column("gender", INTEGER, default=0, server_default='0'),
    Column("birthday", String(200), default='', server_default=''),
    Column("location", String(100), default='', server_default=''),
    Column("description", String(500), default='', server_default=''),
    Column("register_time", String(200), default='', server_default=''),
    Column("verify_type", INTEGER, default=0, server_default='0'),
    Column("verify_info", String(2500), default='', server_default=''),
    Column("follows_num", INTEGER, default=0, server_default='0'),
    Column("fans_num", INTEGER, default=0, server_default='0'),
    Column("wb_num", INTEGER, default=0, server_default='0'),
    Column("level", INTEGER, default=0, server_default='0'),
    Column("tags", String(500), default='', server_default=''),
    Column("work_info", String(500), default='', server_default=''),
    Column("contact_info", String(300), default='', server_default=''),
    Column("education_info", String(300), default='', server_default=''),
    Column("head_img", String(500), default='', server_default=''),
)

# seed ids for user crawling; the *_crawled flags track crawl progress
seed_ids = Table('seed_ids', metadata,
    Column("id", INTEGER, primary_key=True, autoincrement=True),
    Column("uid", String(20), unique=True),
    Column("is_crawled", INTEGER, default=0, server_default='0'),
    Column("other_crawled", INTEGER, default=0, server_default='0'),
    Column("home_crawled", INTEGER, default=0, server_default='0'),
)

# search keywords table; `enable` toggles a keyword
keywords = Table('keywords', metadata,
    Column("id", INTEGER, primary_key=True, autoincrement=True),
    Column("keyword", String(200), unique=True),
    Column("enable", INTEGER, default=1, server_default='1'),
)

# weibo info data: one row per post, keyed by weibo_id;
# the *_crawled flags mark which sub-resources have been fetched
weibo_data = Table('weibo_data', metadata,
    Column("id", INTEGER, primary_key=True, autoincrement=True),
    Column("weibo_id", String(200), unique=True),
    Column("weibo_cont", Text),
    Column("weibo_img", String(1000)),
    Column("weibo_img_path", String(1000), server_default=''),
    Column("weibo_video", String(1000)),
    Column("repost_num", INTEGER, default=0, server_default='0'),
    Column("comment_num", INTEGER, default=0, server_default='0'),
    Column("praise_num", INTEGER, default=0, server_default='0'),
    Column("uid", String(20)),
    Column("is_origin", INTEGER, default=1, server_default='1'),
    Column("device", String(200), default='', server_default=''),
    Column("weibo_url", String(300), default='', server_default=''),
    Column("create_time", String(200)),
    Column("comment_crawled", INTEGER, default=0, server_default='0'),
    Column("repost_crawled", INTEGER, default=0, server_default='0'),
    Column("dialogue_crawled", INTEGER, default=0, server_default='0'),
    Column("praise_crawled", INTEGER, default=0, server_default='0'),
)

# keywords and weibodata relationship (many-to-many join table)
keywords_wbdata = Table('keywords_wbdata', metadata,
    Column("id", INTEGER, primary_key=True, autoincrement=True),
    Column("keyword_id", INTEGER),
    Column("wb_id", String(200)),
)

# comment table: comments attached to a weibo post
weibo_comment = Table('weibo_comment', metadata,
    Column("id", INTEGER, primary_key=True, autoincrement=True),
    Column("comment_id", String(50), unique=True),
    Column("comment_cont", Text),
    Column("comment_screen_name", Text),
    Column("weibo_id", String(200)),
    Column("user_id", String(20)),
    Column("create_time", String(200)),
)

# praise table: likes attached to a weibo post
weibo_praise = Table('weibo_praise', metadata,
    Column("id", INTEGER, primary_key=True, autoincrement=True),
    Column("user_id", String(20)),
    Column("weibo_id", String(200)),
    Column("crawl_time", TIMESTAMP),
)

# repost table: reposts of a weibo post, with the repost chain info
weibo_repost = Table("weibo_repost", metadata,
    Column("id", INTEGER, primary_key=True, autoincrement=True),
    Column("user_id", String(20)),
    Column("user_name", String(200)),
    Column("weibo_id", String(200), unique=True),
    Column("parent_user_id", String(20)),
    Column("repost_time", String(200)),
    Column("repost_cont", Text),
    Column("weibo_url", String(200)),
    Column("parent_user_name", String(200)),
    Column("root_weibo_id", String(200)),
)

# relations about user and their fans and follows
user_relation = Table("user_relation", metadata,
    Column('id', INTEGER, primary_key=True, autoincrement=True),
    Column('user_id', String(20)),
    Column('follow_or_fans_id', String(20)),
    Column('type', INTEGER),  # 1 stands for fans, 2 stands for follows
    Column('from_where', String(60)),
    Column('crawl_time', DateTime(3))  # DATETIME(6) means save 6 digits milliseconds
    # time is stored in UTC
)

# dialogue table: comment conversation threads under a weibo post
weibo_dialogue = Table("weibo_dialogue", metadata,
    Column("id", INTEGER, primary_key=True, autoincrement=True),
    Column("dialogue_id", String(50), unique=True),
    Column("weibo_id", String(200)),
    Column("dialogue_cont", Text),
    Column("dialogue_rounds", INTEGER),
)

# public API of this module: every table object defined above
__all__ = ['login_info', 'wbuser', 'seed_ids', 'keywords', 'weibo_data', 'keywords_wbdata', 'weibo_comment',
           'weibo_repost', 'user_relation', 'weibo_dialogue', 'weibo_praise']
|
unknown
|
codeparrot/codeparrot-clean
| ||
---
body:
- type: markdown
attributes:
value: |
For NGINX troubleshooting/technical help, please visit our community forum instead of asking your questions here. We will politely redirect these types of questions to the forum.
- type: textarea
id: q-a
attributes:
label: What question do you have?
description: Please provide as much context as possible. Remember that only questions related to the NGINX codebase will be addressed on GitHub. For anything else, please visit our [community forum](https://community.nginx.org/).
value: |
I would like to know...
validations:
required: true
|
unknown
|
github
|
https://github.com/nginx/nginx
|
.github/DISCUSSION_TEMPLATE/q-a.yml
|
#include "perf_precomp.hpp"
namespace opencv_test
{
using namespace perf;

// Parameter tuple: (relative path of the test image, expected grid size).
typedef tuple<std::string, cv::Size> String_Size_t;
typedef perf::TestBaseWithParam<String_Size_t> String_Size;

// Benchmarks findCirclesGrid() on asymmetric circle-grid calibration
// images of several grid sizes.
PERF_TEST_P(String_Size, asymm_circles_grid, testing::Values(
    String_Size_t("cv/cameracalibration/asymmetric_circles/acircles1.png", Size(7,13)),
    String_Size_t("cv/cameracalibration/asymmetric_circles/acircles2.png", Size(7,13)),
    String_Size_t("cv/cameracalibration/asymmetric_circles/acircles3.png", Size(7,13)),
    String_Size_t("cv/cameracalibration/asymmetric_circles/acircles4.png", Size(5,5)),
    String_Size_t("cv/cameracalibration/asymmetric_circles/acircles5.png", Size(5,5)),
    String_Size_t("cv/cameracalibration/asymmetric_circles/acircles6.png", Size(5,5)),
    String_Size_t("cv/cameracalibration/asymmetric_circles/acircles7.png", Size(3,9)),
    String_Size_t("cv/cameracalibration/asymmetric_circles/acircles8.png", Size(3,9)),
    String_Size_t("cv/cameracalibration/asymmetric_circles/acircles9.png", Size(3,9))
    )
    )
{
    string filename = getDataPath(get<0>(GetParam()));
    Size gridSize = get<1>(GetParam());

    // Load the image and fail fast if the test-data file is missing.
    Mat frame = imread(filename);
    if (frame.empty())
        FAIL() << "Unable to load source image " << filename;

    vector<Point2f> ptvec;
    ptvec.resize(gridSize.area());

    // The detector works on a single-channel image.
    cvtColor(frame, frame, COLOR_BGR2GRAY);

    declare.in(frame).out(ptvec);

    // Time only the circle-grid detection itself.
    TEST_CYCLE() ASSERT_TRUE(findCirclesGrid(frame, gridSize, ptvec, CALIB_CB_CLUSTERING | CALIB_CB_ASYMMETRIC_GRID));

    // Detected centers may deviate by up to 2 pixels from the reference run.
    SANITY_CHECK(ptvec, 2);
}

} // namespace
|
cpp
|
github
|
https://github.com/opencv/opencv
|
modules/calib3d/perf/perf_cicrlesGrid.cpp
|
//! Error and Result module
// This is meant to be a glob import of the whole error module except for `Error`. Rustdoc can't yet
// correctly resolve the conflicting `Error` type defined in this module, so these re-exports are
// expanded manually.
//
// See <https://github.com/rust-lang/rust/issues/83375>
pub use actix_http::error::{ContentTypeError, DispatchError, HttpError, ParseError, PayloadError};
use derive_more::{Display, Error, From};
use serde_json::error::Error as JsonError;
use serde_urlencoded::{de::Error as FormDeError, ser::Error as FormError};
use url::ParseError as UrlParseError;
use crate::http::StatusCode;
#[allow(clippy::module_inception)]
mod error;
mod internal;
mod macros;
mod response_error;
pub(crate) use self::macros::{downcast_dyn, downcast_get_type_id};
pub use self::{error::Error, internal::*, response_error::ResponseError};
pub use crate::types::EitherExtractError;
/// A convenience [`Result`](std::result::Result) for Actix Web operations.
///
/// This type alias is generally used to avoid writing out `actix_http::Error` directly.
pub type Result<T, E = Error> = std::result::Result<T, E>;

/// An error representing a problem running a blocking task on a thread pool.
///
/// Carries no payload; its message is fixed by the `Display` derive below.
#[derive(Debug, Display, Error)]
#[display("Blocking thread pool is shut down unexpectedly")]
#[non_exhaustive]
pub struct BlockingError;

// Empty impl: relies on `ResponseError`'s default method implementations.
impl ResponseError for crate::error::BlockingError {}
/// Errors which can occur when attempting to generate resource uri.
#[derive(Debug, PartialEq, Eq, Display, Error, From)]
#[non_exhaustive]
pub enum UrlGenerationError {
    /// Resource not found.
    #[display("Resource not found")]
    ResourceNotFound,
    /// Not all URL parameters covered.
    #[display("Not all URL parameters covered")]
    NotEnoughElements,
    /// URL parse error.
    #[display("{}", _0)]
    ParseError(UrlParseError),
}

// Empty impl: relies on `ResponseError`'s default method implementations.
impl ResponseError for UrlGenerationError {}
/// A set of errors that can occur during parsing urlencoded payloads
#[derive(Debug, Display, Error, From)]
#[non_exhaustive]
pub enum UrlencodedError {
    /// Can not decode chunked transfer encoding.
    #[display("Can not decode chunked transfer encoding.")]
    Chunked,
    /// Payload size is larger than allowed. (default limit: 256kB).
    #[display(
        "URL encoded payload is larger ({} bytes) than allowed (limit: {} bytes).",
        size,
        limit
    )]
    Overflow { size: usize, limit: usize },
    /// Payload size is not known.
    // NOTE(review): the runtime display string below repeats the
    // "now known" typo; fixing it would change the emitted message.
    #[display("Payload size is now known.")]
    UnknownLength,
    /// Content type error.
    #[display("Content type error.")]
    ContentType,
    /// Parse error.
    #[display("Parse error: {}.", _0)]
    Parse(FormDeError),
    /// Encoding error.
    #[display("Encoding error.")]
    Encoding,
    /// Serialize error.
    #[display("Serialize error: {}.", _0)]
    Serialize(FormError),
    /// Payload error.
    #[display("Error that occur during reading payload: {}.", _0)]
    Payload(PayloadError),
}
impl ResponseError for UrlencodedError {
fn status_code(&self) -> StatusCode {
match self {
Self::Overflow { .. } => StatusCode::PAYLOAD_TOO_LARGE,
Self::UnknownLength => StatusCode::LENGTH_REQUIRED,
Self::ContentType => StatusCode::UNSUPPORTED_MEDIA_TYPE,
Self::Payload(err) => err.status_code(),
_ => StatusCode::BAD_REQUEST,
}
}
}
/// A set of errors that can occur during parsing json payloads
#[derive(Debug, Display, Error)]
#[non_exhaustive]
pub enum JsonPayloadError {
    /// Payload size is bigger than allowed & content length header set. (default: 2MB)
    #[display(
        "JSON payload ({} bytes) is larger than allowed (limit: {} bytes).",
        length,
        limit
    )]
    OverflowKnownLength { length: usize, limit: usize },
    /// Payload size is bigger than allowed but no content length header set. (default: 2MB)
    #[display("JSON payload has exceeded limit ({} bytes).", limit)]
    Overflow { limit: usize },
    /// Content type error
    #[display("Content type error")]
    ContentType,
    /// Deserialize error
    #[display("Json deserialize error: {}", _0)]
    Deserialize(JsonError),
    /// Serialize error
    #[display("Json serialize error: {}", _0)]
    Serialize(JsonError),
    /// Payload error
    #[display("Error that occur during reading payload: {}", _0)]
    Payload(PayloadError),
}
impl From<PayloadError> for JsonPayloadError {
fn from(err: PayloadError) -> Self {
Self::Payload(err)
}
}
impl ResponseError for JsonPayloadError {
    /// Map each JSON-payload failure onto an HTTP status code.
    fn status_code(&self) -> StatusCode {
        match self {
            // Both overflow variants mean the body was too large.
            Self::OverflowKnownLength { .. } | Self::Overflow { .. } => {
                StatusCode::PAYLOAD_TOO_LARGE
            }
            // Failing to serialize our own response is a server bug.
            Self::Serialize(_) => StatusCode::INTERNAL_SERVER_ERROR,
            // Payload failures already carry their own status.
            Self::Payload(err) => err.status_code(),
            _ => StatusCode::BAD_REQUEST,
        }
    }
}
/// A set of errors that can occur during parsing request paths
#[derive(Debug, Display, Error)]
#[non_exhaustive]
pub enum PathError {
    /// Deserialize error
    #[display("Path deserialize error: {}", _0)]
    Deserialize(serde::de::value::Error),
}

/// Return `BadRequest` for `PathError`
impl ResponseError for PathError {
    fn status_code(&self) -> StatusCode {
        // Any path-deserialization failure is a malformed client request.
        StatusCode::BAD_REQUEST
    }
}
/// A set of errors that can occur during parsing query strings.
#[derive(Debug, Display, Error, From)]
#[non_exhaustive]
pub enum QueryPayloadError {
    /// Query deserialize error.
    #[display("Query deserialize error: {}", _0)]
    Deserialize(serde::de::value::Error),
}

impl ResponseError for QueryPayloadError {
    fn status_code(&self) -> StatusCode {
        // Any query-deserialization failure is a malformed client request.
        StatusCode::BAD_REQUEST
    }
}
/// Error type returned when reading body as lines.
#[derive(Debug, Display, Error, From)]
#[non_exhaustive]
pub enum ReadlinesError {
    /// Encoding error while decoding the payload into lines.
    // (The previous doc comment here described the size limit and was a
    // copy-paste mistake; the display string identifies this variant.)
    #[display("Encoding error")]
    EncodingError,
    /// Payload error.
    #[display("Error that occur during reading payload: {}", _0)]
    Payload(PayloadError),
    /// Line limit exceeded.
    #[display("Line limit exceeded")]
    LimitOverflow,
    /// ContentType error.
    #[display("Content-type error")]
    ContentTypeError(ContentTypeError),
}

impl ResponseError for ReadlinesError {
    fn status_code(&self) -> StatusCode {
        match *self {
            // Exceeding the line limit maps to 413; everything else is 400.
            ReadlinesError::LimitOverflow => StatusCode::PAYLOAD_TOO_LARGE,
            _ => StatusCode::BAD_REQUEST,
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    // Each test pins the HTTP status produced by `error_response()` for
    // representative variants of the corresponding error enum.

    #[test]
    fn test_urlencoded_error() {
        let resp = UrlencodedError::Overflow { size: 0, limit: 0 }.error_response();
        assert_eq!(resp.status(), StatusCode::PAYLOAD_TOO_LARGE);
        let resp = UrlencodedError::UnknownLength.error_response();
        assert_eq!(resp.status(), StatusCode::LENGTH_REQUIRED);
        let resp = UrlencodedError::ContentType.error_response();
        assert_eq!(resp.status(), StatusCode::UNSUPPORTED_MEDIA_TYPE);
    }

    #[test]
    fn test_json_payload_error() {
        let resp = JsonPayloadError::OverflowKnownLength {
            length: 0,
            limit: 0,
        }
        .error_response();
        assert_eq!(resp.status(), StatusCode::PAYLOAD_TOO_LARGE);
        let resp = JsonPayloadError::Overflow { limit: 0 }.error_response();
        assert_eq!(resp.status(), StatusCode::PAYLOAD_TOO_LARGE);
        let resp = JsonPayloadError::ContentType.error_response();
        assert_eq!(resp.status(), StatusCode::BAD_REQUEST);
    }

    #[test]
    fn test_query_payload_error() {
        // A deliberately malformed query string yields a deserialize error.
        let resp = QueryPayloadError::Deserialize(
            serde_urlencoded::from_str::<i32>("bad query").unwrap_err(),
        )
        .error_response();
        assert_eq!(resp.status(), StatusCode::BAD_REQUEST);
    }

    #[test]
    fn test_readlines_error() {
        let resp = ReadlinesError::LimitOverflow.error_response();
        assert_eq!(resp.status(), StatusCode::PAYLOAD_TOO_LARGE);
        let resp = ReadlinesError::EncodingError.error_response();
        assert_eq!(resp.status(), StatusCode::BAD_REQUEST);
    }
}
|
rust
|
github
|
https://github.com/actix/actix-web
|
actix-web/src/error/mod.rs
|
#
# TODO:
# parse vertex colors and UV coordinates
# remove faces with the hole material
#
from erlang_ext import *
import types
import pprint
import mesh
try:
import Image
except:
pass
def safe_append(ctr, key, value):
    """Append *value* to the list at ctr[key], creating the list if absent."""
    ctr.setdefault(key, []).append(value)
class wings_reader:
    """Parser for Wings3D `.wings` files (version 2, Erlang external term
    format), producing a :class:`mesh.Mesh` scene.

    NOTE: this module is Python 2 code (print statements, has_key,
    map returning lists).
    """
    # Enable debug dumps only when run as a script.
    dump = __name__ == '__main__'
    def __init__(self, raw_data, writeImages, keepRotation):
        """raw_data: decoded erlang term; writeImages: save textures to
        disk; keepRotation: keep Wings' axis convention (no Y/Z swap)."""
        self.data = raw_data
        self.writeImages = writeImages
        self.keepRotation = keepRotation
        # read and check: the top-level term is (wings, 2, data)
        a, self.ver, wingsdata = self.data
        if a != erlang_atom("wings") or self.ver != 2:
            raise IOError("Unknown wings version")
        #if self.dump:
        #    pp = pprint.PrettyPrinter(indent=4,width=78)
        #    pp.pprint(wingsdata)
        self.raw_objects, self.raw_materials, self.raw_props = wingsdata
    def parse(self):
        """Parse the loaded data and return the assembled scene mesh."""
        return self.parse_2()
    def parse_2(self):
        """Version-2 parser: materials first, then images, then objects."""
        scene = mesh.Mesh()
        scene.name = "wings_object"
        self.materials = scene.materials
        # Maps image index -> texture filename, filled by parse_2_material.
        self.mat_images = {}
        for raw_mat in self.raw_materials:
            wmat = self.parse_2_material(raw_mat)
            scene.materials.append(wmat)
        self.parse_2_images()
        for raw_obj in self.raw_objects:
            wobj = self.parse_2_object(raw_obj)
            scene.merge(wobj)
        self.postprocess(scene)
        return scene
    def parse_2_image(self, index, raw_image):
        """Decode one embedded image and save it as PNG via PIL.

        Returns the PIL image, or None when the record has no pixel data.
        """
        w, h, spp = 0, 0, 0
        pixels = None
        filename = None
        for elem in raw_image:
            if elem[0] == erlang_atom("width"):
                w = elem[1]
            if elem[0] == erlang_atom("height"):
                h = elem[1]
            if elem[0] == erlang_atom("samples_per_pixel"):
                spp = elem[1]
            if elem[0] == erlang_atom("pixels"):
                pixels = elem[1]
        if not pixels:
            return None
        # 3 samples/pixel -> RGB, anything else treated as grayscale.
        if spp == 3: mode = 'RGB'
        else: mode = 'L'
        im = Image.new(mode, (w, h))
        print "processing image"
        print mode, spp, w, h, len(pixels)
        pixels = map(lambda x: ord(x), pixels)
        for x in range(w):
            for y in range(h):
                i = (x + y * w) * 3
                # Wings stores rows bottom-up; flip vertically.
                yy = h - 1 - y # huh?
                im.putpixel((x, yy), tuple(pixels[i:i+3]))
        #bands = [tuple(pixels[i*3:i*3+3]) for i in range(w * h)]
        #print pixels
        #print bands
        #im.putdata(bands)
        if self.mat_images.has_key(index):
            filename = self.mat_images[index]
            im.save(filename)
        return im
    def parse_2_images(self):
        """Extract all embedded images from the props section and save them."""
        if not self.writeImages:
            return
        images = []
        if self.raw_props:
            for elem in self.raw_props:
                if elem[0] == erlang_atom('images'):
                    images = elem[1]
        for raw_im_data in images:
            index, raw_im = raw_im_data[:2]
            self.parse_2_image(index, raw_im)
    def parse_2_material(self, raw_mat):
        """Convert one raw material record into a mesh.Material."""
        atom, data = raw_mat
        #pp = pprint.PrettyPrinter(indent=4,width=78)
        #pp.pprint(data)
        #raw_maps, raw_gl = data[:2]
        mat = mesh.Material(str(atom))
        for tag in data:
            a, elem_data = tag
            # NOTE(review): 'openg' looks like a truncated 'opengl' atom —
            # confirm against the wings file format before changing.
            if a == erlang_atom('openg'):
                for elem in elem_data:
                    if elem[0] == erlang_atom('ambient'):
                        mat.ambient = elem[1]
                    if elem[0] == erlang_atom('diffuse'):
                        mat.diffuse = elem[1]
                    if elem[0] == erlang_atom('specular'):
                        mat.specular = elem[1]
                    if elem[0] == erlang_atom('shininess'):
                        mat.shininess = elem[1]
            elif a == erlang_atom('maps') and elem_data:
                # Texture map: remember which image index belongs to this
                # material so parse_2_image can save it under this name.
                filename = str(atom) + '.png'
                mat.textures.append(filename)
                self.mat_images[elem_data[0][1]] = filename
        return mat
    def check_atom(self, atom, name):
        """Raise IOError unless ``atom`` is the erlang atom ``name``."""
        if atom != erlang_atom(name):
            raise IOError("Unexpected atom: %s expected, %s found" %
                          (erlang_atom(name), atom))
    def parse_2_edges(self, wobj, raw_edges, hard_edges):
        """Decode the winged-edge records into faces, colors and hard edges.

        raw_edges: list of per-edge property lists.
        hard_edges: indices of edges flagged hard in the edge hardness table.
        """
        faces = {}
        for edge_index in range(len(raw_edges)):
            raw_edge = raw_edges[edge_index]
            LSp, LEp = None, None
            for elem in raw_edge:
                if elem[0] == erlang_atom('edge'):
                    edgedata = elem
                #
                # the color data for the face on the sides of this
                # edge, rgb1/uv1 is for Lf:Sv, rgb2/uv2 is for Rf:Ev
                #
                if elem[0] == erlang_atom('uv'):
                    uvdata = struct.unpack('>dddd', elem[1])
                    u1, v1, u2, v2 = uvdata
                    LSp = mesh.ColorProp((u1, v1))
                    LEp = mesh.ColorProp((u2, v2))
                # new UV packing for Wings3D 0.98.16b?
                # I leave the old code in for older mesh files
                if elem[0] == erlang_atom('uv_lt'):
                    uvdata = struct.unpack('>dd', elem[1])
                    u1, v1 = uvdata
                    LSp = mesh.ColorProp((u1, v1))
                if elem[0] == erlang_atom('uv_rt'):
                    uvdata = struct.unpack('>dd', elem[1])
                    u2, v2 = uvdata
                    LEp = mesh.ColorProp((u2, v2))
                if elem[0] == erlang_atom('color'):
                    colordata = struct.unpack('>dddddd', elem[1])
                    r1, g1, b1, r2, g2, b2 = colordata
                    LSp = mesh.ColorProp((r1, g1, b1, 1))
                    LEp = mesh.ColorProp((r2, g2, b2, 1))
            # read winged data: start/end vertex, left/right face and the
            # four neighbouring edges (predecessor/successor on each side)
            a, Sv, Ev, Lf, Rf, LP, LS, RP, RS = edgedata
            self.check_atom(a, "edge")
            minf, maxf = min(Lf, Rf), max(Lf, Rf)
            wobj.edges.append((minf, maxf, Sv, Ev))
            # store color info here if any
            if LSp and LEp:
                if wobj.face_vert_colors.has_key((Lf, Sv)) or \
                   wobj.face_vert_colors.has_key((Rf, Ev)):
                    print "hey!"
                wobj.face_vert_colors[(Lf, Sv)] = LSp
                wobj.face_vert_colors[(Rf, Ev)] = LEp
            # store hardness info
            if edge_index in hard_edges:
                wobj.hard_edges.append((minf, maxf))
            # store left and right face
            safe_append(faces, Lf, (Sv, Ev))
            safe_append(faces, Rf, (Ev, Sv))
        # === put edges (Sv & Ev) in correct order ===
        # === faces{} now contains a sorted list of edges (Sv & Ev) for each face
        # Bubble-sort style pass: reorder each face's edge list so each
        # edge's end vertex matches the next edge's start vertex.
        for i in range(len(faces)):
            face = faces[i]
            swaps = 1
            while swaps:
                swaps = 0
                for j in range(len(face)-2):
                    if face[j][1] != face[j+1][0]:
                        face[j+1], face[j+2] = face[j+2], face[j+1] # swap them
                        swaps = 1
        # replace tuples with vertex indices, also convert the map to sequence
        # s is a sequence of edges, e is an edge
        wobj.faces = map(lambda s: map(lambda e: e[0], s), faces.values())
        if self.dump:
            print "*** Edges parsed"
            pp = pprint.PrettyPrinter(indent=4,width=78)
            pp.pprint(wobj.faces)
            pp.pprint(wobj.face_vert_colors)
            pp.pprint(wobj.hard_edges)
    def parse_2_faces(self, wobj, raw_faces):
        """Assign a material index to each face (falling back to 'default')."""
        for face in range(len(raw_faces)):
            raw_face = raw_faces[face]
            if raw_face:
                for elem in raw_face:
                    if elem[0] == erlang_atom('material'):
                        mat_name = str(elem[1])
                        mat_id = wobj.find_material(mat_name)
            else:
                try:
                    mat_id = wobj.find_material("default")
                except:
                    mat_id = 0
            wobj.face_materials.append(mat_id)
        if self.dump:
            print "*** Faces parsed"
            pp = pprint.PrettyPrinter(indent=4,width=78)
            pp.pprint(wobj.face_materials)
    def parse_2_verts(self, wobj, raw_verts):
        """Unpack vertex coordinates (big-endian doubles), optionally
        swapping Y/Z to convert the axis convention."""
        wobj.verts = []
        for vertdata in raw_verts:
            x, y, z = struct.unpack(">ddd", vertdata[0]) # double precision
            if self.keepRotation:
                wobj.verts.append((x, -z, y))
            else:
                wobj.verts.append((x, y, z))
    def parse_2_object(self, obj):
        """Parse one winged-edge object record into a mesh.Mesh."""
        a, name, winged, mode = obj
        self.check_atom(a, "object")
        # if mode is invisible, skip this
        a, raw_edges, raw_faces, raw_verts, raw_edge_htable = winged
        self.check_atom(a, "winged")
        print "reading object '%s' (%d faces, %d edges, %d vertices)" % (name,
            len(raw_faces), len(raw_edges), len(raw_verts))
        # raw_edge_htable lists hard edges
        # (edges are soft by default, so this table may be empty, thus None)
        if raw_edge_htable == None: raw_edge_htable = []
        if type(raw_edge_htable) == types.StringType:
            raw_edge_htable = map(ord, raw_edge_htable)
        #print raw_edge_htable
        wobj = mesh.Mesh()
        wobj.materials = self.materials
        wobj.name = name
        self.parse_2_edges(wobj, raw_edges, raw_edge_htable)
        self.parse_2_faces(wobj, raw_faces)
        self.parse_2_verts(wobj, raw_verts)
        return wobj
    def postprocess(self, wobj):
        """Finalize the scene: normals, flattening, triangulation, submeshes."""
        wobj.make_face_normals()
        wobj.make_vert_normals(1)
        wobj.flatten()
        wobj.triangulate()
        wobj.submeshize()
        if self.dump:
            wobj.dump()
def read_wings(filename, writeImages, keepRotation):
    """Read a .wings file from disk and return the parsed scene."""
    erlang_terms = erlang_ext_reader(filename).read()
    return wings_reader(erlang_terms, writeImages, keepRotation).parse()
if __name__ == '__main__':
    # Ad-hoc developer smoke test: parse a hard-coded .wings file and dump
    # the raw Erlang term tree.  Not part of the library API.
    try:
        e = erlang_ext_reader("C:/projects/3d/erpy/uv-cube.wings")
        #e = erlang_ext_reader("C:/projects/3d/erpy/mycar.wings")
        #e = erlang_ext_reader("/home/attis/src/erpy/cube-colored.wings")
        #e = erlang_ext_reader("/home/attis/src/erpy/tank1w.wings")
        raw_data = e.read()
        print "read"
        # NOTE(review): wings_reader is called with one argument here, while
        # read_wings() passes three -- confirm the extra parameters have
        # defaults.
        ob = wings_reader(raw_data)
        ob.parse()
        print "done"
        #pp = pprint.PrettyPrinter(indent=4,width=78)
        #file = open("log1.txt", "w")
        #file.write(pp.pformat(raw_data))
        #file.write('\n')
        print "ok"
    finally:
        # NOTE(review): if e.read() itself raises, raw_data is never bound
        # and this pprint raises NameError, masking the original exception.
        pp = pprint.PrettyPrinter(indent=4,width=78)
        pp.pprint(raw_data)
|
unknown
|
codeparrot/codeparrot-clean
| ||
"""The tests for the State vacuum Mqtt platform."""
from copy import deepcopy
import json
from homeassistant.components import mqtt, vacuum
from homeassistant.components.mqtt import CONF_COMMAND_TOPIC, CONF_STATE_TOPIC
from homeassistant.components.mqtt.discovery import async_start
from homeassistant.components.mqtt.vacuum import CONF_SCHEMA, schema_state as mqttvacuum
from homeassistant.components.mqtt.vacuum.schema import services_to_strings
from homeassistant.components.mqtt.vacuum.schema_state import SERVICE_TO_STRING
from homeassistant.components.vacuum import (
ATTR_BATTERY_ICON,
ATTR_BATTERY_LEVEL,
ATTR_FAN_SPEED,
ATTR_FAN_SPEED_LIST,
DOMAIN,
SERVICE_CLEAN_SPOT,
SERVICE_LOCATE,
SERVICE_PAUSE,
SERVICE_RETURN_TO_BASE,
SERVICE_START,
SERVICE_STOP,
STATE_CLEANING,
STATE_DOCKED,
)
from homeassistant.const import (
CONF_NAME,
CONF_PLATFORM,
STATE_UNAVAILABLE,
STATE_UNKNOWN,
)
from homeassistant.setup import async_setup_component
from tests.common import (
MockConfigEntry,
async_fire_mqtt_message,
async_mock_mqtt_component,
)
from tests.components.vacuum import common
# MQTT topics shared by the test fixtures below.
COMMAND_TOPIC = "vacuum/command"
SEND_COMMAND_TOPIC = "vacuum/send_command"
STATE_TOPIC = "vacuum/state"
# Baseline platform configuration for a state-schema MQTT vacuum.
# Individual tests deepcopy this and tweak the copy rather than mutating it.
DEFAULT_CONFIG = {
    CONF_PLATFORM: "mqtt",
    CONF_SCHEMA: "state",
    CONF_NAME: "mqtttest",
    CONF_COMMAND_TOPIC: COMMAND_TOPIC,
    mqttvacuum.CONF_SEND_COMMAND_TOPIC: SEND_COMMAND_TOPIC,
    CONF_STATE_TOPIC: STATE_TOPIC,
    mqttvacuum.CONF_SET_FAN_SPEED_TOPIC: "vacuum/set_fan_speed",
    mqttvacuum.CONF_FAN_SPEED_LIST: ["min", "medium", "high", "max"],
}
async def test_default_supported_features(hass, mqtt_mock):
    """Verify the feature set a state-schema vacuum exposes by default."""
    assert await async_setup_component(
        hass, vacuum.DOMAIN, {vacuum.DOMAIN: DEFAULT_CONFIG}
    )
    state = hass.states.get("vacuum.mqtttest")
    supported = state.attributes.get(mqttvacuum.CONF_SUPPORTED_FEATURES, 0)
    expected = ["start", "stop", "return_home", "battery", "status", "clean_spot"]
    assert sorted(services_to_strings(supported, SERVICE_TO_STRING)) == sorted(expected)
async def test_all_commands(hass, mqtt_mock):
    """Test simple commands send to the vacuum."""
    config = deepcopy(DEFAULT_CONFIG)
    config[mqttvacuum.CONF_SUPPORTED_FEATURES] = services_to_strings(
        mqttvacuum.ALL_SERVICES, SERVICE_TO_STRING
    )
    assert await async_setup_component(hass, vacuum.DOMAIN, {vacuum.DOMAIN: config})

    # Each basic service maps 1:1 onto a plain payload on the command topic.
    for service, payload in (
        (SERVICE_START, "start"),
        (SERVICE_STOP, "stop"),
        (SERVICE_PAUSE, "pause"),
        (SERVICE_LOCATE, "locate"),
        (SERVICE_CLEAN_SPOT, "clean_spot"),
        (SERVICE_RETURN_TO_BASE, "return_to_base"),
    ):
        await hass.services.async_call(DOMAIN, service, blocking=True)
        mqtt_mock.async_publish.assert_called_once_with(
            COMMAND_TOPIC, payload, 0, False
        )
        mqtt_mock.async_publish.reset_mock()

    # Fan speed goes to its own topic.
    await common.async_set_fan_speed(hass, "medium", "vacuum.mqtttest")
    mqtt_mock.async_publish.assert_called_once_with(
        "vacuum/set_fan_speed", "medium", 0, False
    )
    mqtt_mock.async_publish.reset_mock()

    # A bare custom command is forwarded verbatim.
    await common.async_send_command(hass, "44 FE 93", entity_id="vacuum.mqtttest")
    mqtt_mock.async_publish.assert_called_once_with(
        "vacuum/send_command", "44 FE 93", 0, False
    )
    mqtt_mock.async_publish.reset_mock()

    # With parameters, the command is wrapped in a JSON object.
    await common.async_send_command(
        hass, "44 FE 93", {"key": "value"}, entity_id="vacuum.mqtttest"
    )
    sent = json.loads(mqtt_mock.async_publish.mock_calls[-1][1][1])
    assert sent == {"command": "44 FE 93", "key": "value"}
async def test_commands_without_supported_features(hass, mqtt_mock):
    """Test commands which are not supported by the vacuum."""
    config = deepcopy(DEFAULT_CONFIG)
    # Only the 'status' feature is enabled; every command below must be a no-op.
    config[mqttvacuum.CONF_SUPPORTED_FEATURES] = services_to_strings(
        mqttvacuum.STRING_TO_SERVICE["status"], SERVICE_TO_STRING
    )
    assert await async_setup_component(hass, vacuum.DOMAIN, {vacuum.DOMAIN: config})

    for service in (
        SERVICE_START,
        SERVICE_PAUSE,
        SERVICE_STOP,
        SERVICE_RETURN_TO_BASE,
        SERVICE_LOCATE,
        SERVICE_CLEAN_SPOT,
    ):
        await hass.services.async_call(DOMAIN, service, blocking=True)
        mqtt_mock.async_publish.assert_not_called()
        mqtt_mock.async_publish.reset_mock()

    await common.async_set_fan_speed(hass, "medium", "vacuum.mqtttest")
    mqtt_mock.async_publish.assert_not_called()
    mqtt_mock.async_publish.reset_mock()

    await common.async_send_command(
        hass, "44 FE 93", {"key": "value"}, entity_id="vacuum.mqtttest"
    )
    mqtt_mock.async_publish.assert_not_called()
async def test_status(hass, mqtt_mock):
    """State, battery and fan speed must track MQTT status updates."""
    config = deepcopy(DEFAULT_CONFIG)
    config[mqttvacuum.CONF_SUPPORTED_FEATURES] = services_to_strings(
        mqttvacuum.ALL_SERVICES, SERVICE_TO_STRING
    )
    assert await async_setup_component(hass, vacuum.DOMAIN, {vacuum.DOMAIN: config})

    cleaning_payload = """{
        "battery_level": 54,
        "state": "cleaning",
        "fan_speed": "max"
    }"""
    async_fire_mqtt_message(hass, "vacuum/state", cleaning_payload)
    entity = hass.states.get("vacuum.mqtttest")
    assert entity.state == STATE_CLEANING
    assert entity.attributes.get(ATTR_BATTERY_LEVEL) == 54
    assert entity.attributes.get(ATTR_BATTERY_ICON) == "mdi:battery-50"
    assert entity.attributes.get(ATTR_FAN_SPEED) == "max"

    docked_payload = """{
        "battery_level": 61,
        "state": "docked",
        "fan_speed": "min"
    }"""
    async_fire_mqtt_message(hass, "vacuum/state", docked_payload)
    entity = hass.states.get("vacuum.mqtttest")
    assert entity.state == STATE_DOCKED
    # While docked the battery icon switches to the charging variant.
    assert entity.attributes.get(ATTR_BATTERY_ICON) == "mdi:battery-charging-60"
    assert entity.attributes.get(ATTR_BATTERY_LEVEL) == 61
    assert entity.attributes.get(ATTR_FAN_SPEED) == "min"
    assert entity.attributes.get(ATTR_FAN_SPEED_LIST) == ["min", "medium", "high", "max"]
async def test_no_fan_vacuum(hass, mqtt_mock):
    """Status updates must ignore fan speed when the fan is unsupported."""
    config = deepcopy(DEFAULT_CONFIG)
    del config[mqttvacuum.CONF_FAN_SPEED_LIST]
    config[mqttvacuum.CONF_SUPPORTED_FEATURES] = services_to_strings(
        mqttvacuum.DEFAULT_SERVICES, SERVICE_TO_STRING
    )
    assert await async_setup_component(hass, vacuum.DOMAIN, {vacuum.DOMAIN: config})

    def assert_cleaning_without_fan():
        entity = hass.states.get("vacuum.mqtttest")
        assert entity.state == STATE_CLEANING
        assert entity.attributes.get(ATTR_FAN_SPEED) is None
        assert entity.attributes.get(ATTR_FAN_SPEED_LIST) is None
        assert entity.attributes.get(ATTR_BATTERY_LEVEL) == 54
        assert entity.attributes.get(ATTR_BATTERY_ICON) == "mdi:battery-50"

    async_fire_mqtt_message(
        hass,
        "vacuum/state",
        """{
        "battery_level": 54,
        "state": "cleaning"
    }""",
    )
    assert_cleaning_without_fan()

    # An explicit fan_speed field in the payload is ignored as well.
    async_fire_mqtt_message(
        hass,
        "vacuum/state",
        """{
        "battery_level": 54,
        "state": "cleaning",
        "fan_speed": "max"
    }""",
    )
    assert_cleaning_without_fan()

    async_fire_mqtt_message(
        hass,
        "vacuum/state",
        """{
        "battery_level": 61,
        "state": "docked"
    }""",
    )
    entity = hass.states.get("vacuum.mqtttest")
    assert entity.state == STATE_DOCKED
    assert entity.attributes.get(ATTR_BATTERY_ICON) == "mdi:battery-charging-60"
    assert entity.attributes.get(ATTR_BATTERY_LEVEL) == 61
async def test_status_invalid_json(hass, mqtt_mock):
    """A malformed JSON status payload must leave the state unknown."""
    config = deepcopy(DEFAULT_CONFIG)
    config[mqttvacuum.CONF_SUPPORTED_FEATURES] = services_to_strings(
        mqttvacuum.ALL_SERVICES, SERVICE_TO_STRING
    )
    assert await async_setup_component(hass, vacuum.DOMAIN, {vacuum.DOMAIN: config})

    async_fire_mqtt_message(hass, "vacuum/state", '{"asdfasas false}')
    assert hass.states.get("vacuum.mqtttest").state == STATE_UNKNOWN
async def test_default_availability_payload(hass, mqtt_mock):
    """Test availability by default payload with defined topic."""
    config = deepcopy(DEFAULT_CONFIG)
    config["availability_topic"] = "availability-topic"
    assert await async_setup_component(hass, vacuum.DOMAIN, {vacuum.DOMAIN: config})

    # Before any availability message arrives the entity is unavailable.
    assert hass.states.get("vacuum.mqtttest").state == STATE_UNAVAILABLE

    async_fire_mqtt_message(hass, "availability-topic", "online")
    assert hass.states.get("vacuum.mqtttest").state != STATE_UNAVAILABLE

    async_fire_mqtt_message(hass, "availability-topic", "offline")
    assert hass.states.get("vacuum.mqtttest").state == STATE_UNAVAILABLE
async def test_custom_availability_payload(hass, mqtt_mock):
    """Test availability by custom payload with defined topic."""
    config = deepcopy(DEFAULT_CONFIG)
    config["availability_topic"] = "availability-topic"
    config["payload_available"] = "good"
    config["payload_not_available"] = "nogood"
    assert await async_setup_component(hass, vacuum.DOMAIN, {vacuum.DOMAIN: config})

    assert hass.states.get("vacuum.mqtttest").state == STATE_UNAVAILABLE

    # Only the configured custom payloads toggle availability.
    async_fire_mqtt_message(hass, "availability-topic", "good")
    assert hass.states.get("vacuum.mqtttest").state != STATE_UNAVAILABLE

    async_fire_mqtt_message(hass, "availability-topic", "nogood")
    assert hass.states.get("vacuum.mqtttest").state == STATE_UNAVAILABLE
async def test_discovery_removal_vacuum(hass, mqtt_mock):
    """A discovered vacuum must disappear when its config topic is cleared."""
    entry = MockConfigEntry(domain=mqtt.DOMAIN)
    await async_start(hass, "homeassistant", {}, entry)

    async_fire_mqtt_message(
        hass,
        "homeassistant/vacuum/bla/config",
        '{ "name": "Beer",' ' "command_topic": "test_topic"}',
    )
    await hass.async_block_till_done()
    beer = hass.states.get("vacuum.beer")
    assert beer is not None
    assert beer.name == "Beer"

    # An empty retained payload removes the entity again.
    async_fire_mqtt_message(hass, "homeassistant/vacuum/bla/config", "")
    await hass.async_block_till_done()
    assert hass.states.get("vacuum.beer") is None
async def test_discovery_broken(hass, mqtt_mock, caplog):
    """Test handling of bad discovery message."""
    entry = MockConfigEntry(domain=mqtt.DOMAIN)
    await async_start(hass, "homeassistant", {}, entry)

    broken = '{ "name": "Beer",' ' "command_topic": "test_topic#"}'
    valid = '{ "name": "Milk",' ' "command_topic": "test_topic"}'

    # The '#' wildcard makes the command topic invalid: no entity is created.
    async_fire_mqtt_message(hass, "homeassistant/vacuum/bla/config", broken)
    await hass.async_block_till_done()
    assert hass.states.get("vacuum.beer") is None

    # A subsequent valid payload on the same config topic still works.
    async_fire_mqtt_message(hass, "homeassistant/vacuum/bla/config", valid)
    await hass.async_block_till_done()
    milk = hass.states.get("vacuum.milk")
    assert milk is not None
    assert milk.name == "Milk"
    assert hass.states.get("vacuum.beer") is None
async def test_discovery_update_vacuum(hass, mqtt_mock):
    """A second discovery payload on the same topic updates the entity."""
    entry = MockConfigEntry(domain=mqtt.DOMAIN)
    await async_start(hass, "homeassistant", {}, entry)

    async_fire_mqtt_message(
        hass,
        "homeassistant/vacuum/bla/config",
        '{ "name": "Beer",' ' "command_topic": "test_topic"}',
    )
    await hass.async_block_till_done()
    entity = hass.states.get("vacuum.beer")
    assert entity is not None
    assert entity.name == "Beer"

    # The entity id stays vacuum.beer; only the friendly name changes.
    async_fire_mqtt_message(
        hass,
        "homeassistant/vacuum/bla/config",
        '{ "name": "Milk",' ' "command_topic": "test_topic"}',
    )
    await hass.async_block_till_done()
    entity = hass.states.get("vacuum.beer")
    assert entity is not None
    assert entity.name == "Milk"
    assert hass.states.get("vacuum.milk") is None
async def test_setting_attribute_via_mqtt_json_message(hass, mqtt_mock):
    """Test the setting of attribute via MQTT with JSON payload."""
    platform_config = {
        "platform": "mqtt",
        "name": "test",
        "json_attributes_topic": "attr-topic",
    }
    assert await async_setup_component(
        hass, vacuum.DOMAIN, {vacuum.DOMAIN: platform_config}
    )

    async_fire_mqtt_message(hass, "attr-topic", '{ "val": "100" }')
    assert hass.states.get("vacuum.test").attributes.get("val") == "100"
async def test_update_with_json_attrs_not_dict(hass, mqtt_mock, caplog):
    """A JSON list payload on the attributes topic must be rejected."""
    platform_config = {
        "platform": "mqtt",
        "name": "test",
        "json_attributes_topic": "attr-topic",
    }
    assert await async_setup_component(
        hass, vacuum.DOMAIN, {vacuum.DOMAIN: platform_config}
    )

    async_fire_mqtt_message(hass, "attr-topic", '[ "list", "of", "things"]')
    assert hass.states.get("vacuum.test").attributes.get("val") is None
    assert "JSON result was not a dictionary" in caplog.text
async def test_update_with_json_attrs_bad_json(hass, mqtt_mock, caplog):
    """A non-JSON payload on the attributes topic must be rejected."""
    platform_config = {
        "platform": "mqtt",
        "name": "test",
        "json_attributes_topic": "attr-topic",
    }
    assert await async_setup_component(
        hass, vacuum.DOMAIN, {vacuum.DOMAIN: platform_config}
    )

    async_fire_mqtt_message(hass, "attr-topic", "This is not JSON")
    assert hass.states.get("vacuum.test").attributes.get("val") is None
    assert "Erroneous JSON: This is not JSON" in caplog.text
async def test_discovery_update_attr(hass, mqtt_mock, caplog):
    """Test update of discovered MQTTAttributes."""
    entry = MockConfigEntry(domain=mqtt.DOMAIN)
    await async_start(hass, "homeassistant", {}, entry)

    config_v1 = (
        '{ "name": "Beer",'
        ' "command_topic": "test_topic",'
        ' "json_attributes_topic": "attr-topic1" }'
    )
    config_v2 = (
        '{ "name": "Beer",'
        ' "command_topic": "test_topic",'
        ' "json_attributes_topic": "attr-topic2" }'
    )

    async_fire_mqtt_message(hass, "homeassistant/vacuum/bla/config", config_v1)
    await hass.async_block_till_done()
    async_fire_mqtt_message(hass, "attr-topic1", '{ "val": "100" }')
    assert hass.states.get("vacuum.beer").attributes.get("val") == "100"

    # Rediscovery moves the attribute subscription to attr-topic2.
    async_fire_mqtt_message(hass, "homeassistant/vacuum/bla/config", config_v2)
    await hass.async_block_till_done()

    # The old topic must be ignored from now on ...
    async_fire_mqtt_message(hass, "attr-topic1", '{ "val": "50" }')
    assert hass.states.get("vacuum.beer").attributes.get("val") == "100"

    # ... and the new one honoured.
    async_fire_mqtt_message(hass, "attr-topic2", '{ "val": "75" }')
    assert hass.states.get("vacuum.beer").attributes.get("val") == "75"
async def test_unique_id(hass, mqtt_mock):
    """Test unique id option only creates one vacuum per unique_id."""
    await async_mock_mqtt_component(hass)
    # Two platform entries sharing one unique_id.
    duplicated = [
        {
            "platform": "mqtt",
            "name": friendly_name,
            "command_topic": "command-topic",
            "unique_id": "TOTALLY_UNIQUE",
        }
        for friendly_name in ("Test 1", "Test 2")
    ]
    assert await async_setup_component(
        hass, vacuum.DOMAIN, {vacuum.DOMAIN: duplicated}
    )
    async_fire_mqtt_message(hass, "test-topic", "payload")
    # all vacuums group is 1, unique id created is 1
    assert len(hass.states.async_entity_ids()) == 2
async def test_entity_device_info_with_identifier(hass, mqtt_mock):
    """Test MQTT vacuum device registry integration."""
    entry = MockConfigEntry(domain=mqtt.DOMAIN)
    entry.add_to_hass(hass)
    await async_start(hass, "homeassistant", {}, entry)
    registry = await hass.helpers.device_registry.async_get_registry()

    discovery_payload = json.dumps(
        {
            "platform": "mqtt",
            "name": "Test 1",
            "command_topic": "test-command-topic",
            "device": {
                "identifiers": ["helloworld"],
                "connections": [["mac", "02:5b:26:a8:dc:12"]],
                "manufacturer": "Whatever",
                "name": "Beer",
                "model": "Glass",
                "sw_version": "0.1-beta",
            },
            "unique_id": "veryunique",
        }
    )
    async_fire_mqtt_message(
        hass, "homeassistant/vacuum/bla/config", discovery_payload
    )
    await hass.async_block_till_done()

    # The device entry must carry every field from the discovery payload.
    device = registry.async_get_device({("mqtt", "helloworld")}, set())
    assert device is not None
    assert device.identifiers == {("mqtt", "helloworld")}
    assert device.connections == {("mac", "02:5b:26:a8:dc:12")}
    assert device.manufacturer == "Whatever"
    assert device.name == "Beer"
    assert device.model == "Glass"
    assert device.sw_version == "0.1-beta"
async def test_entity_device_info_update(hass, mqtt_mock):
    """Test device registry update."""
    entry = MockConfigEntry(domain=mqtt.DOMAIN)
    entry.add_to_hass(hass)
    await async_start(hass, "homeassistant", {}, entry)
    registry = await hass.helpers.device_registry.async_get_registry()

    discovery = {
        "platform": "mqtt",
        "name": "Test 1",
        "command_topic": "test-command-topic",
        "device": {
            "identifiers": ["helloworld"],
            "connections": [["mac", "02:5b:26:a8:dc:12"]],
            "manufacturer": "Whatever",
            "name": "Beer",
            "model": "Glass",
            "sw_version": "0.1-beta",
        },
        "unique_id": "veryunique",
    }
    async_fire_mqtt_message(
        hass, "homeassistant/vacuum/bla/config", json.dumps(discovery)
    )
    await hass.async_block_till_done()
    device = registry.async_get_device({("mqtt", "helloworld")}, set())
    assert device is not None
    assert device.name == "Beer"

    # Re-publishing discovery with a new device name must update the
    # existing registry entry in place.
    discovery["device"]["name"] = "Milk"
    async_fire_mqtt_message(
        hass, "homeassistant/vacuum/bla/config", json.dumps(discovery)
    )
    await hass.async_block_till_done()
    device = registry.async_get_device({("mqtt", "helloworld")}, set())
    assert device is not None
    assert device.name == "Milk"
|
unknown
|
codeparrot/codeparrot-clean
| ||
/*
Copyright 2022 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// +k8s:conversion-gen=k8s.io/kubernetes/pkg/apis/certificates
// +k8s:conversion-gen-external-types=k8s.io/api/certificates/v1alpha1
// +k8s:defaulter-gen=TypeMeta
// +k8s:defaulter-gen-input=k8s.io/api/certificates/v1alpha1
// +k8s:validation-gen=TypeMeta
// +k8s:validation-gen-input=k8s.io/api/certificates/v1alpha1
// +groupName=certificates.k8s.io
package v1alpha1
|
go
|
github
|
https://github.com/kubernetes/kubernetes
|
pkg/apis/certificates/v1alpha1/doc.go
|
"""Exceptions used by amqp"""
# Copyright (C) 2007-2008 Barry Pederson <bp@barryp.org>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301
from __future__ import absolute_import
from struct import pack, unpack
# Public API of this module.  'Blocked' is defined below and is part of the
# public exception hierarchy, so it must be exported alongside its sibling
# ConsumerCancelled (it was previously missing from this list).
__all__ = [
    'AMQPError',
    'ConnectionError', 'ChannelError',
    'RecoverableConnectionError', 'IrrecoverableConnectionError',
    'RecoverableChannelError', 'IrrecoverableChannelError',
    'Blocked', 'ConsumerCancelled', 'ContentTooLarge', 'NoConsumers',
    'ConnectionForced', 'InvalidPath', 'AccessRefused', 'NotFound',
    'ResourceLocked', 'PreconditionFailed', 'FrameError', 'FrameSyntaxError',
    'InvalidCommand', 'ChannelNotOpen', 'UnexpectedFrame', 'ResourceError',
    'NotAllowed', 'AMQPNotImplementedError', 'InternalError',
]
class AMQPError(Exception):
    """Root of the AMQP exception hierarchy.

    Subclasses override ``code`` with their AMQP reply code; instances may
    additionally carry the reply text and the method (as a (class, method)
    signature tuple and/or a human-readable name) that triggered the error.
    """

    code = 0

    def __init__(self, reply_text=None, method_sig=None,
                 method_name=None, reply_code=None):
        self.message = reply_text
        self.reply_text = reply_text
        # Fall back to the class-level reply code when none is given.
        self.reply_code = reply_code if reply_code else self.code
        self.method_sig = method_sig
        self.method_name = method_name or ''
        if method_sig and not self.method_name:
            # Derive a readable name from the signature when possible.
            self.method_name = METHOD_NAME_MAP.get(method_sig, '')
        Exception.__init__(self, reply_code, reply_text, method_sig,
                           self.method_name)

    def __str__(self):
        if not self.method:
            return self.reply_text or '<AMQPError: unknown error>'
        return '{0.method}: ({0.reply_code}) {0.reply_text}'.format(self)

    @property
    def method(self):
        # Prefer the readable name; fall back to the raw signature tuple.
        return self.method_sig if not self.method_name else self.method_name
# --- Exception hierarchy ---------------------------------------------------
# Errors are split by scope (connection vs. channel) and by whether the
# connection/channel remains usable afterwards (recoverable vs. not).
# NOTE(review): this ConnectionError shadows the Python 3 builtin of the
# same name within this module.
class ConnectionError(AMQPError):
    pass
class ChannelError(AMQPError):
    pass
class RecoverableChannelError(ChannelError):
    pass
class IrrecoverableChannelError(ChannelError):
    pass
class RecoverableConnectionError(ConnectionError):
    pass
class IrrecoverableConnectionError(ConnectionError):
    pass
# Broker-initiated notifications without an AMQP reply code.
class Blocked(RecoverableConnectionError):
    pass
class ConsumerCancelled(RecoverableConnectionError):
    pass
# Concrete errors carrying their AMQP reply code in ``code``
# (see ERROR_MAP below for the code -> class lookup).
class ContentTooLarge(RecoverableChannelError):
    code = 311
class NoConsumers(RecoverableChannelError):
    code = 313
class ConnectionForced(RecoverableConnectionError):
    code = 320
class InvalidPath(IrrecoverableConnectionError):
    code = 402
class AccessRefused(IrrecoverableChannelError):
    code = 403
class NotFound(IrrecoverableChannelError):
    code = 404
class ResourceLocked(RecoverableChannelError):
    code = 405
class PreconditionFailed(IrrecoverableChannelError):
    code = 406
class FrameError(IrrecoverableConnectionError):
    code = 501
class FrameSyntaxError(IrrecoverableConnectionError):
    code = 502
class InvalidCommand(IrrecoverableConnectionError):
    code = 503
class ChannelNotOpen(IrrecoverableConnectionError):
    code = 504
class UnexpectedFrame(IrrecoverableConnectionError):
    code = 505
class ResourceError(RecoverableConnectionError):
    code = 506
class NotAllowed(IrrecoverableConnectionError):
    code = 530
class AMQPNotImplementedError(IrrecoverableConnectionError):
    code = 540
class InternalError(IrrecoverableConnectionError):
    code = 541
# Lookup table from AMQP reply code to exception class, derived directly
# from each class's own ``code`` attribute so the two can never disagree.
ERROR_MAP = {
    cls.code: cls
    for cls in (
        ContentTooLarge,
        NoConsumers,
        ConnectionForced,
        InvalidPath,
        AccessRefused,
        NotFound,
        ResourceLocked,
        PreconditionFailed,
        FrameError,
        FrameSyntaxError,
        InvalidCommand,
        ChannelNotOpen,
        UnexpectedFrame,
        ResourceError,
        NotAllowed,
        AMQPNotImplementedError,
        InternalError,
    )
}
def error_for_code(code, text, method, default):
    """Return an exception instance for the given AMQP reply *code*.

    Unknown codes fall back to the *default* exception class.
    """
    exc_class = ERROR_MAP.get(code, default)
    return exc_class(text, method, reply_code=code)
def raise_for_code(code, text, method, default):
    """Resolve *code* via :func:`error_for_code` and raise the result."""
    raise error_for_code(code, text, method, default)
# Human-readable names for AMQP methods, keyed by (class-id, method-id).
METHOD_NAME_MAP = {
    (10, 10): 'Connection.start',
    (10, 11): 'Connection.start_ok',
    (10, 20): 'Connection.secure',
    (10, 21): 'Connection.secure_ok',
    (10, 30): 'Connection.tune',
    (10, 31): 'Connection.tune_ok',
    (10, 40): 'Connection.open',
    (10, 41): 'Connection.open_ok',
    (10, 50): 'Connection.close',
    (10, 51): 'Connection.close_ok',
    (20, 10): 'Channel.open',
    (20, 11): 'Channel.open_ok',
    (20, 20): 'Channel.flow',
    (20, 21): 'Channel.flow_ok',
    (20, 40): 'Channel.close',
    (20, 41): 'Channel.close_ok',
    (30, 10): 'Access.request',
    (30, 11): 'Access.request_ok',
    (40, 10): 'Exchange.declare',
    (40, 11): 'Exchange.declare_ok',
    (40, 20): 'Exchange.delete',
    (40, 21): 'Exchange.delete_ok',
    (40, 30): 'Exchange.bind',
    (40, 31): 'Exchange.bind_ok',
    (40, 40): 'Exchange.unbind',
    (40, 41): 'Exchange.unbind_ok',
    (50, 10): 'Queue.declare',
    (50, 11): 'Queue.declare_ok',
    (50, 20): 'Queue.bind',
    (50, 21): 'Queue.bind_ok',
    (50, 30): 'Queue.purge',
    (50, 31): 'Queue.purge_ok',
    (50, 40): 'Queue.delete',
    (50, 41): 'Queue.delete_ok',
    (50, 50): 'Queue.unbind',
    (50, 51): 'Queue.unbind_ok',
    (60, 10): 'Basic.qos',
    (60, 11): 'Basic.qos_ok',
    (60, 20): 'Basic.consume',
    (60, 21): 'Basic.consume_ok',
    (60, 30): 'Basic.cancel',
    (60, 31): 'Basic.cancel_ok',
    (60, 40): 'Basic.publish',
    (60, 50): 'Basic.return',
    (60, 60): 'Basic.deliver',
    (60, 70): 'Basic.get',
    (60, 71): 'Basic.get_ok',
    (60, 72): 'Basic.get_empty',
    (60, 80): 'Basic.ack',
    (60, 90): 'Basic.reject',
    (60, 100): 'Basic.recover_async',
    (60, 110): 'Basic.recover',
    (60, 111): 'Basic.recover_ok',
    (60, 120): 'Basic.nack',
    (90, 10): 'Tx.select',
    (90, 11): 'Tx.select_ok',
    (90, 20): 'Tx.commit',
    (90, 21): 'Tx.commit_ok',
    (90, 30): 'Tx.rollback',
    (90, 31): 'Tx.rollback_ok',
    (85, 10): 'Confirm.select',
    (85, 11): 'Confirm.select_ok',
}

# Additionally register every method under the packed 32-bit form of its
# signature (two big-endian uint16s folded into one uint32), so callers can
# look methods up by either representation.
METHOD_NAME_MAP.update({
    unpack('>I', pack('>HH', *signature))[0]: name
    for signature, name in list(METHOD_NAME_MAP.items())
})
|
unknown
|
codeparrot/codeparrot-clean
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.