code stringlengths 1 25.8M | language stringclasses 18 values | source stringclasses 4 values | repo stringclasses 78 values | path stringlengths 0 268 |
|---|---|---|---|---|
from django.contrib import admin
from django.contrib.admin import helpers
from django.contrib.admin.utils import model_ngettext, get_deleted_objects
from django.db import router
from django.core.exceptions import PermissionDenied
from django.template.response import TemplateResponse
from django.utils.encoding import force_unicode
from django.utils.translation import ugettext_lazy, ugettext as _
from kuma.core.managers import NamespacedTaggableManager
from taggit.forms import TagWidget
from .models import Submission
def censor_selected(modeladmin, request, queryset):
    """
    Censor the selected submissions, with confirmation interstitial.
    Largely stolen from django.contrib.admin.actions.delete_selected
    """
    opts = modeladmin.model._meta
    app_label = opts.app_label
    # Censoring is destructive, so gate it on the model's delete permission.
    if not modeladmin.has_delete_permission(request):
        raise PermissionDenied
    if request.POST.get('post'):
        # The interstitial was confirmed: censor every selected submission.
        censored_url = request.POST.get('censored_url', None)
        count = queryset.count()
        if count:
            for submission in queryset:
                submission.censor(url=censored_url)
                modeladmin.message_user(request, _("Censored %(item)s") % {
                    "item": force_unicode(submission)
                })
            modeladmin.message_user(
                request,
                _("Successfully censored %(count)d %(items)s.") % {
                    "count": count,
                    "items": model_ngettext(modeladmin.opts, count)
                })
        # Returning None redisplays the change list view.
        return None
    objects_name = force_unicode(
        opts.verbose_name if len(queryset) == 1 else opts.verbose_name_plural)
    context = {
        "title": _("Are you sure?"),
        "object_name": objects_name,
        "queryset": queryset,
        "opts": opts,
        "app_label": app_label,
        "action_checkbox_name": helpers.ACTION_CHECKBOX_NAME,
    }
    # Not yet confirmed: render the confirmation interstitial instead.
    return TemplateResponse(
        request,
        'admin/demos/submission/censor_selected_confirmation.html',
        context, current_app=modeladmin.admin_site.name)
censor_selected.short_description = ugettext_lazy("Censor selected %(verbose_name_plural)s")
def delete_selected(modeladmin, request, queryset):
    """
    The out-of-box Django delete never calls Submission.delete(), so this is a
    mostly redundant lift-and-hack to ensure that happens. This is important
    because Submission.delete() also cleans up its uploaded files.
    See also: https://docs.djangoproject.com/en/dev/ref/contrib/admin/actions/
    """
    opts = modeladmin.model._meta
    app_label = opts.app_label
    # Check that the user has delete permission for the actual model.
    if not modeladmin.has_delete_permission(request):
        raise PermissionDenied
    using = router.db_for_write(modeladmin.model)
    # Collect everything that would be deleted alongside the selection,
    # plus any missing permissions and protected related objects.
    deletable_objects, perms_needed, protected = get_deleted_objects(
        queryset, opts, request.user, modeladmin.admin_site, using)
    if request.POST.get('post'):
        # Confirmed: delete each object individually so Submission.delete()
        # (and therefore its uploaded-file cleanup) actually runs.
        if perms_needed:
            raise PermissionDenied
        count = queryset.count()
        if count:
            for obj in queryset:
                display = force_unicode(obj)
                modeladmin.log_deletion(request, obj, display)
                obj.delete()
                modeladmin.message_user(
                    request,
                    _("Deleted and uploaded files for %(item)s") % {
                        "item": display
                    })
            modeladmin.message_user(
                request,
                _("Successfully deleted %(count)d %(items)s.") % {
                    "count": count,
                    "items": model_ngettext(modeladmin.opts, count)
                })
        # Returning None redisplays the change list view.
        return None
    objects_name = force_unicode(
        opts.verbose_name if len(queryset) == 1 else opts.verbose_name_plural)
    if perms_needed or protected:
        title = _("Cannot delete %(name)s") % {"name": objects_name}
    else:
        title = _("Are you sure?")
    context = {
        "title": title,
        "object_name": objects_name,
        "deletable_objects": [deletable_objects],
        "queryset": queryset,
        "perms_lacking": perms_needed,
        "protected": protected,
        "opts": opts,
        "app_label": app_label,
        "action_checkbox_name": helpers.ACTION_CHECKBOX_NAME,
    }
    # Not yet confirmed: render the confirmation page.
    return TemplateResponse(request, modeladmin.delete_selected_confirmation_template or [
        "admin/%s/%s/delete_selected_confirmation.html" % (app_label, opts.object_name.lower()),
        "admin/%s/delete_selected_confirmation.html" % app_label,
        "admin/delete_selected_confirmation.html"
    ], context, current_app=modeladmin.admin_site.name)
delete_selected.short_description = ugettext_lazy("Delete selected %(verbose_name_plural)s")
class SubmissionAdmin(admin.ModelAdmin):
    """Admin configuration for demo Submissions."""
    # Replace the stock bulk delete with the module-level delete_selected
    # (which calls Submission.delete() so uploaded files are cleaned up),
    # and add the censoring action.
    actions = (delete_selected, censor_selected)
    list_display = ('title', 'creator', 'featured', 'censored', 'hidden',
                    'taggit_tags', 'modified')
    list_editable = ('featured', 'taggit_tags')
    search_fields = ('title', 'summary', 'description', 'taggit_tags__name')
    list_filter = ('censored', 'hidden', 'created', 'modified')
    # Censoring happens via the censor_selected action, never by direct edit.
    readonly_fields = ('censored',)
    # Render tag fields with a wider text input than the default.
    formfield_overrides = {
        NamespacedTaggableManager: {
            "widget": TagWidget(attrs={"size": 70})
        }
    }
    def get_queryset(self, request):
        # Use admin_manager rather than the default manager -- presumably so
        # hidden/censored submissions are still listed; TODO confirm against
        # kuma's Submission model.
        return Submission.admin_manager.all()
admin.site.register(Submission, SubmissionAdmin)
import unittest
from omrdatasettools.ExportPath import ExportPath
class ExportPathTest(unittest.TestCase):
    """Tests for ExportPath's path-construction helpers."""

    @staticmethod
    def _posix(path):
        # Normalise Windows backslashes so the expected values hold on any OS.
        return path.replace('\\', '/')

    def test_get_full_path_without_stroke_thickness(self):
        export_path = ExportPath("data/images", "3-4-Time", "1-13", "png")
        self.assertEqual("data/images/3-4-Time/1-13.png",
                         self._posix(export_path.get_full_path()))

    def test_get_full_path(self):
        export_path = ExportPath("data/images", "3-4-Time", "1-13", "png", 3)
        self.assertEqual("data/images/3-4-Time/1-13_3.png",
                         self._posix(export_path.get_full_path()))

    def test_get_full_path_with_offset(self):
        export_path = ExportPath("data/images", "3-4-Time", "1-13", "png", 3)
        self.assertEqual("data/images/3-4-Time/1-13_3_offset_33.png",
                         self._posix(export_path.get_full_path(33)))

    def test_get_class_name_and_file_path(self):
        export_path = ExportPath("data/images", "3-4-Time", "1-13", "png", 3)
        self.assertEqual("3-4-Time/1-13_3.png",
                         self._posix(export_path.get_class_name_and_file_path()))

    def test_get_class_name_and_file_path_with_offset(self):
        export_path = ExportPath("data/images", "3-4-Time", "1-13", "png", 3)
        self.assertEqual("3-4-Time/1-13_3_offset_33.png",
                         self._posix(export_path.get_class_name_and_file_path(33)))
if __name__ == '__main__':
    unittest.main()
---
blank_issues_enabled: false
contact_links:
- about: 'Please ask and answer usage questions on Stack Overflow.'
name: Question
url: 'https://stackoverflow.com/questions/tagged/typescript'
- about: 'Alternatively, you can use the TypeScript Community Discord.'
name: Chat
url: 'https://discord.gg/typescript'
  - about: 'Please check the FAQ before filing new issues.'
name: 'TypeScript FAQ'
url: 'https://github.com/microsoft/TypeScript/wiki/FAQ'
- about: 'Please raise issues about the site on its own repo.'
name: Website
    url: 'https://github.com/microsoft/TypeScript-Website/issues/new'
# Natural Language Toolkit: CFG visualization
#
# Copyright (C) 2001-2015 NLTK Project
# Author: Edward Loper <edloper@gmail.com>
# URL: <http://nltk.org/>
# For license information, see LICENSE.TXT
"""
Visualization tools for CFGs.
"""
# Idea for a nice demo:
# - 3 panes: grammar, treelet, working area
# - grammar is a list of productions
# - when you select a production, the treelet that it licenses appears
# in the treelet area
# - the working area has the text on the bottom, and S at top. When
# you select a production, it shows (ghosted) the locations where
# that production's treelet could be attached to either the text
# or the tree rooted at S.
# - the user can drag the treelet onto one of those (or click on them?)
# - the user can delete pieces of the tree from the working area
# (right click?)
# - connecting top to bottom? drag one NP onto another?
#
# +-------------------------------------------------------------+
# | S -> NP VP | S |
# |[NP -> Det N ]| / \ |
# | ... | NP VP |
# | N -> 'dog' | |
# | N -> 'cat' | |
# | ... | |
# +--------------+ |
# | NP | Det N |
# | / \ | | | |
# | Det N | the cat saw the dog |
# | | |
# +--------------+----------------------------------------------+
#
# Operations:
# - connect a new treelet -- drag or click shadow
# - delete a treelet -- right click
# - if only connected to top, delete everything below
# - if only connected to bottom, delete everything above
# - connect top & bottom -- drag a leaf to a root or a root to a leaf
# - disconnect top & bottom -- right click
# - if connected to top & bottom, then disconnect
import nltk.compat
import re
from tkinter import (Button, Canvas, Entry, Frame, IntVar, Label,
Scrollbar, Text, Tk, Toplevel)
from nltk.grammar import (CFG, _read_cfg_production,
Nonterminal, nonterminals)
from nltk.tree import Tree
from nltk.draw.tree import TreeSegmentWidget, tree_to_treesegment
from nltk.draw.util import (CanvasFrame, ColorizedList, ShowText,
SymbolWidget, TextWidget)
from nltk import compat
######################################################################
# Production List
######################################################################
class ProductionList(ColorizedList):
    """A colorized list widget that renders CFG productions."""
    ARROW = SymbolWidget.SYMBOLS['rightarrow']
    def _init_colortags(self, textwidget, options):
        # Colour scheme: terminals green, nonterminals bold blue,
        # the production arrow in the symbol font.
        textwidget.tag_config('terminal', foreground='#006000')
        textwidget.tag_config('arrow', font='symbol', underline='0')
        textwidget.tag_config('nonterminal', foreground='blue',
                              font=('helvetica', -12, 'bold'))
    def _item_repr(self, item):
        # Render as "LHS<tab><arrow> rhs1 rhs2 ...", tagging each piece
        # so _init_colortags' colours apply.
        pieces = [('%s\t' % item.lhs(), 'nonterminal'), (self.ARROW, 'arrow')]
        for elt in item.rhs():
            if isinstance(elt, Nonterminal):
                pieces.append((' %s' % elt.symbol(), 'nonterminal'))
            else:
                pieces.append((' %r' % elt, 'terminal'))
        return pieces
######################################################################
# CFG Editor
######################################################################
# Help text shown by CFGEditor._help(); it is displayed verbatim in a
# ShowText window, so keep it as plain ASCII prose.
_CFGEditor_HELP = """
The CFG Editor can be used to create or modify context free grammars.
A context free grammar consists of a start symbol and a list of
productions. The start symbol is specified by the text entry field in
the upper right hand corner of the editor; and the list of productions
are specified in the main text editing box.
Every non-blank line specifies a single production. Each production
has the form "LHS -> RHS," where LHS is a single nonterminal, and RHS
is a list of nonterminals and terminals.
Nonterminals must be a single word, such as S or NP or NP_subj.
Currently, nonterminals must consists of alphanumeric characters and
underscores (_). Nonterminals are colored blue. If you place the
mouse over any nonterminal, then all occurrences of that nonterminal
will be highlighted.
Terminals must be surrounded by single quotes (') or double
quotes(\"). For example, "dog" and "New York" are terminals.
Currently, the string within the quotes must consist of alphanumeric
characters, underscores, and spaces.
To enter a new production, go to a blank line, and type a nonterminal,
followed by an arrow (->), followed by a sequence of terminals and
nonterminals. Note that "->" (dash + greater-than) is automatically
converted to an arrow symbol. When you move your cursor to a
different line, your production will automatically be colorized. If
there are any errors, they will be highlighted in red.
Note that the order of the productions is significant for some
algorithms. To re-order the productions, use cut and paste to move
them.
Use the buttons at the bottom of the window when you are done editing
the CFG:
- Ok: apply the new CFG, and exit the editor.
- Apply: apply the new CFG, and do not exit the editor.
- Reset: revert to the original CFG, and do not exit the editor.
- Cancel: revert to the original CFG, and exit the editor.
"""
class CFGEditor(object):
    """
    A dialog window for creating and editing context free grammars.
    ``CFGEditor`` imposes the following restrictions:
    - All nonterminals must be strings consisting of word
      characters.
    - All terminals must be strings consisting of word characters
      and space characters.
    """
    # Regular expressions used by _analyze_line.  Precompile them, so
    # we can process the text faster.
    ARROW = SymbolWidget.SYMBOLS['rightarrow']
    # NOTE(review): several patterns below are non-raw strings containing
    # regex escapes such as "\s"; consider raw strings to avoid invalid
    # escape-sequence warnings on newer Pythons.
    _LHS_RE = re.compile(r"(^\s*\w+\s*)(->|("+ARROW+"))")
    _ARROW_RE = re.compile("\s*(->|("+ARROW+"))\s*")
    _PRODUCTION_RE = re.compile(r"(^\s*\w+\s*)" +               # LHS
                                "(->|("+ARROW+"))\s*" +         # arrow
                                r"((\w+|'[\w ]*'|\"[\w ]*\"|\|)\s*)*$") # RHS
    _TOKEN_RE = re.compile("\\w+|->|'[\\w ]+'|\"[\\w ]+\"|("+ARROW+")")
    # Font used for nonterminal highlighting.
    _BOLD = ('helvetica', -12, 'bold')
    def __init__(self, parent, cfg=None, set_cfg_callback=None):
        """
        Create the editor dialog as a Toplevel child of ``parent``.
        :param cfg: the grammar to edit; a trivial grammar with start
            symbol S is used when omitted.
        :param set_cfg_callback: called with the new ``CFG`` whenever the
            user applies (or resets) their edits.
        """
        self._parent = parent
        if cfg is not None: self._cfg = cfg
        else: self._cfg = CFG(Nonterminal('S'), [])
        self._set_cfg_callback = set_cfg_callback
        self._highlight_matching_nonterminals = 1
        # Create the top-level window.
        self._top = Toplevel(parent)
        self._init_bindings()
        self._init_startframe()
        self._startframe.pack(side='top', fill='x', expand=0)
        self._init_prodframe()
        self._prodframe.pack(side='top', fill='both', expand=1)
        self._init_buttons()
        self._buttonframe.pack(side='bottom', fill='x', expand=0)
        self._textwidget.focus()
    def _init_startframe(self):
        # Start-symbol entry on the right, "Productions:" caption on the left.
        frame = self._startframe = Frame(self._top)
        self._start = Entry(frame)
        self._start.pack(side='right')
        Label(frame, text='Start Symbol:').pack(side='right')
        Label(frame, text='Productions:').pack(side='left')
        self._start.insert(0, self._cfg.start().symbol())
    def _init_buttons(self):
        # Ok/Apply/Reset/Cancel on the left; Help on the right.
        frame = self._buttonframe = Frame(self._top)
        Button(frame, text='Ok', command=self._ok,
               underline=0, takefocus=0).pack(side='left')
        Button(frame, text='Apply', command=self._apply,
               underline=0, takefocus=0).pack(side='left')
        Button(frame, text='Reset', command=self._reset,
               underline=0, takefocus=0,).pack(side='left')
        Button(frame, text='Cancel', command=self._cancel,
               underline=0, takefocus=0).pack(side='left')
        Button(frame, text='Help', command=self._help,
               underline=0, takefocus=0).pack(side='right')
    def _init_bindings(self):
        # Keyboard shortcuts mirroring the buttons (Control- and Alt- forms).
        self._top.title('CFG Editor')
        self._top.bind('<Control-q>', self._cancel)
        self._top.bind('<Alt-q>', self._cancel)
        self._top.bind('<Control-d>', self._cancel)
        #self._top.bind('<Control-x>', self._cancel)
        self._top.bind('<Alt-x>', self._cancel)
        self._top.bind('<Escape>', self._cancel)
        #self._top.bind('<Control-c>', self._cancel)
        self._top.bind('<Alt-c>', self._cancel)
        self._top.bind('<Control-o>', self._ok)
        self._top.bind('<Alt-o>', self._ok)
        self._top.bind('<Control-a>', self._apply)
        self._top.bind('<Alt-a>', self._apply)
        self._top.bind('<Control-r>', self._reset)
        self._top.bind('<Alt-r>', self._reset)
        self._top.bind('<Control-h>', self._help)
        self._top.bind('<Alt-h>', self._help)
        self._top.bind('<F1>', self._help)
    def _init_prodframe(self):
        """Build the scrollable Text widget holding the production list."""
        self._prodframe = Frame(self._top)
        # Create the basic Text widget & scrollbar.
        self._textwidget = Text(self._prodframe, background='#e0e0e0',
                                exportselection=1)
        self._textscroll = Scrollbar(self._prodframe, takefocus=0,
                                     orient='vertical')
        self._textwidget.config(yscrollcommand = self._textscroll.set)
        self._textscroll.config(command=self._textwidget.yview)
        self._textscroll.pack(side='right', fill='y')
        self._textwidget.pack(expand=1, fill='both', side='left')
        # Initialize the colorization tags.  Each nonterminal gets its
        # own tag, so they aren't listed here.
        self._textwidget.tag_config('terminal', foreground='#006000')
        self._textwidget.tag_config('arrow', font='symbol')
        self._textwidget.tag_config('error', background='red')
        # Keep track of what line they're on.  We use that to remember
        # to re-analyze a line whenever they leave it.
        self._linenum = 0
        # Expand "->" to an arrow.
        self._top.bind('>', self._replace_arrows)
        # Re-colorize lines when appropriate.
        self._top.bind('<<Paste>>', self._analyze)
        self._top.bind('<KeyPress>', self._check_analyze)
        self._top.bind('<ButtonPress>', self._check_analyze)
        # Tab cycles focus. (why doesn't this work??)
        def cycle(e, textwidget=self._textwidget):
            textwidget.tk_focusNext().focus()
        self._textwidget.bind('<Tab>', cycle)
        # Merge consecutive productions sharing a LHS into a single
        # "LHS -> rhs1 | rhs2" entry; empty-RHS productions stay separate.
        prod_tuples = [(p.lhs(),[p.rhs()]) for p in self._cfg.productions()]
        for i in range(len(prod_tuples)-1,0,-1):
            if (prod_tuples[i][0] == prod_tuples[i-1][0]):
                if () in prod_tuples[i][1]: continue
                if () in prod_tuples[i-1][1]: continue
                print(prod_tuples[i-1][1])  # NOTE(review): leftover debug print
                print(prod_tuples[i][1])    # NOTE(review): leftover debug print
                prod_tuples[i-1][1].extend(prod_tuples[i][1])
                del prod_tuples[i]
        for lhs, rhss in prod_tuples:
            print(lhs, rhss)                # NOTE(review): leftover debug print
            s = '%s ->' % lhs
            for rhs in rhss:
                for elt in rhs:
                    if isinstance(elt, Nonterminal): s += ' %s' % elt
                    else: s += ' %r' % elt
                s += ' |'
            # Drop the trailing ' |' and terminate the line.
            s = s[:-2] + '\n'
            self._textwidget.insert('end', s)
        self._analyze()
        # # Add the producitons to the text widget, and colorize them.
        # prod_by_lhs = {}
        # for prod in self._cfg.productions():
        #     if len(prod.rhs()) > 0:
        #         prod_by_lhs.setdefault(prod.lhs(),[]).append(prod)
        # for (lhs, prods) in prod_by_lhs.items():
        #     self._textwidget.insert('end', '%s ->' % lhs)
        #     self._textwidget.insert('end', self._rhs(prods[0]))
        #     for prod in prods[1:]:
        #         print '\t|'+self._rhs(prod),
        #         self._textwidget.insert('end', '\t|'+self._rhs(prod))
        #     print
        #     self._textwidget.insert('end', '\n')
        # for prod in self._cfg.productions():
        #     if len(prod.rhs()) == 0:
        #         self._textwidget.insert('end', '%s' % prod)
        # self._analyze()
        # def _rhs(self, prod):
        #     s = ''
        #     for elt in prod.rhs():
        #         if isinstance(elt, Nonterminal): s += ' %s' % elt.symbol()
        #         else: s += ' %r' % elt
        #     return s
    def _clear_tags(self, linenum):
        """
        Remove all tags (except ``arrow`` and ``sel``) from the given
        line of the text widget used for editing the productions.
        """
        start = '%d.0'%linenum
        end = '%d.end'%linenum
        for tag in self._textwidget.tag_names():
            if tag not in ('arrow', 'sel'):
                self._textwidget.tag_remove(tag, start, end)
    def _check_analyze(self, *e):
        """
        Check if we've moved to a new line.  If we have, then remove
        all colorization from the line we moved to, and re-colorize
        the line that we moved from.
        """
        linenum = int(self._textwidget.index('insert').split('.')[0])
        if linenum != self._linenum:
            self._clear_tags(linenum)
            self._analyze_line(self._linenum)
            self._linenum = linenum
    def _replace_arrows(self, *e):
        """
        Replace any ``'->'`` text strings with arrows (char \\256, in
        symbol font).  This searches the whole buffer, but is fast
        enough to be done anytime they press '>'.
        """
        arrow = '1.0'
        while True:
            arrow = self._textwidget.search('->', arrow, 'end+1char')
            if arrow == '': break
            self._textwidget.delete(arrow, arrow+'+2char')
            self._textwidget.insert(arrow, self.ARROW, 'arrow')
            self._textwidget.insert(arrow, '\t')
        arrow = '1.0'
        while True:
            arrow = self._textwidget.search(self.ARROW, arrow+'+1char',
                                            'end+1char')
            if arrow == '': break
            self._textwidget.tag_add('arrow', arrow, arrow+'+1char')
    def _analyze_token(self, match, linenum):
        """
        Given a line number and a regexp match for a token on that
        line, colorize the token.  Note that the regexp match gives us
        the token's text, start index (on the line), and end index (on
        the line).
        """
        # What type of token is it?
        if match.group()[0] in "'\"": tag = 'terminal'
        elif match.group() in ('->', self.ARROW): tag = 'arrow'
        else:
            # If it's a nonterminal, then set up new bindings, so we
            # can highlight all instances of that nonterminal when we
            # put the mouse over it.
            tag = 'nonterminal_'+match.group()
            if tag not in self._textwidget.tag_names():
                self._init_nonterminal_tag(tag)
        start = '%d.%d' % (linenum, match.start())
        end = '%d.%d' % (linenum, match.end())
        self._textwidget.tag_add(tag, start, end)
    def _init_nonterminal_tag(self, tag, foreground='blue'):
        # Create a per-nonterminal tag; hovering highlights every
        # occurrence of that nonterminal (when enabled).
        self._textwidget.tag_config(tag, foreground=foreground,
                                    font=CFGEditor._BOLD)
        if not self._highlight_matching_nonterminals:
            return
        def enter(e, textwidget=self._textwidget, tag=tag):
            textwidget.tag_config(tag, background='#80ff80')
        def leave(e, textwidget=self._textwidget, tag=tag):
            textwidget.tag_config(tag, background='')
        self._textwidget.tag_bind(tag, '<Enter>', enter)
        self._textwidget.tag_bind(tag, '<Leave>', leave)
    def _analyze_line(self, linenum):
        """
        Colorize a given line.
        """
        # Get rid of any tags that were previously on the line.
        self._clear_tags(linenum)
        # Get the line line's text string.
        line = self._textwidget.get(repr(linenum)+'.0', repr(linenum)+'.end')
        # If it's a valid production, then colorize each token.
        if CFGEditor._PRODUCTION_RE.match(line):
            # It's valid; Use _TOKEN_RE to tokenize the production,
            # and call analyze_token on each token.
            def analyze_token(match, self=self, linenum=linenum):
                self._analyze_token(match, linenum)
                return ''
            CFGEditor._TOKEN_RE.sub(analyze_token, line)
        elif line.strip() != '':
            # It's invalid; show the user where the error is.
            self._mark_error(linenum, line)
    def _mark_error(self, linenum, line):
        """
        Mark the location of an error in a line.
        """
        arrowmatch = CFGEditor._ARROW_RE.search(line)
        if not arrowmatch:
            # If there's no arrow at all, highlight the whole line.
            start = '%d.0' % linenum
            end = '%d.end' % linenum
        elif not CFGEditor._LHS_RE.match(line):
            # Otherwise, if the LHS is bad, highlight it.
            start = '%d.0' % linenum
            end = '%d.%d' % (linenum, arrowmatch.start())
        else:
            # Otherwise, highlight the RHS.
            start = '%d.%d' % (linenum, arrowmatch.end())
            end = '%d.end' % linenum
        # If we're highlighting 0 chars, highlight the whole line.
        if self._textwidget.compare(start, '==', end):
            start = '%d.0' % linenum
            end = '%d.end' % linenum
        self._textwidget.tag_add('error', start, end)
    def _analyze(self, *e):
        """
        Replace ``->`` with arrows, and colorize the entire buffer.
        """
        self._replace_arrows()
        numlines = int(self._textwidget.index('end').split('.')[0])
        for linenum in range(1, numlines+1):  # line numbers start at 1.
            self._analyze_line(linenum)
    def _parse_productions(self):
        """
        Parse the current contents of the textwidget buffer, to create
        a list of productions.
        """
        productions = []
        # Get the text, normalize it, and split it into lines.
        text = self._textwidget.get('1.0', 'end')
        text = re.sub(self.ARROW, '->', text)
        text = re.sub('\t', ' ', text)
        lines = text.split('\n')
        # Convert each line to a CFG production
        for line in lines:
            line = line.strip()
            if line=='': continue
            productions += _read_cfg_production(line)
            #if line.strip() == '': continue
            #if not CFGEditor._PRODUCTION_RE.match(line):
            #    raise ValueError('Bad production string %r' % line)
            #
            #(lhs_str, rhs_str) = line.split('->')
            #lhs = Nonterminal(lhs_str.strip())
            #rhs = []
            #def parse_token(match, rhs=rhs):
            #    token = match.group()
            #    if token[0] in "'\"": rhs.append(token[1:-1])
            #    else: rhs.append(Nonterminal(token))
            #    return ''
            #CFGEditor._TOKEN_RE.sub(parse_token, rhs_str)
            #
            #productions.append(Production(lhs, *rhs))
        return productions
    def _destroy(self, *e):
        # Tear down the dialog window (idempotent).
        if self._top is None: return
        self._top.destroy()
        self._top = None
    def _ok(self, *e):
        # Apply the edited grammar, then close the editor.
        self._apply()
        self._destroy()
    def _apply(self, *e):
        # Build a CFG from the current buffer and hand it to the callback.
        productions = self._parse_productions()
        start = Nonterminal(self._start.get())
        cfg = CFG(start, productions)
        if self._set_cfg_callback is not None:
            self._set_cfg_callback(cfg)
    def _reset(self, *e):
        # Revert the buffer to the original grammar and re-notify the callback.
        self._textwidget.delete('1.0', 'end')
        for production in self._cfg.productions():
            self._textwidget.insert('end', '%s\n' % production)
        self._analyze()
        if self._set_cfg_callback is not None:
            self._set_cfg_callback(self._cfg)
    def _cancel(self, *e):
        # NOTE(review): the bare except silently ignores any reset failure;
        # narrowing it would make errors visible.
        try: self._reset()
        except: pass
        self._destroy()
    def _help(self, *e):
        # The default font's not very legible; try using 'fixed' instead.
        # NOTE(review): the bare except falls back when the 'fixed' font is
        # unavailable; narrowing it to TclError would be safer.
        try:
            ShowText(self._parent, 'Help: Chart Parser Demo',
                     (_CFGEditor_HELP).strip(), width=75, font='fixed')
        except:
            ShowText(self._parent, 'Help: Chart Parser Demo',
                     (_CFGEditor_HELP).strip(), width=75)
######################################################################
# New Demo (built tree based on cfg)
######################################################################
class CFGDemo(object):
    """
    An (experimental) graphical tool for building a parse tree from a CFG:
    a production list and a treelet preview on the left, and a workspace on
    the right with the grammar's start symbol at the top and the text's
    words along the bottom.
    """
    def __init__(self, grammar, text):
        """
        :param grammar: the ``CFG`` whose productions are listed.
        :param text: the sentence, as a list of words, shown in the workspace.
        """
        self._grammar = grammar
        self._text = text
        # Set up the main window.
        self._top = Tk()
        self._top.title('Context Free Grammar Demo')
        # Base font size (12 = medium), used when drawing trees.
        self._size = IntVar(self._top)
        self._size.set(12)
        # Set up the key bindings.
        self._init_bindings(self._top)
        # Create the basic frames: productions + treelet preview on the
        # left, workspace on the right.
        frame1 = Frame(self._top)
        frame1.pack(side='left', fill='y', expand=0)
        self._init_menubar(self._top)
        self._init_buttons(self._top)
        self._init_grammar(frame1)
        self._init_treelet(frame1)
        self._init_workspace(self._top)
    #//////////////////////////////////////////////////
    # Initialization
    #//////////////////////////////////////////////////
    def _init_bindings(self, top):
        # Control-q quits the demo.
        top.bind('<Control-q>', self.destroy)
    def _init_menubar(self, parent):
        # Placeholder: no menubar yet.
        pass
    def _init_buttons(self, parent):
        # Placeholder: no buttons yet.
        pass
    def _init_grammar(self, parent):
        # Selecting or moving over a production previews its treelet and
        # marks possible attachment points in the workspace.
        self._prodlist = ProductionList(parent, self._grammar, width=20)
        self._prodlist.pack(side='top', fill='both', expand=1)
        self._prodlist.focus()
        self._prodlist.add_callback('select', self._selectprod_cb)
        self._prodlist.add_callback('move', self._selectprod_cb)
    def _init_treelet(self, parent):
        # Canvas where the selected production's treelet is previewed.
        self._treelet_canvas = Canvas(parent, background='white')
        self._treelet_canvas.pack(side='bottom', fill='x')
        self._treelet = None
    def _init_workspace(self, parent):
        self._workspace = CanvasFrame(parent, background='white')
        self._workspace.pack(side='right', fill='both', expand=1)
        self._tree = None
        self.reset_workspace()
    #//////////////////////////////////////////////////
    # Workspace
    #//////////////////////////////////////////////////
    def reset_workspace(self):
        """Clear the workspace and redraw the start symbol and the text."""
        c = self._workspace.canvas()
        fontsize = int(self._size.get())
        node_font = ('helvetica', -(fontsize+4), 'bold')
        leaf_font = ('helvetica', -(fontsize+2))
        # Remove the old tree, if any.
        if self._tree is not None:
            self._workspace.remove_widget(self._tree)
        # The root of the tree is the grammar's start symbol.
        start = self._grammar.start().symbol()
        rootnode = TextWidget(c, start, font=node_font, draggable=1)
        # The leaves of the tree: one widget per word of the text.
        leaves = []
        for word in self._text:
            leaves.append(TextWidget(c, word, font=leaf_font, draggable=1))
        # Put it all together into one tree.
        self._tree = TreeSegmentWidget(c, rootnode, leaves,
                                       color='white')
        # Add it to the workspace.
        self._workspace.add_widget(self._tree)
        # Move the leaves to the bottom of the workspace.
        for leaf in leaves: leaf.move(0, 100)
    def workspace_markprod(self, production):
        pass
    def _markproduction(self, prod, tree=None):
        """
        Find every position where ``prod``'s right-hand side matches a
        contiguous run of ``tree``'s children, i.e. where the production's
        treelet could be attached.
        """
        if tree is None: tree = self._tree
        # Slide the RHS across the children.  The "+1" includes the final
        # alignment (ending at the last child), which the original
        # off-by-one range missed.  (A stray no-op "self._markproduction"
        # statement was also removed here.)
        for i in range(len(tree.subtrees()) - len(prod.rhs()) + 1):
            for j, node in enumerate(prod.rhs()):
                widget = tree.subtrees()[i+j]
                # Nonterminal.symbol is a method (see ProductionList
                # above), so it must be called for the comparison to work.
                if (isinstance(node, Nonterminal) and
                    isinstance(widget, TreeSegmentWidget) and
                    node.symbol() == widget.label().text()):
                    pass # matching nonterminal
                elif (isinstance(node, compat.string_types) and
                      isinstance(widget, TextWidget) and
                      node == widget.text()):
                    pass # matching terminal
                else: break
            else:
                # Everything matched!
                print('MATCH AT', i)
    #//////////////////////////////////////////////////
    # Grammar
    #//////////////////////////////////////////////////
    def _selectprod_cb(self, production):
        """
        Production-list callback: preview ``production`` as a treelet and
        mark where it could attach in the workspace.
        """
        canvas = self._treelet_canvas
        self._prodlist.highlight(production)
        if self._treelet is not None: self._treelet.destroy()
        # Convert the production to a tree.  (A dead loop that rebound each
        # RHS element to Tree(elt) without using the result was removed.)
        rhs = production.rhs()
        tree = Tree(production.lhs().symbol(), *rhs)
        # Draw the tree in the treelet area.
        fontsize = int(self._size.get())
        node_font = ('helvetica', -(fontsize+4), 'bold')
        leaf_font = ('helvetica', -(fontsize+2))
        self._treelet = tree_to_treesegment(canvas, tree,
                                            node_font=node_font,
                                            leaf_font=leaf_font)
        self._treelet['draggable'] = 1
        # Center the treelet.
        (x1, y1, x2, y2) = self._treelet.bbox()
        w, h = int(canvas['width']), int(canvas['height'])
        self._treelet.move((w-x1-x2)/2, (h-y1-y2)/2)
        # Mark the places where we can add it to the workspace.
        self._markproduction(production)
    def destroy(self, *args):
        self._top.destroy()
    def mainloop(self, *args, **kwargs):
        self._top.mainloop(*args, **kwargs)
def demo2():
    """Launch the CFGDemo workspace on a small example grammar and sentence."""
    from nltk import Nonterminal, Production, CFG
    names = 'S VP NP PP P N Name V Det'
    (S, VP, NP, PP, P, N, Name, V, Det) = [Nonterminal(s)
                                           for s in names.split()]
    productions = (
        # Syntactic Productions
        Production(S, [NP, VP]),
        Production(NP, [Det, N]),
        Production(NP, [NP, PP]),
        Production(VP, [VP, PP]),
        Production(VP, [V, NP, PP]),
        Production(VP, [V, NP]),
        Production(PP, [P, NP]),
        Production(PP, []),
        Production(PP, ['up', 'over', NP]),
        # Lexical Productions
        Production(NP, ['I']), Production(Det, ['the']),
        Production(Det, ['a']), Production(N, ['man']),
        Production(V, ['saw']), Production(P, ['in']),
        Production(P, ['with']), Production(N, ['park']),
        Production(N, ['dog']), Production(N, ['statue']),
        Production(Det, ['my']),
    )
    grammar = CFG(S, productions)
    sentence = 'I saw a man in the park'.split()
    CFGDemo(grammar, sentence).mainloop()
######################################################################
# Old Demo
######################################################################
def demo():
    """Open a CFGEditor window on a toy grammar for manual testing."""
    from nltk import Nonterminal, CFG
    names = 'S VP NP PP P N Name V Det'
    (S, VP, NP, PP, P, N, Name, V, Det) = [Nonterminal(s)
                                           for s in names.split()]
    grammar = CFG.fromstring("""
    S -> NP VP
    PP -> P NP
    NP -> Det N
    NP -> NP PP
    VP -> V NP
    VP -> VP PP
    Det -> 'a'
    Det -> 'the'
    Det -> 'my'
    NP -> 'I'
    N -> 'dog'
    N -> 'man'
    N -> 'park'
    N -> 'statue'
    V -> 'saw'
    P -> 'in'
    P -> 'up'
    P -> 'over'
    P -> 'with'
    """)
    def on_change(grammar): print(grammar)
    top = Tk()
    editor = CFGEditor(top, grammar, on_change)
    Label(top, text='\nTesting CFG Editor\n').pack()
    Button(top, text='Quit', command=top.destroy).pack()
    top.mainloop()
def demo3():
    """Show a ProductionList widget populated with sample productions."""
    from nltk import Production
    (S, VP, NP, PP, P, N, Name, V, Det) = \
        nonterminals('S, VP, NP, PP, P, N, Name, V, Det')
    productions = (
        # Syntactic Productions
        Production(S, [NP, VP]),
        Production(NP, [Det, N]),
        Production(NP, [NP, PP]),
        Production(VP, [VP, PP]),
        Production(VP, [V, NP, PP]),
        Production(VP, [V, NP]),
        Production(PP, [P, NP]),
        Production(PP, []),
        Production(PP, ['up', 'over', NP]),
        # Lexical Productions
        Production(NP, ['I']), Production(Det, ['the']),
        Production(Det, ['a']), Production(N, ['man']),
        Production(V, ['saw']), Production(P, ['in']),
        Production(P, ['with']), Production(N, ['park']),
        Production(N, ['dog']), Production(N, ['statue']),
        Production(Det, ['my']),
    )
    root = Tk()
    # 'q' closes the window.
    root.bind('q', lambda event: root.destroy())
    prodlist = ProductionList(root, productions)
    prodlist.pack(expand=1, fill='both')
    prodlist.add_callback('select', prodlist.markonly)
    prodlist.add_callback('move', prodlist.markonly)
    prodlist.focus()
    prodlist.mark(productions[2])
    prodlist.mark(productions[8])
if __name__ == '__main__': demo()
// Copyright 2025 The etcd Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package cache
// ringBuffer is a fixed-capacity circular buffer of revisioned entries,
// stored in append order. The iteration helpers appear to assume entries
// arrive in ascending revision order (the binary-search TODO in
// AscendGreaterOrEqual relies on it) -- TODO confirm with callers.
type ringBuffer[T any] struct {
	buffer []entry[T]
	// head is the index immediately after the last non-empty entry in the buffer (i.e., the next write position).
	// tail is the index of the oldest entry; size is the current number of stored entries.
	head, tail, size int
	// revisionOf extracts the revision used to order and filter entries.
	revisionOf RevisionOf[T]
}
// entry pairs a stored item with its revision, so ordering queries do not
// need to re-derive the revision on every comparison.
type entry[T any] struct {
	revision int64
	item     T
}
type (
	// KeyPredicate reports whether a key belongs to a watched range.
	KeyPredicate = func([]byte) bool
	// RevisionOf extracts the revision number from an item of type T.
	RevisionOf[T any] func(T) int64
	// IterFunc is called per entry during iteration; returning false stops it.
	IterFunc[T any] func(rev int64, item T) bool
)
// newRingBuffer returns an empty ring buffer holding at most capacity items;
// revisionOf derives each item's revision when it is appended.
func newRingBuffer[T any](capacity int, revisionOf RevisionOf[T]) *ringBuffer[T] {
	// assume capacity > 0 – validated by Cache
	rb := &ringBuffer[T]{revisionOf: revisionOf}
	rb.buffer = make([]entry[T], capacity)
	return rb
}
// Append stores item as the newest entry, evicting the oldest one when the
// buffer is already at capacity.
func (r *ringBuffer[T]) Append(item T) {
	capacity := len(r.buffer)
	if r.size < capacity {
		r.size++
	} else {
		// Buffer is full: advance tail so the oldest entry is overwritten.
		r.tail = (r.tail + 1) % capacity
	}
	r.buffer[r.head] = entry[T]{revision: r.revisionOf(item), item: item}
	r.head = (r.head + 1) % capacity
}
// full reports whether the buffer holds as many entries as its capacity.
func (r *ringBuffer[T]) full() bool {
	return len(r.buffer) == r.size
}
// AscendGreaterOrEqual iterates through entries in ascending order starting from the first entry with revision >= pivot.
// TODO: use binary search on the ring buffer to locate the first entry >= nextRev instead of a full scan
func (r *ringBuffer[T]) AscendGreaterOrEqual(pivot int64, iter IterFunc[T]) {
	if r.size == 0 {
		return
	}
	idx := r.tail
	for seen := 0; seen < r.size; seen++ {
		e := r.buffer[idx]
		// Entries below the pivot are skipped, not terminal: older entries
		// may precede the first qualifying revision.
		if e.revision >= pivot {
			if !iter(e.revision, e.item) {
				return
			}
		}
		idx = (idx + 1) % len(r.buffer)
	}
}
// AscendLessThan iterates in ascending order over entries with revision < pivot.
func (r *ringBuffer[T]) AscendLessThan(pivot int64, iter IterFunc[T]) {
	idx := r.tail
	// seen < r.size is trivially false when the buffer is empty.
	for seen := 0; seen < r.size; seen++ {
		e := r.buffer[idx]
		// Revisions ascend from tail to head, so the first entry at or past
		// the pivot ends the scan; the callback may also stop it early.
		if e.revision >= pivot || !iter(e.revision, e.item) {
			return
		}
		idx = (idx + 1) % len(r.buffer)
	}
}
// DescendGreaterThan iterates in descending order over entries with revision > pivot.
func (r *ringBuffer[T]) DescendGreaterThan(pivot int64, iter IterFunc[T]) {
	// Start just before head, i.e. at the newest entry, and walk backwards.
	idx := r.moduloIndex(r.head - 1)
	for seen := 0; seen < r.size; seen++ {
		e := r.buffer[idx]
		// Going backwards, revisions only decrease: the first entry at or
		// below the pivot ends the scan; the callback may also stop it.
		if e.revision <= pivot || !iter(e.revision, e.item) {
			return
		}
		idx = r.moduloIndex(idx - 1)
	}
}
// DescendLessOrEqual iterates in descending order over entries with revision <= pivot.
func (r *ringBuffer[T]) DescendLessOrEqual(pivot int64, iter IterFunc[T]) {
	// Start at the newest entry and walk backwards.
	idx := r.moduloIndex(r.head - 1)
	for seen := 0; seen < r.size; seen++ {
		e := r.buffer[idx]
		// Entries above the pivot are skipped (not terminal): the newest
		// entries may exceed the pivot before qualifying ones are reached.
		if e.revision <= pivot && !iter(e.revision, e.item) {
			return
		}
		idx = r.moduloIndex(idx - 1)
	}
}
// PeekLatest returns the most recently-appended revision (or 0 if empty).
func (r *ringBuffer[T]) PeekLatest() int64 {
	if r.size == 0 {
		return 0
	}
	// head points at the next write position, so head-1 is the newest entry.
	// Use the shared moduloIndex helper instead of re-deriving the wrap-around
	// arithmetic inline (keeps this consistent with the Descend* iterators).
	return r.buffer[r.moduloIndex(r.head-1)].revision
}
// PeekOldest returns the oldest revision currently stored (or 0 if empty).
func (r *ringBuffer[T]) PeekOldest() int64 {
	if r.size == 0 {
		return 0
	}
	// tail always indexes the oldest retained entry.
	return r.buffer[r.tail].revision
}
// RebaseHistory discards every entry and resets the buffer to its empty state.
func (r *ringBuffer[T]) RebaseHistory() {
	r.head, r.tail, r.size = 0, 0, 0
	// Overwrite the storage with zero values so evicted items do not keep
	// referenced memory alive.
	var zero entry[T]
	for i := range r.buffer {
		r.buffer[i] = zero
	}
}
func (r *ringBuffer[T]) moduloIndex(index int) int {
return (index + len(r.buffer)) % len(r.buffer)
} | go | github | https://github.com/etcd-io/etcd | cache/ringbuffer.go |
# Copyright: (c) 2018, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.errors import AnsibleError
from ansible.plugins.action import ActionBase
from ansible.utils.vars import merge_hash
class ActionModule(ActionBase):
    """Action plugin that checks on (or cleans up) a previously launched
    asynchronous task via the async_status module."""

    _VALID_ARGS = frozenset(('jid', 'mode'))

    def run(self, tmp=None, task_vars=None):
        results = super(ActionModule, self).run(tmp, task_vars)
        del tmp  # tmp no longer has any effect

        task_args = self._task.args
        if "jid" not in task_args:
            raise AnsibleError("jid is required")
        jid = task_args["jid"]
        mode = task_args.get("mode", "status")

        # Honour the legacy ANSIBLE_ASYNC_DIR environment keyword when set;
        # otherwise fall back to the async_dir shell option.
        legacy_env = [e for e in self._task.environment if
                      "ANSIBLE_ASYNC_DIR" in e]
        if len(legacy_env) > 0:
            # for backwards compatibility we need to get the dir from
            # ANSIBLE_ASYNC_DIR that is defined in the environment. This is
            # deprecated and will be removed in favour of shell options
            async_dir = legacy_env[0]['ANSIBLE_ASYNC_DIR']
            self._display.deprecated(
                "Setting the async dir from the environment keyword "
                "ANSIBLE_ASYNC_DIR is deprecated. Set the async_dir "
                "shell option instead",
                "2.12", collection_name='ansible.builtin')
        else:
            async_dir = self.get_shell_option('async_dir', default="~/.ansible_async")

        status = self._execute_module(
            module_name='ansible.legacy.async_status',
            task_vars=task_vars,
            module_args=dict(jid=jid, mode=mode, _async_dir=async_dir))
        return merge_hash(results, status)
import os
import json
import time
import logging
try:
import urllib.request as urllib2
except ImportError:
import urllib2
# Log to stderr at INFO so Travis shows progress while the leader waits.
log = logging.getLogger("travis.leader")
log.addHandler(logging.StreamHandler())
log.setLevel(logging.INFO)

# Names of the environment variables this script reads.
TRAVIS_JOB_NUMBER = 'TRAVIS_JOB_NUMBER'
TRAVIS_BUILD_ID = 'TRAVIS_BUILD_ID'
POLLING_INTERVAL = 'LEADER_POLLING_INTERVAL'

build_id = os.getenv(TRAVIS_BUILD_ID)
# Seconds between polls of the build matrix (default: 5).
polling_interval = int(os.getenv(POLLING_INTERVAL, '5'))

# Assume the first job of the matrix (job number "*.1") is the leader.
is_leader = lambda job_number: job_number.endswith('.1')

if not os.getenv(TRAVIS_JOB_NUMBER):
    # seems even for builds with only one job, this won't get here
    log.fatal("Don't use defining leader for build without matrix")
    exit(1)
elif is_leader(os.getenv(TRAVIS_JOB_NUMBER)):
    log.info("This is a leader")
else:
    # Since this runs as a subprocess, env variables are exported back via a
    # file that the calling shell sources.
    with open(".to_export_back", "w") as export_var:
        export_var.write("BUILD_MINION=YES")
    log.info("This is a minion")
    exit(0)
class MatrixElement(object):
    """One job of the Travis build matrix, reduced to the fields we poll."""

    def __init__(self, json_raw):
        job_number = json_raw['number']
        self.allow_failure = json_raw['allow_failure']
        self.is_finished = json_raw['finished_at'] is not None
        self.is_succeeded = json_raw['result'] == 0
        self.number = job_number
        self.is_leader = is_leader(job_number)
def matrix_snapshot():
    """Fetch the current state of every job in this build from the Travis API.

    :return: list of MatrixElement, one per matrix job (leader included)
    """
    url = "https://api.travis-ci.org/builds/{0}".format(build_id)
    response = urllib2.build_opener().open(url).read()
    raw_json = json.loads(response)
    return [MatrixElement(element) for element in raw_json["matrix"]]
def wait_others_to_finish():
    """Block until every non-leader, non-allow-failure job has finished."""
    def others_finished():
        """Poll the matrix once.

        The leader itself is excluded: it is running this code and can
        therefore never be finished.

        :return: tuple(all others finished?, list of unfinished job numbers)
        """
        snapshot = matrix_snapshot()
        finished = [el.is_finished for el in snapshot
                    if not (el.is_leader or el.allow_failure)]
        waiting = [el.number for el in snapshot
                   if not el.is_leader and not el.is_finished]
        # Bug fix: the original used reduce(lambda a, b: a and b, finished).
        # `reduce` is not a builtin on Python 3 (NameError), and reduce over
        # an empty list raised TypeError when every other job was
        # allow_failure.  all() handles both cases (all([]) is True).
        return all(finished), waiting

    while True:
        finished, waiting_list = others_finished()
        if finished:
            break
        log.info("Leader waits for minions {0}...".format(waiting_list))  # just in case do not get "silence timeout"
        time.sleep(polling_interval)
try:
    wait_others_to_finish()
    final_snapshot = matrix_snapshot()
    log.info("Final Results: {0}".format(
        [(e.number, e.is_succeeded, e.allow_failure) for e in final_snapshot]))

    BUILD_AGGREGATE_STATUS = 'BUILD_AGGREGATE_STATUS'
    # Jobs that count towards the aggregate: not the leader, not allow_failure.
    others_snapshot = [el for el in final_snapshot if not (el.is_leader or el.allow_failure)]
    # Bug fix: `reduce` is not a builtin on Python 3 and crashed on an empty
    # job list; all()/any() are equivalent and total.  With no counted jobs,
    # all(...) is vacuously True, so the build is reported as succeeded.
    if all(e.is_succeeded for e in others_snapshot):
        os.environ[BUILD_AGGREGATE_STATUS] = "others_succeeded"
    elif not any(e.is_succeeded for e in others_snapshot):
        log.error("Others Failed")
        os.environ[BUILD_AGGREGATE_STATUS] = "others_failed"
    else:
        # Mixed results: some succeeded, some failed.
        # (log.warning replaces the deprecated log.warn alias.)
        log.warning("Others Unknown")
        os.environ[BUILD_AGGREGATE_STATUS] = "unknown"
    # since python is subprocess, env variables are exported back via file
    with open(".to_export_back", "w") as export_var:
        export_var.write("BUILD_LEADER=YES {0}={1}".format(
            BUILD_AGGREGATE_STATUS, os.environ[BUILD_AGGREGATE_STATUS]))
except Exception as e:
    log.fatal(e)
import unittest
from django.contrib.auth.models import Group, Permission
from django.core.cache import caches
from django.core.files.uploadedfile import SimpleUploadedFile
from django.db.utils import IntegrityError
from django.test import TestCase
from django.test.utils import override_settings
from django.urls import reverse
from willow.image import Image as WillowImage
from wagtail.core.models import Collection, GroupCollectionPermission, Page
from wagtail.images.models import Rendition, SourceImageIOError
from wagtail.images.rect import Rect
from wagtail.tests.testapp.models import EventPage, EventPageCarouselItem
from wagtail.tests.utils import WagtailTestUtils
from .utils import Image, get_test_image_file
class TestImage(TestCase):
    """Unit tests for the core helpers on the Image model."""

    def setUp(self):
        # Create an image for running tests on (640x480 per get_test_image_file)
        self.image = Image.objects.create(
            title="Test image",
            file=get_test_image_file(colour='white'),
        )

    def test_is_portrait(self):
        self.assertFalse(self.image.is_portrait())

    def test_is_landscape(self):
        self.assertTrue(self.image.is_landscape())

    def test_get_rect(self):
        # Bug fix: this previously used assertTrue(a, b), where the second
        # argument is silently treated as the failure *message*, so the
        # comparison with the expected Rect was never actually made.
        self.assertEqual(self.image.get_rect(), Rect(0, 0, 640, 480))

    def test_get_focal_point(self):
        self.assertEqual(self.image.get_focal_point(), None)

        # Add a focal point to the image
        self.image.focal_point_x = 100
        self.image.focal_point_y = 200
        self.image.focal_point_width = 50
        self.image.focal_point_height = 20

        # Get it (x/y are the centre; the rect spans width/height around it)
        self.assertEqual(self.image.get_focal_point(), Rect(75, 190, 125, 210))

    def test_has_focal_point(self):
        self.assertFalse(self.image.has_focal_point())

        # Add a focal point to the image
        self.image.focal_point_x = 100
        self.image.focal_point_y = 200
        self.image.focal_point_width = 50
        self.image.focal_point_height = 20

        self.assertTrue(self.image.has_focal_point())

    def test_set_focal_point(self):
        self.assertEqual(self.image.focal_point_x, None)
        self.assertEqual(self.image.focal_point_y, None)
        self.assertEqual(self.image.focal_point_width, None)
        self.assertEqual(self.image.focal_point_height, None)

        self.image.set_focal_point(Rect(100, 150, 200, 350))

        self.assertEqual(self.image.focal_point_x, 150)
        self.assertEqual(self.image.focal_point_y, 250)
        self.assertEqual(self.image.focal_point_width, 100)
        self.assertEqual(self.image.focal_point_height, 200)

        # Passing None clears the focal point again
        self.image.set_focal_point(None)

        self.assertEqual(self.image.focal_point_x, None)
        self.assertEqual(self.image.focal_point_y, None)
        self.assertEqual(self.image.focal_point_width, None)
        self.assertEqual(self.image.focal_point_height, None)

    def test_is_stored_locally(self):
        self.assertTrue(self.image.is_stored_locally())

    @override_settings(DEFAULT_FILE_STORAGE='wagtail.tests.dummy_external_storage.DummyExternalStorage')
    def test_is_stored_locally_with_external_storage(self):
        self.assertFalse(self.image.is_stored_locally())

    def test_get_file_size(self):
        file_size = self.image.get_file_size()
        self.assertIsInstance(file_size, int)
        self.assertGreater(file_size, 0)

    def test_get_file_size_on_missing_file_raises_sourceimageioerror(self):
        self.image.file.delete(save=False)

        with self.assertRaises(SourceImageIOError):
            self.image.get_file_size()
class TestImageQuerySet(TestCase):
    """Tests for search and ordering behaviour of the Image queryset."""

    def test_search_method(self):
        # Create an image for running tests on
        image = Image.objects.create(
            title="Test image",
            file=get_test_image_file(),
        )

        # Search for it
        results = Image.objects.search("Test")
        self.assertEqual(list(results), [image])

    def test_operators(self):
        aaa_image = Image.objects.create(
            title="AAA Test image",
            file=get_test_image_file(),
        )
        zzz_image = Image.objects.create(
            title="ZZZ Test image",
            file=get_test_image_file(),
        )

        # "and" requires every term to match; only the AAA image has both.
        results = Image.objects.search("aaa test", operator='and')
        self.assertEqual(list(results), [aaa_image])

        # "or" matches any term; both images contain "test".
        results = Image.objects.search("aaa test", operator='or')
        sorted_results = sorted(results, key=lambda img: img.title)
        self.assertEqual(sorted_results, [aaa_image, zzz_image])

    def test_custom_ordering(self):
        aaa_image = Image.objects.create(
            title="AAA Test image",
            file=get_test_image_file(),
        )
        zzz_image = Image.objects.create(
            title="ZZZ Test image",
            file=get_test_image_file(),
        )

        # An explicit order_by on the queryset should carry over to search results.
        results = Image.objects.order_by('title').search("Test")
        self.assertEqual(list(results), [aaa_image, zzz_image])
        results = Image.objects.order_by('-title').search("Test")
        self.assertEqual(list(results), [zzz_image, aaa_image])

    def test_search_indexing_prefetches_tags(self):
        for i in range(0, 10):
            image = Image.objects.create(
                title="Test image %d" % i,
                file=get_test_image_file(),
            )
            image.tags.add('aardvark', 'artichoke', 'armadillo')

        # Two queries total: one for the images, one prefetching all tags -
        # i.e. no per-image tag query.
        with self.assertNumQueries(2):
            results = {
                image.title: [tag.name for tag in image.tags.all()]
                for image in Image.get_indexed_objects()
            }

        self.assertTrue('aardvark' in results['Test image 0'])
class TestImagePermissions(TestCase, WagtailTestUtils):
    """Tests for Image.is_editable_by_user across permission configurations."""

    def setUp(self):
        # Create some user accounts for testing permissions
        self.user = self.create_user(username='user', email='user@email.com', password='password')
        self.owner = self.create_user(username='owner', email='owner@email.com', password='password')
        self.editor = self.create_user(username='editor', email='editor@email.com', password='password')
        self.editor.groups.add(Group.objects.get(name='Editors'))
        self.administrator = self.create_superuser(
            username='administrator', email='administrator@email.com', password='password'
        )

        # Owner user must have the add_image permission
        image_adders_group = Group.objects.create(name="Image adders")
        GroupCollectionPermission.objects.create(
            group=image_adders_group,
            collection=Collection.get_first_root_node(),
            permission=Permission.objects.get(codename='add_image'),
        )
        self.owner.groups.add(image_adders_group)

        # Create an image for running tests on
        self.image = Image.objects.create(
            title="Test image",
            uploaded_by_user=self.owner,
            file=get_test_image_file(),
        )

    def test_administrator_can_edit(self):
        self.assertTrue(self.image.is_editable_by_user(self.administrator))

    def test_editor_can_edit(self):
        self.assertTrue(self.image.is_editable_by_user(self.editor))

    def test_owner_can_edit(self):
        # Uploaders may edit their own images through the add permission.
        self.assertTrue(self.image.is_editable_by_user(self.owner))

    def test_user_cant_edit(self):
        # A user with no relevant permissions cannot edit.
        self.assertFalse(self.image.is_editable_by_user(self.user))
class TestRenditions(TestCase):
    """Tests for generating and caching image renditions."""

    def setUp(self):
        # Create an image for running tests on (640x480, 4:3)
        self.image = Image.objects.create(
            title="Test image",
            file=get_test_image_file(),
        )

    def test_get_rendition_model(self):
        self.assertIs(Image.get_rendition_model(), Rendition)

    def test_minification(self):
        rendition = self.image.get_rendition('width-400')

        # Check size (height scales to preserve the 4:3 aspect ratio)
        self.assertEqual(rendition.width, 400)
        self.assertEqual(rendition.height, 300)

        # check that the rendition has been recorded under the correct filter,
        # via the Rendition.filter_spec attribute (in active use as of Wagtail 1.8)
        self.assertEqual(rendition.filter_spec, 'width-400')

    def test_resize_to_max(self):
        rendition = self.image.get_rendition('max-100x100')

        # Check size
        self.assertEqual(rendition.width, 100)
        self.assertEqual(rendition.height, 75)

    def test_resize_to_min(self):
        rendition = self.image.get_rendition('min-120x120')

        # Check size
        self.assertEqual(rendition.width, 160)
        self.assertEqual(rendition.height, 120)

    def test_resize_to_original(self):
        rendition = self.image.get_rendition('original')

        # Check size
        self.assertEqual(rendition.width, 640)
        self.assertEqual(rendition.height, 480)

    def test_cache(self):
        # Get two renditions with the same filter
        first_rendition = self.image.get_rendition('width-400')
        second_rendition = self.image.get_rendition('width-400')

        # Check that they are the same object
        self.assertEqual(first_rendition, second_rendition)

    def test_alt_attribute(self):
        # The alt text of a rendition falls back to the image title.
        rendition = self.image.get_rendition('width-400')
        self.assertEqual(rendition.alt, "Test image")

    @override_settings(
        CACHES={
            'renditions': {
                'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
            },
        },
    )
    def test_renditions_cache_backend(self):
        cache = caches['renditions']
        rendition = self.image.get_rendition('width-500')
        rendition_cache_key = "image-{}-{}-{}".format(
            rendition.image.id,
            rendition.focal_point_key,
            rendition.filter_spec
        )

        # Check rendition is saved to cache
        self.assertEqual(cache.get(rendition_cache_key), rendition)

        # Mark a rendition to check it comes from cache
        rendition._from_cache = 'original'
        cache.set(rendition_cache_key, rendition)

        # Check if get_rendition returns the rendition from cache
        # (zero queries proves the database was not consulted)
        with self.assertNumQueries(0):
            new_rendition = self.image.get_rendition('width-500')
        self.assertEqual(new_rendition._from_cache, 'original')

        # changing the image file should invalidate the cache
        self.image.file = get_test_image_file(colour='green')
        self.image.save()
        # deleting renditions would normally happen within the 'edit' view on file change -
        # we're bypassing that here, so have to do it manually
        self.image.renditions.all().delete()
        new_rendition = self.image.get_rendition('width-500')
        self.assertFalse(hasattr(new_rendition, '_from_cache'))

        # changing it back should also generate a new rendition and not re-use
        # the original one (because that file has now been deleted in the change)
        self.image.file = get_test_image_file(colour='white')
        self.image.save()
        self.image.renditions.all().delete()
        new_rendition = self.image.get_rendition('width-500')
        self.assertFalse(hasattr(new_rendition, '_from_cache'))
class TestUsageCount(TestCase):
    """Tests for Image.get_usage() counts with WAGTAIL_USAGE_COUNT_ENABLED."""

    fixtures = ['test.json']

    def setUp(self):
        self.image = Image.objects.create(
            title="Test image",
            file=get_test_image_file(),
        )

    @override_settings(WAGTAIL_USAGE_COUNT_ENABLED=True)
    def test_unused_image_usage_count(self):
        self.assertEqual(self.image.get_usage().count(), 0)

    @override_settings(WAGTAIL_USAGE_COUNT_ENABLED=True)
    def test_used_image_document_usage_count(self):
        # Attach the image to a page via a carousel item, making it "used".
        page = EventPage.objects.get(id=4)
        event_page_carousel_item = EventPageCarouselItem()
        event_page_carousel_item.page = page
        event_page_carousel_item.image = self.image
        event_page_carousel_item.save()
        self.assertEqual(self.image.get_usage().count(), 1)
class TestGetUsage(TestCase):
    """Tests for the contents returned by Image.get_usage()."""

    fixtures = ['test.json']

    def setUp(self):
        self.image = Image.objects.create(
            title="Test image",
            file=get_test_image_file(),
        )

    def test_image_get_usage_not_enabled(self):
        # With usage counting disabled (the default), usage is always empty.
        self.assertEqual(list(self.image.get_usage()), [])

    @override_settings(WAGTAIL_USAGE_COUNT_ENABLED=True)
    def test_unused_image_get_usage(self):
        self.assertEqual(list(self.image.get_usage()), [])

    @override_settings(WAGTAIL_USAGE_COUNT_ENABLED=True)
    def test_used_image_document_get_usage(self):
        # Attach the image to a page via a carousel item, making it "used".
        page = EventPage.objects.get(id=4)
        event_page_carousel_item = EventPageCarouselItem()
        event_page_carousel_item.page = page
        event_page_carousel_item.image = self.image
        event_page_carousel_item.save()

        self.assertTrue(issubclass(Page, type(self.image.get_usage()[0])))
class TestGetWillowImage(TestCase):
    """Tests for the Image.get_willow_image() context manager."""

    fixtures = ['test.json']

    def setUp(self):
        self.image = Image.objects.create(
            title="Test image",
            file=get_test_image_file(),
        )

    def test_willow_image_object_returned(self):
        with self.image.get_willow_image() as willow_image:
            self.assertIsInstance(willow_image, WillowImage)

    def test_with_missing_image(self):
        # Image id=1 in test fixtures has a missing image file
        bad_image = Image.objects.get(id=1)

        # Attempting to get the Willow image for images without files
        # should raise a SourceImageIOError
        with self.assertRaises(SourceImageIOError):
            with bad_image.get_willow_image():
                self.fail()  # Shouldn't get here

    def test_closes_image(self):
        # This tests that willow closes images after use
        with self.image.get_willow_image():
            self.assertFalse(self.image.file.closed)

        self.assertTrue(self.image.file.closed)

    def test_closes_image_on_exception(self):
        # This tests that willow closes images when the with is exited with an exception
        try:
            with self.image.get_willow_image():
                self.assertFalse(self.image.file.closed)
                raise ValueError("Something went wrong!")
        except ValueError:
            pass

        self.assertTrue(self.image.file.closed)

    def test_doesnt_close_open_image(self):
        # This tests that when the image file is already open, get_willow_image doesn't close it (#1256)
        self.image.file.open('rb')

        with self.image.get_willow_image():
            pass

        self.assertFalse(self.image.file.closed)

        self.image.file.close()
class TestIssue573(TestCase):
    """
    This tests for a bug which causes filename limit on Renditions to be reached
    when the Image has a long original filename and a big focal point key
    """
    def test_issue_573(self):
        # Create an image with a big filename and focal point
        image = Image.objects.create(
            title="Test image",
            file=get_test_image_file(
                'thisisaverylongfilename-abcdefghijklmnopqrstuvwxyz-supercalifragilisticexpialidocious.png'
            ),
            focal_point_x=1000,
            focal_point_y=1000,
            focal_point_width=1000,
            focal_point_height=1000,
        )

        # Try creating a rendition from that image
        # This would crash if the bug is present
        image.get_rendition('fill-800x600')
@override_settings(_WAGTAILSEARCH_FORCE_AUTO_UPDATE=['elasticsearch'])
class TestIssue613(TestCase, WagtailTestUtils):
    """Regression tests: tags set on add/edit must reach the search index.

    Skipped unless an 'elasticsearch' search backend is configured.
    """

    def get_elasticsearch_backend(self):
        from django.conf import settings
        from wagtail.search.backends import get_search_backend

        if 'elasticsearch' not in settings.WAGTAILSEARCH_BACKENDS:
            raise unittest.SkipTest("No elasticsearch backend active")

        return get_search_backend('elasticsearch')

    def setUp(self):
        self.search_backend = self.get_elasticsearch_backend()
        self.login()

    def add_image(self, **params):
        # Submit the image-add form; extra form fields come in via params.
        post_data = {
            'title': "Test image",
            'file': SimpleUploadedFile('test.png', get_test_image_file().file.getvalue()),
        }
        post_data.update(params)
        response = self.client.post(reverse('wagtailimages:add'), post_data)

        # Should redirect back to index
        self.assertRedirects(response, reverse('wagtailimages:index'))

        # Check that the image was created
        images = Image.objects.filter(title="Test image")
        self.assertEqual(images.count(), 1)

        # Test that size was populated correctly
        image = images.first()
        self.assertEqual(image.width, 640)
        self.assertEqual(image.height, 480)

        return image

    def edit_image(self, **params):
        # Create an image to edit
        self.image = Image.objects.create(
            title="Test image",
            file=get_test_image_file(),
        )

        # Edit it
        post_data = {
            'title': "Edited",
        }
        post_data.update(params)
        response = self.client.post(reverse('wagtailimages:edit', args=(self.image.id,)), post_data)

        # Should redirect back to index
        self.assertRedirects(response, reverse('wagtailimages:index'))

        # Check that the image was edited
        image = Image.objects.get(id=self.image.id)
        self.assertEqual(image.title, "Edited")
        return image

    def test_issue_613_on_add(self):
        # Reset the search index
        self.search_backend.reset_index()
        self.search_backend.add_type(Image)

        # Add an image with some tags
        image = self.add_image(tags="hello")
        self.search_backend.refresh_index()

        # Search for it by tag
        results = self.search_backend.search("hello", Image)

        # Check
        self.assertEqual(len(results), 1)
        self.assertEqual(results[0].id, image.id)

    def test_issue_613_on_edit(self):
        # Reset the search index
        self.search_backend.reset_index()
        self.search_backend.add_type(Image)

        # Add an image with some tags
        image = self.edit_image(tags="hello")
        self.search_backend.refresh_index()

        # Search for it by tag
        results = self.search_backend.search("hello", Image)

        # Check
        self.assertEqual(len(results), 1)
        self.assertEqual(results[0].id, image.id)
class TestIssue312(TestCase):
    """Duplicate renditions must be served from one row and blocked at the DB."""

    def test_duplicate_renditions(self):
        # Create an image
        image = Image.objects.create(
            title="Test image",
            file=get_test_image_file(),
        )

        # Get two renditions and check that they're the same
        rend1 = image.get_rendition('fill-100x100')
        rend2 = image.get_rendition('fill-100x100')
        self.assertEqual(rend1, rend2)

        # Now manually duplicate the rendition and check that the database blocks it
        # (unique constraint on image/filter_spec/focal_point_key)
        self.assertRaises(
            IntegrityError,
            Rendition.objects.create,
            image=rend1.image,
            filter_spec=rend1.filter_spec,
            width=rend1.width,
            height=rend1.height,
            focal_point_key=rend1.focal_point_key,
        )
class TestFilenameReduction(TestCase):
    """
    This tests for a bug which results in filenames without extensions
    causing an infinite loop
    """
    def test_filename_reduction_no_ext(self):
        # Create an image with a big filename and no extension
        image = Image.objects.create(
            title="Test image",
            file=get_test_image_file(
                'thisisaverylongfilename-abcdefghijklmnopqrstuvwxyz-supercalifragilisticexpialidocioussuperlong'
            )
        )

        # Saving file will result in infinite loop when bug is present
        image.save()
        # The stored name is truncated to fit the field's max length.
        self.assertEqual("original_images/thisisaverylongfilename-abcdefghijklmnopqrstuvwxyz-supercalifragilisticexpiali", image.file.name)

    # Test for happy path. Long filename with extension
    def test_filename_reduction_ext(self):
        # Create an image with a big filename and extensions
        image = Image.objects.create(
            title="Test image",
            file=get_test_image_file(
                'thisisaverylongfilename-abcdefghijklmnopqrstuvwxyz-supercalifragilisticexpialidocioussuperlong.png'
            )
        )

        image.save()
        # Truncation preserves the extension.
        self.assertEqual("original_images/thisisaverylongfilename-abcdefghijklmnopqrstuvwxyz-supercalifragilisticexp.png", image.file.name)
# Licensed to the Software Freedom Conservancy (SFC) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The SFC licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from selenium import selenium
import unittest
class TestGoogle(unittest.TestCase):
    """Smoke test driving a Google search through a local Selenium RC server."""

    def setUp(self):
        # Connect to a Selenium RC server on localhost:4444 driving Firefox.
        self.selenium = selenium(
            "localhost", 4444, "*firefox", "http://www.google.com/webhp")
        self.selenium.start()

    def test_google(self):
        sel = self.selenium
        # Search for a phrase and verify the result page title.
        sel.open("http://www.google.com/webhp")
        sel.type("q", "hello world")
        sel.click("btnG")
        sel.wait_for_page_to_load(5000)
        self.assertEqual("hello world - Google Search", sel.get_title())

    def tearDown(self):
        self.selenium.stop()
# Allow running this module directly as a test script.
if __name__ == "__main__":
    unittest.main()
#include <complex.h>
#include "opencv_lapack.h"
/* Take the addresses of a few representative LAPACK routines so the linker
 * must resolve them; this file exists only as a CMake compile/link probe. */
static char* check_fn1 = (char*)sgesv_;
static char* check_fn2 = (char*)sposv_;
static char* check_fn3 = (char*)spotrf_;
static char* check_fn4 = (char*)sgesdd_;

int main(int argc, char* argv[])
{
    (void)argv;
    /* The condition is effectively never true; it only keeps the compiler
     * from optimising away the symbol references above. */
    if(argc > 1000)
        return check_fn1[0] + check_fn2[0] + check_fn3[0] + check_fn4[0];
    return 0;
}
from __future__ import division
import numpy
from chainer import backend
from chainer.backends import cuda
from chainer import optimizer
# Default hyperparameter values for M-SVAG, shared by MSVAGRule and MSVAG.
_default_hyperparam = optimizer.Hyperparameter()
_default_hyperparam.lr = 0.1
_default_hyperparam.beta = 0.9
_default_hyperparam.eta = 1.0
_default_hyperparam.weight_decay_rate = 0
class MSVAGRule(optimizer.UpdateRule):
    """Update rule of the M-SVAG optimization algorithm.

    See: `Dissecting Adam: The Sign, Magnitude and Variance of Stochastic \
    Gradients <https://arxiv.org/abs/1705.07774>`_

    Modified for proper weight decay.

    See: `Fixing Weight Decay Regularization in Adam \
    <https://openreview.net/forum?id=rk6qdGgCZ>`_

    See :class:`~chainer.optimizers.MSVAG` for the default values
    of the hyperparameters.

    Args:
        parent_hyperparam (~chainer.optimizer.Hyperparameter): Hyperparameter
            that provides the default values.
        lr (float): Learning rate.
        beta (float): Exponential decay rate of the first and second order
            moment.
        eta (float): Schedule multiplier, can be used for warm restarts.
        weight_decay_rate (float): Weight decay rate.

    """

    def __init__(self, parent_hyperparam=None,
                 lr=None, beta=None,
                 eta=None, weight_decay_rate=None):
        super(MSVAGRule, self).__init__(
            parent_hyperparam or _default_hyperparam)
        if lr is not None:
            self.hyperparam.lr = lr
        if beta is not None:
            self.hyperparam.beta = beta
        if eta is not None:
            self.hyperparam.eta = eta
        if weight_decay_rate is not None:
            self.hyperparam.weight_decay_rate = weight_decay_rate
        # Running product beta^t, used for bias correction of the moments.
        self.beta_power = self.hyperparam.beta

    def init_state(self, param):
        # m, v: exponential moving averages of the gradient and the squared
        # gradient, allocated on the same device as the parameter.
        xp = backend.get_array_module(param.data)
        with cuda.get_device_from_array(param.data):
            self.state['m'] = xp.zeros_like(param.data)
            self.state['v'] = xp.zeros_like(param.data)

    def update_core_cpu(self, param):
        grad = param.grad
        if grad is None:
            return
        hp = self.hyperparam
        m, v = self.state['m'], self.state['v']

        # rho: variance-correction coefficient (see the M-SVAG paper),
        # capped just below 1 to keep the 1/(1 - rho) term finite.
        rho = (((1.0 - hp.beta) ** 2) * (1.0 - self.beta_power ** 2) /
               (((1.0 - self.beta_power) ** 2) * (1.0 - hp.beta ** 2)))
        rho = min(rho, 0.9999)

        # Update the moving averages in place.
        m += (1 - hp.beta) * (grad - m)
        v += (1 - hp.beta) * (grad * grad - v)

        # Bias-corrected moment estimates.
        mt = m / (1 - self.beta_power)
        vt = v / (1 - self.beta_power)

        mt2 = mt ** 2
        # s: variance estimate of the gradient.
        s = (vt - mt2) / (1 - rho)

        # Per-element damping factor in [0, 1]; NaNs (from 0/0 when both
        # moments are zero) are treated as 0, matching the GPU kernel.
        factor = numpy.clip(mt2 / (mt2 + rho * s), 0, 1)
        if isinstance(factor, numpy.ndarray):
            factor[numpy.isnan(factor)] = 0
        else:
            if numpy.isnan(factor):
                factor = 0

        # Gradient step plus decoupled (AdamW-style) weight decay.
        param.data -= hp.eta * (hp.lr * mt * factor +
                                hp.weight_decay_rate * param.data)
        self.beta_power *= hp.beta

    def update_core_gpu(self, param):
        grad = param.grad
        if grad is None:
            return
        hp = self.hyperparam
        # Same rho as in update_core_cpu; computed on the host.
        rho = (((1.0 - hp.beta) ** 2) * (1.0 - self.beta_power ** 2) /
               (((1.0 - self.beta_power) ** 2) * (1.0 - hp.beta ** 2)))
        rho = min(rho, 0.9999)

        # The kernel below mirrors the CPU path element-wise; the m==0 && v==0
        # branch plays the role of the CPU NaN guard.
        cuda.elementwise(
            'T grad, T lr, T one_minus_beta, T eta, \
             T weight_decay_rate, T beta_power, T rho',
            'T param, T m, T v',
            '''m += one_minus_beta * (grad - m);
               v += one_minus_beta * (grad * grad - v);

               T mt = m / (1.0 - beta_power);
               T vt = v / (1.0 - beta_power);
               T mt2 = mt*mt;
               T s = (vt - mt2) / (1.0 - rho);

               T factor;
               if (m == 0 && v == 0)
                   factor = 0.0;
               else
                   factor = min(1.0, max(0.0, mt2 / (mt2 + rho * s)));

               param -= eta * (lr * mt * factor +
                               weight_decay_rate * param);''',
            'msvag')(grad, hp.lr, 1 - hp.beta,
                     hp.eta, hp.weight_decay_rate,
                     self.beta_power, rho,
                     param.data, self.state['m'], self.state['v'])

        self.beta_power *= hp.beta
class MSVAG(optimizer.GradientMethod):
    """M-SVAG optimizer.

    See: `Dissecting Adam: The Sign, Magnitude and Variance of Stochastic \
    Gradients <https://arxiv.org/abs/1705.07774>`_

    Modified for proper weight decay (also called AdamW).
    AdamW introduces the additional parameters ``eta``
    and ``weight_decay_rate``, which can be used to properly scale the
    learning rate, and decouple the weight decay rate from ``alpha``,
    as shown in the below paper.

    See: `Fixing Weight Decay Regularization in Adam \
    <https://openreview.net/forum?id=rk6qdGgCZ>`_

    Args:
        lr (float): Learning rate.
        beta (float): Exponential decay rate of the first and second order
            moment.
        eta (float): Schedule multiplier, can be used for warm restarts.
        weight_decay_rate (float): Weight decay rate.

    """

    def __init__(self,
                 lr=_default_hyperparam.lr,
                 beta=_default_hyperparam.beta,
                 eta=_default_hyperparam.eta,
                 weight_decay_rate=_default_hyperparam.weight_decay_rate):
        super(MSVAG, self).__init__()
        self.hyperparam.lr = lr
        self.hyperparam.beta = beta
        self.hyperparam.eta = eta
        self.hyperparam.weight_decay_rate = weight_decay_rate

    # Expose the hyperparameters as attributes of the optimizer itself.
    lr = optimizer.HyperparameterProxy('lr')
    beta = optimizer.HyperparameterProxy('beta')
    eta = optimizer.HyperparameterProxy('eta')
    weight_decay_rate = optimizer.HyperparameterProxy('weight_decay_rate')

    def create_update_rule(self):
        """Return a new MSVAGRule bound to this optimizer's hyperparameters."""
        return MSVAGRule(self.hyperparam)
//===----------------------------------------------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
#include "UseUsingCheck.h"
#include "../utils/LexerUtils.h"
#include "clang/AST/DeclGroup.h"
#include "clang/Basic/LangOptions.h"
#include "clang/Basic/SourceLocation.h"
#include "clang/Basic/SourceManager.h"
#include "clang/Basic/TokenKinds.h"
#include "clang/Lex/Lexer.h"
#include <string>
using namespace clang::ast_matchers;
namespace {

// Matches a LinkageSpecDecl with `extern "C"` linkage; used to implement the
// IgnoreExternC option.
AST_MATCHER(clang::LinkageSpecDecl, isExternCLinkage) {
  return Node.getLanguage() == clang::LinkageSpecLanguageIDs::C;
}
} // namespace
namespace clang::tidy::modernize {

// Binding names used to pass matched nodes from registerMatchers() to check().
static constexpr StringRef ExternCDeclName = "extern-c-decl";
static constexpr StringRef ParentDeclName = "parent-decl";
static constexpr StringRef TagDeclName = "tag-decl";
static constexpr StringRef TypedefName = "typedef";
static constexpr StringRef DeclStmtName = "decl-stmt";
// Reads check options: IgnoreMacros defaults to true, IgnoreExternC to false.
UseUsingCheck::UseUsingCheck(StringRef Name, ClangTidyContext *Context)
    : ClangTidyCheck(Name, Context),
      IgnoreMacros(Options.get("IgnoreMacros", true)),
      IgnoreExternC(Options.get("IgnoreExternC", false)) {}
// Persists the configured options so they appear in dumped configurations.
void UseUsingCheck::storeOptions(ClangTidyOptions::OptionMap &Opts) {
  Options.store(Opts, "IgnoreMacros", IgnoreMacros);
  Options.store(Opts, "IgnoreExternC", IgnoreExternC);
}
void UseUsingCheck::registerMatchers(MatchFinder *Finder) {
  // Match non-instantiated typedefs; record the enclosing extern "C" block
  // (if any) and the typedef's parent (a Decl or a function-local DeclStmt).
  Finder->addMatcher(
      typedefDecl(
          unless(isInstantiated()),
          optionally(hasAncestor(
              linkageSpecDecl(isExternCLinkage()).bind(ExternCDeclName))),
          anyOf(hasParent(decl().bind(ParentDeclName)),
                hasParent(declStmt().bind(DeclStmtName))))
          .bind(TypedefName),
      this);

  // This matcher is used to find tag declarations in source code within
  // typedefs. They appear in the AST just *prior* to the typedefs.
  Finder->addMatcher(
      tagDecl(
          anyOf(allOf(unless(anyOf(isImplicit(),
                                   classTemplateSpecializationDecl())),
                      anyOf(hasParent(decl().bind(ParentDeclName)),
                            hasParent(declStmt().bind(DeclStmtName)))),
                // We want the parent of the ClassTemplateDecl, not the parent
                // of the specialization.
                classTemplateSpecializationDecl(hasAncestor(classTemplateDecl(
                    anyOf(hasParent(decl().bind(ParentDeclName)),
                          hasParent(declStmt().bind(DeclStmtName))))))))
          .bind(TagDeclName),
      this);
}
void UseUsingCheck::check(const MatchFinder::MatchResult &Result) {
  // Resolve the parent of the matched node: either a Decl bound directly, or
  // the enclosing DeclStmt for typedefs local to a function body.
  const auto *ParentDecl = Result.Nodes.getNodeAs<Decl>(ParentDeclName);
  if (!ParentDecl) {
    const auto *ParentDeclStmt = Result.Nodes.getNodeAs<DeclStmt>(DeclStmtName);
    if (ParentDeclStmt) {
      if (ParentDeclStmt->isSingleDecl())
        ParentDecl = ParentDeclStmt->getSingleDecl();
      else
        // For a group declaration, take the last declaration of the group.
        ParentDecl =
            ParentDeclStmt->getDeclGroup().getDeclGroup()
                [ParentDeclStmt->getDeclGroup().getDeclGroup().size() - 1];
    }
  }
  if (!ParentDecl)
    return;

  const SourceManager &SM = *Result.SourceManager;
  const LangOptions &LO = getLangOpts();

  // Match CXXRecordDecl only to store the range of the last non-implicit full
  // declaration, to later check whether it's within the typedef itself.
  const auto *MatchedTagDecl = Result.Nodes.getNodeAs<TagDecl>(TagDeclName);
  if (MatchedTagDecl) {
    // It is not sufficient to just track the last TagDecl that we've seen,
    // because if one struct or union is nested inside another, the last
    // TagDecl before the typedef will be the nested one (PR#50990). Therefore,
    // we also keep track of the parent declaration, so that we can look up the
    // last TagDecl that is a sibling of the typedef in the AST.
    if (MatchedTagDecl->isThisDeclarationADefinition())
      LastTagDeclRanges[ParentDecl] = MatchedTagDecl->getSourceRange();
    return;
  }

  const auto *MatchedDecl = Result.Nodes.getNodeAs<TypedefDecl>(TypedefName);
  if (MatchedDecl->getLocation().isInvalid())
    return;

  const auto *ExternCDecl =
      Result.Nodes.getNodeAs<LinkageSpecDecl>(ExternCDeclName);
  if (ExternCDecl && IgnoreExternC)
    return;

  const SourceLocation StartLoc = MatchedDecl->getBeginLoc();

  if (StartLoc.isMacroID() && IgnoreMacros)
    return;

  static constexpr StringRef UseUsingWarning =
      "use 'using' instead of 'typedef'";

  // Warn at StartLoc but do not fix if there is macro or array.
  if (MatchedDecl->getUnderlyingType()->isArrayType() || StartLoc.isMacroID()) {
    diag(StartLoc, UseUsingWarning);
    return;
  }

  const TypeLoc TL = MatchedDecl->getTypeSourceInfo()->getTypeLoc();

  // Compute the textual type and an optional "*"/"&" qualifier for the
  // replacement. Done in an immediately-invoked lambda so both results can be
  // produced together.
  bool FunctionPointerCase = false;
  auto [Type, QualifierStr] = [MatchedDecl, this, &TL, &FunctionPointerCase,
                               &SM,
                               &LO]() -> std::pair<std::string, std::string> {
    SourceRange TypeRange = TL.getSourceRange();

    // Function pointer case, get the left and right side of the identifier
    // without the identifier.
    if (TypeRange.fullyContains(MatchedDecl->getLocation())) {
      FunctionPointerCase = true;
      SourceLocation StartLoc = MatchedDecl->getLocation();
      SourceLocation EndLoc = MatchedDecl->getLocation();
      // Widen over parentheses that immediately enclose the identifier,
      // e.g. `typedef int ((*fn))(void);`.
      while (true) {
        const std::optional<Token> Prev =
            utils::lexer::getPreviousToken(StartLoc, SM, LO);
        const std::optional<Token> Next =
            utils::lexer::findNextTokenSkippingComments(EndLoc, SM, LO);
        if (!Prev || Prev->isNot(tok::l_paren) || !Next ||
            Next->isNot(tok::r_paren))
          break;
        StartLoc = Prev->getLocation();
        EndLoc = Next->getLocation();
      }
      const auto RangeLeftOfIdentifier =
          CharSourceRange::getCharRange(TypeRange.getBegin(), StartLoc);
      const auto RangeRightOfIdentifier = CharSourceRange::getCharRange(
          Lexer::getLocForEndOfToken(EndLoc, 0, SM, LO),
          Lexer::getLocForEndOfToken(TypeRange.getEnd(), 0, SM, LO));
      const std::string VerbatimType =
          (Lexer::getSourceText(RangeLeftOfIdentifier, SM, LO) +
           Lexer::getSourceText(RangeRightOfIdentifier, SM, LO))
              .str();
      return {VerbatimType, ""};
    }
    StringRef ExtraReference = "";
    if (MainTypeEndLoc.isValid() && TypeRange.fullyContains(MainTypeEndLoc)) {
      // Each type introduced in a typedef can specify being a reference or
      // pointer type separately, so we need to figure out if the new
      // using-decl needs to be to a reference or pointer as well.
      const SourceLocation Tok = utils::lexer::findPreviousAnyTokenKind(
          MatchedDecl->getLocation(), SM, LO, tok::TokenKind::star,
          tok::TokenKind::amp, tok::TokenKind::comma,
          tok::TokenKind::kw_typedef);
      ExtraReference = Lexer::getSourceText(
          CharSourceRange::getCharRange(Tok, Tok.getLocWithOffset(1)), SM, LO);
      if (ExtraReference != "*" && ExtraReference != "&")
        ExtraReference = "";
      TypeRange.setEnd(MainTypeEndLoc);
    }
    return {
        Lexer::getSourceText(CharSourceRange::getTokenRange(TypeRange), SM, LO)
            .str(),
        ExtraReference.str()};
  }();

  const StringRef Name = MatchedDecl->getName();
  SourceRange ReplaceRange = MatchedDecl->getSourceRange();

  // typedefs with multiple comma-separated definitions produce multiple
  // consecutive TypedefDecl nodes whose SourceRanges overlap. Each range
  // starts at the "typedef" and then continues *across* previous definitions
  // through the end of the current TypedefDecl definition.
  // But also we need to check that the ranges belong to the same file because
  // different files may contain overlapping ranges.
  std::string Using = "using ";
  if (ReplaceRange.getBegin().isMacroID() ||
      (Result.SourceManager->getFileID(ReplaceRange.getBegin()) !=
       Result.SourceManager->getFileID(LastReplacementEnd)) ||
      (ReplaceRange.getBegin() >= LastReplacementEnd)) {
    // This is the first (and possibly the only) TypedefDecl in a typedef. Save
    // Type and Name in case we find subsequent TypedefDecl's in this typedef.
    FirstTypedefType = Type;
    FirstTypedefName = Name.str();
    MainTypeEndLoc = TL.getEndLoc();
  } else {
    // This is additional TypedefDecl in a comma-separated typedef declaration.
    // Start replacement *after* prior replacement and separate with semicolon.
    ReplaceRange.setBegin(LastReplacementEnd);
    Using = ";\nusing ";

    // If this additional TypedefDecl's Type starts with the first
    // TypedefDecl's type, make this using statement refer back to the first
    // type, e.g. make
    // "typedef int Foo, *Foo_p;" -> "using Foo = int;\nusing Foo_p = Foo*;"
    if (Type == FirstTypedefType && !QualifierStr.empty())
      Type = FirstTypedefName;
  }
  if (!ReplaceRange.getEnd().isMacroID()) {
    // Skip past the typedef name so the next TypedefDecl in the same typedef
    // starts its replacement after this one (not needed in the function
    // pointer case, where the name sits inside the type text).
    const SourceLocation::IntTy Offset = FunctionPointerCase ? 0 : Name.size();
    LastReplacementEnd = ReplaceRange.getEnd().getLocWithOffset(Offset);
  }

  auto Diag = diag(ReplaceRange.getBegin(), UseUsingWarning);

  // If typedef contains a full tag declaration, extract its full text.
  auto LastTagDeclRange = LastTagDeclRanges.find(ParentDecl);
  if (LastTagDeclRange != LastTagDeclRanges.end() &&
      LastTagDeclRange->second.isValid() &&
      ReplaceRange.fullyContains(LastTagDeclRange->second)) {
    Type = std::string(Lexer::getSourceText(
        CharSourceRange::getTokenRange(LastTagDeclRange->second), SM, LO));
    if (Type.empty())
      return;
  }

  const std::string Replacement =
      (Using + Name + " = " + Type + QualifierStr).str();
  Diag << FixItHint::CreateReplacement(ReplaceRange, Replacement);
}
} // namespace clang::tidy::modernize | cpp | github | https://github.com/llvm/llvm-project | clang-tools-extra/clang-tidy/modernize/UseUsingCheck.cpp |
from __future__ import with_statement
from datetime import datetime
import importlib
import django
from django.contrib import admin
from django.utils import six
from django.utils.translation import ugettext_lazy as _
from django.conf.urls import url
from django.template.response import TemplateResponse
from django.contrib import messages
from django.contrib.admin.models import LogEntry, ADDITION, CHANGE, DELETION
from django.contrib.contenttypes.models import ContentType
from django.http import HttpResponseRedirect, HttpResponse
from django.core.urlresolvers import reverse
from django.conf import settings
from django.template.defaultfilters import pluralize
from django.utils.decorators import method_decorator
from django.views.decorators.http import require_POST
from .forms import (
ImportForm,
ConfirmImportForm,
ExportForm,
export_action_form_factory,
)
from .resources import (
modelresource_factory,
)
from .formats import base_formats
from .results import RowResult
from .tmp_storages import TempFolderStorage
from .signals import post_export, post_import
try:
from django.utils.encoding import force_text
except ImportError:
from django.utils.encoding import force_unicode as force_text
# Global default for skipping admin LogEntry rows after an import; can be
# overridden per-admin via ImportMixin.skip_admin_log.
SKIP_ADMIN_LOG = getattr(settings, 'IMPORT_EXPORT_SKIP_ADMIN_LOG', False)

# Storage backend used to stash the uploaded file between the dry-run preview
# (import_action) and the confirmed import (process_import). May be configured
# as a dotted-path string; resolved to a class below.
TMP_STORAGE_CLASS = getattr(settings, 'IMPORT_EXPORT_TMP_STORAGE_CLASS',
                            TempFolderStorage)

if isinstance(TMP_STORAGE_CLASS, six.string_types):
    try:
        # Nod to tastypie's use of importlib.
        parts = TMP_STORAGE_CLASS.split('.')
        module_path, class_name = '.'.join(parts[:-1]), parts[-1]
        module = importlib.import_module(module_path)
        TMP_STORAGE_CLASS = getattr(module, class_name)
    except ImportError as e:
        # Re-raise with a message naming the offending setting value.
        msg = "Could not import '%s' for import_export setting 'IMPORT_EXPORT_TMP_STORAGE_CLASS'" % TMP_STORAGE_CLASS
        raise ImportError(msg)


#: These are the default formats for import and export. Whether they can be
#: used or not is depending on their implementation in the tablib library.
DEFAULT_FORMATS = (
    base_formats.CSV,
    base_formats.XLS,
    base_formats.XLSX,
    base_formats.TSV,
    base_formats.ODS,
    base_formats.JSON,
    base_formats.YAML,
    base_formats.HTML,
)
class ImportExportMixinBase(object):

    def get_model_info(self):
        """Return ``(app_label, model_name)`` for this admin's model.

        Django 1.8 renamed ``Options.module_name`` to ``model_name``; fall
        back to the old attribute on earlier versions.
        """
        meta = self.model._meta
        try:
            model_name = meta.model_name
        except AttributeError:
            model_name = meta.module_name
        return (meta.app_label, model_name)
class ImportMixin(ImportExportMixinBase):
    """
    Import mixin.

    Adds two admin views: ``import/`` (upload + dry-run preview) and
    ``process_import/`` (confirmed import of the previously uploaded file).
    """

    #: template for change_list view
    change_list_template = 'admin/import_export/change_list_import.html'
    #: template for import view
    import_template_name = 'admin/import_export/import.html'
    #: resource class
    resource_class = None
    #: available import formats
    formats = DEFAULT_FORMATS
    #: import data encoding
    from_encoding = "utf-8"
    #: per-admin override for the IMPORT_EXPORT_SKIP_ADMIN_LOG setting
    skip_admin_log = None
    # storage class for saving temporary files
    tmp_storage_class = None

    def get_skip_admin_log(self):
        # Fall back to the module-level setting when not set on the admin.
        if self.skip_admin_log is None:
            return SKIP_ADMIN_LOG
        else:
            return self.skip_admin_log

    def get_tmp_storage_class(self):
        # Fall back to the module-level storage class when not set here.
        if self.tmp_storage_class is None:
            return TMP_STORAGE_CLASS
        else:
            return self.tmp_storage_class

    def get_urls(self):
        # Prepend the import views so they take precedence over the default
        # admin URL patterns.
        urls = super(ImportMixin, self).get_urls()
        info = self.get_model_info()
        my_urls = [
            url(r'^process_import/$',
                self.admin_site.admin_view(self.process_import),
                name='%s_%s_process_import' % info),
            url(r'^import/$',
                self.admin_site.admin_view(self.import_action),
                name='%s_%s_import' % info),
        ]
        return my_urls + urls

    def get_resource_kwargs(self, request, *args, **kwargs):
        # Hook: extra kwargs passed to the resource constructor.
        return {}

    def get_import_resource_kwargs(self, request, *args, **kwargs):
        # Hook: import-specific resource kwargs; defaults to the generic ones.
        return self.get_resource_kwargs(request, *args, **kwargs)

    def get_resource_class(self):
        # Build a default ModelResource when none was declared on the admin.
        if not self.resource_class:
            return modelresource_factory(self.model)
        else:
            return self.resource_class

    def get_import_resource_class(self):
        """
        Returns ResourceClass to use for import.
        """
        return self.get_resource_class()

    def get_import_formats(self):
        """
        Returns available import formats.
        """
        return [f for f in self.formats if f().can_import()]

    @method_decorator(require_POST)
    def process_import(self, request, *args, **kwargs):
        '''
        Perform the actual import action (after the user has confirmed he
        wishes to import)
        '''
        # NOTE(review): if confirm_form fails validation this view returns
        # None — presumably the form is always valid here since it was
        # produced by import_action; confirm.
        opts = self.model._meta
        resource = self.get_import_resource_class()(**self.get_import_resource_kwargs(request, *args, **kwargs))

        confirm_form = ConfirmImportForm(request.POST)
        if confirm_form.is_valid():
            import_formats = self.get_import_formats()
            input_format = import_formats[
                int(confirm_form.cleaned_data['input_format'])
            ]()
            # Re-read the file saved to temporary storage by import_action.
            tmp_storage = self.get_tmp_storage_class()(name=confirm_form.cleaned_data['import_file_name'])
            data = tmp_storage.read(input_format.get_read_mode())
            if not input_format.is_binary() and self.from_encoding:
                data = force_text(data, self.from_encoding)
            dataset = input_format.create_dataset(data)

            # This time for real: dry_run=False, and errors abort the import.
            result = resource.import_data(dataset, dry_run=False,
                                          raise_errors=True,
                                          file_name=confirm_form.cleaned_data['original_file_name'],
                                          user=request.user)

            if not self.get_skip_admin_log():
                # Add imported objects to LogEntry
                logentry_map = {
                    RowResult.IMPORT_TYPE_NEW: ADDITION,
                    RowResult.IMPORT_TYPE_UPDATE: CHANGE,
                    RowResult.IMPORT_TYPE_DELETE: DELETION,
                }
                content_type_id = ContentType.objects.get_for_model(self.model).pk
                for row in result:
                    # Skipped and errored rows did not touch the database, so
                    # no log entry is written for them.
                    if row.import_type != row.IMPORT_TYPE_ERROR and row.import_type != row.IMPORT_TYPE_SKIP:
                        LogEntry.objects.log_action(
                            user_id=request.user.pk,
                            content_type_id=content_type_id,
                            object_id=row.object_id,
                            object_repr=row.object_repr,
                            action_flag=logentry_map[row.import_type],
                            change_message="%s through import_export" % row.import_type,
                        )

            success_message = u'Import finished, with {} new {}{} and ' \
                              u'{} updated {}{}.'.format(result.totals[RowResult.IMPORT_TYPE_NEW],
                                                         opts.model_name,
                                                         pluralize(result.totals[RowResult.IMPORT_TYPE_NEW]),
                                                         result.totals[RowResult.IMPORT_TYPE_UPDATE],
                                                         opts.model_name,
                                                         pluralize(result.totals[RowResult.IMPORT_TYPE_UPDATE]))
            messages.success(request, success_message)
            # The temporary upload is no longer needed once imported.
            tmp_storage.remove()

            post_import.send(sender=None, model=self.model)

            # NOTE: local name shadows the imported ``url`` helper; harmless
            # here since get_urls is not re-entered from this scope.
            url = reverse('admin:%s_%s_changelist' % self.get_model_info(),
                          current_app=self.admin_site.name)
            return HttpResponseRedirect(url)

    def import_action(self, request, *args, **kwargs):
        '''
        Perform a dry_run of the import to make sure the import will not
        result in errors. If there where no error, save the user
        uploaded file to a local temp file that will be used by
        'process_import' for the actual import.
        '''
        resource = self.get_import_resource_class()(**self.get_import_resource_kwargs(request, *args, **kwargs))

        context = {}

        import_formats = self.get_import_formats()
        form = ImportForm(import_formats,
                          request.POST or None,
                          request.FILES or None)

        if request.POST and form.is_valid():
            input_format = import_formats[
                int(form.cleaned_data['input_format'])
            ]()
            import_file = form.cleaned_data['import_file']
            # first always write the uploaded file to disk as it may be a
            # memory file or else based on settings upload handlers
            tmp_storage = self.get_tmp_storage_class()()
            data = bytes()
            for chunk in import_file.chunks():
                data += chunk

            tmp_storage.save(data, input_format.get_read_mode())

            # then read the file, using the proper format-specific mode
            # warning, big files may exceed memory
            try:
                data = tmp_storage.read(input_format.get_read_mode())
                if not input_format.is_binary() and self.from_encoding:
                    data = force_text(data, self.from_encoding)
                dataset = input_format.create_dataset(data)
            except UnicodeDecodeError as e:
                return HttpResponse(_(u"<h1>Imported file has a wrong encoding: %s</h1>" % e))
            except Exception as e:
                return HttpResponse(_(u"<h1>%s encountered while trying to read file: %s</h1>" % (type(e).__name__, import_file.name)))
            # Dry run only — nothing is committed; the result is shown as a
            # preview and errors are surfaced in the template.
            result = resource.import_data(dataset, dry_run=True,
                                          raise_errors=False,
                                          file_name=import_file.name,
                                          user=request.user)

            context['result'] = result

            if not result.has_errors():
                # Carry the storage name and chosen format forward so
                # process_import can replay the exact same input.
                context['confirm_form'] = ConfirmImportForm(initial={
                    'import_file_name': tmp_storage.name,
                    'original_file_name': import_file.name,
                    'input_format': form.cleaned_data['input_format'],
                })

        # each_context grew a ``request`` argument in Django 1.8.
        if django.VERSION >= (1, 8, 0):
            context.update(self.admin_site.each_context(request))
        elif django.VERSION >= (1, 7, 0):
            context.update(self.admin_site.each_context())

        context['form'] = form
        context['opts'] = self.model._meta
        context['fields'] = [f.column_name for f in resource.get_user_visible_fields()]

        request.current_app = self.admin_site.name
        return TemplateResponse(request, [self.import_template_name],
                                context)
class ExportMixin(ImportExportMixinBase):
    """
    Export mixin.

    Adds an ``export/`` admin view that serves the (filtered) changelist
    queryset as a downloadable file in a user-chosen format.
    """
    #: resource class
    resource_class = None
    #: template for change_list view
    change_list_template = 'admin/import_export/change_list_export.html'
    #: template for export view
    export_template_name = 'admin/import_export/export.html'
    #: available export formats
    formats = DEFAULT_FORMATS
    #: export data encoding
    to_encoding = "utf-8"

    def get_urls(self):
        # Prepend the export view so it takes precedence over default admin
        # URL patterns.
        urls = super(ExportMixin, self).get_urls()
        my_urls = [
            url(r'^export/$',
                self.admin_site.admin_view(self.export_action),
                name='%s_%s_export' % self.get_model_info()),
        ]
        return my_urls + urls

    def get_resource_kwargs(self, request, *args, **kwargs):
        # Hook: extra kwargs passed to the resource constructor.
        return {}

    def get_export_resource_kwargs(self, request, *args, **kwargs):
        # Hook: export-specific resource kwargs; defaults to the generic ones.
        return self.get_resource_kwargs(request, *args, **kwargs)

    def get_resource_class(self):
        # Build a default ModelResource when none was declared on the admin.
        if not self.resource_class:
            return modelresource_factory(self.model)
        else:
            return self.resource_class

    def get_export_resource_class(self):
        """
        Returns ResourceClass to use for export.
        """
        return self.get_resource_class()

    def get_export_formats(self):
        """
        Returns available export formats.
        """
        return [f for f in self.formats if f().can_export()]

    def get_export_filename(self, file_format):
        # e.g. "Book-2016-01-31.csv"
        date_str = datetime.now().strftime('%Y-%m-%d')
        filename = "%s-%s.%s" % (self.model.__name__,
                                 date_str,
                                 file_format.get_extension())
        return filename

    def get_export_queryset(self, request):
        """
        Returns export queryset.

        Default implementation respects applied search and filters.
        """
        # copied from django/contrib/admin/options.py
        list_display = self.get_list_display(request)
        list_display_links = self.get_list_display_links(request, list_display)

        ChangeList = self.get_changelist(request)
        cl = ChangeList(request, self.model, list_display,
                        list_display_links, self.list_filter,
                        self.date_hierarchy, self.search_fields,
                        self.list_select_related, self.list_per_page,
                        self.list_max_show_all, self.list_editable,
                        self)

        # query_set has been renamed to queryset in Django 1.8
        try:
            return cl.queryset
        except AttributeError:
            return cl.query_set

    def get_export_data(self, file_format, queryset, *args, **kwargs):
        """
        Returns file_format representation for given queryset.
        """
        request = kwargs.pop("request")
        resource_class = self.get_export_resource_class()
        data = resource_class(**self.get_export_resource_kwargs(request)).export(queryset, *args, **kwargs)
        export_data = file_format.export_data(data)
        return export_data

    def export_action(self, request, *args, **kwargs):
        # GET renders the format-selection form; POST streams the file.
        formats = self.get_export_formats()
        form = ExportForm(formats, request.POST or None)
        if form.is_valid():
            file_format = formats[
                int(form.cleaned_data['file_format'])
            ]()

            queryset = self.get_export_queryset(request)
            export_data = self.get_export_data(file_format, queryset, request=request)
            content_type = file_format.get_content_type()
            # Django 1.7 uses the content_type kwarg instead of mimetype
            try:
                response = HttpResponse(export_data, content_type=content_type)
            except TypeError:
                response = HttpResponse(export_data, mimetype=content_type)
            response['Content-Disposition'] = 'attachment; filename=%s' % (
                self.get_export_filename(file_format),
            )

            post_export.send(sender=None, model=self.model)
            return response

        context = {}

        # each_context grew a ``request`` argument in Django 1.8.
        if django.VERSION >= (1, 8, 0):
            context.update(self.admin_site.each_context(request))
        elif django.VERSION >= (1, 7, 0):
            context.update(self.admin_site.each_context())

        context['form'] = form
        context['opts'] = self.model._meta
        request.current_app = self.admin_site.name
        return TemplateResponse(request, [self.export_template_name],
                                context)
class ImportExportMixin(ImportMixin, ExportMixin):
    """
    Import and export mixin.
    """
    #: template for change_list view (shows both import and export buttons)
    change_list_template = 'admin/import_export/change_list_import_export.html'
class ImportExportModelAdmin(ImportExportMixin, admin.ModelAdmin):
    """
    Subclass of ModelAdmin with import/export functionality.
    """
class ExportActionModelAdmin(ExportMixin, admin.ModelAdmin):
    """
    Subclass of ModelAdmin with export functionality implemented as an
    admin action.
    """

    # Don't use custom change list template.
    change_list_template = None

    def __init__(self, *args, **kwargs):
        """
        Adds a custom action form initialized with the available export
        formats.
        """
        choices = []
        formats = self.get_export_formats()
        if formats:
            # First entry is the empty "no format chosen" placeholder.
            choices.append(('', '---'))
            for i, f in enumerate(formats):
                choices.append((str(i), f().get_title()))

        self.action_form = export_action_form_factory(choices)
        super(ExportActionModelAdmin, self).__init__(*args, **kwargs)

    def export_admin_action(self, request, queryset):
        """
        Exports the selected rows using file_format.
        """
        export_format = request.POST.get('file_format')

        if not export_format:
            messages.warning(request, _('You must select an export format.'))
        else:
            # The posted value is an index into get_export_formats().
            formats = self.get_export_formats()
            file_format = formats[int(export_format)]()

            export_data = self.get_export_data(file_format, queryset, request=request)
            content_type = file_format.get_content_type()
            # Django 1.7 uses the content_type kwarg instead of mimetype
            try:
                response = HttpResponse(export_data, content_type=content_type)
            except TypeError:
                response = HttpResponse(export_data, mimetype=content_type)
            response['Content-Disposition'] = 'attachment; filename=%s' % (
                self.get_export_filename(file_format),
            )
            return response
    export_admin_action.short_description = _(
        'Export selected %(verbose_name_plural)s')

    actions = [export_admin_action]

    class Media:
        # Enables/disables the action submit button based on format selection.
        js = ['import_export/action_formats.js']
class ImportExportActionModelAdmin(ImportMixin, ExportActionModelAdmin):
    """
    Subclass of ExportActionModelAdmin with import/export functionality.
    Export functionality is implemented as an admin action.
    """
#!/usr/bin/env python2
#
# Distributed under the MIT/X11 software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
from test_framework.test_framework import ComparisonTestFramework
from test_framework.util import *
from test_framework.mininode import CTransaction, NetworkThread
from test_framework.blocktools import create_coinbase, create_block
from test_framework.comptool import TestInstance, TestManager
from test_framework.script import CScript
from binascii import hexlify, unhexlify
import cStringIO
import time
# A canonical signature consists of:
# <30> <total len> <02> <len R> <R> <02> <len S> <S> <hashtype>
def unDERify(tx):
    '''
    Make the signature in vin 0 of a tx non-DER-compliant,
    by adding padding after the S-value.
    '''
    scriptSig = CScript(tx.vin[0].scriptSig)
    newscript = []
    for index, element in enumerate(scriptSig):
        if index == 0:
            # Insert a NUL pad byte between the S value and the hashtype byte,
            # which violates strict DER encoding while keeping the same data.
            newscript.append(element[0:-1] + '\0' + element[-1])
        else:
            newscript.append(element)
    tx.vin[0].scriptSig = CScript(newscript)
'''
This test is meant to exercise BIP66 (DER SIG).
Connect to a single node.
Mine 2 (version 2) blocks (save the coinbases for later).
Generate 98 more version 2 blocks, verify the node accepts.
Mine 74 version 3 blocks, verify the node accepts.
Check that the new DERSIG rules are not enforced on the 75th version 3 block.
Check that the new DERSIG rules are enforced on the 76th version 3 block.
Mine 19 new version blocks.
Mine 1 old-version block.
Mine 1 new version block.
Mine 1 old version block, see that the node rejects.
'''
class BIP66Test(ComparisonTestFramework):
    # Drives a single node through BIP66 (strict DER) soft-fork activation
    # using the comparison-tool framework; see the module docstring above.

    def __init__(self):
        # NOTE(review): superclass __init__ is not invoked — presumably the
        # framework only needs num_nodes set here; confirm against
        # ComparisonTestFramework.
        self.num_nodes = 1

    def setup_network(self):
        # Must set the blockversion for this test
        self.nodes = start_nodes(1, self.options.tmpdir,
                                 extra_args=[['-debug', '-whitelist=127.0.0.1', '-blockversion=2']],
                                 binary=[self.options.testbinary])

    def run_test(self):
        test = TestManager(self, self.options.tmpdir)
        test.add_all_connections(self.nodes)
        NetworkThread().start() # Start up network handling in another thread
        test.run()

    def create_transaction(self, node, coinbase, to_address, amount):
        # Spend output 0 of the given coinbase block's coinbase tx to
        # to_address, sign it via RPC, and return it as a CTransaction.
        from_txid = node.getblock(coinbase)['tx'][0]
        inputs = [{ "txid" : from_txid, "vout" : 0}]
        outputs = { to_address : amount }
        rawtx = node.createrawtransaction(inputs, outputs)
        signresult = node.signrawtransaction(rawtx)
        tx = CTransaction()
        f = cStringIO.StringIO(unhexlify(signresult['hex']))
        tx.deserialize(f)
        return tx

    def get_tests(self):
        # Generator of TestInstances consumed by TestManager; each yield hands
        # a batch of blocks plus the expected accept/reject verdict.

        self.coinbase_blocks = self.nodes[0].generate(2)
        # Best block hash as an integer (py2 long literal suffix "L").
        self.tip = int ("0x" + self.nodes[0].getbestblockhash() + "L", 0)
        self.nodeaddress = self.nodes[0].getnewaddress()
        self.last_block_time = time.time()

        ''' 98 more version 2 blocks '''
        test_blocks = []
        for i in xrange(98):
            block = create_block(self.tip, create_coinbase(2), self.last_block_time + 1)
            block.nVersion = 2
            block.rehash()
            block.solve()
            test_blocks.append([block, True])
            self.last_block_time += 1
            self.tip = block.sha256
        yield TestInstance(test_blocks, sync_every_block=False)

        ''' Mine 74 version 3 blocks '''
        test_blocks = []
        for i in xrange(74):
            block = create_block(self.tip, create_coinbase(2), self.last_block_time + 1)
            block.nVersion = 3
            block.rehash()
            block.solve()
            test_blocks.append([block, True])
            self.last_block_time += 1
            self.tip = block.sha256
        yield TestInstance(test_blocks, sync_every_block=False)

        '''
        Check that the new DERSIG rules are not enforced in the 75th
        version 3 block.
        '''
        # Non-DER signature is still accepted: the soft fork has not locked in.
        spendtx = self.create_transaction(self.nodes[0],
                self.coinbase_blocks[0], self.nodeaddress, 1.0)
        unDERify(spendtx)
        spendtx.rehash()

        block = create_block(self.tip, create_coinbase(2), self.last_block_time + 1)
        block.nVersion = 3
        block.vtx.append(spendtx)
        block.hashMerkleRoot = block.calc_merkle_root()
        block.rehash()
        block.solve()

        self.last_block_time += 1
        self.tip = block.sha256
        yield TestInstance([[block, True]])

        '''
        Check that the new DERSIG rules are enforced in the 76th version 3
        block.
        '''
        # Same trick now fails: enough v3 blocks have been mined to enforce.
        spendtx = self.create_transaction(self.nodes[0],
                self.coinbase_blocks[1], self.nodeaddress, 1.0)
        unDERify(spendtx)
        spendtx.rehash()

        block = create_block(self.tip, create_coinbase(1), self.last_block_time + 1)
        block.nVersion = 3
        block.vtx.append(spendtx)
        block.hashMerkleRoot = block.calc_merkle_root()
        block.rehash()
        block.solve()
        self.last_block_time += 1
        yield TestInstance([[block, False]])

        ''' Mine 19 new version blocks on last valid tip '''
        test_blocks = []
        for i in xrange(19):
            block = create_block(self.tip, create_coinbase(1), self.last_block_time + 1)
            block.nVersion = 3
            block.rehash()
            block.solve()
            test_blocks.append([block, True])
            self.last_block_time += 1
            self.tip = block.sha256
        yield TestInstance(test_blocks, sync_every_block=False)

        ''' Mine 1 old version block '''
        block = create_block(self.tip, create_coinbase(1), self.last_block_time + 1)
        block.nVersion = 2
        block.rehash()
        block.solve()
        self.last_block_time += 1
        self.tip = block.sha256
        yield TestInstance([[block, True]])

        ''' Mine 1 new version block '''
        block = create_block(self.tip, create_coinbase(1), self.last_block_time + 1)
        block.nVersion = 3
        block.rehash()
        block.solve()
        self.last_block_time += 1
        self.tip = block.sha256
        yield TestInstance([[block, True]])

        ''' Mine 1 old version block, should be invalid '''
        # v2 blocks are now past their allowed threshold and get rejected.
        block = create_block(self.tip, create_coinbase(1), self.last_block_time + 1)
        block.nVersion = 2
        block.rehash()
        block.solve()
        self.last_block_time += 1
        yield TestInstance([[block, False]])
# Script entry point: run the comparison test directly.
if __name__ == '__main__':
    BIP66Test().main()
from sqlalchemy import Integer, ForeignKey, String
from sqlalchemy.types import PickleType, TypeDecorator, VARCHAR
from sqlalchemy.orm import mapper, Session, composite
from sqlalchemy.orm.mapper import Mapper
from sqlalchemy.orm.instrumentation import ClassManager
from sqlalchemy.testing.schema import Table, Column
from sqlalchemy.testing import eq_, assert_raises_message
from sqlalchemy.testing.util import picklers
from sqlalchemy.testing import fixtures
from sqlalchemy.ext.mutable import MutableComposite
from sqlalchemy.ext.mutable import MutableDict
class Foo(fixtures.BasicEntity):
    # Plain mapped entity used as the target class throughout these tests.
    pass
class SubFoo(Foo):
    # Subclass of Foo for inheritance-related scenarios.
    pass
class FooWithEq(object):
    """Entity whose equality and hash are based solely on its ``id``."""

    def __init__(self, **kw):
        for attr, value in kw.items():
            setattr(self, attr, value)

    def __hash__(self):
        return hash(self.id)

    def __eq__(self, other):
        return self.id == other.id
class Point(MutableComposite):
    """Mutable composite of two values ``x`` and ``y``.

    Every attribute assignment calls ``MutableComposite.changed()`` so that
    parent objects are flagged as dirty.
    """

    def __init__(self, x, y):
        self.x = x
        self.y = y

    def __setattr__(self, key, value):
        # Assign first, then notify parents of the mutation.
        object.__setattr__(self, key, value)
        self.changed()

    def __composite_values__(self):
        # Values consumed by the composite() mapping, in column order.
        return self.x, self.y

    def __getstate__(self):
        # Pickle as a plain (x, y) tuple.
        return self.x, self.y

    def __setstate__(self, state):
        # Restoring assigns via __setattr__, which re-arms change tracking.
        self.x, self.y = state

    def __eq__(self, other):
        return isinstance(other, Point) and \
            other.x == self.x and \
            other.y == self.y
class MyPoint(Point):
    """Point subclass that coerces plain ``(x, y)`` tuples into Points."""

    @classmethod
    def coerce(cls, key, value):
        return Point(*value) if isinstance(value, tuple) else value
class _MutableDictTestFixture(object):
    @classmethod
    def _type_fixture(cls):
        # The mutation-tracking dict type under test.
        return MutableDict

    def teardown(self):
        # clear out mapper events
        Mapper.dispatch._clear()
        ClassManager.dispatch._clear()
        super(_MutableDictTestFixture, self).teardown()
class _MutableDictTestBase(_MutableDictTestFixture):
    """Round-trip tests shared by all mutation-tracking dict column types.

    Subclasses provide the ``foo`` table (and thus the concrete column
    type) via ``define_tables``.
    """

    run_define_tables = 'each'

    @classmethod
    def setup_mappers(cls):
        # Fix: this hook is invoked on the class by the fixtures framework,
        # so it must be a classmethod (it previously lacked the decorator
        # even though it takes ``cls``).
        foo = cls.tables.foo
        mapper(Foo, foo)

    def test_coerce_none(self):
        # Persisting None must bypass dict coercion entirely.
        sess = Session()
        f1 = Foo(data=None)
        sess.add(f1)
        sess.commit()
        eq_(f1.data, None)

    def test_coerce_raise(self):
        # Non-dict values are rejected at assignment time.
        assert_raises_message(
            ValueError,
            "Attribute 'data' does not accept objects of type",
            Foo, data=set([1, 2, 3])
        )

    def test_in_place_mutation(self):
        # Mutating a key on a persistent dict is detected and flushed.
        sess = Session()

        f1 = Foo(data={'a': 'b'})
        sess.add(f1)
        sess.commit()

        f1.data['a'] = 'c'
        sess.commit()

        eq_(f1.data, {'a': 'c'})

    def test_clear(self):
        # dict.clear() counts as a tracked mutation.
        sess = Session()

        f1 = Foo(data={'a': 'b'})
        sess.add(f1)
        sess.commit()

        f1.data.clear()
        sess.commit()

        eq_(f1.data, {})

    def test_update(self):
        # dict.update() counts as a tracked mutation.
        sess = Session()

        f1 = Foo(data={'a': 'b'})
        sess.add(f1)
        sess.commit()

        f1.data.update({'a': 'z'})
        sess.commit()
        eq_(f1.data, {'a': 'z'})

    def test_setdefault(self):
        # setdefault() is tracked; second call with existing key is a no-op.
        sess = Session()

        f1 = Foo(data={'a': 'b'})
        sess.add(f1)
        sess.commit()

        eq_(f1.data.setdefault('c', 'd'), 'd')
        sess.commit()
        eq_(f1.data, {'a': 'b', 'c': 'd'})

        eq_(f1.data.setdefault('c', 'q'), 'd')
        sess.commit()
        eq_(f1.data, {'a': 'b', 'c': 'd'})

    def test_replace(self):
        # Wholesale replacement of the attribute also persists.
        sess = Session()
        f1 = Foo(data={'a': 'b'})
        sess.add(f1)
        sess.flush()

        f1.data = {'b': 'c'}
        sess.commit()
        eq_(f1.data, {'b': 'c'})

    def test_replace_itself_still_ok(self):
        # Re-assigning the same dict object must not break tracking.
        sess = Session()
        f1 = Foo(data={'a': 'b'})
        sess.add(f1)
        sess.flush()

        f1.data = f1.data
        f1.data['b'] = 'c'
        sess.commit()
        eq_(f1.data, {'a': 'b', 'b': 'c'})

    def test_pickle_parent(self):
        # A pickled/unpickled parent still reports mutations as dirty.
        sess = Session()

        f1 = Foo(data={'a': 'b'})
        sess.add(f1)
        sess.commit()
        f1.data
        sess.close()

        for loads, dumps in picklers():
            sess = Session()
            f2 = loads(dumps(f1))
            sess.add(f2)
            f2.data['a'] = 'c'
            assert f2 in sess.dirty

    def test_unrelated_flush(self):
        # A flush triggered by an unrelated attribute must not lose
        # subsequent dict mutations (see SQLAlchemy issue tracking).
        sess = Session()
        f1 = Foo(data={"a": "b"}, unrelated_data="unrelated")
        sess.add(f1)
        sess.flush()
        f1.unrelated_data = "unrelated 2"
        sess.flush()
        f1.data["a"] = "c"
        sess.commit()
        eq_(f1.data["a"], "c")

    def _test_non_mutable(self):
        # Control case: an untracked column silently loses in-place edits.
        sess = Session()

        f1 = Foo(non_mutable_data={'a': 'b'})
        sess.add(f1)
        sess.commit()

        f1.non_mutable_data['a'] = 'c'
        sess.commit()

        eq_(f1.non_mutable_data, {'a': 'b'})
class MutableColumnDefaultTest(_MutableDictTestFixture, fixtures.MappedTest):
    """Tests mutation tracking on a value produced by a column default."""

    @classmethod
    def define_tables(cls, metadata):
        MutableDict = cls._type_fixture()
        mutable_pickle = MutableDict.as_mutable(PickleType)
        Table(
            'foo', metadata,
            Column(
                'id', Integer, primary_key=True,
                test_needs_autoincrement=True),
            # Column-level default: flush populates the attribute server-side.
            Column('data', mutable_pickle, default={}),
        )

    @classmethod
    def setup_mappers(cls):
        # Fix: invoked on the class by the fixtures framework, so it must be
        # a classmethod (the decorator was missing, cf. define_tables above).
        foo = cls.tables.foo
        mapper(Foo, foo)

    def test_evt_on_flush_refresh(self):
        # test for #3427
        # The default-populated value must come back as the tracked type and
        # must still register subsequent mutations.
        sess = Session()
        f1 = Foo()
        sess.add(f1)
        sess.flush()
        assert isinstance(f1.data, self._type_fixture())
        assert f1 not in sess.dirty
        f1.data['foo'] = 'bar'
        assert f1 in sess.dirty
class MutableWithScalarPickleTest(_MutableDictTestBase, fixtures.MappedTest):
    """Run the MutableDict suite against PickleType columns."""
    @classmethod
    def define_tables(cls, metadata):
        MutableDict = cls._type_fixture()
        mutable_pickle = MutableDict.as_mutable(PickleType)
        Table('foo', metadata,
              Column('id', Integer, primary_key=True,
                     test_needs_autoincrement=True),
              Column('skip', mutable_pickle),
              Column('data', mutable_pickle),
              Column('non_mutable_data', PickleType),
              Column('unrelated_data', String(50))
              )
    def test_non_mutable(self):
        self._test_non_mutable()
class MutableWithScalarJSONTest(_MutableDictTestBase, fixtures.MappedTest):
    """Run the MutableDict suite against a JSON-serializing TypeDecorator."""
    @classmethod
    def define_tables(cls, metadata):
        import json
        class JSONEncodedDict(TypeDecorator):
            # Stores dicts as JSON text in a VARCHAR column.
            impl = VARCHAR(50)
            def process_bind_param(self, value, dialect):
                if value is not None:
                    value = json.dumps(value)
                return value
            def process_result_value(self, value, dialect):
                if value is not None:
                    value = json.loads(value)
                return value
        MutableDict = cls._type_fixture()
        Table('foo', metadata,
              Column('id', Integer, primary_key=True,
                     test_needs_autoincrement=True),
              Column('data', MutableDict.as_mutable(JSONEncodedDict)),
              Column('non_mutable_data', JSONEncodedDict),
              Column('unrelated_data', String(50))
              )
    def test_non_mutable(self):
        self._test_non_mutable()
class MutableAssocWithAttrInheritTest(_MutableDictTestBase,
                                      fixtures.MappedTest):
    """MutableDict associated directly with a mapped attribute, exercised
    through a joined-inheritance subclass."""

    @classmethod
    def define_tables(cls, metadata):
        Table('foo', metadata,
              Column('id', Integer, primary_key=True,
                     test_needs_autoincrement=True),
              Column('data', PickleType),
              Column('non_mutable_data', PickleType),
              Column('unrelated_data', String(50))
              )
        Table('subfoo', metadata,
              Column('id', Integer, ForeignKey('foo.id'), primary_key=True),
              )

    @classmethod
    def setup_mappers(cls):
        # BUG FIX: @classmethod was missing here.  The fixture framework
        # invokes setup_mappers on the class (cf. the decorated siblings
        # elsewhere in this file), which raises without the decorator.
        foo = cls.tables.foo
        subfoo = cls.tables.subfoo
        mapper(Foo, foo)
        mapper(SubFoo, subfoo, inherits=Foo)
        MutableDict.associate_with_attribute(Foo.data)

    def test_in_place_mutation(self):
        sess = Session()
        f1 = SubFoo(data={'a': 'b'})
        sess.add(f1)
        sess.commit()
        f1.data['a'] = 'c'
        sess.commit()
        eq_(f1.data, {'a': 'c'})

    def test_replace(self):
        sess = Session()
        f1 = SubFoo(data={'a': 'b'})
        sess.add(f1)
        sess.flush()
        f1.data = {'b': 'c'}
        sess.commit()
        eq_(f1.data, {'b': 'c'})
class MutableAssociationScalarPickleTest(_MutableDictTestBase,
                                         fixtures.MappedTest):
    """MutableDict globally associated with PickleType via associate_with()."""
    @classmethod
    def define_tables(cls, metadata):
        MutableDict = cls._type_fixture()
        # associate_with() registers the wrapper for *every* PickleType
        # column, so no as_mutable() wrapping is needed below.
        MutableDict.associate_with(PickleType)
        Table('foo', metadata,
              Column('id', Integer, primary_key=True,
                     test_needs_autoincrement=True),
              Column('skip', PickleType),
              Column('data', PickleType),
              Column('unrelated_data', String(50))
              )
class MutableAssociationScalarJSONTest(_MutableDictTestBase,
                                       fixtures.MappedTest):
    """MutableDict globally associated with a JSON TypeDecorator."""
    @classmethod
    def define_tables(cls, metadata):
        import json
        class JSONEncodedDict(TypeDecorator):
            # Stores dicts as JSON text in a VARCHAR column.
            impl = VARCHAR(50)
            def process_bind_param(self, value, dialect):
                if value is not None:
                    value = json.dumps(value)
                return value
            def process_result_value(self, value, dialect):
                if value is not None:
                    value = json.loads(value)
                return value
        MutableDict = cls._type_fixture()
        MutableDict.associate_with(JSONEncodedDict)
        Table('foo', metadata,
              Column('id', Integer, primary_key=True,
                     test_needs_autoincrement=True),
              Column('data', JSONEncodedDict),
              Column('unrelated_data', String(50))
              )
class CustomMutableAssociationScalarJSONTest(_MutableDictTestBase,
                                             fixtures.MappedTest):
    """Association tests using a user-defined MutableDict subclass."""
    # Caches the subclass so repeated _type_fixture() calls return one type.
    CustomMutableDict = None
    @classmethod
    def _type_fixture(cls):
        # Build (once) and cache a subclass of the base MutableDict.
        if not(getattr(cls, 'CustomMutableDict')):
            MutableDict = super(
                CustomMutableAssociationScalarJSONTest, cls)._type_fixture()
            class CustomMutableDict(MutableDict):
                pass
            cls.CustomMutableDict = CustomMutableDict
        return cls.CustomMutableDict
    @classmethod
    def define_tables(cls, metadata):
        import json
        class JSONEncodedDict(TypeDecorator):
            # Stores dicts as JSON text in a VARCHAR column.
            impl = VARCHAR(50)
            def process_bind_param(self, value, dialect):
                if value is not None:
                    value = json.dumps(value)
                return value
            def process_result_value(self, value, dialect):
                if value is not None:
                    value = json.loads(value)
                return value
        CustomMutableDict = cls._type_fixture()
        CustomMutableDict.associate_with(JSONEncodedDict)
        Table('foo', metadata,
              Column('id', Integer, primary_key=True,
                     test_needs_autoincrement=True),
              Column('data', JSONEncodedDict),
              Column('unrelated_data', String(50))
              )
    def test_pickle_parent(self):
        # Picklers don't know how to pickle CustomMutableDict,
        # but we aren't testing that here
        pass
    def test_coerce(self):
        """Loaded values must come back as the custom subclass."""
        sess = Session()
        f1 = Foo(data={'a': 'b'})
        sess.add(f1)
        sess.flush()
        eq_(type(f1.data), self._type_fixture())
class _CompositeTestBase(object):
    """Shared table definition and listener setup for composite tests."""
    @classmethod
    def define_tables(cls, metadata):
        Table('foo', metadata,
              Column('id', Integer, primary_key=True,
                     test_needs_autoincrement=True),
              Column('x', Integer),
              Column('y', Integer),
              Column('unrelated_data', String(50))
              )
    def setup(self):
        from sqlalchemy.ext import mutable
        # Re-install the composite listeners for each run; teardown below
        # clears the global mapper/class-manager event registries.
        mutable._setup_composite_listener()
        super(_CompositeTestBase, self).setup()
    def teardown(self):
        # clear out mapper events
        Mapper.dispatch._clear()
        ClassManager.dispatch._clear()
        super(_CompositeTestBase, self).teardown()
    @classmethod
    def _type_fixture(cls):
        return Point
class MutableCompositeColumnDefaultTest(_CompositeTestBase,
                                        fixtures.MappedTest):
    """Composite columns populated by column defaults still track mutation."""
    @classmethod
    def define_tables(cls, metadata):
        Table(
            'foo', metadata,
            Column('id', Integer, primary_key=True,
                   test_needs_autoincrement=True),
            Column('x', Integer, default=5),
            Column('y', Integer, default=9),
            Column('unrelated_data', String(50))
        )
    @classmethod
    def setup_mappers(cls):
        foo = cls.tables.foo
        cls.Point = cls._type_fixture()
        mapper(Foo, foo, properties={
            'data': composite(cls.Point, foo.c.x, foo.c.y)
        })
    def test_evt_on_flush_refresh(self):
        # this still worked prior to #3427 being fixed in any case
        sess = Session()
        f1 = Foo(data=self.Point(None, None))
        sess.add(f1)
        sess.flush()
        eq_(f1.data, self.Point(5, 9))
        assert f1 not in sess.dirty
        f1.data.x = 10
        assert f1 in sess.dirty
class MutableCompositesUnpickleTest(_CompositeTestBase, fixtures.MappedTest):
    """Unpickling a parent whose composite defines __eq__ must not fail."""
    @classmethod
    def setup_mappers(cls):
        foo = cls.tables.foo
        cls.Point = cls._type_fixture()
        mapper(FooWithEq, foo, properties={
            'data': composite(cls.Point, foo.c.x, foo.c.y)
        })
    def test_unpickle_modified_eq(self):
        u1 = FooWithEq(data=self.Point(3, 5))
        for loads, dumps in picklers():
            loads(dumps(u1))
class MutableCompositesTest(_CompositeTestBase, fixtures.MappedTest):
    """Core MutableComposite behavior: mutation, pickling, None handling."""
    @classmethod
    def setup_mappers(cls):
        foo = cls.tables.foo
        Point = cls._type_fixture()
        mapper(Foo, foo, properties={
            'data': composite(Point, foo.c.x, foo.c.y)
        })
    def test_in_place_mutation(self):
        sess = Session()
        d = Point(3, 4)
        f1 = Foo(data=d)
        sess.add(f1)
        sess.commit()
        f1.data.y = 5
        sess.commit()
        eq_(f1.data, Point(3, 5))
    def test_pickle_of_parent(self):
        sess = Session()
        d = Point(3, 4)
        f1 = Foo(data=d)
        sess.add(f1)
        sess.commit()
        # Touch the attribute so it is loaded before the session closes.
        f1.data
        assert 'data' in f1.__dict__
        sess.close()
        for loads, dumps in picklers():
            sess = Session()
            f2 = loads(dumps(f1))
            sess.add(f2)
            f2.data.y = 12
            assert f2 in sess.dirty
    def test_set_none(self):
        # Assigning None yields a composite of all-None columns.
        sess = Session()
        f1 = Foo(data=None)
        sess.add(f1)
        sess.commit()
        eq_(f1.data, Point(None, None))
        f1.data.y = 5
        sess.commit()
        eq_(f1.data, Point(None, 5))
    def test_set_illegal(self):
        f1 = Foo()
        assert_raises_message(
            ValueError,
            "Attribute 'data' does not accept objects",
            setattr, f1, 'data', 'foo'
        )
    def test_unrelated_flush(self):
        sess = Session()
        f1 = Foo(data=Point(3, 4), unrelated_data="unrelated")
        sess.add(f1)
        sess.flush()
        f1.unrelated_data = "unrelated 2"
        sess.flush()
        f1.data.x = 5
        sess.commit()
        eq_(f1.data.x, 5)
class MutableCompositeCallableTest(_CompositeTestBase, fixtures.MappedTest):
    """A composite built via a plain callable does not track mutations."""
    @classmethod
    def setup_mappers(cls):
        foo = cls.tables.foo
        Point = cls._type_fixture()
        # in this case, this is not actually a MutableComposite.
        # so we don't expect it to track changes
        mapper(Foo, foo, properties={
            'data': composite(lambda x, y: Point(x, y), foo.c.x, foo.c.y)
        })
    def test_basic(self):
        sess = Session()
        f1 = Foo(data=Point(3, 4))
        sess.add(f1)
        sess.flush()
        f1.data.x = 5
        sess.commit()
        # we didn't get the change.
        eq_(f1.data.x, 3)
class MutableCompositeCustomCoerceTest(_CompositeTestBase,
                                       fixtures.MappedTest):
    """MyPoint supplies a custom coerce(), accepting plain (x, y) tuples."""
    @classmethod
    def _type_fixture(cls):
        return MyPoint
    @classmethod
    def setup_mappers(cls):
        foo = cls.tables.foo
        Point = cls._type_fixture()
        mapper(Foo, foo, properties={
            'data': composite(Point, foo.c.x, foo.c.y)
        })
    def test_custom_coerce(self):
        f = Foo()
        f.data = (3, 4)
        eq_(f.data, Point(3, 4))
    def test_round_trip_ok(self):
        sess = Session()
        f = Foo()
        f.data = (3, 4)
        sess.add(f)
        sess.commit()
        eq_(f.data, Point(3, 4))
class MutableInheritedCompositesTest(_CompositeTestBase, fixtures.MappedTest):
    """Composite mutation tracking through a joined-inheritance subclass."""
    @classmethod
    def define_tables(cls, metadata):
        Table('foo', metadata,
              Column('id', Integer, primary_key=True,
                     test_needs_autoincrement=True),
              Column('x', Integer),
              Column('y', Integer)
              )
        Table('subfoo', metadata,
              Column('id', Integer, ForeignKey('foo.id'), primary_key=True),
              )
    @classmethod
    def setup_mappers(cls):
        foo = cls.tables.foo
        subfoo = cls.tables.subfoo
        Point = cls._type_fixture()
        mapper(Foo, foo, properties={
            'data': composite(Point, foo.c.x, foo.c.y)
        })
        mapper(SubFoo, subfoo, inherits=Foo)
    def test_in_place_mutation_subclass(self):
        sess = Session()
        d = Point(3, 4)
        f1 = SubFoo(data=d)
        sess.add(f1)
        sess.commit()
        f1.data.y = 5
        sess.commit()
        eq_(f1.data, Point(3, 5))
    def test_pickle_of_parent_subclass(self):
        sess = Session()
        d = Point(3, 4)
        f1 = SubFoo(data=d)
        sess.add(f1)
        sess.commit()
        # Touch the attribute so it is loaded before the session closes.
        f1.data
        assert 'data' in f1.__dict__
        sess.close()
        for loads, dumps in picklers():
            sess = Session()
            f2 = loads(dumps(f1))
            sess.add(f2)
            f2.data.y = 12
            assert f2 in sess.dirty
import logging
from autotest.client.shared import error
from virttest import virsh
def run(test, params, env):
    """
    Test command: virsh help.

    1. Get all parameters from configuration.
    2. Perform virsh help operation.
    3. Check help information valid or not.
    4. Check result.

    :param test: running test object (unused here).
    :param params: dict of test parameters from the configuration.
    :param env: test environment object (unused here).
    """
    extra = params.get("help_extra", "")
    cmd = params.get("help_command", "")
    test_target = params.get("help_target", "")
    status_error = params.get("status_error", "no")
    def help_check(test_target):
        """
        Check the help output of all virsh commands or command groups.

        :param test_target: "all_command" to sweep every virsh command,
                            "all_group" to sweep every command group.
        :return: True if every command/group printed help successfully.
        """
        help_list = []
        if test_target == "all_command":
            help_list = virsh.help_command_only("", False,
                                                ignore_status=True)
        elif test_target == "all_group":
            help_list = virsh.help_command_group("", False,
                                                 ignore_status=True)
        if len(help_list) == 0:
            raise error.TestError("Cannot get any virsh command/group!")
        fail_list = []
        # If any command or group's check failed, the test failed
        check_result = True
        for virsh_cmd_group in help_list:
            logging.info("Test command or group: '%s'", virsh_cmd_group)
            result = virsh.help(virsh_cmd_group, ignore_status=True)
            status = result.exit_status
            output = result.stdout.strip()
            if status != 0:
                fail_list.append(virsh_cmd_group)
                # No need to check output
                continue
            # Zero exit status with empty output is also a failure.
            if not output:
                fail_list.append(virsh_cmd_group)
        # List all failed commands or groups
        if len(fail_list) > 0:
            check_result = False
            logging.info("These commands or groups' check failed!!!")
            for fail_cmd in fail_list:
                logging.info("%s", fail_cmd)
        return check_result
    # NOTE: 'result' only exists on the single-command path and
    # 'check_result' only on the sweep path; every use below re-checks
    # 'test_target' so the undefined name is never touched.
    if test_target == "":
        cmd = "%s %s" % (cmd, extra)
        result = virsh.help(cmd, ignore_status=True)
    else:
        check_result = help_check(test_target)
    if test_target == "":
        status = result.exit_status
        output = result.stdout.strip()
    # Check status_error
    if status_error == "yes":
        if test_target == "":
            if status == 0:
                raise error.TestFail("Run successfully with wrong command!")
    elif status_error == "no":
        if test_target == "":
            if status != 0:
                raise error.TestFail("Run failed with right command")
            if output == "":
                raise error.TestFail("Cannot see help information")
        else:
            if not check_result:
                raise error.TestFail(
                    "virsh help command or groups test failed")
# Community membership
This doc outlines the various responsibilities of contributor roles in etcd.
| Role | Responsibilities | Requirements | Defined by |
|------------|----------------------------------------------|---------------------------------------------------------------|-------------------------------|
| Member | Active contributor in the community | Sponsored by 2 reviewers and multiple contributions | etcd GitHub org member |
| Reviewer | Review contributions from other members | History of review and authorship | [OWNERS] file reviewer entry |
| Maintainer | Set direction and priorities for the project | Demonstrated responsibility and excellent technical judgement | [OWNERS] file approver entry |
## New contributors
New contributors should be welcomed to the community by existing members,
helped with PR workflow, and directed to relevant documentation and
communication channels.
## Established community members
Established community members are expected to demonstrate their adherence to the
principles in this document, familiarity with project organization, roles,
policies, procedures, conventions, etc., and technical and/or writing ability.
Role-specific expectations, responsibilities, and requirements are enumerated
below.
## Member
Members are continuously active contributors to the community. They can have
issues and PRs assigned to them. Members are expected to remain active
contributors to the community.
**Defined by:** Member of the etcd GitHub organization.
### Member requirements
- Enabled [two-factor authentication] on their GitHub account
- Have made multiple contributions to the project or community. Contribution may include, but is not limited to:
- Authoring or reviewing PRs on GitHub. At least one PR must be **merged**.
- Filing or commenting on issues on GitHub
- Contributing to community discussions (e.g. meetings, Slack, email discussion
forums, Stack Overflow)
- Subscribed to [etcd-dev@googlegroups.com](https://groups.google.com/g/etcd-dev)
- Have read the [contributor guide]
- Sponsored by two active maintainers or reviewers.
- Sponsors must be from multiple member companies to demonstrate integration across the community.
- With no objections from other maintainers
- Open a [membership nomination] issue against the `kubernetes/org` repo
- Ensure your sponsors are @mentioned on the issue
- Make sure that the list of contributions included is representative of your work on the project.
- Members can be removed by a supermajority of the maintainers or can resign by notifying
the maintainers.
### Member responsibilities and privileges
- Responsive to issues and PRs assigned to them
- Granted "triage access" to etcd project
- Active owner of code they have contributed (unless ownership is explicitly transferred)
- Code is well-tested
- Tests consistently pass
- Addresses bugs or issues discovered after code is accepted
**Note:** Members who frequently contribute code are expected to proactively
perform code reviews and work towards becoming a *reviewer*.
## Reviewers
Reviewers are contributors who have demonstrated greater skill in
reviewing the code from other contributors. They are knowledgeable about both
the codebase and software engineering principles. Their LGTM counts towards
merging a code change into the project. A reviewer is generally on the ladder towards
maintainership.
**Defined by:** *reviewers* entry in the [OWNERS] file.
### Reviewer requirements
- member for at least 3 months.
- Primary reviewer for at least 5 PRs to the codebase.
- Reviewed or contributed at least 20 substantial PRs to the codebase.
- Knowledgeable about the codebase.
- Sponsored by two active maintainers.
- Sponsors must be from multiple member companies to demonstrate integration across the community.
- With no objections from other maintainers
- Reviewers can be removed by a supermajority of the maintainers or can resign by notifying
the maintainers.
### Reviewer responsibilities and privileges
- Code reviewer status may be a precondition to accepting large code contributions
- Responsible for project quality control via code reviews
- Focus on code quality and correctness, including testing and factoring
- May also review for more holistic issues, but not a requirement
- Expected to be responsive to review requests
- Assigned PRs to review related to area of expertise
- Assigned test bugs related to area of expertise
- Granted "triage access" to etcd project
## Maintainers
Maintainers are first and foremost contributors who have shown they
are committed to the long-term success of a project. Maintainership is about building
trust with the current maintainers and being a person that they can
depend on to make decisions in the best interest of the project in a consistent manner.
**Defined by:** *approvers* entry in the [OWNERS] file.
### Maintainer requirements
- Deep understanding of the technical goals and direction of the project
- Deep understanding of the technical domain of the project
- Sustained contributions to design and direction by doing all of:
- Authoring and reviewing proposals
- Initiating, contributing, and resolving discussions (emails, GitHub issues, meetings)
- Identifying subtle or complex issues in the designs and implementation of PRs
- Directly contributed to the project through implementation and/or review
- Sponsored by two active maintainers and elected by supermajority
- Sponsors must be from multiple member companies to demonstrate integration across the community.
- To become a maintainer send an email with your candidacy to <etcd-maintainers-private@googlegroups.com>
- Ensure your sponsors are @mentioned in the email
- Include a list of contributions representative of your work on the project.
  - Existing maintainers will vote privately and respond to the email with either acceptance or feedback for suggested improvements.
- With your membership approved you are expected to:
- Open a PR and add an entry to the [OWNERS] file
- Request to be added to the <etcd-maintainers@googlegroups.com> and <etcd-maintainers-private@googlegroups.com> mailing lists
- Request to join [etcd-maintainer teams of the etcd-io organization in GitHub](https://github.com/orgs/etcd-io/teams/maintainers-etcd)
- Request to join the private slack channel for etcd maintainers on [kubernetes slack](http://slack.kubernetes.io/)
- Request access to `etcd-development` GCP project where we publish releases
- Request access to passwords shared between maintainers
- Request cncf service desk access by emailing <projects@cncf.io>
  - Raise a cncf service desk ticket to be added to the [cncf-etcd-maintainers mailing list](https://lists.cncf.io/g/cncf-etcd-maintainers/directory)
### Maintainer responsibilities and privileges
- Make and approve technical design decisions
- Set technical direction and priorities
- Define milestones and releases
- Mentor and guide reviewers, and contributors to the project.
- Participate when called upon in the [security disclosure and release process]
- Ensure the continued health of the project
- Adequate test coverage to confidently release
- Tests are passing reliably (i.e. not flaky) and are fixed when they fail
- Ensure a healthy process for discussion and decision-making is in place.
- Work with other maintainers to maintain the project's overall health and success holistically
### Retiring
Life priorities, interests, and passions can change. Maintainers can retire and
move to [emeritus maintainers]. If a maintainer needs to step down, they should
inform other maintainers and, if possible, help find someone to pick up the related
work. At the very least, ensure the related work can be continued.
If a maintainer has not been performing their duties for 12 months,
they can be removed by other maintainers. In that case, the inactive maintainer will
be first notified via an email. If the situation doesn't improve, they will be
removed. If an emeritus maintainer wants to regain an active role, they can do
so by renewing their contributions. Active maintainers should welcome such a move.
Retiring other maintainers or regaining the status should require the approval
of at least two active maintainers.
Retiring maintainers must:
- Open a PR and move to emeritus approvers in the [OWNERS] file
- Open a PR to be removed from the [etcd-maintainer teams of the etcd-io organization in GitHub](https://github.com/orgs/etcd-io/teams/maintainers-etcd)
- Remove their access to `etcd-development` GCP project where we publish releases
- Raise cncf service desk ticket to be removed as a [cncf-etcd-maintainers mailing list](https://lists.cncf.io/g/cncf-etcd-maintainers/directory) admin
- Request to be removed as a member of the [etcd-maintainers](https://groups.google.com/g/etcd-maintainers) and [etcd-maintainers-private](https://groups.google.com/g/etcd-maintainers-private) Google groups
## Acknowledgements
Contributor roles and responsibilities were written based on [Kubernetes community membership]
[OWNERS]: /OWNERS
[contributor guide]: /CONTRIBUTING.md
[membership nomination]: https://github.com/kubernetes/org/issues/new?assignees=&labels=area%2Fgithub-membership&projects=&template=membership.yml&title=REQUEST%3A+New+membership+for+%3Cyour-GH-handle%3E
[Kubernetes community membership]: https://github.com/kubernetes/community/blob/master/community-membership.md
[emeritus maintainers]: /README.md#etcd-emeritus-maintainers
[security disclosure and release process]: /security/README.md
[two-factor authentication]: https://docs.github.com/en/authentication/securing-your-account-with-two-factor-authentication-2fa/about-two-factor-authentication | unknown | github | https://github.com/etcd-io/etcd | Documentation/contributor-guide/community-membership.md |
#!/usr/bin/env python
#############################################################################
# Copyright (c) 2017 SiteWare Corp. All right reserved
#############################################################################
from __future__ import absolute_import, print_function
import os
import sys
import six
import sd2
from .file_rewriter import FileRewriter
g_ssh_config_path = os.getenv('SD2_SSH_CONFIG',
os.path.join(os.getenv('HOME'), '.ssh', 'config'))
container_entry_template = '''
host {cont[name]}
HostName {cont[ip]}
User {host[User]}
Port {sshport}
StrictHostKeyChecking no
UserKnownHostsFile /dev/null
ServerAliveInterval 60
ConnectTimeout 5
'''
# SSH options that are additionally emitted on per-container entries.
container_ssh_option_names = [
    'IdentityFile',
    "IdentitiesOnly",
    "PKCS11Provider",
    'ProxyCommand',
]
# All SSH config options recognized on a host definition; any other key
# in a host dict is ignored when rendering an entry.
ssh_option_names = [
    'HostName',
    'Port',
    'User',
    'ServerAliveInterval',
    'StrictHostKeyChecking',
    'UserKnownHostsFile',
    'ConnectTimeout',
    'UseKeychain',
    'AddKeysToAgent',
    'ForwardAgent',
    'ProxyCommand',
    'ProxyJump',
    "PKCS11Provider",
    'SmartcardDevice',
    'HostKeyAlias',
    'LocalForward',
    'RemoteForward',
    'PubkeyAuthentication',
    'PreferredAuthentications'
]
ssh_option_names.extend(container_ssh_option_names)


def generate_host_entry(host, name, more, exclude):
    """Render one ``host`` stanza of an ssh_config file.

    :param host: dict mapping SSH option name -> value (or list of values);
        keys not listed in ``ssh_option_names`` are skipped.
    :param name: host alias emitted after the ``host`` keyword.
    :param more: extra pre-formatted lines appended verbatim.
    :param exclude: option names (case-insensitive) to omit.
    :returns: the stanza text, terminated by a blank line.
    """
    # Hoist the case-insensitive exclusion set out of the loop (the
    # original rebuilt a lowered list on every iteration).
    excluded = {x.lower() for x in exclude}
    rr = 'host {}\n'.format(name)
    # dict.items() works on both Python 2 and 3; six is not needed here.
    for key, val in host.items():
        if key not in ssh_option_names:
            continue
        if key.lower() in excluded:
            continue
        if not isinstance(val, (list, tuple)):
            val = [val]
        for vv in val:
            rr += ' {key} {value}\n'.format(key=key, value=vv)
    for entry in more:
        rr += entry + "\n"
    rr += '\n'
    return rr
def generate_for_host(host):
    """Render the complete generated ssh_config section for one host:
    optional Match blocks, the plain host entry, a ``<name>-ports`` entry
    carrying the port forwards, and one entry per container."""
    from . import util
    rr = ''
    rr += '''\n########## GENERATED DO NOT MODIFY #####################\n'''
    # Local containers are reached directly on 22; remote ones through the
    # forwarded port 2222 (see the LocalForward lines emitted below).
    sshport = 22 if util.is_localhost(host['name']) else 2222
    if not util.is_localhost(host['name']):
        if host.get('match'):
            matches = host.get('match')
            # Each Match block applies to both the plain and -ports entries.
            for hostname in [host['name'], host['name'] + '-ports']:
                for match in matches:
                    rr += 'Match originalhost {hostname} exec "{match[condition]}"\n'.format(
                        hostname=hostname, match=match)
                    for key in six.viewkeys(match):
                        if not key in ssh_option_names:
                            continue
                        rr += ' {key} {value}\n'.format(key=key,
                                                        value=match[key])
                    rr += '\n'
        # Plain entry carries everything except the LocalForwards, which
        # live on the '-ports' entry below.
        rr += generate_host_entry(host, host['name'], [], ['LocalForward'])
    rr += 'host {}-ports\n'.format(host['name'])
    rr += " LogLevel ERROR\n"
    if not 'HostName' in six.viewkeys(host):
        host['HostName'] = host['name']
    for key, val in six.iteritems(host):
        if not key in ssh_option_names + ['LocalForward']:
            continue
        if not isinstance(val, (list, tuple)):
            val = [val]
        for vv in val:
            rr += ' {key} {value}\n'.format(key=key, value=vv)
    # rr += ' LocalForward {}-local:2375 localhost:2375\n'.format(host['name'])
    for cont in host.get('containers', []):
        ports = cont['image'].get('ports', [])
        # Forward each published port plus the container's ssh port; note
        # only the first half of "a:b" is used for both local and remote.
        for port in ports + ["{}:22".format(sshport)]:
            (p1, p2) = port.split(':')
            rr += (
                " LocalForward {0}:{1} {2}:{1}\n".format(
                    cont['name'], p1, cont['ip']))
    rr += '\n'
    for cont in host.get('containers', []):
        # The template consumes 'cont', 'host' and 'sshport' from locals().
        rr += container_entry_template.format(**locals())
        for key, val in six.iteritems(host):
            if key in container_ssh_option_names:
                rr += ' {} {}\n'.format(key, val)
    return rr
def get_our_ssh_config():
    """Build the generated ssh_config text for every configured host."""
    chunks = []
    for host in sd2.get_hosts(enabled=False):
        # Fall back to the invoking user when no User is configured.
        if not host.get("User"):
            host['User'] = os.getenv('USER')
        try:
            chunks.append(generate_for_host(host))
        except Exception:
            sys.stderr.write("ERROR: Processing host {}\n".format(host['name']))
            raise
    return ''.join(chunks)
def gen_ssh_config():
    """Regenerate the managed section of the user's ssh_config file,
    creating the file (and its directory, mode 0700) if missing."""
    if not os.path.exists(g_ssh_config_path):
        ssh_config_dir = os.path.dirname(g_ssh_config_path)
        if not os.path.exists(ssh_config_dir):
            # Use os.makedirs/os.chmod instead of shelling out to
            # mkdir/chmod: no shell involved, so paths containing spaces
            # or shell metacharacters are handled safely.
            os.makedirs(ssh_config_dir)
            os.chmod(ssh_config_dir, 0o700)
        # Equivalent of 'touch': create the file if it does not exist.
        with open(g_ssh_config_path, 'a'):
            pass
    fr = FileRewriter(g_ssh_config_path)
    before, after = fr.read_config()
    rr = get_our_ssh_config()
    fr.write_config(
        before,
        rr.split('\n'),
        after
    )
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright: (c) 2018, F5 Networks Inc.
# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'certified'}
DOCUMENTATION = r'''
---
module: bigiq_application_fasthttp
short_description: Manages BIG-IQ FastHTTP applications
description:
- Manages BIG-IQ applications used for load balancing an HTTP-based application, speeding
up connections and reducing the number of connections to the back-end server.
version_added: 2.6
options:
name:
description:
- Name of the new application.
required: True
description:
description:
- Description of the application.
servers:
description:
- A list of servers that the application is hosted on.
- If you are familiar with other BIG-IP setting, you might also refer to this
list as the list of pool members.
- When creating a new application, at least one server is required.
suboptions:
address:
description:
- The IP address of the server.
required: True
port:
description:
- The port of the server.
- When creating a new application and specifying a server, if this parameter
is not provided, the default of C(80) will be used.
default: 80
inbound_virtual:
description:
- Settings to configure the virtual which will receive the inbound connection.
- This virtual will be used to host the HTTP endpoint of the application.
suboptions:
address:
description:
- Specifies destination IP address information to which the virtual server
sends traffic.
- This parameter is required when creating a new application.
required: True
netmask:
description:
- Specifies the netmask to associate with the given C(destination).
- This parameter is required when creating a new application.
required: True
port:
description:
- The port that the virtual listens for connections on.
- When creating a new application, if this parameter is not specified, the
default value of C(80) will be used.
default: 80
service_environment:
description:
- Specifies the name of service environment that the application will be
deployed to.
- When creating a new application, this parameter is required.
- The service environment type will be discovered by this module automatically.
Therefore, it is crucial that you maintain unique names for items in the
different service environment types (at this time, SSGs and BIGIPs).
add_analytics:
description:
- Collects statistics of the BIG-IP that the application is deployed to.
- This parameter is only relevant when specifying a C(service_environment) which
is a BIG-IP; not an SSG.
type: bool
default: no
state:
description:
- The state of the resource on the system.
- When C(present), guarantees that the resource exists with the provided attributes.
- When C(absent), removes the resource from the system.
default: present
choices:
- absent
- present
wait:
description:
- If the module should wait for the application to be created, deleted or updated.
type: bool
default: yes
extends_documentation_fragment: f5
notes:
- This module does not support updating of your application (whether deployed or not).
If you need to update the application, the recommended practice is to remove and
re-create.
- Requires BIG-IQ version 6.0 or greater.
author:
- Tim Rupp (@caphrim007)
'''
EXAMPLES = r'''
- name: Load balance an HTTP application on port 80 on BIG-IP
bigiq_application_fasthttp:
name: my-app
description: Fast HTTP
service_environment: my-ssg
servers:
- address: 1.2.3.4
port: 8080
- address: 5.6.7.8
port: 8080
inbound_virtual:
name: foo
address: 2.2.2.2
netmask: 255.255.255.255
port: 80
provider:
password: secret
server: lb.mydomain.com
user: admin
state: present
delegate_to: localhost
'''
RETURN = r'''
description:
description: The new description of the application of the resource.
returned: changed
type: str
sample: My application
service_environment:
description: The environment which the service was deployed to.
returned: changed
type: str
sample: my-ssg1
inbound_virtual_destination:
description: The destination of the virtual that was created.
returned: changed
type: str
sample: 6.7.8.9
inbound_virtual_netmask:
description: The network mask of the provided inbound destination.
returned: changed
type: str
sample: 255.255.255.0
inbound_virtual_port:
description: The port the inbound virtual address listens on.
returned: changed
type: int
sample: 80
servers:
description: List of servers, and their ports, that make up the application.
type: complex
returned: changed
contains:
address:
description: The IP address of the server.
returned: changed
type: str
sample: 2.3.4.5
port:
description: The port that the server listens on.
returned: changed
type: int
sample: 8080
sample: hash/dictionary of values
'''
import time
from ansible.module_utils.basic import AnsibleModule
try:
from library.module_utils.network.f5.bigiq import F5RestClient
from library.module_utils.network.f5.common import F5ModuleError
from library.module_utils.network.f5.common import AnsibleF5Parameters
from library.module_utils.network.f5.common import f5_argument_spec
from library.module_utils.network.f5.common import exit_json
from library.module_utils.network.f5.common import fail_json
from library.module_utils.network.f5.ipaddress import is_valid_ip
except ImportError:
from ansible.module_utils.network.f5.bigiq import F5RestClient
from ansible.module_utils.network.f5.common import F5ModuleError
from ansible.module_utils.network.f5.common import AnsibleF5Parameters
from ansible.module_utils.network.f5.common import f5_argument_spec
from ansible.module_utils.network.f5.common import exit_json
from ansible.module_utils.network.f5.common import fail_json
from ansible.module_utils.network.f5.ipaddress import is_valid_ip
class Parameters(AnsibleF5Parameters):
    # Maps camelCase attribute names used by the BIG-IQ REST API to the
    # snake_case names used inside this module.
    api_map = {
        'templateReference': 'template_reference',
        'subPath': 'sub_path',
        'ssgReference': 'ssg_reference',
        'configSetName': 'config_set_name',
        'defaultDeviceReference': 'default_device_reference',
        'addAnalytics': 'add_analytics'
    }

    # API-side attribute names included in payloads sent to the device.
    api_attributes = [
        'resources', 'description', 'configSetName', 'subPath', 'templateReference',
        'ssgReference', 'defaultDeviceReference', 'addAnalytics'
    ]

    # Module-side attribute names reported back to the user in the result.
    returnables = [
        'resources', 'description', 'config_set_name', 'sub_path', 'template_reference',
        'ssg_reference', 'default_device_reference', 'servers', 'inbound_virtual',
        'add_analytics'
    ]

    # Attribute names compared between desired (want) and current (have)
    # state to decide whether a change is needed.
    updatables = [
        'resources', 'description', 'config_set_name', 'sub_path', 'template_reference',
        'ssg_reference', 'default_device_reference', 'servers', 'add_analytics'
    ]
class ApiParameters(Parameters):
    # Parameters as read back from the BIG-IQ API; the base class already
    # provides all needed translation, so nothing is overridden.
    pass
class ModuleParameters(Parameters):
    """Parameters supplied by the user, plus values derived from them.

    Several properties resolve user-supplied names (the LB template and the
    service environment) into ``selfLink`` references by querying the
    BIG-IQ REST API through ``self.client``, which the manager assigns
    after construction.
    """

    @property
    def http_profile(self):
        # The fast HTTP template always attaches this fixed profile name.
        return "profile_http"

    @property
    def config_set_name(self):
        # The config set is named after the application.
        return self.name

    @property
    def sub_path(self):
        # The sub path mirrors the application name as well.
        return self.name

    def _query_selflink(self, uri):
        """Run a single-item selfLink query against the BIG-IQ API.

        Shared implementation for the three reference-resolving properties
        below (previously triplicated inline).

        :param uri: Fully-formed query URI with ``$filter``/``$top``/``$select``.
        :returns: ``dict(link=...)`` for the first matching item, or ``None``
            when the query succeeded (HTTP 200) but matched nothing.
        :raises F5ModuleError: on a malformed JSON body or a 400 response.
        """
        resp = self.client.api.get(uri)
        try:
            response = resp.json()
        except ValueError as ex:
            raise F5ModuleError(str(ex))
        if resp.status == 200 and response['totalItems'] == 0:
            return None
        elif 'code' in response and response['code'] == 400:
            if 'message' in response:
                raise F5ModuleError(response['message'])
            else:
                # Fall back to the raw response body when no message is given.
                raise F5ModuleError(resp._content)
        return dict(link=response['items'][0]['selfLink'])

    @property
    def template_reference(self):
        """Reference to the default fast HTTP LB template shipped with BIG-IQ.

        :raises F5ModuleError: if the default template cannot be found.
        """
        # 'query' (not 'filter') so the builtin is not shadowed.
        query = "name+eq+'Default-f5-fastHTTP-lb-template'"
        uri = "https://{0}:{1}/mgmt/cm/global/templates/?$filter={2}&$top=1&$select=selfLink".format(
            self.client.provider['server'],
            self.client.provider['server_port'],
            query
        )
        result = self._query_selflink(uri)
        if result is None:
            raise F5ModuleError(
                "No default HTTP LB template was found."
            )
        return result

    @property
    def default_device_reference(self):
        """Reference to a standalone BIG-IP device matching service_environment.

        Returns ``None`` when no device matches; the environment may then be
        an SSG instead (see ``ssg_reference``).
        """
        if is_valid_ip(self.service_environment):
            # An IP address was specified
            query = "address+eq+'{0}'".format(self.service_environment)
        else:
            # Assume a hostname was specified
            query = "hostname+eq+'{0}'".format(self.service_environment)
        uri = "https://{0}:{1}/mgmt/shared/resolver/device-groups/cm-adccore-allbigipDevices/devices/?$filter={2}&$top=1&$select=selfLink".format(
            self.client.provider['server'],
            self.client.provider['server_port'],
            query
        )
        return self._query_selflink(uri)

    @property
    def ssg_reference(self):
        """Reference to a Service Scaling Group named service_environment.

        Returns ``None`` when no SSG by that name exists.
        """
        query = "name+eq+'{0}'".format(self.service_environment)
        uri = "https://{0}:{1}/mgmt/cm/cloud/service-scaling-groups/?$filter={2}&$top=1&$select=selfLink".format(
            self.client.provider['server'],
            self.client.provider['server_port'],
            query
        )
        return self._query_selflink(uri)
class Changes(Parameters):
    """Base class for parameter sets that are reported back to the user."""

    def to_return(self):
        """Collect every returnable attribute into a plain dict.

        Any exception raised while gathering or filtering is deliberately
        swallowed; whatever has been collected up to that point is returned.
        """
        collected = {}
        try:
            for attr_name in self.returnables:
                collected[attr_name] = getattr(self, attr_name)
            collected = self._filter_params(collected)
        except Exception:
            pass
        return collected
class UsableChanges(Changes):
    """Changes massaged into the payload shape the apply-template API expects.

    The hex-suffixed keys below identify resource types within the default
    fast HTTP LB template and must not be altered.
    """

    @property
    def resources(self):
        """Aggregate every resource definition into a single mapping."""
        combined = {}
        combined.update(self.http_profile)
        combined.update(self.http_monitor)
        combined.update(self.virtual)
        combined.update(self.pool)
        combined.update(self.nodes)
        return combined

    @property
    def virtual(self):
        """Virtual server definition built from the inbound_virtual option."""
        return {
            'ltm:virtual:0257bb9bb997': [{
                'parameters': {
                    'name': 'virtual',
                    'destinationAddress': self.inbound_virtual['address'],
                    'mask': self.inbound_virtual['netmask'],
                    'destinationPort': self.inbound_virtual['port']
                },
                'subcollectionResources': self.profiles
            }]
        }

    @property
    def profiles(self):
        """The template's two fixed profiles, attached with no overrides."""
        return {
            'profiles:53f9b3028d90': [{'parameters': {}}],
            'profiles:b2f39bda63fd': [{'parameters': {}}]
        }

    @property
    def pool(self):
        """Single pool definition holding all configured servers."""
        return {
            'ltm:pool:f76ae78f1de6': [{
                'parameters': {'name': 'pool_0'},
                'subcollectionResources': self.pool_members
            }]
        }

    @property
    def pool_members(self):
        """One pool member per configured server, referencing its node."""
        members = [
            {
                'parameters': {
                    'port': server['port'],
                    'nodeReference': {
                        'link': '#/resources/ltm:node:0783ce16685f/{0}'.format(server['address']),
                        'fullPath': '# {0}'.format(server['address'])
                    }
                }
            }
            for server in self.servers
        ]
        return {'members:15ad51f7229e': members}

    @property
    def http_profile(self):
        """Fixed HTTP profile attached to the virtual."""
        return {
            'ltm:profile:http:b2f39bda63fd': [{
                'parameters': {'name': 'profile_http'}
            }]
        }

    @property
    def http_monitor(self):
        """Fixed HTTP monitor attached to the pool."""
        return {
            'ltm:monitor:http:cf6f6e7ae758': [{
                'parameters': {'name': 'monitor-http'}
            }]
        }

    @property
    def nodes(self):
        """One LTM node per configured server, named by its address."""
        return {
            'ltm:node:0783ce16685f': [
                {'parameters': {'name': server['address'], 'address': server['address']}}
                for server in self.servers
            ]
        }

    @property
    def node_addresses(self):
        """Plain list of the configured server addresses."""
        return [server['address'] for server in self.servers]
class ReportableChanges(Changes):
    # Changes formatted for display back to the user; the base behavior is
    # already suitable, so nothing is overridden.
    pass
class Difference(object):
    """Compute per-parameter differences between desired and current state."""

    def __init__(self, want, have=None):
        self.want = want
        self.have = have

    def compare(self, param):
        """Return the changed value for ``param``, or ``None`` if unchanged.

        A property named ``param`` defined on this class takes precedence
        over the generic want-vs-have comparison.
        """
        try:
            return getattr(self, param)
        except AttributeError:
            return self.__default(param)

    def __default(self, param):
        # Generic comparison: a value missing from 'have' counts as new,
        # so the wanted value is reported. Equal values yield None.
        wanted = getattr(self.want, param)
        try:
            current = getattr(self.have, param)
        except AttributeError:
            return wanted
        if wanted != current:
            return wanted
class ModuleManager(object):
    """Drives the module: decides whether the application must be created
    or removed on BIG-IQ and performs the apply-template API calls."""

    def __init__(self, *args, **kwargs):
        self.module = kwargs.get('module', None)
        self.client = kwargs.get('client', None)
        self.want = ModuleParameters(params=self.module.params)
        # The want-parameters resolve references via the REST API, so they
        # need access to the client as well.
        self.want.client = self.client
        self.have = ApiParameters()
        self.changes = UsableChanges()

    def _set_changed_options(self):
        # On create, every non-None desired value counts as a change.
        changed = {}
        for key in Parameters.returnables:
            if getattr(self.want, key) is not None:
                changed[key] = getattr(self.want, key)
        if changed:
            self.changes = UsableChanges(params=changed)

    def _update_changed_options(self):
        # Compare each updatable and collect the differing values.
        # Returns True when at least one difference was found.
        diff = Difference(self.want, self.have)
        updatables = Parameters.updatables
        changed = dict()
        for k in updatables:
            change = diff.compare(k)
            if change is None:
                continue
            else:
                if isinstance(change, dict):
                    # A dict result is merged wholesale rather than keyed by k.
                    changed.update(change)
                else:
                    changed[k] = change
        if changed:
            self.changes = UsableChanges(params=changed)
            return True
        return False

    def should_update(self):
        # Thin wrapper over _update_changed_options.
        result = self._update_changed_options()
        if result:
            return True
        return False

    def exec_module(self):
        """Main dispatch: apply 'present'/'absent' and build the result dict."""
        changed = False
        result = dict()
        state = self.want.state

        if state == "present":
            changed = self.present()
        elif state == "absent":
            changed = self.absent()

        reportable = ReportableChanges(params=self.changes.to_return())
        changes = reportable.to_return()
        result.update(**changes)
        result.update(dict(changed=changed))
        self._announce_deprecations(result)
        return result

    def _announce_deprecations(self, result):
        # Surface any deprecation warnings collected during processing.
        # NOTE(review): this goes through self.client.module rather than
        # self.module — confirm the client exposes the module object.
        warnings = result.pop('__warnings', [])
        for warning in warnings:
            self.client.module.deprecate(
                msg=warning['msg'],
                version=warning['version']
            )

    def present(self):
        # Create only; an existing application is never updated in place.
        if self.exists():
            return False
        else:
            return self.create()

    def exists(self):
        """Return True when an application with the wanted name is reported."""
        uri = "https://{0}:{1}/mgmt/ap/query/v1/tenants/default/reports/AllApplicationsList?$filter=name+eq+'{2}'".format(
            self.client.provider['server'],
            self.client.provider['server_port'],
            self.want.name
        )
        resp = self.client.api.get(uri)
        try:
            response = resp.json()
        except ValueError as ex:
            raise F5ModuleError(str(ex))
        if resp.status == 200 and 'result' in response and 'totalItems' in response['result'] and response['result']['totalItems'] == 0:
            return False
        # NOTE(review): any other response (including error responses) is
        # treated as "exists" — confirm this is intended.
        return True

    def remove(self):
        # In check mode, report success without touching the device.
        if self.module.check_mode:
            return True
        self_link = self.remove_from_device()
        if self.want.wait:
            # Block until the DELETE task completes, then verify removal.
            self.wait_for_apply_template_task(self_link)
            if self.exists():
                raise F5ModuleError("Failed to delete the resource.")
        return True

    def has_no_service_environment(self):
        # True when neither a standalone device nor an SSG matched the
        # provided service_environment name.
        if self.want.default_device_reference is None and self.want.ssg_reference is None:
            return True
        return False

    def create(self):
        """Validate required options, then deploy the application."""
        if self.want.service_environment is None:
            raise F5ModuleError(
                "A 'service_environment' must be specified when creating a new application."
            )
        if self.want.servers is None:
            raise F5ModuleError(
                "At least one 'servers' item is needed when creating a new application."
            )
        if self.want.inbound_virtual is None:
            raise F5ModuleError(
                "An 'inbound_virtual' must be specified when creating a new application."
            )
        self._set_changed_options()
        # Reference resolution happens lazily, so only now can we tell
        # whether the service environment actually exists on the device.
        if self.has_no_service_environment():
            raise F5ModuleError(
                "The specified 'service_environment' ({0}) was not found.".format(self.want.service_environment)
            )
        if self.module.check_mode:
            return True
        self_link = self.create_on_device()
        if self.want.wait:
            self.wait_for_apply_template_task(self_link)
            if not self.exists():
                raise F5ModuleError(
                    "Failed to deploy application."
                )
        return True

    def create_on_device(self):
        """POST an apply-template task in CREATE mode; return its selfLink."""
        params = self.changes.api_params()
        params['mode'] = 'CREATE'
        uri = 'https://{0}:{1}/mgmt/cm/global/tasks/apply-template'.format(
            self.client.provider['server'],
            self.client.provider['server_port']
        )

        resp = self.client.api.post(uri, json=params)
        try:
            response = resp.json()
        except ValueError as ex:
            raise F5ModuleError(str(ex))
        if 'code' in response and response['code'] == 400:
            if 'message' in response:
                raise F5ModuleError(response['message'])
            else:
                # NOTE(review): _content is a private attribute of the
                # response — presumably the raw body; confirm.
                raise F5ModuleError(resp._content)
        return response['selfLink']

    def absent(self):
        # Remove only when something is there to remove.
        if self.exists():
            return self.remove()
        return False

    def remove_from_device(self):
        """POST an apply-template task in DELETE mode; return its selfLink."""
        params = dict(
            configSetName=self.want.name,
            mode='DELETE'
        )
        uri = 'https://{0}:{1}/mgmt/cm/global/tasks/apply-template'.format(
            self.client.provider['server'],
            self.client.provider['server_port']
        )
        resp = self.client.api.post(uri, json=params)
        try:
            response = resp.json()
        except ValueError as ex:
            raise F5ModuleError(str(ex))
        if 'code' in response and response['code'] == 400:
            if 'message' in response:
                raise F5ModuleError(response['message'])
            else:
                raise F5ModuleError(resp._content)
        return response['selfLink']

    def wait_for_apply_template_task(self, self_link):
        """Poll the task at ``self_link`` every 5 seconds until it finishes.

        NOTE(review): there is no timeout; a task that never reaches
        FINISHED/DONE and never reports an errorMessage blocks forever.
        """
        # The API returns selfLinks addressed to localhost; rewrite them
        # to the configured server.
        host = 'https://{0}:{1}'.format(
            self.client.provider['server'],
            self.client.provider['server_port']
        )
        uri = self_link.replace('https://localhost', host)

        while True:
            resp = self.client.api.get(uri)
            try:
                response = resp.json()
            except ValueError as ex:
                raise F5ModuleError(str(ex))
            if response['status'] == 'FINISHED' and response.get('currentStep', None) == 'DONE':
                return True
            elif 'errorMessage' in response:
                raise F5ModuleError(response['errorMessage'])
            time.sleep(5)
class ArgumentSpec(object):
    """Builds the argument spec consumed by AnsibleModule.

    Combines this module's own options with the shared F5 connection
    options from ``f5_argument_spec``.
    """

    def __init__(self):
        self.supports_check_mode = True
        argument_spec = dict(
            name=dict(required=True),
            description=dict(),
            servers=dict(
                type='list',
                # NOTE(review): each list item is expected to be a dict with
                # these sub-options; 'elements' is not declared here.
                options=dict(
                    address=dict(required=True),
                    port=dict(default=80)
                )
            ),
            inbound_virtual=dict(
                type='dict',
                options=dict(
                    address=dict(required=True),
                    netmask=dict(required=True),
                    port=dict(default=80)
                )
            ),
            service_environment=dict(),
            # Real booleans instead of the coercion-dependent string
            # defaults 'no'/'yes'; the effective values are unchanged.
            add_analytics=dict(type='bool', default=False),
            state=dict(
                default='present',
                choices=['present', 'absent']
            ),
            wait=dict(type='bool', default=True)
        )
        self.argument_spec = {}
        self.argument_spec.update(f5_argument_spec)
        self.argument_spec.update(argument_spec)
def main():
    """Module entry point: parse arguments, run the manager, report results."""
    spec = ArgumentSpec()
    module = AnsibleModule(
        argument_spec=spec.argument_spec,
        supports_check_mode=spec.supports_check_mode
    )
    client = F5RestClient(**module.params)

    try:
        manager = ModuleManager(module=module, client=client)
        results = manager.exec_module()
        exit_json(module, results, client)
    except F5ModuleError as ex:
        # All module-level failures funnel through the shared fail helper.
        fail_json(module, ex, client)


if __name__ == '__main__':
    main()
"""DBM-like dummy module"""
from collections import defaultdict
from typing import Any
class DummyDB(dict):
    """In-memory stand-in for a DBM database: a dict with a ``close`` method."""

    def close(self):
        """No-op, present for DBM API compatibility."""
        pass


# DBM modules expose their error class under the name ``error``.
error = KeyError


# One shared DummyDB per file name, so repeated opens see the same data.
_DATABASES: defaultdict[Any, DummyDB] = defaultdict(DummyDB)


def open(file, flag="r", mode=0o666):  # noqa: A001
    """Open (or lazily create) the dummy database registered for *file*.

    ``flag`` and ``mode`` exist only for DBM API compatibility and are
    ignored.
    """
    # The defaultdict guarantees the same instance for the same file.
    return _DATABASES[file]
"""You have deposited a specific amount of dollars into your bank account. Each year your balance increases at the same
growth rate. Find out how long it would take for your balance to pass a specific threshold with the assumption that you
don't make any additional deposits.
Example
For deposit = 100, rate = 20 and threshold = 170, the output should be
depositProfit(deposit, rate, threshold) = 3.
Each year the amount of money on your account increases by 20%. It means that throughout the years your balance would be:
year 0: 100;
year 1: 120;
year 2: 144;
year 3: 172,8.
Thus, it will take 3 years for your balance to pass the threshold, which is the answer.
Input/Output
[time limit] 4000ms (py)
[input] integer deposit
The initial deposit as a positive integer.
Guaranteed constraints:
1 <= deposit <= 100.
[input] integer rate
The rate of increase. Each year the balance increases by the rate percent of the current sum.
Guaranteed constraints:
1 <= rate <= 100.
[input] integer threshold
The target balance.
Guaranteed constraints:
deposit < threshold <= 200.
[output] integer
The number of years it would take to hit the threshold.
"""
def depositProfit(principal, rate, threshold):
    """Return the number of whole years needed for `principal`, growing at
    `rate` percent per year (compounded annually), to reach `threshold`.

    Returns 0 when the principal already meets the threshold (the problem
    constraints guarantee deposit < threshold, so this generalization does
    not change behavior for valid inputs).
    """
    balance = float(principal)
    # Annual growth factor; 100.0 keeps the division float-safe on Python 2.
    growth = 1 + rate / 100.0
    years = 0
    # Incremental compounding avoids recomputing (1 + r)**t every year.
    while balance < threshold:
        balance *= growth
        years += 1
    return years
if __name__ == '__main__':
    # Call form of print works on both Python 2 and 3; the original
    # print statement is a syntax error on Python 3.
    print(depositProfit(100, 1, 101))
import copy
import random
import string
import uuid
from datetime import datetime
from itertools import product
import six
from django.conf import settings
from django.contrib.auth.models import (
AbstractBaseUser, BaseUserManager, PermissionsMixin,
)
from django.core.validators import RegexValidator
from django.db import models
from django.db.models import Q, Count
from django.template.defaultfilters import date as _date
from django.utils.functional import cached_property
from django.utils.timezone import now
from django.utils.translation import ugettext_lazy as _
from versions.models import (
Versionable as BaseVersionable, VersionedForeignKey,
VersionedManyToManyField, get_utc_now,
)
from pretix.base.i18n import I18nCharField, I18nTextField
from pretix.base.settings import SettingsProxy
from .types import VariationDict
class Versionable(BaseVersionable):
    class Meta:
        abstract = True

    def clone_shallow(self, forced_version_date=None):
        """
        This behaves like clone(), but misses all the Many2Many-relation-handling. This is
        a performance optimization for cases in which we have to handle the Many2Many relations
        by hand anyways.
        """
        if not self.pk:  # NOQA
            raise ValueError('Instance must be saved before it can be cloned')

        if self.version_end_date:  # NOQA
            raise ValueError('This is a historical item and can not be cloned.')

        if forced_version_date:  # NOQA
            if not self.version_start_date <= forced_version_date <= get_utc_now():
                raise ValueError('The clone date must be between the version start date and now.')
        else:
            forced_version_date = get_utc_now()

        # The current row becomes the closed historical version; the copy
        # becomes the new open-ended head.
        earlier_version = self
        later_version = copy.copy(earlier_version)
        later_version.version_end_date = None
        later_version.version_start_date = forced_version_date

        # set earlier_version's ID to a new UUID so the clone (later_version) can
        # get the old one -- this allows 'head' to always have the original
        # id allowing us to get at all historic foreign key relationships
        earlier_version.id = six.u(str(uuid.uuid4()))
        earlier_version.version_end_date = forced_version_date
        earlier_version.save()

        # Re-point M2M through-rows so the historical row keeps them while
        # the new head starts clean (clone_relations_shallow below).
        for field in earlier_version._meta.many_to_many:
            earlier_version.clone_relations_shallow(later_version, field.attname, forced_version_date)

        if hasattr(earlier_version._meta, 'many_to_many_related'):
            for rel in earlier_version._meta.many_to_many_related:
                earlier_version.clone_relations_shallow(later_version, rel.via_field_name, forced_version_date)

        later_version.save()
        return later_version

    def clone_relations_shallow(self, clone, manager_field_name, forced_version_date):
        # Source: the original object, where relations are currently pointing to
        source = getattr(self, manager_field_name)  # returns a VersionedRelatedManager instance

        # Destination: the clone, where the cloned relations should point to
        # A single UPDATE re-parents the through-rows and closes their
        # version window, instead of touching them one by one.
        source.through.objects.filter(**{source.source_field.attname: clone.id}).update(**{
            source.source_field.attname: self.id, 'version_end_date': forced_version_date
        })
class UserManager(BaseUserManager):
    """
    This is the user manager for our custom user model. See the User
    model documentation to see what's so special about our user model.
    """

    def create_user(self, email, password=None, **kwargs):
        # Create, password-hash and persist a regular user.
        user = self.model(email=email, **kwargs)
        user.set_password(password)
        user.save()
        return user

    def create_superuser(self, email, password=None):  # NOQA
        # Not used in the software but required by Django
        if password is None:
            # NOTE(review): bare Exception; a more specific type would be
            # clearer, but the raised type is left unchanged here.
            raise Exception("You must provide a password")
        user = self.model(email=email)
        user.is_staff = True
        user.is_superuser = True
        user.set_password(password)
        user.save()
        return user
class User(AbstractBaseUser, PermissionsMixin):
    """
    This is the user model used by pretix for authentication.

    :param email: The user's e-mail address, used for identification.
    :type email: str
    :param givenname: The user's given name. May be empty or null.
    :type givenname: str
    :param familyname: The user's family name. May be empty or null.
    :type familyname: str
    :param is_active: Whether this user account is activated.
    :type is_active: bool
    :param is_staff: ``True`` for system operators.
    :type is_staff: bool
    :param date_joined: The datetime of the user's registration.
    :type date_joined: datetime
    :param locale: The user's preferred locale code.
    :type locale: str
    :param timezone: The user's preferred timezone.
    :type timezone: str
    """

    # Users log in with their e-mail address, not a username.
    USERNAME_FIELD = 'email'
    REQUIRED_FIELDS = []

    email = models.EmailField(unique=True, db_index=True, null=True, blank=True,
                              verbose_name=_('E-mail'))
    givenname = models.CharField(max_length=255, blank=True, null=True,
                                 verbose_name=_('Given name'))
    familyname = models.CharField(max_length=255, blank=True, null=True,
                                  verbose_name=_('Family name'))
    is_active = models.BooleanField(default=True,
                                    verbose_name=_('Is active'))
    is_staff = models.BooleanField(default=False,
                                   verbose_name=_('Is site admin'))
    date_joined = models.DateTimeField(auto_now_add=True,
                                       verbose_name=_('Date joined'))
    locale = models.CharField(max_length=50,
                              choices=settings.LANGUAGES,
                              default=settings.LANGUAGE_CODE,
                              verbose_name=_('Language'))
    timezone = models.CharField(max_length=100,
                                default=settings.TIME_ZONE,
                                verbose_name=_('Timezone'))

    objects = UserManager()

    class Meta:
        verbose_name = _("User")
        verbose_name_plural = _("Users")

    def save(self, *args, **kwargs):
        # E-mail addresses are stored lowercase for case-insensitive login.
        # NOTE(review): assumes email is set; a null email (allowed by the
        # field) would raise here — confirm.
        self.email = self.email.lower()
        super().save(*args, **kwargs)

    def __str__(self):
        return self.email

    def get_short_name(self) -> str:
        """
        Returns the first of the following user properties that is found to exist:

        * Given name
        * Family name
        * E-mail address
        """
        if self.givenname:
            return self.givenname
        elif self.familyname:
            return self.familyname
        else:
            return self.email

    def get_full_name(self) -> str:
        """
        Returns the first of the following user properties that is found to exist:

        * A combination of given name and family name, depending on the locale
        * Given name
        * Family name
        * User name
        """
        if self.givenname and not self.familyname:
            return self.givenname
        elif not self.givenname and self.familyname:
            return self.familyname
        elif self.familyname and self.givenname:
            # Locale-aware combination, e.g. "Doe, John".
            return _('%(family)s, %(given)s') % {
                'family': self.familyname,
                'given': self.givenname
            }
        else:
            return self.email
def cachedfile_name(instance, filename):
    """Storage path for a CachedFile: keyed by its id, keeping the extension."""
    extension = filename.split('.')[-1]
    return 'cachedfiles/%s.%s' % (instance.id, extension)
class CachedFile(models.Model):
    """
    A cached file (e.g. pre-generated ticket PDF)
    """
    # Random UUID primary key; also embedded in the storage file name.
    id = models.UUIDField(primary_key=True, default=uuid.uuid4)
    # When this cache entry may be discarded.
    expires = models.DateTimeField(null=True, blank=True)
    date = models.DateTimeField(null=True, blank=True)
    # Original file name presented on download.
    filename = models.CharField(max_length=255)
    # NOTE(review): presumably the MIME type of the file — confirm.
    type = models.CharField(max_length=255)
    file = models.FileField(null=True, blank=True, upload_to=cachedfile_name)
class Organizer(Versionable):
    """
    This model represents an entity organizing events, e.g. a company, institution,
    charity, person, …

    :param name: The organizer's name
    :type name: str
    :param slug: A globally unique, short name for this organizer, to be used
                 in URLs and similar places.
    :type slug: str
    """

    name = models.CharField(max_length=200,
                            verbose_name=_("Name"))
    slug = models.SlugField(
        max_length=50, db_index=True,
        help_text=_(
            "Should be short, only contain lowercase letters and numbers, and must be unique among your events. "
            "This is being used in addresses and bank transfer references."),
        validators=[
            RegexValidator(
                regex="^[a-zA-Z0-9.-]+$",
                message=_("The slug may only contain letters, numbers, dots and dashes.")
            )
        ],
        verbose_name=_("Slug"),
    )
    # Users with any permission on this organizer (see OrganizerPermission).
    permitted = models.ManyToManyField(User, through='OrganizerPermission',
                                       related_name="organizers")

    class Meta:
        verbose_name = _("Organizer")
        verbose_name_plural = _("Organizers")
        ordering = ("name",)

    def __str__(self):
        return self.name

    @cached_property
    def settings(self) -> SettingsProxy:
        """
        Returns an object representing this organizer's settings
        """
        return SettingsProxy(self, type=OrganizerSetting)
class OrganizerPermission(Versionable):
    """
    The relation between an Organizer and an User who has permissions to
    access an organizer profile.

    :param organizer: The organizer this relation refers to
    :type organizer: Organizer
    :param user: The user this set of permissions is valid for
    :type user: User
    :param can_create_events: Whether or not this user can create new events with this
                              organizer account.
    :type can_create_events: bool
    """

    organizer = VersionedForeignKey(Organizer)
    user = models.ForeignKey(User, related_name="organizer_perms")
    can_create_events = models.BooleanField(
        default=True,
        verbose_name=_("Can create events"),
    )

    class Meta:
        verbose_name = _("Organizer permission")
        verbose_name_plural = _("Organizer permissions")

    def __str__(self):
        return _("%(name)s on %(object)s") % {
            'name': str(self.user),
            'object': str(self.organizer),
        }
class Event(Versionable):
    """
    This model represents an event. An event is anything you can buy
    tickets for.

    :param organizer: The organizer this event belongs to
    :type organizer: Organizer
    :param name: This events full title
    :type name: str
    :param slug: A short, alphanumeric, all-lowercase name for use in URLs. The slug has to
                 be unique among the events of the same organizer.
    :type slug: str
    :param currency: The currency of all prices and payments of this event
    :type currency: str
    :param date_from: The datetime this event starts
    :type date_from: datetime
    :param date_to: The datetime this event ends
    :type date_to: datetime
    :param presale_start: No tickets will be sold before this date.
    :type presale_start: datetime
    :param presale_end: No tickets will be sold before this date.
    :type presale_end: datetime
    :param plugins: A comma-separated list of plugin names that are active for this
                    event.
    :type plugins: str
    """

    organizer = VersionedForeignKey(Organizer, related_name="events",
                                    on_delete=models.PROTECT)
    name = I18nCharField(
        max_length=200,
        verbose_name=_("Name"),
    )
    slug = models.SlugField(
        max_length=50, db_index=True,
        help_text=_(
            "Should be short, only contain lowercase letters and numbers, and must be unique among your events. "
            "This is being used in addresses and bank transfer references."),
        validators=[
            RegexValidator(
                regex="^[a-zA-Z0-9.-]+$",
                message=_("The slug may only contain letters, numbers, dots and dashes."),
            )
        ],
        verbose_name=_("Slug"),
    )
    # Users with any permission on this event (see EventPermission).
    permitted = models.ManyToManyField(User, through='EventPermission',
                                       related_name="events", )
    currency = models.CharField(max_length=10,
                                verbose_name=_("Default currency"),
                                default=settings.DEFAULT_CURRENCY)
    date_from = models.DateTimeField(verbose_name=_("Event start time"))
    date_to = models.DateTimeField(null=True, blank=True,
                                   verbose_name=_("Event end time"))
    presale_end = models.DateTimeField(
        null=True, blank=True,
        verbose_name=_("End of presale"),
        help_text=_("No products will be sold after this date."),
    )
    presale_start = models.DateTimeField(
        null=True, blank=True,
        verbose_name=_("Start of presale"),
        help_text=_("No products will be sold before this date."),
    )
    plugins = models.TextField(
        null=True, blank=True,
        verbose_name=_("Plugins"),
    )
    # NOTE(review): in-memory flag, not a model field; appears to be used
    # by the locking service to mark that this instance holds the lock —
    # confirm against pretix.base.services.locking.
    locked_here = False

    class Meta:
        verbose_name = _("Event")
        verbose_name_plural = _("Events")
        # unique_together = (("organizer", "slug"),)  # TODO: Enforce manually
        ordering = ("date_from", "name")

    def __str__(self):
        return str(self.name)

    def save(self, *args, **kwargs):
        # Any change to the event invalidates the event-related cache.
        obj = super().save(*args, **kwargs)
        self.get_cache().clear()
        return obj

    def get_plugins(self) -> "list[str]":
        """
        Get the names of the plugins activated for this event as a list.
        """
        if self.plugins is None:
            return []
        return self.plugins.split(",")

    def get_date_from_display(self) -> str:
        """
        Returns a formatted string containing the start date of the event with respect
        to the current locale and to the ``show_times`` setting.
        """
        return _date(
            self.date_from,
            "DATETIME_FORMAT" if self.settings.show_times else "DATE_FORMAT"
        )

    def get_date_to_display(self) -> str:
        """
        Returns a formatted string containing the start date of the event with respect
        to the current locale and to the ``show_times`` setting. Returns an empty string
        if ``show_date_to`` is ``False``.
        """
        if not self.settings.show_date_to:
            return ""
        return _date(
            self.date_to,
            "DATETIME_FORMAT" if self.settings.show_times else "DATE_FORMAT"
        )

    def get_cache(self) -> "pretix.base.cache.EventRelatedCache":
        """
        Returns an :py:class:`EventRelatedCache` object. This behaves equivalent to
        Django's built-in cache backends, but puts you into an isolated environment for
        this event, so you don't have to prefix your cache keys. In addition, the cache
        is being cleared every time the event or one of its related objects change.
        """
        from pretix.base.cache import EventRelatedCache
        return EventRelatedCache(self)

    @cached_property
    def settings(self) -> SettingsProxy:
        """
        Returns an object representing this event's settings
        """
        # Falls back to the organizer's settings for unset keys.
        return SettingsProxy(self, type=EventSetting, parent=self.organizer)

    @property
    def presale_has_ended(self):
        # True once the configured presale end has passed.
        if self.presale_end and now() > self.presale_end:
            return True
        return False

    @property
    def presale_is_running(self):
        # Running unless before the configured start or after the end.
        if self.presale_start and now() < self.presale_start:
            return False
        if self.presale_end and now() > self.presale_end:
            return False
        return True

    def lock(self):
        """
        Returns a contextmanager that can be used to lock an event for bookings
        """
        from .services import locking
        return locking.LockManager(self)
class EventPermission(Versionable):
    """
    The relation between an Event and an User who has permissions to
    access an event.

    :param event: The event this refers to
    :type event: Event
    :param user: The user these permission set applies to
    :type user: User
    :param can_change_settings: If ``True``, the user can change all basic settings for this event.
    :type can_change_settings: bool
    :param can_change_items: If ``True``, the user can change and add items and related objects for this event.
    :type can_change_items: bool
    :param can_view_orders: If ``True``, the user can inspect details of all orders.
    :type can_view_orders: bool
    :param can_change_orders: If ``True``, the user can change details of orders
    :type can_change_orders: bool
    """

    event = VersionedForeignKey(Event)
    user = models.ForeignKey(User, related_name="event_perms")
    # All permissions default to granted; restrict as needed.
    can_change_settings = models.BooleanField(
        default=True,
        verbose_name=_("Can change event settings")
    )
    can_change_items = models.BooleanField(
        default=True,
        verbose_name=_("Can change product settings")
    )
    can_view_orders = models.BooleanField(
        default=True,
        verbose_name=_("Can view orders")
    )
    can_change_permissions = models.BooleanField(
        default=True,
        verbose_name=_("Can change permissions")
    )
    can_change_orders = models.BooleanField(
        default=True,
        verbose_name=_("Can change orders")
    )

    class Meta:
        verbose_name = _("Event permission")
        verbose_name_plural = _("Event permissions")

    def __str__(self):
        return _("%(name)s on %(object)s") % {
            'name': str(self.user),
            'object': str(self.event),
        }
class ItemCategory(Versionable):
    """
    Items can be sorted into these categories.

    :param event: The event this belongs to
    :type event: Event
    :param name: The name of this category
    :type name: str
    :param position: An integer, used for sorting
    :type position: int
    """

    event = VersionedForeignKey(
        Event,
        on_delete=models.CASCADE,
        related_name='categories',
    )
    name = I18nCharField(
        max_length=255,
        verbose_name=_("Category name"),
    )
    position = models.IntegerField(
        default=0
    )

    class Meta:
        verbose_name = _("Product category")
        verbose_name_plural = _("Product categories")
        ordering = ('position', 'version_birth_date')

    def __str__(self):
        return str(self.name)

    def delete(self, *args, **kwargs):
        # Keep the event-related cache consistent after removal.
        super().delete(*args, **kwargs)
        if self.event:
            self.event.get_cache().clear()

    def save(self, *args, **kwargs):
        # Keep the event-related cache consistent after changes.
        super().save(*args, **kwargs)
        if self.event:
            self.event.get_cache().clear()

    @property
    def sortkey(self):
        # Position first, creation date as tie-breaker (matches Meta.ordering).
        return self.position, self.version_birth_date

    def __lt__(self, other):
        return self.sortkey < other.sortkey
class Property(Versionable):
    """
    A property is a modifier which can be applied to an Item. For example
    'Size' would be a property associated with the item 'T-Shirt'.

    :param event: The event this belongs to
    :type event: Event
    :param name: The name of this property.
    :type name: str
    """

    event = VersionedForeignKey(
        Event,
        related_name="properties",
    )
    name = I18nCharField(
        max_length=250,
        verbose_name=_("Property name"),
    )

    class Meta:
        verbose_name = _("Product property")
        verbose_name_plural = _("Product properties")

    def __str__(self):
        return str(self.name)

    def delete(self, *args, **kwargs):
        # Keep the event-related cache consistent after removal.
        super().delete(*args, **kwargs)
        if self.event:
            self.event.get_cache().clear()

    def save(self, *args, **kwargs):
        # Keep the event-related cache consistent after changes.
        super().save(*args, **kwargs)
        if self.event:
            self.event.get_cache().clear()
class PropertyValue(Versionable):
    """
    A value of a property. If the property would be 'T-Shirt size',
    this could be 'M' or 'L'.

    :param prop: The property this value is a valid option for.
    :type prop: Property
    :param value: The value, as a human-readable string
    :type value: str
    :param position: An integer, used for sorting
    :type position: int
    """

    prop = VersionedForeignKey(
        Property,
        on_delete=models.CASCADE,
        related_name="values"
    )
    value = I18nCharField(
        max_length=250,
        verbose_name=_("Value"),
    )
    position = models.IntegerField(
        default=0
    )

    class Meta:
        verbose_name = _("Property value")
        verbose_name_plural = _("Property values")
        ordering = ("position", "version_birth_date")

    def __str__(self):
        return "%s: %s" % (self.prop.name, self.value)

    def delete(self, *args, **kwargs):
        # Cache invalidation goes through the owning property's event.
        super().delete(*args, **kwargs)
        if self.prop:
            self.prop.event.get_cache().clear()

    def save(self, *args, **kwargs):
        # Cache invalidation goes through the owning property's event.
        super().save(*args, **kwargs)
        if self.prop:
            self.prop.event.get_cache().clear()

    @property
    def sortkey(self):
        # Position first, creation date as tie-breaker (matches Meta.ordering).
        return self.position, self.version_birth_date

    def __lt__(self, other):
        return self.sortkey < other.sortkey
class Question(Versionable):
    """
    A question is an input field that can be used to extend a ticket
    by custom information, e.g. "Attendee age". A question can allow one or
    several input types, currently:

    * a number (``TYPE_NUMBER``)
    * a one-line string (``TYPE_STRING``)
    * a multi-line string (``TYPE_TEXT``)
    * a boolean (``TYPE_BOOLEAN``)

    :param event: The event this question belongs to
    :type event: Event
    :param question: The question text. This will be displayed next to the input field.
    :type question: str
    :param type: One of the above types
    :param required: Whether answering this question is required for submitting an order including
                     items associated with this question.
    :type required: bool
    """
    TYPE_NUMBER = "N"
    TYPE_STRING = "S"
    TYPE_TEXT = "T"
    TYPE_BOOLEAN = "B"
    TYPE_CHOICES = (
        (TYPE_NUMBER, _("Number")),
        (TYPE_STRING, _("Text (one line)")),
        (TYPE_TEXT, _("Multiline text")),
        (TYPE_BOOLEAN, _("Yes/No")),
    )

    event = VersionedForeignKey(
        Event,
        related_name="questions",
    )
    question = I18nTextField(
        verbose_name=_("Question"),
    )
    type = models.CharField(
        max_length=5,
        choices=TYPE_CHOICES,
        verbose_name=_("Question type"),
    )
    required = models.BooleanField(
        default=False,
        verbose_name=_("Required question"),
    )

    class Meta:
        verbose_name = _("Question")
        verbose_name_plural = _("Questions")

    def __str__(self):
        return str(self.question)

    def delete(self, *args, **kwargs):
        # Clear the event-wide cache, as cached data may depend on this object.
        super().delete(*args, **kwargs)
        if self.event:
            self.event.get_cache().clear()

    def save(self, *args, **kwargs):
        # Clear the event-wide cache, as cached data may depend on this object.
        super().save(*args, **kwargs)
        if self.event:
            self.event.get_cache().clear()
def itempicture_upload_to(instance, filename):
    """
    Build the storage path for an item's product picture.

    The file is placed under ``<organizer slug>/<event slug>/item-<identity>``,
    keeping only the extension of the originally uploaded file name.
    """
    extension = filename.rsplit('.', 1)[-1]
    return '{}/{}/item-{}.{}'.format(
        instance.event.organizer.slug,
        instance.event.slug,
        instance.identity,
        extension,
    )
class Item(Versionable):
    """
    An item is a thing which can be sold. It belongs to an event and may or may not belong to a category.
    Items are often also called 'products' but are named 'items' internally due to historic reasons.

    It has a default price which might be overridden by restrictions.

    :param event: The event this belongs to.
    :type event: Event
    :param category: The category this belongs to. May be null.
    :type category: ItemCategory
    :param name: The name of this item:
    :type name: str
    :param active: Whether this item is being sold
    :type active: bool
    :param short_description: A short description
    :type short_description: str
    :param long_description: A long description
    :type long_description: str
    :param default_price: The item's default price
    :type default_price: decimal.Decimal
    :param tax_rate: The VAT tax that is included in this item's price (in %)
    :type tax_rate: decimal.Decimal
    :param properties: A set of ``Property`` objects that should be applied to this item
    :param questions: A set of ``Question`` objects that should be applied to this item
    :param admission: ``True``, if this item allows persons to enter the event (as opposed to e.g. merchandise)
    :type admission: bool
    :param picture: A product picture to be shown next to the product description.
    :type picture: File
    """

    event = VersionedForeignKey(
        Event,
        on_delete=models.PROTECT,
        related_name="items",
        verbose_name=_("Event"),
    )
    category = VersionedForeignKey(
        ItemCategory,
        on_delete=models.PROTECT,
        related_name="items",
        blank=True, null=True,
        verbose_name=_("Category"),
    )
    name = I18nCharField(
        max_length=255,
        verbose_name=_("Item name"),
    )
    active = models.BooleanField(
        default=True,
        verbose_name=_("Active"),
    )
    short_description = I18nTextField(
        verbose_name=_("Short description"),
        help_text=_("This is shown below the product name in lists."),
        null=True, blank=True,
    )
    long_description = I18nTextField(
        verbose_name=_("Long description"),
        null=True, blank=True,
    )
    default_price = models.DecimalField(
        verbose_name=_("Default price"),
        max_digits=7, decimal_places=2, null=True
    )
    tax_rate = models.DecimalField(
        null=True, blank=True,
        verbose_name=_("Taxes included in percent"),
        max_digits=7, decimal_places=2
    )
    properties = VersionedManyToManyField(
        Property,
        related_name='items',
        verbose_name=_("Properties"),
        blank=True,
        help_text=_(
            'The selected properties will be available for the user '
            'to select. After saving this field, move to the '
            '\'Variations\' tab to configure the details.'
        )
    )
    questions = VersionedManyToManyField(
        Question,
        related_name='items',
        verbose_name=_("Questions"),
        blank=True,
        help_text=_(
            'The user will be asked to fill in answers for the '
            'selected questions'
        )
    )
    admission = models.BooleanField(
        verbose_name=_("Is an admission ticket"),
        help_text=_(
            'Whether or not buying this product allows a person to enter '
            'your event'
        ),
        default=False
    )
    position = models.IntegerField(
        default=0
    )
    picture = models.ImageField(
        verbose_name=_("Product picture"),
        null=True, blank=True,
        upload_to=itempicture_upload_to
    )

    class Meta:
        verbose_name = _("Product")
        verbose_name_plural = _("Products")
        ordering = ("category__position", "category", "position")

    def __str__(self):
        return str(self.name)

    def save(self, *args, **kwargs):
        # Clear the event-wide cache, as cached data may depend on this object.
        super().save(*args, **kwargs)
        if self.event:
            self.event.get_cache().clear()

    def delete(self, *args, **kwargs):
        # Clear the event-wide cache, as cached data may depend on this object.
        super().delete(*args, **kwargs)
        if self.event:
            self.event.get_cache().clear()

    def get_all_variations(self, use_cache: bool=False) -> "list[VariationDict]":
        """
        This method returns a list containing all variations of this
        item. The list contains one VariationDict per variation, where
        the Property IDs are keys and the PropertyValue objects are
        values. If an ItemVariation object exists, it is available in
        the dictionary via the special key 'variation'.

        VariationDicts differ from dicts only by specifying some extra
        methods.

        :param use_cache: If this parameter is set to ``True``, a second call to this method
                          on the same model instance won't query the database again but return
                          the previous result again.
        :type use_cache: bool
        """
        if use_cache and hasattr(self, '_get_all_variations_cache'):
            return self._get_all_variations_cache
        all_variations = self.variations.all().prefetch_related("values")
        all_properties = self.properties.all().prefetch_related("values")
        # Index the existing ItemVariation objects by the sorted tuple of
        # their (property, value) identity pairs so they can be matched
        # against the generated combinations below.
        variations_cache = {}
        for var in all_variations:
            key = []
            for v in var.values.all():
                key.append((v.prop_id, v.identity))
            key = tuple(sorted(key))
            variations_cache[key] = var

        result = []
        # Enumerate the cartesian product of all property values; every
        # combination is a theoretically possible variation, with or
        # without a persisted ItemVariation object.
        for comb in product(*[prop.values.all() for prop in all_properties]):
            if len(comb) == 0:
                # Item without properties: a single, empty variation.
                result.append(VariationDict())
                continue
            key = []
            var = VariationDict()
            for v in comb:
                key.append((v.prop.identity, v.identity))
                var[v.prop.identity] = v
            key = tuple(sorted(key))
            if key in variations_cache:
                var['variation'] = variations_cache[key]
            result.append(var)

        self._get_all_variations_cache = result
        return result

    def _get_all_generated_variations(self):
        # Return VariationDicts only for variations which actually have an
        # ItemVariation object that is a member of at least one quota.
        propids = set([p.identity for p in self.properties.all()])
        if len(propids) == 0:
            # Items without properties have exactly one (empty) variation.
            variations = [VariationDict()]
        else:
            all_variations = list(
                self.variations.annotate(
                    qc=Count('quotas')
                ).filter(qc__gt=0).prefetch_related(
                    "values", "values__prop", "quotas__event"
                )
            )
            variations = []
            for var in all_variations:
                values = list(var.values.all())
                # Make sure we don't expose stale ItemVariation objects which are
                # still around although they have an old set of properties
                if set([v.prop.identity for v in values]) != propids:
                    continue
                vardict = VariationDict()
                for v in values:
                    vardict[v.prop.identity] = v
                vardict['variation'] = var
                variations.append(vardict)
        return variations

    def get_all_available_variations(self, use_cache: bool=False):
        """
        This method returns a list of all variations which are theoretically
        possible for sale. It DOES call all activated restriction plugins, and it
        DOES only return variations which DO have an ItemVariation object, as all
        variations without one CAN NOT be part of a Quota and therefore can never
        be available for sale. The only exception is the empty variation
        for items without properties, which never has an ItemVariation object.

        This DOES NOT take into account quotas itself. Use ``is_available`` on the
        ItemVariation objects (or the Item itself, if it does not have variations) to
        determine availability by the terms of quotas.

        It is recommended to call::

            .prefetch_related('properties', 'variations__values__prop')

        when retrieving Item objects you are going to use this method on.
        """
        if use_cache and hasattr(self, '_get_all_available_variations_cache'):
            return self._get_all_available_variations_cache
        from .signals import determine_availability
        variations = self._get_all_generated_variations()
        responses = determine_availability.send(
            self.event, item=self,
            variations=variations, context=None,
            cache=self.event.get_cache()
        )
        for i, var in enumerate(variations):
            var['available'] = var['variation'].active if 'variation' in var else True
            if 'variation' in var:
                if var['variation'].default_price:
                    var['price'] = var['variation'].default_price
                else:
                    var['price'] = self.default_price
            else:
                var['price'] = self.default_price

            # It is possible, that *multiple* restriction plugins change the default price.
            # In this case, the cheapest one wins. As soon as there is a restriction
            # that changes the price, the default price has no effect.
            newprice = None
            for receiver, response in responses:
                if 'available' in response[i] and not response[i]['available']:
                    var['available'] = False
                    break
                if 'price' in response[i] and response[i]['price'] is not None \
                        and (newprice is None or response[i]['price'] < newprice):
                    newprice = response[i]['price']
            var['price'] = newprice or var['price']

        variations = [var for var in variations if var['available']]
        self._get_all_available_variations_cache = variations
        return variations

    def check_quotas(self):
        """
        This method is used to determine whether this Item is currently available
        for sale.

        :returns: any of the return codes of :py:meth:`Quota.availability()`.

        :raises ValueError: if you call this on an item which has properties associated with it.
                            Please use the method on the ItemVariation object you are interested in.
        """
        if self.properties.count() > 0:  # NOQA
            raise ValueError('Do not call this directly on items which have properties '
                             'but call this on their ItemVariation objects')
        return min([q.availability() for q in self.quotas.all()])

    def check_restrictions(self):
        """
        This method is used to determine whether this ItemVariation is restricted
        in sale by any restriction plugins.

        :returns:
            * ``False``, if the item is unavailable
            * the item's price, otherwise

        :raises ValueError: if you call this on an item which has properties associated with it.
                            Please use the method on the ItemVariation object you are interested in.
        """
        if self.properties.count() > 0:  # NOQA
            raise ValueError('Do not call this directly on items which have properties '
                             'but call this on their ItemVariation objects')
        from .signals import determine_availability
        vd = VariationDict()
        responses = determine_availability.send(
            self.event, item=self,
            variations=[vd], context=None,
            cache=self.event.get_cache()
        )
        price = self.default_price
        for receiver, response in responses:
            if 'available' in response[0] and not response[0]['available']:
                return False
            elif 'price' in response[0] and response[0]['price'] is not None and response[0]['price'] < price:
                price = response[0]['price']
        return price
class ItemVariation(Versionable):
    """
    A variation is an item combined with values for all properties
    associated with the item. For example, if your item is 'T-Shirt'
    and your properties are 'Size' and 'Color', then an example for a
    variation would be 'T-Shirt XL red'.

    Attention: _ALL_ combinations of PropertyValues _ALWAYS_ exist,
    even if there is no ItemVariation object for them! ItemVariation objects
    do NOT prove existence, they are only available to make it possible
    to override default values (like the price) for certain combinations
    of property values. However, appropriate ItemVariation objects will be
    created as soon as you add your variations to a quota.

    They also allow to explicitly EXCLUDE certain combinations of property
    values by creating an ItemVariation object for them with active set to
    False.

    Restrictions can be not only set to items but also directly to variations.

    :param item: The item this variation belongs to
    :type item: Item
    :param values: A set of ``PropertyValue`` objects defining this variation
    :param active: Whether this value is to be sold.
    :type active: bool
    :param default_price: This variation's default price
    :type default_price: decimal.Decimal
    """
    item = VersionedForeignKey(
        Item,
        related_name='variations'
    )
    values = VersionedManyToManyField(
        PropertyValue,
        related_name='variations',
    )
    active = models.BooleanField(
        default=True,
        verbose_name=_("Active"),
    )
    default_price = models.DecimalField(
        decimal_places=2, max_digits=7,
        null=True, blank=True,
        verbose_name=_("Default price"),
    )

    class Meta:
        verbose_name = _("Product variation")
        verbose_name_plural = _("Product variations")

    def __str__(self):
        return str(self.to_variation_dict())

    def delete(self, *args, **kwargs):
        # Clear the event-wide cache, as cached data may depend on this object.
        super().delete(*args, **kwargs)
        if self.item:
            self.item.event.get_cache().clear()

    def save(self, *args, **kwargs):
        # Clear the event-wide cache, as cached data may depend on this object.
        super().save(*args, **kwargs)
        if self.item:
            self.item.event.get_cache().clear()

    def check_quotas(self):
        """
        This method is used to determine whether this ItemVariation is currently
        available for sale in terms of quotas.

        :returns: any of the return codes of :py:meth:`Quota.availability()`.
        """
        # The most restrictive quota wins.
        return min([q.availability() for q in self.quotas.all()])

    def to_variation_dict(self):
        """
        :return: a :py:class:`VariationDict` representing this variation.
        """
        vd = VariationDict()
        for v in self.values.all():
            vd[v.prop.identity] = v
        vd['variation'] = self
        return vd

    def check_restrictions(self):
        """
        This method is used to determine whether this ItemVariation is restricted
        in sale by any restriction plugins.

        :returns:
            * ``False``, if the item is unavailable
            * the item's price, otherwise
        """
        from .signals import determine_availability
        responses = determine_availability.send(
            self.item.event, item=self.item,
            variations=[self.to_variation_dict()], context=None,
            cache=self.item.event.get_cache()
        )
        # The variation's own price takes precedence over the item's default;
        # restriction plugins may lower it further.
        price = self.default_price if self.default_price is not None else self.item.default_price
        for receiver, response in responses:
            if 'available' in response[0] and not response[0]['available']:
                return False
            elif 'price' in response[0] and response[0]['price'] is not None and response[0]['price'] < price:
                price = response[0]['price']
        return price

    def add_values_from_string(self, pk):
        """
        Add values to this ItemVariation using a serialized string of the form
        ``property-id:value-id,property-id:value-id``
        """
        for pair in pk.split(","):
            prop, value = pair.split(":")
            self.values.add(
                PropertyValue.objects.current.get(
                    identity=value,
                    prop_id=prop
                )
            )
class VariationsField(VersionedManyToManyField):
    """
    This is a ManyToManyField using the pretix.control.forms.VariationsField
    form field by default.
    """

    def formfield(self, **kwargs):
        from pretix.control.forms import VariationsField as FVariationsField
        from django.db.models.fields.related import RelatedField
        defaults = {
            'form_class': FVariationsField,
            # We don't need a queryset
            'queryset': ItemVariation.objects.none(),
        }
        defaults.update(kwargs)
        # If initial is passed in, it's a list of related objects, but the
        # MultipleChoiceField takes a list of IDs.
        if defaults.get('initial') is not None:
            initial = defaults['initial']
            if callable(initial):
                initial = initial()
            defaults['initial'] = [i.identity for i in initial]
        # Skip ManyToManyField in dependency chain: call RelatedField's
        # formfield() directly so ManyToManyField.formfield() cannot
        # override our form_class/queryset defaults.
        return super(RelatedField, self).formfield(**defaults)
class BaseRestriction(Versionable):
    """
    A restriction is the abstract concept of a rule that limits the availability
    of Items or ItemVariations. This model is just an abstract base class to be
    extended by restriction plugins.
    """
    # %(app_label)s_%(class)s makes the reverse accessor unique per
    # concrete restriction subclass.
    event = VersionedForeignKey(
        Event,
        on_delete=models.CASCADE,
        related_name="restrictions_%(app_label)s_%(class)s",
        verbose_name=_("Event"),
    )
    item = VersionedForeignKey(
        Item,
        blank=True, null=True,
        verbose_name=_("Item"),
        related_name="restrictions_%(app_label)s_%(class)s",
    )
    variations = VariationsField(
        'pretixbase.ItemVariation',
        blank=True,
        verbose_name=_("Variations"),
        related_name="restrictions_%(app_label)s_%(class)s",
    )

    class Meta:
        abstract = True
        verbose_name = _("Restriction")
        verbose_name_plural = _("Restrictions")

    def delete(self, *args, **kwargs):
        # Clear the event-wide cache, as cached data may depend on this object.
        super().delete(*args, **kwargs)
        if self.event:
            self.event.get_cache().clear()

    def save(self, *args, **kwargs):
        # Clear the event-wide cache, as cached data may depend on this object.
        super().save(*args, **kwargs)
        if self.event:
            self.event.get_cache().clear()
class Quota(Versionable):
    """
    A quota is a "pool of tickets". It is there to limit the number of items
    of a certain type to be sold. For example, you could have a quota of 500
    applied to all your items (because you only have that much space in your
    building), and also a quota of 100 applied to the VIP tickets for
    exclusivity. In this case, no more than 500 tickets will be sold in total
    and no more than 100 of them will be VIP tickets (but 450 normal and 50
    VIP tickets will be fine).

    As always, a quota can not only be tied to an item, but also to specific
    variations.

    Please read the documentation section on quotas carefully before doing
    anything with quotas. This might confuse you otherwise.
    http://docs.pretix.eu/en/latest/development/concepts.html#restriction-by-number

    The AVAILABILITY_* constants represent various states of a quota allowing
    its items/variations being for sale.

    AVAILABILITY_OK
        This item is available for sale.

    AVAILABILITY_RESERVED
        This item is currently not available for sale, because all available
        items are in people's shopping carts. It might become available
        again if those people do not proceed with checkout.

    AVAILABILITY_ORDERED
        This item is currently not available for sale, because all available
        items are ordered. It might become available again if those people
        do not pay.

    AVAILABILITY_GONE
        This item is completely sold out.

    :param event: The event this belongs to
    :type event: Event
    :param name: This quota's name
    :type name: str
    :param size: The number of items in this quota
    :type size: int
    :param items: The set of :py:class:`Item` objects this quota applies to
    :param variations: The set of :py:class:`ItemVariation` objects this quota applies to
    """
    AVAILABILITY_GONE = 0
    AVAILABILITY_ORDERED = 10
    AVAILABILITY_RESERVED = 20
    AVAILABILITY_OK = 100

    event = VersionedForeignKey(
        Event,
        on_delete=models.CASCADE,
        related_name="quotas",
        verbose_name=_("Event"),
    )
    name = models.CharField(
        max_length=200,
        verbose_name=_("Name")
    )
    size = models.PositiveIntegerField(
        verbose_name=_("Total capacity")
    )
    items = VersionedManyToManyField(
        Item,
        verbose_name=_("Item"),
        related_name="quotas",
        blank=True
    )
    variations = VariationsField(
        ItemVariation,
        related_name="quotas",
        blank=True,
        verbose_name=_("Variations")
    )

    class Meta:
        verbose_name = _("Quota")
        verbose_name_plural = _("Quotas")

    def __str__(self):
        return self.name

    def delete(self, *args, **kwargs):
        # Clear the event-wide cache, as cached data may depend on this object.
        super().delete(*args, **kwargs)
        if self.event:
            self.event.get_cache().clear()

    def save(self, *args, **kwargs):
        # Clear the event-wide cache, as cached data may depend on this object.
        super().save(*args, **kwargs)
        if self.event:
            self.event.get_cache().clear()

    def availability(self):
        """
        This method is used to determine whether Items or ItemVariations belonging
        to this quota should currently be available for sale.

        :returns: a tuple where the first entry is one of the ``Quota.AVAILABILITY_`` constants
                  and the second is the number of available tickets.
        """
        # TODO: These lookups are highly inefficient. However, we'll wait with optimizing
        #       until Django 1.8 is released, as the following feature might make it a
        #       lot easier:
        #       https://docs.djangoproject.com/en/1.8/ref/models/conditional-expressions/
        # TODO: Test for interference with old versions of Item-Quota-relations, etc.
        # TODO: Prevent corner-cases like people having ordered an item before it got
        #       its first variations added
        quotalookup = (
            (  # Orders for items which do not have any variations
                Q(variation__isnull=True)
                & Q(item__quotas__in=[self])
            ) | (  # Orders for items which do have any variations
                Q(variation__quotas__in=[self])
            )
        )
        # Count the claims on this quota in decreasing order of firmness:
        # paid orders first, then unexpired pending orders, then carts.
        paid_orders = OrderPosition.objects.current.filter(
            Q(order__status=Order.STATUS_PAID)
            & quotalookup
        ).count()
        if paid_orders >= self.size:
            return Quota.AVAILABILITY_GONE, 0

        pending_valid_orders = OrderPosition.objects.current.filter(
            Q(order__status=Order.STATUS_PENDING)
            & Q(order__expires__gte=now())
            & quotalookup
        ).count()
        if (paid_orders + pending_valid_orders) >= self.size:
            return Quota.AVAILABILITY_ORDERED, 0

        valid_cart_positions = CartPosition.objects.current.filter(
            Q(expires__gte=now())
            & quotalookup
        ).count()
        if (paid_orders + pending_valid_orders + valid_cart_positions) >= self.size:
            return Quota.AVAILABILITY_RESERVED, 0

        return Quota.AVAILABILITY_OK, self.size - paid_orders - pending_valid_orders - valid_cart_positions

    class QuotaExceededException(Exception):
        """Raised when a sale would exceed this quota's capacity."""
        pass
def generate_secret():
    """
    Return a random 32-character alphanumeric secret.

    Uses ``random.SystemRandom`` (backed by ``os.urandom``) instead of the
    module-level Mersenne-Twister PRNG, because this value is used as an
    access token (``Order.secret``) and therefore must be unpredictable.
    """
    rng = random.SystemRandom()
    alphabet = string.ascii_letters + string.digits
    return ''.join(rng.choice(alphabet) for _ in range(32))
class Order(Versionable):
    """
    An order is created when a user clicks 'buy' on his cart. It holds
    several OrderPositions and is connected to an user. It has an
    expiration date: If items run out of capacity, orders which are over
    their expiration date might be cancelled.

    An order -- like all objects -- has an ID, which is globally unique,
    but also a code, which is shorter and easier to memorize, but only
    unique among a single conference.

    :param code: In addition to the ID, which is globally unique, every
                 order has an order code, which is shorter and easier to
                 memorize, but is only unique among a single conference.
    :param status: The status of this order. One of:

        * ``STATUS_PENDING``
        * ``STATUS_PAID``
        * ``STATUS_EXPIRED``
        * ``STATUS_CANCELLED``
        * ``STATUS_REFUNDED``

    :param event: The event this belongs to
    :type event: Event
    :param user: The user who ordered this
    :type user: User
    :param datetime: The datetime of the order placement
    :type datetime: datetime
    :param expires: The date until this order has to be paid to guarantee the
                    reservation of the ordered items
    :type expires: datetime
    :param payment_date: The date of the payment completion (null, if not yet paid).
    :type payment_date: datetime
    :param payment_provider: The payment provider selected by the user
    :type payment_provider: str
    :param payment_fee: The payment fee calculated at checkout time
    :type payment_fee: decimal.Decimal
    :param payment_info: Arbitrary information stored by the payment provider
    :type payment_info: str
    :param total: The total amount of the order, including the payment fee
    :type total: decimal.Decimal
    """
    STATUS_PENDING = "n"
    STATUS_PAID = "p"
    STATUS_EXPIRED = "e"
    STATUS_CANCELLED = "c"
    STATUS_REFUNDED = "r"
    STATUS_CHOICE = (
        (STATUS_PENDING, _("pending")),
        (STATUS_PAID, _("paid")),
        (STATUS_EXPIRED, _("expired")),
        (STATUS_CANCELLED, _("cancelled")),
        (STATUS_REFUNDED, _("refunded"))
    )

    code = models.CharField(
        max_length=16,
        verbose_name=_("Order code")
    )
    status = models.CharField(
        max_length=3,
        choices=STATUS_CHOICE,
        verbose_name=_("Status")
    )
    event = VersionedForeignKey(
        Event,
        verbose_name=_("Event"),
        related_name="orders"
    )
    user = models.ForeignKey(
        User, null=True, blank=True,
        verbose_name=_("User"),
        related_name="orders"
    )
    guest_email = models.EmailField(
        null=True, blank=True,
        verbose_name=_('E-mail')
    )
    guest_locale = models.CharField(
        null=True, blank=True, max_length=32,
        verbose_name=_('Locale')
    )
    # Random token granting access to the order without a user account.
    secret = models.CharField(max_length=32, default=generate_secret)
    datetime = models.DateTimeField(
        verbose_name=_("Date")
    )
    expires = models.DateTimeField(
        verbose_name=_("Expiration date")
    )
    payment_date = models.DateTimeField(
        verbose_name=_("Payment date"),
        null=True, blank=True
    )
    payment_provider = models.CharField(
        null=True, blank=True,
        max_length=255,
        verbose_name=_("Payment provider")
    )
    payment_fee = models.DecimalField(
        decimal_places=2, max_digits=10,
        default=0, verbose_name=_("Payment method fee")
    )
    payment_info = models.TextField(
        verbose_name=_("Payment information"),
        null=True, blank=True
    )
    payment_manual = models.BooleanField(
        verbose_name=_("Payment state was manually modified"),
        default=False
    )
    total = models.DecimalField(
        decimal_places=2, max_digits=10,
        verbose_name=_("Total amount")
    )

    class Meta:
        verbose_name = _("Order")
        verbose_name_plural = _("Orders")
        ordering = ("-datetime",)

    def __str__(self):
        # Was previously (incorrectly) defined as ``str``, which Django and
        # print() never call; the dunder makes it an actual string conversion.
        return self.full_code

    @property
    def full_code(self):
        """
        An order code which is unique among all events of a single organizer,
        built by concatenating the event slug and the order code.
        """
        return self.event.slug.upper() + self.code

    def save(self, *args, **kwargs):
        # Fill in a unique code and the placement time on first save.
        if not self.code:
            self.assign_code()
        if not self.datetime:
            self.datetime = now()
        super().save(*args, **kwargs)

    def assign_code(self):
        """
        Assign a random 5-character code that is unique within this event.
        """
        # The character set omits easily-confused characters (0/O, 1/I, ...).
        charset = list('ABCDEFGHKLMNPQRSTUVWXYZ23456789')
        while True:
            code = "".join([random.choice(charset) for _ in range(5)])
            if not Order.objects.filter(event=self.event, code=code).exists():
                self.code = code
                return

    @property
    def can_modify_answers(self):
        """
        Is ``True`` if the user can change the question answers / attendee names that are
        related to the order. This checks order status and modification deadlines. It also
        returns ``False``, if there are no questions that can be answered.
        """
        if self.status not in (Order.STATUS_PENDING, Order.STATUS_PAID, Order.STATUS_EXPIRED):
            return False
        modify_deadline = self.event.settings.get('last_order_modification_date', as_type=datetime)
        if modify_deadline is not None and now() > modify_deadline:
            return False
        ask_names = self.event.settings.get('attendee_names_asked', as_type=bool)
        for cp in self.positions.all().prefetch_related('item__questions'):
            if (cp.item.admission and ask_names) or cp.item.questions.all():
                return True
        return False  # nothing there to modify

    def mark_refunded(self):
        """
        Mark this order as refunded. This clones the order object, sets the payment status and
        returns the cloned order object.
        """
        order = self.clone()
        order.status = Order.STATUS_REFUNDED
        order.save()
        return order

    def _can_be_paid(self):
        # Returns True if a payment may still be accepted, or a translated
        # error message string explaining why not.
        error_messages = {
            'late': _("The payment is too late to be accepted."),
        }
        if self.event.settings.get('payment_term_last') \
                and now() > self.event.settings.get('payment_term_last'):
            return error_messages['late']
        if now() < self.expires:
            return True
        if not self.event.settings.get('payment_term_accept_late'):
            return error_messages['late']
        # The order already expired; accept the late payment only if the
        # ordered items are still available.
        return self._is_still_available()

    def _is_still_available(self):
        # Re-checks quota availability for every position of this (expired)
        # order. Returns True, or a translated error message string.
        error_messages = {
            'unavailable': _('Some of the ordered products were no longer available.'),
            'busy': _('We were not able to process the request completely as the '
                      'server was too busy.'),
        }
        positions = list(self.positions.all().select_related(
            'item', 'variation'
        ).prefetch_related(
            'variation__values', 'variation__values__prop',
            'item__questions', 'answers'
        ))
        quota_cache = {}
        try:
            with self.event.lock():
                for op in positions:
                    quotas = list(op.item.quotas.all()) if op.variation is None else list(op.variation.quotas.all())
                    if len(quotas) == 0:
                        raise Quota.QuotaExceededException(error_messages['unavailable'])

                    for quota in quotas:
                        # Lock the quota, so no other thread is allowed to perform sales covered by this
                        # quota while we're doing so.
                        if quota.identity not in quota_cache:
                            quota_cache[quota.identity] = quota
                            quota.cached_availability = quota.availability()[1]
                        else:
                            # Use cached version
                            quota = quota_cache[quota.identity]
                        quota.cached_availability -= 1
                        if quota.cached_availability < 0:
                            # This quota is sold out/currently unavailable, so do not sell this at all
                            raise Quota.QuotaExceededException(error_messages['unavailable'])
        except Quota.QuotaExceededException as e:
            return str(e)
        except EventLock.LockTimeoutException:
            # Is raised when there are too many threads asking for quota locks and we were
            # unable to get one
            return error_messages['busy']
        return True

    @property
    def locale(self):
        # Prefer the account's locale; fall back to the guest checkout value.
        if self.user:
            return self.user.locale
        return self.guest_locale

    @property
    def email(self):
        # Prefer the account's e-mail; fall back to the guest checkout value.
        if self.user:
            return self.user.email
        return self.guest_email
class CachedTicket(models.Model):
    """
    Links an :py:class:`Order` to a :py:class:`CachedFile` holding a
    generated ticket for a specific ticket output provider.
    """
    order = VersionedForeignKey(Order, on_delete=models.CASCADE)
    cachedfile = models.ForeignKey(CachedFile, on_delete=models.CASCADE)
    # Identifier of the ticket output provider that produced the file.
    provider = models.CharField(max_length=255)
class QuestionAnswer(Versionable):
    """
    The answer to a Question, connected to an OrderPosition or CartPosition.

    :param orderposition: The order position this is related to, or null if this is
                          related to a cart position.
    :type orderposition: OrderPosition
    :param cartposition: The cart position this is related to, or null if this is related
                         to an order position.
    :type cartposition: CartPosition
    :param question: The question this is an answer for
    :type question: Question
    :param answer: The actual answer data
    :type answer: str
    """
    # Exactly one of orderposition/cartposition is expected to be set.
    orderposition = models.ForeignKey(
        'OrderPosition', null=True, blank=True,
        related_name='answers'
    )
    cartposition = models.ForeignKey(
        'CartPosition', null=True, blank=True,
        related_name='answers'
    )
    question = VersionedForeignKey(
        Question, related_name='answers'
    )
    answer = models.TextField()
class ObjectWithAnswers:
    """
    Mixin for positions (order or cart) that carry question answers.
    """

    def cache_answers(self):
        """
        Create two attributes on the object:

        (1) ``answ``: a dictionary mapping question id to answer string
        (2) ``questions``: a list of Question objects, each extended by an
            ``answer`` attribute
        """
        self.answ = {answer.question_id: answer.answer
                     for answer in self.answers.all()}
        self.questions = []
        for question in self.item.questions.all():
            # Unanswered questions get an empty string.
            question.answer = self.answ.get(question.identity, "")
            self.questions.append(question)
class OrderPosition(ObjectWithAnswers, Versionable):
    """
    An OrderPosition is one line of an order, representing one ordered items
    of a specified type (or variation).

    :param order: The order this is a part of
    :type order: Order
    :param item: The ordered item
    :type item: Item
    :param variation: The ordered ItemVariation or null, if the item has no properties
    :type variation: ItemVariation
    :param price: The price of this item
    :type price: decimal.Decimal
    :param attendee_name: The attendee's name, if entered.
    :type attendee_name: str
    """
    order = VersionedForeignKey(
        Order,
        verbose_name=_("Order"),
        related_name='positions'
    )
    item = VersionedForeignKey(
        Item,
        verbose_name=_("Item"),
        related_name='positions'
    )
    variation = VersionedForeignKey(
        ItemVariation,
        null=True, blank=True,
        verbose_name=_("Variation")
    )
    price = models.DecimalField(
        decimal_places=2, max_digits=10,
        verbose_name=_("Price")
    )
    attendee_name = models.CharField(
        max_length=255,
        verbose_name=_("Attendee name"),
        blank=True, null=True,
        help_text=_("Empty, if this product is not an admission ticket")
    )

    class Meta:
        verbose_name = _("Order position")
        verbose_name_plural = _("Order positions")

    @classmethod
    def transform_cart_positions(cls, cp: list, order) -> list:
        """
        Convert a list of CartPosition objects into OrderPosition objects
        attached to ``order``, moving their answers along and deleting the
        cart positions.

        :param cp: the cart positions to convert
        :param order: the order the new positions belong to
        :returns: the list of created OrderPosition objects
        """
        ops = []
        for cartpos in cp:
            op = OrderPosition(
                order=order, item=cartpos.item, variation=cartpos.variation,
                price=cartpos.price, attendee_name=cartpos.attendee_name
            )
            # Re-attach each answer to the new order position.
            for answ in cartpos.answers.all():
                answ = answ.clone()
                answ.orderposition = op
                answ.cartposition = None
                answ.save()
            op.save()
            cartpos.delete()
            ops.append(op)
        # Bugfix: the result list was built but never returned, although the
        # method is annotated ``-> list``.
        return ops
class CartPosition(ObjectWithAnswers, Versionable):
    """
    A cart position is similar to an order line, except that it is not
    yet part of a binding order but just placed by some user in his or
    her cart. It therefore normally has a much shorter expiration time
    than an ordered position, but still blocks an item in the quota pool
    as we do not want to throw out users while they're clicking through
    the checkout process.
    :param event: The event this belongs to
    :type event: Event
    :param item: The selected item
    :type item: Item
    :param user: The user who has this in his cart
    :type user: User
    :param variation: The selected ItemVariation or null, if the item has no properties
    :type variation: ItemVariation
    :param datetime: The datetime this item was put into the cart
    :type datetime: datetime
    :param expires: The date until this item is guaranteed to be reserved
    :type expires: datetime
    :param price: The price of this item
    :type price: decimal.Decimal
    :param attendee_name: The attendee's name, if entered.
    :type attendee_name: str
    """
    event = VersionedForeignKey(
        Event,
        verbose_name=_("Event")
    )
    user = models.ForeignKey(
        User, null=True, blank=True,
        verbose_name=_("User")
    )
    # Both user and session are nullable -- presumably the session key
    # identifies anonymous carts; confirm against the checkout views.
    session = models.CharField(
        max_length=255, null=True, blank=True,
        verbose_name=_("Session")
    )
    item = VersionedForeignKey(
        Item,
        verbose_name=_("Item")
    )
    variation = VersionedForeignKey(
        ItemVariation,
        null=True, blank=True,
        verbose_name=_("Variation")
    )
    price = models.DecimalField(
        decimal_places=2, max_digits=10,
        verbose_name=_("Price")
    )
    datetime = models.DateTimeField(
        verbose_name=_("Date"),
        auto_now_add=True
    )
    expires = models.DateTimeField(
        verbose_name=_("Expiration date")
    )
    attendee_name = models.CharField(
        max_length=255,
        verbose_name=_("Attendee name"),
        blank=True, null=True,
        help_text=_("Empty, if this product is not an admission ticket")
    )
    class Meta:
        verbose_name = _("Cart position")
        verbose_name_plural = _("Cart positions")
class EventSetting(Versionable):
    """
    An event setting is a key-value setting which can be set for a
    specific event
    """
    # Named 'object'; this shadows the builtin only inside the class body.
    object = VersionedForeignKey(Event, related_name='setting_objects')
    key = models.CharField(max_length=255)
    value = models.TextField()
class OrganizerSetting(Versionable):
    """
    An organizer setting is a key-value setting which can be set for an
    organizer. It will be inherited by the events of this organizer
    """
    # Named 'object'; this shadows the builtin only inside the class body.
    object = VersionedForeignKey(Organizer, related_name='setting_objects')
    key = models.CharField(max_length=255)
    value = models.TextField()
class EventLock(models.Model):
    """
    Database-backed lock row for an event.

    ``event`` is the lock key (max_length=36 -- presumably the event's
    UUID identity; confirm against callers); ``date`` auto-updates on
    every save, recording when the lock was last touched.
    """
    event = models.CharField(max_length=36, primary_key=True)
    date = models.DateTimeField(auto_now=True)
class LockTimeoutException(Exception):
    """Raised when acquiring an EventLock times out."""
    pass
# (c) 2012, Daniel Hokka Zakrisson <daniel@hozac.com>
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com> and others
# (c) 2017, Toshio Kuratomi <tkuratomi@ansible.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from abc import ABCMeta
from ansible import constants as C
from ansible.errors import AnsibleError
from ansible.module_utils._text import to_native
from ansible.module_utils.six import with_metaclass, string_types
from ansible.utils.display import Display
display = Display()
# Global so that all instances of a PluginLoader will share the caches
MODULE_CACHE = {}
PATH_CACHE = {}
PLUGIN_PATH_CACHE = {}
def get_plugin_class(obj):
    """Derive the plugin class name from either a string or an instance."""
    name = obj if isinstance(obj, string_types) else obj.__class__.__name__
    return name.lower().replace('module', '')
class AnsiblePlugin(with_metaclass(ABCMeta, object)):
    """
    Base class for Ansible plugins.

    Resolved configuration options are cached in ``self._options``;
    unknown options are resolved lazily through the global config manager.
    """
    # allow extra passthrough parameters
    allow_extras = False
    def __init__(self):
        self._options = {}
    def get_option(self, option, hostvars=None):
        # Resolve the option from the configuration system on first access;
        # afterwards it is served from the cache.
        if option not in self._options:
            try:
                option_value = C.config.get_config_value(option, plugin_type=get_plugin_class(self), plugin_name=self._load_name, variables=hostvars)
            except AnsibleError as e:
                # Surface configuration errors as KeyError so callers can
                # treat a misconfigured option like a missing one.
                raise KeyError(to_native(e))
            self.set_option(option, option_value)
        return self._options.get(option)
    def set_option(self, option, value):
        # Set/override a single resolved option value.
        self._options[option] = value
    def set_options(self, task_keys=None, var_options=None, direct=None):
        '''
        Sets the _options attribute with the configuration/keyword information for this plugin
        :arg task_keys: Dict with playbook keywords that affect this option
        :arg var_options: Dict with either 'connection variables'
        :arg direct: Dict with 'direct assignment'
        '''
        self._options = C.config.get_plugin_options(get_plugin_class(self), self._load_name, keys=task_keys, variables=var_options, direct=direct)
        # allow extras/wildcards from vars that are not directly consumed in configuration
        # this is needed to support things like winrm that can have extended protocol options we don't directly handle
        if self.allow_extras and var_options and '_extras' in var_options:
            self.set_option('_extras', var_options['_extras'])
    def has_option(self, option):
        # An empty cache means options were never resolved; resolve them all
        # once so membership checks are cheap afterwards.
        if not self._options:
            self.set_options()
        return option in self._options
    def _check_required(self):
        # FIXME: standardize required check based on config
        pass
"""
Benchmark scikit-learn's Ward implement compared to SciPy's
"""
import time
import numpy as np
from scipy.cluster import hierarchy
import pylab as pl
from sklearn.cluster import AgglomerativeClustering
# Benchmark grid: sample counts and feature counts, both on log scales.
ward = AgglomerativeClustering(n_clusters=3, linkage='ward')
n_samples = np.logspace(.5, 3, 9)
n_features = np.logspace(1, 3.5, 7)
N_samples, N_features = np.meshgrid(n_samples,
                                    n_features)
scikits_time = np.zeros(N_samples.shape)
scipy_time = np.zeros(N_samples.shape)
for i, n in enumerate(n_samples):
    for j, p in enumerate(n_features):
        # np.logspace yields floats; array shapes must be integers
        # (modern NumPy rejects float sizes).
        X = np.random.normal(size=(int(n), int(p)))
        t0 = time.time()
        ward.fit(X)
        scikits_time[j, i] = time.time() - t0
        t0 = time.time()
        hierarchy.ward(X)
        scipy_time[j, i] = time.time() - t0
# Ratio > 1 means scikit-learn is slower than SciPy for that grid cell.
ratio = scikits_time / scipy_time
pl.figure("scikit-learn Ward's method benchmark results")
pl.imshow(np.log(ratio), aspect='auto', origin="lower")
pl.colorbar()
pl.contour(ratio, levels=[1, ], colors='k')
# 'np.int' was removed in NumPy 1.24; the builtin 'int' is the
# documented replacement.
pl.yticks(range(len(n_features)), n_features.astype(int))
pl.ylabel('N features')
pl.xticks(range(len(n_samples)), n_samples.astype(int))
pl.xlabel('N samples')
pl.title("Scikit's time, in units of scipy time (log)")
pl.show()
import { createApp, h } from '../src'
describe('createApp for dom', () => {
  // Regression test for #2926: mounting into an SVG container must create
  // the rendered children in the SVG namespace.
  test('mount to SVG container', () => {
    const root = document.createElementNS('http://www.w3.org/2000/svg', 'svg')
    createApp({
      render() {
        return h('g')
      },
    }).mount(root)
    expect(root.children.length).toBe(1)
    expect(root.children[0]).toBeInstanceOf(SVGElement)
  })
  // Regression test for #4398: createApp must work on a copy of the root
  // component options rather than mutating the caller's object.
  test('should not mutate original root component options object', () => {
    const originalObj = {
      data() {
        return {
          counter: 0,
        }
      },
    }
    // The component has no template/render, so mounting warns; capture the
    // warning instead of letting it fail the test.
    const handler = vi.fn(msg => {
      expect(msg).toMatch(`Component is missing template or render function`)
    })
    const Root = { ...originalObj }
    const app = createApp(Root)
    app.config.warnHandler = handler
    app.mount(document.createElement('div'))
    // ensure mount is based on a copy of Root object rather than Root object itself
    expect(app._component).not.toBe(Root)
    // ensure no mutation happened to Root object
    expect(originalObj).toMatchObject(Root)
  })
})
#!/usr/bin/env python
# coding: utf-8
import time
from pykit import awssign
# to sign a request, you need to provide a dict which contains 'verb',
# 'uri', 'headers'
if __name__ == '__main__':
access_key = 'your access key'
secret_key = 'your secret key'
# use query string
request = {
'verb': 'GET',
'uri': '/aaa/b%20b?foo%2F&foo1=bar1%3F&foo2=bar2',
'headers': {'Host': 'foo.bar.com'},
}
signer = awssign.Signer(access_key, secret_key)
signer.add_auth(request)
print request['uri']
print request['headers']
# > /aaa/b%20b?foo%2F&foo1=bar1%3F&foo2=bar2
# > {'X-Amz-Content-SHA256': 'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855',
# 'Host': 'foo.bar.com',
# 'Authorization': 'AWS4-HMAC-SHA256 Credential=/20161208/us-east-1/s3/aws4_request, SignedHeaders=host;x-amz-date, Signature=0eefb35f051809e21f487499220ec7ed8243b0202f5ce6ab87fc177662d308de',
# ''X-Amz-Date': '20161208T113610Z'}
# use args
request = {
'verb': 'GET',
'uri': '/aaa/b%20b',
'args': {'foo/': True, 'foo1': 'bar1?', 'foo2': 'bar2'},
'headers': {'Host': 'foo.bar.com'},
}
signer = awssign.Signer(access_key, secret_key)
signer.add_auth(request)
print request['uri']
print request['headers']
# > /aaa/b%20b?foo%2F&foo1=bar1%3F&foo2=bar2
# > {'X-Amz-Content-SHA256': 'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855',
# 'Host': 'foo.bar.com',
# 'Authorization': 'AWS4-HMAC-SHA256 Credential=/20161208/us-east-1/s3/aws4_request, SignedHeaders=host;x-amz-date, Signature=0eefb35f051809e21f487499220ec7ed8243b0202f5ce6ab87fc177662d308de',
# ''X-Amz-Date': '20161208T113610Z'}
# query_auth
request = {
'verb': 'GET',
'uri': '/aaa/bbb',
'args': {'foo': ['bar1', 'bar2', True]},
'headers': {'Host': 'foo.bar.com'},
}
signer = awssign.Signer(access_key, secret_key)
signer.add_auth(request, query_auth=True)
print request['uri']
print request['headers']
# > /aaa/bbb?foo=bar1&foo=bar2&foo&X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Expires=60&X-Amz-Credential=your%20access%20key%2F20161208%2Fus-east-1%2Fs3%2Faws4_request&X-Amz-SignedHeaders=host&X-Amz-Date=20161208T114712Z&X-Amz-Signature=58485ef2a476102e36fcd92a16894faaa81cfc95b200f83153f2fafa6c5e5200
# > {'Host': 'foo.bar.com'}
# use custom expires
request = {
'verb': 'GET',
'uri': '/',
'headers': {'Host': 'foo.bar.com'},
}
signer = awssign.Signer(access_key, secret_key)
signer.add_auth(request, query_auth=True, expires=60 * 60 * 24)
print request['uri']
print request['headers']
# > /?X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Expires=86400&X-Amz-Credential=your%20access%20key%2F20161208%2Fus-east-1%2Fs3%2Faws4_request&X-Amz-SignedHeaders=host&X-Amz-Date=20161208T115310Z&X-Amz-Signature=3baa1b0bfeffb9ddb8f41a5ef245eef63021c521d368364b4dee488c5eee71e8
# > {'Host': 'foo.bar.com'}
# sign payload
request = {
'verb': 'PUT',
'uri': '/aaa/bbb',
'headers': {'Host': 'foo.bar.com'},
'body': 'bla bla'
}
signer = awssign.Signer(access_key, secret_key)
signer.add_auth(request, sign_payload=True)
print request['uri']
print request['headers']
# > /aaa/bbb
# > {'X-Amz-Content-SHA256': 'fdcf4254fc02e5e41e545599f0be4f9f65e8be431ebc1fd301a96ea88dd0d5d6', 'Host': 'foo.bar.com', 'Authorization': 'AWS4-HMAC-SHA256 Credential=your access key/20161208/us-east-1/s3/aws4_request, SignedHeaders=host;x-amz-content-sha256;x-amz-date, Signature=97e72cd9e0e0cbb5b7db3477a4be3e463957433e7a2033f3e8b7c13456577c1e', 'X-Amz-Date': '20161208T115708Z'}
# use custom request date
future_time = time.time() + 60 * 60 * 24 * 365
request = {
'verb': 'GET',
'uri': '/',
'headers': {'Host': 'foo.bar.com'},
}
signer = awssign.Signer(access_key, secret_key)
signer.add_auth(request, request_date=future_time)
print request['uri']
print request['headers']
# > /
# > {'X-Amz-Content-SHA256': 'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855', 'Host': 'foo.bar.com', 'Authorization': 'AWS4-HMAC-SHA256 Credential=your access key/20171208/us-east-1/s3/aws4_request, SignedHeaders=host;x-amz-date, Signature=347d31c95bf94901f4bb36e7d5d0c2b60328759fc53d27ec3567229da62b7da8', 'X-Amz-Date': '20171208T120718Z'}
# calculate the SHA256 of body by yourself
from hashlib import sha256
body = 'bla bla'
digest = sha256(body).hexdigest()
print digest
request = {
'verb': 'PUT',
'uri': '/aaa/bbb',
'headers': {'Host': 'foo.bar.com', 'X-Amz-Content-SHA256': digest},
}
signer = awssign.Signer(access_key, secret_key)
signer.add_auth(request)
print request['uri']
print request['headers']
# > fdcf4254fc02e5e41e545599f0be4f9f65e8be431ebc1fd301a96ea88dd0d5d6
# > /aaa/bbb
# > {'X-Amz-Content-SHA256': 'fdcf4254fc02e5e41e545599f0be4f9f65e8be431ebc1fd301a96ea88dd0d5d6', 'Host': 'foo.bar.com', 'Authorization': 'AWS4-HMAC-SHA256 Credential=your access key/20161208/us-east-1/s3/aws4_request, SignedHeaders=host;x-amz-date, Signature=2e3431de5980c3a90992e2e863a4e44f640bff9a425e0b71d9ebce7b17b0c006', 'X-Amz-Date': '20161208T122110Z'}
# set headers not to be signed
request = {
'verb': 'PUT',
'uri': '/aaa/bbb',
'headers': {'Host': 'foo.bar.com',
'will-be-signed-h1': 'foo',
'will-not-be-signed-h1': 'foo',
'will-be-signed-h2': 'foo',
'will-not-be-signed-h2': 'foo', },
}
signer = awssign.Signer(access_key, secret_key)
signer.add_auth(request, headers_not_to_sign=[
'will-not-be-signed-h1', 'will-not-be-signed-h2'])
print request['uri']
print request['headers']
# > /aaa/bbb
# > {'X-Amz-Content-SHA256': 'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855', 'Host': 'foo.bar.com', 'will-not-be-signed-h1': 'foo', 'X-Amz-Date': '20161208T122416Z', 'will-not-be-signed-h2': 'foo', 'will-be-signed-h1': 'foo', 'will-be-signed-h2': 'foo', 'Authorization': 'AWS4-HMAC-SHA256 Credential=your access key/20161208/us-east-1/s3/aws4_request, SignedHeaders=host;will-be-signed-h1;will-be-signed-h2;x-amz-date, Signature=113428763e851f99a88e6439b3d38cc480df42a4524861b70306001db4edb266'} | unknown | codeparrot/codeparrot-clean | ||
"""
>>> from django.core.paginator import Paginator
>>> from pagination.templatetags.pagination_tags import paginate
>>> from django.template import Template, Context
>>> p = Paginator(range(15), 2)
>>> paginate({'paginator': p, 'page_obj': p.page(1)})['pages']
[1, 2, 3, 4, 5, 6, 7, 8]
>>> p = Paginator(range(17), 2)
>>> paginate({'paginator': p, 'page_obj': p.page(1)})['pages']
[1, 2, 3, 4, 5, 6, 7, 8, 9]
>>> p = Paginator(range(19), 2)
>>> paginate({'paginator': p, 'page_obj': p.page(1)})['pages']
[1, 2, 3, 4, None, 7, 8, 9, 10]
>>> p = Paginator(range(21), 2)
>>> paginate({'paginator': p, 'page_obj': p.page(1)})['pages']
[1, 2, 3, 4, None, 8, 9, 10, 11]
# Testing orphans
>>> p = Paginator(range(5), 2, 1)
>>> paginate({'paginator': p, 'page_obj': p.page(1)})['pages']
[1, 2]
>>> p = Paginator(range(21), 2, 1)
>>> paginate({'paginator': p, 'page_obj': p.page(1)})['pages']
[1, 2, 3, 4, None, 7, 8, 9, 10]
>>> t = Template("{% load pagination_tags %}{% autopaginate var 2 %}{% paginate %}")
# WARNING: Please, please nobody read this portion of the code!
>>> class GetProxy(object):
... def __iter__(self): yield self.__dict__.__iter__
... def copy(self): return self
... def urlencode(self): return u''
... def keys(self): return []
>>> class RequestProxy(object):
... page = 1
... GET = GetProxy()
>>>
# ENDWARNING
>>> t.render(Context({'var': range(21), 'request': RequestProxy()}))
u'\\n\\n<div class="pagination">...
>>>
>>> t = Template("{% load pagination_tags %}{% autopaginate var %}{% paginate %}")
>>> t.render(Context({'var': range(21), 'request': RequestProxy()}))
u'\\n\\n<div class="pagination">...
>>> t = Template("{% load pagination_tags %}{% autopaginate var 20 %}{% paginate %}")
>>> t.render(Context({'var': range(21), 'request': RequestProxy()}))
u'\\n\\n<div class="pagination">...
>>> t = Template("{% load pagination_tags %}{% autopaginate var by %}{% paginate %}")
>>> t.render(Context({'var': range(21), 'by': 20, 'request': RequestProxy()}))
u'\\n\\n<div class="pagination">...
>>> t = Template("{% load pagination_tags %}{% autopaginate var by as foo %}{{ foo }}")
>>> t.render(Context({'var': range(21), 'by': 20, 'request': RequestProxy()}))
u'[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19]'
>>>
""" | unknown | codeparrot/codeparrot-clean | ||
import tensorflow as tf
from gan import GAN
from common import sample_mixture_of_gaussians, discriminator, generator
class WGAN(GAN):
    """
    Wasserstein GAN on a toy mixture-of-Gaussians dataset (TF1 graph mode),
    optionally using the WGAN-GP gradient penalty instead of weight clipping.
    """
    def __init__(self, params):
        self.params = params
        self.z_dim = params['z_dim']
        data_sampler = sample_mixture_of_gaussians(**params['data'])
        # Standard-normal prior over the latent code z.
        z_sampler = tf.contrib.distributions.Normal(tf.zeros(self.z_dim), tf.ones(self.z_dim))
        self.batch_size = tf.placeholder(tf.int32, shape=())
        self.data = data_sampler.sample(self.batch_size)
        data_score = discriminator(self.data, **params['discriminator'])
        self.z = z_sampler.sample(self.batch_size)
        self.samples = generator(self.z, **params['generator'])
        # reuse=True: score generated samples with the same critic weights.
        samples_score = discriminator(self.samples, **params['discriminator'], reuse=True)
        self.discriminator_vars = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, 'discriminator')
        self.generator_vars = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, 'generator')
        # Critic loss: maximize E[f(real)] - E[f(fake)], written as a minimization.
        self.discriminator_loss = -tf.reduce_mean(data_score - samples_score)
        if params['gradient_penalty']:
            self.gradient_penalty = True
            self.name = 'WGAN gradient penalty'
            # WGAN-GP: penalize deviation of the critic's gradient norm from 1
            # at points interpolated between real and generated samples.
            e = tf.contrib.distributions.Uniform().sample([tf.shape(self.data)[0], 1])
            x = e * self.data + (1 - e) * self.samples
            x_score = discriminator(x, **params['discriminator'], reuse=True)
            gradients = tf.gradients(x_score, [x])[0]
            gradients_l2 = tf.sqrt(tf.reduce_sum(gradients ** 2, axis=1))
            gradient_penalty = tf.reduce_mean((gradients_l2 - 1) ** 2)
            self.discriminator_loss += params['lambda'] * gradient_penalty
        else:
            self.gradient_penalty = False
            self.name = 'WGAN'
        self.generator_loss = -tf.reduce_mean(samples_score)
        self._init_optimization()
    def _create_optimizers(self):
        # Optimizer choices/hyperparameters differ between the clipping and
        # gradient-penalty variants; 'consensus' uses a single optimizer.
        if self.params['optimization']['algorithm'] == 'consensus':
            self.optimizer = tf.train.RMSPropOptimizer(1e-4)
        elif self.params['optimization']['algorithm'] == 'alternating':
            if self.gradient_penalty:
                self.discriminator_optimizer = tf.train.AdamOptimizer(learning_rate=1e-4, beta1=0.5, beta2=0.9)
                self.generator_optimizer = tf.train.AdamOptimizer(learning_rate=1e-4, beta1=0.5, beta2=0.9)
            else:
                self.discriminator_optimizer = tf.train.RMSPropOptimizer(learning_rate=5e-5)
                self.generator_optimizer = tf.train.RMSPropOptimizer(learning_rate=5e-5)
        if not self.gradient_penalty:
            # Vanilla WGAN enforces the Lipschitz constraint by clipping
            # critic weights to [-0.01, 0.01] after each update.
            self.clip_discriminator_op = [var.assign(tf.clip_by_value(var, -0.01, 0.01)) for var in self.discriminator_vars]
    def _consensus_optimization(self, session, batch_size):
        # One combined update step; clip critic weights afterwards if needed.
        _, summary_d, summary_g = session.run([self.train_op,
                                               self.summary_d, self.summary_g],
                                              feed_dict={self.batch_size: batch_size})
        if not self.gradient_penalty:
            session.run(self.clip_discriminator_op)
        return summary_d, summary_g
    def _alternating_optimization(self, session, batch_size):
        # Train the critic for several steps per single generator step.
        for j in range(self.discriminator_steps):
            _, summary_d = session.run([self.discriminator_train_op, self.summary_d],
                                       feed_dict={self.batch_size: batch_size})
            if not self.gradient_penalty:
                session.run(self.clip_discriminator_op)
        _, summary_g = session.run([self.generator_train_op, self.summary_g],
                                   feed_dict={self.batch_size: batch_size})
        return summary_d, summary_g
#!/usr/bin/env python
"""
Use an aggregation query to answer the following question.
Which Region in India has the largest number of cities with longitude between 75 and 80?
Please modify only the 'make_pipeline' function so that it creates and returns an aggregation
pipeline that can be passed to the MongoDB aggregate function. As in our examples in this lesson,
the aggregation pipeline should be a list of one or more dictionary objects.
Please review the lesson examples if you are unsure of the syntax.
Your code will be run against a MongoDB instance that we have provided. If you want to run this
code locally on your machine, you have to install MongoDB, download and insert the dataset.
For instructions related to MongoDB setup and datasets please see Course Materials.
Please note that the dataset you are using here is a smaller version of the twitter dataset used in
examples in this lesson. If you attempt some of the same queries that we looked at in the lesson
examples, your results will be different.
"""
def get_db(db_name):
    """Return a handle to the *db_name* database on the local MongoDB server."""
    from pymongo import MongoClient
    return MongoClient('localhost:27017')[db_name]
def make_pipeline():
    """Build the aggregation pipeline that finds the Indian region with the
    largest number of cities whose longitude lies in [75, 80)."""
    match_stage = {
        '$match': {
            'country': 'India',
            'lon': {'$gte': 75, '$lt': 80},
        }
    }
    group_stage = {
        '$group': {
            '_id': '$isPartOf',
            'count': {'$sum': 1},
        }
    }
    return [
        match_stage,
        {'$unwind': '$isPartOf'},
        group_stage,
        {'$sort': {'count': -1}},
        {'$limit': 1},
    ]
def aggregate(db, pipeline):
    """Run *pipeline* against the ``cities`` collection of *db* and return
    whatever the driver's aggregate() call yields."""
    return db.cities.aggregate(pipeline)
if __name__ == '__main__':
    db = get_db('examples')
    pipeline = make_pipeline()
    result = aggregate(db, pipeline)
    import pprint
    # NOTE(review): indexing result["result"] assumes the pymongo 2.x
    # aggregate() return shape (a dict); pymongo 3+ returns a cursor.
    pprint.pprint(result["result"][0])
    assert len(result["result"]) == 1
    assert result["result"][0]["_id"] == 'Tamil Nadu'
# Generated by Django 2.0.9 on 2018-11-05 22:23
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('believe_his_prophets', '0003_auto_20181105_2152'),
]
operations = [
migrations.CreateModel(
name='SpiritProphecyLanguage',
fields=[
('id', models.AutoField(editable=False,
primary_key=True, serialize=False)),
('name', models.CharField(max_length=200)),
('translate_abrev', models.CharField(
blank=True, max_length=200, null=True)),
('translate_name', models.CharField(
blank=True, max_length=200, null=True)),
('language', models.ForeignKey(db_column='language_id', on_delete=django.db.models.deletion.PROTECT,
related_name='spirit_prophecy_language_language_set', to='believe_his_prophets.Language')),
],
options={
'verbose_name': 'SpiritProphecyLanguage',
'verbose_name_plural': 'SpiritProphecyLanguage',
'db_table': 'believe_spirit_prophecy_lang',
},
),
migrations.AlterModelOptions(
name='spiritprophecy',
options={'verbose_name': 'SpiritProphecy',
'verbose_name_plural': 'SpiritProphecy'},
),
migrations.RenameField(
model_name='spiritprophecyread',
old_name='date_read',
new_name='date',
),
migrations.RemoveField(
model_name='spiritprophecy',
name='abrev',
),
migrations.RemoveField(
model_name='spiritprophecy',
name='translate_abrev',
),
migrations.RemoveField(
model_name='spiritprophecy',
name='translate_name',
),
migrations.RemoveField(
model_name='spiritprophecyread',
name='chapter',
),
migrations.RemoveField(
model_name='spiritprophecyread',
name='chapter_title',
),
migrations.RemoveField(
model_name='spiritprophecyread',
name='spirit_prophecy',
),
migrations.RemoveField(
model_name='spiritprophecyread',
name='title',
),
migrations.RemoveField(
model_name='spiritprophecy',
name='language',
),
migrations.AlterField(
model_name='spiritprophecy',
name='name',
field=models.CharField(max_length=150),
),
migrations.AddField(
model_name='spiritprophecylanguage',
name='spirit_prophecy',
field=models.ForeignKey(db_column='spirit_prophecy_id', on_delete=django.db.models.deletion.PROTECT,
related_name='spirit_prophecy_language_spirit_prophecy_set', to='believe_his_prophets.SpiritProphecy'),
),
migrations.AddField(
model_name='spiritprophecy',
name='language',
field=models.ManyToManyField(
blank=True, related_name='spirit_prophecy_language_set', to='believe_his_prophets.SpiritProphecyLanguage'),
),
] | unknown | codeparrot/codeparrot-clean | ||
/*
* Copyright (c) 2007 Mockito contributors
* This program is made available under the terms of the MIT License.
*/
package org.mockito;
import static org.mockito.internal.stubbing.answers.AnswerFunctionalInterfaces.toAnswer;
import java.util.Collection;
import org.mockito.internal.stubbing.answers.AnswersWithDelay;
import org.mockito.internal.stubbing.answers.ReturnsArgumentAt;
import org.mockito.internal.stubbing.answers.ReturnsElementsOf;
import org.mockito.internal.stubbing.defaultanswers.ForwardsInvocations;
import org.mockito.stubbing.Answer;
import org.mockito.stubbing.Answer1;
import org.mockito.stubbing.Answer2;
import org.mockito.stubbing.Answer3;
import org.mockito.stubbing.Answer4;
import org.mockito.stubbing.Answer5;
import org.mockito.stubbing.Answer6;
import org.mockito.stubbing.VoidAnswer1;
import org.mockito.stubbing.VoidAnswer2;
import org.mockito.stubbing.VoidAnswer3;
import org.mockito.stubbing.VoidAnswer4;
import org.mockito.stubbing.VoidAnswer5;
import org.mockito.stubbing.VoidAnswer6;
/**
* Additional answers provides factory methods for answers.
*
* <p>Currently offer answers that can return the parameter of an invocation at a certain position,
* along with answers that draw on a strongly typed interface to provide a neater way to write custom answers
* that either return a value or are void (see answer interfaces in org.mockito.stubbing).
*
* <p>See factory methods for more information : {@link #returnsFirstArg}, {@link #returnsSecondArg},
* {@link #returnsLastArg}, {@link #returnsArgAt}, {@link #answer} and {@link #answerVoid}
*
* @since 1.9.5
*/
@SuppressWarnings("unchecked")
public final class AdditionalAnswers {
/**
* Returns the first parameter of an invocation.
*
* <p>
* This additional answer could be used at stub time using the
* <code>then|do|will{@link org.mockito.stubbing.Answer}</code> methods. For example :
*
* <pre class="code"><code class="java">
* given(carKeyFob.authenticate(carKey)).will(returnsFirstArg());
* doAnswer(returnsFirstArg()).when(carKeyFob).authenticate(carKey);
* </code></pre>
* </p>
*
* <p>
* This methods works with varargs as well, mockito will expand the vararg to return the argument
* at the given position. Suppose the following signature :
*
* <pre class="code"><code class="java">
* interface Person {
* Dream remember(Dream... dreams);
* }
*
* // returns dream1
* given(person.remember(dream1, dream2, dream3, dream4)).will(returnsFirstArg());
* </code></pre>
*
* Mockito will return the vararg array if the first argument is a vararg in the method
* and if the return type has the same type as the vararg array.
*
* <pre class="code"><code class="java">
* interface Person {
* Dream[] remember(Dream... otherDreams);
* }
*
* // returns otherDreams (happens to be a 4 elements array)
* given(person.remember(dream1, dream2, dream3, dream4)).will(returnsFirstArg());
* </code></pre>
* </p>
*
* @param <T> Return type of the invocation.
* @return Answer that will return the first argument of the invocation.
*
* @since 1.9.5
*/
public static <T> Answer<T> returnsFirstArg() {
return (Answer<T>) new ReturnsArgumentAt(0);
}
/**
* Returns the second parameter of an invocation.
*
* <p>
* This additional answer could be used at stub time using the
* <code>then|do|will{@link org.mockito.stubbing.Answer}</code> methods. For example :
*
* <pre class="code"><code class="java">
* given(trader.apply(leesFormula, onCreditDefaultSwap)).will(returnsSecondArg());
* doAnswer(returnsSecondArg()).when(trader).apply(leesFormula, onCreditDefaultSwap);
* </code></pre>
* </p>
*
* <p>
* This methods works with varargs as well, mockito will expand the vararg to return the argument
* at the given position. Suppose the following signature :
*
* <pre class="code"><code class="java">
* interface Person {
* Dream remember(Dream dream, Dream... otherDreams);
* }
*
* // returns dream2
* given(person.remember(dream1, dream2, dream3, dream4)).will(returnsSecondArg());
* </code></pre>
*
* Mockito will return the vararg array if the second argument is a vararg in the method
* and if the return type has the same type as the vararg array.
*
* <pre class="code"><code class="java">
* interface Person {
* Dream[] remember(Dream dream1, Dream... otherDreams);
* }
*
* // returns otherDreams (happens to be a 3 elements array)
* given(person.remember(dream1, dream2, dream3, dream4)).will(returnsSecondArg());
* </code></pre>
* </p>
*
* @param <T> Return type of the invocation.
* @return Answer that will return the second argument of the invocation.
*
* @since 1.9.5
*/
public static <T> Answer<T> returnsSecondArg() {
return (Answer<T>) new ReturnsArgumentAt(1);
}
/**
* Returns the last parameter of an invocation.
*
* <p>
* This additional answer could be used at stub time using the
* <code>then|do|will{@link org.mockito.stubbing.Answer}</code> methods. For example :
*
* <pre class="code"><code class="java">
* given(person.remember(dream1, dream2, dream3, dream4)).will(returnsLastArg());
* doAnswer(returnsLastArg()).when(person).remember(dream1, dream2, dream3, dream4);
* </code></pre>
* </p>
*
* <p>
* This methods works with varargs as well, mockito will expand the vararg to return the argument
* at the given position. Suppose the following signature :
*
* <pre class="code"><code class="java">
* interface Person {
* Dream remember(Dream dream, Dream... otherDreams);
* }
*
* // returns dream4
* given(person.remember(dream1, dream2, dream3, dream4)).will(returnsLastArg());
* </code></pre>
*
* Mockito will return the vararg array if the given {@code position} targets the vararg index in the method
* and if the return type has the same type as the vararg array.
*
* <pre class="code"><code class="java">
* interface Person {
* Dream[] remember(Dream dream1, Dream dream2, Dream dream3, Dream... otherDreams);
* }
*
* // returns otherDreams (happens to be a single element array)
* given(person.remember(dream1, dream2, dream3, dream4)).will(returnsLastArg());
* </code></pre>
* </p>
*
* @param <T> Return type of the invocation.
* @return Answer that will return the last argument of the invocation.
*
* @since 1.9.5
*/
public static <T> Answer<T> returnsLastArg() {
return (Answer<T>) new ReturnsArgumentAt(ReturnsArgumentAt.LAST_ARGUMENT);
}
/**
* Returns the parameter of an invocation at the given position.
*
* <p>
* This additional answer could be used at stub time using the
* <code>then|do|will{@link org.mockito.stubbing.Answer}</code> methods. For example :
*
* <pre class="code"><code class="java">
* given(person.remember(dream1, dream2, dream3, dream4)).will(returnsArgAt(3));
* doAnswer(returnsArgAt(3)).when(person).remember(dream1, dream2, dream3, dream4);
* </code></pre>
* </p>
*
* <p>
* This methods works with varargs as well, mockito will expand the vararg to return the argument
* at the given position. Suppose the following signature :
*
* <pre class="code"><code class="java">
* interface Person {
* Dream remember(Dream dream, Dream... otherDreams);
* }
*
* // returns dream 3
* given(person.remember(dream1, dream2, dream3, dream4)).will(returnsArgAt(2));
* </code></pre>
*
* Mockito will return the vararg array if the given {@code position} targets the vararg index in the method
* and if the return type has the same type as the vararg array.
*
* <pre class="code"><code class="java">
* interface Person {
* Dream[] remember(Dream dream, Dream... otherDreams);
* }
*
* // returns otherDreams array (contains dream2, dream,3, dream4)
* given(person.remember(dream1, dream2, dream3, dream4)).will(returnsArgAt(1));
* </code></pre>
* </p>
*
* @param <T> Return type of the invocation.
* @param position index of the argument from the list of arguments.
* @return Answer that will return the argument from the given position in the argument's list
*
* @since 1.9.5
*/
public static <T> Answer<T> returnsArgAt(int position) {
return (Answer<T>) new ReturnsArgumentAt(position);
}
/**
* An answer that directly forwards the calls to the delegate. The delegate may or may not be of the same type as the mock.
* If the type is different, a matching method needs to be found on delegate type otherwise an exception is thrown.
* <p>
* Useful for spies or partial mocks of objects that are difficult to mock
* or spy using the usual spy API. Possible use cases:
* <ul>
* <li>Final classes but with an interface</li>
* <li>Already custom proxied object</li>
* <li>Special objects with a finalize method, i.e. to avoid executing it 2 times</li>
* </ul>
*
* <p>
* The difference with the regular spy:
* <ul>
* <li>
* The regular spy ({@link Mockito#spy(Object)}) contains <strong>all</strong> state from the spied instance
* and the methods are invoked on the spy. The spied instance is only used at mock creation to copy the state from.
* If you call a method on a regular spy and it internally calls other methods on this spy, those calls are remembered
* for verifications, and they can be effectively stubbed.
* </li>
* <li>
* The mock that delegates simply delegates all methods to the delegate.
* The delegate is used all the time as methods are delegated onto it.
* If you call a method on a mock that delegates and it internally calls other methods on this mock,
* those calls are <strong>not</strong> remembered for verifications, stubbing does not have effect on them, too.
* Mock that delegates is less powerful than the regular spy but it is useful when the regular spy cannot be created.
* </li>
* </ul>
* An example with a final class that we want to delegate to:
* <p>
* <pre class="code"><code class="java">
* final class DontYouDareToMockMe implements list { ... }
*
* DontYouDareToMockMe awesomeList = new DontYouDareToMockMe();
*
* List mock = mock(List.class, delegatesTo(awesomeList));
* </code></pre>
*
* <p>
* This feature suffers from the same drawback as the spy.
* The mock will call the delegate if you use regular when().then() stubbing style.
* Since the real implementation is called this might have some side effects.
* Therefore, you should use the doReturn|Throw|Answer|CallRealMethod stubbing style. Example:
*
* <pre class="code"><code class="java">
* List listWithDelegate = mock(List.class, AdditionalAnswers.delegatesTo(awesomeList));
*
* //Impossible: real method is called so listWithDelegate.get(0) throws IndexOutOfBoundsException (the list is yet empty)
* when(listWithDelegate.get(0)).thenReturn("foo");
*
* //You have to use doReturn() for stubbing
* doReturn("foo").when(listWithDelegate).get(0);
* </code></pre>
*
* @param delegate The delegate to forward calls to. It does not have to be of the same type as the mock (although it usually is).
* The only requirement is that the instance should have compatible method signatures including the return values.
* Only the methods that were actually executed on the mock need to be present on the delegate type.
* @return the answer
*
* @since 1.9.5
*/
public static <T> Answer<T> delegatesTo(Object delegate) {
return (Answer<T>) new ForwardsInvocations(delegate);
}
/**
* Returns elements of the collection. Keeps returning the last element forever.
* Might be useful on occasion when you have a collection of elements to return.
* <p>
* <pre class="code"><code class="java">
* //this:
* when(mock.foo()).thenReturn(1, 2, 3);
*
* //is equivalent to:
* when(mock.foo()).thenAnswer(AdditionalAnswers.returnsElementsOf(Arrays.asList(1, 2, 3)));
* </code></pre>
*
* @param elements The collection of elements to return.
* @return the answer
*
* @since 1.9.5
*/
public static <T> Answer<T> returnsElementsOf(Collection<?> elements) {
return (Answer<T>) new ReturnsElementsOf(elements);
}
/**
* Returns an answer after a delay with a defined length.
*
* @param <T> return type
* @param sleepyTime the delay in milliseconds
* @param answer interface to the answer which provides the intended return value.
* @return the answer object to use
*
* @since 2.8.44
*/
public static <T> Answer<T> answersWithDelay(long sleepyTime, Answer<T> answer) {
return (Answer<T>) new AnswersWithDelay(sleepyTime, (Answer<Object>) answer);
}
/**
* Creates an answer from a functional interface - allows for a strongly typed answer to be created
* ideally in Java 8
* @param answer interface to the answer - which is expected to return something
* @param <T> return type
* @param <A> input parameter type 1
* @return the answer object to use
* @since 2.1.0
*/
    public static <T, A> Answer<T> answer(Answer1<T, A> answer) {
        // Bridge the strongly-typed 1-argument functional interface to Mockito's Answer.
        return toAnswer(answer);
    }
/**
* Creates an answer from a functional interface - allows for a strongly typed answer to be created
* ideally in Java 8
* @param answer interface to the answer - a void method
* @param <A> input parameter type 1
* @return the answer object to use
* @since 2.1.0
*/
    public static <A> Answer<Void> answerVoid(VoidAnswer1<A> answer) {
        // Bridge the 1-argument void functional interface to Mockito's Answer.
        return toAnswer(answer);
    }
/**
* Creates an answer from a functional interface - allows for a strongly typed answer to be created
* ideally in Java 8
* @param answer interface to the answer - which is expected to return something
* @param <T> return type
* @param <A> input parameter type 1
* @param <B> input parameter type 2
* @return the answer object to use
* @since 2.1.0
*/
    public static <T, A, B> Answer<T> answer(Answer2<T, A, B> answer) {
        // Bridge the strongly-typed 2-argument functional interface to Mockito's Answer.
        return toAnswer(answer);
    }
/**
* Creates an answer from a functional interface - allows for a strongly typed answer to be created
* ideally in Java 8
* @param answer interface to the answer - a void method
* @param <A> input parameter type 1
* @param <B> input parameter type 2
* @return the answer object to use
* @since 2.1.0
*/
    public static <A, B> Answer<Void> answerVoid(VoidAnswer2<A, B> answer) {
        // Bridge the 2-argument void functional interface to Mockito's Answer.
        return toAnswer(answer);
    }
/**
* Creates an answer from a functional interface - allows for a strongly typed answer to be created
* ideally in Java 8
* @param answer interface to the answer - which is expected to return something
* @param <T> return type
* @param <A> input parameter type 1
* @param <B> input parameter type 2
* @param <C> input parameter type 3
* @return the answer object to use
* @since 2.1.0
*/
    public static <T, A, B, C> Answer<T> answer(Answer3<T, A, B, C> answer) {
        // Bridge the strongly-typed 3-argument functional interface to Mockito's Answer.
        return toAnswer(answer);
    }
/**
* Creates an answer from a functional interface - allows for a strongly typed answer to be created
* ideally in Java 8
* @param answer interface to the answer - a void method
* @param <A> input parameter type 1
* @param <B> input parameter type 2
* @param <C> input parameter type 3
* @return the answer object to use
* @since 2.1.0
*/
    public static <A, B, C> Answer<Void> answerVoid(VoidAnswer3<A, B, C> answer) {
        // Bridge the 3-argument void functional interface to Mockito's Answer.
        return toAnswer(answer);
    }
/**
* Creates an answer from a functional interface - allows for a strongly typed answer to be created
* ideally in Java 8
* @param answer interface to the answer - which is expected to return something
* @param <T> return type
* @param <A> input parameter type 1
* @param <B> input parameter type 2
* @param <C> input parameter type 3
* @param <D> input parameter type 4
* @return the answer object to use
* @since 2.1.0
*/
    public static <T, A, B, C, D> Answer<T> answer(Answer4<T, A, B, C, D> answer) {
        // Bridge the strongly-typed 4-argument functional interface to Mockito's Answer.
        return toAnswer(answer);
    }
/**
* Creates an answer from a functional interface - allows for a strongly typed answer to be created
* ideally in Java 8
* @param answer interface to the answer - a void method
* @param <A> input parameter type 1
* @param <B> input parameter type 2
* @param <C> input parameter type 3
* @param <D> input parameter type 4
* @return the answer object to use
* @since 2.1.0
*/
    public static <A, B, C, D> Answer<Void> answerVoid(VoidAnswer4<A, B, C, D> answer) {
        // Bridge the 4-argument void functional interface to Mockito's Answer.
        return toAnswer(answer);
    }
/**
* Creates an answer from a functional interface - allows for a strongly typed answer to be created
* ideally in Java 8
* @param answer interface to the answer - which is expected to return something
* @param <T> return type
* @param <A> input parameter type 1
* @param <B> input parameter type 2
* @param <C> input parameter type 3
* @param <D> input parameter type 4
* @param <E> input parameter type 5
* @return the answer object to use
* @since 2.1.0
*/
    public static <T, A, B, C, D, E> Answer<T> answer(Answer5<T, A, B, C, D, E> answer) {
        // Bridge the strongly-typed 5-argument functional interface to Mockito's Answer.
        return toAnswer(answer);
    }
/**
* Creates an answer from a functional interface - allows for a strongly typed answer to be created
* ideally in Java 8
*
* @param answer interface to the answer - a void method
* @param <A> input parameter type 1
* @param <B> input parameter type 2
* @param <C> input parameter type 3
* @param <D> input parameter type 4
* @param <E> input parameter type 5
* @return the answer object to use
* @since 2.1.0
*/
    public static <A, B, C, D, E> Answer<Void> answerVoid(VoidAnswer5<A, B, C, D, E> answer) {
        // Bridge the 5-argument void functional interface to Mockito's Answer.
        return toAnswer(answer);
    }
/**
* Creates an answer from a functional interface - allows for a strongly typed answer to be created
* idiomatically in Java 8
*
* @param answer interface to the answer - which is expected to return something
* @param <T> return type
* @param <A> input parameter type 1
* @param <B> input parameter type 2
* @param <C> input parameter type 3
* @param <D> input parameter type 4
* @param <E> input parameter type 5
* @param <F> input parameter type 6
* @return the answer object to use
* @since 2.26.0
*/
    public static <T, A, B, C, D, E, F> Answer<T> answer(Answer6<T, A, B, C, D, E, F> answer) {
        // Bridge the strongly-typed 6-argument functional interface to Mockito's Answer.
        return toAnswer(answer);
    }
/**
* Creates an answer from a functional interface - allows for a strongly typed answer to be created
* idiomatically in Java 8
*
* @param answer interface to the answer - a void method
* @param <A> input parameter type 1
* @param <B> input parameter type 2
* @param <C> input parameter type 3
* @param <D> input parameter type 4
* @param <E> input parameter type 5
* @param <F> input parameter type 6
* @return the answer object to use
* @since 2.26.0
*/
    public static <A, B, C, D, E, F> Answer<Void> answerVoid(VoidAnswer6<A, B, C, D, E, F> answer) {
        // Bridge the 6-argument void functional interface to Mockito's Answer.
        return toAnswer(answer);
    }
    // Static factory holder; never instantiated.
    private AdditionalAnswers() {}
}
/*
* Copyright (c) 2017 Mockito contributors
* This program is made available under the terms of the MIT License.
*/
package org.mockitousage.stubbing;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.*;
import java.util.ArrayList;
import java.util.List;
import org.junit.Test;
public class StubbingReturnsSelfTest {
@Test
public void should_stub_builder_method() {
Builder builder = mock(Builder.class, RETURNS_SELF);
assertThat(builder.returnSelf()).isEqualTo(builder);
}
@Test
public void should_return_default_return_when_not_a_builder() {
Builder builder = mock(Builder.class, RETURNS_SELF);
assertThat(builder.returnString()).isEqualTo(null);
}
@Test
public void should_return_self_when_call_on_method_in_superclass() {
BuilderSubClass builder = mock(BuilderSubClass.class, RETURNS_SELF);
assertThat(builder.returnSelf()).isEqualTo(builder);
}
@Test
public void should_return_self_when_call_on_method_in_subclass() {
BuilderSubClass builder = mock(BuilderSubClass.class, RETURNS_SELF);
assertThat(builder.returnsSubClass()).isEqualTo(builder);
}
@Test
public void should_return_self_when_call_on_method_in_subclass_returns_superclass() {
BuilderSubClass builder = mock(BuilderSubClass.class, RETURNS_SELF);
assertThat(builder.returnSuperClass()).isEqualTo(builder);
}
@Test
public void should_return_stubbed_answer_when_call_on_method_returns_self() {
Builder builder = mock(Builder.class, RETURNS_SELF);
Builder anotherBuilder = mock(Builder.class, RETURNS_SELF);
when(builder.returnSelf()).thenReturn(anotherBuilder);
assertThat(builder.returnSelf().returnSelf()).isEqualTo(anotherBuilder);
}
@Test
public void should_not_fail_when_calling_void_returning_method() {
Builder builder = mock(Builder.class, RETURNS_SELF);
builder.returnNothing();
}
@Test
public void should_not_fail_when_calling_primitive_returning_method() {
Builder builder = mock(Builder.class, RETURNS_SELF);
assertThat(builder.returnInt()).isEqualTo(0);
}
@Test
public void should_not_fail_when_calling_method_with_generic_return_type() {
Builder builder = mock(Builder.class, RETURNS_SELF);
assertThat(builder.returnGeneric("Generic Result")).isEqualTo(null);
}
@Test
public void use_full_builder_with_terminating_method() {
HttpBuilder builder = mock(HttpBuilder.class, RETURNS_SELF);
HttpRequesterWithHeaders requester = new HttpRequesterWithHeaders(builder);
String response = "StatusCode: 200";
when(builder.request()).thenReturn(response);
assertThat(requester.request("URI")).isEqualTo(response);
}
private static class Builder {
public Builder returnSelf() {
return this;
}
public String returnString() {
return "Self";
}
public void returnNothing() {}
public int returnInt() {
return 1;
}
public <T> T returnGeneric(T result) {
return result;
}
}
private static class BuilderSubClass extends Builder {
public BuilderSubClass returnsSubClass() {
return this;
}
public Builder returnSuperClass() {
return this;
}
}
private static class HttpRequesterWithHeaders {
private HttpBuilder builder;
public HttpRequesterWithHeaders(HttpBuilder builder) {
this.builder = builder;
}
public String request(String uri) {
return builder.withUrl(uri)
.withHeader("Content-type: application/json")
.withHeader("Authorization: Bearer")
.request();
}
}
private static class HttpBuilder {
private String uri;
private List<String> headers;
public HttpBuilder() {
this.headers = new ArrayList<String>();
}
public HttpBuilder withUrl(String uri) {
this.uri = uri;
return this;
}
public HttpBuilder withHeader(String header) {
this.headers.add(header);
return this;
}
public String request() {
return uri + headers.toString();
}
}
} | java | github | https://github.com/mockito/mockito | mockito-core/src/test/java/org/mockitousage/stubbing/StubbingReturnsSelfTest.java |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class CertificateUpdateParameters(Model):
    """The certificate update parameters.

    :param certificate_policy: The management policy for the certificate.
    :type certificate_policy: :class:`CertificatePolicy
     <azure.keyvault.models.CertificatePolicy>`
    :param certificate_attributes: The attributes of the certificate
     (optional).
    :type certificate_attributes: :class:`CertificateAttributes
     <azure.keyvault.models.CertificateAttributes>`
    :param tags: Application specific metadata in the form of key-value pairs.
    :type tags: dict
    """

    # Maps constructor arguments to wire-format keys/types for msrest
    # (de)serialization.
    _attribute_map = {
        'certificate_policy': {'key': 'policy', 'type': 'CertificatePolicy'},
        'certificate_attributes': {'key': 'attributes', 'type': 'CertificateAttributes'},
        'tags': {'key': 'tags', 'type': '{str}'},
    }

    # NOTE(review): does not call Model.__init__ -- matches the generated code;
    # verify against the msrest version in use before changing.
    def __init__(self, certificate_policy=None, certificate_attributes=None, tags=None):
        self.certificate_policy = certificate_policy
        self.certificate_attributes = certificate_attributes
        self.tags = tags
#
# The Python Imaging Library.
# $Id$
#
# im.show() drivers
#
# History:
# 2008-04-06 fl Created
#
# Copyright (c) Secret Labs AB 2008.
#
# See the README file for information on usage and redistribution.
#
import Image
import os, sys
_viewers = []
def register(viewer, order=1):
    # Register a display driver. A Viewer subclass is instantiated first;
    # order > 0 appends, order < 0 prepends, order == 0 discards the viewer.
    # NOTE: a TypeError raised while instantiating the class is also swallowed
    # by this try block (the class object is then registered uninstantiated).
    try:
        if issubclass(viewer, Viewer):
            viewer = viewer()
    except TypeError:
        pass # raised if viewer wasn't a class
    if order > 0:
        _viewers.append(viewer)
    elif order < 0:
        _viewers.insert(0, viewer)
##
# Displays a given image.
#
# @param image An image object.
# @param title Optional title. Not all viewers can display the title.
# @param **options Additional viewer options.
# @return True if a suitable viewer was found, false otherwise.
def show(image, title=None, **options):
    """Display *image* with the first registered viewer that accepts it.

    Returns 1 if a suitable viewer was found, 0 otherwise.
    """
    handled = any(v.show(image, title=title, **options) for v in _viewers)
    return 1 if handled else 0
##
# Base class for viewers.
class Viewer:
    """Base class for image display drivers."""

    # format handed to get_format(); None means save as PGM/PPM
    format = None

    # --- main api ---

    def show(self, image, **options):
        """Convert *image* to a displayable mode, then hand it to the driver."""
        mode = image.mode
        if mode[:4] == "I;16":
            # @PIL88 @PIL101
            # "I;16" isn't an 'official' mode, but we still want to
            # provide a simple way to show 16-bit images.
            base = "L"
            # FIXME: auto-contrast if max() > 255?
        else:
            base = Image.getmodebase(mode)
        if base != mode and mode != "1":
            image = image.convert(base)
        self.show_image(image, **options)

    # --- hook methods ---

    def get_format(self, image):
        # format name to save as, or None for PGM/PPM
        return self.format

    def get_command(self, file, **options):
        raise NotImplementedError

    def save_image(self, image):
        # dump to a temporary file and return its name
        return image._dump(format=self.get_format(image))

    def show_image(self, image, **options):
        # save first, then display the resulting file
        return self.show_file(self.save_image(image), **options)

    def show_file(self, file, **options):
        # run the driver's shell command for the given file
        os.system(self.get_command(file, **options))
        return 1
# --------------------------------------------------------------------
# Platform-specific viewer drivers; exactly one branch runs at import time.
# NOTE(review): file names are interpolated into shell commands unescaped --
# paths containing shell metacharacters would break (or worse); acceptable
# here because the names come from Image._dump's temp files.
if sys.platform == "win32":
    class WindowsViewer(Viewer):
        format = "BMP"
        def get_command(self, file, **options):
            # 'start /wait' blocks until the viewer exits, then deletes the temp file
            return "start /wait %s && del /f %s" % (file, file)
    register(WindowsViewer)
elif sys.platform == "darwin":
    class MacViewer(Viewer):
        format = "BMP"
        def get_command(self, file, **options):
            # on darwin open returns immediately resulting in the temp
            # file removal while app is opening
            command = "open -a /Applications/Preview.app"
            command = "(%s %s; sleep 20; rm -f %s)&" % (command, file, file)
            return command
    register(MacViewer)
else:
    # unixoids
    def which(executable):
        # minimal PATH search; returns the first matching file or None
        path = os.environ.get("PATH")
        if not path:
            return None
        for dirname in path.split(os.pathsep):
            filename = os.path.join(dirname, executable)
            if os.path.isfile(filename):
                # FIXME: make sure it's executable
                return filename
        return None
    class UnixViewer(Viewer):
        def show_file(self, file, **options):
            # run the viewer in the background, removing the temp file afterwards
            command, executable = self.get_command_ex(file, **options)
            command = "(%s %s; rm -f %s)&" % (command, file, file)
            os.system(command)
            return 1
    # implementations
    class DisplayViewer(UnixViewer):
        def get_command_ex(self, file, **options):
            command = executable = "display"
            return command, executable
    if which("display"):
        register(DisplayViewer)
    class XVViewer(UnixViewer):
        def get_command_ex(self, file, title=None, **options):
            # note: xv is pretty outdated. most modern systems have
            # imagemagick's display command instead.
            command = executable = "xv"
            if title:
                # FIXME: do full escaping
                command = command + " -name \"%s\"" % title
            return command, executable
    if which("xv"):
        register(XVViewer)
if __name__ == "__main__":
    # usage: python ImageShow.py imagefile [title]
    # Parenthesized print works identically under Python 2 (single argument)
    # and Python 3; the bare print statement was Python-2-only.
    print(show(Image.open(sys.argv[1]), *sys.argv[2:]))
import os
from time import sleep
import unittest
from appium import webdriver
# Returns abs path relative to this file and not cwd
PATH = lambda p: os.path.abspath(
os.path.join(os.path.dirname(__file__), p)
)
# think times can be useful e.g. when testing with an emulator
THINK_TIME = 5.
class SimpleSalendroidTests(unittest.TestCase):
    """Smoke test for the ApiDemos app, driven through the Selendroid backend."""
    def setUp(self):
        # Desired capabilities describe the device and app under test.
        desired_caps = {}
        desired_caps['platformName'] = 'Android'
        desired_caps['platformVersion'] = '4.1'
        desired_caps['deviceName'] = 'Android Emulator'
        desired_caps['automationName'] = "selendroid"
        desired_caps['app'] = PATH(
            '../../../sample-code/apps/ApiDemos/bin/ApiDemos-debug.apk'
        )
        # Connects to a locally running Appium server.
        self.driver = webdriver.Remote('http://localhost:4723/wd/hub', desired_caps)
    def tearDown(self):
        # end the session
        self.driver.quit()
    def test_selendroid(self):
        # Verify basic element lookup by name and by class on the main screen.
        el = self.driver.find_element_by_name("Animation")
        # assert el.text == "Animation"
        self.assertEqual('Animation', el.text)
        el = self.driver.find_element_by_class_name("android.widget.TextView")
        # assert el.text == "Accessibility"
        self.assertEqual('Accessibility', el.text)
        # Navigate into the "App" section and back again.
        el = self.driver.find_element_by_name("App")
        el.click()
        sleep(THINK_TIME)
        els = self.driver.find_elements_by_class_name("android.widget.TextView")
        # Selendroid gets all the elements, not just the visible ones
        self.assertLessEqual(30, len(els))
        self.driver.find_element_by_name('Action Bar')
        self.driver.back()
        sleep(THINK_TIME)
        el = self.driver.find_element_by_name("Animation")
        self.assertEqual('Animation', el.text)
if __name__ == '__main__':
    # Run just this test case, with verbose per-test output.
    suite = unittest.TestLoader().loadTestsFromTestCase(SimpleSalendroidTests)
    unittest.TextTestRunner(verbosity=2).run(suite)
# -*- coding: utf-8 -*-
# Copyright (C) 2014 Daniele Simonetti
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
__author__ = 'Daniele'
from asq.initiators import query
import l5r.api as api
import l5r.api.character.schools
from l5r.api import __api
def all():
    """Return the full list of spells (empty when the API is not initialized)."""
    return __api.ds.spells if __api else []
def get(sid):
    """Return the spell whose id matches *sid*, or None for no match / falsy id."""
    if not sid:
        return None
    return next((spell for spell in all() if spell.id == sid), None)
def has_tag(sid, tag, school=None):
    """Return True when the given spell carries *tag*, honoring school-only tags."""
    return any(t == tag for t in tags(sid, school))
def tags(sid, school=None):
    """Return all tag names of the given spell.

    School-restricted tags are included only when their school is *school*
    (or, when *school* is None, any of the character's schools).
    """
    spell = get(sid)
    if not spell:
        return []
    relevant_schools = [school] if school is not None else api.character.schools.get_all()
    return [t.name for t in spell.tags
            if t.school is None or t.school in relevant_schools]
def is_multi_element(sid):
    """Return True if the spell is multi element."""
    spell = get(sid)
    return False if not spell else spell.element == 'multi'
def is_dragon(sid):
    """Return True if the spell is a dragon spell."""
    spell = get(sid)
    return False if not spell else spell.element == 'dragon'
def get_maho_spells(ring, mastery):
    """Return the maho spells for the given ring and mastery."""
    return [spell for spell in get_spells(ring, mastery) if 'maho' in tags(spell.id)]
def get_spells(ring, mastery, maho=True):
    """Return all spells matching *ring* and *mastery*.

    When *maho* is False, spells tagged 'maho' are filtered out.
    """
    matching = [spell for spell in all()
                if spell.element == ring and spell.mastery == mastery]
    if not maho:
        return [spell for spell in matching if 'maho' not in tags(spell.id)]
    return matching
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2010 Radim Rehurek <radimrehurek@seznam.cz>
# Licensed under the GNU LGPL v2.1 - http://www.gnu.org/licenses/lgpl.html
"""
This module contains implementations (= different classes) which encapsulate the
idea of a Digital Library document source.
A document source is basically a collection of articles sharing the same format,
same location (type of access), same way of parsing them etc.
Different sources can be aggregated into a single corpus, which is what the
`DmlCorpus` class does (see the `dmlcorpus` module).
"""
import logging
import os
import os.path
import re
import xml.sax # for parsing arxmliv articles
from gensim import utils
# Matches a one-line XML element: group(1) = tag name (possibly with
# attributes), group(2) = element text content.
PAT_TAG = re.compile('<(.*?)>(.*)</.*?>')
logger = logging.getLogger('gensim.corpora.sources')
class ArticleSource(object):
    """
    Abstract interface describing a single source of articles.

    A source hides where the documents reside (findArticles()), how to
    retrieve their fulltexts and metadata, and how to tokenize and normalize
    the words. The one invariant shared by all sources is the notion of an
    article identifier (URI), unique within a single source.

    See e.g. the DmlSource or ArxmlivSource classes for concrete instances.
    """
    def __init__(self, sourceId):
        self.sourceId = sourceId

    def __str__(self):
        return self.sourceId

    def findArticles(self):
        raise NotImplementedError('Abstract Base Class')

    def getContent(self, uri):
        raise NotImplementedError('Abstract Base Class')

    def getMeta(self, uri):
        raise NotImplementedError('Abstract Base Class')

    def tokenize(self, content):
        raise NotImplementedError('Abstract Base Class')

    def normalizeWord(self, word):
        raise NotImplementedError('Abstract Base Class')
#endclass ArticleSource
class DmlSource(ArticleSource):
    """
    Article source for articles in DML format (DML-CZ, Numdam):
    1) articles = directories starting with '#'
    2) content is stored in fulltext.txt
    3) metadata are stored in meta.xml
    Article URI is currently (a part of) the article's path on filesystem.
    See the ArticleSource class for general info on sources.
    """
    def __init__(self, sourceId, baseDir):
        self.sourceId = sourceId
        # normalize once so path-prefix slicing in idFromDir() is reliable
        self.baseDir = os.path.normpath(baseDir)
    def __str__(self):
        return self.sourceId
    @classmethod
    def parseDmlMeta(cls, xmlfile):
        """
        Parse out all fields from meta.xml, return them as a dictionary.

        Quirks handled: 'msc' codes accumulate into a list (warning for codes
        that are not exactly 5 chars); 'idMR' drops its 'MR' prefix.
        """
        result = {}
        xml = open(xmlfile)
        for line in xml:
            if line.find('<article>') >= 0: # skip until the beginning of <article> tag
                break
        for line in xml:
            if line.find('</article>') >= 0: # end of <article>, we're done
                break
            p = re.search(PAT_TAG, line) # HAX assumes one element = one line; proper xml parsing probably better... but who cares
            if p:
                name, cont = p.groups()
                # keep only the tag name, dropping any attributes
                name = name.split()[0]
                name, cont = name.strip(), cont.strip()
                if name == 'msc':
                    if len(cont) != 5:
                        logger.warning('invalid MSC=%s in %s' % (cont, xmlfile))
                    result.setdefault('msc', []).append(cont)
                    continue
                if name == 'idMR':
                    cont = cont[2:] # omit MR from MR123456
                if name and cont:
                    result[name] = cont
        xml.close()
        return result
    def idFromDir(self, path):
        # URI = (directory component after the last '#', path relative to baseDir)
        assert len(path) > len(self.baseDir)
        intId = path[1 + path.rfind('#') : ]
        pathId = path[len(self.baseDir) + 1 : ]
        return (intId, pathId)
    def isArticle(self, path):
        # in order to be valid, the article directory must start with '#'
        if not os.path.basename(path).startswith('#'):
            return False
        # and contain the fulltext.txt file
        if not os.path.exists(os.path.join(path, 'fulltext.txt')):
            logger.info('missing fulltext in %s' % path)
            return False
        # and also the meta.xml file
        if not os.path.exists(os.path.join(path, 'meta.xml')):
            logger.info('missing meta.xml in %s' % path)
            return False
        return True
    def findArticles(self):
        # Walk baseDir and yield the URI of every valid article directory.
        dirTotal = artAccepted = 0
        logger.info("looking for '%s' articles inside %s" % (self.sourceId, self.baseDir))
        for root, dirs, files in os.walk(self.baseDir):
            dirTotal += 1
            root = os.path.normpath(root)
            if self.isArticle(root):
                artAccepted += 1
                yield self.idFromDir(root)
        logger.info('%i directories processed, found %i articles' %
                     (dirTotal, artAccepted))
    def getContent(self, uri):
        """
        Return article content as a single large string.
        """
        intId, pathId = uri
        filename = os.path.join(self.baseDir, pathId, 'fulltext.txt')
        return open(filename).read()
    def getMeta(self, uri):
        """
        Return article metadata as a attribute->value dictionary.
        """
        intId, pathId = uri
        filename = os.path.join(self.baseDir, pathId, 'meta.xml')
        return DmlSource.parseDmlMeta(filename)
    def tokenize(self, content):
        # digits are dropped; tokens re-encoded to utf8 bytestrings (Python 2 era code)
        return [token.encode('utf8') for token in utils.tokenize(content, errors = 'ignore') if not token.isdigit()]
    def normalizeWord(self, word):
        # NOTE: uses the Python 2-only unicode() builtin
        wordU = unicode(word, 'utf8')
        return wordU.lower().encode('utf8') # lowercase and then convert back to bytestring
#endclass DmlSource
class DmlCzSource(DmlSource):
    """
    Article source for articles in DML-CZ format:
    1) articles = directories starting with '#'
    2) content is stored in fulltext.txt or fulltext-dspace.txt
    3) there exists a dspace_id file, containing internal dmlcz id
    4) metadata are stored in meta.xml
    See the ArticleSource class for general info on sources.
    """
    def idFromDir(self, path):
        # Unlike DmlSource, the internal id comes from the dspace_id file
        # rather than the directory name.
        assert len(path) > len(self.baseDir)
        dmlczId = open(os.path.join(path, 'dspace_id')).read().strip()
        pathId = path[len(self.baseDir) + 1 : ]
        return (dmlczId, pathId)
    def isArticle(self, path):
        # in order to be valid, the article directory must start with '#'
        if not os.path.basename(path).startswith('#'):
            return False
        # and contain a dspace_id file
        if not (os.path.exists(os.path.join(path, 'dspace_id'))):
            logger.info('missing dspace_id in %s' % path)
            return False
        # and contain either fulltext.txt or fulltext_dspace.txt file
        if not (os.path.exists(os.path.join(path, 'fulltext.txt')) or os.path.exists(os.path.join(path, 'fulltext-dspace.txt'))):
            logger.info('missing fulltext in %s' % path)
            return False
        # and contain the meta.xml file
        if not os.path.exists(os.path.join(path, 'meta.xml')):
            logger.info('missing meta.xml in %s' % path)
            return False
        return True
    def getContent(self, uri):
        """
        Return article content as a single large string.

        When both fulltext.txt and fulltext-dspace.txt exist, the larger of
        the two files is used.
        """
        intId, pathId = uri
        filename1 = os.path.join(self.baseDir, pathId, 'fulltext.txt')
        filename2 = os.path.join(self.baseDir, pathId, 'fulltext-dspace.txt')
        if os.path.exists(filename1) and os.path.exists(filename2):
            # if both fulltext and dspace files exist, pick the larger one
            if os.path.getsize(filename1) < os.path.getsize(filename2):
                filename = filename2
            else:
                filename = filename1
        elif os.path.exists(filename1):
            filename = filename1
        else:
            assert os.path.exists(filename2)
            filename = filename2
        return open(filename).read()
#endclass DmlCzSource
class ArxmlivSource(ArticleSource):
"""
Article source for articles in arxmliv format:
1) articles = directories starting with '#'
2) content is stored in tex.xml
3) metadata in special tags within tex.xml
Article URI is currently (a part of) the article's path on filesystem.
See the ArticleSource class for general info on sources.
"""
    class ArxmlivContentHandler(xml.sax.handler.ContentHandler):
        """SAX handler collecting text and inline-math tokens from tex.xml."""
        def __init__(self):
            self.path = [''] # help structure for sax event parsing
            self.tokens = [] # will contain tokens once parsing is finished
        def startElement(self, name, attr):
            # for math tokens, we only care about Math elements directly below <p>
            if name == 'Math' and self.path[-1] == 'p' and attr.get('mode', '') == 'inline':
                tex = attr.get('tex', '')
                if tex and not tex.isdigit():
                    # math tokens are wrapped in $...$ to distinguish them from plain text
                    self.tokens.append('$%s$' % tex.encode('utf8'))
            self.path.append(name)
        def endElement(self, name):
            self.path.pop()
        def characters(self, text):
            # for text, we only care about tokens directly within the <p> tag
            if self.path[-1] == 'p':
                tokens = [token.encode('utf8') for token in utils.tokenize(text, errors = 'ignore') if not token.isdigit()]
                self.tokens.extend(tokens)
#endclass ArxmlivHandler
    class ArxmlivErrorHandler(xml.sax.handler.ErrorHandler):
        # Python2.5 implementation of xml.sax is broken -- character streams and
        # byte encodings of InputSource are ignored, bad things sometimes happen
        # in buffering of multi-byte files (such as utf8), characters get cut in
        # the middle, resulting in invalid tokens...
        # This is not really a problem with arxmliv xml files themselves, so ignore
        # these errors silently.
        def error(self, exception):
            pass
            # logger.debug("SAX error parsing xml: %s" % exception)
        # all three severities share the same silent handler
        warning = fatalError = error
#endclass ArxmlivErrorHandler
    def __init__(self, sourceId, baseDir):
        self.sourceId = sourceId
        # normalized so idFromDir() can slice off the base-path prefix
        self.baseDir = os.path.normpath(baseDir)
def __str__(self):
return self.sourceId
def idFromDir(self, path):
assert len(path) > len(self.baseDir)
intId = path[1 + path.rfind('#') : ]
pathId = path[len(self.baseDir) + 1 : ]
return (intId, pathId)
def isArticle(self, path):
# in order to be valid, the article directory must start with '#'
if not os.path.basename(path).startswith('#'):
return False
# and contain the tex.xml file
if not os.path.exists(os.path.join(path, 'tex.xml')):
logger.warning('missing tex.xml in %s' % path)
return False
return True
def findArticles(self):
dirTotal = artAccepted = 0
logger.info("looking for '%s' articles inside %s" % (self.sourceId, self.baseDir))
for root, dirs, files in os.walk(self.baseDir):
dirTotal += 1
root = os.path.normpath(root)
if self.isArticle(root):
artAccepted += 1
yield self.idFromDir(root)
logger.info('%i directories processed, found %i articles' %
(dirTotal, artAccepted))
def getContent(self, uri):
"""
Return article content as a single large string.
"""
intId, pathId = uri
filename = os.path.join(self.baseDir, pathId, 'tex.xml')
return open(filename).read()
def getMeta(self, uri):
"""
Return article metadata as an attribute->value dictionary.
"""
# intId, pathId = uri
# filename = os.path.join(self.baseDir, pathId, 'tex.xml')
return {'language': 'eng'} # TODO maybe parse out some meta; but currently not needed for anything...
def tokenize(self, content):
"""
Parse tokens out of xml. There are two types of token: normal text and
mathematics. Both are returned interspersed in a single list, in the same
order as they appeared in the content.
The math tokens will be returned in the form $tex_expression$, ie. with
a dollar sign prefix and suffix.
"""
handler = ArxmlivSource.ArxmlivContentHandler()
xml.sax.parseString(content, handler, ArxmlivSource.ArxmlivErrorHandler())
return handler.tokens
def normalizeWord(self, word):
if word[0] == '$': # ignore math tokens
return word
wordU = unicode(word, 'utf8')
return wordU.lower().encode('utf8') # lowercase and then convert back to bytestring
#endclass ArxmlivSource | unknown | codeparrot/codeparrot-clean | ||
/*-------------------------------------------------------------------------
*
* relfilenumbermap.c
* relfilenumber to oid mapping cache.
*
* Portions Copyright (c) 1996-2026, PostgreSQL Global Development Group
* Portions Copyright (c) 1994, Regents of the University of California
*
* IDENTIFICATION
* src/backend/utils/cache/relfilenumbermap.c
*
*-------------------------------------------------------------------------
*/
#include "postgres.h"
#include "access/genam.h"
#include "access/htup_details.h"
#include "access/table.h"
#include "catalog/pg_class.h"
#include "catalog/pg_tablespace.h"
#include "miscadmin.h"
#include "utils/catcache.h"
#include "utils/fmgroids.h"
#include "utils/hsearch.h"
#include "utils/inval.h"
#include "utils/relfilenumbermap.h"
#include "utils/relmapper.h"
/* Hash table for information about each relfilenumber <-> oid pair */
static HTAB *RelfilenumberMapHash = NULL;

/* built first time through in InitializeRelfilenumberMap */
static ScanKeyData relfilenumber_skey[2];

/* Lookup key: a relfilenumber is only unique within a given tablespace. */
typedef struct
{
	Oid			reltablespace;
	RelFileNumber relfilenumber;
} RelfilenumberMapKey;

/* Cache entry: maps the key to the owning relation's pg_class OID. */
typedef struct
{
	RelfilenumberMapKey key;	/* lookup key - must be first */
	Oid			relid;			/* pg_class.oid */
} RelfilenumberMapEntry;
/*
 * RelfilenumberMapInvalidateCallback
 *		Flush mapping entries when pg_class is updated in a relevant fashion.
 */
static void
RelfilenumberMapInvalidateCallback(Datum arg, Oid relid)
{
	HASH_SEQ_STATUS status;
	RelfilenumberMapEntry *entry;

	/* callback only gets registered after creating the hash */
	Assert(RelfilenumberMapHash != NULL);

	hash_seq_init(&status, RelfilenumberMapHash);
	while ((entry = (RelfilenumberMapEntry *) hash_seq_search(&status)) != NULL)
	{
		/*
		 * If relid is InvalidOid, signaling a complete reset, we must remove
		 * all entries, otherwise just remove the specific relation's entry.
		 * Always remove negative cache entries.
		 */
		if (relid == InvalidOid ||	/* complete reset */
			entry->relid == InvalidOid ||	/* negative cache entry */
			entry->relid == relid)	/* individual flushed relation */
		{
			/*
			 * NOTE(review): relies on dynahash permitting removal of the
			 * just-returned entry while a seq scan is in progress -- confirm
			 * against hash_seq_search's documented contract.
			 */
			if (hash_search(RelfilenumberMapHash,
							&entry->key,
							HASH_REMOVE,
							NULL) == NULL)
				elog(ERROR, "hash table corrupted");
		}
	}
}
/*
 * InitializeRelfilenumberMap
 *		Initialize cache, either on first use or after a reset.
 */
static void
InitializeRelfilenumberMap(void)
{
	HASHCTL		ctl;
	int			i;

	/* Make sure we've initialized CacheMemoryContext. */
	if (CacheMemoryContext == NULL)
		CreateCacheMemoryContext();

	/* build skey */
	MemSet(&relfilenumber_skey, 0, sizeof(relfilenumber_skey));

	/* both keys use OID equality via the btree equal strategy */
	for (i = 0; i < 2; i++)
	{
		fmgr_info_cxt(F_OIDEQ,
					  &relfilenumber_skey[i].sk_func,
					  CacheMemoryContext);
		relfilenumber_skey[i].sk_strategy = BTEqualStrategyNumber;
		relfilenumber_skey[i].sk_subtype = InvalidOid;
		relfilenumber_skey[i].sk_collation = InvalidOid;
	}

	/* key columns: pg_class.reltablespace and pg_class.relfilenode */
	relfilenumber_skey[0].sk_attno = Anum_pg_class_reltablespace;
	relfilenumber_skey[1].sk_attno = Anum_pg_class_relfilenode;

	/*
	 * Only create the RelfilenumberMapHash now, so we don't end up partially
	 * initialized when fmgr_info_cxt() above ERRORs out with an out of memory
	 * error.
	 */
	ctl.keysize = sizeof(RelfilenumberMapKey);
	ctl.entrysize = sizeof(RelfilenumberMapEntry);
	ctl.hcxt = CacheMemoryContext;

	RelfilenumberMapHash =
		hash_create("RelfilenumberMap cache", 64, &ctl,
					HASH_ELEM | HASH_BLOBS | HASH_CONTEXT);

	/* Watch for invalidation events. */
	CacheRegisterRelcacheCallback(RelfilenumberMapInvalidateCallback,
								  (Datum) 0);
}
/*
 * Map a relation's (tablespace, relfilenumber) to a relation's oid and cache
 * the result.
 *
 * A temporary relation may share its relfilenumber with a permanent relation
 * or temporary relations created in other backends. Being able to uniquely
 * identify a temporary relation would require a backend's proc number, which
 * we do not know about. Hence, this function ignores this case.
 *
 * Returns InvalidOid if no relation matching the criteria could be found.
 */
Oid
RelidByRelfilenumber(Oid reltablespace, RelFileNumber relfilenumber)
{
	RelfilenumberMapKey key;
	RelfilenumberMapEntry *entry;
	bool		found;
	SysScanDesc scandesc;
	Relation	relation;
	HeapTuple	ntp;
	Oid			relid;

	/* Build the cache (and the shared scan keys) on first use. */
	if (RelfilenumberMapHash == NULL)
		InitializeRelfilenumberMap();

	/* pg_class will show 0 when the value is actually MyDatabaseTableSpace */
	if (reltablespace == MyDatabaseTableSpace)
		reltablespace = 0;

	MemSet(&key, 0, sizeof(key));
	key.reltablespace = reltablespace;
	key.relfilenumber = relfilenumber;

	/*
	 * Check cache and return entry if one is found. Even if no target
	 * relation can be found later on we store the negative match and return a
	 * InvalidOid from cache. That's not really necessary for performance
	 * since querying invalid values isn't supposed to be a frequent thing,
	 * but it's basically free.
	 */
	entry = hash_search(RelfilenumberMapHash, &key, HASH_FIND, &found);

	if (found)
		return entry->relid;

	/* ok, no previous cache entry, do it the hard way */

	/* initialize empty/negative cache entry before doing the actual lookups */
	relid = InvalidOid;

	if (reltablespace == GLOBALTABLESPACE_OID)
	{
		/*
		 * Ok, shared table, check relmapper.
		 */
		relid = RelationMapFilenumberToOid(relfilenumber, true);
	}
	else
	{
		ScanKeyData skey[2];

		/*
		 * Not a shared table, could either be a plain relation or a
		 * non-shared, nailed one, like e.g. pg_class.
		 */

		/* check for plain relations by looking in pg_class */
		relation = table_open(RelationRelationId, AccessShareLock);

		/* copy scankey to local copy and set scan arguments */
		memcpy(skey, relfilenumber_skey, sizeof(skey));
		skey[0].sk_argument = ObjectIdGetDatum(reltablespace);
		skey[1].sk_argument = ObjectIdGetDatum(relfilenumber);

		scandesc = systable_beginscan(relation,
									  ClassTblspcRelfilenodeIndexId,
									  true,
									  NULL,
									  2,
									  skey);

		found = false;

		while (HeapTupleIsValid(ntp = systable_getnext(scandesc)))
		{
			Form_pg_class classform = (Form_pg_class) GETSTRUCT(ntp);

			/* temp relations are deliberately skipped, see header comment */
			if (classform->relpersistence == RELPERSISTENCE_TEMP)
				continue;

			if (found)
				elog(ERROR,
					 "unexpected duplicate for tablespace %u, relfilenumber %u",
					 reltablespace, relfilenumber);
			found = true;

			Assert(classform->reltablespace == reltablespace);
			Assert(classform->relfilenode == relfilenumber);
			relid = classform->oid;
		}

		systable_endscan(scandesc);
		table_close(relation, AccessShareLock);

		/* check for tables that are mapped but not shared */
		if (!found)
			relid = RelationMapFilenumberToOid(relfilenumber, false);
	}

	/*
	 * Only enter entry into cache now, our opening of pg_class could have
	 * caused cache invalidations to be executed which would have deleted a
	 * new entry if we had entered it above.
	 */
	entry = hash_search(RelfilenumberMapHash, &key, HASH_ENTER, &found);
	if (found)
		elog(ERROR, "corrupted hashtable");
	entry->relid = relid;

	return relid;
} | c | github | https://github.com/postgres/postgres | src/backend/utils/cache/relfilenumbermap.c |
// This file is part of ICU4X. For terms of use, please see the file
// called LICENSE at the top level of the ICU4X source tree
// (online at: https://github.com/unicode-org/icu4x/blob/main/LICENSE ).
//! This module contains types and implementations for the Coptic calendar.
//!
//! ```rust
//! use icu::calendar::{cal::Coptic, Date};
//!
//! let date_iso = Date::try_new_iso(1970, 1, 2)
//! .expect("Failed to initialize ISO Date instance.");
//! let date_coptic = Date::new_from_iso(date_iso, Coptic);
//!
//! assert_eq!(date_coptic.era_year().year, 1686);
//! assert_eq!(date_coptic.month().ordinal, 4);
//! assert_eq!(date_coptic.day_of_month().0, 24);
//! ```
use crate::cal::iso::{Iso, IsoDateInner};
use crate::calendar_arithmetic::{ArithmeticDate, CalendarArithmetic};
use crate::error::DateError;
use crate::{types, Calendar, Date, DateDuration, DateDurationUnit, RangeError};
use calendrical_calculations::helpers::I32CastError;
use calendrical_calculations::rata_die::RataDie;
use tinystr::tinystr;
/// The [Coptic Calendar]
///
/// The [Coptic calendar] is a solar calendar used by the Coptic Orthodox Church, with twelve normal months
/// and a thirteenth small epagomenal month.
///
/// This type can be used with [`Date`] to represent dates in this calendar.
///
/// [Coptic calendar]: https://en.wikipedia.org/wiki/Coptic_calendar
///
/// # Era codes
///
/// This calendar uses a single code: `am`, corresponding to the After Diocletian/Anno Martyrum
/// era. 1 A.M. is equivalent to 284 C.E.
///
/// # Month codes
///
/// This calendar supports 13 solar month codes (`"M01" - "M13"`), with `"M13"` being used for the short epagomenal month
/// at the end of the year.
#[derive(Copy, Clone, Debug, Hash, Default, Eq, PartialEq, PartialOrd, Ord)]
#[allow(clippy::exhaustive_structs)] // this type is stable
pub struct Coptic;
/// The inner date type used for representing [`Date`]s of [`Coptic`]. See [`Date`] and [`Coptic`] for more details.
#[derive(Copy, Clone, Debug, Hash, Eq, PartialEq, PartialOrd, Ord)]
pub struct CopticDateInner(pub(crate) ArithmeticDate<Coptic>);
impl CalendarArithmetic for Coptic {
    type YearInfo = i32;

    /// Days in the given month: months 1-12 have 30 days; month 13 is the
    /// short epagomenal month of 5 days (6 in a leap year); anything else is 0.
    fn days_in_provided_month(year: i32, month: u8) -> u8 {
        match month {
            1..=12 => 30,
            13 => {
                if Self::provided_year_is_leap(year) {
                    6
                } else {
                    5
                }
            }
            _ => 0,
        }
    }

    /// The Coptic year always has 13 months.
    fn months_in_provided_year(_: i32) -> u8 {
        13
    }

    /// A Coptic year is a leap year when it is congruent to 3 modulo 4.
    fn provided_year_is_leap(year: i32) -> bool {
        year.rem_euclid(4) == 3
    }

    /// (month, day) of the last day of the year, i.e. the end of the
    /// epagomenal month.
    fn last_month_day_in_provided_year(year: i32) -> (u8, u8) {
        let last_day = if Self::provided_year_is_leap(year) { 6 } else { 5 };
        (13, last_day)
    }

    /// Total days in the year: 366 in leap years, 365 otherwise.
    fn days_in_provided_year(year: i32) -> u16 {
        if Self::provided_year_is_leap(year) {
            366
        } else {
            365
        }
    }
}
impl crate::cal::scaffold::UnstableSealed for Coptic {}

impl Calendar for Coptic {
    type DateInner = CopticDateInner;
    type Year = types::EraYear;

    /// Construct a date from codes; only the `"am"` era (or no era) is accepted.
    fn from_codes(
        &self,
        era: Option<&str>,
        year: i32,
        month_code: types::MonthCode,
        day: u8,
    ) -> Result<Self::DateInner, DateError> {
        let year = match era {
            Some("am") | None => year,
            Some(_) => return Err(DateError::UnknownEra),
        };

        ArithmeticDate::new_from_codes(self, year, month_code, day).map(CopticDateInner)
    }

    /// Convert from a fixed day number; out-of-range inputs saturate to the
    /// minimum/maximum representable date instead of failing.
    fn from_rata_die(&self, rd: RataDie) -> Self::DateInner {
        CopticDateInner(
            match calendrical_calculations::coptic::coptic_from_fixed(rd) {
                Err(I32CastError::BelowMin) => ArithmeticDate::min_date(),
                Err(I32CastError::AboveMax) => ArithmeticDate::max_date(),
                Ok((year, month, day)) => ArithmeticDate::new_unchecked(year, month, day),
            },
        )
    }

    fn to_rata_die(&self, date: &Self::DateInner) -> RataDie {
        calendrical_calculations::coptic::fixed_from_coptic(date.0.year, date.0.month, date.0.day)
    }

    // ISO conversions are routed through the fixed (rata die) representation.
    fn from_iso(&self, iso: IsoDateInner) -> CopticDateInner {
        self.from_rata_die(Iso.to_rata_die(&iso))
    }

    fn to_iso(&self, date: &Self::DateInner) -> IsoDateInner {
        Iso.from_rata_die(self.to_rata_die(date))
    }

    // The accessors below all delegate to the inner ArithmeticDate.
    fn months_in_year(&self, date: &Self::DateInner) -> u8 {
        date.0.months_in_year()
    }

    fn days_in_year(&self, date: &Self::DateInner) -> u16 {
        date.0.days_in_year()
    }

    fn days_in_month(&self, date: &Self::DateInner) -> u8 {
        date.0.days_in_month()
    }

    fn offset_date(&self, date: &mut Self::DateInner, offset: DateDuration<Self>) {
        date.0.offset_date(offset, &());
    }

    #[allow(clippy::field_reassign_with_default)]
    fn until(
        &self,
        date1: &Self::DateInner,
        date2: &Self::DateInner,
        _calendar2: &Self,
        _largest_unit: DateDurationUnit,
        _smallest_unit: DateDurationUnit,
    ) -> DateDuration<Self> {
        date1.0.until(date2.0, _largest_unit, _smallest_unit)
    }

    /// Year info is always reported in the single `"am"` era.
    fn year_info(&self, date: &Self::DateInner) -> Self::Year {
        let year = self.extended_year(date);
        types::EraYear {
            era: tinystr!(16, "am"),
            era_index: Some(0),
            year,
            ambiguity: types::YearAmbiguity::CenturyRequired,
        }
    }

    fn extended_year(&self, date: &Self::DateInner) -> i32 {
        date.0.extended_year()
    }

    fn is_in_leap_year(&self, date: &Self::DateInner) -> bool {
        Self::provided_year_is_leap(date.0.year)
    }

    fn month(&self, date: &Self::DateInner) -> types::MonthInfo {
        date.0.month()
    }

    fn day_of_month(&self, date: &Self::DateInner) -> types::DayOfMonth {
        date.0.day_of_month()
    }

    fn day_of_year(&self, date: &Self::DateInner) -> types::DayOfYear {
        date.0.day_of_year()
    }

    fn debug_name(&self) -> &'static str {
        "Coptic"
    }

    fn calendar_algorithm(&self) -> Option<crate::preferences::CalendarAlgorithm> {
        Some(crate::preferences::CalendarAlgorithm::Coptic)
    }
}
impl Date<Coptic> {
/// Construct new Coptic Date.
///
/// ```rust
/// use icu::calendar::Date;
///
/// let date_coptic = Date::try_new_coptic(1686, 5, 6)
/// .expect("Failed to initialize Coptic Date instance.");
///
/// assert_eq!(date_coptic.era_year().year, 1686);
/// assert_eq!(date_coptic.month().ordinal, 5);
/// assert_eq!(date_coptic.day_of_month().0, 6);
/// ```
pub fn try_new_coptic(year: i32, month: u8, day: u8) -> Result<Date<Coptic>, RangeError> {
ArithmeticDate::new_from_ordinals(year, month, day)
.map(CopticDateInner)
.map(|inner| Date::from_raw(inner, Coptic))
}
}
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_coptic_regression() {
        // https://github.com/unicode-org/icu4x/issues/2254
        // Round-tripping an ISO date with a negative year through the Coptic
        // calendar must return the original date unchanged.
        let iso_date = Date::try_new_iso(-100, 3, 3).unwrap();
        let coptic = iso_date.to_calendar(Coptic);
        let recovered_iso = coptic.to_iso();
        assert_eq!(iso_date, recovered_iso);
    }
} | rust | github | https://github.com/nodejs/node | deps/crates/vendor/icu_calendar/src/cal/coptic.rs |
---
navigation_title: "User agent"
mapped_pages:
- https://www.elastic.co/guide/en/elasticsearch/reference/current/user-agent-processor.html
---
# User agent processor [user-agent-processor]
The `user_agent` processor extracts details from the user agent string a browser sends with its web requests. This processor adds this information by default under the `user_agent` field.
The ingest-user-agent module ships by default with the regexes.yaml made available by uap-java with an Apache 2.0 license. For more details see [https://github.com/ua-parser/uap-core](https://github.com/ua-parser/uap-core).
## Using the user_agent Processor in a Pipeline [using-ingest-user-agent]
$$$ingest-user-agent-options$$$
| Name | Required | Default | Description |
| --- | --- | --- | --- |
| `field` | yes | - | The field containing the user agent string. |
| `target_field` | no | user_agent | The field that will be filled with the user agent details. |
| `regex_file` | no | - | The name of the file in the `config/ingest-user-agent` directory containing the regular expressions for parsing the user agent string. Both the directory and the file have to be created before starting Elasticsearch. If not specified, ingest-user-agent will use the regexes.yaml from uap-core it ships with (see below). |
| `properties` | no | [`name`, `os`, `device`, `original`, `version`] | Controls what properties are added to `target_field`. |
| `extract_device_type` | no | `false` | {applies_to}`stack: beta` {applies_to}`serverless: beta` Extracts device type from the user agent string on a best-effort basis. |
| `ignore_missing` | no | `false` | If `true` and `field` does not exist, the processor quietly exits without modifying the document |
Here is an example that adds the user agent details to the `user_agent` field based on the `agent` field:
```console
PUT _ingest/pipeline/user_agent
{
"description" : "Add user agent information",
"processors" : [
{
"user_agent" : {
"field" : "agent"
}
}
]
}
PUT my-index-000001/_doc/my_id?pipeline=user_agent
{
"agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/51.0.2704.103 Safari/537.36"
}
GET my-index-000001/_doc/my_id
```
Which returns
```console-result
{
"found": true,
"_index": "my-index-000001",
"_id": "my_id",
"_version": 1,
"_seq_no": 22,
"_primary_term": 1,
"_source": {
"agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/51.0.2704.103 Safari/537.36",
"user_agent": {
"name": "Chrome",
"original": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/51.0.2704.103 Safari/537.36",
"version": "51.0.2704.103",
"os": {
"name": "Mac OS X",
"version": "10.10.5",
"full": "Mac OS X 10.10.5"
},
"device" : {
"name" : "Mac"
}
}
}
}
```
% TESTRESPONSE[s/"_seq_no": \d+/"_seq_no" : $body._seq_no/ s/"_primary_term": 1/"_primary_term" : $body._primary_term/]
### Using a custom regex file [_using_a_custom_regex_file]
To use a custom regex file for parsing the user agents, that file has to be put into the `config/ingest-user-agent` directory and has to have a `.yml` filename extension. The file has to be present at node startup, any changes to it or any new files added while the node is running will not have any effect.
In practice, it will make the most sense for any custom regex file to be a variant of the default file: either a more recent version of it, or a customised version.
The default file included in `ingest-user-agent` is the `regexes.yaml` from uap-core: [https://github.com/ua-parser/uap-core/blob/master/regexes.yaml](https://github.com/ua-parser/uap-core/blob/master/regexes.yaml)
### Node Settings [ingest-user-agent-settings]
The `user_agent` processor supports the following setting:
`ingest.user_agent.cache_size`
: The maximum number of results that should be cached. Defaults to `1000`.
Note that these settings are node settings and apply to all `user_agent` processors, i.e. there is one cache for all defined `user_agent` processors. | unknown | github | https://github.com/elastic/elasticsearch | docs/reference/enrich-processor/user-agent-processor.md |
# Splashscreen example
This example demonstrates how a splashscreen can be implemented while waiting for initialization code to finish, either in Rust or in the UI.
## Running the example
Run the following scripts on the root directory of the repository:
```bash
$ cargo run --example splashscreen
``` | unknown | github | https://github.com/tauri-apps/tauri | examples/splashscreen/README.md |
/*
* Copyright 2012-present the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.boot;
import org.junit.jupiter.api.Test;
import org.springframework.boot.testsupport.classpath.ClassPathExclusions;
import org.springframework.context.ConfigurableApplicationContext;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.support.StaticApplicationContext;
import static org.assertj.core.api.Assertions.assertThat;
/**
* Tests for {@link SpringApplication} when spring web is not on the classpath.
*
* @author Stephane Nicoll
*/
@ClassPathExclusions("spring-web*.jar")
class SpringApplicationNoWebTests {

	@Test
	void detectWebApplicationTypeToNone() {
		// With spring-web excluded from the classpath, the deduced web
		// application type must fall back to NONE.
		SpringApplication application = new SpringApplication(ExampleConfig.class);
		assertThat(application.getWebApplicationType()).isEqualTo(WebApplicationType.NONE);
	}

	@Test
	void specificApplicationContextClass() {
		// An explicitly configured context factory takes precedence over the
		// deduced (non-web) default context type.
		SpringApplication application = new SpringApplication(ExampleConfig.class);
		application
			.setApplicationContextFactory(ApplicationContextFactory.ofContextClass(StaticApplicationContext.class));
		ConfigurableApplicationContext context = application.run();
		assertThat(context).isInstanceOf(StaticApplicationContext.class);
		context.close();
	}

	// Minimal configuration class used as the application source in both tests.
	@Configuration(proxyBeanMethods = false)
	static class ExampleConfig {

	}
} | java | github | https://github.com/spring-projects/spring-boot | core/spring-boot/src/test/java/org/springframework/boot/SpringApplicationNoWebTests.java |
---
name: Bug report
description: Create a report to help us improve.
body:
- type: markdown
attributes:
value: |
Thank you for opening a bug report for Prometheus.
Please do *NOT* ask support questions in Github issues.
If your issue is not a feature request or bug report use our [community support](https://prometheus.io/community/).
There is also [commercial support](https://prometheus.io/support-training/) available.
- type: textarea
attributes:
label: What did you do?
description: Please provide steps for us to reproduce this issue.
validations:
required: true
- type: textarea
attributes:
label: What did you expect to see?
- type: textarea
attributes:
label: What did you see instead? Under which circumstances?
validations:
required: true
- type: markdown
attributes:
value: |
## Environment
- type: input
attributes:
label: System information
      description: Insert output of `uname -srm` here, or the operating system version
placeholder: e.g. Linux 5.16.15 x86_64
- type: textarea
attributes:
label: Prometheus version
description: Insert output of `prometheus --version` here.
render: text
placeholder: |
e.g. prometheus, version 2.23.0 (branch: HEAD, revision: 26d89b4b0776fe4cd5a3656dfa520f119a375273)
build user: root@37609b3a0a21
build date: 20201126-10:56:17
go version: go1.15.5
platform: linux/amd64
- type: textarea
attributes:
label: Prometheus configuration file
description: Insert relevant configuration here. Don't forget to remove secrets.
render: yaml
- type: textarea
attributes:
label: Alertmanager version
description: Insert output of `alertmanager --version` here (if relevant to the issue).
render: text
placeholder: |
e.g. alertmanager, version 0.22.2 (branch: HEAD, revision: 44f8adc06af5101ad64bd8b9c8b18273f2922051)
build user: root@b595c7f32520
build date: 20210602-07:50:37
go version: go1.16.4
platform: linux/amd64
- type: textarea
attributes:
label: Alertmanager configuration file
description: Insert relevant configuration here. Don't forget to remove secrets.
render: yaml
- type: textarea
attributes:
label: Logs
description: Insert Prometheus and Alertmanager logs relevant to the issue here.
render: text | unknown | github | https://github.com/prometheus/prometheus | .github/ISSUE_TEMPLATE/bug_report.yml |
"""Test Home Assistant template helper methods."""
# pylint: disable=too-many-public-methods
import unittest
from unittest.mock import patch
from homeassistant.components import group
from homeassistant.exceptions import TemplateError
from homeassistant.helpers import template
import homeassistant.util.dt as dt_util
from tests.common import get_test_home_assistant
class TestUtilTemplate(unittest.TestCase):
"""Test the Template."""
    def setUp(self):  # pylint: disable=invalid-name
        """Create a fresh test Home Assistant instance for each test."""
        self.hass = get_test_home_assistant()
    def tearDown(self):  # pylint: disable=invalid-name
        """Stop the Home Assistant instance started in setUp."""
        self.hass.stop()
    def test_referring_states_by_entity_id(self):
        """Test referencing a state via states.<domain>.<object_id>."""
        self.hass.states.set('test.object', 'happy')
        self.assertEqual(
            'happy',
            template.render(self.hass, '{{ states.test.object.state }}'))
    def test_iterating_all_states(self):
        """Test iterating over all states with a template for loop."""
        self.hass.states.set('test.object', 'happy')
        self.hass.states.set('sensor.temperature', 10)
        # entities are iterated in sorted entity_id order, so sensor before test
        self.assertEqual(
            '10happy',
            template.render(
                self.hass,
                '{% for state in states %}{{ state.state }}{% endfor %}'))
def test_iterating_domain_states(self):
"""."""
self.hass.states.set('test.object', 'happy')
self.hass.states.set('sensor.back_door', 'open')
self.hass.states.set('sensor.temperature', 10)
self.assertEqual(
'open10',
template.render(
self.hass,
"""
{% for state in states.sensor %}{{ state.state }}{% endfor %}
"""))
    def test_float(self):
        """Test the float() template function for conversion and comparison."""
        self.hass.states.set('sensor.temperature', '12')
        self.assertEqual(
            '12.0',
            template.render(
                self.hass,
                '{{ float(states.sensor.temperature.state) }}'))
        self.assertEqual(
            'True',
            template.render(
                self.hass,
                '{{ float(states.sensor.temperature.state) > 11 }}'))
def test_rounding_value(self):
"""."""
self.hass.states.set('sensor.temperature', 12.78)
self.assertEqual(
'12.8',
template.render(
self.hass,
'{{ states.sensor.temperature.state | round(1) }}'))
self.assertEqual(
'128',
template.render(
self.hass,
'{{ states.sensor.temperature.state | multiply(10) | round }}'
))
def test_rounding_value_get_original_value_on_error(self):
"""."""
self.assertEqual(
'None',
template.render(
self.hass,
'{{ None | round }}'
))
self.assertEqual(
'no_number',
template.render(
self.hass,
'{{ "no_number" | round }}'
))
    def test_multiply(self):
        """Test the multiply filter, including non-numeric pass-through."""
        # non-numeric inputs are returned unchanged by multiply/round
        tests = {
            None: 'None',
            10: '100',
            '"abcd"': 'abcd'
        }
        for inp, out in tests.items():
            self.assertEqual(
                out,
                template.render(self.hass,
                                '{{ %s | multiply(10) | round }}' % inp))
def test_passing_vars_as_keywords(self):
"""."""
self.assertEqual(
'127', template.render(self.hass, '{{ hello }}', hello=127))
def test_passing_vars_as_vars(self):
"""."""
self.assertEqual(
'127', template.render(self.hass, '{{ hello }}', {'hello': 127}))
def test_render_with_possible_json_value_with_valid_json(self):
"""."""
self.assertEqual(
'world',
template.render_with_possible_json_value(
self.hass, '{{ value_json.hello }}', '{"hello": "world"}'))
def test_render_with_possible_json_value_with_invalid_json(self):
"""."""
self.assertEqual(
'',
template.render_with_possible_json_value(
self.hass, '{{ value_json }}', '{ I AM NOT JSON }'))
def test_render_with_possible_json_value_with_template_error(self):
"""."""
self.assertEqual(
'hello',
template.render_with_possible_json_value(
self.hass, '{{ value_json', 'hello'))
def test_render_with_possible_json_value_with_template_error_value(self):
"""."""
self.assertEqual(
'-',
template.render_with_possible_json_value(
self.hass, '{{ value_json', 'hello', '-'))
    def test_raise_exception_on_error(self):
        """Test that invalid template syntax raises TemplateError."""
        with self.assertRaises(TemplateError):
            template.render(self.hass, '{{ invalid_syntax')
def test_if_state_exists(self):
"""."""
self.hass.states.set('test.object', 'available')
self.assertEqual(
'exists',
template.render(
self.hass,
"""
{% if states.test.object %}exists{% else %}not exists{% endif %}
"""))
def test_is_state(self):
"""."""
self.hass.states.set('test.object', 'available')
self.assertEqual(
'yes',
template.render(
self.hass,
"""
{% if is_state("test.object", "available") %}yes{% else %}no{% endif %}
"""))
def test_is_state_attr(self):
"""."""
self.hass.states.set('test.object', 'available', {'mode': 'on'})
self.assertEqual(
'yes',
template.render(
self.hass,
"""
{% if is_state_attr("test.object", "mode", "on") %}yes{% else %}no{% endif %}
"""))
def test_states_function(self):
"""."""
self.hass.states.set('test.object', 'available')
self.assertEqual(
'available',
template.render(self.hass, '{{ states("test.object") }}'))
self.assertEqual(
'unknown',
template.render(self.hass, '{{ states("test.object2") }}'))
@patch('homeassistant.core.dt_util.utcnow', return_value=dt_util.utcnow())
@patch('homeassistant.helpers.template.TemplateEnvironment.'
'is_safe_callable', return_value=True)
def test_now(self, mock_is_safe, mock_utcnow):
"""."""
self.assertEqual(
dt_util.utcnow().isoformat(),
template.render(self.hass, '{{ now.isoformat() }}'))
@patch('homeassistant.core.dt_util.utcnow', return_value=dt_util.utcnow())
@patch('homeassistant.helpers.template.TemplateEnvironment.'
'is_safe_callable', return_value=True)
def test_utcnow(self, mock_is_safe, mock_utcnow):
"""."""
self.assertEqual(
dt_util.utcnow().isoformat(),
template.render(self.hass, '{{ utcnow.isoformat() }}'))
def test_utcnow_is_exactly_now(self):
"""."""
self.assertEqual(
'True',
template.render(self.hass, '{{ utcnow == now }}'))
def test_distance_function_with_1_state(self):
"""."""
self.hass.states.set('test.object', 'happy', {
'latitude': 32.87336,
'longitude': -117.22943,
})
self.assertEqual(
'187',
template.render(
self.hass, '{{ distance(states.test.object) | round }}'))
def test_distance_function_with_2_states(self):
"""."""
self.hass.states.set('test.object', 'happy', {
'latitude': 32.87336,
'longitude': -117.22943,
})
self.hass.states.set('test.object_2', 'happy', {
'latitude': self.hass.config.latitude,
'longitude': self.hass.config.longitude,
})
self.assertEqual(
'187',
template.render(
self.hass,
'{{ distance(states.test.object, states.test.object_2)'
'| round }}'))
def test_distance_function_with_1_coord(self):
"""."""
self.assertEqual(
'187',
template.render(
self.hass, '{{ distance("32.87336", "-117.22943") | round }}'))
    def test_distance_function_with_2_coords(self):
        """Test distance() between two explicit coordinate pairs."""
        # second pair is the configured home location of the test instance
        self.assertEqual(
            '187',
            template.render(
                self.hass,
                '{{ distance("32.87336", "-117.22943", %s, %s) | round }}'
                % (self.hass.config.latitude, self.hass.config.longitude)))
def test_distance_function_with_1_state_1_coord(self):
"""."""
self.hass.states.set('test.object_2', 'happy', {
'latitude': self.hass.config.latitude,
'longitude': self.hass.config.longitude,
})
self.assertEqual(
'187',
template.render(
self.hass,
'{{ distance("32.87336", "-117.22943", states.test.object_2) '
'| round }}'))
self.assertEqual(
'187',
template.render(
self.hass,
'{{ distance(states.test.object_2, "32.87336", "-117.22943") '
'| round }}'))
def test_distance_function_return_None_if_invalid_state(self):
"""."""
self.hass.states.set('test.object_2', 'happy', {
'latitude': 10,
})
self.assertEqual(
'None',
template.render(
self.hass,
'{{ distance(states.test.object_2) | round }}'))
def test_distance_function_return_None_if_invalid_coord(self):
"""."""
self.assertEqual(
'None',
template.render(
self.hass,
'{{ distance("123", "abc") }}'))
self.assertEqual(
'None',
template.render(
self.hass,
'{{ distance("123") }}'))
self.hass.states.set('test.object_2', 'happy', {
'latitude': self.hass.config.latitude,
'longitude': self.hass.config.longitude,
})
self.assertEqual(
'None',
template.render(
self.hass,
'{{ distance("123", states.test_object_2) }}'))
def test_closest_function_home_vs_domain(self):
"""."""
self.hass.states.set('test_domain.object', 'happy', {
'latitude': self.hass.config.latitude + 0.1,
'longitude': self.hass.config.longitude + 0.1,
})
self.hass.states.set('not_test_domain.but_closer', 'happy', {
'latitude': self.hass.config.latitude,
'longitude': self.hass.config.longitude,
})
self.assertEqual(
'test_domain.object',
template.render(self.hass,
'{{ closest(states.test_domain).entity_id }}'))
    def test_closest_function_home_vs_all_states(self):
        """closest(states) considers located entities from every domain."""
        self.hass.states.set('test_domain.object', 'happy', {
            'latitude': self.hass.config.latitude + 0.1,
            'longitude': self.hass.config.longitude + 0.1,
        })
        self.hass.states.set('test_domain_2.and_closer', 'happy', {
            'latitude': self.hass.config.latitude,
            'longitude': self.hass.config.longitude,
        })
        self.assertEqual(
            'test_domain_2.and_closer',
            template.render(self.hass,
                            '{{ closest(states).entity_id }}'))
    def test_closest_function_home_vs_group_entity_id(self):
        """closest("group.x") only considers the group's members."""
        self.hass.states.set('test_domain.object', 'happy', {
            'latitude': self.hass.config.latitude + 0.1,
            'longitude': self.hass.config.longitude + 0.1,
        })
        # Closer to home, but not a group member — must not be picked.
        self.hass.states.set('not_in_group.but_closer', 'happy', {
            'latitude': self.hass.config.latitude,
            'longitude': self.hass.config.longitude,
        })
        group.Group(self.hass, 'location group', ['test_domain.object'])
        self.assertEqual(
            'test_domain.object',
            template.render(self.hass,
                            '{{ closest("group.location_group").entity_id }}'))
    def test_closest_function_home_vs_group_state(self):
        """closest(states.group.x) expands the group state to its members."""
        self.hass.states.set('test_domain.object', 'happy', {
            'latitude': self.hass.config.latitude + 0.1,
            'longitude': self.hass.config.longitude + 0.1,
        })
        # Closer to home, but not a group member — must not be picked.
        self.hass.states.set('not_in_group.but_closer', 'happy', {
            'latitude': self.hass.config.latitude,
            'longitude': self.hass.config.longitude,
        })
        group.Group(self.hass, 'location group', ['test_domain.object'])
        self.assertEqual(
            'test_domain.object',
            template.render(
                self.hass,
                '{{ closest(states.group.location_group).entity_id }}'))
    def test_closest_function_to_coord(self):
        """closest(lat, lon, set) picks the entity nearest the given coordinates."""
        self.hass.states.set('test_domain.closest_home', 'happy', {
            'latitude': self.hass.config.latitude + 0.1,
            'longitude': self.hass.config.longitude + 0.1,
        })
        self.hass.states.set('test_domain.closest_zone', 'happy', {
            'latitude': self.hass.config.latitude + 0.2,
            'longitude': self.hass.config.longitude + 0.2,
        })
        self.hass.states.set('zone.far_away', 'zoning', {
            'latitude': self.hass.config.latitude + 0.3,
            'longitude': self.hass.config.longitude + 0.3,
        })
        # Reference point is the zone's coordinates, so closest_zone wins.
        self.assertEqual(
            'test_domain.closest_zone',
            template.render(
                self.hass,
                '{{ closest("%s", %s, states.test_domain).entity_id }}'
                % (self.hass.config.latitude + 0.3,
                   self.hass.config.longitude + 0.3))
        )
    def test_closest_function_to_entity_id(self):
        """closest("entity_id", set) measures from that entity's location."""
        self.hass.states.set('test_domain.closest_home', 'happy', {
            'latitude': self.hass.config.latitude + 0.1,
            'longitude': self.hass.config.longitude + 0.1,
        })
        self.hass.states.set('test_domain.closest_zone', 'happy', {
            'latitude': self.hass.config.latitude + 0.2,
            'longitude': self.hass.config.longitude + 0.2,
        })
        self.hass.states.set('zone.far_away', 'zoning', {
            'latitude': self.hass.config.latitude + 0.3,
            'longitude': self.hass.config.longitude + 0.3,
        })
        self.assertEqual(
            'test_domain.closest_zone',
            template.render(
                self.hass,
                '{{ closest("zone.far_away", states.test_domain).entity_id }}')
        )
    def test_closest_function_to_state(self):
        """closest(state, set) measures from the given state object's location."""
        self.hass.states.set('test_domain.closest_home', 'happy', {
            'latitude': self.hass.config.latitude + 0.1,
            'longitude': self.hass.config.longitude + 0.1,
        })
        self.hass.states.set('test_domain.closest_zone', 'happy', {
            'latitude': self.hass.config.latitude + 0.2,
            'longitude': self.hass.config.longitude + 0.2,
        })
        self.hass.states.set('zone.far_away', 'zoning', {
            'latitude': self.hass.config.latitude + 0.3,
            'longitude': self.hass.config.longitude + 0.3,
        })
        self.assertEqual(
            'test_domain.closest_zone',
            template.render(
                self.hass,
                '{{ closest(states.zone.far_away, '
                'states.test_domain).entity_id }}')
        )
    def test_closest_function_invalid_state(self):
        """closest() renders 'None' when the reference entity does not exist."""
        self.hass.states.set('test_domain.closest_home', 'happy', {
            'latitude': self.hass.config.latitude + 0.1,
            'longitude': self.hass.config.longitude + 0.1,
        })
        # Both the state-object form and the quoted entity-id form must fail.
        for state in ('states.zone.non_existing', '"zone.non_existing"'):
            self.assertEqual(
                'None',
                template.render(
                    self.hass, '{{ closest(%s, states) }}' % state))
    def test_closest_function_state_with_invalid_location(self):
        """closest() renders 'None' when the reference state's location is bad."""
        self.hass.states.set('test_domain.closest_home', 'happy', {
            'latitude': 'invalid latitude',
            'longitude': self.hass.config.longitude + 0.1,
        })
        self.assertEqual(
            'None',
            template.render(
                self.hass,
                '{{ closest(states.test_domain.closest_home, '
                'states) }}'))
    def test_closest_function_invalid_coordinates(self):
        """closest() renders 'None' for unparseable reference coordinates."""
        self.hass.states.set('test_domain.closest_home', 'happy', {
            'latitude': self.hass.config.latitude + 0.1,
            'longitude': self.hass.config.longitude + 0.1,
        })
        self.assertEqual(
            'None',
            template.render(self.hass,
                            '{{ closest("invalid", "coord", states) }}'))
    def test_closest_function_no_location_states(self):
        """closest(states) renders 'None' when no entity has a location."""
        self.assertEqual('None',
                         template.render(self.hass, '{{ closest(states) }}'))
from flask import jsonify
from flask_restx import inputs
from flexget import plugin
from flexget.api import APIResource, api
from flexget.api.app import BadRequest, NotFoundError, etag
# All TMDB lookup routes below are registered under the 'tmdb' namespace.
tmdb_api = api.namespace('tmdb', description='TMDB lookup endpoint')
class ObjectsContainer:
    """JSON schemas describing the payloads returned by the TMDB endpoints."""

    # Schema of a single poster/backdrop image record.
    poster_object = {
        'type': 'object',
        'properties': {
            'id': {'type': ['integer', 'null']},
            'movie_id': {'type': ['integer', 'null']},
            'urls': {'type': 'object'},
            'file_path': {'type': 'string'},
            'width': {'type': 'integer'},
            'height': {'type': 'integer'},
            'aspect_ratio': {'type': 'number'},
            'vote_average': {'type': 'number'},
            'vote_count': {'type': 'integer'},
            'language_code': {'type': ['string', 'null']},
        },
        'required': [
            'id',
            'movie_id',
            'urls',
            'file_path',
            'width',
            'height',
            'aspect_ratio',
            'vote_average',
            'vote_count',
            'language_code',
        ],
        'additionalProperties': False,
    }

    # Schema of a movie lookup result. 'posters' and 'backdrops' reuse
    # poster_object above and are optional (not listed in 'required') because
    # they are only included when the caller asks for them.
    movie_object = {
        'type': 'object',
        'properties': {
            'id': {'type': 'integer'},
            'imdb_id': {'type': 'string'},
            'name': {'type': 'string'},
            'original_name': {'type': ['string', 'null']},
            'alternative_name': {'type': ['string', 'null']},
            'year': {'type': 'integer'},
            'runtime': {'type': 'integer'},
            'language': {'type': 'string'},
            'overview': {'type': 'string'},
            'tagline': {'type': 'string'},
            'rating': {'type': ['number', 'null']},
            'votes': {'type': ['integer', 'null']},
            'popularity': {'type': ['number', 'null']},
            'adult': {'type': 'boolean'},
            'budget': {'type': ['integer', 'null']},
            'revenue': {'type': ['integer', 'null']},
            'homepage': {'type': ['string', 'null'], 'format': 'uri'},
            'posters': {'type': 'array', 'items': poster_object},
            'backdrops': {'type': 'array', 'items': poster_object},
            'genres': {'type': 'array', 'items': {'type': 'string'}},
            'updated': {'type': 'string', 'format': 'date-time'},
            'lookup_language': {'type': ['string', 'null']},
        },
        'required': [
            'id',
            'name',
            'year',
            'original_name',
            'alternative_name',
            'runtime',
            'language',
            'overview',
            'tagline',
            'rating',
            'votes',
            'popularity',
            'adult',
            'budget',
            'revenue',
            'homepage',
            'genres',
            'updated',
        ],
        'additionalProperties': False,
    }
# Shared description: shown in the endpoint docs and used as the BadRequest
# message when no identifying argument is supplied.
description = 'Either title, TMDB ID or IMDB ID are required for a lookup'

return_schema = api.schema_model('tmdb_search_schema', ObjectsContainer.movie_object)

tmdb_parser = api.parser()
tmdb_parser.add_argument('title', help='Movie title')
tmdb_parser.add_argument('tmdb_id', help='TMDB ID')
tmdb_parser.add_argument('imdb_id', help='IMDB ID')
tmdb_parser.add_argument('language', help='ISO 639-1 language code')
tmdb_parser.add_argument('year', type=int, help='Movie year')
tmdb_parser.add_argument('only_cached', type=int, help='Return only cached results')
tmdb_parser.add_argument(
    'include_posters', type=inputs.boolean, default=False, help='Include posters in response'
)
# Note: 'include_backdrops' was previously registered twice; the redundant
# duplicate add_argument call has been removed.
tmdb_parser.add_argument(
    'include_backdrops', type=inputs.boolean, default=False, help='Include backdrops in response'
)
@tmdb_api.route('/movies/')
@api.doc(description=description)
class TMDBMoviesAPI(APIResource):
    """Lookup endpoint returning TMDB movie data for a title/TMDB ID/IMDB ID."""

    @etag(cache_age=3600)
    @api.response(200, model=return_schema)
    @api.response(NotFoundError)
    @api.response(BadRequest)
    @api.doc(parser=tmdb_parser)
    def get(self, session=None):
        """Get TMDB movie data."""
        args = tmdb_parser.parse_args()
        title = args.get('title')
        tmdb_id = args.get('tmdb_id')
        imdb_id = args.get('imdb_id')
        # Pop the include flags so they are not forwarded to lookup() below;
        # they only control the shape of the response.
        posters = args.pop('include_posters', False)
        backdrops = args.pop('include_backdrops', False)
        # At least one identifying argument is required.
        if not (title or tmdb_id or imdb_id):
            raise BadRequest(description)
        lookup = plugin.get('api_tmdb', 'tmdb.api').lookup
        try:
            # Remaining parsed args (title/ids/language/year/only_cached) are
            # passed straight through to the lookup plugin.
            movie = lookup(session=session, **args)
        except LookupError as e:
            raise NotFoundError(e.args[0])
        return_movie = movie.to_dict()
        if posters:
            return_movie['posters'] = [p.to_dict() for p in movie.posters]
        if backdrops:
            return_movie['backdrops'] = [p.to_dict() for p in movie.backdrops]
        return jsonify(return_movie)
//===----------------------------------------------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
#include "UseConcisePreprocessorDirectivesCheck.h"
#include "clang/Basic/TokenKinds.h"
#include "clang/Lex/Lexer.h"
#include "clang/Lex/PPCallbacks.h"
#include "clang/Lex/Preprocessor.h"
#include <array>
namespace clang::tidy::readability {
namespace {
// Preprocessor hook that flags "#if defined(M)" (and, where the dialect
// allows it, "#elif defined(M)") conditions that can be rewritten with the
// more concise #ifdef/#ifndef (#elifdef/#elifndef) directives.
class IfPreprocessorCallbacks final : public PPCallbacks {
public:
  IfPreprocessorCallbacks(ClangTidyCheck &Check, const Preprocessor &PP)
      : Check(Check), PP(PP) {}
  // #if is always eligible: #ifdef/#ifndef exist in every dialect.
  void If(SourceLocation Loc, SourceRange ConditionRange,
          ConditionValueKind) override {
    impl(Loc, ConditionRange, {"ifdef", "ifndef"});
  }
  // #elifdef/#elifndef were only introduced in C23 and C++23.
  void Elif(SourceLocation Loc, SourceRange ConditionRange, ConditionValueKind,
            SourceLocation) override {
    if (PP.getLangOpts().C23 || PP.getLangOpts().CPlusPlus23)
      impl(Loc, ConditionRange, {"elifdef", "elifndef"});
  }
private:
  // Re-lexes the directive's condition and, if it is exactly a (possibly
  // negated, possibly parenthesized) `defined` check of one macro, emits a
  // diagnostic with fix-its. Replacements[0]/[1] are the plain/negated
  // directive spellings.
  void impl(SourceLocation DirectiveLoc, SourceRange ConditionRange,
            const std::array<StringRef, 2> &Replacements) {
    // Lexer requires its input range to be null-terminated.
    SmallString<128> Condition =
        Lexer::getSourceText(CharSourceRange::getTokenRange(ConditionRange),
                             PP.getSourceManager(), PP.getLangOpts());
    Condition.push_back('\0');
    // Buffer end excludes the trailing '\0' appended above.
    Lexer Lex(DirectiveLoc, PP.getLangOpts(), Condition.data(),
              Condition.data(), Condition.data() + Condition.size() - 1);
    Token Tok;
    bool Inverted = false; // The inverted form of #*def is #*ndef.
    std::size_t ParensNestingDepth = 0;
    // Consume leading negations ('!' or, in C++, the 'not' keyword) and
    // opening parentheses, tracking negation parity and paren nesting;
    // stop at the first other token.
    for (;;) {
      if (Lex.LexFromRawLexer(Tok))
        return;
      if (Tok.is(tok::TokenKind::exclaim) ||
          (PP.getLangOpts().CPlusPlus &&
           Tok.is(tok::TokenKind::raw_identifier) &&
           Tok.getRawIdentifier() == "not"))
        Inverted = !Inverted;
      else if (Tok.is(tok::TokenKind::l_paren))
        ++ParensNestingDepth;
      else
        break;
    }
    // The first real token must be the 'defined' operator.
    if (Tok.isNot(tok::TokenKind::raw_identifier) ||
        Tok.getRawIdentifier() != "defined")
      return;
    bool NoMoreTokens = Lex.LexFromRawLexer(Tok);
    // 'defined(M)' form: account for the extra parenthesis.
    if (Tok.is(tok::TokenKind::l_paren)) {
      if (NoMoreTokens)
        return;
      ++ParensNestingDepth;
      NoMoreTokens = Lex.LexFromRawLexer(Tok);
    }
    if (Tok.isNot(tok::TokenKind::raw_identifier))
      return;
    const StringRef Macro = Tok.getRawIdentifier();
    // Everything after the macro name must be closing parentheses that
    // balance the ones opened earlier; any other token disqualifies.
    while (!NoMoreTokens) {
      NoMoreTokens = Lex.LexFromRawLexer(Tok);
      if (Tok.isNot(tok::TokenKind::r_paren))
        return;
      --ParensNestingDepth;
    }
    if (ParensNestingDepth != 0)
      return;
    // Fix-its: swap the directive keyword and collapse the condition to the
    // bare macro name, e.g. "#if defined(FOO)" -> "#ifdef FOO".
    Check.diag(
        DirectiveLoc,
        "preprocessor condition can be written more concisely using '#%0'")
        << FixItHint::CreateReplacement(
               CharSourceRange::getCharRange(DirectiveLoc,
                                             ConditionRange.getBegin()),
               (Replacements[Inverted].str() + " "))
        << FixItHint::CreateReplacement(ConditionRange, Macro)
        << Replacements[Inverted];
  }
  ClangTidyCheck &Check;
  const Preprocessor &PP;
};
} // namespace
void UseConcisePreprocessorDirectivesCheck::registerPPCallbacks(
    const SourceManager &, Preprocessor *PP, Preprocessor *) {
  // All the work happens in the PPCallbacks; this check uses no AST matchers.
  PP->addPPCallbacks(std::make_unique<IfPreprocessorCallbacks>(*this, *PP));
}
} // namespace clang::tidy::readability | cpp | github | https://github.com/llvm/llvm-project | clang-tools-extra/clang-tidy/readability/UseConcisePreprocessorDirectivesCheck.cpp |
<?php
namespace Illuminate\Auth\Access;
use Exception;
use Throwable;
class AuthorizationException extends Exception
{
    /**
     * The gate response that produced this exception, if any.
     *
     * @var \Illuminate\Auth\Access\Response
     */
    protected $response;

    /**
     * The HTTP status code to use when rendering this exception.
     *
     * @var int|null
     */
    protected $status;

    /**
     * Create a new authorization exception instance.
     *
     * @param string|null $message
     * @param mixed $code
     * @param \Throwable|null $previous
     */
    public function __construct($message = null, $code = null, ?Throwable $previous = null)
    {
        if ($message === null) {
            $message = 'This action is unauthorized.';
        }

        parent::__construct($message, 0, $previous);

        $this->code = $code ? $code : 0;
    }

    /**
     * Retrieve the gate response associated with this exception.
     *
     * @return \Illuminate\Auth\Access\Response
     */
    public function response()
    {
        return $this->response;
    }

    /**
     * Associate a gate response with this exception.
     *
     * @param \Illuminate\Auth\Access\Response $response
     * @return $this
     */
    public function setResponse($response)
    {
        $this->response = $response;

        return $this;
    }

    /**
     * Specify the HTTP status code that should be used for the response.
     *
     * @param int|null $status
     * @return $this
     */
    public function withStatus($status)
    {
        $this->status = $status;

        return $this;
    }

    /**
     * Render this exception as a 404 (not found) response.
     *
     * @return $this
     */
    public function asNotFound()
    {
        return $this->withStatus(404);
    }

    /**
     * Determine whether an HTTP status code has been assigned.
     *
     * @return bool
     */
    public function hasStatus()
    {
        return ! is_null($this->status);
    }

    /**
     * Retrieve the assigned HTTP status code, if any.
     *
     * @return int|null
     */
    public function status()
    {
        return $this->status;
    }

    /**
     * Convert this exception into an equivalent deny response.
     *
     * @return \Illuminate\Auth\Access\Response
     */
    public function toResponse()
    {
        $denial = Response::deny($this->message, $this->code);

        return $denial->withStatus($this->status);
    }
}
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Presubmit script for Chromium browser resources.
See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
for more details about the presubmit API built into gcl/git cl, and see
http://www.chromium.org/developers/web-development-style-guide for the rules
we're checking against here.
"""
import os
import struct
class InvalidPNGException(Exception):
  """Raised when a file is not a well-formed PNG (bad signature or IHDR)."""
  pass
class ResourceScaleFactors(object):
  """Verifier of image dimensions for Chromium resources.

  This class verifies the image dimensions of resources in the various
  resource subdirectories.

  Attributes:
    paths: An array of tuples giving the folders to check and their
        relevant scale factors. For example:
        [(100, 'default_100_percent'), (200, 'default_200_percent')]
  """

  def __init__(self, input_api, output_api, paths):
    """Initializes ResourceScaleFactors with paths."""
    self.input_api = input_api
    self.output_api = output_api
    self.paths = paths

  def RunChecks(self):
    """Verifies the scale factors of resources being added or modified.

    Returns:
      An array of presubmit errors if any images were detected not
      having the correct dimensions.
    """
    def ImageSize(filename):
      # Read only the 8-byte PNG signature plus the IHDR chunk header; the
      # width and height are the first two big-endian int32s of IHDR.
      with open(filename, 'rb', buffering=0) as f:
        data = f.read(24)
      # Compare against bytes literals so this works identically under
      # Python 2 and Python 3 (a str comparison against bytes would always
      # be unequal on Python 3, flagging every PNG as corrupt).
      if data[:8] != b'\x89PNG\r\n\x1A\n' or data[12:16] != b'IHDR':
        raise InvalidPNGException
      return struct.unpack('>ii', data[16:24])

    # Returns a list of valid scaled image sizes. The valid sizes are the
    # floor and ceiling of (base_size * scale_percent / 100). This is
    # equivalent to requiring that the actual scaled size is less than one
    # pixel away from the exact scaled size.
    def ValidSizes(base_size, scale_percent):
      # Explicit floor division keeps Python 2's integer '/' semantics and
      # is also correct under Python 3.
      return sorted(set([(base_size * scale_percent) // 100,
                         (base_size * scale_percent + 99) // 100]))

    repository_path = self.input_api.os_path.relpath(
        self.input_api.PresubmitLocalPath(),
        self.input_api.change.RepositoryRoot())
    results = []
    # Collect the repo-relative paths of affected PNGs under any of the
    # configured scale-factor directories (deduplicated).
    affected_files = self.input_api.AffectedFiles(include_deletes=False)
    files = []
    for f in affected_files:
      for path_spec in self.paths:
        path_root = self.input_api.os_path.join(
            repository_path, path_spec[1])
        if (f.LocalPath().endswith('.png') and
            f.LocalPath().startswith(path_root)):
          # Only save the relative path from the resource directory.
          relative_path = self.input_api.os_path.relpath(f.LocalPath(),
              path_root)
          if relative_path not in files:
            files.append(relative_path)

    corrupt_png_error = ('Corrupt PNG in file %s. Note that binaries are not '
        'correctly uploaded to the code review tool and must be directly '
        'submitted using the dcommit command.')
    for f in files:
      # paths[0] is the base (100%) directory; every image must exist there.
      base_image = self.input_api.os_path.join(self.paths[0][1], f)
      if not os.path.exists(base_image):
        results.append(self.output_api.PresubmitError(
            'Base image %s does not exist' % self.input_api.os_path.join(
            repository_path, base_image)))
        continue
      try:
        base_dimensions = ImageSize(base_image)
      except InvalidPNGException:
        results.append(self.output_api.PresubmitError(corrupt_png_error %
            self.input_api.os_path.join(repository_path, base_image)))
        continue
      # Find all scaled versions of the base image and verify their sizes.
      for i in range(1, len(self.paths)):
        image_path = self.input_api.os_path.join(self.paths[i][1], f)
        if not os.path.exists(image_path):
          continue
        # Ensure that each image for a particular scale factor is the
        # correct scale of the base image.
        try:
          scaled_dimensions = ImageSize(image_path)
        except InvalidPNGException:
          results.append(self.output_api.PresubmitError(corrupt_png_error %
              self.input_api.os_path.join(repository_path, image_path)))
          continue
        for dimension_name, base_size, scaled_size in zip(
            ('width', 'height'), base_dimensions, scaled_dimensions):
          valid_sizes = ValidSizes(base_size, self.paths[i][0])
          if scaled_size not in valid_sizes:
            results.append(self.output_api.PresubmitError(
                'Image %s has %s %d, expected to be %s' % (
                self.input_api.os_path.join(repository_path, image_path),
                dimension_name,
                scaled_size,
                ' or '.join(map(str, valid_sizes)))))
    return results
// Copyright 2022 The etcd Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package mvcc
import (
"fmt"
"math"
"testing"
"time"
"github.com/stretchr/testify/assert"
"go.uber.org/zap/zaptest"
"go.etcd.io/etcd/server/v3/storage/backend"
betesting "go.etcd.io/etcd/server/v3/storage/backend/testing"
"go.etcd.io/etcd/server/v3/storage/schema"
)
// TestScheduledCompact ensures that UnsafeSetScheduledCompact&UnsafeReadScheduledCompact work well together.
func TestScheduledCompact(t *testing.T) {
tcs := []struct {
value int64
}{
{
value: 1,
},
{
value: 0,
},
{
value: math.MaxInt64,
},
}
for _, tc := range tcs {
t.Run(fmt.Sprint(tc.value), func(t *testing.T) {
lg := zaptest.NewLogger(t)
be, tmpPath := betesting.NewTmpBackend(t, time.Microsecond, 10)
tx := be.BatchTx()
if tx == nil {
t.Fatal("batch tx is nil")
}
tx.Lock()
tx.UnsafeCreateBucket(schema.Meta)
UnsafeSetScheduledCompact(tx, tc.value)
tx.Unlock()
be.ForceCommit()
be.Close()
b := backend.NewDefaultBackend(lg, tmpPath)
defer b.Close()
v, found := UnsafeReadScheduledCompact(b.BatchTx())
assert.True(t, found)
assert.Equal(t, tc.value, v)
})
}
}
// TestFinishedCompact ensures that UnsafeSetFinishedCompact&UnsafeReadFinishedCompact work well together.
func TestFinishedCompact(t *testing.T) {
tcs := []struct {
value int64
}{
{
value: 1,
},
{
value: 0,
},
{
value: math.MaxInt64,
},
}
for _, tc := range tcs {
t.Run(fmt.Sprint(tc.value), func(t *testing.T) {
lg := zaptest.NewLogger(t)
be, tmpPath := betesting.NewTmpBackend(t, time.Microsecond, 10)
tx := be.BatchTx()
if tx == nil {
t.Fatal("batch tx is nil")
}
tx.Lock()
tx.UnsafeCreateBucket(schema.Meta)
UnsafeSetFinishedCompact(tx, tc.value)
tx.Unlock()
be.ForceCommit()
be.Close()
b := backend.NewDefaultBackend(lg, tmpPath)
defer b.Close()
v, found := UnsafeReadFinishedCompact(b.BatchTx())
assert.True(t, found)
assert.Equal(t, tc.value, v)
})
}
} | go | github | https://github.com/etcd-io/etcd | server/storage/mvcc/store_test.go |
# Copyright 2012 Nebula, Inc.
# Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo.config import cfg
from nova.compute import vm_states
from nova.tests.integrated.v3 import test_servers
CONF = cfg.CONF
CONF.import_opt('osapi_hide_server_address_states',
'nova.api.openstack.compute.plugins.v3.hide_server_addresses')
class ServersSampleHideAddressesJsonTest(test_servers.ServersSampleJsonTest):
    # Extension under test; also reused as the sample directory name below.
    extension_name = 'os-hide-server-addresses'
    # Override the sample dirname because
    # test_servers.ServersSampleJsonTest does and so it won't default
    # to the extension name.
    sample_dir = extension_name
    def setUp(self):
        # Override osapi_hide_server_address_states so that the JSON
        # samples contain an example of addresses being hidden for
        # ACTIVE servers. Must happen before the parent setUp runs.
        CONF.set_override("osapi_hide_server_address_states",
                          [vm_states.ACTIVE])
        super(ServersSampleHideAddressesJsonTest, self).setUp()
- Feature Name: Index Recommendation Engine
- Status: in-progress
- Start Date: 2018-10-18
- Authors: Neha George
- RFC PR: [#71784](https://github.com/cockroachdb/cockroach/pull/71784)
- Cockroach Issue:
# Summary
This document describes an "index recommendation engine" that would suggest table indexes for CockroachDB users to add. As of now, users do not have insight regarding the contribution of indexes to their workload's performance.
This will be done by strategically selecting index subsets that *could* improve performance, and then using the optimizer costing algorithm to determine the best overall subset.
The potential impact of this project is broad: any user whose workload lacks well-chosen indexes could see improved performance.
# Motivation
The main motivation behind this project is to capitalize on CockroachDB's performance potential.
Adding certain indexes can have drastic impacts on the performance of a query, and if this said query is executed repeatedly, the performance discrepancy is all the more important.
Index recommendation is universally applicable, meaning that this project could be used by all customers. The expected outcome is improved query performance on average.
# Technical design
## User Stories
The PMs in this area are Kevin Ngo and Vy Ton. There are existing user stories, linked [here](https://cockroachlabs.atlassian.net/wiki/spaces/SQLOBS/pages/2285207635/Index+recommendations+draft).
The tentative plan is to start with manual single-statement index recommendations of indexes to add to the database, including index recommendations with STORING columns (to potentially avoid lookup joins).
This functionality will then be extended to workload recommendations of indexes to add. From here, automatic recommendations of indexes to add *and* indexes to drop can be considered.
## General Overview
### Single Statement Recommendations
To begin, we will have a single-statement index recommendation feature. This will be implemented first as the logic it uses can be expanded to support workload-level recommendations, discussed next in the RFC.
The feature will output recommendations of indexes to add that will optimize a given query, which lends well to having it included with the `EXPLAIN` syntax, below the query plan.
There will be index recommendations in a table format, including the index columns, their direction, and SQL command to run to add each index.
For a single statement, the flow is planned as follows:
- Run the optbuilder's build function and walk through the statement to determine potential candidates, ensuring they do not already exist as indexes and that there are no duplicates.
- Add hypothetical indexes for each of the potential candidates for which an index does not already exist.
- Run the optimizer with these hypothetical indexes included and determine which potential candidates (if any) are actually being used in the optimal plan to find the "recommendation set."
- Connect this flow to the `EXPLAIN` output, showing the final recommendation set.
These ideas will be extended for workload recommendations, with the fundamental recommendation set and hypothetical index concepts being reused.
### Workload Recommendations
In the background, we collect SQL statements that are executed by the user. This is stored in an existing table, `crdb_internal.statement_statistics`.
Information regarding execution count and latencies, which will be used to assess a statement's tuning priority, can be obtained from this table.
There are three proposed interfaces, which will be implemented in the given order.
1. There is a new built-in function for index recommendations that takes in no parameters, called `crdb_internal.generate_index_recommendations()`. It generates index recommendations for the current database.
Using the collected SQL statements, we then run the index recommendation algorithm that takes the SQL statements as input and outputs index recommendations.
The index recommendations will populate a new `crdb_internal.index_recommendations` virtual table, stored in memory, with the following columns: `create_date`, `table`, `columns`, and `stored_columns`, which can then later be queried.
The data types of the columns would be as follows:
- `create_date`: TIMESTAMPTZ
- `table`: INT_8 (table ID)
- `columns`: an array of JSONB with each entry storing a column's column ID (INT_8) and direction (boolean representing ascending or descending).
- `stored_columns`: an array of integers with each column ID (INT_8)
2. We generate and surface index recommendations in the DB and CC console. There would be a UI showing the recommendations of indexes to create and drop in a table view, with an associated impact score or metric.
How this metric is determined is uncertain, but it would be based on the frequency of that index's use in the workload and its cost-impact on the statements which use it.
3. We automatically run the index recommendation algorithm periodically in the background and tune the database without user input.
This is an end goal, after we have refined our index recommendations and are confident in them. Similar to statistics creation, this would become a job that runs in the background.
The frequency of this would be configurable, and would also depend on the activity levels of the database (i.e. only run when there is low activity).
For a given user database no matter what the interface, a sample workload *W* must be determined, from which index recommendations can be decided.
*W* contains the top *x* DML statements ranked by total cost (execution count × statement latency), where *x* must be large enough that the summed cost of the selected statements exceeds some threshold.
If this is not possible, we return an error to the user stating that there is not enough information to recommend indexes.
This is operating under the claim that indexes can only benefit and adversely impact DML-type statements.
Next we determine each statement's corresponding index recommendation set, if the statement has one (otherwise it will just be the empty set).
These statement recommendation sets are sets of indexes that are tailored to the statement and will potentially be recommended to the user to improve overall workload performance.
A statement will have a recommendation set if and only if it benefits from the addition of indexes.
From here, the optimizer costing algorithm is used to determine which amalgamated index set should be recommended to the user.
## Determining the Recommendation Set for a Single Statement
There are a large number of possible indexes for a given statement *(S)* that uses one or more tables, so we choose a candidate set [[1]](http://www.cs.toronto.edu/~alan/papers/icde00.pdf) [[2]](https://baozhifeng.net/papers/cikm20-IndexRec.pdf).
Choose *S's* candidate set as follows:
- Separate attributes that appear in *S* into 5 categories:
- **J**: Attributes that appear in JOIN conditions
- **R**: Attributes that appear in range or comparison conditions
- **EQ**: Attributes that appear in EQUAL conditions
- **O**: Attributes that appear in GROUP BY or ORDER BY clauses
- **USED**: Attributes that are referenced anywhere in the statement that are not in the above categories.
- Note that in order to access this information, we need to parse the statement string and build the canonical expression so that `tree.UnresolvedName` types are resolved.
- Using these categories, follow a set of rules to create candidate indexes. For succinctness, only some rules are listed.
These indexes are all ascending, except for multi-column indexes created from an ORDER BY, where each column's direction follows the direction of the ordering. By default, the first column of such an index is ordered ascending; if the ORDER BY requires the first column to be descending, the direction of every column in the index is inverted together (and likewise in the reverse case). This keeps index definitions canonical and avoids recommending redundant mirror-image indexes.
- When **O** attributes come from a single table, create an index using all attributes from that ordering/grouping.
- Create single-attribute indexes from **J**, **R**, and **EQ**.
- If there are join conditions with multiple attributes from a single table, create a single index on these attributes.
- Inject these indexes as *hypothetical indexes* into the schema and optimize the single statement (more information about hypothetical indexes in the following section).
Take every index that was used in the optimal plan and put it in this statement's recommendation set.
Consider this sample SQL query:
```sql
SELECT a FROM s JOIN t ON s.x = t.x
WHERE (s.x = s.y AND t.z > 10 AND t.z < 20)
ORDER BY s.y, s.z;
```
From this query and the rules listed, we would have for table *s* indexes:
```
J: (x)
EQ: (x), (y)
O: (y, z)
```
For table *t*:
```
J: (x)
R: (z)
```
From here, we would construct the recommendation set of the query, which could result in indexes on either table, on no table, or on both tables.
The reason not all candidate indexes are included is due to the fact that we only choose indexes used in the optimizer's best query plan.
To reiterate, the recommendation set depends on the plan chosen by the optimizer, while the candidate set does not.
## Using the Optimizer Costing Algorithm with Workload Recommendations
### Overview
The next step is applying the optimizer costing algorithm to determine the best set of indexes for the given workload *W*. That is, find a set of indexes *X* such that *Cost(W, X)* is minimized.
For each statement's recommendation set, determine the optimizer cost of *W* if that index subset were to be applied. Choose the statement recommendation sets with the lowest *Cost(W, X)*.
We must then check for index overlap and remove similar/identical indexes to avoid redundancy. An example of two similar indexes is having an index on `(x, y)` and then also having an index on `(x, y, z)`.
A strategy to determine which index to remove would be running *W* again with our chosen indexes, and of the redundant indexes choose the one that has the highest worth.
Meaning, the sum of the frequencies of the statements in which the index is used is the highest. When we re-run *W*, we should also remove any chosen indexes that are unused.
At this time, potential indexes can be compared with existing indexes. If indexes we want to recommend include existing indexes, we omit those recommendations.
In the case that no indexes remain, it means that no indexes will be recommended to the user to add.
In a similar way, if the addition of our hypothetical indexes caused some existing indexes to become unused or rarely used, we would recommend that these indexes be deleted.
If the index is still occasionally used, we need to ensure that removing it does not negatively affect overall performance. There would be some heuristics we make use of to do this.
To fully ensure that this has not caused a regression however, we should re-run *W* with the index hypothetically dropped.
Additionally, before we delete an index, we should ensure that no queries are regularly using hints to force that index.
After this step, we have our optimal *X* that will be recommended to the user.
In terms of the final output, we will have recommended indexes, if these recommendations exist. In addition, we should have a quantifiable "impact score" associated with an index recommendation that we can use to justify why the index would be beneficial to users.
We can also include further information with this, such as which queries are affected and/or affected the most. For "drop" recommendations, we should have a similar metric.
### Issues and Considerations
One issue with this approach is that it can become a feedback loop where adding new indexes affects the existing query plans, so we remove them, and then that allows for new potential indexes to be useful.
The existence of this feedback loop means that the final recommendation set may not be the most optimal.
This is a tradeoff that must be accepted - otherwise the algorithm could run infinitely. Plus, even if the recommendation set is not the *most* optimal, it will have still been proven to improve the sample workload performance, which is beneficial.
Another heuristic that could be added is random swapping of a small subset of indexes being tried with other indexes that were found in *W's* recommendation sets. If the total cost of *W* is lower with this random swapping, we keep this configuration and continue as described above.
The number of times this would be tried would be limited, to avoid having an inefficient algorithm.
Implementation will begin with recommending potential indexes to add, followed by recommending indexes to remove. As an aside, we can also independently use index usage metrics to determine if there are any unused indexes that we should recommend be deleted.
An additional issue is the lack of histograms on non-indexed columns. This will impact the plan that is chosen by the optimizer. Since statistics collection is a long and involved task, there is no clear way of mitigating this.
Instead, this is a limitation that we must accept for now, especially since this will not stop us from making recommendations (it will just potentially impact their quality).
It might be beneficial to also factor in the cost of index writes and index creation in our recommendation algorithm, which is not done by the optimizer's costing algorithm. For database reads, indexes can only have positive impact, whereas for writes, they can have a negative impact.
Also, creating an index has a storage cost. Deciding a *fair* cost to associate with creating an index and maintaining an index for database writes is a pending task.
This is largely dependent on user preference, as some users might prioritize read performance over write performance, and vice versa.
To handle this, we could have user settings that allow the user to indicate their preference, which will then affect the cost we use internally.
These user settings should be specific to each "application name", to deal with the fact that some applications may be more latency-sensitive than others.
Furthermore, creating and removing indexes has significant overhead, so we will use hypothetical indexes instead. There is an existing [prototype PR](https://github.com/cockroachdb/cockroach/pull/66111/) for this. However, these indexes persist to disk, and for our purposes, we only need the indexes in memory.
We will need to additionally create fake tables that we can tamper with, without interfering with planning on concurrent queries. The implementation idea is to create a struct that wraps `optTable` with additional information pertaining to the table's hypothetical indexes.
Our hypothetical indexes will be added and removed from this table, that is otherwise identical to the regular table.
Moreover, when recommending the removal of indexes we must be cautious with `UNIQUE` indexes. If a unique index already exists, we have to ensure that we do not remove the unique constraint. This can easily be done by keeping unique indexes.
For additional flexibility, however, we could consider adding a `UNIQUE WITHOUT INDEX` constraint, which would allow the unique index's removal.
Running the costing algorithm so many times is another hurdle in terms of computational cost.
We run the algorithm once per statement in *W* for each statement's recommendation set, which is O(|W|²) time — quadratic in the size of the workload.
Since the queries we are concerned with are a subset of the statements in *W*, the time complexity is not a tight upper bound. However, it shows that this algorithm has roughly quadratic time complexity, which is quite slow.
A way of mitigating this is by only allowing the recommendation algorithm to be run if database utilisation is below a certain threshold, similar to what Azure does [here](https://cockroachlabs.atlassian.net/wiki/spaces/SQLOBS/pages/2252767632/Index+recommendations#:~:text=it%E2%80%99s%20postponed%20if%2080%25%20resource%20utilizated.).
Another way of mitigating this is by ensuring the sample workload is a meaningful *sample* which is not too large. We do this by limiting the size of the sample workload when we fetch it.
If performance continues to be an issue, which is highly applicable for auto-tuning, this functionality can be disabled. Alternatively, a setting can be configured to tune the database less frequently.
When considering serverless' pricing based on RU consumption, this type of flexibility is vital.
Finally, a general issue with this project would be recommending indexes that slow down overall performance. In theory, with proper design, this should not be a major issue.
That said, there are cases in which this would happen. Since maintaining an index has an associated cost, it's not always beneficial to add more indexes. Thus, a certain middle ground must be achieved.
It is possible that this middle ground is most optimal for the SQL statements considered when choosing recommendations, but in practice the workload's demands could fluctuate.
Determining useful index recommendations in such a situation is a difficult task.
Still, in most cases, one can expect the workload's general characteristics to be consistent.
Also, index recommendations would only be made if there is enough sample data to do so. Meaning, index recommendations would always be based on patterns observed in a significant sample size.
## Rationale and Alternatives
For the single-statement recommendations, another suggested interface was adding index recommendations to a separate `EXPLAIN` option, as opposed to adding it to a vanilla `EXPLAIN`.
An advantage of this is it avoids cluttering the `EXPLAIN` output with unexpected information.
However, this would add new syntax that could confuse users. It would also reduce the visibility of the feature, and since users who run `EXPLAIN` often want to see how the plan can be improved, having index recommendations in the same view would be helpful.
Thus, it was decided that this would be included in the vanilla `EXPLAIN`.
To determine the statement recommendation set, a simpler heuristic could easily be used.
For example, the candidate set could be all single column indexes for attributes that appear in the query.
The recommendation set would still be determined by running the optimizer with all indexes in the candidate set added.
The reason this more involved method is chosen is that it considers more complex indexes that could potentially further improve performance.
Another portion of the algorithm is the optimizer costing. A viable alternative to this, seen in modern literature, would be using ML-based modelling to choose indexes from statement recommendation sets.
However, this seemed like overkill for our purposes. Although an impressive feat in academia, a simpler algorithm using our existing optimizer infrastructure can achieve largely the same goal.
Thus, it made sense to use our optimizer costing algorithm.
The impact of not doing this project at all is significant since established databases offer index recommendation. In not doing so, we are missing an important feature that some consumers expect.
# Unresolved questions
- **Will the engine recommend partial indexes, inverted indexes, or hash-sharded indexes?**
This algorithm will not consider these types of indexes to begin with as determining heuristics to recommend them is more difficult (notably partial and hash-sharded indexes). This could be an extension in the future.
A known limitation with not recommending hash-sharded indexes is the potential creation of hotspot ranges, see this [blog post](https://www.cockroachlabs.com/blog/hash-sharded-indexes-unlock-linear-scaling-for-sequential-workloads/).
- **How will we cost writes and index creation?** This is TBD. General ideas can be determined experimentally, as development of this feature is underway.
One can create test data, SQL write statements and SQL queries in order to determine an index creation and update costing mechanism that makes sense.
- **Could we recommend changes other than adding or removing indexes?** Although this RFC deals with index recommendation specifically, there are other ways to tune the database to optimize performance.
For example, in a multi-region database, we could recommend the conversion of regional tables to global tables (and vice versa). These types of additional recommendations can be explored in the future, in a separate RFC.
## References
[1] http://www.cs.toronto.edu/~alan/papers/icde00.pdf
[2] https://baozhifeng.net/papers/cikm20-IndexRec.pdf | unknown | github | https://github.com/cockroachdb/cockroach | docs/RFCS/20211112_index_recommendation.md |
#!/usr/bin/env python
#
# Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
# https://developers.google.com/protocol-buffers/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""A conformance test implementation for the Python protobuf library.
See conformance.proto for more information.
"""
import struct
import sys
import os
from google.protobuf import json_format
from google.protobuf import message
from google.protobuf import test_messages_proto3_pb2
import conformance_pb2
# Reopen stdio in unbuffered binary mode: the conformance protocol is a
# length-prefixed binary stream, so text decoding or output buffering
# would corrupt the framing.
sys.stdout = os.fdopen(sys.stdout.fileno(), 'wb', 0)
sys.stdin = os.fdopen(sys.stdin.fileno(), 'rb', 0)
# Number of test cases processed so far (incremented in do_test_io).
test_count = 0
# When True, each request/response pair is echoed to stderr.
verbose = False
class ProtocolError(Exception):
  """Raised when a conformance request violates the expected protocol."""
def do_test(request):
  """Run one conformance test case and return a ConformanceResponse.

  Parses the payload carried by *request* (protobuf binary or JSON) into a
  TestAllTypes message, then re-serializes it in the requested output
  format.  Parse and serialize failures are reported through the
  corresponding response fields instead of being raised; any other error
  is captured in ``runtime_error``.
  """
  response = conformance_pb2.ConformanceResponse()
  # Single instance; the original code constructed this message twice.
  test_message = test_messages_proto3_pb2.TestAllTypes()

  try:
    # Hoisted: WhichOneof was previously evaluated once per branch.
    payload_kind = request.WhichOneof('payload')
    if payload_kind == 'protobuf_payload':
      try:
        test_message.ParseFromString(request.protobuf_payload)
      except message.DecodeError as e:
        response.parse_error = str(e)
        return response

    elif payload_kind == 'json_payload':
      try:
        json_format.Parse(request.json_payload, test_message)
      except Exception as e:
        response.parse_error = str(e)
        return response

    else:
      raise ProtocolError("Request didn't have payload.")

    if request.requested_output_format == conformance_pb2.UNSPECIFIED:
      raise ProtocolError("Unspecified output format")

    elif request.requested_output_format == conformance_pb2.PROTOBUF:
      response.protobuf_payload = test_message.SerializeToString()

    elif request.requested_output_format == conformance_pb2.JSON:
      try:
        response.json_payload = json_format.MessageToJson(test_message)
      except Exception as e:
        response.serialize_error = str(e)
        return response

  except Exception as e:
    response.runtime_error = str(e)

  return response
def do_test_io():
  """Serve one length-prefixed request from stdin and reply on stdout.

  Reads a 4-byte little-endian length, then that many bytes of serialized
  ConformanceRequest, runs the test, and writes the response back with the
  same framing.

  Returns:
    False on a clean EOF (the test runner is done), True otherwise.
  Raises:
    IOError: if a read is truncated mid-message.
  """
  length_bytes = sys.stdin.read(4)
  if len(length_bytes) == 0:
    return False  # EOF
  elif len(length_bytes) != 4:
    raise IOError("I/O error")

  # "I" is "unsigned int", so this depends on running on a platform with
  # 32-bit "unsigned int" type.  The Python struct module unfortunately
  # has no format specifier for uint32_t.
  length = struct.unpack("<I", length_bytes)[0]
  serialized_request = sys.stdin.read(length)
  if len(serialized_request) != length:
    raise IOError("I/O error")

  request = conformance_pb2.ConformanceRequest()
  request.ParseFromString(serialized_request)

  response = do_test(request)

  serialized_response = response.SerializeToString()
  sys.stdout.write(struct.pack("<I", len(serialized_response)))
  sys.stdout.write(serialized_response)
  sys.stdout.flush()

  if verbose:
    # Bug fix: Python messages have no ShortDebugString()/.c_str() — those
    # were C++ API leftovers and raised AttributeError whenever verbose
    # logging was enabled.  str() uses the message's text representation.
    sys.stderr.write("conformance_python: request=%s, response=%s\n" % (
                     str(request), str(response)))

  global test_count
  test_count += 1

  return True
# Main loop: serve conformance requests until the runner closes stdin.
while do_test_io():
  pass
sys.stderr.write("conformance_python: received EOF from test runner " +
                 "after %s tests, exiting\n" % (test_count))
sys.exit(0)
lockfileVersion: '9.0'
settings:
autoInstallPeers: true
excludeLinksFromLockfile: false
importers:
.:
dependencies:
'@angular/common':
specifier: link:./in-existing-linked-by-bazel
version: link:in-existing-linked-by-bazel
'@angular/compiler':
specifier: link:./in-existing-linked-by-bazel
version: link:in-existing-linked-by-bazel
'@angular/core':
specifier: link:./in-existing-linked-by-bazel
version: link:in-existing-linked-by-bazel
'@angular/forms':
specifier: link:./in-existing-linked-by-bazel
version: link:in-existing-linked-by-bazel
'@angular/platform-browser':
specifier: link:./in-existing-linked-by-bazel
version: link:in-existing-linked-by-bazel
'@angular/router':
specifier: link:./in-existing-linked-by-bazel
version: link:in-existing-linked-by-bazel
'@angular/ssr':
specifier: 21.1.0-rc.0
version: 21.1.0-rc.0(@angular/common@in-existing-linked-by-bazel)(@angular/core@in-existing-linked-by-bazel)(@angular/router@in-existing-linked-by-bazel)
rxjs:
specifier: ^7.0.0
version: 7.8.2
tslib:
specifier: ^2.3.0
version: 2.8.1
zone.js:
specifier: 0.16.0
version: 0.16.0
devDependencies:
'@angular-devkit/build-angular':
specifier: 21.1.0-rc.0
version: 21.1.0-rc.0(@angular/compiler-cli@in-existing-linked-by-bazel)(@angular/compiler@in-existing-linked-by-bazel)(@angular/core@in-existing-linked-by-bazel)(@angular/platform-browser@in-existing-linked-by-bazel)(@angular/ssr@21.1.0-rc.0(@angular/common@in-existing-linked-by-bazel)(@angular/core@in-existing-linked-by-bazel)(@angular/router@in-existing-linked-by-bazel))(@types/node@20.19.33)(jiti@2.6.1)(typescript@5.9.3)
'@angular/build':
specifier: 21.1.0-rc.0
version: 21.1.0-rc.0(@angular/compiler-cli@in-existing-linked-by-bazel)(@angular/compiler@in-existing-linked-by-bazel)(@angular/core@in-existing-linked-by-bazel)(@angular/platform-browser@in-existing-linked-by-bazel)(@angular/ssr@21.1.0-rc.0(@angular/common@in-existing-linked-by-bazel)(@angular/core@in-existing-linked-by-bazel)(@angular/router@in-existing-linked-by-bazel))(@types/node@20.19.33)(jiti@2.6.1)(less@4.4.2)(postcss@8.5.6)(terser@5.44.1)(tslib@2.8.1)(typescript@5.9.3)
'@angular/cli':
specifier: 21.1.0-rc.0
version: 21.1.0-rc.0(@types/node@20.19.33)(hono@4.11.9)
'@angular/compiler-cli':
specifier: link:./in-existing-linked-by-bazel
version: link:in-existing-linked-by-bazel
'@types/node':
specifier: ^20.14.8
version: 20.19.33
ts-node:
specifier: ^10.9.1
version: 10.9.2(@types/node@20.19.33)(typescript@5.9.3)
typescript:
specifier: 5.9.3
version: 5.9.3
packages:
'@algolia/abtesting@1.12.2':
resolution: {integrity: sha512-oWknd6wpfNrmRcH0vzed3UPX0i17o4kYLM5OMITyMVM2xLgaRbIafoxL0e8mcrNNb0iORCJA0evnNDKRYth5WQ==}
engines: {node: '>= 14.0.0'}
'@algolia/client-abtesting@5.46.2':
resolution: {integrity: sha512-oRSUHbylGIuxrlzdPA8FPJuwrLLRavOhAmFGgdAvMcX47XsyM+IOGa9tc7/K5SPvBqn4nhppOCEz7BrzOPWc4A==}
engines: {node: '>= 14.0.0'}
'@algolia/client-analytics@5.46.2':
resolution: {integrity: sha512-EPBN2Oruw0maWOF4OgGPfioTvd+gmiNwx0HmD9IgmlS+l75DatcBkKOPNJN+0z3wBQWUO5oq602ATxIfmTQ8bA==}
engines: {node: '>= 14.0.0'}
'@algolia/client-common@5.46.2':
resolution: {integrity: sha512-Hj8gswSJNKZ0oyd0wWissqyasm+wTz1oIsv5ZmLarzOZAp3vFEda8bpDQ8PUhO+DfkbiLyVnAxsPe4cGzWtqkg==}
engines: {node: '>= 14.0.0'}
'@algolia/client-insights@5.46.2':
resolution: {integrity: sha512-6dBZko2jt8FmQcHCbmNLB0kCV079Mx/DJcySTL3wirgDBUH7xhY1pOuUTLMiGkqM5D8moVZTvTdRKZUJRkrwBA==}
engines: {node: '>= 14.0.0'}
'@algolia/client-personalization@5.46.2':
resolution: {integrity: sha512-1waE2Uqh/PHNeDXGn/PM/WrmYOBiUGSVxAWqiJIj73jqPqvfzZgzdakHscIVaDl6Cp+j5dwjsZ5LCgaUr6DtmA==}
engines: {node: '>= 14.0.0'}
'@algolia/client-query-suggestions@5.46.2':
resolution: {integrity: sha512-EgOzTZkyDcNL6DV0V/24+oBJ+hKo0wNgyrOX/mePBM9bc9huHxIY2352sXmoZ648JXXY2x//V1kropF/Spx83w==}
engines: {node: '>= 14.0.0'}
'@algolia/client-search@5.46.2':
resolution: {integrity: sha512-ZsOJqu4HOG5BlvIFnMU0YKjQ9ZI6r3C31dg2jk5kMWPSdhJpYL9xa5hEe7aieE+707dXeMI4ej3diy6mXdZpgA==}
engines: {node: '>= 14.0.0'}
'@algolia/ingestion@1.46.2':
resolution: {integrity: sha512-1Uw2OslTWiOFDtt83y0bGiErJYy5MizadV0nHnOoHFWMoDqWW0kQoMFI65pXqRSkVvit5zjXSLik2xMiyQJDWQ==}
engines: {node: '>= 14.0.0'}
'@algolia/monitoring@1.46.2':
resolution: {integrity: sha512-xk9f+DPtNcddWN6E7n1hyNNsATBCHIqAvVGG2EAGHJc4AFYL18uM/kMTiOKXE/LKDPyy1JhIerrh9oYb7RBrgw==}
engines: {node: '>= 14.0.0'}
'@algolia/recommend@5.46.2':
resolution: {integrity: sha512-NApbTPj9LxGzNw4dYnZmj2BoXiAc8NmbbH6qBNzQgXklGklt/xldTvu+FACN6ltFsTzoNU6j2mWNlHQTKGC5+Q==}
engines: {node: '>= 14.0.0'}
'@algolia/requester-browser-xhr@5.46.2':
resolution: {integrity: sha512-ekotpCwpSp033DIIrsTpYlGUCF6momkgupRV/FA3m62SreTSZUKjgK6VTNyG7TtYfq9YFm/pnh65bATP/ZWJEg==}
engines: {node: '>= 14.0.0'}
'@algolia/requester-fetch@5.46.2':
resolution: {integrity: sha512-gKE+ZFi/6y7saTr34wS0SqYFDcjHW4Wminv8PDZEi0/mE99+hSrbKgJWxo2ztb5eqGirQTgIh1AMVacGGWM1iw==}
engines: {node: '>= 14.0.0'}
'@algolia/requester-node-http@5.46.2':
resolution: {integrity: sha512-ciPihkletp7ttweJ8Zt+GukSVLp2ANJHU+9ttiSxsJZThXc4Y2yJ8HGVWesW5jN1zrsZsezN71KrMx/iZsOYpg==}
engines: {node: '>= 14.0.0'}
'@ampproject/remapping@2.3.0':
resolution: {integrity: sha512-30iZtAPgz+LTIYoeivqYo853f02jBYSd5uGnGpkFV0M3xOt9aN73erkgYAmZU43x4VfqcnLxW9Kpg3R5LC4YYw==}
engines: {node: '>=6.0.0'}
'@angular-devkit/architect@0.2101.0-rc.0':
resolution: {integrity: sha512-QohNOLzTorQejqb4Kr5GFB9gCiaTuarmTza53g8oYjSMzB/zLDDN6O5e7SW6lp+wdfIla2mnUWyVBJSTNOa+Hg==}
engines: {node: ^20.19.0 || ^22.12.0 || >=24.0.0, npm: ^6.11.0 || ^7.5.6 || >=8.0.0, yarn: '>= 1.13.0'}
hasBin: true
'@angular-devkit/build-angular@21.1.0-rc.0':
resolution: {integrity: sha512-NjAPiPJaCB7jXGbX2Js+HMtebF9z24HMBkLZYTa/Moypfqpx0HMMC+Gn/1DrLw6xT39uET1JQRlUFtBkXFsRYw==}
engines: {node: ^20.19.0 || ^22.12.0 || >=24.0.0, npm: ^6.11.0 || ^7.5.6 || >=8.0.0, yarn: '>= 1.13.0'}
peerDependencies:
'@angular/compiler-cli': ^21.0.0 || ^21.1.0-next.0
'@angular/core': ^21.0.0 || ^21.1.0-next.0
'@angular/localize': ^21.0.0 || ^21.1.0-next.0
'@angular/platform-browser': ^21.0.0 || ^21.1.0-next.0
'@angular/platform-server': ^21.0.0 || ^21.1.0-next.0
'@angular/service-worker': ^21.0.0 || ^21.1.0-next.0
'@angular/ssr': ^21.1.0-rc.0
'@web/test-runner': ^0.20.0
browser-sync: ^3.0.2
jest: ^30.2.0
jest-environment-jsdom: ^30.2.0
karma: ^6.3.0
ng-packagr: ^21.0.0 || ^21.1.0-next.0
protractor: ^7.0.0
tailwindcss: ^2.0.0 || ^3.0.0 || ^4.0.0
typescript: '>=5.9 <6.0'
peerDependenciesMeta:
'@angular/core':
optional: true
'@angular/localize':
optional: true
'@angular/platform-browser':
optional: true
'@angular/platform-server':
optional: true
'@angular/service-worker':
optional: true
'@angular/ssr':
optional: true
'@web/test-runner':
optional: true
browser-sync:
optional: true
jest:
optional: true
jest-environment-jsdom:
optional: true
karma:
optional: true
ng-packagr:
optional: true
protractor:
optional: true
tailwindcss:
optional: true
'@angular-devkit/build-webpack@0.2101.0-rc.0':
resolution: {integrity: sha512-PkP9v/3PBpNA6RYY+79pG4c1BlalWKjC62Ccbfl1zPNGLKZ0Ie7LTsygACP3xRMkKd3dWtCsp7gGPTPlAmJU+g==}
engines: {node: ^20.19.0 || ^22.12.0 || >=24.0.0, npm: ^6.11.0 || ^7.5.6 || >=8.0.0, yarn: '>= 1.13.0'}
peerDependencies:
webpack: ^5.30.0
webpack-dev-server: ^5.0.2
'@angular-devkit/core@21.1.0-rc.0':
resolution: {integrity: sha512-BCMware2kmXEnmWJE8rCcX6p3LdqkGGR0GoEMafCuayM3EqOdJkBsQb8EptKypuZ7y7UREKjfBGs9Q8+WPFcng==}
engines: {node: ^20.19.0 || ^22.12.0 || >=24.0.0, npm: ^6.11.0 || ^7.5.6 || >=8.0.0, yarn: '>= 1.13.0'}
peerDependencies:
chokidar: ^5.0.0
peerDependenciesMeta:
chokidar:
optional: true
'@angular-devkit/schematics@21.1.0-rc.0':
resolution: {integrity: sha512-TcRuOpJzOAm8Z5YRNJS4qMxdjuYEXIrmpiwH3qyE4fhWKGOpZoDRgkDGEL5D0glb+nuD4Hwa167A2FBOCVe8zQ==}
engines: {node: ^20.19.0 || ^22.12.0 || >=24.0.0, npm: ^6.11.0 || ^7.5.6 || >=8.0.0, yarn: '>= 1.13.0'}
'@angular/build@21.1.0-rc.0':
resolution: {integrity: sha512-RAK4QHzWlKxHgSfcauwT9WC9aRYCvLiTb3rGwLWk9a/7uWuclqC6Fpw6m8U1t8znF0uXLoxM85RVYxm1fm/rQw==}
engines: {node: ^20.19.0 || ^22.12.0 || >=24.0.0, npm: ^6.11.0 || ^7.5.6 || >=8.0.0, yarn: '>= 1.13.0'}
peerDependencies:
'@angular/compiler': ^21.0.0 || ^21.1.0-next.0
'@angular/compiler-cli': ^21.0.0 || ^21.1.0-next.0
'@angular/core': ^21.0.0 || ^21.1.0-next.0
'@angular/localize': ^21.0.0 || ^21.1.0-next.0
'@angular/platform-browser': ^21.0.0 || ^21.1.0-next.0
'@angular/platform-server': ^21.0.0 || ^21.1.0-next.0
'@angular/service-worker': ^21.0.0 || ^21.1.0-next.0
'@angular/ssr': ^21.1.0-rc.0
karma: ^6.4.0
less: ^4.2.0
ng-packagr: ^21.0.0 || ^21.1.0-next.0
postcss: ^8.4.0
tailwindcss: ^2.0.0 || ^3.0.0 || ^4.0.0
tslib: ^2.3.0
typescript: '>=5.9 <6.0'
vitest: ^4.0.8
peerDependenciesMeta:
'@angular/core':
optional: true
'@angular/localize':
optional: true
'@angular/platform-browser':
optional: true
'@angular/platform-server':
optional: true
'@angular/service-worker':
optional: true
'@angular/ssr':
optional: true
karma:
optional: true
less:
optional: true
ng-packagr:
optional: true
postcss:
optional: true
tailwindcss:
optional: true
vitest:
optional: true
'@angular/cli@21.1.0-rc.0':
resolution: {integrity: sha512-Gqc4zXxnV0TXEurqyBvlLnLMx+bPw3Kpp7a4xBVT6C5ZRzR+KbA4Rpwuqt5D6eYt8byMVLeqARhSFtoRKvH0qQ==}
engines: {node: ^20.19.0 || ^22.12.0 || >=24.0.0, npm: ^6.11.0 || ^7.5.6 || >=8.0.0, yarn: '>= 1.13.0'}
hasBin: true
'@angular/ssr@21.1.0-rc.0':
resolution: {integrity: sha512-jnh3t91hbk7YGyPR3wG4BLSNK0SSvGTNZyoYeMPrk4NYkcWXSqf7FS+UKC8gCqGfW6rJMU3EMw0O84xYbt9mww==}
peerDependencies:
'@angular/common': ^21.0.0 || ^21.1.0-next.0
'@angular/core': ^21.0.0 || ^21.1.0-next.0
'@angular/platform-server': ^21.0.0 || ^21.1.0-next.0
'@angular/router': ^21.0.0 || ^21.1.0-next.0
peerDependenciesMeta:
'@angular/platform-server':
optional: true
'@babel/code-frame@7.29.0':
resolution: {integrity: sha512-9NhCeYjq9+3uxgdtp20LSiJXJvN0FeCtNGpJxuMFZ1Kv3cWUNb6DOhJwUvcVCzKGR66cw4njwM6hrJLqgOwbcw==}
engines: {node: '>=6.9.0'}
'@babel/compat-data@7.29.0':
resolution: {integrity: sha512-T1NCJqT/j9+cn8fvkt7jtwbLBfLC/1y1c7NtCeXFRgzGTsafi68MRv8yzkYSapBnFA6L3U2VSc02ciDzoAJhJg==}
engines: {node: '>=6.9.0'}
'@babel/core@7.28.5':
resolution: {integrity: sha512-e7jT4DxYvIDLk1ZHmU/m/mB19rex9sv0c2ftBtjSBv+kVM/902eh0fINUzD7UwLLNR+jU585GxUJ8/EBfAM5fw==}
engines: {node: '>=6.9.0'}
'@babel/generator@7.28.5':
resolution: {integrity: sha512-3EwLFhZ38J4VyIP6WNtt2kUdW9dokXA9Cr4IVIFHuCpZ3H8/YFOl5JjZHisrn1fATPBmKKqXzDFvh9fUwHz6CQ==}
engines: {node: '>=6.9.0'}
'@babel/generator@7.29.1':
resolution: {integrity: sha512-qsaF+9Qcm2Qv8SRIMMscAvG4O3lJ0F1GuMo5HR/Bp02LopNgnZBC/EkbevHFeGs4ls/oPz9v+Bsmzbkbe+0dUw==}
engines: {node: '>=6.9.0'}
'@babel/helper-annotate-as-pure@7.27.3':
resolution: {integrity: sha512-fXSwMQqitTGeHLBC08Eq5yXz2m37E4pJX1qAU1+2cNedz/ifv/bVXft90VeSav5nFO61EcNgwr0aJxbyPaWBPg==}
engines: {node: '>=6.9.0'}
'@babel/helper-compilation-targets@7.28.6':
resolution: {integrity: sha512-JYtls3hqi15fcx5GaSNL7SCTJ2MNmjrkHXg4FSpOA/grxK8KwyZ5bubHsCq8FXCkua6xhuaaBit+3b7+VZRfcA==}
engines: {node: '>=6.9.0'}
'@babel/helper-create-class-features-plugin@7.28.6':
resolution: {integrity: sha512-dTOdvsjnG3xNT9Y0AUg1wAl38y+4Rl4sf9caSQZOXdNqVn+H+HbbJ4IyyHaIqNR6SW9oJpA/RuRjsjCw2IdIow==}
engines: {node: '>=6.9.0'}
peerDependencies:
'@babel/core': ^7.0.0
'@babel/helper-create-regexp-features-plugin@7.28.5':
resolution: {integrity: sha512-N1EhvLtHzOvj7QQOUCCS3NrPJP8c5W6ZXCHDn7Yialuy1iu4r5EmIYkXlKNqT99Ciw+W0mDqWoR6HWMZlFP3hw==}
engines: {node: '>=6.9.0'}
peerDependencies:
'@babel/core': ^7.0.0
'@babel/helper-define-polyfill-provider@0.6.6':
resolution: {integrity: sha512-mOAsxeeKkUKayvZR3HeTYD/fICpCPLJrU5ZjelT/PA6WHtNDBOE436YiaEUvHN454bRM3CebhDsIpieCc4texA==}
peerDependencies:
'@babel/core': ^7.4.0 || ^8.0.0-0 <8.0.0
'@babel/helper-globals@7.28.0':
resolution: {integrity: sha512-+W6cISkXFa1jXsDEdYA8HeevQT/FULhxzR99pxphltZcVaugps53THCeiWA8SguxxpSp3gKPiuYfSWopkLQ4hw==}
engines: {node: '>=6.9.0'}
'@babel/helper-member-expression-to-functions@7.28.5':
resolution: {integrity: sha512-cwM7SBRZcPCLgl8a7cY0soT1SptSzAlMH39vwiRpOQkJlh53r5hdHwLSCZpQdVLT39sZt+CRpNwYG4Y2v77atg==}
engines: {node: '>=6.9.0'}
'@babel/helper-module-imports@7.28.6':
resolution: {integrity: sha512-l5XkZK7r7wa9LucGw9LwZyyCUscb4x37JWTPz7swwFE/0FMQAGpiWUZn8u9DzkSBWEcK25jmvubfpw2dnAMdbw==}
engines: {node: '>=6.9.0'}
'@babel/helper-module-transforms@7.28.6':
resolution: {integrity: sha512-67oXFAYr2cDLDVGLXTEABjdBJZ6drElUSI7WKp70NrpyISso3plG9SAGEF6y7zbha/wOzUByWWTJvEDVNIUGcA==}
engines: {node: '>=6.9.0'}
peerDependencies:
'@babel/core': ^7.0.0
'@babel/helper-optimise-call-expression@7.27.1':
resolution: {integrity: sha512-URMGH08NzYFhubNSGJrpUEphGKQwMQYBySzat5cAByY1/YgIRkULnIy3tAMeszlL/so2HbeilYloUmSpd7GdVw==}
engines: {node: '>=6.9.0'}
'@babel/helper-plugin-utils@7.28.6':
resolution: {integrity: sha512-S9gzZ/bz83GRysI7gAD4wPT/AI3uCnY+9xn+Mx/KPs2JwHJIz1W8PZkg2cqyt3RNOBM8ejcXhV6y8Og7ly/Dug==}
engines: {node: '>=6.9.0'}
'@babel/helper-remap-async-to-generator@7.27.1':
resolution: {integrity: sha512-7fiA521aVw8lSPeI4ZOD3vRFkoqkJcS+z4hFo82bFSH/2tNd6eJ5qCVMS5OzDmZh/kaHQeBaeyxK6wljcPtveA==}
engines: {node: '>=6.9.0'}
peerDependencies:
'@babel/core': ^7.0.0
'@babel/helper-replace-supers@7.28.6':
resolution: {integrity: sha512-mq8e+laIk94/yFec3DxSjCRD2Z0TAjhVbEJY3UQrlwVo15Lmt7C2wAUbK4bjnTs4APkwsYLTahXRraQXhb1WCg==}
engines: {node: '>=6.9.0'}
peerDependencies:
'@babel/core': ^7.0.0
'@babel/helper-skip-transparent-expression-wrappers@7.27.1':
resolution: {integrity: sha512-Tub4ZKEXqbPjXgWLl2+3JpQAYBJ8+ikpQ2Ocj/q/r0LwE3UhENh7EUabyHjz2kCEsrRY83ew2DQdHluuiDQFzg==}
engines: {node: '>=6.9.0'}
'@babel/helper-split-export-declaration@7.24.7':
resolution: {integrity: sha512-oy5V7pD+UvfkEATUKvIjvIAH/xCzfsFVw7ygW2SI6NClZzquT+mwdTfgfdbUiceh6iQO0CHtCPsyze/MZ2YbAA==}
engines: {node: '>=6.9.0'}
'@babel/helper-string-parser@7.27.1':
resolution: {integrity: sha512-qMlSxKbpRlAridDExk92nSobyDdpPijUq2DW6oDnUqd0iOGxmQjyqhMIihI9+zv4LPyZdRje2cavWPbCbWm3eA==}
engines: {node: '>=6.9.0'}
'@babel/helper-validator-identifier@7.28.5':
resolution: {integrity: sha512-qSs4ifwzKJSV39ucNjsvc6WVHs6b7S03sOh2OcHF9UHfVPqWWALUsNUVzhSBiItjRZoLHx7nIarVjqKVusUZ1Q==}
engines: {node: '>=6.9.0'}
'@babel/helper-validator-option@7.27.1':
resolution: {integrity: sha512-YvjJow9FxbhFFKDSuFnVCe2WxXk1zWc22fFePVNEaWJEu8IrZVlda6N0uHwzZrUM1il7NC9Mlp4MaJYbYd9JSg==}
engines: {node: '>=6.9.0'}
'@babel/helper-wrap-function@7.28.6':
resolution: {integrity: sha512-z+PwLziMNBeSQJonizz2AGnndLsP2DeGHIxDAn+wdHOGuo4Fo1x1HBPPXeE9TAOPHNNWQKCSlA2VZyYyyibDnQ==}
engines: {node: '>=6.9.0'}
'@babel/helpers@7.28.6':
resolution: {integrity: sha512-xOBvwq86HHdB7WUDTfKfT/Vuxh7gElQ+Sfti2Cy6yIWNW05P8iUslOVcZ4/sKbE+/jQaukQAdz/gf3724kYdqw==}
engines: {node: '>=6.9.0'}
'@babel/parser@7.29.0':
resolution: {integrity: sha512-IyDgFV5GeDUVX4YdF/3CPULtVGSXXMLh1xVIgdCgxApktqnQV0r7/8Nqthg+8YLGaAtdyIlo2qIdZrbCv4+7ww==}
engines: {node: '>=6.0.0'}
hasBin: true
'@babel/plugin-bugfix-firefox-class-in-computed-class-key@7.28.5':
resolution: {integrity: sha512-87GDMS3tsmMSi/3bWOte1UblL+YUTFMV8SZPZ2eSEL17s74Cw/l63rR6NmGVKMYW2GYi85nE+/d6Hw5N0bEk2Q==}
engines: {node: '>=6.9.0'}
peerDependencies:
'@babel/core': ^7.0.0
'@babel/plugin-bugfix-safari-class-field-initializer-scope@7.27.1':
resolution: {integrity: sha512-qNeq3bCKnGgLkEXUuFry6dPlGfCdQNZbn7yUAPCInwAJHMU7THJfrBSozkcWq5sNM6RcF3S8XyQL2A52KNR9IA==}
engines: {node: '>=6.9.0'}
peerDependencies:
'@babel/core': ^7.0.0
'@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression@7.27.1':
resolution: {integrity: sha512-g4L7OYun04N1WyqMNjldFwlfPCLVkgB54A/YCXICZYBsvJJE3kByKv9c9+R/nAfmIfjl2rKYLNyMHboYbZaWaA==}
engines: {node: '>=6.9.0'}
peerDependencies:
'@babel/core': ^7.0.0
'@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining@7.27.1':
resolution: {integrity: sha512-oO02gcONcD5O1iTLi/6frMJBIwWEHceWGSGqrpCmEL8nogiS6J9PBlE48CaK20/Jx1LuRml9aDftLgdjXT8+Cw==}
engines: {node: '>=6.9.0'}
peerDependencies:
'@babel/core': ^7.13.0
'@babel/plugin-bugfix-v8-static-class-fields-redefine-readonly@7.28.6':
resolution: {integrity: sha512-a0aBScVTlNaiUe35UtfxAN7A/tehvvG4/ByO6+46VPKTRSlfnAFsgKy0FUh+qAkQrDTmhDkT+IBOKlOoMUxQ0g==}
engines: {node: '>=6.9.0'}
peerDependencies:
'@babel/core': ^7.0.0
'@babel/plugin-proposal-private-property-in-object@7.21.0-placeholder-for-preset-env.2':
resolution: {integrity: sha512-SOSkfJDddaM7mak6cPEpswyTRnuRltl429hMraQEglW+OkovnCzsiszTmsrlY//qLFjCpQDFRvjdm2wA5pPm9w==}
engines: {node: '>=6.9.0'}
peerDependencies:
'@babel/core': ^7.0.0-0
'@babel/plugin-syntax-import-assertions@7.28.6':
resolution: {integrity: sha512-pSJUpFHdx9z5nqTSirOCMtYVP2wFgoWhP0p3g8ONK/4IHhLIBd0B9NYqAvIUAhq+OkhO4VM1tENCt0cjlsNShw==}
engines: {node: '>=6.9.0'}
peerDependencies:
'@babel/core': ^7.0.0-0
'@babel/plugin-syntax-import-attributes@7.28.6':
resolution: {integrity: sha512-jiLC0ma9XkQT3TKJ9uYvlakm66Pamywo+qwL+oL8HJOvc6TWdZXVfhqJr8CCzbSGUAbDOzlGHJC1U+vRfLQDvw==}
engines: {node: '>=6.9.0'}
peerDependencies:
'@babel/core': ^7.0.0-0
'@babel/plugin-syntax-unicode-sets-regex@7.18.6':
resolution: {integrity: sha512-727YkEAPwSIQTv5im8QHz3upqp92JTWhidIC81Tdx4VJYIte/VndKf1qKrfnnhPLiPghStWfvC/iFaMCQu7Nqg==}
engines: {node: '>=6.9.0'}
peerDependencies:
'@babel/core': ^7.0.0
'@babel/plugin-transform-arrow-functions@7.27.1':
resolution: {integrity: sha512-8Z4TGic6xW70FKThA5HYEKKyBpOOsucTOD1DjU3fZxDg+K3zBJcXMFnt/4yQiZnf5+MiOMSXQ9PaEK/Ilh1DeA==}
engines: {node: '>=6.9.0'}
peerDependencies:
'@babel/core': ^7.0.0-0
'@babel/plugin-transform-async-generator-functions@7.28.0':
resolution: {integrity: sha512-BEOdvX4+M765icNPZeidyADIvQ1m1gmunXufXxvRESy/jNNyfovIqUyE7MVgGBjWktCoJlzvFA1To2O4ymIO3Q==}
engines: {node: '>=6.9.0'}
peerDependencies:
'@babel/core': ^7.0.0-0
'@babel/plugin-transform-async-to-generator@7.27.1':
resolution: {integrity: sha512-NREkZsZVJS4xmTr8qzE5y8AfIPqsdQfRuUiLRTEzb7Qii8iFWCyDKaUV2c0rCuh4ljDZ98ALHP/PetiBV2nddA==}
engines: {node: '>=6.9.0'}
peerDependencies:
'@babel/core': ^7.0.0-0
'@babel/plugin-transform-block-scoped-functions@7.27.1':
resolution: {integrity: sha512-cnqkuOtZLapWYZUYM5rVIdv1nXYuFVIltZ6ZJ7nIj585QsjKM5dhL2Fu/lICXZ1OyIAFc7Qy+bvDAtTXqGrlhg==}
engines: {node: '>=6.9.0'}
peerDependencies:
'@babel/core': ^7.0.0-0
'@babel/plugin-transform-block-scoping@7.28.6':
resolution: {integrity: sha512-tt/7wOtBmwHPNMPu7ax4pdPz6shjFrmHDghvNC+FG9Qvj7D6mJcoRQIF5dy4njmxR941l6rgtvfSB2zX3VlUIw==}
engines: {node: '>=6.9.0'}
peerDependencies:
'@babel/core': ^7.0.0-0
'@babel/plugin-transform-class-properties@7.28.6':
resolution: {integrity: sha512-dY2wS3I2G7D697VHndN91TJr8/AAfXQNt5ynCTI/MpxMsSzHp+52uNivYT5wCPax3whc47DR8Ba7cmlQMg24bw==}
engines: {node: '>=6.9.0'}
peerDependencies:
'@babel/core': ^7.0.0-0
'@babel/plugin-transform-class-static-block@7.28.6':
resolution: {integrity: sha512-rfQ++ghVwTWTqQ7w8qyDxL1XGihjBss4CmTgGRCTAC9RIbhVpyp4fOeZtta0Lbf+dTNIVJer6ych2ibHwkZqsQ==}
engines: {node: '>=6.9.0'}
peerDependencies:
'@babel/core': ^7.12.0
'@babel/plugin-transform-classes@7.28.6':
resolution: {integrity: sha512-EF5KONAqC5zAqT783iMGuM2ZtmEBy+mJMOKl2BCvPZ2lVrwvXnB6o+OBWCS+CoeCCpVRF2sA2RBKUxvT8tQT5Q==}
engines: {node: '>=6.9.0'}
peerDependencies:
'@babel/core': ^7.0.0-0
'@babel/plugin-transform-computed-properties@7.28.6':
resolution: {integrity: sha512-bcc3k0ijhHbc2lEfpFHgx7eYw9KNXqOerKWfzbxEHUGKnS3sz9C4CNL9OiFN1297bDNfUiSO7DaLzbvHQQQ1BQ==}
engines: {node: '>=6.9.0'}
peerDependencies:
'@babel/core': ^7.0.0-0
'@babel/plugin-transform-destructuring@7.28.5':
resolution: {integrity: sha512-Kl9Bc6D0zTUcFUvkNuQh4eGXPKKNDOJQXVyyM4ZAQPMveniJdxi8XMJwLo+xSoW3MIq81bD33lcUe9kZpl0MCw==}
engines: {node: '>=6.9.0'}
peerDependencies:
'@babel/core': ^7.0.0-0
'@babel/plugin-transform-dotall-regex@7.28.6':
resolution: {integrity: sha512-SljjowuNKB7q5Oayv4FoPzeB74g3QgLt8IVJw9ADvWy3QnUb/01aw8I4AVv8wYnPvQz2GDDZ/g3GhcNyDBI4Bg==}
engines: {node: '>=6.9.0'}
peerDependencies:
'@babel/core': ^7.0.0-0
'@babel/plugin-transform-duplicate-keys@7.27.1':
resolution: {integrity: sha512-MTyJk98sHvSs+cvZ4nOauwTTG1JeonDjSGvGGUNHreGQns+Mpt6WX/dVzWBHgg+dYZhkC4X+zTDfkTU+Vy9y7Q==}
engines: {node: '>=6.9.0'}
peerDependencies:
'@babel/core': ^7.0.0-0
'@babel/plugin-transform-duplicate-named-capturing-groups-regex@7.29.0':
resolution: {integrity: sha512-zBPcW2lFGxdiD8PUnPwJjag2J9otbcLQzvbiOzDxpYXyCuYX9agOwMPGn1prVH0a4qzhCKu24rlH4c1f7yA8rw==}
engines: {node: '>=6.9.0'}
peerDependencies:
'@babel/core': ^7.0.0
'@babel/plugin-transform-dynamic-import@7.27.1':
resolution: {integrity: sha512-MHzkWQcEmjzzVW9j2q8LGjwGWpG2mjwaaB0BNQwst3FIjqsg8Ct/mIZlvSPJvfi9y2AC8mi/ktxbFVL9pZ1I4A==}
engines: {node: '>=6.9.0'}
peerDependencies:
'@babel/core': ^7.0.0-0
'@babel/plugin-transform-explicit-resource-management@7.28.6':
resolution: {integrity: sha512-Iao5Konzx2b6g7EPqTy40UZbcdXE126tTxVFr/nAIj+WItNxjKSYTEw3RC+A2/ZetmdJsgueL1KhaMCQHkLPIg==}
engines: {node: '>=6.9.0'}
peerDependencies:
'@babel/core': ^7.0.0-0
'@babel/plugin-transform-exponentiation-operator@7.28.6':
resolution: {integrity: sha512-WitabqiGjV/vJ0aPOLSFfNY1u9U3R7W36B03r5I2KoNix+a3sOhJ3pKFB3R5It9/UiK78NiO0KE9P21cMhlPkw==}
engines: {node: '>=6.9.0'}
peerDependencies:
'@babel/core': ^7.0.0-0
'@babel/plugin-transform-export-namespace-from@7.27.1':
resolution: {integrity: sha512-tQvHWSZ3/jH2xuq/vZDy0jNn+ZdXJeM8gHvX4lnJmsc3+50yPlWdZXIc5ay+umX+2/tJIqHqiEqcJvxlmIvRvQ==}
engines: {node: '>=6.9.0'}
peerDependencies:
'@babel/core': ^7.0.0-0
'@babel/plugin-transform-for-of@7.27.1':
resolution: {integrity: sha512-BfbWFFEJFQzLCQ5N8VocnCtA8J1CLkNTe2Ms2wocj75dd6VpiqS5Z5quTYcUoo4Yq+DN0rtikODccuv7RU81sw==}
engines: {node: '>=6.9.0'}
peerDependencies:
'@babel/core': ^7.0.0-0
'@babel/plugin-transform-function-name@7.27.1':
resolution: {integrity: sha512-1bQeydJF9Nr1eBCMMbC+hdwmRlsv5XYOMu03YSWFwNs0HsAmtSxxF1fyuYPqemVldVyFmlCU7w8UE14LupUSZQ==}
engines: {node: '>=6.9.0'}
peerDependencies:
'@babel/core': ^7.0.0-0
'@babel/plugin-transform-json-strings@7.28.6':
resolution: {integrity: sha512-Nr+hEN+0geQkzhbdgQVPoqr47lZbm+5fCUmO70722xJZd0Mvb59+33QLImGj6F+DkK3xgDi1YVysP8whD6FQAw==}
engines: {node: '>=6.9.0'}
peerDependencies:
'@babel/core': ^7.0.0-0
'@babel/plugin-transform-literals@7.27.1':
resolution: {integrity: sha512-0HCFSepIpLTkLcsi86GG3mTUzxV5jpmbv97hTETW3yzrAij8aqlD36toB1D0daVFJM8NK6GvKO0gslVQmm+zZA==}
engines: {node: '>=6.9.0'}
peerDependencies:
'@babel/core': ^7.0.0-0
'@babel/plugin-transform-logical-assignment-operators@7.28.6':
resolution: {integrity: sha512-+anKKair6gpi8VsM/95kmomGNMD0eLz1NQ8+Pfw5sAwWH9fGYXT50E55ZpV0pHUHWf6IUTWPM+f/7AAff+wr9A==}
engines: {node: '>=6.9.0'}
peerDependencies:
'@babel/core': ^7.0.0-0
'@babel/plugin-transform-member-expression-literals@7.27.1':
resolution: {integrity: sha512-hqoBX4dcZ1I33jCSWcXrP+1Ku7kdqXf1oeah7ooKOIiAdKQ+uqftgCFNOSzA5AMS2XIHEYeGFg4cKRCdpxzVOQ==}
engines: {node: '>=6.9.0'}
peerDependencies:
'@babel/core': ^7.0.0-0
'@babel/plugin-transform-modules-amd@7.27.1':
resolution: {integrity: sha512-iCsytMg/N9/oFq6n+gFTvUYDZQOMK5kEdeYxmxt91fcJGycfxVP9CnrxoliM0oumFERba2i8ZtwRUCMhvP1LnA==}
engines: {node: '>=6.9.0'}
peerDependencies:
'@babel/core': ^7.0.0-0
'@babel/plugin-transform-modules-commonjs@7.28.6':
resolution: {integrity: sha512-jppVbf8IV9iWWwWTQIxJMAJCWBuuKx71475wHwYytrRGQ2CWiDvYlADQno3tcYpS/T2UUWFQp3nVtYfK/YBQrA==}
engines: {node: '>=6.9.0'}
peerDependencies:
'@babel/core': ^7.0.0-0
'@babel/plugin-transform-modules-systemjs@7.29.0':
resolution: {integrity: sha512-PrujnVFbOdUpw4UHiVwKvKRLMMic8+eC0CuNlxjsyZUiBjhFdPsewdXCkveh2KqBA9/waD0W1b4hXSOBQJezpQ==}
engines: {node: '>=6.9.0'}
peerDependencies:
'@babel/core': ^7.0.0-0
'@babel/plugin-transform-modules-umd@7.27.1':
resolution: {integrity: sha512-iQBE/xC5BV1OxJbp6WG7jq9IWiD+xxlZhLrdwpPkTX3ydmXdvoCpyfJN7acaIBZaOqTfr76pgzqBJflNbeRK+w==}
engines: {node: '>=6.9.0'}
peerDependencies:
'@babel/core': ^7.0.0-0
'@babel/plugin-transform-named-capturing-groups-regex@7.29.0':
resolution: {integrity: sha512-1CZQA5KNAD6ZYQLPw7oi5ewtDNxH/2vuCh+6SmvgDfhumForvs8a1o9n0UrEoBD8HU4djO2yWngTQlXl1NDVEQ==}
engines: {node: '>=6.9.0'}
peerDependencies:
'@babel/core': ^7.0.0
'@babel/plugin-transform-new-target@7.27.1':
resolution: {integrity: sha512-f6PiYeqXQ05lYq3TIfIDu/MtliKUbNwkGApPUvyo6+tc7uaR4cPjPe7DFPr15Uyycg2lZU6btZ575CuQoYh7MQ==}
engines: {node: '>=6.9.0'}
peerDependencies:
'@babel/core': ^7.0.0-0
'@babel/plugin-transform-nullish-coalescing-operator@7.28.6':
resolution: {integrity: sha512-3wKbRgmzYbw24mDJXT7N+ADXw8BC/imU9yo9c9X9NKaLF1fW+e5H1U5QjMUBe4Qo4Ox/o++IyUkl1sVCLgevKg==}
engines: {node: '>=6.9.0'}
peerDependencies:
'@babel/core': ^7.0.0-0
'@babel/plugin-transform-numeric-separator@7.28.6':
resolution: {integrity: sha512-SJR8hPynj8outz+SlStQSwvziMN4+Bq99it4tMIf5/Caq+3iOc0JtKyse8puvyXkk3eFRIA5ID/XfunGgO5i6w==}
engines: {node: '>=6.9.0'}
peerDependencies:
'@babel/core': ^7.0.0-0
'@babel/plugin-transform-object-rest-spread@7.28.6':
resolution: {integrity: sha512-5rh+JR4JBC4pGkXLAcYdLHZjXudVxWMXbB6u6+E9lRL5TrGVbHt1TjxGbZ8CkmYw9zjkB7jutzOROArsqtncEA==}
engines: {node: '>=6.9.0'}
peerDependencies:
'@babel/core': ^7.0.0-0
'@babel/plugin-transform-object-super@7.27.1':
resolution: {integrity: sha512-SFy8S9plRPbIcxlJ8A6mT/CxFdJx/c04JEctz4jf8YZaVS2px34j7NXRrlGlHkN/M2gnpL37ZpGRGVFLd3l8Ng==}
engines: {node: '>=6.9.0'}
peerDependencies:
'@babel/core': ^7.0.0-0
'@babel/plugin-transform-optional-catch-binding@7.28.6':
resolution: {integrity: sha512-R8ja/Pyrv0OGAvAXQhSTmWyPJPml+0TMqXlO5w+AsMEiwb2fg3WkOvob7UxFSL3OIttFSGSRFKQsOhJ/X6HQdQ==}
engines: {node: '>=6.9.0'}
peerDependencies:
'@babel/core': ^7.0.0-0
'@babel/plugin-transform-optional-chaining@7.28.6':
resolution: {integrity: sha512-A4zobikRGJTsX9uqVFdafzGkqD30t26ck2LmOzAuLL8b2x6k3TIqRiT2xVvA9fNmFeTX484VpsdgmKNA0bS23w==}
engines: {node: '>=6.9.0'}
peerDependencies:
'@babel/core': ^7.0.0-0
'@babel/plugin-transform-parameters@7.27.7':
resolution: {integrity: sha512-qBkYTYCb76RRxUM6CcZA5KRu8K4SM8ajzVeUgVdMVO9NN9uI/GaVmBg/WKJJGnNokV9SY8FxNOVWGXzqzUidBg==}
engines: {node: '>=6.9.0'}
peerDependencies:
'@babel/core': ^7.0.0-0
'@babel/plugin-transform-private-methods@7.28.6':
resolution: {integrity: sha512-piiuapX9CRv7+0st8lmuUlRSmX6mBcVeNQ1b4AYzJxfCMuBfB0vBXDiGSmm03pKJw1v6cZ8KSeM+oUnM6yAExg==}
engines: {node: '>=6.9.0'}
peerDependencies:
'@babel/core': ^7.0.0-0
'@babel/plugin-transform-private-property-in-object@7.28.6':
resolution: {integrity: sha512-b97jvNSOb5+ehyQmBpmhOCiUC5oVK4PMnpRvO7+ymFBoqYjeDHIU9jnrNUuwHOiL9RpGDoKBpSViarV+BU+eVA==}
engines: {node: '>=6.9.0'}
peerDependencies:
'@babel/core': ^7.0.0-0
'@babel/plugin-transform-property-literals@7.27.1':
resolution: {integrity: sha512-oThy3BCuCha8kDZ8ZkgOg2exvPYUlprMukKQXI1r1pJ47NCvxfkEy8vK+r/hT9nF0Aa4H1WUPZZjHTFtAhGfmQ==}
engines: {node: '>=6.9.0'}
peerDependencies:
'@babel/core': ^7.0.0-0
'@babel/plugin-transform-regenerator@7.29.0':
resolution: {integrity: sha512-FijqlqMA7DmRdg/aINBSs04y8XNTYw/lr1gJ2WsmBnnaNw1iS43EPkJW+zK7z65auG3AWRFXWj+NcTQwYptUog==}
engines: {node: '>=6.9.0'}
peerDependencies:
'@babel/core': ^7.0.0-0
'@babel/plugin-transform-regexp-modifiers@7.28.6':
resolution: {integrity: sha512-QGWAepm9qxpaIs7UM9FvUSnCGlb8Ua1RhyM4/veAxLwt3gMat/LSGrZixyuj4I6+Kn9iwvqCyPTtbdxanYoWYg==}
engines: {node: '>=6.9.0'}
peerDependencies:
'@babel/core': ^7.0.0
'@babel/plugin-transform-reserved-words@7.27.1':
resolution: {integrity: sha512-V2ABPHIJX4kC7HegLkYoDpfg9PVmuWy/i6vUM5eGK22bx4YVFD3M5F0QQnWQoDs6AGsUWTVOopBiMFQgHaSkVw==}
engines: {node: '>=6.9.0'}
peerDependencies:
'@babel/core': ^7.0.0-0
'@babel/plugin-transform-runtime@7.28.5':
resolution: {integrity: sha512-20NUVgOrinudkIBzQ2bNxP08YpKprUkRTiRSd2/Z5GOdPImJGkoN4Z7IQe1T5AdyKI1i5L6RBmluqdSzvaq9/w==}
engines: {node: '>=6.9.0'}
peerDependencies:
'@babel/core': ^7.0.0-0
'@babel/plugin-transform-shorthand-properties@7.27.1':
resolution: {integrity: sha512-N/wH1vcn4oYawbJ13Y/FxcQrWk63jhfNa7jef0ih7PHSIHX2LB7GWE1rkPrOnka9kwMxb6hMl19p7lidA+EHmQ==}
engines: {node: '>=6.9.0'}
peerDependencies:
'@babel/core': ^7.0.0-0
'@babel/plugin-transform-spread@7.28.6':
resolution: {integrity: sha512-9U4QObUC0FtJl05AsUcodau/RWDytrU6uKgkxu09mLR9HLDAtUMoPuuskm5huQsoktmsYpI+bGmq+iapDcriKA==}
engines: {node: '>=6.9.0'}
peerDependencies:
'@babel/core': ^7.0.0-0
'@babel/plugin-transform-sticky-regex@7.27.1':
resolution: {integrity: sha512-lhInBO5bi/Kowe2/aLdBAawijx+q1pQzicSgnkB6dUPc1+RC8QmJHKf2OjvU+NZWitguJHEaEmbV6VWEouT58g==}
engines: {node: '>=6.9.0'}
peerDependencies:
'@babel/core': ^7.0.0-0
'@babel/plugin-transform-template-literals@7.27.1':
resolution: {integrity: sha512-fBJKiV7F2DxZUkg5EtHKXQdbsbURW3DZKQUWphDum0uRP6eHGGa/He9mc0mypL680pb+e/lDIthRohlv8NCHkg==}
engines: {node: '>=6.9.0'}
peerDependencies:
'@babel/core': ^7.0.0-0
'@babel/plugin-transform-typeof-symbol@7.27.1':
resolution: {integrity: sha512-RiSILC+nRJM7FY5srIyc4/fGIwUhyDuuBSdWn4y6yT6gm652DpCHZjIipgn6B7MQ1ITOUnAKWixEUjQRIBIcLw==}
engines: {node: '>=6.9.0'}
peerDependencies:
'@babel/core': ^7.0.0-0
'@babel/plugin-transform-unicode-escapes@7.27.1':
resolution: {integrity: sha512-Ysg4v6AmF26k9vpfFuTZg8HRfVWzsh1kVfowA23y9j/Gu6dOuahdUVhkLqpObp3JIv27MLSii6noRnuKN8H0Mg==}
engines: {node: '>=6.9.0'}
peerDependencies:
'@babel/core': ^7.0.0-0
'@babel/plugin-transform-unicode-property-regex@7.28.6':
resolution: {integrity: sha512-4Wlbdl/sIZjzi/8St0evF0gEZrgOswVO6aOzqxh1kDZOl9WmLrHq2HtGhnOJZmHZYKP8WZ1MDLCt5DAWwRo57A==}
engines: {node: '>=6.9.0'}
peerDependencies:
'@babel/core': ^7.0.0-0
'@babel/plugin-transform-unicode-regex@7.27.1':
resolution: {integrity: sha512-xvINq24TRojDuyt6JGtHmkVkrfVV3FPT16uytxImLeBZqW3/H52yN+kM1MGuyPkIQxrzKwPHs5U/MP3qKyzkGw==}
engines: {node: '>=6.9.0'}
peerDependencies:
'@babel/core': ^7.0.0-0
'@babel/plugin-transform-unicode-sets-regex@7.28.6':
resolution: {integrity: sha512-/wHc/paTUmsDYN7SZkpWxogTOBNnlx7nBQYfy6JJlCT7G3mVhltk3e++N7zV0XfgGsrqBxd4rJQt9H16I21Y1Q==}
engines: {node: '>=6.9.0'}
peerDependencies:
'@babel/core': ^7.0.0
'@babel/preset-env@7.28.5':
resolution: {integrity: sha512-S36mOoi1Sb6Fz98fBfE+UZSpYw5mJm0NUHtIKrOuNcqeFauy1J6dIvXm2KRVKobOSaGq4t/hBXdN4HGU3wL9Wg==}
engines: {node: '>=6.9.0'}
peerDependencies:
'@babel/core': ^7.0.0-0
'@babel/preset-modules@0.1.6-no-external-plugins':
resolution: {integrity: sha512-HrcgcIESLm9aIR842yhJ5RWan/gebQUJ6E/E5+rf0y9o6oj7w0Br+sWuL6kEQ/o/AdfvR1Je9jG18/gnpwjEyA==}
peerDependencies:
'@babel/core': ^7.0.0-0 || ^8.0.0-0 <8.0.0
'@babel/runtime@7.28.4':
resolution: {integrity: sha512-Q/N6JNWvIvPnLDvjlE1OUBLPQHH6l3CltCEsHIujp45zQUSSh8K+gHnaEX45yAT1nyngnINhvWtzN+Nb9D8RAQ==}
engines: {node: '>=6.9.0'}
'@babel/template@7.28.6':
resolution: {integrity: sha512-YA6Ma2KsCdGb+WC6UpBVFJGXL58MDA6oyONbjyF/+5sBgxY/dwkhLogbMT2GXXyU84/IhRw/2D1Os1B/giz+BQ==}
engines: {node: '>=6.9.0'}
'@babel/traverse@7.29.0':
resolution: {integrity: sha512-4HPiQr0X7+waHfyXPZpWPfWL/J7dcN1mx9gL6WdQVMbPnF3+ZhSMs8tCxN7oHddJE9fhNE7+lxdnlyemKfJRuA==}
engines: {node: '>=6.9.0'}
'@babel/types@7.29.0':
resolution: {integrity: sha512-LwdZHpScM4Qz8Xw2iKSzS+cfglZzJGvofQICy7W7v4caru4EaAmyUuO6BGrbyQ2mYV11W0U8j5mBhd14dd3B0A==}
engines: {node: '>=6.9.0'}
'@cspotcode/source-map-support@0.8.1':
resolution: {integrity: sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw==}
engines: {node: '>=12'}
'@discoveryjs/json-ext@0.6.3':
resolution: {integrity: sha512-4B4OijXeVNOPZlYA2oEwWOTkzyltLao+xbotHQeqN++Rv27Y6s818+n2Qkp8q+Fxhn0t/5lA5X1Mxktud8eayQ==}
engines: {node: '>=14.17.0'}
'@emnapi/core@1.8.1':
resolution: {integrity: sha512-AvT9QFpxK0Zd8J0jopedNm+w/2fIzvtPKPjqyw9jwvBaReTTqPBk9Hixaz7KbjimP+QNz605/XnjFcDAL2pqBg==}
'@emnapi/runtime@1.8.1':
resolution: {integrity: sha512-mehfKSMWjjNol8659Z8KxEMrdSJDDot5SXMq00dM8BN4o+CLNXQ0xH2V7EchNHV4RmbZLmmPdEaXZc5H2FXmDg==}
'@emnapi/wasi-threads@1.1.0':
resolution: {integrity: sha512-WI0DdZ8xFSbgMjR1sFsKABJ/C5OnRrjT06JXbZKexJGrDuPTzZdDYfFlsgcCXCyf+suG5QU2e/y1Wo2V/OapLQ==}
'@esbuild/aix-ppc64@0.27.2':
resolution: {integrity: sha512-GZMB+a0mOMZs4MpDbj8RJp4cw+w1WV5NYD6xzgvzUJ5Ek2jerwfO2eADyI6ExDSUED+1X8aMbegahsJi+8mgpw==}
engines: {node: '>=18'}
cpu: [ppc64]
os: [aix]
'@esbuild/android-arm64@0.27.2':
resolution: {integrity: sha512-pvz8ZZ7ot/RBphf8fv60ljmaoydPU12VuXHImtAs0XhLLw+EXBi2BLe3OYSBslR4rryHvweW5gmkKFwTiFy6KA==}
engines: {node: '>=18'}
cpu: [arm64]
os: [android]
'@esbuild/android-arm@0.27.2':
resolution: {integrity: sha512-DVNI8jlPa7Ujbr1yjU2PfUSRtAUZPG9I1RwW4F4xFB1Imiu2on0ADiI/c3td+KmDtVKNbi+nffGDQMfcIMkwIA==}
engines: {node: '>=18'}
cpu: [arm]
os: [android]
'@esbuild/android-x64@0.27.2':
resolution: {integrity: sha512-z8Ank4Byh4TJJOh4wpz8g2vDy75zFL0TlZlkUkEwYXuPSgX8yzep596n6mT7905kA9uHZsf/o2OJZubl2l3M7A==}
engines: {node: '>=18'}
cpu: [x64]
os: [android]
'@esbuild/darwin-arm64@0.27.2':
resolution: {integrity: sha512-davCD2Zc80nzDVRwXTcQP/28fiJbcOwvdolL0sOiOsbwBa72kegmVU0Wrh1MYrbuCL98Omp5dVhQFWRKR2ZAlg==}
engines: {node: '>=18'}
cpu: [arm64]
os: [darwin]
'@esbuild/darwin-x64@0.27.2':
resolution: {integrity: sha512-ZxtijOmlQCBWGwbVmwOF/UCzuGIbUkqB1faQRf5akQmxRJ1ujusWsb3CVfk/9iZKr2L5SMU5wPBi1UWbvL+VQA==}
engines: {node: '>=18'}
cpu: [x64]
os: [darwin]
'@esbuild/freebsd-arm64@0.27.2':
resolution: {integrity: sha512-lS/9CN+rgqQ9czogxlMcBMGd+l8Q3Nj1MFQwBZJyoEKI50XGxwuzznYdwcav6lpOGv5BqaZXqvBSiB/kJ5op+g==}
engines: {node: '>=18'}
cpu: [arm64]
os: [freebsd]
'@esbuild/freebsd-x64@0.27.2':
resolution: {integrity: sha512-tAfqtNYb4YgPnJlEFu4c212HYjQWSO/w/h/lQaBK7RbwGIkBOuNKQI9tqWzx7Wtp7bTPaGC6MJvWI608P3wXYA==}
engines: {node: '>=18'}
cpu: [x64]
os: [freebsd]
'@esbuild/linux-arm64@0.27.2':
resolution: {integrity: sha512-hYxN8pr66NsCCiRFkHUAsxylNOcAQaxSSkHMMjcpx0si13t1LHFphxJZUiGwojB1a/Hd5OiPIqDdXONia6bhTw==}
engines: {node: '>=18'}
cpu: [arm64]
os: [linux]
'@esbuild/linux-arm@0.27.2':
resolution: {integrity: sha512-vWfq4GaIMP9AIe4yj1ZUW18RDhx6EPQKjwe7n8BbIecFtCQG4CfHGaHuh7fdfq+y3LIA2vGS/o9ZBGVxIDi9hw==}
engines: {node: '>=18'}
cpu: [arm]
os: [linux]
'@esbuild/linux-ia32@0.27.2':
resolution: {integrity: sha512-MJt5BRRSScPDwG2hLelYhAAKh9imjHK5+NE/tvnRLbIqUWa+0E9N4WNMjmp/kXXPHZGqPLxggwVhz7QP8CTR8w==}
engines: {node: '>=18'}
cpu: [ia32]
os: [linux]
'@esbuild/linux-loong64@0.27.2':
resolution: {integrity: sha512-lugyF1atnAT463aO6KPshVCJK5NgRnU4yb3FUumyVz+cGvZbontBgzeGFO1nF+dPueHD367a2ZXe1NtUkAjOtg==}
engines: {node: '>=18'}
cpu: [loong64]
os: [linux]
'@esbuild/linux-mips64el@0.27.2':
resolution: {integrity: sha512-nlP2I6ArEBewvJ2gjrrkESEZkB5mIoaTswuqNFRv/WYd+ATtUpe9Y09RnJvgvdag7he0OWgEZWhviS1OTOKixw==}
engines: {node: '>=18'}
cpu: [mips64el]
os: [linux]
'@esbuild/linux-ppc64@0.27.2':
resolution: {integrity: sha512-C92gnpey7tUQONqg1n6dKVbx3vphKtTHJaNG2Ok9lGwbZil6DrfyecMsp9CrmXGQJmZ7iiVXvvZH6Ml5hL6XdQ==}
engines: {node: '>=18'}
cpu: [ppc64]
os: [linux]
'@esbuild/linux-riscv64@0.27.2':
resolution: {integrity: sha512-B5BOmojNtUyN8AXlK0QJyvjEZkWwy/FKvakkTDCziX95AowLZKR6aCDhG7LeF7uMCXEJqwa8Bejz5LTPYm8AvA==}
engines: {node: '>=18'}
cpu: [riscv64]
os: [linux]
'@esbuild/linux-s390x@0.27.2':
resolution: {integrity: sha512-p4bm9+wsPwup5Z8f4EpfN63qNagQ47Ua2znaqGH6bqLlmJ4bx97Y9JdqxgGZ6Y8xVTixUnEkoKSHcpRlDnNr5w==}
engines: {node: '>=18'}
cpu: [s390x]
os: [linux]
'@esbuild/linux-x64@0.27.2':
resolution: {integrity: sha512-uwp2Tip5aPmH+NRUwTcfLb+W32WXjpFejTIOWZFw/v7/KnpCDKG66u4DLcurQpiYTiYwQ9B7KOeMJvLCu/OvbA==}
engines: {node: '>=18'}
cpu: [x64]
os: [linux]
'@esbuild/netbsd-arm64@0.27.2':
resolution: {integrity: sha512-Kj6DiBlwXrPsCRDeRvGAUb/LNrBASrfqAIok+xB0LxK8CHqxZ037viF13ugfsIpePH93mX7xfJp97cyDuTZ3cw==}
engines: {node: '>=18'}
cpu: [arm64]
os: [netbsd]
'@esbuild/netbsd-x64@0.27.2':
resolution: {integrity: sha512-HwGDZ0VLVBY3Y+Nw0JexZy9o/nUAWq9MlV7cahpaXKW6TOzfVno3y3/M8Ga8u8Yr7GldLOov27xiCnqRZf0tCA==}
engines: {node: '>=18'}
cpu: [x64]
os: [netbsd]
'@esbuild/openbsd-arm64@0.27.2':
resolution: {integrity: sha512-DNIHH2BPQ5551A7oSHD0CKbwIA/Ox7+78/AWkbS5QoRzaqlev2uFayfSxq68EkonB+IKjiuxBFoV8ESJy8bOHA==}
engines: {node: '>=18'}
cpu: [arm64]
os: [openbsd]
'@esbuild/openbsd-x64@0.27.2':
resolution: {integrity: sha512-/it7w9Nb7+0KFIzjalNJVR5bOzA9Vay+yIPLVHfIQYG/j+j9VTH84aNB8ExGKPU4AzfaEvN9/V4HV+F+vo8OEg==}
engines: {node: '>=18'}
cpu: [x64]
os: [openbsd]
'@esbuild/openharmony-arm64@0.27.2':
resolution: {integrity: sha512-LRBbCmiU51IXfeXk59csuX/aSaToeG7w48nMwA6049Y4J4+VbWALAuXcs+qcD04rHDuSCSRKdmY63sruDS5qag==}
engines: {node: '>=18'}
cpu: [arm64]
os: [openharmony]
'@esbuild/sunos-x64@0.27.2':
resolution: {integrity: sha512-kMtx1yqJHTmqaqHPAzKCAkDaKsffmXkPHThSfRwZGyuqyIeBvf08KSsYXl+abf5HDAPMJIPnbBfXvP2ZC2TfHg==}
engines: {node: '>=18'}
cpu: [x64]
os: [sunos]
'@esbuild/win32-arm64@0.27.2':
resolution: {integrity: sha512-Yaf78O/B3Kkh+nKABUF++bvJv5Ijoy9AN1ww904rOXZFLWVc5OLOfL56W+C8F9xn5JQZa3UX6m+IktJnIb1Jjg==}
engines: {node: '>=18'}
cpu: [arm64]
os: [win32]
'@esbuild/win32-ia32@0.27.2':
resolution: {integrity: sha512-Iuws0kxo4yusk7sw70Xa2E2imZU5HoixzxfGCdxwBdhiDgt9vX9VUCBhqcwY7/uh//78A1hMkkROMJq9l27oLQ==}
engines: {node: '>=18'}
cpu: [ia32]
os: [win32]
'@esbuild/win32-x64@0.27.2':
resolution: {integrity: sha512-sRdU18mcKf7F+YgheI/zGf5alZatMUTKj/jNS6l744f9u3WFu4v7twcUI9vu4mknF4Y9aDlblIie0IM+5xxaqQ==}
engines: {node: '>=18'}
cpu: [x64]
os: [win32]
'@hono/node-server@1.19.9':
resolution: {integrity: sha512-vHL6w3ecZsky+8P5MD+eFfaGTyCeOHUIFYMGpQGbrBTSmNNoxv0if69rEZ5giu36weC5saFuznL411gRX7bJDw==}
engines: {node: '>=18.14.1'}
peerDependencies:
hono: ^4
'@inquirer/ansi@1.0.2':
resolution: {integrity: sha512-S8qNSZiYzFd0wAcyG5AXCvUHC5Sr7xpZ9wZ2py9XR88jUz8wooStVx5M6dRzczbBWjic9NP7+rY0Xi7qqK/aMQ==}
engines: {node: '>=18'}
'@inquirer/checkbox@4.3.2':
resolution: {integrity: sha512-VXukHf0RR1doGe6Sm4F0Em7SWYLTHSsbGfJdS9Ja2bX5/D5uwVOEjr07cncLROdBvmnvCATYEWlHqYmXv2IlQA==}
engines: {node: '>=18'}
peerDependencies:
'@types/node': '>=18'
peerDependenciesMeta:
'@types/node':
optional: true
'@inquirer/confirm@5.1.21':
resolution: {integrity: sha512-KR8edRkIsUayMXV+o3Gv+q4jlhENF9nMYUZs9PA2HzrXeHI8M5uDag70U7RJn9yyiMZSbtF5/UexBtAVtZGSbQ==}
engines: {node: '>=18'}
peerDependencies:
'@types/node': '>=18'
peerDependenciesMeta:
'@types/node':
optional: true
'@inquirer/core@10.3.2':
resolution: {integrity: sha512-43RTuEbfP8MbKzedNqBrlhhNKVwoK//vUFNW3Q3vZ88BLcrs4kYpGg+B2mm5p2K/HfygoCxuKwJJiv8PbGmE0A==}
engines: {node: '>=18'}
peerDependencies:
'@types/node': '>=18'
peerDependenciesMeta:
'@types/node':
optional: true
'@inquirer/editor@4.2.23':
resolution: {integrity: sha512-aLSROkEwirotxZ1pBaP8tugXRFCxW94gwrQLxXfrZsKkfjOYC1aRvAZuhpJOb5cu4IBTJdsCigUlf2iCOu4ZDQ==}
engines: {node: '>=18'}
peerDependencies:
'@types/node': '>=18'
peerDependenciesMeta:
'@types/node':
optional: true
'@inquirer/expand@4.0.23':
resolution: {integrity: sha512-nRzdOyFYnpeYTTR2qFwEVmIWypzdAx/sIkCMeTNTcflFOovfqUk+HcFhQQVBftAh9gmGrpFj6QcGEqrDMDOiew==}
engines: {node: '>=18'}
peerDependencies:
'@types/node': '>=18'
peerDependenciesMeta:
'@types/node':
optional: true
'@inquirer/external-editor@1.0.3':
resolution: {integrity: sha512-RWbSrDiYmO4LbejWY7ttpxczuwQyZLBUyygsA9Nsv95hpzUWwnNTVQmAq3xuh7vNwCp07UTmE5i11XAEExx4RA==}
engines: {node: '>=18'}
peerDependencies:
'@types/node': '>=18'
peerDependenciesMeta:
'@types/node':
optional: true
'@inquirer/figures@1.0.15':
resolution: {integrity: sha512-t2IEY+unGHOzAaVM5Xx6DEWKeXlDDcNPeDyUpsRc6CUhBfU3VQOEl+Vssh7VNp1dR8MdUJBWhuObjXCsVpjN5g==}
engines: {node: '>=18'}
'@inquirer/input@4.3.1':
resolution: {integrity: sha512-kN0pAM4yPrLjJ1XJBjDxyfDduXOuQHrBB8aLDMueuwUGn+vNpF7Gq7TvyVxx8u4SHlFFj4trmj+a2cbpG4Jn1g==}
engines: {node: '>=18'}
peerDependencies:
'@types/node': '>=18'
peerDependenciesMeta:
'@types/node':
optional: true
'@inquirer/number@3.0.23':
resolution: {integrity: sha512-5Smv0OK7K0KUzUfYUXDXQc9jrf8OHo4ktlEayFlelCjwMXz0299Y8OrI+lj7i4gCBY15UObk76q0QtxjzFcFcg==}
engines: {node: '>=18'}
peerDependencies:
'@types/node': '>=18'
peerDependenciesMeta:
'@types/node':
optional: true
'@inquirer/password@4.0.23':
resolution: {integrity: sha512-zREJHjhT5vJBMZX/IUbyI9zVtVfOLiTO66MrF/3GFZYZ7T4YILW5MSkEYHceSii/KtRk+4i3RE7E1CUXA2jHcA==}
engines: {node: '>=18'}
peerDependencies:
'@types/node': '>=18'
peerDependenciesMeta:
'@types/node':
optional: true
'@inquirer/prompts@7.10.1':
resolution: {integrity: sha512-Dx/y9bCQcXLI5ooQ5KyvA4FTgeo2jYj/7plWfV5Ak5wDPKQZgudKez2ixyfz7tKXzcJciTxqLeK7R9HItwiByg==}
engines: {node: '>=18'}
peerDependencies:
'@types/node': '>=18'
peerDependenciesMeta:
'@types/node':
optional: true
'@inquirer/rawlist@4.1.11':
resolution: {integrity: sha512-+LLQB8XGr3I5LZN/GuAHo+GpDJegQwuPARLChlMICNdwW7OwV2izlCSCxN6cqpL0sMXmbKbFcItJgdQq5EBXTw==}
engines: {node: '>=18'}
peerDependencies:
'@types/node': '>=18'
peerDependenciesMeta:
'@types/node':
optional: true
'@inquirer/search@3.2.2':
resolution: {integrity: sha512-p2bvRfENXCZdWF/U2BXvnSI9h+tuA8iNqtUKb9UWbmLYCRQxd8WkvwWvYn+3NgYaNwdUkHytJMGG4MMLucI1kA==}
engines: {node: '>=18'}
peerDependencies:
'@types/node': '>=18'
peerDependenciesMeta:
'@types/node':
optional: true
'@inquirer/select@4.4.2':
resolution: {integrity: sha512-l4xMuJo55MAe+N7Qr4rX90vypFwCajSakx59qe/tMaC1aEHWLyw68wF4o0A4SLAY4E0nd+Vt+EyskeDIqu1M6w==}
engines: {node: '>=18'}
peerDependencies:
'@types/node': '>=18'
peerDependenciesMeta:
'@types/node':
optional: true
'@inquirer/type@3.0.10':
resolution: {integrity: sha512-BvziSRxfz5Ov8ch0z/n3oijRSEcEsHnhggm4xFZe93DHcUCTlutlq9Ox4SVENAfcRD22UQq7T/atg9Wr3k09eA==}
engines: {node: '>=18'}
peerDependencies:
'@types/node': '>=18'
peerDependenciesMeta:
'@types/node':
optional: true
'@isaacs/balanced-match@4.0.1':
resolution: {integrity: sha512-yzMTt9lEb8Gv7zRioUilSglI0c0smZ9k5D65677DLWLtWJaXIS3CqcGyUFByYKlnUj6TkjLVs54fBl6+TiGQDQ==}
engines: {node: 20 || >=22}
'@isaacs/brace-expansion@5.0.1':
resolution: {integrity: sha512-WMz71T1JS624nWj2n2fnYAuPovhv7EUhk69R6i9dsVyzxt5eM3bjwvgk9L+APE1TRscGysAVMANkB0jh0LQZrQ==}
engines: {node: 20 || >=22}
'@isaacs/fs-minipass@4.0.1':
resolution: {integrity: sha512-wgm9Ehl2jpeqP3zw/7mo3kRHFp5MEDhqAdwy1fTGkHAwnkGOVsgpvQhL8B5n1qlb01jV3n/bI0ZfZp5lWA1k4w==}
engines: {node: '>=18.0.0'}
'@istanbuljs/schema@0.1.3':
resolution: {integrity: sha512-ZXRY4jNvVgSVQ8DL3LTcakaAtXwTVUxE81hslsyD2AtoXW/wVob10HkOJ1X/pAlcI7D+2YoZKg5do8G/w6RYgA==}
engines: {node: '>=8'}
'@jridgewell/gen-mapping@0.3.13':
resolution: {integrity: sha512-2kkt/7niJ6MgEPxF0bYdQ6etZaA+fQvDcLKckhy1yIQOzaoKjBBjSj63/aLVjYE3qhRt5dvM+uUyfCg6UKCBbA==}
'@jridgewell/remapping@2.3.5':
resolution: {integrity: sha512-LI9u/+laYG4Ds1TDKSJW2YPrIlcVYOwi2fUC6xB43lueCjgxV4lffOCZCtYFiH6TNOX+tQKXx97T4IKHbhyHEQ==}
'@jridgewell/resolve-uri@3.1.2':
resolution: {integrity: sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==}
engines: {node: '>=6.0.0'}
'@jridgewell/source-map@0.3.11':
resolution: {integrity: sha512-ZMp1V8ZFcPG5dIWnQLr3NSI1MiCU7UETdS/A0G8V/XWHvJv3ZsFqutJn1Y5RPmAPX6F3BiE397OqveU/9NCuIA==}
'@jridgewell/sourcemap-codec@1.5.5':
resolution: {integrity: sha512-cYQ9310grqxueWbl+WuIUIaiUaDcj7WOq5fVhEljNVgRfOUhY9fy2zTvfoqWsnebh8Sl70VScFbICvJnLKB0Og==}
'@jridgewell/trace-mapping@0.3.31':
resolution: {integrity: sha512-zzNR+SdQSDJzc8joaeP8QQoCQr8NuYx2dIIytl1QeBEZHJ9uW6hebsrYgbz8hJwUQao3TWCMtmfV8Nu1twOLAw==}
'@jridgewell/trace-mapping@0.3.9':
resolution: {integrity: sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ==}
'@jsonjoy.com/base64@1.1.2':
resolution: {integrity: sha512-q6XAnWQDIMA3+FTiOYajoYqySkO+JSat0ytXGSuRdq9uXE7o92gzuQwQM14xaCRlBLGq3v5miDGC4vkVTn54xA==}
engines: {node: '>=10.0'}
peerDependencies:
tslib: '2'
'@jsonjoy.com/base64@17.67.0':
resolution: {integrity: sha512-5SEsJGsm15aP8TQGkDfJvz9axgPwAEm98S5DxOuYe8e1EbfajcDmgeXXzccEjh+mLnjqEKrkBdjHWS5vFNwDdw==}
engines: {node: '>=10.0'}
peerDependencies:
tslib: '2'
'@jsonjoy.com/buffers@1.2.1':
resolution: {integrity: sha512-12cdlDwX4RUM3QxmUbVJWqZ/mrK6dFQH4Zxq6+r1YXKXYBNgZXndx2qbCJwh3+WWkCSn67IjnlG3XYTvmvYtgA==}
engines: {node: '>=10.0'}
peerDependencies:
tslib: '2'
'@jsonjoy.com/buffers@17.67.0':
resolution: {integrity: sha512-tfExRpYxBvi32vPs9ZHaTjSP4fHAfzSmcahOfNxtvGHcyJel+aibkPlGeBB+7AoC6hL7lXIE++8okecBxx7lcw==}
engines: {node: '>=10.0'}
peerDependencies:
tslib: '2'
'@jsonjoy.com/codegen@1.0.0':
resolution: {integrity: sha512-E8Oy+08cmCf0EK/NMxpaJZmOxPqM+6iSe2S4nlSBrPZOORoDJILxtbSUEDKQyTamm/BVAhIGllOBNU79/dwf0g==}
engines: {node: '>=10.0'}
peerDependencies:
tslib: '2'
'@jsonjoy.com/codegen@17.67.0':
resolution: {integrity: sha512-idnkUplROpdBOV0HMcwhsCUS5TRUi9poagdGs70A6S4ux9+/aPuKbh8+UYRTLYQHtXvAdNfQWXDqZEx5k4Dj2Q==}
engines: {node: '>=10.0'}
peerDependencies:
tslib: '2'
'@jsonjoy.com/fs-core@4.56.10':
resolution: {integrity: sha512-PyAEA/3cnHhsGcdY+AmIU+ZPqTuZkDhCXQ2wkXypdLitSpd6d5Ivxhnq4wa2ETRWFVJGabYynBWxIijOswSmOw==}
engines: {node: '>=10.0'}
peerDependencies:
tslib: '2'
'@jsonjoy.com/fs-fsa@4.56.10':
resolution: {integrity: sha512-/FVK63ysNzTPOnCCcPoPHt77TOmachdMS422txM4KhxddLdbW1fIbFMYH0AM0ow/YchCyS5gqEjKLNyv71j/5Q==}
engines: {node: '>=10.0'}
peerDependencies:
tslib: '2'
'@jsonjoy.com/fs-node-builtins@4.56.10':
resolution: {integrity: sha512-uUnKz8R0YJyKq5jXpZtkGV9U0pJDt8hmYcLRrPjROheIfjMXsz82kXMgAA/qNg0wrZ1Kv+hrg7azqEZx6XZCVw==}
engines: {node: '>=10.0'}
peerDependencies:
tslib: '2'
'@jsonjoy.com/fs-node-to-fsa@4.56.10':
resolution: {integrity: sha512-oH+O6Y4lhn9NyG6aEoFwIBNKZeYy66toP5LJcDOMBgL99BKQMUf/zWJspdRhMdn/3hbzQsZ8EHHsuekbFLGUWw==}
engines: {node: '>=10.0'}
peerDependencies:
tslib: '2'
'@jsonjoy.com/fs-node-utils@4.56.10':
resolution: {integrity: sha512-8EuPBgVI2aDPwFdaNQeNpHsyqPi3rr+85tMNG/lHvQLiVjzoZsvxA//Xd8aB567LUhy4QS03ptT+unkD/DIsNg==}
engines: {node: '>=10.0'}
peerDependencies:
tslib: '2'
'@jsonjoy.com/fs-node@4.56.10':
resolution: {integrity: sha512-7R4Gv3tkUdW3dXfXiOkqxkElxKNVdd8BDOWC0/dbERd0pXpPY+s2s1Mino+aTvkGrFPiY+mmVxA7zhskm4Ue4Q==}
engines: {node: '>=10.0'}
peerDependencies:
tslib: '2'
'@jsonjoy.com/fs-print@4.56.10':
resolution: {integrity: sha512-JW4fp5mAYepzFsSGrQ48ep8FXxpg4niFWHdF78wDrFGof7F3tKDJln72QFDEn/27M1yHd4v7sKHHVPh78aWcEw==}
engines: {node: '>=10.0'}
peerDependencies:
tslib: '2'
'@jsonjoy.com/fs-snapshot@4.56.10':
resolution: {integrity: sha512-DkR6l5fj7+qj0+fVKm/OOXMGfDFCGXLfyHkORH3DF8hxkpDgIHbhf/DwncBMs2igu/ST7OEkexn1gIqoU6Y+9g==}
engines: {node: '>=10.0'}
peerDependencies:
tslib: '2'
'@jsonjoy.com/json-pack@1.21.0':
resolution: {integrity: sha512-+AKG+R2cfZMShzrF2uQw34v3zbeDYUqnQ+jg7ORic3BGtfw9p/+N6RJbq/kkV8JmYZaINknaEQ2m0/f693ZPpg==}
engines: {node: '>=10.0'}
peerDependencies:
tslib: '2'
'@jsonjoy.com/json-pack@17.67.0':
resolution: {integrity: sha512-t0ejURcGaZsn1ClbJ/3kFqSOjlryd92eQY465IYrezsXmPcfHPE/av4twRSxf6WE+TkZgLY+71vCZbiIiFKA/w==}
engines: {node: '>=10.0'}
peerDependencies:
tslib: '2'
'@jsonjoy.com/json-pointer@1.0.2':
resolution: {integrity: sha512-Fsn6wM2zlDzY1U+v4Nc8bo3bVqgfNTGcn6dMgs6FjrEnt4ZCe60o6ByKRjOGlI2gow0aE/Q41QOigdTqkyK5fg==}
engines: {node: '>=10.0'}
peerDependencies:
tslib: '2'
'@jsonjoy.com/json-pointer@17.67.0':
resolution: {integrity: sha512-+iqOFInH+QZGmSuaybBUNdh7yvNrXvqR+h3wjXm0N/3JK1EyyFAeGJvqnmQL61d1ARLlk/wJdFKSL+LHJ1eaUA==}
engines: {node: '>=10.0'}
peerDependencies:
tslib: '2'
'@jsonjoy.com/util@1.9.0':
resolution: {integrity: sha512-pLuQo+VPRnN8hfPqUTLTHk126wuYdXVxE6aDmjSeV4NCAgyxWbiOIeNJVtID3h1Vzpoi9m4jXezf73I6LgabgQ==}
engines: {node: '>=10.0'}
peerDependencies:
tslib: '2'
'@jsonjoy.com/util@17.67.0':
resolution: {integrity: sha512-6+8xBaz1rLSohlGh68D1pdw3AwDi9xydm8QNlAFkvnavCJYSze+pxoW2VKP8p308jtlMRLs5NTHfPlZLd4w7ew==}
engines: {node: '>=10.0'}
peerDependencies:
tslib: '2'
'@leichtgewicht/ip-codec@2.0.5':
resolution: {integrity: sha512-Vo+PSpZG2/fmgmiNzYK9qWRh8h/CHrwD0mo1h1DzL4yzHNSfWYujGTYsWGreD000gcgmZ7K4Ys6Tx9TxtsKdDw==}
'@listr2/prompt-adapter-inquirer@3.0.5':
resolution: {integrity: sha512-WELs+hj6xcilkloBXYf9XXK8tYEnKsgLj01Xl5ONUJpKjmT5hGVUzNUS5tooUxs7pGMrw+jFD/41WpqW4V3LDA==}
engines: {node: '>=20.0.0'}
peerDependencies:
'@inquirer/prompts': '>= 3 < 8'
listr2: 9.0.5
'@lmdb/lmdb-darwin-arm64@3.4.4':
resolution: {integrity: sha512-XaKL705gDWd6XVls3ATDj13ZdML/LqSIxwgnYpG8xTzH2ifArx8fMMDdvqGE/Emd+W6R90W2fveZcJ0AyS8Y0w==}
cpu: [arm64]
os: [darwin]
'@lmdb/lmdb-darwin-x64@3.4.4':
resolution: {integrity: sha512-GPHGEVcwJlkD01GmIr7B4kvbIcUDS2+kBadVEd7lU4can1RZaZQLDDBJRrrNfS2Kavvl0VLI/cMv7UASAXGrww==}
cpu: [x64]
os: [darwin]
'@lmdb/lmdb-linux-arm64@3.4.4':
resolution: {integrity: sha512-mALqr7DE42HsiwVTKpQWxacjHoJk+e9p00RWIJqTACh/hpucxp/0lK/XMh5XzWnU/TDCZLukq1+vNqnNumTP/Q==}
cpu: [arm64]
os: [linux]
'@lmdb/lmdb-linux-arm@3.4.4':
resolution: {integrity: sha512-cmev5/dZr5ACKri9f6GU6lZCXTjMhV72xujlbOhFCgFXrt4W0TxGsmY8kA1BITvH60JBKE50cSxsiulybAbrrw==}
cpu: [arm]
os: [linux]
'@lmdb/lmdb-linux-x64@3.4.4':
resolution: {integrity: sha512-QjLs8OcmCNcraAcLoZyFlo0atzBJniQLLwhtR+ymQqS5kLYpV5RqwriL87BW+ZiR9ZiGgZx3evrz5vnWPtJ1fQ==}
cpu: [x64]
os: [linux]
'@lmdb/lmdb-win32-arm64@3.4.4':
resolution: {integrity: sha512-tr/pwHDlZ33forLGAr0tI04cRmP4SgF93yHbb+2zvZiDEyln5yMHhbKDySxY66aUOkhvBvTuHq9q/3YmTj6ZHQ==}
cpu: [arm64]
os: [win32]
'@lmdb/lmdb-win32-x64@3.4.4':
resolution: {integrity: sha512-KRzfocJzB/mgoTCqnMawuLSKheHRVTqWfSmouIgYpFs6Hx4zvZSvsZKSCEb5gHmICy7qsx9l06jk3MFTtiFVAQ==}
cpu: [x64]
os: [win32]
'@modelcontextprotocol/sdk@1.25.2':
resolution: {integrity: sha512-LZFeo4F9M5qOhC/Uc1aQSrBHxMrvxett+9KLHt7OhcExtoiRN9DKgbZffMP/nxjutWDQpfMDfP3nkHI4X9ijww==}
engines: {node: '>=18'}
peerDependencies:
'@cfworker/json-schema': ^4.1.1
zod: ^3.25 || ^4.0
peerDependenciesMeta:
'@cfworker/json-schema':
optional: true
'@msgpackr-extract/msgpackr-extract-darwin-arm64@3.0.3':
resolution: {integrity: sha512-QZHtlVgbAdy2zAqNA9Gu1UpIuI8Xvsd1v8ic6B2pZmeFnFcMWiPLfWXh7TVw4eGEZ/C9TH281KwhVoeQUKbyjw==}
cpu: [arm64]
os: [darwin]
'@msgpackr-extract/msgpackr-extract-darwin-x64@3.0.3':
resolution: {integrity: sha512-mdzd3AVzYKuUmiWOQ8GNhl64/IoFGol569zNRdkLReh6LRLHOXxU4U8eq0JwaD8iFHdVGqSy4IjFL4reoWCDFw==}
cpu: [x64]
os: [darwin]
'@msgpackr-extract/msgpackr-extract-linux-arm64@3.0.3':
resolution: {integrity: sha512-YxQL+ax0XqBJDZiKimS2XQaf+2wDGVa1enVRGzEvLLVFeqa5kx2bWbtcSXgsxjQB7nRqqIGFIcLteF/sHeVtQg==}
cpu: [arm64]
os: [linux]
'@msgpackr-extract/msgpackr-extract-linux-arm@3.0.3':
resolution: {integrity: sha512-fg0uy/dG/nZEXfYilKoRe7yALaNmHoYeIoJuJ7KJ+YyU2bvY8vPv27f7UKhGRpY6euFYqEVhxCFZgAUNQBM3nw==}
cpu: [arm]
os: [linux]
'@msgpackr-extract/msgpackr-extract-linux-x64@3.0.3':
resolution: {integrity: sha512-cvwNfbP07pKUfq1uH+S6KJ7dT9K8WOE4ZiAcsrSes+UY55E/0jLYc+vq+DO7jlmqRb5zAggExKm0H7O/CBaesg==}
cpu: [x64]
os: [linux]
'@msgpackr-extract/msgpackr-extract-win32-x64@3.0.3':
resolution: {integrity: sha512-x0fWaQtYp4E6sktbsdAqnehxDgEc/VwM7uLsRCYWaiGu0ykYdZPiS8zCWdnjHwyiumousxfBm4SO31eXqwEZhQ==}
cpu: [x64]
os: [win32]
'@napi-rs/nice-android-arm-eabi@1.1.1':
resolution: {integrity: sha512-kjirL3N6TnRPv5iuHw36wnucNqXAO46dzK9oPb0wj076R5Xm8PfUVA9nAFB5ZNMmfJQJVKACAPd/Z2KYMppthw==}
engines: {node: '>= 10'}
cpu: [arm]
os: [android]
'@napi-rs/nice-android-arm64@1.1.1':
resolution: {integrity: sha512-blG0i7dXgbInN5urONoUCNf+DUEAavRffrO7fZSeoRMJc5qD+BJeNcpr54msPF6qfDD6kzs9AQJogZvT2KD5nw==}
engines: {node: '>= 10'}
cpu: [arm64]
os: [android]
'@napi-rs/nice-darwin-arm64@1.1.1':
resolution: {integrity: sha512-s/E7w45NaLqTGuOjC2p96pct4jRfo61xb9bU1unM/MJ/RFkKlJyJDx7OJI/O0ll/hrfpqKopuAFDV8yo0hfT7A==}
engines: {node: '>= 10'}
cpu: [arm64]
os: [darwin]
'@napi-rs/nice-darwin-x64@1.1.1':
resolution: {integrity: sha512-dGoEBnVpsdcC+oHHmW1LRK5eiyzLwdgNQq3BmZIav+9/5WTZwBYX7r5ZkQC07Nxd3KHOCkgbHSh4wPkH1N1LiQ==}
engines: {node: '>= 10'}
cpu: [x64]
os: [darwin]
'@napi-rs/nice-freebsd-x64@1.1.1':
resolution: {integrity: sha512-kHv4kEHAylMYmlNwcQcDtXjklYp4FCf0b05E+0h6nDHsZ+F0bDe04U/tXNOqrx5CmIAth4vwfkjjUmp4c4JktQ==}
engines: {node: '>= 10'}
cpu: [x64]
os: [freebsd]
'@napi-rs/nice-linux-arm-gnueabihf@1.1.1':
resolution: {integrity: sha512-E1t7K0efyKXZDoZg1LzCOLxgolxV58HCkaEkEvIYQx12ht2pa8hoBo+4OB3qh7e+QiBlp1SRf+voWUZFxyhyqg==}
engines: {node: '>= 10'}
cpu: [arm]
os: [linux]
'@napi-rs/nice-linux-arm64-gnu@1.1.1':
resolution: {integrity: sha512-CIKLA12DTIZlmTaaKhQP88R3Xao+gyJxNWEn04wZwC2wmRapNnxCUZkVwggInMJvtVElA+D4ZzOU5sX4jV+SmQ==}
engines: {node: '>= 10'}
cpu: [arm64]
os: [linux]
libc: [glibc]
'@napi-rs/nice-linux-arm64-musl@1.1.1':
resolution: {integrity: sha512-+2Rzdb3nTIYZ0YJF43qf2twhqOCkiSrHx2Pg6DJaCPYhhaxbLcdlV8hCRMHghQ+EtZQWGNcS2xF4KxBhSGeutg==}
engines: {node: '>= 10'}
cpu: [arm64]
os: [linux]
libc: [musl]
'@napi-rs/nice-linux-ppc64-gnu@1.1.1':
resolution: {integrity: sha512-4FS8oc0GeHpwvv4tKciKkw3Y4jKsL7FRhaOeiPei0X9T4Jd619wHNe4xCLmN2EMgZoeGg+Q7GY7BsvwKpL22Tg==}
engines: {node: '>= 10'}
cpu: [ppc64]
os: [linux]
libc: [glibc]
'@napi-rs/nice-linux-riscv64-gnu@1.1.1':
resolution: {integrity: sha512-HU0nw9uD4FO/oGCCk409tCi5IzIZpH2agE6nN4fqpwVlCn5BOq0MS1dXGjXaG17JaAvrlpV5ZeyZwSon10XOXw==}
engines: {node: '>= 10'}
cpu: [riscv64]
os: [linux]
libc: [glibc]
'@napi-rs/nice-linux-s390x-gnu@1.1.1':
resolution: {integrity: sha512-2YqKJWWl24EwrX0DzCQgPLKQBxYDdBxOHot1KWEq7aY2uYeX+Uvtv4I8xFVVygJDgf6/92h9N3Y43WPx8+PAgQ==}
engines: {node: '>= 10'}
cpu: [s390x]
os: [linux]
libc: [glibc]
'@napi-rs/nice-linux-x64-gnu@1.1.1':
resolution: {integrity: sha512-/gaNz3R92t+dcrfCw/96pDopcmec7oCcAQ3l/M+Zxr82KT4DljD37CpgrnXV+pJC263JkW572pdbP3hP+KjcIg==}
engines: {node: '>= 10'}
cpu: [x64]
os: [linux]
libc: [glibc]
'@napi-rs/nice-linux-x64-musl@1.1.1':
resolution: {integrity: sha512-xScCGnyj/oppsNPMnevsBe3pvNaoK7FGvMjT35riz9YdhB2WtTG47ZlbxtOLpjeO9SqqQ2J2igCmz6IJOD5JYw==}
engines: {node: '>= 10'}
cpu: [x64]
os: [linux]
libc: [musl]
'@napi-rs/nice-openharmony-arm64@1.1.1':
resolution: {integrity: sha512-6uJPRVwVCLDeoOaNyeiW0gp2kFIM4r7PL2MczdZQHkFi9gVlgm+Vn+V6nTWRcu856mJ2WjYJiumEajfSm7arPQ==}
engines: {node: '>= 10'}
cpu: [arm64]
os: [openharmony]
'@napi-rs/nice-win32-arm64-msvc@1.1.1':
resolution: {integrity: sha512-uoTb4eAvM5B2aj/z8j+Nv8OttPf2m+HVx3UjA5jcFxASvNhQriyCQF1OB1lHL43ZhW+VwZlgvjmP5qF3+59atA==}
engines: {node: '>= 10'}
cpu: [arm64]
os: [win32]
'@napi-rs/nice-win32-ia32-msvc@1.1.1':
resolution: {integrity: sha512-CNQqlQT9MwuCsg1Vd/oKXiuH+TcsSPJmlAFc5frFyX/KkOh0UpBLEj7aoY656d5UKZQMQFP7vJNa1DNUNORvug==}
engines: {node: '>= 10'}
cpu: [ia32]
os: [win32]
'@napi-rs/nice-win32-x64-msvc@1.1.1':
resolution: {integrity: sha512-vB+4G/jBQCAh0jelMTY3+kgFy00Hlx2f2/1zjMoH821IbplbWZOkLiTYXQkygNTzQJTq5cvwBDgn2ppHD+bglQ==}
engines: {node: '>= 10'}
cpu: [x64]
os: [win32]
'@napi-rs/nice@1.1.1':
resolution: {integrity: sha512-xJIPs+bYuc9ASBl+cvGsKbGrJmS6fAKaSZCnT0lhahT5rhA2VVy9/EcIgd2JhtEuFOJNx7UHNn/qiTPTY4nrQw==}
engines: {node: '>= 10'}
'@napi-rs/wasm-runtime@1.1.1':
resolution: {integrity: sha512-p64ah1M1ld8xjWv3qbvFwHiFVWrq1yFvV4f7w+mzaqiR4IlSgkqhcRdHwsGgomwzBH51sRY4NEowLxnaBjcW/A==}
'@ngtools/webpack@21.1.0-rc.0':
resolution: {integrity: sha512-ZFLz0m03wGSjQsp1U0rwnTEzW9Nv8fFfk4e1Yu+28VjlVcMxRujAtJ0jEXwQ4QsrSU9er7ZFacvPArAOzfWIXA==}
engines: {node: ^20.19.0 || ^22.12.0 || >=24.0.0, npm: ^6.11.0 || ^7.5.6 || >=8.0.0, yarn: '>= 1.13.0'}
peerDependencies:
'@angular/compiler-cli': ^21.0.0 || ^21.1.0-next.0
typescript: '>=5.9 <6.0'
webpack: ^5.54.0
'@npmcli/agent@4.0.0':
resolution: {integrity: sha512-kAQTcEN9E8ERLVg5AsGwLNoFb+oEG6engbqAU2P43gD4JEIkNGMHdVQ096FsOAAYpZPB0RSt0zgInKIAS1l5QA==}
engines: {node: ^20.17.0 || >=22.9.0}
'@npmcli/fs@5.0.0':
resolution: {integrity: sha512-7OsC1gNORBEawOa5+j2pXN9vsicaIOH5cPXxoR6fJOmH6/EXpJB2CajXOu1fPRFun2m1lktEFX11+P89hqO/og==}
engines: {node: ^20.17.0 || >=22.9.0}
'@npmcli/git@7.0.1':
resolution: {integrity: sha512-+XTFxK2jJF/EJJ5SoAzXk3qwIDfvFc5/g+bD274LZ7uY7LE8sTfG6Z8rOanPl2ZEvZWqNvmEdtXC25cE54VcoA==}
engines: {node: ^20.17.0 || >=22.9.0}
'@npmcli/installed-package-contents@4.0.0':
resolution: {integrity: sha512-yNyAdkBxB72gtZ4GrwXCM0ZUedo9nIbOMKfGjt6Cu6DXf0p8y1PViZAKDC8q8kv/fufx0WTjRBdSlyrvnP7hmA==}
engines: {node: ^20.17.0 || >=22.9.0}
hasBin: true
'@npmcli/node-gyp@5.0.0':
resolution: {integrity: sha512-uuG5HZFXLfyFKqg8QypsmgLQW7smiRjVc45bqD/ofZZcR/uxEjgQU8qDPv0s9TEeMUiAAU/GC5bR6++UdTirIQ==}
engines: {node: ^20.17.0 || >=22.9.0}
'@npmcli/package-json@7.0.4':
resolution: {integrity: sha512-0wInJG3j/K40OJt/33ax47WfWMzZTm6OQxB9cDhTt5huCP2a9g2GnlsxmfN+PulItNPIpPrZ+kfwwUil7eHcZQ==}
engines: {node: ^20.17.0 || >=22.9.0}
'@npmcli/promise-spawn@9.0.1':
resolution: {integrity: sha512-OLUaoqBuyxeTqUvjA3FZFiXUfYC1alp3Sa99gW3EUDz3tZ3CbXDdcZ7qWKBzicrJleIgucoWamWH1saAmH/l2Q==}
engines: {node: ^20.17.0 || >=22.9.0}
'@npmcli/redact@4.0.0':
resolution: {integrity: sha512-gOBg5YHMfZy+TfHArfVogwgfBeQnKbbGo3pSUyK/gSI0AVu+pEiDVcKlQb0D8Mg1LNRZILZ6XG8I5dJ4KuAd9Q==}
engines: {node: ^20.17.0 || >=22.9.0}
'@npmcli/run-script@10.0.3':
resolution: {integrity: sha512-ER2N6itRkzWbbtVmZ9WKaWxVlKlOeBFF1/7xx+KA5J1xKa4JjUwBdb6tDpk0v1qA+d+VDwHI9qmLcXSWcmi+Rw==}
engines: {node: ^20.17.0 || >=22.9.0}
'@oxc-project/types@0.106.0':
resolution: {integrity: sha512-QdsH3rZq480VnOHSHgPYOhjL8O8LBdcnSjM408BpPCCUc0JYYZPG9Gafl9i3OcGk/7137o+gweb4cCv3WAUykg==}
'@parcel/watcher-android-arm64@2.5.6':
resolution: {integrity: sha512-YQxSS34tPF/6ZG7r/Ih9xy+kP/WwediEUsqmtf0cuCV5TPPKw/PQHRhueUo6JdeFJaqV3pyjm0GdYjZotbRt/A==}
engines: {node: '>= 10.0.0'}
cpu: [arm64]
os: [android]
'@parcel/watcher-darwin-arm64@2.5.6':
resolution: {integrity: sha512-Z2ZdrnwyXvvvdtRHLmM4knydIdU9adO3D4n/0cVipF3rRiwP+3/sfzpAwA/qKFL6i1ModaabkU7IbpeMBgiVEA==}
engines: {node: '>= 10.0.0'}
cpu: [arm64]
os: [darwin]
'@parcel/watcher-darwin-x64@2.5.6':
resolution: {integrity: sha512-HgvOf3W9dhithcwOWX9uDZyn1lW9R+7tPZ4sug+NGrGIo4Rk1hAXLEbcH1TQSqxts0NYXXlOWqVpvS1SFS4fRg==}
engines: {node: '>= 10.0.0'}
cpu: [x64]
os: [darwin]
'@parcel/watcher-freebsd-x64@2.5.6':
resolution: {integrity: sha512-vJVi8yd/qzJxEKHkeemh7w3YAn6RJCtYlE4HPMoVnCpIXEzSrxErBW5SJBgKLbXU3WdIpkjBTeUNtyBVn8TRng==}
engines: {node: '>= 10.0.0'}
cpu: [x64]
os: [freebsd]
'@parcel/watcher-linux-arm-glibc@2.5.6':
resolution: {integrity: sha512-9JiYfB6h6BgV50CCfasfLf/uvOcJskMSwcdH1PHH9rvS1IrNy8zad6IUVPVUfmXr+u+Km9IxcfMLzgdOudz9EQ==}
engines: {node: '>= 10.0.0'}
cpu: [arm]
os: [linux]
libc: [glibc]
'@parcel/watcher-linux-arm-musl@2.5.6':
resolution: {integrity: sha512-Ve3gUCG57nuUUSyjBq/MAM0CzArtuIOxsBdQ+ftz6ho8n7s1i9E1Nmk/xmP323r2YL0SONs1EuwqBp2u1k5fxg==}
engines: {node: '>= 10.0.0'}
cpu: [arm]
os: [linux]
libc: [musl]
'@parcel/watcher-linux-arm64-glibc@2.5.6':
resolution: {integrity: sha512-f2g/DT3NhGPdBmMWYoxixqYr3v/UXcmLOYy16Bx0TM20Tchduwr4EaCbmxh1321TABqPGDpS8D/ggOTaljijOA==}
engines: {node: '>= 10.0.0'}
cpu: [arm64]
os: [linux]
libc: [glibc]
'@parcel/watcher-linux-arm64-musl@2.5.6':
resolution: {integrity: sha512-qb6naMDGlbCwdhLj6hgoVKJl2odL34z2sqkC7Z6kzir8b5W65WYDpLB6R06KabvZdgoHI/zxke4b3zR0wAbDTA==}
engines: {node: '>= 10.0.0'}
cpu: [arm64]
os: [linux]
libc: [musl]
'@parcel/watcher-linux-x64-glibc@2.5.6':
resolution: {integrity: sha512-kbT5wvNQlx7NaGjzPFu8nVIW1rWqV780O7ZtkjuWaPUgpv2NMFpjYERVi0UYj1msZNyCzGlaCWEtzc+exjMGbQ==}
engines: {node: '>= 10.0.0'}
cpu: [x64]
os: [linux]
libc: [glibc]
'@parcel/watcher-linux-x64-musl@2.5.6':
resolution: {integrity: sha512-1JRFeC+h7RdXwldHzTsmdtYR/Ku8SylLgTU/reMuqdVD7CtLwf0VR1FqeprZ0eHQkO0vqsbvFLXUmYm/uNKJBg==}
engines: {node: '>= 10.0.0'}
cpu: [x64]
os: [linux]
libc: [musl]
'@parcel/watcher-win32-arm64@2.5.6':
resolution: {integrity: sha512-3ukyebjc6eGlw9yRt678DxVF7rjXatWiHvTXqphZLvo7aC5NdEgFufVwjFfY51ijYEWpXbqF5jtrK275z52D4Q==}
engines: {node: '>= 10.0.0'}
cpu: [arm64]
os: [win32]
'@parcel/watcher-win32-ia32@2.5.6':
resolution: {integrity: sha512-k35yLp1ZMwwee3Ez/pxBi5cf4AoBKYXj00CZ80jUz5h8prpiaQsiRPKQMxoLstNuqe2vR4RNPEAEcjEFzhEz/g==}
engines: {node: '>= 10.0.0'}
cpu: [ia32]
os: [win32]
'@parcel/watcher-win32-x64@2.5.6':
resolution: {integrity: sha512-hbQlYcCq5dlAX9Qx+kFb0FHue6vbjlf0FrNzSKdYK2APUf7tGfGxQCk2ihEREmbR6ZMc0MVAD5RIX/41gpUzTw==}
engines: {node: '>= 10.0.0'}
cpu: [x64]
os: [win32]
'@parcel/watcher@2.5.6':
resolution: {integrity: sha512-tmmZ3lQxAe/k/+rNnXQRawJ4NjxO2hqiOLTHvWchtGZULp4RyFeh6aU4XdOYBFe2KE1oShQTv4AblOs2iOrNnQ==}
engines: {node: '>= 10.0.0'}
'@rolldown/binding-android-arm64@1.0.0-beta.58':
resolution: {integrity: sha512-mWj5eE4Qc8TbPdGGaaLvBb9XfDPvE1EmZkJQgiGKwchkWH4oAJcRAKMTw7ZHnb1L+t7Ah41sBkAecaIsuUgsug==}
engines: {node: ^20.19.0 || >=22.12.0}
cpu: [arm64]
os: [android]
'@rolldown/binding-darwin-arm64@1.0.0-beta.58':
resolution: {integrity: sha512-wFxUymI/5R8bH8qZFYDfAxAN9CyISEIYke+95oZPiv6EWo88aa5rskjVcCpKA532R+klFmdqjbbaD56GNmTF4Q==}
engines: {node: ^20.19.0 || >=22.12.0}
cpu: [arm64]
os: [darwin]
'@rolldown/binding-darwin-x64@1.0.0-beta.58':
resolution: {integrity: sha512-ybp3MkPj23VDV9PhtRwdU5qrGhlViWRV5BjKwO6epaSlUD5lW0WyY+roN3ZAzbma/9RrMTgZ/a/gtQq8YXOcqw==}
engines: {node: ^20.19.0 || >=22.12.0}
cpu: [x64]
os: [darwin]
'@rolldown/binding-freebsd-x64@1.0.0-beta.58':
resolution: {integrity: sha512-Evxj3yh7FWvyklUYZa0qTVT9N2zX9TPDqGF056hl8hlCZ9/ndQ2xMv6uw9PD1VlLpukbsqL+/C6M0qwipL0QMg==}
engines: {node: ^20.19.0 || >=22.12.0}
cpu: [x64]
os: [freebsd]
'@rolldown/binding-linux-arm-gnueabihf@1.0.0-beta.58':
resolution: {integrity: sha512-tYeXprDOrEgVHUbPXH6MPso4cM/c6RTkmJNICMQlYdki4hGMh92aj3yU6CKs+4X5gfG0yj5kVUw/L4M685SYag==}
engines: {node: ^20.19.0 || >=22.12.0}
cpu: [arm]
os: [linux]
'@rolldown/binding-linux-arm64-gnu@1.0.0-beta.58':
resolution: {integrity: sha512-N78vmZzP6zG967Ohr+MasCjmKtis0geZ1SOVmxrA0/bklTQSzH5kHEjW5Qn+i1taFno6GEre1E40v0wuWsNOQw==}
engines: {node: ^20.19.0 || >=22.12.0}
cpu: [arm64]
os: [linux]
libc: [glibc]
'@rolldown/binding-linux-arm64-musl@1.0.0-beta.58':
resolution: {integrity: sha512-l+p4QVtG72C7wI2SIkNQw/KQtSjuYwS3rV6AKcWrRBF62ClsFUcif5vLaZIEbPrCXu5OFRXigXFJnxYsVVZqdQ==}
engines: {node: ^20.19.0 || >=22.12.0}
cpu: [arm64]
os: [linux]
libc: [musl]
'@rolldown/binding-linux-x64-gnu@1.0.0-beta.58':
resolution: {integrity: sha512-urzJX0HrXxIh0FfxwWRjfPCMeInU9qsImLQxHBgLp5ivji1EEUnOfux8KxPPnRQthJyneBrN2LeqUix9DYrNaQ==}
engines: {node: ^20.19.0 || >=22.12.0}
cpu: [x64]
os: [linux]
libc: [glibc]
'@rolldown/binding-linux-x64-musl@1.0.0-beta.58':
resolution: {integrity: sha512-7ijfVK3GISnXIwq/1FZo+KyAUJjL3kWPJ7rViAL6MWeEBhEgRzJ0yEd9I8N9aut8Y8ab+EKFJyRNMWZuUBwQ0A==}
engines: {node: ^20.19.0 || >=22.12.0}
cpu: [x64]
os: [linux]
libc: [musl]
'@rolldown/binding-openharmony-arm64@1.0.0-beta.58':
resolution: {integrity: sha512-/m7sKZCS+cUULbzyJTIlv8JbjNohxbpAOA6cM+lgWgqVzPee3U6jpwydrib328JFN/gF9A99IZEnuGYqEDJdww==}
engines: {node: ^20.19.0 || >=22.12.0}
cpu: [arm64]
os: [openharmony]
'@rolldown/binding-wasm32-wasi@1.0.0-beta.58':
resolution: {integrity: sha512-6SZk7zMgv+y3wFFQ9qE5P9NnRHcRsptL1ypmudD26PDY+PvFCvfHRkJNfclWnvacVGxjowr7JOL3a9fd1wWhUw==}
engines: {node: '>=14.0.0'}
cpu: [wasm32]
'@rolldown/binding-win32-arm64-msvc@1.0.0-beta.58':
resolution: {integrity: sha512-sFqfYPnBZ6xBhMkadB7UD0yjEDRvs7ipR3nCggblN+N4ODCXY6qhg/bKL39+W+dgQybL7ErD4EGERVbW9DAWvg==}
engines: {node: ^20.19.0 || >=22.12.0}
cpu: [arm64]
os: [win32]
'@rolldown/binding-win32-x64-msvc@1.0.0-beta.58':
resolution: {integrity: sha512-AnFWJdAqB8+IDPcGrATYs67Kik/6tnndNJV2jGRmwlbeNiQQ8GhRJU8ETRlINfII0pqi9k4WWLnb00p1QCxw/Q==}
engines: {node: ^20.19.0 || >=22.12.0}
cpu: [x64]
os: [win32]
'@rolldown/pluginutils@1.0.0-beta.58':
resolution: {integrity: sha512-qWhDs6yFGR5xDfdrwiSa3CWGIHxD597uGE/A9xGqytBjANvh4rLCTTkq7szhMV4+Ygh+PMS90KVJ8xWG/TkX4w==}
'@rollup/rollup-android-arm-eabi@4.57.1':
resolution: {integrity: sha512-A6ehUVSiSaaliTxai040ZpZ2zTevHYbvu/lDoeAteHI8QnaosIzm4qwtezfRg1jOYaUmnzLX1AOD6Z+UJjtifg==}
cpu: [arm]
os: [android]
'@rollup/rollup-android-arm64@4.57.1':
resolution: {integrity: sha512-dQaAddCY9YgkFHZcFNS/606Exo8vcLHwArFZ7vxXq4rigo2bb494/xKMMwRRQW6ug7Js6yXmBZhSBRuBvCCQ3w==}
cpu: [arm64]
os: [android]
'@rollup/rollup-darwin-arm64@4.57.1':
resolution: {integrity: sha512-crNPrwJOrRxagUYeMn/DZwqN88SDmwaJ8Cvi/TN1HnWBU7GwknckyosC2gd0IqYRsHDEnXf328o9/HC6OkPgOg==}
cpu: [arm64]
os: [darwin]
'@rollup/rollup-darwin-x64@4.57.1':
resolution: {integrity: sha512-Ji8g8ChVbKrhFtig5QBV7iMaJrGtpHelkB3lsaKzadFBe58gmjfGXAOfI5FV0lYMH8wiqsxKQ1C9B0YTRXVy4w==}
cpu: [x64]
os: [darwin]
'@rollup/rollup-freebsd-arm64@4.57.1':
resolution: {integrity: sha512-R+/WwhsjmwodAcz65guCGFRkMb4gKWTcIeLy60JJQbXrJ97BOXHxnkPFrP+YwFlaS0m+uWJTstrUA9o+UchFug==}
cpu: [arm64]
os: [freebsd]
'@rollup/rollup-freebsd-x64@4.57.1':
resolution: {integrity: sha512-IEQTCHeiTOnAUC3IDQdzRAGj3jOAYNr9kBguI7MQAAZK3caezRrg0GxAb6Hchg4lxdZEI5Oq3iov/w/hnFWY9Q==}
cpu: [x64]
os: [freebsd]
'@rollup/rollup-linux-arm-gnueabihf@4.57.1':
resolution: {integrity: sha512-F8sWbhZ7tyuEfsmOxwc2giKDQzN3+kuBLPwwZGyVkLlKGdV1nvnNwYD0fKQ8+XS6hp9nY7B+ZeK01EBUE7aHaw==}
cpu: [arm]
os: [linux]
libc: [glibc]
'@rollup/rollup-linux-arm-musleabihf@4.57.1':
resolution: {integrity: sha512-rGfNUfn0GIeXtBP1wL5MnzSj98+PZe/AXaGBCRmT0ts80lU5CATYGxXukeTX39XBKsxzFpEeK+Mrp9faXOlmrw==}
cpu: [arm]
os: [linux]
libc: [musl]
'@rollup/rollup-linux-arm64-gnu@4.57.1':
resolution: {integrity: sha512-MMtej3YHWeg/0klK2Qodf3yrNzz6CGjo2UntLvk2RSPlhzgLvYEB3frRvbEF2wRKh1Z2fDIg9KRPe1fawv7C+g==}
cpu: [arm64]
os: [linux]
libc: [glibc]
'@rollup/rollup-linux-arm64-musl@4.57.1':
resolution: {integrity: sha512-1a/qhaaOXhqXGpMFMET9VqwZakkljWHLmZOX48R0I/YLbhdxr1m4gtG1Hq7++VhVUmf+L3sTAf9op4JlhQ5u1Q==}
cpu: [arm64]
os: [linux]
libc: [musl]
'@rollup/rollup-linux-loong64-gnu@4.57.1':
resolution: {integrity: sha512-QWO6RQTZ/cqYtJMtxhkRkidoNGXc7ERPbZN7dVW5SdURuLeVU7lwKMpo18XdcmpWYd0qsP1bwKPf7DNSUinhvA==}
cpu: [loong64]
os: [linux]
libc: [glibc]
'@rollup/rollup-linux-loong64-musl@4.57.1':
resolution: {integrity: sha512-xpObYIf+8gprgWaPP32xiN5RVTi/s5FCR+XMXSKmhfoJjrpRAjCuuqQXyxUa/eJTdAE6eJ+KDKaoEqjZQxh3Gw==}
cpu: [loong64]
os: [linux]
libc: [musl]
'@rollup/rollup-linux-ppc64-gnu@4.57.1':
resolution: {integrity: sha512-4BrCgrpZo4hvzMDKRqEaW1zeecScDCR+2nZ86ATLhAoJ5FQ+lbHVD3ttKe74/c7tNT9c6F2viwB3ufwp01Oh2w==}
cpu: [ppc64]
os: [linux]
libc: [glibc]
'@rollup/rollup-linux-ppc64-musl@4.57.1':
resolution: {integrity: sha512-NOlUuzesGauESAyEYFSe3QTUguL+lvrN1HtwEEsU2rOwdUDeTMJdO5dUYl/2hKf9jWydJrO9OL/XSSf65R5+Xw==}
cpu: [ppc64]
os: [linux]
libc: [musl]
'@rollup/rollup-linux-riscv64-gnu@4.57.1':
resolution: {integrity: sha512-ptA88htVp0AwUUqhVghwDIKlvJMD/fmL/wrQj99PRHFRAG6Z5nbWoWG4o81Nt9FT+IuqUQi+L31ZKAFeJ5Is+A==}
cpu: [riscv64]
os: [linux]
libc: [glibc]
'@rollup/rollup-linux-riscv64-musl@4.57.1':
resolution: {integrity: sha512-S51t7aMMTNdmAMPpBg7OOsTdn4tySRQvklmL3RpDRyknk87+Sp3xaumlatU+ppQ+5raY7sSTcC2beGgvhENfuw==}
cpu: [riscv64]
os: [linux]
libc: [musl]
'@rollup/rollup-linux-s390x-gnu@4.57.1':
resolution: {integrity: sha512-Bl00OFnVFkL82FHbEqy3k5CUCKH6OEJL54KCyx2oqsmZnFTR8IoNqBF+mjQVcRCT5sB6yOvK8A37LNm/kPJiZg==}
cpu: [s390x]
os: [linux]
libc: [glibc]
'@rollup/rollup-linux-x64-gnu@4.57.1':
resolution: {integrity: sha512-ABca4ceT4N+Tv/GtotnWAeXZUZuM/9AQyCyKYyKnpk4yoA7QIAuBt6Hkgpw8kActYlew2mvckXkvx0FfoInnLg==}
cpu: [x64]
os: [linux]
libc: [glibc]
'@rollup/rollup-linux-x64-musl@4.57.1':
resolution: {integrity: sha512-HFps0JeGtuOR2convgRRkHCekD7j+gdAuXM+/i6kGzQtFhlCtQkpwtNzkNj6QhCDp7DRJ7+qC/1Vg2jt5iSOFw==}
cpu: [x64]
os: [linux]
libc: [musl]
'@rollup/rollup-openbsd-x64@4.57.1':
resolution: {integrity: sha512-H+hXEv9gdVQuDTgnqD+SQffoWoc0Of59AStSzTEj/feWTBAnSfSD3+Dql1ZruJQxmykT/JVY0dE8Ka7z0DH1hw==}
cpu: [x64]
os: [openbsd]
'@rollup/rollup-openharmony-arm64@4.57.1':
resolution: {integrity: sha512-4wYoDpNg6o/oPximyc/NG+mYUejZrCU2q+2w6YZqrAs2UcNUChIZXjtafAiiZSUc7On8v5NyNj34Kzj/Ltk6dQ==}
cpu: [arm64]
os: [openharmony]
'@rollup/rollup-win32-arm64-msvc@4.57.1':
resolution: {integrity: sha512-O54mtsV/6LW3P8qdTcamQmuC990HDfR71lo44oZMZlXU4tzLrbvTii87Ni9opq60ds0YzuAlEr/GNwuNluZyMQ==}
cpu: [arm64]
os: [win32]
'@rollup/rollup-win32-ia32-msvc@4.57.1':
resolution: {integrity: sha512-P3dLS+IerxCT/7D2q2FYcRdWRl22dNbrbBEtxdWhXrfIMPP9lQhb5h4Du04mdl5Woq05jVCDPCMF7Ub0NAjIew==}
cpu: [ia32]
os: [win32]
'@rollup/rollup-win32-x64-gnu@4.57.1':
resolution: {integrity: sha512-VMBH2eOOaKGtIJYleXsi2B8CPVADrh+TyNxJ4mWPnKfLB/DBUmzW+5m1xUrcwWoMfSLagIRpjUFeW5CO5hyciQ==}
cpu: [x64]
os: [win32]
'@rollup/rollup-win32-x64-msvc@4.57.1':
resolution: {integrity: sha512-mxRFDdHIWRxg3UfIIAwCm6NzvxG0jDX/wBN6KsQFTvKFqqg9vTrWUE68qEjHt19A5wwx5X5aUi2zuZT7YR0jrA==}
cpu: [x64]
os: [win32]
'@schematics/angular@21.1.0-rc.0':
resolution: {integrity: sha512-2xFCC2wGZZlnxKcf8+CJUaYC0cW/Zb8BO26LoGRLZLgW3E58YwizOa72DzqkCqGF6FDaVoh4V5U7RCtVSPtQGw==}
engines: {node: ^20.19.0 || ^22.12.0 || >=24.0.0, npm: ^6.11.0 || ^7.5.6 || >=8.0.0, yarn: '>= 1.13.0'}
'@sigstore/bundle@4.0.0':
resolution: {integrity: sha512-NwCl5Y0V6Di0NexvkTqdoVfmjTaQwoLM236r89KEojGmq/jMls8S+zb7yOwAPdXvbwfKDlP+lmXgAL4vKSQT+A==}
engines: {node: ^20.17.0 || >=22.9.0}
'@sigstore/core@3.1.0':
resolution: {integrity: sha512-o5cw1QYhNQ9IroioJxpzexmPjfCe7gzafd2RY3qnMpxr4ZEja+Jad/U8sgFpaue6bOaF+z7RVkyKVV44FN+N8A==}
engines: {node: ^20.17.0 || >=22.9.0}
'@sigstore/protobuf-specs@0.5.0':
resolution: {integrity: sha512-MM8XIwUjN2bwvCg1QvrMtbBmpcSHrkhFSCu1D11NyPvDQ25HEc4oG5/OcQfd/Tlf/OxmKWERDj0zGE23jQaMwA==}
engines: {node: ^18.17.0 || >=20.5.0}
'@sigstore/sign@4.1.0':
resolution: {integrity: sha512-Vx1RmLxLGnSUqx/o5/VsCjkuN5L7y+vxEEwawvc7u+6WtX2W4GNa7b9HEjmcRWohw/d6BpATXmvOwc78m+Swdg==}
engines: {node: ^20.17.0 || >=22.9.0}
'@sigstore/tuf@4.0.1':
resolution: {integrity: sha512-OPZBg8y5Vc9yZjmWCHrlWPMBqW5yd8+wFNl+thMdtcWz3vjVSoJQutF8YkrzI0SLGnkuFof4HSsWUhXrf219Lw==}
engines: {node: ^20.17.0 || >=22.9.0}
'@sigstore/verify@3.1.0':
resolution: {integrity: sha512-mNe0Iigql08YupSOGv197YdHpPPr+EzDZmfCgMc7RPNaZTw5aLN01nBl6CHJOh3BGtnMIj83EeN4butBchc8Ag==}
engines: {node: ^20.17.0 || >=22.9.0}
'@tsconfig/node10@1.0.12':
resolution: {integrity: sha512-UCYBaeFvM11aU2y3YPZ//O5Rhj+xKyzy7mvcIoAjASbigy8mHMryP5cK7dgjlz2hWxh1g5pLw084E0a/wlUSFQ==}
'@tsconfig/node12@1.0.11':
resolution: {integrity: sha512-cqefuRsh12pWyGsIoBKJA9luFu3mRxCA+ORZvA4ktLSzIuCUtWVxGIuXigEwO5/ywWFMZ2QEGKWvkZG1zDMTag==}
'@tsconfig/node14@1.0.3':
resolution: {integrity: sha512-ysT8mhdixWK6Hw3i1V2AeRqZ5WfXg1G43mqoYlM2nc6388Fq5jcXyr5mRsqViLx/GJYdoL0bfXD8nmF+Zn/Iow==}
'@tsconfig/node16@1.0.4':
resolution: {integrity: sha512-vxhUy4J8lyeyinH7Azl1pdd43GJhZH/tP2weN8TntQblOY+A0XbT8DJk1/oCPuOOyg/Ja757rG0CgHcWC8OfMA==}
'@tufjs/canonical-json@2.0.0':
resolution: {integrity: sha512-yVtV8zsdo8qFHe+/3kw81dSLyF7D576A5cCFCi4X7B39tWT7SekaEFUnvnWJHz+9qO7qJTah1JbrDjWKqFtdWA==}
engines: {node: ^16.14.0 || >=18.0.0}
'@tufjs/models@4.1.0':
resolution: {integrity: sha512-Y8cK9aggNRsqJVaKUlEYs4s7CvQ1b1ta2DVPyAimb0I2qhzjNk+A+mxvll/klL0RlfuIUei8BF7YWiua4kQqww==}
engines: {node: ^20.17.0 || >=22.9.0}
'@tybys/wasm-util@0.10.1':
resolution: {integrity: sha512-9tTaPJLSiejZKx+Bmog4uSubteqTvFrVrURwkmHixBo0G4seD0zUxp98E1DzUBJxLQ3NPwXrGKDiVjwx/DpPsg==}
'@types/body-parser@1.19.6':
resolution: {integrity: sha512-HLFeCYgz89uk22N5Qg3dvGvsv46B8GLvKKo1zKG4NybA8U2DiEO3w9lqGg29t/tfLRJpJ6iQxnVw4OnB7MoM9g==}
'@types/bonjour@3.5.13':
resolution: {integrity: sha512-z9fJ5Im06zvUL548KvYNecEVlA7cVDkGUi6kZusb04mpyEFKCIZJvloCcmpmLaIahDpOQGHaHmG6imtPMmPXGQ==}
'@types/connect-history-api-fallback@1.5.4':
resolution: {integrity: sha512-n6Cr2xS1h4uAulPRdlw6Jl6s1oG8KrVilPN2yUITEs+K48EzMJJ3W1xy8K5eWuFvjp3R74AOIGSmp2UfBJ8HFw==}
'@types/connect@3.4.38':
resolution: {integrity: sha512-K6uROf1LD88uDQqJCktA4yzL1YYAK6NgfsI0v/mTgyPKWsX1CnJ0XPSDhViejru1GcRkLWb8RlzFYJRqGUbaug==}
'@types/eslint-scope@3.7.7':
resolution: {integrity: sha512-MzMFlSLBqNF2gcHWO0G1vP/YQyfvrxZ0bF+u7mzUdZ1/xK4A4sru+nraZz5i3iEIk1l1uyicaDVTB4QbbEkAYg==}
'@types/eslint@9.6.1':
resolution: {integrity: sha512-FXx2pKgId/WyYo2jXw63kk7/+TY7u7AziEJxJAnSFzHlqTAS3Ync6SvgYAN/k4/PQpnnVuzoMuVnByKK2qp0ag==}
'@types/estree@1.0.8':
resolution: {integrity: sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w==}
'@types/express-serve-static-core@4.19.8':
resolution: {integrity: sha512-02S5fmqeoKzVZCHPZid4b8JH2eM5HzQLZWN2FohQEy/0eXTq8VXZfSN6Pcr3F6N9R/vNrj7cpgbhjie6m/1tCA==}
'@types/express@4.17.25':
resolution: {integrity: sha512-dVd04UKsfpINUnK0yBoYHDF3xu7xVH4BuDotC/xGuycx4CgbP48X/KF/586bcObxT0HENHXEU8Nqtu6NR+eKhw==}
'@types/http-errors@2.0.5':
resolution: {integrity: sha512-r8Tayk8HJnX0FztbZN7oVqGccWgw98T/0neJphO91KkmOzug1KkofZURD4UaD5uH8AqcFLfdPErnBod0u71/qg==}
'@types/http-proxy@1.17.17':
resolution: {integrity: sha512-ED6LB+Z1AVylNTu7hdzuBqOgMnvG/ld6wGCG8wFnAzKX5uyW2K3WD52v0gnLCTK/VLpXtKckgWuyScYK6cSPaw==}
'@types/json-schema@7.0.15':
resolution: {integrity: sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA==}
'@types/mime@1.3.5':
resolution: {integrity: sha512-/pyBZWSLD2n0dcHE3hq8s8ZvcETHtEuF+3E7XVt0Ig2nvsVQXdghHVcEkIWjy9A0wKfTn97a/PSDYohKIlnP/w==}
'@types/node-forge@1.3.14':
resolution: {integrity: sha512-mhVF2BnD4BO+jtOp7z1CdzaK4mbuK0LLQYAvdOLqHTavxFNq4zA1EmYkpnFjP8HOUzedfQkRnp0E2ulSAYSzAw==}
'@types/node@20.19.33':
resolution: {integrity: sha512-Rs1bVAIdBs5gbTIKza/tgpMuG1k3U/UMJLWecIMxNdJFDMzcM5LOiLVRYh3PilWEYDIeUDv7bpiHPLPsbydGcw==}
'@types/qs@6.14.0':
resolution: {integrity: sha512-eOunJqu0K1923aExK6y8p6fsihYEn/BYuQ4g0CxAAgFc4b/ZLN4CrsRZ55srTdqoiLzU2B2evC+apEIxprEzkQ==}
'@types/range-parser@1.2.7':
resolution: {integrity: sha512-hKormJbkJqzQGhziax5PItDUTMAM9uE2XXQmM37dyd4hVM+5aVl7oVxMVUiVQn2oCQFN/LKCZdvSM0pFRqbSmQ==}
'@types/retry@0.12.2':
resolution: {integrity: sha512-XISRgDJ2Tc5q4TRqvgJtzsRkFYNJzZrhTdtMoGVBttwzzQJkPnS3WWTFc7kuDRoPtPakl+T+OfdEUjYJj7Jbow==}
'@types/send@0.17.6':
resolution: {integrity: sha512-Uqt8rPBE8SY0RK8JB1EzVOIZ32uqy8HwdxCnoCOsYrvnswqmFZ/k+9Ikidlk/ImhsdvBsloHbAlewb2IEBV/Og==}
'@types/send@1.2.1':
resolution: {integrity: sha512-arsCikDvlU99zl1g69TcAB3mzZPpxgw0UQnaHeC1Nwb015xp8bknZv5rIfri9xTOcMuaVgvabfIRA7PSZVuZIQ==}
'@types/serve-index@1.9.4':
resolution: {integrity: sha512-qLpGZ/c2fhSs5gnYsQxtDEq3Oy8SXPClIXkW5ghvAvsNuVSA8k+gCONcUCS/UjLEYvYps+e8uBtfgXgvhwfNug==}
'@types/serve-static@1.15.10':
resolution: {integrity: sha512-tRs1dB+g8Itk72rlSI2ZrW6vZg0YrLI81iQSTkMmOqnqCaNr/8Ek4VwWcN5vZgCYWbg/JJSGBlUaYGAOP73qBw==}
'@types/sockjs@0.3.36':
resolution: {integrity: sha512-MK9V6NzAS1+Ud7JV9lJLFqW85VbC9dq3LmwZCuBe4wBDgKC0Kj/jd8Xl+nSviU+Qc3+m7umHHyHg//2KSa0a0Q==}
'@types/ws@8.18.1':
resolution: {integrity: sha512-ThVF6DCVhA8kUGy+aazFQ4kXQ7E1Ty7A3ypFOe0IcJV8O/M511G99AW24irKrW56Wt44yG9+ij8FaqoBGkuBXg==}
'@vitejs/plugin-basic-ssl@2.1.0':
resolution: {integrity: sha512-dOxxrhgyDIEUADhb/8OlV9JIqYLgos03YorAueTIeOUskLJSEsfwCByjbu98ctXitUN3znXKp0bYD/WHSudCeA==}
engines: {node: ^18.0.0 || ^20.0.0 || >=22.0.0}
peerDependencies:
vite: ^6.0.0 || ^7.0.0
'@webassemblyjs/ast@1.14.1':
resolution: {integrity: sha512-nuBEDgQfm1ccRp/8bCQrx1frohyufl4JlbMMZ4P1wpeOfDhF6FQkxZJ1b/e+PLwr6X1Nhw6OLme5usuBWYBvuQ==}
'@webassemblyjs/floating-point-hex-parser@1.13.2':
resolution: {integrity: sha512-6oXyTOzbKxGH4steLbLNOu71Oj+C8Lg34n6CqRvqfS2O71BxY6ByfMDRhBytzknj9yGUPVJ1qIKhRlAwO1AovA==}
'@webassemblyjs/helper-api-error@1.13.2':
resolution: {integrity: sha512-U56GMYxy4ZQCbDZd6JuvvNV/WFildOjsaWD3Tzzvmw/mas3cXzRJPMjP83JqEsgSbyrmaGjBfDtV7KDXV9UzFQ==}
'@webassemblyjs/helper-buffer@1.14.1':
resolution: {integrity: sha512-jyH7wtcHiKssDtFPRB+iQdxlDf96m0E39yb0k5uJVhFGleZFoNw1c4aeIcVUPPbXUVJ94wwnMOAqUHyzoEPVMA==}
'@webassemblyjs/helper-numbers@1.13.2':
resolution: {integrity: sha512-FE8aCmS5Q6eQYcV3gI35O4J789wlQA+7JrqTTpJqn5emA4U2hvwJmvFRC0HODS+3Ye6WioDklgd6scJ3+PLnEA==}
'@webassemblyjs/helper-wasm-bytecode@1.13.2':
resolution: {integrity: sha512-3QbLKy93F0EAIXLh0ogEVR6rOubA9AoZ+WRYhNbFyuB70j3dRdwH9g+qXhLAO0kiYGlg3TxDV+I4rQTr/YNXkA==}
'@webassemblyjs/helper-wasm-section@1.14.1':
resolution: {integrity: sha512-ds5mXEqTJ6oxRoqjhWDU83OgzAYjwsCV8Lo/N+oRsNDmx/ZDpqalmrtgOMkHwxsG0iI//3BwWAErYRHtgn0dZw==}
'@webassemblyjs/ieee754@1.13.2':
resolution: {integrity: sha512-4LtOzh58S/5lX4ITKxnAK2USuNEvpdVV9AlgGQb8rJDHaLeHciwG4zlGr0j/SNWlr7x3vO1lDEsuePvtcDNCkw==}
'@webassemblyjs/leb128@1.13.2':
resolution: {integrity: sha512-Lde1oNoIdzVzdkNEAWZ1dZ5orIbff80YPdHx20mrHwHrVNNTjNr8E3xz9BdpcGqRQbAEa+fkrCb+fRFTl/6sQw==}
'@webassemblyjs/utf8@1.13.2':
resolution: {integrity: sha512-3NQWGjKTASY1xV5m7Hr0iPeXD9+RDobLll3T9d2AO+g3my8xy5peVyjSag4I50mR1bBSN/Ct12lo+R9tJk0NZQ==}
'@webassemblyjs/wasm-edit@1.14.1':
resolution: {integrity: sha512-RNJUIQH/J8iA/1NzlE4N7KtyZNHi3w7at7hDjvRNm5rcUXa00z1vRz3glZoULfJ5mpvYhLybmVcwcjGrC1pRrQ==}
'@webassemblyjs/wasm-gen@1.14.1':
resolution: {integrity: sha512-AmomSIjP8ZbfGQhumkNvgC33AY7qtMCXnN6bL2u2Js4gVCg8fp735aEiMSBbDR7UQIj90n4wKAFUSEd0QN2Ukg==}
'@webassemblyjs/wasm-opt@1.14.1':
resolution: {integrity: sha512-PTcKLUNvBqnY2U6E5bdOQcSM+oVP/PmrDY9NzowJjislEjwP/C4an2303MCVS2Mg9d3AJpIGdUFIQQWbPds0Sw==}
'@webassemblyjs/wasm-parser@1.14.1':
resolution: {integrity: sha512-JLBl+KZ0R5qB7mCnud/yyX08jWFw5MsoalJ1pQ4EdFlgj9VdXKGuENGsiCIjegI1W7p91rUlcB/LB5yRJKNTcQ==}
'@webassemblyjs/wast-printer@1.14.1':
resolution: {integrity: sha512-kPSSXE6De1XOR820C90RIo2ogvZG+c3KiHzqUoO/F34Y2shGzesfqv7o57xrxovZJH/MetF5UjroJ/R/3isoiw==}
'@xtuc/ieee754@1.2.0':
resolution: {integrity: sha512-DX8nKgqcGwsc0eJSqYt5lwP4DH5FlHnmuWWBRy7X0NcaGR0ZtuyeESgMwTYVEtxmsNGY+qit4QYT/MIYTOTPeA==}
'@xtuc/long@4.2.2':
resolution: {integrity: sha512-NuHqBY1PB/D8xU6s/thBgOAiAP7HOYDQ32+BFZILJ8ivkUkAHQnWfn6WhL79Owj1qmUnoN/YPhktdIoucipkAQ==}
'@yarnpkg/lockfile@1.1.0':
resolution: {integrity: sha512-GpSwvyXOcOOlV70vbnzjj4fW5xW/FdUF6nQEt1ENy7m4ZCczi1+/buVUPAqmGfqznsORNFzUMjctTIp8a9tuCQ==}
abbrev@4.0.0:
resolution: {integrity: sha512-a1wflyaL0tHtJSmLSOVybYhy22vRih4eduhhrkcjgrWGnRfrZtovJ2FRjxuTtkkj47O/baf0R86QU5OuYpz8fA==}
engines: {node: ^20.17.0 || >=22.9.0}
accepts@1.3.8:
resolution: {integrity: sha512-PYAthTa2m2VKxuvSD3DPC/Gy+U+sOA1LAuT8mkmRuvw+NACSaeXEQ+NHcVF7rONl6qcaxV3Uuemwawk+7+SJLw==}
engines: {node: '>= 0.6'}
accepts@2.0.0:
resolution: {integrity: sha512-5cvg6CtKwfgdmVqY1WIiXKc3Q1bkRqGLi+2W/6ao+6Y7gu/RCwRuAhGEzh5B4KlszSuTLgZYuqFqo5bImjNKng==}
engines: {node: '>= 0.6'}
acorn-import-phases@1.0.4:
resolution: {integrity: sha512-wKmbr/DDiIXzEOiWrTTUcDm24kQ2vGfZQvM2fwg2vXqR5uW6aapr7ObPtj1th32b9u90/Pf4AItvdTh42fBmVQ==}
engines: {node: '>=10.13.0'}
peerDependencies:
acorn: ^8.14.0
acorn-walk@8.3.4:
resolution: {integrity: sha512-ueEepnujpqee2o5aIYnvHU6C0A42MNdsIDeqy5BydrkuC5R1ZuUFnm27EeFJGoEHJQgn3uleRvmTXaJgfXbt4g==}
engines: {node: '>=0.4.0'}
acorn@8.15.0:
resolution: {integrity: sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg==}
engines: {node: '>=0.4.0'}
hasBin: true
adjust-sourcemap-loader@4.0.0:
resolution: {integrity: sha512-OXwN5b9pCUXNQHJpwwD2qP40byEmSgzj8B4ydSN0uMNYWiFmJ6x6KwUllMmfk8Rwu/HJDFR7U8ubsWBoN0Xp0A==}
engines: {node: '>=8.9'}
agent-base@7.1.4:
resolution: {integrity: sha512-MnA+YT8fwfJPgBx3m60MNqakm30XOkyIoH1y6huTQvC0PwZG7ki8NacLBcrPbNoo8vEZy7Jpuk7+jMO+CUovTQ==}
engines: {node: '>= 14'}
ajv-formats@2.1.1:
resolution: {integrity: sha512-Wx0Kx52hxE7C18hkMEggYlEifqWZtYaRgouJor+WMdPnQyEK13vgEWyVNup7SoeeoLMsr4kf5h6dOW11I15MUA==}
peerDependencies:
ajv: ^8.0.0
peerDependenciesMeta:
ajv:
optional: true
ajv-formats@3.0.1:
resolution: {integrity: sha512-8iUql50EUR+uUcdRQ3HDqa6EVyo3docL8g5WJ3FNcWmu62IbkGUue/pEyLBW8VGKKucTPgqeks4fIU1DA4yowQ==}
peerDependencies:
ajv: ^8.0.0
peerDependenciesMeta:
ajv:
optional: true
ajv-keywords@5.1.0:
resolution: {integrity: sha512-YCS/JNFAUyr5vAuhk1DWm1CBxRHW9LbJ2ozWeemrIqpbsqKjHVxYPyi5GC0rjZIT5JxJ3virVTS8wk4i/Z+krw==}
peerDependencies:
ajv: ^8.8.2
ajv@8.17.1:
resolution: {integrity: sha512-B/gBuNg5SiMTrPkC+A2+cW0RszwxYmn6VYxB/inlBStS5nx6xHIt/ehKRhIMhqusl7a8LjQoZnjCs5vhwxOQ1g==}
algoliasearch@5.46.2:
resolution: {integrity: sha512-qqAXW9QvKf2tTyhpDA4qXv1IfBwD2eduSW6tUEBFIfCeE9gn9HQ9I5+MaKoenRuHrzk5sQoNh1/iof8mY7uD6Q==}
engines: {node: '>= 14.0.0'}
ansi-colors@4.1.3:
resolution: {integrity: sha512-/6w/C21Pm1A7aZitlI5Ni/2J6FFQN8i1Cvz3kHABAAbw93v/NlvKdVOqz7CCWz/3iv/JplRSEEZ83XION15ovw==}
engines: {node: '>=6'}
ansi-escapes@7.3.0:
resolution: {integrity: sha512-BvU8nYgGQBxcmMuEeUEmNTvrMVjJNSH7RgW24vXexN4Ven6qCvy4TntnvlnwnMLTVlcRQQdbRY8NKnaIoeWDNg==}
engines: {node: '>=18'}
ansi-html-community@0.0.8:
resolution: {integrity: sha512-1APHAyr3+PCamwNw3bXCPp4HFLONZt/yIH0sZp0/469KWNTEy+qN5jQ3GVX6DMZ1UXAi34yVwtTeaG/HpBuuzw==}
engines: {'0': node >= 0.8.0}
hasBin: true
ansi-regex@5.0.1:
resolution: {integrity: sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==}
engines: {node: '>=8'}
ansi-regex@6.2.2:
resolution: {integrity: sha512-Bq3SmSpyFHaWjPk8If9yc6svM8c56dB5BAtW4Qbw5jHTwwXXcTLoRMkpDJp6VL0XzlWaCHTXrkFURMYmD0sLqg==}
engines: {node: '>=12'}
ansi-styles@4.3.0:
resolution: {integrity: sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==}
engines: {node: '>=8'}
ansi-styles@6.2.3:
resolution: {integrity: sha512-4Dj6M28JB+oAH8kFkTLUo+a2jwOFkuqb3yucU0CANcRRUbxS0cP0nZYCGjcc3BNXwRIsUVmDGgzawme7zvJHvg==}
engines: {node: '>=12'}
anymatch@3.1.3:
resolution: {integrity: sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==}
engines: {node: '>= 8'}
arg@4.1.3:
resolution: {integrity: sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA==}
argparse@2.0.1:
resolution: {integrity: sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==}
array-flatten@1.1.1:
resolution: {integrity: sha512-PCVAQswWemu6UdxsDFFX/+gVeYqKAod3D3UVm91jHwynguOwAvYPhx8nNlM++NqRcK6CxxpUafjmhIdKiHibqg==}
autoprefixer@10.4.23:
resolution: {integrity: sha512-YYTXSFulfwytnjAPlw8QHncHJmlvFKtczb8InXaAx9Q0LbfDnfEYDE55omerIJKihhmU61Ft+cAOSzQVaBUmeA==}
engines: {node: ^10 || ^12 || >=14}
hasBin: true
peerDependencies:
postcss: ^8.1.0
babel-loader@10.0.0:
resolution: {integrity: sha512-z8jt+EdS61AMw22nSfoNJAZ0vrtmhPRVi6ghL3rCeRZI8cdNYFiV5xeV3HbE7rlZZNmGH8BVccwWt8/ED0QOHA==}
engines: {node: ^18.20.0 || ^20.10.0 || >=22.0.0}
peerDependencies:
'@babel/core': ^7.12.0
webpack: '>=5.61.0'
babel-plugin-polyfill-corejs2@0.4.15:
resolution: {integrity: sha512-hR3GwrRwHUfYwGfrisXPIDP3JcYfBrW7wKE7+Au6wDYl7fm/ka1NEII6kORzxNU556JjfidZeBsO10kYvtV1aw==}
peerDependencies:
'@babel/core': ^7.4.0 || ^8.0.0-0 <8.0.0
babel-plugin-polyfill-corejs3@0.13.0:
resolution: {integrity: sha512-U+GNwMdSFgzVmfhNm8GJUX88AadB3uo9KpJqS3FaqNIPKgySuvMb+bHPsOmmuWyIcuqZj/pzt1RUIUZns4y2+A==}
peerDependencies:
'@babel/core': ^7.4.0 || ^8.0.0-0 <8.0.0
babel-plugin-polyfill-regenerator@0.6.6:
resolution: {integrity: sha512-hYm+XLYRMvupxiQzrvXUj7YyvFFVfv5gI0R71AJzudg1g2AI2vyCPPIFEBjk162/wFzti3inBHo7isWFuEVS/A==}
peerDependencies:
'@babel/core': ^7.4.0 || ^8.0.0-0 <8.0.0
baseline-browser-mapping@2.9.19:
resolution: {integrity: sha512-ipDqC8FrAl/76p2SSWKSI+H9tFwm7vYqXQrItCuiVPt26Km0jS+NzSsBWAaBusvSbQcfJG+JitdMm+wZAgTYqg==}
hasBin: true
batch@0.6.1:
resolution: {integrity: sha512-x+VAiMRL6UPkx+kudNvxTl6hB2XNNCG2r+7wixVfIYwu/2HKRXimwQyaumLjMveWvT2Hkd/cAJw+QBMfJ/EKVw==}
beasties@0.3.5:
resolution: {integrity: sha512-NaWu+f4YrJxEttJSm16AzMIFtVldCvaJ68b1L098KpqXmxt9xOLtKoLkKxb8ekhOrLqEJAbvT6n6SEvB/sac7A==}
engines: {node: '>=14.0.0'}
big.js@5.2.2:
resolution: {integrity: sha512-vyL2OymJxmarO8gxMr0mhChsO9QGwhynfuu4+MHTAW6czfq9humCB7rKpUjDd9YUiDPU4mzpyupFSvOClAwbmQ==}
binary-extensions@2.3.0:
resolution: {integrity: sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw==}
engines: {node: '>=8'}
body-parser@1.20.4:
resolution: {integrity: sha512-ZTgYYLMOXY9qKU/57FAo8F+HA2dGX7bqGc71txDRC1rS4frdFI5R7NhluHxH6M0YItAP0sHB4uqAOcYKxO6uGA==}
engines: {node: '>= 0.8', npm: 1.2.8000 || >= 1.4.16}
body-parser@2.2.2:
resolution: {integrity: sha512-oP5VkATKlNwcgvxi0vM0p/D3n2C3EReYVX+DNYs5TjZFn/oQt2j+4sVJtSMr18pdRr8wjTcBl6LoV+FUwzPmNA==}
engines: {node: '>=18'}
bonjour-service@1.3.0:
resolution: {integrity: sha512-3YuAUiSkWykd+2Azjgyxei8OWf8thdn8AITIog2M4UICzoqfjlqr64WIjEXZllf/W6vK1goqleSR6brGomxQqA==}
boolbase@1.0.0:
resolution: {integrity: sha512-JZOSA7Mo9sNGB8+UjSgzdLtokWAky1zbztM3WRLCbZ70/3cTANmQmOdR7y2g+J0e2WXywy1yS468tY+IruqEww==}
braces@3.0.3:
resolution: {integrity: sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==}
engines: {node: '>=8'}
browserslist@4.28.1:
resolution: {integrity: sha512-ZC5Bd0LgJXgwGqUknZY/vkUQ04r8NXnJZ3yYi4vDmSiZmC/pdSN0NbNRPxZpbtO4uAfDUAFffO8IZoM3Gj8IkA==}
engines: {node: ^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7}
hasBin: true
buffer-from@1.1.2:
resolution: {integrity: sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==}
bundle-name@4.1.0:
resolution: {integrity: sha512-tjwM5exMg6BGRI+kNmTntNsvdZS1X8BFYS6tnJ2hdH0kVxM6/eVZ2xy+FqStSWvYmtfFMDLIxurorHwDKfDz5Q==}
engines: {node: '>=18'}
bytes@3.1.2:
resolution: {integrity: sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==}
engines: {node: '>= 0.8'}
cacache@20.0.3:
resolution: {integrity: sha512-3pUp4e8hv07k1QlijZu6Kn7c9+ZpWWk4j3F8N3xPuCExULobqJydKYOTj1FTq58srkJsXvO7LbGAH4C0ZU3WGw==}
engines: {node: ^20.17.0 || >=22.9.0}
call-bind-apply-helpers@1.0.2:
resolution: {integrity: sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==}
engines: {node: '>= 0.4'}
call-bound@1.0.4:
resolution: {integrity: sha512-+ys997U96po4Kx/ABpBCqhA9EuxJaQWDQg7295H4hBphv3IZg0boBKuwYpt4YXp6MZ5AmZQnU/tyMTlRpaSejg==}
engines: {node: '>= 0.4'}
callsites@3.1.0:
resolution: {integrity: sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==}
engines: {node: '>=6'}
caniuse-lite@1.0.30001769:
resolution: {integrity: sha512-BCfFL1sHijQlBGWBMuJyhZUhzo7wer5sVj9hqekB/7xn0Ypy+pER/edCYQm4exbXj4WiySGp40P8UuTh6w1srg==}
chalk@5.6.2:
resolution: {integrity: sha512-7NzBL0rN6fMUW+f7A6Io4h40qQlG+xGmtMxfbnH/K7TAtt8JQWVQK+6g0UXKMeVJoyV5EkkNsErQ8pVD3bLHbA==}
engines: {node: ^12.17.0 || ^14.13 || >=16.0.0}
chardet@2.1.1:
resolution: {integrity: sha512-PsezH1rqdV9VvyNhxxOW32/d75r01NY7TQCmOqomRo15ZSOKbpTFVsfjghxo6JloQUCGnH4k1LGu0R4yCLlWQQ==}
chokidar@3.6.0:
resolution: {integrity: sha512-7VT13fmjotKpGipCW9JEQAusEPE+Ei8nl6/g4FBAmIm0GOOLMua9NDDo/DWp0ZAxCr3cPq5ZpBqmPAQgDda2Pw==}
engines: {node: '>= 8.10.0'}
chokidar@4.0.3:
resolution: {integrity: sha512-Qgzu8kfBvo+cA4962jnP1KkS6Dop5NS6g7R5LFYJr4b8Ub94PPQXUksCw9PvXoeXPRRddRNC5C1JQUR2SMGtnA==}
engines: {node: '>= 14.16.0'}
chownr@3.0.0:
resolution: {integrity: sha512-+IxzY9BZOQd/XuYPRmrvEVjF/nqj5kgT4kEq7VofrDoM1MxoRjEWkrCC3EtLi59TVawxTAn+orJwFQcrqEN1+g==}
engines: {node: '>=18'}
chrome-trace-event@1.0.4:
resolution: {integrity: sha512-rNjApaLzuwaOTjCiT8lSDdGN1APCiqkChLMJxJPWLunPAt5fy8xgU9/jNOchV84wfIxrA0lRQB7oCT8jrn/wrQ==}
engines: {node: '>=6.0'}
cli-cursor@5.0.0:
resolution: {integrity: sha512-aCj4O5wKyszjMmDT4tZj93kxyydN/K5zPWSCe6/0AV/AA1pqe5ZBIw0a2ZfPQV7lL5/yb5HsUreJ6UFAF1tEQw==}
engines: {node: '>=18'}
cli-spinners@3.4.0:
resolution: {integrity: sha512-bXfOC4QcT1tKXGorxL3wbJm6XJPDqEnij2gQ2m7ESQuE+/z9YFIWnl/5RpTiKWbMq3EVKR4fRLJGn6DVfu0mpw==}
engines: {node: '>=18.20'}
cli-truncate@5.1.1:
resolution: {integrity: sha512-SroPvNHxUnk+vIW/dOSfNqdy1sPEFkrTk6TUtqLCnBlo3N7TNYYkzzN7uSD6+jVjrdO4+p8nH7JzH6cIvUem6A==}
engines: {node: '>=20'}
cli-width@4.1.0:
resolution: {integrity: sha512-ouuZd4/dm2Sw5Gmqy6bGyNNNe1qt9RpmxveLSO7KcgsTnU7RXfsw+/bukWGo1abgBiMAic068rclZsO4IWmmxQ==}
engines: {node: '>= 12'}
cliui@9.0.1:
resolution: {integrity: sha512-k7ndgKhwoQveBL+/1tqGJYNz097I7WOvwbmmU2AR5+magtbjPWQTS1C5vzGkBC8Ym8UWRzfKUzUUqFLypY4Q+w==}
engines: {node: '>=20'}
clone-deep@4.0.1:
resolution: {integrity: sha512-neHB9xuzh/wk0dIHweyAXv2aPGZIVk3pLMe+/RNzINf17fe0OG96QroktYAUm7SM1PBnzTabaLboqqxDyMU+SQ==}
engines: {node: '>=6'}
color-convert@2.0.1:
resolution: {integrity: sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==}
engines: {node: '>=7.0.0'}
color-name@1.1.4:
resolution: {integrity: sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==}
colorette@2.0.20:
resolution: {integrity: sha512-IfEDxwoWIjkeXL1eXcDiow4UbKjhLdq6/EuSVR9GMN7KVH3r9gQ83e73hsz1Nd1T3ijd5xv1wcWRYO+D6kCI2w==}
commander@2.20.3:
resolution: {integrity: sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==}
compressible@2.0.18:
resolution: {integrity: sha512-AF3r7P5dWxL8MxyITRMlORQNaOA2IkAFaTr4k7BUumjPtRpGDTZpl0Pb1XCO6JeDCBdp126Cgs9sMxqSjgYyRg==}
engines: {node: '>= 0.6'}
compression@1.8.1:
resolution: {integrity: sha512-9mAqGPHLakhCLeNyxPkK4xVo746zQ/czLH1Ky+vkitMnWfWZps8r0qXuwhwizagCRttsL4lfG4pIOvaWLpAP0w==}
engines: {node: '>= 0.8.0'}
connect-history-api-fallback@2.0.0:
resolution: {integrity: sha512-U73+6lQFmfiNPrYbXqr6kZ1i1wiRqXnp2nhMsINseWXO8lDau0LGEffJ8kQi4EjLZympVgRdvqjAgiZ1tgzDDA==}
engines: {node: '>=0.8'}
content-disposition@0.5.4:
resolution: {integrity: sha512-FveZTNuGw04cxlAiWbzi6zTAL/lhehaWbTtgluJh4/E95DqMwTmha3KZN1aAWA8cFIhHzMZUvLevkw5Rqk+tSQ==}
engines: {node: '>= 0.6'}
content-disposition@1.0.1:
resolution: {integrity: sha512-oIXISMynqSqm241k6kcQ5UwttDILMK4BiurCfGEREw6+X9jkkpEe5T9FZaApyLGGOnFuyMWZpdolTXMtvEJ08Q==}
engines: {node: '>=18'}
content-type@1.0.5:
resolution: {integrity: sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA==}
engines: {node: '>= 0.6'}
convert-source-map@1.9.0:
resolution: {integrity: sha512-ASFBup0Mz1uyiIjANan1jzLQami9z1PoYSZCiiYW2FczPbenXc45FZdBZLzOT+r6+iciuEModtmCti+hjaAk0A==}
convert-source-map@2.0.0:
resolution: {integrity: sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==}
cookie-signature@1.0.7:
resolution: {integrity: sha512-NXdYc3dLr47pBkpUCHtKSwIOQXLVn8dZEuywboCOJY/osA0wFSLlSawr3KN8qXJEyX66FcONTH8EIlVuK0yyFA==}
cookie-signature@1.2.2:
resolution: {integrity: sha512-D76uU73ulSXrD1UXF4KE2TMxVVwhsnCgfAyTg9k8P6KGZjlXKrOLe4dJQKI3Bxi5wjesZoFXJWElNWBjPZMbhg==}
engines: {node: '>=6.6.0'}
cookie@0.7.2:
resolution: {integrity: sha512-yki5XnKuf750l50uGTllt6kKILY4nQ1eNIQatoXEByZ5dWgnKqbnqmTrBE5B4N7lrMJKQ2ytWMiTO2o0v6Ew/w==}
engines: {node: '>= 0.6'}
copy-anything@2.0.6:
resolution: {integrity: sha512-1j20GZTsvKNkc4BY3NpMOM8tt///wY3FpIzozTOFO2ffuZcV61nojHXVKIy3WM+7ADCy5FVhdZYHYDdgTU0yJw==}
copy-webpack-plugin@13.0.1:
resolution: {integrity: sha512-J+YV3WfhY6W/Xf9h+J1znYuqTye2xkBUIGyTPWuBAT27qajBa5mR4f8WBmfDY3YjRftT2kqZZiLi1qf0H+UOFw==}
engines: {node: '>= 18.12.0'}
peerDependencies:
webpack: ^5.1.0
core-js-compat@3.48.0:
resolution: {integrity: sha512-OM4cAF3D6VtH/WkLtWvyNC56EZVXsZdU3iqaMG2B4WvYrlqU831pc4UtG5yp0sE9z8Y02wVN7PjW5Zf9Gt0f1Q==}
core-util-is@1.0.3:
resolution: {integrity: sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==}
cors@2.8.6:
resolution: {integrity: sha512-tJtZBBHA6vjIAaF6EnIaq6laBBP9aq/Y3ouVJjEfoHbRBcHBAHYcMh/w8LDrk2PvIMMq8gmopa5D4V8RmbrxGw==}
engines: {node: '>= 0.10'}
cosmiconfig@9.0.0:
resolution: {integrity: sha512-itvL5h8RETACmOTFc4UfIyB2RfEHi71Ax6E/PivVxq9NseKbOWpeyHEOIbmAw1rs8Ak0VursQNww7lf7YtUwzg==}
engines: {node: '>=14'}
peerDependencies:
typescript: '>=4.9.5'
peerDependenciesMeta:
typescript:
optional: true
create-require@1.1.1:
resolution: {integrity: sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ==}
cross-spawn@7.0.6:
resolution: {integrity: sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==}
engines: {node: '>= 8'}
css-loader@7.1.2:
resolution: {integrity: sha512-6WvYYn7l/XEGN8Xu2vWFt9nVzrCn39vKyTEFf/ExEyoksJjjSZV/0/35XPlMbpnr6VGhZIUg5yJrL8tGfes/FA==}
engines: {node: '>= 18.12.0'}
peerDependencies:
'@rspack/core': 0.x || 1.x
webpack: ^5.27.0
peerDependenciesMeta:
'@rspack/core':
optional: true
webpack:
optional: true
css-select@6.0.0:
resolution: {integrity: sha512-rZZVSLle8v0+EY8QAkDWrKhpgt6SA5OtHsgBnsj6ZaLb5dmDVOWUDtQitd9ydxxvEjhewNudS6eTVU7uOyzvXw==}
css-what@7.0.0:
resolution: {integrity: sha512-wD5oz5xibMOPHzy13CyGmogB3phdvcDaB5t0W/Nr5Z2O/agcB8YwOz6e2Lsp10pNDzBoDO9nVa3RGs/2BttpHQ==}
engines: {node: '>= 6'}
cssesc@3.0.0:
resolution: {integrity: sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg==}
engines: {node: '>=4'}
hasBin: true
debug@2.6.9:
resolution: {integrity: sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==}
peerDependencies:
supports-color: '*'
peerDependenciesMeta:
supports-color:
optional: true
debug@4.4.3:
resolution: {integrity: sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==}
engines: {node: '>=6.0'}
peerDependencies:
supports-color: '*'
peerDependenciesMeta:
supports-color:
optional: true
default-browser-id@5.0.1:
resolution: {integrity: sha512-x1VCxdX4t+8wVfd1so/9w+vQ4vx7lKd2Qp5tDRutErwmR85OgmfX7RlLRMWafRMY7hbEiXIbudNrjOAPa/hL8Q==}
engines: {node: '>=18'}
default-browser@5.5.0:
resolution: {integrity: sha512-H9LMLr5zwIbSxrmvikGuI/5KGhZ8E2zH3stkMgM5LpOWDutGM2JZaj460Udnf1a+946zc7YBgrqEWwbk7zHvGw==}
engines: {node: '>=18'}
define-lazy-prop@3.0.0:
resolution: {integrity: sha512-N+MeXYoqr3pOgn8xfyRPREN7gHakLYjhsHhWGT3fWAiL4IkAt0iDw14QiiEm2bE30c5XX5q0FtAA3CK5f9/BUg==}
engines: {node: '>=12'}
depd@1.1.2:
resolution: {integrity: sha512-7emPTl6Dpo6JRXOXjLRxck+FlLRX5847cLKEn00PLAgc3g2hTZZgr+e4c2v6QpSmLeFP3n5yUo7ft6avBK/5jQ==}
engines: {node: '>= 0.6'}
depd@2.0.0:
resolution: {integrity: sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==}
engines: {node: '>= 0.8'}
destroy@1.2.0:
resolution: {integrity: sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg==}
engines: {node: '>= 0.8', npm: 1.2.8000 || >= 1.4.16}
detect-libc@2.1.2:
resolution: {integrity: sha512-Btj2BOOO83o3WyH59e8MgXsxEQVcarkUOpEYrubB0urwnN10yQ364rsiByU11nZlqWYZm05i/of7io4mzihBtQ==}
engines: {node: '>=8'}
detect-node@2.1.0:
resolution: {integrity: sha512-T0NIuQpnTvFDATNuHN5roPwSBG83rFsuO+MXXH9/3N1eFbn4wcPjttvjMLEPWJ0RGUYgQE7cGgS3tNxbqCGM7g==}
diff@4.0.4:
resolution: {integrity: sha512-X07nttJQkwkfKfvTPG/KSnE2OMdcUCao6+eXF3wmnIQRn2aPAHH3VxDbDOdegkd6JbPsXqShpvEOHfAT+nCNwQ==}
engines: {node: '>=0.3.1'}
dns-packet@5.6.1:
resolution: {integrity: sha512-l4gcSouhcgIKRvyy99RNVOgxXiicE+2jZoNmaNmZ6JXiGajBOJAesk1OBlJuM5k2c+eudGdLxDqXuPCKIj6kpw==}
engines: {node: '>=6'}
dom-serializer@2.0.0:
resolution: {integrity: sha512-wIkAryiqt/nV5EQKqQpo3SToSOV9J0DnbJqwK7Wv/Trc92zIAYZ4FlMu+JPFW1DfGFt81ZTCGgDEabffXeLyJg==}
domelementtype@2.3.0:
resolution: {integrity: sha512-OLETBj6w0OsagBwdXnPdN0cnMfF9opN69co+7ZrbfPGrdpPVNBUj02spi6B1N7wChLQiPn4CSH/zJvXw56gmHw==}
domhandler@5.0.3:
resolution: {integrity: sha512-cgwlv/1iFQiFnU96XXgROh8xTeetsnJiDsTc7TYCLFd9+/WNkIqPTxiM/8pSd8VIrhXGTf1Ny1q1hquVqDJB5w==}
engines: {node: '>= 4'}
domutils@3.2.2:
resolution: {integrity: sha512-6kZKyUajlDuqlHKVX1w7gyslj9MPIXzIFiz/rGu35uC1wMi+kMhQwGhl4lt9unC9Vb9INnY9Z3/ZA3+FhASLaw==}
dunder-proto@1.0.1:
resolution: {integrity: sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==}
engines: {node: '>= 0.4'}
ee-first@1.1.1:
resolution: {integrity: sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==}
electron-to-chromium@1.5.286:
resolution: {integrity: sha512-9tfDXhJ4RKFNerfjdCcZfufu49vg620741MNs26a9+bhLThdB+plgMeou98CAaHu/WATj2iHOOHTp1hWtABj2A==}
emoji-regex@10.6.0:
resolution: {integrity: sha512-toUI84YS5YmxW219erniWD0CIVOo46xGKColeNQRgOzDorgBi1v4D71/OFzgD9GO2UGKIv1C3Sp8DAn0+j5w7A==}
emoji-regex@8.0.0:
resolution: {integrity: sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==}
emojis-list@3.0.0:
resolution: {integrity: sha512-/kyM18EfinwXZbno9FyUGeFh87KC8HRQBQGildHZbEuRyWFOmv1U10o9BBp8XVZDVNNuQKyIGIu5ZYAAXJ0V2Q==}
engines: {node: '>= 4'}
encodeurl@2.0.0:
resolution: {integrity: sha512-Q0n9HRi4m6JuGIV1eFlmvJB7ZEVxu93IrMyiMsGC0lrMJMWzRgx6WGquyfQgZVb31vhGgXnfmPNNXmxnOkRBrg==}
engines: {node: '>= 0.8'}
encoding@0.1.13:
resolution: {integrity: sha512-ETBauow1T35Y/WZMkio9jiM0Z5xjHHmJ4XmjZOq1l/dXz3lr2sRn87nJy20RupqSh1F2m3HHPSp8ShIPQJrJ3A==}
enhanced-resolve@5.19.0:
resolution: {integrity: sha512-phv3E1Xl4tQOShqSte26C7Fl84EwUdZsyOuSSk9qtAGyyQs2s3jJzComh+Abf4g187lUUAvH+H26omrqia2aGg==}
engines: {node: '>=10.13.0'}
entities@4.5.0:
resolution: {integrity: sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw==}
engines: {node: '>=0.12'}
entities@6.0.1:
resolution: {integrity: sha512-aN97NXWF6AWBTahfVOIrB/NShkzi5H7F9r1s9mD3cDj4Ko5f2qhhVoYMibXF7GlLveb/D2ioWay8lxI97Ven3g==}
engines: {node: '>=0.12'}
entities@7.0.1:
resolution: {integrity: sha512-TWrgLOFUQTH994YUyl1yT4uyavY5nNB5muff+RtWaqNVCAK408b5ZnnbNAUEWLTCpum9w6arT70i1XdQ4UeOPA==}
engines: {node: '>=0.12'}
env-paths@2.2.1:
resolution: {integrity: sha512-+h1lkLKhZMTYjog1VEpJNG7NZJWcuc2DDk/qsqSTRRCOXiLjeQ1d1/udrUGhqMxUgAlwKNZ0cf2uqan5GLuS2A==}
engines: {node: '>=6'}
environment@1.1.0:
resolution: {integrity: sha512-xUtoPkMggbz0MPyPiIWr1Kp4aeWJjDZ6SMvURhimjdZgsRuDplF5/s9hcgGhyXMhs+6vpnuoiZ2kFiu3FMnS8Q==}
engines: {node: '>=18'}
err-code@2.0.3:
resolution: {integrity: sha512-2bmlRpNKBxT/CRmPOlyISQpNj+qSeYvcym/uT0Jx2bMOlKLtSy1ZmLuVxSEKKyor/N5yhvp/ZiG1oE3DEYMSFA==}
errno@0.1.8:
resolution: {integrity: sha512-dJ6oBr5SQ1VSd9qkk7ByRgb/1SH4JZjCHSW/mr63/QcXO9zLVxvJ6Oy13nio03rxpSnVDDjFor75SjVeZWPW/A==}
hasBin: true
error-ex@1.3.4:
resolution: {integrity: sha512-sqQamAnR14VgCr1A618A3sGrygcpK+HEbenA/HiEAkkUwcZIIB/tgWqHFxWgOyDh4nB4JCRimh79dR5Ywc9MDQ==}
es-define-property@1.0.1:
resolution: {integrity: sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==}
engines: {node: '>= 0.4'}
es-errors@1.3.0:
resolution: {integrity: sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==}
engines: {node: '>= 0.4'}
es-module-lexer@2.0.0:
resolution: {integrity: sha512-5POEcUuZybH7IdmGsD8wlf0AI55wMecM9rVBTI/qEAy2c1kTOm3DjFYjrBdI2K3BaJjJYfYFeRtM0t9ssnRuxw==}
es-object-atoms@1.1.1:
resolution: {integrity: sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==}
engines: {node: '>= 0.4'}
esbuild-wasm@0.27.2:
resolution: {integrity: sha512-eUTnl8eh+v8UZIZh4MrMOKDAc8Lm7+NqP3pyuTORGFY1s/o9WoiJgKnwXy+te2J3hX7iRbFSHEyig7GsPeeJyw==}
engines: {node: '>=18'}
hasBin: true
esbuild@0.27.2:
resolution: {integrity: sha512-HyNQImnsOC7X9PMNaCIeAm4ISCQXs5a5YasTXVliKv4uuBo1dKrG0A+uQS8M5eXjVMnLg3WgXaKvprHlFJQffw==}
engines: {node: '>=18'}
hasBin: true
escalade@3.2.0:
resolution: {integrity: sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==}
engines: {node: '>=6'}
escape-html@1.0.3:
resolution: {integrity: sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow==}
eslint-scope@5.1.1:
resolution: {integrity: sha512-2NxwbF/hZ0KpepYN0cNbo+FN6XoK7GaHlQhgx/hIZl6Va0bF45RQOOwhLIy8lQDbuCiadSLCBnH2CFYquit5bw==}
engines: {node: '>=8.0.0'}
esrecurse@4.3.0:
resolution: {integrity: sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==}
engines: {node: '>=4.0'}
estraverse@4.3.0:
resolution: {integrity: sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw==}
engines: {node: '>=4.0'}
estraverse@5.3.0:
resolution: {integrity: sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==}
engines: {node: '>=4.0'}
esutils@2.0.3:
resolution: {integrity: sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==}
engines: {node: '>=0.10.0'}
etag@1.8.1:
resolution: {integrity: sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg==}
engines: {node: '>= 0.6'}
eventemitter3@4.0.7:
resolution: {integrity: sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw==}
eventemitter3@5.0.4:
resolution: {integrity: sha512-mlsTRyGaPBjPedk6Bvw+aqbsXDtoAyAzm5MO7JgU+yVRyMQ5O8bD4Kcci7BS85f93veegeCPkL8R4GLClnjLFw==}
events@3.3.0:
resolution: {integrity: sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q==}
engines: {node: '>=0.8.x'}
eventsource-parser@3.0.6:
resolution: {integrity: sha512-Vo1ab+QXPzZ4tCa8SwIHJFaSzy4R6SHf7BY79rFBDf0idraZWAkYrDjDj8uWaSm3S2TK+hJ7/t1CEmZ7jXw+pg==}
engines: {node: '>=18.0.0'}
eventsource@3.0.7:
resolution: {integrity: sha512-CRT1WTyuQoD771GW56XEZFQ/ZoSfWid1alKGDYMmkt2yl8UXrVR4pspqWNEcqKvVIzg6PAltWjxcSSPrboA4iA==}
engines: {node: '>=18.0.0'}
exponential-backoff@3.1.3:
resolution: {integrity: sha512-ZgEeZXj30q+I0EN+CbSSpIyPaJ5HVQD18Z1m+u1FXbAeT94mr1zw50q4q6jiiC447Nl/YTcIYSAftiGqetwXCA==}
express-rate-limit@7.5.1:
resolution: {integrity: sha512-7iN8iPMDzOMHPUYllBEsQdWVB6fPDMPqwjBaFrgr4Jgr/+okjvzAy+UHlYYL/Vs0OsOrMkwS6PJDkFlJwoxUnw==}
engines: {node: '>= 16'}
peerDependencies:
express: '>= 4.11'
express@4.22.1:
resolution: {integrity: sha512-F2X8g9P1X7uCPZMA3MVf9wcTqlyNp7IhH5qPCI0izhaOIYXaW9L535tGA3qmjRzpH+bZczqq7hVKxTR4NWnu+g==}
engines: {node: '>= 0.10.0'}
express@5.2.1:
resolution: {integrity: sha512-hIS4idWWai69NezIdRt2xFVofaF4j+6INOpJlVOLDO8zXGpUVEVzIYk12UUi2JzjEzWL3IOAxcTubgz9Po0yXw==}
engines: {node: '>= 18'}
fast-deep-equal@3.1.3:
resolution: {integrity: sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==}
fast-uri@3.1.0:
resolution: {integrity: sha512-iPeeDKJSWf4IEOasVVrknXpaBV0IApz/gp7S2bb7Z4Lljbl2MGJRqInZiUrQwV16cpzw/D3S5j5Julj/gT52AA==}
faye-websocket@0.11.4:
resolution: {integrity: sha512-CzbClwlXAuiRQAlUyfqPgvPoNKTckTPGfwZV4ZdAhVcP2lh9KUxJg2b5GkE7XbjKQ3YJnQ9z6D9ntLAlB+tP8g==}
engines: {node: '>=0.8.0'}
fdir@6.5.0:
resolution: {integrity: sha512-tIbYtZbucOs0BRGqPJkshJUYdL+SDH7dVM8gjy+ERp3WAUjLEFJE+02kanyHtwjWOnwrKYBiwAmM0p4kLJAnXg==}
engines: {node: '>=12.0.0'}
peerDependencies:
picomatch: ^3 || ^4
peerDependenciesMeta:
picomatch:
optional: true
fill-range@7.1.1:
resolution: {integrity: sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==}
engines: {node: '>=8'}
finalhandler@1.3.2:
resolution: {integrity: sha512-aA4RyPcd3badbdABGDuTXCMTtOneUCAYH/gxoYRTZlIJdF0YPWuGqiAsIrhNnnqdXGswYk6dGujem4w80UJFhg==}
engines: {node: '>= 0.8'}
finalhandler@2.1.1:
resolution: {integrity: sha512-S8KoZgRZN+a5rNwqTxlZZePjT/4cnm0ROV70LedRHZ0p8u9fRID0hJUZQpkKLzro8LfmC8sx23bY6tVNxv8pQA==}
engines: {node: '>= 18.0.0'}
find-up@5.0.0:
resolution: {integrity: sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==}
engines: {node: '>=10'}
flat@5.0.2:
resolution: {integrity: sha512-b6suED+5/3rTpUBdG1gupIl8MPFCAMA0QXwmljLhvCUKcUvdE4gWky9zpuGCcXHOsz4J9wPGNWq6OKpmIzz3hQ==}
hasBin: true
follow-redirects@1.15.11:
resolution: {integrity: sha512-deG2P0JfjrTxl50XGCDyfI97ZGVCxIpfKYmfyrQ54n5FO/0gfIES8C/Psl6kWVDolizcaaxZJnTS0QSMxvnsBQ==}
engines: {node: '>=4.0'}
peerDependencies:
debug: '*'
peerDependenciesMeta:
debug:
optional: true
forwarded@0.2.0:
resolution: {integrity: sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow==}
engines: {node: '>= 0.6'}
fraction.js@5.3.4:
resolution: {integrity: sha512-1X1NTtiJphryn/uLQz3whtY6jK3fTqoE3ohKs0tT+Ujr1W59oopxmoEh7Lu5p6vBaPbgoM0bzveAW4Qi5RyWDQ==}
fresh@0.5.2:
resolution: {integrity: sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q==}
engines: {node: '>= 0.6'}
fresh@2.0.0:
resolution: {integrity: sha512-Rx/WycZ60HOaqLKAi6cHRKKI7zxWbJ31MhntmtwMoaTeF7XFH9hhBp8vITaMidfljRQ6eYWCKkaTK+ykVJHP2A==}
engines: {node: '>= 0.8'}
fs-minipass@3.0.3:
resolution: {integrity: sha512-XUBA9XClHbnJWSfBzjkm6RvPsyg3sryZt06BEQoXcF7EK/xpGaQYJgQKDJSUH5SGZ76Y7pFx1QBnXz09rU5Fbw==}
engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0}
fsevents@2.3.3:
resolution: {integrity: sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==}
engines: {node: ^8.16.0 || ^10.6.0 || >=11.0.0}
os: [darwin]
function-bind@1.1.2:
resolution: {integrity: sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==}
gensync@1.0.0-beta.2:
resolution: {integrity: sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==}
engines: {node: '>=6.9.0'}
get-caller-file@2.0.5:
resolution: {integrity: sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==}
engines: {node: 6.* || 8.* || >= 10.*}
get-east-asian-width@1.4.0:
resolution: {integrity: sha512-QZjmEOC+IT1uk6Rx0sX22V6uHWVwbdbxf1faPqJ1QhLdGgsRGCZoyaQBm/piRdJy/D2um6hM1UP7ZEeQ4EkP+Q==}
engines: {node: '>=18'}
get-intrinsic@1.3.0:
resolution: {integrity: sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==}
engines: {node: '>= 0.4'}
get-proto@1.0.1:
resolution: {integrity: sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==}
engines: {node: '>= 0.4'}
glob-parent@5.1.2:
resolution: {integrity: sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==}
engines: {node: '>= 6'}
glob-parent@6.0.2:
resolution: {integrity: sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==}
engines: {node: '>=10.13.0'}
glob-to-regex.js@1.2.0:
resolution: {integrity: sha512-QMwlOQKU/IzqMUOAZWubUOT8Qft+Y0KQWnX9nK3ch0CJg0tTp4TvGZsTfudYKv2NzoQSyPcnA6TYeIQ3jGichQ==}
engines: {node: '>=10.0'}
peerDependencies:
tslib: '2'
glob-to-regexp@0.4.1:
resolution: {integrity: sha512-lkX1HJXwyMcprw/5YUZc2s7DrpAiHB21/V+E1rHUrVNokkvB6bqMzT0VfV6/86ZNabt1k14YOIaT7nDvOX3Iiw==}
glob@13.0.2:
resolution: {integrity: sha512-035InabNu/c1lW0tzPhAgapKctblppqsKKG9ZaNzbr+gXwWMjXoiyGSyB9sArzrjG7jY+zntRq5ZSUYemrnWVQ==}
engines: {node: 20 || >=22}
gopd@1.2.0:
resolution: {integrity: sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==}
engines: {node: '>= 0.4'}
graceful-fs@4.2.11:
resolution: {integrity: sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==}
handle-thing@2.0.1:
resolution: {integrity: sha512-9Qn4yBxelxoh2Ow62nP+Ka/kMnOXRi8BXnRaUwezLNhqelnN49xKz4F/dPP8OYLxLxq6JDtZb2i9XznUQbNPTg==}
has-flag@4.0.0:
resolution: {integrity: sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==}
engines: {node: '>=8'}
has-symbols@1.1.0:
resolution: {integrity: sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==}
engines: {node: '>= 0.4'}
hasown@2.0.2:
resolution: {integrity: sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==}
engines: {node: '>= 0.4'}
hono@4.11.9:
resolution: {integrity: sha512-Eaw2YTGM6WOxA6CXbckaEvslr2Ne4NFsKrvc0v97JD5awbmeBLO5w9Ho9L9kmKonrwF9RJlW6BxT1PVv/agBHQ==}
engines: {node: '>=16.9.0'}
hosted-git-info@9.0.2:
resolution: {integrity: sha512-M422h7o/BR3rmCQ8UHi7cyyMqKltdP9Uo+J2fXK+RSAY+wTcKOIRyhTuKv4qn+DJf3g+PL890AzId5KZpX+CBg==}
engines: {node: ^20.17.0 || >=22.9.0}
hpack.js@2.1.6:
resolution: {integrity: sha512-zJxVehUdMGIKsRaNt7apO2Gqp0BdqW5yaiGHXXmbpvxgBYVZnAql+BJb4RO5ad2MgpbZKn5G6nMnegrH1FcNYQ==}
htmlparser2@10.1.0:
resolution: {integrity: sha512-VTZkM9GWRAtEpveh7MSF6SjjrpNVNNVJfFup7xTY3UpFtm67foy9HDVXneLtFVt4pMz5kZtgNcvCniNFb1hlEQ==}
http-cache-semantics@4.2.0:
resolution: {integrity: sha512-dTxcvPXqPvXBQpq5dUr6mEMJX4oIEFv6bwom3FDwKRDsuIjjJGANqhBuoAn9c1RQJIdAKav33ED65E2ys+87QQ==}
http-deceiver@1.2.7:
resolution: {integrity: sha512-LmpOGxTfbpgtGVxJrj5k7asXHCgNZp5nLfp+hWc8QQRqtb7fUy6kRY3BO1h9ddF6yIPYUARgxGOwB42DnxIaNw==}
http-errors@1.8.1:
resolution: {integrity: sha512-Kpk9Sm7NmI+RHhnj6OIWDI1d6fIoFAtFt9RLaTMRlg/8w49juAStsrBgp0Dp4OdxdVbRIeKhtCUvoi/RuAhO4g==}
engines: {node: '>= 0.6'}
http-errors@2.0.1:
resolution: {integrity: sha512-4FbRdAX+bSdmo4AUFuS0WNiPz8NgFt+r8ThgNWmlrjQjt1Q7ZR9+zTlce2859x4KSXrwIsaeTqDoKQmtP8pLmQ==}
engines: {node: '>= 0.8'}
http-parser-js@0.5.10:
resolution: {integrity: sha512-Pysuw9XpUq5dVc/2SMHpuTY01RFl8fttgcyunjL7eEMhGM3cI4eOmiCycJDVCo/7O7ClfQD3SaI6ftDzqOXYMA==}
http-proxy-agent@7.0.2:
resolution: {integrity: sha512-T1gkAiYYDWYx3V5Bmyu7HcfcvL7mUrTWiM6yOfa3PIphViJ/gFPbvidQ+veqSOHci/PxBcDabeUNCzpOODJZig==}
engines: {node: '>= 14'}
http-proxy-middleware@2.0.9:
resolution: {integrity: sha512-c1IyJYLYppU574+YI7R4QyX2ystMtVXZwIdzazUIPIJsHuWNd+mho2j+bKoHftndicGj9yh+xjd+l0yj7VeT1Q==}
engines: {node: '>=12.0.0'}
peerDependencies:
'@types/express': ^4.17.13
peerDependenciesMeta:
'@types/express':
optional: true
http-proxy-middleware@3.0.5:
resolution: {integrity: sha512-GLZZm1X38BPY4lkXA01jhwxvDoOkkXqjgVyUzVxiEK4iuRu03PZoYHhHRwxnfhQMDuaxi3vVri0YgSro/1oWqg==}
engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0}
http-proxy@1.18.1:
resolution: {integrity: sha512-7mz/721AbnJwIVbnaSv1Cz3Am0ZLT/UBwkC92VlxhXv/k/BBQfM2fXElQNC27BVGr0uwUpplYPQM9LnaBMR5NQ==}
engines: {node: '>=8.0.0'}
https-proxy-agent@7.0.6:
resolution: {integrity: sha512-vK9P5/iUfdl95AI+JVyUuIcVtd4ofvtrOr3HNtM2yxC9bnMbEdp3x01OhQNnjb8IJYi38VlTE3mBXwcfvywuSw==}
engines: {node: '>= 14'}
hyperdyperid@1.2.0:
resolution: {integrity: sha512-Y93lCzHYgGWdrJ66yIktxiaGULYc6oGiABxhcO5AufBeOyoIdZF7bIfLaOrbM0iGIOXQQgxxRrFEnb+Y6w1n4A==}
engines: {node: '>=10.18'}
iconv-lite@0.4.24:
resolution: {integrity: sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==}
engines: {node: '>=0.10.0'}
iconv-lite@0.6.3:
resolution: {integrity: sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==}
engines: {node: '>=0.10.0'}
iconv-lite@0.7.2:
resolution: {integrity: sha512-im9DjEDQ55s9fL4EYzOAv0yMqmMBSZp6G0VvFyTMPKWxiSBHUj9NW/qqLmXUwXrrM7AvqSlTCfvqRb0cM8yYqw==}
engines: {node: '>=0.10.0'}
icss-utils@5.1.0:
resolution: {integrity: sha512-soFhflCVWLfRNOPU3iv5Z9VUdT44xFRbzjLsEzSr5AQmgqPMTHdU3PMT1Cf1ssx8fLNJDA1juftYl+PUcv3MqA==}
engines: {node: ^10 || ^12 || >= 14}
peerDependencies:
postcss: ^8.1.0
ignore-walk@8.0.0:
resolution: {integrity: sha512-FCeMZT4NiRQGh+YkeKMtWrOmBgWjHjMJ26WQWrRQyoyzqevdaGSakUaJW5xQYmjLlUVk2qUnCjYVBax9EKKg8A==}
engines: {node: ^20.17.0 || >=22.9.0}
image-size@0.5.5:
resolution: {integrity: sha512-6TDAlDPZxUFCv+fuOkIoXT/V/f3Qbq8e37p+YOiYrUv3v9cc3/6x78VdfPgFVaB9dZYeLUfKgHRebpkm/oP2VQ==}
engines: {node: '>=0.10.0'}
hasBin: true
immutable@5.1.4:
resolution: {integrity: sha512-p6u1bG3YSnINT5RQmx/yRZBpenIl30kVxkTLDyHLIMk0gict704Q9n+thfDI7lTRm9vXdDYutVzXhzcThxTnXA==}
import-fresh@3.3.1:
resolution: {integrity: sha512-TR3KfrTZTYLPB6jUjfx6MF9WcWrHL9su5TObK4ZkYgBdWKPOFoSoQIdEuTuR82pmtxH2spWG9h6etwfr1pLBqQ==}
engines: {node: '>=6'}
imurmurhash@0.1.4:
resolution: {integrity: sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==}
engines: {node: '>=0.8.19'}
inherits@2.0.4:
resolution: {integrity: sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==}
ini@6.0.0:
resolution: {integrity: sha512-IBTdIkzZNOpqm7q3dRqJvMaldXjDHWkEDfrwGEQTs5eaQMWV+djAhR+wahyNNMAa+qpbDUhBMVt4ZKNwpPm7xQ==}
engines: {node: ^20.17.0 || >=22.9.0}
ip-address@10.1.0:
resolution: {integrity: sha512-XXADHxXmvT9+CRxhXg56LJovE+bmWnEWB78LB83VZTprKTmaC5QfruXocxzTZ2Kl0DNwKuBdlIhjL8LeY8Sf8Q==}
engines: {node: '>= 12'}
ipaddr.js@1.9.1:
resolution: {integrity: sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g==}
engines: {node: '>= 0.10'}
ipaddr.js@2.3.0:
resolution: {integrity: sha512-Zv/pA+ciVFbCSBBjGfaKUya/CcGmUHzTydLMaTwrUUEM2DIEO3iZvueGxmacvmN50fGpGVKeTXpb2LcYQxeVdg==}
engines: {node: '>= 10'}
is-arrayish@0.2.1:
resolution: {integrity: sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg==}
is-binary-path@2.1.0:
resolution: {integrity: sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==}
engines: {node: '>=8'}
is-core-module@2.16.1:
resolution: {integrity: sha512-UfoeMA6fIJ8wTYFEUjelnaGI67v6+N7qXJEvQuIGa99l4xsCruSYOVSQ0uPANn4dAzm8lkYPaKLrrijLq7x23w==}
engines: {node: '>= 0.4'}
is-docker@3.0.0:
resolution: {integrity: sha512-eljcgEDlEns/7AXFosB5K/2nCM4P7FQPkGc/DWLy5rmFEWvZayGrik1d9/QIY5nJ4f9YsVvBkA6kJpHn9rISdQ==}
engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0}
hasBin: true
is-extglob@2.1.1:
resolution: {integrity: sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==}
engines: {node: '>=0.10.0'}
is-fullwidth-code-point@3.0.0:
resolution: {integrity: sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==}
engines: {node: '>=8'}
is-fullwidth-code-point@5.1.0:
resolution: {integrity: sha512-5XHYaSyiqADb4RnZ1Bdad6cPp8Toise4TzEjcOYDHZkTCbKgiUl7WTUCpNWHuxmDt91wnsZBc9xinNzopv3JMQ==}
engines: {node: '>=18'}
is-glob@4.0.3:
resolution: {integrity: sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==}
engines: {node: '>=0.10.0'}
is-in-ssh@1.0.0:
resolution: {integrity: sha512-jYa6Q9rH90kR1vKB6NM7qqd1mge3Fx4Dhw5TVlK1MUBqhEOuCagrEHMevNuCcbECmXZ0ThXkRm+Ymr51HwEPAw==}
engines: {node: '>=20'}
is-inside-container@1.0.0:
resolution: {integrity: sha512-KIYLCCJghfHZxqjYBE7rEy0OBuTd5xCHS7tHVgvCLkx7StIoaxwNW3hCALgEUjFfeRk+MG/Qxmp/vtETEF3tRA==}
engines: {node: '>=14.16'}
hasBin: true
is-interactive@2.0.0:
resolution: {integrity: sha512-qP1vozQRI+BMOPcjFzrjXuQvdak2pHNUMZoeG2eRbiSqyvbEf/wQtEOTOX1guk6E3t36RkaqiSt8A/6YElNxLQ==}
engines: {node: '>=12'}
is-network-error@1.3.0:
resolution: {integrity: sha512-6oIwpsgRfnDiyEDLMay/GqCl3HoAtH5+RUKW29gYkL0QA+ipzpDLA16yQs7/RHCSu+BwgbJaOUqa4A99qNVQVw==}
engines: {node: '>=16'}
is-number@7.0.0:
resolution: {integrity: sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==}
engines: {node: '>=0.12.0'}
is-plain-obj@3.0.0:
resolution: {integrity: sha512-gwsOE28k+23GP1B6vFl1oVh/WOzmawBrKwo5Ev6wMKzPkaXaCDIQKzLnvsA42DRlbVTWorkgTKIviAKCWkfUwA==}
engines: {node: '>=10'}
is-plain-object@2.0.4:
resolution: {integrity: sha512-h5PpgXkWitc38BBMYawTYMWJHFZJVnBquFE57xFpjB8pJFiF6gZ+bU+WyI/yqXiFR5mdLsgYNaPe8uao6Uv9Og==}
engines: {node: '>=0.10.0'}
is-plain-object@5.0.0:
resolution: {integrity: sha512-VRSzKkbMm5jMDoKLbltAkFQ5Qr7VDiTFGXxYFXXowVj387GeGNOCsOH6Msy00SGZ3Fp84b1Naa1psqgcCIEP5Q==}
engines: {node: '>=0.10.0'}
is-promise@4.0.0:
resolution: {integrity: sha512-hvpoI6korhJMnej285dSg6nu1+e6uxs7zG3BYAm5byqDsgJNWwxzM6z6iZiAgQR4TJ30JmBTOwqZUw3WlyH3AQ==}
is-unicode-supported@2.1.0:
resolution: {integrity: sha512-mE00Gnza5EEB3Ds0HfMyllZzbBrmLOX3vfWoj9A9PEnTfratQ/BcaJOuMhnkhjXvb2+FkY3VuHqtAGpTPmglFQ==}
engines: {node: '>=18'}
is-what@3.14.1:
resolution: {integrity: sha512-sNxgpk9793nzSs7bA6JQJGeIuRBQhAaNGG77kzYQgMkrID+lS6SlK07K5LaptscDlSaIgH+GPFzf+d75FVxozA==}
is-wsl@3.1.0:
resolution: {integrity: sha512-UcVfVfaK4Sc4m7X3dUSoHoozQGBEFeDC+zVo06t98xe8CzHSZZBekNXH+tu0NalHolcJ/QAGqS46Hef7QXBIMw==}
engines: {node: '>=16'}
isarray@1.0.0:
resolution: {integrity: sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==}
isexe@2.0.0:
resolution: {integrity: sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==}
isexe@4.0.0:
resolution: {integrity: sha512-FFUtZMpoZ8RqHS3XeXEmHWLA4thH+ZxCv2lOiPIn1Xc7CxrqhWzNSDzD+/chS/zbYezmiwWLdQC09JdQKmthOw==}
engines: {node: '>=20'}
isobject@3.0.1:
resolution: {integrity: sha512-WhB9zCku7EGTj/HQQRz5aUQEUeoQZH2bWcltRErOpymJ4boYE6wL9Tbr23krRPSZ+C5zqNSrSw+Cc7sZZ4b7vg==}
engines: {node: '>=0.10.0'}
istanbul-lib-coverage@3.2.2:
resolution: {integrity: sha512-O8dpsF+r0WV/8MNRKfnmrtCWhuKjxrq2w+jpzBL5UZKTi2LeVWnWOmWRxFlesJONmc+wLAGvKQZEOanko0LFTg==}
engines: {node: '>=8'}
istanbul-lib-instrument@6.0.3:
resolution: {integrity: sha512-Vtgk7L/R2JHyyGW07spoFlB8/lpjiOLTjMdms6AFMraYt3BaJauod/NGrfnVG/y4Ix1JEuMRPDPEj2ua+zz1/Q==}
engines: {node: '>=10'}
jest-worker@27.5.1:
resolution: {integrity: sha512-7vuh85V5cdDofPyxn58nrPjBktZo0u9x1g8WtjQol+jZDaE+fhN+cIvTj11GndBnMnyfrUOG1sZQxCdjKh+DKg==}
engines: {node: '>= 10.13.0'}
jiti@2.6.1:
resolution: {integrity: sha512-ekilCSN1jwRvIbgeg/57YFh8qQDNbwDb9xT/qu2DAHbFFZUicIl4ygVaAvzveMhMVr3LnpSKTNnwt8PoOfmKhQ==}
hasBin: true
jose@6.1.3:
resolution: {integrity: sha512-0TpaTfihd4QMNwrz/ob2Bp7X04yuxJkjRGi4aKmOqwhov54i6u79oCv7T+C7lo70MKH6BesI3vscD1yb/yzKXQ==}
js-tokens@4.0.0:
resolution: {integrity: sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==}
js-yaml@4.1.1:
resolution: {integrity: sha512-qQKT4zQxXl8lLwBtHMWwaTcGfFOZviOJet3Oy/xmGk2gZH677CJM9EvtfdSkgWcATZhj/55JZ0rmy3myCT5lsA==}
hasBin: true
jsesc@3.1.0:
resolution: {integrity: sha512-/sM3dO2FOzXjKQhJuo0Q173wf2KOo8t4I8vHy6lF9poUp7bKT0/NHE8fPX23PwfhnykfqnC2xRxOnVw5XuGIaA==}
engines: {node: '>=6'}
hasBin: true
json-parse-even-better-errors@2.3.1:
resolution: {integrity: sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==}
json-parse-even-better-errors@5.0.0:
resolution: {integrity: sha512-ZF1nxZ28VhQouRWhUcVlUIN3qwSgPuswK05s/HIaoetAoE/9tngVmCHjSxmSQPav1nd+lPtTL0YZ/2AFdR/iYQ==}
engines: {node: ^20.17.0 || >=22.9.0}
json-schema-traverse@1.0.0:
resolution: {integrity: sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==}
json-schema-typed@8.0.2:
resolution: {integrity: sha512-fQhoXdcvc3V28x7C7BMs4P5+kNlgUURe2jmUT1T//oBRMDrqy1QPelJimwZGo7Hg9VPV3EQV5Bnq4hbFy2vetA==}
json5@2.2.3:
resolution: {integrity: sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==}
engines: {node: '>=6'}
hasBin: true
jsonc-parser@3.3.1:
resolution: {integrity: sha512-HUgH65KyejrUFPvHFPbqOY0rsFip3Bo5wb4ngvdi1EpCYWUQDC5V+Y7mZws+DLkr4M//zQJoanu1SP+87Dv1oQ==}
jsonparse@1.3.1:
resolution: {integrity: sha512-POQXvpdL69+CluYsillJ7SUhKvytYjW9vG/GKpnf+xP8UWgYEM/RaMzHHofbALDiKbbP1W8UEYmgGl39WkPZsg==}
engines: {'0': node >= 0.2.0}
karma-source-map-support@1.4.0:
resolution: {integrity: sha512-RsBECncGO17KAoJCYXjv+ckIz+Ii9NCi+9enk+rq6XC81ezYkb4/RHE6CTXdA7IOJqoF3wcaLfVG0CPmE5ca6A==}
kind-of@6.0.3:
resolution: {integrity: sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==}
engines: {node: '>=0.10.0'}
launch-editor@2.12.0:
resolution: {integrity: sha512-giOHXoOtifjdHqUamwKq6c49GzBdLjvxrd2D+Q4V6uOHopJv7p9VJxikDsQ/CBXZbEITgUqSVHXLTG3VhPP1Dg==}
less-loader@12.3.0:
resolution: {integrity: sha512-0M6+uYulvYIWs52y0LqN4+QM9TqWAohYSNTo4htE8Z7Cn3G/qQMEmktfHmyJT23k+20kU9zHH2wrfFXkxNLtVw==}
engines: {node: '>= 18.12.0'}
peerDependencies:
'@rspack/core': 0.x || 1.x
less: ^3.5.0 || ^4.0.0
webpack: ^5.0.0
peerDependenciesMeta:
'@rspack/core':
optional: true
webpack:
optional: true
less@4.4.2:
resolution: {integrity: sha512-j1n1IuTX1VQjIy3tT7cyGbX7nvQOsFLoIqobZv4ttI5axP923gA44zUj6miiA6R5Aoms4sEGVIIcucXUbRI14g==}
engines: {node: '>=14'}
hasBin: true
license-webpack-plugin@4.0.2:
resolution: {integrity: sha512-771TFWFD70G1wLTC4oU2Cw4qvtmNrIw+wRvBtn+okgHl7slJVi7zfNcdmqDL72BojM30VNJ2UHylr1o77U37Jw==}
peerDependencies:
webpack: '*'
peerDependenciesMeta:
webpack:
optional: true
lines-and-columns@1.2.4:
resolution: {integrity: sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==}
listr2@9.0.5:
resolution: {integrity: sha512-ME4Fb83LgEgwNw96RKNvKV4VTLuXfoKudAmm2lP8Kk87KaMK0/Xrx/aAkMWmT8mDb+3MlFDspfbCs7adjRxA2g==}
engines: {node: '>=20.0.0'}
lmdb@3.4.4:
resolution: {integrity: sha512-+Y2DqovevLkb6DrSQ6SXTYLEd6kvlRbhsxzgJrk7BUfOVA/mt21ak6pFDZDKxiAczHMWxrb02kXBTSTIA0O94A==}
hasBin: true
loader-runner@4.3.1:
resolution: {integrity: sha512-IWqP2SCPhyVFTBtRcgMHdzlf9ul25NwaFx4wCEH/KjAXuuHY4yNjvPXsBokp8jCB936PyWRaPKUNh8NvylLp2Q==}
engines: {node: '>=6.11.5'}
loader-utils@2.0.4:
resolution: {integrity: sha512-xXqpXoINfFhgua9xiqD8fPFHgkoq1mmmpE92WlDbm9rNRd/EbRb+Gqf908T2DMfuHjjJlksiK2RbHVOdD/MqSw==}
engines: {node: '>=8.9.0'}
loader-utils@3.3.1:
resolution: {integrity: sha512-FMJTLMXfCLMLfJxcX9PFqX5qD88Z5MRGaZCVzfuqeZSPsyiBzs+pahDQjbIWz2QIzPZz0NX9Zy4FX3lmK6YHIg==}
engines: {node: '>= 12.13.0'}
locate-path@6.0.0:
resolution: {integrity: sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==}
engines: {node: '>=10'}
lodash.debounce@4.0.8:
resolution: {integrity: sha512-FT1yDzDYEoYWhnSGnpE/4Kj1fLZkDFyqRb7fNt6FdYOSxlUWAtp42Eh6Wb0rGIv/m9Bgo7x4GhQbm5Ys4SG5ow==}
log-symbols@7.0.1:
resolution: {integrity: sha512-ja1E3yCr9i/0hmBVaM0bfwDjnGy8I/s6PP4DFp+yP+a+mrHO4Rm7DtmnqROTUkHIkqffC84YY7AeqX6oFk0WFg==}
engines: {node: '>=18'}
log-update@6.1.0:
resolution: {integrity: sha512-9ie8ItPR6tjY5uYJh8K/Zrv/RMZ5VOlOWvtZdEHYSTFKZfIBPQa9tOAEeAWhd+AnIneLJ22w5fjOYtoutpWq5w==}
engines: {node: '>=18'}
lru-cache@11.2.6:
resolution: {integrity: sha512-ESL2CrkS/2wTPfuend7Zhkzo2u0daGJ/A2VucJOgQ/C48S/zB8MMeMHSGKYpXhIjbPxfuezITkaBH1wqv00DDQ==}
engines: {node: 20 || >=22}
lru-cache@5.1.1:
resolution: {integrity: sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==}
magic-string@0.30.21:
resolution: {integrity: sha512-vd2F4YUyEXKGcLHoq+TEyCjxueSeHnFxyyjNp80yg0XV4vUhnDer/lvvlqM/arB5bXQN5K2/3oinyCRyx8T2CQ==}
make-dir@2.1.0:
resolution: {integrity: sha512-LS9X+dc8KLxXCb8dni79fLIIUA5VyZoyjSMCwTluaXA0o27cCK0bhXkpgw+sTXVpPy/lSO57ilRixqk0vDmtRA==}
engines: {node: '>=6'}
make-error@1.3.6:
resolution: {integrity: sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==}
make-fetch-happen@15.0.3:
resolution: {integrity: sha512-iyyEpDty1mwW3dGlYXAJqC/azFn5PPvgKVwXayOGBSmKLxhKZ9fg4qIan2ePpp1vJIwfFiO34LAPZgq9SZW9Aw==}
engines: {node: ^20.17.0 || >=22.9.0}
math-intrinsics@1.1.0:
resolution: {integrity: sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==}
engines: {node: '>= 0.4'}
media-typer@0.3.0:
resolution: {integrity: sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ==}
engines: {node: '>= 0.6'}
media-typer@1.1.0:
resolution: {integrity: sha512-aisnrDP4GNe06UcKFnV5bfMNPBUw4jsLGaWwWfnH3v02GnBuXX2MCVn5RbrWo0j3pczUilYblq7fQ7Nw2t5XKw==}
engines: {node: '>= 0.8'}
memfs@4.56.10:
resolution: {integrity: sha512-eLvzyrwqLHnLYalJP7YZ3wBe79MXktMdfQbvMrVD80K+NhrIukCVBvgP30zTJYEEDh9hZ/ep9z0KOdD7FSHo7w==}
peerDependencies:
tslib: '2'
merge-descriptors@1.0.3:
resolution: {integrity: sha512-gaNvAS7TZ897/rVaZ0nMtAyxNyi/pdbjbAwUpFQpN70GqnVfOiXpeUUMKRBmzXaSQ8DdTX4/0ms62r2K+hE6mQ==}
merge-descriptors@2.0.0:
resolution: {integrity: sha512-Snk314V5ayFLhp3fkUREub6WtjBfPdCPY1Ln8/8munuLuiYhsABgBVWsozAG+MWMbVEvcdcpbi9R7ww22l9Q3g==}
engines: {node: '>=18'}
merge-stream@2.0.0:
resolution: {integrity: sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==}
methods@1.1.2:
resolution: {integrity: sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w==}
engines: {node: '>= 0.6'}
micromatch@4.0.8:
resolution: {integrity: sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==}
engines: {node: '>=8.6'}
mime-db@1.52.0:
resolution: {integrity: sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==}
engines: {node: '>= 0.6'}
mime-db@1.54.0:
resolution: {integrity: sha512-aU5EJuIN2WDemCcAp2vFBfp/m4EAhWJnUNSSw0ixs7/kXbd6Pg64EmwJkNdFhB8aWt1sH2CTXrLxo/iAGV3oPQ==}
engines: {node: '>= 0.6'}
mime-types@2.1.35:
resolution: {integrity: sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==}
engines: {node: '>= 0.6'}
mime-types@3.0.2:
resolution: {integrity: sha512-Lbgzdk0h4juoQ9fCKXW4by0UJqj+nOOrI9MJ1sSj4nI8aI2eo1qmvQEie4VD1glsS250n15LsWsYtCugiStS5A==}
engines: {node: '>=18'}
mime@1.6.0:
resolution: {integrity: sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==}
engines: {node: '>=4'}
hasBin: true
mimic-function@5.0.1:
resolution: {integrity: sha512-VP79XUPxV2CigYP3jWwAUFSku2aKqBH7uTAapFWCBqutsbmDo96KY5o8uh6U+/YSIn5OxJnXp73beVkpqMIGhA==}
engines: {node: '>=18'}
mini-css-extract-plugin@2.9.4:
resolution: {integrity: sha512-ZWYT7ln73Hptxqxk2DxPU9MmapXRhxkJD6tkSR04dnQxm8BGu2hzgKLugK5yySD97u/8yy7Ma7E76k9ZdvtjkQ==}
engines: {node: '>= 12.13.0'}
peerDependencies:
webpack: ^5.0.0
minimalistic-assert@1.0.1:
resolution: {integrity: sha512-UtJcAD4yEaGtjPezWuO9wC4nwUnVH/8/Im3yEHQP4b67cXlD/Qr9hdITCU1xDbSEXg2XKNaP8jsReV7vQd00/A==}
minimatch@10.1.2:
resolution: {integrity: sha512-fu656aJ0n2kcXwsnwnv9g24tkU5uSmOlTjd6WyyaKm2Z+h1qmY6bAjrcaIxF/BslFqbZ8UBtbJi7KgQOZD2PTw==}
engines: {node: 20 || >=22}
minipass-collect@2.0.1:
resolution: {integrity: sha512-D7V8PO9oaz7PWGLbCACuI1qEOsq7UKfLotx/C0Aet43fCUB/wfQ7DYeq2oR/svFJGYDHPr38SHATeaj/ZoKHKw==}
engines: {node: '>=16 || 14 >=14.17'}
minipass-fetch@5.0.1:
resolution: {integrity: sha512-yHK8pb0iCGat0lDrs/D6RZmCdaBT64tULXjdxjSMAqoDi18Q3qKEUTHypHQZQd9+FYpIS+lkvpq6C/R6SbUeRw==}
engines: {node: ^20.17.0 || >=22.9.0}
minipass-flush@1.0.5:
resolution: {integrity: sha512-JmQSYYpPUqX5Jyn1mXaRwOda1uQ8HP5KAT/oDSLCzt1BYRhQU0/hDtsB1ufZfEEzMZ9aAVmsBw8+FWsIXlClWw==}
engines: {node: '>= 8'}
minipass-pipeline@1.2.4:
resolution: {integrity: sha512-xuIq7cIOt09RPRJ19gdi4b+RiNvDFYe5JH+ggNvBqGqpQXcru3PcRmOZuHBKWK1Txf9+cQ+HMVN4d6z46LZP7A==}
engines: {node: '>=8'}
minipass-sized@2.0.0:
resolution: {integrity: sha512-zSsHhto5BcUVM2m1LurnXY6M//cGhVaegT71OfOXoprxT6o780GZd792ea6FfrQkuU4usHZIUczAQMRUE2plzA==}
engines: {node: '>=8'}
minipass@3.3.6:
resolution: {integrity: sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==}
engines: {node: '>=8'}
minipass@7.1.2:
resolution: {integrity: sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw==}
engines: {node: '>=16 || 14 >=14.17'}
minizlib@3.1.0:
resolution: {integrity: sha512-KZxYo1BUkWD2TVFLr0MQoM8vUUigWD3LlD83a/75BqC+4qE0Hb1Vo5v1FgcfaNXvfXzr+5EhQ6ing/CaBijTlw==}
engines: {node: '>= 18'}
mrmime@2.0.1:
resolution: {integrity: sha512-Y3wQdFg2Va6etvQ5I82yUhGdsKrcYox6p7FfL1LbK2J4V01F9TGlepTIhnK24t7koZibmg82KGglhA1XK5IsLQ==}
engines: {node: '>=10'}
ms@2.0.0:
resolution: {integrity: sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==}
ms@2.1.3:
resolution: {integrity: sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==}
msgpackr-extract@3.0.3:
resolution: {integrity: sha512-P0efT1C9jIdVRefqjzOQ9Xml57zpOXnIuS+csaB4MdZbTdmGDLo8XhzBG1N7aO11gKDDkJvBLULeFTo46wwreA==}
hasBin: true
msgpackr@1.11.8:
resolution: {integrity: sha512-bC4UGzHhVvgDNS7kn9tV8fAucIYUBuGojcaLiz7v+P63Lmtm0Xeji8B/8tYKddALXxJLpwIeBmUN3u64C4YkRA==}
multicast-dns@7.2.5:
resolution: {integrity: sha512-2eznPJP8z2BFLX50tf0LuODrpINqP1RVIm/CObbTcBRITQgmC/TjcREF1NeTBzIcR5XO/ukWo+YHOjBbFwIupg==}
hasBin: true
mute-stream@2.0.0:
resolution: {integrity: sha512-WWdIxpyjEn+FhQJQQv9aQAYlHoNVdzIzUySNV1gHUPDSdZJ3yZn7pAAbQcV7B56Mvu881q9FZV+0Vx2xC44VWA==}
engines: {node: ^18.17.0 || >=20.5.0}
nanoid@3.3.11:
resolution: {integrity: sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w==}
engines: {node: ^10 || ^12 || ^13.7 || ^14 || >=15.0.1}
hasBin: true
needle@3.3.1:
resolution: {integrity: sha512-6k0YULvhpw+RoLNiQCRKOl09Rv1dPLr8hHnVjHqdolKwDrdNyk+Hmrthi4lIGPPz3r39dLx0hsF5s40sZ3Us4Q==}
engines: {node: '>= 4.4.x'}
hasBin: true
negotiator@0.6.3:
resolution: {integrity: sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg==}
engines: {node: '>= 0.6'}
negotiator@0.6.4:
resolution: {integrity: sha512-myRT3DiWPHqho5PrJaIRyaMv2kgYf0mUVgBNOYMuCH5Ki1yEiQaf/ZJuQ62nvpc44wL5WDbTX7yGJi1Neevw8w==}
engines: {node: '>= 0.6'}
negotiator@1.0.0:
resolution: {integrity: sha512-8Ofs/AUQh8MaEcrlq5xOX0CQ9ypTF5dl78mjlMNfOK08fzpgTHQRQPBxcPlEtIw0yRpws+Zo/3r+5WRby7u3Gg==}
engines: {node: '>= 0.6'}
neo-async@2.6.2:
resolution: {integrity: sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw==}
node-addon-api@6.1.0:
resolution: {integrity: sha512-+eawOlIgy680F0kBzPUNFhMZGtJ1YmqM6l4+Crf4IkImjYrO/mqPwRMh352g23uIaQKFItcQ64I7KMaJxHgAVA==}
node-addon-api@7.1.1:
resolution: {integrity: sha512-5m3bsyrjFWE1xf7nz7YXdN4udnVtXK6/Yfgn5qnahL6bCkf2yKt4k3nuTKAtT4r3IG8JNR2ncsIMdZuAzJjHQQ==}
node-forge@1.3.3:
resolution: {integrity: sha512-rLvcdSyRCyouf6jcOIPe/BgwG/d7hKjzMKOas33/pHEr6gbq18IK9zV7DiPvzsz0oBJPme6qr6H6kGZuI9/DZg==}
engines: {node: '>= 6.13.0'}
node-gyp-build-optional-packages@5.2.2:
resolution: {integrity: sha512-s+w+rBWnpTMwSFbaE0UXsRlg7hU4FjekKU4eyAih5T8nJuNZT1nNsskXpxmeqSK9UzkBl6UgRlnKc8hz8IEqOw==}
hasBin: true
node-gyp@12.2.0:
resolution: {integrity: sha512-q23WdzrQv48KozXlr0U1v9dwO/k59NHeSzn6loGcasyf0UnSrtzs8kRxM+mfwJSf0DkX0s43hcqgnSO4/VNthQ==}
engines: {node: ^20.17.0 || >=22.9.0}
hasBin: true
node-releases@2.0.27:
resolution: {integrity: sha512-nmh3lCkYZ3grZvqcCH+fjmQ7X+H0OeZgP40OierEaAptX4XofMh5kwNbWh7lBduUzCcV/8kZ+NDLCwm2iorIlA==}
nopt@9.0.0:
resolution: {integrity: sha512-Zhq3a+yFKrYwSBluL4H9XP3m3y5uvQkB/09CwDruCiRmR/UJYnn9W4R48ry0uGC70aeTPKLynBtscP9efFFcPw==}
engines: {node: ^20.17.0 || >=22.9.0}
hasBin: true
normalize-path@3.0.0:
resolution: {integrity: sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==}
engines: {node: '>=0.10.0'}
npm-bundled@5.0.0:
resolution: {integrity: sha512-JLSpbzh6UUXIEoqPsYBvVNVmyrjVZ1fzEFbqxKkTJQkWBO3xFzFT+KDnSKQWwOQNbuWRwt5LSD6HOTLGIWzfrw==}
engines: {node: ^20.17.0 || >=22.9.0}
npm-install-checks@8.0.0:
resolution: {integrity: sha512-ScAUdMpyzkbpxoNekQ3tNRdFI8SJ86wgKZSQZdUxT+bj0wVFpsEMWnkXP0twVe1gJyNF5apBWDJhhIbgrIViRA==}
engines: {node: ^20.17.0 || >=22.9.0}
npm-normalize-package-bin@5.0.0:
resolution: {integrity: sha512-CJi3OS4JLsNMmr2u07OJlhcrPxCeOeP/4xq67aWNai6TNWWbTrlNDgl8NcFKVlcBKp18GPj+EzbNIgrBfZhsag==}
engines: {node: ^20.17.0 || >=22.9.0}
npm-package-arg@13.0.2:
resolution: {integrity: sha512-IciCE3SY3uE84Ld8WZU23gAPPV9rIYod4F+rc+vJ7h7cwAJt9Vk6TVsK60ry7Uj3SRS3bqRRIGuTp9YVlk6WNA==}
engines: {node: ^20.17.0 || >=22.9.0}
npm-packlist@10.0.3:
resolution: {integrity: sha512-zPukTwJMOu5X5uvm0fztwS5Zxyvmk38H/LfidkOMt3gbZVCyro2cD/ETzwzVPcWZA3JOyPznfUN/nkyFiyUbxg==}
engines: {node: ^20.17.0 || >=22.9.0}
npm-pick-manifest@11.0.3:
resolution: {integrity: sha512-buzyCfeoGY/PxKqmBqn1IUJrZnUi1VVJTdSSRPGI60tJdUhUoSQFhs0zycJokDdOznQentgrpf8LayEHyyYlqQ==}
engines: {node: ^20.17.0 || >=22.9.0}
npm-registry-fetch@19.1.1:
resolution: {integrity: sha512-TakBap6OM1w0H73VZVDf44iFXsOS3h+L4wVMXmbWOQroZgFhMch0juN6XSzBNlD965yIKvWg2dfu7NSiaYLxtw==}
engines: {node: ^20.17.0 || >=22.9.0}
nth-check@2.1.1:
resolution: {integrity: sha512-lqjrjmaOoAnWfMmBPL+XNnynZh2+swxiX3WUE0s4yEHI6m+AwrK2UZOimIRl3X/4QctVqS8AiZjFqyOGrMXb/w==}
object-assign@4.1.1:
resolution: {integrity: sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==}
engines: {node: '>=0.10.0'}
object-inspect@1.13.4:
resolution: {integrity: sha512-W67iLl4J2EXEGTbfeHCffrjDfitvLANg0UlX3wFUUSTx92KXRFegMHUVgSqE+wvhAbi4WqjGg9czysTV2Epbew==}
engines: {node: '>= 0.4'}
obuf@1.1.2:
resolution: {integrity: sha512-PX1wu0AmAdPqOL1mWhqmlOd8kOIZQwGZw6rh7uby9fTc5lhaOWFLX3I6R1hrF9k3zUY40e6igsLGkDXK92LJNg==}
on-finished@2.4.1:
resolution: {integrity: sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg==}
engines: {node: '>= 0.8'}
on-headers@1.1.0:
resolution: {integrity: sha512-737ZY3yNnXy37FHkQxPzt4UZ2UWPWiCZWLvFZ4fu5cueciegX0zGPnrlY6bwRg4FdQOe9YU8MkmJwGhoMybl8A==}
engines: {node: '>= 0.8'}
once@1.4.0:
resolution: {integrity: sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==}
onetime@7.0.0:
resolution: {integrity: sha512-VXJjc87FScF88uafS3JllDgvAm+c/Slfz06lorj2uAY34rlUu0Nt+v8wreiImcrgAjjIHp1rXpTDlLOGw29WwQ==}
engines: {node: '>=18'}
open@10.2.0:
resolution: {integrity: sha512-YgBpdJHPyQ2UE5x+hlSXcnejzAvD0b22U2OuAP+8OnlJT+PjWPxtgmGqKKc+RgTM63U9gN0YzrYc71R2WT/hTA==}
engines: {node: '>=18'}
open@11.0.0:
resolution: {integrity: sha512-smsWv2LzFjP03xmvFoJ331ss6h+jixfA4UUV/Bsiyuu4YJPfN+FIQGOIiv4w9/+MoHkfkJ22UIaQWRVFRfH6Vw==}
engines: {node: '>=20'}
ora@9.0.0:
resolution: {integrity: sha512-m0pg2zscbYgWbqRR6ABga5c3sZdEon7bSgjnlXC64kxtxLOyjRcbbUkLj7HFyy/FTD+P2xdBWu8snGhYI0jc4A==}
engines: {node: '>=20'}
ordered-binary@1.6.1:
resolution: {integrity: sha512-QkCdPooczexPLiXIrbVOPYkR3VO3T6v2OyKRkR1Xbhpy7/LAVXwahnRCgRp78Oe/Ehf0C/HATAxfSr6eA1oX+w==}
p-limit@3.1.0:
resolution: {integrity: sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==}
engines: {node: '>=10'}
p-locate@5.0.0:
resolution: {integrity: sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==}
engines: {node: '>=10'}
p-map@7.0.4:
resolution: {integrity: sha512-tkAQEw8ysMzmkhgw8k+1U/iPhWNhykKnSk4Rd5zLoPJCuJaGRPo6YposrZgaxHKzDHdDWWZvE/Sk7hsL2X/CpQ==}
engines: {node: '>=18'}
p-retry@6.2.1:
resolution: {integrity: sha512-hEt02O4hUct5wtwg4H4KcWgDdm+l1bOaEy/hWzd8xtXB9BqxTWBBhb+2ImAtH4Cv4rPjV76xN3Zumqk3k3AhhQ==}
engines: {node: '>=16.17'}
pacote@21.0.4:
resolution: {integrity: sha512-RplP/pDW0NNNDh3pnaoIWYPvNenS7UqMbXyvMqJczosiFWTeGGwJC2NQBLqKf4rGLFfwCOnntw1aEp9Jiqm1MA==}
engines: {node: ^20.17.0 || >=22.9.0}
hasBin: true
parent-module@1.0.1:
resolution: {integrity: sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==}
engines: {node: '>=6'}
parse-json@5.2.0:
resolution: {integrity: sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==}
engines: {node: '>=8'}
parse-node-version@1.0.1:
resolution: {integrity: sha512-3YHlOa/JgH6Mnpr05jP9eDG254US9ek25LyIxZlDItp2iJtwyaXQb57lBYLdT3MowkUFYEV2XXNAYIPlESvJlA==}
engines: {node: '>= 0.10'}
parse5-html-rewriting-stream@8.0.0:
resolution: {integrity: sha512-wzh11mj8KKkno1pZEu+l2EVeWsuKDfR5KNWZOTsslfUX8lPDZx77m9T0kIoAVkFtD1nx6YF8oh4BnPHvxMtNMw==}
parse5-sax-parser@8.0.0:
resolution: {integrity: sha512-/dQ8UzHZwnrzs3EvDj6IkKrD/jIZyTlB+8XrHJvcjNgRdmWruNdN9i9RK/JtxakmlUdPwKubKPTCqvbTgzGhrw==}
parse5@8.0.0:
resolution: {integrity: sha512-9m4m5GSgXjL4AjumKzq1Fgfp3Z8rsvjRNbnkVwfu2ImRqE5D0LnY2QfDen18FSY9C573YU5XxSapdHZTZ2WolA==}
parseurl@1.3.3:
resolution: {integrity: sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ==}
engines: {node: '>= 0.8'}
path-exists@4.0.0:
resolution: {integrity: sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==}
engines: {node: '>=8'}
path-key@3.1.1:
resolution: {integrity: sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==}
engines: {node: '>=8'}
path-parse@1.0.7:
resolution: {integrity: sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==}
path-scurry@2.0.1:
resolution: {integrity: sha512-oWyT4gICAu+kaA7QWk/jvCHWarMKNs6pXOGWKDTr7cw4IGcUbW+PeTfbaQiLGheFRpjo6O9J0PmyMfQPjH71oA==}
engines: {node: 20 || >=22}
path-to-regexp@0.1.12:
resolution: {integrity: sha512-RA1GjUVMnvYFxuqovrEqZoxxW5NUZqbwKtYz/Tt7nXerk0LbLblQmrsgdeOxV5SFHf0UDggjS/bSeOZwt1pmEQ==}
path-to-regexp@8.3.0:
resolution: {integrity: sha512-7jdwVIRtsP8MYpdXSwOS0YdD0Du+qOoF/AEPIt88PcCFrZCzx41oxku1jD88hZBwbNUIEfpqvuhjFaMAqMTWnA==}
picocolors@1.1.1:
resolution: {integrity: sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==}
picomatch@2.3.1:
resolution: {integrity: sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==}
engines: {node: '>=8.6'}
picomatch@4.0.3:
resolution: {integrity: sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==}
engines: {node: '>=12'}
pify@4.0.1:
resolution: {integrity: sha512-uB80kBFb/tfd68bVleG9T5GGsGPjJrLAUpR5PZIrhBnIaRTQRjqdJSsIKkOP6OAIFbj7GOrcudc5pNjZ+geV2g==}
engines: {node: '>=6'}
piscina@5.1.4:
resolution: {integrity: sha512-7uU4ZnKeQq22t9AsmHGD2w4OYQGonwFnTypDypaWi7Qr2EvQIFVtG8J5D/3bE7W123Wdc9+v4CZDu5hJXVCtBg==}
engines: {node: '>=20.x'}
pkce-challenge@5.0.1:
resolution: {integrity: sha512-wQ0b/W4Fr01qtpHlqSqspcj3EhBvimsdh0KlHhH8HRZnMsEa0ea2fTULOXOS9ccQr3om+GcGRk4e+isrZWV8qQ==}
engines: {node: '>=16.20.0'}
postcss-loader@8.2.0:
resolution: {integrity: sha512-tHX+RkpsXVcc7st4dSdDGliI+r4aAQDuv+v3vFYHixb6YgjreG5AG4SEB0kDK8u2s6htqEEpKlkhSBUTvWKYnA==}
engines: {node: '>= 18.12.0'}
peerDependencies:
'@rspack/core': 0.x || 1.x
postcss: ^7.0.0 || ^8.0.1
webpack: ^5.0.0
peerDependenciesMeta:
'@rspack/core':
optional: true
webpack:
optional: true
postcss-media-query-parser@0.2.3:
resolution: {integrity: sha512-3sOlxmbKcSHMjlUXQZKQ06jOswE7oVkXPxmZdoB1r5l0q6gTFTQSHxNxOrCccElbW7dxNytifNEo8qidX2Vsig==}
postcss-modules-extract-imports@3.1.0:
resolution: {integrity: sha512-k3kNe0aNFQDAZGbin48pL2VNidTF0w4/eASDsxlyspobzU3wZQLOGj7L9gfRe0Jo9/4uud09DsjFNH7winGv8Q==}
engines: {node: ^10 || ^12 || >= 14}
peerDependencies:
postcss: ^8.1.0
postcss-modules-local-by-default@4.2.0:
resolution: {integrity: sha512-5kcJm/zk+GJDSfw+V/42fJ5fhjL5YbFDl8nVdXkJPLLW+Vf9mTD5Xe0wqIaDnLuL2U6cDNpTr+UQ+v2HWIBhzw==}
engines: {node: ^10 || ^12 || >= 14}
peerDependencies:
postcss: ^8.1.0
postcss-modules-scope@3.2.1:
resolution: {integrity: sha512-m9jZstCVaqGjTAuny8MdgE88scJnCiQSlSrOWcTQgM2t32UBe+MUmFSO5t7VMSfAf/FJKImAxBav8ooCHJXCJA==}
engines: {node: ^10 || ^12 || >= 14}
peerDependencies:
postcss: ^8.1.0
postcss-modules-values@4.0.0:
resolution: {integrity: sha512-RDxHkAiEGI78gS2ofyvCsu7iycRv7oqw5xMWn9iMoR0N/7mf9D50ecQqUo5BZ9Zh2vH4bCUR/ktCqbB9m8vJjQ==}
engines: {node: ^10 || ^12 || >= 14}
peerDependencies:
postcss: ^8.1.0
postcss-selector-parser@7.1.1:
resolution: {integrity: sha512-orRsuYpJVw8LdAwqqLykBj9ecS5/cRHlI5+nvTo8LcCKmzDmqVORXtOIYEEQuL9D4BxtA1lm5isAqzQZCoQ6Eg==}
engines: {node: '>=4'}
postcss-value-parser@4.2.0:
resolution: {integrity: sha512-1NNCs6uurfkVbeXG4S8JFT9t19m45ICnif8zWLd5oPSZ50QnwMfK+H3jv408d4jw/7Bttv5axS5IiHoLaVNHeQ==}
postcss@8.5.6:
resolution: {integrity: sha512-3Ybi1tAuwAP9s0r1UQ2J4n5Y0G05bJkpUIO0/bI9MhwmD70S5aTWbXGBwxHrelT+XM1k6dM0pk+SwNkpTRN7Pg==}
engines: {node: ^10 || ^12 || >=14}
powershell-utils@0.1.0:
resolution: {integrity: sha512-dM0jVuXJPsDN6DvRpea484tCUaMiXWjuCn++HGTqUWzGDjv5tZkEZldAJ/UMlqRYGFrD/etByo4/xOuC/snX2A==}
engines: {node: '>=20'}
proc-log@6.1.0:
resolution: {integrity: sha512-iG+GYldRf2BQ0UDUAd6JQ/RwzaQy6mXmsk/IzlYyal4A4SNFw54MeH4/tLkF4I5WoWG9SQwuqWzS99jaFQHBuQ==}
engines: {node: ^20.17.0 || >=22.9.0}
process-nextick-args@2.0.1:
resolution: {integrity: sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==}
promise-retry@2.0.1:
resolution: {integrity: sha512-y+WKFlBR8BGXnsNlIHFGPZmyDf3DFMoLhaflAnyZgV6rG6xu+JwesTo2Q9R6XwYmtmwAFCkAk3e35jEdoeh/3g==}
engines: {node: '>=10'}
proxy-addr@2.0.7:
resolution: {integrity: sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg==}
engines: {node: '>= 0.10'}
prr@1.0.1:
resolution: {integrity: sha512-yPw4Sng1gWghHQWj0B3ZggWUm4qVbPwPFcRG8KyxiU7J2OHFSoEHKS+EZ3fv5l1t9CyCiop6l/ZYeWbrgoQejw==}
qs@6.14.1:
resolution: {integrity: sha512-4EK3+xJl8Ts67nLYNwqw/dsFVnCf+qR7RgXSK9jEEm9unao3njwMDdmsdvoKBKHzxd7tCYz5e5M+SnMjdtXGQQ==}
engines: {node: '>=0.6'}
randombytes@2.1.0:
resolution: {integrity: sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ==}
range-parser@1.2.1:
resolution: {integrity: sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==}
engines: {node: '>= 0.6'}
raw-body@2.5.3:
resolution: {integrity: sha512-s4VSOf6yN0rvbRZGxs8Om5CWj6seneMwK3oDb4lWDH0UPhWcxwOWw5+qk24bxq87szX1ydrwylIOp2uG1ojUpA==}
engines: {node: '>= 0.8'}
raw-body@3.0.2:
resolution: {integrity: sha512-K5zQjDllxWkf7Z5xJdV0/B0WTNqx6vxG70zJE4N0kBs4LovmEYWJzQGxC9bS9RAKu3bgM40lrd5zoLJ12MQ5BA==}
engines: {node: '>= 0.10'}
readable-stream@2.3.8:
resolution: {integrity: sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA==}
readable-stream@3.6.2:
resolution: {integrity: sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==}
engines: {node: '>= 6'}
readdirp@3.6.0:
resolution: {integrity: sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==}
engines: {node: '>=8.10.0'}
readdirp@4.1.2:
resolution: {integrity: sha512-GDhwkLfywWL2s6vEjyhri+eXmfH6j1L7JE27WhqLeYzoh/A3DBaYGEj2H/HFZCn/kMfim73FXxEJTw06WtxQwg==}
engines: {node: '>= 14.18.0'}
regenerate-unicode-properties@10.2.2:
resolution: {integrity: sha512-m03P+zhBeQd1RGnYxrGyDAPpWX/epKirLrp8e3qevZdVkKtnCrjjWczIbYc8+xd6vcTStVlqfycTx1KR4LOr0g==}
engines: {node: '>=4'}
regenerate@1.4.2:
resolution: {integrity: sha512-zrceR/XhGYU/d/opr2EKO7aRHUeiBI8qjtfHqADTwZd6Szfy16la6kqD0MIUs5z5hx6AaKa+PixpPrR289+I0A==}
regex-parser@2.3.1:
resolution: {integrity: sha512-yXLRqatcCuKtVHsWrNg0JL3l1zGfdXeEvDa0bdu4tCDQw0RpMDZsqbkyRTUnKMR0tXF627V2oEWjBEaEdqTwtQ==}
regexpu-core@6.4.0:
resolution: {integrity: sha512-0ghuzq67LI9bLXpOX/ISfve/Mq33a4aFRzoQYhnnok1JOFpmE/A2TBGkNVenOGEeSBCjIiWcc6MVOG5HEQv0sA==}
engines: {node: '>=4'}
regjsgen@0.8.0:
resolution: {integrity: sha512-RvwtGe3d7LvWiDQXeQw8p5asZUmfU1G/l6WbUXeHta7Y2PEIvBTwH6E2EfmYUK8pxcxEdEmaomqyp0vZZ7C+3Q==}
regjsparser@0.13.0:
resolution: {integrity: sha512-NZQZdC5wOE/H3UT28fVGL+ikOZcEzfMGk/c3iN9UGxzWHMa1op7274oyiUVrAG4B2EuFhus8SvkaYnhvW92p9Q==}
hasBin: true
require-from-string@2.0.2:
resolution: {integrity: sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==}
engines: {node: '>=0.10.0'}
requires-port@1.0.0:
resolution: {integrity: sha512-KigOCHcocU3XODJxsu8i/j8T9tzT4adHiecwORRQ0ZZFcp7ahwXuRU1m+yuO90C5ZUyGeGfocHDI14M3L3yDAQ==}
resolve-from@4.0.0:
resolution: {integrity: sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==}
engines: {node: '>=4'}
resolve-url-loader@5.0.0:
resolution: {integrity: sha512-uZtduh8/8srhBoMx//5bwqjQ+rfYOUq8zC9NrMUGtjBiGTtFJM42s58/36+hTqeqINcnYe08Nj3LkK9lW4N8Xg==}
engines: {node: '>=12'}
resolve@1.22.11:
resolution: {integrity: sha512-RfqAvLnMl313r7c9oclB1HhUEAezcpLjz95wFH4LVuhk9JF/r22qmVP9AMmOU4vMX7Q8pN8jwNg/CSpdFnMjTQ==}
engines: {node: '>= 0.4'}
hasBin: true
restore-cursor@5.1.0:
resolution: {integrity: sha512-oMA2dcrw6u0YfxJQXm342bFKX/E4sG9rbTzO9ptUcR/e8A33cHuvStiYOwH7fszkZlZ1z/ta9AAoPk2F4qIOHA==}
engines: {node: '>=18'}
retry@0.12.0:
resolution: {integrity: sha512-9LkiTwjUh6rT555DtE9rTX+BKByPfrMzEAtnlEtdEwr3Nkffwiihqe2bWADg+OQRjt9gl6ICdmB/ZFDCGAtSow==}
engines: {node: '>= 4'}
retry@0.13.1:
resolution: {integrity: sha512-XQBQ3I8W1Cge0Seh+6gjj03LbmRFWuoszgK9ooCpwYIrhhoO80pfq4cUkU5DkknwfOfFteRwlZ56PYOGYyFWdg==}
engines: {node: '>= 4'}
rfdc@1.4.1:
resolution: {integrity: sha512-q1b3N5QkRUWUl7iyylaaj3kOpIT0N2i9MqIEQXP73GVsN9cw3fdx8X63cEmWhJGi2PPCF23Ijp7ktmd39rawIA==}
rolldown@1.0.0-beta.58:
resolution: {integrity: sha512-v1FCjMZCan7f+xGAHBi+mqiE4MlH7I+SXEHSQSJoMOGNNB2UYtvMiejsq9YuUOiZjNeUeV/a21nSFbrUR+4ZCQ==}
engines: {node: ^20.19.0 || >=22.12.0}
hasBin: true
rollup@4.57.1:
resolution: {integrity: sha512-oQL6lgK3e2QZeQ7gcgIkS2YZPg5slw37hYufJ3edKlfQSGGm8ICoxswK15ntSzF/a8+h7ekRy7k7oWc3BQ7y8A==}
engines: {node: '>=18.0.0', npm: '>=8.0.0'}
hasBin: true
router@2.2.0:
resolution: {integrity: sha512-nLTrUKm2UyiL7rlhapu/Zl45FwNgkZGaCpZbIHajDYgwlJCOzLSk+cIPAnsEqV955GjILJnKbdQC1nVPz+gAYQ==}
engines: {node: '>= 18'}
run-applescript@7.1.0:
resolution: {integrity: sha512-DPe5pVFaAsinSaV6QjQ6gdiedWDcRCbUuiQfQa2wmWV7+xC9bGulGI8+TdRmoFkAPaBXk8CrAbnlY2ISniJ47Q==}
engines: {node: '>=18'}
rxjs@7.8.2:
resolution: {integrity: sha512-dhKf903U/PQZY6boNNtAGdWbG85WAbjT/1xYoZIC7FAY0yWapOBQVsVrDl58W86//e1VpMNBtRV4MaXfdMySFA==}
safe-buffer@5.1.2:
resolution: {integrity: sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==}
safe-buffer@5.2.1:
resolution: {integrity: sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==}
safer-buffer@2.1.2:
resolution: {integrity: sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==}
sass-loader@16.0.6:
resolution: {integrity: sha512-sglGzId5gmlfxNs4gK2U3h7HlVRfx278YK6Ono5lwzuvi1jxig80YiuHkaDBVsYIKFhx8wN7XSCI0M2IDS/3qA==}
engines: {node: '>= 18.12.0'}
peerDependencies:
'@rspack/core': 0.x || 1.x
node-sass: ^4.0.0 || ^5.0.0 || ^6.0.0 || ^7.0.0 || ^8.0.0 || ^9.0.0
sass: ^1.3.0
sass-embedded: '*'
webpack: ^5.0.0
peerDependenciesMeta:
'@rspack/core':
optional: true
node-sass:
optional: true
sass:
optional: true
sass-embedded:
optional: true
webpack:
optional: true
sass@1.97.1:
resolution: {integrity: sha512-uf6HoO8fy6ClsrShvMgaKUn14f2EHQLQRtpsZZLeU/Mv0Q1K5P0+x2uvH6Cub39TVVbWNSrraUhDAoFph6vh0A==}
engines: {node: '>=14.0.0'}
hasBin: true
sax@1.4.4:
resolution: {integrity: sha512-1n3r/tGXO6b6VXMdFT54SHzT9ytu9yr7TaELowdYpMqY/Ao7EnlQGmAQ1+RatX7Tkkdm6hONI2owqNx2aZj5Sw==}
engines: {node: '>=11.0.0'}
schema-utils@4.3.3:
resolution: {integrity: sha512-eflK8wEtyOE6+hsaRVPxvUKYCpRgzLqDTb8krvAsRIwOGlHoSgYLgBXoubGgLd2fT41/OUYdb48v4k4WWHQurA==}
engines: {node: '>= 10.13.0'}
select-hose@2.0.0:
resolution: {integrity: sha512-mEugaLK+YfkijB4fx0e6kImuJdCIt2LxCRcbEYPqRGCs4F2ogyfZU5IAZRdjCP8JPq2AtdNoC/Dux63d9Kiryg==}
selfsigned@2.4.1:
resolution: {integrity: sha512-th5B4L2U+eGLq1TVh7zNRGBapioSORUeymIydxgFpwww9d2qyKvtuPU2jJuHvYAwwqi2Y596QBL3eEqcPEYL8Q==}
engines: {node: '>=10'}
semver@5.7.2:
resolution: {integrity: sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g==}
hasBin: true
semver@6.3.1:
resolution: {integrity: sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==}
hasBin: true
semver@7.7.3:
resolution: {integrity: sha512-SdsKMrI9TdgjdweUSR9MweHA4EJ8YxHn8DFaDisvhVlUOe4BF1tLD7GAj0lIqWVl+dPb/rExr0Btby5loQm20Q==}
engines: {node: '>=10'}
hasBin: true
send@0.19.2:
resolution: {integrity: sha512-VMbMxbDeehAxpOtWJXlcUS5E8iXh6QmN+BkRX1GARS3wRaXEEgzCcB10gTQazO42tpNIya8xIyNx8fll1OFPrg==}
engines: {node: '>= 0.8.0'}
send@1.2.1:
resolution: {integrity: sha512-1gnZf7DFcoIcajTjTwjwuDjzuz4PPcY2StKPlsGAQ1+YH20IRVrBaXSWmdjowTJ6u8Rc01PoYOGHXfP1mYcZNQ==}
engines: {node: '>= 18'}
serialize-javascript@6.0.2:
resolution: {integrity: sha512-Saa1xPByTTq2gdeFZYLLo+RFE35NHZkAbqZeWNd3BpzppeVisAqpDjcp8dyf6uIvEqJRd46jemmyA4iFIeVk8g==}
serve-index@1.9.2:
resolution: {integrity: sha512-KDj11HScOaLmrPxl70KYNW1PksP4Nb/CLL2yvC+Qd2kHMPEEpfc4Re2e4FOay+bC/+XQl/7zAcWON3JVo5v3KQ==}
engines: {node: '>= 0.8.0'}
serve-static@1.16.3:
resolution: {integrity: sha512-x0RTqQel6g5SY7Lg6ZreMmsOzncHFU7nhnRWkKgWuMTu5NN0DR5oruckMqRvacAN9d5w6ARnRBXl9xhDCgfMeA==}
engines: {node: '>= 0.8.0'}
serve-static@2.2.1:
resolution: {integrity: sha512-xRXBn0pPqQTVQiC8wyQrKs2MOlX24zQ0POGaj0kultvoOCstBQM5yvOhAVSUwOMjQtTvsPWoNCHfPGwaaQJhTw==}
engines: {node: '>= 18'}
setprototypeof@1.2.0:
resolution: {integrity: sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==}
shallow-clone@3.0.1:
resolution: {integrity: sha512-/6KqX+GVUdqPuPPd2LxDDxzX6CAbjJehAAOKlNpqqUpAqPM6HeL8f+o3a+JsyGjn2lv0WY8UsTgUJjU9Ok55NA==}
engines: {node: '>=8'}
shebang-command@2.0.0:
resolution: {integrity: sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==}
engines: {node: '>=8'}
shebang-regex@3.0.0:
resolution: {integrity: sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==}
engines: {node: '>=8'}
shell-quote@1.8.3:
resolution: {integrity: sha512-ObmnIF4hXNg1BqhnHmgbDETF8dLPCggZWBjkQfhZpbszZnYur5DUljTcCHii5LC3J5E0yeO/1LIMyH+UvHQgyw==}
engines: {node: '>= 0.4'}
side-channel-list@1.0.0:
resolution: {integrity: sha512-FCLHtRD/gnpCiCHEiJLOwdmFP+wzCmDEkc9y7NsYxeF4u7Btsn1ZuwgwJGxImImHicJArLP4R0yX4c2KCrMrTA==}
engines: {node: '>= 0.4'}
side-channel-map@1.0.1:
resolution: {integrity: sha512-VCjCNfgMsby3tTdo02nbjtM/ewra6jPHmpThenkTYh8pG9ucZ/1P8So4u4FGBek/BjpOVsDCMoLA/iuBKIFXRA==}
engines: {node: '>= 0.4'}
side-channel-weakmap@1.0.2:
resolution: {integrity: sha512-WPS/HvHQTYnHisLo9McqBHOJk2FkHO/tlpvldyrnem4aeQp4hai3gythswg6p01oSoTl58rcpiFAjF2br2Ak2A==}
engines: {node: '>= 0.4'}
side-channel@1.1.0:
resolution: {integrity: sha512-ZX99e6tRweoUXqR+VBrslhda51Nh5MTQwou5tnUDgbtyM0dBgmhEDtWGP/xbKn6hqfPRHujUNwz5fy/wbbhnpw==}
engines: {node: '>= 0.4'}
signal-exit@4.1.0:
resolution: {integrity: sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==}
engines: {node: '>=14'}
sigstore@4.1.0:
resolution: {integrity: sha512-/fUgUhYghuLzVT/gaJoeVehLCgZiUxPCPMcyVNY0lIf/cTCz58K/WTI7PefDarXxp9nUKpEwg1yyz3eSBMTtgA==}
engines: {node: ^20.17.0 || >=22.9.0}
slice-ansi@7.1.2:
resolution: {integrity: sha512-iOBWFgUX7caIZiuutICxVgX1SdxwAVFFKwt1EvMYYec/NWO5meOJ6K5uQxhrYBdQJne4KxiqZc+KptFOWFSI9w==}
engines: {node: '>=18'}
smart-buffer@4.2.0:
resolution: {integrity: sha512-94hK0Hh8rPqQl2xXc3HsaBoOXKV20MToPkcXvwbISWLEs+64sBq5kFgn2kJDHb1Pry9yrP0dxrCI9RRci7RXKg==}
engines: {node: '>= 6.0.0', npm: '>= 3.0.0'}
sockjs@0.3.24:
resolution: {integrity: sha512-GJgLTZ7vYb/JtPSSZ10hsOYIvEYsjbNU+zPdIHcUaWVNUEPivzxku31865sSSud0Da0W4lEeOPlmw93zLQchuQ==}
socks-proxy-agent@8.0.5:
resolution: {integrity: sha512-HehCEsotFqbPW9sJ8WVYB6UbmIMv7kUUORIF2Nncq4VQvBfNBLibW9YZR5dlYCSUhwcD628pRllm7n+E+YTzJw==}
engines: {node: '>= 14'}
socks@2.8.7:
resolution: {integrity: sha512-HLpt+uLy/pxB+bum/9DzAgiKS8CX1EvbWxI4zlmgGCExImLdiad2iCwXT5Z4c9c3Eq8rP2318mPW2c+QbtjK8A==}
engines: {node: '>= 10.0.0', npm: '>= 3.0.0'}
source-map-js@1.2.1:
resolution: {integrity: sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==}
engines: {node: '>=0.10.0'}
source-map-loader@5.0.0:
resolution: {integrity: sha512-k2Dur7CbSLcAH73sBcIkV5xjPV4SzqO1NJ7+XaQl8if3VODDUj3FNchNGpqgJSKbvUfJuhVdv8K2Eu8/TNl2eA==}
engines: {node: '>= 18.12.0'}
peerDependencies:
webpack: ^5.72.1
source-map-support@0.5.21:
resolution: {integrity: sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w==}
source-map@0.6.1:
resolution: {integrity: sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==}
engines: {node: '>=0.10.0'}
source-map@0.7.6:
resolution: {integrity: sha512-i5uvt8C3ikiWeNZSVZNWcfZPItFQOsYTUAOkcUPGd8DqDy1uOUikjt5dG+uRlwyvR108Fb9DOd4GvXfT0N2/uQ==}
engines: {node: '>= 12'}
spdx-correct@3.2.0:
resolution: {integrity: sha512-kN9dJbvnySHULIluDHy32WHRUu3Og7B9sbY7tsFLctQkIqnMh3hErYgdMjTYuqmcXX+lK5T1lnUt3G7zNswmZA==}
spdx-exceptions@2.5.0:
resolution: {integrity: sha512-PiU42r+xO4UbUS1buo3LPJkjlO7430Xn5SVAhdpzzsPHsjbYVflnnFdATgabnLude+Cqu25p6N+g2lw/PFsa4w==}
spdx-expression-parse@3.0.1:
resolution: {integrity: sha512-cbqHunsQWnJNE6KhVSMsMeH5H/L9EpymbzqTQ3uLwNCLZ1Q481oWaofqH7nO6V07xlXwY6PhQdQ2IedWx/ZK4Q==}
spdx-license-ids@3.0.22:
resolution: {integrity: sha512-4PRT4nh1EImPbt2jASOKHX7PB7I+e4IWNLvkKFDxNhJlfjbYlleYQh285Z/3mPTHSAK/AvdMmw5BNNuYH8ShgQ==}
spdy-transport@3.0.0:
resolution: {integrity: sha512-hsLVFE5SjA6TCisWeJXFKniGGOpBgMLmerfO2aCyCU5s7nJ/rpAepqmFifv/GCbSbueEeAJJnmSQ2rKC/g8Fcw==}
spdy@4.0.2:
resolution: {integrity: sha512-r46gZQZQV+Kl9oItvl1JZZqJKGr+oEkB08A6BzkiR7593/7IbtuncXHd2YoYeTsG4157ZssMu9KYvUHLcjcDoA==}
engines: {node: '>=6.0.0'}
ssri@13.0.1:
resolution: {integrity: sha512-QUiRf1+u9wPTL/76GTYlKttDEBWV1ga9ZXW8BG6kfdeyyM8LGPix9gROyg9V2+P0xNyF3X2Go526xKFdMZrHSQ==}
engines: {node: ^20.17.0 || >=22.9.0}
statuses@1.5.0:
resolution: {integrity: sha512-OpZ3zP+jT1PI7I8nemJX4AKmAX070ZkYPVWV/AaKTJl+tXCTGyVdC1a4SL8RUQYEwk/f34ZX8UTykN68FwrqAA==}
engines: {node: '>= 0.6'}
statuses@2.0.2:
resolution: {integrity: sha512-DvEy55V3DB7uknRo+4iOGT5fP1slR8wQohVdknigZPMpMstaKJQWhwiYBACJE3Ul2pTnATihhBYnRhZQHGBiRw==}
engines: {node: '>= 0.8'}
stdin-discarder@0.2.2:
resolution: {integrity: sha512-UhDfHmA92YAlNnCfhmq0VeNL5bDbiZGg7sZ2IvPsXubGkiNa9EC+tUTsjBRsYUAz87btI6/1wf4XoVvQ3uRnmQ==}
engines: {node: '>=18'}
string-width@4.2.3:
resolution: {integrity: sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==}
engines: {node: '>=8'}
string-width@7.2.0:
resolution: {integrity: sha512-tsaTIkKW9b4N+AEj+SVA+WhJzV7/zMhcSu78mLKWSk7cXMOSHsBKFWUs0fWwq8QyK3MgJBQRX6Gbi4kYbdvGkQ==}
engines: {node: '>=18'}
string-width@8.1.1:
resolution: {integrity: sha512-KpqHIdDL9KwYk22wEOg/VIqYbrnLeSApsKT/bSj6Ez7pn3CftUiLAv2Lccpq1ALcpLV9UX1Ppn92npZWu2w/aw==}
engines: {node: '>=20'}
string_decoder@1.1.1:
resolution: {integrity: sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==}
string_decoder@1.3.0:
resolution: {integrity: sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==}
strip-ansi@6.0.1:
resolution: {integrity: sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==}
engines: {node: '>=8'}
strip-ansi@7.1.2:
resolution: {integrity: sha512-gmBGslpoQJtgnMAvOVqGZpEz9dyoKTCzy2nfz/n8aIFhN/jCE/rCmcxabB6jOOHV+0WNnylOxaxBQPSvcWklhA==}
engines: {node: '>=12'}
supports-color@8.1.1:
resolution: {integrity: sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==}
engines: {node: '>=10'}
supports-preserve-symlinks-flag@1.0.0:
resolution: {integrity: sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==}
engines: {node: '>= 0.4'}
tapable@2.3.0:
resolution: {integrity: sha512-g9ljZiwki/LfxmQADO3dEY1CbpmXT5Hm2fJ+QaGKwSXUylMybePR7/67YW7jOrrvjEgL1Fmz5kzyAjWVWLlucg==}
engines: {node: '>=6'}
tar@7.5.7:
resolution: {integrity: sha512-fov56fJiRuThVFXD6o6/Q354S7pnWMJIVlDBYijsTNx6jKSE4pvrDTs6lUnmGvNyfJwFQQwWy3owKz1ucIhveQ==}
engines: {node: '>=18'}
terser-webpack-plugin@5.3.16:
resolution: {integrity: sha512-h9oBFCWrq78NyWWVcSwZarJkZ01c2AyGrzs1crmHZO3QUg9D61Wu4NPjBy69n7JqylFF5y+CsUZYmYEIZ3mR+Q==}
engines: {node: '>= 10.13.0'}
peerDependencies:
'@swc/core': '*'
esbuild: '*'
uglify-js: '*'
webpack: ^5.1.0
peerDependenciesMeta:
'@swc/core':
optional: true
esbuild:
optional: true
uglify-js:
optional: true
terser@5.44.1:
resolution: {integrity: sha512-t/R3R/n0MSwnnazuPpPNVO60LX0SKL45pyl9YlvxIdkH0Of7D5qM2EVe+yASRIlY5pZ73nclYJfNANGWPwFDZw==}
engines: {node: '>=10'}
hasBin: true
thingies@2.5.0:
resolution: {integrity: sha512-s+2Bwztg6PhWUD7XMfeYm5qliDdSiZm7M7n8KjTkIsm3l/2lgVRc2/Gx/v+ZX8lT4FMA+i8aQvhcWylldc+ZNw==}
engines: {node: '>=10.18'}
peerDependencies:
tslib: ^2
thunky@1.1.0:
resolution: {integrity: sha512-eHY7nBftgThBqOyHGVN+l8gF0BucP09fMo0oO/Lb0w1OF80dJv+lDVpXG60WMQvkcxAkNybKsrEIE3ZtKGmPrA==}
tinyglobby@0.2.15:
resolution: {integrity: sha512-j2Zq4NyQYG5XMST4cbs02Ak8iJUdxRM0XI5QyxXuZOzKOINmWurp3smXu3y5wDcJrptwpSjgXHzIQxR0omXljQ==}
engines: {node: '>=12.0.0'}
to-regex-range@5.0.1:
resolution: {integrity: sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==}
engines: {node: '>=8.0'}
toidentifier@1.0.1:
resolution: {integrity: sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA==}
engines: {node: '>=0.6'}
tree-dump@1.1.0:
resolution: {integrity: sha512-rMuvhU4MCDbcbnleZTFezWsaZXRFemSqAM+7jPnzUl1fo9w3YEKOxAeui0fz3OI4EU4hf23iyA7uQRVko+UaBA==}
engines: {node: '>=10.0'}
peerDependencies:
tslib: '2'
tree-kill@1.2.2:
resolution: {integrity: sha512-L0Orpi8qGpRG//Nd+H90vFB+3iHnue1zSSGmNOOCh1GLJ7rUKVwV2HvijphGQS2UmhUZewS9VgvxYIdgr+fG1A==}
hasBin: true
ts-node@10.9.2:
resolution: {integrity: sha512-f0FFpIdcHgn8zcPSbf1dRevwt047YMnaiJM3u2w2RewrB+fob/zePZcrOyQoLMMO7aBIddLcQIEK5dYjkLnGrQ==}
hasBin: true
peerDependencies:
'@swc/core': '>=1.2.50'
'@swc/wasm': '>=1.2.50'
'@types/node': '*'
typescript: '>=2.7'
peerDependenciesMeta:
'@swc/core':
optional: true
'@swc/wasm':
optional: true
tslib@2.8.1:
resolution: {integrity: sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==}
tuf-js@4.1.0:
resolution: {integrity: sha512-50QV99kCKH5P/Vs4E2Gzp7BopNV+KzTXqWeaxrfu5IQJBOULRsTIS9seSsOVT8ZnGXzCyx55nYWAi4qJzpZKEQ==}
engines: {node: ^20.17.0 || >=22.9.0}
type-is@1.6.18:
resolution: {integrity: sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g==}
engines: {node: '>= 0.6'}
type-is@2.0.1:
resolution: {integrity: sha512-OZs6gsjF4vMp32qrCbiVSkrFmXtG/AZhY3t0iAMrMBiAZyV9oALtXO8hsrHbMXF9x6L3grlFuwW2oAz7cav+Gw==}
engines: {node: '>= 0.6'}
typed-assert@1.0.9:
resolution: {integrity: sha512-KNNZtayBCtmnNmbo5mG47p1XsCyrx6iVqomjcZnec/1Y5GGARaxPs6r49RnSPeUP3YjNYiU9sQHAtY4BBvnZwg==}
typescript@5.9.3:
resolution: {integrity: sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==}
engines: {node: '>=14.17'}
hasBin: true
undici-types@6.21.0:
resolution: {integrity: sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==}
undici@7.18.0:
resolution: {integrity: sha512-CfPufgPFHCYu0W4h1NiKW9+tNJ39o3kWm7Cm29ET1enSJx+AERfz7A2wAr26aY0SZbYzZlTBQtcHy15o60VZfQ==}
engines: {node: '>=20.18.1'}
unicode-canonical-property-names-ecmascript@2.0.1:
resolution: {integrity: sha512-dA8WbNeb2a6oQzAQ55YlT5vQAWGV9WXOsi3SskE3bcCdM0P4SDd+24zS/OCacdRq5BkdsRj9q3Pg6YyQoxIGqg==}
engines: {node: '>=4'}
unicode-match-property-ecmascript@2.0.0:
resolution: {integrity: sha512-5kaZCrbp5mmbz5ulBkDkbY0SsPOjKqVS35VpL9ulMPfSl0J0Xsm+9Evphv9CoIZFwre7aJoa94AY6seMKGVN5Q==}
engines: {node: '>=4'}
unicode-match-property-value-ecmascript@2.2.1:
resolution: {integrity: sha512-JQ84qTuMg4nVkx8ga4A16a1epI9H6uTXAknqxkGF/aFfRLw1xC/Bp24HNLaZhHSkWd3+84t8iXnp1J0kYcZHhg==}
engines: {node: '>=4'}
unicode-property-aliases-ecmascript@2.2.0:
resolution: {integrity: sha512-hpbDzxUY9BFwX+UeBnxv3Sh1q7HFxj48DTmXchNgRa46lO8uj3/1iEn3MiNUYTg1g9ctIqXCCERn8gYZhHC5lQ==}
engines: {node: '>=4'}
unique-filename@5.0.0:
resolution: {integrity: sha512-2RaJTAvAb4owyjllTfXzFClJ7WsGxlykkPvCr9pA//LD9goVq+m4PPAeBgNodGZ7nSrntT/auWpJ6Y5IFXcfjg==}
engines: {node: ^20.17.0 || >=22.9.0}
unique-slug@6.0.0:
resolution: {integrity: sha512-4Lup7Ezn8W3d52/xBhZBVdx323ckxa7DEvd9kPQHppTkLoJXw6ltrBCyj5pnrxj0qKDxYMJ56CoxNuFCscdTiw==}
engines: {node: ^20.17.0 || >=22.9.0}
unpipe@1.0.0:
resolution: {integrity: sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ==}
engines: {node: '>= 0.8'}
update-browserslist-db@1.2.3:
resolution: {integrity: sha512-Js0m9cx+qOgDxo0eMiFGEueWztz+d4+M3rGlmKPT+T4IS/jP4ylw3Nwpu6cpTTP8R1MAC1kF4VbdLt3ARf209w==}
hasBin: true
peerDependencies:
browserslist: '>= 4.21.0'
util-deprecate@1.0.2:
resolution: {integrity: sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==}
utils-merge@1.0.1:
resolution: {integrity: sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA==}
engines: {node: '>= 0.4.0'}
uuid@8.3.2:
resolution: {integrity: sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==}
hasBin: true
v8-compile-cache-lib@3.0.1:
resolution: {integrity: sha512-wa7YjyUGfNZngI/vtK0UHAN+lgDCxBPCylVXGp0zu59Fz5aiGtNXaq3DhIov063MorB+VfufLh3JlF2KdTK3xg==}
validate-npm-package-license@3.0.4:
resolution: {integrity: sha512-DpKm2Ui/xN7/HQKCtpZxoRWBhZ9Z0kqtygG8XCgNQ8ZlDnxuQmWhj566j8fN4Cu3/JmbhsDo7fcAJq4s9h27Ew==}
validate-npm-package-name@7.0.2:
resolution: {integrity: sha512-hVDIBwsRruT73PbK7uP5ebUt+ezEtCmzZz3F59BSr2F6OVFnJ/6h8liuvdLrQ88Xmnk6/+xGGuq+pG9WwTuy3A==}
engines: {node: ^20.17.0 || >=22.9.0}
vary@1.1.2:
resolution: {integrity: sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg==}
engines: {node: '>= 0.8'}
vite@7.3.0:
resolution: {integrity: sha512-dZwN5L1VlUBewiP6H9s2+B3e3Jg96D0vzN+Ry73sOefebhYr9f94wwkMNN/9ouoU8pV1BqA1d1zGk8928cx0rg==}
engines: {node: ^20.19.0 || >=22.12.0}
hasBin: true
peerDependencies:
'@types/node': ^20.19.0 || >=22.12.0
jiti: '>=1.21.0'
less: ^4.0.0
lightningcss: ^1.21.0
sass: ^1.70.0
sass-embedded: ^1.70.0
stylus: '>=0.54.8'
sugarss: ^5.0.0
terser: ^5.16.0
tsx: ^4.8.1
yaml: ^2.4.2
peerDependenciesMeta:
'@types/node':
optional: true
jiti:
optional: true
less:
optional: true
lightningcss:
optional: true
sass:
optional: true
sass-embedded:
optional: true
stylus:
optional: true
sugarss:
optional: true
terser:
optional: true
tsx:
optional: true
yaml:
optional: true
watchpack@2.5.0:
resolution: {integrity: sha512-e6vZvY6xboSwLz2GD36c16+O/2Z6fKvIf4pOXptw2rY9MVwE/TXc6RGqxD3I3x0a28lwBY7DE+76uTPSsBrrCA==}
engines: {node: '>=10.13.0'}
watchpack@2.5.1:
resolution: {integrity: sha512-Zn5uXdcFNIA1+1Ei5McRd+iRzfhENPCe7LeABkJtNulSxjma+l7ltNx55BWZkRlwRnpOgHqxnjyaDgJnNXnqzg==}
engines: {node: '>=10.13.0'}
wbuf@1.7.3:
resolution: {integrity: sha512-O84QOnr0icsbFGLS0O3bI5FswxzRr8/gHwWkDlQFskhSPryQXvrTMxjxGP4+iWYoauLoBvfDpkrOauZ+0iZpDA==}
weak-lru-cache@1.2.2:
resolution: {integrity: sha512-DEAoo25RfSYMuTGc9vPJzZcZullwIqRDSI9LOy+fkCJPi6hykCnfKaXTuPBDuXAUcqHXyOgFtHNp/kB2FjYHbw==}
webpack-dev-middleware@7.4.5:
resolution: {integrity: sha512-uxQ6YqGdE4hgDKNf7hUiPXOdtkXvBJXrfEGYSx7P7LC8hnUYGK70X6xQXUvXeNyBDDcsiQXpG2m3G9vxowaEuA==}
engines: {node: '>= 18.12.0'}
peerDependencies:
webpack: ^5.0.0
peerDependenciesMeta:
webpack:
optional: true
webpack-dev-server@5.2.2:
resolution: {integrity: sha512-QcQ72gh8a+7JO63TAx/6XZf/CWhgMzu5m0QirvPfGvptOusAxG12w2+aua1Jkjr7hzaWDnJ2n6JFeexMHI+Zjg==}
engines: {node: '>= 18.12.0'}
hasBin: true
peerDependencies:
webpack: ^5.0.0
webpack-cli: '*'
peerDependenciesMeta:
webpack:
optional: true
webpack-cli:
optional: true
webpack-merge@6.0.1:
resolution: {integrity: sha512-hXXvrjtx2PLYx4qruKl+kyRSLc52V+cCvMxRjmKwoA+CBbbF5GfIBtR6kCvl0fYGqTUPKB+1ktVmTHqMOzgCBg==}
engines: {node: '>=18.0.0'}
webpack-sources@3.3.3:
resolution: {integrity: sha512-yd1RBzSGanHkitROoPFd6qsrxt+oFhg/129YzheDGqeustzX0vTZJZsSsQjVQC4yzBQ56K55XU8gaNCtIzOnTg==}
engines: {node: '>=10.13.0'}
webpack-subresource-integrity@5.1.0:
resolution: {integrity: sha512-sacXoX+xd8r4WKsy9MvH/q/vBtEHr86cpImXwyg74pFIpERKt6FmB8cXpeuh0ZLgclOlHI4Wcll7+R5L02xk9Q==}
engines: {node: '>= 12'}
peerDependencies:
html-webpack-plugin: '>= 5.0.0-beta.1 < 6'
webpack: ^5.12.0
peerDependenciesMeta:
html-webpack-plugin:
optional: true
webpack@5.104.1:
resolution: {integrity: sha512-Qphch25abbMNtekmEGJmeRUhLDbe+QfiWTiqpKYkpCOWY64v9eyl+KRRLmqOFA2AvKPpc9DC6+u2n76tQLBoaA==}
engines: {node: '>=10.13.0'}
hasBin: true
peerDependencies:
webpack-cli: '*'
peerDependenciesMeta:
webpack-cli:
optional: true
websocket-driver@0.7.4:
resolution: {integrity: sha512-b17KeDIQVjvb0ssuSDF2cYXSg2iztliJ4B9WdsuB6J952qCPKmnVq4DyW5motImXHDC1cBT/1UezrJVsKw5zjg==}
engines: {node: '>=0.8.0'}
websocket-extensions@0.1.4:
resolution: {integrity: sha512-OqedPIGOfsDlo31UNwYbCFMSaO9m9G/0faIHj5/dZFDMFqPTcx6UwqyOy3COEaEOg/9VsGIpdqn62W5KhoKSpg==}
engines: {node: '>=0.8.0'}
which@2.0.2:
resolution: {integrity: sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==}
engines: {node: '>= 8'}
hasBin: true
which@6.0.1:
resolution: {integrity: sha512-oGLe46MIrCRqX7ytPUf66EAYvdeMIZYn3WaocqqKZAxrBpkqHfL/qvTyJ/bTk5+AqHCjXmrv3CEWgy368zhRUg==}
engines: {node: ^20.17.0 || >=22.9.0}
hasBin: true
wildcard@2.0.1:
resolution: {integrity: sha512-CC1bOL87PIWSBhDcTrdeLo6eGT7mCFtrg0uIJtqJUFyK+eJnzl8A1niH56uu7KMa5XFrtiV+AQuHO3n7DsHnLQ==}
wrap-ansi@6.2.0:
resolution: {integrity: sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA==}
engines: {node: '>=8'}
wrap-ansi@9.0.2:
resolution: {integrity: sha512-42AtmgqjV+X1VpdOfyTGOYRi0/zsoLqtXQckTmqTeybT+BDIbM/Guxo7x3pE2vtpr1ok6xRqM9OpBe+Jyoqyww==}
engines: {node: '>=18'}
wrappy@1.0.2:
resolution: {integrity: sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==}
ws@8.19.0:
resolution: {integrity: sha512-blAT2mjOEIi0ZzruJfIhb3nps74PRWTCz1IjglWEEpQl5XS/UNama6u2/rjFkDDouqr4L67ry+1aGIALViWjDg==}
engines: {node: '>=10.0.0'}
peerDependencies:
bufferutil: ^4.0.1
utf-8-validate: '>=5.0.2'
peerDependenciesMeta:
bufferutil:
optional: true
utf-8-validate:
optional: true
wsl-utils@0.1.0:
resolution: {integrity: sha512-h3Fbisa2nKGPxCpm89Hk33lBLsnaGBvctQopaBSOW/uIs6FTe1ATyAnKFJrzVs9vpGdsTe73WF3V4lIsk4Gacw==}
engines: {node: '>=18'}
wsl-utils@0.3.1:
resolution: {integrity: sha512-g/eziiSUNBSsdDJtCLB8bdYEUMj4jR7AGeUo96p/3dTafgjHhpF4RiCFPiRILwjQoDXx5MqkBr4fwWtR3Ky4Wg==}
engines: {node: '>=20'}
y18n@5.0.8:
resolution: {integrity: sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==}
engines: {node: '>=10'}
yallist@3.1.1:
resolution: {integrity: sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==}
yallist@4.0.0:
resolution: {integrity: sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==}
yallist@5.0.0:
resolution: {integrity: sha512-YgvUTfwqyc7UXVMrB+SImsVYSmTS8X/tSrtdNZMImM+n7+QTriRXyXim0mBrTXNeqzVF0KWGgHPeiyViFFrNDw==}
engines: {node: '>=18'}
yargs-parser@22.0.0:
resolution: {integrity: sha512-rwu/ClNdSMpkSrUb+d6BRsSkLUq1fmfsY6TOpYzTwvwkg1/NRG85KBy3kq++A8LKQwX6lsu+aWad+2khvuXrqw==}
engines: {node: ^20.19.0 || ^22.12.0 || >=23}
yargs@18.0.0:
resolution: {integrity: sha512-4UEqdc2RYGHZc7Doyqkrqiln3p9X2DZVxaGbwhn2pi7MrRagKaOcIKe8L3OxYcbhXLgLFUS3zAYuQjKBQgmuNg==}
engines: {node: ^20.19.0 || ^22.12.0 || >=23}
yn@3.1.1:
resolution: {integrity: sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q==}
engines: {node: '>=6'}
yocto-queue@0.1.0:
resolution: {integrity: sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==}
engines: {node: '>=10'}
yoctocolors-cjs@2.1.3:
resolution: {integrity: sha512-U/PBtDf35ff0D8X8D0jfdzHYEPFxAI7jJlxZXwCSez5M3190m+QobIfh+sWDWSHMCWWJN2AWamkegn6vr6YBTw==}
engines: {node: '>=18'}
yoctocolors@2.1.2:
resolution: {integrity: sha512-CzhO+pFNo8ajLM2d2IW/R93ipy99LWjtwblvC1RsoSUMZgyLbYFr221TnSNT7GjGdYui6P459mw9JH/g/zW2ug==}
engines: {node: '>=18'}
zod-to-json-schema@3.25.1:
resolution: {integrity: sha512-pM/SU9d3YAggzi6MtR4h7ruuQlqKtad8e9S0fmxcMi+ueAK5Korys/aWcV9LIIHTVbj01NdzxcnXSN+O74ZIVA==}
peerDependencies:
zod: ^3.25 || ^4
zod@4.3.5:
resolution: {integrity: sha512-k7Nwx6vuWx1IJ9Bjuf4Zt1PEllcwe7cls3VNzm4CQ1/hgtFUK2bRNG3rvnpPUhFjmqJKAKtjV576KnUkHocg/g==}
zone.js@0.16.0:
resolution: {integrity: sha512-LqLPpIQANebrlxY6jKcYKdgN5DTXyyHAKnnWWjE5pPfEQ4n7j5zn7mOEEpwNZVKGqx3kKKmvplEmoBrvpgROTA==}
snapshots:
'@algolia/abtesting@1.12.2':
dependencies:
'@algolia/client-common': 5.46.2
'@algolia/requester-browser-xhr': 5.46.2
'@algolia/requester-fetch': 5.46.2
'@algolia/requester-node-http': 5.46.2
'@algolia/client-abtesting@5.46.2':
dependencies:
'@algolia/client-common': 5.46.2
'@algolia/requester-browser-xhr': 5.46.2
'@algolia/requester-fetch': 5.46.2
'@algolia/requester-node-http': 5.46.2
'@algolia/client-analytics@5.46.2':
dependencies:
'@algolia/client-common': 5.46.2
'@algolia/requester-browser-xhr': 5.46.2
'@algolia/requester-fetch': 5.46.2
'@algolia/requester-node-http': 5.46.2
'@algolia/client-common@5.46.2': {}
'@algolia/client-insights@5.46.2':
dependencies:
'@algolia/client-common': 5.46.2
'@algolia/requester-browser-xhr': 5.46.2
'@algolia/requester-fetch': 5.46.2
'@algolia/requester-node-http': 5.46.2
'@algolia/client-personalization@5.46.2':
dependencies:
'@algolia/client-common': 5.46.2
'@algolia/requester-browser-xhr': 5.46.2
'@algolia/requester-fetch': 5.46.2
'@algolia/requester-node-http': 5.46.2
'@algolia/client-query-suggestions@5.46.2':
dependencies:
'@algolia/client-common': 5.46.2
'@algolia/requester-browser-xhr': 5.46.2
'@algolia/requester-fetch': 5.46.2
'@algolia/requester-node-http': 5.46.2
'@algolia/client-search@5.46.2':
dependencies:
'@algolia/client-common': 5.46.2
'@algolia/requester-browser-xhr': 5.46.2
'@algolia/requester-fetch': 5.46.2
'@algolia/requester-node-http': 5.46.2
'@algolia/ingestion@1.46.2':
dependencies:
'@algolia/client-common': 5.46.2
'@algolia/requester-browser-xhr': 5.46.2
'@algolia/requester-fetch': 5.46.2
'@algolia/requester-node-http': 5.46.2
'@algolia/monitoring@1.46.2':
dependencies:
'@algolia/client-common': 5.46.2
'@algolia/requester-browser-xhr': 5.46.2
'@algolia/requester-fetch': 5.46.2
'@algolia/requester-node-http': 5.46.2
'@algolia/recommend@5.46.2':
dependencies:
'@algolia/client-common': 5.46.2
'@algolia/requester-browser-xhr': 5.46.2
'@algolia/requester-fetch': 5.46.2
'@algolia/requester-node-http': 5.46.2
'@algolia/requester-browser-xhr@5.46.2':
dependencies:
'@algolia/client-common': 5.46.2
'@algolia/requester-fetch@5.46.2':
dependencies:
'@algolia/client-common': 5.46.2
'@algolia/requester-node-http@5.46.2':
dependencies:
'@algolia/client-common': 5.46.2
'@ampproject/remapping@2.3.0':
dependencies:
'@jridgewell/gen-mapping': 0.3.13
'@jridgewell/trace-mapping': 0.3.31
'@angular-devkit/architect@0.2101.0-rc.0':
dependencies:
'@angular-devkit/core': 21.1.0-rc.0
rxjs: 7.8.2
transitivePeerDependencies:
- chokidar
'@angular-devkit/build-angular@21.1.0-rc.0(@angular/compiler-cli@in-existing-linked-by-bazel)(@angular/compiler@in-existing-linked-by-bazel)(@angular/core@in-existing-linked-by-bazel)(@angular/platform-browser@in-existing-linked-by-bazel)(@angular/ssr@21.1.0-rc.0(@angular/common@in-existing-linked-by-bazel)(@angular/core@in-existing-linked-by-bazel)(@angular/router@in-existing-linked-by-bazel))(@types/node@20.19.33)(jiti@2.6.1)(typescript@5.9.3)':
dependencies:
'@ampproject/remapping': 2.3.0
'@angular-devkit/architect': 0.2101.0-rc.0
'@angular-devkit/build-webpack': 0.2101.0-rc.0(webpack-dev-server@5.2.2(tslib@2.8.1)(webpack@5.104.1))(webpack@5.104.1(esbuild@0.27.2))
'@angular-devkit/core': 21.1.0-rc.0
'@angular/build': 21.1.0-rc.0(@angular/compiler-cli@in-existing-linked-by-bazel)(@angular/compiler@in-existing-linked-by-bazel)(@angular/core@in-existing-linked-by-bazel)(@angular/platform-browser@in-existing-linked-by-bazel)(@angular/ssr@21.1.0-rc.0(@angular/common@in-existing-linked-by-bazel)(@angular/core@in-existing-linked-by-bazel)(@angular/router@in-existing-linked-by-bazel))(@types/node@20.19.33)(jiti@2.6.1)(less@4.4.2)(postcss@8.5.6)(terser@5.44.1)(tslib@2.8.1)(typescript@5.9.3)
'@angular/compiler-cli': link:in-existing-linked-by-bazel
'@babel/core': 7.28.5
'@babel/generator': 7.28.5
'@babel/helper-annotate-as-pure': 7.27.3
'@babel/helper-split-export-declaration': 7.24.7
'@babel/plugin-transform-async-generator-functions': 7.28.0(@babel/core@7.28.5)
'@babel/plugin-transform-async-to-generator': 7.27.1(@babel/core@7.28.5)
'@babel/plugin-transform-runtime': 7.28.5(@babel/core@7.28.5)
'@babel/preset-env': 7.28.5(@babel/core@7.28.5)
'@babel/runtime': 7.28.4
'@discoveryjs/json-ext': 0.6.3
'@ngtools/webpack': 21.1.0-rc.0(@angular/compiler-cli@in-existing-linked-by-bazel)(typescript@5.9.3)(webpack@5.104.1(esbuild@0.27.2))
ansi-colors: 4.1.3
autoprefixer: 10.4.23(postcss@8.5.6)
babel-loader: 10.0.0(@babel/core@7.28.5)(webpack@5.104.1(esbuild@0.27.2))
browserslist: 4.28.1
copy-webpack-plugin: 13.0.1(webpack@5.104.1(esbuild@0.27.2))
css-loader: 7.1.2(webpack@5.104.1(esbuild@0.27.2))
esbuild-wasm: 0.27.2
http-proxy-middleware: 3.0.5
istanbul-lib-instrument: 6.0.3
jsonc-parser: 3.3.1
karma-source-map-support: 1.4.0
less: 4.4.2
less-loader: 12.3.0(less@4.4.2)(webpack@5.104.1(esbuild@0.27.2))
license-webpack-plugin: 4.0.2(webpack@5.104.1(esbuild@0.27.2))
loader-utils: 3.3.1
mini-css-extract-plugin: 2.9.4(webpack@5.104.1(esbuild@0.27.2))
open: 11.0.0
ora: 9.0.0
picomatch: 4.0.3
piscina: 5.1.4
postcss: 8.5.6
postcss-loader: 8.2.0(postcss@8.5.6)(typescript@5.9.3)(webpack@5.104.1(esbuild@0.27.2))
resolve-url-loader: 5.0.0
rxjs: 7.8.2
sass: 1.97.1
sass-loader: 16.0.6(sass@1.97.1)(webpack@5.104.1(esbuild@0.27.2))
semver: 7.7.3
source-map-loader: 5.0.0(webpack@5.104.1(esbuild@0.27.2))
source-map-support: 0.5.21
terser: 5.44.1
tinyglobby: 0.2.15
tree-kill: 1.2.2
tslib: 2.8.1
typescript: 5.9.3
webpack: 5.104.1(esbuild@0.27.2)
webpack-dev-middleware: 7.4.5(tslib@2.8.1)(webpack@5.104.1)
webpack-dev-server: 5.2.2(tslib@2.8.1)(webpack@5.104.1)
webpack-merge: 6.0.1
webpack-subresource-integrity: 5.1.0(webpack@5.104.1(esbuild@0.27.2))
optionalDependencies:
'@angular/core': link:in-existing-linked-by-bazel
'@angular/platform-browser': link:in-existing-linked-by-bazel
'@angular/ssr': 21.1.0-rc.0(@angular/common@in-existing-linked-by-bazel)(@angular/core@in-existing-linked-by-bazel)(@angular/router@in-existing-linked-by-bazel)
esbuild: 0.27.2
transitivePeerDependencies:
- '@angular/compiler'
- '@rspack/core'
- '@swc/core'
- '@types/node'
- bufferutil
- chokidar
- debug
- html-webpack-plugin
- jiti
- lightningcss
- node-sass
- sass-embedded
- stylus
- sugarss
- supports-color
- tsx
- uglify-js
- utf-8-validate
- vitest
- webpack-cli
- yaml
'@angular-devkit/build-webpack@0.2101.0-rc.0(webpack-dev-server@5.2.2(tslib@2.8.1)(webpack@5.104.1))(webpack@5.104.1(esbuild@0.27.2))':
dependencies:
'@angular-devkit/architect': 0.2101.0-rc.0
rxjs: 7.8.2
webpack: 5.104.1(esbuild@0.27.2)
webpack-dev-server: 5.2.2(tslib@2.8.1)(webpack@5.104.1)
transitivePeerDependencies:
- chokidar
'@angular-devkit/core@21.1.0-rc.0':
dependencies:
ajv: 8.17.1
ajv-formats: 3.0.1(ajv@8.17.1)
jsonc-parser: 3.3.1
picomatch: 4.0.3
rxjs: 7.8.2
source-map: 0.7.6
'@angular-devkit/schematics@21.1.0-rc.0':
dependencies:
'@angular-devkit/core': 21.1.0-rc.0
jsonc-parser: 3.3.1
magic-string: 0.30.21
ora: 9.0.0
rxjs: 7.8.2
transitivePeerDependencies:
- chokidar
'@angular/build@21.1.0-rc.0(@angular/compiler-cli@in-existing-linked-by-bazel)(@angular/compiler@in-existing-linked-by-bazel)(@angular/core@in-existing-linked-by-bazel)(@angular/platform-browser@in-existing-linked-by-bazel)(@angular/ssr@21.1.0-rc.0(@angular/common@in-existing-linked-by-bazel)(@angular/core@in-existing-linked-by-bazel)(@angular/router@in-existing-linked-by-bazel))(@types/node@20.19.33)(jiti@2.6.1)(less@4.4.2)(postcss@8.5.6)(terser@5.44.1)(tslib@2.8.1)(typescript@5.9.3)':
dependencies:
'@ampproject/remapping': 2.3.0
'@angular-devkit/architect': 0.2101.0-rc.0
'@angular/compiler': link:in-existing-linked-by-bazel
'@angular/compiler-cli': link:in-existing-linked-by-bazel
'@babel/core': 7.28.5
'@babel/helper-annotate-as-pure': 7.27.3
'@babel/helper-split-export-declaration': 7.24.7
'@inquirer/confirm': 5.1.21(@types/node@20.19.33)
'@vitejs/plugin-basic-ssl': 2.1.0(vite@7.3.0(@types/node@20.19.33)(jiti@2.6.1)(less@4.4.2)(sass@1.97.1)(terser@5.44.1))
beasties: 0.3.5
browserslist: 4.28.1
esbuild: 0.27.2
https-proxy-agent: 7.0.6
istanbul-lib-instrument: 6.0.3
jsonc-parser: 3.3.1
listr2: 9.0.5
magic-string: 0.30.21
mrmime: 2.0.1
parse5-html-rewriting-stream: 8.0.0
picomatch: 4.0.3
piscina: 5.1.4
rolldown: 1.0.0-beta.58
sass: 1.97.1
semver: 7.7.3
source-map-support: 0.5.21
tinyglobby: 0.2.15
tslib: 2.8.1
typescript: 5.9.3
undici: 7.18.0
vite: 7.3.0(@types/node@20.19.33)(jiti@2.6.1)(less@4.4.2)(sass@1.97.1)(terser@5.44.1)
watchpack: 2.5.0
optionalDependencies:
'@angular/core': link:in-existing-linked-by-bazel
'@angular/platform-browser': link:in-existing-linked-by-bazel
'@angular/ssr': 21.1.0-rc.0(@angular/common@in-existing-linked-by-bazel)(@angular/core@in-existing-linked-by-bazel)(@angular/router@in-existing-linked-by-bazel)
less: 4.4.2
lmdb: 3.4.4
postcss: 8.5.6
transitivePeerDependencies:
- '@types/node'
- chokidar
- jiti
- lightningcss
- sass-embedded
- stylus
- sugarss
- supports-color
- terser
- tsx
- yaml
'@angular/cli@21.1.0-rc.0(@types/node@20.19.33)(hono@4.11.9)':
dependencies:
'@angular-devkit/architect': 0.2101.0-rc.0
'@angular-devkit/core': 21.1.0-rc.0
'@angular-devkit/schematics': 21.1.0-rc.0
'@inquirer/prompts': 7.10.1(@types/node@20.19.33)
'@listr2/prompt-adapter-inquirer': 3.0.5(@inquirer/prompts@7.10.1(@types/node@20.19.33))(@types/node@20.19.33)(listr2@9.0.5)
'@modelcontextprotocol/sdk': 1.25.2(hono@4.11.9)(zod@4.3.5)
'@schematics/angular': 21.1.0-rc.0
'@yarnpkg/lockfile': 1.1.0
algoliasearch: 5.46.2
ini: 6.0.0
jsonc-parser: 3.3.1
listr2: 9.0.5
npm-package-arg: 13.0.2
pacote: 21.0.4
parse5-html-rewriting-stream: 8.0.0
resolve: 1.22.11
semver: 7.7.3
yargs: 18.0.0
zod: 4.3.5
transitivePeerDependencies:
- '@cfworker/json-schema'
- '@types/node'
- chokidar
- hono
- supports-color
'@angular/ssr@21.1.0-rc.0(@angular/common@in-existing-linked-by-bazel)(@angular/core@in-existing-linked-by-bazel)(@angular/router@in-existing-linked-by-bazel)':
dependencies:
'@angular/common': link:in-existing-linked-by-bazel
'@angular/core': link:in-existing-linked-by-bazel
'@angular/router': link:in-existing-linked-by-bazel
tslib: 2.8.1
'@babel/code-frame@7.29.0':
dependencies:
'@babel/helper-validator-identifier': 7.28.5
js-tokens: 4.0.0
picocolors: 1.1.1
'@babel/compat-data@7.29.0': {}
'@babel/core@7.28.5':
dependencies:
'@babel/code-frame': 7.29.0
'@babel/generator': 7.28.5
'@babel/helper-compilation-targets': 7.28.6
'@babel/helper-module-transforms': 7.28.6(@babel/core@7.28.5)
'@babel/helpers': 7.28.6
'@babel/parser': 7.29.0
'@babel/template': 7.28.6
'@babel/traverse': 7.29.0
'@babel/types': 7.29.0
'@jridgewell/remapping': 2.3.5
convert-source-map: 2.0.0
debug: 4.4.3
gensync: 1.0.0-beta.2
json5: 2.2.3
semver: 6.3.1
transitivePeerDependencies:
- supports-color
'@babel/generator@7.28.5':
dependencies:
'@babel/parser': 7.29.0
'@babel/types': 7.29.0
'@jridgewell/gen-mapping': 0.3.13
'@jridgewell/trace-mapping': 0.3.31
jsesc: 3.1.0
'@babel/generator@7.29.1':
dependencies:
'@babel/parser': 7.29.0
'@babel/types': 7.29.0
'@jridgewell/gen-mapping': 0.3.13
'@jridgewell/trace-mapping': 0.3.31
jsesc: 3.1.0
'@babel/helper-annotate-as-pure@7.27.3':
dependencies:
'@babel/types': 7.29.0
'@babel/helper-compilation-targets@7.28.6':
dependencies:
'@babel/compat-data': 7.29.0
'@babel/helper-validator-option': 7.27.1
browserslist: 4.28.1
lru-cache: 5.1.1
semver: 6.3.1
'@babel/helper-create-class-features-plugin@7.28.6(@babel/core@7.28.5)':
dependencies:
'@babel/core': 7.28.5
'@babel/helper-annotate-as-pure': 7.27.3
'@babel/helper-member-expression-to-functions': 7.28.5
'@babel/helper-optimise-call-expression': 7.27.1
'@babel/helper-replace-supers': 7.28.6(@babel/core@7.28.5)
'@babel/helper-skip-transparent-expression-wrappers': 7.27.1
'@babel/traverse': 7.29.0
semver: 6.3.1
transitivePeerDependencies:
- supports-color
'@babel/helper-create-regexp-features-plugin@7.28.5(@babel/core@7.28.5)':
dependencies:
'@babel/core': 7.28.5
'@babel/helper-annotate-as-pure': 7.27.3
regexpu-core: 6.4.0
semver: 6.3.1
'@babel/helper-define-polyfill-provider@0.6.6(@babel/core@7.28.5)':
dependencies:
'@babel/core': 7.28.5
'@babel/helper-compilation-targets': 7.28.6
'@babel/helper-plugin-utils': 7.28.6
debug: 4.4.3
lodash.debounce: 4.0.8
resolve: 1.22.11
transitivePeerDependencies:
- supports-color
'@babel/helper-globals@7.28.0': {}
'@babel/helper-member-expression-to-functions@7.28.5':
dependencies:
'@babel/traverse': 7.29.0
'@babel/types': 7.29.0
transitivePeerDependencies:
- supports-color
'@babel/helper-module-imports@7.28.6':
dependencies:
'@babel/traverse': 7.29.0
'@babel/types': 7.29.0
transitivePeerDependencies:
- supports-color
'@babel/helper-module-transforms@7.28.6(@babel/core@7.28.5)':
dependencies:
'@babel/core': 7.28.5
'@babel/helper-module-imports': 7.28.6
'@babel/helper-validator-identifier': 7.28.5
'@babel/traverse': 7.29.0
transitivePeerDependencies:
- supports-color
'@babel/helper-optimise-call-expression@7.27.1':
dependencies:
'@babel/types': 7.29.0
'@babel/helper-plugin-utils@7.28.6': {}
'@babel/helper-remap-async-to-generator@7.27.1(@babel/core@7.28.5)':
dependencies:
'@babel/core': 7.28.5
'@babel/helper-annotate-as-pure': 7.27.3
'@babel/helper-wrap-function': 7.28.6
'@babel/traverse': 7.29.0
transitivePeerDependencies:
- supports-color
'@babel/helper-replace-supers@7.28.6(@babel/core@7.28.5)':
dependencies:
'@babel/core': 7.28.5
'@babel/helper-member-expression-to-functions': 7.28.5
'@babel/helper-optimise-call-expression': 7.27.1
'@babel/traverse': 7.29.0
transitivePeerDependencies:
- supports-color
'@babel/helper-skip-transparent-expression-wrappers@7.27.1':
dependencies:
'@babel/traverse': 7.29.0
'@babel/types': 7.29.0
transitivePeerDependencies:
- supports-color
'@babel/helper-split-export-declaration@7.24.7':
dependencies:
'@babel/types': 7.29.0
'@babel/helper-string-parser@7.27.1': {}
'@babel/helper-validator-identifier@7.28.5': {}
'@babel/helper-validator-option@7.27.1': {}
'@babel/helper-wrap-function@7.28.6':
dependencies:
'@babel/template': 7.28.6
'@babel/traverse': 7.29.0
'@babel/types': 7.29.0
transitivePeerDependencies:
- supports-color
'@babel/helpers@7.28.6':
dependencies:
'@babel/template': 7.28.6
'@babel/types': 7.29.0
'@babel/parser@7.29.0':
dependencies:
'@babel/types': 7.29.0
'@babel/plugin-bugfix-firefox-class-in-computed-class-key@7.28.5(@babel/core@7.28.5)':
dependencies:
'@babel/core': 7.28.5
'@babel/helper-plugin-utils': 7.28.6
'@babel/traverse': 7.29.0
transitivePeerDependencies:
- supports-color
'@babel/plugin-bugfix-safari-class-field-initializer-scope@7.27.1(@babel/core@7.28.5)':
dependencies:
'@babel/core': 7.28.5
'@babel/helper-plugin-utils': 7.28.6
'@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression@7.27.1(@babel/core@7.28.5)':
dependencies:
'@babel/core': 7.28.5
'@babel/helper-plugin-utils': 7.28.6
'@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining@7.27.1(@babel/core@7.28.5)':
dependencies:
'@babel/core': 7.28.5
'@babel/helper-plugin-utils': 7.28.6
'@babel/helper-skip-transparent-expression-wrappers': 7.27.1
'@babel/plugin-transform-optional-chaining': 7.28.6(@babel/core@7.28.5)
transitivePeerDependencies:
- supports-color
'@babel/plugin-bugfix-v8-static-class-fields-redefine-readonly@7.28.6(@babel/core@7.28.5)':
dependencies:
'@babel/core': 7.28.5
'@babel/helper-plugin-utils': 7.28.6
'@babel/traverse': 7.29.0
transitivePeerDependencies:
- supports-color
'@babel/plugin-proposal-private-property-in-object@7.21.0-placeholder-for-preset-env.2(@babel/core@7.28.5)':
dependencies:
'@babel/core': 7.28.5
'@babel/plugin-syntax-import-assertions@7.28.6(@babel/core@7.28.5)':
dependencies:
'@babel/core': 7.28.5
'@babel/helper-plugin-utils': 7.28.6
'@babel/plugin-syntax-import-attributes@7.28.6(@babel/core@7.28.5)':
dependencies:
'@babel/core': 7.28.5
'@babel/helper-plugin-utils': 7.28.6
'@babel/plugin-syntax-unicode-sets-regex@7.18.6(@babel/core@7.28.5)':
dependencies:
'@babel/core': 7.28.5
'@babel/helper-create-regexp-features-plugin': 7.28.5(@babel/core@7.28.5)
'@babel/helper-plugin-utils': 7.28.6
'@babel/plugin-transform-arrow-functions@7.27.1(@babel/core@7.28.5)':
dependencies:
'@babel/core': 7.28.5
'@babel/helper-plugin-utils': 7.28.6
'@babel/plugin-transform-async-generator-functions@7.28.0(@babel/core@7.28.5)':
dependencies:
'@babel/core': 7.28.5
'@babel/helper-plugin-utils': 7.28.6
'@babel/helper-remap-async-to-generator': 7.27.1(@babel/core@7.28.5)
'@babel/traverse': 7.29.0
transitivePeerDependencies:
- supports-color
'@babel/plugin-transform-async-to-generator@7.27.1(@babel/core@7.28.5)':
dependencies:
'@babel/core': 7.28.5
'@babel/helper-module-imports': 7.28.6
'@babel/helper-plugin-utils': 7.28.6
'@babel/helper-remap-async-to-generator': 7.27.1(@babel/core@7.28.5)
transitivePeerDependencies:
- supports-color
'@babel/plugin-transform-block-scoped-functions@7.27.1(@babel/core@7.28.5)':
dependencies:
'@babel/core': 7.28.5
'@babel/helper-plugin-utils': 7.28.6
'@babel/plugin-transform-block-scoping@7.28.6(@babel/core@7.28.5)':
dependencies:
'@babel/core': 7.28.5
'@babel/helper-plugin-utils': 7.28.6
'@babel/plugin-transform-class-properties@7.28.6(@babel/core@7.28.5)':
dependencies:
'@babel/core': 7.28.5
'@babel/helper-create-class-features-plugin': 7.28.6(@babel/core@7.28.5)
'@babel/helper-plugin-utils': 7.28.6
transitivePeerDependencies:
- supports-color
'@babel/plugin-transform-class-static-block@7.28.6(@babel/core@7.28.5)':
dependencies:
'@babel/core': 7.28.5
'@babel/helper-create-class-features-plugin': 7.28.6(@babel/core@7.28.5)
'@babel/helper-plugin-utils': 7.28.6
transitivePeerDependencies:
- supports-color
'@babel/plugin-transform-classes@7.28.6(@babel/core@7.28.5)':
dependencies:
'@babel/core': 7.28.5
'@babel/helper-annotate-as-pure': 7.27.3
'@babel/helper-compilation-targets': 7.28.6
'@babel/helper-globals': 7.28.0
'@babel/helper-plugin-utils': 7.28.6
'@babel/helper-replace-supers': 7.28.6(@babel/core@7.28.5)
'@babel/traverse': 7.29.0
transitivePeerDependencies:
- supports-color
'@babel/plugin-transform-computed-properties@7.28.6(@babel/core@7.28.5)':
dependencies:
'@babel/core': 7.28.5
'@babel/helper-plugin-utils': 7.28.6
'@babel/template': 7.28.6
'@babel/plugin-transform-destructuring@7.28.5(@babel/core@7.28.5)':
dependencies:
'@babel/core': 7.28.5
'@babel/helper-plugin-utils': 7.28.6
'@babel/traverse': 7.29.0
transitivePeerDependencies:
- supports-color
'@babel/plugin-transform-dotall-regex@7.28.6(@babel/core@7.28.5)':
dependencies:
'@babel/core': 7.28.5
'@babel/helper-create-regexp-features-plugin': 7.28.5(@babel/core@7.28.5)
'@babel/helper-plugin-utils': 7.28.6
'@babel/plugin-transform-duplicate-keys@7.27.1(@babel/core@7.28.5)':
dependencies:
'@babel/core': 7.28.5
'@babel/helper-plugin-utils': 7.28.6
'@babel/plugin-transform-duplicate-named-capturing-groups-regex@7.29.0(@babel/core@7.28.5)':
dependencies:
'@babel/core': 7.28.5
'@babel/helper-create-regexp-features-plugin': 7.28.5(@babel/core@7.28.5)
'@babel/helper-plugin-utils': 7.28.6
'@babel/plugin-transform-dynamic-import@7.27.1(@babel/core@7.28.5)':
dependencies:
'@babel/core': 7.28.5
'@babel/helper-plugin-utils': 7.28.6
'@babel/plugin-transform-explicit-resource-management@7.28.6(@babel/core@7.28.5)':
dependencies:
'@babel/core': 7.28.5
'@babel/helper-plugin-utils': 7.28.6
'@babel/plugin-transform-destructuring': 7.28.5(@babel/core@7.28.5)
transitivePeerDependencies:
- supports-color
'@babel/plugin-transform-exponentiation-operator@7.28.6(@babel/core@7.28.5)':
dependencies:
'@babel/core': 7.28.5
'@babel/helper-plugin-utils': 7.28.6
'@babel/plugin-transform-export-namespace-from@7.27.1(@babel/core@7.28.5)':
dependencies:
'@babel/core': 7.28.5
'@babel/helper-plugin-utils': 7.28.6
'@babel/plugin-transform-for-of@7.27.1(@babel/core@7.28.5)':
dependencies:
'@babel/core': 7.28.5
'@babel/helper-plugin-utils': 7.28.6
'@babel/helper-skip-transparent-expression-wrappers': 7.27.1
transitivePeerDependencies:
- supports-color
'@babel/plugin-transform-function-name@7.27.1(@babel/core@7.28.5)':
dependencies:
'@babel/core': 7.28.5
'@babel/helper-compilation-targets': 7.28.6
'@babel/helper-plugin-utils': 7.28.6
'@babel/traverse': 7.29.0
transitivePeerDependencies:
- supports-color
'@babel/plugin-transform-json-strings@7.28.6(@babel/core@7.28.5)':
dependencies:
'@babel/core': 7.28.5
'@babel/helper-plugin-utils': 7.28.6
'@babel/plugin-transform-literals@7.27.1(@babel/core@7.28.5)':
dependencies:
'@babel/core': 7.28.5
'@babel/helper-plugin-utils': 7.28.6
'@babel/plugin-transform-logical-assignment-operators@7.28.6(@babel/core@7.28.5)':
dependencies:
'@babel/core': 7.28.5
'@babel/helper-plugin-utils': 7.28.6
'@babel/plugin-transform-member-expression-literals@7.27.1(@babel/core@7.28.5)':
dependencies:
'@babel/core': 7.28.5
'@babel/helper-plugin-utils': 7.28.6
'@babel/plugin-transform-modules-amd@7.27.1(@babel/core@7.28.5)':
dependencies:
'@babel/core': 7.28.5
'@babel/helper-module-transforms': 7.28.6(@babel/core@7.28.5)
'@babel/helper-plugin-utils': 7.28.6
transitivePeerDependencies:
- supports-color
'@babel/plugin-transform-modules-commonjs@7.28.6(@babel/core@7.28.5)':
dependencies:
'@babel/core': 7.28.5
'@babel/helper-module-transforms': 7.28.6(@babel/core@7.28.5)
'@babel/helper-plugin-utils': 7.28.6
transitivePeerDependencies:
- supports-color
'@babel/plugin-transform-modules-systemjs@7.29.0(@babel/core@7.28.5)':
dependencies:
'@babel/core': 7.28.5
'@babel/helper-module-transforms': 7.28.6(@babel/core@7.28.5)
'@babel/helper-plugin-utils': 7.28.6
'@babel/helper-validator-identifier': 7.28.5
'@babel/traverse': 7.29.0
transitivePeerDependencies:
- supports-color
'@babel/plugin-transform-modules-umd@7.27.1(@babel/core@7.28.5)':
dependencies:
'@babel/core': 7.28.5
'@babel/helper-module-transforms': 7.28.6(@babel/core@7.28.5)
'@babel/helper-plugin-utils': 7.28.6
transitivePeerDependencies:
- supports-color
'@babel/plugin-transform-named-capturing-groups-regex@7.29.0(@babel/core@7.28.5)':
dependencies:
'@babel/core': 7.28.5
'@babel/helper-create-regexp-features-plugin': 7.28.5(@babel/core@7.28.5)
'@babel/helper-plugin-utils': 7.28.6
'@babel/plugin-transform-new-target@7.27.1(@babel/core@7.28.5)':
dependencies:
'@babel/core': 7.28.5
'@babel/helper-plugin-utils': 7.28.6
'@babel/plugin-transform-nullish-coalescing-operator@7.28.6(@babel/core@7.28.5)':
dependencies:
'@babel/core': 7.28.5
'@babel/helper-plugin-utils': 7.28.6
'@babel/plugin-transform-numeric-separator@7.28.6(@babel/core@7.28.5)':
dependencies:
'@babel/core': 7.28.5
'@babel/helper-plugin-utils': 7.28.6
'@babel/plugin-transform-object-rest-spread@7.28.6(@babel/core@7.28.5)':
dependencies:
'@babel/core': 7.28.5
'@babel/helper-compilation-targets': 7.28.6
'@babel/helper-plugin-utils': 7.28.6
'@babel/plugin-transform-destructuring': 7.28.5(@babel/core@7.28.5)
'@babel/plugin-transform-parameters': 7.27.7(@babel/core@7.28.5)
'@babel/traverse': 7.29.0
transitivePeerDependencies:
- supports-color
'@babel/plugin-transform-object-super@7.27.1(@babel/core@7.28.5)':
dependencies:
'@babel/core': 7.28.5
'@babel/helper-plugin-utils': 7.28.6
'@babel/helper-replace-supers': 7.28.6(@babel/core@7.28.5)
transitivePeerDependencies:
- supports-color
'@babel/plugin-transform-optional-catch-binding@7.28.6(@babel/core@7.28.5)':
dependencies:
'@babel/core': 7.28.5
'@babel/helper-plugin-utils': 7.28.6
'@babel/plugin-transform-optional-chaining@7.28.6(@babel/core@7.28.5)':
dependencies:
'@babel/core': 7.28.5
'@babel/helper-plugin-utils': 7.28.6
'@babel/helper-skip-transparent-expression-wrappers': 7.27.1
transitivePeerDependencies:
- supports-color
'@babel/plugin-transform-parameters@7.27.7(@babel/core@7.28.5)':
dependencies:
'@babel/core': 7.28.5
'@babel/helper-plugin-utils': 7.28.6
'@babel/plugin-transform-private-methods@7.28.6(@babel/core@7.28.5)':
dependencies:
'@babel/core': 7.28.5
'@babel/helper-create-class-features-plugin': 7.28.6(@babel/core@7.28.5)
'@babel/helper-plugin-utils': 7.28.6
transitivePeerDependencies:
- supports-color
'@babel/plugin-transform-private-property-in-object@7.28.6(@babel/core@7.28.5)':
dependencies:
'@babel/core': 7.28.5
'@babel/helper-annotate-as-pure': 7.27.3
'@babel/helper-create-class-features-plugin': 7.28.6(@babel/core@7.28.5)
'@babel/helper-plugin-utils': 7.28.6
transitivePeerDependencies:
- supports-color
'@babel/plugin-transform-property-literals@7.27.1(@babel/core@7.28.5)':
dependencies:
'@babel/core': 7.28.5
'@babel/helper-plugin-utils': 7.28.6
'@babel/plugin-transform-regenerator@7.29.0(@babel/core@7.28.5)':
dependencies:
'@babel/core': 7.28.5
'@babel/helper-plugin-utils': 7.28.6
'@babel/plugin-transform-regexp-modifiers@7.28.6(@babel/core@7.28.5)':
dependencies:
'@babel/core': 7.28.5
'@babel/helper-create-regexp-features-plugin': 7.28.5(@babel/core@7.28.5)
'@babel/helper-plugin-utils': 7.28.6
'@babel/plugin-transform-reserved-words@7.27.1(@babel/core@7.28.5)':
dependencies:
'@babel/core': 7.28.5
'@babel/helper-plugin-utils': 7.28.6
'@babel/plugin-transform-runtime@7.28.5(@babel/core@7.28.5)':
dependencies:
'@babel/core': 7.28.5
'@babel/helper-module-imports': 7.28.6
'@babel/helper-plugin-utils': 7.28.6
babel-plugin-polyfill-corejs2: 0.4.15(@babel/core@7.28.5)
babel-plugin-polyfill-corejs3: 0.13.0(@babel/core@7.28.5)
babel-plugin-polyfill-regenerator: 0.6.6(@babel/core@7.28.5)
semver: 6.3.1
transitivePeerDependencies:
- supports-color
'@babel/plugin-transform-shorthand-properties@7.27.1(@babel/core@7.28.5)':
dependencies:
'@babel/core': 7.28.5
'@babel/helper-plugin-utils': 7.28.6
'@babel/plugin-transform-spread@7.28.6(@babel/core@7.28.5)':
dependencies:
'@babel/core': 7.28.5
'@babel/helper-plugin-utils': 7.28.6
'@babel/helper-skip-transparent-expression-wrappers': 7.27.1
transitivePeerDependencies:
- supports-color
'@babel/plugin-transform-sticky-regex@7.27.1(@babel/core@7.28.5)':
dependencies:
'@babel/core': 7.28.5
'@babel/helper-plugin-utils': 7.28.6
'@babel/plugin-transform-template-literals@7.27.1(@babel/core@7.28.5)':
dependencies:
'@babel/core': 7.28.5
'@babel/helper-plugin-utils': 7.28.6
'@babel/plugin-transform-typeof-symbol@7.27.1(@babel/core@7.28.5)':
dependencies:
'@babel/core': 7.28.5
'@babel/helper-plugin-utils': 7.28.6
'@babel/plugin-transform-unicode-escapes@7.27.1(@babel/core@7.28.5)':
dependencies:
'@babel/core': 7.28.5
'@babel/helper-plugin-utils': 7.28.6
'@babel/plugin-transform-unicode-property-regex@7.28.6(@babel/core@7.28.5)':
dependencies:
'@babel/core': 7.28.5
'@babel/helper-create-regexp-features-plugin': 7.28.5(@babel/core@7.28.5)
'@babel/helper-plugin-utils': 7.28.6
'@babel/plugin-transform-unicode-regex@7.27.1(@babel/core@7.28.5)':
dependencies:
'@babel/core': 7.28.5
'@babel/helper-create-regexp-features-plugin': 7.28.5(@babel/core@7.28.5)
'@babel/helper-plugin-utils': 7.28.6
'@babel/plugin-transform-unicode-sets-regex@7.28.6(@babel/core@7.28.5)':
dependencies:
'@babel/core': 7.28.5
'@babel/helper-create-regexp-features-plugin': 7.28.5(@babel/core@7.28.5)
'@babel/helper-plugin-utils': 7.28.6
'@babel/preset-env@7.28.5(@babel/core@7.28.5)':
dependencies:
'@babel/compat-data': 7.29.0
'@babel/core': 7.28.5
'@babel/helper-compilation-targets': 7.28.6
'@babel/helper-plugin-utils': 7.28.6
'@babel/helper-validator-option': 7.27.1
'@babel/plugin-bugfix-firefox-class-in-computed-class-key': 7.28.5(@babel/core@7.28.5)
'@babel/plugin-bugfix-safari-class-field-initializer-scope': 7.27.1(@babel/core@7.28.5)
'@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression': 7.27.1(@babel/core@7.28.5)
'@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining': 7.27.1(@babel/core@7.28.5)
'@babel/plugin-bugfix-v8-static-class-fields-redefine-readonly': 7.28.6(@babel/core@7.28.5)
'@babel/plugin-proposal-private-property-in-object': 7.21.0-placeholder-for-preset-env.2(@babel/core@7.28.5)
'@babel/plugin-syntax-import-assertions': 7.28.6(@babel/core@7.28.5)
'@babel/plugin-syntax-import-attributes': 7.28.6(@babel/core@7.28.5)
'@babel/plugin-syntax-unicode-sets-regex': 7.18.6(@babel/core@7.28.5)
'@babel/plugin-transform-arrow-functions': 7.27.1(@babel/core@7.28.5)
'@babel/plugin-transform-async-generator-functions': 7.28.0(@babel/core@7.28.5)
'@babel/plugin-transform-async-to-generator': 7.27.1(@babel/core@7.28.5)
'@babel/plugin-transform-block-scoped-functions': 7.27.1(@babel/core@7.28.5)
'@babel/plugin-transform-block-scoping': 7.28.6(@babel/core@7.28.5)
'@babel/plugin-transform-class-properties': 7.28.6(@babel/core@7.28.5)
'@babel/plugin-transform-class-static-block': 7.28.6(@babel/core@7.28.5)
'@babel/plugin-transform-classes': 7.28.6(@babel/core@7.28.5)
'@babel/plugin-transform-computed-properties': 7.28.6(@babel/core@7.28.5)
'@babel/plugin-transform-destructuring': 7.28.5(@babel/core@7.28.5)
'@babel/plugin-transform-dotall-regex': 7.28.6(@babel/core@7.28.5)
'@babel/plugin-transform-duplicate-keys': 7.27.1(@babel/core@7.28.5)
'@babel/plugin-transform-duplicate-named-capturing-groups-regex': 7.29.0(@babel/core@7.28.5)
'@babel/plugin-transform-dynamic-import': 7.27.1(@babel/core@7.28.5)
'@babel/plugin-transform-explicit-resource-management': 7.28.6(@babel/core@7.28.5)
'@babel/plugin-transform-exponentiation-operator': 7.28.6(@babel/core@7.28.5)
'@babel/plugin-transform-export-namespace-from': 7.27.1(@babel/core@7.28.5)
'@babel/plugin-transform-for-of': 7.27.1(@babel/core@7.28.5)
'@babel/plugin-transform-function-name': 7.27.1(@babel/core@7.28.5)
'@babel/plugin-transform-json-strings': 7.28.6(@babel/core@7.28.5)
'@babel/plugin-transform-literals': 7.27.1(@babel/core@7.28.5)
'@babel/plugin-transform-logical-assignment-operators': 7.28.6(@babel/core@7.28.5)
'@babel/plugin-transform-member-expression-literals': 7.27.1(@babel/core@7.28.5)
'@babel/plugin-transform-modules-amd': 7.27.1(@babel/core@7.28.5)
'@babel/plugin-transform-modules-commonjs': 7.28.6(@babel/core@7.28.5)
'@babel/plugin-transform-modules-systemjs': 7.29.0(@babel/core@7.28.5)
'@babel/plugin-transform-modules-umd': 7.27.1(@babel/core@7.28.5)
'@babel/plugin-transform-named-capturing-groups-regex': 7.29.0(@babel/core@7.28.5)
'@babel/plugin-transform-new-target': 7.27.1(@babel/core@7.28.5)
'@babel/plugin-transform-nullish-coalescing-operator': 7.28.6(@babel/core@7.28.5)
'@babel/plugin-transform-numeric-separator': 7.28.6(@babel/core@7.28.5)
'@babel/plugin-transform-object-rest-spread': 7.28.6(@babel/core@7.28.5)
'@babel/plugin-transform-object-super': 7.27.1(@babel/core@7.28.5)
'@babel/plugin-transform-optional-catch-binding': 7.28.6(@babel/core@7.28.5)
'@babel/plugin-transform-optional-chaining': 7.28.6(@babel/core@7.28.5)
'@babel/plugin-transform-parameters': 7.27.7(@babel/core@7.28.5)
'@babel/plugin-transform-private-methods': 7.28.6(@babel/core@7.28.5)
'@babel/plugin-transform-private-property-in-object': 7.28.6(@babel/core@7.28.5)
'@babel/plugin-transform-property-literals': 7.27.1(@babel/core@7.28.5)
'@babel/plugin-transform-regenerator': 7.29.0(@babel/core@7.28.5)
'@babel/plugin-transform-regexp-modifiers': 7.28.6(@babel/core@7.28.5)
'@babel/plugin-transform-reserved-words': 7.27.1(@babel/core@7.28.5)
'@babel/plugin-transform-shorthand-properties': 7.27.1(@babel/core@7.28.5)
'@babel/plugin-transform-spread': 7.28.6(@babel/core@7.28.5)
'@babel/plugin-transform-sticky-regex': 7.27.1(@babel/core@7.28.5)
'@babel/plugin-transform-template-literals': 7.27.1(@babel/core@7.28.5)
'@babel/plugin-transform-typeof-symbol': 7.27.1(@babel/core@7.28.5)
'@babel/plugin-transform-unicode-escapes': 7.27.1(@babel/core@7.28.5)
'@babel/plugin-transform-unicode-property-regex': 7.28.6(@babel/core@7.28.5)
'@babel/plugin-transform-unicode-regex': 7.27.1(@babel/core@7.28.5)
'@babel/plugin-transform-unicode-sets-regex': 7.28.6(@babel/core@7.28.5)
'@babel/preset-modules': 0.1.6-no-external-plugins(@babel/core@7.28.5)
babel-plugin-polyfill-corejs2: 0.4.15(@babel/core@7.28.5)
babel-plugin-polyfill-corejs3: 0.13.0(@babel/core@7.28.5)
babel-plugin-polyfill-regenerator: 0.6.6(@babel/core@7.28.5)
core-js-compat: 3.48.0
semver: 6.3.1
transitivePeerDependencies:
- supports-color
'@babel/preset-modules@0.1.6-no-external-plugins(@babel/core@7.28.5)':
dependencies:
'@babel/core': 7.28.5
'@babel/helper-plugin-utils': 7.28.6
'@babel/types': 7.29.0
esutils: 2.0.3
'@babel/runtime@7.28.4': {}
'@babel/template@7.28.6':
dependencies:
'@babel/code-frame': 7.29.0
'@babel/parser': 7.29.0
'@babel/types': 7.29.0
'@babel/traverse@7.29.0':
dependencies:
'@babel/code-frame': 7.29.0
'@babel/generator': 7.29.1
'@babel/helper-globals': 7.28.0
'@babel/parser': 7.29.0
'@babel/template': 7.28.6
'@babel/types': 7.29.0
debug: 4.4.3
transitivePeerDependencies:
- supports-color
'@babel/types@7.29.0':
dependencies:
'@babel/helper-string-parser': 7.27.1
'@babel/helper-validator-identifier': 7.28.5
'@cspotcode/source-map-support@0.8.1':
dependencies:
'@jridgewell/trace-mapping': 0.3.9
'@discoveryjs/json-ext@0.6.3': {}
'@emnapi/core@1.8.1':
dependencies:
'@emnapi/wasi-threads': 1.1.0
tslib: 2.8.1
optional: true
'@emnapi/runtime@1.8.1':
dependencies:
tslib: 2.8.1
optional: true
'@emnapi/wasi-threads@1.1.0':
dependencies:
tslib: 2.8.1
optional: true
'@esbuild/aix-ppc64@0.27.2':
optional: true
'@esbuild/android-arm64@0.27.2':
optional: true
'@esbuild/android-arm@0.27.2':
optional: true
'@esbuild/android-x64@0.27.2':
optional: true
'@esbuild/darwin-arm64@0.27.2':
optional: true
'@esbuild/darwin-x64@0.27.2':
optional: true
'@esbuild/freebsd-arm64@0.27.2':
optional: true
'@esbuild/freebsd-x64@0.27.2':
optional: true
'@esbuild/linux-arm64@0.27.2':
optional: true
'@esbuild/linux-arm@0.27.2':
optional: true
'@esbuild/linux-ia32@0.27.2':
optional: true
'@esbuild/linux-loong64@0.27.2':
optional: true
'@esbuild/linux-mips64el@0.27.2':
optional: true
'@esbuild/linux-ppc64@0.27.2':
optional: true
'@esbuild/linux-riscv64@0.27.2':
optional: true
'@esbuild/linux-s390x@0.27.2':
optional: true
'@esbuild/linux-x64@0.27.2':
optional: true
'@esbuild/netbsd-arm64@0.27.2':
optional: true
'@esbuild/netbsd-x64@0.27.2':
optional: true
'@esbuild/openbsd-arm64@0.27.2':
optional: true
'@esbuild/openbsd-x64@0.27.2':
optional: true
'@esbuild/openharmony-arm64@0.27.2':
optional: true
'@esbuild/sunos-x64@0.27.2':
optional: true
'@esbuild/win32-arm64@0.27.2':
optional: true
'@esbuild/win32-ia32@0.27.2':
optional: true
'@esbuild/win32-x64@0.27.2':
optional: true
'@hono/node-server@1.19.9(hono@4.11.9)':
dependencies:
hono: 4.11.9
'@inquirer/ansi@1.0.2': {}
'@inquirer/checkbox@4.3.2(@types/node@20.19.33)':
dependencies:
'@inquirer/ansi': 1.0.2
'@inquirer/core': 10.3.2(@types/node@20.19.33)
'@inquirer/figures': 1.0.15
'@inquirer/type': 3.0.10(@types/node@20.19.33)
yoctocolors-cjs: 2.1.3
optionalDependencies:
'@types/node': 20.19.33
'@inquirer/confirm@5.1.21(@types/node@20.19.33)':
dependencies:
'@inquirer/core': 10.3.2(@types/node@20.19.33)
'@inquirer/type': 3.0.10(@types/node@20.19.33)
optionalDependencies:
'@types/node': 20.19.33
'@inquirer/core@10.3.2(@types/node@20.19.33)':
dependencies:
'@inquirer/ansi': 1.0.2
'@inquirer/figures': 1.0.15
'@inquirer/type': 3.0.10(@types/node@20.19.33)
cli-width: 4.1.0
mute-stream: 2.0.0
signal-exit: 4.1.0
wrap-ansi: 6.2.0
yoctocolors-cjs: 2.1.3
optionalDependencies:
'@types/node': 20.19.33
'@inquirer/editor@4.2.23(@types/node@20.19.33)':
dependencies:
'@inquirer/core': 10.3.2(@types/node@20.19.33)
'@inquirer/external-editor': 1.0.3(@types/node@20.19.33)
'@inquirer/type': 3.0.10(@types/node@20.19.33)
optionalDependencies:
'@types/node': 20.19.33
'@inquirer/expand@4.0.23(@types/node@20.19.33)':
dependencies:
'@inquirer/core': 10.3.2(@types/node@20.19.33)
'@inquirer/type': 3.0.10(@types/node@20.19.33)
yoctocolors-cjs: 2.1.3
optionalDependencies:
'@types/node': 20.19.33
'@inquirer/external-editor@1.0.3(@types/node@20.19.33)':
dependencies:
chardet: 2.1.1
iconv-lite: 0.7.2
optionalDependencies:
'@types/node': 20.19.33
'@inquirer/figures@1.0.15': {}
'@inquirer/input@4.3.1(@types/node@20.19.33)':
dependencies:
'@inquirer/core': 10.3.2(@types/node@20.19.33)
'@inquirer/type': 3.0.10(@types/node@20.19.33)
optionalDependencies:
'@types/node': 20.19.33
'@inquirer/number@3.0.23(@types/node@20.19.33)':
dependencies:
'@inquirer/core': 10.3.2(@types/node@20.19.33)
'@inquirer/type': 3.0.10(@types/node@20.19.33)
optionalDependencies:
'@types/node': 20.19.33
'@inquirer/password@4.0.23(@types/node@20.19.33)':
dependencies:
'@inquirer/ansi': 1.0.2
'@inquirer/core': 10.3.2(@types/node@20.19.33)
'@inquirer/type': 3.0.10(@types/node@20.19.33)
optionalDependencies:
'@types/node': 20.19.33
'@inquirer/prompts@7.10.1(@types/node@20.19.33)':
dependencies:
'@inquirer/checkbox': 4.3.2(@types/node@20.19.33)
'@inquirer/confirm': 5.1.21(@types/node@20.19.33)
'@inquirer/editor': 4.2.23(@types/node@20.19.33)
'@inquirer/expand': 4.0.23(@types/node@20.19.33)
'@inquirer/input': 4.3.1(@types/node@20.19.33)
'@inquirer/number': 3.0.23(@types/node@20.19.33)
'@inquirer/password': 4.0.23(@types/node@20.19.33)
'@inquirer/rawlist': 4.1.11(@types/node@20.19.33)
'@inquirer/search': 3.2.2(@types/node@20.19.33)
'@inquirer/select': 4.4.2(@types/node@20.19.33)
optionalDependencies:
'@types/node': 20.19.33
'@inquirer/rawlist@4.1.11(@types/node@20.19.33)':
dependencies:
'@inquirer/core': 10.3.2(@types/node@20.19.33)
'@inquirer/type': 3.0.10(@types/node@20.19.33)
yoctocolors-cjs: 2.1.3
optionalDependencies:
'@types/node': 20.19.33
'@inquirer/search@3.2.2(@types/node@20.19.33)':
dependencies:
'@inquirer/core': 10.3.2(@types/node@20.19.33)
'@inquirer/figures': 1.0.15
'@inquirer/type': 3.0.10(@types/node@20.19.33)
yoctocolors-cjs: 2.1.3
optionalDependencies:
'@types/node': 20.19.33
'@inquirer/select@4.4.2(@types/node@20.19.33)':
dependencies:
'@inquirer/ansi': 1.0.2
'@inquirer/core': 10.3.2(@types/node@20.19.33)
'@inquirer/figures': 1.0.15
'@inquirer/type': 3.0.10(@types/node@20.19.33)
yoctocolors-cjs: 2.1.3
optionalDependencies:
'@types/node': 20.19.33
'@inquirer/type@3.0.10(@types/node@20.19.33)':
optionalDependencies:
'@types/node': 20.19.33
'@isaacs/balanced-match@4.0.1': {}
'@isaacs/brace-expansion@5.0.1':
dependencies:
'@isaacs/balanced-match': 4.0.1
'@isaacs/fs-minipass@4.0.1':
dependencies:
minipass: 7.1.2
'@istanbuljs/schema@0.1.3': {}
'@jridgewell/gen-mapping@0.3.13':
dependencies:
'@jridgewell/sourcemap-codec': 1.5.5
'@jridgewell/trace-mapping': 0.3.31
'@jridgewell/remapping@2.3.5':
dependencies:
'@jridgewell/gen-mapping': 0.3.13
'@jridgewell/trace-mapping': 0.3.31
'@jridgewell/resolve-uri@3.1.2': {}
'@jridgewell/source-map@0.3.11':
dependencies:
'@jridgewell/gen-mapping': 0.3.13
'@jridgewell/trace-mapping': 0.3.31
'@jridgewell/sourcemap-codec@1.5.5': {}
'@jridgewell/trace-mapping@0.3.31':
dependencies:
'@jridgewell/resolve-uri': 3.1.2
'@jridgewell/sourcemap-codec': 1.5.5
'@jridgewell/trace-mapping@0.3.9':
dependencies:
'@jridgewell/resolve-uri': 3.1.2
'@jridgewell/sourcemap-codec': 1.5.5
'@jsonjoy.com/base64@1.1.2(tslib@2.8.1)':
dependencies:
tslib: 2.8.1
'@jsonjoy.com/base64@17.67.0(tslib@2.8.1)':
dependencies:
tslib: 2.8.1
'@jsonjoy.com/buffers@1.2.1(tslib@2.8.1)':
dependencies:
tslib: 2.8.1
'@jsonjoy.com/buffers@17.67.0(tslib@2.8.1)':
dependencies:
tslib: 2.8.1
'@jsonjoy.com/codegen@1.0.0(tslib@2.8.1)':
dependencies:
tslib: 2.8.1
'@jsonjoy.com/codegen@17.67.0(tslib@2.8.1)':
dependencies:
tslib: 2.8.1
'@jsonjoy.com/fs-core@4.56.10(tslib@2.8.1)':
dependencies:
'@jsonjoy.com/fs-node-builtins': 4.56.10(tslib@2.8.1)
'@jsonjoy.com/fs-node-utils': 4.56.10(tslib@2.8.1)
thingies: 2.5.0(tslib@2.8.1)
tslib: 2.8.1
'@jsonjoy.com/fs-fsa@4.56.10(tslib@2.8.1)':
dependencies:
'@jsonjoy.com/fs-core': 4.56.10(tslib@2.8.1)
'@jsonjoy.com/fs-node-builtins': 4.56.10(tslib@2.8.1)
'@jsonjoy.com/fs-node-utils': 4.56.10(tslib@2.8.1)
thingies: 2.5.0(tslib@2.8.1)
tslib: 2.8.1
'@jsonjoy.com/fs-node-builtins@4.56.10(tslib@2.8.1)':
dependencies:
tslib: 2.8.1
'@jsonjoy.com/fs-node-to-fsa@4.56.10(tslib@2.8.1)':
dependencies:
'@jsonjoy.com/fs-fsa': 4.56.10(tslib@2.8.1)
'@jsonjoy.com/fs-node-builtins': 4.56.10(tslib@2.8.1)
'@jsonjoy.com/fs-node-utils': 4.56.10(tslib@2.8.1)
tslib: 2.8.1
'@jsonjoy.com/fs-node-utils@4.56.10(tslib@2.8.1)':
dependencies:
'@jsonjoy.com/fs-node-builtins': 4.56.10(tslib@2.8.1)
tslib: 2.8.1
'@jsonjoy.com/fs-node@4.56.10(tslib@2.8.1)':
dependencies:
'@jsonjoy.com/fs-core': 4.56.10(tslib@2.8.1)
'@jsonjoy.com/fs-node-builtins': 4.56.10(tslib@2.8.1)
'@jsonjoy.com/fs-node-utils': 4.56.10(tslib@2.8.1)
'@jsonjoy.com/fs-print': 4.56.10(tslib@2.8.1)
'@jsonjoy.com/fs-snapshot': 4.56.10(tslib@2.8.1)
glob-to-regex.js: 1.2.0(tslib@2.8.1)
thingies: 2.5.0(tslib@2.8.1)
tslib: 2.8.1
'@jsonjoy.com/fs-print@4.56.10(tslib@2.8.1)':
dependencies:
'@jsonjoy.com/fs-node-utils': 4.56.10(tslib@2.8.1)
tree-dump: 1.1.0(tslib@2.8.1)
tslib: 2.8.1
'@jsonjoy.com/fs-snapshot@4.56.10(tslib@2.8.1)':
dependencies:
'@jsonjoy.com/buffers': 17.67.0(tslib@2.8.1)
'@jsonjoy.com/fs-node-utils': 4.56.10(tslib@2.8.1)
'@jsonjoy.com/json-pack': 17.67.0(tslib@2.8.1)
'@jsonjoy.com/util': 17.67.0(tslib@2.8.1)
tslib: 2.8.1
'@jsonjoy.com/json-pack@1.21.0(tslib@2.8.1)':
dependencies:
'@jsonjoy.com/base64': 1.1.2(tslib@2.8.1)
'@jsonjoy.com/buffers': 1.2.1(tslib@2.8.1)
'@jsonjoy.com/codegen': 1.0.0(tslib@2.8.1)
'@jsonjoy.com/json-pointer': 1.0.2(tslib@2.8.1)
'@jsonjoy.com/util': 1.9.0(tslib@2.8.1)
hyperdyperid: 1.2.0
thingies: 2.5.0(tslib@2.8.1)
tree-dump: 1.1.0(tslib@2.8.1)
tslib: 2.8.1
'@jsonjoy.com/json-pack@17.67.0(tslib@2.8.1)':
dependencies:
'@jsonjoy.com/base64': 17.67.0(tslib@2.8.1)
'@jsonjoy.com/buffers': 17.67.0(tslib@2.8.1)
'@jsonjoy.com/codegen': 17.67.0(tslib@2.8.1)
'@jsonjoy.com/json-pointer': 17.67.0(tslib@2.8.1)
'@jsonjoy.com/util': 17.67.0(tslib@2.8.1)
hyperdyperid: 1.2.0
thingies: 2.5.0(tslib@2.8.1)
tree-dump: 1.1.0(tslib@2.8.1)
tslib: 2.8.1
'@jsonjoy.com/json-pointer@1.0.2(tslib@2.8.1)':
dependencies:
'@jsonjoy.com/codegen': 1.0.0(tslib@2.8.1)
'@jsonjoy.com/util': 1.9.0(tslib@2.8.1)
tslib: 2.8.1
'@jsonjoy.com/json-pointer@17.67.0(tslib@2.8.1)':
dependencies:
'@jsonjoy.com/util': 17.67.0(tslib@2.8.1)
tslib: 2.8.1
'@jsonjoy.com/util@1.9.0(tslib@2.8.1)':
dependencies:
'@jsonjoy.com/buffers': 1.2.1(tslib@2.8.1)
'@jsonjoy.com/codegen': 1.0.0(tslib@2.8.1)
tslib: 2.8.1
'@jsonjoy.com/util@17.67.0(tslib@2.8.1)':
dependencies:
'@jsonjoy.com/buffers': 17.67.0(tslib@2.8.1)
'@jsonjoy.com/codegen': 17.67.0(tslib@2.8.1)
tslib: 2.8.1
'@leichtgewicht/ip-codec@2.0.5': {}
'@listr2/prompt-adapter-inquirer@3.0.5(@inquirer/prompts@7.10.1(@types/node@20.19.33))(@types/node@20.19.33)(listr2@9.0.5)':
dependencies:
'@inquirer/prompts': 7.10.1(@types/node@20.19.33)
'@inquirer/type': 3.0.10(@types/node@20.19.33)
listr2: 9.0.5
transitivePeerDependencies:
- '@types/node'
'@lmdb/lmdb-darwin-arm64@3.4.4':
optional: true
'@lmdb/lmdb-darwin-x64@3.4.4':
optional: true
'@lmdb/lmdb-linux-arm64@3.4.4':
optional: true
'@lmdb/lmdb-linux-arm@3.4.4':
optional: true
'@lmdb/lmdb-linux-x64@3.4.4':
optional: true
'@lmdb/lmdb-win32-arm64@3.4.4':
optional: true
'@lmdb/lmdb-win32-x64@3.4.4':
optional: true
'@modelcontextprotocol/sdk@1.25.2(hono@4.11.9)(zod@4.3.5)':
dependencies:
'@hono/node-server': 1.19.9(hono@4.11.9)
ajv: 8.17.1
ajv-formats: 3.0.1(ajv@8.17.1)
content-type: 1.0.5
cors: 2.8.6
cross-spawn: 7.0.6
eventsource: 3.0.7
eventsource-parser: 3.0.6
express: 5.2.1
express-rate-limit: 7.5.1(express@5.2.1)
jose: 6.1.3
json-schema-typed: 8.0.2
pkce-challenge: 5.0.1
raw-body: 3.0.2
zod: 4.3.5
zod-to-json-schema: 3.25.1(zod@4.3.5)
transitivePeerDependencies:
- hono
- supports-color
'@msgpackr-extract/msgpackr-extract-darwin-arm64@3.0.3':
optional: true
'@msgpackr-extract/msgpackr-extract-darwin-x64@3.0.3':
optional: true
'@msgpackr-extract/msgpackr-extract-linux-arm64@3.0.3':
optional: true
'@msgpackr-extract/msgpackr-extract-linux-arm@3.0.3':
optional: true
'@msgpackr-extract/msgpackr-extract-linux-x64@3.0.3':
optional: true
'@msgpackr-extract/msgpackr-extract-win32-x64@3.0.3':
optional: true
'@napi-rs/nice-android-arm-eabi@1.1.1':
optional: true
'@napi-rs/nice-android-arm64@1.1.1':
optional: true
'@napi-rs/nice-darwin-arm64@1.1.1':
optional: true
'@napi-rs/nice-darwin-x64@1.1.1':
optional: true
'@napi-rs/nice-freebsd-x64@1.1.1':
optional: true
'@napi-rs/nice-linux-arm-gnueabihf@1.1.1':
optional: true
'@napi-rs/nice-linux-arm64-gnu@1.1.1':
optional: true
'@napi-rs/nice-linux-arm64-musl@1.1.1':
optional: true
'@napi-rs/nice-linux-ppc64-gnu@1.1.1':
optional: true
'@napi-rs/nice-linux-riscv64-gnu@1.1.1':
optional: true
'@napi-rs/nice-linux-s390x-gnu@1.1.1':
optional: true
'@napi-rs/nice-linux-x64-gnu@1.1.1':
optional: true
'@napi-rs/nice-linux-x64-musl@1.1.1':
optional: true
'@napi-rs/nice-openharmony-arm64@1.1.1':
optional: true
'@napi-rs/nice-win32-arm64-msvc@1.1.1':
optional: true
'@napi-rs/nice-win32-ia32-msvc@1.1.1':
optional: true
'@napi-rs/nice-win32-x64-msvc@1.1.1':
optional: true
'@napi-rs/nice@1.1.1':
optionalDependencies:
'@napi-rs/nice-android-arm-eabi': 1.1.1
'@napi-rs/nice-android-arm64': 1.1.1
'@napi-rs/nice-darwin-arm64': 1.1.1
'@napi-rs/nice-darwin-x64': 1.1.1
'@napi-rs/nice-freebsd-x64': 1.1.1
'@napi-rs/nice-linux-arm-gnueabihf': 1.1.1
'@napi-rs/nice-linux-arm64-gnu': 1.1.1
'@napi-rs/nice-linux-arm64-musl': 1.1.1
'@napi-rs/nice-linux-ppc64-gnu': 1.1.1
'@napi-rs/nice-linux-riscv64-gnu': 1.1.1
'@napi-rs/nice-linux-s390x-gnu': 1.1.1
'@napi-rs/nice-linux-x64-gnu': 1.1.1
'@napi-rs/nice-linux-x64-musl': 1.1.1
'@napi-rs/nice-openharmony-arm64': 1.1.1
'@napi-rs/nice-win32-arm64-msvc': 1.1.1
'@napi-rs/nice-win32-ia32-msvc': 1.1.1
'@napi-rs/nice-win32-x64-msvc': 1.1.1
optional: true
'@napi-rs/wasm-runtime@1.1.1':
dependencies:
'@emnapi/core': 1.8.1
'@emnapi/runtime': 1.8.1
'@tybys/wasm-util': 0.10.1
optional: true
'@ngtools/webpack@21.1.0-rc.0(@angular/compiler-cli@in-existing-linked-by-bazel)(typescript@5.9.3)(webpack@5.104.1(esbuild@0.27.2))':
dependencies:
'@angular/compiler-cli': link:in-existing-linked-by-bazel
typescript: 5.9.3
webpack: 5.104.1(esbuild@0.27.2)
'@npmcli/agent@4.0.0':
dependencies:
agent-base: 7.1.4
http-proxy-agent: 7.0.2
https-proxy-agent: 7.0.6
lru-cache: 11.2.6
socks-proxy-agent: 8.0.5
transitivePeerDependencies:
- supports-color
'@npmcli/fs@5.0.0':
dependencies:
semver: 7.7.3
'@npmcli/git@7.0.1':
dependencies:
'@npmcli/promise-spawn': 9.0.1
ini: 6.0.0
lru-cache: 11.2.6
npm-pick-manifest: 11.0.3
proc-log: 6.1.0
promise-retry: 2.0.1
semver: 7.7.3
which: 6.0.1
'@npmcli/installed-package-contents@4.0.0':
dependencies:
npm-bundled: 5.0.0
npm-normalize-package-bin: 5.0.0
'@npmcli/node-gyp@5.0.0': {}
'@npmcli/package-json@7.0.4':
dependencies:
'@npmcli/git': 7.0.1
glob: 13.0.2
hosted-git-info: 9.0.2
json-parse-even-better-errors: 5.0.0
proc-log: 6.1.0
semver: 7.7.3
validate-npm-package-license: 3.0.4
'@npmcli/promise-spawn@9.0.1':
dependencies:
which: 6.0.1
'@npmcli/redact@4.0.0': {}
'@npmcli/run-script@10.0.3':
dependencies:
'@npmcli/node-gyp': 5.0.0
'@npmcli/package-json': 7.0.4
'@npmcli/promise-spawn': 9.0.1
node-gyp: 12.2.0
proc-log: 6.1.0
which: 6.0.1
transitivePeerDependencies:
- supports-color
'@oxc-project/types@0.106.0': {}
'@parcel/watcher-android-arm64@2.5.6':
optional: true
'@parcel/watcher-darwin-arm64@2.5.6':
optional: true
'@parcel/watcher-darwin-x64@2.5.6':
optional: true
'@parcel/watcher-freebsd-x64@2.5.6':
optional: true
'@parcel/watcher-linux-arm-glibc@2.5.6':
optional: true
'@parcel/watcher-linux-arm-musl@2.5.6':
optional: true
'@parcel/watcher-linux-arm64-glibc@2.5.6':
optional: true
'@parcel/watcher-linux-arm64-musl@2.5.6':
optional: true
'@parcel/watcher-linux-x64-glibc@2.5.6':
optional: true
'@parcel/watcher-linux-x64-musl@2.5.6':
optional: true
'@parcel/watcher-win32-arm64@2.5.6':
optional: true
'@parcel/watcher-win32-ia32@2.5.6':
optional: true
'@parcel/watcher-win32-x64@2.5.6':
optional: true
'@parcel/watcher@2.5.6':
dependencies:
detect-libc: 2.1.2
is-glob: 4.0.3
node-addon-api: 7.1.1
picomatch: 4.0.3
optionalDependencies:
'@parcel/watcher-android-arm64': 2.5.6
'@parcel/watcher-darwin-arm64': 2.5.6
'@parcel/watcher-darwin-x64': 2.5.6
'@parcel/watcher-freebsd-x64': 2.5.6
'@parcel/watcher-linux-arm-glibc': 2.5.6
'@parcel/watcher-linux-arm-musl': 2.5.6
'@parcel/watcher-linux-arm64-glibc': 2.5.6
'@parcel/watcher-linux-arm64-musl': 2.5.6
'@parcel/watcher-linux-x64-glibc': 2.5.6
'@parcel/watcher-linux-x64-musl': 2.5.6
'@parcel/watcher-win32-arm64': 2.5.6
'@parcel/watcher-win32-ia32': 2.5.6
'@parcel/watcher-win32-x64': 2.5.6
optional: true
'@rolldown/binding-android-arm64@1.0.0-beta.58':
optional: true
'@rolldown/binding-darwin-arm64@1.0.0-beta.58':
optional: true
'@rolldown/binding-darwin-x64@1.0.0-beta.58':
optional: true
'@rolldown/binding-freebsd-x64@1.0.0-beta.58':
optional: true
'@rolldown/binding-linux-arm-gnueabihf@1.0.0-beta.58':
optional: true
'@rolldown/binding-linux-arm64-gnu@1.0.0-beta.58':
optional: true
'@rolldown/binding-linux-arm64-musl@1.0.0-beta.58':
optional: true
'@rolldown/binding-linux-x64-gnu@1.0.0-beta.58':
optional: true
'@rolldown/binding-linux-x64-musl@1.0.0-beta.58':
optional: true
'@rolldown/binding-openharmony-arm64@1.0.0-beta.58':
optional: true
'@rolldown/binding-wasm32-wasi@1.0.0-beta.58':
dependencies:
'@napi-rs/wasm-runtime': 1.1.1
optional: true
'@rolldown/binding-win32-arm64-msvc@1.0.0-beta.58':
optional: true
'@rolldown/binding-win32-x64-msvc@1.0.0-beta.58':
optional: true
'@rolldown/pluginutils@1.0.0-beta.58': {}
'@rollup/rollup-android-arm-eabi@4.57.1':
optional: true
'@rollup/rollup-android-arm64@4.57.1':
optional: true
'@rollup/rollup-darwin-arm64@4.57.1':
optional: true
'@rollup/rollup-darwin-x64@4.57.1':
optional: true
'@rollup/rollup-freebsd-arm64@4.57.1':
optional: true
'@rollup/rollup-freebsd-x64@4.57.1':
optional: true
'@rollup/rollup-linux-arm-gnueabihf@4.57.1':
optional: true
'@rollup/rollup-linux-arm-musleabihf@4.57.1':
optional: true
'@rollup/rollup-linux-arm64-gnu@4.57.1':
optional: true
'@rollup/rollup-linux-arm64-musl@4.57.1':
optional: true
'@rollup/rollup-linux-loong64-gnu@4.57.1':
optional: true
'@rollup/rollup-linux-loong64-musl@4.57.1':
optional: true
'@rollup/rollup-linux-ppc64-gnu@4.57.1':
optional: true
'@rollup/rollup-linux-ppc64-musl@4.57.1':
optional: true
'@rollup/rollup-linux-riscv64-gnu@4.57.1':
optional: true
'@rollup/rollup-linux-riscv64-musl@4.57.1':
optional: true
'@rollup/rollup-linux-s390x-gnu@4.57.1':
optional: true
'@rollup/rollup-linux-x64-gnu@4.57.1':
optional: true
'@rollup/rollup-linux-x64-musl@4.57.1':
optional: true
'@rollup/rollup-openbsd-x64@4.57.1':
optional: true
'@rollup/rollup-openharmony-arm64@4.57.1':
optional: true
'@rollup/rollup-win32-arm64-msvc@4.57.1':
optional: true
'@rollup/rollup-win32-ia32-msvc@4.57.1':
optional: true
'@rollup/rollup-win32-x64-gnu@4.57.1':
optional: true
'@rollup/rollup-win32-x64-msvc@4.57.1':
optional: true
'@schematics/angular@21.1.0-rc.0':
dependencies:
'@angular-devkit/core': 21.1.0-rc.0
'@angular-devkit/schematics': 21.1.0-rc.0
jsonc-parser: 3.3.1
transitivePeerDependencies:
- chokidar
'@sigstore/bundle@4.0.0':
dependencies:
'@sigstore/protobuf-specs': 0.5.0
'@sigstore/core@3.1.0': {}
'@sigstore/protobuf-specs@0.5.0': {}
'@sigstore/sign@4.1.0':
dependencies:
'@sigstore/bundle': 4.0.0
'@sigstore/core': 3.1.0
'@sigstore/protobuf-specs': 0.5.0
make-fetch-happen: 15.0.3
proc-log: 6.1.0
promise-retry: 2.0.1
transitivePeerDependencies:
- supports-color
'@sigstore/tuf@4.0.1':
dependencies:
'@sigstore/protobuf-specs': 0.5.0
tuf-js: 4.1.0
transitivePeerDependencies:
- supports-color
'@sigstore/verify@3.1.0':
dependencies:
'@sigstore/bundle': 4.0.0
'@sigstore/core': 3.1.0
'@sigstore/protobuf-specs': 0.5.0
'@tsconfig/node10@1.0.12': {}
'@tsconfig/node12@1.0.11': {}
'@tsconfig/node14@1.0.3': {}
'@tsconfig/node16@1.0.4': {}
'@tufjs/canonical-json@2.0.0': {}
'@tufjs/models@4.1.0':
dependencies:
'@tufjs/canonical-json': 2.0.0
minimatch: 10.1.2
'@tybys/wasm-util@0.10.1':
dependencies:
tslib: 2.8.1
optional: true
'@types/body-parser@1.19.6':
dependencies:
'@types/connect': 3.4.38
'@types/node': 20.19.33
'@types/bonjour@3.5.13':
dependencies:
'@types/node': 20.19.33
'@types/connect-history-api-fallback@1.5.4':
dependencies:
'@types/express-serve-static-core': 4.19.8
'@types/node': 20.19.33
'@types/connect@3.4.38':
dependencies:
'@types/node': 20.19.33
'@types/eslint-scope@3.7.7':
dependencies:
'@types/eslint': 9.6.1
'@types/estree': 1.0.8
'@types/eslint@9.6.1':
dependencies:
'@types/estree': 1.0.8
'@types/json-schema': 7.0.15
'@types/estree@1.0.8': {}
'@types/express-serve-static-core@4.19.8':
dependencies:
'@types/node': 20.19.33
'@types/qs': 6.14.0
'@types/range-parser': 1.2.7
'@types/send': 1.2.1
'@types/express@4.17.25':
dependencies:
'@types/body-parser': 1.19.6
'@types/express-serve-static-core': 4.19.8
'@types/qs': 6.14.0
'@types/serve-static': 1.15.10
'@types/http-errors@2.0.5': {}
'@types/http-proxy@1.17.17':
dependencies:
'@types/node': 20.19.33
'@types/json-schema@7.0.15': {}
'@types/mime@1.3.5': {}
'@types/node-forge@1.3.14':
dependencies:
'@types/node': 20.19.33
'@types/node@20.19.33':
dependencies:
undici-types: 6.21.0
'@types/qs@6.14.0': {}
'@types/range-parser@1.2.7': {}
'@types/retry@0.12.2': {}
'@types/send@0.17.6':
dependencies:
'@types/mime': 1.3.5
'@types/node': 20.19.33
'@types/send@1.2.1':
dependencies:
'@types/node': 20.19.33
'@types/serve-index@1.9.4':
dependencies:
'@types/express': 4.17.25
'@types/serve-static@1.15.10':
dependencies:
'@types/http-errors': 2.0.5
'@types/node': 20.19.33
'@types/send': 0.17.6
'@types/sockjs@0.3.36':
dependencies:
'@types/node': 20.19.33
'@types/ws@8.18.1':
dependencies:
'@types/node': 20.19.33
'@vitejs/plugin-basic-ssl@2.1.0(vite@7.3.0(@types/node@20.19.33)(jiti@2.6.1)(less@4.4.2)(sass@1.97.1)(terser@5.44.1))':
dependencies:
vite: 7.3.0(@types/node@20.19.33)(jiti@2.6.1)(less@4.4.2)(sass@1.97.1)(terser@5.44.1)
'@webassemblyjs/ast@1.14.1':
dependencies:
'@webassemblyjs/helper-numbers': 1.13.2
'@webassemblyjs/helper-wasm-bytecode': 1.13.2
'@webassemblyjs/floating-point-hex-parser@1.13.2': {}
'@webassemblyjs/helper-api-error@1.13.2': {}
'@webassemblyjs/helper-buffer@1.14.1': {}
'@webassemblyjs/helper-numbers@1.13.2':
dependencies:
'@webassemblyjs/floating-point-hex-parser': 1.13.2
'@webassemblyjs/helper-api-error': 1.13.2
'@xtuc/long': 4.2.2
'@webassemblyjs/helper-wasm-bytecode@1.13.2': {}
'@webassemblyjs/helper-wasm-section@1.14.1':
dependencies:
'@webassemblyjs/ast': 1.14.1
'@webassemblyjs/helper-buffer': 1.14.1
'@webassemblyjs/helper-wasm-bytecode': 1.13.2
'@webassemblyjs/wasm-gen': 1.14.1
'@webassemblyjs/ieee754@1.13.2':
dependencies:
'@xtuc/ieee754': 1.2.0
'@webassemblyjs/leb128@1.13.2':
dependencies:
'@xtuc/long': 4.2.2
'@webassemblyjs/utf8@1.13.2': {}
'@webassemblyjs/wasm-edit@1.14.1':
dependencies:
'@webassemblyjs/ast': 1.14.1
'@webassemblyjs/helper-buffer': 1.14.1
'@webassemblyjs/helper-wasm-bytecode': 1.13.2
'@webassemblyjs/helper-wasm-section': 1.14.1
'@webassemblyjs/wasm-gen': 1.14.1
'@webassemblyjs/wasm-opt': 1.14.1
'@webassemblyjs/wasm-parser': 1.14.1
'@webassemblyjs/wast-printer': 1.14.1
'@webassemblyjs/wasm-gen@1.14.1':
dependencies:
'@webassemblyjs/ast': 1.14.1
'@webassemblyjs/helper-wasm-bytecode': 1.13.2
'@webassemblyjs/ieee754': 1.13.2
'@webassemblyjs/leb128': 1.13.2
'@webassemblyjs/utf8': 1.13.2
'@webassemblyjs/wasm-opt@1.14.1':
dependencies:
'@webassemblyjs/ast': 1.14.1
'@webassemblyjs/helper-buffer': 1.14.1
'@webassemblyjs/wasm-gen': 1.14.1
'@webassemblyjs/wasm-parser': 1.14.1
'@webassemblyjs/wasm-parser@1.14.1':
dependencies:
'@webassemblyjs/ast': 1.14.1
'@webassemblyjs/helper-api-error': 1.13.2
'@webassemblyjs/helper-wasm-bytecode': 1.13.2
'@webassemblyjs/ieee754': 1.13.2
'@webassemblyjs/leb128': 1.13.2
'@webassemblyjs/utf8': 1.13.2
'@webassemblyjs/wast-printer@1.14.1':
dependencies:
'@webassemblyjs/ast': 1.14.1
'@xtuc/long': 4.2.2
'@xtuc/ieee754@1.2.0': {}
'@xtuc/long@4.2.2': {}
'@yarnpkg/lockfile@1.1.0': {}
abbrev@4.0.0: {}
accepts@1.3.8:
dependencies:
mime-types: 2.1.35
negotiator: 0.6.3
accepts@2.0.0:
dependencies:
mime-types: 3.0.2
negotiator: 1.0.0
acorn-import-phases@1.0.4(acorn@8.15.0):
dependencies:
acorn: 8.15.0
acorn-walk@8.3.4:
dependencies:
acorn: 8.15.0
acorn@8.15.0: {}
adjust-sourcemap-loader@4.0.0:
dependencies:
loader-utils: 2.0.4
regex-parser: 2.3.1
agent-base@7.1.4: {}
ajv-formats@2.1.1(ajv@8.17.1):
optionalDependencies:
ajv: 8.17.1
ajv-formats@3.0.1(ajv@8.17.1):
optionalDependencies:
ajv: 8.17.1
ajv-keywords@5.1.0(ajv@8.17.1):
dependencies:
ajv: 8.17.1
fast-deep-equal: 3.1.3
ajv@8.17.1:
dependencies:
fast-deep-equal: 3.1.3
fast-uri: 3.1.0
json-schema-traverse: 1.0.0
require-from-string: 2.0.2
algoliasearch@5.46.2:
dependencies:
'@algolia/abtesting': 1.12.2
'@algolia/client-abtesting': 5.46.2
'@algolia/client-analytics': 5.46.2
'@algolia/client-common': 5.46.2
'@algolia/client-insights': 5.46.2
'@algolia/client-personalization': 5.46.2
'@algolia/client-query-suggestions': 5.46.2
'@algolia/client-search': 5.46.2
'@algolia/ingestion': 1.46.2
'@algolia/monitoring': 1.46.2
'@algolia/recommend': 5.46.2
'@algolia/requester-browser-xhr': 5.46.2
'@algolia/requester-fetch': 5.46.2
'@algolia/requester-node-http': 5.46.2
ansi-colors@4.1.3: {}
ansi-escapes@7.3.0:
dependencies:
environment: 1.1.0
ansi-html-community@0.0.8: {}
ansi-regex@5.0.1: {}
ansi-regex@6.2.2: {}
ansi-styles@4.3.0:
dependencies:
color-convert: 2.0.1
ansi-styles@6.2.3: {}
anymatch@3.1.3:
dependencies:
normalize-path: 3.0.0
picomatch: 2.3.1
arg@4.1.3: {}
argparse@2.0.1: {}
array-flatten@1.1.1: {}
autoprefixer@10.4.23(postcss@8.5.6):
dependencies:
browserslist: 4.28.1
caniuse-lite: 1.0.30001769
fraction.js: 5.3.4
picocolors: 1.1.1
postcss: 8.5.6
postcss-value-parser: 4.2.0
babel-loader@10.0.0(@babel/core@7.28.5)(webpack@5.104.1(esbuild@0.27.2)):
dependencies:
'@babel/core': 7.28.5
find-up: 5.0.0
webpack: 5.104.1(esbuild@0.27.2)
babel-plugin-polyfill-corejs2@0.4.15(@babel/core@7.28.5):
dependencies:
'@babel/compat-data': 7.29.0
'@babel/core': 7.28.5
'@babel/helper-define-polyfill-provider': 0.6.6(@babel/core@7.28.5)
semver: 6.3.1
transitivePeerDependencies:
- supports-color
babel-plugin-polyfill-corejs3@0.13.0(@babel/core@7.28.5):
dependencies:
'@babel/core': 7.28.5
'@babel/helper-define-polyfill-provider': 0.6.6(@babel/core@7.28.5)
core-js-compat: 3.48.0
transitivePeerDependencies:
- supports-color
babel-plugin-polyfill-regenerator@0.6.6(@babel/core@7.28.5):
dependencies:
'@babel/core': 7.28.5
'@babel/helper-define-polyfill-provider': 0.6.6(@babel/core@7.28.5)
transitivePeerDependencies:
- supports-color
baseline-browser-mapping@2.9.19: {}
batch@0.6.1: {}
beasties@0.3.5:
dependencies:
css-select: 6.0.0
css-what: 7.0.0
dom-serializer: 2.0.0
domhandler: 5.0.3
htmlparser2: 10.1.0
picocolors: 1.1.1
postcss: 8.5.6
postcss-media-query-parser: 0.2.3
big.js@5.2.2: {}
binary-extensions@2.3.0: {}
body-parser@1.20.4:
dependencies:
bytes: 3.1.2
content-type: 1.0.5
debug: 2.6.9
depd: 2.0.0
destroy: 1.2.0
http-errors: 2.0.1
iconv-lite: 0.4.24
on-finished: 2.4.1
qs: 6.14.1
raw-body: 2.5.3
type-is: 1.6.18
unpipe: 1.0.0
transitivePeerDependencies:
- supports-color
body-parser@2.2.2:
dependencies:
bytes: 3.1.2
content-type: 1.0.5
debug: 4.4.3
http-errors: 2.0.1
iconv-lite: 0.7.2
on-finished: 2.4.1
qs: 6.14.1
raw-body: 3.0.2
type-is: 2.0.1
transitivePeerDependencies:
- supports-color
bonjour-service@1.3.0:
dependencies:
fast-deep-equal: 3.1.3
multicast-dns: 7.2.5
boolbase@1.0.0: {}
braces@3.0.3:
dependencies:
fill-range: 7.1.1
browserslist@4.28.1:
dependencies:
baseline-browser-mapping: 2.9.19
caniuse-lite: 1.0.30001769
electron-to-chromium: 1.5.286
node-releases: 2.0.27
update-browserslist-db: 1.2.3(browserslist@4.28.1)
buffer-from@1.1.2: {}
bundle-name@4.1.0:
dependencies:
run-applescript: 7.1.0
bytes@3.1.2: {}
cacache@20.0.3:
dependencies:
'@npmcli/fs': 5.0.0
fs-minipass: 3.0.3
glob: 13.0.2
lru-cache: 11.2.6
minipass: 7.1.2
minipass-collect: 2.0.1
minipass-flush: 1.0.5
minipass-pipeline: 1.2.4
p-map: 7.0.4
ssri: 13.0.1
unique-filename: 5.0.0
call-bind-apply-helpers@1.0.2:
dependencies:
es-errors: 1.3.0
function-bind: 1.1.2
call-bound@1.0.4:
dependencies:
call-bind-apply-helpers: 1.0.2
get-intrinsic: 1.3.0
callsites@3.1.0: {}
caniuse-lite@1.0.30001769: {}
chalk@5.6.2: {}
chardet@2.1.1: {}
chokidar@3.6.0:
dependencies:
anymatch: 3.1.3
braces: 3.0.3
glob-parent: 5.1.2
is-binary-path: 2.1.0
is-glob: 4.0.3
normalize-path: 3.0.0
readdirp: 3.6.0
optionalDependencies:
fsevents: 2.3.3
chokidar@4.0.3:
dependencies:
readdirp: 4.1.2
chownr@3.0.0: {}
chrome-trace-event@1.0.4: {}
cli-cursor@5.0.0:
dependencies:
restore-cursor: 5.1.0
cli-spinners@3.4.0: {}
cli-truncate@5.1.1:
dependencies:
slice-ansi: 7.1.2
string-width: 8.1.1
cli-width@4.1.0: {}
cliui@9.0.1:
dependencies:
string-width: 7.2.0
strip-ansi: 7.1.2
wrap-ansi: 9.0.2
clone-deep@4.0.1:
dependencies:
is-plain-object: 2.0.4
kind-of: 6.0.3
shallow-clone: 3.0.1
color-convert@2.0.1:
dependencies:
color-name: 1.1.4
color-name@1.1.4: {}
colorette@2.0.20: {}
commander@2.20.3: {}
compressible@2.0.18:
dependencies:
mime-db: 1.54.0
compression@1.8.1:
dependencies:
bytes: 3.1.2
compressible: 2.0.18
debug: 2.6.9
negotiator: 0.6.4
on-headers: 1.1.0
safe-buffer: 5.2.1
vary: 1.1.2
transitivePeerDependencies:
- supports-color
connect-history-api-fallback@2.0.0: {}
content-disposition@0.5.4:
dependencies:
safe-buffer: 5.2.1
content-disposition@1.0.1: {}
content-type@1.0.5: {}
convert-source-map@1.9.0: {}
convert-source-map@2.0.0: {}
cookie-signature@1.0.7: {}
cookie-signature@1.2.2: {}
cookie@0.7.2: {}
copy-anything@2.0.6:
dependencies:
is-what: 3.14.1
copy-webpack-plugin@13.0.1(webpack@5.104.1(esbuild@0.27.2)):
dependencies:
glob-parent: 6.0.2
normalize-path: 3.0.0
schema-utils: 4.3.3
serialize-javascript: 6.0.2
tinyglobby: 0.2.15
webpack: 5.104.1(esbuild@0.27.2)
core-js-compat@3.48.0:
dependencies:
browserslist: 4.28.1
core-util-is@1.0.3: {}
cors@2.8.6:
dependencies:
object-assign: 4.1.1
vary: 1.1.2
cosmiconfig@9.0.0(typescript@5.9.3):
dependencies:
env-paths: 2.2.1
import-fresh: 3.3.1
js-yaml: 4.1.1
parse-json: 5.2.0
optionalDependencies:
typescript: 5.9.3
create-require@1.1.1: {}
cross-spawn@7.0.6:
dependencies:
path-key: 3.1.1
shebang-command: 2.0.0
which: 2.0.2
css-loader@7.1.2(webpack@5.104.1(esbuild@0.27.2)):
dependencies:
icss-utils: 5.1.0(postcss@8.5.6)
postcss: 8.5.6
postcss-modules-extract-imports: 3.1.0(postcss@8.5.6)
postcss-modules-local-by-default: 4.2.0(postcss@8.5.6)
postcss-modules-scope: 3.2.1(postcss@8.5.6)
postcss-modules-values: 4.0.0(postcss@8.5.6)
postcss-value-parser: 4.2.0
semver: 7.7.3
optionalDependencies:
webpack: 5.104.1(esbuild@0.27.2)
css-select@6.0.0:
dependencies:
boolbase: 1.0.0
css-what: 7.0.0
domhandler: 5.0.3
domutils: 3.2.2
nth-check: 2.1.1
css-what@7.0.0: {}
cssesc@3.0.0: {}
debug@2.6.9:
dependencies:
ms: 2.0.0
debug@4.4.3:
dependencies:
ms: 2.1.3
default-browser-id@5.0.1: {}
default-browser@5.5.0:
dependencies:
bundle-name: 4.1.0
default-browser-id: 5.0.1
define-lazy-prop@3.0.0: {}
depd@1.1.2: {}
depd@2.0.0: {}
destroy@1.2.0: {}
detect-libc@2.1.2:
optional: true
detect-node@2.1.0: {}
diff@4.0.4: {}
dns-packet@5.6.1:
dependencies:
'@leichtgewicht/ip-codec': 2.0.5
dom-serializer@2.0.0:
dependencies:
domelementtype: 2.3.0
domhandler: 5.0.3
entities: 4.5.0
domelementtype@2.3.0: {}
domhandler@5.0.3:
dependencies:
domelementtype: 2.3.0
domutils@3.2.2:
dependencies:
dom-serializer: 2.0.0
domelementtype: 2.3.0
domhandler: 5.0.3
dunder-proto@1.0.1:
dependencies:
call-bind-apply-helpers: 1.0.2
es-errors: 1.3.0
gopd: 1.2.0
ee-first@1.1.1: {}
electron-to-chromium@1.5.286: {}
emoji-regex@10.6.0: {}
emoji-regex@8.0.0: {}
emojis-list@3.0.0: {}
encodeurl@2.0.0: {}
encoding@0.1.13:
dependencies:
iconv-lite: 0.6.3
optional: true
enhanced-resolve@5.19.0:
dependencies:
graceful-fs: 4.2.11
tapable: 2.3.0
entities@4.5.0: {}
entities@6.0.1: {}
entities@7.0.1: {}
env-paths@2.2.1: {}
environment@1.1.0: {}
err-code@2.0.3: {}
errno@0.1.8:
dependencies:
prr: 1.0.1
optional: true
error-ex@1.3.4:
dependencies:
is-arrayish: 0.2.1
es-define-property@1.0.1: {}
es-errors@1.3.0: {}
es-module-lexer@2.0.0: {}
es-object-atoms@1.1.1:
dependencies:
es-errors: 1.3.0
esbuild-wasm@0.27.2: {}
esbuild@0.27.2:
optionalDependencies:
'@esbuild/aix-ppc64': 0.27.2
'@esbuild/android-arm': 0.27.2
'@esbuild/android-arm64': 0.27.2
'@esbuild/android-x64': 0.27.2
'@esbuild/darwin-arm64': 0.27.2
'@esbuild/darwin-x64': 0.27.2
'@esbuild/freebsd-arm64': 0.27.2
'@esbuild/freebsd-x64': 0.27.2
'@esbuild/linux-arm': 0.27.2
'@esbuild/linux-arm64': 0.27.2
'@esbuild/linux-ia32': 0.27.2
'@esbuild/linux-loong64': 0.27.2
'@esbuild/linux-mips64el': 0.27.2
'@esbuild/linux-ppc64': 0.27.2
'@esbuild/linux-riscv64': 0.27.2
'@esbuild/linux-s390x': 0.27.2
'@esbuild/linux-x64': 0.27.2
'@esbuild/netbsd-arm64': 0.27.2
'@esbuild/netbsd-x64': 0.27.2
'@esbuild/openbsd-arm64': 0.27.2
'@esbuild/openbsd-x64': 0.27.2
'@esbuild/openharmony-arm64': 0.27.2
'@esbuild/sunos-x64': 0.27.2
'@esbuild/win32-arm64': 0.27.2
'@esbuild/win32-ia32': 0.27.2
'@esbuild/win32-x64': 0.27.2
escalade@3.2.0: {}
escape-html@1.0.3: {}
eslint-scope@5.1.1:
dependencies:
esrecurse: 4.3.0
estraverse: 4.3.0
esrecurse@4.3.0:
dependencies:
estraverse: 5.3.0
estraverse@4.3.0: {}
estraverse@5.3.0: {}
esutils@2.0.3: {}
etag@1.8.1: {}
eventemitter3@4.0.7: {}
eventemitter3@5.0.4: {}
events@3.3.0: {}
eventsource-parser@3.0.6: {}
eventsource@3.0.7:
dependencies:
eventsource-parser: 3.0.6
exponential-backoff@3.1.3: {}
express-rate-limit@7.5.1(express@5.2.1):
dependencies:
express: 5.2.1
express@4.22.1:
dependencies:
accepts: 1.3.8
array-flatten: 1.1.1
body-parser: 1.20.4
content-disposition: 0.5.4
content-type: 1.0.5
cookie: 0.7.2
cookie-signature: 1.0.7
debug: 2.6.9
depd: 2.0.0
encodeurl: 2.0.0
escape-html: 1.0.3
etag: 1.8.1
finalhandler: 1.3.2
fresh: 0.5.2
http-errors: 2.0.1
merge-descriptors: 1.0.3
methods: 1.1.2
on-finished: 2.4.1
parseurl: 1.3.3
path-to-regexp: 0.1.12
proxy-addr: 2.0.7
qs: 6.14.1
range-parser: 1.2.1
safe-buffer: 5.2.1
send: 0.19.2
serve-static: 1.16.3
setprototypeof: 1.2.0
statuses: 2.0.2
type-is: 1.6.18
utils-merge: 1.0.1
vary: 1.1.2
transitivePeerDependencies:
- supports-color
express@5.2.1:
dependencies:
accepts: 2.0.0
body-parser: 2.2.2
content-disposition: 1.0.1
content-type: 1.0.5
cookie: 0.7.2
cookie-signature: 1.2.2
debug: 4.4.3
depd: 2.0.0
encodeurl: 2.0.0
escape-html: 1.0.3
etag: 1.8.1
finalhandler: 2.1.1
fresh: 2.0.0
http-errors: 2.0.1
merge-descriptors: 2.0.0
mime-types: 3.0.2
on-finished: 2.4.1
once: 1.4.0
parseurl: 1.3.3
proxy-addr: 2.0.7
qs: 6.14.1
range-parser: 1.2.1
router: 2.2.0
send: 1.2.1
serve-static: 2.2.1
statuses: 2.0.2
type-is: 2.0.1
vary: 1.1.2
transitivePeerDependencies:
- supports-color
fast-deep-equal@3.1.3: {}
fast-uri@3.1.0: {}
faye-websocket@0.11.4:
dependencies:
websocket-driver: 0.7.4
fdir@6.5.0(picomatch@4.0.3):
optionalDependencies:
picomatch: 4.0.3
fill-range@7.1.1:
dependencies:
to-regex-range: 5.0.1
finalhandler@1.3.2:
dependencies:
debug: 2.6.9
encodeurl: 2.0.0
escape-html: 1.0.3
on-finished: 2.4.1
parseurl: 1.3.3
statuses: 2.0.2
unpipe: 1.0.0
transitivePeerDependencies:
- supports-color
finalhandler@2.1.1:
dependencies:
debug: 4.4.3
encodeurl: 2.0.0
escape-html: 1.0.3
on-finished: 2.4.1
parseurl: 1.3.3
statuses: 2.0.2
transitivePeerDependencies:
- supports-color
find-up@5.0.0:
dependencies:
locate-path: 6.0.0
path-exists: 4.0.0
flat@5.0.2: {}
follow-redirects@1.15.11(debug@4.4.3):
optionalDependencies:
debug: 4.4.3
forwarded@0.2.0: {}
fraction.js@5.3.4: {}
fresh@0.5.2: {}
fresh@2.0.0: {}
fs-minipass@3.0.3:
dependencies:
minipass: 7.1.2
fsevents@2.3.3:
optional: true
function-bind@1.1.2: {}
gensync@1.0.0-beta.2: {}
get-caller-file@2.0.5: {}
get-east-asian-width@1.4.0: {}
get-intrinsic@1.3.0:
dependencies:
call-bind-apply-helpers: 1.0.2
es-define-property: 1.0.1
es-errors: 1.3.0
es-object-atoms: 1.1.1
function-bind: 1.1.2
get-proto: 1.0.1
gopd: 1.2.0
has-symbols: 1.1.0
hasown: 2.0.2
math-intrinsics: 1.1.0
get-proto@1.0.1:
dependencies:
dunder-proto: 1.0.1
es-object-atoms: 1.1.1
glob-parent@5.1.2:
dependencies:
is-glob: 4.0.3
glob-parent@6.0.2:
dependencies:
is-glob: 4.0.3
glob-to-regex.js@1.2.0(tslib@2.8.1):
dependencies:
tslib: 2.8.1
glob-to-regexp@0.4.1: {}
glob@13.0.2:
dependencies:
minimatch: 10.1.2
minipass: 7.1.2
path-scurry: 2.0.1
gopd@1.2.0: {}
graceful-fs@4.2.11: {}
handle-thing@2.0.1: {}
has-flag@4.0.0: {}
has-symbols@1.1.0: {}
hasown@2.0.2:
dependencies:
function-bind: 1.1.2
hono@4.11.9: {}
hosted-git-info@9.0.2:
dependencies:
lru-cache: 11.2.6
hpack.js@2.1.6:
dependencies:
inherits: 2.0.4
obuf: 1.1.2
readable-stream: 2.3.8
wbuf: 1.7.3
htmlparser2@10.1.0:
dependencies:
domelementtype: 2.3.0
domhandler: 5.0.3
domutils: 3.2.2
entities: 7.0.1
http-cache-semantics@4.2.0: {}
http-deceiver@1.2.7: {}
http-errors@1.8.1:
dependencies:
depd: 1.1.2
inherits: 2.0.4
setprototypeof: 1.2.0
statuses: 1.5.0
toidentifier: 1.0.1
http-errors@2.0.1:
dependencies:
depd: 2.0.0
inherits: 2.0.4
setprototypeof: 1.2.0
statuses: 2.0.2
toidentifier: 1.0.1
http-parser-js@0.5.10: {}
http-proxy-agent@7.0.2:
dependencies:
agent-base: 7.1.4
debug: 4.4.3
transitivePeerDependencies:
- supports-color
http-proxy-middleware@2.0.9(@types/express@4.17.25):
dependencies:
'@types/http-proxy': 1.17.17
http-proxy: 1.18.1(debug@4.4.3)
is-glob: 4.0.3
is-plain-obj: 3.0.0
micromatch: 4.0.8
optionalDependencies:
'@types/express': 4.17.25
transitivePeerDependencies:
- debug
http-proxy-middleware@3.0.5:
dependencies:
'@types/http-proxy': 1.17.17
debug: 4.4.3
http-proxy: 1.18.1(debug@4.4.3)
is-glob: 4.0.3
is-plain-object: 5.0.0
micromatch: 4.0.8
transitivePeerDependencies:
- supports-color
http-proxy@1.18.1(debug@4.4.3):
dependencies:
eventemitter3: 4.0.7
follow-redirects: 1.15.11(debug@4.4.3)
requires-port: 1.0.0
transitivePeerDependencies:
- debug
https-proxy-agent@7.0.6:
dependencies:
agent-base: 7.1.4
debug: 4.4.3
transitivePeerDependencies:
- supports-color
hyperdyperid@1.2.0: {}
iconv-lite@0.4.24:
dependencies:
safer-buffer: 2.1.2
iconv-lite@0.6.3:
dependencies:
safer-buffer: 2.1.2
iconv-lite@0.7.2:
dependencies:
safer-buffer: 2.1.2
icss-utils@5.1.0(postcss@8.5.6):
dependencies:
postcss: 8.5.6
ignore-walk@8.0.0:
dependencies:
minimatch: 10.1.2
image-size@0.5.5:
optional: true
immutable@5.1.4: {}
import-fresh@3.3.1:
dependencies:
parent-module: 1.0.1
resolve-from: 4.0.0
imurmurhash@0.1.4: {}
inherits@2.0.4: {}
ini@6.0.0: {}
ip-address@10.1.0: {}
ipaddr.js@1.9.1: {}
ipaddr.js@2.3.0: {}
is-arrayish@0.2.1: {}
is-binary-path@2.1.0:
dependencies:
binary-extensions: 2.3.0
is-core-module@2.16.1:
dependencies:
hasown: 2.0.2
is-docker@3.0.0: {}
is-extglob@2.1.1: {}
is-fullwidth-code-point@3.0.0: {}
is-fullwidth-code-point@5.1.0:
dependencies:
get-east-asian-width: 1.4.0
is-glob@4.0.3:
dependencies:
is-extglob: 2.1.1
is-in-ssh@1.0.0: {}
is-inside-container@1.0.0:
dependencies:
is-docker: 3.0.0
is-interactive@2.0.0: {}
is-network-error@1.3.0: {}
is-number@7.0.0: {}
is-plain-obj@3.0.0: {}
is-plain-object@2.0.4:
dependencies:
isobject: 3.0.1
is-plain-object@5.0.0: {}
is-promise@4.0.0: {}
is-unicode-supported@2.1.0: {}
is-what@3.14.1: {}
is-wsl@3.1.0:
dependencies:
is-inside-container: 1.0.0
isarray@1.0.0: {}
isexe@2.0.0: {}
isexe@4.0.0: {}
isobject@3.0.1: {}
istanbul-lib-coverage@3.2.2: {}
istanbul-lib-instrument@6.0.3:
dependencies:
'@babel/core': 7.28.5
'@babel/parser': 7.29.0
'@istanbuljs/schema': 0.1.3
istanbul-lib-coverage: 3.2.2
semver: 7.7.3
transitivePeerDependencies:
- supports-color
jest-worker@27.5.1:
dependencies:
'@types/node': 20.19.33
merge-stream: 2.0.0
supports-color: 8.1.1
jiti@2.6.1: {}
jose@6.1.3: {}
js-tokens@4.0.0: {}
js-yaml@4.1.1:
dependencies:
argparse: 2.0.1
jsesc@3.1.0: {}
json-parse-even-better-errors@2.3.1: {}
json-parse-even-better-errors@5.0.0: {}
json-schema-traverse@1.0.0: {}
json-schema-typed@8.0.2: {}
json5@2.2.3: {}
jsonc-parser@3.3.1: {}
jsonparse@1.3.1: {}
karma-source-map-support@1.4.0:
dependencies:
source-map-support: 0.5.21
kind-of@6.0.3: {}
launch-editor@2.12.0:
dependencies:
picocolors: 1.1.1
shell-quote: 1.8.3
less-loader@12.3.0(less@4.4.2)(webpack@5.104.1(esbuild@0.27.2)):
dependencies:
less: 4.4.2
optionalDependencies:
webpack: 5.104.1(esbuild@0.27.2)
less@4.4.2:
dependencies:
copy-anything: 2.0.6
parse-node-version: 1.0.1
tslib: 2.8.1
optionalDependencies:
errno: 0.1.8
graceful-fs: 4.2.11
image-size: 0.5.5
make-dir: 2.1.0
mime: 1.6.0
needle: 3.3.1
source-map: 0.6.1
license-webpack-plugin@4.0.2(webpack@5.104.1(esbuild@0.27.2)):
dependencies:
webpack-sources: 3.3.3
optionalDependencies:
webpack: 5.104.1(esbuild@0.27.2)
lines-and-columns@1.2.4: {}
listr2@9.0.5:
dependencies:
cli-truncate: 5.1.1
colorette: 2.0.20
eventemitter3: 5.0.4
log-update: 6.1.0
rfdc: 1.4.1
wrap-ansi: 9.0.2
lmdb@3.4.4:
dependencies:
msgpackr: 1.11.8
node-addon-api: 6.1.0
node-gyp-build-optional-packages: 5.2.2
ordered-binary: 1.6.1
weak-lru-cache: 1.2.2
optionalDependencies:
'@lmdb/lmdb-darwin-arm64': 3.4.4
'@lmdb/lmdb-darwin-x64': 3.4.4
'@lmdb/lmdb-linux-arm': 3.4.4
'@lmdb/lmdb-linux-arm64': 3.4.4
'@lmdb/lmdb-linux-x64': 3.4.4
'@lmdb/lmdb-win32-arm64': 3.4.4
'@lmdb/lmdb-win32-x64': 3.4.4
optional: true
loader-runner@4.3.1: {}
loader-utils@2.0.4:
dependencies:
big.js: 5.2.2
emojis-list: 3.0.0
json5: 2.2.3
loader-utils@3.3.1: {}
locate-path@6.0.0:
dependencies:
p-locate: 5.0.0
lodash.debounce@4.0.8: {}
log-symbols@7.0.1:
dependencies:
is-unicode-supported: 2.1.0
yoctocolors: 2.1.2
log-update@6.1.0:
dependencies:
ansi-escapes: 7.3.0
cli-cursor: 5.0.0
slice-ansi: 7.1.2
strip-ansi: 7.1.2
wrap-ansi: 9.0.2
lru-cache@11.2.6: {}
lru-cache@5.1.1:
dependencies:
yallist: 3.1.1
magic-string@0.30.21:
dependencies:
'@jridgewell/sourcemap-codec': 1.5.5
make-dir@2.1.0:
dependencies:
pify: 4.0.1
semver: 5.7.2
optional: true
make-error@1.3.6: {}
make-fetch-happen@15.0.3:
dependencies:
'@npmcli/agent': 4.0.0
cacache: 20.0.3
http-cache-semantics: 4.2.0
minipass: 7.1.2
minipass-fetch: 5.0.1
minipass-flush: 1.0.5
minipass-pipeline: 1.2.4
negotiator: 1.0.0
proc-log: 6.1.0
promise-retry: 2.0.1
ssri: 13.0.1
transitivePeerDependencies:
- supports-color
math-intrinsics@1.1.0: {}
media-typer@0.3.0: {}
media-typer@1.1.0: {}
memfs@4.56.10(tslib@2.8.1):
dependencies:
'@jsonjoy.com/fs-core': 4.56.10(tslib@2.8.1)
'@jsonjoy.com/fs-fsa': 4.56.10(tslib@2.8.1)
'@jsonjoy.com/fs-node': 4.56.10(tslib@2.8.1)
'@jsonjoy.com/fs-node-builtins': 4.56.10(tslib@2.8.1)
'@jsonjoy.com/fs-node-to-fsa': 4.56.10(tslib@2.8.1)
'@jsonjoy.com/fs-node-utils': 4.56.10(tslib@2.8.1)
'@jsonjoy.com/fs-print': 4.56.10(tslib@2.8.1)
'@jsonjoy.com/fs-snapshot': 4.56.10(tslib@2.8.1)
'@jsonjoy.com/json-pack': 1.21.0(tslib@2.8.1)
'@jsonjoy.com/util': 1.9.0(tslib@2.8.1)
glob-to-regex.js: 1.2.0(tslib@2.8.1)
thingies: 2.5.0(tslib@2.8.1)
tree-dump: 1.1.0(tslib@2.8.1)
tslib: 2.8.1
merge-descriptors@1.0.3: {}
merge-descriptors@2.0.0: {}
merge-stream@2.0.0: {}
methods@1.1.2: {}
micromatch@4.0.8:
dependencies:
braces: 3.0.3
picomatch: 2.3.1
mime-db@1.52.0: {}
mime-db@1.54.0: {}
mime-types@2.1.35:
dependencies:
mime-db: 1.52.0
mime-types@3.0.2:
dependencies:
mime-db: 1.54.0
mime@1.6.0: {}
mimic-function@5.0.1: {}
mini-css-extract-plugin@2.9.4(webpack@5.104.1(esbuild@0.27.2)):
dependencies:
schema-utils: 4.3.3
tapable: 2.3.0
webpack: 5.104.1(esbuild@0.27.2)
minimalistic-assert@1.0.1: {}
minimatch@10.1.2:
dependencies:
'@isaacs/brace-expansion': 5.0.1
minipass-collect@2.0.1:
dependencies:
minipass: 7.1.2
minipass-fetch@5.0.1:
dependencies:
minipass: 7.1.2
minipass-sized: 2.0.0
minizlib: 3.1.0
optionalDependencies:
encoding: 0.1.13
minipass-flush@1.0.5:
dependencies:
minipass: 3.3.6
minipass-pipeline@1.2.4:
dependencies:
minipass: 3.3.6
minipass-sized@2.0.0:
dependencies:
minipass: 7.1.2
minipass@3.3.6:
dependencies:
yallist: 4.0.0
minipass@7.1.2: {}
minizlib@3.1.0:
dependencies:
minipass: 7.1.2
mrmime@2.0.1: {}
ms@2.0.0: {}
ms@2.1.3: {}
msgpackr-extract@3.0.3:
dependencies:
node-gyp-build-optional-packages: 5.2.2
optionalDependencies:
'@msgpackr-extract/msgpackr-extract-darwin-arm64': 3.0.3
'@msgpackr-extract/msgpackr-extract-darwin-x64': 3.0.3
'@msgpackr-extract/msgpackr-extract-linux-arm': 3.0.3
'@msgpackr-extract/msgpackr-extract-linux-arm64': 3.0.3
'@msgpackr-extract/msgpackr-extract-linux-x64': 3.0.3
'@msgpackr-extract/msgpackr-extract-win32-x64': 3.0.3
optional: true
msgpackr@1.11.8:
optionalDependencies:
msgpackr-extract: 3.0.3
optional: true
multicast-dns@7.2.5:
dependencies:
dns-packet: 5.6.1
thunky: 1.1.0
mute-stream@2.0.0: {}
nanoid@3.3.11: {}
needle@3.3.1:
dependencies:
iconv-lite: 0.6.3
sax: 1.4.4
optional: true
negotiator@0.6.3: {}
negotiator@0.6.4: {}
negotiator@1.0.0: {}
neo-async@2.6.2: {}
node-addon-api@6.1.0:
optional: true
node-addon-api@7.1.1:
optional: true
node-forge@1.3.3: {}
node-gyp-build-optional-packages@5.2.2:
dependencies:
detect-libc: 2.1.2
optional: true
node-gyp@12.2.0:
dependencies:
env-paths: 2.2.1
exponential-backoff: 3.1.3
graceful-fs: 4.2.11
make-fetch-happen: 15.0.3
nopt: 9.0.0
proc-log: 6.1.0
semver: 7.7.3
tar: 7.5.7
tinyglobby: 0.2.15
which: 6.0.1
transitivePeerDependencies:
- supports-color
node-releases@2.0.27: {}
nopt@9.0.0:
dependencies:
abbrev: 4.0.0
normalize-path@3.0.0: {}
npm-bundled@5.0.0:
dependencies:
npm-normalize-package-bin: 5.0.0
npm-install-checks@8.0.0:
dependencies:
semver: 7.7.3
npm-normalize-package-bin@5.0.0: {}
npm-package-arg@13.0.2:
dependencies:
hosted-git-info: 9.0.2
proc-log: 6.1.0
semver: 7.7.3
validate-npm-package-name: 7.0.2
npm-packlist@10.0.3:
dependencies:
ignore-walk: 8.0.0
proc-log: 6.1.0
npm-pick-manifest@11.0.3:
dependencies:
npm-install-checks: 8.0.0
npm-normalize-package-bin: 5.0.0
npm-package-arg: 13.0.2
semver: 7.7.3
npm-registry-fetch@19.1.1:
dependencies:
'@npmcli/redact': 4.0.0
jsonparse: 1.3.1
make-fetch-happen: 15.0.3
minipass: 7.1.2
minipass-fetch: 5.0.1
minizlib: 3.1.0
npm-package-arg: 13.0.2
proc-log: 6.1.0
transitivePeerDependencies:
- supports-color
nth-check@2.1.1:
dependencies:
boolbase: 1.0.0
object-assign@4.1.1: {}
object-inspect@1.13.4: {}
obuf@1.1.2: {}
on-finished@2.4.1:
dependencies:
ee-first: 1.1.1
on-headers@1.1.0: {}
once@1.4.0:
dependencies:
wrappy: 1.0.2
onetime@7.0.0:
dependencies:
mimic-function: 5.0.1
open@10.2.0:
dependencies:
default-browser: 5.5.0
define-lazy-prop: 3.0.0
is-inside-container: 1.0.0
wsl-utils: 0.1.0
open@11.0.0:
dependencies:
default-browser: 5.5.0
define-lazy-prop: 3.0.0
is-in-ssh: 1.0.0
is-inside-container: 1.0.0
powershell-utils: 0.1.0
wsl-utils: 0.3.1
ora@9.0.0:
dependencies:
chalk: 5.6.2
cli-cursor: 5.0.0
cli-spinners: 3.4.0
is-interactive: 2.0.0
is-unicode-supported: 2.1.0
log-symbols: 7.0.1
stdin-discarder: 0.2.2
string-width: 8.1.1
strip-ansi: 7.1.2
ordered-binary@1.6.1:
optional: true
p-limit@3.1.0:
dependencies:
yocto-queue: 0.1.0
p-locate@5.0.0:
dependencies:
p-limit: 3.1.0
p-map@7.0.4: {}
p-retry@6.2.1:
dependencies:
'@types/retry': 0.12.2
is-network-error: 1.3.0
retry: 0.13.1
pacote@21.0.4:
dependencies:
'@npmcli/git': 7.0.1
'@npmcli/installed-package-contents': 4.0.0
'@npmcli/package-json': 7.0.4
'@npmcli/promise-spawn': 9.0.1
'@npmcli/run-script': 10.0.3
cacache: 20.0.3
fs-minipass: 3.0.3
minipass: 7.1.2
npm-package-arg: 13.0.2
npm-packlist: 10.0.3
npm-pick-manifest: 11.0.3
npm-registry-fetch: 19.1.1
proc-log: 6.1.0
promise-retry: 2.0.1
sigstore: 4.1.0
ssri: 13.0.1
tar: 7.5.7
transitivePeerDependencies:
- supports-color
parent-module@1.0.1:
dependencies:
callsites: 3.1.0
parse-json@5.2.0:
dependencies:
'@babel/code-frame': 7.29.0
error-ex: 1.3.4
json-parse-even-better-errors: 2.3.1
lines-and-columns: 1.2.4
parse-node-version@1.0.1: {}
parse5-html-rewriting-stream@8.0.0:
dependencies:
entities: 6.0.1
parse5: 8.0.0
parse5-sax-parser: 8.0.0
parse5-sax-parser@8.0.0:
dependencies:
parse5: 8.0.0
parse5@8.0.0:
dependencies:
entities: 6.0.1
parseurl@1.3.3: {}
path-exists@4.0.0: {}
path-key@3.1.1: {}
path-parse@1.0.7: {}
path-scurry@2.0.1:
dependencies:
lru-cache: 11.2.6
minipass: 7.1.2
path-to-regexp@0.1.12: {}
path-to-regexp@8.3.0: {}
picocolors@1.1.1: {}
picomatch@2.3.1: {}
picomatch@4.0.3: {}
pify@4.0.1:
optional: true
piscina@5.1.4:
optionalDependencies:
'@napi-rs/nice': 1.1.1
pkce-challenge@5.0.1: {}
postcss-loader@8.2.0(postcss@8.5.6)(typescript@5.9.3)(webpack@5.104.1(esbuild@0.27.2)):
dependencies:
cosmiconfig: 9.0.0(typescript@5.9.3)
jiti: 2.6.1
postcss: 8.5.6
semver: 7.7.3
optionalDependencies:
webpack: 5.104.1(esbuild@0.27.2)
transitivePeerDependencies:
- typescript
postcss-media-query-parser@0.2.3: {}
postcss-modules-extract-imports@3.1.0(postcss@8.5.6):
dependencies:
postcss: 8.5.6
postcss-modules-local-by-default@4.2.0(postcss@8.5.6):
dependencies:
icss-utils: 5.1.0(postcss@8.5.6)
postcss: 8.5.6
postcss-selector-parser: 7.1.1
postcss-value-parser: 4.2.0
postcss-modules-scope@3.2.1(postcss@8.5.6):
dependencies:
postcss: 8.5.6
postcss-selector-parser: 7.1.1
postcss-modules-values@4.0.0(postcss@8.5.6):
dependencies:
icss-utils: 5.1.0(postcss@8.5.6)
postcss: 8.5.6
postcss-selector-parser@7.1.1:
dependencies:
cssesc: 3.0.0
util-deprecate: 1.0.2
postcss-value-parser@4.2.0: {}
postcss@8.5.6:
dependencies:
nanoid: 3.3.11
picocolors: 1.1.1
source-map-js: 1.2.1
powershell-utils@0.1.0: {}
proc-log@6.1.0: {}
process-nextick-args@2.0.1: {}
promise-retry@2.0.1:
dependencies:
err-code: 2.0.3
retry: 0.12.0
proxy-addr@2.0.7:
dependencies:
forwarded: 0.2.0
ipaddr.js: 1.9.1
prr@1.0.1:
optional: true
qs@6.14.1:
dependencies:
side-channel: 1.1.0
randombytes@2.1.0:
dependencies:
safe-buffer: 5.2.1
range-parser@1.2.1: {}
raw-body@2.5.3:
dependencies:
bytes: 3.1.2
http-errors: 2.0.1
iconv-lite: 0.4.24
unpipe: 1.0.0
raw-body@3.0.2:
dependencies:
bytes: 3.1.2
http-errors: 2.0.1
iconv-lite: 0.7.2
unpipe: 1.0.0
readable-stream@2.3.8:
dependencies:
core-util-is: 1.0.3
inherits: 2.0.4
isarray: 1.0.0
process-nextick-args: 2.0.1
safe-buffer: 5.1.2
string_decoder: 1.1.1
util-deprecate: 1.0.2
readable-stream@3.6.2:
dependencies:
inherits: 2.0.4
string_decoder: 1.3.0
util-deprecate: 1.0.2
readdirp@3.6.0:
dependencies:
picomatch: 2.3.1
readdirp@4.1.2: {}
regenerate-unicode-properties@10.2.2:
dependencies:
regenerate: 1.4.2
regenerate@1.4.2: {}
regex-parser@2.3.1: {}
regexpu-core@6.4.0:
dependencies:
regenerate: 1.4.2
regenerate-unicode-properties: 10.2.2
regjsgen: 0.8.0
regjsparser: 0.13.0
unicode-match-property-ecmascript: 2.0.0
unicode-match-property-value-ecmascript: 2.2.1
regjsgen@0.8.0: {}
regjsparser@0.13.0:
dependencies:
jsesc: 3.1.0
require-from-string@2.0.2: {}
requires-port@1.0.0: {}
resolve-from@4.0.0: {}
resolve-url-loader@5.0.0:
dependencies:
adjust-sourcemap-loader: 4.0.0
convert-source-map: 1.9.0
loader-utils: 2.0.4
postcss: 8.5.6
source-map: 0.6.1
resolve@1.22.11:
dependencies:
is-core-module: 2.16.1
path-parse: 1.0.7
supports-preserve-symlinks-flag: 1.0.0
restore-cursor@5.1.0:
dependencies:
onetime: 7.0.0
signal-exit: 4.1.0
retry@0.12.0: {}
retry@0.13.1: {}
rfdc@1.4.1: {}
rolldown@1.0.0-beta.58:
dependencies:
'@oxc-project/types': 0.106.0
'@rolldown/pluginutils': 1.0.0-beta.58
optionalDependencies:
'@rolldown/binding-android-arm64': 1.0.0-beta.58
'@rolldown/binding-darwin-arm64': 1.0.0-beta.58
'@rolldown/binding-darwin-x64': 1.0.0-beta.58
'@rolldown/binding-freebsd-x64': 1.0.0-beta.58
'@rolldown/binding-linux-arm-gnueabihf': 1.0.0-beta.58
'@rolldown/binding-linux-arm64-gnu': 1.0.0-beta.58
'@rolldown/binding-linux-arm64-musl': 1.0.0-beta.58
'@rolldown/binding-linux-x64-gnu': 1.0.0-beta.58
'@rolldown/binding-linux-x64-musl': 1.0.0-beta.58
'@rolldown/binding-openharmony-arm64': 1.0.0-beta.58
'@rolldown/binding-wasm32-wasi': 1.0.0-beta.58
'@rolldown/binding-win32-arm64-msvc': 1.0.0-beta.58
'@rolldown/binding-win32-x64-msvc': 1.0.0-beta.58
rollup@4.57.1:
dependencies:
'@types/estree': 1.0.8
optionalDependencies:
'@rollup/rollup-android-arm-eabi': 4.57.1
'@rollup/rollup-android-arm64': 4.57.1
'@rollup/rollup-darwin-arm64': 4.57.1
'@rollup/rollup-darwin-x64': 4.57.1
'@rollup/rollup-freebsd-arm64': 4.57.1
'@rollup/rollup-freebsd-x64': 4.57.1
'@rollup/rollup-linux-arm-gnueabihf': 4.57.1
'@rollup/rollup-linux-arm-musleabihf': 4.57.1
'@rollup/rollup-linux-arm64-gnu': 4.57.1
'@rollup/rollup-linux-arm64-musl': 4.57.1
'@rollup/rollup-linux-loong64-gnu': 4.57.1
'@rollup/rollup-linux-loong64-musl': 4.57.1
'@rollup/rollup-linux-ppc64-gnu': 4.57.1
'@rollup/rollup-linux-ppc64-musl': 4.57.1
'@rollup/rollup-linux-riscv64-gnu': 4.57.1
'@rollup/rollup-linux-riscv64-musl': 4.57.1
'@rollup/rollup-linux-s390x-gnu': 4.57.1
'@rollup/rollup-linux-x64-gnu': 4.57.1
'@rollup/rollup-linux-x64-musl': 4.57.1
'@rollup/rollup-openbsd-x64': 4.57.1
'@rollup/rollup-openharmony-arm64': 4.57.1
'@rollup/rollup-win32-arm64-msvc': 4.57.1
'@rollup/rollup-win32-ia32-msvc': 4.57.1
'@rollup/rollup-win32-x64-gnu': 4.57.1
'@rollup/rollup-win32-x64-msvc': 4.57.1
fsevents: 2.3.3
router@2.2.0:
dependencies:
debug: 4.4.3
depd: 2.0.0
is-promise: 4.0.0
parseurl: 1.3.3
path-to-regexp: 8.3.0
transitivePeerDependencies:
- supports-color
run-applescript@7.1.0: {}
rxjs@7.8.2:
dependencies:
tslib: 2.8.1
safe-buffer@5.1.2: {}
safe-buffer@5.2.1: {}
safer-buffer@2.1.2: {}
sass-loader@16.0.6(sass@1.97.1)(webpack@5.104.1(esbuild@0.27.2)):
dependencies:
neo-async: 2.6.2
optionalDependencies:
sass: 1.97.1
webpack: 5.104.1(esbuild@0.27.2)
sass@1.97.1:
dependencies:
chokidar: 4.0.3
immutable: 5.1.4
source-map-js: 1.2.1
optionalDependencies:
'@parcel/watcher': 2.5.6
sax@1.4.4:
optional: true
schema-utils@4.3.3:
dependencies:
'@types/json-schema': 7.0.15
ajv: 8.17.1
ajv-formats: 2.1.1(ajv@8.17.1)
ajv-keywords: 5.1.0(ajv@8.17.1)
select-hose@2.0.0: {}
selfsigned@2.4.1:
dependencies:
'@types/node-forge': 1.3.14
node-forge: 1.3.3
semver@5.7.2:
optional: true
semver@6.3.1: {}
semver@7.7.3: {}
send@0.19.2:
dependencies:
debug: 2.6.9
depd: 2.0.0
destroy: 1.2.0
encodeurl: 2.0.0
escape-html: 1.0.3
etag: 1.8.1
fresh: 0.5.2
http-errors: 2.0.1
mime: 1.6.0
ms: 2.1.3
on-finished: 2.4.1
range-parser: 1.2.1
statuses: 2.0.2
transitivePeerDependencies:
- supports-color
send@1.2.1:
dependencies:
debug: 4.4.3
encodeurl: 2.0.0
escape-html: 1.0.3
etag: 1.8.1
fresh: 2.0.0
http-errors: 2.0.1
mime-types: 3.0.2
ms: 2.1.3
on-finished: 2.4.1
range-parser: 1.2.1
statuses: 2.0.2
transitivePeerDependencies:
- supports-color
serialize-javascript@6.0.2:
dependencies:
randombytes: 2.1.0
serve-index@1.9.2:
dependencies:
accepts: 1.3.8
batch: 0.6.1
debug: 2.6.9
escape-html: 1.0.3
http-errors: 1.8.1
mime-types: 2.1.35
parseurl: 1.3.3
transitivePeerDependencies:
- supports-color
serve-static@1.16.3:
dependencies:
encodeurl: 2.0.0
escape-html: 1.0.3
parseurl: 1.3.3
send: 0.19.2
transitivePeerDependencies:
- supports-color
serve-static@2.2.1:
dependencies:
encodeurl: 2.0.0
escape-html: 1.0.3
parseurl: 1.3.3
send: 1.2.1
transitivePeerDependencies:
- supports-color
setprototypeof@1.2.0: {}
shallow-clone@3.0.1:
dependencies:
kind-of: 6.0.3
shebang-command@2.0.0:
dependencies:
shebang-regex: 3.0.0
shebang-regex@3.0.0: {}
shell-quote@1.8.3: {}
side-channel-list@1.0.0:
dependencies:
es-errors: 1.3.0
object-inspect: 1.13.4
side-channel-map@1.0.1:
dependencies:
call-bound: 1.0.4
es-errors: 1.3.0
get-intrinsic: 1.3.0
object-inspect: 1.13.4
side-channel-weakmap@1.0.2:
dependencies:
call-bound: 1.0.4
es-errors: 1.3.0
get-intrinsic: 1.3.0
object-inspect: 1.13.4
side-channel-map: 1.0.1
side-channel@1.1.0:
dependencies:
es-errors: 1.3.0
object-inspect: 1.13.4
side-channel-list: 1.0.0
side-channel-map: 1.0.1
side-channel-weakmap: 1.0.2
signal-exit@4.1.0: {}
sigstore@4.1.0:
dependencies:
'@sigstore/bundle': 4.0.0
'@sigstore/core': 3.1.0
'@sigstore/protobuf-specs': 0.5.0
'@sigstore/sign': 4.1.0
'@sigstore/tuf': 4.0.1
'@sigstore/verify': 3.1.0
transitivePeerDependencies:
- supports-color
slice-ansi@7.1.2:
dependencies:
ansi-styles: 6.2.3
is-fullwidth-code-point: 5.1.0
smart-buffer@4.2.0: {}
sockjs@0.3.24:
dependencies:
faye-websocket: 0.11.4
uuid: 8.3.2
websocket-driver: 0.7.4
socks-proxy-agent@8.0.5:
dependencies:
agent-base: 7.1.4
debug: 4.4.3
socks: 2.8.7
transitivePeerDependencies:
- supports-color
socks@2.8.7:
dependencies:
ip-address: 10.1.0
smart-buffer: 4.2.0
source-map-js@1.2.1: {}
source-map-loader@5.0.0(webpack@5.104.1(esbuild@0.27.2)):
dependencies:
iconv-lite: 0.6.3
source-map-js: 1.2.1
webpack: 5.104.1(esbuild@0.27.2)
source-map-support@0.5.21:
dependencies:
buffer-from: 1.1.2
source-map: 0.6.1
source-map@0.6.1: {}
source-map@0.7.6: {}
spdx-correct@3.2.0:
dependencies:
spdx-expression-parse: 3.0.1
spdx-license-ids: 3.0.22
spdx-exceptions@2.5.0: {}
spdx-expression-parse@3.0.1:
dependencies:
spdx-exceptions: 2.5.0
spdx-license-ids: 3.0.22
spdx-license-ids@3.0.22: {}
spdy-transport@3.0.0:
dependencies:
debug: 4.4.3
detect-node: 2.1.0
hpack.js: 2.1.6
obuf: 1.1.2
readable-stream: 3.6.2
wbuf: 1.7.3
transitivePeerDependencies:
- supports-color
spdy@4.0.2:
dependencies:
debug: 4.4.3
handle-thing: 2.0.1
http-deceiver: 1.2.7
select-hose: 2.0.0
spdy-transport: 3.0.0
transitivePeerDependencies:
- supports-color
ssri@13.0.1:
dependencies:
minipass: 7.1.2
statuses@1.5.0: {}
statuses@2.0.2: {}
stdin-discarder@0.2.2: {}
string-width@4.2.3:
dependencies:
emoji-regex: 8.0.0
is-fullwidth-code-point: 3.0.0
strip-ansi: 6.0.1
string-width@7.2.0:
dependencies:
emoji-regex: 10.6.0
get-east-asian-width: 1.4.0
strip-ansi: 7.1.2
string-width@8.1.1:
dependencies:
get-east-asian-width: 1.4.0
strip-ansi: 7.1.2
string_decoder@1.1.1:
dependencies:
safe-buffer: 5.1.2
string_decoder@1.3.0:
dependencies:
safe-buffer: 5.2.1
strip-ansi@6.0.1:
dependencies:
ansi-regex: 5.0.1
strip-ansi@7.1.2:
dependencies:
ansi-regex: 6.2.2
supports-color@8.1.1:
dependencies:
has-flag: 4.0.0
supports-preserve-symlinks-flag@1.0.0: {}
tapable@2.3.0: {}
tar@7.5.7:
dependencies:
'@isaacs/fs-minipass': 4.0.1
chownr: 3.0.0
minipass: 7.1.2
minizlib: 3.1.0
yallist: 5.0.0
terser-webpack-plugin@5.3.16(esbuild@0.27.2)(webpack@5.104.1):
dependencies:
'@jridgewell/trace-mapping': 0.3.31
jest-worker: 27.5.1
schema-utils: 4.3.3
serialize-javascript: 6.0.2
terser: 5.44.1
webpack: 5.104.1(esbuild@0.27.2)
optionalDependencies:
esbuild: 0.27.2
terser@5.44.1:
dependencies:
'@jridgewell/source-map': 0.3.11
acorn: 8.15.0
commander: 2.20.3
source-map-support: 0.5.21
thingies@2.5.0(tslib@2.8.1):
dependencies:
tslib: 2.8.1
thunky@1.1.0: {}
tinyglobby@0.2.15:
dependencies:
fdir: 6.5.0(picomatch@4.0.3)
picomatch: 4.0.3
to-regex-range@5.0.1:
dependencies:
is-number: 7.0.0
toidentifier@1.0.1: {}
tree-dump@1.1.0(tslib@2.8.1):
dependencies:
tslib: 2.8.1
tree-kill@1.2.2: {}
ts-node@10.9.2(@types/node@20.19.33)(typescript@5.9.3):
dependencies:
'@cspotcode/source-map-support': 0.8.1
'@tsconfig/node10': 1.0.12
'@tsconfig/node12': 1.0.11
'@tsconfig/node14': 1.0.3
'@tsconfig/node16': 1.0.4
'@types/node': 20.19.33
acorn: 8.15.0
acorn-walk: 8.3.4
arg: 4.1.3
create-require: 1.1.1
diff: 4.0.4
make-error: 1.3.6
typescript: 5.9.3
v8-compile-cache-lib: 3.0.1
yn: 3.1.1
tslib@2.8.1: {}
tuf-js@4.1.0:
dependencies:
'@tufjs/models': 4.1.0
debug: 4.4.3
make-fetch-happen: 15.0.3
transitivePeerDependencies:
- supports-color
type-is@1.6.18:
dependencies:
media-typer: 0.3.0
mime-types: 2.1.35
type-is@2.0.1:
dependencies:
content-type: 1.0.5
media-typer: 1.1.0
mime-types: 3.0.2
typed-assert@1.0.9: {}
typescript@5.9.3: {}
undici-types@6.21.0: {}
undici@7.18.0: {}
unicode-canonical-property-names-ecmascript@2.0.1: {}
unicode-match-property-ecmascript@2.0.0:
dependencies:
unicode-canonical-property-names-ecmascript: 2.0.1
unicode-property-aliases-ecmascript: 2.2.0
unicode-match-property-value-ecmascript@2.2.1: {}
unicode-property-aliases-ecmascript@2.2.0: {}
unique-filename@5.0.0:
dependencies:
unique-slug: 6.0.0
unique-slug@6.0.0:
dependencies:
imurmurhash: 0.1.4
unpipe@1.0.0: {}
update-browserslist-db@1.2.3(browserslist@4.28.1):
dependencies:
browserslist: 4.28.1
escalade: 3.2.0
picocolors: 1.1.1
util-deprecate@1.0.2: {}
utils-merge@1.0.1: {}
uuid@8.3.2: {}
v8-compile-cache-lib@3.0.1: {}
validate-npm-package-license@3.0.4:
dependencies:
spdx-correct: 3.2.0
spdx-expression-parse: 3.0.1
validate-npm-package-name@7.0.2: {}
vary@1.1.2: {}
vite@7.3.0(@types/node@20.19.33)(jiti@2.6.1)(less@4.4.2)(sass@1.97.1)(terser@5.44.1):
dependencies:
esbuild: 0.27.2
fdir: 6.5.0(picomatch@4.0.3)
picomatch: 4.0.3
postcss: 8.5.6
rollup: 4.57.1
tinyglobby: 0.2.15
optionalDependencies:
'@types/node': 20.19.33
fsevents: 2.3.3
jiti: 2.6.1
less: 4.4.2
sass: 1.97.1
terser: 5.44.1
watchpack@2.5.0:
dependencies:
glob-to-regexp: 0.4.1
graceful-fs: 4.2.11
watchpack@2.5.1:
dependencies:
glob-to-regexp: 0.4.1
graceful-fs: 4.2.11
wbuf@1.7.3:
dependencies:
minimalistic-assert: 1.0.1
weak-lru-cache@1.2.2:
optional: true
webpack-dev-middleware@7.4.5(tslib@2.8.1)(webpack@5.104.1):
dependencies:
colorette: 2.0.20
memfs: 4.56.10(tslib@2.8.1)
mime-types: 3.0.2
on-finished: 2.4.1
range-parser: 1.2.1
schema-utils: 4.3.3
optionalDependencies:
webpack: 5.104.1(esbuild@0.27.2)
transitivePeerDependencies:
- tslib
webpack-dev-server@5.2.2(tslib@2.8.1)(webpack@5.104.1):
dependencies:
'@types/bonjour': 3.5.13
'@types/connect-history-api-fallback': 1.5.4
'@types/express': 4.17.25
'@types/express-serve-static-core': 4.19.8
'@types/serve-index': 1.9.4
'@types/serve-static': 1.15.10
'@types/sockjs': 0.3.36
'@types/ws': 8.18.1
ansi-html-community: 0.0.8
bonjour-service: 1.3.0
chokidar: 3.6.0
colorette: 2.0.20
compression: 1.8.1
connect-history-api-fallback: 2.0.0
express: 4.22.1
graceful-fs: 4.2.11
http-proxy-middleware: 2.0.9(@types/express@4.17.25)
ipaddr.js: 2.3.0
launch-editor: 2.12.0
open: 10.2.0
p-retry: 6.2.1
schema-utils: 4.3.3
selfsigned: 2.4.1
serve-index: 1.9.2
sockjs: 0.3.24
spdy: 4.0.2
webpack-dev-middleware: 7.4.5(tslib@2.8.1)(webpack@5.104.1)
ws: 8.19.0
optionalDependencies:
webpack: 5.104.1(esbuild@0.27.2)
transitivePeerDependencies:
- bufferutil
- debug
- supports-color
- tslib
- utf-8-validate
webpack-merge@6.0.1:
dependencies:
clone-deep: 4.0.1
flat: 5.0.2
wildcard: 2.0.1
webpack-sources@3.3.3: {}
webpack-subresource-integrity@5.1.0(webpack@5.104.1(esbuild@0.27.2)):
dependencies:
typed-assert: 1.0.9
webpack: 5.104.1(esbuild@0.27.2)
webpack@5.104.1(esbuild@0.27.2):
dependencies:
'@types/eslint-scope': 3.7.7
'@types/estree': 1.0.8
'@types/json-schema': 7.0.15
'@webassemblyjs/ast': 1.14.1
'@webassemblyjs/wasm-edit': 1.14.1
'@webassemblyjs/wasm-parser': 1.14.1
acorn: 8.15.0
acorn-import-phases: 1.0.4(acorn@8.15.0)
browserslist: 4.28.1
chrome-trace-event: 1.0.4
enhanced-resolve: 5.19.0
es-module-lexer: 2.0.0
eslint-scope: 5.1.1
events: 3.3.0
glob-to-regexp: 0.4.1
graceful-fs: 4.2.11
json-parse-even-better-errors: 2.3.1
loader-runner: 4.3.1
mime-types: 2.1.35
neo-async: 2.6.2
schema-utils: 4.3.3
tapable: 2.3.0
terser-webpack-plugin: 5.3.16(esbuild@0.27.2)(webpack@5.104.1)
watchpack: 2.5.1
webpack-sources: 3.3.3
transitivePeerDependencies:
- '@swc/core'
- esbuild
- uglify-js
websocket-driver@0.7.4:
dependencies:
http-parser-js: 0.5.10
safe-buffer: 5.2.1
websocket-extensions: 0.1.4
websocket-extensions@0.1.4: {}
which@2.0.2:
dependencies:
isexe: 2.0.0
which@6.0.1:
dependencies:
isexe: 4.0.0
wildcard@2.0.1: {}
wrap-ansi@6.2.0:
dependencies:
ansi-styles: 4.3.0
string-width: 4.2.3
strip-ansi: 6.0.1
wrap-ansi@9.0.2:
dependencies:
ansi-styles: 6.2.3
string-width: 7.2.0
strip-ansi: 7.1.2
wrappy@1.0.2: {}
ws@8.19.0: {}
wsl-utils@0.1.0:
dependencies:
is-wsl: 3.1.0
wsl-utils@0.3.1:
dependencies:
is-wsl: 3.1.0
powershell-utils: 0.1.0
y18n@5.0.8: {}
yallist@3.1.1: {}
yallist@4.0.0: {}
yallist@5.0.0: {}
yargs-parser@22.0.0: {}
yargs@18.0.0:
dependencies:
cliui: 9.0.1
escalade: 3.2.0
get-caller-file: 2.0.5
string-width: 7.2.0
y18n: 5.0.8
yargs-parser: 22.0.0
yn@3.1.1: {}
yocto-queue@0.1.0: {}
yoctocolors-cjs@2.1.3: {}
yoctocolors@2.1.2: {}
zod-to-json-schema@3.25.1(zod@4.3.5):
dependencies:
zod: 4.3.5
zod@4.3.5: {}
zone.js@0.16.0: {} | unknown | github | https://github.com/angular/angular | integration/cli-hello-world/pnpm-lock.yaml |
/*!
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import { Box, Flex, HStack, Text, VStack } from "@chakra-ui/react";
import { useTranslation } from "react-i18next";
import type { PoolResponse } from "openapi/requests/types.gen";
import { PoolBar } from "src/components/PoolBar";
import { StateIcon } from "src/components/StateIcon";
import { Tooltip } from "src/components/ui";
import DeletePoolButton from "./DeletePoolButton";
import EditPoolButton from "./EditPoolButton";
/** Props for the PoolBarCard admin list item. */
type PoolBarCardProps = {
  // Pool record as returned by the REST API (openapi-generated type).
  readonly pool: PoolResponse;
};
/**
 * Card shown for each pool on the admin Pools page: header row with the pool
 * name, slot count, optional team name and deferred-slots indicator, edit /
 * delete actions, an optional description line, and the slot-usage bar.
 */
const PoolBarCard = ({ pool }: PoolBarCardProps) => {
  const { t: translate } = useTranslation("admin");

  return (
    <Box borderColor="border.emphasized" borderRadius={8} borderWidth={1} mb={2} overflow="hidden">
      <Flex alignItems="center" bg="bg.muted" justifyContent="space-between" p={4}>
        <VStack align="start" flex="1">
          <HStack justifyContent="space-between" width="100%">
            <Text fontSize="lg" fontWeight="bold" whiteSpace="normal" wordBreak="break-word">
              {pool.name} ({pool.slots} {translate("pools.form.slots")})
              {pool.team_name !== null && ` (${pool.team_name})`}
              {pool.include_deferred ? (
                <Tooltip content={translate("pools.deferredSlotsIncluded")}>
                  <StateIcon size={18} state="deferred" style={{ display: "inline", marginLeft: 6 }} />
                </Tooltip>
              ) : undefined}
            </Text>
            <HStack gap={0}>
              <EditPoolButton pool={pool} />
              {/* The default pool must always exist, so it cannot be deleted. */}
              {pool.name === "default_pool" ? undefined : <DeletePoolButton poolName={pool.name} />}
            </HStack>
          </HStack>
          {/* BUG FIX: previously `pool.description ?? (<Text>…</Text>)` — with a
              non-null description the raw string rendered unstyled (the styled
              <Text> was the *fallback*), and with a null description an empty
              <Text> rendered. Guard with a truthiness check instead, mirroring
              the `team_name` conditional above. */}
          {Boolean(pool.description) && (
            <Text color="fg.muted" fontSize="sm">
              {pool.description}
            </Text>
          )}
        </VStack>
      </Flex>
      <Box margin={4}>
        <PoolBar pool={pool} totalSlots={pool.slots} />
      </Box>
    </Box>
  );
};

export default PoolBarCard;
/* inftrees.c -- generate Huffman trees for efficient decoding
* Copyright (C) 1995-2024 Mark Adler
* For conditions of distribution and use, see copyright notice in zlib.h
*/
#include "zbuild.h"
#include "zutil.h"
#include "inftrees.h"
const char PREFIX(inflate_copyright)[] = " inflate 1.3.1 Copyright 1995-2024 Mark Adler ";
/*
If you use the zlib library in a product, an acknowledgment is welcome
in the documentation of your product. If for some reason you cannot
include such an acknowledgment, I would appreciate that you keep this
copyright string in the executable of your product.
*/
/*
Build a set of tables to decode the provided canonical Huffman code.
The code lengths are lens[0..codes-1]. The result starts at *table,
whose indices are 0..2^bits-1. work is a writable array of at least
lens shorts, which is used as a work area. type is the type of code
to be generated, CODES, LENS, or DISTS. On return, zero is success,
-1 is an invalid code, and +1 means that ENOUGH isn't enough. table
on return points to the next available entry's address. bits is the
requested root table index bits, and on return it is the actual root
table index bits. It will differ if the request is greater than the
longest code or if it is less than the shortest code.
*/
int Z_INTERNAL zng_inflate_table(codetype type, uint16_t *lens, unsigned codes,
                                 code * *table, unsigned *bits, uint16_t *work) {
    unsigned len;               /* a code's length in bits */
    unsigned sym;               /* index of code symbols */
    unsigned min, max;          /* minimum and maximum code lengths */
    unsigned root;              /* number of index bits for root table */
    unsigned curr;              /* number of index bits for current table */
    unsigned drop;              /* code bits to drop for sub-table */
    int left;                   /* number of prefix codes available */
    unsigned used;              /* code entries in table used */
    unsigned huff;              /* Huffman code */
    unsigned incr;              /* for incrementing code, index */
    unsigned fill;              /* index for replicating entries */
    unsigned low;               /* low bits for current root entry */
    unsigned mask;              /* mask for low root bits */
    code here;                  /* table entry for duplication */
    code *next;                 /* next available space in table */
    const uint16_t *base;       /* base value table to use */
    const uint16_t *extra;      /* extra bits table to use */
    unsigned match;             /* use base and extra for symbol >= match */
    uint16_t count[MAX_BITS+1]; /* number of codes of each length */
    uint16_t offs[MAX_BITS+1];  /* offsets in table for each length */
    static const uint16_t lbase[31] = { /* Length codes 257..285 base */
        3, 4, 5, 6, 7, 8, 9, 10, 11, 13, 15, 17, 19, 23, 27, 31,
        35, 43, 51, 59, 67, 83, 99, 115, 131, 163, 195, 227, 258, 0, 0};
    static const uint16_t lext[31] = { /* Length codes 257..285 extra */
        16, 16, 16, 16, 16, 16, 16, 16, 17, 17, 17, 17, 18, 18, 18, 18,
        19, 19, 19, 19, 20, 20, 20, 20, 21, 21, 21, 21, 16, 203, 77};
    static const uint16_t dbase[32] = { /* Distance codes 0..29 base */
        1, 2, 3, 4, 5, 7, 9, 13, 17, 25, 33, 49, 65, 97, 129, 193,
        257, 385, 513, 769, 1025, 1537, 2049, 3073, 4097, 6145,
        8193, 12289, 16385, 24577, 0, 0};
    static const uint16_t dext[32] = { /* Distance codes 0..29 extra */
        16, 16, 16, 16, 17, 17, 18, 18, 19, 19, 20, 20, 21, 21, 22, 22,
        23, 23, 24, 24, 25, 25, 26, 26, 27, 27,
        28, 28, 29, 29, 64, 64};

    /*
       Process a set of code lengths to create a canonical Huffman code.  The
       code lengths are lens[0..codes-1].  Each length corresponds to the
       symbols 0..codes-1.  The Huffman code is generated by first sorting the
       symbols by length from short to long, and retaining the symbol order
       for codes with equal lengths.  Then the code starts with all zero bits
       for the first code of the shortest length, and the codes are integer
       increments for the same length, and zeros are appended as the length
       increases.  For the deflate format, these bits are stored backwards
       from their more natural integer increment ordering, and so when the
       decoding tables are built in the large loop below, the integer codes
       are incremented backwards.

       This routine assumes, but does not check, that all of the entries in
       lens[] are in the range 0..MAXBITS.  The caller must assure this.
       1..MAXBITS is interpreted as that code length.  zero means that that
       symbol does not occur in this code.

       The codes are sorted by computing a count of codes for each length,
       creating from that a table of starting indices for each length in the
       sorted table, and then entering the symbols in order in the sorted
       table.  The sorted table is work[], with that space being provided by
       the caller.

       The length counts are used for other purposes as well, i.e. finding
       the minimum and maximum length codes, determining if there are any
       codes at all, checking for a valid set of lengths, and looking ahead
       at length counts to determine sub-table sizes when building the
       decoding tables.
     */

    /* accumulate lengths for codes (assumes lens[] all in 0..MAXBITS) */
    for (len = 0; len <= MAX_BITS; len++)
        count[len] = 0;
    for (sym = 0; sym < codes; sym++)
        count[lens[sym]]++;

    /* bound code lengths, force root to be within code lengths */
    root = *bits;
    for (max = MAX_BITS; max >= 1; max--)
        if (count[max] != 0) break;
    root = MIN(root, max);      /* root table need not index past the longest code */
    if (UNLIKELY(max == 0)) {                     /* no symbols to code at all */
        here.op = (unsigned char)64;    /* invalid code marker */
        here.bits = (unsigned char)1;
        here.val = (uint16_t)0;
        *(*table)++ = here;             /* make a table to force an error */
        *(*table)++ = here;
        *bits = 1;
        return 0;     /* no symbols, but wait for decoding to report error */
    }
    for (min = 1; min < max; min++)
        if (count[min] != 0) break;
    root = MAX(root, min);      /* root must cover at least the shortest code */

    /* check for an over-subscribed or incomplete set of lengths */
    left = 1;
    for (len = 1; len <= MAX_BITS; len++) {
        left <<= 1;
        left -= count[len];
        if (left < 0) return -1;        /* over-subscribed */
    }
    if (left > 0 && (type == CODES || max != 1))
        return -1;                      /* incomplete set */

    /* generate offsets into symbol table for each length for sorting */
    offs[1] = 0;
    for (len = 1; len < MAX_BITS; len++)
        offs[len + 1] = offs[len] + count[len];

    /* sort symbols by length, by symbol order within each length */
    for (sym = 0; sym < codes; sym++)
        if (lens[sym] != 0) work[offs[lens[sym]]++] = (uint16_t)sym;

    /*
       Create and fill in decoding tables.  In this loop, the table being
       filled is at next and has curr index bits.  The code being used is huff
       with length len.  That code is converted to an index by dropping drop
       bits off of the bottom.  For codes where len is less than drop + curr,
       those top drop + curr - len bits are incremented through all values to
       fill the table with replicated entries.

       root is the number of index bits for the root table.  When len exceeds
       root, sub-tables are created pointed to by the root entry with an index
       of the low root bits of huff.  This is saved in low to check for when a
       new sub-table should be started.  drop is zero when the root table is
       being filled, and drop is root when sub-tables are being filled.

       When a new sub-table is needed, it is necessary to look ahead in the
       code lengths to determine what size sub-table is needed.  The length
       counts are used for this, and so count[] is decremented as codes are
       entered in the tables.

       used keeps track of how many table entries have been allocated from the
       provided *table space.  It is checked for LENS and DIST tables against
       the constants ENOUGH_LENS and ENOUGH_DISTS to guard against changes in
       the initial root table size constants.  See the comments in inftrees.h
       for more information.

       sym increments through all symbols, and the loop terminates when
       all codes of length max, i.e. all codes, have been processed.  This
       routine permits incomplete codes, so another loop after this one fills
       in the rest of the decoding tables with invalid code markers.
     */

    /* set up for code type: choose base/extra tables and the first symbol
       that uses them */
    switch (type) {
    case CODES:
        base = extra = work;    /* dummy value--not used */
        match = 20;
        break;
    case LENS:
        base = lbase;
        extra = lext;
        match = 257;
        break;
    default:        /* DISTS */
        base = dbase;
        extra = dext;
        match = 0;
    }

    /* initialize state for loop */
    huff = 0;                   /* starting code */
    sym = 0;                    /* starting code symbol */
    len = min;                  /* starting code length */
    next = *table;              /* current table to fill in */
    curr = root;                /* current table index bits */
    drop = 0;                   /* current bits to drop from code for index */
    low = (unsigned)(-1);       /* trigger new sub-table when len > root */
    used = 1U << root;          /* use root table entries */
    mask = used - 1;            /* mask for comparing low */

    /* check available table space */
    if ((type == LENS && used > ENOUGH_LENS) ||
        (type == DISTS && used > ENOUGH_DISTS))
        return 1;

    /* process all codes and make table entries */
    for (;;) {
        /* create table entry */
        here.bits = (unsigned char)(len - drop);
        if (LIKELY(work[sym] >= match)) {
            here.op = (unsigned char)(extra[work[sym] - match]);
            here.val = base[work[sym] - match];
        } else if (work[sym] + 1U < match) {
            here.op = (unsigned char)0;
            here.val = work[sym];
        } else {
            here.op = (unsigned char)(32 + 64);         /* end of block */
            here.val = 0;
        }

        /* replicate for those indices with low len bits equal to huff */
        incr = 1U << (len - drop);
        fill = 1U << curr;
        min = fill;                 /* save offset to next table */
        do {
            fill -= incr;
            next[(huff >> drop) + fill] = here;
        } while (fill != 0);

        /* backwards increment the len-bit code huff */
        incr = 1U << (len - 1);
        while (huff & incr)
            incr >>= 1;
        if (incr != 0) {
            huff &= incr - 1;
            huff += incr;
        } else {
            huff = 0;
        }

        /* go to next symbol, update count, len */
        sym++;
        if (--(count[len]) == 0) {
            if (len == max)
                break;
            len = lens[work[sym]];
        }

        /* create new sub-table if needed */
        if (len > root && (huff & mask) != low) {
            /* if first time, transition to sub-tables */
            if (drop == 0)
                drop = root;

            /* increment past last table */
            next += min;            /* here min is 1 << curr */

            /* determine length of next table */
            curr = len - drop;
            left = (int)(1 << curr);
            while (curr + drop < max) {
                left -= count[curr + drop];
                if (left <= 0)
                    break;
                curr++;
                left <<= 1;
            }

            /* check for enough space */
            used += 1U << curr;
            if ((type == LENS && used > ENOUGH_LENS) || (type == DISTS && used > ENOUGH_DISTS))
                return 1;

            /* point entry in root table to sub-table */
            low = huff & mask;
            (*table)[low].op = (unsigned char)curr;
            (*table)[low].bits = (unsigned char)root;
            (*table)[low].val = (uint16_t)(next - *table);
        }
    }

    /* fill in remaining table entry if code is incomplete (guaranteed to have
       at most one remaining entry, since if the code is incomplete, the
       maximum code length that was allowed to get this far is one bit) */
    if (UNLIKELY(huff != 0)) {
        here.op = (unsigned char)64;            /* invalid code marker */
        here.bits = (unsigned char)(len - drop);
        here.val = (uint16_t)0;
        next[huff] = here;
    }

    /* set return parameters */
    *table += used;
    *bits = root;
    return 0;
}
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Synchronize replicas for training."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.core.framework import types_pb2
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import data_flow_ops
from tensorflow.python.ops import state_ops
from tensorflow.python.ops import variables
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.training import optimizer
from tensorflow.python.training import queue_runner
from tensorflow.python.training import session_manager
from tensorflow.python.training import session_run_hook
# Please note that the gradients from replicas are averaged instead of summed
# (as in the old sync_replicas_optimizer) so you need to increase the learning
# rate according to the number of replicas. This change is introduced to be
# consistent with how gradients are aggregated (averaged) within a batch in a
# replica.
class SyncReplicasOptimizer(optimizer.Optimizer):
"""Class to synchronize, aggregate gradients and pass them to the optimizer.
In a typical asynchronous training environment, it's common to have some
stale gradients. For example, with a N-replica asynchronous training,
gradients will be applied to the variables N times independently. Depending
on each replica's training speed, some gradients might be calculated from
copies of the variable from several steps back (N-1 steps on average). This
optimizer avoids stale gradients by collecting gradients from all replicas,
averaging them, then applying them to the variables in one shot, after
which replicas can fetch the new variables and continue.
The following accumulators/queue are created:
<empty line>
* N `gradient accumulators`, one per variable to train. Gradients are pushed
to them and the chief worker will wait until enough gradients are collected
and then average them before applying to variables. The accumulator will
drop all stale gradients (more details in the accumulator op).
* 1 `token` queue where the optimizer pushes the new global_step value after
all variables are updated.
The following local variable is created:
* `sync_rep_local_step`, one per replica. Compared against the global_step in
each accumulator to check for staleness of the gradients.
The optimizer adds nodes to the graph to collect gradients and pause the
trainers until variables are updated.
For the Parameter Server job:
<empty line>
1. An accumulator is created for each variable, and each replica pushes the
gradients into the accumulators instead of directly applying them to the
variables.
2. Each accumulator averages once enough gradients (replicas_to_aggregate)
have been accumulated.
3. Apply the averaged gradients to the variables.
4. Only after all variables have been updated, increment the global step.
5. Only after step 4, pushes `global_step` in the `token_queue`, once for
each worker replica. The workers can now fetch the global step, use it to
update its local_step variable and start the next batch.
For the replicas:
<empty line>
1. Start a step: fetch variables and compute gradients.
2. Once the gradients have been computed, push them into gradient
accumulators. Each accumulator will check the staleness and drop the stale.
3. After pushing all the gradients, dequeue an updated value of global_step
from the token queue and record that step to its local_step variable. Note
that this is effectively a barrier.
4. Start the next batch.
### Usage
```python
# Create any optimizer to update the variables, say a simple SGD:
opt = GradientDescentOptimizer(learning_rate=0.1)
# Wrap the optimizer with sync_replicas_optimizer with 50 replicas: at each
# step the optimizer collects 50 gradients before applying to variables.
# Note that if you want to have 2 backup replicas, you can change
# total_num_replicas=52 and make sure this number matches how many physical
# replicas you started in your job.
opt = tf.SyncReplicasOptimizer(opt, replicas_to_aggregate=50,
total_num_replicas=50)
# Some models have startup_delays to help stabilize the model but when using
# sync_replicas training, set it to 0.
# Now you can call `minimize()` or `compute_gradients()` and
# `apply_gradients()` normally
training_op = opt.minimize(total_loss, global_step=self.global_step)
# You can create the hook which handles initialization and queues.
sync_replicas_hook = opt.make_session_run_hook(is_chief)
```
In the training program, every worker will run the train_op as if not
synchronized.
```python
with training.MonitoredTrainingSession(
master=workers[worker_id].target, is_chief=is_chief,
hooks=[sync_replicas_hook]) as mon_sess:
while not mon_sess.should_stop():
mon_sess.run(training_op)
```
@@__init__
@@compute_gradients
@@apply_gradients
@@get_chief_queue_runner
@@get_init_tokens_op
"""
def __init__(self,
             opt,
             replicas_to_aggregate,
             total_num_replicas=None,
             variable_averages=None,
             variables_to_average=None,
             use_locking=False,
             name="sync_replicas"):
    """Construct a sync_replicas optimizer.

    Args:
      opt: The actual optimizer that will be used to compute and apply the
        gradients. Must be one of the Optimizer classes.
      replicas_to_aggregate: number of replicas to aggregate for each variable
        update.
      total_num_replicas: Total number of tasks/workers/replicas, could be
        different from replicas_to_aggregate.  Defaults to
        replicas_to_aggregate when not given.  If greater, the surplus acts as
        backup replicas; if smaller, replicas compute multiple batches per
        variable update.
      variable_averages: Optional `ExponentialMovingAverage` object, used to
        maintain moving averages for the variables passed in
        `variables_to_average`.
      variables_to_average: a list of variables that need to be averaged. Only
        needed if variable_averages is passed in.
      use_locking: If True use locks for update operation.
      name: string. Optional name of the returned operation.
    """
    if total_num_replicas is None:
        total_num_replicas = replicas_to_aggregate

    super(SyncReplicasOptimizer, self).__init__(use_locking, name)
    logging.info(
        "SyncReplicasV2: replicas_to_aggregate=%s; total_num_replicas=%s",
        replicas_to_aggregate, total_num_replicas)

    # Wrapped optimizer and aggregation configuration.
    self._opt = opt
    self._replicas_to_aggregate = replicas_to_aggregate
    self._total_num_replicas = total_num_replicas
    self._tokens_per_step = max(total_num_replicas, replicas_to_aggregate)

    # Optional moving-average bookkeeping.
    self._variable_averages = variable_averages
    self._variables_to_average = variables_to_average

    # State populated later by apply_gradients().
    self._gradients_applied = False
    self._global_step = None
    self._sync_token_queue = None

    # The synchronization op will be executed in a queue runner which should
    # only be executed by one of the replicas (usually the chief).
    self._chief_queue_runner = None

    # (accumulator, device) pairs, remembered so the accumulators' initial
    # step can later be set to the global step.
    self._accumulator_list = []
def compute_gradients(self, *args, **kwargs):
    """Compute gradients of "loss" for the variables in "var_list".

    A thin pass-through to the wrapped optimizer's compute_gradients().
    Gradients are aggregated later, in apply_gradients(), so callers can
    still modify the per-replica gradients (e.g. clip by a per-replica
    global norm) before aggregation — the norm of aggregated gradients can
    be distorted by a single replica with huge gradients.

    Args:
      *args: Arguments for compute_gradients().
      **kwargs: Keyword arguments for compute_gradients().

    Returns:
      A list of (gradient, variable) pairs.
    """
    return self._opt.compute_gradients(*args, **kwargs)
def apply_gradients(self, grads_and_vars, global_step=None, name=None):
    """Apply gradients to variables.

    This contains most of the synchronization implementation and also wraps
    the apply_gradients() from the real optimizer.

    Args:
      grads_and_vars: List of (gradient, variable) pairs as returned by
        compute_gradients().
      global_step: Optional Variable to increment by one after the
        variables have been updated.
      name: Optional name for the returned operation.  Default to the
        name passed to the Optimizer constructor.

    Returns:
      train_op: The op to dequeue a token so the replicas can exit this batch
        and start the next one. This is executed by each replica.

    Raises:
      ValueError: If the grads_and_vars is empty.
      ValueError: If global step is not provided, the staleness cannot be
        checked.
    """
    if not grads_and_vars:
        raise ValueError("Must supply at least one variable")

    if global_step is None:
        raise ValueError("Global step is required to check staleness")

    self._global_step = global_step
    train_ops = []
    aggregated_grad = []
    var_list = []

    # Per-replica local step; each accumulator compares it against its own
    # global step to detect and drop stale gradients.
    self._local_step = variables.Variable(
        initial_value=0,
        trainable=False,
        collections=[ops.GraphKeys.LOCAL_VARIABLES],
        dtype=global_step.dtype.base_dtype,
        name="sync_rep_local_step")
    self.local_step_init_op = state_ops.assign(self._local_step, global_step)
    chief_init_ops = [self.local_step_init_op]
    self.ready_for_local_init_op = variables.report_uninitialized_variables(
        variables.global_variables())

    with ops.name_scope(None, self._name):
        # One accumulator per variable; replicas push gradients into it and
        # the take_grad op averages once replicas_to_aggregate have arrived.
        for grad, var in grads_and_vars:
            var_list.append(var)
            with ops.device(var.device):
                # Dense gradients.
                if grad is None:
                    aggregated_grad.append(None)  # pass-through.
                    continue
                elif isinstance(grad, ops.Tensor):
                    grad_accum = data_flow_ops.ConditionalAccumulator(
                        grad.dtype,
                        shape=var.get_shape(),
                        shared_name=var.name + "/grad_accum")
                    train_ops.append(grad_accum.apply_grad(
                        grad, local_step=self._local_step))
                    aggregated_grad.append(grad_accum.take_grad(
                        self._replicas_to_aggregate))
                else:
                    # Sparse gradients arrive as IndexedSlices.
                    if not isinstance(grad, ops.IndexedSlices):
                        raise ValueError("Unknown grad type!")
                    grad_accum = data_flow_ops.SparseConditionalAccumulator(
                        grad.dtype, shape=(), shared_name=var.name + "/grad_accum")
                    train_ops.append(grad_accum.apply_indexed_slices_grad(
                        grad, local_step=self._local_step))
                    aggregated_grad.append(grad_accum.take_indexed_slices_grad(
                        self._replicas_to_aggregate))

                self._accumulator_list.append((grad_accum, var.device))

        aggregated_grads_and_vars = zip(aggregated_grad, var_list)

        # sync_op will be assigned to the same device as the global step.
        with ops.device(global_step.device), ops.name_scope(""):
            update_op = self._opt.apply_gradients(aggregated_grads_and_vars,
                                                  global_step)

        # Create token queue.
        with ops.device(global_step.device), ops.name_scope(""):
            sync_token_queue = (
                data_flow_ops.FIFOQueue(-1,
                                        global_step.dtype.base_dtype,
                                        shapes=(),
                                        name="sync_token_q",
                                        shared_name="sync_token_q"))
            self._sync_token_queue = sync_token_queue

            # dummy_queue is passed to the queue runner. Don't use the real
            # queues because the queue runner doesn't automatically reopen it
            # once it closed queues in PS devices.
            dummy_queue = (
                data_flow_ops.FIFOQueue(1,
                                        types_pb2.DT_INT32,
                                        shapes=(),
                                        name="dummy_queue",
                                        shared_name="dummy_queue"))

        with ops.device(global_step.device), ops.name_scope(""):
            # Replicas have to wait until they can get a token from the token
            # queue.
            with ops.control_dependencies(train_ops):
                token = sync_token_queue.dequeue()
            train_op = state_ops.assign(self._local_step, token)

            with ops.control_dependencies([update_op]):
                # Sync_op needs to insert tokens to the token queue at the end
                # of the step so the replicas can fetch them to start the next
                # step.
                tokens = array_ops.fill([self._tokens_per_step], global_step)
                sync_op = sync_token_queue.enqueue_many((tokens,))

            if self._variable_averages is not None:
                with ops.control_dependencies([sync_op]), ops.name_scope(""):
                    sync_op = self._variable_averages.apply(
                        self._variables_to_average)

            self._chief_queue_runner = queue_runner.QueueRunner(dummy_queue,
                                                                [sync_op])
        # Point every accumulator's notion of "current step" at global_step
        # (chief-only initialization).
        for accum, dev in self._accumulator_list:
            with ops.device(dev):
                chief_init_ops.append(
                    accum.set_global_step(
                        global_step, name="SetGlobalStep"))
        self.chief_init_op = control_flow_ops.group(*(chief_init_ops))
        self._gradients_applied = True
        return train_op
def get_chief_queue_runner(self):
    """Returns the QueueRunner for the chief to execute.

    The runner wraps the synchronization ops: aggregate gradients, apply
    them to variables, increment the global step and refill the token
    queue.  It only exists after apply_gradients() has built the graph.

    Returns:
      A `QueueRunner` for chief to execute.

    Raises:
      ValueError: If this is called before apply_gradients().
    """
    if not self._gradients_applied:
        raise ValueError("Should be called after apply_gradients().")

    return self._chief_queue_runner
def get_slot(self, *args, **kwargs):
    """Return a slot named "name" created for "var" by the Optimizer.

    Pure delegation to the wrapped optimizer's get_slot().

    Args:
      *args: Arguments for get_slot().
      **kwargs: Keyword arguments for get_slot().

    Returns:
      The `Variable` for the slot if it was created, `None` otherwise.
    """
    return self._opt.get_slot(*args, **kwargs)
def get_slot_names(self, *args, **kwargs):
    """Return a list of the names of slots created by the `Optimizer`.

    Pure delegation to the wrapped optimizer's get_slot_names().

    Args:
      *args: Arguments for get_slot().
      **kwargs: Keyword arguments for get_slot().

    Returns:
      A list of strings.
    """
    return self._opt.get_slot_names(*args, **kwargs)
def get_init_tokens_op(self, num_tokens=-1):
    """Returns the op to fill the sync_token_queue with the tokens.

    Meant to run once at the start of the chief/sync thread.  Pre-loading
    tokens lets training proceed even when total_num_replicas is smaller
    than replicas_to_aggregate (replicas then compute several steps per
    variable update).  Make sure:
    `num_tokens >= replicas_to_aggregate - total_num_replicas`.

    Args:
      num_tokens: Number of tokens to add to the queue; -1 (the default)
        means replicas_to_aggregate.

    Returns:
      An op for the chief/sync replica to fill the token queue.

    Raises:
      ValueError: If this is called before apply_gradients().
      ValueError: If num_tokens are smaller than replicas_to_aggregate -
        total_num_replicas.
    """
    if not self._gradients_applied:
        raise ValueError(
            "get_init_tokens_op() should be called after apply_gradients().")

    required = self._replicas_to_aggregate - self._total_num_replicas
    if num_tokens == -1:
        num_tokens = self._replicas_to_aggregate
    elif num_tokens < required:
        raise ValueError(
            "Too few tokens to finish the first step: %d (given) vs %d (needed)" %
            (num_tokens, required))

    if num_tokens > 0:
        with ops.device(self._global_step.device), ops.name_scope(""):
            fill_values = array_ops.fill([num_tokens], self._global_step)
            return self._sync_token_queue.enqueue_many((fill_values,))

    return control_flow_ops.no_op(name="no_init_tokens")
def make_session_run_hook(self, is_chief, num_tokens=-1):
    """Creates a hook to handle SyncReplicasHook ops such as initialization."""
    # Non-chief workers only need to initialize their local step.
    if not is_chief:
        return _SyncReplicasOptimizerHook(self.local_step_init_op,
                                          self.ready_for_local_init_op,
                                          None, None)
    # The chief additionally runs the queue runner and seeds the token queue.
    return _SyncReplicasOptimizerHook(self.chief_init_op,
                                      self.ready_for_local_init_op,
                                      self.get_chief_queue_runner(),
                                      self.get_init_tokens_op(num_tokens))
class _SyncReplicasOptimizerHook(session_run_hook.SessionRunHook):
    """A SessionRunHook handles ops related to SyncReplicasOptimizer."""

    def __init__(self, local_init_op, ready_for_local_init_op, q_runner,
                 init_tokens_op):
        """Creates hook to handle SyncReplicaOptimizer initialization ops.

        Args:
          local_init_op: Either `SyncReplicasOptimizer.chief_init_op` or
            `SyncReplicasOptimizer.local_step_init_op`.
          ready_for_local_init_op: `SyncReplicasOptimizer.ready_for_local_init_op`
          q_runner: Either `SyncReplicasOptimizer.get_chief_queue_runner` or `None`
          init_tokens_op: `SyncReplicasOptimizer.get_init_tokens_op` or None
        """
        self._local_init_op = local_init_op
        self._ready_for_local_init_op = ready_for_local_init_op
        self._q_runner = q_runner
        self._init_tokens_op = init_tokens_op

    def after_create_session(self, session, coord):
        """Runs SyncReplicasOptimizer initialization ops."""
        # Local init may only run once the (global) model variables are ready.
        is_ready, msg = session_manager._ready(  # pylint: disable=protected-access
            self._ready_for_local_init_op, session,
            "Model is not ready for SyncReplicasOptimizer local init.")
        if not is_ready:
            raise RuntimeError(
                "Init operations did not make model ready for SyncReplicasOptimizer "
                "local_init. Init op: %s, error: %s" %
                (self._local_init_op.name, msg))

        session.run(self._local_init_op)
        # Chief-only extras: seed the token queue and start the sync runner.
        if self._init_tokens_op is not None:
            session.run(self._init_tokens_op)
        if self._q_runner is not None:
            self._q_runner.create_threads(
                session, coord=coord, daemon=True, start=True)
# -*- coding: utf-8 -*-
"""
pygments.lexers.theorem
~~~~~~~~~~~~~~~~~~~~~~~
Lexers for theorem-proving languages.
:copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
from pygments.lexer import RegexLexer, default, words
from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
Number, Punctuation, Generic
__all__ = ['CoqLexer', 'IsabelleLexer', 'LeanLexer']
class CoqLexer(RegexLexer):
    """
    For the `Coq <http://coq.inria.fr/>`_ theorem prover.

    .. versionadded:: 1.5
    """

    name = 'Coq'
    aliases = ['coq']
    filenames = ['*.v']
    mimetypes = ['text/x-coq']

    keywords1 = (
        # Vernacular commands
        'Section', 'Module', 'End', 'Require', 'Import', 'Export', 'Variable',
        'Variables', 'Parameter', 'Parameters', 'Axiom', 'Hypothesis',
        'Hypotheses', 'Notation', 'Local', 'Tactic', 'Reserved', 'Scope',
        'Open', 'Close', 'Bind', 'Delimit', 'Definition', 'Let', 'Ltac',
        'Fixpoint', 'CoFixpoint', 'Morphism', 'Relation', 'Implicit',
        'Arguments', 'Set', 'Unset', 'Contextual', 'Strict', 'Prenex',
        'Implicits', 'Inductive', 'CoInductive', 'Record', 'Structure',
        'Canonical', 'Coercion', 'Theorem', 'Lemma', 'Corollary',
        'Proposition', 'Fact', 'Remark', 'Example', 'Proof', 'Goal', 'Save',
        'Qed', 'Defined', 'Hint', 'Resolve', 'Rewrite', 'View', 'Search',
        'Show', 'Print', 'Printing', 'All', 'Graph', 'Projections', 'inside',
        'outside', 'Check',
    )
    keywords2 = (
        # Gallina
        'forall', 'exists', 'exists2', 'fun', 'fix', 'cofix', 'struct',
        'match', 'end', 'in', 'return', 'let', 'if', 'is', 'then', 'else',
        'for', 'of', 'nosimpl', 'with', 'as',
    )
    keywords3 = (
        # Sorts
        'Type', 'Prop',
    )
    keywords4 = (
        # Tactics
        'pose', 'set', 'move', 'case', 'elim', 'apply', 'clear', 'hnf', 'intro',
        'intros', 'generalize', 'rename', 'pattern', 'after', 'destruct',
        'induction', 'using', 'refine', 'inversion', 'injection', 'rewrite',
        'congr', 'unlock', 'compute', 'ring', 'field', 'replace', 'fold',
        'unfold', 'change', 'cutrewrite', 'simpl', 'have', 'suff', 'wlog',
        'suffices', 'without', 'loss', 'nat_norm', 'assert', 'cut', 'trivial',
        'revert', 'bool_congr', 'nat_congr', 'symmetry', 'transitivity', 'auto',
        'split', 'left', 'right', 'autorewrite', 'tauto',
    )
    keywords5 = (
        # Terminators
        'by', 'done', 'exact', 'reflexivity', 'tauto', 'romega', 'omega',
        'assumption', 'solve', 'contradiction', 'discriminate',
    )
    keywords6 = (
        # Control
        'do', 'last', 'first', 'try', 'idtac', 'repeat',
    )
    keyopts = (
        '!=', '#', '&', '&&', r'\(', r'\)', r'\*', r'\+', ',', '-', r'-\.',
        '->', r'\.', r'\.\.', ':', '::', ':=', ':>', ';', ';;', '<', '<-',
        '<->', '=', '>', '>]', r'>\}', r'\?', r'\?\?', r'\[', r'\[<', r'\[>',
        r'\[\|', ']', '_', '`', r'\{', r'\{<', r'\|', r'\|]', r'\}', '~', '=>',
        r'/\\', r'\\/',
        u'Π', u'λ',
    )
    operators = r'[!$%&*+\./:<=>?@^|~-]'
    word_operators = ('and', 'asr', 'land', 'lor', 'lsl', 'lxor', 'mod', 'or')
    prefix_syms = r'[!?~]'
    infix_syms = r'[=<>@^|&+\*/$%-]'
    primitives = ('unit', 'int', 'float', 'bool', 'string', 'char', 'list',
                  'array')

    tokens = {
        'root': [
            (r'\s+', Text),
            (r'false|true|\(\)|\[\]', Name.Builtin.Pseudo),
            (r'\(\*', Comment, 'comment'),
            (words(keywords1, prefix=r'\b', suffix=r'\b'), Keyword.Namespace),
            (words(keywords2, prefix=r'\b', suffix=r'\b'), Keyword),
            (words(keywords3, prefix=r'\b', suffix=r'\b'), Keyword.Type),
            (words(keywords4, prefix=r'\b', suffix=r'\b'), Keyword),
            (words(keywords5, prefix=r'\b', suffix=r'\b'), Keyword.Pseudo),
            (words(keywords6, prefix=r'\b', suffix=r'\b'), Keyword.Reserved),
            (r'\b([A-Z][\w\']*)(?=\s*\.)', Name.Namespace, 'dotted'),
            (r'\b([A-Z][\w\']*)', Name.Class),
            (r'(%s)' % '|'.join(keyopts[::-1]), Operator),
            (r'(%s|%s)?%s' % (infix_syms, prefix_syms, operators), Operator),
            (r'\b(%s)\b' % '|'.join(word_operators), Operator.Word),
            (r'\b(%s)\b' % '|'.join(primitives), Keyword.Type),
            (r"[^\W\d][\w']*", Name),
            # Specific numeric literal forms must precede the generic integer
            # rule; otherwise `\d[\d_]*` consumes the leading digit and the
            # hex/oct/bin/float rules can never match.
            (r'0[xX][\da-fA-F][\da-fA-F_]*', Number.Hex),
            (r'0[oO][0-7][0-7_]*', Number.Oct),
            (r'0[bB][01][01_]*', Number.Bin),
            # The decimal point must be escaped: an unescaped `.` here matched
            # any character whatsoever.
            (r'-?\d[\d_]*(\.[\d_]*)?([eE][+\-]?\d[\d_]*)', Number.Float),
            (r'\d[\d_]*', Number.Integer),
            (r"'(?:(\\[\\\"'ntbr ])|(\\[0-9]{3})|(\\x[0-9a-fA-F]{2}))'",
             String.Char),
            (r"'.'", String.Char),
            (r"'", Keyword),  # a stray quote is another syntax element
            (r'"', String.Double, 'string'),
            (r'[~?][a-z][\w\']*:', Name.Variable),
        ],
        'comment': [
            # Coq comments nest; push/pop on each (* ... *) pair.
            (r'[^(*)]+', Comment),
            (r'\(\*', Comment, '#push'),
            (r'\*\)', Comment, '#pop'),
            (r'[(*)]', Comment),
        ],
        'string': [
            (r'[^"]+', String.Double),
            (r'""', String.Double),  # doubled quote is an escaped quote
            (r'"', String.Double, '#pop'),
        ],
        'dotted': [
            # Module-qualified names: Mod.Sub.ident
            (r'\s+', Text),
            (r'\.', Punctuation),
            (r'[A-Z][\w\']*(?=\s*\.)', Name.Namespace),
            (r'[A-Z][\w\']*', Name.Class, '#pop'),
            (r'[a-z][a-z0-9_\']*', Name, '#pop'),
            default('#pop')
        ],
    }

    def analyse_text(text):
        # Coq sources conventionally open with a (* ... *) comment.
        if text.startswith('(*'):
            return True
class IsabelleLexer(RegexLexer):
    """
    For the `Isabelle <http://isabelle.in.tum.de/>`_ proof assistant.

    .. versionadded:: 2.0
    """

    name = 'Isabelle'
    aliases = ['isabelle']
    filenames = ['*.thy']
    mimetypes = ['text/x-isabelle']

    # Keyword classes below mirror Isabelle's own command categories; each is
    # mapped to a distinct Pygments token type in 'root'.
    keyword_minor = (
        'and', 'assumes', 'attach', 'avoids', 'binder', 'checking',
        'class_instance', 'class_relation', 'code_module', 'congs',
        'constant', 'constrains', 'datatypes', 'defines', 'file', 'fixes',
        'for', 'functions', 'hints', 'identifier', 'if', 'imports', 'in',
        'includes', 'infix', 'infixl', 'infixr', 'is', 'keywords', 'lazy',
        'module_name', 'monos', 'morphisms', 'no_discs_sels', 'notes',
        'obtains', 'open', 'output', 'overloaded', 'parametric', 'permissive',
        'pervasive', 'rep_compat', 'shows', 'structure', 'type_class',
        'type_constructor', 'unchecked', 'unsafe', 'where',
    )

    keyword_diag = (
        'ML_command', 'ML_val', 'class_deps', 'code_deps', 'code_thms',
        'display_drafts', 'find_consts', 'find_theorems', 'find_unused_assms',
        'full_prf', 'help', 'locale_deps', 'nitpick', 'pr', 'prf',
        'print_abbrevs', 'print_antiquotations', 'print_attributes',
        'print_binds', 'print_bnfs', 'print_bundles',
        'print_case_translations', 'print_cases', 'print_claset',
        'print_classes', 'print_codeproc', 'print_codesetup',
        'print_coercions', 'print_commands', 'print_context',
        'print_defn_rules', 'print_dependencies', 'print_facts',
        'print_induct_rules', 'print_inductives', 'print_interps',
        'print_locale', 'print_locales', 'print_methods', 'print_options',
        'print_orders', 'print_quot_maps', 'print_quotconsts',
        'print_quotients', 'print_quotientsQ3', 'print_quotmapsQ3',
        'print_rules', 'print_simpset', 'print_state', 'print_statement',
        'print_syntax', 'print_theorems', 'print_theory', 'print_trans_rules',
        'prop', 'pwd', 'quickcheck', 'refute', 'sledgehammer', 'smt_status',
        'solve_direct', 'spark_status', 'term', 'thm', 'thm_deps', 'thy_deps',
        'try', 'try0', 'typ', 'unused_thms', 'value', 'values', 'welcome',
        'print_ML_antiquotations', 'print_term_bindings', 'values_prolog',
    )

    keyword_thy = ('theory', 'begin', 'end')

    keyword_section = ('header', 'chapter')

    keyword_subsection = (
        'section', 'subsection', 'subsubsection', 'sect', 'subsect',
        'subsubsect',
    )

    # NOTE(review): 'datatype_compat' appears twice in this tuple (also near
    # the end); harmless for matching, but the duplicate could be dropped.
    keyword_theory_decl = (
        'ML', 'ML_file', 'abbreviation', 'adhoc_overloading', 'arities',
        'atom_decl', 'attribute_setup', 'axiomatization', 'bundle',
        'case_of_simps', 'class', 'classes', 'classrel', 'codatatype',
        'code_abort', 'code_class', 'code_const', 'code_datatype',
        'code_identifier', 'code_include', 'code_instance', 'code_modulename',
        'code_monad', 'code_printing', 'code_reflect', 'code_reserved',
        'code_type', 'coinductive', 'coinductive_set', 'consts', 'context',
        'datatype', 'datatype_new', 'datatype_new_compat', 'declaration',
        'declare', 'default_sort', 'defer_recdef', 'definition', 'defs',
        'domain', 'domain_isomorphism', 'domaindef', 'equivariance',
        'export_code', 'extract', 'extract_type', 'fixrec', 'fun',
        'fun_cases', 'hide_class', 'hide_const', 'hide_fact', 'hide_type',
        'import_const_map', 'import_file', 'import_tptp', 'import_type_map',
        'inductive', 'inductive_set', 'instantiation', 'judgment', 'lemmas',
        'lifting_forget', 'lifting_update', 'local_setup', 'locale',
        'method_setup', 'nitpick_params', 'no_adhoc_overloading',
        'no_notation', 'no_syntax', 'no_translations', 'no_type_notation',
        'nominal_datatype', 'nonterminal', 'notation', 'notepad', 'oracle',
        'overloading', 'parse_ast_translation', 'parse_translation',
        'partial_function', 'primcorec', 'primrec', 'primrec_new',
        'print_ast_translation', 'print_translation', 'quickcheck_generator',
        'quickcheck_params', 'realizability', 'realizers', 'recdef', 'record',
        'refute_params', 'setup', 'setup_lifting', 'simproc_setup',
        'simps_of_case', 'sledgehammer_params', 'spark_end', 'spark_open',
        'spark_open_siv', 'spark_open_vcg', 'spark_proof_functions',
        'spark_types', 'statespace', 'syntax', 'syntax_declaration', 'text',
        'text_raw', 'theorems', 'translations', 'type_notation',
        'type_synonym', 'typed_print_translation', 'typedecl', 'hoarestate',
        'install_C_file', 'install_C_types', 'wpc_setup', 'c_defs', 'c_types',
        'memsafe', 'SML_export', 'SML_file', 'SML_import', 'approximate',
        'bnf_axiomatization', 'cartouche', 'datatype_compat',
        'free_constructors', 'functor', 'nominal_function',
        'nominal_termination', 'permanent_interpretation',
        'binds', 'defining', 'smt2_status', 'term_cartouche',
        'boogie_file', 'datatype_compat', 'text_cartouche',
    )

    keyword_theory_script = ('inductive_cases', 'inductive_simps')

    keyword_theory_goal = (
        'ax_specification', 'bnf', 'code_pred', 'corollary', 'cpodef',
        'crunch', 'crunch_ignore',
        'enriched_type', 'function', 'instance', 'interpretation', 'lemma',
        'lift_definition', 'nominal_inductive', 'nominal_inductive2',
        'nominal_primrec', 'pcpodef', 'primcorecursive',
        'quotient_definition', 'quotient_type', 'recdef_tc', 'rep_datatype',
        'schematic_corollary', 'schematic_lemma', 'schematic_theorem',
        'spark_vc', 'specification', 'subclass', 'sublocale', 'termination',
        'theorem', 'typedef', 'wrap_free_constructors',
    )

    keyword_qed = ('by', 'done', 'qed')
    # Rendered as Generic.Error to make unfinished proofs stand out.
    keyword_abandon_proof = ('sorry', 'oops')

    keyword_proof_goal = ('have', 'hence', 'interpret')

    keyword_proof_block = ('next', 'proof')

    keyword_proof_chain = (
        'finally', 'from', 'then', 'ultimately', 'with',
    )

    keyword_proof_decl = (
        'ML_prf', 'also', 'include', 'including', 'let', 'moreover', 'note',
        'txt', 'txt_raw', 'unfolding', 'using', 'write',
    )

    keyword_proof_asm = ('assume', 'case', 'def', 'fix', 'presume')

    keyword_proof_asm_goal = ('guess', 'obtain', 'show', 'thus')

    keyword_proof_script = (
        'apply', 'apply_end', 'apply_trace', 'back', 'defer', 'prefer',
    )

    operators = (
        '::', ':', '(', ')', '[', ']', '_', '=', ',', '|',
        '+', '-', '!', '?',
    )

    proof_operators = ('{', '}', '.', '..')

    tokens = {
        'root': [
            (r'\s+', Text),
            (r'\(\*', Comment, 'comment'),
            (r'\{\*', Comment, 'text'),
            (words(operators), Operator),
            (words(proof_operators), Operator.Word),
            (words(keyword_minor, prefix=r'\b', suffix=r'\b'), Keyword.Pseudo),
            (words(keyword_diag, prefix=r'\b', suffix=r'\b'), Keyword.Type),
            (words(keyword_thy, prefix=r'\b', suffix=r'\b'), Keyword),
            (words(keyword_theory_decl, prefix=r'\b', suffix=r'\b'), Keyword),
            (words(keyword_section, prefix=r'\b', suffix=r'\b'), Generic.Heading),
            (words(keyword_subsection, prefix=r'\b', suffix=r'\b'), Generic.Subheading),
            (words(keyword_theory_goal, prefix=r'\b', suffix=r'\b'), Keyword.Namespace),
            (words(keyword_theory_script, prefix=r'\b', suffix=r'\b'), Keyword.Namespace),
            (words(keyword_abandon_proof, prefix=r'\b', suffix=r'\b'), Generic.Error),
            (words(keyword_qed, prefix=r'\b', suffix=r'\b'), Keyword),
            (words(keyword_proof_goal, prefix=r'\b', suffix=r'\b'), Keyword),
            (words(keyword_proof_block, prefix=r'\b', suffix=r'\b'), Keyword),
            (words(keyword_proof_decl, prefix=r'\b', suffix=r'\b'), Keyword),
            (words(keyword_proof_chain, prefix=r'\b', suffix=r'\b'), Keyword),
            (words(keyword_proof_asm, prefix=r'\b', suffix=r'\b'), Keyword),
            (words(keyword_proof_asm_goal, prefix=r'\b', suffix=r'\b'), Keyword),
            (words(keyword_proof_script, prefix=r'\b', suffix=r'\b'), Keyword.Pseudo),
            # Isabelle symbol escapes such as \<forall>.
            (r'\\<\w*>', Text.Symbol),
            (r"[^\W\d][.\w']*", Name),
            (r"\?[^\W\d][.\w']*", Name),  # schematic variables ?x
            (r"'[^\W\d][.\w']*", Name.Type),  # type variables 'a
            (r'\d[\d_]*', Name),  # display numbers as name
            (r'0[xX][\da-fA-F][\da-fA-F_]*', Number.Hex),
            (r'0[oO][0-7][0-7_]*', Number.Oct),
            (r'0[bB][01][01_]*', Number.Bin),
            (r'"', String, 'string'),
            (r'`', String.Other, 'fact'),  # back-quoted fact references
        ],
        'comment': [
            # (* ... *) comments nest.
            (r'[^(*)]+', Comment),
            (r'\(\*', Comment, '#push'),
            (r'\*\)', Comment, '#pop'),
            (r'[(*)]', Comment),
        ],
        'text': [
            # {* ... *} verbatim text blocks.
            (r'[^\*\}]+', Comment),
            (r'\*\}', Comment, '#pop'),
            (r'\*', Comment),
            (r'\}', Comment),
        ],
        'string': [
            (r'[^"\\]+', String),
            (r'\\<\w*>', String.Symbol),
            (r'\\"', String),
            (r'\\', String),
            (r'"', String, '#pop'),
        ],
        'fact': [
            (r'[^`\\]+', String.Other),
            (r'\\<\w*>', String.Symbol),
            (r'\\`', String.Other),
            (r'\\', String.Other),
            (r'`', String.Other, '#pop'),
        ],
    }
class LeanLexer(RegexLexer):
    """
    For the `Lean <https://github.com/leanprover/lean>`_
    theorem prover.

    .. versionadded:: 2.0
    """
    name = 'Lean'
    aliases = ['lean']
    filenames = ['*.lean']
    mimetypes = ['text/x-lean']

    # Lean identifiers and operators use many non-ASCII characters, so the
    # patterns must run in Unicode mode.
    flags = re.MULTILINE | re.UNICODE

    keywords1 = ('import', 'abbreviation', 'opaque_hint', 'tactic_hint', 'definition', 'renaming',
                 'inline', 'hiding', 'exposing', 'parameter', 'parameters', 'conjecture',
                 'hypothesis', 'lemma', 'corollary', 'variable', 'variables', 'print', 'theorem',
                 'axiom', 'inductive', 'structure', 'universe', 'alias', 'help',
                 'options', 'precedence', 'postfix', 'prefix', 'calc_trans', 'calc_subst', 'calc_refl',
                 'infix', 'infixl', 'infixr', 'notation', 'eval', 'check', 'exit', 'coercion', 'end',
                 'private', 'using', 'namespace', 'including', 'instance', 'section', 'context',
                 'protected', 'expose', 'export', 'set_option', 'add_rewrite', 'extends')

    keywords2 = (
        'forall', 'exists', 'fun', 'Pi', 'obtain', 'from', 'have', 'show', 'assume', 'take',
        'let', 'if', 'else', 'then', 'by', 'in', 'with', 'begin', 'proof', 'qed', 'calc'
    )

    keywords3 = (
        # Sorts
        'Type', 'Prop',
    )

    keywords4 = (
        # Tactics
        'apply', 'and_then', 'or_else', 'append', 'interleave', 'par', 'fixpoint', 'repeat',
        'at_most', 'discard', 'focus_at', 'rotate', 'try_for', 'now', 'assumption', 'eassumption',
        'state', 'intro', 'generalize', 'exact', 'unfold', 'beta', 'trace', 'focus', 'repeat1',
        'determ', 'destruct', 'try', 'auto', 'intros'
    )

    # NOTE(review): '#' appears twice in this tuple; the duplicate is
    # harmless for matching but could be dropped.
    operators = (
        '!=', '#', '&', '&&', '*', '+', '-', '/', '#', '@',
        '-.', '->', '.', '..', '...', '::', ':>', ';', ';;', '<',
        '<-', '=', '==', '>', '_', '`', '|', '||', '~', '=>', '<=', '>=',
        '/\\', '\\/', u'∀', u'Π', u'λ', u'↔', u'∧', u'∨', u'≠', u'≤', u'≥',
        u'¬', u'⁻¹', u'⬝', u'▸', u'→', u'∃', u'ℕ', u'ℤ', u'≈'
    )

    word_operators = ('and', 'or', 'not', 'iff', 'eq')

    punctuation = ('(', ')', ':', '{', '}', '[', ']', u'⦃', u'⦄', ':=', ',')

    primitives = ('unit', 'int', 'bool', 'string', 'char', 'list',
                  'array', 'prod', 'sum', 'pair', 'real', 'nat', 'num', 'path')

    tokens = {
        'root': [
            (r'\s+', Text),
            (r'\b(false|true)\b|\(\)|\[\]', Name.Builtin.Pseudo),
            (r'/-', Comment, 'comment'),  # nested block comment
            (r'--.*?$', Comment.Single),  # line comment
            (words(keywords1, prefix=r'\b', suffix=r'\b'), Keyword.Namespace),
            (words(keywords2, prefix=r'\b', suffix=r'\b'), Keyword),
            (words(keywords3, prefix=r'\b', suffix=r'\b'), Keyword.Type),
            (words(keywords4, prefix=r'\b', suffix=r'\b'), Keyword),
            (words(operators), Name.Builtin.Pseudo),
            (words(word_operators, prefix=r'\b', suffix=r'\b'), Name.Builtin.Pseudo),
            (words(punctuation), Operator),
            (words(primitives, prefix=r'\b', suffix=r'\b'), Keyword.Type),
            # Identifiers may start with Greek letters and letterlike symbols
            # and continue with sub/superscript characters.
            (u"[A-Za-z_\u03b1-\u03ba\u03bc-\u03fb\u1f00-\u1ffe\u2100-\u214f]"
             u"[A-Za-z_'\u03b1-\u03ba\u03bc-\u03fb\u1f00-\u1ffe\u2070-\u2079"
             u"\u207f-\u2089\u2090-\u209c\u2100-\u214f]*", Name),
            (r'\d+', Number.Integer),
            (r'"', String.Double, 'string'),
            (r'[~?][a-z][\w\']*:', Name.Variable)
        ],
        'comment': [
            # Multiline Comments
            (r'[^/-]', Comment.Multiline),
            (r'/-', Comment.Multiline, '#push'),
            (r'-/', Comment.Multiline, '#pop'),
            (r'[/-]', Comment.Multiline)
        ],
        'string': [
            (r'[^\\"]+', String.Double),
            (r'\\[n"\\]', String.Escape),
            ('"', String.Double, '#pop'),
        ],
    }
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import annotations
DOCUMENTATION = """
name: free
short_description: Executes tasks without waiting for all hosts
description:
- Task execution is as fast as possible per batch as defined by C(serial) (default all).
Ansible will not wait for other hosts to finish the current task before queuing more tasks for other hosts.
All hosts are still attempted for the current task, but it prevents blocking new tasks for hosts that have already finished.
- With the free strategy, unlike the default linear strategy, a host that is slow or stuck on a specific task
won't hold up the rest of the hosts and tasks.
version_added: "2.0"
author: Ansible Core Team
"""
import time
from ansible import constants as C
from ansible.errors import AnsibleError, AnsibleParserError
from ansible.playbook.handler import Handler
from ansible.playbook.included_file import IncludedFile
from ansible.plugins.loader import action_loader
from ansible.plugins.strategy import StrategyBase
from ansible._internal._templating._engine import TemplateEngine
from ansible.utils.display import Display
# Module-level display used for debug/warning output throughout this plugin.
display = Display()
class StrategyModule(StrategyBase):
    """Implements the "free" strategy: each host is handed its next task as
    soon as a worker is available, instead of keeping all hosts in lock-step
    per task as the linear strategy does."""
    # This strategy manages throttling on its own, so we don't want it done in queue_task
    ALLOW_BASE_THROTTLING = False
    def __init__(self, tqm):
        super(StrategyModule, self).__init__(tqm)
        # NOTE(review): only ever read in this file; presumably set to True by
        # a host-pinned variant of this strategy -- confirm.
        self._host_pinned = False
    def run(self, iterator, play_context):
        """
        The "free" strategy is a bit more complex, in that it allows tasks to
        be sent to hosts as quickly as they can be processed. This means that
        some hosts may finish very quickly if run tasks result in little or no
        work being done versus other systems.
        The algorithm used here also tries to be more "fair" when iterating
        through hosts by remembering the last host in the list to be given a task
        and starting the search from there as opposed to the top of the hosts
        list again, which would end up favoring hosts near the beginning of the
        list.
        """
        # the last host to be given a task
        last_host = 0
        result = self._tqm.RUN_OK
        # start with all workers being counted as being free
        workers_free = len(self._workers)
        self._set_hosts_cache(iterator._play)
        if iterator._play.max_fail_percentage is not None:
            display.warning("Using max_fail_percentage with the free strategy is not supported, as tasks are executed independently on each host")
        work_to_do = True
        while work_to_do and not self._tqm._terminated:
            hosts_left = self.get_hosts_left(iterator)
            if len(hosts_left) == 0:
                self._tqm.send_callback('v2_playbook_on_no_hosts_remaining')
                result = False
                break
            work_to_do = False  # assume we have no more work to do
            starting_host = last_host  # save current position so we know when we've looped back around and need to break
            # try and find an unblocked host with a task to run
            host_results = []
            # meta tasks run inline and produce no worker result, so they are
            # counted separately to restore workers_free after the inner loop
            meta_task_dummy_results_count = 0
            while True:
                host = hosts_left[last_host]
                display.debug("next free host: %s" % host)
                host_name = host.get_name()
                # peek at the next task for the host, to see if there's
                # anything to do for this host
                (state, task) = iterator.get_next_task_for_host(host, peek=True)
                display.debug("free host state: %s" % state, host=host_name)
                display.debug("free host task: %s" % task, host=host_name)
                # check if there is work to do, either there is a task or the host is still blocked which could
                # mean that it is processing an include task and after its result is processed there might be
                # more tasks to run
                if (task or self._blocked_hosts.get(host_name, False)) and not self._tqm._unreachable_hosts.get(host_name, False):
                    display.debug("this host has work to do", host=host_name)
                    # set the flag so the outer loop knows we've still found
                    # some work which needs to be done
                    work_to_do = True
                if not self._tqm._unreachable_hosts.get(host_name, False) and task:
                    # check to see if this host is blocked (still executing a previous task)
                    if not self._blocked_hosts.get(host_name, False):
                        display.debug("getting variables", host=host_name)
                        task_vars = self._variable_manager.get_vars(play=iterator._play, host=host, task=task,
                                                                    _hosts=self._hosts_cache,
                                                                    _hosts_all=self._hosts_cache_all)
                        self.add_tqm_variables(task_vars, play=iterator._play)
                        templar = TemplateEngine(loader=self._loader, variables=task_vars)
                        display.debug("done getting variables", host=host_name)
                        try:
                            throttle = int(templar.template(task.throttle))
                        except Exception as ex:
                            raise AnsibleError("Failed to convert the throttle value to an integer.", obj=task.throttle) from ex
                        if throttle > 0:
                            same_tasks = 0
                            for worker in self._workers:
                                if worker and worker.is_alive() and worker._task._uuid == task._uuid:
                                    same_tasks += 1
                            display.debug("task: %s, same_tasks: %d" % (task.get_name(), same_tasks))
                            # throttle ceiling reached for this task: leave
                            # this host for a later pass instead of queueing
                            if same_tasks >= throttle:
                                break
                        # advance the host, mark the host blocked, and queue it
                        self._blocked_hosts[host_name] = True
                        iterator.set_state_for_host(host.name, state)
                        if isinstance(task, Handler):
                            task.remove_host(host)
                        try:
                            action = action_loader.get(task.action, class_only=True, collection_list=task.collections)
                        except KeyError:
                            # we don't care here, because the action may simply not have a
                            # corresponding action plugin
                            action = None
                        task.post_validate_attribute("name", templar=templar)
                        run_once = templar.template(task.run_once) or action and getattr(action, 'BYPASS_HOST_LOOP', False)
                        if run_once:
                            if action and getattr(action, 'BYPASS_HOST_LOOP', False):
                                raise AnsibleError("The '%s' module bypasses the host loop, which is currently not supported in the free strategy "
                                                   "and would instead execute for every host in the inventory list." % task.action, obj=task._ds)
                            else:
                                display.warning("Using run_once with the free strategy is not currently supported. This task will still be "
                                                "executed for every host in the inventory list.")
                        if task.action in C._ACTION_META:
                            if self._host_pinned:
                                meta_task_dummy_results_count += 1
                                workers_free -= 1
                            self._execute_meta(task, play_context, iterator, target_host=host)
                            self._blocked_hosts[host_name] = False
                        else:
                            # handle step if needed, skip meta actions as they are used internally
                            if not self._step or self._take_step(task, host_name):
                                if task.any_errors_fatal:
                                    display.warning("Using any_errors_fatal with the free strategy is not supported, "
                                                    "as tasks are executed independently on each host")
                                if isinstance(task, Handler):
                                    self._tqm.send_callback('v2_playbook_on_handler_task_start', task)
                                else:
                                    self._tqm.send_callback('v2_playbook_on_task_start', task, is_conditional=False)
                                self._queue_task(host, task, task_vars, play_context)
                                # each task is counted as a worker being busy
                                workers_free -= 1
                        del task_vars
                    else:
                        display.debug("%s is blocked, skipping for now" % host_name)
                    # all workers have tasks to do (and the current host isn't done with the play).
                    # loop back to starting host and break out
                    if self._host_pinned and workers_free == 0 and work_to_do:
                        last_host = starting_host
                        break
                # move on to the next host and make sure we
                # haven't gone past the end of our hosts list
                last_host += 1
                if last_host > len(hosts_left) - 1:
                    last_host = 0
                # if we've looped around back to the start, break out
                if last_host == starting_host:
                    break
            results = self._process_pending_results(iterator)
            host_results.extend(results)
            # each result is counted as a worker being free again
            workers_free += len(results) + meta_task_dummy_results_count
            self.update_active_connections(results)
            included_files = IncludedFile.process_include_results(
                host_results,
                iterator=iterator,
                loader=self._loader,
                variable_manager=self._variable_manager
            )
            if len(included_files) > 0:
                all_blocks = dict((host, []) for host in hosts_left)
                failed_includes_hosts = set()
                for included_file in included_files:
                    display.debug("collecting new blocks for %s" % included_file)
                    is_handler = False
                    try:
                        if included_file._is_role:
                            new_ir = self._copy_included_file(included_file)
                            new_blocks, handler_blocks = new_ir.get_block_list(
                                play=iterator._play,
                                variable_manager=self._variable_manager,
                                loader=self._loader,
                            )
                        else:
                            is_handler = isinstance(included_file._task, Handler)
                            new_blocks = self._load_included_file(
                                included_file,
                                iterator=iterator,
                                is_handler=is_handler,
                            )
                        # let PlayIterator know about any new handlers included via include_role or
                        # import_role within include_role/include_tasks
                        iterator.handlers = [h for b in iterator._play.handlers for h in b.block]
                    except AnsibleParserError:
                        raise
                    except AnsibleError as ex:
                        # FIXME: send the error to the callback; don't directly write to display here
                        display.error(ex)
                        for r in included_file._results:
                            r._return_data['failed'] = True
                            r._return_data['reason'] = str(ex)
                            self._tqm._stats.increment('failures', r.host.name)
                            self._tqm.send_callback('v2_runner_on_failed', r)
                            failed_includes_hosts.add(r.host)
                        continue
                    else:
                        # since we skip incrementing the stats when the task result is
                        # first processed, we do so now for each host in the list
                        for host in included_file._hosts:
                            self._tqm._stats.increment('ok', host.name)
                        self._tqm.send_callback('v2_playbook_on_include', included_file)
                        for new_block in new_blocks:
                            if is_handler:
                                for task in new_block.block:
                                    task.notified_hosts = included_file._hosts[:]
                                final_block = new_block
                            else:
                                task_vars = self._variable_manager.get_vars(
                                    play=iterator._play,
                                    task=new_block.get_first_parent_include(),
                                    _hosts=self._hosts_cache,
                                    _hosts_all=self._hosts_cache_all,
                                )
                                final_block = new_block.filter_tagged_tasks(task_vars)
                            for host in hosts_left:
                                if host in included_file._hosts:
                                    all_blocks[host].append(final_block)
                    display.debug("done collecting new blocks for %s" % included_file)
                for host in failed_includes_hosts:
                    self._tqm._failed_hosts[host.name] = True
                    iterator.mark_host_failed(host)
                display.debug("adding all collected blocks from %d included file(s) to iterator" % len(included_files))
                for host in hosts_left:
                    iterator.add_tasks(host, all_blocks[host])
                display.debug("done adding collected blocks to iterator")
            # pause briefly so we don't spin lock
            time.sleep(C.DEFAULT_INTERNAL_POLL_INTERVAL)
        # collect all the final results
        results = self._wait_on_pending_results(iterator)
        # run the base class run() method, which executes the cleanup function
        # and runs any outstanding handlers which have been triggered
        return super(StrategyModule, self).run(iterator, play_context, result)
// Copyright 2015 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package html_test
import (
"fmt"
"html"
)
// ExampleEscapeString demonstrates html.EscapeString.
// The Output comment must contain the ESCAPED text (the extraction that
// produced this file had decoded the entities, which would make the
// example fail under `go test`).
func ExampleEscapeString() {
	const s = `"Fran & Freddie's Diner" <tasty@example.com>`
	fmt.Println(html.EscapeString(s))
	// Output: &#34;Fran &amp; Freddie&#39;s Diner&#34; &lt;tasty@example.com&gt;
}
// ExampleUnescapeString demonstrates html.UnescapeString.
// The input must be the ESCAPED text (the extraction that produced this
// file had decoded the entities, making the example a no-op).
func ExampleUnescapeString() {
	const s = `&#34;Fran &amp; Freddie&#39;s Diner&#34; &lt;tasty@example.com&gt;`
	fmt.Println(html.UnescapeString(s))
	// Output: "Fran & Freddie's Diner" <tasty@example.com>
}
from abjad.tools import abctools
from abjad.tools import systemtools
class TimespanCollection(abctools.AbjadObject):
r'''A mutable always-sorted collection of timespans.
::
>>> timespans = (
... abjad.Timespan(0, 3),
... abjad.Timespan(1, 3),
... abjad.Timespan(1, 2),
... abjad.Timespan(2, 5),
... abjad.Timespan(6, 9),
... )
>>> timespan_collection = consort.TimespanCollection(timespans)
'''
### CLASS VARIABLES ###
__slots__ = (
'_root_node',
)
### INITIALIZER ###
def __init__(
self,
timespans=None,
):
self._root_node = None
if timespans is not None and timespans:
self.insert(timespans)
### SPECIAL METHODS ###
    def __contains__(self, timespan):
        r'''Is true if this timespan collection contains `timespan`. Otherwise
        false.
        ::
            >>> timespans = (
            ...     abjad.Timespan(0, 3),
            ...     abjad.Timespan(1, 3),
            ...     abjad.Timespan(1, 2),
            ...     abjad.Timespan(2, 5),
            ...     abjad.Timespan(6, 9),
            ...     )
            >>> timespan_collection = consort.TimespanCollection(timespans)
        ::
            >>> timespans[0] in timespan_collection
            True
        ::
            >>> abjad.Timespan(-1, 100) in timespan_collection
            False
        Returns boolean.
        '''
        assert TimespanCollection._is_timespan(timespan)
        # Membership only requires scanning the single node keyed by the
        # timespan's start offset.
        candidates = self.find_timespans_starting_at(timespan.start_offset)
        result = timespan in candidates
        return result
    def __getitem__(self, i):
        r'''Gets timespan at index `i`.
        ::
            >>> timespans = (
            ...     abjad.Timespan(0, 3),
            ...     abjad.Timespan(1, 3),
            ...     abjad.Timespan(1, 2),
            ...     abjad.Timespan(2, 5),
            ...     abjad.Timespan(6, 9),
            ...     )
            >>> timespan_collection = consort.TimespanCollection(timespans)
        ::
            >>> timespan_collection[-1]
            Timespan(start_offset=Offset(6, 1), stop_offset=Offset(9, 1))
        ::
            >>> for timespan in timespan_collection[:3]:
            ...     timespan
            ...
            Timespan(start_offset=Offset(0, 1), stop_offset=Offset(3, 1))
            Timespan(start_offset=Offset(1, 1), stop_offset=Offset(2, 1))
            Timespan(start_offset=Offset(1, 1), stop_offset=Offset(3, 1))
        Returns timespan or timespans.
        '''
        def recurse_by_index(node, index):
            # Each node caches the flat index range of its own payload
            # (node_start_index/node_stop_index); descend toward the child
            # whose subtree range covers *index*.
            if node.node_start_index <= index < node.node_stop_index:
                return node.payload[index - node.node_start_index]
            elif node.left_child and index < node.node_start_index:
                return recurse_by_index(node.left_child, index)
            elif node.right_child and node.node_stop_index <= index:
                return recurse_by_index(node.right_child, index)
        def recurse_by_slice(node, start, stop):
            # In-order collection of every payload slice intersecting
            # the half-open flat range [start, stop).
            result = []
            if node is None:
                return result
            if start < node.node_start_index and node.left_child:
                result.extend(recurse_by_slice(node.left_child, start, stop))
            if start < node.node_stop_index and node.node_start_index < stop:
                node_start = start - node.node_start_index
                if node_start < 0:
                    node_start = 0
                node_stop = stop - node.node_start_index
                result.extend(node.payload[node_start:node_stop])
            if node.node_stop_index <= stop and node.right_child:
                result.extend(recurse_by_slice(node.right_child, start, stop))
            return result
        if isinstance(i, int):
            if self._root_node is None:
                raise IndexError
            if i < 0:
                i = self._root_node.subtree_stop_index + i
            if i < 0 or self._root_node.subtree_stop_index <= i:
                raise IndexError
            return recurse_by_index(self._root_node, i)
        elif isinstance(i, slice):
            if self._root_node is None:
                return []
            indices = i.indices(self._root_node.subtree_stop_index)
            start, stop = indices[0], indices[1]
            return recurse_by_slice(self._root_node, start, stop)
        raise TypeError('Indices must be integers or slices, got {}'.format(i))
    def __iter__(self):
        r'''Iterates timespans in this timespan collection.
        ::
            >>> timespans = (
            ...     abjad.Timespan(0, 3),
            ...     abjad.Timespan(1, 3),
            ...     abjad.Timespan(1, 2),
            ...     abjad.Timespan(2, 5),
            ...     abjad.Timespan(6, 9),
            ...     )
            >>> timespan_collection = consort.TimespanCollection(timespans)
        ::
            >>> for timespan in timespan_collection:
            ...     timespan
            ...
            Timespan(start_offset=Offset(0, 1), stop_offset=Offset(3, 1))
            Timespan(start_offset=Offset(1, 1), stop_offset=Offset(2, 1))
            Timespan(start_offset=Offset(1, 1), stop_offset=Offset(3, 1))
            Timespan(start_offset=Offset(2, 1), stop_offset=Offset(5, 1))
            Timespan(start_offset=Offset(6, 1), stop_offset=Offset(9, 1))
        Returns generator.
        '''
        # In-order traversal: start offsets ascend, and within one node the
        # payload is already sorted by stop offset (see _insert_timespan).
        def recurse(node):
            if node is not None:
                if node.left_child is not None:
                    for timespan in recurse(node.left_child):
                        yield timespan
                for timespan in node.payload:
                    yield timespan
                if node.right_child is not None:
                    for timespan in recurse(node.right_child):
                        yield timespan
        return recurse(self._root_node)
def __len__(self):
r'''Gets length of this timespan collection.
::
>>> timespans = (
... abjad.Timespan(0, 3),
... abjad.Timespan(1, 3),
... abjad.Timespan(1, 2),
... abjad.Timespan(2, 5),
... abjad.Timespan(6, 9),
... )
>>> timespan_collection = consort.TimespanCollection(timespans)
::
>>> len(timespan_collection)
5
Returns integer.
'''
if self._root_node is None:
return 0
return self._root_node.subtree_stop_index
def __setitem__(self, i, new):
r'''Sets timespans at index `i` to `new`.
::
>>> timespans = (
... abjad.Timespan(0, 3),
... abjad.Timespan(1, 3),
... abjad.Timespan(1, 2),
... abjad.Timespan(2, 5),
... abjad.Timespan(6, 9),
... )
>>> timespan_collection = consort.TimespanCollection(timespans)
::
>>> timespan_collection[:3] = [abjad.Timespan(100, 200)]
Returns none.
'''
if isinstance(i, (int, slice)):
old = self[i]
self.remove(old)
self.insert(new)
else:
message = 'Indices must be ints or slices, got {}'.format(i)
raise TypeError(message)
    def __sub__(self, timespan):
        r'''Delete material that intersects `timespan`:
        ::
            >>> timespan_collection = consort.TimespanCollection([
            ...     abjad.Timespan(0, 16),
            ...     abjad.Timespan(5, 12),
            ...     abjad.Timespan(-2, 8),
            ...     ])
        ::
            >>> timespan = abjad.Timespan(5, 10)
            >>> result = timespan_collection - timespan
        ::
            >>> print(format(timespan_collection))
            consort.tools.TimespanCollection(
                [
                    abjad.Timespan(
                        start_offset=abjad.Offset(-2, 1),
                        stop_offset=abjad.Offset(5, 1),
                        ),
                    abjad.Timespan(
                        start_offset=abjad.Offset(0, 1),
                        stop_offset=abjad.Offset(5, 1),
                        ),
                    abjad.Timespan(
                        start_offset=abjad.Offset(10, 1),
                        stop_offset=abjad.Offset(12, 1),
                        ),
                    abjad.Timespan(
                        start_offset=abjad.Offset(10, 1),
                        stop_offset=abjad.Offset(16, 1),
                        ),
                    ]
                )
        Operates in place and returns timespan collection.
        '''
        # Remove every intersecting timespan, then re-insert the fragments
        # produced by subtracting *timespan* from each of them.
        intersecting_timespans = self.find_timespans_intersecting_timespan(
            timespan)
        self.remove(intersecting_timespans)
        for intersecting_timespan in intersecting_timespans:
            for x in (intersecting_timespan - timespan):
                self.insert(x)
        return self
### PRIVATE METHODS ###
    def _insert_node(self, node, start_offset):
        # Standard AVL insertion: create a leaf for an unseen start offset,
        # otherwise recurse toward the matching side, rebalancing on the way
        # back up.  Equal keys reuse the existing node.
        import consort
        if node is None:
            return consort.TimespanCollectionNode(start_offset)
        if start_offset < node.start_offset:
            node.left_child = self._insert_node(node.left_child, start_offset)
        elif node.start_offset < start_offset:
            node.right_child = self._insert_node(node.right_child, start_offset)
        return self._rebalance(node)
    def _insert_timespan(self, timespan):
        # Ensure a node exists for the timespan's start offset, then file the
        # timespan into that node's payload, kept sorted by stop offset.
        self._root_node = self._insert_node(
            self._root_node,
            timespan.start_offset,
            )
        node = self._search(self._root_node, timespan.start_offset)
        node.payload.append(timespan)
        node.payload.sort(key=lambda x: x.stop_offset)
@staticmethod
def _is_timespan(expr):
if hasattr(expr, 'start_offset') and hasattr(expr, 'stop_offset'):
return True
return False
    def _rebalance(self, node):
        # AVL rebalancing: a balance factor outside [-1, 1] triggers a single
        # or double rotation, chosen by the sign of the heavier child's
        # balance factor.
        if node is not None:
            if 1 < node.balance:
                if 0 <= node.right_child.balance:
                    node = self._rotate_right_right(node)
                else:
                    node = self._rotate_right_left(node)
            elif node.balance < -1:
                if node.left_child.balance <= 0:
                    node = self._rotate_left_left(node)
                else:
                    node = self._rotate_left_right(node)
            assert -1 <= node.balance <= 1
        return node
    def _remove_node(self, node, start_offset):
        # Standard AVL deletion: a node with two children takes over the key
        # and payload of its in-order successor, which is then deleted from
        # the right subtree; otherwise the lone child (or None) is spliced in.
        if node is not None:
            if node.start_offset == start_offset:
                if node.left_child and node.right_child:
                    next_node = node.right_child
                    while next_node.left_child:
                        next_node = next_node.left_child
                    node._start_offset = next_node._start_offset
                    node._payload = next_node._payload
                    node.right_child = self._remove_node(
                        node.right_child,
                        next_node.start_offset,
                        )
                else:
                    node = node.left_child or node.right_child
            elif start_offset < node.start_offset:
                node.left_child = self._remove_node(
                    node.left_child,
                    start_offset,
                    )
            elif node.start_offset < start_offset:
                node.right_child = self._remove_node(
                    node.right_child,
                    start_offset,
                    )
        return self._rebalance(node)
    def _remove_timespan(self, timespan, old_start_offset=None):
        # *old_start_offset* lets callers remove a timespan whose start offset
        # has been mutated away from the key it was originally stored under.
        start_offset = timespan.start_offset
        if old_start_offset is not None:
            start_offset = old_start_offset
        node = self._search(self._root_node, start_offset)
        if node is None:
            return
        if timespan in node.payload:
            node.payload.remove(timespan)
        # Drop the tree node entirely once its payload is empty.
        if not node.payload:
            self._root_node = self._remove_node(
                self._root_node,
                start_offset,
                )
        # NOTE(review): this class declares __slots__ = ('_root_node',) and
        # never defines ``_parents``; if a nested TimespanCollection ever
        # reaches this branch it would raise AttributeError -- confirm intent.
        if isinstance(timespan, TimespanCollection):
            timespan._parents.remove(self)
def _rotate_left_left(self, node):
next_node = node.left_child
node.left_child = next_node.right_child
next_node.right_child = node
return next_node
def _rotate_left_right(self, node):
node.left_child = self._rotate_right_right(node.left_child)
next_node = self._rotate_left_left(node)
return next_node
def _rotate_right_left(self, node):
node.right_child = self._rotate_left_left(node.right_child)
next_node = self._rotate_right_right(node)
return next_node
def _rotate_right_right(self, node):
next_node = node.right_child
node.right_child = next_node.left_child
next_node.left_child = node
return next_node
def _search(self, node, start_offset):
if node is not None:
if node.start_offset == start_offset:
return node
elif node.left_child and start_offset < node.start_offset:
return self._search(node.left_child, start_offset)
elif node.right_child and node.start_offset < start_offset:
return self._search(node.right_child, start_offset)
return None
    def _update_indices(
        self,
        node,
        ):
        # Recompute the flat (list-like) index ranges cached on every node:
        # node_start_index/node_stop_index span the node's own payload, while
        # subtree_start_index/subtree_stop_index span its whole subtree.
        def recurse(
            node,
            parent_stop_index=None,
            ):
            if node is None:
                return
            if node.left_child is not None:
                recurse(
                    node.left_child,
                    parent_stop_index=parent_stop_index,
                    )
                node._node_start_index = node.left_child.subtree_stop_index
                node._subtree_start_index = node.left_child.subtree_start_index
            elif parent_stop_index is None:
                # Leftmost node of the whole tree starts at flat index 0.
                node._node_start_index = 0
                node._subtree_start_index = 0
            else:
                node._node_start_index = parent_stop_index
                node._subtree_start_index = parent_stop_index
            node._node_stop_index = node.node_start_index + len(node.payload)
            node._subtree_stop_index = node.node_stop_index
            if node.right_child is not None:
                recurse(
                    node.right_child,
                    parent_stop_index=node.node_stop_index,
                    )
                node._subtree_stop_index = node.right_child.subtree_stop_index
        recurse(node)
    def _update_offsets(
        self,
        node,
        ):
        # Recompute the cached subtree-wide min/max stop offsets
        # (stop_offset_low/stop_offset_high), bottom-up; these bounds are
        # used to gate payload scans in the find_* queries.
        if node is None:
            return
        stop_offset_low = min(x.stop_offset for x in node.payload)
        stop_offset_high = max(x.stop_offset for x in node.payload)
        if node.left_child:
            left_child = self._update_offsets(
                node.left_child,
                )
            if left_child.stop_offset_low < stop_offset_low:
                stop_offset_low = left_child.stop_offset_low
            if stop_offset_high < left_child.stop_offset_high:
                stop_offset_high = left_child.stop_offset_high
        if node.right_child:
            right_child = self._update_offsets(
                node.right_child,
                )
            if right_child.stop_offset_low < stop_offset_low:
                stop_offset_low = right_child.stop_offset_low
            if stop_offset_high < right_child.stop_offset_high:
                stop_offset_high = right_child.stop_offset_high
        node._stop_offset_low = stop_offset_low
        node._stop_offset_high = stop_offset_high
        return node
def _get_format_specification(self):
values = []
timespans = [x for x in self]
if timespans:
values.append(timespans)
names = []
return systemtools.FormatSpecification(
client=self,
storage_format_args_values=values,
storage_format_kwargs_names=names,
)
### PUBLIC METHODS ###
def find_timespans_starting_at(self, offset):
results = []
node = self._search(self._root_node, offset)
if node is not None:
results.extend(node.payload)
return tuple(results)
    def find_timespans_stopping_at(self, offset):
        # Visits every node; the cached [stop_offset_low, stop_offset_high]
        # subtree range only gates the per-node payload scan.
        def recurse(node, offset):
            result = []
            if node is not None:
                if node.stop_offset_low <= offset <= node.stop_offset_high:
                    for timespan in node.payload:
                        if timespan.stop_offset == offset:
                            result.append(timespan)
                if node.left_child is not None:
                    result.extend(recurse(node.left_child, offset))
                if node.right_child is not None:
                    result.extend(recurse(node.right_child, offset))
            return result
        results = recurse(self._root_node, offset)
        results.sort(key=lambda x: (x.start_offset, x.stop_offset))
        return tuple(results)
    def find_timespans_overlapping_offset(self, offset):
        r'''Finds timespans overlapping `offset`.
        ::
            >>> timespans = (
            ...     abjad.Timespan(0, 3),
            ...     abjad.Timespan(1, 3),
            ...     abjad.Timespan(1, 2),
            ...     abjad.Timespan(2, 5),
            ...     abjad.Timespan(6, 9),
            ...     )
            >>> timespan_collection = consort.TimespanCollection(timespans)
        ::
            >>> for x in timespan_collection.find_timespans_overlapping_offset(1.5):
            ...     x
            ...
            Timespan(start_offset=Offset(0, 1), stop_offset=Offset(3, 1))
            Timespan(start_offset=Offset(1, 1), stop_offset=Offset(2, 1))
            Timespan(start_offset=Offset(1, 1), stop_offset=Offset(3, 1))
        Returns tuple of 0 or more timespans.
        '''
        # An overlapping timespan must start strictly before *offset* and
        # stop strictly after it, so subtrees rooted at/after *offset* only
        # need their left arm explored.
        def recurse(node, offset, indent=0):
            result = []
            if node is not None:
                if node.start_offset < offset < node.stop_offset_high:
                    result.extend(recurse(node.left_child, offset, indent + 1))
                    for timespan in node.payload:
                        if offset < timespan.stop_offset:
                            result.append(timespan)
                    result.extend(recurse(node.right_child, offset, indent + 1))
                elif offset <= node.start_offset:
                    result.extend(recurse(node.left_child, offset, indent + 1))
            return result
        results = recurse(self._root_node, offset)
        results.sort(key=lambda x: (x.start_offset, x.stop_offset))
        return tuple(results)
    def find_timespans_intersecting_timespan(self, timespan):
        r'''Finds timespans overlapping `timespan`.
        ::
            >>> timespans = (
            ...     abjad.Timespan(0, 3),
            ...     abjad.Timespan(1, 3),
            ...     abjad.Timespan(1, 2),
            ...     abjad.Timespan(2, 5),
            ...     abjad.Timespan(6, 9),
            ...     )
            >>> timespan_collection = consort.TimespanCollection(timespans)
        ::
            >>> timespan = abjad.Timespan(2, 4)
            >>> for x in timespan_collection.find_timespans_intersecting_timespan(timespan):
            ...     x
            ...
            Timespan(start_offset=Offset(0, 1), stop_offset=Offset(3, 1))
            Timespan(start_offset=Offset(1, 1), stop_offset=Offset(3, 1))
            Timespan(start_offset=Offset(2, 1), stop_offset=Offset(5, 1))
        Returns tuple of 0 or more timespans.
        '''
        def recurse(node, timespan):
            result = []
            if node is not None:
                # NOTE(review): *node* itself is passed to intersects_timespan,
                # so nodes are assumed to duck-type as timespans -- confirm.
                if timespan.intersects_timespan(node):
                    result.extend(recurse(node.left_child, timespan))
                    for candidate_timespan in node.payload:
                        if candidate_timespan.intersects_timespan(timespan):
                            result.append(candidate_timespan)
                    result.extend(recurse(node.right_child, timespan))
                elif (timespan.start_offset <= node.start_offset) or \
                    (timespan.stop_offset <= node.start_offset):
                    result.extend(recurse(node.left_child, timespan))
            return result
        results = recurse(self._root_node, timespan)
        results.sort(key=lambda x: (x.start_offset, x.stop_offset))
        return tuple(results)
    def get_simultaneity_at(self, offset):
        r'''Gets simultaneity at `offset`.
        ::
            >>> timespans = (
            ...     abjad.Timespan(0, 3),
            ...     abjad.Timespan(1, 3),
            ...     abjad.Timespan(1, 2),
            ...     abjad.Timespan(2, 5),
            ...     abjad.Timespan(6, 9),
            ...     )
            >>> timespan_collection = consort.TimespanCollection(timespans)
        ::
            >>> timespan_collection.get_simultaneity_at(1)
            <TimespanSimultaneity(1 <<3>>)>
        ::
            >>> timespan_collection.get_simultaneity_at(6.5)
            <TimespanSimultaneity(6.5 <<1>>)>
        '''
        import consort
        # A simultaneity bundles everything starting at, stopping at, or
        # overlapping *offset* into a single snapshot object.
        start_timespans = self.find_timespans_starting_at(offset)
        stop_timespans = self.find_timespans_stopping_at(offset)
        overlap_timespans = self.find_timespans_overlapping_offset(offset)
        simultaneity = consort.TimespanSimultaneity(
            timespan_collection=self,
            overlap_timespans=overlap_timespans,
            start_timespans=start_timespans,
            start_offset=offset,
            stop_timespans=stop_timespans,
            )
        return simultaneity
    def get_start_offset_after(self, offset):
        r'''Gets start offset in this timespan collection after `offset`.
        ::
            >>> timespans = (
            ...     abjad.Timespan(0, 3),
            ...     abjad.Timespan(1, 3),
            ...     abjad.Timespan(1, 2),
            ...     abjad.Timespan(2, 5),
            ...     abjad.Timespan(6, 9),
            ...     )
            >>> timespan_collection = consort.TimespanCollection(timespans)
        ::
            >>> timespan_collection.get_start_offset_after(-1)
            Offset(0, 1)
        ::
            >>> timespan_collection.get_start_offset_after(0)
            Offset(1, 1)
        ::
            >>> timespan_collection.get_start_offset_after(1)
            Offset(2, 1)
        ::
            >>> timespan_collection.get_start_offset_after(2)
            Offset(6, 1)
        ::
            >>> timespan_collection.get_start_offset_after(6) is None
            True
        '''
        # BST successor search: skip right past keys <= offset; otherwise the
        # current node is a candidate and a tighter one may exist further left.
        def recurse(node, offset):
            if node is None:
                return None
            result = None
            if node.start_offset <= offset and node.right_child:
                result = recurse(node.right_child, offset)
            elif offset < node.start_offset:
                result = recurse(node.left_child, offset) or node
            return result
        result = recurse(self._root_node, offset)
        if result is None:
            return None
        return result.start_offset
    def get_start_offset_before(self, offset):
        r'''Gets start offset in this timespan collection before `offset`.
        ::
            >>> timespans = (
            ...     abjad.Timespan(0, 3),
            ...     abjad.Timespan(1, 3),
            ...     abjad.Timespan(1, 2),
            ...     abjad.Timespan(2, 5),
            ...     abjad.Timespan(6, 9),
            ...     )
            >>> timespan_collection = consort.TimespanCollection(timespans)
        ::
            >>> timespan_collection.get_start_offset_before(7)
            Offset(6, 1)
        ::
            >>> timespan_collection.get_start_offset_before(6)
            Offset(2, 1)
        ::
            >>> timespan_collection.get_start_offset_before(2)
            Offset(1, 1)
        ::
            >>> timespan_collection.get_start_offset_before(1)
            Offset(0, 1)
        ::
            >>> timespan_collection.get_start_offset_before(0) is None
            True
        '''
        # BST predecessor search, mirror image of get_start_offset_after.
        def recurse(node, offset):
            if node is None:
                return None
            result = None
            if node.start_offset < offset:
                result = recurse(node.right_child, offset) or node
            elif offset <= node.start_offset and node.left_child:
                result = recurse(node.left_child, offset)
            return result
        result = recurse(self._root_node, offset)
        if result is None:
            return None
        return result.start_offset
    def index(self, timespan):
        r'''Gets flat index of `timespan` in this timespan collection.

        Raises ValueError when `timespan` is not in the collection.
        '''
        assert self._is_timespan(timespan)
        node = self._search(self._root_node, timespan.start_offset)
        if node is None or timespan not in node.payload:
            raise ValueError('{} not in timespan collection.'.format(timespan))
        # Node's cached start index plus position within its payload.
        index = node.payload.index(timespan) + node.node_start_index
        return index
    def insert(self, timespans):
        r'''Inserts `timespans` into this timespan collection.
        ::
            >>> timespan_collection = consort.TimespanCollection()
            >>> timespan_collection.insert(abjad.Timespan(1, 3))
            >>> timespan_collection.insert((
            ...     abjad.Timespan(0, 4),
            ...     abjad.Timespan(2, 6),
            ...     ))
        ::
            >>> for x in timespan_collection:
            ...     x
            ...
            Timespan(start_offset=Offset(0, 1), stop_offset=Offset(4, 1))
            Timespan(start_offset=Offset(1, 1), stop_offset=Offset(3, 1))
            Timespan(start_offset=Offset(2, 1), stop_offset=Offset(6, 1))
        `timespans` may be a single timespan or an iterable of timespans.
        Returns none.
        '''
        if self._is_timespan(timespans):
            timespans = [timespans]
        for timespan in timespans:
            # Non-timespan items are silently skipped.
            if not self._is_timespan(timespan):
                continue
            self._insert_timespan(timespan)
        # Re-derive the cached flat indices and stop-offset bounds once,
        # after all structural changes.
        self._update_indices(self._root_node)
        self._update_offsets(self._root_node)
    def iterate_simultaneities(
        self,
        reverse=False,
        ):
        r'''Iterates simultaneities in this timespan collection.
        ::
            >>> timespans = (
            ...     abjad.Timespan(0, 3),
            ...     abjad.Timespan(1, 3),
            ...     abjad.Timespan(1, 2),
            ...     abjad.Timespan(2, 5),
            ...     abjad.Timespan(6, 9),
            ...     )
            >>> timespan_collection = consort.TimespanCollection(timespans)
        ::
            >>> for x in timespan_collection.iterate_simultaneities():
            ...     x
            ...
            <TimespanSimultaneity(0 <<1>>)>
            <TimespanSimultaneity(1 <<3>>)>
            <TimespanSimultaneity(2 <<3>>)>
            <TimespanSimultaneity(6 <<1>>)>
        ::
            >>> for x in timespan_collection.iterate_simultaneities(
            ...     reverse=True):
            ...     x
            ...
            <TimespanSimultaneity(6 <<1>>)>
            <TimespanSimultaneity(2 <<3>>)>
            <TimespanSimultaneity(1 <<3>>)>
            <TimespanSimultaneity(0 <<1>>)>
        Returns generator.
        '''
        # Seed at the extreme start offset, then follow each simultaneity's
        # previous/next link until exhausted.
        if reverse:
            start_offset = self.latest_start_offset
            simultaneity = self.get_simultaneity_at(start_offset)
            yield simultaneity
            simultaneity = simultaneity.previous_simultaneity
            while simultaneity is not None:
                yield simultaneity
                simultaneity = simultaneity.previous_simultaneity
        else:
            start_offset = self.earliest_start_offset
            simultaneity = self.get_simultaneity_at(start_offset)
            yield simultaneity
            simultaneity = simultaneity.next_simultaneity
            while simultaneity is not None:
                yield simultaneity
                simultaneity = simultaneity.next_simultaneity
    def iterate_simultaneities_nwise(
        self,
        n=3,
        reverse=False,
        ):
        r'''Iterates simultaneities in this timespan collection in groups of
        `n`.
        ::
            >>> timespans = (
            ...     abjad.Timespan(0, 3),
            ...     abjad.Timespan(1, 3),
            ...     abjad.Timespan(1, 2),
            ...     abjad.Timespan(2, 5),
            ...     abjad.Timespan(6, 9),
            ...     )
            >>> timespan_collection = consort.TimespanCollection(timespans)
        ::
            >>> for x in timespan_collection.iterate_simultaneities_nwise(n=2):
            ...     x
            ...
            (<TimespanSimultaneity(0 <<1>>)>, <TimespanSimultaneity(1 <<3>>)>)
            (<TimespanSimultaneity(1 <<3>>)>, <TimespanSimultaneity(2 <<3>>)>)
            (<TimespanSimultaneity(2 <<3>>)>, <TimespanSimultaneity(6 <<1>>)>)
        ::
            >>> for x in timespan_collection.iterate_simultaneities_nwise(
            ...     n=2, reverse=True):
            ...     x
            ...
            (<TimespanSimultaneity(2 <<3>>)>, <TimespanSimultaneity(6 <<1>>)>)
            (<TimespanSimultaneity(1 <<3>>)>, <TimespanSimultaneity(2 <<3>>)>)
            (<TimespanSimultaneity(0 <<1>>)>, <TimespanSimultaneity(1 <<3>>)>)
        Returns generator.
        '''
        n = int(n)
        assert 0 < n
        # Either way each yielded tuple is in chronological order; only the
        # order in which the windows are produced differs.
        if reverse:
            # Walk backwards, extending each window forward with
            # next_simultaneity, so the window starts at the current point.
            for simultaneity in self.iterate_simultaneities(reverse=True):
                simultaneities = [simultaneity]
                while len(simultaneities) < n:
                    next_simultaneity = simultaneities[-1].next_simultaneity
                    if next_simultaneity is None:
                        break
                    simultaneities.append(next_simultaneity)
                if len(simultaneities) == n:
                    yield tuple(simultaneities)
        else:
            # Walk forwards, extending each window backward with
            # previous_simultaneity, then reverse so the window ends here.
            for simultaneity in self.iterate_simultaneities():
                simultaneities = [simultaneity]
                while len(simultaneities) < n:
                    previous_simultaneity = simultaneities[-1].previous_simultaneity
                    if previous_simultaneity is None:
                        break
                    simultaneities.append(previous_simultaneity)
                if len(simultaneities) == n:
                    yield tuple(reversed(simultaneities))
def remove(self, timespans):
r'''Removes timespans from this timespan collection.
::
>>> timespans = (
... abjad.Timespan(0, 3),
... abjad.Timespan(1, 3),
... abjad.Timespan(1, 2),
... abjad.Timespan(2, 5),
... abjad.Timespan(6, 9),
... )
>>> timespan_collection = consort.TimespanCollection(timespans)
::
>>> timespan_collection.remove(timespans[1:-1])
::
>>> for timespan in timespan_collection:
... timespan
...
Timespan(start_offset=Offset(0, 1), stop_offset=Offset(3, 1))
Timespan(start_offset=Offset(6, 1), stop_offset=Offset(9, 1))
'''
if self._is_timespan(timespans):
timespans = [timespans]
for timespan in timespans:
if not self._is_timespan(timespan):
continue
self._remove_timespan(timespan)
self._update_indices(self._root_node)
self._update_offsets(self._root_node)
### PUBLIC PROPERTIES ###
@property
def all_offsets(self):
offsets = set()
for timespan in self:
offsets.add(timespan.start_offset)
offsets.add(timespan.stop_offset)
return tuple(sorted(offsets))
@property
def all_start_offsets(self):
start_offsets = set()
for timespan in self:
start_offsets.add(timespan.start_offset)
return tuple(sorted(start_offsets))
@property
def all_stop_offsets(self):
stop_offsets = set()
for timespan in self:
stop_offsets.add(timespan.stop_offset)
return tuple(sorted(stop_offsets))
@property
def earliest_start_offset(self):
def recurse(node):
if node.left_child is not None:
return recurse(node.left_child)
return node.start_offset
if self._root_node is not None:
return recurse(self._root_node)
return float('-inf')
@property
def earliest_stop_offset(self):
if self._root_node is not None:
return self._root_node.stop_offset_low
return float('inf')
@property
def latest_start_offset(self):
def recurse(node):
if node.right_child is not None:
return recurse(node._right_child)
return node.start_offset
if self._root_node is not None:
return recurse(self._root_node)
return float('-inf')
@property
def latest_stop_offset(self):
if self._root_node is not None:
return self._root_node.stop_offset_high
return float('inf')
    @property
    def start_offset(self):
        # Timespan-like alias: the collection "starts" at its earliest
        # contained start offset.
        return self.earliest_start_offset
    @property
    def stop_offset(self):
        # Timespan-like alias: the collection "stops" at its latest
        # contained stop offset.
        return self.latest_stop_offset | unknown | codeparrot/codeparrot-clean | |
from autosklearn.pipeline.implementations.MultilabelClassifier import \
MultilabelClassifier
from HPOlibConfigSpace.configuration_space import ConfigurationSpace
from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \
UniformIntegerHyperparameter, CategoricalHyperparameter
from autosklearn.pipeline.components.base import AutoSklearnClassificationAlgorithm
from autosklearn.pipeline.constants import *
class AdaboostClassifier(AutoSklearnClassificationAlgorithm):
    """auto-sklearn component wrapping ``sklearn.ensemble.AdaBoostClassifier``
    with a depth-limited ``DecisionTreeClassifier`` as base estimator.

    For 2-D multilabel targets the ensemble is wrapped in a one-vs-rest
    ``MultilabelClassifier``.
    """
    def __init__(self, n_estimators, learning_rate, algorithm, max_depth,
                 random_state=None):
        # Hyperparameter values may arrive as strings from the
        # configuration space, so coerce the numeric ones up front.
        self.n_estimators = int(n_estimators)
        self.learning_rate = float(learning_rate)
        self.algorithm = algorithm
        self.random_state = random_state
        self.max_depth = max_depth
        # Set by fit(); predict()/predict_proba() refuse to run before then.
        self.estimator = None
    def fit(self, X, Y, sample_weight=None):
        """Fit the AdaBoost ensemble on (X, Y) and return ``self``."""
        import sklearn.ensemble
        import sklearn.tree
        # Re-coerce defensively in case attributes were overwritten after
        # construction (e.g. by a hyperparameter update).
        self.n_estimators = int(self.n_estimators)
        self.learning_rate = float(self.learning_rate)
        self.max_depth = int(self.max_depth)
        base_estimator = sklearn.tree.DecisionTreeClassifier(max_depth=self.max_depth)
        estimator = sklearn.ensemble.AdaBoostClassifier(
            base_estimator=base_estimator,
            n_estimators=self.n_estimators,
            learning_rate=self.learning_rate,
            algorithm=self.algorithm,
            random_state=self.random_state
        )
        if len(Y.shape) == 2 and Y.shape[1] > 1:
            # Multilabel target: train one ensemble per label column.
            estimator = MultilabelClassifier(estimator, n_jobs=1)
            estimator.fit(X, Y, sample_weight=sample_weight)
        else:
            estimator.fit(X, Y, sample_weight=sample_weight)
        self.estimator = estimator
        return self
    def predict(self, X):
        # Not fitted yet.
        if self.estimator is None:
            raise NotImplementedError
        return self.estimator.predict(X)
    def predict_proba(self, X):
        # Not fitted yet.
        if self.estimator is None:
            raise NotImplementedError()
        return self.estimator.predict_proba(X)
    @staticmethod
    def get_properties(dataset_properties=None):
        """Describe this component's capabilities to auto-sklearn."""
        return {'shortname': 'AB',
                'name': 'AdaBoost Classifier',
                'handles_regression': False,
                'handles_classification': True,
                'handles_multiclass': True,
                'handles_multilabel': True,
                'is_deterministic': True,
                # Flags come from autosklearn.pipeline.constants (star import).
                'input': (DENSE, SPARSE, UNSIGNED_DATA),
                'output': (PREDICTIONS,)}
    @staticmethod
    def get_hyperparameter_search_space(dataset_properties=None):
        """Build the ConfigurationSpace searched over for this component."""
        cs = ConfigurationSpace()
        # base_estimator = Constant(name="base_estimator", value="None")
        n_estimators = cs.add_hyperparameter(UniformIntegerHyperparameter(
            name="n_estimators", lower=50, upper=500, default=50, log=False))
        learning_rate = cs.add_hyperparameter(UniformFloatHyperparameter(
            name="learning_rate", lower=0.0001, upper=2, default=0.1, log=True))
        algorithm = cs.add_hyperparameter(CategoricalHyperparameter(
            name="algorithm", choices=["SAMME.R", "SAMME"], default="SAMME.R"))
        max_depth = cs.add_hyperparameter(UniformIntegerHyperparameter(
            name="max_depth", lower=1, upper=10, default=1, log=False))
        return cs | unknown | codeparrot/codeparrot-clean | |
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# update-database.py - Create and update Mycodo SQLite databases
#
# Copyright (C) 2015 Kyle T. Gabriel
#
# This file is part of Mycodo
#
# Mycodo is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Mycodo is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Mycodo. If not, see <http://www.gnu.org/licenses/>.
#
# Contact at kylegabriel.com
import re
import sys
def is_email(email):
    """
    Minimal Email validation
    :param email: Submitted email address
    :type email: str
    :return: Whether the string matches an Email address
    :rtype: Bool
    """
    # Raw string: '\.' in a plain string is an invalid escape sequence
    # (DeprecationWarning since Python 3.6, SyntaxWarning later).
    pattern = r'[^@]+@[^@]+\.[^@]+'
    if re.match(pattern, email) is None:
        print("This doesn't appear to be an email address")
        return False
    return True
def pass_length_min(pw, min_len=6):
    """
    Validate password length
    :param pw: Password
    :type pw: str
    :param min_len: Minimum length
    :type min_len: int
    :return: Whether the password is long enough
    :rtype: bool
    """
    if len(pw) < min_len:
        print("The password provided is too short.")
        return False
    return True
def characters(un):
    """
    Validate Username/Password is only letters and/or numbers
    :param un: Username or Password
    :type un: str
    :return: Whether the string only has numbers and letters
    :rtype: bool
    """
    if un.isalnum():
        return True
    print("A special character was detected. Please use only Letters and Numbers.")
    return False
def user_length_min(un, min_len=2):
    """
    Validate Username length
    :param un: Username
    :type un: str
    :param min_len: Minimum length
    :type min_len: int
    :return: Whether the Username length is long enough
    :rtype: bool
    """
    if len(un) < min_len:
        print("This username is too short.")
        return False
    return True
def user_length_max(un, max_len=64):
    """
    Validate Username length
    :param un: Username
    :type un: str
    :param max_len: Maximum length
    :type max_len: int
    :return: Whether the Username is within the maximum length
    :rtype: bool
    """
    if len(un) > max_len:
        print("This username is too long.")
        return False
    return True
def test_username(un, addl_tests=None, test_defaults=True):
    """
    Run tests on the supplied username and validate that it passes.
    :param un: Username
    :type un: str
    :param addl_tests: A list of functions that will be called on the username
    :type addl_tests: iterator
    :param test_defaults: Run the default test on the username.
    :type test_defaults: bool
    :return: Does the username pass the tests?
    :rtype: bool
    """
    checks = []
    if test_defaults:
        checks.extend([characters, user_length_min, user_length_max])
    if addl_tests:
        checks.extend(addl_tests)
    return validate_string(un, checks)
def test_password(pw, addl_tests=None, test_defaults=True):
    """
    Run tests on the supplied password and validate that it passes.
    :param pw: Password
    :type pw: str
    :param addl_tests: A list of functions that will be called on the password
    :type addl_tests: iterator
    :param test_defaults: Run the default tests on the password.
    :type test_defaults: bool
    :return: Does the password pass the tests?
    :rtype: bool
    """
    checks = []
    if test_defaults:
        checks.extend([characters, pass_length_min])
    if addl_tests:
        checks.extend(addl_tests)
    return validate_string(pw, checks)
def validate_string(a_str, tests):
    """
    Run tests on a string and make sure that they all pass.
    :type a_str: str
    :param tests: A iterable of functions to apply to the string. Should be boolean tests.
    :type tests: iter
    :rtype: bool
    """
    return all(check(a_str) for check in tests)
def query_yes_no(question, default="yes"):
"""Ask a yes/no question via raw_input() and return their answer.
"question" is a string that is presented to the user.
"default" is the presumed answer if the user just hits <Enter>.
It must be "yes" (the default), "no" or None (meaning
an answer is required of the user).
The "answer" return value is True for "yes" or False for "no".
"""
valid = {"yes": True, "y": True, "ye": True,
"no": False, "n": False}
if default is None:
prompt = " [y/n] "
elif default == "yes":
prompt = " [Y/n] "
elif default == "no":
prompt = " [y/N] "
else:
raise ValueError("invalid default answer: '%s'" % default)
while True:
sys.stdout.write(question + prompt)
choice = raw_input().lower()
if default is not None and choice == '':
return valid[default]
elif choice in valid:
return valid[choice]
else:
sys.stdout.write("Please respond with 'y' or 'n').\n") | unknown | codeparrot/codeparrot-clean | ||
"""
Classes representing uploaded files.
"""
import errno
import os
from io import BytesIO
from django.conf import settings
from django.core.files import temp as tempfile
from django.core.files.base import File
from django.utils.encoding import force_str
__all__ = ('UploadedFile', 'TemporaryUploadedFile', 'InMemoryUploadedFile',
'SimpleUploadedFile')
class UploadedFile(File):
    """
    An abstract uploaded file (``TemporaryUploadedFile`` and
    ``InMemoryUploadedFile`` are the built-in concrete subclasses).
    An ``UploadedFile`` object behaves somewhat like a file object and
    represents some file data that the user submitted with a form.
    """
    DEFAULT_CHUNK_SIZE = 64 * 2 ** 10  # 64 KiB
    def __init__(self, file=None, name=None, content_type=None, size=None, charset=None, content_type_extra=None):
        super(UploadedFile, self).__init__(file, name)
        self.size = size
        self.content_type = content_type
        self.charset = charset
        self.content_type_extra = content_type_extra
    def __repr__(self):
        return force_str("<%s: %s (%s)>" % (
            self.__class__.__name__, self.name, self.content_type))
    def _get_name(self):
        return self._name
    def _set_name(self, name):
        # Sanitize the file name so that it can't be dangerous.
        if name is not None:
            # Just use the basename of the file -- anything else is dangerous.
            name = os.path.basename(name)
            # File names longer than 255 characters can cause problems on older OSes.
            if len(name) > 255:
                # Truncate the stem but keep (at most 255 chars of) the extension.
                name, ext = os.path.splitext(name)
                ext = ext[:255]
                name = name[:255 - len(ext)] + ext
        self._name = name
    name = property(_get_name, _set_name)
class TemporaryUploadedFile(UploadedFile):
    """
    A file uploaded to a temporary location (i.e. stream-to-disk).
    """
    def __init__(self, name, content_type, size, charset, content_type_extra=None):
        # Spool the upload into a named temp file under
        # settings.FILE_UPLOAD_TEMP_DIR so it can later be moved into place.
        file = tempfile.NamedTemporaryFile(suffix='.upload', dir=settings.FILE_UPLOAD_TEMP_DIR)
        super(TemporaryUploadedFile, self).__init__(file, name, content_type, size, charset, content_type_extra)
    def temporary_file_path(self):
        """
        Returns the full path of this file.
        """
        return self.file.name
    def close(self):
        try:
            return self.file.close()
        except OSError as e:
            if e.errno != errno.ENOENT:
                # Means the file was moved or deleted before the tempfile
                # could unlink it. Still sets self.file.close_called and
                # calls self.file.file.close() before the exception is
                # re-raised; ENOENT itself is deliberately swallowed.
                raise
class InMemoryUploadedFile(UploadedFile):
    """
    A file uploaded into memory (i.e. stream-to-memory).
    """
    def __init__(self, file, field_name, name, content_type, size, charset, content_type_extra=None):
        super(InMemoryUploadedFile, self).__init__(file, name, content_type, size, charset, content_type_extra)
        # Name of the form field this file was submitted under.
        self.field_name = field_name
    def open(self, mode=None):
        # The payload is already in memory; "opening" just rewinds it.
        self.file.seek(0)
    def chunks(self, chunk_size=None):
        # Everything fits in memory, so yield the data as one chunk.
        self.file.seek(0)
        yield self.read()
    def multiple_chunks(self, chunk_size=None):
        # Since it's in memory, we'll never have multiple chunks.
        return False
class SimpleUploadedFile(InMemoryUploadedFile):
    """
    A simple representation of a file, which just has content, size, and a name.
    """
    def __init__(self, name, content, content_type='text/plain'):
        # Treat None/empty content as an empty byte payload.
        content = content or b''
        super(SimpleUploadedFile, self).__init__(BytesIO(content), None, name,
                                                 content_type, len(content), None, None)
    @classmethod
    def from_dict(cls, file_dict):
        """
        Creates a SimpleUploadedFile object from
        a dictionary object with the following keys:
           - filename
           - content-type
           - content
        """
        return cls(file_dict['filename'],
                   file_dict['content'],
                   file_dict.get('content-type', 'text/plain')) | unknown | codeparrot/codeparrot-clean | |
""" Tests to quickly see if the backends look good.
This tests only to see if all the necessary methods are implemented,
whether all the right events are mentioned, and whether the keymap
contains all keys that should be supported.
This test basically checks whether nothing was forgotten, not that the
implementation is correct.
"""
from inspect import getargspec
import vispy
from vispy import keys
from vispy.testing import (requires_application, assert_in, run_tests_if_main,
assert_raises)
from vispy.app import use_app, Application
from vispy.app.backends import _template
class DummyApplication(Application):
    # Application stub whose backend selection is a no-op, so a Canvas
    # can be constructed without any real GUI backend installed.
    def _use(self, backend_namd):
        pass
def _test_module_properties(_module=None):
    """Check a backend module for completeness.

    Verifies that the backend's KEYMAP covers every vispy key, that the
    Canvas/Timer/Application backend classes implement all required
    ``_vispy_*`` methods in the module itself (or its shared Qt proxy),
    and that the module's source text mentions every canvas event
    emitter. Uses the current app's backend when `_module` is None.
    """
    if _module is None:
        app = use_app()
        _module = app.backend_module
    # Test that the keymap contains all keys supported by vispy.
    module_fname = _module.__name__.split('.')[-1]
    if module_fname not in ('_egl', '_osmesa'): # skip keys for EGL, osmesa
        keymap = _module.KEYMAP
        vispy_keys = keymap.values()
        for keyname in dir(keys):
            # Key constants are the all-uppercase attributes of vispy.keys.
            if keyname.upper() != keyname:
                continue
            key = getattr(keys, keyname)
            assert_in(key, vispy_keys)
    # For Qt backend, we have a common implementation
    alt_modname = ''
    if module_fname in ('_pyside', '_pyqt4', '_pyqt5'):
        alt_modname = _module.__name__.rsplit('.', 1)[0] + '._qt'
    # Test that all _vispy_x methods are there.
    exceptions = (
        '_vispy_get_native_canvas',
        '_vispy_get_native_timer',
        '_vispy_get_native_app',
        '_vispy_reuse',
        '_vispy_mouse_move',
        '_vispy_mouse_press',
        '_vispy_mouse_release',
        '_vispy_mouse_double_click',
        '_vispy_detect_double_click',
        '_vispy_get_geometry',
        '_vispy_get_physical_size',
        '_vispy_sleep',
        '_process_backend_kwargs') # defined in base class
    class KlassRef(vispy.app.base.BaseCanvasBackend):
        def __init__(self, *args, **kwargs):
            pass # Do not call the base class, since it will check for Canvas
    Klass = _module.CanvasBackend
    base = KlassRef()
    for key in dir(KlassRef):
        if not key.startswith('__'):
            method = getattr(Klass, key)
            if key not in exceptions:
                print(key)
                # Every non-exempt method must raise NotImplementedError in
                # the base class and be overridden by the backend module.
                args = [None] * (len(getargspec(method).args) - 1)
                assert_raises(NotImplementedError, getattr(base, key), *args)
                if hasattr(method, '__module__'):
                    mod_str = method.__module__ # Py3k
                else:
                    mod_str = method.im_func.__module__
                assert_in(mod_str, (_module.__name__, alt_modname),
                          "Method %s.%s not defined in %s"
                          % (Klass, key, _module.__name__))
    # Same override check for the timer backend.
    Klass = _module.TimerBackend
    KlassRef = vispy.app.timer.TimerBackend
    for key in dir(KlassRef):
        if not key.startswith('__'):
            method = getattr(Klass, key)
            if key not in exceptions:
                if hasattr(method, '__module__'):
                    # Py3k
                    assert_in(method.__module__,
                              (_module.__name__, alt_modname))
                else:
                    t = method.im_func.__module__ == _module.__name__
                    assert t
    # Same override check for the application backend.
    Klass = _module.ApplicationBackend
    KlassRef = vispy.app.application.ApplicationBackend
    for key in dir(KlassRef):
        if not key.startswith('__'):
            method = getattr(Klass, key)
            if key not in exceptions:
                if hasattr(method, '__module__'):
                    # Py3k
                    assert_in(method.__module__,
                              (_module.__name__, alt_modname))
                else:
                    t = method.im_func.__module__ == _module.__name__
                    assert t
    # Test that all events seem to be emitted.
    # Get text
    fname = _module.__file__.rstrip('c') # "strip" will break windows!
    with open(fname, 'rb') as fid:
        text = fid.read().decode('utf-8')
    canvas = vispy.app.Canvas(create_native=False, app=DummyApplication())
    # Stylus and touch are ignored because they are not yet implemented.
    # Mouse events are emitted from the CanvasBackend base class.
    ignore = set(['stylus', 'touch', 'mouse_press', 'paint',
                  'mouse_move', 'mouse_release', 'mouse_double_click',
                  'detect_double_click', 'close'])
    if module_fname in ('_egl', '_osmesa'):
        ignore = ignore.union(['mouse_wheel', 'key_release', 'key_press'])
    eventNames = set(canvas.events._emitters.keys()) - ignore
    if not alt_modname: # Only check for non-proxy modules
        for name in eventNames:
            assert_in('events.%s' % name, text,
                      'events.%s does not appear in %s' % (name, fname))
def test_template():
    """Test application module template"""
    # The template backend must be structurally complete...
    _test_module_properties(_template)
    assert_raises(NotImplementedError, _template._set_config, dict())
    # ...but every runtime entry point is expected to be unimplemented.
    a = _template.ApplicationBackend()
    print(a._vispy_get_backend_name())
    for method in (a._vispy_process_events, a._vispy_run, a._vispy_quit,
                   a._vispy_get_native_app):
        assert_raises(NotImplementedError, method)
    class TemplateCanvasBackend(_template.CanvasBackend):
        def __init__(self, *args, **kwargs):
            pass # Do not call the base class, since it will check for Canvas
    c = TemplateCanvasBackend() # _template.CanvasBackend(None)
    print(c._vispy_get_native_canvas())
    # Zero-, one- and two-argument canvas methods, respectively.
    for method in (c._vispy_set_current, c._vispy_swap_buffers, c._vispy_close,
                   c._vispy_update, c._vispy_get_size, c._vispy_get_position):
        assert_raises(NotImplementedError, method)
    for method in (c._vispy_set_title, c._vispy_set_visible):
        assert_raises(NotImplementedError, method, 0)
    for method in (c._vispy_set_size, c._vispy_set_position):
        assert_raises(NotImplementedError, method, 0, 0)
@requires_application()
def test_actual():
    """Test actual application module"""
    # None -> use whatever backend use_app() selects on this machine.
    _test_module_properties(None)
run_tests_if_main() | unknown | codeparrot/codeparrot-clean | ||
import sys
import xbmc
import xbmcplugin
from xbmcaddon import Addon
from moviesets import getFullMovieSetsDetails
# constants
ADDON = Addon( "plugin.moviesets" )
ADDON_NAME = ADDON.getAddonInfo( "name" )
Language = ADDON.getLocalizedString # ADDON strings
LangXBMC = xbmc.getLocalizedString # XBMC strings
class PluginView:
    """Builds the plugin's virtual directory of movie sets.

    Fetches the movie-set list items, adds them to the XBMC plugin
    directory identified by the handle in sys.argv[1], publishes a few
    window properties, and finishes the listing with content type and
    sort methods.
    """
    def __init__( self ):
        # "allsets" addon setting toggles inclusion of all sets.
        listitems = getFullMovieSetsDetails( ADDON.getSetting( "allsets" ) == "true" )
        ok = self._add_directory_items( listitems )
        # sys.argv[1] is the integer plugin handle XBMC started us with.
        xbmcplugin.setProperty( int( sys.argv[ 1 ] ), "Content", "MovieSets" )
        xbmcplugin.setProperty( int( sys.argv[ 1 ] ), "TotalSets", str( len( listitems ) ) )
        #xbmcplugin.setProperty( int( sys.argv[ 1 ] ), "FolderName", ADDON_NAME )
        #xbmcplugin.setPluginCategory( int( sys.argv[ 1 ] ), ADDON_NAME )
        self._set_content( ok )
    def _add_directory_item( self, url, listitem, isFolder, totalItems=0 ):
        """ addDirectoryItem(handle, url, listitem [,isFolder, totalItems])
            handle      : integer - handle the plugin was started with.
            url         : string - url of the entry. would be plugin:// for another virtual directory
            listitem    : ListItem - item to add.
            isFolder    : [opt] bool - True=folder / False=not a folder(default).
            totalItems  : [opt] integer - total number of items that will be passed.(used for progressbar)
        """
        return xbmcplugin.addDirectoryItem( int( sys.argv[ 1 ] ), url, listitem, isFolder, totalItems )
    def _add_directory_items( self, listitems ):
        """ addDirectoryItems(handle, items [,totalItems])
            handle      : integer - handle the plugin was started with.
            items       : List - list of (url, listitem[, isFolder]) as a tuple to add.
            totalItems  : [opt] integer - total number of items that will be passed.(used for progressbar)
        """
        return xbmcplugin.addDirectoryItems( int( sys.argv[ 1 ] ), listitems, len( listitems ) )
    def _set_content( self, succeeded, content="movies", sort=True ):
        # Declare the directory's content type, then either add sort
        # methods (which also ends the directory) or end it directly.
        if ( succeeded ):
            #content = ( "addons", "files", "movies", "tvshows", "episodes", "musicvideos", "albums", "artists", "songs" )[ 2 ]
            #content = "moviesets"
            xbmcplugin.setContent( int( sys.argv[ 1 ] ), content )
        if sort:
            self._add_sort_methods( succeeded )
        else:
            self._end_of_directory( succeeded )
    def _add_sort_methods( self, succeeded ):
        # Sort methods are only registered for a successful listing.
        if ( succeeded ):
            #xbmcplugin.addSortMethod( int( sys.argv[ 1 ] ), xbmcplugin.SORT_METHOD_UNSORTED )
            xbmcplugin.addSortMethod( int( sys.argv[ 1 ] ), xbmcplugin.SORT_METHOD_LABEL_IGNORE_THE )
            xbmcplugin.addSortMethod( int( sys.argv[ 1 ] ), xbmcplugin.SORT_METHOD_VIDEO_RATING )
            xbmcplugin.addSortMethod( int( sys.argv[ 1 ] ), xbmcplugin.SORT_METHOD_VIDEO_YEAR )
            xbmcplugin.addSortMethod( int( sys.argv[ 1 ] ), xbmcplugin.SORT_METHOD_GENRE )
            xbmcplugin.addSortMethod( int( sys.argv[ 1 ] ), xbmcplugin.SORT_METHOD_MPAA_RATING )
            xbmcplugin.addSortMethod( int( sys.argv[ 1 ] ), xbmcplugin.SORT_METHOD_PROGRAM_COUNT )
            xbmcplugin.addSortMethod( int( sys.argv[ 1 ] ), xbmcplugin.SORT_METHOD_DATE )
        self._end_of_directory( succeeded )
    def _end_of_directory( self, succeeded ):
        xbmcplugin.endOfDirectory( int( sys.argv[ 1 ] ), succeeded )
#PluginView() | unknown | codeparrot/codeparrot-clean | ||
package kotlinx.coroutines
import kotlinx.coroutines.testing.*
import org.junit.Test
import kotlin.concurrent.thread
/**
* Tests concurrent cancel & dispose of the jobs.
*/
class JobDisposeStressTest: TestBase() {
    private val TEST_DURATION = 3 * stressTestMultiplier // seconds
    // Shared across the three worker threads, hence @Volatile.
    @Volatile
    private var done = false
    @Volatile
    private var job: TestJob? = null
    @Volatile
    private var handle: DisposableHandle? = null
    // First uncaught exception from any worker thread, if one occurs.
    @Volatile
    private var exception: Throwable? = null
    // Creates a named, not-yet-started thread that records (instead of
    // swallowing) any uncaught exception so the test can fail on it.
    private fun testThread(name: String, block: () -> Unit): Thread =
        thread(start = false, name = name, block = block).apply {
            setUncaughtExceptionHandler { t, e ->
                exception = e
                println("Exception in ${t.name}: $e")
                e.printStackTrace()
            }
        }
    @Test
    fun testConcurrentDispose() {
        // create threads
        val threads = mutableListOf<Thread>()
        threads += testThread("creator") {
            while (!done) {
                val job = TestJob()
                val handle = job.invokeOnCompletion(onCancelling = true) { /* nothing */ }
                this.job = job // post job to cancelling thread
                this.handle = handle // post handle to concurrent disposer thread
                handle.dispose() // dispose of handle from this thread (concurrently with other disposer)
            }
        }
        threads += testThread("canceller") {
            while (!done) {
                val job = this.job ?: continue
                job.cancel()
                // Always returns true, TestJob never completes
            }
        }
        threads += testThread("disposer") {
            // Races dispose() against the creator's own dispose() call.
            while (!done) {
                handle?.dispose()
            }
        }
        // start threads
        threads.forEach { it.start() }
        // wait
        for (i in 1..TEST_DURATION) {
            println("$i: Running")
            Thread.sleep(1000)
            if (exception != null) break
        }
        // done
        done = true
        // join threads
        threads.forEach { it.join() }
        // rethrow exception if any
    }
    // Minimal always-active Job used purely as the race target.
    @Suppress("DEPRECATION_ERROR")
    private class TestJob : JobSupport(active = true)
} | kotlin | github | https://github.com/Kotlin/kotlinx.coroutines | kotlinx-coroutines-core/jvm/test/JobDisposeStressTest.kt
#!/usr/bin/python
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: cv_server_provision
version_added: "2.4"
author: "EOS+ CS (ansible-dev@arista.com) (@mharista)"
short_description:
Provision server port by applying or removing template configuration to an
Arista CloudVision Portal configlet that is applied to a switch.
description:
- This module allows a server team to provision server network ports for
new servers without having to access Arista CVP or asking the network team
to do it for them. Provide the information for connecting to CVP, switch
rack, port the new server is connected to, optional vlan, and an action
and the module will apply the configuration to the switch port via CVP.
Actions are add (applies template config to port),
remove (defaults the interface config) and
show (returns the current port config).
options:
host:
description:
- The hostname or IP address of the CVP node being connected to.
required: true
port:
description:
- The port number to use when making API calls to the CVP node. This
will default to the default port for the specified protocol. Port 80
for http and port 443 for https.
protocol:
description:
- The protocol to use when making API calls to CVP. CVP defaults to https
and newer versions of CVP no longer support http.
default: https
choices: [https, http]
username:
description:
- The user that will be used to connect to CVP for making API calls.
required: true
password:
description:
- The password of the user that will be used to connect to CVP for API
calls.
required: true
server_name:
description:
    - The hostname or identifier for the server that is having its switch
port provisioned.
required: true
switch_name:
description:
- The hostname of the switch is being configured for the server being
provisioned.
required: true
switch_port:
description:
- The physical port number on the switch that the new server is
connected to.
required: true
port_vlan:
description:
- The vlan that should be applied to the port for this server.
This parameter is dependent on a proper template that supports single
vlan provisioning with it. If a port vlan is specified by the template
specified does not support this the module will exit out with no
changes. If a template is specified that requires a port vlan but no
port vlan is specified the module will exit out with no changes.
template:
description:
- A path to a Jinja formatted template file that contains the
configuration block that will be applied to the specified switch port.
This template will have variable fields replaced by the module before
being applied to the switch configuration.
required: true
action:
description:
- The action for the module to take. The actions are add, which applies
the specified template config to port, remove, which defaults the
specified interface configuration, and show, which will return the
current port configuration with no changes.
default: show
choices: [show, add, remove]
auto_run:
description:
- Flag that determines whether or not the module will execute the CVP
task spawned as a result of changes to a switch configlet. When an
add or remove action is taken which results in a change to a switch
configlet, CVP will spawn a task that needs to be executed for the
configuration to be applied to the switch. If this option is True then
      the module will determine the task number created by the configuration
change, execute it and wait for the task to complete. If the option
is False then the task will remain in the Pending state in CVP for
a network administrator to review and execute.
type: bool
default: 'no'
requirements: [Jinja2, cvprac >= 0.7.0]
'''
EXAMPLES = '''
- name: Get current configuration for interface Ethernet2
cv_server_provision:
host: cvp_node
username: cvp_user
password: cvp_pass
protocol: https
server_name: new_server
switch_name: eos_switch_1
switch_port: 2
template: template_file.j2
action: show
- name: Remove existing configuration from interface Ethernet2. Run task.
cv_server_provision:
host: cvp_node
username: cvp_user
password: cvp_pass
protocol: https
server_name: new_server
switch_name: eos_switch_1
switch_port: 2
template: template_file.j2
action: remove
auto_run: True
- name: Add template configuration to interface Ethernet2. No VLAN. Run task.
cv_server_provision:
host: cvp_node
username: cvp_user
password: cvp_pass
protocol: https
server_name: new_server
switch_name: eos_switch_1
switch_port: 2
template: single_attached_trunk.j2
action: add
auto_run: True
- name: Add template with VLAN configuration to interface Ethernet2. Run task.
cv_server_provision:
host: cvp_node
username: cvp_user
password: cvp_pass
protocol: https
server_name: new_server
switch_name: eos_switch_1
switch_port: 2
port_vlan: 22
template: single_attached_vlan.j2
action: add
auto_run: True
'''
RETURN = '''
changed:
description: Signifies if a change was made to the configlet
returned: success
type: bool
sample: true
currentConfigBlock:
description: The current config block for the user specified interface
returned: when action = show
type: str
sample: |
interface Ethernet4
!
newConfigBlock:
description: The new config block for the user specified interface
returned: when action = add or remove
type: str
sample: |
interface Ethernet3
description example
no switchport
!
oldConfigBlock:
description: The current config block for the user specified interface
before any changes are made
returned: when action = add or remove
type: str
sample: |
interface Ethernet3
!
fullConfig:
description: The full config of the configlet after being updated
returned: when action = add or remove
type: str
sample: |
!
interface Ethernet3
!
interface Ethernet4
!
updateConfigletResponse:
description: Response returned from CVP when configlet update is triggered
returned: when action = add or remove and configuration changes
type: str
sample: "Configlet veos1-server successfully updated and task initiated."
portConfigurable:
description: Signifies if the user specified port has an entry in the
configlet that Ansible has access to
returned: success
type: bool
sample: true
switchConfigurable:
description: Signifies if the user specified switch has a configlet
applied to it that CVP is allowed to edit
returned: success
type: bool
sample: true
switchInfo:
description: Information from CVP describing the switch being configured
returned: success
type: dict
sample: {"architecture": "i386",
"bootupTimeStamp": 1491264298.21,
"complianceCode": "0000",
"complianceIndication": "NONE",
"deviceInfo": "Registered",
"deviceStatus": "Registered",
"fqdn": "veos1",
"hardwareRevision": "",
"internalBuildId": "12-12",
"internalVersion": "4.17.1F-11111.4171F",
"ipAddress": "192.168.1.20",
"isDANZEnabled": "no",
"isMLAGEnabled": "no",
"key": "00:50:56:5d:e5:e0",
"lastSyncUp": 1496432895799,
"memFree": 472976,
"memTotal": 1893460,
"modelName": "vEOS",
"parentContainerId": "container_13_5776759195930",
"serialNumber": "",
"systemMacAddress": "00:50:56:5d:e5:e0",
"taskIdList": [],
"tempAction": null,
"type": "netelement",
"unAuthorized": false,
"version": "4.17.1F",
"ztpMode": "false"}
taskCompleted:
description: Signifies if the task created and executed has completed successfully
returned: when action = add or remove, and auto_run = true,
and configuration changes
type: bool
sample: true
taskCreated:
description: Signifies if a task was created due to configlet changes
returned: when action = add or remove, and auto_run = true or false,
and configuration changes
type: bool
sample: true
taskExecuted:
description: Signifies if the automation executed the spawned task
returned: when action = add or remove, and auto_run = true,
and configuration changes
type: bool
sample: true
taskId:
description: The task ID created by CVP because of changes to configlet
returned: when action = add or remove, and auto_run = true or false,
and configuration changes
type: str
sample: "500"
'''
import re
import time
from ansible.module_utils.basic import AnsibleModule
try:
import jinja2
from jinja2 import meta
HAS_JINJA2 = True
except ImportError:
HAS_JINJA2 = False
try:
from cvprac.cvp_client import CvpClient
from cvprac.cvp_client_errors import CvpLoginError, CvpApiError
HAS_CVPRAC = True
except ImportError:
HAS_CVPRAC = False
def connect(module):
    ''' Establish a connection to CVP using the credentials supplied in the
    playbook parameters.

    :param module: Ansible module with parameters and client connection.
    :return: CvpClient object with connection instantiated.
    '''
    params = module.params
    client = CvpClient()
    try:
        client.connect([params['host']],
                       params['username'],
                       params['password'],
                       protocol=params['protocol'],
                       port=params['port'])
    except CvpLoginError as err:
        # Surface the login failure through Ansible instead of raising.
        module.fail_json(msg=str(err))
    return client
def switch_info(module):
    ''' Look up the switch named in the playbook parameters on CVP.

    :param module: Ansible module with parameters and client connection.
    :return: Dict of switch info from CVP, or exit with failure if no
        info for the device is found.
    '''
    name = module.params['switch_name']
    info = module.client.api.get_device_by_name(name)
    if not info:
        module.fail_json(msg=str("Device with name '%s' does not exist."
                                 % name))
    return info
def switch_in_compliance(module, sw_info):
    ''' Verify that the switch is currently in compliance on CVP.

    :param module: Ansible module with parameters and client connection.
    :param sw_info: Dict of switch info.
    :return: Nothing, or exit with failure if the device is not in
        compliance.
    '''
    report = module.client.api.check_compliance(sw_info['key'],
                                                sw_info['type'])
    code = report['complianceCode']
    # '0000' is CVP's "fully compliant" code.
    if code != '0000':
        module.fail_json(msg=str('Switch %s is not in compliance. Returned'
                                 ' compliance code %s.'
                                 % (sw_info['fqdn'], code)))
def server_configurable_configlet(module, sw_info):
    ''' Find the '<switch_name>-server' configlet attached to the switch,
    which marks the switch as editable by Ansible.

    :param module: Ansible module with parameters and client connection.
    :param sw_info: Dict of switch info.
    :return: Dict of configlet information or None.
    '''
    target_name = module.params['switch_name'] + '-server'
    attached = module.client.api.get_configlets_by_device_id(sw_info['key'])
    found = None
    # Scan every attached configlet; if duplicates exist the last one wins.
    for candidate in attached:
        if candidate['name'] == target_name:
            found = candidate
    return found
def port_configurable(module, configlet):
    ''' Check whether the user specified port has a configuration entry in
    the configlet, i.e. whether Ansible is allowed to configure the port
    on this switch.

    :param module: Ansible module with parameters and client connection.
    :param configlet: Dict of configlet info.
    :return: True or False.
    '''
    # (?!\d) prevents a prefix match: without it, port '1' would wrongly
    # match an 'interface Ethernet10' line.
    regex = r'^interface Ethernet%s(?!\d)' % module.params['switch_port']
    for config_line in configlet['config'].split('\n'):
        if re.match(regex, config_line):
            return True
    return False
def configlet_action(module, configlet):
    ''' Apply the user requested action to the configlet.

    For 'show', return the current config block for the specified port.
    For 'add' or 'remove', rewrite the port section of the configlet, push
    the update to CVP, and report the associated information.

    :param module: Ansible module with parameters and client connection.
    :param configlet: Dict of configlet info.
    :return: Dict of information to update results with.
    '''
    action = module.params['action']
    existing_config = current_config(module, configlet['config'])

    if action == 'show':
        return {'currentConfigBlock': existing_config}

    result = dict()
    if action == 'add':
        result['newConfigBlock'] = config_from_template(module)
    elif action == 'remove':
        # Removing a server config leaves a bare interface stanza.
        result['newConfigBlock'] = ('interface Ethernet%s\n!'
                                    % module.params['switch_port'])
    result['oldConfigBlock'] = existing_config

    full_config = updated_configlet_content(module, configlet['config'],
                                            result['newConfigBlock'])
    result['fullConfig'] = full_config
    resp = module.client.api.update_configlet(full_config,
                                              configlet['key'],
                                              configlet['name'])
    if 'data' in resp:
        result['updateConfigletResponse'] = resp['data']
        # CVP mentions 'task' in the response only when the change
        # actually spawned work to push.
        if 'task' in resp['data']:
            result['changed'] = True
            result['taskCreated'] = True
    return result
def current_config(module, config):
    ''' Parse the configuration block for the user specified port out of
    the full configlet configuration and return it as a string.

    :param module: Ansible module with parameters and client connection.
    :param config: Full config to parse the specific port config from.
    :return: String of the current config block for the specified port.
    '''
    # (?!\d) prevents a prefix match: without it, port '1' would wrongly
    # match an 'interface Ethernet10' section.
    regex = r'^interface Ethernet%s(?!\d)' % module.params['switch_port']
    match = re.search(regex, config, re.M)
    if not match:
        module.fail_json(msg=str('interface section not found - %s'
                                 % config))
    block_start, line_end = match.regs[0]

    # The block runs from the interface line up to and including the next
    # '!' delimiter; with no delimiter it runs to the end of the config.
    match = re.search(r'!', config[line_end:], re.M)
    if not match:
        return config[block_start:]
    _, block_end = match.regs[0]
    return config[block_start:line_end + block_end]
def valid_template(port, template):
    ''' Test whether the rendered template configures the expected port.

    :param port: User specified port.
    :param template: Contents of the rendered Jinja template.
    :return: True or False
    '''
    # (?!\d) prevents a prefix match: without it, port '1' would wrongly
    # accept a template that configures Ethernet10.
    regex = r'^interface Ethernet%s(?!\d)' % port
    return re.match(regex, template, re.M) is not None
def config_from_template(module):
    ''' Load the Jinja template and apply user provided parameters in necessary
    places. Fail if template is not found. Fail if rendered template does
    not reference the correct port. Fail if the template requires a VLAN
    but the user did not provide one with the port_vlan parameter.

    :param module: Ansible module with parameters and client connection.
    :return: String of Jinja template rendered with parameters or exit with
        failure.
    '''
    # Templates are resolved relative to ./templates in the working dir.
    template_loader = jinja2.FileSystemLoader('./templates')
    # DebugUndefined leaves undefined placeholders visible in the output
    # rather than raising, so validation below can report them.
    env = jinja2.Environment(loader=template_loader,
                             undefined=jinja2.DebugUndefined)
    template = env.get_template(module.params['template'])
    if not template:
        module.fail_json(msg=str('Could not find template - %s'
                                 % module.params['template']))

    data = {'switch_port': module.params['switch_port'],
            'server_name': module.params['server_name']}

    # Re-parse the raw template source to discover which variables it
    # references, so we can require port_vlan only when the template uses it.
    temp_source = env.loader.get_source(env, module.params['template'])[0]
    parsed_content = env.parse(temp_source)
    temp_vars = list(meta.find_undeclared_variables(parsed_content))
    if 'port_vlan' in temp_vars:
        if module.params['port_vlan']:
            data['port_vlan'] = module.params['port_vlan']
        else:
            module.fail_json(msg=str('Template %s requires a vlan. Please'
                                     ' re-run with vlan number provided.'
                                     % module.params['template']))

    template = template.render(data)
    # Sanity-check that the rendered output configures the requested port.
    if not valid_template(module.params['switch_port'], template):
        module.fail_json(msg=str('Template content does not configure proper'
                                 ' interface - %s' % template))
    return template
def updated_configlet_content(module, existing_config, new_config):
    ''' Splice the new configuration section for the user specified port
    into the existing configlet configuration.

    :param module: Ansible module with parameters and client connection.
    :param existing_config: String of current configlet configuration.
    :param new_config: String of configuration for the user specified port
        that replaces the old section in the existing config.
    :return: String of the full updated configuration.
    '''
    # (?!\d) prevents a prefix match: without it, port '1' would wrongly
    # splice over an 'interface Ethernet10' section.
    regex = r'^interface Ethernet%s(?!\d)' % module.params['switch_port']
    match = re.search(regex, existing_config, re.M)
    if not match:
        module.fail_json(msg=str('interface section not found - %s'
                                 % existing_config))
    block_start, line_end = match.regs[0]

    updated_config = existing_config[:block_start] + new_config
    # Everything after the old section's closing '!\n' is kept verbatim.
    # With no delimiter, the old section ran to the end of the config and
    # nothing needs to be appended.
    match = re.search(r'!\n', existing_config[line_end:], re.M)
    if match:
        _, block_end = match.regs[0]
        updated_config += '\n%s' % existing_config[line_end + block_end:]
    return updated_config
def configlet_update_task(module):
    ''' Poll the switch's device info on CVP, up to three times, looking
    for a task spawned by the configlet updates. CVP sometimes takes a
    second to spawn the task after the update.

    :param module: Ansible module with parameters and client connection.
    :return: Task ID or None.
    '''
    for _ in range(3):
        device_info = switch_info(module)
        if 'taskIdList' in device_info and len(device_info['taskIdList']) > 0:
            for task in device_info['taskIdList']:
                # Only configlet-push tasks belong to our update.
                if ('Configlet Assign' in task['description'] and
                        task['data']['WORKFLOW_ACTION'] == 'Configlet Push'):
                    return task['workOrderId']
        time.sleep(1)
    return None
def wait_for_task_completion(module, task):
    ''' Poll CVP until the executed task completes. There is currently no
    timeout. Exits with failure if the task status becomes Failed or
    Cancelled.

    :param module: Ansible module with parameters and client connection.
    :param task: Task ID to poll for completion.
    :return: True, or exit with failure if the task is cancelled or fails.
    '''
    while True:
        task_info = module.client.api.get_task_by_id(task)
        status = task_info['workOrderUserDefinedStatus']
        if status == 'Completed':
            return True
        if status in ['Failed', 'Cancelled']:
            module.fail_json(msg=str('Task %s has reported status %s. Please'
                                     ' consult the CVP admins for more'
                                     ' information.' % (task, status)))
        time.sleep(2)
def main():
    """ main entry point for module execution

    Validates parameters, connects to CVP and applies the requested
    show/add/remove action to the switch's server configlet, optionally
    executing the resulting CVP task when auto_run is true.
    """
    argument_spec = dict(
        host=dict(required=True),
        port=dict(required=False, default=None),
        protocol=dict(default='https', choices=['http', 'https']),
        username=dict(required=True),
        password=dict(required=True, no_log=True),
        server_name=dict(required=True),
        switch_name=dict(required=True),
        switch_port=dict(required=True),
        port_vlan=dict(required=False, default=None),
        # Bug fix: this was 'require=True', which Ansible ignores, so the
        # template parameter was silently optional.
        template=dict(required=True),
        action=dict(default='show', choices=['show', 'add', 'remove']),
        auto_run=dict(type='bool', default=False))

    module = AnsibleModule(argument_spec=argument_spec,
                           supports_check_mode=False)

    if not HAS_JINJA2:
        module.fail_json(msg='The Jinja2 python module is required.')
    if not HAS_CVPRAC:
        module.fail_json(msg='The cvprac python module is required.')

    result = dict(changed=False)

    module.client = connect(module)

    try:
        result['switchInfo'] = switch_info(module)
        # Compliance matters only when we are about to change config.
        if module.params['action'] in ['add', 'remove']:
            switch_in_compliance(module, result['switchInfo'])
        switch_configlet = server_configurable_configlet(module,
                                                         result['switchInfo'])
        if not switch_configlet:
            module.fail_json(msg=str('Switch %s has no configurable server'
                                     ' ports.' % module.params['switch_name']))
        result['switchConfigurable'] = True
        if not port_configurable(module, switch_configlet):
            module.fail_json(msg=str('Port %s is not configurable as a server'
                                     ' port on switch %s.'
                                     % (module.params['switch_port'],
                                        module.params['switch_name'])))
        result['portConfigurable'] = True
        result['taskCreated'] = False
        result['taskExecuted'] = False
        result['taskCompleted'] = False
        result.update(configlet_action(module, switch_configlet))
        if module.params['auto_run'] and module.params['action'] != 'show':
            task_id = configlet_update_task(module)
            if task_id:
                result['taskId'] = task_id
                note = ('Update config on %s with %s action from Ansible.'
                        % (module.params['switch_name'],
                           module.params['action']))
                module.client.api.add_note_to_task(task_id, note)
                module.client.api.execute_task(task_id)
                result['taskExecuted'] = True
                task_completed = wait_for_task_completion(module, task_id)
                if task_completed:
                    result['taskCompleted'] = True
            else:
                # No task appeared within the polling window.
                result['taskCreated'] = False
    except CvpApiError as e:
        module.fail_json(msg=str(e))

    module.exit_json(**result)
# Entry point when Ansible executes this module directly.
if __name__ == '__main__':
    main()
//// [tests/cases/conformance/classes/propertyMemberDeclarations/accessorsOverrideProperty9.ts] ////
//// [accessorsOverrideProperty9.ts]
// #41347, based on microsoft/rushstack
// Mixin utilities
export type Constructor<T = {}> = new (...args: any[]) => T;
export type PropertiesOf<T> = { [K in keyof T]: T[K] };
interface IApiItemConstructor extends Constructor<ApiItem>, PropertiesOf<typeof ApiItem> {}
// Base class
class ApiItem {
public get members(): ReadonlyArray<ApiItem> {
return [];
}
}
// Normal subclass
class ApiEnumMember extends ApiItem {
}
// Mixin base class
interface ApiItemContainerMixin extends ApiItem {
readonly members: ReadonlyArray<ApiItem>;
}
function ApiItemContainerMixin<TBaseClass extends IApiItemConstructor>(
baseClass: TBaseClass
): TBaseClass & (new (...args: any[]) => ApiItemContainerMixin) {
abstract class MixedClass extends baseClass implements ApiItemContainerMixin {
public constructor(...args: any[]) {
super(...args);
}
public get members(): ReadonlyArray<ApiItem> {
return [];
}
}
return MixedClass;
}
// Subclass inheriting from mixin
export class ApiEnum extends ApiItemContainerMixin(ApiItem) {
// This worked prior to TypeScript 4.0:
public get members(): ReadonlyArray<ApiEnumMember> {
return [];
}
}
//// [accessorsOverrideProperty9.js]
// #41347, based on microsoft/rushstack
// Base class
class ApiItem {
get members() {
return [];
}
}
// Normal subclass
class ApiEnumMember extends ApiItem {
}
function ApiItemContainerMixin(baseClass) {
class MixedClass extends baseClass {
constructor(...args) {
super(...args);
}
get members() {
return [];
}
}
return MixedClass;
}
// Subclass inheriting from mixin
export class ApiEnum extends ApiItemContainerMixin(ApiItem) {
// This worked prior to TypeScript 4.0:
get members() {
return [];
}
} | javascript | github | https://github.com/microsoft/TypeScript | tests/baselines/reference/accessorsOverrideProperty9.js |
/*
* Copyright 2002-present the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.context.annotation;
import java.util.List;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.ConfigurableApplicationContext;
import org.springframework.core.annotation.Order;
import static org.assertj.core.api.Assertions.assertThat;
/**
* @author Stephane Nicoll
*/
class Spr11310Tests {

    @Test
    void orderedList() {
        ConfigurableApplicationContext context = new AnnotationConfigApplicationContext(Config.class);
        StringHolder holder = context.getBean(StringHolder.class);
        // @Order values (5 then 50) sort first; the bean without @Order comes last.
        assertThat(holder.itemsList).containsExactly("second", "first", "unknownOrder");
        context.close();
    }

    @Test
    void orderedArray() {
        ConfigurableApplicationContext context = new AnnotationConfigApplicationContext(Config.class);
        StringHolder holder = context.getBean(StringHolder.class);
        // Array injection must honor the same ordering as List injection.
        assertThat(holder.itemsArray).containsExactly("second", "first", "unknownOrder");
        context.close();
    }


    @Configuration
    static class Config {

        @Bean
        @Order(50)
        public String first() {
            return "first";
        }

        // Deliberately has no @Order: expected to sort after the ordered beans.
        @Bean
        public String unknownOrder() {
            return "unknownOrder";
        }

        @Bean
        @Order(5)
        public String second() {
            return "second";
        }

        @Bean
        public StringHolder stringHolder() {
            return new StringHolder();
        }
    }

    // Target of the autowired collection/array injection under test.
    private static class StringHolder {

        @Autowired
        private List<String> itemsList;

        @Autowired
        private String[] itemsArray;
    }
}
<!--
Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
SPDX-License-Identifier: curl
-->
# How to get started helping out in the curl project
We are always in need of more help. If you are new to the project and are
looking for ways to contribute and help out, this document aims to give a few
good starting points.
You may subscribe to the [curl-library mailing
list](https://lists.haxx.se/listinfo/curl-library) to keep track of the
current discussion topics; or if you are registered on GitHub, you can use the
[Discussions section](https://github.com/curl/curl/discussions) on the main
curl repository.
## Scratch your own itch
One of the best ways is to start working on any problems or issues you have
found yourself or perhaps got annoyed at in the past. It can be a spelling
error in an error text or a weirdly phrased section in a man page. Hunt it
down and report the bug. Or make your first pull request with a fix for that.
## Smaller tasks
Some projects mark small issues as "beginner friendly", "bite-sized" or
similar. We do not do that in curl since such issues never linger around long
enough. Simple issues get handled fast.
If you are looking for a smaller or simpler task in the project to help out
with as an entry point, perhaps because you are a newcomer or not yet a very
experienced developer, here is our advice:
- Read through this document to get a grasp on a general approach to use
- Consider adding a test case for something not currently tested (correctly)
- Consider updating or adding documentation
- One way to get started gently in the project is to participate in an
  existing issue/PR and help out by reproducing the issue, reviewing the
  code in the PR, etc.
## Help wanted
In the issue tracker we occasionally mark bugs with [help
wanted](https://github.com/curl/curl/labels/help%20wanted), as a sign that the
bug is acknowledged to exist and that there is nobody known to work on this
issue for the moment. Those are bugs that are fine to "grab" and provide a
pull request for. The complexity level of these of course varies, so pick one
that piques your interest.
## Work on known bugs
Some bugs are known and have not yet received attention and work enough to get
fixed. We collect such known existing flaws in the
[KNOWN_BUGS](https://curl.se/docs/knownbugs.html) page. Many of them link
to the original bug report with some additional details, but some may also
have aged a bit and may require some verification that the bug still exists in
the same way and that what was said about it in the past is still valid.
## Fix autobuild problems
On the [autobuilds page](https://curl.se/dev/builds.html) we show a
collection of test results from the automatic curl build and tests that are
performed by volunteers. Fixing compiler warnings and errors shown there is
something we value greatly. Also, if you own or run systems or architectures
that are not already tested in the autobuilds, we also appreciate more
volunteers running builds automatically to help us keep curl portable.
## TODO items
Ideas for features and functions that we have considered worthwhile to
implement and provide are kept in the
[TODO](https://curl.se/docs/todo.html) file. Some of the ideas are
rough. Some are well thought out. Some probably are not really suitable
anymore.
Before you invest a lot of time on a TODO item, do bring it up for discussion
on the mailing list. For discussion on applicability but also for ideas and
brainstorming on specific ways to do the implementation etc.
## You decide
You can also come up with a completely new thing you think we should do. Or
not do. Or fix. Or add to the project. You then either bring it to the mailing
list first to see if people shoot down the idea at once, or you bring a first
draft of the idea as a pull request and take the discussion there around the
specific implementation. Either way is fine.
## CONTRIBUTE
We offer [guidelines](https://curl.se/dev/contribute.html) that are suitable
to be familiar with before you decide to contribute to curl. If you are used
to open source development, you probably do not find many surprises there. | unknown | github | https://github.com/curl/curl | docs/HELP-US.md |
# Copyright 2014 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Performance test for the Oppia reader view.
Before running this script, exploration 0 should be loaded in the target
server.
Run this script from the Oppia root directory:
python core/tests/reader_view_load_test.py --thread_count=5 --start_uid=1 \
https://my-oppia-instance.appspot.com
"""
__author__ = 'Sean Lip (sll@google.com)'
import argparse
import cookielib
import json
import logging
import sys
import threading
import time
import urllib
import urllib2
XSSI_PREFIX = ')]}\'\n'
# command line arguments parser
PARSER = argparse.ArgumentParser()
PARSER.add_argument(
'base_url', help=('Base URL of the Oppia installation to test'), type=str)
PARSER.add_argument(
'--start_uid',
help='Initial value for unique thread identifier.', default=1, type=int)
PARSER.add_argument(
'--thread_count',
help='Number of concurrent threads for executing the test.',
default=1, type=int)
PARSER.add_argument(
'--iteration_count',
help='Number of iterations for executing the test. Each thread of each '
'iteration acts as a unique user with the uid equal to:'
'start_uid + thread_count * iteration_index.',
default=1, type=int)
def assert_contains(needle, haystack):
    """Raise an Exception if *needle* does not occur in *haystack*."""
    if needle not in haystack:
        # Use % interpolation: Exception('%s', x) (logging style) would
        # store an uninterpolated tuple as the message.
        raise Exception('Expected to find term: %s\n%s' % (needle, haystack))
def assert_does_not_contain(needle, haystack):
    """Raise an Exception if *needle* occurs in *haystack*."""
    if needle in haystack:
        # Use % interpolation: Exception('%s', x) (logging style) would
        # store an uninterpolated tuple as the message.
        raise Exception(
            'Did not expect to find term: %s\n%s' % (needle, haystack))
def assert_equals(expected, actual):
    """Raise an Exception if *expected* and *actual* are not equal."""
    if expected != actual:
        # Use % interpolation: Exception('%s', x) (logging style) would
        # store an uninterpolated tuple as the message.
        raise Exception('Expected equality of %s and %s.' % (expected, actual))
class WebSession(object):
    """A class that allows navigation of web pages keeping cookie session."""

    # Guards all class-level counters below, which are shared across threads.
    PROGRESS_LOCK = threading.Lock()

    # Retry policy for "soft" (retriable) HTTP 500 errors.
    MAX_RETRIES = 3
    RETRY_SLEEP_SEC = 3

    # Aggregate request counters, shared by all sessions.
    GET_COUNT = 0
    POST_COUNT = 0
    RETRY_COUNT = 0

    # Progress is logged every PROGRESS_BATCH requests.
    PROGRESS_BATCH = 10

    # Buckets: [>30s, >15s, >7s, >3s, >1s, <=1s] response times
    # (see update_duration and the SLA line logged by run_all).
    RESPONSE_TIME_HISTOGRAM = [0, 0, 0, 0, 0, 0]

    def __init__(self, uid, common_headers=None):
        """Creates a session with a unique id and optional extra headers."""
        if common_headers is None:
            common_headers = {}
        self.uid = uid
        self.common_headers = common_headers
        self.cj = cookielib.CookieJar()
        # Opener that carries the cookie jar across requests.
        self.opener = urllib2.build_opener(
            urllib2.HTTPCookieProcessor(self.cj))

    @classmethod
    def increment_duration_bucket(cls, index):
        """Bumps one bucket of the shared response-time histogram."""
        cls.RESPONSE_TIME_HISTOGRAM[index] += 1

    @classmethod
    def update_duration(cls, duration):
        """Records a request duration (in seconds) into the histogram."""
        if duration > 30:
            cls.increment_duration_bucket(0)
        elif duration > 15:
            cls.increment_duration_bucket(1)
        elif duration > 7:
            cls.increment_duration_bucket(2)
        elif duration > 3:
            cls.increment_duration_bucket(3)
        elif duration > 1:
            cls.increment_duration_bucket(4)
        else:
            cls.increment_duration_bucket(5)

    @classmethod
    def log_progress(cls, force=False):
        """Logs the aggregate counters every PROGRESS_BATCH requests."""
        update = ((cls.GET_COUNT + cls.POST_COUNT) % (
            cls.PROGRESS_BATCH) == 0)
        if update or force:
            logging.info(
                'GET/POST:[%s, %s], RETRIES:[%s], SLA:%s',
                cls.GET_COUNT, cls.POST_COUNT, cls.RETRY_COUNT,
                cls.RESPONSE_TIME_HISTOGRAM)

    def get_cookie_value(self, name):
        """Returns the value of the named cookie, or None if absent."""
        for cookie in self.cj:
            if cookie.name == name:
                return cookie.value
        return None

    def is_soft_error(self, http_error):
        """Checks if HTTPError is due to starvation of frontend instances."""
        body = http_error.fp.read()

        # this is the text specific to the front end instance starvation, which
        # is a retriable error for both GET and POST; normal HTTP error 500 has
        # this specific text '<h1>500 Internal Server Error</h1>'
        if http_error.code == 500 and '<h1>Error: Server Error</h1>' in body:
            return True

        logging.error(
            'Non-retriable HTTP %s error:\n%s', http_error.code, body)
        return False

    def open(self, request, hint):
        """Executes any HTTP request, retrying soft errors a few times."""
        start_time = time.time()
        try:
            try_count = 0
            while True:
                try:
                    return self.opener.open(request)
                except urllib2.HTTPError as he:
                    # Retry only soft (instance-starvation) 500s, up to
                    # MAX_RETRIES; everything else propagates.
                    if (try_count < WebSession.MAX_RETRIES and
                        self.is_soft_error(he)):
                        try_count += 1
                        with WebSession.PROGRESS_LOCK:
                            WebSession.RETRY_COUNT += 1
                        time.sleep(WebSession.RETRY_SLEEP_SEC)
                        continue
                    raise he
        except Exception as e:
            logging.info(
                'Error in session %s executing: %s', self.uid, hint)
            raise e
        finally:
            # Record the duration even when the request ultimately failed.
            with WebSession.PROGRESS_LOCK:
                self.update_duration(time.time() - start_time)

    def get(self, url, expected_code=200):
        """HTTP GET."""
        with WebSession.PROGRESS_LOCK:
            WebSession.GET_COUNT += 1
            self.log_progress()
        request = urllib2.Request(url)
        for key, value in self.common_headers.items():
            request.add_header(key, value)
        response = self.open(request, 'GET %s' % url)
        assert_equals(expected_code, response.code)
        return response.read()

    def post(self, url, args_dict, expected_code=200):
        """HTTP POST."""
        with WebSession.PROGRESS_LOCK:
            WebSession.POST_COUNT += 1
            self.log_progress()
        # Supplying a data payload makes urllib2 issue a POST.
        data = urllib.urlencode(args_dict)
        request = urllib2.Request(url, data)
        response = self.open(request, 'POST %s' % url)
        assert_equals(expected_code, response.code)
        return response.read()
class TaskThread(threading.Thread):
    """Runs a task in a separate thread, recording any failure.

    Bug fixes versus the previous version:
    - run() now stores the raised exception in self.exception; previously
      it only re-raised (with Python 2-only syntax), so check_all_tasks()
      could never detect a failed task because self.exception stayed None.
    - 'Tasks failed' message is now %-interpolated instead of being passed
      logging-style args, which Exception() does not interpolate.
    - isAlive() replaced with the non-deprecated is_alive().
    """

    def __init__(self, func, name=None):
        super(TaskThread, self).__init__()
        self.func = func
        # Set by run() when the task raises; inspected by check_all_tasks().
        self.exception = None
        self.name = name

    @classmethod
    def start_all_tasks(cls, tasks):
        """Starts all tasks."""
        for task in tasks:
            task.start()

    @classmethod
    def check_all_tasks(cls, tasks):
        """Checks results of all tasks; raises if any task failed."""
        failed_count = 0
        for task in tasks:
            while True:
                # Timeouts should happen after 30 seconds.
                task.join(30)
                if task.is_alive():
                    logging.info('Still waiting for: %s.', task.name)
                    continue
                else:
                    break
            if task.exception:
                failed_count += 1

        if failed_count:
            raise Exception('Tasks failed: %s' % failed_count)

    @classmethod
    def execute_task_list(cls, tasks):
        """Starts all tasks and checks the results."""
        cls.start_all_tasks(tasks)
        cls.check_all_tasks(tasks)

    def run(self):
        try:
            self.func()
        except Exception as e:  # pylint: disable-msg=broad-except
            logging.error('Error in %s: %s', self.name, e)
            # Record the failure for check_all_tasks(); re-raising inside a
            # thread would only print a traceback and lose the result.
            self.exception = e
            self.exc_info = sys.exc_info()
class ReaderViewLoadTest(object):
    """A reader view load test.

    Each instance acts as one simulated reader playing through
    exploration 0 on the target server, using its own cookie session.
    """

    def __init__(self, base_url, uid):
        self.uid = uid
        self.host = base_url
        # Exploration playthrough state, populated by init_player() and
        # advanced by submit_and_compare().
        self.exp_id = None
        self.last_state_name = None
        self.last_params = None
        self.state_history = None
        self.session = WebSession(uid=uid)

    def run(self):
        """Plays through exploration 0 ('Welcome to Oppia!') end to end."""
        self.init_player(
            '0', 'Welcome to Oppia!', 'do you know where the name \'Oppia\'')
        self.submit_and_compare(
            '0', 'In fact, the word Oppia means \'learn\'.')
        self.submit_and_compare('Finish', 'Check your spelling!')
        self.submit_and_compare(
            'Finnish', 'Yes! Oppia is the Finnish word for learn.')

    def _get(self, url):
        """HTTP GET through this reader's session."""
        return self.session.get(url)

    def _get_json(self, url):
        """Get a JSON response, transformed to a Python object."""
        json_body = self.session.get(url)
        if not json_body.startswith(XSSI_PREFIX):
            raise Exception('Expected an XSSI prefix; found none.')
        return json.loads(json_body[len(XSSI_PREFIX):])

    def _post(self, url, data):
        """HTTP POST through this reader's session."""
        return self.session.post(url, data)

    def _post_json(self, url, data):
        """Post a JSON request, returning the response as a Python object."""
        json_body = self.session.post(str(url), {'payload': json.dumps(data)})
        if not json_body.startswith(XSSI_PREFIX):
            raise Exception('Expected an XSSI prefix; found none.')
        return json.loads(json_body[len(XSSI_PREFIX):])

    def init_player(self, exploration_id, expected_title, expected_response):
        """Loads the exploration page and its init handler; verifies both."""
        self.exp_id = exploration_id

        body = self._get('%s/explore/%s' % (self.host, self.exp_id))
        assert_contains('Learn', body)
        assert_contains('Return to the gallery', body)

        body = self._get_json(
            '%s/explorehandler/init/%s' % (self.host, self.exp_id))
        assert_equals(body['title'], expected_title)
        assert_contains(expected_response, body['init_html'])
        self.last_state_name = body['state_name']
        self.last_params = body['params']
        self.state_history = [self.last_state_name]

    def submit_and_compare(self, answer, expected_response):
        """Submits an answer and verifies the expected response appears."""
        url = '%s/explorehandler/transition/%s/%s' % (
            self.host, self.exp_id, urllib.quote(self.last_state_name))
        body = self._post_json(url, {
            'answer': answer, 'handler': 'submit', 'params': self.last_params,
            'state_history': self.state_history,
        })
        assert_contains(expected_response, body['oppia_html'])
        self.last_state_name = body['state_name']
        self.last_params = body['params']
        self.state_history += [self.last_state_name]
def run_all(args):
    """Runs the test scenario in multiple threads across iterations.

    Each thread of each iteration acts as a unique user whose uid is
    start_uid + iteration_index * thread_count + thread_index.
    """
    if args.thread_count < 1 or args.thread_count > 256:
        raise Exception('Please use between 1 and 256 threads.')
    if not args.base_url:
        raise Exception('Please specify a base URL to load-test against.')

    start_time = time.time()
    logging.info('Started testing: %s', args.base_url)
    logging.info('base_url: %s', args.base_url)
    logging.info('start_uid: %s', args.start_uid)
    logging.info('thread_count: %s', args.thread_count)
    logging.info('iteration_count: %s', args.iteration_count)
    logging.info('SLAs are [>30s, >15s, >7s, >3s, >1s, <1s]')

    try:
        for iteration_index in range(args.iteration_count):
            logging.info('Started iteration: %s', iteration_index)
            WebSession.PROGRESS_BATCH = args.thread_count
            tasks = []
            for thread_index in range(args.thread_count):
                uid = (args.start_uid +
                       iteration_index * args.thread_count + thread_index)
                scenario = ReaderViewLoadTest(args.base_url, uid)
                tasks.append(TaskThread(
                    scenario.run,
                    name='ReaderViewLoadTest-%s' % thread_index))
            try:
                TaskThread.execute_task_list(tasks)
            except Exception as e:
                logging.info('Failed iteration: %s', iteration_index)
                raise e
    finally:
        # Always dump the final counters, even if an iteration failed.
        WebSession.log_progress(force=True)

    logging.info('Done! Duration (s): %s', time.time() - start_time)
# Script entry point: configure logging and run with the CLI arguments.
if __name__ == '__main__':
    logging.basicConfig(level=logging.INFO)
    run_all(PARSER.parse_args())
from __future__ import absolute_import
import os
import lit.Test
import lit.util
class TestFormat(object):
    """Base class for lit test formats.

    Subclasses discover tests in a directory (getTestsInDirectory) and
    may implement how they are executed.
    """
    pass
###
class FileBasedTest(TestFormat):
    """Test format that treats each matching file in a directory as a test."""

    def getTestsInDirectory(self, testSuite, path_in_suite,
                            litConfig, localConfig):
        source_path = testSuite.getSourcePath(path_in_suite)
        for filename in os.listdir(source_path):
            # Ignore dot files and excluded tests.
            if filename.startswith('.') or filename in localConfig.excludes:
                continue

            filepath = os.path.join(source_path, filename)
            if os.path.isdir(filepath):
                continue

            # Only files whose extension is in the configured suffix list
            # count as tests.
            _, ext = os.path.splitext(filename)
            if ext in localConfig.suffixes:
                yield lit.Test.Test(testSuite, path_in_suite + (filename,),
                                    localConfig)
###
import re
import tempfile
class OneCommandPerFileTest(TestFormat):
    """Test format that runs one external command per discovered file."""
    # FIXME: Refactor into generic test for running some command on a directory
    # of inputs.

    def __init__(self, command, dir, recursive=False,
                 pattern=".*", useTempInput=False):
        # Normalize the command into an argv list.
        if isinstance(command, str):
            self.command = [command]
        else:
            self.command = list(command)
        if dir is not None:
            dir = str(dir)
        self.dir = dir
        self.recursive = bool(recursive)
        # Only filenames matching this regex are treated as tests.
        self.pattern = re.compile(pattern)
        # When true, each test's input is generated into a temporary file
        # by the subclass (see createTempInput) instead of read from disk.
        self.useTempInput = useTempInput

    def getTestsInDirectory(self, testSuite, path_in_suite,
                            litConfig, localConfig):
        """Yields one Test per matching file under the test directory."""
        dir = self.dir
        if dir is None:
            dir = testSuite.getSourcePath(path_in_suite)

        for dirname,subdirs,filenames in os.walk(dir):
            # Emptying subdirs in place stops os.walk from descending.
            if not self.recursive:
                subdirs[:] = []

            subdirs[:] = [d for d in subdirs
                          if (d != '.svn' and
                              d not in localConfig.excludes)]

            for filename in filenames:
                if (filename.startswith('.') or
                    not self.pattern.match(filename) or
                    filename in localConfig.excludes):
                    continue

                path = os.path.join(dirname,filename)
                # The test's path components are the file's path relative
                # to the test directory.
                suffix = path[len(dir):]
                if suffix.startswith(os.sep):
                    suffix = suffix[1:]
                test = lit.Test.Test(
                    testSuite, path_in_suite + tuple(suffix.split(os.sep)),
                    localConfig)
                # FIXME: Hack?
                test.source_path = path
                yield test

    def createTempInput(self, tmp, test):
        """Subclass hook: write the test's input into the open file *tmp*."""
        raise NotImplementedError('This is an abstract method.')

    def execute(self, test, litConfig):
        if test.config.unsupported:
            return (lit.Test.UNSUPPORTED, 'Test is unsupported')

        cmd = list(self.command)

        # If using temp input, create a temporary file and hand it to the
        # subclass.
        if self.useTempInput:
            tmp = tempfile.NamedTemporaryFile(suffix='.cpp')
            self.createTempInput(tmp, test)
            tmp.flush()
            cmd.append(tmp.name)
        elif hasattr(test, 'source_path'):
            cmd.append(test.source_path)
        else:
            cmd.append(test.getSourcePath())

        out, err, exitCode = lit.util.executeCommand(cmd)

        # Empty output plus a zero exit code means the test passed.
        diags = out + err
        if not exitCode and not diags.strip():
            return lit.Test.PASS,''

        # Try to include some useful information.
        report = """Command: %s\n""" % ' '.join(["'%s'" % a
                                                 for a in cmd])
        if self.useTempInput:
            report += """Temporary File: %s\n""" % tmp.name
            report += "--\n%s--\n""" % open(tmp.name).read()
        report += """Output:\n--\n%s--""" % diags
        return lit.Test.FAIL, report
import hal
import glib
import time
class HandlerClass:
    '''
    class with gladevcp callback handlers
    '''
    def on_button_press(self,widget,data=None):
        '''
        a callback method

        parameters are:
        the generating object instance, like a GtkButton instance
        user data passed if any - this is currently unused but
        the convention should be retained just in case
        '''
        print "on_button_press called"
        # Count the press and mirror the count into the 'hits' label widget.
        self.nhits += 1
        self.builder.get_object('hits').set_label("Hits: %d" % (self.nhits))

    def __init__(self, halcomp,builder,useropts):
        '''
        Handler classes are instantiated in the following state:
        - the widget tree is created, but not yet realized (no toplevel window.show() executed yet)
        - the halcomp HAL component is set up and the widget tree's HAL pins have already been added to it
        - it is safe to add more hal pins because halcomp.ready() has not yet been called at this point.

        after all handlers are instantiated in command line and get_handlers() order, callbacks will be
        connected with connect_signals()/signal_autoconnect()

        The builder may be either of libglade or GtkBuilder type depending on the glade file format.
        '''
        self.halcomp = halcomp
        self.builder = builder
        # Number of button presses seen so far; displayed by on_button_press.
        self.nhits = 0
def get_handlers(halcomp,builder,useropts):
    '''
    this function is called by gladevcp at import time (when this module is passed with '-u <modname>.py')

    return a list of object instances whose methods should be connected as callback handlers
    any method whose name does not begin with an underscore ('_') is a callback candidate

    the 'get_handlers' name is reserved - gladevcp expects it, so do not change
    '''
    # A single handler instance is sufficient for this panel.
    return [HandlerClass(halcomp,builder,useropts)] | unknown | codeparrot/codeparrot-clean | |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.common.security.oauthbearer.internals;
import org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule;
import org.apache.kafka.common.security.oauthbearer.internals.OAuthBearerSaslClient.OAuthBearerSaslClientFactory;
import java.security.Provider;
import java.security.Security;
/**
 * Security {@link Provider} that registers the SASL/OAUTHBEARER client factory
 * so the SASL framework can locate it by mechanism name.
 */
public final class OAuthBearerSaslClientProvider extends Provider {
    private static final long serialVersionUID = 1L;

    private OAuthBearerSaslClientProvider() {
        super("SASL/OAUTHBEARER Client Provider", "1.0", "SASL/OAUTHBEARER Client Provider for Kafka");
        // Advertise OAuthBearerSaslClientFactory as the SaslClientFactory for
        // the OAUTHBEARER mechanism.
        put("SaslClientFactory." + OAuthBearerLoginModule.OAUTHBEARER_MECHANISM,
            OAuthBearerSaslClientFactory.class.getName());
    }

    /** Install this provider into the JVM-wide {@link Security} provider list. */
    public static void initialize() {
        Security.addProvider(new OAuthBearerSaslClientProvider());
    }
} | java | github | https://github.com/apache/kafka | clients/src/main/java/org/apache/kafka/common/security/oauthbearer/internals/OAuthBearerSaslClientProvider.java
#!/usr/bin/env python
"""
C to C++ Translator
Convert a C program or whole project to C++
Copyright (C) 2001-2009 Denis Sureau
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
webmaster@scriptol.com
http://www.scriptol.com
PMAKE
Compile a list of sources
"""
import os
import string
import sys
# remove unwanted codes from lines
def chop(n):
    """Strip trailing newline/CR characters from *n*, keeping at least one char.

    Bug fix: the original used the bitwise '&' operator, which does not
    short-circuit, so ``n[-1]`` was evaluated even for an empty string and
    raised IndexError. ``and`` short-circuits and is the correct boolean
    operator here.
    """
    while len(n) > 1 and n[-1] in ("\n", "\r"):
        n = n[:-1]
    return n
# Working directory (captured but not otherwise used below).
path = os.getcwd()
# read the list of files
fic = open("cdlist.prj","r")
liste = fic.readlines()
fic.close()
sortie = open("test", "w")
# Redirect stdout so the "compiling ..." progress lines go to the "test" file.
sys.stdout = sortie
# scan the list of sources and compile each .C one
for n in liste:
    n = chop(n)
    if os.path.isdir(n): continue
    node, ext = os.path.splitext(n)
    ext = string.upper(ext)
    # NOTE(review): after upper() the extension can only be ".C", so the ".c"
    # entry below never matches; files like ".CPP" are also skipped. Confirm
    # whether the upper() call or the membership list reflects the intent.
    if ext in [ ".c", ".C" ]:
        print "compiling " + n,
        os.system("bcc32 -c " + node)
sortie.close() | unknown | codeparrot/codeparrot-clean | |
#!/usr/bin/env python
# This file is part of Tryton. The COPYRIGHT file at the top level of
# this repository contains the full copyright notices and license terms.
from setuptools import setup
import re
import os
import ConfigParser
def read(fname):
    """Return the contents of *fname*, resolved relative to this setup script."""
    # Use a context manager so the file handle is closed promptly; the
    # original left the handle open until garbage collection.
    with open(os.path.join(os.path.dirname(__file__), fname)) as fh:
        return fh.read()
def get_require_version(name):
    """Return a pip version specifier pinning *name* to this module's series.

    Relies on the module-level major_version/minor_version globals; odd minor
    versions are development series and therefore allow the .dev0 pre-release
    as the lower bound.
    """
    if minor_version % 2:
        require = '%s >= %s.%s.dev0, < %s.%s'
    else:
        require = '%s >= %s.%s, < %s.%s'
    require %= (name, major_version, minor_version,
        major_version, minor_version + 1)
    return require
# Parse the Tryton module's tryton.cfg for metadata (version, depends, xml).
config = ConfigParser.ConfigParser()
config.readfp(open('tryton.cfg'))
info = dict(config.items('tryton'))
for key in ('depends', 'extras_depend', 'xml'):
    if key in info:
        info[key] = info[key].strip().splitlines()
version = info.get('version', '0.0.1')
major_version, minor_version, _ = version.split('.', 2)
major_version = int(major_version)
minor_version = int(minor_version)
name = 'trytond_sale_supply'
download_url = 'http://downloads.tryton.org/%s.%s/' % (
    major_version, minor_version)
# Odd minor versions are development series: switch to a .dev0 version and
# point the download URL at the mercurial repository instead of a release.
if minor_version % 2:
    version = '%s.%s.dev0' % (major_version, minor_version)
    download_url = (
        'hg+http://hg.tryton.org/modules/%s#egg=%s-%s' % (
            name[8:], name, version))
# Translate Tryton module dependencies into Python package requirements,
# skipping the built-in ir/res/webdav modules shipped with trytond itself.
requires = []
for dep in info.get('depends', []):
    if not re.match(r'(ir|res|webdav)(\W|$)', dep):
        requires.append(get_require_version('trytond_%s' % dep))
requires.append(get_require_version('trytond'))
tests_require = [get_require_version('proteus')]
dependency_links = []
if minor_version % 2:
    # Add development index for testing with proteus
    dependency_links.append('https://trydevpi.tryton.org/')
setup(name=name,
version=version,
description='Tryton module for sale supply',
long_description=read('README'),
author='Tryton',
author_email='issue_tracker@tryton.org',
url='http://www.tryton.org/',
download_url=download_url,
keywords='tryton sale supply',
package_dir={'trytond.modules.sale_supply': '.'},
packages=[
'trytond.modules.sale_supply',
'trytond.modules.sale_supply.tests',
],
package_data={
'trytond.modules.sale_supply': (info.get('xml', [])
+ ['tryton.cfg', 'view/*.xml', 'locale/*.po', 'tests/*.rst']),
},
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Plugins',
'Framework :: Tryton',
'Intended Audience :: Developers',
'Intended Audience :: Financial and Insurance Industry',
'Intended Audience :: Legal Industry',
'License :: OSI Approved :: GNU General Public License (GPL)',
'Natural Language :: Bulgarian',
'Natural Language :: Catalan',
'Natural Language :: Czech',
'Natural Language :: Dutch',
'Natural Language :: English',
'Natural Language :: French',
'Natural Language :: German',
'Natural Language :: Hungarian',
'Natural Language :: Italian',
'Natural Language :: Portuguese (Brazilian)',
'Natural Language :: Russian',
'Natural Language :: Slovenian',
'Natural Language :: Spanish',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Office/Business',
'Topic :: Office/Business :: Financial :: Accounting',
],
license='GPL-3',
install_requires=requires,
dependency_links=dependency_links,
zip_safe=False,
entry_points="""
[trytond.modules]
sale_supply = trytond.modules.sale_supply
""",
test_suite='tests',
test_loader='trytond.test_loader:Loader',
tests_require=tests_require,
) | unknown | codeparrot/codeparrot-clean | ||
//// [tests/cases/compiler/assignmentCompatability28.ts] ////
//// [assignmentCompatability28.ts]
namespace __test1__ {
export interface interfaceWithPublicAndOptional<T,U> { one: T; two?: U; }; var obj4: interfaceWithPublicAndOptional<number,string> = { one: 1 };;
export var __val__obj4 = obj4;
}
namespace __test2__ {
export declare var aa:{one:boolean;};;
export var __val__aa = aa;
}
__test2__.__val__aa = __test1__.__val__obj4
//// [assignmentCompatability28.js]
"use strict";
var __test1__;
(function (__test1__) {
;
var obj4 = { one: 1 };
;
__test1__.__val__obj4 = obj4;
})(__test1__ || (__test1__ = {}));
var __test2__;
(function (__test2__) {
;
__test2__.__val__aa = __test2__.aa;
})(__test2__ || (__test2__ = {}));
__test2__.__val__aa = __test1__.__val__obj4; | javascript | github | https://github.com/microsoft/TypeScript | tests/baselines/reference/assignmentCompatability28.js |
/*
* Copyright 2002-present the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.scheduling.quartz;
import org.springframework.core.NestedRuntimeException;
import org.springframework.util.MethodInvoker;
/**
* Unchecked exception that wraps an exception thrown from a target method.
* Propagated to the Quartz scheduler from a Job that reflectively invokes
* an arbitrary target method.
*
* @author Juergen Hoeller
* @since 2.5.3
* @see MethodInvokingJobDetailFactoryBean
*/
@SuppressWarnings("serial")
public class JobMethodInvocationFailedException extends NestedRuntimeException {

	/**
	 * Constructor for JobMethodInvocationFailedException.
	 * @param methodInvoker the MethodInvoker used for reflective invocation
	 * @param cause the root cause (as thrown from the target method)
	 */
	public JobMethodInvocationFailedException(MethodInvoker methodInvoker, Throwable cause) {
		// Embed the failing method and target class in the message; the target
		// method's original exception is preserved as the nested cause.
		super("Invocation of method '" + methodInvoker.getTargetMethod() +
				"' on target class [" + methodInvoker.getTargetClass() + "] failed", cause);
	}

} | java | github | https://github.com/spring-projects/spring-framework | spring-context-support/src/main/java/org/springframework/scheduling/quartz/JobMethodInvocationFailedException.java
# -------------------------------------------------------------
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
# -------------------------------------------------------------
__all__ = ['normal', 'uniform', 'poisson']
from ..defmatrix import *
# Special object used internally to specify the placeholder which will be replaced by output ID
# This helps to provide dml containing output ID in constructSamplingNode
OUTPUT_ID = '$$OutputID$$'
def constructSamplingNode(inputs, dml):
    """
    Convenient utility to create an intermediate of AST.

    Parameters
    ----------
    inputs = list of input matrix objects and/or DMLOp
    dml = list of DML string (which will be eventually joined before execution). To specify out.ID, please use the placeholder
    """
    dmlOp = DMLOp(inputs)
    out = matrix(None, op=dmlOp)
    # Substitute the output matrix's freshly-assigned ID for every
    # OUTPUT_ID placeholder in the DML fragment list.
    dmlOp.dml = [out.ID if x == OUTPUT_ID else x for x in dml]
    return out
INPUTS = []
def asStr(arg):
    """
    Internal use only: record matrix arguments in the module-level INPUTS
    list and return the DML string representation of *arg*.

    If *arg* is a matrix, it is appended to INPUTS (so the sampling node
    knows its dependencies) and its ID is returned; otherwise str(arg).
    """
    # bug fix: the assignment below made INPUTS a local variable, so
    # ``INPUTS = INPUTS + [arg]`` raised UnboundLocalError and the module-level
    # list was never updated. Declare it global, as intended.
    global INPUTS
    if isinstance(arg, matrix):
        INPUTS = INPUTS + [arg]
        return arg.ID
    else:
        return str(arg)
def normal(loc=0.0, scale=1.0, size=(1, 1), sparsity=1.0):
    """
    Draw random samples from a normal (Gaussian) distribution.

    Parameters
    ----------
    loc: Mean ("centre") of the distribution.
    scale: Standard deviation (spread or "width") of the distribution.
    size: Output shape (only tuple of length 2, i.e. (m, n), supported).
    sparsity: Sparsity (between 0.0 and 1.0).

    Examples
    --------
    >>> import systemml as sml
    >>> import numpy as np
    >>> sml.setSparkContext(sc)
    >>> from systemml import random
    >>> m1 = sml.random.normal(loc=3, scale=2, size=(3,3))
    >>> m1.toNumPy()
    array([[ 3.48857226,  6.17261819,  2.51167259],
           [ 3.60506708, -1.90266305,  3.97601633],
           [ 3.62245706,  5.9430881 ,  2.53070413]])
    """
    if len(size) != 2:
        raise TypeError('Incorrect type for size. Expected tuple of length 2')
    # bug fix: reset the *module-level* list that asStr() appends matrix
    # arguments to; without the global declaration this created a dead local.
    global INPUTS
    INPUTS = []
    rows = asStr(size[0])
    cols = asStr(size[1])
    loc = asStr(loc)
    scale = asStr(scale)
    sparsity = asStr(sparsity)
    # loc + scale*standard normal
    return constructSamplingNode(INPUTS, [
        OUTPUT_ID, ' = ', loc, ' + ', scale, ' * random.normal(', rows, ',', cols, ',', sparsity, ')\n'])
def uniform(low=0.0, high=1.0, size=(1, 1), sparsity=1.0):
    """
    Draw samples from a uniform distribution.

    Parameters
    ----------
    low: Lower boundary of the output interval.
    high: Upper boundary of the output interval.
    size: Output shape (only tuple of length 2, i.e. (m, n), supported).
    sparsity: Sparsity (between 0.0 and 1.0).

    Examples
    --------
    >>> import systemml as sml
    >>> import numpy as np
    >>> sml.setSparkContext(sc)
    >>> from systemml import random
    >>> m1 = sml.random.uniform(size=(3,3))
    >>> m1.toNumPy()
    array([[ 0.54511396,  0.11937437,  0.72975775],
           [ 0.14135946,  0.01944448,  0.52544478],
           [ 0.67582422,  0.87068849,  0.02766852]])
    """
    if len(size) != 2:
        raise TypeError('Incorrect type for size. Expected tuple of length 2')
    # bug fix: reset the *module-level* list that asStr() appends matrix
    # arguments to; without the global declaration this created a dead local.
    global INPUTS
    INPUTS = []
    rows = asStr(size[0])
    cols = asStr(size[1])
    low = asStr(low)
    high = asStr(high)
    sparsity = asStr(sparsity)
    return constructSamplingNode(INPUTS, [
        OUTPUT_ID, ' = random.uniform(', rows, ',', cols, ',', sparsity, ',', low, ',', high, ')\n'])
def poisson(lam=1.0, size=(1, 1), sparsity=1.0):
"""
Draw samples from a Poisson distribution.
Parameters
----------
lam: Expectation of interval, should be > 0.
size: Output shape (only tuple of length 2, i.e. (m, n), supported).
sparsity: Sparsity (between 0.0 and 1.0).
Examples
--------
>>> import systemml as sml
>>> import numpy as np
>>> sml.setSparkContext(sc)
>>> from systemml import random
>>> m1 = sml.random.poisson(lam=1, size=(3,3))
>>> m1.toNumPy()
array([[ 1., 0., 2.],
[ 1., 0., 0.],
[ 0., 0., 0.]])
"""
if len(size) != 2:
raise TypeError('Incorrect type for size. Expected tuple of length 2')
INPUTS = []
rows = asStr(size[0])
cols = asStr(size[1])
lam = asStr(lam)
sparsity = asStr(sparsity)
return constructSamplingNode(INPUTS, [
OUTPUT_ID, ' = random.poisson(', rows, ',', cols, ',', sparsity, ',', lam, ')\n']) | unknown | codeparrot/codeparrot-clean | ||
# Copyright 2024 The HuggingFace Team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import TYPE_CHECKING
from ...utils import _LazyModule
from ...utils.import_utils import define_import_structure
if TYPE_CHECKING:
    # For static type checkers, expose the real symbols eagerly.
    from .configuration_flaubert import *
    from .modeling_flaubert import *
    from .tokenization_flaubert import *
else:
    import sys

    # At runtime, replace this module with a lazy proxy so the heavy
    # submodules are only imported on first attribute access.
    _file = globals()["__file__"]
    sys.modules[__name__] = _LazyModule(__name__, _file, define_import_structure(_file), module_spec=__spec__) | python | github | https://github.com/huggingface/transformers | src/transformers/models/flaubert/__init__.py
# -*- coding: latin-1 -*-
''' Nose test generators
Need function load / save / roundtrip tests
'''
from __future__ import division, print_function, absolute_import
import os
from os.path import join as pjoin, dirname
from glob import glob
from io import BytesIO
from tempfile import mkdtemp
from scipy._lib.six import u, text_type, string_types
import warnings
import shutil
import gzip
from numpy.testing import (assert_array_equal, assert_array_almost_equal,
assert_equal, assert_)
from pytest import raises as assert_raises
from scipy._lib._numpy_compat import suppress_warnings
import numpy as np
from numpy import array
import scipy.sparse as SP
import scipy.io.matlab.byteordercodes as boc
from scipy.io.matlab.miobase import matdims, MatWriteError, MatReadError
from scipy.io.matlab.mio import (mat_reader_factory, loadmat, savemat, whosmat)
from scipy.io.matlab.mio5 import (MatlabObject, MatFile5Writer, MatFile5Reader,
MatlabFunction, varmats_from_mat,
to_writeable, EmptyStructMarker)
from scipy.io.matlab import mio5_params as mio5p
test_data_path = pjoin(dirname(__file__), 'data')
def mlarr(*args, **kwargs):
    """Build an array via ``np.array`` and reshape it to a matlab-compatible
    (at least 2-D) shape using ``matdims``."""
    out = np.array(*args, **kwargs)
    out.shape = matdims(out)
    return out
# Define cases to test
theta = np.pi/4*np.arange(9,dtype=float).reshape(1,9)
case_table4 = [
{'name': 'double',
'classes': {'testdouble': 'double'},
'expected': {'testdouble': theta}
}]
case_table4.append(
{'name': 'string',
'classes': {'teststring': 'char'},
'expected': {'teststring':
array([u('"Do nine men interpret?" "Nine men," I nod.')])}
})
case_table4.append(
{'name': 'complex',
'classes': {'testcomplex': 'double'},
'expected': {'testcomplex': np.cos(theta) + 1j*np.sin(theta)}
})
A = np.zeros((3,5))
A[0] = list(range(1,6))
A[:,0] = list(range(1,4))
case_table4.append(
{'name': 'matrix',
'classes': {'testmatrix': 'double'},
'expected': {'testmatrix': A},
})
case_table4.append(
{'name': 'sparse',
'classes': {'testsparse': 'sparse'},
'expected': {'testsparse': SP.coo_matrix(A)},
})
B = A.astype(complex)
B[0,0] += 1j
case_table4.append(
{'name': 'sparsecomplex',
'classes': {'testsparsecomplex': 'sparse'},
'expected': {'testsparsecomplex': SP.coo_matrix(B)},
})
case_table4.append(
{'name': 'multi',
'classes': {'theta': 'double', 'a': 'double'},
'expected': {'theta': theta, 'a': A},
})
case_table4.append(
{'name': 'minus',
'classes': {'testminus': 'double'},
'expected': {'testminus': mlarr(-1)},
})
case_table4.append(
{'name': 'onechar',
'classes': {'testonechar': 'char'},
'expected': {'testonechar': array([u('r')])},
})
# Cell arrays stored as object arrays
CA = mlarr(( # tuple for object array creation
[],
mlarr([1]),
mlarr([[1,2]]),
mlarr([[1,2,3]])), dtype=object).reshape(1,-1)
CA[0,0] = array(
[u('This cell contains this string and 3 arrays of increasing length')])
case_table5 = [
{'name': 'cell',
'classes': {'testcell': 'cell'},
'expected': {'testcell': CA}}]
CAE = mlarr(( # tuple for object array creation
mlarr(1),
mlarr(2),
mlarr([]),
mlarr([]),
mlarr(3)), dtype=object).reshape(1,-1)
objarr = np.empty((1,1),dtype=object)
objarr[0,0] = mlarr(1)
case_table5.append(
{'name': 'scalarcell',
'classes': {'testscalarcell': 'cell'},
'expected': {'testscalarcell': objarr}
})
case_table5.append(
{'name': 'emptycell',
'classes': {'testemptycell': 'cell'},
'expected': {'testemptycell': CAE}})
case_table5.append(
{'name': 'stringarray',
'classes': {'teststringarray': 'char'},
'expected': {'teststringarray': array(
[u('one '), u('two '), u('three')])},
})
case_table5.append(
{'name': '3dmatrix',
'classes': {'test3dmatrix': 'double'},
'expected': {
'test3dmatrix': np.transpose(np.reshape(list(range(1,25)), (4,3,2)))}
})
st_sub_arr = array([np.sqrt(2),np.exp(1),np.pi]).reshape(1,3)
dtype = [(n, object) for n in ['stringfield', 'doublefield', 'complexfield']]
st1 = np.zeros((1,1), dtype)
st1['stringfield'][0,0] = array([u('Rats live on no evil star.')])
st1['doublefield'][0,0] = st_sub_arr
st1['complexfield'][0,0] = st_sub_arr * (1 + 1j)
case_table5.append(
{'name': 'struct',
'classes': {'teststruct': 'struct'},
'expected': {'teststruct': st1}
})
CN = np.zeros((1,2), dtype=object)
CN[0,0] = mlarr(1)
CN[0,1] = np.zeros((1,3), dtype=object)
CN[0,1][0,0] = mlarr(2, dtype=np.uint8)
CN[0,1][0,1] = mlarr([[3]], dtype=np.uint8)
CN[0,1][0,2] = np.zeros((1,2), dtype=object)
CN[0,1][0,2][0,0] = mlarr(4, dtype=np.uint8)
CN[0,1][0,2][0,1] = mlarr(5, dtype=np.uint8)
case_table5.append(
{'name': 'cellnest',
'classes': {'testcellnest': 'cell'},
'expected': {'testcellnest': CN},
})
st2 = np.empty((1,1), dtype=[(n, object) for n in ['one', 'two']])
st2[0,0]['one'] = mlarr(1)
st2[0,0]['two'] = np.empty((1,1), dtype=[('three', object)])
st2[0,0]['two'][0,0]['three'] = array([u('number 3')])
case_table5.append(
{'name': 'structnest',
'classes': {'teststructnest': 'struct'},
'expected': {'teststructnest': st2}
})
a = np.empty((1,2), dtype=[(n, object) for n in ['one', 'two']])
a[0,0]['one'] = mlarr(1)
a[0,0]['two'] = mlarr(2)
a[0,1]['one'] = array([u('number 1')])
a[0,1]['two'] = array([u('number 2')])
case_table5.append(
{'name': 'structarr',
'classes': {'teststructarr': 'struct'},
'expected': {'teststructarr': a}
})
ODT = np.dtype([(n, object) for n in
['expr', 'inputExpr', 'args',
'isEmpty', 'numArgs', 'version']])
MO = MatlabObject(np.zeros((1,1), dtype=ODT), 'inline')
m0 = MO[0,0]
m0['expr'] = array([u('x')])
m0['inputExpr'] = array([u(' x = INLINE_INPUTS_{1};')])
m0['args'] = array([u('x')])
m0['isEmpty'] = mlarr(0)
m0['numArgs'] = mlarr(1)
m0['version'] = mlarr(1)
case_table5.append(
{'name': 'object',
'classes': {'testobject': 'object'},
'expected': {'testobject': MO}
})
fp_u_str = open(pjoin(test_data_path, 'japanese_utf8.txt'), 'rb')
u_str = fp_u_str.read().decode('utf-8')
fp_u_str.close()
case_table5.append(
{'name': 'unicode',
'classes': {'testunicode': 'char'},
'expected': {'testunicode': array([u_str])}
})
case_table5.append(
{'name': 'sparse',
'classes': {'testsparse': 'sparse'},
'expected': {'testsparse': SP.coo_matrix(A)},
})
case_table5.append(
{'name': 'sparsecomplex',
'classes': {'testsparsecomplex': 'sparse'},
'expected': {'testsparsecomplex': SP.coo_matrix(B)},
})
case_table5.append(
{'name': 'bool',
'classes': {'testbools': 'logical'},
'expected': {'testbools':
array([[True], [False]])},
})
case_table5_rt = case_table5[:]
# Inline functions can't be concatenated in matlab, so RT only
case_table5_rt.append(
{'name': 'objectarray',
'classes': {'testobjectarray': 'object'},
'expected': {'testobjectarray': np.repeat(MO, 2).reshape(1,2)}})
def types_compatible(var1, var2):
    """Check if types are same or compatible.

    0-D numpy scalars are compatible with bare python scalars.
    """
    t1, t2 = type(var1), type(var2)
    if t1 is t2:
        return True
    # A 0-d ndarray on either side is compared via its unboxed scalar type.
    if t1 is np.ndarray and var1.shape == ():
        return type(var1.item()) is t2
    if t2 is np.ndarray and var2.shape == ():
        return type(var2.item()) is t1
    return False
def _check_level(label, expected, actual):
    """ Check one level of a potentially nested array """
    if SP.issparse(expected):  # allow different types of sparse matrices
        assert_(SP.issparse(actual))
        assert_array_almost_equal(actual.todense(),
                                  expected.todense(),
                                  err_msg=label,
                                  decimal=5)
        return
    # Check types are as expected
    assert_(types_compatible(expected, actual),
            "Expected type %s, got %s at %s" %
            (type(expected), type(actual), label))
    # A field in a record array may not be an ndarray
    # A scalar from a record array will be type np.void
    if not isinstance(expected,
                      (np.void, np.ndarray, MatlabObject)):
        # Plain python scalar: direct comparison.
        assert_equal(expected, actual)
        return
    # This is an ndarray-like thing
    assert_(expected.shape == actual.shape,
            msg='Expected shape %s, got %s at %s' % (expected.shape,
                                                     actual.shape,
                                                     label))
    ex_dtype = expected.dtype
    if ex_dtype.hasobject:  # array of objects
        if isinstance(expected, MatlabObject):
            assert_equal(expected.classname, actual.classname)
        # Recurse element-wise into the object array.
        for i, ev in enumerate(expected):
            level_label = "%s, [%d], " % (label, i)
            _check_level(level_label, ev, actual[i])
        return
    if ex_dtype.fields:  # probably recarray
        # Recurse field-by-field into the record array.
        for fn in ex_dtype.fields:
            level_label = "%s, field %s, " % (label, fn)
            _check_level(level_label,
                         expected[fn], actual[fn])
        return
    if ex_dtype.type in (text_type,  # string or bool
                         np.unicode_,
                         np.bool_):
        assert_equal(actual, expected, err_msg=label)
        return
    # Something numeric
    assert_array_almost_equal(actual, expected, err_msg=label, decimal=5)
def _load_check_case(name, files, case):
    # Load each file and verify every expected variable is present and equal.
    for file_name in files:
        matdict = loadmat(file_name, struct_as_record=True)
        label = "test %s; file %s" % (name, file_name)
        for k, expected in case.items():
            k_label = "%s, variable %s" % (label, k)
            assert_(k in matdict, "Missing key at %s" % k_label)
            _check_level(k_label, expected, matdict[k])
def _whos_check_case(name, files, case, classes):
    # whosmat() should report (name, shape, matlab class) for each variable.
    for file_name in files:
        label = "test %s; file %s" % (name, file_name)
        whos = whosmat(file_name)
        expected_whos = []
        for k, expected in case.items():
            expected_whos.append((k, expected.shape, classes[k]))
        # Sort both sides so the comparison is order-independent.
        whos.sort()
        expected_whos.sort()
        assert_equal(whos, expected_whos,
                     "%s: %r != %r" % (label, whos, expected_whos)
                     )
# Round trip tests
def _rt_check_case(name, expected, format):
    # Save to an in-memory stream, reload, and compare against `expected`.
    mat_stream = BytesIO()
    savemat(mat_stream, expected, format=format)
    mat_stream.seek(0)
    _load_check_case(name, [mat_stream], expected)
# generator for load tests
def test_load():
    # Each case is checked against every saved .mat variant found on disk.
    for case in case_table4 + case_table5:
        name = case['name']
        expected = case['expected']
        filt = pjoin(test_data_path, 'test%s_*.mat' % name)
        files = glob(filt)
        assert_(len(files) > 0,
                "No files for test %s using filter %s" % (name, filt))
        _load_check_case(name, files, expected)
# generator for whos tests
def test_whos():
    # Verify whosmat() metadata (name/shape/class) for every on-disk variant.
    for case in case_table4 + case_table5:
        name = case['name']
        expected = case['expected']
        classes = case['classes']
        filt = pjoin(test_data_path, 'test%s_*.mat' % name)
        files = glob(filt)
        assert_(len(files) > 0,
                "No files for test %s using filter %s" % (name, filt))
        _whos_check_case(name, files, expected, classes)
# generator for round trip tests
def test_round_trip():
    """Save each case to an in-memory stream and reload it, in every
    applicable MAT format."""
    # Hoisted out of the loop: the original rebuilt this list on every case.
    case_table4_names = [case['name'] for case in case_table4]
    for case in case_table4 + case_table5_rt:
        name = case['name'] + '_round_trip'
        expected = case['expected']
        # Format '4' only supports the simpler data types of case_table4.
        for format in (['4', '5'] if case['name'] in case_table4_names else ['5']):
            _rt_check_case(name, expected, format)
def test_gzip_simple():
    # savemat/loadmat must round-trip correctly through gzip file objects.
    xdense = np.zeros((20,20))
    xdense[2,3] = 2.3
    xdense[4,5] = 4.5
    x = SP.csc_matrix(xdense)
    name = 'gzip_test'
    expected = {'x':x}
    format = '4'
    tmpdir = mkdtemp()
    try:
        fname = pjoin(tmpdir,name)
        mat_stream = gzip.open(fname,mode='wb')
        savemat(mat_stream, expected, format=format)
        mat_stream.close()
        mat_stream = gzip.open(fname,mode='rb')
        actual = loadmat(mat_stream, struct_as_record=True)
        mat_stream.close()
    finally:
        shutil.rmtree(tmpdir)
    assert_array_almost_equal(actual['x'].todense(),
                              expected['x'].todense(),
                              err_msg=repr(actual))
def test_multiple_open():
    # Ticket #1039, on Windows: check that files are not left open
    tmpdir = mkdtemp()
    try:
        x = dict(x=np.zeros((2, 2)))
        fname = pjoin(tmpdir, "a.mat")
        # Check that file is not left open
        # (os.unlink fails on Windows if a handle is still held).
        savemat(fname, x)
        os.unlink(fname)
        savemat(fname, x)
        loadmat(fname)
        os.unlink(fname)
        # Check that stream is left open
        # (seek after save/load would raise on a closed file object).
        f = open(fname, 'wb')
        savemat(f, x)
        f.seek(0)
        f.close()
        f = open(fname, 'rb')
        loadmat(f)
        f.seek(0)
        f.close()
    finally:
        shutil.rmtree(tmpdir)
def test_mat73():
    """MATLAB v7.3 (HDF5-based) files are not supported by loadmat and must
    raise NotImplementedError."""
    # Check any hdf5 files raise an error
    filenames = glob(
        pjoin(test_data_path, 'testhdf5*.mat'))
    assert_(len(filenames) > 0)
    for filename in filenames:
        # Use a context manager so the handle is closed even when the
        # assertion fails (the original leaked the handle on failure).
        with open(filename, 'rb') as fp:
            assert_raises(NotImplementedError,
                          loadmat,
                          fp,
                          struct_as_record=True)
def test_warnings():
    # This test is an echo of the previous behavior, which was to raise a
    # warning if the user triggered a search for mat files on the Python system
    # path. We can remove the test in the next version after upcoming (0.13)
    fname = pjoin(test_data_path, 'testdouble_7.1_GLNX86.mat')
    with warnings.catch_warnings():
        # Turn warnings into errors so any emitted warning fails the test.
        warnings.simplefilter('error')
        # This should not generate a warning
        mres = loadmat(fname, struct_as_record=True)
        # This neither
        mres = loadmat(fname, struct_as_record=False)
def test_regression_653():
    # Saving a dictionary with only invalid keys used to raise an error. Now we
    # save this as an empty struct in matlab space.
    sio = BytesIO()
    # Key `1` is not a valid MATLAB field name, so the dict saves as empty.
    savemat(sio, {'d':{1:2}}, format='5')
    back = loadmat(sio)['d']
    # Check we got an empty struct equivalent
    assert_equal(back.shape, (1,1))
    assert_equal(back.dtype, np.dtype(object))
    assert_(back[0,0] is None)
def test_structname_len():
    # Test limit for length of field names in structs
    # Names up to `lim` chars save fine; one char longer raises ValueError.
    lim = 31
    fldname = 'a' * lim
    st1 = np.zeros((1,1), dtype=[(fldname, object)])
    savemat(BytesIO(), {'longstruct': st1}, format='5')
    fldname = 'a' * (lim+1)
    st1 = np.zeros((1,1), dtype=[(fldname, object)])
    assert_raises(ValueError, savemat, BytesIO(),
                  {'longstruct': st1}, format='5')
def test_4_and_long_field_names_incompatible():
    # Long field names option not supported in 4
    # Requesting both must raise ValueError.
    my_struct = np.zeros((1,1),dtype=[('my_fieldname',object)])
    assert_raises(ValueError, savemat, BytesIO(),
                  {'my_struct':my_struct}, format='4', long_field_names=True)
def test_long_field_names():
    # Test limit for length of field names in structs
    # With long_field_names=True the limit rises to 63 chars; 64 still fails.
    lim = 63
    fldname = 'a' * lim
    st1 = np.zeros((1,1), dtype=[(fldname, object)])
    savemat(BytesIO(), {'longstruct': st1}, format='5',long_field_names=True)
    fldname = 'a' * (lim+1)
    st1 = np.zeros((1,1), dtype=[(fldname, object)])
    assert_raises(ValueError, savemat, BytesIO(),
                  {'longstruct': st1}, format='5',long_field_names=True)
def test_long_field_names_in_struct():
    # Regression test - long_field_names was erased if you passed a struct
    # within a struct
    lim = 63
    fldname = 'a' * lim
    cell = np.ndarray((1,2),dtype=object)
    st1 = np.zeros((1,1), dtype=[(fldname, object)])
    cell[0,0] = st1
    cell[0,1] = st1
    # The option must propagate to the nested structs inside the cell.
    savemat(BytesIO(), {'longstruct': cell}, format='5',long_field_names=True)
    #
    # Check to make sure it fails with long field names off
    #
    assert_raises(ValueError, savemat, BytesIO(),
                  {'longstruct': cell}, format='5', long_field_names=False)
def test_cell_with_one_thing_in_it():
    # Regression test: a 1x2 cell of strings always worked, but a 1x1
    # cell holding a single string used to fail.  Both must save cleanly.
    pair = np.ndarray((1, 2), dtype=object)
    pair[0, 0] = 'Hello'
    pair[0, 1] = 'World'
    savemat(BytesIO(), {'x': pair}, format='5')
    single = np.ndarray((1, 1), dtype=object)
    single[0, 0] = 'Hello, world'
    savemat(BytesIO(), {'x': single}, format='5')
def test_writer_properties():
    # Round-trip get/set of the MatFile5Writer configuration attributes.
    writer = MatFile5Writer(BytesIO())
    # global_vars starts empty and accepts assignment.
    assert_equal(writer.global_vars, [])
    writer.global_vars = ['avar']
    assert_equal(writer.global_vars, ['avar'])
    # unicode_strings defaults to off and can be toggled on.
    assert_equal(writer.unicode_strings, False)
    writer.unicode_strings = True
    assert_equal(writer.unicode_strings, True)
    # long_field_names defaults to off and can be toggled on.
    assert_equal(writer.long_field_names, False)
    writer.long_field_names = True
    assert_equal(writer.long_field_names, True)
def test_use_small_element():
    # Check whether the writer uses the small-data-element format when a
    # variable name is short enough to qualify.
    buf = BytesIO()
    writer = MatFile5Writer(buf)
    arr = np.zeros(10)
    # Baseline: a 5-character name (too long for the small element).
    writer.put_variables({'aaaaa': arr})
    base_size = len(buf.getvalue())
    # A 4-character name fits the small element, shrinking the output
    # by clearly more than the one byte the name itself accounts for.
    buf.truncate(0)
    buf.seek(0)
    writer.put_variables({'aaaa': arr})
    assert_(base_size - len(buf.getvalue()) > 4)
    # Growing the name past the baseline changes size only slightly.
    buf.truncate(0)
    buf.seek(0)
    writer.put_variables({'aaaaaa': arr})
    assert_(len(buf.getvalue()) - base_size < 4)
def test_save_dict():
    # Both plain dict and OrderedDict save as a struct (recarray on
    # load); only OrderedDict guarantees the resulting field order.
    dict_types = ((dict, False),)
    try:
        from collections import OrderedDict
    except ImportError:
        pass
    else:
        dict_types += ((OrderedDict, True),)
    ab_exp = np.array([[(1, 2)]], dtype=[('a', object), ('b', object)])
    ba_exp = np.array([[(2, 1)]], dtype=[('b', object), ('a', object)])
    for dict_type, is_ordered in dict_types:
        # Build from tuples so OrderedDict keeps insertion order.
        d = dict_type([('a', 1), ('b', 2)])
        stream = BytesIO()
        savemat(stream, {'dict': d})
        stream.seek(0)
        vals = loadmat(stream)['dict']
        assert_equal(set(vals.dtype.names), set(['a', 'b']))
        if is_ordered:
            # Ordered input must come back in 'a', 'b' order.
            assert_array_equal(vals, ab_exp)
        elif vals.dtype.names[0] == 'a':
            # Unordered input: accept either field order.
            assert_array_equal(vals, ab_exp)
        else:
            assert_array_equal(vals, ba_exp)
def test_1d_shape():
    # New format-5 behavior: a 1D array becomes a row vector by default;
    # oned_as can force either orientation.
    vec = np.arange(5)
    for fmt in ('4', '5'):
        # Default orientation.
        stream = BytesIO()
        savemat(stream, {'oned': vec}, format=fmt)
        assert_equal(loadmat(stream)['oned'].shape, (1, 5))
        # Explicit column orientation.
        stream = BytesIO()
        savemat(stream, {'oned': vec}, format=fmt, oned_as='column')
        assert_equal(loadmat(stream)['oned'].shape, (5, 1))
        # Explicit row orientation.
        stream = BytesIO()
        savemat(stream, {'oned': vec}, format=fmt, oned_as='row')
        assert_equal(loadmat(stream)['oned'].shape, (1, 5))
def test_compression():
    # Compressed output must round-trip correctly and be smaller than
    # the uncompressed equivalent.
    arr = np.zeros(100).reshape((5, 20))
    arr[2, 10] = 1
    stream = BytesIO()
    savemat(stream, {'arr': arr})
    raw_len = len(stream.getvalue())
    assert_array_equal(loadmat(stream)['arr'], arr)
    stream = BytesIO()
    savemat(stream, {'arr': arr}, do_compression=True)
    compressed_len = len(stream.getvalue())
    assert_array_equal(loadmat(stream)['arr'], arr)
    assert_(raw_len > compressed_len)
    # Two variables in one file, written raw and then compressed.
    arr2 = arr.copy()
    arr2[0, 0] = 1
    stream = BytesIO()
    savemat(stream, {'arr': arr, 'arr2': arr2}, do_compression=False)
    assert_array_equal(loadmat(stream)['arr2'], arr2)
    stream = BytesIO()
    savemat(stream, {'arr': arr, 'arr2': arr2}, do_compression=True)
    assert_array_equal(loadmat(stream)['arr2'], arr2)
def test_single_object():
    # A 0-d object array must save without raising.
    savemat(BytesIO(), {'A': np.array(1, dtype=object)})
def test_skip_variable():
    # Regression test: skipping the first of two variables used the
    # compressed-reader seek instead of the raw-stream seek, which broke
    # for large compressed chunks.  The fixture's first variable is a
    # 256x256 array of random (uncompressible) doubles.
    filename = pjoin(test_data_path, 'test_skip_variable.mat')
    # Sanity check: plain loadmat sees both variables.
    d = loadmat(filename, struct_as_record=True)
    assert_('first' in d)
    assert_('second' in d)
    # Reading only 'second' through the factory forces a skip over the
    # large first variable; this is where the old seek bug fired.
    factory, file_opened = mat_reader_factory(filename, struct_as_record=True)
    d = factory.get_variables('second')
    assert_('second' in d)
    factory.mat_stream.close()
def test_empty_struct():
    # ticket 885: reading an empty struct used to raise
    # "ValueError: empty data type".
    filename = pjoin(test_data_path, 'test_empty_struct.mat')
    a = loadmat(filename, struct_as_record=True)['a']
    assert_equal(a.shape, (1, 1))
    assert_equal(a.dtype, np.dtype(object))
    assert_(a[0, 0] is None)
    # Writing an empty unicode array used to fail with "data type not
    # understood"; it must now round-trip.
    stream = BytesIO()
    arr = np.array((), dtype='U')
    savemat(stream, {'arr': arr})
    assert_array_equal(loadmat(stream)['arr'], arr)
def test_save_empty_dict():
    # An empty dict saves as the empty-struct representation: a 1x1
    # object array holding None.
    stream = BytesIO()
    savemat(stream, {'arr': {}})
    a = loadmat(stream)['arr']
    assert_equal(a.shape, (1, 1))
    assert_equal(a.dtype, np.dtype(object))
    assert_(a[0, 0] is None)
def assert_any_equal(output, alternatives):
    """ Assert `output` is equal to at least one element in `alternatives`
    """
    assert_(any(np.all(output == candidate) for candidate in alternatives))
def test_to_writeable():
    # Test to_writeable function
    res = to_writeable(np.array([1]))  # pass through ndarrays
    assert_equal(res.shape, (1,))
    assert_array_equal(res, 1)
    # Dict fields can be written in any order
    expected1 = np.array([(1, 2)], dtype=[('a', '|O8'), ('b', '|O8')])
    expected2 = np.array([(2, 1)], dtype=[('b', '|O8'), ('a', '|O8')])
    alternatives = (expected1, expected2)
    assert_any_equal(to_writeable({'a': 1, 'b': 2}), alternatives)
    # Fields with underscores discarded
    assert_any_equal(to_writeable({'a': 1, 'b': 2, '_c': 3}), alternatives)
    # Not-string fields discarded
    assert_any_equal(to_writeable({'a': 1, 'b': 2, 100: 3}), alternatives)
    # String fields that are NOT valid Python identifiers are discarded
    # (the old comment said "valid", but '99' is dropped precisely
    # because it is not a valid identifier)
    assert_any_equal(to_writeable({'a': 1, 'b': 2, '99': 3}), alternatives)
    # An object instance with attributes is treated like a dict.
    # BUG FIX: `c = klass` bound the class itself (attributes went into
    # the class __dict__); bind an instance as clearly intended.
    class klass(object):
        pass
    c = klass()
    c.a = 1
    c.b = 2
    assert_any_equal(to_writeable(c), alternatives)
    # empty list and tuple go to empty array
    res = to_writeable([])
    assert_equal(res.shape, (0,))
    assert_equal(res.dtype.type, np.float64)
    res = to_writeable(())
    assert_equal(res.shape, (0,))
    assert_equal(res.dtype.type, np.float64)
    # None -> None
    assert_(to_writeable(None) is None)
    # String to strings
    assert_equal(to_writeable('a string').dtype.type, np.str_)
    # Scalars to numpy to numpy scalars
    res = to_writeable(1)
    assert_equal(res.shape, ())
    assert_equal(res.dtype.type, np.array(1).dtype.type)
    assert_array_equal(res, 1)
    # Empty dict returns EmptyStructMarker
    assert_(to_writeable({}) is EmptyStructMarker)
    # Object does not have (even empty) __dict__
    assert_(to_writeable(object()) is None)
    # Custom object does have empty __dict__, returns EmptyStructMarker.
    # BUG FIX: this used to call c() -- instantiating `klass` above --
    # instead of the freshly defined C; it passed only because a new
    # instance's __dict__ is empty either way.
    class C(object):
        pass
    assert_(to_writeable(C()) is EmptyStructMarker)
    # dict keys with legal characters are convertible
    res = to_writeable({'a': 1})['a']
    assert_equal(res.shape, (1,))
    assert_equal(res.dtype.type, np.object_)
    # Only fields with illegal characters, falls back to EmptyStruct
    assert_(to_writeable({'1': 1}) is EmptyStructMarker)
    assert_(to_writeable({'_a': 1}) is EmptyStructMarker)
    # Unless there are valid fields, in which case structured array
    assert_equal(to_writeable({'1': 1, 'f': 2}),
                 np.array([(2,)], dtype=[('f', '|O8')]))
def test_recarray():
    # A structured array round-trips: struct_as_record=False gives
    # attribute access; struct_as_record=True gives field access with
    # object-dtype fields.
    dt = [('f1', 'f8'), ('f2', 'S10')]
    arr = np.zeros((2,), dtype=dt)
    arr[0] = (0.5, 'python')
    arr[1] = (99, 'not perl')
    stream = BytesIO()
    savemat(stream, {'arr': arr})
    d = loadmat(stream, struct_as_record=False)
    rec = d['arr'][0, 0]
    assert_equal(rec.f1, 0.5)
    assert_equal(rec.f2, 'python')
    d = loadmat(stream, struct_as_record=True)
    rec = d['arr'][0, 0]
    assert_equal(rec['f1'], 0.5)
    assert_equal(rec['f2'], 'python')
    # structs always come back as object types
    assert_equal(rec.dtype, np.dtype([('f1', 'O'), ('f2', 'O')]))
    rec2 = d['arr'].flat[1]
    assert_equal(rec2['f1'], 99)
    assert_equal(rec2['f2'], 'not perl')
def test_save_object():
    # An arbitrary object saves via its attributes and loads as a struct.
    class C(object):
        pass
    obj = C()
    obj.field1 = 1
    obj.field2 = 'a string'
    stream = BytesIO()
    savemat(stream, {'c': obj})
    loaded = loadmat(stream, struct_as_record=False)['c'][0, 0]
    assert_equal(loaded.field1, 1)
    assert_equal(loaded.field2, 'a string')
    loaded = loadmat(stream, struct_as_record=True)['c'][0, 0]
    assert_equal(loaded['field1'], 1)
    assert_equal(loaded['field2'], 'a string')
def test_read_opts():
    # Check the reader honors options both at construction time and when
    # changed as attributes afterwards.
    arr = np.arange(6).reshape(1, 6)
    stream = BytesIO()
    savemat(stream, {'a': arr})
    rdr = MatFile5Reader(stream)
    back_dict = rdr.get_variables()
    rarr = back_dict['a']
    assert_array_equal(rarr, arr)
    rdr = MatFile5Reader(stream, squeeze_me=True)
    assert_array_equal(rdr.get_variables()['a'], arr.reshape((6,)))
    rdr.squeeze_me = False
    # BUG FIX: the old code re-asserted the stale `rarr` read before the
    # attribute change, so squeeze_me=False was never exercised; re-read
    # to check the un-squeezed shape comes back.
    assert_array_equal(rdr.get_variables()['a'], arr)
    rdr = MatFile5Reader(stream, byte_order=boc.native_code)
    assert_array_equal(rdr.get_variables()['a'], arr)
    # inverted byte code leads to error on read because of swapped
    # header etc
    rdr = MatFile5Reader(stream, byte_order=boc.swapped_code)
    assert_raises(Exception, rdr.get_variables)
    rdr.byte_order = boc.native_code
    assert_array_equal(rdr.get_variables()['a'], arr)
    # chars_as_strings controls whether char arrays come back as strings
    # or as 2D arrays of single characters.
    arr = np.array(['a string'])
    stream.truncate(0)
    stream.seek(0)
    savemat(stream, {'a': arr})
    rdr = MatFile5Reader(stream)
    assert_array_equal(rdr.get_variables()['a'], arr)
    rdr = MatFile5Reader(stream, chars_as_strings=False)
    carr = np.atleast_2d(np.array(list(arr.item()), dtype='U1'))
    assert_array_equal(rdr.get_variables()['a'], carr)
    rdr.chars_as_strings = True
    assert_array_equal(rdr.get_variables()['a'], arr)
def test_empty_string():
    # Reading an empty string must not raise.
    estring_fname = pjoin(test_data_path, 'single_empty_string.mat')
    with open(estring_fname, 'rb') as fp:
        d = MatFile5Reader(fp).get_variables()
    assert_array_equal(d['a'], np.array([], dtype='U1'))
    # Empty-string round trip.  MATLAB stores strings as char arrays, so
    # it cannot distinguish an empty string array from an array holding
    # one empty string; both load back as an empty char array.
    stream = BytesIO()
    savemat(stream, {'a': np.array([''])})
    rdr = MatFile5Reader(stream)
    assert_array_equal(rdr.get_variables()['a'], np.array([], dtype='U1'))
    stream.truncate(0)
    stream.seek(0)
    savemat(stream, {'a': np.array([], dtype='U1')})
    rdr = MatFile5Reader(stream)
    assert_array_equal(rdr.get_variables()['a'], np.array([], dtype='U1'))
    stream.close()
def test_corrupted_data():
    # Corrupt compressed variables must raise clear errors: bad data
    # gives ValueError, a bad checksum surfaces as zlib.error.
    import zlib
    cases = [(ValueError, 'corrupted_zlib_data.mat'),
             (zlib.error, 'corrupted_zlib_checksum.mat')]
    for exc, fname in cases:
        with open(pjoin(test_data_path, fname), 'rb') as fp:
            rdr = MatFile5Reader(fp)
            assert_raises(exc, rdr.get_variables)
def test_corrupted_data_check_can_be_disabled():
    # With integrity verification switched off, the corrupt file must
    # read without raising.
    with open(pjoin(test_data_path, 'corrupted_zlib_data.mat'), 'rb') as fp:
        rdr = MatFile5Reader(fp, verify_compressed_data_integrity=False)
        rdr.get_variables()
def test_read_both_endian():
    # Big- and little-endian files must decode to identical values.
    for fname in ('big_endian.mat', 'little_endian.mat'):
        with open(pjoin(test_data_path, fname), 'rb') as fp:
            d = MatFile5Reader(fp).get_variables()
        assert_array_equal(d['strings'],
                           np.array([['hello'], ['world']], dtype=object))
        assert_array_equal(d['floats'],
                           np.array([[2., 3.], [3., 4.]], dtype=np.float32))
def test_write_opposite_endian():
    # Writing opposite-endian .mat files is unsupported, but an
    # other-endian input array must still be written (and read back)
    # correctly.
    # NOTE(review): ndarray.newbyteorder was removed in numpy 2.0; the
    # replacement is arr.view(arr.dtype.newbyteorder()) — confirm the
    # numpy version this suite targets.
    float_arr = np.array([[2., 3.],
                          [3., 4.]])
    int_arr = np.arange(6).reshape((2, 3))
    uni_arr = np.array(['hello', 'world'], dtype='U')
    stream = BytesIO()
    savemat(stream, {'floats': float_arr.byteswap().newbyteorder(),
                     'ints': int_arr.byteswap().newbyteorder(),
                     'uni_arr': uni_arr.byteswap().newbyteorder()})
    d = MatFile5Reader(stream).get_variables()
    assert_array_equal(d['floats'], float_arr)
    assert_array_equal(d['ints'], int_arr)
    assert_array_equal(d['uni_arr'], uni_arr)
    stream.close()
def test_logical_array():
    # Check logicals load with the bool dtype (the generic roundtrip
    # test does not verify the dtype).
    with open(pjoin(test_data_path, 'testbool_8_WIN64.mat'), 'rb') as fobj:
        d = MatFile5Reader(fobj, mat_dtype=True).get_variables()
    expected = np.array([[True], [False]], dtype=np.bool_)
    assert_array_equal(d['testbools'], expected)
    assert_equal(d['testbools'].dtype, expected.dtype)
def test_logical_out_type():
    # Confirm that bool type written as uint8, uint8 class
    # See gh-4022
    stream = BytesIO()
    barr = np.array([False, True, False])
    savemat(stream, {'barray': barr})
    stream.seek(0)
    # Drive the reader by hand so the raw variable header can be
    # inspected before the array data is decoded.
    reader = MatFile5Reader(stream)
    reader.initialize_read()
    reader.read_file_header()
    hdr, _ = reader.read_var_header()
    # Bools are stored as the MATLAB uint8 class with the logical flag set.
    assert_equal(hdr.mclass, mio5p.mxUINT8_CLASS)
    assert_equal(hdr.is_logical, True)
    # The decoded array itself keeps the uint8 storage type.
    var = reader.read_var_array(hdr, False)
    assert_equal(var.dtype.type, np.uint8)
def test_mat4_3d():
    # MAT-4 files cannot represent 3D arrays; saving one must raise.
    stream = BytesIO()
    cube = np.arange(24).reshape((2, 3, 4))
    assert_raises(ValueError, savemat, stream, {'a': cube}, True, '4')
def test_func_read():
    # A MATLAB function handle loads as MatlabFunction, and writing one
    # back out is refused with MatWriteError.
    func_eg = pjoin(test_data_path, 'testfunc_7.4_GLNX86.mat')
    with open(func_eg, 'rb') as fp:
        d = MatFile5Reader(fp).get_variables()
    assert_(isinstance(d['testfunc'], MatlabFunction))
    wtr = MatFile5Writer(BytesIO())
    assert_raises(MatWriteError, wtr.put_variables, d)
def test_mat_dtype():
    # mat_dtype=False keeps the on-disk storage dtype (unsigned int);
    # mat_dtype=True restores the MATLAB class dtype (double).
    double_eg = pjoin(test_data_path, 'testmatrix_6.1_SOL2.mat')
    with open(double_eg, 'rb') as fp:
        d = MatFile5Reader(fp, mat_dtype=False).get_variables()
    assert_equal(d['testmatrix'].dtype.kind, 'u')
    with open(double_eg, 'rb') as fp:
        d = MatFile5Reader(fp, mat_dtype=True).get_variables()
    assert_equal(d['testmatrix'].dtype.kind, 'f')
def test_sparse_in_struct():
    # Regression test (bug found by DC): the Cython reader insisted on an
    # ndarray return type and choked on a sparse matrix inside a struct.
    st = {'sparsefield': SP.coo_matrix(np.eye(4))}
    stream = BytesIO()
    savemat(stream, {'a': st})
    d = loadmat(stream, struct_as_record=True)
    assert_array_equal(d['a'][0, 0]['sparsefield'].todense(), np.eye(4))
def test_mat_struct_squeeze():
    # squeeze_me combined with mat_struct output used to raise.
    stream = BytesIO()
    savemat(stream, {'st': {'one': 1, 'two': 2}})
    # Fine without squeezing...
    loadmat(stream, struct_as_record=False)
    # ...and must also be fine with squeeze_me=True.
    loadmat(stream, struct_as_record=False, squeeze_me=True)
def test_scalar_squeeze():
    # squeeze_me reduces 1x1 numerics to Python scalars while strings
    # stay strings and structs stay arrays.
    stream = BytesIO()
    in_d = {'scalar': [[0.1]], 'string': 'my name',
            'st': {'one': 1, 'two': 2}}
    savemat(stream, in_d)
    out_d = loadmat(stream, squeeze_me=True)
    assert_(isinstance(out_d['scalar'], float))
    assert_(isinstance(out_d['string'], string_types))
    assert_(isinstance(out_d['st'], np.ndarray))
def test_str_round():
    # From a report by Angus McMorland on the mailing list, 3 May 2010:
    # strings came back scrambled ('HloolFoa', 'elWrdobr').
    stream = BytesIO()
    in_arr = np.array(['Hello', 'Foob'])
    # Shorter strings are space-padded to the common width on load.
    out_arr = np.array(['Hello', 'Foob '])
    savemat(stream, dict(a=in_arr))
    res = loadmat(stream)
    assert_array_equal(res['a'], out_arr)
    stream.truncate(0)
    stream.seek(0)
    # Make a Fortran-ordered version of the string data.
    # BUG FIX: the shape used to be taken from an undefined name `a`
    # (NameError at runtime); it must come from in_arr.  Also use
    # tobytes -- tostring was removed in numpy 2.0.
    in_str = in_arr.tobytes(order='F')
    in_from_str = np.ndarray(shape=in_arr.shape,
                             dtype=in_arr.dtype,
                             order='F',
                             buffer=in_str)
    savemat(stream, dict(a=in_from_str))
    # BUG FIX: reload before asserting; the old code re-checked the
    # stale `res` from the first round trip.
    res = loadmat(stream)
    assert_array_equal(res['a'], out_arr)
    # Unicode save used to hit a "buffer too small" error.
    stream.truncate(0)
    stream.seek(0)
    in_arr_u = in_arr.astype('U')
    out_arr_u = out_arr.astype('U')
    savemat(stream, {'a': in_arr_u})
    res = loadmat(stream)
    assert_array_equal(res['a'], out_arr_u)
def test_fieldnames():
    # Struct field names survive the round trip.
    stream = BytesIO()
    savemat(stream, {'a': {'a': 1, 'b': 2}})
    names = loadmat(stream)['a'].dtype.names
    assert_equal(set(names), set(('a', 'b')))
def test_loadmat_varnames():
    # loadmat's variable_names argument restricts the returned variables;
    # MAT-5 system variables are always present for MAT-5 files.
    mat5_sys_names = ['__globals__', '__header__', '__version__']
    cases = (
        (pjoin(test_data_path, 'testmulti_4.2c_SOL2.mat'), []),
        (pjoin(test_data_path, 'testmulti_7.4_GLNX86.mat'), mat5_sys_names),
    )
    for eg_file, sys_v_names in cases:
        # No restriction: everything comes back.
        vars = loadmat(eg_file)
        assert_equal(set(vars.keys()), set(['a', 'theta'] + sys_v_names))
        # A bare string, list, tuple, and empty list all select variables.
        for selector, expected in (('a', ['a']),
                                   (['a'], ['a']),
                                   (['theta'], ['theta']),
                                   (('theta',), ['theta']),
                                   ([], [])):
            vars = loadmat(eg_file, variable_names=selector)
            assert_equal(set(vars.keys()), set(expected + sys_v_names))
        # The caller's list must not be mutated by loadmat.
        vnames = ['theta']
        vars = loadmat(eg_file, variable_names=vnames)
        assert_equal(vnames, ['theta'])
def test_round_types():
    # Saving then loading preserves the dtype for all of these codes.
    base = np.arange(10)
    stream = BytesIO()
    codes = ('f8', 'f4', 'i8', 'i4', 'i2', 'i1',
             'u8', 'u4', 'u2', 'u1', 'c16', 'c8')
    for code in codes:
        stream.truncate(0)
        stream.seek(0)  # rewind; needed for BytesIO on python 3
        savemat(stream, {'arr': base.astype(code)})
        loaded = loadmat(stream)
        assert_equal(np.dtype(code), loaded['arr'].dtype)
def test_varmats_from_mat():
    # Make a mat file with several variables, write it, read it back
    names_vars = (('arr', mlarr(np.arange(10))),
                  ('mystr', mlarr('a string')),
                  ('mynum', mlarr(10)))
    # Dict like thing to give variables in defined order
    class C(object):
        def items(self):
            return names_vars
    stream = BytesIO()
    savemat(stream, C())
    # varmats_from_mat splits the file into (name, single-variable
    # stream) pairs, preserving file order; each sub-stream must load
    # independently as a complete one-variable mat file.
    varmats = varmats_from_mat(stream)
    assert_equal(len(varmats), 3)
    for i in range(3):
        name, var_stream = varmats[i]
        exp_name, exp_res = names_vars[i]
        assert_equal(name, exp_name)
        res = loadmat(var_stream)
        assert_array_equal(res[name], exp_res)
def test_one_by_zero():
    # A 1x0 char array must read back as an empty (0,) array.
    fname = pjoin(test_data_path, 'one_by_zero_char.mat')
    with open(fname, 'rb') as fp:
        d = MatFile5Reader(fp).get_variables()
    assert_equal(d['var'].shape, (0,))
def test_load_mat4_le():
    # Byte order of little-endian float64 dense MAT-4 matrices used to
    # be read incorrectly on big-endian platforms.
    mat4_fname = pjoin(test_data_path, 'test_mat4_le_floats.mat')
    d = loadmat(mat4_fname)
    assert_array_equal(d['a'], [[0.1, 1.2]])
def test_unicode_mat4():
    # MAT-4 stores unicode as latin1; it must survive the round trip.
    bio = BytesIO()
    var = {'second_cat': u('Schrödinger')}
    savemat(bio, var, format='4')
    var_back = loadmat(bio)
    assert_equal(var_back['second_cat'], var['second_cat'])
def test_logical_sparse():
    # Test we can read logical sparse stored in mat file as bytes.
    # See https://github.com/scipy/scipy/issues/3539.
    # Some MATLAB files declare the sparse Real Part Subelement as
    # miDOUBLE while actually storing single bytes; before the fix this
    # crashed with "ValueError: indices and data should have the same size".
    filename = pjoin(test_data_path, 'logical_sparse.mat')
    d = loadmat(filename, struct_as_record=True)
    log_sp = d['sp_log_5_4']
    assert_(isinstance(log_sp, SP.csc_matrix))
    assert_equal(log_sp.dtype.type, np.bool_)
    expected = [[True, True, True, False],
                [False, False, True, False],
                [False, False, True, False],
                [False, False, False, False],
                [False, False, False, False]]
    assert_array_equal(log_sp.toarray(), expected)
def test_empty_sparse():
    # Can we read empty sparse matrices?
    sio = BytesIO()
    import scipy.sparse
    empty_sparse = scipy.sparse.csr_matrix([[0,0],[0,0]])
    savemat(sio, dict(x=empty_sparse))
    sio.seek(0)
    res = loadmat(sio)
    # Shape round-trips and all entries are zero.
    assert_array_equal(res['x'].shape, empty_sparse.shape)
    assert_array_equal(res['x'].todense(), 0)
    # Do empty sparse matrices get written with max nnz 1?
    # See https://github.com/scipy/scipy/issues/4208
    sio.seek(0)
    # Drive the reader manually so the raw variable header (and its
    # nzmax field) can be inspected before the array is decoded.
    reader = MatFile5Reader(sio)
    reader.initialize_read()
    reader.read_file_header()
    hdr, _ = reader.read_var_header()
    assert_equal(hdr.nzmax, 1)
def test_empty_mat_error():
    # An empty stream must raise the dedicated MatReadError.
    empty_stream = BytesIO()
    assert_raises(MatReadError, loadmat, empty_stream)
def test_miuint32_compromise():
    # The reader accepts miUINT32 where miINT32 is expected, as long as
    # the values stay non-negative.
    filename = pjoin(test_data_path, 'miuint32_for_miint32.mat')
    res = loadmat(filename)
    assert_equal(res['an_array'], np.arange(10)[None, :])
    # A negative value smuggled in as miUINT32 must be rejected.
    filename = pjoin(test_data_path, 'bad_miuint32.mat')
    with suppress_warnings() as sup:
        sup.filter(message="unclosed file")  # Py3k ResourceWarning
        assert_raises(ValueError, loadmat, filename)
def test_miutf8_for_miint8_compromise():
    # ASCII variable names tagged miUTF8 are tolerated by the reader...
    filename = pjoin(test_data_path, 'miutf8_array_name.mat')
    res = loadmat(filename)
    assert_equal(res['array_name'], [[1]])
    # ...but a non-ascii utf8 name is an error.
    filename = pjoin(test_data_path, 'bad_miutf8_array_name.mat')
    with suppress_warnings() as sup:
        sup.filter(message="unclosed file")  # Py3k ResourceWarning
        assert_raises(ValueError, loadmat, filename)
def test_bad_utf8():
    # Broken UTF-8 must be decoded with the 'replace' error handler
    # rather than raising.
    filename = pjoin(test_data_path, 'broken_utf8.mat')
    res = loadmat(filename)
    assert_equal(res['bad_string'],
                 b'\x80 am broken'.decode('utf8', 'replace'))
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2017, Dag Wieers (@dagwieers) <dag@wieers.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'certified'}
DOCUMENTATION = r'''
---
module: aci_rest
short_description: Direct access to the Cisco APIC REST API
description:
- Enables the management of the Cisco ACI fabric through direct access to the Cisco APIC REST API.
- Thanks to the idempotent nature of the APIC, this module is idempotent and reports changes.
version_added: '2.4'
requirements:
- lxml (when using XML payload)
- xmljson >= 0.1.8 (when using XML payload)
- python 2.7+ (when using xmljson)
options:
method:
description:
- The HTTP method of the request.
- Using C(delete) is typically used for deleting objects.
- Using C(get) is typically used for querying objects.
- Using C(post) is typically used for modifying objects.
type: str
choices: [ delete, get, post ]
default: get
aliases: [ action ]
path:
description:
- URI being used to execute API calls.
- Must end in C(.xml) or C(.json).
type: str
required: yes
aliases: [ uri ]
content:
description:
- When used instead of C(src), sets the payload of the API request directly.
- This may be convenient to template simple requests.
- For anything complex use the C(template) lookup plugin (see examples)
or the M(template) module with parameter C(src).
type: raw
src:
description:
- Name of the absolute path of the filename that includes the body
of the HTTP request being sent to the ACI fabric.
- If you require a templated payload, use the C(content) parameter
together with the C(template) lookup plugin, or use M(template).
type: path
aliases: [ config_file ]
extends_documentation_fragment: aci
notes:
- Certain payloads are known not to be idempotent, so be careful when constructing payloads,
e.g. using C(status="created") will cause idempotency issues, use C(status="modified") instead.
More information in :ref:`the ACI documentation <aci_guide_known_issues>`.
- Certain payloads (and used paths) are known to report no changes happened when changes did happen.
This is a known APIC problem and has been reported to the vendor. A workaround for this issue exists.
More information in :ref:`the ACI documentation <aci_guide_known_issues>`.
- XML payloads require the C(lxml) and C(xmljson) python libraries. For JSON payloads nothing special is needed.
seealso:
- module: aci_tenant
- name: Cisco APIC REST API Configuration Guide
description: More information about the APIC REST API.
link: http://www.cisco.com/c/en/us/td/docs/switches/datacenter/aci/apic/sw/2-x/rest_cfg/2_1_x/b_Cisco_APIC_REST_API_Configuration_Guide.html
author:
- Dag Wieers (@dagwieers)
'''
EXAMPLES = r'''
- name: Add a tenant using certificate authentication
aci_rest:
host: apic
username: admin
private_key: pki/admin.key
method: post
path: /api/mo/uni.xml
src: /home/cisco/ansible/aci/configs/aci_config.xml
delegate_to: localhost
- name: Add a tenant from a templated payload file from templates/
aci_rest:
host: apic
username: admin
private_key: pki/admin.key
method: post
path: /api/mo/uni.xml
content: "{{ lookup('template', 'aci/tenant.xml.j2') }}"
delegate_to: localhost
- name: Add a tenant using inline YAML
aci_rest:
host: apic
username: admin
private_key: pki/admin.key
validate_certs: no
path: /api/mo/uni.json
method: post
content:
fvTenant:
attributes:
name: Sales
descr: Sales department
delegate_to: localhost
- name: Add a tenant using a JSON string
aci_rest:
host: apic
username: admin
private_key: pki/admin.key
validate_certs: no
path: /api/mo/uni.json
method: post
content:
{
"fvTenant": {
"attributes": {
"name": "Sales",
"descr": "Sales department"
}
}
}
delegate_to: localhost
- name: Add a tenant using an XML string
aci_rest:
host: apic
username: admin
private_key: pki/{{ aci_username }}.key
validate_certs: no
path: /api/mo/uni.xml
method: post
content: '<fvTenant name="Sales" descr="Sales departement"/>'
delegate_to: localhost
- name: Get tenants using password authentication
aci_rest:
host: apic
username: admin
password: SomeSecretPassword
method: get
path: /api/node/class/fvTenant.json
delegate_to: localhost
register: query_result
- name: Configure contracts
aci_rest:
host: apic
username: admin
private_key: pki/admin.key
method: post
path: /api/mo/uni.xml
src: /home/cisco/ansible/aci/configs/contract_config.xml
delegate_to: localhost
- name: Register leaves and spines
aci_rest:
host: apic
username: admin
private_key: pki/admin.key
validate_certs: no
method: post
path: /api/mo/uni/controller/nodeidentpol.xml
content: |
<fabricNodeIdentPol>
<fabricNodeIdentP name="{{ item.name }}" nodeId="{{ item.nodeid }}" status="{{ item.status }}" serial="{{ item.serial }}"/>
</fabricNodeIdentPol>
with_items:
- '{{ apic_leavesspines }}'
delegate_to: localhost
- name: Wait for all controllers to become ready
aci_rest:
host: apic
username: admin
private_key: pki/admin.key
validate_certs: no
path: /api/node/class/topSystem.json?query-target-filter=eq(topSystem.role,"controller")
register: apics
until: "'totalCount' in apics and apics.totalCount|int >= groups['apic']|count"
retries: 120
delay: 30
delegate_to: localhost
run_once: yes
'''
RETURN = r'''
error_code:
description: The REST ACI return code, useful for troubleshooting on failure
returned: always
type: int
sample: 122
error_text:
description: The REST ACI descriptive text, useful for troubleshooting on failure
returned: always
type: str
sample: unknown managed object class foo
imdata:
description: Converted output returned by the APIC REST (register this for post-processing)
returned: always
type: str
sample: [{"error": {"attributes": {"code": "122", "text": "unknown managed object class foo"}}}]
payload:
description: The (templated) payload send to the APIC REST API (xml or json)
returned: always
type: str
sample: '<foo bar="boo"/>'
raw:
description: The raw output returned by the APIC REST API (xml or json)
returned: parse error
type: str
sample: '<?xml version="1.0" encoding="UTF-8"?><imdata totalCount="1"><error code="122" text="unknown managed object class foo"/></imdata>'
response:
description: HTTP response string
returned: always
type: str
sample: 'HTTP Error 400: Bad Request'
status:
description: HTTP status code
returned: always
type: int
sample: 400
totalCount:
description: Number of items in the imdata array
returned: always
type: str
sample: '0'
url:
description: URL used for APIC REST call
returned: success
type: str
sample: https://1.2.3.4/api/mo/uni/tn-[Dag].json?rsp-subtree=modified
'''
import json
import os
try:
from ansible.module_utils.six.moves.urllib.parse import parse_qsl, urlencode, urlparse, urlunparse
HAS_URLPARSE = True
except Exception:
HAS_URLPARSE = False
# Optional, only used for XML payload
try:
import lxml.etree # noqa
HAS_LXML_ETREE = True
except ImportError:
HAS_LXML_ETREE = False
# Optional, only used for XML payload
try:
from xmljson import cobra # noqa
HAS_XMLJSON_COBRA = True
except ImportError:
HAS_XMLJSON_COBRA = False
# Optional, only used for YAML validation
try:
import yaml
HAS_YAML = True
except Exception:
HAS_YAML = False
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.network.aci.aci import ACIModule, aci_argument_spec
from ansible.module_utils.urls import fetch_url
from ansible.module_utils._text import to_text
def update_qsl(url, params):
    ''' Add or update a URL query string '''
    if HAS_URLPARSE:
        # Parse the URL, merge the new params over any existing query
        # values, then rebuild the URL.
        parts = list(urlparse(url))
        merged = dict(parse_qsl(parts[4]))
        merged.update(params)
        parts[4] = urlencode(merged)
        return urlunparse(parts)
    # Fallback without urlparse: append naively (existing keys are not
    # de-duplicated in this mode).
    sep = '&' if '?' in url else '?'
    return url + sep + '&'.join('%s=%s' % (k, v) for k, v in params.items())
class ACIRESTModule(ACIModule):
    def changed(self, d):
        ''' Check ACI response for changes '''
        # Recursively walk the (nested) response structure looking for
        # any dict whose 'status' key reports a mutation.
        if isinstance(d, dict):
            for key, value in d.items():
                if key == 'status' and value in ('created', 'modified', 'deleted'):
                    return True
                if self.changed(value) is True:
                    return True
        elif isinstance(d, list):
            for entry in d:
                if self.changed(entry) is True:
                    return True
        return False

    def response_type(self, rawoutput, rest_type='xml'):
        ''' Handle APIC response output '''
        # Delegate parsing to the base-class handler for the payload type.
        if rest_type == 'json':
            self.response_json(rawoutput)
        else:
            self.response_xml(rawoutput)
        # Use APIC's own per-object status reporting for idempotency,
        # when the parsed imdata is available.
        if HAS_URLPARSE:
            self.result['changed'] = self.changed(self.imdata)
def main():
    """Ansible module entry point: send a raw REST request to an APIC.

    Reads the module parameters (path/method/src/content), validates the
    payload as JSON/YAML or XML depending on the URL extension, performs the
    HTTP request via fetch_url, and exits with the parsed APIC response.
    """
    argument_spec = aci_argument_spec()
    argument_spec.update(
        path=dict(type='str', required=True, aliases=['uri']),
        method=dict(type='str', default='get', choices=['delete', 'get', 'post'], aliases=['action']),
        src=dict(type='path', aliases=['config_file']),
        content=dict(type='raw'),
    )
    module = AnsibleModule(
        argument_spec=argument_spec,
        mutually_exclusive=[['content', 'src']],
    )
    content = module.params['content']
    path = module.params['path']
    src = module.params['src']
    # Report missing file before doing any network work
    file_exists = False
    if src:
        if os.path.isfile(src):
            file_exists = True
        else:
            module.fail_json(msg="Cannot find/access src '%s'" % src)
    # Find request type from the extension embedded in the URL path
    if path.find('.xml') != -1:
        rest_type = 'xml'
        if not HAS_LXML_ETREE:
            module.fail_json(msg='The lxml python library is missing, or lacks etree support.')
        if not HAS_XMLJSON_COBRA:
            module.fail_json(msg='The xmljson python library is missing, or lacks cobra support.')
    elif path.find('.json') != -1:
        rest_type = 'json'
    else:
        module.fail_json(msg='Failed to find REST API payload type (neither .xml nor .json).')
    aci = ACIRESTModule(module)
    aci.result['status'] = -1  # Ensure we always return a status
    # We include the payload as it may be templated
    payload = content
    if file_exists:
        with open(src, 'r') as config_object:
            # TODO: Would be nice to template this, requires action-plugin
            payload = config_object.read()
    # Validate payload before sending it to the APIC
    if rest_type == 'json':
        if content and isinstance(content, dict):
            # Inline dict content: serialize it to a JSON string
            payload = json.dumps(payload)
        elif payload and isinstance(payload, str) and HAS_YAML:
            try:
                # Round-trip through YAML to validate and normalize to JSON
                payload = json.dumps(yaml.safe_load(payload))
            except Exception as e:
                module.fail_json(msg='Failed to parse provided JSON/YAML payload: %s' % to_text(e), exception=to_text(e), payload=payload)
    elif rest_type == 'xml' and HAS_LXML_ETREE:
        if content and isinstance(content, dict) and HAS_XMLJSON_COBRA:
            # Validate inline YAML/JSON
            # FIXME: Converting from a dictionary to XML is unsupported at this time
            # payload = etree.tostring(payload)
            pass
        elif payload and isinstance(payload, str):
            try:
                # Round-trip through lxml to validate the XML string
                payload = lxml.etree.tostring(lxml.etree.fromstring(payload))
            except Exception as e:
                module.fail_json(msg='Failed to parse provided XML payload: %s' % to_text(e), payload=payload)
    # Perform actual request using auth cookie (Same as aci.request(), but also supports XML)
    if 'port' in aci.params and aci.params['port'] is not None:
        aci.url = '%(protocol)s://%(host)s:%(port)s/' % aci.params + path.lstrip('/')
    else:
        aci.url = '%(protocol)s://%(host)s/' % aci.params + path.lstrip('/')
    if aci.params['method'] != 'get':
        # Ask the APIC to report only what the write actually modified
        path += '?rsp-subtree=modified'
        aci.url = update_qsl(aci.url, {'rsp-subtree': 'modified'})
    # Sign and encode request as to APIC's wishes
    if aci.params['private_key'] is not None:
        aci.cert_auth(path=path, payload=payload)
    aci.method = aci.params['method'].upper()
    # Perform request
    resp, info = fetch_url(module, aci.url,
                           data=payload,
                           headers=aci.headers,
                           method=aci.method,
                           timeout=aci.params['timeout'],
                           use_proxy=aci.params['use_proxy'])
    aci.response = info['msg']
    aci.status = info['status']
    # Report failure
    if info['status'] != 200:
        try:
            # APIC error: the body is parseable and carries an error payload
            aci.response_type(info['body'], rest_type)
            aci.fail_json(msg='APIC Error %(code)s: %(text)s' % aci.error)
        except KeyError:
            # Connection error: no 'body' key in the fetch_url info dict
            aci.fail_json(msg='Connection failed for %(url)s. %(msg)s' % info)
    aci.response_type(resp.read(), rest_type)
    aci.result['imdata'] = aci.imdata
    aci.result['totalCount'] = aci.totalCount
    # Report success
    aci.exit_json(**aci.result)
# Run the module when executed directly by Ansible.
if __name__ == '__main__':
    main()
/*
* Copyright (C) 2009 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.common.cache;
import static com.google.common.cache.ReflectionFreeAssertThrows.assertThrows;
import static com.google.common.cache.TestingCacheLoaders.constantLoader;
import static com.google.common.cache.TestingCacheLoaders.identityLoader;
import static com.google.common.cache.TestingRemovalListeners.countingRemovalListener;
import static com.google.common.cache.TestingRemovalListeners.nullRemovalListener;
import static com.google.common.cache.TestingRemovalListeners.queuingRemovalListener;
import static com.google.common.cache.TestingWeighers.constantWeigher;
import static com.google.common.truth.Truth.assertThat;
import static com.google.common.truth.Truth.assertWithMessage;
import static java.util.concurrent.Executors.newFixedThreadPool;
import static java.util.concurrent.TimeUnit.MILLISECONDS;
import static java.util.concurrent.TimeUnit.NANOSECONDS;
import static java.util.concurrent.TimeUnit.SECONDS;
import com.google.common.annotations.GwtCompatible;
import com.google.common.annotations.GwtIncompatible;
import com.google.common.base.Ticker;
import com.google.common.cache.TestingRemovalListeners.CountingRemovalListener;
import com.google.common.cache.TestingRemovalListeners.QueuingRemovalListener;
import com.google.common.collect.Sets;
import com.google.common.testing.NullPointerTester;
import com.google.errorprone.annotations.CanIgnoreReturnValue;
import java.time.Duration;
import java.util.HashMap;
import java.util.Map;
import java.util.Random;
import java.util.Set;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import junit.framework.TestCase;
import org.jspecify.annotations.NullUnmarked;
/** Unit tests for CacheBuilder. */
@GwtCompatible
// We are intentionally testing the TimeUnit overloads, too.
@SuppressWarnings("LongTimeUnit_ExpireAfterWrite_Seconds")
@NullUnmarked
public class CacheBuilderTest extends TestCase {

  public void testNewBuilder() {
    CacheLoader<Object, Integer> loader = constantLoader(1);
    LoadingCache<String, Integer> cache =
        CacheBuilder.newBuilder().removalListener(countingRemovalListener()).build(loader);
    assertThat(cache.getUnchecked("one")).isEqualTo(1);
    assertThat(cache.size()).isEqualTo(1);
  }

  public void testInitialCapacity_negative() {
    CacheBuilder<Object, Object> builder = CacheBuilder.newBuilder();
    assertThrows(IllegalArgumentException.class, () -> builder.initialCapacity(-1));
  }

  public void testInitialCapacity_setTwice() {
    CacheBuilder<Object, Object> builder = CacheBuilder.newBuilder().initialCapacity(16);
    assertThrows(IllegalStateException.class, () -> builder.initialCapacity(16));
  }

  @GwtIncompatible // CacheTesting
  public void testInitialCapacity_small() {
    // The requested capacity is split evenly across the default 4 segments.
    LoadingCache<?, ?> cache = CacheBuilder.newBuilder().initialCapacity(5).build(identityLoader());
    LocalCache<?, ?> map = CacheTesting.toLocalCache(cache);
    assertThat(map.segments).hasLength(4);
    assertThat(map.segments[0].table.length()).isEqualTo(2);
    assertThat(map.segments[1].table.length()).isEqualTo(2);
    assertThat(map.segments[2].table.length()).isEqualTo(2);
    assertThat(map.segments[3].table.length()).isEqualTo(2);
  }

  @GwtIncompatible // CacheTesting
  public void testInitialCapacity_smallest() {
    LoadingCache<?, ?> cache = CacheBuilder.newBuilder().initialCapacity(0).build(identityLoader());
    LocalCache<?, ?> map = CacheTesting.toLocalCache(cache);
    assertThat(map.segments).hasLength(4);
    // 1 is as low as it goes, not 0. it feels dirty to know this/test this.
    assertThat(map.segments[0].table.length()).isEqualTo(1);
    assertThat(map.segments[1].table.length()).isEqualTo(1);
    assertThat(map.segments[2].table.length()).isEqualTo(1);
    assertThat(map.segments[3].table.length()).isEqualTo(1);
  }

  public void testInitialCapacity_large() {
    CacheBuilder.newBuilder().initialCapacity(Integer.MAX_VALUE);
    // that the builder didn't blow up is enough;
    // don't actually create this monster!
  }

  public void testConcurrencyLevel_zero() {
    CacheBuilder<Object, Object> builder = CacheBuilder.newBuilder();
    assertThrows(IllegalArgumentException.class, () -> builder.concurrencyLevel(0));
  }

  public void testConcurrencyLevel_setTwice() {
    CacheBuilder<Object, Object> builder = CacheBuilder.newBuilder().concurrencyLevel(16);
    assertThrows(IllegalStateException.class, () -> builder.concurrencyLevel(16));
  }

  @GwtIncompatible // CacheTesting
  public void testConcurrencyLevel_small() {
    LoadingCache<?, ?> cache =
        CacheBuilder.newBuilder().concurrencyLevel(1).build(identityLoader());
    LocalCache<?, ?> map = CacheTesting.toLocalCache(cache);
    assertThat(map.segments).hasLength(1);
  }

  public void testConcurrencyLevel_large() {
    CacheBuilder.newBuilder().concurrencyLevel(Integer.MAX_VALUE);
    // don't actually build this beast
  }

  public void testMaximumSize_negative() {
    CacheBuilder<Object, Object> builder = CacheBuilder.newBuilder();
    assertThrows(IllegalArgumentException.class, () -> builder.maximumSize(-1));
  }

  public void testMaximumSize_setTwice() {
    CacheBuilder<Object, Object> builder = CacheBuilder.newBuilder().maximumSize(16);
    assertThrows(IllegalStateException.class, () -> builder.maximumSize(16));
  }

  @GwtIncompatible // maximumWeight
  public void testMaximumSize_andWeight() {
    // maximumSize and maximumWeight are mutually exclusive.
    CacheBuilder<Object, Object> builder = CacheBuilder.newBuilder().maximumSize(16);
    assertThrows(IllegalStateException.class, () -> builder.maximumWeight(16));
  }

  @GwtIncompatible // digs into internals of the non-GWT implementation
  public void testMaximumSize_largerThanInt() {
    CacheBuilder<Object, Object> builder =
        CacheBuilder.newBuilder().initialCapacity(512).maximumSize(Long.MAX_VALUE);
    LocalCache<?, ?> cache = ((LocalCache.LocalManualCache<?, ?>) builder.build()).localCache;
    assertThat(cache.segments.length * cache.segments[0].table.length()).isEqualTo(512);
  }

  @GwtIncompatible // maximumWeight
  public void testMaximumWeight_negative() {
    CacheBuilder<Object, Object> builder = CacheBuilder.newBuilder();
    assertThrows(IllegalArgumentException.class, () -> builder.maximumWeight(-1));
  }

  @GwtIncompatible // maximumWeight
  public void testMaximumWeight_setTwice() {
    CacheBuilder<Object, Object> builder = CacheBuilder.newBuilder().maximumWeight(16);
    assertThrows(IllegalStateException.class, () -> builder.maximumWeight(16));
    assertThrows(IllegalStateException.class, () -> builder.maximumSize(16));
  }

  @GwtIncompatible // maximumWeight
  public void testMaximumWeight_withoutWeigher() {
    CacheBuilder<Object, Object> builder = CacheBuilder.newBuilder().maximumWeight(1);
    assertThrows(IllegalStateException.class, () -> builder.build(identityLoader()));
  }

  @GwtIncompatible // weigher
  public void testWeigher_withoutMaximumWeight() {
    CacheBuilder<Object, Object> builder = CacheBuilder.newBuilder().weigher(constantWeigher(42));
    assertThrows(IllegalStateException.class, () -> builder.build(identityLoader()));
  }

  @GwtIncompatible // weigher
  public void testWeigher_withMaximumSize() {
    // weigher + maximumSize is rejected in either declaration order.
    assertThrows(
        IllegalStateException.class,
        () -> CacheBuilder.newBuilder().weigher(constantWeigher(42)).maximumSize(1));
    assertThrows(
        IllegalStateException.class,
        () -> CacheBuilder.newBuilder().maximumSize(1).weigher(constantWeigher(42)));
  }

  @GwtIncompatible // weakKeys
  public void testKeyStrengthSetTwice() {
    CacheBuilder<Object, Object> builder1 = CacheBuilder.newBuilder().weakKeys();
    assertThrows(IllegalStateException.class, () -> builder1.weakKeys());
  }

  @GwtIncompatible // weakValues
  public void testValueStrengthSetTwice() {
    // Once a value strength is chosen, neither the same nor a different
    // strength may be set again.
    CacheBuilder<Object, Object> builder1 = CacheBuilder.newBuilder().weakValues();
    assertThrows(IllegalStateException.class, () -> builder1.weakValues());
    assertThrows(IllegalStateException.class, () -> builder1.softValues());
    CacheBuilder<Object, Object> builder2 = CacheBuilder.newBuilder().softValues();
    assertThrows(IllegalStateException.class, () -> builder2.softValues());
    assertThrows(IllegalStateException.class, () -> builder2.weakValues());
  }

  @GwtIncompatible // Duration
  @IgnoreJRERequirement // No more dangerous than wherever the caller got the Duration from
  public void testLargeDurationsAreOk() {
    Duration threeHundredYears = Duration.ofDays(365 * 300);
    CacheBuilder<Object, Object> unused =
        CacheBuilder.newBuilder()
            .expireAfterWrite(threeHundredYears)
            .expireAfterAccess(threeHundredYears)
            .refreshAfterWrite(threeHundredYears);
  }

  public void testTimeToLive_negative() {
    CacheBuilder<Object, Object> builder = CacheBuilder.newBuilder();
    assertThrows(IllegalArgumentException.class, () -> builder.expireAfterWrite(-1, SECONDS));
  }

  @GwtIncompatible // Duration
  public void testTimeToLive_negative_duration() {
    CacheBuilder<Object, Object> builder = CacheBuilder.newBuilder();
    assertThrows(
        IllegalArgumentException.class, () -> builder.expireAfterWrite(Duration.ofSeconds(-1)));
  }

  @SuppressWarnings("ReturnValueIgnored")
  public void testTimeToLive_small() {
    CacheBuilder.newBuilder().expireAfterWrite(1, NANOSECONDS).build(identityLoader());
    // well, it didn't blow up.
  }

  public void testTimeToLive_setTwice() {
    CacheBuilder<Object, Object> builder =
        CacheBuilder.newBuilder().expireAfterWrite(3600, SECONDS);
    assertThrows(IllegalStateException.class, () -> builder.expireAfterWrite(3600, SECONDS));
  }

  @GwtIncompatible // Duration
  public void testTimeToLive_setTwice_duration() {
    CacheBuilder<Object, Object> builder =
        CacheBuilder.newBuilder().expireAfterWrite(Duration.ofHours(1));
    assertThrows(IllegalStateException.class, () -> builder.expireAfterWrite(Duration.ofHours(1)));
  }

  public void testTimeToIdle_negative() {
    CacheBuilder<Object, Object> builder = CacheBuilder.newBuilder();
    assertThrows(IllegalArgumentException.class, () -> builder.expireAfterAccess(-1, SECONDS));
  }

  @GwtIncompatible // Duration
  public void testTimeToIdle_negative_duration() {
    CacheBuilder<Object, Object> builder = CacheBuilder.newBuilder();
    assertThrows(
        IllegalArgumentException.class, () -> builder.expireAfterAccess(Duration.ofSeconds(-1)));
  }

  @SuppressWarnings("ReturnValueIgnored")
  public void testTimeToIdle_small() {
    CacheBuilder.newBuilder().expireAfterAccess(1, NANOSECONDS).build(identityLoader());
    // well, it didn't blow up.
  }

  public void testTimeToIdle_setTwice() {
    CacheBuilder<Object, Object> builder =
        CacheBuilder.newBuilder().expireAfterAccess(3600, SECONDS);
    assertThrows(IllegalStateException.class, () -> builder.expireAfterAccess(3600, SECONDS));
  }

  @GwtIncompatible // Duration
  public void testTimeToIdle_setTwice_duration() {
    CacheBuilder<Object, Object> builder =
        CacheBuilder.newBuilder().expireAfterAccess(Duration.ofHours(1));
    assertThrows(IllegalStateException.class, () -> builder.expireAfterAccess(Duration.ofHours(1)));
  }

  public void testTimeToIdleAndToLive() {
    LoadingCache<?, ?> unused =
        CacheBuilder.newBuilder()
            .expireAfterWrite(1, NANOSECONDS)
            .expireAfterAccess(1, NANOSECONDS)
            .build(identityLoader());
    // well, it didn't blow up.
  }

  @GwtIncompatible // refreshAfterWrite
  public void testRefresh_zero() {
    CacheBuilder<Object, Object> builder = CacheBuilder.newBuilder();
    assertThrows(IllegalArgumentException.class, () -> builder.refreshAfterWrite(0, SECONDS));
  }

  @GwtIncompatible // Duration
  public void testRefresh_zero_duration() {
    CacheBuilder<Object, Object> builder = CacheBuilder.newBuilder();
    assertThrows(IllegalArgumentException.class, () -> builder.refreshAfterWrite(Duration.ZERO));
  }

  @GwtIncompatible // refreshAfterWrite
  public void testRefresh_setTwice() {
    CacheBuilder<Object, Object> builder =
        CacheBuilder.newBuilder().refreshAfterWrite(3600, SECONDS);
    assertThrows(IllegalStateException.class, () -> builder.refreshAfterWrite(3600, SECONDS));
  }

  @GwtIncompatible // Duration
  public void testRefresh_setTwice_duration() {
    CacheBuilder<Object, Object> builder =
        CacheBuilder.newBuilder().refreshAfterWrite(Duration.ofHours(1));
    assertThrows(IllegalStateException.class, () -> builder.refreshAfterWrite(Duration.ofHours(1)));
  }

  public void testTicker_setTwice() {
    Ticker testTicker = Ticker.systemTicker();
    CacheBuilder<Object, Object> builder = CacheBuilder.newBuilder().ticker(testTicker);
    assertThrows(IllegalStateException.class, () -> builder.ticker(testTicker));
  }

  public void testRemovalListener_setTwice() {
    RemovalListener<Object, Object> testListener = nullRemovalListener();
    CacheBuilder<Object, Object> builder = CacheBuilder.newBuilder().removalListener(testListener);
    assertThrows(IllegalStateException.class, () -> builder.removalListener(testListener));
  }

  public void testValuesIsNotASet() {
    assertThat(CacheBuilder.newBuilder().build().asMap().values() instanceof Set).isFalse();
  }

  @GwtIncompatible // CacheTesting
  public void testNullCache() {
    // A maximumSize(0) cache evicts every entry immediately after load.
    CountingRemovalListener<Object, Object> listener = countingRemovalListener();
    LoadingCache<Object, Object> nullCache =
        CacheBuilder.newBuilder().maximumSize(0).removalListener(listener).build(identityLoader());
    assertThat(nullCache.size()).isEqualTo(0);
    Object key = new Object();
    assertThat(nullCache.getUnchecked(key)).isSameInstanceAs(key);
    assertThat(listener.getCount()).isEqualTo(1);
    assertThat(nullCache.size()).isEqualTo(0);
    CacheTesting.checkEmpty(nullCache.asMap());
  }

  @GwtIncompatible // QueuingRemovalListener
  public void testRemovalNotification_clear() throws InterruptedException {
    // If a clear() happens while a computation is pending, we should not get a removal
    // notification.
    AtomicBoolean shouldWait = new AtomicBoolean(false);
    CountDownLatch computingLatch = new CountDownLatch(1);
    CacheLoader<String, String> computingFunction =
        new CacheLoader<String, String>() {
          @Override
          public String load(String key) throws InterruptedException {
            if (shouldWait.get()) {
              computingLatch.await();
            }
            return key;
          }
        };
    QueuingRemovalListener<String, String> listener = queuingRemovalListener();

    LoadingCache<String, String> cache =
        CacheBuilder.newBuilder()
            .concurrencyLevel(1)
            .removalListener(listener)
            .build(computingFunction);

    // seed the map, so its segment's count > 0
    cache.getUnchecked("a");

    shouldWait.set(true);

    CountDownLatch computationStarted = new CountDownLatch(1);
    CountDownLatch computationComplete = new CountDownLatch(1);
    new Thread(
            new Runnable() {
              @Override
              public void run() {
                computationStarted.countDown();
                cache.getUnchecked("b");
                computationComplete.countDown();
              }
            })
        .start();

    // wait for the computingEntry to be created
    computationStarted.await();
    cache.invalidateAll();
    // let the computation proceed
    computingLatch.countDown();
    // don't check cache.size() until we know the get("b") call is complete
    computationComplete.await();

    // At this point, the listener should be holding the seed value (a -> a), and the map should
    // contain the computed value (b -> b), since the clear() happened before the computation
    // completed.
    assertThat(listener).hasSize(1);
    RemovalNotification<String, String> notification = listener.remove();
    assertThat(notification.getKey()).isEqualTo("a");
    assertThat(notification.getValue()).isEqualTo("a");
    assertThat(cache.size()).isEqualTo(1);
    assertThat(cache.getUnchecked("b")).isEqualTo("b");
  }

  // "Basher tests", where we throw a bunch of stuff at a LoadingCache and check basic invariants.

  /**
   * This is a less carefully-controlled version of {@link #testRemovalNotification_clear} - this is
   * a black-box test that tries to create lots of different thread-interleavings, and asserts that
   * each computation is affected by a call to {@code clear()} (and therefore gets passed to the
   * removal listener), or else is not affected by the {@code clear()} (and therefore exists in the
   * cache afterward).
   */
  @GwtIncompatible // QueuingRemovalListener
  @SuppressWarnings("ThreadPriorityCheck") // TODO: b/175898629 - Consider onSpinWait.
  public void testRemovalNotification_clear_basher() throws InterruptedException {
    // If a clear() happens close to the end of computation, one of two things should happen:
    // - computation ends first: the removal listener is called, and the cache does not contain the
    //   key/value pair
    // - clear() happens first: the removal listener is not called, and the cache contains the pair
    AtomicBoolean computationShouldWait = new AtomicBoolean();
    CountDownLatch computationLatch = new CountDownLatch(1);
    QueuingRemovalListener<String, String> listener = queuingRemovalListener();
    LoadingCache<String, String> cache =
        CacheBuilder.newBuilder()
            .removalListener(listener)
            .concurrencyLevel(20)
            .build(new DelayingIdentityLoader<String>(computationShouldWait, computationLatch));

    int nThreads = 100;
    int nTasks = 1000;
    int nSeededEntries = 100;
    Set<String> expectedKeys = Sets.newHashSetWithExpectedSize(nTasks + nSeededEntries);
    // seed the map, so its segments have a count>0; otherwise, clear() won't visit the in-progress
    // entries
    for (int i = 0; i < nSeededEntries; i++) {
      String s = "b" + i;
      cache.getUnchecked(s);
      expectedKeys.add(s);
    }
    computationShouldWait.set(true);

    AtomicInteger computedCount = new AtomicInteger();
    ExecutorService threadPool = newFixedThreadPool(nThreads);
    CountDownLatch tasksFinished = new CountDownLatch(nTasks);
    for (int i = 0; i < nTasks; i++) {
      String s = "a" + i;
      @SuppressWarnings("unused") // https://errorprone.info/bugpattern/FutureReturnValueIgnored
      Future<?> possiblyIgnoredError =
          threadPool.submit(
              new Runnable() {
                @Override
                public void run() {
                  cache.getUnchecked(s);
                  computedCount.incrementAndGet();
                  tasksFinished.countDown();
                }
              });
      expectedKeys.add(s);
    }

    computationLatch.countDown();
    // let some computations complete
    while (computedCount.get() < nThreads) {
      Thread.yield();
    }
    cache.invalidateAll();
    tasksFinished.await();

    // Check all of the removal notifications we received: they should have had correctly-associated
    // keys and values. (An earlier bug saw removal notifications for in-progress computations,
    // which had real keys with null values.)
    Map<String, String> removalNotifications = new HashMap<>();
    for (RemovalNotification<String, String> notification : listener) {
      removalNotifications.put(notification.getKey(), notification.getValue());
      assertWithMessage("Unexpected key/value pair passed to removalListener")
          .that(notification.getValue())
          .isEqualTo(notification.getKey());
    }

    // All of the seed values should have been visible, so we should have gotten removal
    // notifications for all of them.
    for (int i = 0; i < nSeededEntries; i++) {
      assertThat(removalNotifications.get("b" + i)).isEqualTo("b" + i);
    }

    // Each of the values added to the map should either still be there, or have seen a removal
    // notification.
    assertThat(Sets.union(cache.asMap().keySet(), removalNotifications.keySet()))
        .isEqualTo(expectedKeys);
    assertThat(cache.asMap().keySet()).containsNoneIn(removalNotifications.keySet());
    threadPool.shutdown();
    threadPool.awaitTermination(300, SECONDS);
  }

  /**
   * Calls get() repeatedly from many different threads, and tests that all of the removed entries
   * (removed because of size limits or expiration) trigger appropriate removal notifications.
   */
  @GwtIncompatible // QueuingRemovalListener
  public void testRemovalNotification_get_basher() throws InterruptedException {
    int nTasks = 1000;
    int nThreads = 100;
    int getsPerTask = 1000;
    int nUniqueKeys = 10000;
    Random random = new Random(); // Randoms.insecureRandom();

    QueuingRemovalListener<String, String> removalListener = queuingRemovalListener();
    AtomicInteger computeCount = new AtomicInteger();
    AtomicInteger exceptionCount = new AtomicInteger();
    AtomicInteger computeNullCount = new AtomicInteger();
    @SuppressWarnings("CacheLoaderNull") // test of handling of erroneous implementation
    CacheLoader<String, String> countingIdentityLoader =
        new CacheLoader<String, String>() {
          @Override
          public String load(String key) throws InterruptedException {
            // Randomly exercise all loader behaviors: throw, return null,
            // delay, or return normally.
            int behavior = random.nextInt(4);
            if (behavior == 0) { // throw an exception
              exceptionCount.incrementAndGet();
              throw new RuntimeException("fake exception for test");
            } else if (behavior == 1) { // return null
              computeNullCount.incrementAndGet();
              return null;
            } else if (behavior == 2) { // slight delay before returning
              Thread.sleep(5);
              computeCount.incrementAndGet();
              return key;
            } else {
              computeCount.incrementAndGet();
              return key;
            }
          }
        };
    LoadingCache<String, String> cache =
        CacheBuilder.newBuilder()
            .recordStats()
            .concurrencyLevel(2)
            .expireAfterWrite(100, MILLISECONDS)
            .removalListener(removalListener)
            .maximumSize(5000)
            .build(countingIdentityLoader);

    ExecutorService threadPool = newFixedThreadPool(nThreads);
    for (int i = 0; i < nTasks; i++) {
      @SuppressWarnings("unused") // https://errorprone.info/bugpattern/FutureReturnValueIgnored
      Future<?> possiblyIgnoredError =
          threadPool.submit(
              new Runnable() {
                @Override
                public void run() {
                  for (int j = 0; j < getsPerTask; j++) {
                    try {
                      cache.getUnchecked("key" + random.nextInt(nUniqueKeys));
                    } catch (RuntimeException e) {
                    }
                  }
                }
              });
    }

    threadPool.shutdown();
    threadPool.awaitTermination(300, SECONDS);

    // Since we're not doing any more cache operations, and the cache only expires/evicts when doing
    // other operations, the cache and the removal queue won't change from this point on.

    // Verify that each received removal notification was valid
    for (RemovalNotification<String, String> notification : removalListener) {
      assertWithMessage("Invalid removal notification")
          .that(notification.getValue())
          .isEqualTo(notification.getKey());
    }

    CacheStats stats = cache.stats();
    assertThat(stats.evictionCount()).isEqualTo(removalListener.size());
    assertThat(stats.loadSuccessCount()).isEqualTo(computeCount.get());
    assertThat(stats.loadExceptionCount()).isEqualTo(exceptionCount.get() + computeNullCount.get());
    // each computed value is still in the cache, or was passed to the removal listener
    assertThat(cache.size() + removalListener.size()).isEqualTo(computeCount.get());
  }

  @GwtIncompatible // NullPointerTester
  public void testNullParameters() throws Exception {
    NullPointerTester tester = new NullPointerTester();
    CacheBuilder<Object, Object> builder = CacheBuilder.newBuilder();
    tester.testAllPublicInstanceMethods(builder);
  }

  @GwtIncompatible // CacheTesting
  public void testSizingDefaults() {
    LoadingCache<?, ?> cache = CacheBuilder.newBuilder().build(identityLoader());
    LocalCache<?, ?> map = CacheTesting.toLocalCache(cache);
    assertThat(map.segments).hasLength(4); // concurrency level
    assertThat(map.segments[0].table.length()).isEqualTo(4); // capacity / conc level
  }

  /** Identity loader that can be made to block until a latch is released. */
  @GwtIncompatible // CountDownLatch
  static final class DelayingIdentityLoader<T> extends CacheLoader<T, T> {
    private final AtomicBoolean shouldWait;
    private final CountDownLatch delayLatch;

    DelayingIdentityLoader(AtomicBoolean shouldWait, CountDownLatch delayLatch) {
      this.shouldWait = shouldWait;
      this.delayLatch = delayLatch;
    }

    @CanIgnoreReturnValue // Sure, why not?
    @Override
    public T load(T key) throws InterruptedException {
      if (shouldWait.get()) {
        delayLatch.await();
      }
      return key;
    }
  }
}
#!/usr/bin/env python2.5
#
# Copyright 2009 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""GCITaskSubscription (Model) query functions.
"""
__authors__ = [
'"Madhusudan.C.S" <madhusudancs@gmail.com>'
]
from soc.logic.models import base
import soc.models.base
import soc.modules.gci.models.task_subscription
class Logic(base.Logic):
    """Logic methods for the GCITaskSubscription model."""

    def __init__(
        self,
        model=soc.modules.gci.models.task_subscription.GCITaskSubscription,
        base_model=soc.models.base.ModelWithFieldAttributes, id_based=True):
        """Defines the name, key_name and model for this entity."""
        super(Logic, self).__init__(model, base_model=base_model,
                                    id_based=id_based)

    def getOrCreateTaskSubscriptionForTask(self, task_entity):
        """Gets or creates a TaskSubscription entity for the given GCITask.

        Args:
          task_entity: GCITask entity

        Returns:
          Existing TaskSubscription entity iff any exists, otherwise a new
          TaskSubscription entity.
        """
        fields = {'task': task_entity}
        # unique=True returns a single entity (or None) instead of a list.
        task_subscription = self.getForFields(fields, unique=True)
        if not task_subscription:
            task_subscription = self.updateOrCreateFromFields(fields)
        return task_subscription

    def subscribeUser(self, task_entity, user_entity, toggle=None):
        """Adds a new subscriber to the subscription depending upon
        the previous subscription.

        Args:
          task_entity: GCITask entity
          user_entity: User entity
          toggle: If True and if the user already exists, removes the user
              from subscription, if the user doesn't exist adds the user,
              if False just returns the current status,
              if None adds the user for subscription irrespective of current
              subscription status

        Returns:
          'add' if the user was added, 'remove' if the user was removed and
          None if the operation failed.
        """
        data = None
        entity = self.getOrCreateTaskSubscriptionForTask(task_entity)
        if toggle == True:
            # Toggle membership: add when absent, remove when present.
            if user_entity.key() not in entity.subscribers:
                entity.subscribers.append(user_entity.key())
                data = 'add'
            else:
                entity.subscribers.remove(user_entity.key())
                data = 'remove'
        elif toggle == False:
            # Query-only mode: report the action that *would* apply, but do
            # not modify the subscriber list.
            if user_entity.key() not in entity.subscribers:
                data = 'add'
        elif toggle == None:
            # Unconditional subscribe; a no-op if already subscribed.
            if user_entity.key() not in entity.subscribers:
                entity.subscribers.append(user_entity.key())
                data = 'add'
        # put() persists the (possibly unchanged) entity; a falsy result means
        # the datastore write failed.
        if entity.put():
            return data
        else:
            return None
# Module-level singleton instance used by callers of this logic module.
logic = Logic()
// Copyright (c) HashiCorp, Inc.
// SPDX-License-Identifier: BUSL-1.1
package testing
import (
"sync"
"github.com/zclconf/go-cty/cty"
)
// ResourceStore is a simple data store, that can let the mock provider defined
// in this package store and return interesting values for resources and data
// sources.
type ResourceStore struct {
	// mutex guards concurrent access to Resources.
	mutex sync.RWMutex
	// Resources maps resource/data source IDs to their stored values.
	Resources map[string]cty.Value
}
// NewResourceStore returns an empty, ready-to-use ResourceStore.
func NewResourceStore() *ResourceStore {
	store := &ResourceStore{}
	store.Resources = make(map[string]cty.Value)
	return store
}
// Get returns the value stored under id and whether such an entry exists.
func (rs *ResourceStore) Get(id string) (cty.Value, bool) {
	rs.mutex.RLock()
	defer rs.mutex.RUnlock()
	v, ok := rs.Resources[id]
	return v, ok
}
// Set stores value under id, replacing any existing entry for that id.
func (rs *ResourceStore) Set(id string, value cty.Value) {
	rs.mutex.Lock()
	defer rs.mutex.Unlock()
	rs.Resources[id] = value
}
// Delete removes the entry stored under id, if any.
func (rs *ResourceStore) Delete(id string) {
	rs.mutex.Lock()
	defer rs.mutex.Unlock()
	delete(rs.Resources, id)
}
// ResourceStoreBuilder is an implementation of the builder pattern for building
// a ResourceStore with prepopulated values.
type ResourceStoreBuilder struct {
	// store is the store under construction; Build() sets it to nil so that
	// further use of the builder panics.
	store *ResourceStore
}
// NewResourceStoreBuilder returns a builder wrapping a fresh ResourceStore.
func NewResourceStoreBuilder() *ResourceStoreBuilder {
	b := &ResourceStoreBuilder{}
	b.store = NewResourceStore()
	return b
}
// AddResource records value under id in the store being built and returns the
// builder to allow chaining. It panics if Build() has already been called.
func (b *ResourceStoreBuilder) AddResource(id string, value cty.Value) *ResourceStoreBuilder {
	store := b.store
	if store == nil {
		panic("cannot add resources after calling Build()")
	}
	store.Set(id, value)
	return b
}
func (b *ResourceStoreBuilder) Build() *ResourceStore {
if b.store == nil {
panic("cannot call Build() more than once")
}
store := b.store
b.store = nil
return store
} | go | github | https://github.com/hashicorp/terraform | internal/stacks/stackruntime/testing/store.go |
from abc import ABCMeta, abstractmethod, abstractproperty
import importlib
import os
import shlex
import subprocess
import socket
import time
import rpyc
from dpa.app.entity import EntityRegistry
from dpa.env.vars import DpaVars
from dpa.ptask.area import PTaskArea
from dpa.ptask import PTaskError, PTask
from dpa.singleton import Singleton
# -----------------------------------------------------------------------------
class SessionRegistry(Singleton):
    """Singleton registry mapping application names to Session subclasses."""

    def init(self):
        # app_name -> registered Session subclass
        self._registry = {}

    def current(self):
        """Return an instance of the session class that is active now, if any."""
        for session_cls in self._registry.values():
            if session_cls.current():
                return session_cls()
        return None

    def register(self, cls):
        """Register a Session subclass under its app_name."""
        self._registry[cls.app_name] = cls
# -----------------------------------------------------------------------------
class Session(object):
    """Abstract base class for application sessions.

    A session wraps a running content-creation application and exposes file
    open/close/save, entity listing, and ptask context lookups. Subclasses
    set ``app_name`` and implement the abstract members below.
    """

    __metaclass__ = ABCMeta

    # Registered application name; subclasses must override.
    # NOTE(review): the ``app_name`` property defined later in this class body
    # shadows this attribute on the base class — subclasses that assign a
    # string restore normal attribute lookup.
    app_name = None

    # -------------------------------------------------------------------------
    @classmethod
    def current(cls):
        """Return the active session for this app, or None.

        Base implementation reports no active session; subclasses override.
        """
        return None

    # -------------------------------------------------------------------------
    def __init__(self):
        pass

    # -------------------------------------------------------------------------
    @abstractmethod
    def close(self):
        """Close the current file."""

    # -------------------------------------------------------------------------
    def list_entities(self, categories=None):
        """List entities in the session.

        :param categories: optional iterable of category names; when given,
            only entities whose ``category`` is contained in it are returned.
        :returns: list of entity instances gathered from all entity classes
            registered for this app.
        """
        entities = []
        entity_classes = EntityRegistry().get_entity_classes(
            self.__class__.app_name)
        for entity_class in entity_classes:
            entities.extend(entity_class.list(self))

        if categories:
            filtered = [e for e in entities if e.category in categories]
        else:
            filtered = entities

        return filtered

    # -------------------------------------------------------------------------
    @classmethod
    def open_file(cls, filepath):
        """Open a new session with the supplied file.

        FIX: first parameter renamed from ``self`` to ``cls`` — this is a
        classmethod. NOTE(review): unlike close/save this is not marked
        @abstractmethod; presumably subclasses are still expected to
        override it — confirm against the app-specific sessions.
        """

    # -------------------------------------------------------------------------
    @abstractmethod
    def save(self, filepath=None):
        """Save the current session. Save to the file path if provided."""

    # -------------------------------------------------------------------------
    @abstractproperty
    def in_session(self):
        """Returns True if inside a current app session."""

    # -------------------------------------------------------------------------
    def init_module(self, module_path):
        """Import and return the named module for use inside the session.

        :param module_path: dotted module path to import.
        :raises SessionError: when not in session or the import fails.
        """
        _module = None
        if self.in_session:
            try:
                _module = importlib.import_module(module_path)
            except ImportError:
                pass  # will raise below

        if not _module:
            raise SessionError(
                "Failed to initialize session. " + \
                "'{mod}' module could not be imported.".format(mod=module_path)
            )

        return _module

    # -------------------------------------------------------------------------
    def require_executable(self, executable):
        """Returns the full path for the supplied executable name.

        :param executable: bare executable name or an explicit path.
        :raises SessionError: if the executable cannot be located or is not
            marked executable.
        """
        (path, file_name) = os.path.split(executable)

        # path already included: validate it in place
        if path:
            if not os.path.isfile(executable):
                raise SessionError("Unable to locate executable: " + executable)
            elif not os.access(executable, os.X_OK):
                raise SessionError("File is not executable: " + executable)
            else:
                return executable
        else:
            # bare name: search the configured PATH entries
            bin_paths = DpaVars.path()
            bin_paths.get()
            for path in bin_paths.list:
                executable_path = os.path.join(path, executable)
                if (os.path.isfile(executable_path) and
                    os.access(executable_path, os.X_OK)):
                    return executable_path
            raise SessionError("Unable to locate executable: " + executable)

    # -------------------------------------------------------------------------
    @property
    def app_name(self):
        """The registered app name for this session's class."""
        return self.__class__.app_name

    # -------------------------------------------------------------------------
    @property
    def ptask_area(self):
        """Return the current ptask area for this session (cached)."""
        if not hasattr(self, '_ptask_area'):
            self._ptask_area = PTaskArea.current()
        return self._ptask_area

    # -------------------------------------------------------------------------
    @property
    def ptask(self):
        """The PTask for the current area spec, or None (cached).

        :raises SessionError: if the spec exists but the ptask lookup fails.
        """
        if not hasattr(self, '_ptask'):
            ptask_area = self.ptask_area
            if not ptask_area.spec:
                self._ptask = None
            else:
                try:
                    self._ptask = PTask.get(ptask_area.spec)
                except PTaskError:
                    raise SessionError("Unable to determine ptask.")
        return self._ptask

    # -------------------------------------------------------------------------
    @property
    def ptask_version(self):
        """Return the current ptask's latest version, or None (cached)."""
        if not hasattr(self, '_ptask_version'):
            ptask = self.ptask
            if not ptask:
                self._ptask_version = None
            else:
                self._ptask_version = ptask.latest_version
        return self._ptask_version
# -----------------------------------------------------------------------------
class RemoteMixin(object):
    """Mixin adding optional remote (rpyc) execution to a Session class.

    When ``remote`` is True, a standalone app server is spawned on a free
    port and modules are imported/executed over an rpyc classic connection.
    """

    __metaclass__ = ABCMeta

    # -------------------------------------------------------------------------
    def __init__(self, remote=False):
        """:param remote: run operations in a separate app process via rpyc."""
        self._remote = remote

    # -------------------------------------------------------------------------
    def __del__(self):
        # best-effort cleanup of any spawned server on garbage collection
        self.shutdown()

    # -------------------------------------------------------------------------
    def __enter__(self):
        return self

    # -------------------------------------------------------------------------
    def __exit__(self, exc_type, exc_value, traceback):
        self.shutdown()

    # -------------------------------------------------------------------------
    @staticmethod
    def _get_port():
        """Return an OS-assigned free TCP port number."""
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        sock.bind(("", 0))
        port = sock.getsockname()[1]
        sock.close()
        return port

    # -------------------------------------------------------------------------
    @property
    def remote(self):
        """True when this session was created for remote execution."""
        return self._remote

    # -------------------------------------------------------------------------
    @property
    def remote_connection(self):
        """Lazily start the remote server and return the rpyc connection."""
        if not hasattr(self, '_remote_connection'):
            self._remote_connection = self._connect_remote()
        return self._remote_connection

    # -------------------------------------------------------------------------
    @abstractproperty
    def server_executable(self):
        """The executable for starting the remote app server."""

    # -------------------------------------------------------------------------
    def shutdown(self):
        """Shut down the remote server process, if one was started."""
        if hasattr(self, '_remote_connection'):
            try:
                self._remote_connection.root.shutdown()
            except EOFError:
                # this is the expected error on shutdown
                pass
            else:
                self._remote_connection = None

    # -------------------------------------------------------------------------
    def init_module(self, module_path):
        """Import the named module locally or in the remote process.

        :param module_path: dotted module path to import.
        :raises SessionError: if the module cannot be imported.
        """
        _module = None
        if self.remote:
            # need to give time for the standalone app to import properly
            tries = 0
            # FIX: was 'or', which never terminated once imports kept failing
            while not _module and tries < 30:
                try:
                    self.remote_connection.execute("import " + module_path)
                    _module = getattr(
                        self.remote_connection.modules, module_path)
                    break
                except ImportError:
                    tries += 1
                    time.sleep(1)
            if not _module:
                self.shutdown()
        elif self.in_session:
            try:
                _module = importlib.import_module(module_path)
            except ImportError:
                pass  # will raise below

        if not _module:
            raise SessionError(
                "Failed to initialize session. " + \
                "'{mod}' module could not be imported.".format(mod=module_path)
            )

        return _module

    # -------------------------------------------------------------------------
    def _connect_remote(self):
        """Spawn ``server_executable`` on a free port and connect via rpyc.

        :raises SessionError: if no connection can be made within ~30s.
        """
        port = self._get_port()
        cmd = "{cmd} {port}".format(cmd=self.server_executable, port=port)
        args = shlex.split(cmd)
        subprocess.Popen(args)

        connection = None
        tries = 0
        # FIX: was 'or', which looped forever when the server never came up
        while not connection and tries < 30:
            try:
                connection = rpyc.classic.connect("localhost", port)
                break
            except socket.error:
                tries += 1
                time.sleep(1)

        if not connection:
            raise SessionError("Unable connect to remote session.")

        return connection
# -----------------------------------------------------------------------------
class SessionError(Exception):
    """Raised for session-level failures (imports, executables, ptask lookups)."""
    pass | unknown | codeparrot/codeparrot-clean | |
from __future__ import unicode_literals
from django.contrib.admin.utils import quote
from django.contrib.auth.models import User
from django.template.response import TemplateResponse
from django.test import TestCase, override_settings
from django.urls import reverse
from .models import Action, Car, Person
@override_settings(ROOT_URLCONF='admin_custom_urls.urls',)
class AdminCustomUrlsTest(TestCase):
    """
    Remember that:
    * The Action model has a CharField PK.
    * The ModelAdmin for Action customizes the add_view URL, it's
    '<app name>/<model name>/!add/'
    """
    @classmethod
    def setUpTestData(cls):
        # Fixtures: the 'add' name deliberately collides with the admin add
        # view, and the last two names probe path- and XSS-shaped PKs.
        cls.superuser = User.objects.create_superuser(username='super', password='secret', email='super@example.com')
        Action.objects.create(name='delete', description='Remove things.')
        Action.objects.create(name='rename', description='Gives things other names.')
        Action.objects.create(name='add', description='Add things.')
        Action.objects.create(name='path/to/file/', description="An action with '/' in its name.")
        Action.objects.create(
            name='path/to/html/document.html',
            description='An action with a name similar to a HTML doc path.'
        )
        Action.objects.create(
            name='javascript:alert(\'Hello world\');">Click here</a>',
            description='An action with a name suspected of being a XSS attempt'
        )
    def setUp(self):
        # Every test runs as the superuser created above.
        self.client.force_login(self.superuser)
    def test_basic_add_GET(self):
        """
        Ensure GET on the add_view works.
        """
        add_url = reverse('admin_custom_urls:admin_custom_urls_action_add')
        self.assertTrue(add_url.endswith('/!add/'))
        response = self.client.get(add_url)
        self.assertIsInstance(response, TemplateResponse)
        self.assertEqual(response.status_code, 200)
    def test_add_with_GET_args(self):
        """
        Ensure GET on the add_view plus specifying a field value in the query
        string works.
        """
        response = self.client.get(reverse('admin_custom_urls:admin_custom_urls_action_add'), {'name': 'My Action'})
        self.assertContains(response, 'value="My Action"')
    def test_basic_add_POST(self):
        """
        Ensure POST on add_view works.
        """
        post_data = {
            '_popup': '1',
            "name": 'Action added through a popup',
            "description": "Description of added action",
        }
        response = self.client.post(reverse('admin_custom_urls:admin_custom_urls_action_add'), post_data)
        self.assertContains(response, 'Action added through a popup')
    def test_admin_URLs_no_clash(self):
        # Should get the change_view for model instance with PK 'add', not show
        # the add_view
        url = reverse('admin_custom_urls:%s_action_change' % Action._meta.app_label, args=(quote('add'),))
        response = self.client.get(url)
        self.assertContains(response, 'Change action')
        # Should correctly get the change_view for the model instance with the
        # funny-looking PK (the one with a 'path/to/html/document.html' value)
        url = reverse(
            'admin_custom_urls:%s_action_change' % Action._meta.app_label,
            args=(quote("path/to/html/document.html"),)
        )
        response = self.client.get(url)
        self.assertContains(response, 'Change action')
        self.assertContains(response, 'value="path/to/html/document.html"')
    def test_post_save_add_redirect(self):
        """
        ModelAdmin.response_post_save_add() controls the redirection after
        the 'Save' button has been pressed when adding a new object.
        """
        post_data = {'name': 'John Doe'}
        self.assertEqual(Person.objects.count(), 0)
        response = self.client.post(reverse('admin_custom_urls:admin_custom_urls_person_add'), post_data)
        persons = Person.objects.all()
        self.assertEqual(len(persons), 1)
        redirect_url = reverse('admin_custom_urls:admin_custom_urls_person_history', args=[persons[0].pk])
        self.assertRedirects(response, redirect_url)
    def test_post_save_change_redirect(self):
        """
        ModelAdmin.response_post_save_change() controls the redirection after
        the 'Save' button has been pressed when editing an existing object.
        """
        Person.objects.create(name='John Doe')
        self.assertEqual(Person.objects.count(), 1)
        person = Person.objects.all()[0]
        post_url = reverse('admin_custom_urls:admin_custom_urls_person_change', args=[person.pk])
        response = self.client.post(post_url, {'name': 'Jack Doe'})
        self.assertRedirects(response, reverse('admin_custom_urls:admin_custom_urls_person_delete', args=[person.pk]))
    def test_post_url_continue(self):
        """
        The ModelAdmin.response_add()'s parameter `post_url_continue` controls
        the redirection after an object has been created.
        """
        post_data = {'name': 'SuperFast', '_continue': '1'}
        self.assertEqual(Car.objects.count(), 0)
        response = self.client.post(reverse('admin_custom_urls:admin_custom_urls_car_add'), post_data)
        cars = Car.objects.all()
        self.assertEqual(len(cars), 1)
        self.assertRedirects(response, reverse('admin_custom_urls:admin_custom_urls_car_history', args=[cars[0].pk])) | unknown | codeparrot/codeparrot-clean | |
<!---
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-->
# Apache Hadoop 3.3.4 Release Notes
These release notes cover new developer and user-facing incompatibilities, important issues, features, and major improvements.
---
* [HDFS-16453](https://issues.apache.org/jira/browse/HDFS-16453) | *Major* | **Upgrade okhttp from 2.7.5 to 4.9.3**
okhttp has been updated to address CVE-2021-0341
---
* [HADOOP-18237](https://issues.apache.org/jira/browse/HADOOP-18237) | *Major* | **Upgrade Apache Xerces Java to 2.12.2**
Apache Xerces has been updated to 2.12.2 to fix CVE-2022-23437
---
* [HADOOP-18307](https://issues.apache.org/jira/browse/HADOOP-18307) | *Major* | **remove hadoop-cos as a dependency of hadoop-cloud-storage**
We have recently become aware that libraries which include a shaded apache httpclient library (hadoop-client-runtime.jar, aws-java-sdk-bundle.jar, gcs-connector-shaded.jar, cos\_api-bundle-5.6.19.jar) all load and use the unshaded resource mozilla/public-suffix-list.txt. If an out of date version of this is found on the classpath first, attempts to negotiate TLS connections may fail with the error "Certificate doesn't match any of the subject alternative names". This release does not declare the hadoop-cos library to be a dependency of the hadoop-cloud-storage POM, so applications depending on that module are no longer exposed to this issue. If an application requires use of the hadoop-cos module, please declare an explicit dependency.
---
* [HADOOP-18332](https://issues.apache.org/jira/browse/HADOOP-18332) | *Major* | **Remove rs-api dependency by downgrading jackson to 2.12.7**
Downgrades Jackson from 2.13.2 to 2.12.7 to fix class conflicts in downstream projects. This version of jackson does contain the fix for CVE-2020-36518.
---
* [HADOOP-18079](https://issues.apache.org/jira/browse/HADOOP-18079) | *Major* | **Upgrade Netty to 4.1.77.Final**
Netty has been updated to address CVE-2019-20444, CVE-2019-20445 and CVE-2022-24823
---
* [HADOOP-18344](https://issues.apache.org/jira/browse/HADOOP-18344) | *Major* | **AWS SDK update to 1.12.262 to address jackson CVE-2018-7489**
The AWS SDK has been updated to 1.12.262 to address jackson CVE-2018-7489 | unknown | github | https://github.com/apache/hadoop | hadoop-common-project/hadoop-common/src/site/markdown/release/3.3.4/RELEASENOTES.3.3.4.md |
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Simple MNIST classifier example with JIT XLA and timelines.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import sys
import tensorflow as tf
from tensorflow.examples.tutorials.mnist import input_data
from tensorflow.python.client import timeline
FLAGS = None
def main(_):
  """Train and evaluate a softmax MNIST classifier, optionally under XLA JIT,
  and dump a Chrome-trace timeline for the final training step."""
  # Import data
  mnist = input_data.read_data_sets(FLAGS.data_dir)
  # Create the model: a single affine layer over flattened 28x28 images.
  x = tf.placeholder(tf.float32, [None, 784])
  w = tf.Variable(tf.zeros([784, 10]))
  b = tf.Variable(tf.zeros([10]))
  y = tf.matmul(x, w) + b
  # Define loss and optimizer
  y_ = tf.placeholder(tf.int64, [None])
  # The raw formulation of cross-entropy,
  #
  # tf.reduce_mean(-tf.reduce_sum(y_ * tf.log(tf.nn.softmax(y)),
  # reduction_indices=[1]))
  #
  # can be numerically unstable.
  #
  # So here we use tf.losses.sparse_softmax_cross_entropy on the raw
  # logit outputs of 'y', and then average across the batch.
  cross_entropy = tf.losses.sparse_softmax_cross_entropy(labels=y_, logits=y)
  train_step = tf.train.GradientDescentOptimizer(0.5).minimize(cross_entropy)
  config = tf.ConfigProto()
  jit_level = 0
  if FLAGS.xla:
    # Turns on XLA JIT compilation.
    jit_level = tf.OptimizerOptions.ON_1
  config.graph_options.optimizer_options.global_jit_level = jit_level
  # run_metadata collects step stats for the traced final iteration below.
  run_metadata = tf.RunMetadata()
  sess = tf.Session(config=config)
  tf.global_variables_initializer().run(session=sess)
  # Train
  train_loops = 1000
  for i in range(train_loops):
    batch_xs, batch_ys = mnist.train.next_batch(100)
    # Create a timeline for the last loop and export to json to view with
    # chrome://tracing/.
    if i == train_loops - 1:
      sess.run(train_step,
               feed_dict={x: batch_xs,
                          y_: batch_ys},
               options=tf.RunOptions(trace_level=tf.RunOptions.FULL_TRACE),
               run_metadata=run_metadata)
      trace = timeline.Timeline(step_stats=run_metadata.step_stats)
      with open('timeline.ctf.json', 'w') as trace_file:
        trace_file.write(trace.generate_chrome_trace_format())
    else:
      sess.run(train_step, feed_dict={x: batch_xs, y_: batch_ys})
  # Test trained model: accuracy over the full test set.
  correct_prediction = tf.equal(tf.argmax(y, 1), y_)
  accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))
  print(sess.run(accuracy,
                 feed_dict={x: mnist.test.images,
                            y_: mnist.test.labels}))
  sess.close()
def str2bool(value):
  """Parse a boolean-ish CLI string: 'false', 'f', '0', 'no', 'n' and the
  empty string (case-insensitive) mean False; everything else means True."""
  return str(value).strip().lower() not in ('false', 'f', '0', 'no', 'n', '')


if __name__ == '__main__':
  parser = argparse.ArgumentParser()
  parser.add_argument(
      '--data_dir',
      type=str,
      default='/tmp/tensorflow/mnist/input_data',
      help='Directory for storing input data')
  # FIX: argparse's type=bool is broken for flags -- bool('False') is True,
  # so '--xla False' silently enabled XLA. str2bool interprets the string.
  parser.add_argument(
      '--xla', type=str2bool, default=True, help='Turn xla via JIT on')
  FLAGS, unparsed = parser.parse_known_args()
tf.app.run(main=main, argv=[sys.argv[0]] + unparsed) | unknown | codeparrot/codeparrot-clean | ||
from flask.globals import session, request
from flask.helpers import flash, url_for
from flask.templating import render_template
from werkzeug.utils import redirect
from edsudoku.server import app
from edsudoku.server.users import User
__author__ = 'Eli Daian <elidaian@gmail.com>'
@app.route('/')
def main_page():
    """
    Webserver index page.

    :return: The main page.
    :rtype: flask.Response
    """
    # Resolve the logged-in user (if any) before rendering.
    user = None
    if session.get('logged_in', False):
        user = User.get_by_id(session['user'])
    return render_template('main_page.html', user=user)
@app.route('/login', methods=['GET', 'POST'])
def login():
    """
    Show the login page and handle login requests.

    On POST: validates credentials, sets session state on success, and
    redirects; on failure it flashes a message and re-renders the form.

    :return: The login page.
    :rtype: flask.Response
    """
    if request.method == 'POST':
        try:
            username = request.form.get('username', None)
            password = request.form.get('password', None)
            if username is None or password is None:
                flash('Invalid data', 'danger')
                return redirect(url_for('login'))
            user = User.query().filter_by(username=username).first()
            if user is None or not user.check_password(password):
                # fall through so the page re-renders with the flash message
                flash('Invalid login credentials', 'danger')
            else:
                flash('You were logged in successfully!', 'success')
                session['logged_in'] = True
                session['user'] = user.id
                # NOTE(review): 'next' is redirected to unvalidated --
                # open-redirect risk if this app is externally exposed.
                if request.args.get('next', None):
                    return redirect(request.args['next'])
                return redirect(url_for('main_page'))
        except KeyError:
            # NOTE(review): appears unreachable -- all lookups above use
            # .get() or are guarded; kept as defensive handling.
            flash('Missing username or password', 'info')
    return render_template('login.html')
@app.route('/logout')
def logout():
    """
    Log out and end the current session (if any).

    Later redirect to the main page (see :func:`~edsudoku.server.login.main_page`).

    :return: A redirection.
    :rtype: flask.Response
    """
    # Drop all session state, then explicitly mark the visitor logged out.
    session.clear()
    session['logged_in'] = False
    return redirect(url_for('main_page')) | unknown | codeparrot/codeparrot-clean | |
/*
* Copyright 2014-2024 JetBrains s.r.o and contributors. Use of this source code is governed by the Apache 2.0 license.
*/
package io.ktor.network.tls
/**
 * An ASN.1 object identifier, held as its dotted (or space-separated) string
 * form with the parsed integer arcs available via [asArray].
 */
public data class OID(public val identifier: String) {
    // Integer arcs parsed from the identifier; accepts "." or " " separators.
    public val asArray: IntArray = identifier.split(".", " ").map { it.trim().toInt() }.toIntArray()

    public companion object {
        // X.500 distinguished-name attribute types
        public val OrganizationName: OID = OID("2.5.4.10")
        public val OrganizationalUnitName: OID = OID("2.5.4.11")
        public val CountryName: OID = OID("2.5.4.6")
        public val CommonName: OID = OID("2.5.4.3")
        public val SubjectAltName: OID = OID("2.5.29.17")

        /**
         * CA OID
         *
         * [Report a problem](https://ktor.io/feedback/?fqname=io.ktor.network.tls.OID.Companion.BasicConstraints)
         * */
        public val BasicConstraints: OID = OID("2.5.29.19")
        public val KeyUsage: OID = OID("2.5.29.15")
        public val ExtKeyUsage: OID = OID("2.5.29.37")
        public val ServerAuth: OID = OID("1.3.6.1.5.5.7.3.1")
        public val ClientAuth: OID = OID("1.3.6.1.5.5.7.3.2")

        /**
         * Encryption OID
         *
         * [Report a problem](https://ktor.io/feedback/?fqname=io.ktor.network.tls.OID.Companion.RSAEncryption)
         */
        public val RSAEncryption: OID = OID("1 2 840 113549 1 1 1")
        public val ECEncryption: OID = OID("1.2.840.10045.2.1")

        /**
         * Algorithm OID
         *
         * [Report a problem](https://ktor.io/feedback/?fqname=io.ktor.network.tls.OID.Companion.ECDSAwithSHA384Encryption)
         */
        public val ECDSAwithSHA384Encryption: OID = OID("1.2.840.10045.4.3.3")
        public val ECDSAwithSHA256Encryption: OID = OID("1.2.840.10045.4.3.2")
        public val RSAwithSHA512Encryption: OID = OID("1.2.840.113549.1.1.13")
        public val RSAwithSHA384Encryption: OID = OID("1.2.840.113549.1.1.12")
        public val RSAwithSHA256Encryption: OID = OID("1.2.840.113549.1.1.11")
        public val RSAwithSHA1Encryption: OID = OID("1.2.840.113549.1.1.5")

        /**
         * EC curves
         *
         * [Report a problem](https://ktor.io/feedback/?fqname=io.ktor.network.tls.OID.Companion.secp256r1)
         */
        public val secp256r1: OID = OID("1.2.840.10045.3.1.7")

        /**
         * Maps a JCA signature algorithm name to its OID.
         * FIX: corrected the "Could't" typo in the error message.
         */
        public fun fromAlgorithm(algorithm: String): OID = when (algorithm) {
            "SHA1withRSA" -> RSAwithSHA1Encryption
            "SHA384withECDSA" -> ECDSAwithSHA384Encryption
            "SHA256withECDSA" -> ECDSAwithSHA256Encryption
            "SHA384withRSA" -> RSAwithSHA384Encryption
            "SHA256withRSA" -> RSAwithSHA256Encryption
            else -> error("Couldn't find OID for $algorithm")
        }
    }
}
/**
 * Converts the provided [algorithm] name from the standard Signature algorithms into the corresponding
 * KeyPairGenerator algorithm name.
 *
 * See the
 * [Signature](https://docs.oracle.com/en/java/javase/17/docs/specs/security/standard-names.html#signature-algorithms)
 * and
 * [KeyPairGenerator](https://docs.oracle.com/en/java/javase/17/docs/specs/security/standard-names.html#keypairgenerator-algorithms)
 * sections in the Java Security Standard Algorithm Names Specification for information about standard algorithm names.
 *
 * [Report a problem](https://ktor.io/feedback/?fqname=io.ktor.network.tls.keysGenerationAlgorithm)
 */
public fun keysGenerationAlgorithm(algorithm: String): String = when {
// Branch order matters: "...ecdsa" also ends with "dsa", so test it first.
algorithm.endsWith("ecdsa", ignoreCase = true) -> "EC"
algorithm.endsWith("dsa", ignoreCase = true) -> "DSA"
algorithm.endsWith("rsa", ignoreCase = true) -> "RSA"
else -> error("Couldn't find KeyPairGenerator algorithm for $algorithm")
} | kotlin | github | https://github.com/ktorio/ktor | ktor-network/ktor-network-tls/common/src/io/ktor/network/tls/OID.kt
from calaccess_raw.admin.base import BaseAdmin
from calaccess_raw.admin.campaign import (
CvrSoCdAdmin,
Cvr2SoCdAdmin,
CvrCampaignDisclosureCdAdmin,
Cvr2CampaignDisclosureCdAdmin,
RcptCdAdmin,
Cvr3VerificationInfoCdAdmin,
LoanCdAdmin,
S401CdAdmin,
ExpnCdAdmin,
F495P2CdAdmin,
DebtCdAdmin,
S496CdAdmin,
SpltCdAdmin,
S497CdAdmin,
F501502CdAdmin,
S498CdAdmin,
)
from calaccess_raw.admin.lobbying import (
CvrRegistrationCdAdmin,
Cvr2RegistrationCdAdmin,
CvrLobbyDisclosureCdAdmin,
Cvr2LobbyDisclosureCdAdmin,
LobbyAmendmentsCdAdmin,
F690P2CdAdmin,
LattCdAdmin,
LexpCdAdmin,
LccmCdAdmin,
LothCdAdmin,
LempCdAdmin,
LpayCdAdmin,
)
from calaccess_raw.admin.common import (
FilernameCdAdmin,
FilerFilingsCdAdmin,
FilingsCdAdmin,
SmryCdAdmin,
CvrE530CdAdmin,
TextMemoCdAdmin,
)
from calaccess_raw.admin.other import (
AcronymsCdAdmin,
AddressCdAdmin,
BallotMeasuresCdAdmin,
EfsFilingLogCdAdmin,
FilersCdAdmin,
FilerAcronymsCdAdmin,
FilerAddressCdAdmin,
FilerEthicsClassCdAdmin,
FilerInterestsCdAdmin,
FilerLinksCdAdmin,
FilerStatusTypesCdAdmin,
FilerToFilerTypeCdAdmin,
FilerTypesCdAdmin,
FilerXrefCdAdmin,
FilingPeriodCdAdmin,
GroupTypesCdAdmin,
HeaderCdAdmin,
HdrCdAdmin,
ImageLinksCdAdmin,
LegislativeSessionsCdAdmin,
LobbyingChgLogCdAdmin,
LobbyistContributions1CdAdmin,
LobbyistContributions2CdAdmin,
LobbyistContributions3CdAdmin,
LobbyistEmployer1CdAdmin,
LobbyistEmployer2CdAdmin,
LobbyistEmployer3CdAdmin,
LobbyistEmployerFirms1CdAdmin,
LobbyistEmployerFirms2CdAdmin,
LobbyistEmpLobbyist1CdAdmin,
LobbyistEmpLobbyist2CdAdmin,
LobbyistFirm1CdAdmin,
LobbyistFirm2CdAdmin,
LobbyistFirm3CdAdmin,
LobbyistFirmEmployer1CdAdmin,
LobbyistFirmEmployer2CdAdmin,
LobbyistFirmLobbyist1CdAdmin,
LobbyistFirmLobbyist2CdAdmin,
LookupCodeAdmin,
NamesCdAdmin,
ReceivedFilingsCdAdmin,
ReportsCdAdmin,
)
# Explicit public API of the admin package.
# NOTE(review): every admin class imported above should be mirrored here so
# `from calaccess_raw.admin import *` exports it -- keep the two lists in sync.
__all__ = [
    'BaseAdmin',
    'CvrSoCdAdmin',
    'Cvr2SoCdAdmin',
    'CvrCampaignDisclosureCdAdmin',
    'Cvr2CampaignDisclosureCdAdmin',
    'RcptCdAdmin',
    'Cvr3VerificationInfoCdAdmin',
    'LoanCdAdmin',
    'S401CdAdmin',
    'ExpnCdAdmin',
    'F495P2CdAdmin',
    'DebtCdAdmin',
    'S496CdAdmin',
    'SpltCdAdmin',
    'S497CdAdmin',
    'F501502CdAdmin',
    'S498CdAdmin',
    'CvrRegistrationCdAdmin',
    'Cvr2RegistrationCdAdmin',
    'CvrLobbyDisclosureCdAdmin',
    'Cvr2LobbyDisclosureCdAdmin',
    'LobbyAmendmentsCdAdmin',
    'F690P2CdAdmin',
    'LattCdAdmin',
    'LexpCdAdmin',
    'LccmCdAdmin',
    'LothCdAdmin',
    'LempCdAdmin',
    'LpayCdAdmin',
    'FilerFilingsCdAdmin',
    'FilingsCdAdmin',
    'SmryCdAdmin',
    'CvrE530CdAdmin',
    'TextMemoCdAdmin',
    'AcronymsCdAdmin',
    'AddressCdAdmin',
    'BallotMeasuresCdAdmin',
    'EfsFilingLogCdAdmin',
    'FilernameCdAdmin',
    'FilersCdAdmin',
    'FilerAcronymsCdAdmin',
    'FilerAddressCdAdmin',
    'FilerEthicsClassCdAdmin',
    'FilerInterestsCdAdmin',
    'FilerLinksCdAdmin',
    'FilerStatusTypesCdAdmin',
    'FilerToFilerTypeCdAdmin',
    'FilerTypesCdAdmin',
    'FilerXrefCdAdmin',
    'FilingPeriodCdAdmin',
    'GroupTypesCdAdmin',
    'HeaderCdAdmin',
    'HdrCdAdmin',
    'ImageLinksCdAdmin',
    'LegislativeSessionsCdAdmin',
    'LobbyingChgLogCdAdmin',
    'LobbyistContributions1CdAdmin',
    'LobbyistContributions2CdAdmin',
    'LobbyistContributions3CdAdmin',
    'LobbyistEmployer1CdAdmin',
    'LobbyistEmployer2CdAdmin',
    'LobbyistEmployer3CdAdmin',
    'LobbyistEmployerFirms1CdAdmin',
    'LobbyistEmployerFirms2CdAdmin',
    'LobbyistEmpLobbyist1CdAdmin',
    'LobbyistEmpLobbyist2CdAdmin',
    'LobbyistFirm1CdAdmin',
    'LobbyistFirm2CdAdmin',
    'LobbyistFirm3CdAdmin',
    'LobbyistFirmEmployer1CdAdmin',
    'LobbyistFirmEmployer2CdAdmin',
    'LobbyistFirmLobbyist1CdAdmin',
    'LobbyistFirmLobbyist2CdAdmin',
    'LookupCodeAdmin',
    'NamesCdAdmin',
    'ReceivedFilingsCdAdmin',
    'ReportsCdAdmin',
] | unknown | codeparrot/codeparrot-clean | |
'''
This is a test script for the RNN Encoder-Decoder
'''
from groundhog.datasets import TMIteratorPytables
from groundhog.trainer.SGD_adadelta import SGD
from groundhog.mainLoop import MainLoop
from groundhog.layers import MultiLayer, \
RecurrentLayer, \
SoftmaxLayer, \
LastState, \
DropOp, \
UnaryOp, \
Operator, \
Shift, \
GaussianNoise
from groundhog.layers import last
from groundhog.models import LM_Model
from theano.sandbox.scan import scan
import numpy
import theano
import theano.tensor as TT
import sys
import cPickle as pkl
theano.config.allow_gc = True
def get_data(state):
    """Build the training data iterator for the translation model.

    Returns (train_data, valid_data, test_data); valid/test are always None
    here. The iterator yields dict batches via ``out_format`` below.
    """
    rng = numpy.random.RandomState(123)

    def out_format (x, y, new_format=None):
        """Pad/truncate a raw (source, target) minibatch into fixed matrices.

        Produces (time, batch) int64 token matrices X/Y plus float32 masks,
        padded with the null symbols; drops sequences that are empty or too
        long to fit (their last step is real data rather than a null symbol).
        """
        # mx/my: clipped max sequence lengths (+1 leaves room for a null symbol)
        mx = numpy.minimum(state['seqlen'], max([len(xx) for xx in x[0]]))+1
        my = numpy.minimum(state['seqlen'], max([len(xx) for xx in y[0]]))+1
        n = state['bs'] # FIXME: may become inefficient later with a large minibatch
        X = numpy.zeros((mx, n), dtype='int64')
        Y0 = numpy.zeros((my, n), dtype='int64')
        Y = numpy.zeros((my, n), dtype='int64')
        Xmask = numpy.zeros((mx, n), dtype='float32')
        Ymask = numpy.zeros((my, n), dtype='float32')
        # Fill source matrix: random crop for too-long sequences (optional),
        # null-symbol padding for short ones; mask marks valid steps.
        for idx in xrange(len(x[0])):
            if mx < len(x[0][idx]):
                if state['randstart']:
                    stx = numpy.random.randint(0, len(x[0][idx]) - mx)
                else:
                    stx = 0
                X[:mx, idx] = x[0][idx][stx:stx+mx]
            else:
                X[:len(x[0][idx]), idx] = x[0][idx][:mx]
            if len(x[0][idx]) < mx:
                X[len(x[0][idx]):, idx] = state['null_sym_source']
            Xmask[:len(x[0][idx]), idx] = 1.
            if len(x[0][idx]) < mx:
                Xmask[len(x[0][idx]), idx] = 1.
        # Fill target matrix analogously (truncate, pad, mask).
        for idx in xrange(len(y[0])):
            Y0[:len(y[0][idx]), idx] = y[0][idx][:my]
            if len(y[0][idx]) < my:
                Y0[len(y[0][idx]):, idx] = state['null_sym_target']
            Ymask[:len(y[0][idx]), idx] = 1.
            if len(y[0][idx]) < my:
                Ymask[len(y[0][idx]), idx] = 1.
        Y = Y0.copy()
        # Mark invalid columns: completely empty pairs, or sequences whose
        # final step is not the null symbol (i.e. they were truncated).
        null_inputs = numpy.zeros(X.shape[1])
        for idx in xrange(X.shape[1]):
            if numpy.sum(Xmask[:,idx]) == 0 and numpy.sum(Ymask[:,idx]) == 0:
                null_inputs[idx] = 1
            if Xmask[-1,idx] and X[-1,idx] != state['null_sym_source']:
                null_inputs[idx] = 1
            if Ymask[-1,idx] and Y0[-1,idx] != state['null_sym_target']:
                null_inputs[idx] = 1
        valid_inputs = 1. - null_inputs
        # Keep only the valid columns of every matrix/mask.
        X = X[:,valid_inputs.nonzero()[0]]
        Y = Y[:,valid_inputs.nonzero()[0]]
        Y0 = Y0[:,valid_inputs.nonzero()[0]]
        Xmask = Xmask[:,valid_inputs.nonzero()[0]]
        Ymask = Ymask[:,valid_inputs.nonzero()[0]]
        if len(valid_inputs.nonzero()[0]) <= 0:
            return None
        # batch size 1: collapse to vectors instead of 1-column matrices
        if n == 1:
            X = X[:,0]
            Y = Y[:,0]
            Y0 = Y0[:,0]
            Xmask = Xmask[:,0]
            Ymask = Ymask[:,0]
        if new_format:
            return new_format(X, Xmask, Y0, Y, Ymask)
        else:
            return X, Xmask, Y, Ymask
    new_format = lambda x,xm, y0, y, ym: {'x' : x, 'x_mask' :xm,
                                          'y': y0, 'y_mask' : ym}
    train_data = TMIteratorPytables(
        batch_size = int(state['bs']),
        target_lfiles = state['target'],
        source_lfiles = state['source'],
        output_format = lambda *args : out_format(*args,
            new_format=new_format),
        can_fit = False,
        queue_size = 10,
        cache_size = 10,
        shuffle = True)
    valid_data = None
    test_data = None
    return train_data, valid_data, test_data
# Activation functions expressed as source strings (eval'd by the layers).
rect = 'lambda x:x*(x>0)'  # rectified linear unit (ReLU)
htanh = 'lambda x:x*(x>-1)*(x<1)'  # identity inside (-1, 1), zero outside
def jobman(state, channel):
# load dataset
state['null_sym_source'] = 15000
state['null_sym_target'] = 15000
state['n_sym_source'] = state['null_sym_source'] + 1
state['n_sym_target'] = state['null_sym_target'] + 1
state['nouts'] = state['n_sym_target']
state['nins'] = state['n_sym_source']
rng = numpy.random.RandomState(state['seed'])
if state['loopIters'] > 0:
train_data, valid_data, test_data = get_data(state)
else:
train_data = None
valid_data = None
test_data = None
########### Training graph #####################
## 1. Inputs
if state['bs'] == 1:
x = TT.lvector('x')
x_mask = TT.vector('x_mask')
y = TT.lvector('y')
y0 = y
y_mask = TT.vector('y_mask')
else:
x = TT.lmatrix('x')
x_mask = TT.matrix('x_mask')
y = TT.lmatrix('y')
y0 = y
y_mask = TT.matrix('y_mask')
# 2. Layers and Operators
bs = state['bs']
embdim = state['dim_mlp']
# Source Sentence
emb = MultiLayer(
rng,
n_in=state['nins'],
n_hids=[state['rank_n_approx']],
activation=[state['rank_n_activ']],
init_fn=state['weight_init_fn'],
weight_noise=state['weight_noise'],
scale=state['weight_scale'],
name='emb')
emb_words = []
if state['rec_gating']:
gater_words = []
if state['rec_reseting']:
reseter_words = []
for si in xrange(state['encoder_stack']):
emb_words.append(MultiLayer(
rng,
n_in=state['rank_n_approx'],
n_hids=[embdim],
activation=['lambda x:x'],
init_fn=state['weight_init_fn'],
weight_noise=state['weight_noise'],
scale=state['weight_scale'],
name='emb_words_%d'%si))
if state['rec_gating']:
gater_words.append(MultiLayer(
rng,
n_in=state['rank_n_approx'],
n_hids=[state['dim']],
activation=['lambda x:x'],
init_fn=state['weight_init_fn'],
weight_noise=state['weight_noise'],
scale=state['weight_scale'],
learn_bias = False,
name='gater_words_%d'%si))
if state['rec_reseting']:
reseter_words.append(MultiLayer(
rng,
n_in=state['rank_n_approx'],
n_hids=[state['dim']],
activation=['lambda x:x'],
init_fn=state['weight_init_fn'],
weight_noise=state['weight_noise'],
scale=state['weight_scale'],
learn_bias = False,
name='reseter_words_%d'%si))
add_rec_step = []
rec_proj = []
if state['rec_gating']:
rec_proj_gater = []
if state['rec_reseting']:
rec_proj_reseter = []
for si in xrange(state['encoder_stack']):
if si > 0:
rec_proj.append(MultiLayer(
rng,
n_in=state['dim'],
n_hids=[embdim],
activation=['lambda x:x'],
init_fn=state['rec_weight_init_fn'],
weight_noise=state['weight_noise'],
scale=state['rec_weight_scale'],
name='rec_proj_%d'%si))
if state['rec_gating']:
rec_proj_gater.append(MultiLayer(
rng,
n_in=state['dim'],
n_hids=[state['dim']],
activation=['lambda x:x'],
init_fn=state['weight_init_fn'],
weight_noise=state['weight_noise'],
scale=state['weight_scale'],
learn_bias = False,
name='rec_proj_gater_%d'%si))
if state['rec_reseting']:
rec_proj_reseter.append(MultiLayer(
rng,
n_in=state['dim'],
n_hids=[state['dim']],
activation=['lambda x:x'],
init_fn=state['weight_init_fn'],
weight_noise=state['weight_noise'],
scale=state['weight_scale'],
learn_bias = False,
name='rec_proj_reseter_%d'%si))
add_rec_step.append(eval(state['rec_layer'])(
rng,
n_hids=state['dim'],
activation = state['activ'],
bias_scale = state['bias'],
scale=state['rec_weight_scale'],
init_fn=state['rec_weight_init_fn'],
weight_noise=state['weight_noise_rec'],
dropout=state['dropout_rec'],
gating=state['rec_gating'],
gater_activation=state['rec_gater'],
reseting=state['rec_reseting'],
reseter_activation=state['rec_reseter'],
name='add_h_%d'%si))
    def _add_op(words_embeddings,
            words_mask=None,
            prev_val=None,
            si = 0,
            state_below = None,
            gater_below = None,
            reseter_below = None,
            one_step=False,
            bs=1,
            init_state=None,
            use_noise=True):
        """Apply the si-th encoder recurrent layer to embedded source words.

        Closes over the layer lists built above (rec_proj, rec_proj_gater,
        rec_proj_reseter, add_rec_step) and the global `state` dict.

        Parameters
        ----------
        words_embeddings : layer output holding the projected word embeddings
        words_mask : sequence mask (None for unmasked input)
        prev_val : previous hidden state (only used when one_step=True)
        si : stack index of the encoder layer to apply
        state_below : output of the layer below (required when si > 0)
        gater_below, reseter_below : gate/reset inputs when gating/reseting
            is enabled in `state`
        one_step : if True, advance the recurrence a single step
            (sampling); otherwise unroll over the whole sequence (training)
        bs : batch size used to recover the per-sequence length
        init_state : initial hidden state passed through to the recurrent layer
        use_noise : whether weight noise is active

        Returns
        -------
        The recurrent layer's output (hidden states).
        """
        # Recover timesteps-per-sequence from the flattened (seqlen*bs) axis.
        seqlen = words_embeddings.out.shape[0]//bs
        rval = words_embeddings
        gater = None
        reseter = None
        if state['rec_gating']:
            gater = gater_below
        if state['rec_reseting']:
            reseter = reseter_below
        if si > 0:
            # Layers above the first also receive a projection of the layer
            # below; gate/reset inputs are accumulated additively.
            rval += rec_proj[si-1](state_below, one_step=one_step,
                    use_noise=use_noise)
            if state['rec_gating']:
                projg = rec_proj_gater[si-1](state_below, one_step=one_step,
                        use_noise = use_noise)
                if gater: gater += projg
                else: gater = projg
            if state['rec_reseting']:
                projg = rec_proj_reseter[si-1](state_below, one_step=one_step,
                        use_noise = use_noise)
                if reseter: reseter += projg
                else: reseter = projg
        if not one_step:
            # Full-sequence application: scan over seqlen steps.
            rval= add_rec_step[si](
                rval,
                nsteps=seqlen,
                batch_size=bs,
                mask=words_mask,
                gater_below = gater,
                reseter_below = reseter,
                one_step=one_step,
                init_state=init_state,
                use_noise = use_noise)
        else:
            # Single-step application: advance from prev_val.
            rval= add_rec_step[si](
                rval,
                mask=words_mask,
                state_before=prev_val,
                gater_below = gater,
                reseter_below = reseter,
                one_step=one_step,
                init_state=init_state,
                use_noise = use_noise)
        return rval
add_op = Operator(_add_op)
# Target Sentence
emb_t = MultiLayer(
rng,
n_in=state['nouts'],
n_hids=[state['rank_n_approx']],
activation=[state['rank_n_activ']],
init_fn=state['weight_init_fn'],
weight_noise=state['weight_noise'],
scale=state['weight_scale'],
name='emb_t')
emb_words_t = []
if state['rec_gating']:
gater_words_t = []
if state['rec_reseting']:
reseter_words_t = []
for si in xrange(state['decoder_stack']):
emb_words_t.append(MultiLayer(
rng,
n_in=state['rank_n_approx'],
n_hids=[embdim],
activation=['lambda x:x'],
init_fn=state['weight_init_fn'],
weight_noise=state['weight_noise'],
scale=state['weight_scale'],
name='emb_words_t_%d'%si))
if state['rec_gating']:
gater_words_t.append(MultiLayer(
rng,
n_in=state['rank_n_approx'],
n_hids=[state['dim']],
activation=['lambda x:x'],
init_fn=state['weight_init_fn'],
weight_noise=state['weight_noise'],
scale=state['weight_scale'],
learn_bias=False,
name='gater_words_t_%d'%si))
if state['rec_reseting']:
reseter_words_t.append(MultiLayer(
rng,
n_in=state['rank_n_approx'],
n_hids=[state['dim']],
activation=['lambda x:x'],
init_fn=state['weight_init_fn'],
weight_noise=state['weight_noise'],
scale=state['weight_scale'],
learn_bias=False,
name='reseter_words_t_%d'%si))
proj_everything_t = []
if state['rec_gating']:
gater_everything_t = []
if state['rec_reseting']:
reseter_everything_t = []
for si in xrange(state['decoder_stack']):
proj_everything_t.append(MultiLayer(
rng,
n_in=state['dim'],
n_hids=[embdim],
activation=['lambda x:x'],
init_fn=state['weight_init_fn'],
weight_noise=state['weight_noise'],
scale=state['weight_scale'],
name='proj_everything_t_%d'%si,
learn_bias = False))
if state['rec_gating']:
gater_everything_t.append(MultiLayer(
rng,
n_in=state['dim'],
n_hids=[state['dim']],
activation=['lambda x:x'],
init_fn=state['weight_init_fn'],
weight_noise=state['weight_noise'],
scale=state['weight_scale'],
name='gater_everything_t_%d'%si,
learn_bias = False))
if state['rec_reseting']:
reseter_everything_t.append(MultiLayer(
rng,
n_in=state['dim'],
n_hids=[state['dim']],
activation=['lambda x:x'],
init_fn=state['weight_init_fn'],
weight_noise=state['weight_noise'],
scale=state['weight_scale'],
name='reseter_everything_t_%d'%si,
learn_bias = False))
add_rec_step_t = []
rec_proj_t = []
if state['rec_gating']:
rec_proj_t_gater = []
if state['rec_reseting']:
rec_proj_t_reseter = []
for si in xrange(state['decoder_stack']):
if si > 0:
rec_proj_t.append(MultiLayer(
rng,
n_in=state['dim'],
n_hids=[embdim],
activation=['lambda x:x'],
init_fn=state['rec_weight_init_fn'],
weight_noise=state['weight_noise'],
scale=state['rec_weight_scale'],
name='rec_proj_%d'%si))
if state['rec_gating']:
rec_proj_t_gater.append(MultiLayer(
rng,
n_in=state['dim'],
n_hids=[state['dim']],
activation=['lambda x:x'],
init_fn=state['weight_init_fn'],
weight_noise=state['weight_noise'],
scale=state['weight_scale'],
learn_bias=False,
name='rec_proj_t_gater_%d'%si))
if state['rec_reseting']:
rec_proj_t_reseter.append(MultiLayer(
rng,
n_in=state['dim'],
n_hids=[state['dim']],
activation=['lambda x:x'],
init_fn=state['weight_init_fn'],
weight_noise=state['weight_noise'],
scale=state['weight_scale'],
learn_bias=False,
name='rec_proj_t_reseter_%d'%si))
add_rec_step_t.append(eval(state['rec_layer'])(
rng,
n_hids=state['dim'],
activation = state['activ'],
bias_scale = state['bias'],
scale=state['rec_weight_scale'],
init_fn=state['rec_weight_init_fn'],
weight_noise=state['weight_noise_rec'],
dropout=state['dropout_rec'],
gating=state['rec_gating'],
gater_activation=state['rec_gater'],
reseting=state['rec_reseting'],
reseter_activation=state['rec_reseter'],
name='add_h_t_%d'%si))
if state['encoder_stack'] > 1:
encoder_proj = []
for si in xrange(state['encoder_stack']):
encoder_proj.append(MultiLayer(
rng,
n_in=state['dim'],
n_hids=[state['dim'] * state['maxout_part']],
activation=['lambda x: x'],
init_fn=state['weight_init_fn'],
weight_noise=state['weight_noise'],
scale=state['weight_scale'],
name='encoder_proj_%d'%si,
learn_bias = (si == 0)))
encoder_act_layer = UnaryOp(activation=eval(state['unary_activ']),
indim = indim, pieces = pieces, rng=rng)
    def _add_t_op(words_embeddings, everything = None, words_mask=None,
                  prev_val=None,one_step=False, bs=1,
                  init_state=None, use_noise=True,
                  gater_below = None,
                  reseter_below = None,
                  si = 0, state_below = None):
        """Apply the si-th decoder recurrent layer to embedded target words.

        Decoder counterpart of `_add_op`: in addition to the input from the
        layer below, each decoder layer can condition on `everything` (the
        encoder context) through the proj_everything_t / gater_everything_t /
        reseter_everything_t projections. Closes over the decoder layer lists
        (rec_proj_t, rec_proj_t_gater, rec_proj_t_reseter, add_rec_step_t)
        and the global `state` dict.

        Parameters mirror `_add_op`; `everything` is the encoder context
        (None to skip conditioning).

        Returns the recurrent layer's output (hidden states).
        """
        # Recover timesteps-per-sequence from the flattened (seqlen*bs) axis.
        seqlen = words_embeddings.out.shape[0]//bs
        rval = words_embeddings
        gater = None
        if state['rec_gating']:
            gater = gater_below
        reseter = None
        if state['rec_reseting']:
            reseter = reseter_below
        if si > 0:
            # The layer below may be returned as a list of per-layer states;
            # only its last element feeds this layer.
            if isinstance(state_below, list):
                state_below = state_below[-1]
            rval += rec_proj_t[si-1](state_below,
                    one_step=one_step, use_noise=use_noise)
            if state['rec_gating']:
                projg = rec_proj_t_gater[si-1](state_below, one_step=one_step,
                        use_noise = use_noise)
                if gater: gater += projg
                else: gater = projg
            if state['rec_reseting']:
                projg = rec_proj_t_reseter[si-1](state_below, one_step=one_step,
                        use_noise = use_noise)
                if reseter: reseter += projg
                else: reseter = projg
        if everything:
            # Condition input, gate and reset signals on the encoder context.
            rval = rval + proj_everything_t[si](everything)
            if state['rec_gating']:
                everyg = gater_everything_t[si](everything, one_step=one_step, use_noise=use_noise)
                if gater: gater += everyg
                else: gater = everyg
            if state['rec_reseting']:
                everyg = reseter_everything_t[si](everything, one_step=one_step, use_noise=use_noise)
                if reseter: reseter += everyg
                else: reseter = everyg
        if not one_step:
            # Full-sequence application: scan over seqlen steps.
            rval = add_rec_step_t[si](
                rval,
                nsteps=seqlen,
                batch_size=bs,
                mask=words_mask,
                one_step=one_step,
                init_state=init_state,
                gater_below = gater,
                reseter_below = reseter,
                use_noise = use_noise)
        else:
            # Single-step application: advance from prev_val.
            rval = add_rec_step_t[si](
                rval,
                mask=words_mask,
                state_before=prev_val,
                one_step=one_step,
                gater_below = gater,
                reseter_below = reseter,
                use_noise = use_noise)
        return rval
add_t_op = Operator(_add_t_op)
outdim = state['dim_mlp']
if not state['deep_out']:
outdim = state['rank_n_approx']
if state['bias_code']:
bias_code = []
for si in xrange(state['decoder_stack']):
bias_code.append(MultiLayer(
rng,
n_in=state['dim'],
n_hids=[state['dim']],
activation = [state['activ']],
bias_scale = [state['bias']],
scale=state['weight_scale'],
init_fn=state['weight_init_fn'],
weight_noise=state['weight_noise'],
name='bias_code_%d'%si))
if state['avg_word']:
word_code_nin = state['rank_n_approx']
word_code = MultiLayer(
rng,
n_in=word_code_nin,
n_hids=[outdim],
activation = 'lambda x:x',
bias_scale = [state['bias_mlp']/3],
scale=state['weight_scale'],
init_fn=state['weight_init_fn'],
weight_noise=state['weight_noise'],
learn_bias = False,
name='word_code')
proj_code = MultiLayer(
rng,
n_in=state['dim'],
n_hids=[outdim],
activation = 'lambda x: x',
bias_scale = [state['bias_mlp']/3],
scale=state['weight_scale'],
init_fn=state['weight_init_fn'],
weight_noise=state['weight_noise'],
learn_bias = False,
name='proj_code')
proj_h = []
for si in xrange(state['decoder_stack']):
proj_h.append(MultiLayer(
rng,
n_in=state['dim'],
n_hids=[outdim],
activation = 'lambda x: x',
bias_scale = [state['bias_mlp']/3],
scale=state['weight_scale'],
init_fn=state['weight_init_fn'],
weight_noise=state['weight_noise'],
name='proj_h_%d'%si))
if state['bigram']:
proj_word = MultiLayer(
rng,
n_in=state['rank_n_approx'],
n_hids=[outdim],
activation=['lambda x:x'],
bias_scale = [state['bias_mlp']/3],
init_fn=state['weight_init_fn'],
weight_noise=state['weight_noise'],
scale=state['weight_scale'],
learn_bias = False,
name='emb_words_lm')
if state['deep_out']:
indim = 0
pieces = 0
act_layer = UnaryOp(activation=eval(state['unary_activ']))
drop_layer = DropOp(rng=rng, dropout=state['dropout'])
if state['deep_out']:
indim = state['dim_mlp'] / state['maxout_part']
rank_n_approx = state['rank_n_approx']
rank_n_activ = state['rank_n_activ']
else:
indim = state['rank_n_approx']
rank_n_approx = 0
rank_n_activ = None
output_layer = SoftmaxLayer(
rng,
indim,
state['nouts'],
state['weight_scale'],
-1,
rank_n_approx = rank_n_approx,
rank_n_activ = rank_n_activ,
weight_noise=state['weight_noise'],
init_fn=state['weight_init_fn'],
name='out')
    def _pop_op(everything, accum, everything_max = None,
                everything_min = None, word = None, aword = None,
                one_step=False, use_noise=True):
        """Combine decoder states and context into the pre-softmax readout.

        Sums the proj_h projections of all decoder hidden states in `accum`,
        then merges in `everything` (the encoder context), the averaged-word
        code `aword` (when state['avg_word']) and the previous word `word`
        (when state['bigram']). Merging is multiplicative when
        state['mult_out'] is set, additive otherwise. With state['deep_out'],
        a final activation + dropout is applied.

        Parameters
        ----------
        everything : projected encoder context
        accum : list of decoder hidden states, one per decoder layer
        everything_max, everything_min : unused here (accepted for interface
            compatibility with the Operator wrapper)
        word : previous target word(s) for the bigram term
        aword : averaged-word code
        one_step : single-step (sampling) vs. whole-sequence mode
        use_noise : whether weight noise / dropout noise is active
        """
        rval = proj_h[0](accum[0], one_step=one_step, use_noise=use_noise)
        for si in xrange(1,state['decoder_stack']):
            rval += proj_h[si](accum[si], one_step=one_step, use_noise=use_noise)
        if state['mult_out']:
            rval = rval * everything
        else:
            rval = rval + everything
        if aword and state['avg_word']:
            wcode = aword
            if one_step:
                if state['mult_out']:
                    rval = rval * wcode
                else:
                    rval = rval + wcode
            else:
                # Whole-sequence mode: broadcast the per-sentence word code
                # over the time axis by reshaping rval to (time, batch, dim).
                if not isinstance(wcode, TT.TensorVariable):
                    wcode = wcode.out
                shape = wcode.shape
                rshape = rval.shape
                rval = rval.reshape([rshape[0]/shape[0], shape[0], rshape[1]])
                wcode = wcode.dimshuffle('x', 0, 1)
                if state['mult_out']:
                    rval = rval * wcode
                else:
                    rval = rval + wcode
                rval = rval.reshape(rshape)
        if word and state['bigram']:
            if one_step:
                if state['mult_out']:
                    rval *= proj_word(emb_t(word, use_noise=use_noise),
                            one_step=one_step, use_noise=use_noise)
                else:
                    rval += proj_word(emb_t(word, use_noise=use_noise),
                            one_step=one_step, use_noise=use_noise)
            else:
                # `word` may be a raw Theano variable or a layer output.
                if isinstance(word, TT.TensorVariable):
                    shape = word.shape
                    ndim = word.ndim
                else:
                    shape = word.shape
                    ndim = word.out.ndim
                pword = proj_word(emb_t(word, use_noise=use_noise),
                        one_step=one_step, use_noise=use_noise)
                shape_pword = pword.shape
                # Shift() delays the bigram term by one timestep so position t
                # sees the word at t-1.
                if ndim == 1:
                    pword = Shift()(pword.reshape([shape[0], 1, outdim]))
                else:
                    pword = Shift()(pword.reshape([shape[0], shape[1], outdim]))
                if state['mult_out']:
                    rval *= pword.reshape(shape_pword)
                else:
                    rval += pword.reshape(shape_pword)
        if state['deep_out']:
            # Deep output: nonlinearity (e.g. maxout) followed by dropout.
            rval = drop_layer(act_layer(rval), use_noise=use_noise)
        return rval
pop_op = Operator(_pop_op)
# 3. Constructing the model
gater_below = None
if state['rec_gating']:
gater_below = gater_words[0](emb(x))
reseter_below = None
if state['rec_reseting']:
reseter_below = reseter_words[0](emb(x))
encoder_acts = [add_op(emb_words[0](emb(x)), x_mask,
bs=x_mask.shape[1],
si=0, gater_below=gater_below, reseter_below=reseter_below)]
if state['encoder_stack'] > 1:
everything = encoder_proj[0](last(encoder_acts[-1]))
for si in xrange(1,state['encoder_stack']):
gater_below = None
if state['rec_gating']:
gater_below = gater_words[si](emb(x))
reseter_below = None
if state['rec_reseting']:
reseter_below = reseter_words[si](emb(x))
encoder_acts.append(add_op(emb_words[si](emb(x)),
x_mask, bs=x_mask.shape[1],
si=si, state_below=encoder_acts[-1],
gater_below=gater_below,
reseter_below=reseter_below))
if state['encoder_stack'] > 1:
everything += encoder_proj[si](last(encoder_acts[-1]))
if state['encoder_stack'] <= 1:
encoder = encoder_acts[-1]
everything = LastState(ntimes=True,n=y.shape[0])(encoder)
else:
everything = encoder_act_layer(everything)
everything = everything.reshape([1, everything.shape[0], everything.shape[1]])
everything = LastState(ntimes=True,n=y.shape[0])(everything)
if state['bias_code']:
init_state = [bc(everything[-1]) for bc in bias_code]
else:
init_state = [None for bc in bias_code]
if state['avg_word']:
shape = x.shape
pword = emb(x).out.reshape([shape[0], shape[1], state['rank_n_approx']])
pword = pword * x_mask.dimshuffle(0, 1, 'x')
aword = pword.sum(0) / TT.maximum(1., x_mask.sum(0).dimshuffle(0, 'x'))
aword = word_code(aword, use_noise=False)
else:
aword = None
gater_below = None
if state['rec_gating']:
gater_below = gater_words_t[0](emb_t(y0))
reseter_below = None
if state['rec_reseting']:
reseter_below = reseter_words_t[0](emb_t(y0))
has_said = [add_t_op(emb_words_t[0](emb_t(y0)),
everything,
y_mask, bs=y_mask.shape[1],
gater_below = gater_below,
reseter_below = reseter_below,
init_state=init_state[0],
si=0)]
for si in xrange(1,state['decoder_stack']):
gater_below = None
if state['rec_gating']:
gater_below = gater_words_t[si](emb_t(y0))
reseter_below = None
if state['rec_reseting']:
reseter_below = reseter_words_t[si](emb_t(y0))
has_said.append(add_t_op(emb_words_t[si](emb_t(y0)),
everything,
y_mask, bs=y_mask.shape[1],
state_below = has_said[-1],
gater_below = gater_below,
reseter_below = reseter_below,
init_state=init_state[si],
si=si))
if has_said[0].out.ndim < 3:
for si in xrange(state['decoder_stack']):
shape_hs = has_said[si].shape
if y0.ndim == 1:
shape = y0.shape
has_said[si] = Shift()(has_said[si].reshape([shape[0], 1, state['dim_mlp']]))
else:
shape = y0.shape
has_said[si] = Shift()(has_said[si].reshape([shape[0], shape[1], state['dim_mlp']]))
has_said[si].out = TT.set_subtensor(has_said[si].out[0, :, :], init_state[si])
has_said[si] = has_said[si].reshape(shape_hs)
else:
for si in xrange(state['decoder_stack']):
has_said[si] = Shift()(has_said[si])
has_said[si].out = TT.set_subtensor(has_said[si].out[0, :, :], init_state[si])
model = pop_op(proj_code(everything), has_said, word=y0, aword = aword)
nll = output_layer.train(state_below=model, target=y0,
mask=y_mask, reg=None) / TT.cast(y.shape[0]*y.shape[1], 'float32')
valid_fn = None
noise_fn = None
x = TT.lvector(name='x')
n_steps = TT.iscalar('nsteps')
temp = TT.scalar('temp')
gater_below = None
if state['rec_gating']:
gater_below = gater_words[0](emb(x))
reseter_below = None
if state['rec_reseting']:
reseter_below = reseter_words[0](emb(x))
encoder_acts = [add_op(emb_words[0](emb(x),use_noise=False),
si=0,
use_noise=False,
gater_below=gater_below,
reseter_below=reseter_below)]
if state['encoder_stack'] > 1:
everything = encoder_proj[0](last(encoder_acts[-1]), use_noise=False)
for si in xrange(1,state['encoder_stack']):
gater_below = None
if state['rec_gating']:
gater_below = gater_words[si](emb(x))
reseter_below = None
if state['rec_reseting']:
reseter_below = reseter_words[si](emb(x))
encoder_acts.append(add_op(emb_words[si](emb(x),use_noise=False),
si=si,
state_below=encoder_acts[-1], use_noise=False,
gater_below = gater_below,
reseter_below = reseter_below))
if state['encoder_stack'] > 1:
everything += encoder_proj[si](last(encoder_acts[-1]), use_noise=False)
if state['encoder_stack'] <= 1:
encoder = encoder_acts[-1]
everything = last(encoder)
else:
everything = encoder_act_layer(everything)
init_state = []
for si in xrange(state['decoder_stack']):
if state['bias_code']:
init_state.append(TT.reshape(bias_code[si](everything,
use_noise=False), [1, state['dim']]))
else:
init_state.append(TT.alloc(numpy.float32(0), 1, state['dim']))
if state['avg_word']:
aword = emb(x,use_noise=False).out.mean(0)
aword = word_code(aword, use_noise=False)
else:
aword = None
def sample_fn(*args):
aidx = 0; word_tm1 = args[aidx]
aidx += 1; prob_tm1 = args[aidx]
has_said_tm1 = []
for si in xrange(state['decoder_stack']):
aidx += 1; has_said_tm1.append(args[aidx])
aidx += 1; ctx = args[aidx]
if state['avg_word']:
aidx += 1; awrd = args[aidx]
val = pop_op(proj_code(ctx), has_said_tm1, word=word_tm1,
aword=awrd, one_step=True, use_noise=False)
sample = output_layer.get_sample(state_below=val, temp=temp)
logp = output_layer.get_cost(
state_below=val.out.reshape([1, TT.cast(output_layer.n_in, 'int64')]),
temp=temp, target=sample.reshape([1,1]), use_noise=False)
gater_below = None
if state['rec_gating']:
gater_below = gater_words_t[0](emb_t(sample))
reseter_below = None
if state['rec_reseting']:
reseter_below = reseter_words_t[0](emb_t(sample))
has_said_t = [add_t_op(emb_words_t[0](emb_t(sample)),
ctx,
prev_val=has_said_tm1[0],
gater_below=gater_below,
reseter_below=reseter_below,
one_step=True, use_noise=True,
si=0)]
for si in xrange(1, state['decoder_stack']):
gater_below = None
if state['rec_gating']:
gater_below = gater_words_t[si](emb_t(sample))
reseter_below = None
if state['rec_reseting']:
reseter_below = reseter_words_t[si](emb_t(sample))
has_said_t.append(add_t_op(emb_words_t[si](emb_t(sample)),
ctx,
prev_val=has_said_tm1[si],
gater_below=gater_below,
reseter_below=reseter_below,
one_step=True, use_noise=True,
si=si, state_below=has_said_t[-1]))
for si in xrange(state['decoder_stack']):
if isinstance(has_said_t[si], list):
has_said_t[si] = has_said_t[si][-1]
rval = [sample, TT.cast(logp, 'float32')] + has_said_t
return rval
sampler_params = [everything]
if state['avg_word']:
sampler_params.append(aword)
states = [TT.alloc(numpy.int64(0), n_steps)]
states.append(TT.alloc(numpy.float32(0), n_steps))
states += init_state
outputs, updates = scan(sample_fn,
states = states,
params = sampler_params,
n_steps= n_steps,
name='sampler_scan'
)
samples = outputs[0]
probs = outputs[1]
sample_fn = theano.function(
[n_steps, temp, x], [samples, probs.sum()],
updates=updates,
profile=False, name='sample_fn')
model = LM_Model(
cost_layer = nll,
weight_noise_amount=state['weight_noise_amount'],
valid_fn = valid_fn,
sample_fn = sample_fn,
clean_before_noise_fn = False,
noise_fn = noise_fn,
indx_word=state['indx_word_target'],
indx_word_src=state['indx_word'],
character_level = False,
rng = rng)
if state['loopIters'] > 0: algo = SGD(model, state, train_data)
else: algo = None
    def hook_fn():
        """Periodic training hook: print a few source/target pairs and sample.

        Draws up to state['sample_n'] batches (at most state['sample_max']
        sentences in total) from train_data, prints each input/target word
        sequence up to '<eol>', and asks the model to sample a continuation.
        The data iterator's offset is restored afterwards so training is not
        perturbed.
        """
        # Lazily load vocabularies the first time the hook runs.
        if not hasattr(model, 'word_indxs'): model.load_dict()
        if not hasattr(model, 'word_indxs_src'):
            model.word_indxs_src = model.word_indxs
        old_offset = train_data.offset
        if state['sample_reset']: train_data.reset()
        ns = 0  # number of sentences sampled so far
        for sidx in xrange(state['sample_n']):
            # Skip empty batches until a real one arrives.
            while True:
                batch = train_data.next()
                if batch:
                    break
            x = batch['x']
            y = batch['y']
            #xbow = batch['x_bow']
            masks = batch['x_mask']
            if x.ndim > 1:
                # Batched input: one column per sentence.
                for idx in xrange(x.shape[1]):
                    ns += 1
                    if ns > state['sample_max']:
                        break
                    print 'Input: ',
                    for k in xrange(x[:,idx].shape[0]):
                        print model.word_indxs_src[x[:,idx][k]],
                        if model.word_indxs_src[x[:,idx][k]] == '<eol>':
                            break
                    print ''
                    print 'Target: ',
                    for k in xrange(y[:,idx].shape[0]):
                        print model.word_indxs[y[:,idx][k]],
                        if model.word_indxs[y[:,idx][k]] == '<eol>':
                            break
                    print ''
                    # Effective sentence length = first zero in the mask.
                    senlen = len(x[:,idx])
                    if len(numpy.where(masks[:,idx]==0)[0]) > 0:
                        senlen = numpy.where(masks[:,idx]==0)[0][0]
                    if senlen < 1:
                        continue
                    xx = x[:senlen, idx]
                    #xx = xx.reshape([xx.shape[0], 1])
                    model.get_samples(state['seqlen']+1, 1, xx)
            else:
                # Single unbatched sentence.
                ns += 1
                model.get_samples(state['seqlen']+1, 1, x)
            if ns > state['sample_max']:
                break
        train_data.offset = old_offset
        return
main = MainLoop(train_data, valid_data, None, model, algo, state, channel,
reset = state['reset'], hooks = hook_fn)
if state['reload']: main.load()
if state['loopIters'] > 0: main.main()
if state['sampler_test']:
# This is a test script: we only sample
if not hasattr(model, 'word_indxs'): model.load_dict()
if not hasattr(model, 'word_indxs_src'):
model.word_indxs_src = model.word_indxs
indx_word=pkl.load(open(state['word_indx'],'rb'))
try:
while True:
try:
seqin = raw_input('Input Sequence: ')
n_samples = int(raw_input('How many samples? '))
alpha = float(raw_input('Inverse Temperature? '))
seqin = seqin.lower()
seqin = seqin.split()
seqlen = len(seqin)
seq = numpy.zeros(seqlen+1, dtype='int64')
for idx,sx in enumerate(seqin):
try:
seq[idx] = indx_word[sx]
except:
seq[idx] = indx_word[state['oov']]
seq[-1] = state['null_sym_source']
except Exception:
print 'Something wrong with your input! Try again!'
continue
sentences = []
all_probs = []
for sidx in xrange(n_samples):
#import ipdb; ipdb.set_trace()
[values, probs] = model.sample_fn(seqlen * 3, alpha, seq)
sen = []
for k in xrange(values.shape[0]):
if model.word_indxs[values[k]] == '<eol>':
break
sen.append(model.word_indxs[values[k]])
sentences.append(" ".join(sen))
all_probs.append(-probs)
sprobs = numpy.argsort(all_probs)
for pidx in sprobs:
print pidx,"(%f):"%(-all_probs[pidx]),sentences[pidx]
print
except KeyboardInterrupt:
print 'Interrupted'
pass
if __name__=='__main__':
    # Default experiment configuration. Every hyper-parameter is collected in
    # a single `state` dict which is handed to jobman() below.
    state = {}
    # Paths to the source/target phrase tables and vocabularies.
    state['target'] = ["/data/lisatmp3/chokyun/mt/joint_paper_hs/phrase_table.fr.h5"]
    state['source'] = ["/data/lisatmp3/chokyun/mt/joint_paper_hs/phrase_table.en.h5"]
    # target only
    state['indx_word'] = "/data/lisatmp3/chokyun/mt/ivocab_source.pkl"
    state['indx_word_target'] = "/data/lisatmp3/chokyun/mt/ivocab_target.pkl"
    state['word_indx'] = "/data/lisatmp3/chokyun/mt/vocab.en.pkl"
    state['oov'] = 'UNK'
    # Output-layer conditioning options.
    state['bigram'] = True      # condition output on the previous word
    state['bias_code'] = True   # initialize decoder state from the context
    state['avg_word'] = True    # condition output on the averaged source words
    def maxout(x):
        """Maxout activation: take the max over groups of
        state['maxout_part'] consecutive units along the last axis."""
        shape = x.shape
        if x.ndim == 1:
            shape1 = TT.cast(shape[0] / state['maxout_part'], 'int64')
            shape2 = TT.cast(state['maxout_part'], 'int64')
            x = x.reshape([shape1, shape2])
            x = x.max(1)
        else:
            shape1 = TT.cast(shape[1] / state['maxout_part'], 'int64')
            shape2 = TT.cast(state['maxout_part'], 'int64')
            x = x.reshape([shape[0], shape1, shape2])
            x = x.max(2)
        return x
    # Model architecture.
    state['eps'] = 1e-10
    state['dim'] = 1000
    state['dim_mlp'] = state['dim']
    state['encoder_stack'] = 1
    state['decoder_stack'] = 1
    state['deep_out'] = True
    state['mult_out'] = False
    state['rank_n_approx'] = 100
    state['rank_n_activ'] = 'lambda x: x'
    # Recurrent-layer options (activations are given as eval()-able strings).
    state['rec_layer'] = 'RecurrentLayer'
    state['rec_gating'] = True
    state['rec_reseting'] = True
    state['rec_gater'] = 'lambda x: TT.nnet.sigmoid(x)'
    state['rec_reseter'] = 'lambda x: TT.nnet.sigmoid(x)'
    state['activ'] = 'lambda x: TT.tanh(x)'
    state['bias'] = 0.
    state['bias_mlp'] = 0.
    state['maxout_part'] = 2.
    state["unary_activ"] = 'maxout'
    # Weight initialization / regularization.
    state['rec_weight_init_fn'] = 'sample_weights_orth'
    state['weight_init_fn'] = 'sample_weights_classic'
    state['rec_weight_scale'] = 1.
    state['weight_scale'] = 0.01
    state['dropout'] = 1. # no dropout
    state['dropout_rec'] = 1. # no dropout
    state['weight_noise'] = False
    state['weight_noise_rec'] = False
    state['weight_noise_amount'] = 0.01
    # Optimization (gradient clipping + adadelta-style parameters).
    state['cutoff'] = 1.
    state['cutoff_rescale_length'] = 0.
    state['adarho'] = 0.95
    state['adaeps'] = 1e-6
    state['patience'] = 1
    state['lr'] = 1.
    state['minlr'] = 0
    state['bs'] = 64
    state['vbs'] = 64
    state['reset'] = -1
    state['seqlen'] = 30 # maximum sequence length
    state['randstart'] = False
    # Sampling-hook behaviour during training.
    state['sample_reset'] = False
    state['sample_n'] = 1
    state['sample_max'] = 3
    # Main-loop control.
    state['reload'] = False
    state['loopIters'] = 50000000
    state['timeStop'] = 24*60*7
    state['minerr'] = -1
    state['sampler_test'] = True
    state['seed'] = 1234
    state['trainFreq'] = 1
    state['hookFreq'] = 100
    state['validFreq'] = 500
    state['saveFreq'] = 60 #min
    state['profile'] = 0
    state['prefix'] = 'model_phrase_'
    state['overwrite'] = 1
    jobman(state, None)
#! /usr/bin/python
#--------------------------------#
# File name: lagrange.py
# Author: Giovanni Antonaccio
# Email: giovanniantonaccio@gmail.com
# Date created: 09/30/2017
# Date last modified: 09/30/2017
# Python Version: 2.7
#--------------------------------#
from sympy import *
import json
x = Symbol('x')

# Sensor response (interpolation nodes)
xi = [1, 2, 4, 7]
# Sensor ideal response (values at the nodes)
yi = [3, 6, 8, 12]
# Evaluation point; assign None to skip evaluation
point = 3

n = len(xi)
L = []

# Auxiliary (Lagrange basis) polynomial calculation:
#   L_j(x) = prod_{i != j} (x - x_i) / (x_j - x_i)
# The original code used four separate loops (before/after j for the
# numerator and denominator); a single loop skipping i == j computes the
# same products.
for j in range(n):
    num = 1
    den = 1
    for i in range(n):
        if i == j:
            continue  # the j-th node is excluded from both products
        num = num * (x - xi[i])
        den = den * (xi[j] - xi[i])
    num = expand(num)
    den = expand(den)
    L.append(num/den)
    print ("\nL%s:" %(j))
    pprint(L[j])

# Interpolating polynomial: p(x) = sum_i y_i * L_i(x)
pn = 0
for i in range(n):
    pn = pn + yi[i]*L[i]
print("\n\nLagrange's Polynomial:")
pprint(pn)

# Evaluate the expression at the given point, if one was supplied.
# (Fixed: compare against None with `is not`, not `!=`.)
if point is not None:
    print("\n\nValue at point %i: %f" %(point,pn.evalf(subs={x:point})))

# Coefficients in exact (fraction) format, highest degree first.
lagrange_final = Poly(pn, x)
coefficients = lagrange_final.coeffs()
print(coefficients)

# Convert the coefficients to native floats, keyed "c0", "c1", ...
# (Renamed the dict from `j` to avoid shadowing the loop index above.)
coeff_map = {}
for i, coeff in enumerate(coefficients):
    coeff_map["c" + str(i)] = float(coeff.evalf())
print(json.dumps(coeff_map, sort_keys=True, indent=4))
with open('data.json', 'w') as outfile:
    json.dump(coeff_map, outfile, sort_keys=True, indent=4)
bugfixes:
- "dnf - fix package installation when specifying architecture without version (e.g., ``libgcc.i686``) where a different architecture of the same package is already installed (https://github.com/ansible/ansible/issues/86156)." | unknown | github | https://github.com/ansible/ansible | changelogs/fragments/86156-dnf-multilib-arch.yml |
// Copyright 2020 The Cockroach Authors.
//
// Use of this software is governed by the CockroachDB Software License
// included in the /LICENSE file.
package sql
import (
"context"
"github.com/cockroachdb/cockroach/pkg/server/telemetry"
"github.com/cockroachdb/cockroach/pkg/sql/catalog"
"github.com/cockroachdb/cockroach/pkg/sql/catalog/descpb"
"github.com/cockroachdb/cockroach/pkg/sql/catalog/tabledesc"
"github.com/cockroachdb/cockroach/pkg/sql/pgwire/pgcode"
"github.com/cockroachdb/cockroach/pkg/sql/pgwire/pgerror"
"github.com/cockroachdb/cockroach/pkg/sql/privilege"
"github.com/cockroachdb/cockroach/pkg/sql/sem/tree"
"github.com/cockroachdb/cockroach/pkg/sql/sqltelemetry"
"github.com/cockroachdb/cockroach/pkg/util/log/eventpb"
)
// alterTableSetSchemaNode is the planNode for
// ALTER TABLE/VIEW/SEQUENCE ... SET SCHEMA.
type alterTableSetSchemaNode struct {
	zeroInputPlanNode
	// newSchema is the target schema name from the statement.
	newSchema string
	// prefix is the resolved database/schema prefix of the source object.
	prefix catalog.ResolvedObjectPrefix
	// tableDesc is the mutable descriptor of the object being moved.
	tableDesc *tabledesc.Mutable
	// n is the parsed AST node for the statement.
	n *tree.AlterTableSetSchema
}
// AlterTableSetSchema sets the schema for a table, view or sequence.
// Privileges: DROP on source table/view/sequence, CREATE on destination schema.
// It resolves and validates the target object and returns an
// alterTableSetSchemaNode that performs the change in startExec.
func (p *planner) AlterTableSetSchema(
	ctx context.Context, n *tree.AlterTableSetSchema,
) (planNode, error) {
	// Respect settings that can disable schema changes cluster-wide.
	if err := checkSchemaChangeEnabled(
		ctx,
		p.ExecCfg(),
		"ALTER TABLE/VIEW/SEQUENCE SET SCHEMA",
	); err != nil {
		return nil, err
	}
	tn := n.Name.ToTableName()
	// Constrain resolution to the descriptor kind named in the statement
	// (ALTER VIEW / ALTER SEQUENCE); otherwise accept any table kind.
	requiredTableKind := tree.ResolveAnyTableKind
	if n.IsView {
		requiredTableKind = tree.ResolveRequireViewDesc
	} else if n.IsSequence {
		requiredTableKind = tree.ResolveRequireSequenceDesc
	}
	prefix, tableDesc, err := p.ResolveMutableTableDescriptor(
		ctx, &tn, !n.IfExists, requiredTableKind)
	if err != nil {
		return nil, err
	}
	if tableDesc == nil {
		// Noop: the object does not exist and IF EXISTS was specified.
		return newZeroNode(nil /* columns */), nil
	}
	// ALTER MATERIALIZED VIEW must target a materialized view and vice versa.
	if err := checkViewMatchesMaterialized(tableDesc, n.IsView, n.IsMaterialized); err != nil {
		return nil, err
	}
	if tableDesc.Temporary {
		return nil, pgerror.Newf(pgcode.FeatureNotSupported,
			"cannot move objects into or out of temporary schemas")
	}
	// The user needs DROP privilege on the table to set the schema.
	err = p.CheckPrivilege(ctx, tableDesc, privilege.DROP)
	if err != nil {
		return nil, err
	}
	// Check if any objects depend on this table/view/sequence via its name.
	// If so, then we disallow renaming, otherwise we allow it.
	for _, dependent := range tableDesc.DependedOnBy {
		if !dependent.ByID {
			return nil, p.dependentError(
				ctx, string(tableDesc.DescriptorType()), tableDesc.Name,
				tableDesc.ParentID, dependent.ID, tableDesc.ID, "set schema on",
			)
		}
	}
	// Disallow schema changes if this table's schema is locked.
	if err := p.checkSchemaChangeIsAllowed(ctx, tableDesc, n); err != nil {
		return nil, err
	}
	return &alterTableSetSchemaNode{
		newSchema: string(n.Schema),
		prefix:    prefix,
		tableDesc: tableDesc,
		n:         n,
	}, nil
}
// startExec performs the schema change: it validates the destination schema,
// checks for name collisions there, rewrites the namespace entry, writes the
// descriptor change, and logs a SetSchema event.
func (n *alterTableSetSchemaNode) startExec(params runParams) error {
	telemetry.Inc(sqltelemetry.SchemaChangeAlterCounterWithExtra(
		tree.GetTableType(n.n.IsSequence, n.n.IsView, n.n.IsMaterialized),
		n.n.TelemetryName(),
	))
	ctx := params.ctx
	p := params.p
	tableDesc := n.tableDesc
	// Capture the current namespace key before mutating the descriptor so
	// the old entry can be removed below.
	oldNameKey := descpb.NameInfo{
		ParentID: tableDesc.GetParentID(),
		ParentSchemaID: tableDesc.GetParentSchemaID(),
		Name: tableDesc.GetName(),
	}
	kind := tree.GetTableType(tableDesc.IsSequence(), tableDesc.IsView(), tableDesc.GetIsMaterializedView())
	oldName := tree.MakeTableNameFromPrefix(n.prefix.NamePrefix(), tree.Name(n.tableDesc.GetName()))
	// Resolve the destination schema and check CREATE privilege on it.
	desiredSchemaID, err := p.prepareSetSchema(ctx, n.prefix.Database, tableDesc, n.newSchema)
	if err != nil {
		return err
	}
	// If the schema being changed to is the same as the current schema for the
	// table, do a no-op.
	if desiredSchemaID == oldNameKey.GetParentSchemaID() {
		return nil
	}
	// Refuse to move if an object of the same name already exists in the
	// destination schema.
	objectID, err := p.Descriptors().LookupObjectID(
		ctx, p.txn, tableDesc.GetParentID(), desiredSchemaID, tableDesc.GetName(),
	)
	if err == nil && objectID != descpb.InvalidID {
		return pgerror.Newf(pgcode.DuplicateRelation,
			"relation %s already exists in schema %s", tableDesc.GetName(), n.newSchema)
	} else if err != nil {
		return err
	}
	// Set the tableDesc's new schema id to the desired schema's id.
	tableDesc.SetParentSchemaID(desiredSchemaID)
	// Move the namespace entry and write the updated descriptor in the
	// same transaction.
	b := p.txn.NewBatch()
	if err := p.renameNamespaceEntry(ctx, b, oldNameKey, tableDesc); err != nil {
		return err
	}
	if err := p.writeSchemaChange(
		ctx, tableDesc, descpb.InvalidMutationID, tree.AsStringWithFQNames(n.n, params.Ann()),
	); err != nil {
		return err
	}
	if err := p.txn.Run(ctx, b); err != nil {
		return err
	}
	newName, err := p.getQualifiedTableName(ctx, tableDesc)
	if err != nil {
		return err
	}
	// Record the old and new fully-qualified names in the event log.
	return p.logEvent(ctx,
		desiredSchemaID,
		&eventpb.SetSchema{
			DescriptorName: oldName.FQString(),
			NewDescriptorName: newName.FQString(),
			DescriptorType: kind,
		},
	)
}
// ReadingOwnWrites implements the planNodeReadingOwnWrites interface.
// This is because SET SCHEMA performs multiple KV operations on descriptors
// and expects to see its own writes.
func (n *alterTableSetSchemaNode) ReadingOwnWrites() {}

// Next implements the planNode interface; this node produces no rows.
func (n *alterTableSetSchemaNode) Next(runParams) (bool, error) { return false, nil }

// Values implements the planNode interface; there are no output values.
func (n *alterTableSetSchemaNode) Values() tree.Datums { return tree.Datums{} }

// Close implements the planNode interface; there is nothing to release.
func (n *alterTableSetSchemaNode) Close(context.Context) {}
//// [tests/cases/compiler/aliasUsageInArray.ts] ////
//// [aliasUsageInArray_backbone.ts]
export class Model {
public someData: string;
}
//// [aliasUsageInArray_moduleA.ts]
import Backbone = require("./aliasUsageInArray_backbone");
export class VisualizationModel extends Backbone.Model {
// interesting stuff here
}
//// [aliasUsageInArray_main.ts]
import Backbone = require("./aliasUsageInArray_backbone");
import moduleA = require("./aliasUsageInArray_moduleA");
interface IHasVisualizationModel {
VisualizationModel: typeof Backbone.Model;
}
var xs: IHasVisualizationModel[] = [moduleA];
var xs2: typeof moduleA[] = [moduleA];
//// [aliasUsageInArray_backbone.js]
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.Model = void 0;
class Model {
}
exports.Model = Model;
//// [aliasUsageInArray_moduleA.js]
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.VisualizationModel = void 0;
const Backbone = require("./aliasUsageInArray_backbone");
class VisualizationModel extends Backbone.Model {
}
exports.VisualizationModel = VisualizationModel;
//// [aliasUsageInArray_main.js]
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const moduleA = require("./aliasUsageInArray_moduleA");
var xs = [moduleA];
var xs2 = [moduleA]; | javascript | github | https://github.com/microsoft/TypeScript | tests/baselines/reference/aliasUsageInArray.js |
# Create your views here.
from django.http import HttpResponse
from django.shortcuts import render_to_response
from django.template import RequestContext
from easymode.tree import xml as tree
from easymode.tree.xml.query import XmlQuerySetChain
from easymode.xslt.response import render_to_response as render_xslt_to_response
from foobar import models as foobar_models
def index(request):
    """Render the informative landing page."""
    context = RequestContext(request)
    return render_to_response('index.html', {}, context_instance=context)
def raw(request):
    """Return the untransformed hierarchical XML of all Foo objects."""
    queryset = foobar_models.Foo.objects.all()
    return HttpResponse(tree.xml(queryset), mimetype='text/xml')
def chain(request):
    """Serialize Bar and Baz querysets together with an XmlQuerySetChain.

    Demonstrates the alternative to decorating models with @toxml.
    """
    bar_qs = foobar_models.Bar.objects.all()
    baz_qs = foobar_models.Baz.objects.all()
    combined = XmlQuerySetChain(bar_qs, baz_qs)
    return HttpResponse(tree.xml(combined), mimetype='text/xml')
def xslt(request):
    """Render all Foo objects as XML transformed by a standard stylesheet."""
    queryset = foobar_models.Foo.objects.all()
    return render_xslt_to_response(
        'xslt/model-to-xml.xsl', queryset, mimetype='text/xml')
def frontend(request, format='html'):
    """Render the frontend page; delegate to xslt() when XML is requested."""
    if format == 'xml':
        return xslt(request)
    return render_xslt_to_response(
        'frontend.xsl', foobar_models.Foo.objects.all())
#!/usr/bin/env python2
# vim: set fileencoding=utf8
import os
import sys
import requests
import urlparse
import re
import argparse
import random
import select
############################################################
# wget exit status -> human-readable description.
# os.system() returns the raw wait status, so a wget exit code N appears
# here as N << 8; the raw value 2 means the child was killed by SIGINT
# (user pressed Ctrl-C).
wget_es = {
    0: "No problems occurred.",
    2: "User interference.",
    1 << 8: "Generic error code.",
    # Fixed garbled text: matches the wget manual's description of exit
    # code 2 ("parsing command-line options, the .wgetrc or .netrc...").
    2 << 8: "Parse error - for instance, when parsing command-line "
            "options, the .wgetrc or .netrc...",
    3 << 8: "File I/O error.",
    4 << 8: "Network failure.",
    5 << 8: "SSL verification failure.",
    6 << 8: "Username/password authentication failure.",
    7 << 8: "Protocol errors.",
    8 << 8: "Server issued an error response."
}
############################################################
############################################################
# ANSI terminal color template: s % (style, color, text) wraps *text*
# in the given SGR style/color codes and resets attributes afterwards.
s = '\x1b[%d;%dm%s\x1b[0m'

# Impersonate a desktop Chrome browser; script-like user agents may get
# blocked or served different content.
headers = {
    "Accept":"text/html,application/xhtml+xml,application/xml; " \
        "q=0.9,image/webp,*/*;q=0.8",
    "Accept-Encoding":"text/html",
    "Accept-Language":"en-US,en;q=0.8,zh-CN;q=0.6,zh;q=0.4,zh-TW;q=0.2",
    "Content-Type":"application/x-www-form-urlencoded",
    "User-Agent":"Mozilla/5.0 (X11; Linux i686) AppleWebKit/537.36 " \
        "(KHTML, like Gecko) Chrome/32.0.1700.77 Safari/537.36"
}

# One shared session so cookies collected while scraping the page are
# reused for the API and download requests.
ss = requests.session()
ss.headers.update(headers)
class nrop19(object):
    """Scraper/downloader for a single 91porn video page.

    Extracts the flash-player variables embedded in the page, resolves
    the direct mp4 link via the site's getfile.php endpoint, then either
    downloads the file (wget or aria2c) or streams it with mpv,
    depending on the command-line flags.
    """

    def __init__(self, url=None):
        self.url = url
        # -p/--play rebinds download() so that get_infos() transparently
        # streams instead of saving to disk.
        self.download = self.play if args.play else self.download

    def get_infos(self):
        """Fetch the page, resolve the direct mp4 link and dispatch.

        Side effects: prints status/errors; downloads or plays the file,
        or (with -u/--get_url) just prints the resolved link.
        """
        r = ss.get(self.url)
        if r.ok:
            # The page embeds player parameters as so.addVariable(...)
            # calls; all three are required to query getfile.php.
            n1 = re.search(r'so.addVariable\(\'file\',\'(\d+)\'', r.content)
            n2 = re.search(r'so.addVariable\(\'seccode\',\'(.+?)\'', r.content)
            n3 = re.search(r'so.addVariable\(\'max_vid\',\'(\d+)\'', r.content)
            if n1 and n2 and n3:
                apiurl = 'http://%s/getfile.php' \
                    % urlparse.urlparse(self.url).hostname
                params = {
                    'VID': n1.group(1),
                    'mp4': '1',
                    'seccode': n2.group(1),
                    'max_vid': n3.group(1),
                }
                #tapiurl = apiurl + '?' + \
                #'&'.join(['='.join(item) for item in params.items()])
                #print tapiurl
                r = requests.get(apiurl, params=params)
                if r.ok:
                    # getfile.php responds with a querystring of the
                    # form file=<direct mp4 url>&...
                    dlink = re.search(
                        r'file=(http.+?)&', r.content).group(1)
                    # The viewkey from the page URL doubles as the
                    # local file name.
                    name = re.search(
                        r'viewkey=([\d\w]+)', self.url).group(1)
                    infos = {
                        'name': '%s.mp4' % name,
                        'file': os.path.join(os.getcwd(), '%s.mp4' % name),
                        'dir_': os.getcwd(),
                        'dlink': dlink
                    }
                    if not args.get_url:
                        self.download(infos)
                    else:
                        print dlink
                else:
                    print s % (1, 91, ' Error at get(apiurl)')
            else:
                # Page loaded but the player variables are missing --
                # presumably the site served a block/captcha page.
                print s % (1, 91, ' You are blocked')

    def download(self, infos):
        """Download the resolved link with wget (default) or aria2c (-a).

        Saves to <name>.mp4.tmp first and renames on success; exits the
        process with status 1 when the downloader reports an error.
        """
        # Pick a random terminal color for the status line.
        num = random.randint(0, 7) % 7
        col = s % (2, num + 90, infos['file'])
        print '\n ++ 正在下载: %s' % col
        # Forward the session cookies so the file host accepts the request.
        cookies = '; '.join(
            ['%s=%s' % (i, ii) for i, ii in ss.cookies.items()])
        if args.aria2c:
            cmd = 'aria2c -c -x10 -s10 ' \
                '-o "%s.tmp" -d "%s" --header "User-Agent: %s" ' \
                '--header "Cookie: %s" "%s"' \
                % (infos['name'], infos['dir_'], \
                headers['User-Agent'], cookies, infos['dlink'])
        else:
            cmd = 'wget -c -O "%s.tmp" --header "User-Agent: %s" ' \
                '--header "Cookie: %s" "%s"' \
                % (infos['file'], headers['User-Agent'], cookies, infos['dlink'])
        # NOTE(review): cmd interpolates scraped values into a shell
        # command -- a hostile dlink could inject commands; subprocess
        # with an argument list would be safer.
        status = os.system(cmd)
        if status != 0: # other http-errors, such as 302.
            # os.system() returns the raw wait status; wget_es maps it
            # to a human-readable wget failure description.
            wget_exit_status_info = wget_es[status]
            print('\n\n ----### \x1b[1;91mERROR\x1b[0m ==> '\
                '\x1b[1;91m%d (%s)\x1b[0m ###--- \n\n' \
                % (status, wget_exit_status_info))
            print s % (1, 91, ' ===> '), cmd
            sys.exit(1)
        else:
            os.rename('%s.tmp' % infos['file'], infos['file'])

    def play(self, infos):
        """Stream the resolved link with mpv instead of downloading.

        After playback, waits up to 1 second for input on stdin; any
        keypress exits the program (lets a user break out of batch runs).
        """
        num = random.randint(0, 7) % 7
        col = s % (2, num + 90, infos['name'])
        print '\n ++ play: %s' % col
        cmd = 'mpv --really-quiet --cache 8140 --cache-default 8140 ' \
            '--http-header-fields "user-agent:%s" "%s"' \
            % (headers['User-Agent'], infos['dlink'])
        os.system(cmd)
        timeout = 1
        ii, _, _ = select.select([sys.stdin], [], [], timeout)
        if ii:
            sys.exit(0)
        else:
            pass

    def do(self):
        """Entry point: resolve the link, then download or play it."""
        self.get_infos()
def main(url):
    """Configure the shared session's proxies (if any) and run the fetch."""
    proxy = args.proxy
    if proxy:
        ss.proxies = {'http': proxy, 'https': proxy}
    nrop19(url).do()
if __name__ == '__main__':
    p = argparse.ArgumentParser(
        description='download from 91porn.com')
    p.add_argument('url', help='url of 91porn.com')
    p.add_argument('-a', '--aria2c', action='store_true',
                   help='download with aria2c')
    p.add_argument('-p', '--play', action='store_true',
                   help='play with mpv')
    p.add_argument('-u', '--get_url', action='store_true',
                   help='print download_url without download')
    # Fixed: the help text was a copy-paste of -u's help; this flag
    # actually sets an HTTP(S) proxy for all requests.
    p.add_argument('--proxy', action='store', type=str, default=None,
                   help='use a http proxy, e.g. http://127.0.0.1:8087')
    args = p.parse_args()
    main(args.url)
from airflow.hooks.base_hook import BaseHook
from airflow import configuration
try:
snakebite_imported = True
from snakebite.client import Client, HAClient, Namenode
except ImportError:
snakebite_imported = False
from airflow.utils import AirflowException
class HDFSHookException(AirflowException):
    """Raised when the configured HDFS conn_id resolves to no connections."""
    pass
class HDFSHook(BaseHook):
    """Interact with HDFS; a thin wrapper around the snakebite library."""

    def __init__(self, hdfs_conn_id='hdfs_default'):
        # Fail fast when snakebite could not be imported (e.g. Python 3).
        if not snakebite_imported:
            raise ImportError(
                'This HDFSHook implementation requires snakebite, but '
                'snakebite is not compatible with Python 3 '
                '(as of August 2015). Please use Python 2 if you require '
                'this hook -- or help by submitting a PR!')
        self.hdfs_conn_id = hdfs_conn_id

    def get_conn(self):
        """Build and return a snakebite HDFS client.

        Uses a plain Client for a single configured connection and an
        HAClient when several namenodes are configured; raises
        HDFSHookException when the conn_id has no connections.
        """
        # SASL is required when the deployment is secured with Kerberos.
        use_sasl = configuration.get('core', 'security') == 'kerberos'
        connections = self.get_connections(self.hdfs_conn_id)
        if not connections:
            raise HDFSHookException("conn_id doesn't exist in the repository")
        if len(connections) == 1:
            single = connections[0]
            return Client(single.host, single.port, use_sasl=use_sasl)
        namenodes = [Namenode(c.host, c.port) for c in connections]
        return HAClient(namenodes, use_sasl=use_sasl)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.